Mutate the global heap value number for any HW intrinsic that performs a memory store...
[platform/upstream/coreclr.git] / src / jit / gentree.cpp
1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
4
5 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
7 XX                                                                           XX
8 XX                               GenTree                                     XX
9 XX                                                                           XX
10 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
11 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
12 */
13
14 #include "jitpch.h"
15 #include "simd.h"
16
17 #ifdef _MSC_VER
18 #pragma hdrstop
19 #endif
20
21 /*****************************************************************************/
22
// Per-oper kind masks, indexed by genTreeOps. Each GTNODE entry contributes
// its kind bits 'ok', with GTK_COMMUTE folded in (via 'ok + GTK_COMMUTE * cm')
// when the oper is marked commutative ('cm' is 0 or 1).
const unsigned short GenTree::gtOperKindTable[] = {
#define GTNODE(en, st, cm, ok) ok + GTK_COMMUTE *cm,
#include "gtlist.h"
};
27
#ifdef LEGACY_BACKEND
/*****************************************************************************/
// static
//------------------------------------------------------------------------
// OpAsgToOper: Map an op-assign oper (GT_ASG_x / GT_CHS) to the plain
//    operator that performs the same arithmetic.
//
// Arguments:
//    op - an op-assign oper; must satisfy OperIsAssignment() and must not
//         be the plain GT_ASG
//
// Return Value:
//    The corresponding non-assigning oper (e.g. GT_ASG_ADD -> GT_ADD).
//
genTreeOps GenTree::OpAsgToOper(genTreeOps op)
{
    // Precondition: an op-assign form, never the simple assignment.
    assert(OperIsAssignment(op) && op != GT_ASG);

    // clang-format off
    switch (op)
    {
        // Arithmetic
        case GT_ASG_ADD:  return GT_ADD;
        case GT_ASG_SUB:  return GT_SUB;
        case GT_ASG_MUL:  return GT_MUL;
        case GT_ASG_DIV:  return GT_DIV;
        case GT_ASG_MOD:  return GT_MOD;
        case GT_ASG_UDIV: return GT_UDIV;
        case GT_ASG_UMOD: return GT_UMOD;

        // Bitwise / shifts
        case GT_ASG_OR:   return GT_OR;
        case GT_ASG_XOR:  return GT_XOR;
        case GT_ASG_AND:  return GT_AND;
        case GT_ASG_LSH:  return GT_LSH;
        case GT_ASG_RSH:  return GT_RSH;
        case GT_ASG_RSZ:  return GT_RSZ;

        // Unary in-place negate
        case GT_CHS:      return GT_NEG;

        default:
            unreached(); // Precondition implies we don't get here.
    }
    // clang-format on
}
#endif // LEGACY_BACKEND
74
75 /*****************************************************************************
76  *
77  *  The types of different GenTree nodes
78  */
79
80 #ifdef DEBUG
81
82 #define INDENT_SIZE 3
83
84 //--------------------------------------------
85 //
86 // IndentStack: This struct is used, along with its related enums and strings,
87 //    to control both the indendtation and the printing of arcs.
88 //
89 // Notes:
90 //    The mode of printing is set in the Constructor, using its 'compiler' argument.
91 //    Currently it only prints arcs when fgOrder == fgOrderLinear.
92 //    The type of arc to print is specified by the IndentInfo enum, and is controlled
93 //    by the caller of the Push() method.
94
// Column indices into the indent-string tables below; the enumerator order
// must stay in sync with the column order of those tables.
enum IndentChars
{
    ICVertical,
    ICBottom,
    ICTop,
    ICMiddle,
    ICDash,
    ICEmbedded,
    ICTerminal,
    ICError,
    IndentCharCount // number of columns; used as the array extent
};
107
// Glyph tables for the tree dumper, one string per IndentChars column.
// clang-format off
// Sets of strings for different dumping options            vert             bot             top             mid             dash       embedded    terminal    error
static const char*  emptyIndents[IndentCharCount]   = {     " ",             " ",            " ",            " ",            " ",           "{",      "",        "?"  };
static const char*  asciiIndents[IndentCharCount]   = {     "|",            "\\",            "/",            "+",            "-",           "{",      "*",       "?"  };
static const char*  unicodeIndents[IndentCharCount] = { "\xe2\x94\x82", "\xe2\x94\x94", "\xe2\x94\x8c", "\xe2\x94\x9c", "\xe2\x94\x80",     "{", "\xe2\x96\x8c", "?"  };
// clang-format on
114
115 typedef ArrayStack<Compiler::IndentInfo> IndentInfoStack;
116 struct IndentStack
117 {
118     IndentInfoStack stack;
119     const char**    indents;
120
121     // Constructor for IndentStack.  Uses 'compiler' to determine the mode of printing.
122     IndentStack(Compiler* compiler) : stack(compiler)
123     {
124         if (compiler->asciiTrees)
125         {
126             indents = asciiIndents;
127         }
128         else
129         {
130             indents = unicodeIndents;
131         }
132     }
133
134     // Return the depth of the current indentation.
135     unsigned Depth()
136     {
137         return stack.Height();
138     }
139
140     // Push a new indentation onto the stack, of the given type.
141     void Push(Compiler::IndentInfo info)
142     {
143         stack.Push(info);
144     }
145
146     // Pop the most recent indentation type off the stack.
147     Compiler::IndentInfo Pop()
148     {
149         return stack.Pop();
150     }
151
152     // Print the current indentation and arcs.
153     void print()
154     {
155         unsigned indentCount = Depth();
156         for (unsigned i = 0; i < indentCount; i++)
157         {
158             unsigned index = indentCount - 1 - i;
159             switch (stack.Index(index))
160             {
161                 case Compiler::IndentInfo::IINone:
162                     printf("   ");
163                     break;
164                 case Compiler::IndentInfo::IIEmbedded:
165                     printf("%s  ", indents[ICEmbedded]);
166                     break;
167                 case Compiler::IndentInfo::IIArc:
168                     if (index == 0)
169                     {
170                         printf("%s%s%s", indents[ICMiddle], indents[ICDash], indents[ICDash]);
171                     }
172                     else
173                     {
174                         printf("%s  ", indents[ICVertical]);
175                     }
176                     break;
177                 case Compiler::IndentInfo::IIArcBottom:
178                     printf("%s%s%s", indents[ICBottom], indents[ICDash], indents[ICDash]);
179                     break;
180                 case Compiler::IndentInfo::IIArcTop:
181                     printf("%s%s%s", indents[ICTop], indents[ICDash], indents[ICDash]);
182                     break;
183                 case Compiler::IndentInfo::IIError:
184                     printf("%s%s%s", indents[ICError], indents[ICDash], indents[ICDash]);
185                     break;
186                 default:
187                     unreached();
188             }
189         }
190         printf("%s", indents[ICTerminal]);
191     }
192 };
193
194 //------------------------------------------------------------------------
195 // printIndent: This is a static method which simply invokes the 'print'
196 //    method on its 'indentStack' argument.
197 //
198 // Arguments:
199 //    indentStack - specifies the information for the indentation & arcs to be printed
200 //
201 // Notes:
202 //    This method exists to localize the checking for the case where indentStack is null.
203
204 static void printIndent(IndentStack* indentStack)
205 {
206     if (indentStack == nullptr)
207     {
208         return;
209     }
210     indentStack->print();
211 }
212
213 #endif
214
215 #if defined(DEBUG) || NODEBASH_STATS || MEASURE_NODE_SIZE || COUNT_AST_OPERS
216
// Textual name of each oper (the stringized GTNODE identifier), indexed by
// genTreeOps; consumed by GenTree::OpName below.
static const char* opNames[] = {
#define GTNODE(en, st, cm, ok) #en,
#include "gtlist.h"
};
221
222 const char* GenTree::OpName(genTreeOps op)
223 {
224     assert((unsigned)op < _countof(opNames));
225
226     return opNames[op];
227 }
228
229 #endif
230
231 #if MEASURE_NODE_SIZE && SMALL_TREE_NODES
232
// Name of the node struct ('st' in GTNODE) declared for each oper, indexed by
// genTreeOps; consumed by GenTree::OpStructName below.
static const char* opStructNames[] = {
#define GTNODE(en, st, cm, ok) #st,
#include "gtlist.h"
};
237
238 const char* GenTree::OpStructName(genTreeOps op)
239 {
240     assert((unsigned)op < _countof(opStructNames));
241
242     return opStructNames[op];
243 }
244
245 #endif
246
247 /*****************************************************************************
248  *
249  *  When 'SMALL_TREE_NODES' is enabled, we allocate tree nodes in 2 different
250  *  sizes: 'TREE_NODE_SZ_SMALL' for most nodes and 'TREE_NODE_SZ_LARGE' for the
251  *  few nodes (such as calls) that have more fields and take up a lot more space.
252  */
253
254 #if SMALL_TREE_NODES
255
/* GT_COUNT'th oper is overloaded as 'undefined oper', so allocate storage for GT_COUNT'th oper also */
/* static */
// Allocation class (TREE_NODE_SZ_SMALL or TREE_NODE_SZ_LARGE) for each oper;
// populated by InitNodeSize().
unsigned char GenTree::s_gtNodeSizes[GT_COUNT + 1];
259
260 #if NODEBASH_STATS || MEASURE_NODE_SIZE || COUNT_AST_OPERS
261
// Actual sizeof() of the node struct declared for each oper; used by the
// node-bashing / node-size reporting code. Extent matches s_gtNodeSizes
// (including the extra GT_COUNT slot).
unsigned char GenTree::s_gtTrueSizes[GT_COUNT + 1]{
#define GTNODE(en, st, cm, ok) sizeof(st),
#include "gtlist.h"
};
266
267 #endif // NODEBASH_STATS || MEASURE_NODE_SIZE || COUNT_AST_OPERS
268
269 #if COUNT_AST_OPERS
// Per-oper node counters for COUNT_AST_OPERS builds (presumably bumped at
// node-allocation sites elsewhere in the JIT — not visible in this file).
LONG GenTree::s_gtNodeCounts[GT_COUNT + 1] = {0};
271 #endif // COUNT_AST_OPERS
272
/* static */
//------------------------------------------------------------------------
// InitNodeSize: populate s_gtNodeSizes with the allocation class (small or
//    large) for every oper, and statically validate that each GenTree
//    subtype fits within its declared allocation class.
//
void GenTree::InitNodeSize()
{
    /* 'GT_LCL_VAR' often gets changed to 'GT_REG_VAR' */

    assert(GenTree::s_gtNodeSizes[GT_LCL_VAR] >= GenTree::s_gtNodeSizes[GT_REG_VAR]);

    /* Set all sizes to 'small' first */

    for (unsigned op = 0; op <= GT_COUNT; op++)
    {
        GenTree::s_gtNodeSizes[op] = TREE_NODE_SZ_SMALL;
    }

    // Now set all of the appropriate entries to 'large'
    CLANG_FORMAT_COMMENT_ANCHOR;

// clang-format off
#if defined(FEATURE_HFA) || defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
    // On ARM32, ARM64 and System V for struct returning
    // there is code that does GT_ASG-tree.CopyObj call.
    // CopyObj is a large node and the GT_ASG is small, which triggers an exception.
    GenTree::s_gtNodeSizes[GT_ASG]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_RETURN]           = TREE_NODE_SZ_LARGE;
#endif // defined(FEATURE_HFA) || defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)

    GenTree::s_gtNodeSizes[GT_CALL]             = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_CAST]             = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_FTN_ADDR]         = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_BOX]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_INDEX]            = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_INDEX_ADDR]        = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_ARR_BOUNDS_CHECK] = TREE_NODE_SZ_LARGE;
#ifdef FEATURE_SIMD
    GenTree::s_gtNodeSizes[GT_SIMD_CHK] = TREE_NODE_SZ_LARGE;
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
    GenTree::s_gtNodeSizes[GT_HW_INTRINSIC_CHK] = TREE_NODE_SZ_LARGE;
#endif // FEATURE_HW_INTRINSICS

    GenTree::s_gtNodeSizes[GT_ARR_ELEM]         = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_ARR_INDEX]        = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_ARR_OFFSET]       = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_RET_EXPR]         = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_OBJ]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_FIELD]            = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_STMT]             = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_CMPXCHG]          = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_QMARK]            = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_LEA]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_STORE_OBJ]        = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_DYN_BLK]          = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_STORE_DYN_BLK]    = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_INTRINSIC]        = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_ALLOCOBJ]         = TREE_NODE_SZ_LARGE;
#if USE_HELPERS_FOR_INT_DIV
    GenTree::s_gtNodeSizes[GT_DIV]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_UDIV]             = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_MOD]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_UMOD]             = TREE_NODE_SZ_LARGE;
#endif
#ifdef FEATURE_PUT_STRUCT_ARG_STK
    // TODO-Throughput: This should not need to be a large node. The object info should be
    // obtained from the child node.
    GenTree::s_gtNodeSizes[GT_PUTARG_STK]       = TREE_NODE_SZ_LARGE;
#if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
    GenTree::s_gtNodeSizes[GT_PUTARG_SPLIT]     = TREE_NODE_SZ_LARGE;
#endif
#endif // FEATURE_PUT_STRUCT_ARG_STK

    assert(GenTree::s_gtNodeSizes[GT_RETURN] == GenTree::s_gtNodeSizes[GT_ASG]);

    // This list of assertions should come to contain all GenTree subtypes that are declared
    // "small".
    assert(sizeof(GenTreeLclFld) <= GenTree::s_gtNodeSizes[GT_LCL_FLD]);
    assert(sizeof(GenTreeLclVar) <= GenTree::s_gtNodeSizes[GT_LCL_VAR]);

    static_assert_no_msg(sizeof(GenTree)             <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeUnOp)         <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeOp)           <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeVal)          <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeIntConCommon) <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreePhysReg)      <= TREE_NODE_SZ_SMALL);
#ifndef LEGACY_BACKEND
    static_assert_no_msg(sizeof(GenTreeJumpTable)    <= TREE_NODE_SZ_SMALL);
#endif // !LEGACY_BACKEND
    static_assert_no_msg(sizeof(GenTreeIntCon)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeLngCon)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeDblCon)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeStrCon)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeLclVarCommon) <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeLclVar)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeLclFld)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeRegVar)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeCC)           <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeCast)         <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeBox)          <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeField)        <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeArgList)      <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeFieldList)    <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeColon)        <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeCall)         <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeCmpXchg)      <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeFptrVal)      <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeQmark)        <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeIntrinsic)    <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeIndex)        <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeArrLen)       <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeBoundsChk)    <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeArrElem)      <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeArrIndex)     <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeArrOffs)      <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeIndir)        <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeStoreInd)     <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeAddrMode)     <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeObj)          <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeBlk)          <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeRetExpr)      <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeStmt)         <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeClsVar)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeArgPlace)     <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeLabel)        <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreePhiArg)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeAllocObj)     <= TREE_NODE_SZ_LARGE); // *** large node
#ifndef FEATURE_PUT_STRUCT_ARG_STK
    static_assert_no_msg(sizeof(GenTreePutArgStk)    <= TREE_NODE_SZ_SMALL);
#else  // FEATURE_PUT_STRUCT_ARG_STK
    // TODO-Throughput: This should not need to be a large node. The object info should be
    // obtained from the child node.
    static_assert_no_msg(sizeof(GenTreePutArgStk)    <= TREE_NODE_SZ_LARGE);
#if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
    static_assert_no_msg(sizeof(GenTreePutArgSplit)  <= TREE_NODE_SZ_LARGE);
#endif
#endif // FEATURE_PUT_STRUCT_ARG_STK

#ifdef FEATURE_SIMD
    static_assert_no_msg(sizeof(GenTreeSIMD)         <= TREE_NODE_SZ_SMALL);
#endif // FEATURE_SIMD

#ifdef FEATURE_HW_INTRINSICS
    static_assert_no_msg(sizeof(GenTreeHWIntrinsic)  <= TREE_NODE_SZ_SMALL);
#endif // FEATURE_HW_INTRINSICS
    // clang-format on
}
417
418 size_t GenTree::GetNodeSize() const
419 {
420     return GenTree::s_gtNodeSizes[gtOper];
421 }
422
423 #ifdef DEBUG
424 bool GenTree::IsNodeProperlySized() const
425 {
426     size_t size;
427
428     if (gtDebugFlags & GTF_DEBUG_NODE_SMALL)
429     {
430         size = TREE_NODE_SZ_SMALL;
431     }
432     else
433     {
434         assert(gtDebugFlags & GTF_DEBUG_NODE_LARGE);
435         size = TREE_NODE_SZ_LARGE;
436     }
437
438     return GenTree::s_gtNodeSizes[gtOper] <= size;
439 }
440 #endif
441
#if SMALL_TREE_NODES
//------------------------------------------------------------------------
// ReplaceWith: replace this with the src node. The source must be an isolated node
//              and cannot be used after the replacement.
//
// Arguments:
//    src  - source tree, that replaces this.
//    comp - the compiler instance to transfer annotations for arrays.
//
// Notes:
//    Performs a raw byte copy of 'src' over 'this' (vtable pointer included),
//    then restores this node's linear-order links.
//
void GenTree::ReplaceWith(GenTree* src, Compiler* comp)
{
    // The source may be big only if the target is also a big node
    assert((gtDebugFlags & GTF_DEBUG_NODE_LARGE) || GenTree::s_gtNodeSizes[src->gtOper] == TREE_NODE_SZ_SMALL);

    // The check is effective only if nodes have been already threaded.
    assert((src->gtPrev == nullptr) && (src->gtNext == nullptr));

    RecordOperBashing(OperGet(), src->OperGet()); // nop unless NODEBASH_STATS is enabled

    // Preserve this node's position in the linear order; everything else is
    // overwritten by the copy below.
    GenTree* prev = gtPrev;
    GenTree* next = gtNext;
    // The VTable pointer is copied intentionally here
    memcpy((void*)this, (void*)src, src->GetNodeSize());
    this->gtPrev = prev;
    this->gtNext = next;

#ifdef DEBUG
    gtSeqNum = 0;
#endif
    // Transfer any annotations.
    if (src->OperGet() == GT_IND && src->gtFlags & GTF_IND_ARR_INDEX)
    {
        // The array-info map is keyed by node pointer, so the entry recorded
        // for 'src' must be re-recorded under 'this'.
        ArrayInfo arrInfo;
        bool      b = comp->GetArrayInfoMap()->Lookup(src, &arrInfo);
        assert(b);
        comp->GetArrayInfoMap()->Set(this, arrInfo);
    }
    DEBUG_DESTROY_NODE(src);
}

#endif
483
484 /*****************************************************************************
485  *
486  *  When 'NODEBASH_STATS' is enabled in "jit.h" we record all instances of
487  *  an existing GenTree node having its operator changed. This can be useful
488  *  for two (related) things - to see what is being bashed (and what isn't),
489  *  and to verify that the existing choices for what nodes are marked 'large'
490  *  are reasonable (to minimize "wasted" space).
491  *
492  *  And yes, the hash function / logic is simplistic, but it is conflict-free
493  *  and transparent for what we need.
494  */
495
496 #if NODEBASH_STATS
497
#define BASH_HASH_SIZE 211

// Map an (old oper, new oper) pair to a bucket index in [0, BASH_HASH_SIZE).
// Note: the result is already reduced mod the table size.
inline unsigned hashme(genTreeOps op1, genTreeOps op2)
{
    return ((op1 * 104729) ^ (op2 * 56569)) % BASH_HASH_SIZE;
}
504
// One bucket of oper-bashing statistics.
struct BashHashDsc
{
    unsigned __int32 bhFullHash; // hash key recorded for this bucket; used to detect bucket collisions
    unsigned __int32 bhCount;    // the same old->new bashings seen so far
    unsigned __int8  bhOperOld;  // original gtOper
    unsigned __int8  bhOperNew;  // new      gtOper
};

// The stats table: one bucket per hash value, zero-initialized at startup.
static BashHashDsc BashHash[BASH_HASH_SIZE];
514
515 void GenTree::RecordOperBashing(genTreeOps operOld, genTreeOps operNew)
516 {
517     unsigned     hash = hashme(operOld, operNew);
518     BashHashDsc* desc = BashHash + hash;
519
520     if (desc->bhFullHash != hash)
521     {
522         noway_assert(desc->bhCount == 0); // if this ever fires, need fix the hash fn
523         desc->bhFullHash = hash;
524     }
525
526     desc->bhCount += 1;
527     desc->bhOperOld = operOld;
528     desc->bhOperNew = operNew;
529 }
530
531 void GenTree::ReportOperBashing(FILE* f)
532 {
533     unsigned total = 0;
534
535     fflush(f);
536
537     fprintf(f, "\n");
538     fprintf(f, "Bashed gtOper stats:\n");
539     fprintf(f, "\n");
540     fprintf(f, "    Old operator        New operator     #bytes old->new      Count\n");
541     fprintf(f, "    ---------------------------------------------------------------\n");
542
543     for (unsigned h = 0; h < BASH_HASH_SIZE; h++)
544     {
545         unsigned count = BashHash[h].bhCount;
546         if (count == 0)
547             continue;
548
549         unsigned opOld = BashHash[h].bhOperOld;
550         unsigned opNew = BashHash[h].bhOperNew;
551
552         fprintf(f, "    GT_%-13s -> GT_%-13s [size: %3u->%3u] %c %7u\n", OpName((genTreeOps)opOld),
553                 OpName((genTreeOps)opNew), s_gtTrueSizes[opOld], s_gtTrueSizes[opNew],
554                 (s_gtTrueSizes[opOld] < s_gtTrueSizes[opNew]) ? 'X' : ' ', count);
555         total += count;
556     }
557     fprintf(f, "\n");
558     fprintf(f, "Total bashings: %u\n", total);
559     fprintf(f, "\n");
560
561     fflush(f);
562 }
563
564 #endif // NODEBASH_STATS
565
566 #else // SMALL_TREE_NODES
567
#ifdef DEBUG
// Without SMALL_TREE_NODES every node gets the single allocation size, so
// any node is trivially properly sized.
bool GenTree::IsNodeProperlySized() const
{
    return true;
}
#endif
574
575 #endif // SMALL_TREE_NODES
576
577 /*****************************************************************************/
578
579 #if MEASURE_NODE_SIZE
580
//------------------------------------------------------------------------
// DumpNodeSizes: dump the declared struct size of every GenTree flavor to
//    'fp', flagging any oper whose allocation is smaller than its struct.
//
void GenTree::DumpNodeSizes(FILE* fp)
{
// Dump the sizes of the various GenTree flavors

#if SMALL_TREE_NODES
    fprintf(fp, "Small tree node size = %3u bytes\n", TREE_NODE_SZ_SMALL);
#endif
    fprintf(fp, "Large tree node size = %3u bytes\n", TREE_NODE_SZ_LARGE);
    fprintf(fp, "\n");

#if SMALL_TREE_NODES

    // Verify that node sizes are set kosherly and dump sizes
    for (unsigned op = GT_NONE + 1; op < GT_COUNT; op++)
    {
        unsigned needSize = s_gtTrueSizes[op]; // actual sizeof() of the node struct
        unsigned nodeSize = s_gtNodeSizes[op]; // bytes allocated for this oper

        const char* structNm = OpStructName((genTreeOps)op);
        const char* operName = OpName((genTreeOps)op);

        bool repeated = false;

        // Have we seen this struct flavor before?
        for (unsigned mop = GT_NONE + 1; mop < op; mop++)
        {
            if (strcmp(structNm, OpStructName((genTreeOps)mop)) == 0)
            {
                repeated = true;
                break;
            }
        }

        // Don't repeat the same GenTree flavor unless we have an error
        if (!repeated || needSize > nodeSize)
        {
            // 'S'mall, 'L'arge, or '?' if the allocation matches neither class.
            unsigned sizeChar = '?';

            if (nodeSize == TREE_NODE_SZ_SMALL)
                sizeChar = 'S';
            else if (nodeSize == TREE_NODE_SZ_LARGE)
                sizeChar = 'L';

            fprintf(fp, "GT_%-16s ... %-19s = %3u bytes (%c)", operName, structNm, needSize, sizeChar);
            if (needSize > nodeSize)
            {
                fprintf(fp, " -- ERROR -- allocation is only %u bytes!", nodeSize);
            }
            else if (needSize <= TREE_NODE_SZ_SMALL && nodeSize == TREE_NODE_SZ_LARGE)
            {
                fprintf(fp, " ... could be small");
            }

            fprintf(fp, "\n");
        }
    }

#endif
}
640
641 #endif // MEASURE_NODE_SIZE
642
643 /*****************************************************************************
644  *
645  *  Walk all basic blocks and call the given function pointer for all tree
646  *  nodes contained therein.
647  */
648
649 void Compiler::fgWalkAllTreesPre(fgWalkPreFn* visitor, void* pCallBackData)
650 {
651     BasicBlock* block;
652
653     for (block = fgFirstBB; block; block = block->bbNext)
654     {
655         GenTree* tree;
656
657         for (tree = block->bbTreeList; tree; tree = tree->gtNext)
658         {
659             assert(tree->gtOper == GT_STMT);
660
661             fgWalkTreePre(&tree->gtStmt.gtStmtExpr, visitor, pCallBackData);
662         }
663     }
664 }
665
666 //-----------------------------------------------------------
667 // CopyReg: Copy the _gtRegNum/_gtRegPair/gtRegTag fields.
668 //
669 // Arguments:
670 //     from   -  GenTree node from which to copy
671 //
672 // Return Value:
673 //     None
674 void GenTree::CopyReg(GenTree* from)
675 {
676     // To do the copy, use _gtRegPair, which must be bigger than _gtRegNum. Note that the values
677     // might be undefined (so gtRegTag == GT_REGTAG_NONE).
678     _gtRegPair = from->_gtRegPair;
679     C_ASSERT(sizeof(_gtRegPair) >= sizeof(_gtRegNum));
680     INDEBUG(gtRegTag = from->gtRegTag;)
681
682     // Also copy multi-reg state if this is a call node
683     if (IsCall())
684     {
685         assert(from->IsCall());
686         this->AsCall()->CopyOtherRegs(from->AsCall());
687     }
688     else if (IsCopyOrReload())
689     {
690         this->AsCopyOrReload()->CopyOtherRegs(from->AsCopyOrReload());
691     }
692 }
693
//------------------------------------------------------------------
// gtHasReg: Whether node has been assigned a register by LSRA
//
// Arguments:
//    None
//
// Return Value:
//    Returns true if the node was assigned a register.
//
//    In case of multi-reg call nodes, it is considered
//    having a reg if regs are allocated for all its
//    return values.
//
//    In case of GT_COPY or GT_RELOAD of a multi-reg call,
//    GT_COPY/GT_RELOAD is considered having a reg if it
//    has a reg assigned to any of its positions.
//
// Assumption:
//    In order for this to work properly, gtClearReg must be called
//    prior to setting the register value.
//
bool GenTree::gtHasReg() const
{
    bool hasReg;

#if CPU_LONG_USES_REGPAIR
    // Long-typed nodes use a register pair; answer based on the pair.
    if (isRegPairType(TypeGet()))
    {
        assert(_gtRegNum != REG_NA);
        INDEBUG(assert(gtRegTag == GT_REGTAG_REGPAIR));
        return (gtRegPair != REG_PAIR_NONE);
    }
    assert(_gtRegNum != REG_PAIR_NONE);
    INDEBUG(assert(gtRegTag == GT_REGTAG_REG));
#endif
    if (IsMultiRegCall())
    {
        // Have to cast away const-ness because GetReturnTypeDesc() is a non-const method
        GenTree*     tree     = const_cast<GenTree*>(this);
        GenTreeCall* call     = tree->AsCall();
        unsigned     regCount = call->GetReturnTypeDesc()->GetReturnRegCount();
        hasReg                = false;

        // A Multi-reg call node is said to have regs, if it has
        // reg assigned to each of its result registers.
        for (unsigned i = 0; i < regCount; ++i)
        {
            hasReg = (call->GetRegNumByIdx(i) != REG_NA);
            if (!hasReg)
            {
                break;
            }
        }
    }
    else if (IsCopyOrReloadOfMultiRegCall())
    {
        GenTree*             tree         = const_cast<GenTree*>(this);
        GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
        GenTreeCall*         call         = copyOrReload->gtGetOp1()->AsCall();
        unsigned             regCount     = call->GetReturnTypeDesc()->GetReturnRegCount();
        hasReg                            = false;

        // A Multi-reg copy or reload node is said to have regs,
        // if it has valid regs in any of the positions.
        for (unsigned i = 0; i < regCount; ++i)
        {
            hasReg = (copyOrReload->GetRegNumByIdx(i) != REG_NA);
            if (hasReg)
            {
                break;
            }
        }
    }
    else
    {
        // Ordinary single-register node.
        hasReg = (gtRegNum != REG_NA);
    }

    return hasReg;
}
774
//-----------------------------------------------------------------------------
// GetRegisterDstCount: Get the number of registers defined by the node.
//
// Arguments:
//    None
//
// Return Value:
//    The number of registers that this node defines.
//
// Notes:
//    This should not be called on a contained node.
//    This does not look at the actual register assignments, if any, and so
//    is valid after Lowering.
//
int GenTree::GetRegisterDstCount() const
{
    assert(!isContained());
    if (!IsMultiRegNode())
    {
        // A single-reg node defines one register iff it produces a value.
        return (IsValue()) ? 1 : 0;
    }
    else if (IsMultiRegCall())
    {
        // temporarily cast away const-ness as AsCall() method is not declared const
        GenTree* temp = const_cast<GenTree*>(this);
        return temp->AsCall()->GetReturnTypeDesc()->GetReturnRegCount();
    }
    else if (IsCopyOrReloadOfMultiRegCall())
    {
        // A multi-reg copy or reload, will have valid regs for only those
        // positions that need to be copied or reloaded.  Hence we need
        // to consider only those registers for computing reg mask.

        GenTree*             tree         = const_cast<GenTree*>(this);
        GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
        GenTreeCall*         call         = copyOrReload->gtGetOp1()->AsCall();
        return call->GetReturnTypeDesc()->GetReturnRegCount();
    }
#if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
    else if (OperIsPutArgSplit())
    {
        return (const_cast<GenTree*>(this))->AsPutArgSplit()->gtNumRegs;
    }
    // A PUTARG_REG could be a MultiRegOp on ARM since we could move a double register to two int registers,
    // either for all double parameters w/SoftFP or for varargs).
    else
    {
        assert(OperIsMultiRegOp());
        return (TypeGet() == TYP_LONG) ? 2 : 1;
    }
#endif // !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
    assert(!"Unexpected multi-reg node");
    return 0;
}
829
830 //---------------------------------------------------------------
831 // gtGetRegMask: Get the reg mask of the node.
832 //
833 // Arguments:
834 //    None
835 //
836 // Return Value:
837 //    Reg Mask of GenTree node.
838 //
839 regMaskTP GenTree::gtGetRegMask() const
840 {
841     regMaskTP resultMask;
842
843 #if CPU_LONG_USES_REGPAIR
844     if (isRegPairType(TypeGet()))
845     {
846         resultMask = genRegPairMask(gtRegPair);
847     }
848     else
849 #endif
850     {
851         if (IsMultiRegCall())
852         {
853             // temporarily cast away const-ness as AsCall() method is not declared const
854             resultMask    = genRegMask(gtRegNum);
855             GenTree* temp = const_cast<GenTree*>(this);
856             resultMask |= temp->AsCall()->GetOtherRegMask();
857         }
858         else if (IsCopyOrReloadOfMultiRegCall())
859         {
860             // A multi-reg copy or reload, will have valid regs for only those
861             // positions that need to be copied or reloaded.  Hence we need
862             // to consider only those registers for computing reg mask.
863
864             GenTree*             tree         = const_cast<GenTree*>(this);
865             GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
866             GenTreeCall*         call         = copyOrReload->gtGetOp1()->AsCall();
867             unsigned             regCount     = call->GetReturnTypeDesc()->GetReturnRegCount();
868
869             resultMask = RBM_NONE;
870             for (unsigned i = 0; i < regCount; ++i)
871             {
872                 regNumber reg = copyOrReload->GetRegNumByIdx(i);
873                 if (reg != REG_NA)
874                 {
875                     resultMask |= genRegMask(reg);
876                 }
877             }
878         }
879 #if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
880         else if (OperIsPutArgSplit())
881         {
882             GenTree*            tree     = const_cast<GenTree*>(this);
883             GenTreePutArgSplit* splitArg = tree->AsPutArgSplit();
884             unsigned            regCount = splitArg->gtNumRegs;
885
886             resultMask = RBM_NONE;
887             for (unsigned i = 0; i < regCount; ++i)
888             {
889                 regNumber reg = splitArg->GetRegNumByIdx(i);
890                 assert(reg != REG_NA);
891                 resultMask |= genRegMask(reg);
892             }
893         }
894 #endif
895         else
896         {
897             resultMask = genRegMask(gtRegNum);
898         }
899     }
900
901     return resultMask;
902 }
903
904 //---------------------------------------------------------------
905 // GetOtherRegMask: Get the reg mask of gtOtherRegs of call node
906 //
907 // Arguments:
908 //    None
909 //
910 // Return Value:
911 //    Reg mask of gtOtherRegs of call node.
912 //
913 regMaskTP GenTreeCall::GetOtherRegMask() const
914 {
915     regMaskTP resultMask = RBM_NONE;
916
917 #if FEATURE_MULTIREG_RET
918     for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
919     {
920         if (gtOtherRegs[i] != REG_NA)
921         {
922             resultMask |= genRegMask((regNumber)gtOtherRegs[i]);
923             continue;
924         }
925         break;
926     }
927 #endif
928
929     return resultMask;
930 }
931
932 //-------------------------------------------------------------------------
933 // IsPure:
934 //    Returns true if this call is pure. For now, this uses the same
935 //    definition of "pure" that is that used by HelperCallProperties: a
936 //    pure call does not read or write any aliased (e.g. heap) memory or
937 //    have other global side effects (e.g. class constructors, finalizers),
938 //    but is allowed to throw an exception.
939 //
940 //    NOTE: this call currently only returns true if the call target is a
941 //    helper method that is known to be pure. No other analysis is
942 //    performed.
943 //
944 // Arguments:
945 //    Copiler - the compiler context.
946 //
947 // Returns:
948 //    True if the call is pure; false otherwise.
949 //
950 bool GenTreeCall::IsPure(Compiler* compiler) const
951 {
952     return (gtCallType == CT_HELPER) &&
953            compiler->s_helperCallProperties.IsPure(compiler->eeGetHelperNum(gtCallMethHnd));
954 }
955
956 //-------------------------------------------------------------------------
957 // HasSideEffects:
958 //    Returns true if this call has any side effects. All non-helpers are considered to have side-effects. Only helpers
959 //    that do not mutate the heap, do not run constructors, may not throw, and are either a) pure or b) non-finalizing
960 //    allocation functions are considered side-effect-free.
961 //
962 // Arguments:
963 //     compiler         - the compiler instance
964 //     ignoreExceptions - when `true`, ignores exception side effects
965 //     ignoreCctors     - when `true`, ignores class constructor side effects
966 //
967 // Return Value:
968 //      true if this call has any side-effects; false otherwise.
969 bool GenTreeCall::HasSideEffects(Compiler* compiler, bool ignoreExceptions, bool ignoreCctors) const
970 {
971     // Generally all GT_CALL nodes are considered to have side-effects, but we may have extra information about helper
972     // calls that can prove them side-effect-free.
973     if (gtCallType != CT_HELPER)
974     {
975         return true;
976     }
977
978     CorInfoHelpFunc       helper           = compiler->eeGetHelperNum(gtCallMethHnd);
979     HelperCallProperties& helperProperties = compiler->s_helperCallProperties;
980
981     // We definitely care about the side effects if MutatesHeap is true
982     if (helperProperties.MutatesHeap(helper))
983     {
984         return true;
985     }
986
987     // Unless we have been instructed to ignore cctors (CSE, for example, ignores cctors), consider them side effects.
988     if (!ignoreCctors && helperProperties.MayRunCctor(helper))
989     {
990         return true;
991     }
992
993     // If we also care about exceptions then check if the helper can throw
994     if (!ignoreExceptions && !helperProperties.NoThrow(helper))
995     {
996         return true;
997     }
998
999     // If this is not a Pure helper call or an allocator (that will not need to run a finalizer)
1000     // then this call has side effects.
1001     return !helperProperties.IsPure(helper) &&
1002            (!helperProperties.IsAllocator(helper) || helperProperties.MayFinalize(helper));
1003 }
1004
1005 #ifndef LEGACY_BACKEND
1006
1007 //-------------------------------------------------------------------------
1008 // HasNonStandardAddedArgs: Return true if the method has non-standard args added to the call
1009 // argument list during argument morphing (fgMorphArgs), e.g., passed in R10 or R11 on AMD64.
1010 // See also GetNonStandardAddedArgCount().
1011 //
1012 // Arguments:
1013 //     compiler - the compiler instance
1014 //
1015 // Return Value:
1016 //      true if there are any such args, false otherwise.
1017 //
1018 bool GenTreeCall::HasNonStandardAddedArgs(Compiler* compiler) const
1019 {
1020     return GetNonStandardAddedArgCount(compiler) != 0;
1021 }
1022
1023 //-------------------------------------------------------------------------
1024 // GetNonStandardAddedArgCount: Get the count of non-standard arguments that have been added
1025 // during call argument morphing (fgMorphArgs). Do not count non-standard args that are already
1026 // counted in the argument list prior to morphing.
1027 //
1028 // This function is used to help map the caller and callee arguments during tail call setup.
1029 //
1030 // Arguments:
1031 //     compiler - the compiler instance
1032 //
1033 // Return Value:
1034 //      The count of args, as described.
1035 //
1036 // Notes:
1037 //      It would be more general to have fgMorphArgs set a bit on the call node when such
1038 //      args are added to a call, and a bit on each such arg, and then have this code loop
1039 //      over the call args when the special call bit is set, counting the args with the special
1040 //      arg bit. This seems pretty heavyweight, though. Instead, this logic needs to be kept
1041 //      in sync with fgMorphArgs.
1042 //
1043 int GenTreeCall::GetNonStandardAddedArgCount(Compiler* compiler) const
1044 {
1045     if (IsUnmanaged() && !compiler->opts.ShouldUsePInvokeHelpers())
1046     {
1047         // R11 = PInvoke cookie param
1048         return 1;
1049     }
1050     else if (IsVirtualStub())
1051     {
1052         // R11 = Virtual stub param
1053         return 1;
1054     }
1055     else if ((gtCallType == CT_INDIRECT) && (gtCallCookie != nullptr))
1056     {
1057         // R10 = PInvoke target param
1058         // R11 = PInvoke cookie param
1059         return 2;
1060     }
1061     return 0;
1062 }
1063
1064 #endif // !LEGACY_BACKEND
1065
1066 //-------------------------------------------------------------------------
1067 // TreatAsHasRetBufArg:
1068 //
1069 // Arguments:
1070 //     compiler, the compiler instance so that we can call eeGetHelperNum
1071 //
1072 // Return Value:
1073 //     Returns true if we treat the call as if it has a retBuf argument
1074 //     This method may actually have a retBuf argument
1075 //     or it could be a JIT helper that we are still transforming during
1076 //     the importer phase.
1077 //
1078 // Notes:
1079 //     On ARM64 marking the method with the GTF_CALL_M_RETBUFFARG flag
1080 //     will make HasRetBufArg() return true, but will also force the
1081 //     use of register x8 to pass the RetBuf argument.
1082 //
1083 //     These two Jit Helpers that we handle here by returning true
1084 //     aren't actually defined to return a struct, so they don't expect
1085 //     their RetBuf to be passed in x8, instead they  expect it in x0.
1086 //
1087 bool GenTreeCall::TreatAsHasRetBufArg(Compiler* compiler) const
1088 {
1089     if (HasRetBufArg())
1090     {
1091         return true;
1092     }
1093     else
1094     {
1095         // If we see a Jit helper call that returns a TYP_STRUCT we will
1096         // transform it as if it has a Return Buffer Argument
1097         //
1098         if (IsHelperCall() && (gtReturnType == TYP_STRUCT))
1099         {
1100             // There are two possible helper calls that use this path:
1101             //  CORINFO_HELP_GETFIELDSTRUCT and CORINFO_HELP_UNBOX_NULLABLE
1102             //
1103             CorInfoHelpFunc helpFunc = compiler->eeGetHelperNum(gtCallMethHnd);
1104
1105             if (helpFunc == CORINFO_HELP_GETFIELDSTRUCT)
1106             {
1107                 return true;
1108             }
1109             else if (helpFunc == CORINFO_HELP_UNBOX_NULLABLE)
1110             {
1111                 return true;
1112             }
1113             else
1114             {
1115                 assert(!"Unexpected JIT helper in TreatAsHasRetBufArg");
1116             }
1117         }
1118     }
1119     return false;
1120 }
1121
1122 //-------------------------------------------------------------------------
1123 // IsHelperCall: Determine if this GT_CALL node is a specific helper call.
1124 //
1125 // Arguments:
1126 //     compiler - the compiler instance so that we can call eeFindHelper
1127 //
1128 // Return Value:
1129 //     Returns true if this GT_CALL node is a call to the specified helper.
1130 //
1131 bool GenTreeCall::IsHelperCall(Compiler* compiler, unsigned helper) const
1132 {
1133     return IsHelperCall(compiler->eeFindHelper(helper));
1134 }
1135
1136 //------------------------------------------------------------------------
1137 // GenTreeCall::ReplaceCallOperand:
1138 //    Replaces a given operand to a call node and updates the call
1139 //    argument table if necessary.
1140 //
1141 // Arguments:
1142 //    useEdge - the use edge that points to the operand to be replaced.
1143 //    replacement - the replacement node.
1144 //
1145 void GenTreeCall::ReplaceCallOperand(GenTree** useEdge, GenTree* replacement)
1146 {
1147     assert(useEdge != nullptr);
1148     assert(replacement != nullptr);
1149     assert(TryGetUse(*useEdge, &useEdge));
1150
1151     GenTree* originalOperand = *useEdge;
1152     *useEdge                 = replacement;
1153
1154     const bool isArgument =
1155         (replacement != gtControlExpr) &&
1156         ((gtCallType != CT_INDIRECT) || ((replacement != gtCallCookie) && (replacement != gtCallAddr)));
1157
1158     if (isArgument)
1159     {
1160         if ((originalOperand->gtFlags & GTF_LATE_ARG) != 0)
1161         {
1162             replacement->gtFlags |= GTF_LATE_ARG;
1163         }
1164         else
1165         {
1166             assert((replacement->gtFlags & GTF_LATE_ARG) == 0);
1167
1168             fgArgTabEntry* fp = Compiler::gtArgEntryByNode(this, originalOperand);
1169             assert(fp->node == originalOperand);
1170             fp->node = replacement;
1171         }
1172     }
1173 }
1174
1175 //-------------------------------------------------------------------------
1176 // AreArgsComplete: Determine if this GT_CALL node's arguments have been processed.
1177 //
1178 // Return Value:
1179 //     Returns true if fgMorphArgs has processed the arguments.
1180 //
1181 bool GenTreeCall::AreArgsComplete() const
1182 {
1183     if (fgArgInfo == nullptr)
1184     {
1185         return false;
1186     }
1187     if (fgArgInfo->AreArgsComplete())
1188     {
1189         assert((gtCallLateArgs != nullptr) || !fgArgInfo->HasRegArgs());
1190         return true;
1191     }
1192     assert(gtCallArgs == nullptr);
1193     return false;
1194 }
1195
1196 #if !defined(FEATURE_PUT_STRUCT_ARG_STK) && !defined(LEGACY_BACKEND)
1197 unsigned GenTreePutArgStk::getArgSize()
1198 {
1199     return genTypeSize(genActualType(gtOp1->gtType));
1200 }
1201 #endif // !defined(FEATURE_PUT_STRUCT_ARG_STK) && !defined(LEGACY_BACKEND)
1202
1203 /*****************************************************************************
1204  *
1205  *  Returns non-zero if the two trees are identical.
1206  */
1207
1208 bool GenTree::Compare(GenTree* op1, GenTree* op2, bool swapOK)
1209 {
1210     genTreeOps oper;
1211     unsigned   kind;
1212
1213 //  printf("tree1:\n"); gtDispTree(op1);
1214 //  printf("tree2:\n"); gtDispTree(op2);
1215
1216 AGAIN:
1217
1218     if (op1 == nullptr)
1219     {
1220         return (op2 == nullptr);
1221     }
1222     if (op2 == nullptr)
1223     {
1224         return false;
1225     }
1226     if (op1 == op2)
1227     {
1228         return true;
1229     }
1230
1231     assert(op1->gtOper != GT_STMT);
1232     assert(op2->gtOper != GT_STMT);
1233
1234     oper = op1->OperGet();
1235
1236     /* The operators must be equal */
1237
1238     if (oper != op2->gtOper)
1239     {
1240         return false;
1241     }
1242
1243     /* The types must be equal */
1244
1245     if (op1->gtType != op2->gtType)
1246     {
1247         return false;
1248     }
1249
1250     /* Overflow must be equal */
1251     if (op1->gtOverflowEx() != op2->gtOverflowEx())
1252     {
1253         return false;
1254     }
1255
1256     /* Sensible flags must be equal */
1257     if ((op1->gtFlags & (GTF_UNSIGNED)) != (op2->gtFlags & (GTF_UNSIGNED)))
1258     {
1259         return false;
1260     }
1261
1262     /* Figure out what kind of nodes we're comparing */
1263
1264     kind = op1->OperKind();
1265
1266     /* Is this a constant node? */
1267
1268     if (kind & GTK_CONST)
1269     {
1270         switch (oper)
1271         {
1272             case GT_CNS_INT:
1273                 if (op1->gtIntCon.gtIconVal == op2->gtIntCon.gtIconVal)
1274                 {
1275                     return true;
1276                 }
1277                 break;
1278 #if 0
1279             // TODO-CQ: Enable this in the future
1280         case GT_CNS_LNG:
1281             if  (op1->gtLngCon.gtLconVal == op2->gtLngCon.gtLconVal)
1282                 return true;
1283             break;
1284
1285         case GT_CNS_DBL:
1286             if  (op1->gtDblCon.gtDconVal == op2->gtDblCon.gtDconVal)
1287                 return true;
1288             break;
1289 #endif
1290             default:
1291                 break;
1292         }
1293
1294         return false;
1295     }
1296
1297     /* Is this a leaf node? */
1298
1299     if (kind & GTK_LEAF)
1300     {
1301         switch (oper)
1302         {
1303             case GT_LCL_VAR:
1304                 if (op1->gtLclVarCommon.gtLclNum != op2->gtLclVarCommon.gtLclNum)
1305                 {
1306                     break;
1307                 }
1308
1309                 return true;
1310
1311             case GT_LCL_FLD:
1312                 if (op1->gtLclFld.gtLclNum != op2->gtLclFld.gtLclNum ||
1313                     op1->gtLclFld.gtLclOffs != op2->gtLclFld.gtLclOffs)
1314                 {
1315                     break;
1316                 }
1317
1318                 return true;
1319
1320             case GT_CLS_VAR:
1321                 if (op1->gtClsVar.gtClsVarHnd != op2->gtClsVar.gtClsVarHnd)
1322                 {
1323                     break;
1324                 }
1325
1326                 return true;
1327
1328             case GT_LABEL:
1329                 return true;
1330
1331             case GT_ARGPLACE:
1332                 if ((op1->gtType == TYP_STRUCT) &&
1333                     (op1->gtArgPlace.gtArgPlaceClsHnd != op2->gtArgPlace.gtArgPlaceClsHnd))
1334                 {
1335                     break;
1336                 }
1337                 return true;
1338
1339             default:
1340                 break;
1341         }
1342
1343         return false;
1344     }
1345
1346     /* Is it a 'simple' unary/binary operator? */
1347
1348     if (kind & GTK_UNOP)
1349     {
1350         if (IsExOp(kind))
1351         {
1352             // ExOp operators extend unary operator with extra, non-GenTree* members.  In many cases,
1353             // these should be included in the comparison.
1354             switch (oper)
1355             {
1356                 case GT_ARR_LENGTH:
1357                     if (op1->gtArrLen.ArrLenOffset() != op2->gtArrLen.ArrLenOffset())
1358                     {
1359                         return false;
1360                     }
1361                     break;
1362                 case GT_CAST:
1363                     if (op1->gtCast.gtCastType != op2->gtCast.gtCastType)
1364                     {
1365                         return false;
1366                     }
1367                     break;
1368                 case GT_OBJ:
1369                     if (op1->AsObj()->gtClass != op2->AsObj()->gtClass)
1370                     {
1371                         return false;
1372                     }
1373                     break;
1374
1375                 // For the ones below no extra argument matters for comparison.
1376                 case GT_BOX:
1377                     break;
1378
1379                 default:
1380                     assert(!"unexpected unary ExOp operator");
1381             }
1382         }
1383         return Compare(op1->gtOp.gtOp1, op2->gtOp.gtOp1);
1384     }
1385
1386     if (kind & GTK_BINOP)
1387     {
1388         if (IsExOp(kind))
1389         {
1390             // ExOp operators extend unary operator with extra, non-GenTree* members.  In many cases,
1391             // these should be included in the hash code.
1392             switch (oper)
1393             {
1394                 case GT_INTRINSIC:
1395                     if (op1->gtIntrinsic.gtIntrinsicId != op2->gtIntrinsic.gtIntrinsicId)
1396                     {
1397                         return false;
1398                     }
1399                     break;
1400                 case GT_LEA:
1401                     if (op1->gtAddrMode.gtScale != op2->gtAddrMode.gtScale)
1402                     {
1403                         return false;
1404                     }
1405                     if (op1->gtAddrMode.Offset() != op2->gtAddrMode.Offset())
1406                     {
1407                         return false;
1408                     }
1409                     break;
1410                 case GT_INDEX:
1411                     if (op1->gtIndex.gtIndElemSize != op2->gtIndex.gtIndElemSize)
1412                     {
1413                         return false;
1414                     }
1415                     break;
1416                 case GT_INDEX_ADDR:
1417                     if (op1->AsIndexAddr()->gtElemSize != op2->AsIndexAddr()->gtElemSize)
1418                     {
1419                         return false;
1420                     }
1421                     break;
1422 #ifdef FEATURE_SIMD
1423                 case GT_SIMD:
1424                     if ((op1->AsSIMD()->gtSIMDIntrinsicID != op2->AsSIMD()->gtSIMDIntrinsicID) ||
1425                         (op1->AsSIMD()->gtSIMDBaseType != op2->AsSIMD()->gtSIMDBaseType) ||
1426                         (op1->AsSIMD()->gtSIMDSize != op2->AsSIMD()->gtSIMDSize))
1427                     {
1428                         return false;
1429                     }
1430                     break;
1431 #endif // FEATURE_SIMD
1432
1433 #ifdef FEATURE_HW_INTRINSICS
1434                 case GT_HWIntrinsic:
1435                     if ((op1->AsHWIntrinsic()->gtHWIntrinsicId != op2->AsHWIntrinsic()->gtHWIntrinsicId) ||
1436                         (op1->AsHWIntrinsic()->gtSIMDBaseType != op2->AsHWIntrinsic()->gtSIMDBaseType) ||
1437                         (op1->AsHWIntrinsic()->gtSIMDSize != op2->AsHWIntrinsic()->gtSIMDSize))
1438                     {
1439                         return false;
1440                     }
1441                     break;
1442 #endif
1443
1444                 // For the ones below no extra argument matters for comparison.
1445                 case GT_QMARK:
1446                     break;
1447
1448                 default:
1449                     assert(!"unexpected binary ExOp operator");
1450             }
1451         }
1452
1453         if (op1->gtOp.gtOp2)
1454         {
1455             if (!Compare(op1->gtOp.gtOp1, op2->gtOp.gtOp1, swapOK))
1456             {
1457                 if (swapOK && OperIsCommutative(oper) &&
1458                     ((op1->gtOp.gtOp1->gtFlags | op1->gtOp.gtOp2->gtFlags | op2->gtOp.gtOp1->gtFlags |
1459                       op2->gtOp.gtOp2->gtFlags) &
1460                      GTF_ALL_EFFECT) == 0)
1461                 {
1462                     if (Compare(op1->gtOp.gtOp1, op2->gtOp.gtOp2, swapOK))
1463                     {
1464                         op1 = op1->gtOp.gtOp2;
1465                         op2 = op2->gtOp.gtOp1;
1466                         goto AGAIN;
1467                     }
1468                 }
1469
1470                 return false;
1471             }
1472
1473             op1 = op1->gtOp.gtOp2;
1474             op2 = op2->gtOp.gtOp2;
1475
1476             goto AGAIN;
1477         }
1478         else
1479         {
1480
1481             op1 = op1->gtOp.gtOp1;
1482             op2 = op2->gtOp.gtOp1;
1483
1484             if (!op1)
1485             {
1486                 return (op2 == nullptr);
1487             }
1488             if (!op2)
1489             {
1490                 return false;
1491             }
1492
1493             goto AGAIN;
1494         }
1495     }
1496
1497     /* See what kind of a special operator we have here */
1498
1499     switch (oper)
1500     {
1501         case GT_FIELD:
1502             if (op1->gtField.gtFldHnd != op2->gtField.gtFldHnd)
1503             {
1504                 break;
1505             }
1506
1507             op1 = op1->gtField.gtFldObj;
1508             op2 = op2->gtField.gtFldObj;
1509
1510             if (op1 || op2)
1511             {
1512                 if (op1 && op2)
1513                 {
1514                     goto AGAIN;
1515                 }
1516             }
1517
1518             return true;
1519
1520         case GT_CALL:
1521
1522             if (op1->gtCall.gtCallType != op2->gtCall.gtCallType)
1523             {
1524                 return false;
1525             }
1526
1527             if (op1->gtCall.gtCallType != CT_INDIRECT)
1528             {
1529                 if (op1->gtCall.gtCallMethHnd != op2->gtCall.gtCallMethHnd)
1530                 {
1531                     return false;
1532                 }
1533
1534 #ifdef FEATURE_READYTORUN_COMPILER
1535                 if (op1->gtCall.gtEntryPoint.addr != op2->gtCall.gtEntryPoint.addr)
1536                 {
1537                     return false;
1538                 }
1539 #endif
1540             }
1541             else
1542             {
1543                 if (!Compare(op1->gtCall.gtCallAddr, op2->gtCall.gtCallAddr))
1544                 {
1545                     return false;
1546                 }
1547             }
1548
1549             if (Compare(op1->gtCall.gtCallLateArgs, op2->gtCall.gtCallLateArgs) &&
1550                 Compare(op1->gtCall.gtCallArgs, op2->gtCall.gtCallArgs) &&
1551                 Compare(op1->gtCall.gtControlExpr, op2->gtCall.gtControlExpr) &&
1552                 Compare(op1->gtCall.gtCallObjp, op2->gtCall.gtCallObjp))
1553             {
1554                 return true;
1555             }
1556             break;
1557
1558         case GT_ARR_ELEM:
1559
1560             if (op1->gtArrElem.gtArrRank != op2->gtArrElem.gtArrRank)
1561             {
1562                 return false;
1563             }
1564
1565             // NOTE: gtArrElemSize may need to be handled
1566
1567             unsigned dim;
1568             for (dim = 0; dim < op1->gtArrElem.gtArrRank; dim++)
1569             {
1570                 if (!Compare(op1->gtArrElem.gtArrInds[dim], op2->gtArrElem.gtArrInds[dim]))
1571                 {
1572                     return false;
1573                 }
1574             }
1575
1576             op1 = op1->gtArrElem.gtArrObj;
1577             op2 = op2->gtArrElem.gtArrObj;
1578             goto AGAIN;
1579
1580         case GT_ARR_OFFSET:
1581             if (op1->gtArrOffs.gtCurrDim != op2->gtArrOffs.gtCurrDim ||
1582                 op1->gtArrOffs.gtArrRank != op2->gtArrOffs.gtArrRank)
1583             {
1584                 return false;
1585             }
1586             return (Compare(op1->gtArrOffs.gtOffset, op2->gtArrOffs.gtOffset) &&
1587                     Compare(op1->gtArrOffs.gtIndex, op2->gtArrOffs.gtIndex) &&
1588                     Compare(op1->gtArrOffs.gtArrObj, op2->gtArrOffs.gtArrObj));
1589
1590         case GT_CMPXCHG:
1591             return Compare(op1->gtCmpXchg.gtOpLocation, op2->gtCmpXchg.gtOpLocation) &&
1592                    Compare(op1->gtCmpXchg.gtOpValue, op2->gtCmpXchg.gtOpValue) &&
1593                    Compare(op1->gtCmpXchg.gtOpComparand, op2->gtCmpXchg.gtOpComparand);
1594
1595         case GT_ARR_BOUNDS_CHECK:
1596 #ifdef FEATURE_SIMD
1597         case GT_SIMD_CHK:
1598 #endif // FEATURE_SIMD
1599 #ifdef FEATURE_HW_INTRINSICS
1600         case GT_HW_INTRINSIC_CHK:
1601 #endif // FEATURE_HW_INTRINSICS
1602             return Compare(op1->gtBoundsChk.gtIndex, op2->gtBoundsChk.gtIndex) &&
1603                    Compare(op1->gtBoundsChk.gtArrLen, op2->gtBoundsChk.gtArrLen) &&
1604                    (op1->gtBoundsChk.gtThrowKind == op2->gtBoundsChk.gtThrowKind);
1605
1606         case GT_STORE_DYN_BLK:
1607         case GT_DYN_BLK:
1608             return Compare(op1->gtDynBlk.Addr(), op2->gtDynBlk.Addr()) &&
1609                    Compare(op1->gtDynBlk.Data(), op2->gtDynBlk.Data()) &&
1610                    Compare(op1->gtDynBlk.gtDynamicSize, op2->gtDynBlk.gtDynamicSize);
1611
1612         default:
1613             assert(!"unexpected operator");
1614     }
1615
1616     return false;
1617 }
1618
1619 /*****************************************************************************
1620  *
1621  *  Returns non-zero if the given tree contains a use of a local #lclNum.
1622  */
1623
1624 bool Compiler::gtHasRef(GenTree* tree, ssize_t lclNum, bool defOnly)
1625 {
1626     genTreeOps oper;
1627     unsigned   kind;
1628
1629 AGAIN:
1630
1631     assert(tree);
1632
1633     oper = tree->OperGet();
1634     kind = tree->OperKind();
1635
1636     assert(oper != GT_STMT);
1637
1638     /* Is this a constant node? */
1639
1640     if (kind & GTK_CONST)
1641     {
1642         return false;
1643     }
1644
1645     /* Is this a leaf node? */
1646
1647     if (kind & GTK_LEAF)
1648     {
1649         if (oper == GT_LCL_VAR)
1650         {
1651             if (tree->gtLclVarCommon.gtLclNum == (unsigned)lclNum)
1652             {
1653                 if (!defOnly)
1654                 {
1655                     return true;
1656                 }
1657             }
1658         }
1659         else if (oper == GT_RET_EXPR)
1660         {
1661             return gtHasRef(tree->gtRetExpr.gtInlineCandidate, lclNum, defOnly);
1662         }
1663
1664         return false;
1665     }
1666
1667     /* Is it a 'simple' unary/binary operator? */
1668
1669     if (kind & GTK_SMPOP)
1670     {
1671         if (tree->gtGetOp2IfPresent())
1672         {
1673             if (gtHasRef(tree->gtOp.gtOp1, lclNum, defOnly))
1674             {
1675                 return true;
1676             }
1677
1678             tree = tree->gtOp.gtOp2;
1679             goto AGAIN;
1680         }
1681         else
1682         {
1683             tree = tree->gtOp.gtOp1;
1684
1685             if (!tree)
1686             {
1687                 return false;
1688             }
1689
1690             if (GenTree::OperIsAssignment(oper))
1691             {
1692                 // 'tree' is the gtOp1 of an assignment node. So we can handle
1693                 // the case where defOnly is either true or false.
1694
1695                 if (tree->gtOper == GT_LCL_VAR && tree->gtLclVarCommon.gtLclNum == (unsigned)lclNum)
1696                 {
1697                     return true;
1698                 }
1699                 else if (tree->gtOper == GT_FIELD && lclNum == (ssize_t)tree->gtField.gtFldHnd)
1700                 {
1701                     return true;
1702                 }
1703             }
1704
1705             goto AGAIN;
1706         }
1707     }
1708
1709     /* See what kind of a special operator we have here */
1710
1711     switch (oper)
1712     {
1713         case GT_FIELD:
1714             if (lclNum == (ssize_t)tree->gtField.gtFldHnd)
1715             {
1716                 if (!defOnly)
1717                 {
1718                     return true;
1719                 }
1720             }
1721
1722             tree = tree->gtField.gtFldObj;
1723             if (tree)
1724             {
1725                 goto AGAIN;
1726             }
1727             break;
1728
1729         case GT_CALL:
1730
1731             if (tree->gtCall.gtCallObjp)
1732             {
1733                 if (gtHasRef(tree->gtCall.gtCallObjp, lclNum, defOnly))
1734                 {
1735                     return true;
1736                 }
1737             }
1738
1739             if (tree->gtCall.gtCallArgs)
1740             {
1741                 if (gtHasRef(tree->gtCall.gtCallArgs, lclNum, defOnly))
1742                 {
1743                     return true;
1744                 }
1745             }
1746
1747             if (tree->gtCall.gtCallLateArgs)
1748             {
1749                 if (gtHasRef(tree->gtCall.gtCallLateArgs, lclNum, defOnly))
1750                 {
1751                     return true;
1752                 }
1753             }
1754
1755             if (tree->gtCall.gtControlExpr)
1756             {
1757                 if (gtHasRef(tree->gtCall.gtControlExpr, lclNum, defOnly))
1758                 {
1759                     return true;
1760                 }
1761             }
1762
1763             if (tree->gtCall.gtCallType == CT_INDIRECT)
1764             {
1765                 // pinvoke-calli cookie is a constant, or constant indirection
1766                 assert(tree->gtCall.gtCallCookie == nullptr || tree->gtCall.gtCallCookie->gtOper == GT_CNS_INT ||
1767                        tree->gtCall.gtCallCookie->gtOper == GT_IND);
1768
1769                 tree = tree->gtCall.gtCallAddr;
1770             }
1771             else
1772             {
1773                 tree = nullptr;
1774             }
1775
1776             if (tree)
1777             {
1778                 goto AGAIN;
1779             }
1780
1781             break;
1782
1783         case GT_ARR_ELEM:
1784             if (gtHasRef(tree->gtArrElem.gtArrObj, lclNum, defOnly))
1785             {
1786                 return true;
1787             }
1788
1789             unsigned dim;
1790             for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
1791             {
1792                 if (gtHasRef(tree->gtArrElem.gtArrInds[dim], lclNum, defOnly))
1793                 {
1794                     return true;
1795                 }
1796             }
1797
1798             break;
1799
1800         case GT_ARR_OFFSET:
1801             if (gtHasRef(tree->gtArrOffs.gtOffset, lclNum, defOnly) ||
1802                 gtHasRef(tree->gtArrOffs.gtIndex, lclNum, defOnly) ||
1803                 gtHasRef(tree->gtArrOffs.gtArrObj, lclNum, defOnly))
1804             {
1805                 return true;
1806             }
1807             break;
1808
1809         case GT_CMPXCHG:
1810             if (gtHasRef(tree->gtCmpXchg.gtOpLocation, lclNum, defOnly))
1811             {
1812                 return true;
1813             }
1814             if (gtHasRef(tree->gtCmpXchg.gtOpValue, lclNum, defOnly))
1815             {
1816                 return true;
1817             }
1818             if (gtHasRef(tree->gtCmpXchg.gtOpComparand, lclNum, defOnly))
1819             {
1820                 return true;
1821             }
1822             break;
1823
1824         case GT_ARR_BOUNDS_CHECK:
1825 #ifdef FEATURE_SIMD
1826         case GT_SIMD_CHK:
1827 #endif // FEATURE_SIMD
1828 #ifdef FEATURE_HW_INTRINSICS
1829         case GT_HW_INTRINSIC_CHK:
1830 #endif // FEATURE_HW_INTRINSICS
1831             if (gtHasRef(tree->gtBoundsChk.gtIndex, lclNum, defOnly))
1832             {
1833                 return true;
1834             }
1835             if (gtHasRef(tree->gtBoundsChk.gtArrLen, lclNum, defOnly))
1836             {
1837                 return true;
1838             }
1839             break;
1840
1841         case GT_STORE_DYN_BLK:
1842             if (gtHasRef(tree->gtDynBlk.Data(), lclNum, defOnly))
1843             {
1844                 return true;
1845             }
1846             __fallthrough;
1847         case GT_DYN_BLK:
1848             if (gtHasRef(tree->gtDynBlk.Addr(), lclNum, defOnly))
1849             {
1850                 return true;
1851             }
1852             if (gtHasRef(tree->gtDynBlk.gtDynamicSize, lclNum, defOnly))
1853             {
1854                 return true;
1855             }
1856             break;
1857
1858         default:
1859 #ifdef DEBUG
1860             gtDispTree(tree);
1861 #endif
1862             assert(!"unexpected operator");
1863     }
1864
1865     return false;
1866 }
1867
// Callback context for gtHasLocalsWithAddrOpCB, carried through fgWalkTreePre
// via the pCallbackData pointer.
struct AddrTakenDsc
{
    Compiler* comp;            // compiler instance performing the walk
    bool      hasAddrTakenLcl; // out: set when a local with lvHasLdAddrOp or lvAddrExposed is seen
};
1873
1874 /* static */
1875 Compiler::fgWalkResult Compiler::gtHasLocalsWithAddrOpCB(GenTree** pTree, fgWalkData* data)
1876 {
1877     GenTree*  tree = *pTree;
1878     Compiler* comp = data->compiler;
1879
1880     if (tree->gtOper == GT_LCL_VAR)
1881     {
1882         unsigned   lclNum = tree->gtLclVarCommon.gtLclNum;
1883         LclVarDsc* varDsc = &comp->lvaTable[lclNum];
1884
1885         if (varDsc->lvHasLdAddrOp || varDsc->lvAddrExposed)
1886         {
1887             ((AddrTakenDsc*)data->pCallbackData)->hasAddrTakenLcl = true;
1888             return WALK_ABORT;
1889         }
1890     }
1891
1892     return WALK_CONTINUE;
1893 }
1894
1895 /*****************************************************************************
1896  *
1897  *  Return true if this tree contains locals with lvHasLdAddrOp or lvAddrExposed
1898  *  flag(s) set.
1899  */
1900
1901 bool Compiler::gtHasLocalsWithAddrOp(GenTree* tree)
1902 {
1903     AddrTakenDsc desc;
1904
1905     desc.comp            = this;
1906     desc.hasAddrTakenLcl = false;
1907
1908     fgWalkTreePre(&tree, gtHasLocalsWithAddrOpCB, &desc);
1909
1910     return desc.hasAddrTakenLcl;
1911 }
1912
1913 #ifdef DEBUG
1914
1915 /*****************************************************************************
1916  *
1917  *  Helper used to compute hash values for trees.
1918  */
1919
1920 inline unsigned genTreeHashAdd(unsigned old, unsigned add)
1921 {
1922     return (old + old / 2) ^ add;
1923 }
1924
1925 inline unsigned genTreeHashAdd(unsigned old, void* add)
1926 {
1927     return genTreeHashAdd(old, (unsigned)(size_t)add);
1928 }
1929
1930 /*****************************************************************************
1931  *
1932  *  Given an arbitrary expression tree, compute a hash value for it.
1933  */
1934
1935 unsigned Compiler::gtHashValue(GenTree* tree)
1936 {
1937     genTreeOps oper;
1938     unsigned   kind;
1939
1940     unsigned hash = 0;
1941
1942     GenTree* temp;
1943
1944 AGAIN:
1945     assert(tree);
1946     assert(tree->gtOper != GT_STMT);
1947
1948     /* Figure out what kind of a node we have */
1949
1950     oper = tree->OperGet();
1951     kind = tree->OperKind();
1952
1953     /* Include the operator value in the hash */
1954
1955     hash = genTreeHashAdd(hash, oper);
1956
1957     /* Is this a constant or leaf node? */
1958
1959     if (kind & (GTK_CONST | GTK_LEAF))
1960     {
1961         size_t add;
1962
1963         switch (oper)
1964         {
1965             UINT64 bits;
1966             case GT_LCL_VAR:
1967                 add = tree->gtLclVar.gtLclNum;
1968                 break;
1969             case GT_LCL_FLD:
1970                 hash = genTreeHashAdd(hash, tree->gtLclFld.gtLclNum);
1971                 add  = tree->gtLclFld.gtLclOffs;
1972                 break;
1973
1974             case GT_CNS_INT:
1975                 add = tree->gtIntCon.gtIconVal;
1976                 break;
1977             case GT_CNS_LNG:
1978                 bits = (UINT64)tree->gtLngCon.gtLconVal;
1979 #ifdef _TARGET_64BIT_
1980                 add = bits;
1981 #else // 32-bit target
1982                 add = genTreeHashAdd(uhi32(bits), ulo32(bits));
1983 #endif
1984                 break;
1985             case GT_CNS_DBL:
1986                 bits = *(UINT64*)(&tree->gtDblCon.gtDconVal);
1987 #ifdef _TARGET_64BIT_
1988                 add = bits;
1989 #else // 32-bit target
1990                 add = genTreeHashAdd(uhi32(bits), ulo32(bits));
1991 #endif
1992                 break;
1993             case GT_CNS_STR:
1994                 add = tree->gtStrCon.gtSconCPX;
1995                 break;
1996
1997             case GT_JMP:
1998                 add = tree->gtVal.gtVal1;
1999                 break;
2000
2001             default:
2002                 add = 0;
2003                 break;
2004         }
2005
2006         // clang-format off
2007         // narrow 'add' into a 32-bit 'val'
2008         unsigned val;
2009 #ifdef _TARGET_64BIT_
2010         val = genTreeHashAdd(uhi32(add), ulo32(add));
2011 #else // 32-bit target
2012         val = add;
2013 #endif
2014         // clang-format on
2015
2016         hash = genTreeHashAdd(hash, val);
2017         goto DONE;
2018     }
2019
2020     /* Is it a 'simple' unary/binary operator? */
2021
2022     GenTree* op1;
2023
2024     if (kind & GTK_UNOP)
2025     {
2026         op1 = tree->gtOp.gtOp1;
2027         /* Special case: no sub-operand at all */
2028
2029         if (GenTree::IsExOp(kind))
2030         {
2031             // ExOp operators extend operators with extra, non-GenTree* members.  In many cases,
2032             // these should be included in the hash code.
2033             switch (oper)
2034             {
2035                 case GT_ARR_LENGTH:
2036                     hash += tree->gtArrLen.ArrLenOffset();
2037                     break;
2038                 case GT_CAST:
2039                     hash ^= tree->gtCast.gtCastType;
2040                     break;
2041                 case GT_INDEX:
2042                     hash += tree->gtIndex.gtIndElemSize;
2043                     break;
2044                 case GT_INDEX_ADDR:
2045                     hash += tree->AsIndexAddr()->gtElemSize;
2046                     break;
2047                 case GT_ALLOCOBJ:
2048                     hash = genTreeHashAdd(hash, static_cast<unsigned>(
2049                                                     reinterpret_cast<uintptr_t>(tree->gtAllocObj.gtAllocObjClsHnd)));
2050                     hash = genTreeHashAdd(hash, tree->gtAllocObj.gtNewHelper);
2051                     break;
2052                 case GT_RUNTIMELOOKUP:
2053                     hash =
2054                         genTreeHashAdd(hash,
2055                                        static_cast<unsigned>(reinterpret_cast<uintptr_t>(tree->gtRuntimeLookup.gtHnd)));
2056                     break;
2057
2058                 case GT_OBJ:
2059                     hash =
2060                         genTreeHashAdd(hash, static_cast<unsigned>(reinterpret_cast<uintptr_t>(tree->gtObj.gtClass)));
2061                     break;
2062                 // For the ones below no extra argument matters for comparison.
2063                 case GT_BOX:
2064                     break;
2065
2066                 default:
2067                     assert(!"unexpected unary ExOp operator");
2068             }
2069         }
2070
2071         if (!op1)
2072         {
2073             goto DONE;
2074         }
2075
2076         tree = op1;
2077         goto AGAIN;
2078     }
2079
2080     if (kind & GTK_BINOP)
2081     {
2082         if (GenTree::IsExOp(kind))
2083         {
2084             // ExOp operators extend operators with extra, non-GenTree* members.  In many cases,
2085             // these should be included in the hash code.
2086             switch (oper)
2087             {
2088                 case GT_INTRINSIC:
2089                     hash += tree->gtIntrinsic.gtIntrinsicId;
2090                     break;
2091                 case GT_LEA:
2092                     hash += static_cast<unsigned>(tree->gtAddrMode.Offset() << 3) + tree->gtAddrMode.gtScale;
2093                     break;
2094
2095                 case GT_BLK:
2096                 case GT_STORE_BLK:
2097                     hash += tree->gtBlk.gtBlkSize;
2098                     break;
2099
2100                 case GT_OBJ:
2101                 case GT_STORE_OBJ:
2102                     hash ^= reinterpret_cast<unsigned>(tree->AsObj()->gtClass);
2103                     break;
2104
2105                 case GT_DYN_BLK:
2106                 case GT_STORE_DYN_BLK:
2107                     hash += gtHashValue(tree->AsDynBlk()->gtDynamicSize);
2108                     break;
2109
2110                 // For the ones below no extra argument matters for comparison.
2111                 case GT_ARR_INDEX:
2112                 case GT_QMARK:
2113                 case GT_INDEX:
2114                 case GT_INDEX_ADDR:
2115                     break;
2116
2117 #ifdef FEATURE_SIMD
2118                 case GT_SIMD:
2119                     hash += tree->gtSIMD.gtSIMDIntrinsicID;
2120                     hash += tree->gtSIMD.gtSIMDBaseType;
2121                     hash += tree->gtSIMD.gtSIMDSize;
2122                     break;
2123 #endif // FEATURE_SIMD
2124
2125 #ifdef FEATURE_HW_INTRINSICS
2126                 case GT_HWIntrinsic:
2127                     hash += tree->gtHWIntrinsic.gtHWIntrinsicId;
2128                     hash += tree->gtHWIntrinsic.gtSIMDBaseType;
2129                     hash += tree->gtHWIntrinsic.gtSIMDSize;
2130                     break;
2131 #endif // FEATURE_HW_INTRINSICS
2132
2133                 default:
2134                     assert(!"unexpected binary ExOp operator");
2135             }
2136         }
2137
2138         op1          = tree->gtOp.gtOp1;
2139         GenTree* op2 = tree->gtOp.gtOp2;
2140
2141         /* Is there a second sub-operand? */
2142
2143         if (!op2)
2144         {
2145             /* Special case: no sub-operands at all */
2146
2147             if (!op1)
2148             {
2149                 goto DONE;
2150             }
2151
2152             /* This is a unary operator */
2153
2154             tree = op1;
2155             goto AGAIN;
2156         }
2157
2158         /* This is a binary operator */
2159
2160         unsigned hsh1 = gtHashValue(op1);
2161
2162         /* Add op1's hash to the running value and continue with op2 */
2163
2164         hash = genTreeHashAdd(hash, hsh1);
2165
2166         tree = op2;
2167         goto AGAIN;
2168     }
2169
2170     /* See what kind of a special operator we have here */
2171     switch (tree->gtOper)
2172     {
2173         case GT_FIELD:
2174             if (tree->gtField.gtFldObj)
2175             {
2176                 temp = tree->gtField.gtFldObj;
2177                 assert(temp);
2178                 hash = genTreeHashAdd(hash, gtHashValue(temp));
2179             }
2180             break;
2181
2182         case GT_STMT:
2183             temp = tree->gtStmt.gtStmtExpr;
2184             assert(temp);
2185             hash = genTreeHashAdd(hash, gtHashValue(temp));
2186             break;
2187
2188         case GT_ARR_ELEM:
2189
2190             hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrElem.gtArrObj));
2191
2192             unsigned dim;
2193             for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
2194             {
2195                 hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrElem.gtArrInds[dim]));
2196             }
2197
2198             break;
2199
2200         case GT_ARR_OFFSET:
2201             hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrOffs.gtOffset));
2202             hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrOffs.gtIndex));
2203             hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrOffs.gtArrObj));
2204             break;
2205
2206         case GT_CALL:
2207
2208             if (tree->gtCall.gtCallObjp && tree->gtCall.gtCallObjp->gtOper != GT_NOP)
2209             {
2210                 temp = tree->gtCall.gtCallObjp;
2211                 assert(temp);
2212                 hash = genTreeHashAdd(hash, gtHashValue(temp));
2213             }
2214
2215             if (tree->gtCall.gtCallArgs)
2216             {
2217                 temp = tree->gtCall.gtCallArgs;
2218                 assert(temp);
2219                 hash = genTreeHashAdd(hash, gtHashValue(temp));
2220             }
2221
2222             if (tree->gtCall.gtCallType == CT_INDIRECT)
2223             {
2224                 temp = tree->gtCall.gtCallAddr;
2225                 assert(temp);
2226                 hash = genTreeHashAdd(hash, gtHashValue(temp));
2227             }
2228             else
2229             {
2230                 hash = genTreeHashAdd(hash, tree->gtCall.gtCallMethHnd);
2231             }
2232
2233             if (tree->gtCall.gtCallLateArgs)
2234             {
2235                 temp = tree->gtCall.gtCallLateArgs;
2236                 assert(temp);
2237                 hash = genTreeHashAdd(hash, gtHashValue(temp));
2238             }
2239             break;
2240
2241         case GT_CMPXCHG:
2242             hash = genTreeHashAdd(hash, gtHashValue(tree->gtCmpXchg.gtOpLocation));
2243             hash = genTreeHashAdd(hash, gtHashValue(tree->gtCmpXchg.gtOpValue));
2244             hash = genTreeHashAdd(hash, gtHashValue(tree->gtCmpXchg.gtOpComparand));
2245             break;
2246
2247         case GT_ARR_BOUNDS_CHECK:
2248 #ifdef FEATURE_SIMD
2249         case GT_SIMD_CHK:
2250 #endif // FEATURE_SIMD
2251 #ifdef FEATURE_HW_INTRINSICS
2252         case GT_HW_INTRINSIC_CHK:
2253 #endif // FEATURE_HW_INTRINSICS
2254             hash = genTreeHashAdd(hash, gtHashValue(tree->gtBoundsChk.gtIndex));
2255             hash = genTreeHashAdd(hash, gtHashValue(tree->gtBoundsChk.gtArrLen));
2256             hash = genTreeHashAdd(hash, tree->gtBoundsChk.gtThrowKind);
2257             break;
2258
2259         case GT_STORE_DYN_BLK:
2260             hash = genTreeHashAdd(hash, gtHashValue(tree->gtDynBlk.Data()));
2261             __fallthrough;
2262         case GT_DYN_BLK:
2263             hash = genTreeHashAdd(hash, gtHashValue(tree->gtDynBlk.Addr()));
2264             hash = genTreeHashAdd(hash, gtHashValue(tree->gtDynBlk.gtDynamicSize));
2265             break;
2266
2267         default:
2268 #ifdef DEBUG
2269             gtDispTree(tree);
2270 #endif
2271             assert(!"unexpected operator");
2272             break;
2273     }
2274
2275 DONE:
2276
2277     return hash;
2278 }
2279
2280 #endif // DEBUG
2281
2282 /*****************************************************************************
2283  *
2284  *  Given an arbitrary expression tree, attempts to find the set of all local variables
2285  *  referenced by the tree, and return them as "*result".
2286  *  If "findPtr" is null, this is a tracked variable set;
2287  *  if it is non-null, this is an "all var set."
2288  *  The "*result" value is valid only if the call returns "true."  It may return "false"
2289  *  for several reasons:
2290  *     If "findPtr" is NULL, and the expression contains an untracked variable.
2291  *     If "findPtr" is non-NULL, and the expression contains a variable that can't be represented
2292  *        in an "all var set."
2293  *     If the expression accesses address-exposed variables.
2294  *
 *  If there are any indirections or global refs in the expression, the
 *  "*refsPtr" argument will be assigned the appropriate bit set based on
 *  the 'varRefKinds' type.
2298  *  It won't be assigned anything when there are no indirections or global
2299  *  references, though, so this value should be initialized before the call.
2300  *  If we encounter an expression that is equal to *findPtr we set *findPtr
2301  *  to NULL.
2302  */
bool Compiler::lvaLclVarRefs(GenTree* tree, GenTree** findPtr, varRefKinds* refsPtr, void* result)
{
    genTreeOps   oper;
    unsigned     kind;
    varRefKinds  refs = VR_NONE;
    ALLVARSET_TP allVars(AllVarSetOps::UninitVal());
    VARSET_TP    trkdVars(VarSetOps::UninitVal());
    // Only the set matching the current mode is materialized: the "all var"
    // set when findPtr is non-null, the tracked-var set otherwise.
    if (findPtr)
    {
        AllVarSetOps::AssignNoCopy(this, allVars, AllVarSetOps::MakeEmpty(this));
    }
    else
    {
        VarSetOps::AssignNoCopy(this, trkdVars, VarSetOps::MakeEmpty(this));
    }

AGAIN:

    assert(tree);
    assert(tree->gtOper != GT_STMT);

    /* Remember whether we've come across the expression we're looking for */

    if (findPtr && *findPtr == tree)
    {
        *findPtr = nullptr;
    }

    /* Figure out what kind of a node we have */

    oper = tree->OperGet();
    kind = tree->OperKind();

    /* Is this a constant or leaf node? */

    if (kind & (GTK_CONST | GTK_LEAF))
    {
        if (oper == GT_LCL_VAR)
        {
            unsigned lclNum = tree->gtLclVarCommon.gtLclNum;

            /* Should we use the variable table? */

            if (findPtr)
            {
                // The "all var set" representation has a fixed capacity; give up
                // if this local doesn't fit.
                if (lclNum >= lclMAX_ALLSET_TRACKED)
                {
                    return false;
                }

                AllVarSetOps::AddElemD(this, allVars, lclNum);
            }
            else
            {
                assert(lclNum < lvaCount);
                LclVarDsc* varDsc = lvaTable + lclNum;

                // Untracked locals cannot be represented in a tracked-var set.
                if (varDsc->lvTracked == false)
                {
                    return false;
                }

                // Don't deal with expressions with address-exposed variables.
                if (varDsc->lvAddrExposed)
                {
                    return false;
                }

                VarSetOps::AddElemD(this, trkdVars, varDsc->lvVarIndex);
            }
        }
        else if (oper == GT_LCL_FLD)
        {
            /* We can't track every field of every var. Moreover, indirections
               may access different parts of the var as different (but
               overlapping) fields. So just treat them as indirect accesses */

            if (varTypeIsGC(tree->TypeGet()))
            {
                refs = VR_IND_REF;
            }
            else
            {
                refs = VR_IND_SCL;
            }
        }
        else if (oper == GT_CLS_VAR)
        {
            refs = VR_GLB_VAR;
        }

        if (refs != VR_NONE)
        {
            /* Write it back to callers parameter using an 'or' */
            *refsPtr = varRefKinds((*refsPtr) | refs);
        }
        lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
        return true;
    }

    /* Is it a 'simple' unary/binary operator? */

    if (kind & GTK_SMPOP)
    {
        if (oper == GT_IND)
        {
            assert(tree->gtOp.gtOp2 == nullptr);

            /* Set the proper indirection bit */

            // Invariant indirections contribute no ref kinds.
            if ((tree->gtFlags & GTF_IND_INVARIANT) == 0)
            {
                if (varTypeIsGC(tree->TypeGet()))
                {
                    refs = VR_IND_REF;
                }
                else
                {
                    refs = VR_IND_SCL;
                }

                // If the flag GTF_IND_TGTANYWHERE is set this indirection
                // could also point at a global variable

                if (tree->gtFlags & GTF_IND_TGTANYWHERE)
                {
                    refs = varRefKinds(((int)refs) | ((int)VR_GLB_VAR));
                }
            }

            /* Write it back to callers parameter using an 'or' */
            *refsPtr = varRefKinds((*refsPtr) | refs);

            // For IL volatile memory accesses we mark the GT_IND node
            // with a GTF_DONT_CSE flag.
            //
            // This flag is also set for the left hand side of an assignment.
            //
            // If this flag is set then we return false
            //
            if (tree->gtFlags & GTF_DONT_CSE)
            {
                return false;
            }
        }

        if (tree->gtGetOp2IfPresent())
        {
            /* It's a binary operator */
            // Recurse into op1, then iterate on op2 (via goto) to limit
            // recursion depth to one call per binary node.
            if (!lvaLclVarRefsAccum(tree->gtOp.gtOp1, findPtr, refsPtr, &allVars, &trkdVars))
            {
                return false;
            }
            // Otherwise...
            tree = tree->gtOp.gtOp2;
            assert(tree);
            goto AGAIN;
        }
        else
        {
            /* It's a unary (or nilary) operator */

            tree = tree->gtOp.gtOp1;
            if (tree)
            {
                goto AGAIN;
            }

            lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
            return true;
        }
    }

    switch (oper)
    {
        case GT_ARR_ELEM:
            if (!lvaLclVarRefsAccum(tree->gtArrElem.gtArrObj, findPtr, refsPtr, &allVars, &trkdVars))
            {
                return false;
            }

            unsigned dim;
            for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
            {
                // NOTE(review): 'tmpVs' appears to be unused in this loop.
                VARSET_TP tmpVs(VarSetOps::UninitVal());
                if (!lvaLclVarRefsAccum(tree->gtArrElem.gtArrInds[dim], findPtr, refsPtr, &allVars, &trkdVars))
                {
                    return false;
                }
            }
            lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
            return true;

        case GT_ARR_OFFSET:
            if (!lvaLclVarRefsAccum(tree->gtArrOffs.gtOffset, findPtr, refsPtr, &allVars, &trkdVars))
            {
                return false;
            }
            // Otherwise...
            if (!lvaLclVarRefsAccum(tree->gtArrOffs.gtIndex, findPtr, refsPtr, &allVars, &trkdVars))
            {
                return false;
            }
            // Otherwise...
            if (!lvaLclVarRefsAccum(tree->gtArrOffs.gtArrObj, findPtr, refsPtr, &allVars, &trkdVars))
            {
                return false;
            }
            // Otherwise...
            lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
            return true;

        case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
        case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
        {
            if (!lvaLclVarRefsAccum(tree->gtBoundsChk.gtIndex, findPtr, refsPtr, &allVars, &trkdVars))
            {
                return false;
            }
            // Otherwise...
            if (!lvaLclVarRefsAccum(tree->gtBoundsChk.gtArrLen, findPtr, refsPtr, &allVars, &trkdVars))
            {
                return false;
            }
            // Otherwise...
            lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
            return true;
        }

        case GT_STORE_DYN_BLK:
            if (!lvaLclVarRefsAccum(tree->gtDynBlk.Data(), findPtr, refsPtr, &allVars, &trkdVars))
            {
                return false;
            }
            // Otherwise...
            __fallthrough;
        case GT_DYN_BLK:
            if (!lvaLclVarRefsAccum(tree->gtDynBlk.Addr(), findPtr, refsPtr, &allVars, &trkdVars))
            {
                return false;
            }
            // Otherwise...
            if (!lvaLclVarRefsAccum(tree->gtDynBlk.gtDynamicSize, findPtr, refsPtr, &allVars, &trkdVars))
            {
                return false;
            }
            // Otherwise...
            lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
            // NOTE(review): unlike the cases above, this path 'break's instead
            // of returning true, so the function falls through to 'return false'
            // below even though the accumulated sets were written to 'result'.
            // Presumably a conservative "can't fully analyze" answer -- confirm.
            break;

        case GT_CALL:
            /* Allow calls to the Shared Static helper */
            if (IsSharedStaticHelper(tree))
            {
                *refsPtr = varRefKinds((*refsPtr) | VR_INVARIANT);
                lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
                return true;
            }
            break;
        default:
            break;

    } // end switch (oper)

    return false;
}
2574
2575 bool Compiler::lvaLclVarRefsAccum(
2576     GenTree* tree, GenTree** findPtr, varRefKinds* refsPtr, ALLVARSET_TP* allVars, VARSET_TP* trkdVars)
2577 {
2578     if (findPtr)
2579     {
2580         ALLVARSET_TP tmpVs(AllVarSetOps::UninitVal());
2581         if (!lvaLclVarRefs(tree, findPtr, refsPtr, &tmpVs))
2582         {
2583             return false;
2584         }
2585         // Otherwise...
2586         AllVarSetOps::UnionD(this, *allVars, tmpVs);
2587     }
2588     else
2589     {
2590         VARSET_TP tmpVs(VarSetOps::UninitVal());
2591         if (!lvaLclVarRefs(tree, findPtr, refsPtr, &tmpVs))
2592         {
2593             return false;
2594         }
2595         // Otherwise...
2596         VarSetOps::UnionD(this, *trkdVars, tmpVs);
2597     }
2598     return true;
2599 }
2600
2601 void Compiler::lvaLclVarRefsAccumIntoRes(GenTree**           findPtr,
2602                                          void*               result,
2603                                          ALLVARSET_VALARG_TP allVars,
2604                                          VARSET_VALARG_TP    trkdVars)
2605 {
2606     if (findPtr)
2607     {
2608         ALLVARSET_TP* avsPtr = (ALLVARSET_TP*)result;
2609         AllVarSetOps::AssignNoCopy(this, (*avsPtr), allVars);
2610     }
2611     else
2612     {
2613         VARSET_TP* vsPtr = (VARSET_TP*)result;
2614         VarSetOps::AssignNoCopy(this, (*vsPtr), trkdVars);
2615     }
2616 }
2617
2618 /*****************************************************************************
2619  *
2620  *  Return a relational operator that is the reverse of the given one.
2621  */
2622
2623 /* static */
2624 genTreeOps GenTree::ReverseRelop(genTreeOps relop)
2625 {
2626     static const genTreeOps reverseOps[] = {
2627         GT_NE, // GT_EQ
2628         GT_EQ, // GT_NE
2629         GT_GE, // GT_LT
2630         GT_GT, // GT_LE
2631         GT_LT, // GT_GE
2632         GT_LE, // GT_GT
2633 #ifndef LEGACY_BACKEND
2634         GT_TEST_NE, // GT_TEST_EQ
2635         GT_TEST_EQ, // GT_TEST_NE
2636 #endif
2637     };
2638
2639     assert(reverseOps[GT_EQ - GT_EQ] == GT_NE);
2640     assert(reverseOps[GT_NE - GT_EQ] == GT_EQ);
2641
2642     assert(reverseOps[GT_LT - GT_EQ] == GT_GE);
2643     assert(reverseOps[GT_LE - GT_EQ] == GT_GT);
2644     assert(reverseOps[GT_GE - GT_EQ] == GT_LT);
2645     assert(reverseOps[GT_GT - GT_EQ] == GT_LE);
2646
2647 #ifndef LEGACY_BACKEND
2648     assert(reverseOps[GT_TEST_EQ - GT_EQ] == GT_TEST_NE);
2649     assert(reverseOps[GT_TEST_NE - GT_EQ] == GT_TEST_EQ);
2650 #endif
2651
2652     assert(OperIsCompare(relop));
2653     assert(relop >= GT_EQ && (unsigned)(relop - GT_EQ) < sizeof(reverseOps));
2654
2655     return reverseOps[relop - GT_EQ];
2656 }
2657
2658 /*****************************************************************************
2659  *
2660  *  Return a relational operator that will work for swapped operands.
2661  */
2662
2663 /* static */
2664 genTreeOps GenTree::SwapRelop(genTreeOps relop)
2665 {
2666     static const genTreeOps swapOps[] = {
2667         GT_EQ, // GT_EQ
2668         GT_NE, // GT_NE
2669         GT_GT, // GT_LT
2670         GT_GE, // GT_LE
2671         GT_LE, // GT_GE
2672         GT_LT, // GT_GT
2673 #ifndef LEGACY_BACKEND
2674         GT_TEST_EQ, // GT_TEST_EQ
2675         GT_TEST_NE, // GT_TEST_NE
2676 #endif
2677     };
2678
2679     assert(swapOps[GT_EQ - GT_EQ] == GT_EQ);
2680     assert(swapOps[GT_NE - GT_EQ] == GT_NE);
2681
2682     assert(swapOps[GT_LT - GT_EQ] == GT_GT);
2683     assert(swapOps[GT_LE - GT_EQ] == GT_GE);
2684     assert(swapOps[GT_GE - GT_EQ] == GT_LE);
2685     assert(swapOps[GT_GT - GT_EQ] == GT_LT);
2686
2687 #ifndef LEGACY_BACKEND
2688     assert(swapOps[GT_TEST_EQ - GT_EQ] == GT_TEST_EQ);
2689     assert(swapOps[GT_TEST_NE - GT_EQ] == GT_TEST_NE);
2690 #endif
2691
2692     assert(OperIsCompare(relop));
2693     assert(relop >= GT_EQ && (unsigned)(relop - GT_EQ) < sizeof(swapOps));
2694
2695     return swapOps[relop - GT_EQ];
2696 }
2697
2698 /*****************************************************************************
2699  *
2700  *  Reverse the meaning of the given test condition.
2701  */
2702
GenTree* Compiler::gtReverseCond(GenTree* tree)
{
    if (tree->OperIsCompare())
    {
        // Relops are reversed in place by changing the oper.
        tree->SetOper(GenTree::ReverseRelop(tree->OperGet()));

        // Flip the GTF_RELOP_NAN_UN bit
        //     a ord b   === (a != NaN && b != NaN)
        //     a unord b === (a == NaN || b == NaN)
        // => !(a ord b) === (a unord b)
        if (varTypeIsFloating(tree->gtOp.gtOp1->TypeGet()))
        {
            tree->gtFlags ^= GTF_RELOP_NAN_UN;
        }
    }
    else if (tree->OperIs(GT_JCC, GT_SETCC))
    {
        // Condition-consuming nodes carry the relop in gtCondition;
        // reverse that instead of the node's oper.
        GenTreeCC* cc   = tree->AsCC();
        cc->gtCondition = GenTree::ReverseRelop(cc->gtCondition);
    }
    else if (tree->OperIs(GT_JCMP))
    {
        // Flip the GTF_JCMP_EQ
        //
        // This causes switching
        //     cbz <=> cbnz
        //     tbz <=> tbnz
        tree->gtFlags ^= GTF_JCMP_EQ;
    }
    else
    {
        // Not a condition node at all: wrap it in a logical NOT instead.
        // Note this may allocate a new node, so callers must use the return value.
        tree = gtNewOperNode(GT_NOT, TYP_INT, tree);
    }

    return tree;
}
2739
2740 /*****************************************************************************/
2741
2742 #ifdef DEBUG
2743
2744 bool GenTree::gtIsValid64RsltMul()
2745 {
2746     if ((gtOper != GT_MUL) || !(gtFlags & GTF_MUL_64RSLT))
2747     {
2748         return false;
2749     }
2750
2751     GenTree* op1 = gtOp.gtOp1;
2752     GenTree* op2 = gtOp.gtOp2;
2753
2754     if (TypeGet() != TYP_LONG || op1->TypeGet() != TYP_LONG || op2->TypeGet() != TYP_LONG)
2755     {
2756         return false;
2757     }
2758
2759     if (gtOverflow())
2760     {
2761         return false;
2762     }
2763
2764     // op1 has to be conv.i8(i4Expr)
2765     if ((op1->gtOper != GT_CAST) || (genActualType(op1->CastFromType()) != TYP_INT))
2766     {
2767         return false;
2768     }
2769
2770     // op2 has to be conv.i8(i4Expr)
2771     if ((op2->gtOper != GT_CAST) || (genActualType(op2->CastFromType()) != TYP_INT))
2772     {
2773         return false;
2774     }
2775
2776     // The signedness of both casts must be the same
2777     if (((op1->gtFlags & GTF_UNSIGNED) != 0) != ((op2->gtFlags & GTF_UNSIGNED) != 0))
2778     {
2779         return false;
2780     }
2781
2782     // Do unsigned mul iff both the casts are unsigned
2783     if (((op1->gtFlags & GTF_UNSIGNED) != 0) != ((gtFlags & GTF_UNSIGNED) != 0))
2784     {
2785         return false;
2786     }
2787
2788     return true;
2789 }
2790
2791 #endif // DEBUG
2792
2793 //------------------------------------------------------------------------------
2794 // gtSetListOrder : Figure out the evaluation order for a list of values.
2795 //
2796 //
2797 // Arguments:
2798 //    list  - List to figure out the evaluation order for
2799 //    isListCallArgs - True iff the list is a list of call arguments
2800 //    callArgsInRegs -  True iff the list is a list of call arguments and they are passed in registers
2801 //
2802 // Return Value:
2803 //    True if the operation can be a root of a bitwise rotation tree; false otherwise.
2804
2805 unsigned Compiler::gtSetListOrder(GenTree* list, bool isListCallArgs, bool callArgsInRegs)
2806 {
2807     assert((list != nullptr) && list->OperIsAnyList());
2808     assert(!callArgsInRegs || isListCallArgs);
2809
2810     ArrayStack<GenTree*> listNodes(this);
2811
2812     do
2813     {
2814         listNodes.Push(list);
2815         list = list->gtOp.gtOp2;
2816     } while ((list != nullptr) && (list->OperIsAnyList()));
2817
2818     unsigned nxtlvl = (list == nullptr) ? 0 : gtSetEvalOrder(list);
2819     while (listNodes.Height() > 0)
2820     {
2821 #if FEATURE_STACK_FP_X87
2822         /* Save the current FP stack level since an argument list
2823         * will implicitly pop the FP stack when pushing the argument */
2824         unsigned FPlvlSave = codeGen->genGetFPstkLevel();
2825 #endif // FEATURE_STACK_FP_X87
2826
2827         list = listNodes.Pop();
2828         assert(list && list->OperIsAnyList());
2829         GenTree* next = list->gtOp.gtOp2;
2830
2831         unsigned level = 0;
2832         unsigned ftreg = 0;
2833
2834         // TODO: Do we have to compute costs differently for argument lists and
2835         // all other lists?
2836         // https://github.com/dotnet/coreclr/issues/7095
2837         unsigned costSz = (isListCallArgs || (next == nullptr)) ? 0 : 1;
2838         unsigned costEx = (isListCallArgs || (next == nullptr)) ? 0 : 1;
2839
2840         if (next != nullptr)
2841         {
2842             ftreg |= next->gtRsvdRegs;
2843             if (isListCallArgs)
2844             {
2845                 if (level < nxtlvl)
2846                 {
2847                     level = nxtlvl;
2848                 }
2849             }
2850             costEx += next->gtCostEx;
2851             costSz += next->gtCostSz;
2852         }
2853
2854         GenTree* op1 = list->gtOp.gtOp1;
2855         unsigned lvl = gtSetEvalOrder(op1);
2856
2857 #if FEATURE_STACK_FP_X87
2858         // restore the FP level
2859         codeGen->genResetFPstkLevel(FPlvlSave);
2860 #endif // FEATURE_STACK_FP_X87
2861
2862         list->gtRsvdRegs = (regMaskSmall)(ftreg | op1->gtRsvdRegs);
2863
2864         // Swap the level counts
2865         if (list->gtFlags & GTF_REVERSE_OPS)
2866         {
2867             unsigned tmpl;
2868
2869             tmpl   = lvl;
2870             lvl    = nxtlvl;
2871             nxtlvl = tmpl;
2872         }
2873
2874         // TODO: Do we have to compute levels differently for argument lists and
2875         // all other lists?
2876         // https://github.com/dotnet/coreclr/issues/7095
2877         if (isListCallArgs)
2878         {
2879             if (level < lvl)
2880             {
2881                 level = lvl;
2882             }
2883         }
2884         else
2885         {
2886             if (lvl < 1)
2887             {
2888                 level = nxtlvl;
2889             }
2890             else if (lvl == nxtlvl)
2891             {
2892                 level = lvl + 1;
2893             }
2894             else
2895             {
2896                 level = lvl;
2897             }
2898         }
2899
2900         if (op1->gtCostEx != 0)
2901         {
2902             costEx += op1->gtCostEx;
2903             costEx += (callArgsInRegs || !isListCallArgs) ? 0 : IND_COST_EX;
2904         }
2905
2906         if (op1->gtCostSz != 0)
2907         {
2908             costSz += op1->gtCostSz;
2909 #ifdef _TARGET_XARCH_
2910             if (callArgsInRegs) // push is smaller than mov to reg
2911 #endif
2912             {
2913                 costSz += 1;
2914             }
2915         }
2916
2917         list->SetCosts(costEx, costSz);
2918
2919         nxtlvl = level;
2920     }
2921
2922     return nxtlvl;
2923 }
2924
2925 //-----------------------------------------------------------------------------
2926 // gtWalkOp: Traverse and mark an address expression
2927 //
2928 // Arguments:
2929 //    op1WB - An out parameter which is either the address expression, or one
2930 //            of its operands.
2931 //    op2WB - An out parameter which starts as either null or one of the operands
2932 //            of the address expression.
2933 //    base  - The base address of the addressing mode, or null if 'constOnly' is false
2934 //    constOnly - True if we will only traverse into ADDs with constant op2.
2935 //
2936 // This routine is a helper routine for gtSetEvalOrder() and is used to identify the
2937 // base and index nodes, which will be validated against those identified by
2938 // genCreateAddrMode().
2939 // It also marks the ADD nodes involved in the address expression with the
2940 // GTF_ADDRMODE_NO_CSE flag which prevents them from being considered for CSE's.
2941 //
2942 // Its two output parameters are modified under the following conditions:
2943 //
2944 // It is called once with the original address expression as 'op1WB', and
2945 // with 'constOnly' set to false. On this first invocation, *op1WB is always
2946 // an ADD node, and it will consider the operands of the ADD even if its op2 is
2947 // not a constant. However, when it encounters a non-constant or the base in the
2948 // op2 position, it stops iterating. That operand is returned in the 'op2WB' out
2949 // parameter, and will be considered on the third invocation of this method if
2950 // it is an ADD.
2951 //
2952 // It is called the second time with the two operands of the original expression, in
2953 // the original order, and the third time in reverse order. For these invocations
2954 // 'constOnly' is true, so it will only traverse cascaded ADD nodes if they have a
2955 // constant op2.
2956 //
2957 // The result, after three invocations, is that the values of the two out parameters
2958 // correspond to the base and index in some fashion. This method doesn't attempt
2959 // to determine or validate the scale or offset, if any.
2960 //
2961 // Assumptions (presumed to be ensured by genCreateAddrMode()):
2962 //    If an ADD has a constant operand, it is in the op2 position.
2963 //
2964 // Notes:
2965 //    This method, and its invocation sequence, are quite confusing, and since they
2966 //    were not originally well-documented, this specification is a possibly-imperfect
2967 //    reconstruction.
2968 //    The motivation for the handling of the NOP case is unclear.
2969 //    Note that 'op2WB' is only modified in the initial (!constOnly) case,
2970 //    or if a NOP is encountered in the op1 position.
2971 //
void Compiler::gtWalkOp(GenTree** op1WB, GenTree** op2WB, GenTree* base, bool constOnly)
{
    GenTree* op1 = *op1WB;
    GenTree* op2 = *op2WB;

    // Skip over wrappers (e.g. commas) to reach the underlying value.
    op1 = op1->gtEffectiveVal();

    // Now we look for op1's with non-overflow GT_ADDs [of constants]
    while ((op1->gtOper == GT_ADD) && (!op1->gtOverflow()) && (!constOnly || (op1->gtOp.gtOp2->IsCnsIntOrI())))
    {
        // mark it with GTF_ADDRMODE_NO_CSE so the ADD won't be CSE'd away;
        // it is expected to be folded into the addressing mode
        op1->gtFlags |= GTF_ADDRMODE_NO_CSE;

        if (!constOnly)
        {
            op2 = op1->gtOp.gtOp2;
        }
        op1 = op1->gtOp.gtOp1;

        // If op1 is a GT_NOP then swap op1 and op2.
        // (Why? Also, presumably op2 is not a GT_NOP in this case?)
        if (op1->gtOper == GT_NOP)
        {
            GenTree* tmp;

            tmp = op1;
            op1 = op2;
            op2 = tmp;
        }

        // On the initial (!constOnly) invocation, stop once op2 becomes the
        // base or a non-constant; that operand is returned via op2WB for a
        // later invocation to consider.
        if (!constOnly && ((op2 == base) || (!op2->IsCnsIntOrI())))
        {
            break;
        }

        op1 = op1->gtEffectiveVal();
    }

    *op1WB = op1;
    *op2WB = op2;
}
3013
3014 #ifdef DEBUG
3015 /*****************************************************************************
3016  * This is a workaround. It is to help implement an assert in gtSetEvalOrder() that the values
3017  * gtWalkOp() leaves in op1 and op2 correspond with the values of adr, idx, mul, and cns
3018  * that are returned by genCreateAddrMode(). It's essentially impossible to determine
3019  * what gtWalkOp() *should* return for all possible trees. This simply loosens one assert
3020  * to handle the following case:
3021
3022          indir     int
3023                     const(h)  int    4 field
3024                  +         byref
3025                     lclVar    byref  V00 this               <-- op2
3026               comma     byref                           <-- adr (base)
3027                  indir     byte
3028                     lclVar    byref  V00 this
3029            +         byref
3030                  const     int    2                     <-- mul == 4
3031               <<        int                                 <-- op1
3032                  lclVar    int    V01 arg1              <-- idx
3033
3034  * Here, we are planning to generate the address mode [edx+4*eax], where eax = idx and edx = the GT_COMMA expression.
3035  * To check adr equivalence with op2, we need to walk down the GT_ADD tree just like gtWalkOp() does.
3036  */
3037 GenTree* Compiler::gtWalkOpEffectiveVal(GenTree* op)
3038 {
3039     for (;;)
3040     {
3041         op = op->gtEffectiveVal();
3042
3043         if ((op->gtOper != GT_ADD) || op->gtOverflow() || !op->gtOp.gtOp2->IsCnsIntOrI())
3044         {
3045             break;
3046         }
3047
3048         op = op->gtOp.gtOp1;
3049     }
3050
3051     return op;
3052 }
3053 #endif // DEBUG
3054
3055 /*****************************************************************************
3056  *
3057  *  Given a tree, set the gtCostEx and gtCostSz fields which
3058  *  are used to measure the relative costs of the codegen of the tree
3059  *
3060  */
3061
void Compiler::gtPrepareCost(GenTree* tree)
{
#if FEATURE_STACK_FP_X87
    // Costing tracks the x87 FP stack depth; start from a clean level
    // before walking the tree.
    codeGen->genResetFPstkLevel();
#endif // FEATURE_STACK_FP_X87
    gtSetEvalOrder(tree);
}
3069
3070 bool Compiler::gtIsLikelyRegVar(GenTree* tree)
3071 {
3072     if (tree->gtOper != GT_LCL_VAR)
3073     {
3074         return false;
3075     }
3076
3077     assert(tree->gtLclVar.gtLclNum < lvaTableCnt);
3078     LclVarDsc* varDsc = lvaTable + tree->gtLclVar.gtLclNum;
3079
3080     if (varDsc->lvDoNotEnregister)
3081     {
3082         return false;
3083     }
3084
3085     if (varDsc->lvRefCntWtd < (BB_UNITY_WEIGHT * 3))
3086     {
3087         return false;
3088     }
3089
3090 #ifdef _TARGET_X86_
3091     if (varTypeIsFloating(tree->TypeGet()))
3092         return false;
3093     if (varTypeIsLong(tree->TypeGet()))
3094         return false;
3095 #endif
3096
3097     return true;
3098 }
3099
3100 //------------------------------------------------------------------------
3101 // gtCanSwapOrder: Returns true iff the secondNode can be swapped with firstNode.
3102 //
3103 // Arguments:
3104 //    firstNode  - An operand of a tree that can have GTF_REVERSE_OPS set.
3105 //    secondNode - The other operand of the tree.
3106 //
3107 // Return Value:
3108 //    Returns a boolean indicating whether it is safe to reverse the execution
3109 //    order of the two trees, considering any exception, global effects, or
3110 //    ordering constraints.
3111 //
3112 bool Compiler::gtCanSwapOrder(GenTree* firstNode, GenTree* secondNode)
3113 {
3114     // Relative of order of global / side effects can't be swapped.
3115
3116     bool canSwap = true;
3117
3118     if (optValnumCSE_phase)
3119     {
3120         canSwap = optCSE_canSwap(firstNode, secondNode);
3121     }
3122
3123     // We cannot swap in the presence of special side effects such as GT_CATCH_ARG.
3124
3125     if (canSwap && (firstNode->gtFlags & GTF_ORDER_SIDEEFF))
3126     {
3127         canSwap = false;
3128     }
3129
3130     // When strict side effect order is disabled we allow GTF_REVERSE_OPS to be set
3131     // when one or both sides contains a GTF_CALL or GTF_EXCEPT.
3132     // Currently only the C and C++ languages allow non strict side effect order.
3133
3134     unsigned strictEffects = GTF_GLOB_EFFECT;
3135
3136     if (canSwap && (firstNode->gtFlags & strictEffects))
3137     {
3138         // op1 has side efects that can't be reordered.
3139         // Check for some special cases where we still may be able to swap.
3140
3141         if (secondNode->gtFlags & strictEffects)
3142         {
3143             // op2 has also has non reorderable side effects - can't swap.
3144             canSwap = false;
3145         }
3146         else
3147         {
3148             // No side effects in op2 - we can swap iff op1 has no way of modifying op2,
3149             // i.e. through byref assignments or calls or op2 is a constant.
3150
3151             if (firstNode->gtFlags & strictEffects & GTF_PERSISTENT_SIDE_EFFECTS)
3152             {
3153                 // We have to be conservative - can swap iff op2 is constant.
3154                 if (!secondNode->OperIsConst())
3155                 {
3156                     canSwap = false;
3157                 }
3158             }
3159         }
3160     }
3161     return canSwap;
3162 }
3163
3164 /*****************************************************************************
3165  *
3166  *  Given a tree, figure out the order in which its sub-operands should be
3167  *  evaluated. If the second operand of a binary operator is more expensive
3168  *  than the first operand, then try to swap the operand trees. Updates the
3169  *  GTF_REVERSE_OPS bit if necessary in this case.
3170  *
3171  *  Returns the Sethi 'complexity' estimate for this tree (the higher
3172  *  the number, the higher is the tree's resources requirement).
3173  *
3174  *  This function sets:
3175  *      1. gtCostEx to the execution complexity estimate
3176  *      2. gtCostSz to the code size estimate
3177  *      3. gtRsvdRegs to the set of fixed registers trashed by the tree
3178  *      4. gtFPlvl to the "floating point depth" value for node, i.e. the max. number
3179  *         of operands the tree will push on the x87 (coprocessor) stack. Also sets
3180  *         genFPstkLevel, tmpDoubleSpillMax, and possibly gtFPstLvlRedo.
3181  *      5. Sometimes sets GTF_ADDRMODE_NO_CSE on nodes in the tree.
3182  *      6. DEBUG-only: clears GTF_DEBUG_NODE_MORPHED.
3183  */
3184
3185 #ifdef _PREFAST_
3186 #pragma warning(push)
3187 #pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
3188 #endif
3189 unsigned Compiler::gtSetEvalOrder(GenTree* tree)
3190 {
3191     assert(tree);
3192     assert(tree->gtOper != GT_STMT);
3193
3194 #ifdef DEBUG
3195     /* Clear the GTF_DEBUG_NODE_MORPHED flag as well */
3196     tree->gtDebugFlags &= ~GTF_DEBUG_NODE_MORPHED;
3197 #endif
3198
3199     /* Is this a FP value? */
3200
3201     bool isflt = varTypeIsFloating(tree->TypeGet());
3202
3203     /* Figure out what kind of a node we have */
3204
3205     const genTreeOps oper = tree->OperGet();
3206     const unsigned   kind = tree->OperKind();
3207
3208     /* Assume no fixed registers will be trashed */
3209
3210     regMaskTP ftreg = RBM_NONE; // Set of registers that will be used by the subtree
3211     unsigned  level;
3212     int       costEx;
3213     int       costSz;
3214
3215 #ifdef DEBUG
3216     costEx = -1;
3217     costSz = -1;
3218 #endif
3219
3220     /* Is this a constant or a leaf node? */
3221
3222     if (kind & (GTK_LEAF | GTK_CONST))
3223     {
3224         switch (oper)
3225         {
3226             GenTreeIntConCommon* con;
3227
3228 #ifdef _TARGET_ARM_
3229             case GT_CNS_LNG:
3230                 costSz = 9;
3231                 costEx = 4;
3232                 goto COMMON_CNS;
3233
3234             case GT_CNS_STR:
3235                 // Uses movw/movt
3236                 costSz = 7;
3237                 costEx = 3;
3238                 goto COMMON_CNS;
3239
3240             case GT_CNS_INT:
3241
3242                 // If the constant is a handle then it will need to have a relocation
3243                 //  applied to it.
3244                 // Any constant that requires a reloc must use the movw/movt sequence
3245                 //
3246                 con = tree->AsIntConCommon();
3247
3248                 if (con->ImmedValNeedsReloc(this) || !codeGen->validImmForInstr(INS_mov, tree->gtIntCon.gtIconVal))
3249                 {
3250                     // Uses movw/movt
3251                     costSz = 7;
3252                     costEx = 3;
3253                 }
3254                 else if (((unsigned)tree->gtIntCon.gtIconVal) <= 0x00ff)
3255                 {
3256                     // mov  Rd, <const8>
3257                     costSz = 1;
3258                     costEx = 1;
3259                 }
3260                 else
3261                 {
3262                     // Uses movw/mvn
3263                     costSz = 3;
3264                     costEx = 1;
3265                 }
3266                 goto COMMON_CNS;
3267
3268 #elif defined _TARGET_XARCH_
3269
3270             case GT_CNS_LNG:
3271                 costSz = 10;
3272                 costEx = 3;
3273                 goto COMMON_CNS;
3274
3275             case GT_CNS_STR:
3276                 costSz = 4;
3277                 costEx = 1;
3278                 goto COMMON_CNS;
3279
3280             case GT_CNS_INT:
3281             {
3282                 // If the constant is a handle then it will need to have a relocation
3283                 //  applied to it.
3284                 //
3285                 con = tree->AsIntConCommon();
3286
3287                 bool iconNeedsReloc = con->ImmedValNeedsReloc(this);
3288
3289                 if (!iconNeedsReloc && con->FitsInI8())
3290                 {
3291                     costSz = 1;
3292                     costEx = 1;
3293                 }
3294 #if defined(_TARGET_AMD64_)
3295                 else if (iconNeedsReloc || !con->FitsInI32())
3296                 {
3297                     costSz = 10;
3298                     costEx = 3;
3299                 }
3300 #endif // _TARGET_AMD64_
3301                 else
3302                 {
3303                     costSz = 4;
3304                     costEx = 1;
3305                 }
3306                 goto COMMON_CNS;
3307             }
3308
3309 #elif defined(_TARGET_ARM64_)
3310             case GT_CNS_LNG:
3311             case GT_CNS_STR:
3312             case GT_CNS_INT:
3313                 // TODO-ARM64-NYI: Need cost estimates.
3314                 costSz = 1;
3315                 costEx = 1;
3316                 goto COMMON_CNS;
3317
3318 #else
3319             case GT_CNS_LNG:
3320             case GT_CNS_STR:
3321             case GT_CNS_INT:
3322 #error "Unknown _TARGET_"
3323 #endif
3324
3325             COMMON_CNS:
3326                 /*
3327                     Note that some code below depends on constants always getting
3328                     moved to be the second operand of a binary operator. This is
3329                     easily accomplished by giving constants a level of 0, which
3330                     we do on the next line. If you ever decide to change this, be
3331                     aware that unless you make other arrangements for integer
3332                     constants to be moved, stuff will break.
3333                  */
3334
3335                 level = 0;
3336                 break;
3337
3338             case GT_CNS_DBL:
3339                 level = 0;
3340                 /* We use fldz and fld1 to load 0.0 and 1.0, but all other  */
3341                 /* floating point constants are loaded using an indirection */
3342                 if ((*((__int64*)&(tree->gtDblCon.gtDconVal)) == 0) ||
3343                     (*((__int64*)&(tree->gtDblCon.gtDconVal)) == I64(0x3ff0000000000000)))
3344                 {
3345                     costEx = 1;
3346                     costSz = 1;
3347                 }
3348                 else
3349                 {
3350                     costEx = IND_COST_EX;
3351                     costSz = 4;
3352                 }
3353                 break;
3354
3355             case GT_LCL_VAR:
3356                 level = 1;
3357                 if (gtIsLikelyRegVar(tree))
3358                 {
3359                     costEx = 1;
3360                     costSz = 1;
3361                     /* Sign-extend and zero-extend are more expensive to load */
3362                     if (lvaTable[tree->gtLclVar.gtLclNum].lvNormalizeOnLoad())
3363                     {
3364                         costEx += 1;
3365                         costSz += 1;
3366                     }
3367                 }
3368                 else
3369                 {
3370                     costEx = IND_COST_EX;
3371                     costSz = 2;
3372                     /* Sign-extend and zero-extend are more expensive to load */
3373                     if (varTypeIsSmall(tree->TypeGet()))
3374                     {
3375                         costEx += 1;
3376                         costSz += 1;
3377                     }
3378                 }
3379 #if defined(_TARGET_AMD64_)
3380                 // increase costSz for floating point locals
3381                 if (isflt)
3382                 {
3383                     costSz += 1;
3384                     if (!gtIsLikelyRegVar(tree))
3385                     {
3386                         costSz += 1;
3387                     }
3388                 }
3389 #endif
3390 #if CPU_LONG_USES_REGPAIR
3391                 if (varTypeIsLong(tree->TypeGet()))
3392                 {
3393                     costEx *= 2; // Longs are twice as expensive
3394                     costSz *= 2;
3395                 }
3396 #endif
3397                 break;
3398
3399             case GT_CLS_VAR:
3400 #ifdef _TARGET_ARM_
3401                 // We generate movw/movt/ldr
3402                 level  = 1;
3403                 costEx = 3 + IND_COST_EX; // 6
3404                 costSz = 4 + 4 + 2;       // 10
3405                 break;
3406 #endif
3407             case GT_LCL_FLD:
3408                 level  = 1;
3409                 costEx = IND_COST_EX;
3410                 costSz = 4;
3411                 if (varTypeIsSmall(tree->TypeGet()))
3412                 {
3413                     costEx += 1;
3414                     costSz += 1;
3415                 }
3416                 break;
3417
3418             case GT_PHI_ARG:
3419             case GT_ARGPLACE:
3420                 level  = 0;
3421                 costEx = 0;
3422                 costSz = 0;
3423                 break;
3424
3425             default:
3426                 level  = 1;
3427                 costEx = 1;
3428                 costSz = 1;
3429                 break;
3430         }
3431 #if FEATURE_STACK_FP_X87
3432         if (isflt && (oper != GT_PHI_ARG))
3433         {
3434             codeGen->genIncrementFPstkLevel();
3435         }
3436 #endif // FEATURE_STACK_FP_X87
3437         goto DONE;
3438     }
3439
3440     /* Is it a 'simple' unary/binary operator? */
3441
3442     if (kind & GTK_SMPOP)
3443     {
3444         int      lvlb; // preference for op2
3445         unsigned lvl2; // scratch variable
3446
3447         GenTree* op1 = tree->gtOp.gtOp1;
3448         GenTree* op2 = tree->gtGetOp2IfPresent();
3449
3450         costEx = 0;
3451         costSz = 0;
3452
3453         if (tree->OperIsAddrMode())
3454         {
3455             if (op1 == nullptr)
3456             {
3457                 op1 = op2;
3458                 op2 = nullptr;
3459             }
3460         }
3461
3462         /* Check for a nilary operator */
3463
3464         if (op1 == nullptr)
3465         {
3466             assert(op2 == nullptr);
3467
3468             level = 0;
3469
3470             goto DONE;
3471         }
3472
3473         /* Is this a unary operator? */
3474
3475         if (op2 == nullptr)
3476         {
3477             /* Process the operand of the operator */
3478
3479             /* Most Unary ops have costEx of 1 */
3480             costEx = 1;
3481             costSz = 1;
3482
3483             level = gtSetEvalOrder(op1);
3484             ftreg |= op1->gtRsvdRegs;
3485
3486             /* Special handling for some operators */
3487
3488             switch (oper)
3489             {
3490                 case GT_JTRUE:
3491                     costEx = 2;
3492                     costSz = 2;
3493                     break;
3494
3495                 case GT_SWITCH:
3496                     costEx = 10;
3497                     costSz = 5;
3498                     break;
3499
3500                 case GT_CAST:
3501 #if defined(_TARGET_ARM_)
3502                     costEx = 1;
3503                     costSz = 1;
3504                     if (isflt || varTypeIsFloating(op1->TypeGet()))
3505                     {
3506                         costEx = 3;
3507                         costSz = 4;
3508                     }
3509 #elif defined(_TARGET_ARM64_)
3510                     costEx = 1;
3511                     costSz = 2;
3512                     if (isflt || varTypeIsFloating(op1->TypeGet()))
3513                     {
3514                         costEx = 2;
3515                         costSz = 4;
3516                     }
3517 #elif defined(_TARGET_XARCH_)
3518                     costEx = 1;
3519                     costSz = 2;
3520
3521                     if (isflt || varTypeIsFloating(op1->TypeGet()))
3522                     {
3523                         /* cast involving floats always go through memory */
3524                         costEx = IND_COST_EX * 2;
3525                         costSz = 6;
3526
3527 #if FEATURE_STACK_FP_X87
3528                         if (isflt != varTypeIsFloating(op1->TypeGet()))
3529                         {
3530                             isflt ? codeGen->genIncrementFPstkLevel()  // Cast from int to float
3531                                   : codeGen->genDecrementFPstkLevel(); // Cast from float to int
3532                         }
3533 #endif // FEATURE_STACK_FP_X87
3534                     }
3535 #else
3536 #error "Unknown _TARGET_"
3537 #endif
3538
3539 #if CPU_LONG_USES_REGPAIR
3540                     if (varTypeIsLong(tree->TypeGet()))
3541                     {
3542                         if (varTypeIsUnsigned(tree->TypeGet()))
3543                         {
3544                             /* Cast to unsigned long */
3545                             costEx += 1;
3546                             costSz += 2;
3547                         }
3548                         else
3549                         {
3550                             /* Cast to signed long is slightly more costly */
3551                             costEx += 2;
3552                             costSz += 3;
3553                         }
3554                     }
3555 #endif // CPU_LONG_USES_REGPAIR
3556
3557                     /* Overflow casts are a lot more expensive */
3558                     if (tree->gtOverflow())
3559                     {
3560                         costEx += 6;
3561                         costSz += 6;
3562                     }
3563
3564                     break;
3565
3566                 case GT_LIST:
3567                 case GT_FIELD_LIST:
3568                 case GT_NOP:
3569                     costEx = 0;
3570                     costSz = 0;
3571                     break;
3572
3573                 case GT_INTRINSIC:
3574                     // GT_INTRINSIC intrinsics Sin, Cos, Sqrt, Abs ... have higher costs.
3575                     // TODO: tune these costs target specific as some of these are
3576                     // target intrinsics and would cost less to generate code.
3577                     switch (tree->gtIntrinsic.gtIntrinsicId)
3578                     {
3579                         default:
3580                             assert(!"missing case for gtIntrinsicId");
3581                             costEx = 12;
3582                             costSz = 12;
3583                             break;
3584
3585                         case CORINFO_INTRINSIC_Sin:
3586                         case CORINFO_INTRINSIC_Cos:
3587                         case CORINFO_INTRINSIC_Sqrt:
3588                         case CORINFO_INTRINSIC_Cbrt:
3589                         case CORINFO_INTRINSIC_Cosh:
3590                         case CORINFO_INTRINSIC_Sinh:
3591                         case CORINFO_INTRINSIC_Tan:
3592                         case CORINFO_INTRINSIC_Tanh:
3593                         case CORINFO_INTRINSIC_Asin:
3594                         case CORINFO_INTRINSIC_Asinh:
3595                         case CORINFO_INTRINSIC_Acos:
3596                         case CORINFO_INTRINSIC_Acosh:
3597                         case CORINFO_INTRINSIC_Atan:
3598                         case CORINFO_INTRINSIC_Atanh:
3599                         case CORINFO_INTRINSIC_Atan2:
3600                         case CORINFO_INTRINSIC_Log10:
3601                         case CORINFO_INTRINSIC_Pow:
3602                         case CORINFO_INTRINSIC_Exp:
3603                         case CORINFO_INTRINSIC_Ceiling:
3604                         case CORINFO_INTRINSIC_Floor:
3605                         case CORINFO_INTRINSIC_Object_GetType:
3606                             // Giving intrinsics a large fixed execution cost is because we'd like to CSE
3607                             // them, even if they are implemented by calls. This is different from modeling
3608                             // user calls since we never CSE user calls.
3609                             costEx = 36;
3610                             costSz = 4;
3611                             break;
3612
3613                         case CORINFO_INTRINSIC_Abs:
3614                             costEx = 5;
3615                             costSz = 15;
3616                             break;
3617
3618                         case CORINFO_INTRINSIC_Round:
3619                             costEx = 3;
3620                             costSz = 4;
3621 #if FEATURE_STACK_FP_X87
3622                             if (tree->TypeGet() == TYP_INT)
3623                             {
3624                                 // This is a special case to handle the following
3625                                 // optimization: conv.i4(round.d(d)) -> round.i(d)
3626                                 codeGen->genDecrementFPstkLevel();
3627                             }
3628 #endif // FEATURE_STACK_FP_X87
3629                             break;
3630                     }
3631                     level++;
3632                     break;
3633
3634                 case GT_NOT:
3635                 case GT_NEG:
3636                     // We need to ensure that -x is evaluated before x or else
3637                     // we get burned while adjusting genFPstkLevel in x*-x where
3638                     // the rhs x is the last use of the enregistered x.
3639                     //
3640                     // Even in the integer case we want to prefer to
3641                     // evaluate the side without the GT_NEG node, all other things
3642                     // being equal.  Also a GT_NOT requires a scratch register
3643
3644                     level++;
3645                     break;
3646
3647                 case GT_ADDR:
3648
3649 #if FEATURE_STACK_FP_X87
3650                     /* If the operand was floating point, pop the value from the stack */
3651
3652                     if (varTypeIsFloating(op1->TypeGet()))
3653                     {
3654                         codeGen->genDecrementFPstkLevel();
3655                     }
3656 #endif // FEATURE_STACK_FP_X87
3657                     costEx = 0;
3658                     costSz = 1;
3659
3660                     // If we have a GT_ADDR of an GT_IND we can just copy the costs from indOp1
3661                     if (op1->OperGet() == GT_IND)
3662                     {
3663                         GenTree* indOp1 = op1->gtOp.gtOp1;
3664                         costEx          = indOp1->gtCostEx;
3665                         costSz          = indOp1->gtCostSz;
3666                     }
3667                     break;
3668
3669                 case GT_ARR_LENGTH:
3670                     level++;
3671
3672                     /* Array Len should be the same as an indirection, which has a costEx of IND_COST_EX */
3673                     costEx = IND_COST_EX - 1;
3674                     costSz = 2;
3675                     break;
3676
3677                 case GT_MKREFANY:
3678                 case GT_OBJ:
3679                     // We estimate the cost of a GT_OBJ or GT_MKREFANY to be two loads (GT_INDs)
3680                     costEx = 2 * IND_COST_EX;
3681                     costSz = 2 * 2;
3682                     break;
3683
3684                 case GT_BOX:
3685                     // We estimate the cost of a GT_BOX to be two stores (GT_INDs)
3686                     costEx = 2 * IND_COST_EX;
3687                     costSz = 2 * 2;
3688                     break;
3689
3690                 case GT_BLK:
3691                 case GT_IND:
3692
3693                     /* An indirection should always have a non-zero level.
3694                      * Only constant leaf nodes have level 0.
3695                      */
3696
3697                     if (level == 0)
3698                     {
3699                         level = 1;
3700                     }
3701
3702                     /* Indirections have a costEx of IND_COST_EX */
3703                     costEx = IND_COST_EX;
3704                     costSz = 2;
3705
3706                     /* If we have to sign-extend or zero-extend, bump the cost */
3707                     if (varTypeIsSmall(tree->TypeGet()))
3708                     {
3709                         costEx += 1;
3710                         costSz += 1;
3711                     }
3712
3713                     if (isflt)
3714                     {
3715 #if FEATURE_STACK_FP_X87
3716                         /* Indirect loads of FP values push a new value on the FP stack */
3717                         codeGen->genIncrementFPstkLevel();
3718 #endif // FEATURE_STACK_FP_X87
3719                         if (tree->TypeGet() == TYP_DOUBLE)
3720                         {
3721                             costEx += 1;
3722                         }
3723 #ifdef _TARGET_ARM_
3724                         costSz += 2;
3725 #endif // _TARGET_ARM_
3726                     }
3727
3728                     // Can we form an addressing mode with this indirection?
3729                     // TODO-CQ: Consider changing this to op1->gtEffectiveVal() to take into account
3730                     // addressing modes hidden under a comma node.
3731
3732                     if (op1->gtOper == GT_ADD)
3733                     {
3734                         bool rev;
3735 #if SCALED_ADDR_MODES
3736                         unsigned mul;
3737 #endif
3738                         unsigned cns;
3739                         GenTree* base;
3740                         GenTree* idx;
3741
3742                         // See if we can form a complex addressing mode.
3743
3744                         GenTree* addr = op1->gtEffectiveVal();
3745
3746                         bool doAddrMode = true;
3747                         // See if we can form a complex addressing mode.
3748                         // Always use an addrMode for an array index indirection.
3749                         // TODO-1stClassStructs: Always do this, but first make sure it's
3750                         // done in Lowering as well.
3751                         if ((tree->gtFlags & GTF_IND_ARR_INDEX) == 0)
3752                         {
3753                             if (tree->TypeGet() == TYP_STRUCT)
3754                             {
3755                                 doAddrMode = false;
3756                             }
3757                             else if (varTypeIsStruct(tree))
3758                             {
3759                                 // This is a heuristic attempting to match prior behavior when indirections
3760                                 // under a struct assignment would not be considered for addressing modes.
3761                                 if (compCurStmt != nullptr)
3762                                 {
3763                                     GenTree* expr = compCurStmt->gtStmt.gtStmtExpr;
3764                                     if ((expr->OperGet() == GT_ASG) &&
3765                                         ((expr->gtGetOp1() == tree) || (expr->gtGetOp2() == tree)))
3766                                     {
3767                                         doAddrMode = false;
3768                                     }
3769                                 }
3770                             }
3771                         }
3772                         if ((doAddrMode) &&
3773                             codeGen->genCreateAddrMode(addr,     // address
3774                                                        0,        // mode
3775                                                        false,    // fold
3776                                                        RBM_NONE, // reg mask
3777                                                        &rev,     // reverse ops
3778                                                        &base,    // base addr
3779                                                        &idx,     // index val
3780 #if SCALED_ADDR_MODES
3781                                                        &mul, // scaling
3782 #endif
3783                                                        &cns,  // displacement
3784                                                        true)) // don't generate code
3785                         {
3786                             // We can form a complex addressing mode, so mark each of the interior
3787                             // nodes with GTF_ADDRMODE_NO_CSE and calculate a more accurate cost.
3788
3789                             addr->gtFlags |= GTF_ADDRMODE_NO_CSE;
3790 #ifdef _TARGET_XARCH_
3791                             // addrmodeCount is the count of items that we used to form
3792                             // an addressing mode.  The maximum value is 4 when we have
3793                             // all of these:   { base, idx, cns, mul }
3794                             //
3795                             unsigned addrmodeCount = 0;
3796                             if (base)
3797                             {
3798                                 costEx += base->gtCostEx;
3799                                 costSz += base->gtCostSz;
3800                                 addrmodeCount++;
3801                             }
3802
3803                             if (idx)
3804                             {
3805                                 costEx += idx->gtCostEx;
3806                                 costSz += idx->gtCostSz;
3807                                 addrmodeCount++;
3808                             }
3809
3810                             if (cns)
3811                             {
3812                                 if (((signed char)cns) == ((int)cns))
3813                                 {
3814                                     costSz += 1;
3815                                 }
3816                                 else
3817                                 {
3818                                     costSz += 4;
3819                                 }
3820                                 addrmodeCount++;
3821                             }
3822                             if (mul)
3823                             {
3824                                 addrmodeCount++;
3825                             }
3826                             // When we form a complex addressing mode we can reduce the costs
3827                             // associated with the interior GT_ADD and GT_LSH nodes:
3828                             //
3829                             //                      GT_ADD      -- reduce this interior GT_ADD by (-3,-3)
3830                             //                      /   \       --
3831                             //                  GT_ADD  'cns'   -- reduce this interior GT_ADD by (-2,-2)
3832                             //                  /   \           --
3833                             //               'base'  GT_LSH     -- reduce this interior GT_LSH by (-1,-1)
3834                             //                      /   \       --
3835                             //                   'idx'  'mul'
3836                             //
3837                             if (addrmodeCount > 1)
3838                             {
3839                                 // The number of interior GT_ADD and GT_LSH will always be one less than addrmodeCount
3840                                 //
3841                                 addrmodeCount--;
3842
3843                                 GenTree* tmp = addr;
3844                                 while (addrmodeCount > 0)
3845                                 {
3846                                     // decrement the gtCosts for the interior GT_ADD or GT_LSH node by the remaining
3847                                     // addrmodeCount
3848                                     tmp->SetCosts(tmp->gtCostEx - addrmodeCount, tmp->gtCostSz - addrmodeCount);
3849
3850                                     addrmodeCount--;
3851                                     if (addrmodeCount > 0)
3852                                     {
3853                                         GenTree* tmpOp1 = tmp->gtOp.gtOp1;
3854                                         GenTree* tmpOp2 = tmp->gtGetOp2();
3855                                         assert(tmpOp2 != nullptr);
3856
3857                                         if ((tmpOp1 != base) && (tmpOp1->OperGet() == GT_ADD))
3858                                         {
3859                                             tmp = tmpOp1;
3860                                         }
3861                                         else if (tmpOp2->OperGet() == GT_LSH)
3862                                         {
3863                                             tmp = tmpOp2;
3864                                         }
3865                                         else if (tmpOp1->OperGet() == GT_LSH)
3866                                         {
3867                                             tmp = tmpOp1;
3868                                         }
3869                                         else if (tmpOp2->OperGet() == GT_ADD)
3870                                         {
3871                                             tmp = tmpOp2;
3872                                         }
3873                                         else
3874                                         {
3875                                             // We can very rarely encounter a tree that has a GT_COMMA node
3876                                             // that is difficult to walk, so we just early out without decrementing.
3877                                             addrmodeCount = 0;
3878                                         }
3879                                     }
3880                                 }
3881                             }
3882 #elif defined _TARGET_ARM_
3883                             if (base)
3884                             {
3885                                 costEx += base->gtCostEx;
3886                                 costSz += base->gtCostSz;
3887                                 if ((base->gtOper == GT_LCL_VAR) && ((idx == NULL) || (cns == 0)))
3888                                 {
3889                                     costSz -= 1;
3890                                 }
3891                             }
3892
3893                             if (idx)
3894                             {
3895                                 costEx += idx->gtCostEx;
3896                                 costSz += idx->gtCostSz;
3897                                 if (mul > 0)
3898                                 {
3899                                     costSz += 2;
3900                                 }
3901                             }
3902
3903                             if (cns)
3904                             {
3905                                 if (cns >= 128) // small offsets fits into a 16-bit instruction
3906                                 {
3907                                     if (cns < 4096) // medium offsets require a 32-bit instruction
3908                                     {
3909                                         if (!isflt)
3910                                             costSz += 2;
3911                                     }
3912                                     else
3913                                     {
3914                                         costEx += 2; // Very large offsets require movw/movt instructions
3915                                         costSz += 8;
3916                                     }
3917                                 }
3918                             }
3919 #elif defined _TARGET_ARM64_
3920                             if (base)
3921                             {
3922                                 costEx += base->gtCostEx;
3923                                 costSz += base->gtCostSz;
3924                             }
3925
3926                             if (idx)
3927                             {
3928                                 costEx += idx->gtCostEx;
3929                                 costSz += idx->gtCostSz;
3930                             }
3931
3932                             if (cns != 0)
3933                             {
3934                                 if (cns >= (4096 * genTypeSize(tree->TypeGet())))
3935                                 {
3936                                     costEx += 1;
3937                                     costSz += 4;
3938                                 }
3939                             }
3940 #else
3941 #error "Unknown _TARGET_"
3942 #endif
3943
3944                             assert(addr->gtOper == GT_ADD);
3945                             assert(!addr->gtOverflow());
3946                             assert(op2 == nullptr);
3947                             assert(mul != 1);
3948
3949                             // If we have an addressing mode, we have one of:
3950                             //   [base             + cns]
3951                             //   [       idx * mul      ]  // mul >= 2, else we would use base instead of idx
3952                             //   [       idx * mul + cns]  // mul >= 2, else we would use base instead of idx
3953                             //   [base + idx * mul      ]  // mul can be 0, 2, 4, or 8
3954                             //   [base + idx * mul + cns]  // mul can be 0, 2, 4, or 8
3955                             // Note that mul == 0 is semantically equivalent to mul == 1.
3956                             // Note that cns can be zero.
3957                             CLANG_FORMAT_COMMENT_ANCHOR;
3958
3959 #if SCALED_ADDR_MODES
3960                             assert((base != nullptr) || (idx != nullptr && mul >= 2));
3961 #else
3962                             assert(base != NULL);
3963 #endif
3964
3965                             INDEBUG(GenTree* op1Save = addr);
3966
3967                             // Walk 'addr' identifying non-overflow ADDs that will be part of the address mode.
3968                             // Note that we will be modifying 'op1' and 'op2' so that eventually they should
3969                             // map to the base and index.
3970                             op1 = addr;
3971                             gtWalkOp(&op1, &op2, base, false);
3972
3973                             // op1 and op2 are now descendents of the root GT_ADD of the addressing mode.
3974                             assert(op1 != op1Save);
3975                             assert(op2 != nullptr);
3976
3977                             // Walk the operands again (the third operand is unused in this case).
3978                             // This time we will only consider adds with constant op2's, since
3979                             // we have already found either a non-ADD op1 or a non-constant op2.
3980                             gtWalkOp(&op1, &op2, nullptr, true);
3981
3982 #if defined(_TARGET_XARCH_) || defined(LEGACY_BACKEND)
3983                             // For XARCH we will fold GT_ADDs in the op2 position into the addressing mode, so we call
3984                             // gtWalkOp on both operands of the original GT_ADD.
3985                             // This is not done for ARMARCH. Though the stated reason is that we don't try to create a
3986                             // scaled index, in fact we actually do create them (even base + index*scale + offset).
3987
3988                             // At this point, 'op2' may itself be an ADD of a constant that should be folded
3989                             // into the addressing mode.
3990                             // Walk op2 looking for non-overflow GT_ADDs of constants.
3991                             gtWalkOp(&op2, &op1, nullptr, true);
3992 #endif // defined(_TARGET_XARCH_) || defined(LEGACY_BACKEND)
3993
3994                             // OK we are done walking the tree
3995                             // Now assert that op1 and op2 correspond with base and idx
3996                             // in one of the several acceptable ways.
3997
3998                             // Note that sometimes op1/op2 is equal to idx/base
3999                             // and other times op1/op2 is a GT_COMMA node with
4000                             // an effective value that is idx/base
4001
4002                             if (mul > 1)
4003                             {
4004                                 if ((op1 != base) && (op1->gtOper == GT_LSH))
4005                                 {
4006                                     op1->gtFlags |= GTF_ADDRMODE_NO_CSE;
4007                                     if (op1->gtOp.gtOp1->gtOper == GT_MUL)
4008                                     {
4009                                         op1->gtOp.gtOp1->gtFlags |= GTF_ADDRMODE_NO_CSE;
4010                                     }
4011                                     assert((base == nullptr) || (op2 == base) ||
4012                                            (op2->gtEffectiveVal() == base->gtEffectiveVal()) ||
4013                                            (gtWalkOpEffectiveVal(op2) == gtWalkOpEffectiveVal(base)));
4014                                 }
4015                                 else
4016                                 {
4017                                     assert(op2);
4018                                     assert(op2->gtOper == GT_LSH || op2->gtOper == GT_MUL);
4019                                     op2->gtFlags |= GTF_ADDRMODE_NO_CSE;
4020                                     // We may have eliminated multiple shifts and multiplies in the addressing mode,
4021                                     // so navigate down through them to get to "idx".
4022                                     GenTree* op2op1 = op2->gtOp.gtOp1;
4023                                     while ((op2op1->gtOper == GT_LSH || op2op1->gtOper == GT_MUL) && op2op1 != idx)
4024                                     {
4025                                         op2op1->gtFlags |= GTF_ADDRMODE_NO_CSE;
4026                                         op2op1 = op2op1->gtOp.gtOp1;
4027                                     }
4028                                     assert(op1->gtEffectiveVal() == base);
4029                                     assert(op2op1 == idx);
4030                                 }
4031                             }
4032                             else
4033                             {
4034                                 assert(mul == 0);
4035
4036                                 if ((op1 == idx) || (op1->gtEffectiveVal() == idx))
4037                                 {
4038                                     if (idx != nullptr)
4039                                     {
4040                                         if ((op1->gtOper == GT_MUL) || (op1->gtOper == GT_LSH))
4041                                         {
4042                                             if ((op1->gtOp.gtOp1->gtOper == GT_NOP) ||
4043                                                 (op1->gtOp.gtOp1->gtOper == GT_MUL &&
4044                                                  op1->gtOp.gtOp1->gtOp.gtOp1->gtOper == GT_NOP))
4045                                             {
4046                                                 op1->gtFlags |= GTF_ADDRMODE_NO_CSE;
4047                                                 if (op1->gtOp.gtOp1->gtOper == GT_MUL)
4048                                                 {
4049                                                     op1->gtOp.gtOp1->gtFlags |= GTF_ADDRMODE_NO_CSE;
4050                                                 }
4051                                             }
4052                                         }
4053                                     }
4054                                     assert((op2 == base) || (op2->gtEffectiveVal() == base));
4055                                 }
4056                                 else if ((op1 == base) || (op1->gtEffectiveVal() == base))
4057                                 {
4058                                     if (idx != nullptr)
4059                                     {
4060                                         assert(op2);
4061                                         if ((op2->gtOper == GT_MUL) || (op2->gtOper == GT_LSH))
4062                                         {
4063                                             if ((op2->gtOp.gtOp1->gtOper == GT_NOP) ||
4064                                                 (op2->gtOp.gtOp1->gtOper == GT_MUL &&
4065                                                  op2->gtOp.gtOp1->gtOp.gtOp1->gtOper == GT_NOP))
4066                                             {
4067                                                 op2->gtFlags |= GTF_ADDRMODE_NO_CSE;
4068                                                 if (op2->gtOp.gtOp1->gtOper == GT_MUL)
4069                                                 {
4070                                                     op2->gtOp.gtOp1->gtFlags |= GTF_ADDRMODE_NO_CSE;
4071                                                 }
4072                                             }
4073                                         }
4074                                         assert((op2 == idx) || (op2->gtEffectiveVal() == idx));
4075                                     }
4076                                 }
4077                                 else
4078                                 {
4079                                     // op1 isn't base or idx. Is this possible? Or should there be an assert?
4080                                 }
4081                             }
4082                             goto DONE;
4083
4084                         } // end  if  (genCreateAddrMode(...))
4085
4086                     } // end if  (op1->gtOper == GT_ADD)
4087                     else if (gtIsLikelyRegVar(op1))
4088                     {
4089                         /* Indirection of an enregistered LCL_VAR, don't increase costEx/costSz */
4090                         goto DONE;
4091                     }
4092 #ifdef _TARGET_XARCH_
4093                     else if (op1->IsCnsIntOrI())
4094                     {
4095                         // Indirection of a CNS_INT, subtract 1 from costEx
4096                         // makes costEx 3 for x86 and 4 for amd64
4097                         //
4098                         costEx += (op1->gtCostEx - 1);
4099                         costSz += op1->gtCostSz;
4100                         goto DONE;
4101                     }
4102 #endif
4103                     break;
4104
4105                 default:
4106                     break;
4107             }
4108             costEx += op1->gtCostEx;
4109             costSz += op1->gtCostSz;
4110             goto DONE;
4111         }
4112
4113         /* Binary operator - check for certain special cases */
4114
4115         lvlb = 0;
4116
4117         /* Default Binary ops have a cost of 1,1 */
4118         costEx = 1;
4119         costSz = 1;
4120
4121 #ifdef _TARGET_ARM_
4122         if (isflt)
4123         {
4124             costSz += 2;
4125         }
4126 #endif
4127 #ifndef _TARGET_64BIT_
4128         if (varTypeIsLong(op1->TypeGet()))
4129         {
4130             /* Operations on longs are more expensive */
4131             costEx += 3;
4132             costSz += 3;
4133         }
4134 #endif
4135         switch (oper)
4136         {
4137             case GT_MOD:
4138             case GT_UMOD:
4139
4140                 /* Modulo by a power of 2 is easy */
4141
4142                 if (op2->IsCnsIntOrI())
4143                 {
4144                     size_t ival = op2->gtIntConCommon.IconValue();
4145
4146                     if (ival > 0 && ival == genFindLowestBit(ival))
4147                     {
4148                         break;
4149                     }
4150                 }
4151
4152                 __fallthrough;
4153
4154             case GT_DIV:
4155             case GT_UDIV:
4156
4157                 if (isflt)
4158                 {
4159                     /* fp division is very expensive to execute */
4160                     costEx = 36; // TYP_DOUBLE
4161                     costSz += 3;
4162                 }
4163                 else
4164                 {
4165                     /* integer division is also very expensive */
4166                     costEx = 20;
4167                     costSz += 2;
4168
4169                     // Encourage the first operand to be evaluated (into EAX/EDX) first
4170                     lvlb -= 3;
4171
4172 #ifdef _TARGET_XARCH_
4173                     // the idiv and div instruction requires EAX/EDX
4174                     ftreg |= RBM_EAX | RBM_EDX;
4175 #endif
4176                 }
4177                 break;
4178
4179             case GT_MUL:
4180
4181                 if (isflt)
4182                 {
4183                     /* FP multiplication instructions are more expensive */
4184                     costEx += 4;
4185                     costSz += 3;
4186                 }
4187                 else
4188                 {
4189                     /* Integer multiplication instructions are more expensive */
4190                     costEx += 3;
4191                     costSz += 2;
4192
4193                     if (tree->gtOverflow())
4194                     {
4195                         /* Overflow check are more expensive */
4196                         costEx += 3;
4197                         costSz += 3;
4198                     }
4199
4200 #ifdef _TARGET_X86_
4201                     if ((tree->gtType == TYP_LONG) || tree->gtOverflow())
4202                     {
4203                         /* We use imulEAX for TYP_LONG and overflow multiplications */
4204                         // Encourage the first operand to be evaluated (into EAX/EDX) first
4205                         lvlb -= 4;
4206
4207                         // the imulEAX instruction on x86 requires EDX:EAX
4208                         ftreg |= (RBM_EAX | RBM_EDX);
4209
4210                         /* The 64-bit imul instruction costs more */
4211                         costEx += 4;
4212                     }
4213 #endif //  _TARGET_X86_
4214                 }
4215                 break;
4216
4217             case GT_ADD:
4218             case GT_SUB:
4219 #ifdef LEGACY_BACKEND
4220             case GT_ASG_ADD:
4221             case GT_ASG_SUB:
4222 #endif
4223                 if (isflt)
4224                 {
4225                     /* FP instructions are a bit more expensive */
4226                     costEx += 4;
4227                     costSz += 3;
4228                     break;
4229                 }
4230
4231                 /* Overflow check are more expensive */
4232                 if (tree->gtOverflow())
4233                 {
4234                     costEx += 3;
4235                     costSz += 3;
4236                 }
4237                 break;
4238
4239             case GT_COMMA:
4240
4241                 /* Comma tosses the result of the left operand */
4242                 gtSetEvalOrderAndRestoreFPstkLevel(op1);
4243                 level = gtSetEvalOrder(op2);
4244
4245                 ftreg |= op1->gtRsvdRegs | op2->gtRsvdRegs;
4246
4247                 /* GT_COMMA cost is the sum of op1 and op2 costs */
4248                 costEx = (op1->gtCostEx + op2->gtCostEx);
4249                 costSz = (op1->gtCostSz + op2->gtCostSz);
4250
4251                 goto DONE;
4252
4253             case GT_COLON:
4254
4255                 level = gtSetEvalOrderAndRestoreFPstkLevel(op1);
4256                 lvl2  = gtSetEvalOrder(op2);
4257
4258                 if (level < lvl2)
4259                 {
4260                     level = lvl2;
4261                 }
4262                 else if (level == lvl2)
4263                 {
4264                     level += 1;
4265                 }
4266
4267                 ftreg |= op1->gtRsvdRegs | op2->gtRsvdRegs;
4268                 costEx = op1->gtCostEx + op2->gtCostEx;
4269                 costSz = op1->gtCostSz + op2->gtCostSz;
4270
4271                 goto DONE;
4272
4273             case GT_LIST:
4274             case GT_FIELD_LIST:
4275             {
4276                 const bool isListCallArgs = false;
4277                 const bool callArgsInRegs = false;
4278                 return gtSetListOrder(tree, isListCallArgs, callArgsInRegs);
4279             }
4280
4281             default:
4282                 break;
4283         }
4284
4285         /* Assignments need a bit of special handling */
4286
4287         if (GenTree::OperIsAssignment(oper))
4288         {
4289             /* Process the target */
4290
4291             level = gtSetEvalOrder(op1);
4292
4293 #if FEATURE_STACK_FP_X87
4294
4295             /* If assigning an FP value, the target won't get pushed */
4296
4297             if (isflt && !tree->IsPhiDefn())
4298             {
4299                 op1->gtFPlvl--;
4300                 codeGen->genDecrementFPstkLevel();
4301             }
4302
4303 #endif // FEATURE_STACK_FP_X87
4304
4305             if (gtIsLikelyRegVar(op1))
4306             {
4307                 assert(lvlb == 0);
4308                 lvl2 = gtSetEvalOrder(op2);
4309                 if (oper != GT_ASG)
4310                 {
4311                     ftreg |= op2->gtRsvdRegs;
4312                 }
4313
4314                 /* Assignment to an enregistered LCL_VAR */
4315                 costEx = op2->gtCostEx;
4316                 costSz = max(3, op2->gtCostSz); // 3 is an estimate for a reg-reg assignment
4317                 goto DONE_OP1_AFTER_COST;
4318             }
4319             else if (oper != GT_ASG)
4320             {
4321                 // Assign-Op instructions read and write op1
4322                 //
4323                 costEx += op1->gtCostEx;
4324 #ifdef _TARGET_ARM_
4325                 costSz += op1->gtCostSz;
4326 #endif
4327             }
4328
4329             goto DONE_OP1;
4330         }
4331
4332         /* Process the sub-operands */
4333
4334         level = gtSetEvalOrder(op1);
4335         if (lvlb < 0)
4336         {
4337             level -= lvlb; // lvlb is negative, so this increases level
4338             lvlb = 0;
4339         }
4340
4341     DONE_OP1:
4342         assert(lvlb >= 0);
4343         lvl2 = gtSetEvalOrder(op2) + lvlb;
4344         ftreg |= op1->gtRsvdRegs;
4345         // For assignment, we execute op2 before op1, except that for block
4346         // ops the destination address is evaluated first.
4347         if ((oper != GT_ASG) || tree->OperIsBlkOp())
4348         {
4349             ftreg |= op2->gtRsvdRegs;
4350         }
4351
4352         costEx += (op1->gtCostEx + op2->gtCostEx);
4353         costSz += (op1->gtCostSz + op2->gtCostSz);
4354
4355     DONE_OP1_AFTER_COST:
4356 #if FEATURE_STACK_FP_X87
4357         /*
4358             Binary FP operators pop 2 operands and produce 1 result;
4359             FP comparisons pop 2 operands and produce 0 results;
4360             assignments consume 1 value and don't produce anything.
4361          */
4362
4363         if (isflt && !tree->IsPhiDefn())
4364         {
4365             assert(oper != GT_COMMA);
4366             codeGen->genDecrementFPstkLevel();
4367         }
4368 #endif // FEATURE_STACK_FP_X87
4369
4370         bool bReverseInAssignment = false;
4371         if (GenTree::OperIsAssignment(oper))
4372         {
4373             GenTree* op1Val = op1;
4374
4375             if (tree->gtOper == GT_ASG)
4376             {
4377                 // Skip over the GT_IND/GT_ADDR tree (if one exists)
4378                 //
4379                 if ((op1->gtOper == GT_IND) && (op1->gtOp.gtOp1->gtOper == GT_ADDR))
4380                 {
4381                     op1Val = op1->gtOp.gtOp1->gtOp.gtOp1;
4382                 }
4383             }
4384
4385             switch (op1Val->gtOper)
4386             {
4387                 case GT_IND:
4388
4389                     // Struct assignments are different from scalar assignments in that semantically
4390                     // the address of op1 is evaluated prior to op2.
4391                     if (!varTypeIsStruct(op1))
4392                     {
4393                         // If we have any side effects on the GT_IND child node
4394                         // we have to evaluate op1 first.
4395                         if (op1Val->gtOp.gtOp1->gtFlags & GTF_ALL_EFFECT)
4396                         {
4397                             break;
4398                         }
4399                     }
4400
4401                     // In case op2 assigns to a local var that is used in op1Val, we have to evaluate op1Val first.
4402                     if (op2->gtFlags & GTF_ASG)
4403                     {
4404                         break;
4405                     }
4406
4407                     // If op2 is simple then evaluate op1 first
4408
4409                     if (op2->OperKind() & GTK_LEAF)
4410                     {
4411                         break;
4412                     }
4413
4414                 // fall through and set GTF_REVERSE_OPS
4415
4416                 case GT_LCL_VAR:
4417                 case GT_LCL_FLD:
4418                 case GT_BLK:
4419                 case GT_OBJ:
4420                 case GT_DYN_BLK:
4421
4422                     // We evaluate op2 before op1
4423                     bReverseInAssignment = true;
4424                     tree->gtFlags |= GTF_REVERSE_OPS;
4425                     break;
4426
4427                 default:
4428                     break;
4429             }
4430         }
4431         else if (kind & GTK_RELOP)
4432         {
4433             /* Float compares remove both operands from the FP stack */
4434             /* Also FP comparison uses EAX for flags */
4435
4436             if (varTypeIsFloating(op1->TypeGet()))
4437             {
4438 #if FEATURE_STACK_FP_X87
4439                 codeGen->genDecrementFPstkLevel(2);
4440 #endif // FEATURE_STACK_FP_X87
4441 #ifdef _TARGET_XARCH_
4442                 ftreg |= RBM_EAX;
4443 #endif
4444                 level++;
4445                 lvl2++;
4446             }
4447 #if CPU_LONG_USES_REGPAIR
4448             if (varTypeIsLong(op1->TypeGet()))
4449             {
4450                 costEx *= 2; // Longs are twice as expensive
4451                 costSz *= 2;
4452             }
4453 #endif
4454             if ((tree->gtFlags & GTF_RELOP_JMP_USED) == 0)
4455             {
4456                 /* Using a setcc instruction is more expensive */
4457                 costEx += 3;
4458             }
4459         }
4460
4461         /* Check for other interesting cases */
4462
4463         switch (oper)
4464         {
4465             case GT_LSH:
4466             case GT_RSH:
4467             case GT_RSZ:
4468             case GT_ROL:
4469             case GT_ROR:
4470 #ifdef LEGACY_BACKEND
4471             case GT_ASG_LSH:
4472             case GT_ASG_RSH:
4473             case GT_ASG_RSZ:
4474 #endif
4475                 /* Variable sized shifts are more expensive and use REG_SHIFT */
4476
4477                 if (!op2->IsCnsIntOrI())
4478                 {
4479                     costEx += 3;
4480                     if (REG_SHIFT != REG_NA)
4481                     {
4482                         ftreg |= RBM_SHIFT;
4483                     }
4484
4485 #ifndef _TARGET_64BIT_
4486                     // Variable sized LONG shifts require the use of a helper call
4487                     //
4488                     if (tree->gtType == TYP_LONG)
4489                     {
4490                         level += 5;
4491                         lvl2 += 5;
4492                         costEx += 3 * IND_COST_EX;
4493                         costSz += 4;
4494                         ftreg |= RBM_CALLEE_TRASH;
4495                     }
4496 #endif // !_TARGET_64BIT_
4497                 }
4498                 break;
4499
4500             case GT_INTRINSIC:
4501
4502                 switch (tree->gtIntrinsic.gtIntrinsicId)
4503                 {
4504                     case CORINFO_INTRINSIC_Atan2:
4505                     case CORINFO_INTRINSIC_Pow:
4506                         // These math intrinsics are actually implemented by user calls.
4507                         // Increase the Sethi 'complexity' by two to reflect the argument
4508                         // register requirement.
4509                         level += 2;
4510                         break;
4511                     default:
4512                         assert(!"Unknown binary GT_INTRINSIC operator");
4513                         break;
4514                 }
4515
4516                 break;
4517
4518             default:
4519                 break;
4520         }
4521
4522         /* We need to evaluate constants later as many places in codegen
4523            can't handle op1 being a constant. This is normally naturally
4524            enforced as constants have the least level of 0. However,
4525            sometimes we end up with a tree like "cns1 < nop(cns2)". In
4526            such cases, both sides have a level of 0. So encourage constants
4527            to be evaluated last in such cases */
4528
4529         if ((level == 0) && (level == lvl2) && (op1->OperKind() & GTK_CONST) &&
4530             (tree->OperIsCommutative() || tree->OperIsCompare()))
4531         {
4532             lvl2++;
4533         }
4534
4535         /* We try to swap operands if the second one is more expensive */
4536         bool     tryToSwap;
4537         GenTree* opA;
4538         GenTree* opB;
4539
4540         if (tree->gtFlags & GTF_REVERSE_OPS)
4541         {
4542             opA = op2;
4543             opB = op1;
4544         }
4545         else
4546         {
4547             opA = op1;
4548             opB = op2;
4549         }
4550
4551         if (fgOrder == FGOrderLinear)
4552         {
4553             // Don't swap anything if we're in linear order; we're really just interested in the costs.
4554             tryToSwap = false;
4555         }
4556         else if (bReverseInAssignment)
4557         {
4558             // Assignments are special, we want the reverseops flags
4559             // so if possible it was set above.
4560             tryToSwap = false;
4561         }
4562         else if ((oper == GT_INTRINSIC) &&
4563                  Compiler::IsIntrinsicImplementedByUserCall(tree->AsIntrinsic()->gtIntrinsicId))
4564         {
4565             // We do not swap operand execution order for intrinsics that are implemented by user calls
4566             // because of trickiness around ensuring the execution order does not change during rationalization.
4567             tryToSwap = false;
4568         }
4569         else
4570         {
4571             if (tree->gtFlags & GTF_REVERSE_OPS)
4572             {
4573                 tryToSwap = (level > lvl2);
4574             }
4575             else
4576             {
4577                 tryToSwap = (level < lvl2);
4578             }
4579
4580             // Try to force extra swapping when in the stress mode:
4581             if (compStressCompile(STRESS_REVERSE_FLAG, 60) && ((tree->gtFlags & GTF_REVERSE_OPS) == 0) &&
4582                 ((op2->OperKind() & GTK_CONST) == 0))
4583             {
4584                 tryToSwap = true;
4585             }
4586         }
4587
4588         if (tryToSwap)
4589         {
4590             bool canSwap = gtCanSwapOrder(opA, opB);
4591
4592             if (canSwap)
4593             {
4594                 /* Can we swap the order by commuting the operands? */
4595
4596                 switch (oper)
4597                 {
4598                     case GT_EQ:
4599                     case GT_NE:
4600                     case GT_LT:
4601                     case GT_LE:
4602                     case GT_GE:
4603                     case GT_GT:
4604                         if (GenTree::SwapRelop(oper) != oper)
4605                         {
4606                             tree->SetOper(GenTree::SwapRelop(oper), GenTree::PRESERVE_VN);
4607                         }
4608
4609                         __fallthrough;
4610
4611                     case GT_ADD:
4612                     case GT_MUL:
4613
4614                     case GT_OR:
4615                     case GT_XOR:
4616                     case GT_AND:
4617
4618                         /* Swap the operands */
4619
4620                         tree->gtOp.gtOp1 = op2;
4621                         tree->gtOp.gtOp2 = op1;
4622
4623 #if FEATURE_STACK_FP_X87
4624                         /* We may have to recompute FP levels */
4625                         if (op1->gtFPlvl || op2->gtFPlvl)
4626                             gtFPstLvlRedo = true;
4627 #endif // FEATURE_STACK_FP_X87
4628                         break;
4629
4630                     case GT_QMARK:
4631                     case GT_COLON:
4632                     case GT_MKREFANY:
4633                         break;
4634
4635                     case GT_LIST:
4636                     case GT_FIELD_LIST:
4637                         break;
4638
4639                     case GT_SUB:
4640 #ifdef LEGACY_BACKEND
4641                         // For LSRA we require that LclVars be "evaluated" just prior to their use,
4642                         // so that if they must be reloaded, it is done at the right place.
4643                         // This means that we allow reverse evaluation for all BINOPs.
4644                         // (Note that this doesn't affect the order of the operands in the instruction).
4645                         if (!isflt)
4646                             break;
4647 #endif // LEGACY_BACKEND
4648
4649                         __fallthrough;
4650
4651                     default:
4652
4653                         /* Mark the operand's evaluation order to be swapped */
4654                         if (tree->gtFlags & GTF_REVERSE_OPS)
4655                         {
4656                             tree->gtFlags &= ~GTF_REVERSE_OPS;
4657                         }
4658                         else
4659                         {
4660                             tree->gtFlags |= GTF_REVERSE_OPS;
4661                         }
4662
4663 #if FEATURE_STACK_FP_X87
4664                         /* We may have to recompute FP levels */
4665                         if (op1->gtFPlvl || op2->gtFPlvl)
4666                             gtFPstLvlRedo = true;
4667 #endif // FEATURE_STACK_FP_X87
4668
4669                         break;
4670                 }
4671             }
4672         }
4673
4674         /* Swap the level counts */
4675         if (tree->gtFlags & GTF_REVERSE_OPS)
4676         {
4677             unsigned tmpl;
4678
4679             tmpl  = level;
4680             level = lvl2;
4681             lvl2  = tmpl;
4682         }
4683
4684         /* Compute the sethi number for this binary operator */
4685
4686         if (level < 1)
4687         {
4688             level = lvl2;
4689         }
4690         else if (level == lvl2)
4691         {
4692             level += 1;
4693         }
4694
4695         goto DONE;
4696     }
4697
4698     /* See what kind of a special operator we have here */
4699
4700     switch (oper)
4701     {
4702         unsigned lvl2; // Scratch variable
4703
4704         case GT_CALL:
4705
4706             assert(tree->gtFlags & GTF_CALL);
4707
4708             level  = 0;
4709             costEx = 5;
4710             costSz = 2;
4711
4712             /* Evaluate the 'this' argument, if present */
4713
4714             if (tree->gtCall.gtCallObjp)
4715             {
4716                 GenTree* thisVal = tree->gtCall.gtCallObjp;
4717
4718                 lvl2 = gtSetEvalOrder(thisVal);
4719                 if (level < lvl2)
4720                 {
4721                     level = lvl2;
4722                 }
4723                 costEx += thisVal->gtCostEx;
4724                 costSz += thisVal->gtCostSz + 1;
4725                 ftreg |= thisVal->gtRsvdRegs;
4726             }
4727
4728             /* Evaluate the arguments, right to left */
4729
4730             if (tree->gtCall.gtCallArgs)
4731             {
4732 #if FEATURE_STACK_FP_X87
4733                 unsigned FPlvlSave = codeGen->genGetFPstkLevel();
4734 #endif // FEATURE_STACK_FP_X87
4735                 const bool isListCallArgs = true;
4736                 const bool callArgsInRegs = false;
4737                 lvl2                      = gtSetListOrder(tree->gtCall.gtCallArgs, isListCallArgs, callArgsInRegs);
4738                 if (level < lvl2)
4739                 {
4740                     level = lvl2;
4741                 }
4742                 costEx += tree->gtCall.gtCallArgs->gtCostEx;
4743                 costSz += tree->gtCall.gtCallArgs->gtCostSz;
4744                 ftreg |= tree->gtCall.gtCallArgs->gtRsvdRegs;
4745 #if FEATURE_STACK_FP_X87
4746                 codeGen->genResetFPstkLevel(FPlvlSave);
4747 #endif // FEATURE_STACK_FP_X87
4748             }
4749
4750             /* Evaluate the temp register arguments list
4751              * This is a "hidden" list and its only purpose is to
4752              * extend the life of temps until we make the call */
4753
4754             if (tree->gtCall.gtCallLateArgs)
4755             {
4756 #if FEATURE_STACK_FP_X87
4757                 unsigned FPlvlSave = codeGen->genGetFPstkLevel();
4758 #endif // FEATURE_STACK_FP_X87
4759                 const bool isListCallArgs = true;
4760                 const bool callArgsInRegs = true;
4761                 lvl2                      = gtSetListOrder(tree->gtCall.gtCallLateArgs, isListCallArgs, callArgsInRegs);
4762                 if (level < lvl2)
4763                 {
4764                     level = lvl2;
4765                 }
4766                 costEx += tree->gtCall.gtCallLateArgs->gtCostEx;
4767                 costSz += tree->gtCall.gtCallLateArgs->gtCostSz;
4768                 ftreg |= tree->gtCall.gtCallLateArgs->gtRsvdRegs;
4769 #if FEATURE_STACK_FP_X87
4770                 codeGen->genResetFPstkLevel(FPlvlSave);
4771 #endif // FEATURE_STACK_FP_X87
4772             }
4773
4774             if (tree->gtCall.gtCallType == CT_INDIRECT)
4775             {
4776                 // pinvoke-calli cookie is a constant, or constant indirection
4777                 assert(tree->gtCall.gtCallCookie == nullptr || tree->gtCall.gtCallCookie->gtOper == GT_CNS_INT ||
4778                        tree->gtCall.gtCallCookie->gtOper == GT_IND);
4779
4780                 GenTree* indirect = tree->gtCall.gtCallAddr;
4781
4782                 lvl2 = gtSetEvalOrder(indirect);
4783                 if (level < lvl2)
4784                 {
4785                     level = lvl2;
4786                 }
4787                 costEx += indirect->gtCostEx + IND_COST_EX;
4788                 costSz += indirect->gtCostSz;
4789                 ftreg |= indirect->gtRsvdRegs;
4790             }
4791             else
4792             {
4793 #ifdef _TARGET_ARM_
4794                 if (tree->gtCall.IsVirtualStub())
4795                 {
4796                     // We generate movw/movt/ldr
4797                     costEx += (1 + IND_COST_EX);
4798                     costSz += 8;
4799                     if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT)
4800                     {
4801                         // Must use R12 for the ldr target -- REG_JUMP_THUNK_PARAM
4802                         costSz += 2;
4803                     }
4804                 }
4805                 else if (!opts.jitFlags->IsSet(JitFlags::JIT_FLAG_PREJIT))
4806                 {
4807                     costEx += 2;
4808                     costSz += 6;
4809                 }
4810                 costSz += 2;
4811 #endif
4812 #ifdef _TARGET_XARCH_
4813                 costSz += 3;
4814 #endif
4815             }
4816
4817             level += 1;
4818
4819             /* Virtual calls are a bit more expensive */
4820             if (tree->gtCall.IsVirtual())
4821             {
4822                 costEx += 2 * IND_COST_EX;
4823                 costSz += 2;
4824
4825                 /* Virtual stub calls also must reserve the VIRTUAL_STUB_PARAM reg */
4826                 if (tree->gtCall.IsVirtualStub())
4827                 {
4828                     ftreg |= virtualStubParamInfo->GetRegMask();
4829                 }
4830             }
4831
4832 #ifdef FEATURE_READYTORUN_COMPILER
4833 #if defined(_TARGET_ARMARCH_)
4834             if (tree->gtCall.IsR2RRelativeIndir())
4835             {
4836                 ftreg |= RBM_R2R_INDIRECT_PARAM;
4837             }
4838 #endif
4839 #endif
4840
4841 #ifdef LEGACY_BACKEND
4842             // Normally function calls don't preserve caller save registers
4843             //   and thus are much more expensive.
4844             // However a few function calls do preserve these registers
4845             //   such as the GC WriteBarrier helper calls.
4846
4847             if (!(tree->gtFlags & GTF_CALL_REG_SAVE))
4848 #endif // LEGACY_BACKEND
4849             {
4850                 level += 5;
4851                 costEx += 3 * IND_COST_EX;
4852                 ftreg |= RBM_CALLEE_TRASH;
4853             }
4854
4855 #if FEATURE_STACK_FP_X87
4856             if (isflt)
4857                 codeGen->genIncrementFPstkLevel();
4858 #endif // FEATURE_STACK_FP_X87
4859
4860             break;
4861
4862         case GT_ARR_ELEM:
4863
4864             level  = gtSetEvalOrder(tree->gtArrElem.gtArrObj);
4865             costEx = tree->gtArrElem.gtArrObj->gtCostEx;
4866             costSz = tree->gtArrElem.gtArrObj->gtCostSz;
4867
4868             unsigned dim;
4869             for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
4870             {
4871                 lvl2 = gtSetEvalOrder(tree->gtArrElem.gtArrInds[dim]);
4872                 if (level < lvl2)
4873                 {
4874                     level = lvl2;
4875                 }
4876                 costEx += tree->gtArrElem.gtArrInds[dim]->gtCostEx;
4877                 costSz += tree->gtArrElem.gtArrInds[dim]->gtCostSz;
4878             }
4879
4880 #if FEATURE_STACK_FP_X87
4881             if (isflt)
4882                 codeGen->genIncrementFPstkLevel();
4883 #endif // FEATURE_STACK_FP_X87
4884             level += tree->gtArrElem.gtArrRank;
4885             costEx += 2 + (tree->gtArrElem.gtArrRank * (IND_COST_EX + 1));
4886             costSz += 2 + (tree->gtArrElem.gtArrRank * 2);
4887             break;
4888
4889         case GT_ARR_OFFSET:
4890             level  = gtSetEvalOrder(tree->gtArrOffs.gtOffset);
4891             costEx = tree->gtArrOffs.gtOffset->gtCostEx;
4892             costSz = tree->gtArrOffs.gtOffset->gtCostSz;
4893             lvl2   = gtSetEvalOrder(tree->gtArrOffs.gtIndex);
4894             level  = max(level, lvl2);
4895             costEx += tree->gtArrOffs.gtIndex->gtCostEx;
4896             costSz += tree->gtArrOffs.gtIndex->gtCostSz;
4897             lvl2  = gtSetEvalOrder(tree->gtArrOffs.gtArrObj);
4898             level = max(level, lvl2);
4899             costEx += tree->gtArrOffs.gtArrObj->gtCostEx;
4900             costSz += tree->gtArrOffs.gtArrObj->gtCostSz;
4901             break;
4902
4903         case GT_CMPXCHG:
4904
4905             level  = gtSetEvalOrder(tree->gtCmpXchg.gtOpLocation);
4906             costSz = tree->gtCmpXchg.gtOpLocation->gtCostSz;
4907
4908             lvl2 = gtSetEvalOrder(tree->gtCmpXchg.gtOpValue);
4909             if (level < lvl2)
4910             {
4911                 level = lvl2;
4912             }
4913             costSz += tree->gtCmpXchg.gtOpValue->gtCostSz;
4914
4915             lvl2 = gtSetEvalOrder(tree->gtCmpXchg.gtOpComparand);
4916             if (level < lvl2)
4917             {
4918                 level = lvl2;
4919             }
4920             costSz += tree->gtCmpXchg.gtOpComparand->gtCostSz;
4921
4922             costEx = MAX_COST; // Seriously, what could be more expensive than lock cmpxchg?
4923             costSz += 5;       // size of lock cmpxchg [reg+C], reg
4924 #ifdef _TARGET_XARCH_
4925             ftreg |= RBM_EAX; // cmpxchg must be evaluated into eax.
4926 #endif
4927             break;
4928
4929         case GT_ARR_BOUNDS_CHECK:
4930 #ifdef FEATURE_SIMD
4931         case GT_SIMD_CHK:
4932 #endif // FEATURE_SIMD
4933 #ifdef FEATURE_HW_INTRINSICS
4934         case GT_HW_INTRINSIC_CHK:
4935 #endif // FEATURE_HW_INTRINSICS
4936
4937             costEx = 4; // cmp reg,reg and jae throw (not taken)
4938             costSz = 7; // jump to cold section
4939
4940             level = gtSetEvalOrder(tree->gtBoundsChk.gtIndex);
4941             costEx += tree->gtBoundsChk.gtIndex->gtCostEx;
4942             costSz += tree->gtBoundsChk.gtIndex->gtCostSz;
4943
4944             lvl2 = gtSetEvalOrder(tree->gtBoundsChk.gtArrLen);
4945             if (level < lvl2)
4946             {
4947                 level = lvl2;
4948             }
4949             costEx += tree->gtBoundsChk.gtArrLen->gtCostEx;
4950             costSz += tree->gtBoundsChk.gtArrLen->gtCostSz;
4951
4952             break;
4953
4954         case GT_STORE_DYN_BLK:
4955         case GT_DYN_BLK:
4956         {
4957             costEx = 0;
4958             costSz = 0;
4959             level  = 0;
4960             if (oper == GT_STORE_DYN_BLK)
4961             {
4962                 lvl2  = gtSetEvalOrder(tree->gtDynBlk.Data());
4963                 level = max(level, lvl2);
4964                 costEx += tree->gtDynBlk.Data()->gtCostEx;
4965                 costSz += tree->gtDynBlk.Data()->gtCostSz;
4966             }
4967             lvl2               = gtSetEvalOrder(tree->gtDynBlk.Addr());
4968             level              = max(level, lvl2);
4969             costEx             = tree->gtDynBlk.Addr()->gtCostEx;
4970             costSz             = tree->gtDynBlk.Addr()->gtCostSz;
4971             unsigned sizeLevel = gtSetEvalOrder(tree->gtDynBlk.gtDynamicSize);
4972
4973             // Determine whether the size node should be evaluated first.
4974             // We would like to do this if the sizeLevel is larger than the current level,
4975             // but we have to ensure that we obey ordering constraints.
4976             if (tree->AsDynBlk()->gtEvalSizeFirst != (level < sizeLevel))
4977             {
4978                 bool canChange = true;
4979
4980                 GenTree* sizeNode = tree->AsDynBlk()->gtDynamicSize;
4981                 GenTree* dst      = tree->AsDynBlk()->Addr();
4982                 GenTree* src      = tree->AsDynBlk()->Data();
4983
4984                 if (tree->AsDynBlk()->gtEvalSizeFirst)
4985                 {
4986                     canChange = gtCanSwapOrder(sizeNode, dst);
4987                     if (canChange && (src != nullptr))
4988                     {
4989                         canChange = gtCanSwapOrder(sizeNode, src);
4990                     }
4991                 }
4992                 else
4993                 {
4994                     canChange = gtCanSwapOrder(dst, sizeNode);
4995                     if (canChange && (src != nullptr))
4996                     {
4997                         gtCanSwapOrder(src, sizeNode);
4998                     }
4999                 }
5000                 if (canChange)
5001                 {
5002                     tree->AsDynBlk()->gtEvalSizeFirst = (level < sizeLevel);
5003                 }
5004             }
5005             level = max(level, sizeLevel);
5006             costEx += tree->gtDynBlk.gtDynamicSize->gtCostEx;
5007             costSz += tree->gtDynBlk.gtDynamicSize->gtCostSz;
5008         }
5009         break;
5010
5011         case GT_INDEX_ADDR:
5012             costEx = 6; // cmp reg,reg; jae throw; mov reg, [addrmode]  (not taken)
5013             costSz = 9; // jump to cold section
5014
5015             level = gtSetEvalOrder(tree->AsIndexAddr()->Index());
5016             costEx += tree->AsIndexAddr()->Index()->gtCostEx;
5017             costSz += tree->AsIndexAddr()->Index()->gtCostSz;
5018
5019             lvl2 = gtSetEvalOrder(tree->AsIndexAddr()->Arr());
5020             if (level < lvl2)
5021             {
5022                 level = lvl2;
5023             }
5024             costEx += tree->AsIndexAddr()->Arr()->gtCostEx;
5025             costSz += tree->AsIndexAddr()->Arr()->gtCostSz;
5026             break;
5027
5028         default:
5029 #ifdef DEBUG
5030             if (verbose)
5031             {
5032                 printf("unexpected operator in this tree:\n");
5033                 gtDispTree(tree);
5034             }
5035 #endif
5036             NO_WAY("unexpected operator");
5037     }
5038
5039 DONE:
5040
5041 #if FEATURE_STACK_FP_X87
5042     // printf("[FPlvl=%2u] ", genGetFPstkLevel()); gtDispTree(tree, 0, true);
5043     noway_assert((unsigned char)codeGen->genFPstkLevel == codeGen->genFPstkLevel);
5044     tree->gtFPlvl = (unsigned char)codeGen->genFPstkLevel;
5045
5046     if (codeGen->genFPstkLevel > tmpDoubleSpillMax)
5047         tmpDoubleSpillMax = codeGen->genFPstkLevel;
5048 #endif // FEATURE_STACK_FP_X87
5049
5050     tree->gtRsvdRegs = (regMaskSmall)ftreg;
5051
5052     // Some path through this function must have set the costs.
5053     assert(costEx != -1);
5054     assert(costSz != -1);
5055
5056     tree->SetCosts(costEx, costSz);
5057
5058     return level;
5059 }
5060 #ifdef _PREFAST_
5061 #pragma warning(pop)
5062 #endif
5063
5064 #if FEATURE_STACK_FP_X87
5065
5066 /*****************************************************************************/
// Recursively computes the x87 FP-stack depth ("FP level") at every node of
// the given tree and records it in gtFPlvl. The walk mirrors evaluation
// order: nodes that push a floating-point value increment
// codeGen->genFPstkLevel, and operators that consume FP values decrement it.
//
// Arguments:
//    tree - the tree to annotate; must not be a GT_STMT node
//
// Notes:
//    Legacy x87 backend only (compiled under FEATURE_STACK_FP_X87).
void Compiler::gtComputeFPlvls(GenTree* tree)
{
    genTreeOps oper;
    unsigned   kind;
    bool       isflt;
    unsigned   savFPstkLevel;

    noway_assert(tree);
    noway_assert(tree->gtOper != GT_STMT);

    /* Figure out what kind of a node we have */

    oper  = tree->OperGet();
    kind  = tree->OperKind();
    isflt = varTypeIsFloating(tree->TypeGet()) ? 1 : 0;

    /* Is this a constant or leaf node? */

    if (kind & (GTK_CONST | GTK_LEAF))
    {
        // An FP-typed leaf pushes one value on the x87 stack.
        codeGen->genFPstkLevel += isflt;
        goto DONE;
    }

    /* Is it a 'simple' unary/binary operator? */

    if (kind & GTK_SMPOP)
    {
        GenTree* op1 = tree->gtOp.gtOp1;
        GenTree* op2 = tree->gtGetOp2IfPresent();

        /* Check for some special cases */

        switch (oper)
        {
            case GT_IND:

                gtComputeFPlvls(op1);

                /* Indirect loads of FP values push a new value on the FP stack */

                codeGen->genFPstkLevel += isflt;
                goto DONE;

            case GT_CAST:

                gtComputeFPlvls(op1);

                /* Casts between non-FP and FP push on / pop from the FP stack */

                if (varTypeIsFloating(op1->TypeGet()))
                {
                    // FP -> non-FP cast pops the source value.
                    if (isflt == false)
                        codeGen->genFPstkLevel--;
                }
                else
                {
                    // non-FP -> FP cast pushes the result.
                    if (isflt != false)
                        codeGen->genFPstkLevel++;
                }

                goto DONE;

            case GT_LIST:  /* GT_LIST presumably part of an argument list */
            case GT_COMMA: /* Comma tosses the result of the left operand */

                // The left operand's FP result (if any) is discarded, so the
                // stack level is restored before walking the right operand.
                savFPstkLevel = codeGen->genFPstkLevel;
                gtComputeFPlvls(op1);
                codeGen->genFPstkLevel = savFPstkLevel;

                if (op2)
                    gtComputeFPlvls(op2);

                goto DONE;

            default:
                break;
        }

        if (!op1)
        {
            if (!op2)
                goto DONE;

            gtComputeFPlvls(op2);
            goto DONE;
        }

        if (!op2)
        {
            gtComputeFPlvls(op1);
            if (oper == GT_ADDR)
            {
                /* If the operand was floating point pop the value from the stack */
                if (varTypeIsFloating(op1->TypeGet()))
                {
                    noway_assert(codeGen->genFPstkLevel);
                    codeGen->genFPstkLevel--;
                }
            }

            // This is a special case to handle the following
            // optimization: conv.i4(round.d(d)) -> round.i(d)

            if (oper == GT_INTRINSIC && tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Round &&
                tree->TypeGet() == TYP_INT)
            {
                codeGen->genFPstkLevel--;
            }

            goto DONE;
        }

        /* FP assignments need a bit special handling */

        if (isflt && (kind & GTK_ASGOP))
        {
            /* The target of the assignment won't get pushed */

            if (tree->gtFlags & GTF_REVERSE_OPS)
            {
                gtComputeFPlvls(op2);
                gtComputeFPlvls(op1);
                op1->gtFPlvl--;
                codeGen->genFPstkLevel--;
            }
            else
            {
                gtComputeFPlvls(op1);
                op1->gtFPlvl--;
                codeGen->genFPstkLevel--;
                gtComputeFPlvls(op2);
            }

            // The assignment itself consumes the RHS value.
            codeGen->genFPstkLevel--;
            goto DONE;
        }

        /* Here we have a binary operator; visit operands in proper order */

        if (tree->gtFlags & GTF_REVERSE_OPS)
        {
            gtComputeFPlvls(op2);
            gtComputeFPlvls(op1);
        }
        else
        {
            gtComputeFPlvls(op1);
            gtComputeFPlvls(op2);
        }

        /*
            Binary FP operators pop 2 operands and produce 1 result;
            assignments consume 1 value and don't produce any.
         */

        if (isflt)
            codeGen->genFPstkLevel--;

        /* Float compares remove both operands from the FP stack */

        if (kind & GTK_RELOP)
        {
            if (varTypeIsFloating(op1->TypeGet()))
                codeGen->genFPstkLevel -= 2;
        }

        goto DONE;
    }

    /* See what kind of a special operator we have here */

    switch (oper)
    {
        case GT_FIELD:
            gtComputeFPlvls(tree->gtField.gtFldObj);
            codeGen->genFPstkLevel += isflt;
            break;

        case GT_CALL:

            if (tree->gtCall.gtCallObjp)
                gtComputeFPlvls(tree->gtCall.gtCallObjp);

            if (tree->gtCall.gtCallArgs)
            {
                // Arguments are consumed by the call itself; restore the
                // level after walking each argument list.
                savFPstkLevel = codeGen->genFPstkLevel;
                gtComputeFPlvls(tree->gtCall.gtCallArgs);
                codeGen->genFPstkLevel = savFPstkLevel;
            }

            if (tree->gtCall.gtCallLateArgs)
            {
                savFPstkLevel = codeGen->genFPstkLevel;
                gtComputeFPlvls(tree->gtCall.gtCallLateArgs);
                codeGen->genFPstkLevel = savFPstkLevel;
            }

            // A call returning an FP value pushes its result.
            codeGen->genFPstkLevel += isflt;
            break;

        case GT_ARR_ELEM:

            gtComputeFPlvls(tree->gtArrElem.gtArrObj);

            unsigned dim;
            for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
                gtComputeFPlvls(tree->gtArrElem.gtArrInds[dim]);

            /* Loads of FP values push a new value on the FP stack */
            codeGen->genFPstkLevel += isflt;
            break;

        case GT_CMPXCHG:
            // Evaluate the trees left to right
            gtComputeFPlvls(tree->gtCmpXchg.gtOpLocation);
            gtComputeFPlvls(tree->gtCmpXchg.gtOpValue);
            gtComputeFPlvls(tree->gtCmpXchg.gtOpComparand);
            noway_assert(!isflt);
            break;

        case GT_ARR_BOUNDS_CHECK:
            gtComputeFPlvls(tree->gtBoundsChk.gtIndex);
            gtComputeFPlvls(tree->gtBoundsChk.gtArrLen);
            noway_assert(!isflt);
            break;

        default:
#ifdef DEBUG
            noway_assert(!"Unhandled special operator in gtComputeFPlvls()");
#endif
            break;
    }

DONE:

    // The level is stored in an unsigned char field; verify it fits.
    noway_assert((unsigned char)codeGen->genFPstkLevel == codeGen->genFPstkLevel);

    tree->gtFPlvl = (unsigned char)codeGen->genFPstkLevel;
}
5307
5308 #endif // FEATURE_STACK_FP_X87
5309
5310 /*****************************************************************************
5311  *
5312  *  If the given tree is an integer constant that can be used
5313  *  in a scaled index address mode as a multiplier (e.g. "[4*index]"), then return
5314  *  the scale factor: 2, 4, or 8. Otherwise, return 0. Note that we never return 1,
5315  *  to match the behavior of GetScaleIndexShf().
5316  */
5317
5318 unsigned GenTree::GetScaleIndexMul()
5319 {
5320     if (IsCnsIntOrI() && jitIsScaleIndexMul(gtIntConCommon.IconValue()) && gtIntConCommon.IconValue() != 1)
5321     {
5322         return (unsigned)gtIntConCommon.IconValue();
5323     }
5324
5325     return 0;
5326 }
5327
5328 /*****************************************************************************
5329  *
5330  *  If the given tree is the right-hand side of a left shift (that is,
5331  *  'y' in the tree 'x' << 'y'), and it is an integer constant that can be used
5332  *  in a scaled index address mode as a multiplier (e.g. "[4*index]"), then return
5333  *  the scale factor: 2, 4, or 8. Otherwise, return 0.
5334  */
5335
5336 unsigned GenTree::GetScaleIndexShf()
5337 {
5338     if (IsCnsIntOrI() && jitIsScaleIndexShift(gtIntConCommon.IconValue()))
5339     {
5340         return (unsigned)(1 << gtIntConCommon.IconValue());
5341     }
5342
5343     return 0;
5344 }
5345
5346 /*****************************************************************************
5347  *
5348  *  If the given tree is a scaled index (i.e. "op * 4" or "op << 2"), returns
5349  *  the multiplier: 2, 4, or 8; otherwise returns 0. Note that "1" is never
5350  *  returned.
5351  */
5352
5353 unsigned GenTree::GetScaledIndex()
5354 {
5355     // with (!opts.OptEnabled(CLFLG_CONSTANTFOLD) we can have
5356     //   CNS_INT * CNS_INT
5357     //
5358     if (gtOp.gtOp1->IsCnsIntOrI())
5359     {
5360         return 0;
5361     }
5362
5363     switch (gtOper)
5364     {
5365         case GT_MUL:
5366             return gtOp.gtOp2->GetScaleIndexMul();
5367
5368         case GT_LSH:
5369             return gtOp.gtOp2->GetScaleIndexShf();
5370
5371         default:
5372             assert(!"GenTree::GetScaledIndex() called with illegal gtOper");
5373             break;
5374     }
5375
5376     return 0;
5377 }
5378
5379 /*****************************************************************************
5380  *
5381  *  Returns true if "addr" is a GT_ADD node, at least one of whose arguments is an integer (<= 32 bit)
5382  *  constant.  If it returns true, it sets "*offset" to (one of the) constant value(s), and
5383  *  "*addr" to the other argument.
5384  */
5385
5386 bool GenTree::IsAddWithI32Const(GenTree** addr, int* offset)
5387 {
5388     if (OperGet() == GT_ADD)
5389     {
5390         if (gtOp.gtOp1->IsIntCnsFitsInI32())
5391         {
5392             *offset = (int)gtOp.gtOp1->gtIntCon.gtIconVal;
5393             *addr   = gtOp.gtOp2;
5394             return true;
5395         }
5396         else if (gtOp.gtOp2->IsIntCnsFitsInI32())
5397         {
5398             *offset = (int)gtOp.gtOp2->gtIntCon.gtIconVal;
5399             *addr   = gtOp.gtOp1;
5400             return true;
5401         }
5402     }
5403     // Otherwise...
5404     return false;
5405 }
5406
5407 //------------------------------------------------------------------------
5408 // gtGetChildPointer: If 'parent' is the parent of this node, return the pointer
5409 //    to the child node so that it can be modified; otherwise, return nullptr.
5410 //
5411 // Arguments:
5412 //    parent - The possible parent of this node
5413 //
5414 // Return Value:
5415 //    If "child" is a child of "parent", returns a pointer to the child node in the parent
5416 //    (i.e. a pointer to a GenTree pointer).
5417 //    Otherwise, returns nullptr.
5418 //
5419 // Assumptions:
5420 //    'parent' must be non-null
5421 //
5422 // Notes:
5423 //    When FEATURE_MULTIREG_ARGS is defined we can get here with GT_OBJ tree.
5424 //    This happens when we have a struct that is passed in multiple registers.
5425 //
5426 //    Also note that when FEATURE_UNIX_AMD64_STRUCT_PASSING is defined the GT_LDOBJ
5427 //    later gets converted to a GT_FIELD_LIST with two GT_LCL_FLDs in Lower/LowerXArch.
5428 //
5429
GenTree** GenTree::gtGetChildPointer(GenTree* parent) const

{
    // Dispatch on the parent's operator: simple (unary/binary) nodes are
    // handled by the default arm; special node kinds enumerate their
    // operand fields explicitly.
    switch (parent->OperGet())
    {
        default:
            if (!parent->OperIsSimple())
            {
                return nullptr;
            }
            if (this == parent->gtOp.gtOp1)
            {
                return &(parent->gtOp.gtOp1);
            }
            if (this == parent->gtOp.gtOp2)
            {
                return &(parent->gtOp.gtOp2);
            }
            break;

        case GT_CMPXCHG:
            if (this == parent->gtCmpXchg.gtOpLocation)
            {
                return &(parent->gtCmpXchg.gtOpLocation);
            }
            if (this == parent->gtCmpXchg.gtOpValue)
            {
                return &(parent->gtCmpXchg.gtOpValue);
            }
            if (this == parent->gtCmpXchg.gtOpComparand)
            {
                return &(parent->gtCmpXchg.gtOpComparand);
            }
            break;

        // All bounds-check variants share the GenTreeBoundsChk layout.
        case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
        case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
            if (this == parent->gtBoundsChk.gtIndex)
            {
                return &(parent->gtBoundsChk.gtIndex);
            }
            if (this == parent->gtBoundsChk.gtArrLen)
            {
                return &(parent->gtBoundsChk.gtArrLen);
            }
            if (this == parent->gtBoundsChk.gtIndRngFailBB)
            {
                return &(parent->gtBoundsChk.gtIndRngFailBB);
            }
            break;

        case GT_ARR_ELEM:
            if (this == parent->gtArrElem.gtArrObj)
            {
                return &(parent->gtArrElem.gtArrObj);
            }
            // Check every index operand up to the maximum supported rank.
            for (int i = 0; i < GT_ARR_MAX_RANK; i++)
            {
                if (this == parent->gtArrElem.gtArrInds[i])
                {
                    return &(parent->gtArrElem.gtArrInds[i]);
                }
            }
            break;

        case GT_ARR_OFFSET:
            if (this == parent->gtArrOffs.gtOffset)
            {
                return &(parent->gtArrOffs.gtOffset);
            }
            if (this == parent->gtArrOffs.gtIndex)
            {
                return &(parent->gtArrOffs.gtIndex);
            }
            if (this == parent->gtArrOffs.gtArrObj)
            {
                return &(parent->gtArrOffs.gtArrObj);
            }
            break;

        case GT_STORE_DYN_BLK:
        case GT_DYN_BLK:
            if (this == parent->gtDynBlk.gtOp1)
            {
                return &(parent->gtDynBlk.gtOp1);
            }
            if (this == parent->gtDynBlk.gtOp2)
            {
                return &(parent->gtDynBlk.gtOp2);
            }
            if (this == parent->gtDynBlk.gtDynamicSize)
            {
                return &(parent->gtDynBlk.gtDynamicSize);
            }
            break;

        case GT_FIELD:
            if (this == parent->AsField()->gtFldObj)
            {
                return &(parent->AsField()->gtFldObj);
            }
            break;

        case GT_RET_EXPR:
            if (this == parent->gtRetExpr.gtInlineCandidate)
            {
                return &(parent->gtRetExpr.gtInlineCandidate);
            }
            break;

        case GT_CALL:
        {
            GenTreeCall* call = parent->AsCall();

            if (this == call->gtCallObjp)
            {
                return &(call->gtCallObjp);
            }
            // The arg lists are GenTreeArgList*; the cast reinterprets the
            // field as a GenTree* slot so it can be returned uniformly.
            if (this == call->gtCallArgs)
            {
                return reinterpret_cast<GenTree**>(&(call->gtCallArgs));
            }
            if (this == call->gtCallLateArgs)
            {
                return reinterpret_cast<GenTree**>(&(call->gtCallLateArgs));
            }
            if (this == call->gtControlExpr)
            {
                return &(call->gtControlExpr);
            }
            if (call->gtCallType == CT_INDIRECT)
            {
                // Indirect calls have two extra operands: cookie and target address.
                if (this == call->gtCallCookie)
                {
                    return &(call->gtCallCookie);
                }
                if (this == call->gtCallAddr)
                {
                    return &(call->gtCallAddr);
                }
            }
        }
        break;

        case GT_STMT:
            noway_assert(!"Illegal node for gtGetChildPointer()");
            unreached();
    }

    // 'parent' does not directly point to this node.
    return nullptr;
}
5586
//------------------------------------------------------------------------
// TryGetUse: If 'def' is a direct operand of this node, capture a pointer
//    to the use edge (the operand field that points to 'def').
//
// Arguments:
//    def - the operand to search for
//    use - [out] on success, set to the address of the field in this node
//          that references 'def'
//
// Return Value:
//    True if 'def' is an operand of this node; otherwise false.
//
bool GenTree::TryGetUse(GenTree* def, GenTree*** use)
{
    assert(def != nullptr);
    assert(use != nullptr);

    switch (OperGet())
    {
        // Leaf nodes
        case GT_LCL_VAR:
        case GT_LCL_FLD:
        case GT_LCL_VAR_ADDR:
        case GT_LCL_FLD_ADDR:
        case GT_CATCH_ARG:
        case GT_LABEL:
        case GT_FTN_ADDR:
        case GT_RET_EXPR:
        case GT_CNS_INT:
        case GT_CNS_LNG:
        case GT_CNS_DBL:
        case GT_CNS_STR:
        case GT_MEMORYBARRIER:
        case GT_JMP:
        case GT_JCC:
        case GT_SETCC:
        case GT_NO_OP:
        case GT_START_NONGC:
        case GT_PROF_HOOK:
#if !FEATURE_EH_FUNCLETS
        case GT_END_LFIN:
#endif // !FEATURE_EH_FUNCLETS
        case GT_PHI_ARG:
#ifndef LEGACY_BACKEND
        case GT_JMPTABLE:
#endif // LEGACY_BACKEND
        case GT_REG_VAR:
        case GT_CLS_VAR:
        case GT_CLS_VAR_ADDR:
        case GT_ARGPLACE:
        case GT_PHYSREG:
        case GT_EMITNOP:
        case GT_PINVOKE_PROLOG:
        case GT_PINVOKE_EPILOG:
        case GT_IL_OFFSET:
            // Leaves have no operands, so 'def' cannot be used here.
            return false;

        // Standard unary operators
        case GT_STORE_LCL_VAR:
        case GT_STORE_LCL_FLD:
        case GT_NOT:
        case GT_NEG:
        case GT_COPY:
        case GT_RELOAD:
        case GT_ARR_LENGTH:
        case GT_CAST:
        case GT_BITCAST:
        case GT_CKFINITE:
        case GT_LCLHEAP:
        case GT_ADDR:
        case GT_IND:
        case GT_OBJ:
        case GT_BLK:
        case GT_BOX:
        case GT_ALLOCOBJ:
        case GT_RUNTIMELOOKUP:
        case GT_INIT_VAL:
        case GT_JTRUE:
        case GT_SWITCH:
        case GT_NULLCHECK:
        case GT_PUTARG_REG:
        case GT_PUTARG_STK:
        case GT_RETURNTRAP:
        case GT_NOP:
        case GT_RETURN:
        case GT_RETFILT:
            if (def == this->AsUnOp()->gtOp1)
            {
                *use = &this->AsUnOp()->gtOp1;
                return true;
            }
            return false;

        // Variadic nodes
        case GT_PHI:
            // A PHI's operand is a GT_LIST of PHI args; search the list.
            assert(this->AsUnOp()->gtOp1 != nullptr);
            return this->AsUnOp()->gtOp1->TryGetUseList(def, use);

        case GT_FIELD_LIST:
            return TryGetUseList(def, use);

#if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
        case GT_PUTARG_SPLIT:
            // The operand may be either a field list or a single tree.
            if (this->AsUnOp()->gtOp1->gtOper == GT_FIELD_LIST)
            {
                return this->AsUnOp()->gtOp1->TryGetUseList(def, use);
            }
            if (def == this->AsUnOp()->gtOp1)
            {
                *use = &this->AsUnOp()->gtOp1;
                return true;
            }
            return false;
#endif // !LEGACY_BACKEND && _TARGET_ARM_

#ifdef FEATURE_SIMD
        case GT_SIMD:
            // SIMDIntrinsicInitN takes a list operand; others are unary/binary.
            if (this->AsSIMD()->gtSIMDIntrinsicID == SIMDIntrinsicInitN)
            {
                assert(this->AsSIMD()->gtOp1 != nullptr);
                return this->AsSIMD()->gtOp1->TryGetUseList(def, use);
            }

            return TryGetUseBinOp(def, use);
#endif // FEATURE_SIMD

#ifdef FEATURE_HW_INTRINSICS
        case GT_HWIntrinsic:
            // HW intrinsics with more than two operands carry them in a list.
            if ((this->AsHWIntrinsic()->gtOp1 != nullptr) && this->AsHWIntrinsic()->gtOp1->OperIsList())
            {
                return this->AsHWIntrinsic()->gtOp1->TryGetUseList(def, use);
            }

            return TryGetUseBinOp(def, use);
#endif // FEATURE_HW_INTRINSICS

        // Special nodes
        case GT_CMPXCHG:
        {
            GenTreeCmpXchg* const cmpXchg = this->AsCmpXchg();
            if (def == cmpXchg->gtOpLocation)
            {
                *use = &cmpXchg->gtOpLocation;
                return true;
            }
            if (def == cmpXchg->gtOpValue)
            {
                *use = &cmpXchg->gtOpValue;
                return true;
            }
            if (def == cmpXchg->gtOpComparand)
            {
                *use = &cmpXchg->gtOpComparand;
                return true;
            }
            return false;
        }

        case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
        case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
        {
            GenTreeBoundsChk* const boundsChk = this->AsBoundsChk();
            if (def == boundsChk->gtIndex)
            {
                *use = &boundsChk->gtIndex;
                return true;
            }
            if (def == boundsChk->gtArrLen)
            {
                *use = &boundsChk->gtArrLen;
                return true;
            }
            return false;
        }

        case GT_FIELD:
            if (def == this->AsField()->gtFldObj)
            {
                *use = &this->AsField()->gtFldObj;
                return true;
            }
            return false;

        case GT_STMT:
            if (def == this->AsStmt()->gtStmtExpr)
            {
                *use = &this->AsStmt()->gtStmtExpr;
                return true;
            }
            return false;

        case GT_ARR_ELEM:
        {
            GenTreeArrElem* const arrElem = this->AsArrElem();
            if (def == arrElem->gtArrObj)
            {
                *use = &arrElem->gtArrObj;
                return true;
            }
            for (unsigned i = 0; i < arrElem->gtArrRank; i++)
            {
                if (def == arrElem->gtArrInds[i])
                {
                    *use = &arrElem->gtArrInds[i];
                    return true;
                }
            }
            return false;
        }

        case GT_ARR_OFFSET:
        {
            GenTreeArrOffs* const arrOffs = this->AsArrOffs();
            if (def == arrOffs->gtOffset)
            {
                *use = &arrOffs->gtOffset;
                return true;
            }
            if (def == arrOffs->gtIndex)
            {
                *use = &arrOffs->gtIndex;
                return true;
            }
            if (def == arrOffs->gtArrObj)
            {
                *use = &arrOffs->gtArrObj;
                return true;
            }
            return false;
        }

        case GT_DYN_BLK:
        {
            // Note: GT_DYN_BLK has no gtOp2 operand (unlike GT_STORE_DYN_BLK).
            GenTreeDynBlk* const dynBlock = this->AsDynBlk();
            if (def == dynBlock->gtOp1)
            {
                *use = &dynBlock->gtOp1;
                return true;
            }
            if (def == dynBlock->gtDynamicSize)
            {
                *use = &dynBlock->gtDynamicSize;
                return true;
            }
            return false;
        }

        case GT_STORE_DYN_BLK:
        {
            GenTreeDynBlk* const dynBlock = this->AsDynBlk();
            if (def == dynBlock->gtOp1)
            {
                *use = &dynBlock->gtOp1;
                return true;
            }
            if (def == dynBlock->gtOp2)
            {
                *use = &dynBlock->gtOp2;
                return true;
            }
            if (def == dynBlock->gtDynamicSize)
            {
                *use = &dynBlock->gtDynamicSize;
                return true;
            }
            return false;
        }

        case GT_CALL:
        {
            GenTreeCall* const call = this->AsCall();
            if (def == call->gtCallObjp)
            {
                *use = &call->gtCallObjp;
                return true;
            }
            if (def == call->gtControlExpr)
            {
                *use = &call->gtControlExpr;
                return true;
            }
            if (call->gtCallType == CT_INDIRECT)
            {
                if (def == call->gtCallCookie)
                {
                    *use = &call->gtCallCookie;
                    return true;
                }
                if (def == call->gtCallAddr)
                {
                    *use = &call->gtCallAddr;
                    return true;
                }
            }
            if ((call->gtCallArgs != nullptr) && call->gtCallArgs->TryGetUseList(def, use))
            {
                return true;
            }

            return (call->gtCallLateArgs != nullptr) && call->gtCallLateArgs->TryGetUseList(def, use);
        }

        // Binary nodes
        default:
            assert(this->OperIsBinary());
            return TryGetUseBinOp(def, use);
    }
}
5888
5889 bool GenTree::TryGetUseList(GenTree* def, GenTree*** use)
5890 {
5891     assert(def != nullptr);
5892     assert(use != nullptr);
5893
5894     for (GenTreeArgList* node = this->AsArgList(); node != nullptr; node = node->Rest())
5895     {
5896         if (def == node->gtOp1)
5897         {
5898             *use = &node->gtOp1;
5899             return true;
5900         }
5901     }
5902     return false;
5903 }
5904
5905 bool GenTree::TryGetUseBinOp(GenTree* def, GenTree*** use)
5906 {
5907     assert(def != nullptr);
5908     assert(use != nullptr);
5909     assert(this->OperIsBinary());
5910
5911     GenTreeOp* const binOp = this->AsOp();
5912     if (def == binOp->gtOp1)
5913     {
5914         *use = &binOp->gtOp1;
5915         return true;
5916     }
5917     if (def == binOp->gtOp2)
5918     {
5919         *use = &binOp->gtOp2;
5920         return true;
5921     }
5922     return false;
5923 }
5924
5925 //------------------------------------------------------------------------
5926 // GenTree::ReplaceOperand:
5927 //    Replace a given operand to this node with a new operand. If the
//    current node is a call node, this will also update the call
5929 //    argument table if necessary.
5930 //
5931 // Arguments:
5932 //    useEdge - the use edge that points to the operand to be replaced.
5933 //    replacement - the replacement node.
5934 //
5935 void GenTree::ReplaceOperand(GenTree** useEdge, GenTree* replacement)
5936 {
5937     assert(useEdge != nullptr);
5938     assert(replacement != nullptr);
5939     assert(TryGetUse(*useEdge, &useEdge));
5940
5941     if (OperGet() == GT_CALL)
5942     {
5943         AsCall()->ReplaceCallOperand(useEdge, replacement);
5944     }
5945     else
5946     {
5947         *useEdge = replacement;
5948     }
5949 }
5950
5951 //------------------------------------------------------------------------
5952 // gtGetParent: Get the parent of this node, and optionally capture the
5953 //    pointer to the child so that it can be modified.
5954 //
5955 // Arguments:
5956
5957 //    parentChildPointer - A pointer to a GenTree** (yes, that's three
5958 //                         levels, i.e. GenTree ***), which if non-null,
5959 //                         will be set to point to the field in the parent
5960 //                         that points to this node.
5961 //
5962 //    Return value       - The parent of this node.
5963 //
5964 //    Notes:
5965 //
5966 //    This requires that the execution order must be defined (i.e. gtSetEvalOrder() has been called).
5967 //    To enable the child to be replaced, it accepts an argument, parentChildPointer that, if non-null,
5968 //    will be set to point to the child pointer in the parent that points to this node.
5969
5970 GenTree* GenTree::gtGetParent(GenTree*** parentChildPtrPtr) const
5971 {
5972     // Find the parent node; it must be after this node in the execution order.
5973     GenTree** parentChildPtr = nullptr;
5974     GenTree*  parent;
5975     for (parent = gtNext; parent != nullptr; parent = parent->gtNext)
5976     {
5977         parentChildPtr = gtGetChildPointer(parent);
5978         if (parentChildPtr != nullptr)
5979         {
5980             break;
5981         }
5982     }
5983     if (parentChildPtrPtr != nullptr)
5984     {
5985         *parentChildPtrPtr = parentChildPtr;
5986     }
5987     return parent;
5988 }
5989
5990 //------------------------------------------------------------------------------
// OperRequiresAsgFlag : Check whether the operation requires the GTF_ASG flag
//                       regardless of the children's flags.
5993 //
5994
5995 bool GenTree::OperRequiresAsgFlag()
5996 {
5997     if (OperIsAssignment() || OperIs(GT_XADD, GT_XCHG, GT_LOCKADD, GT_CMPXCHG, GT_MEMORYBARRIER))
5998     {
5999         return true;
6000     }
6001 #ifdef FEATURE_HW_INTRINSICS
6002     if (gtOper == GT_HWIntrinsic)
6003     {
6004         GenTreeHWIntrinsic* hwIntrinsicNode = this->AsHWIntrinsic();
6005         if (hwIntrinsicNode->OperIsMemoryStore())
6006         {
6007             // A MemoryStore operation is an assignment
6008             return true;
6009         }
6010     }
6011 #endif // FEATURE_HW_INTRINSICS
6012     return false;
6013 }
6014
6015 //------------------------------------------------------------------------------
6016 // OperIsImplicitIndir : Check whether the operation contains an implicit
6017 //                       indirection.
6018 // Arguments:
6019 //    this      -  a GenTree node
6020 //
6021 // Return Value:
6022 //    True if the given node contains an implicit indirection
6023 //
6024 // Note that for the GT_HWIntrinsic node we have to examine the
6025 // details of the node to determine its result.
6026 //
6027
6028 bool GenTree::OperIsImplicitIndir() const
6029 {
6030     switch (gtOper)
6031     {
6032         case GT_LOCKADD:
6033         case GT_XADD:
6034         case GT_XCHG:
6035         case GT_CMPXCHG:
6036         case GT_BLK:
6037         case GT_OBJ:
6038         case GT_DYN_BLK:
6039         case GT_STORE_BLK:
6040         case GT_STORE_OBJ:
6041         case GT_STORE_DYN_BLK:
6042         case GT_BOX:
6043         case GT_ARR_INDEX:
6044         case GT_ARR_ELEM:
6045         case GT_ARR_OFFSET:
6046             return true;
6047 #ifdef FEATURE_HW_INTRINSICS
6048         case GT_HWIntrinsic:
6049         {
6050             GenTreeHWIntrinsic* hwIntrinsicNode = (const_cast<GenTree*>(this))->AsHWIntrinsic();
6051             return hwIntrinsicNode->OperIsMemoryLoadOrStore();
6052         }
6053 #endif // FEATURE_HW_INTRINSICS
6054         default:
6055             return false;
6056     }
6057 }
6058
6059 //------------------------------------------------------------------------------
6060 // OperMayThrow : Check whether the operation may throw.
6061 //
6062 //
6063 // Arguments:
6064 //    comp      -  Compiler instance
6065 //
6066 // Return Value:
6067 //    True if the given operator may cause an exception
6068
6069 bool GenTree::OperMayThrow(Compiler* comp)
6070 {
6071     GenTree* op;
6072
6073     switch (gtOper)
6074     {
6075         case GT_MOD:
6076         case GT_DIV:
6077         case GT_UMOD:
6078         case GT_UDIV:
6079
6080             /* Division with a non-zero, non-minus-one constant does not throw an exception */
6081
6082             op = gtOp.gtOp2;
6083
6084             if (varTypeIsFloating(op->TypeGet()))
6085             {
6086                 return false; // Floating point division does not throw.
6087             }
6088
6089             // For integers only division by 0 or by -1 can throw
6090             if (op->IsIntegralConst() && !op->IsIntegralConst(0) && !op->IsIntegralConst(-1))
6091             {
6092                 return false;
6093             }
6094             return true;
6095
6096         case GT_INTRINSIC:
6097             // If this is an intrinsic that represents the object.GetType(), it can throw an NullReferenceException.
6098             // Report it as may throw.
6099             // Note: Some of the rest of the existing intrinsics could potentially throw an exception (for example
6100             //       the array and string element access ones). They are handled differently than the GetType intrinsic
6101             //       and are not marked with GTF_EXCEPT. If these are revisited at some point to be marked as
6102             //       GTF_EXCEPT,
6103             //       the code below might need to be specialized to handle them properly.
6104             if ((this->gtFlags & GTF_EXCEPT) != 0)
6105             {
6106                 return true;
6107             }
6108
6109             break;
6110
6111         case GT_CALL:
6112
6113             CorInfoHelpFunc helper;
6114             helper = comp->eeGetHelperNum(this->AsCall()->gtCallMethHnd);
6115             return ((helper == CORINFO_HELP_UNDEF) || !comp->s_helperCallProperties.NoThrow(helper));
6116
6117         case GT_IND:
6118         case GT_BLK:
6119         case GT_OBJ:
6120         case GT_DYN_BLK:
6121         case GT_STORE_BLK:
6122         case GT_NULLCHECK:
6123             return (((this->gtFlags & GTF_IND_NONFAULTING) == 0) && comp->fgAddrCouldBeNull(this->AsIndir()->Addr()));
6124
6125         case GT_ARR_LENGTH:
6126             return (((this->gtFlags & GTF_IND_NONFAULTING) == 0) && comp->fgAddrCouldBeNull(gtOp.gtOp1));
6127
6128         case GT_ARR_BOUNDS_CHECK:
6129         case GT_ARR_ELEM:
6130         case GT_ARR_INDEX:
6131         case GT_ARR_OFFSET:
6132         case GT_LCLHEAP:
6133         case GT_CKFINITE:
6134 #ifdef FEATURE_SIMD
6135         case GT_SIMD_CHK:
6136 #endif // FEATURE_SIMD
6137 #ifdef FEATURE_HW_INTRINSICS
6138         case GT_HW_INTRINSIC_CHK:
6139 #endif // FEATURE_HW_INTRINSICS
6140         case GT_INDEX_ADDR:
6141             return true;
6142
6143 #ifdef FEATURE_HW_INTRINSICS
6144         case GT_HWIntrinsic:
6145         {
6146             GenTreeHWIntrinsic* hwIntrinsicNode = this->AsHWIntrinsic();
6147             assert(hwIntrinsicNode != nullptr);
6148             if (hwIntrinsicNode->OperIsMemoryStore() || hwIntrinsicNode->OperIsMemoryLoad())
6149             {
6150                 // This operation contains an implicit indirection
6151                 //   it could throw a null reference exception.
6152                 //
6153                 return true;
6154             }
6155         }
6156 #endif // FEATURE_HW_INTRINSICS
6157
6158         default:
6159             break;
6160     }
6161
6162     /* Overflow arithmetic operations also throw exceptions */
6163
6164     if (gtOverflowEx())
6165     {
6166         return true;
6167     }
6168
6169     return false;
6170 }
6171
6172 #if DEBUGGABLE_GENTREE
6173 // static
6174 GenTree::VtablePtr GenTree::s_vtablesForOpers[] = {nullptr};
6175 GenTree::VtablePtr GenTree::s_vtableForOp       = nullptr;
6176
// Returns the vtable pointer of the most-derived GenTree subtype that is used for 'oper'.
// Results are memoized in s_vtablesForOpers; plain unary/binary opers share the single
// GenTreeOp vtable cached in s_vtableForOp. (Debug-only machinery - see DEBUGGABLE_GENTREE.)
GenTree::VtablePtr GenTree::GetVtableForOper(genTreeOps oper)
{
    noway_assert(oper < GT_COUNT);

    // First, check a cache.

    if (s_vtablesForOpers[oper] != nullptr)
    {
        return s_vtablesForOpers[oper];
    }

    // Otherwise, look up the correct vtable entry. Note that we want the most derived GenTree subtype
    // for an oper. E.g., GT_LCL_VAR is defined in GTSTRUCT_3 as GenTreeLclVar and in GTSTRUCT_N as
    // GenTreeLclVarCommon. We want the GenTreeLclVar vtable, since nothing should actually be
    // instantiated as a GenTreeLclVarCommon.

    VtablePtr res = nullptr;
    switch (oper)
    {

// clang-format off

#define GTSTRUCT_0(nm, tag)                             /*handle explicitly*/
#define GTSTRUCT_1(nm, tag)                             \
        case tag:                                       \
        {                                               \
            GenTree##nm gt;                             \
            res = *reinterpret_cast<VtablePtr*>(&gt);   \
        }                                               \
        break;
#define GTSTRUCT_2(nm, tag, tag2)                       \
        case tag:                                       \
        case tag2:                                      \
        {                                               \
            GenTree##nm gt;                             \
            res = *reinterpret_cast<VtablePtr*>(&gt);   \
        }                                               \
        break;
#define GTSTRUCT_3(nm, tag, tag2, tag3)                 \
        case tag:                                       \
        case tag2:                                      \
        case tag3:                                      \
        {                                               \
            GenTree##nm gt;                             \
            res = *reinterpret_cast<VtablePtr*>(&gt);   \
        }                                               \
        break;
#define GTSTRUCT_4(nm, tag, tag2, tag3, tag4)           \
        case tag:                                       \
        case tag2:                                      \
        case tag3:                                      \
        case tag4:                                      \
        {                                               \
            GenTree##nm gt;                             \
            res = *reinterpret_cast<VtablePtr*>(&gt);   \
        }                                               \
        break;
#define GTSTRUCT_N(nm, ...)                             /*handle explicitly*/
#define GTSTRUCT_2_SPECIAL(nm, tag, tag2)               /*handle explicitly*/
#define GTSTRUCT_3_SPECIAL(nm, tag, tag2, tag3)         /*handle explicitly*/
#include "gtstructs.h"

        // clang-format on

        // Handle the special cases.
        // The following opers are in GTSTRUCT_N but no other place (namely, no subtypes).

        case GT_STORE_BLK:
        case GT_BLK:
        {
            GenTreeBlk gt;
            res = *reinterpret_cast<VtablePtr*>(&gt);
        }
        break;

        case GT_IND:
        case GT_NULLCHECK:
        {
            GenTreeIndir gt;
            res = *reinterpret_cast<VtablePtr*>(&gt);
        }
        break;

        // Handle GT_LIST (but not GT_FIELD_LIST, which is also in a GTSTRUCT_1).

        case GT_LIST:
        {
            GenTreeArgList gt;
            res = *reinterpret_cast<VtablePtr*>(&gt);
        }
        break;

        // We don't need to handle GTSTRUCT_N for LclVarCommon, since all those allowed opers are specified
        // in their proper subtype. Similarly for GenTreeIndir.

        default:
        {
            // Should be unary or binary op.
            if (s_vtableForOp == nullptr)
            {
                unsigned opKind = OperKind(oper);
                assert(!IsExOp(opKind));
                assert(OperIsSimple(oper) || OperIsLeaf(oper));
                // Need to provide non-null operands.
                GenTreeIntCon dummyOp(TYP_INT, 0);
                GenTreeOp     gt(oper, TYP_INT, &dummyOp, ((opKind & GTK_UNOP) ? nullptr : &dummyOp));
                s_vtableForOp = *reinterpret_cast<VtablePtr*>(&gt);
            }
            res = s_vtableForOp;
            break;
        }
    }
    s_vtablesForOpers[oper] = res;
    return res;
}
6292
6293 void GenTree::SetVtableForOper(genTreeOps oper)
6294 {
6295     *reinterpret_cast<VtablePtr*>(this) = GetVtableForOper(oper);
6296 }
6297 #endif // DEBUGGABLE_GENTREE
6298
6299 GenTree* Compiler::gtNewOperNode(genTreeOps oper, var_types type, GenTree* op1, GenTree* op2)
6300 {
6301     assert(op1 != nullptr);
6302     assert(op2 != nullptr);
6303
6304     // We should not be allocating nodes that extend GenTreeOp with this;
6305     // should call the appropriate constructor for the extended type.
6306     assert(!GenTree::IsExOp(GenTree::OperKind(oper)));
6307
6308     GenTree* node = new (this, oper) GenTreeOp(oper, type, op1, op2);
6309
6310     return node;
6311 }
6312
6313 GenTree* Compiler::gtNewQmarkNode(var_types type, GenTree* cond, GenTree* colon)
6314 {
6315     compQmarkUsed   = true;
6316     GenTree* result = new (this, GT_QMARK) GenTreeQmark(type, cond, colon, this);
6317 #ifdef DEBUG
6318     if (compQmarkRationalized)
6319     {
6320         fgCheckQmarkAllowedForm(result);
6321     }
6322 #endif
6323     return result;
6324 }
6325
// Constructs a GT_QMARK node: 'cond' selects between the two arms carried by the
// GT_COLON operand 'colonOp'. On the legacy backend the qmark is also registered
// with the root compiler so liveness sets can be tracked per-arm.
GenTreeQmark::GenTreeQmark(var_types type, GenTree* cond, GenTree* colonOp, Compiler* comp)
    : GenTreeOp(GT_QMARK, type, cond, colonOp)
#ifdef LEGACY_BACKEND
    , gtThenLiveSet(VarSetOps::UninitVal())
    , gtElseLiveSet(VarSetOps::UninitVal())
#endif
{
    // These must follow a specific form: an int-typed condition and a GT_COLON.
    assert(cond != nullptr && cond->TypeGet() == TYP_INT);
    assert(colonOp != nullptr && colonOp->OperGet() == GT_COLON);

#ifdef LEGACY_BACKEND
    comp->impInlineRoot()->compQMarks->Push(this);
#endif
}
6341
6342 GenTreeIntCon* Compiler::gtNewIconNode(ssize_t value, var_types type)
6343 {
6344     return new (this, GT_CNS_INT) GenTreeIntCon(type, value);
6345 }
6346
6347 // return a new node representing the value in a physical register
6348 GenTree* Compiler::gtNewPhysRegNode(regNumber reg, var_types type)
6349 {
6350     assert(genIsValidIntReg(reg) || (reg == REG_SPBASE));
6351     GenTree* result = new (this, GT_PHYSREG) GenTreePhysReg(reg, type);
6352     return result;
6353 }
6354
6355 #ifndef LEGACY_BACKEND
6356 GenTree* Compiler::gtNewJmpTableNode()
6357 {
6358     GenTree* node                     = new (this, GT_JMPTABLE) GenTreeJumpTable(TYP_INT);
6359     node->gtJumpTable.gtJumpTableAddr = 0;
6360     return node;
6361 }
6362 #endif // !LEGACY_BACKEND
6363
6364 /*****************************************************************************
6365  *
6366  *  Converts an annotated token into an icon flags (so that we will later be
6367  *  able to tell the type of the handle that will be embedded in the icon
6368  *  node)
6369  */
6370
6371 unsigned Compiler::gtTokenToIconFlags(unsigned token)
6372 {
6373     unsigned flags = 0;
6374
6375     switch (TypeFromToken(token))
6376     {
6377         case mdtTypeRef:
6378         case mdtTypeDef:
6379         case mdtTypeSpec:
6380             flags = GTF_ICON_CLASS_HDL;
6381             break;
6382
6383         case mdtMethodDef:
6384             flags = GTF_ICON_METHOD_HDL;
6385             break;
6386
6387         case mdtFieldDef:
6388             flags = GTF_ICON_FIELD_HDL;
6389             break;
6390
6391         default:
6392             flags = GTF_ICON_TOKEN_HDL;
6393             break;
6394     }
6395
6396     return flags;
6397 }
6398
6399 //-----------------------------------------------------------------------------------------
6400 // gtNewIndOfIconHandleNode: Creates an indirection GenTree node of a constant handle
6401 //
6402 // Arguments:
6403 //    indType     - The type returned by the indirection node
6404 //    addr        - The constant address to read from
6405 //    iconFlags   - The GTF_ICON flag value that specifies the kind of handle that we have
6406 //    isInvariant - The indNode should also be marked as invariant
6407 //
6408 // Return Value:
6409 //    Returns a GT_IND node representing value at the address provided by 'value'
6410 //
6411 // Notes:
6412 //    The GT_IND node is marked as non-faulting
6413 //    If the indType is GT_REF we also mark the indNode as GTF_GLOB_REF
6414 //
6415
6416 GenTree* Compiler::gtNewIndOfIconHandleNode(var_types indType, size_t addr, unsigned iconFlags, bool isInvariant)
6417 {
6418     GenTree* addrNode = gtNewIconHandleNode(addr, iconFlags);
6419     GenTree* indNode  = gtNewOperNode(GT_IND, indType, addrNode);
6420
6421     // This indirection won't cause an exception.
6422     //
6423     indNode->gtFlags |= GTF_IND_NONFAULTING;
6424
6425     // String Literal handles are indirections that return a TYP_REF.
6426     // They are pointers into the GC heap and they are not invariant
6427     // as the address is a reportable GC-root and as such it can be
6428     // modified during a GC collection
6429     //
6430     if (indType == TYP_REF)
6431     {
6432         // This indirection points into the gloabal heap
6433         indNode->gtFlags |= GTF_GLOB_REF;
6434     }
6435     if (isInvariant)
6436     {
6437         // This indirection also is invariant.
6438         indNode->gtFlags |= GTF_IND_INVARIANT;
6439     }
6440     return indNode;
6441 }
6442
6443 /*****************************************************************************
6444  *
6445  *  Allocates a integer constant entry that represents a HANDLE to something.
6446  *  It may not be allowed to embed HANDLEs directly into the JITed code (for eg,
6447  *  as arguments to JIT helpers). Get a corresponding value that can be embedded.
6448  *  If the handle needs to be accessed via an indirection, pValue points to it.
6449  */
6450
6451 GenTree* Compiler::gtNewIconEmbHndNode(void* value, void* pValue, unsigned iconFlags, void* compileTimeHandle)
6452 {
6453     GenTree* iconNode;
6454     GenTree* handleNode;
6455
6456     if (value != nullptr)
6457     {
6458         // When 'value' is non-null, pValue is required to be null
6459         assert(pValue == nullptr);
6460
6461         // use 'value' to construct an integer constant node
6462         iconNode = gtNewIconHandleNode((size_t)value, iconFlags);
6463
6464         // 'value' is the handle
6465         handleNode = iconNode;
6466     }
6467     else
6468     {
6469         // When 'value' is null, pValue is required to be non-null
6470         assert(pValue != nullptr);
6471
6472         // use 'pValue' to construct an integer constant node
6473         iconNode = gtNewIconHandleNode((size_t)pValue, iconFlags);
6474
6475         // 'pValue' is an address of a location that contains the handle
6476
6477         // construct the indirection of 'pValue'
6478         handleNode = gtNewOperNode(GT_IND, TYP_I_IMPL, iconNode);
6479
6480         // This indirection won't cause an exception.
6481         handleNode->gtFlags |= GTF_IND_NONFAULTING;
6482 #if 0
6483         // It should also be invariant, but marking it as such leads to bad diffs.
6484
6485         // This indirection also is invariant.
6486         handleNode->gtFlags |= GTF_IND_INVARIANT;
6487 #endif
6488     }
6489
6490     iconNode->gtIntCon.gtCompileTimeHandle = (size_t)compileTimeHandle;
6491
6492     return handleNode;
6493 }
6494
6495 /*****************************************************************************/
6496 GenTree* Compiler::gtNewStringLiteralNode(InfoAccessType iat, void* pValue)
6497 {
6498     GenTree* tree = nullptr;
6499
6500     switch (iat)
6501     {
6502         case IAT_VALUE: // constructStringLiteral in CoreRT case can return IAT_VALUE
6503             tree         = gtNewIconEmbHndNode(pValue, nullptr, GTF_ICON_STR_HDL, nullptr);
6504             tree->gtType = TYP_REF;
6505             tree         = gtNewOperNode(GT_NOP, TYP_REF, tree); // prevents constant folding
6506             break;
6507
6508         case IAT_PVALUE: // The value needs to be accessed via an indirection
6509             // Create an indirection
6510             tree = gtNewIndOfIconHandleNode(TYP_REF, (size_t)pValue, GTF_ICON_STR_HDL, false);
6511             break;
6512
6513         case IAT_PPVALUE: // The value needs to be accessed via a double indirection
6514             // Create the first indirection
6515             tree = gtNewIndOfIconHandleNode(TYP_I_IMPL, (size_t)pValue, GTF_ICON_PSTR_HDL, true);
6516
6517             // Create the second indirection
6518             tree = gtNewOperNode(GT_IND, TYP_REF, tree);
6519             // This indirection won't cause an exception.
6520             tree->gtFlags |= GTF_IND_NONFAULTING;
6521             // This indirection points into the gloabal heap (it is String Object)
6522             tree->gtFlags |= GTF_GLOB_REF;
6523             break;
6524
6525         default:
6526             noway_assert(!"Unexpected InfoAccessType");
6527     }
6528
6529     return tree;
6530 }
6531
6532 /*****************************************************************************/
6533
6534 GenTree* Compiler::gtNewLconNode(__int64 value)
6535 {
6536 #ifdef _TARGET_64BIT_
6537     GenTree* node = new (this, GT_CNS_INT) GenTreeIntCon(TYP_LONG, value);
6538 #else
6539     GenTree* node = new (this, GT_CNS_LNG) GenTreeLngCon(value);
6540 #endif
6541
6542     return node;
6543 }
6544
6545 GenTree* Compiler::gtNewDconNode(double value)
6546 {
6547     GenTree* node = new (this, GT_CNS_DBL) GenTreeDblCon(value);
6548
6549     return node;
6550 }
6551
6552 GenTree* Compiler::gtNewSconNode(int CPX, CORINFO_MODULE_HANDLE scpHandle)
6553 {
6554
6555 #if SMALL_TREE_NODES
6556
6557     /* 'GT_CNS_STR' nodes later get transformed into 'GT_CALL' */
6558
6559     assert(GenTree::s_gtNodeSizes[GT_CALL] > GenTree::s_gtNodeSizes[GT_CNS_STR]);
6560
6561     GenTree* node = new (this, GT_CALL) GenTreeStrCon(CPX, scpHandle DEBUGARG(/*largeNode*/ true));
6562 #else
6563     GenTree* node = new (this, GT_CNS_STR) GenTreeStrCon(CPX, scpHandle DEBUGARG(/*largeNode*/ true));
6564 #endif
6565
6566     return node;
6567 }
6568
6569 GenTree* Compiler::gtNewZeroConNode(var_types type)
6570 {
6571     GenTree* zero;
6572     switch (type)
6573     {
6574         case TYP_INT:
6575             zero = gtNewIconNode(0);
6576             break;
6577
6578         case TYP_BYREF:
6579             __fallthrough;
6580
6581         case TYP_REF:
6582             zero         = gtNewIconNode(0);
6583             zero->gtType = type;
6584             break;
6585
6586         case TYP_LONG:
6587             zero = gtNewLconNode(0);
6588             break;
6589
6590         case TYP_FLOAT:
6591             zero         = gtNewDconNode(0.0);
6592             zero->gtType = type;
6593             break;
6594
6595         case TYP_DOUBLE:
6596             zero = gtNewDconNode(0.0);
6597             break;
6598
6599         default:
6600             assert(!"Bad type");
6601             zero = nullptr;
6602             break;
6603     }
6604     return zero;
6605 }
6606
6607 GenTree* Compiler::gtNewOneConNode(var_types type)
6608 {
6609     switch (type)
6610     {
6611         case TYP_INT:
6612         case TYP_UINT:
6613             return gtNewIconNode(1);
6614
6615         case TYP_LONG:
6616         case TYP_ULONG:
6617             return gtNewLconNode(1);
6618
6619         case TYP_FLOAT:
6620         {
6621             GenTree* one = gtNewDconNode(1.0);
6622             one->gtType  = type;
6623             return one;
6624         }
6625
6626         case TYP_DOUBLE:
6627             return gtNewDconNode(1.0);
6628
6629         default:
6630             assert(!"Bad type");
6631             return nullptr;
6632     }
6633 }
6634
6635 #ifdef FEATURE_SIMD
6636 //---------------------------------------------------------------------
6637 // gtNewSIMDVectorZero: create a GT_SIMD node for Vector<T>.Zero
6638 //
6639 // Arguments:
6640 //    simdType  -  simd vector type
6641 //    baseType  -  element type of vector
6642 //    size      -  size of vector in bytes
6643 GenTree* Compiler::gtNewSIMDVectorZero(var_types simdType, var_types baseType, unsigned size)
6644 {
6645     baseType         = genActualType(baseType);
6646     GenTree* initVal = gtNewZeroConNode(baseType);
6647     initVal->gtType  = baseType;
6648     return gtNewSIMDNode(simdType, initVal, nullptr, SIMDIntrinsicInit, baseType, size);
6649 }
6650
6651 //---------------------------------------------------------------------
6652 // gtNewSIMDVectorOne: create a GT_SIMD node for Vector<T>.One
6653 //
6654 // Arguments:
6655 //    simdType  -  simd vector type
6656 //    baseType  -  element type of vector
6657 //    size      -  size of vector in bytes
6658 GenTree* Compiler::gtNewSIMDVectorOne(var_types simdType, var_types baseType, unsigned size)
6659 {
6660     GenTree* initVal;
6661     if (varTypeIsSmallInt(baseType))
6662     {
6663         unsigned baseSize = genTypeSize(baseType);
6664         int      val;
6665         if (baseSize == 1)
6666         {
6667             val = 0x01010101;
6668         }
6669         else
6670         {
6671             val = 0x00010001;
6672         }
6673         initVal = gtNewIconNode(val);
6674     }
6675     else
6676     {
6677         initVal = gtNewOneConNode(baseType);
6678     }
6679
6680     baseType        = genActualType(baseType);
6681     initVal->gtType = baseType;
6682     return gtNewSIMDNode(simdType, initVal, nullptr, SIMDIntrinsicInit, baseType, size);
6683 }
6684 #endif // FEATURE_SIMD
6685
// Creates a GT_CALL node for an indirect call: the target address is passed in
// the method-handle slot, and CT_INDIRECT tells consumers to treat it as an
// address rather than a handle.
GenTreeCall* Compiler::gtNewIndCallNode(GenTree* addr, var_types type, GenTreeArgList* args, IL_OFFSETX ilOffset)
{
    return gtNewCallNode(CT_INDIRECT, (CORINFO_METHOD_HANDLE)addr, type, args, ilOffset);
}
6690
//------------------------------------------------------------------------
// gtNewCallNode: Allocate and initialize a GT_CALL node.
//
// Arguments:
//    callType - the kind of call (CT_* value; CT_INDIRECT carries an address)
//    callHnd  - the method handle (or the call address for CT_INDIRECT)
//    type     - the call's return type
//    args     - the argument list (may be nullptr)
//    ilOffset - IL offset, recorded for managed-return-value sequence points
//
// Return Value:
//    The new call node with all of its fields initialized.

GenTreeCall* Compiler::gtNewCallNode(
    gtCallTypes callType, CORINFO_METHOD_HANDLE callHnd, var_types type, GenTreeArgList* args, IL_OFFSETX ilOffset)
{
    GenTreeCall* node = new (this, GT_CALL) GenTreeCall(genActualType(type));

    // Calls are flagged as such and as referencing the global heap; they also
    // inherit any effect flags from their argument list.
    node->gtFlags |= (GTF_CALL | GTF_GLOB_REF);
    if (args)
    {
        node->gtFlags |= (args->gtFlags & GTF_ALL_EFFECT);
    }
    node->gtCallType      = callType;
    node->gtCallMethHnd   = callHnd;
    node->gtCallArgs      = args;
    node->gtCallObjp      = nullptr;
    node->fgArgInfo       = nullptr;
    node->callSig         = nullptr;
    node->gtRetClsHnd     = nullptr;
    node->gtControlExpr   = nullptr;
    node->gtCallMoreFlags = 0;

    // gtCallCookie and gtInlineCandidateInfo share storage; initialize the one
    // that is live for this call kind.
    if (callType == CT_INDIRECT)
    {
        node->gtCallCookie = nullptr;
    }
    else
    {
        node->gtInlineCandidateInfo = nullptr;
    }
    node->gtCallLateArgs = nullptr;
    node->gtReturnType   = type;

#ifdef LEGACY_BACKEND
    node->gtCallRegUsedMask = RBM_NONE;
#endif // LEGACY_BACKEND

#ifdef FEATURE_READYTORUN_COMPILER
    node->gtEntryPoint.addr       = nullptr;
    node->gtEntryPoint.accessType = IAT_VALUE;
#endif

#if defined(DEBUG) || defined(INLINE_DATA)
    // These get updated after call node is built.
    node->gtInlineObservation = InlineObservation::CALLEE_UNUSED_INITIAL;
    node->gtRawILOffset       = BAD_IL_OFFSET;
#endif

    // Spec: Managed Retval sequence points needs to be generated while generating debug info for debuggable code.
    //
    // Implementation note: if not generating MRV info genCallSite2ILOffsetMap will be NULL and
    // codegen will pass BAD_IL_OFFSET as IL offset of a call node to emitter, which will cause emitter
    // not to emit IP mapping entry.
    if (opts.compDbgCode && opts.compDbgInfo)
    {
        // Managed Retval - IL offset of the call.  This offset is used to emit a
        // CALL_INSTRUCTION type sequence point while emitting corresponding native call.
        //
        // TODO-Cleanup:
        // a) (Opt) We need not store this offset if the method doesn't return a
        // value.  Rather it can be made BAD_IL_OFFSET to prevent a sequence
        // point being emitted.
        //
        // b) (Opt) Add new sequence points only if requested by debugger through
        // a new boundary type - ICorDebugInfo::BoundaryTypes
        if (genCallSite2ILOffsetMap == nullptr)
        {
            genCallSite2ILOffsetMap = new (getAllocator()) CallSiteILOffsetTable(getAllocator());
        }

        // Make sure that there are no duplicate entries for a given call node
        IL_OFFSETX value;
        assert(!genCallSite2ILOffsetMap->Lookup(node, &value));
        genCallSite2ILOffsetMap->Set(node, ilOffset);
    }

    // Initialize gtOtherRegs
    node->ClearOtherRegs();

    // Initialize spill flags of gtOtherRegs
    node->ClearOtherRegFlags();

#if (defined(_TARGET_X86_) || defined(_TARGET_ARM_)) && !defined(LEGACY_BACKEND)
    // Initialize the multi-reg long return info if necessary
    if (varTypeIsLong(node))
    {
        // The return type will remain as the incoming long type
        node->gtReturnType = node->gtType;

        // Initialize Return type descriptor of call node
        ReturnTypeDesc* retTypeDesc = node->GetReturnTypeDesc();
        retTypeDesc->InitializeLongReturnType(this);

        // must be a long returned in two registers
        assert(retTypeDesc->GetReturnRegCount() == 2);
    }
#endif // (defined(_TARGET_X86_) || defined(_TARGET_ARM_)) && !defined(LEGACY_BACKEND)

    return node;
}
6789
6790 GenTree* Compiler::gtNewLclvNode(unsigned lnum, var_types type, IL_OFFSETX ILoffs)
6791 {
6792     // We need to ensure that all struct values are normalized.
6793     // It might be nice to assert this in general, but we have assignments of int to long.
6794     if (varTypeIsStruct(type))
6795     {
6796         // Make an exception for implicit by-ref parameters during global morph, since
6797         // their lvType has been updated to byref but their appearances have not yet all
6798         // been rewritten and so may have struct type still.
6799         assert(type == lvaTable[lnum].lvType ||
6800                (lvaIsImplicitByRefLocal(lnum) && fgGlobalMorph && (lvaTable[lnum].lvType == TYP_BYREF)));
6801     }
6802     GenTree* node = new (this, GT_LCL_VAR) GenTreeLclVar(type, lnum, ILoffs);
6803
6804     /* Cannot have this assert because the inliner uses this function
6805      * to add temporaries */
6806
6807     // assert(lnum < lvaCount);
6808
6809     return node;
6810 }
6811
6812 GenTree* Compiler::gtNewLclLNode(unsigned lnum, var_types type, IL_OFFSETX ILoffs)
6813 {
6814     // We need to ensure that all struct values are normalized.
6815     // It might be nice to assert this in general, but we have assignments of int to long.
6816     if (varTypeIsStruct(type))
6817     {
6818         // Make an exception for implicit by-ref parameters during global morph, since
6819         // their lvType has been updated to byref but their appearances have not yet all
6820         // been rewritten and so may have struct type still.
6821         assert(type == lvaTable[lnum].lvType ||
6822                (lvaIsImplicitByRefLocal(lnum) && fgGlobalMorph && (lvaTable[lnum].lvType == TYP_BYREF)));
6823     }
6824 #if SMALL_TREE_NODES
6825     /* This local variable node may later get transformed into a large node */
6826
6827     // assert(GenTree::s_gtNodeSizes[GT_CALL] > GenTree::s_gtNodeSizes[GT_LCL_VAR]);
6828
6829     GenTree* node = new (this, GT_CALL) GenTreeLclVar(type, lnum, ILoffs DEBUGARG(/*largeNode*/ true));
6830 #else
6831     GenTree* node = new (this, GT_LCL_VAR) GenTreeLclVar(type, lnum, ILoffs DEBUGARG(/*largeNode*/ true));
6832 #endif
6833
6834     return node;
6835 }
6836
6837 GenTreeLclFld* Compiler::gtNewLclFldNode(unsigned lnum, var_types type, unsigned offset)
6838 {
6839     GenTreeLclFld* node = new (this, GT_LCL_FLD) GenTreeLclFld(type, lnum, offset);
6840
6841     /* Cannot have this assert because the inliner uses this function
6842      * to add temporaries */
6843
6844     // assert(lnum < lvaCount);
6845
6846     node->gtFieldSeq = FieldSeqStore::NotAField();
6847     return node;
6848 }
6849
6850 GenTree* Compiler::gtNewInlineCandidateReturnExpr(GenTree* inlineCandidate, var_types type)
6851
6852 {
6853     assert(GenTree::s_gtNodeSizes[GT_RET_EXPR] == TREE_NODE_SZ_LARGE);
6854
6855     GenTree* node = new (this, GT_RET_EXPR) GenTreeRetExpr(type);
6856
6857     node->gtRetExpr.gtInlineCandidate = inlineCandidate;
6858
6859     if (varTypeIsStruct(inlineCandidate) && !inlineCandidate->OperIsBlkOp())
6860     {
6861         node->gtRetExpr.gtRetClsHnd = gtGetStructHandle(inlineCandidate);
6862     }
6863
6864     // GT_RET_EXPR node eventually might be bashed back to GT_CALL (when inlining is aborted for example).
6865     // Therefore it should carry the GTF_CALL flag so that all the rules about spilling can apply to it as well.
6866     // For example, impImportLeave or CEE_POP need to spill GT_RET_EXPR before empty the evaluation stack.
6867     node->gtFlags |= GTF_CALL;
6868
6869     return node;
6870 }
6871
6872 GenTreeArgList* Compiler::gtNewListNode(GenTree* op1, GenTreeArgList* op2)
6873 {
6874     assert((op1 != nullptr) && (op1->OperGet() != GT_LIST));
6875
6876     return new (this, GT_LIST) GenTreeArgList(op1, op2);
6877 }
6878
6879 /*****************************************************************************
6880  *
6881  *  Create a list out of one value.
6882  */
6883
6884 GenTreeArgList* Compiler::gtNewArgList(GenTree* arg)
6885 {
6886     return new (this, GT_LIST) GenTreeArgList(arg);
6887 }
6888
6889 /*****************************************************************************
6890  *
6891  *  Create a list out of the two values.
6892  */
6893
6894 GenTreeArgList* Compiler::gtNewArgList(GenTree* arg1, GenTree* arg2)
6895 {
6896     return new (this, GT_LIST) GenTreeArgList(arg1, gtNewArgList(arg2));
6897 }
6898
6899 /*****************************************************************************
6900  *
6901  *  Create a list out of the three values.
6902  */
6903
6904 GenTreeArgList* Compiler::gtNewArgList(GenTree* arg1, GenTree* arg2, GenTree* arg3)
6905 {
6906     return new (this, GT_LIST) GenTreeArgList(arg1, gtNewArgList(arg2, arg3));
6907 }
6908
6909 /*****************************************************************************
6910  *
6911  *  Create a list out of the three values.
6912  */
6913
6914 GenTreeArgList* Compiler::gtNewArgList(GenTree* arg1, GenTree* arg2, GenTree* arg3, GenTree* arg4)
6915 {
6916     return new (this, GT_LIST) GenTreeArgList(arg1, gtNewArgList(arg2, arg3, arg4));
6917 }
6918
6919 /*****************************************************************************
6920  *
6921  *  Given a GT_CALL node, access the fgArgInfo and find the entry
6922  *  that has the matching argNum and return the fgArgTableEntryPtr
6923  */
6924
6925 fgArgTabEntry* Compiler::gtArgEntryByArgNum(GenTreeCall* call, unsigned argNum)
6926 {
6927     fgArgInfo* argInfo = call->fgArgInfo;
6928     noway_assert(argInfo != nullptr);
6929
6930     unsigned        argCount       = argInfo->ArgCount();
6931     fgArgTabEntry** argTable       = argInfo->ArgTable();
6932     fgArgTabEntry*  curArgTabEntry = nullptr;
6933
6934     for (unsigned i = 0; i < argCount; i++)
6935     {
6936         curArgTabEntry = argTable[i];
6937         if (curArgTabEntry->argNum == argNum)
6938         {
6939             return curArgTabEntry;
6940         }
6941     }
6942     noway_assert(!"gtArgEntryByArgNum: argNum not found");
6943     return nullptr;
6944 }
6945
6946 /*****************************************************************************
6947  *
6948  *  Given a GT_CALL node, access the fgArgInfo and find the entry
6949  *  that has the matching node and return the fgArgTableEntryPtr
6950  */
6951
6952 fgArgTabEntry* Compiler::gtArgEntryByNode(GenTreeCall* call, GenTree* node)
6953 {
6954     fgArgInfo* argInfo = call->fgArgInfo;
6955     noway_assert(argInfo != nullptr);
6956
6957     unsigned        argCount       = argInfo->ArgCount();
6958     fgArgTabEntry** argTable       = argInfo->ArgTable();
6959     fgArgTabEntry*  curArgTabEntry = nullptr;
6960
6961     for (unsigned i = 0; i < argCount; i++)
6962     {
6963         curArgTabEntry = argTable[i];
6964
6965         if (curArgTabEntry->node == node)
6966         {
6967             return curArgTabEntry;
6968         }
6969         else if (curArgTabEntry->parent != nullptr)
6970         {
6971             assert(curArgTabEntry->parent->OperIsList());
6972             if (curArgTabEntry->parent->Current() == node)
6973             {
6974                 return curArgTabEntry;
6975             }
6976         }
6977         else // (curArgTabEntry->parent == NULL)
6978         {
6979             if (call->gtCallObjp == node)
6980             {
6981                 return curArgTabEntry;
6982             }
6983         }
6984     }
6985     noway_assert(!"gtArgEntryByNode: node not found");
6986     return nullptr;
6987 }
6988
6989 /*****************************************************************************
6990  *
6991  *  Find and return the entry with the given "lateArgInx".  Requires that one is found
6992  *  (asserts this).
6993  */
6994 fgArgTabEntry* Compiler::gtArgEntryByLateArgIndex(GenTreeCall* call, unsigned lateArgInx)
6995 {
6996     fgArgInfo* argInfo = call->fgArgInfo;
6997     noway_assert(argInfo != nullptr);
6998
6999     unsigned        argCount       = argInfo->ArgCount();
7000     fgArgTabEntry** argTable       = argInfo->ArgTable();
7001     fgArgTabEntry*  curArgTabEntry = nullptr;
7002
7003     for (unsigned i = 0; i < argCount; i++)
7004     {
7005         curArgTabEntry = argTable[i];
7006         if (curArgTabEntry->lateArgInx == lateArgInx)
7007         {
7008             return curArgTabEntry;
7009         }
7010     }
7011     noway_assert(!"gtArgEntryByNode: node not found");
7012     return nullptr;
7013 }
7014
7015 /*****************************************************************************
7016  *
7017  *  Given an fgArgTabEntry*, return true if it is the 'this' pointer argument.
7018  */
7019 bool Compiler::gtArgIsThisPtr(fgArgTabEntry* argEntry)
7020 {
7021     return (argEntry->parent == nullptr);
7022 }
7023
7024 /*****************************************************************************
7025  *
7026  *  Create a node that will assign 'src' to 'dst'.
7027  */
7028
7029 GenTree* Compiler::gtNewAssignNode(GenTree* dst, GenTree* src)
7030 {
7031     /* Mark the target as being assigned */
7032
7033     if ((dst->gtOper == GT_LCL_VAR) || (dst->OperGet() == GT_LCL_FLD))
7034     {
7035         dst->gtFlags |= GTF_VAR_DEF;
7036         if (dst->IsPartialLclFld(this))
7037         {
7038             // We treat these partial writes as combined uses and defs.
7039             dst->gtFlags |= GTF_VAR_USEASG;
7040         }
7041     }
7042     dst->gtFlags |= GTF_DONT_CSE;
7043
7044     /* Create the assignment node */
7045
7046     GenTree* asg = gtNewOperNode(GT_ASG, dst->TypeGet(), dst, src);
7047
7048     /* Mark the expression as containing an assignment */
7049
7050     asg->gtFlags |= GTF_ASG;
7051
7052     return asg;
7053 }
7054
7055 //------------------------------------------------------------------------
7056 // gtNewObjNode: Creates a new Obj node.
7057 //
7058 // Arguments:
7059 //    structHnd - The class handle of the struct type.
7060 //    addr      - The address of the struct.
7061 //
7062 // Return Value:
7063 //    Returns a node representing the struct value at the given address.
7064 //
7065 // Assumptions:
7066 //    Any entry and exit conditions, such as required preconditions of
7067 //    data structures, memory to be freed by caller, etc.
7068 //
7069 // Notes:
7070 //    It will currently return a GT_OBJ node for any struct type, but may
7071 //    return a GT_IND or a non-indirection for a scalar type.
7072 //    The node will not yet have its GC info initialized. This is because
7073 //    we may not need this info if this is an r-value.
7074
7075 GenTree* Compiler::gtNewObjNode(CORINFO_CLASS_HANDLE structHnd, GenTree* addr)
7076 {
7077     var_types nodeType = impNormStructType(structHnd);
7078     assert(varTypeIsStruct(nodeType));
7079     unsigned size = info.compCompHnd->getClassSize(structHnd);
7080
7081     // It would be convenient to set the GC info at this time, but we don't actually require
7082     // it unless this is going to be a destination.
7083     if (!varTypeIsStruct(nodeType))
7084     {
7085         if ((addr->gtOper == GT_ADDR) && (addr->gtGetOp1()->TypeGet() == nodeType))
7086         {
7087             return addr->gtGetOp1();
7088         }
7089         else
7090         {
7091             return gtNewOperNode(GT_IND, nodeType, addr);
7092         }
7093     }
7094     GenTreeBlk* newBlkOrObjNode = new (this, GT_OBJ) GenTreeObj(nodeType, addr, structHnd, size);
7095
7096     // An Obj is not a global reference, if it is known to be a local struct.
7097     if ((addr->gtFlags & GTF_GLOB_REF) == 0)
7098     {
7099         GenTreeLclVarCommon* lclNode = addr->IsLocalAddrExpr();
7100         if (lclNode != nullptr)
7101         {
7102             newBlkOrObjNode->gtFlags |= GTF_IND_NONFAULTING;
7103             if (!lvaIsImplicitByRefLocal(lclNode->gtLclNum))
7104             {
7105                 newBlkOrObjNode->gtFlags &= ~GTF_GLOB_REF;
7106             }
7107         }
7108     }
7109     return newBlkOrObjNode;
7110 }
7111
7112 //------------------------------------------------------------------------
7113 // gtSetObjGcInfo: Set the GC info on an object node
7114 //
7115 // Arguments:
7116 //    objNode - The object node of interest
7117
7118 void Compiler::gtSetObjGcInfo(GenTreeObj* objNode)
7119 {
7120     CORINFO_CLASS_HANDLE structHnd  = objNode->gtClass;
7121     var_types            nodeType   = objNode->TypeGet();
7122     unsigned             size       = objNode->gtBlkSize;
7123     unsigned             slots      = 0;
7124     unsigned             gcPtrCount = 0;
7125     BYTE*                gcPtrs     = nullptr;
7126
7127     assert(varTypeIsStruct(nodeType));
7128     assert(size == info.compCompHnd->getClassSize(structHnd));
7129     assert(nodeType == impNormStructType(structHnd));
7130
7131     if (nodeType == TYP_STRUCT)
7132     {
7133         if (size >= TARGET_POINTER_SIZE)
7134         {
7135             // Get the GC fields info
7136             var_types simdBaseType; // Dummy argument
7137             slots    = (unsigned)(roundUp(size, TARGET_POINTER_SIZE) / TARGET_POINTER_SIZE);
7138             gcPtrs   = new (this, CMK_ASTNode) BYTE[slots];
7139             nodeType = impNormStructType(structHnd, gcPtrs, &gcPtrCount, &simdBaseType);
7140         }
7141     }
7142     objNode->SetGCInfo(gcPtrs, gcPtrCount, slots);
7143     assert(objNode->gtType == nodeType);
7144 }
7145
7146 //------------------------------------------------------------------------
7147 // gtNewStructVal: Return a node that represents a struct value
7148 //
7149 // Arguments:
7150 //    structHnd - The class for the struct
7151 //    addr      - The address of the struct
7152 //
7153 // Return Value:
7154 //    A block, object or local node that represents the struct value pointed to by 'addr'.
7155
7156 GenTree* Compiler::gtNewStructVal(CORINFO_CLASS_HANDLE structHnd, GenTree* addr)
7157 {
7158     if (addr->gtOper == GT_ADDR)
7159     {
7160         GenTree* val = addr->gtGetOp1();
7161         if (val->OperGet() == GT_LCL_VAR)
7162         {
7163             unsigned   lclNum = addr->gtGetOp1()->AsLclVarCommon()->gtLclNum;
7164             LclVarDsc* varDsc = &(lvaTable[lclNum]);
7165             if (varTypeIsStruct(varDsc) && (varDsc->lvVerTypeInfo.GetClassHandle() == structHnd) &&
7166                 !lvaIsImplicitByRefLocal(lclNum))
7167             {
7168                 return addr->gtGetOp1();
7169             }
7170         }
7171     }
7172     return gtNewObjNode(structHnd, addr);
7173 }
7174
7175 //------------------------------------------------------------------------
7176 // gtNewBlockVal: Return a node that represents a possibly untyped block value
7177 //
7178 // Arguments:
7179 //    addr      - The address of the block
7180 //    size      - The size of the block
7181 //
7182 // Return Value:
7183 //    A block, object or local node that represents the block value pointed to by 'addr'.
7184
7185 GenTree* Compiler::gtNewBlockVal(GenTree* addr, unsigned size)
7186 {
7187     // By default we treat this as an opaque struct type with known size.
7188     var_types blkType = TYP_STRUCT;
7189     if ((addr->gtOper == GT_ADDR) && (addr->gtGetOp1()->OperGet() == GT_LCL_VAR))
7190     {
7191         GenTree* val = addr->gtGetOp1();
7192 #if FEATURE_SIMD
7193         if (varTypeIsSIMD(val))
7194         {
7195             if (genTypeSize(val->TypeGet()) == size)
7196             {
7197                 blkType = val->TypeGet();
7198                 return addr->gtGetOp1();
7199             }
7200         }
7201         else
7202 #endif // FEATURE_SIMD
7203 #ifndef LEGACY_BACKEND
7204             if (val->TypeGet() == TYP_STRUCT)
7205         {
7206             GenTreeLclVarCommon* lcl    = addr->gtGetOp1()->AsLclVarCommon();
7207             LclVarDsc*           varDsc = &(lvaTable[lcl->gtLclNum]);
7208             if ((varDsc->TypeGet() == TYP_STRUCT) && (varDsc->lvExactSize == size))
7209             {
7210                 return addr->gtGetOp1();
7211             }
7212         }
7213 #endif // !LEGACY_BACKEND
7214     }
7215     return new (this, GT_BLK) GenTreeBlk(GT_BLK, blkType, addr, size);
7216 }
7217
7218 // Creates a new assignment node for a CpObj.
7219 // Parameters (exactly the same as MSIL CpObj):
7220 //
7221 //  dstAddr    - The target to copy the struct to
7222 //  srcAddr    - The source to copy the struct from
7223 //  structHnd  - A class token that represents the type of object being copied. May be null
7224 //               if FEATURE_SIMD is enabled and the source has a SIMD type.
7225 //  isVolatile - Is this marked as volatile memory?
7226
7227 GenTree* Compiler::gtNewCpObjNode(GenTree* dstAddr, GenTree* srcAddr, CORINFO_CLASS_HANDLE structHnd, bool isVolatile)
7228 {
7229     GenTree* lhs = gtNewStructVal(structHnd, dstAddr);
7230     GenTree* src = nullptr;
7231     unsigned size;
7232
7233     if (lhs->OperIsBlk())
7234     {
7235         size = lhs->AsBlk()->gtBlkSize;
7236         if (lhs->OperGet() == GT_OBJ)
7237         {
7238             gtSetObjGcInfo(lhs->AsObj());
7239         }
7240     }
7241     else
7242     {
7243         size = genTypeSize(lhs->gtType);
7244     }
7245
7246     if (srcAddr->OperGet() == GT_ADDR)
7247     {
7248         src = srcAddr->gtOp.gtOp1;
7249     }
7250     else
7251     {
7252         src = gtNewOperNode(GT_IND, lhs->TypeGet(), srcAddr);
7253     }
7254
7255     GenTree* result = gtNewBlkOpNode(lhs, src, size, isVolatile, true);
7256     return result;
7257 }
7258
7259 //------------------------------------------------------------------------
7260 // FixupInitBlkValue: Fixup the init value for an initBlk operation
7261 //
7262 // Arguments:
7263 //    asgType - The type of assignment that the initBlk is being transformed into
7264 //
7265 // Return Value:
7266 //    Modifies the constant value on this node to be the appropriate "fill"
7267 //    value for the initblk.
7268 //
7269 // Notes:
7270 //    The initBlk MSIL instruction takes a byte value, which must be
7271 //    extended to the size of the assignment when an initBlk is transformed
7272 //    to an assignment of a primitive type.
7273 //    This performs the appropriate extension.
7274
7275 void GenTreeIntCon::FixupInitBlkValue(var_types asgType)
7276 {
7277     assert(varTypeIsIntegralOrI(asgType));
7278     unsigned size = genTypeSize(asgType);
7279     if (size > 1)
7280     {
7281         size_t cns = gtIconVal;
7282         cns        = cns & 0xFF;
7283         cns |= cns << 8;
7284         if (size >= 4)
7285         {
7286             cns |= cns << 16;
7287 #ifdef _TARGET_64BIT_
7288             if (size == 8)
7289             {
7290                 cns |= cns << 32;
7291             }
7292 #endif // _TARGET_64BIT_
7293
7294             // Make the type match for evaluation types.
7295             gtType = asgType;
7296
7297             // if we are initializing a GC type the value being assigned must be zero (null).
7298             assert(!varTypeIsGC(asgType) || (cns == 0));
7299         }
7300
7301         gtIconVal = cns;
7302     }
7303 }
7304
7305 //
7306 //------------------------------------------------------------------------
7307 // gtBlockOpInit: Initializes a BlkOp GenTree
7308 //
7309 // Arguments:
7310 //    result     - an assignment node that is to be initialized.
7311 //    dst        - the target (destination) we want to either initialize or copy to.
7312 //    src        - the init value for InitBlk or the source struct for CpBlk/CpObj.
7313 //    isVolatile - specifies whether this node is a volatile memory operation.
7314 //
7315 // Assumptions:
7316 //    'result' is an assignment that is newly constructed.
7317 //    If 'dst' is TYP_STRUCT, then it must be a block node or lclVar.
7318 //
7319 // Notes:
7320 //    This procedure centralizes all the logic to both enforce proper structure and
7321 //    to properly construct any InitBlk/CpBlk node.
7322
7323 void Compiler::gtBlockOpInit(GenTree* result, GenTree* dst, GenTree* srcOrFillVal, bool isVolatile)
7324 {
7325     if (!result->OperIsBlkOp())
7326     {
7327         assert(dst->TypeGet() != TYP_STRUCT);
7328         return;
7329     }
7330 #ifdef DEBUG
7331     // If the copy involves GC pointers, the caller must have already set
7332     // the node additional members (gtGcPtrs, gtGcPtrCount, gtSlots) on the dst.
7333     if ((dst->gtOper == GT_OBJ) && dst->AsBlk()->HasGCPtr())
7334     {
7335         GenTreeObj* objNode = dst->AsObj();
7336         assert(objNode->gtGcPtrs != nullptr);
7337         assert(!IsUninitialized(objNode->gtGcPtrs));
7338         assert(!IsUninitialized(objNode->gtGcPtrCount));
7339         assert(!IsUninitialized(objNode->gtSlots) && objNode->gtSlots > 0);
7340
7341         for (unsigned i = 0; i < objNode->gtGcPtrCount; ++i)
7342         {
7343             CorInfoGCType t = (CorInfoGCType)objNode->gtGcPtrs[i];
7344             switch (t)
7345             {
7346                 case TYPE_GC_NONE:
7347                 case TYPE_GC_REF:
7348                 case TYPE_GC_BYREF:
7349                 case TYPE_GC_OTHER:
7350                     break;
7351                 default:
7352                     unreached();
7353             }
7354         }
7355     }
7356 #endif // DEBUG
7357
7358     /* In the case of CpBlk, we want to avoid generating
7359     * nodes where the source and destination are the same
7360     * because of two reasons, first, is useless, second
7361     * it introduces issues in liveness and also copying
7362     * memory from an overlapping memory location is
7363     * undefined both as per the ECMA standard and also
7364     * the memcpy semantics specify that.
7365     *
7366     * NOTE: In this case we'll only detect the case for addr of a local
7367     * and a local itself, any other complex expressions won't be
7368     * caught.
7369     *
7370     * TODO-Cleanup: though having this logic is goodness (i.e. avoids self-assignment
7371     * of struct vars very early), it was added because fgInterBlockLocalVarLiveness()
7372     * isn't handling self-assignment of struct variables correctly.  This issue may not
7373     * surface if struct promotion is ON (which is the case on x86/arm).  But still the
7374     * fundamental issue exists that needs to be addressed.
7375     */
7376     if (result->OperIsCopyBlkOp())
7377     {
7378         GenTree* currSrc = srcOrFillVal;
7379         GenTree* currDst = dst;
7380
7381         if (currSrc->OperIsBlk() && (currSrc->AsBlk()->Addr()->OperGet() == GT_ADDR))
7382         {
7383             currSrc = currSrc->AsBlk()->Addr()->gtGetOp1();
7384         }
7385         if (currDst->OperIsBlk() && (currDst->AsBlk()->Addr()->OperGet() == GT_ADDR))
7386         {
7387             currDst = currDst->AsBlk()->Addr()->gtGetOp1();
7388         }
7389
7390         if (currSrc->OperGet() == GT_LCL_VAR && currDst->OperGet() == GT_LCL_VAR &&
7391             currSrc->gtLclVarCommon.gtLclNum == currDst->gtLclVarCommon.gtLclNum)
7392         {
7393             // Make this a NOP
7394             // TODO-Cleanup: probably doesn't matter, but could do this earlier and avoid creating a GT_ASG
7395             result->gtBashToNOP();
7396             return;
7397         }
7398     }
7399
7400     // Propagate all effect flags from children
7401     result->gtFlags |= dst->gtFlags & GTF_ALL_EFFECT;
7402     result->gtFlags |= result->gtOp.gtOp2->gtFlags & GTF_ALL_EFFECT;
7403
7404     // REVERSE_OPS is necessary because the use must occur before the def
7405     result->gtFlags |= GTF_REVERSE_OPS;
7406
7407     result->gtFlags |= (dst->gtFlags & GTF_EXCEPT) | (srcOrFillVal->gtFlags & GTF_EXCEPT);
7408
7409     if (isVolatile)
7410     {
7411         result->gtFlags |= GTF_BLK_VOLATILE;
7412     }
7413
7414 #ifdef FEATURE_SIMD
7415     if (result->OperIsCopyBlkOp() && varTypeIsSIMD(srcOrFillVal))
7416     {
7417         // If the source is a GT_SIMD node of SIMD type, then the dst lclvar struct
7418         // should be labeled as simd intrinsic related struct.
7419         // This is done so that the morpher can transform any field accesses into
7420         // intrinsics, thus avoiding conflicting access methods (fields vs. whole-register).
7421
7422         GenTree* src = srcOrFillVal;
7423         if (src->OperIsIndir() && (src->AsIndir()->Addr()->OperGet() == GT_ADDR))
7424         {
7425             src = src->AsIndir()->Addr()->gtGetOp1();
7426         }
7427 #ifdef FEATURE_HW_INTRINSICS
7428         if ((src->OperGet() == GT_SIMD) || (src->OperGet() == GT_HWIntrinsic))
7429 #else
7430         if (src->OperGet() == GT_SIMD)
7431 #endif // FEATURE_HW_INTRINSICS
7432         {
7433             if (dst->OperIsBlk() && (dst->AsIndir()->Addr()->OperGet() == GT_ADDR))
7434             {
7435                 dst = dst->AsIndir()->Addr()->gtGetOp1();
7436             }
7437
7438             if (dst->OperIsLocal() && varTypeIsStruct(dst))
7439             {
7440                 setLclRelatedToSIMDIntrinsic(dst);
7441             }
7442         }
7443     }
7444 #endif // FEATURE_SIMD
7445 }
7446
7447 //------------------------------------------------------------------------
7448 // gtNewBlkOpNode: Creates a GenTree for a block (struct) assignment.
7449 //
7450 // Arguments:
7451 //    dst           - Destination or target to copy to / initialize the buffer.
7452 //    srcOrFillVall - the size of the buffer to copy/initialize or zero, in the case of CpObj.
7453 //    size          - The size of the buffer or a class token (in the case of CpObj).
7454 //    isVolatile    - Whether this is a volatile memory operation or not.
7455 //    isCopyBlock   - True if this is a block copy (rather than a block init).
7456 //
7457 // Return Value:
7458 //    Returns the newly constructed and initialized block operation.
7459 //
7460 // Notes:
7461 //    If size is zero, the dst must be a GT_OBJ with the class handle.
7462 //    'dst' must be a block node or lclVar.
7463 //
7464 GenTree* Compiler::gtNewBlkOpNode(GenTree* dst, GenTree* srcOrFillVal, unsigned size, bool isVolatile, bool isCopyBlock)
7465 {
7466     assert(dst->OperIsBlk() || dst->OperIsLocal());
7467     if (isCopyBlock)
7468     {
7469         srcOrFillVal->gtFlags |= GTF_DONT_CSE;
7470         if (srcOrFillVal->OperIsIndir() && (srcOrFillVal->gtGetOp1()->gtOper == GT_ADDR))
7471         {
7472             srcOrFillVal = srcOrFillVal->gtGetOp1()->gtGetOp1();
7473         }
7474     }
7475     else
7476     {
7477         // InitBlk
7478         assert(varTypeIsIntegral(srcOrFillVal));
7479         if (varTypeIsStruct(dst))
7480         {
7481             if (!srcOrFillVal->IsIntegralConst(0))
7482             {
7483                 srcOrFillVal = gtNewOperNode(GT_INIT_VAL, TYP_INT, srcOrFillVal);
7484             }
7485         }
7486     }
7487
7488     GenTree* result = gtNewAssignNode(dst, srcOrFillVal);
7489     gtBlockOpInit(result, dst, srcOrFillVal, isVolatile);
7490     return result;
7491 }
7492
7493 //------------------------------------------------------------------------
7494 // gtNewPutArgReg: Creates a new PutArgReg node.
7495 //
7496 // Arguments:
7497 //    type   - The actual type of the argument
7498 //    arg    - The argument node
7499 //    argReg - The register that the argument will be passed in
7500 //
7501 // Return Value:
7502 //    Returns the newly created PutArgReg node.
7503 //
7504 // Notes:
7505 //    The node is generated as GenTreeMultiRegOp on RyuJIT/armel, GenTreeOp on all the other archs.
7506 //
7507 GenTree* Compiler::gtNewPutArgReg(var_types type, GenTree* arg, regNumber argReg)
7508 {
7509     assert(arg != nullptr);
7510
7511     GenTree* node = nullptr;
7512 #if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
7513     // A PUTARG_REG could be a MultiRegOp on arm since we could move a double register to two int registers.
7514     node = new (this, GT_PUTARG_REG) GenTreeMultiRegOp(GT_PUTARG_REG, type, arg, nullptr);
7515 #else
7516     node          = gtNewOperNode(GT_PUTARG_REG, type, arg);
7517 #endif
7518     node->gtRegNum = argReg;
7519
7520     return node;
7521 }
7522
7523 //------------------------------------------------------------------------
7524 // gtNewBitCastNode: Creates a new BitCast node.
7525 //
7526 // Arguments:
7527 //    type   - The actual type of the argument
7528 //    arg    - The argument node
7529 //    argReg - The register that the argument will be passed in
7530 //
7531 // Return Value:
7532 //    Returns the newly created BitCast node.
7533 //
7534 // Notes:
7535 //    The node is generated as GenTreeMultiRegOp on RyuJIT/arm, as GenTreeOp on all the other archs.
7536 //
7537 GenTree* Compiler::gtNewBitCastNode(var_types type, GenTree* arg)
7538 {
7539     assert(arg != nullptr);
7540
7541     GenTree* node = nullptr;
7542 #if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
7543     // A BITCAST could be a MultiRegOp on arm since we could move a double register to two int registers.
7544     node = new (this, GT_BITCAST) GenTreeMultiRegOp(GT_BITCAST, type, arg, nullptr);
7545 #else
7546     node          = gtNewOperNode(GT_BITCAST, type, arg);
7547 #endif
7548
7549     return node;
7550 }
7551
7552 /*****************************************************************************
7553  *
7554  *  Clones the given tree value and returns a copy of the given tree.
7555  *  If 'complexOK' is false, the cloning is only done provided the tree
7556  *     is not too complex (whatever that may mean);
7557  *  If 'complexOK' is true, we try slightly harder to clone the tree.
7558  *  In either case, NULL is returned if the tree cannot be cloned
7559  *
7560  *  Note that there is the function gtCloneExpr() which does a more
7561  *  complete job if you can't handle this function failing.
7562  */
7563
7564 GenTree* Compiler::gtClone(GenTree* tree, bool complexOK)
7565 {
7566     GenTree* copy;
7567
7568     switch (tree->gtOper)
7569     {
7570         case GT_CNS_INT:
7571
7572 #if defined(LATE_DISASM)
7573             if (tree->IsIconHandle())
7574             {
7575                 copy = gtNewIconHandleNode(tree->gtIntCon.gtIconVal, tree->gtFlags, tree->gtIntCon.gtFieldSeq);
7576                 copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
7577                 copy->gtType                       = tree->gtType;
7578             }
7579             else
7580 #endif
7581             {
7582                 copy = new (this, GT_CNS_INT)
7583                     GenTreeIntCon(tree->gtType, tree->gtIntCon.gtIconVal, tree->gtIntCon.gtFieldSeq);
7584                 copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
7585             }
7586             break;
7587
7588         case GT_CNS_LNG:
7589             copy = gtNewLconNode(tree->gtLngCon.gtLconVal);
7590             break;
7591
7592         case GT_LCL_VAR:
7593             // Remember that the LclVar node has been cloned. The flag will be set
7594             // on 'copy' as well.
7595             tree->gtFlags |= GTF_VAR_CLONED;
7596             copy = gtNewLclvNode(tree->gtLclVarCommon.gtLclNum, tree->gtType, tree->gtLclVar.gtLclILoffs);
7597             break;
7598
7599         case GT_LCL_FLD:
7600         case GT_LCL_FLD_ADDR:
7601             // Remember that the LclVar node has been cloned. The flag will be set
7602             // on 'copy' as well.
7603             tree->gtFlags |= GTF_VAR_CLONED;
7604             copy = new (this, tree->gtOper)
7605                 GenTreeLclFld(tree->gtOper, tree->TypeGet(), tree->gtLclFld.gtLclNum, tree->gtLclFld.gtLclOffs);
7606             copy->gtLclFld.gtFieldSeq = tree->gtLclFld.gtFieldSeq;
7607             break;
7608
7609         case GT_CLS_VAR:
7610             copy = new (this, GT_CLS_VAR)
7611                 GenTreeClsVar(tree->gtType, tree->gtClsVar.gtClsVarHnd, tree->gtClsVar.gtFieldSeq);
7612             break;
7613
7614         case GT_REG_VAR:
7615             assert(!"clone regvar");
7616
7617         default:
7618             if (!complexOK)
7619             {
7620                 return nullptr;
7621             }
7622
7623             if (tree->gtOper == GT_FIELD)
7624             {
7625                 GenTree* objp;
7626
7627                 // copied from line 9850
7628
7629                 objp = nullptr;
7630                 if (tree->gtField.gtFldObj)
7631                 {
7632                     objp = gtClone(tree->gtField.gtFldObj, false);
7633                     if (!objp)
7634                     {
7635                         return objp;
7636                     }
7637                 }
7638
7639                 copy = gtNewFieldRef(tree->TypeGet(), tree->gtField.gtFldHnd, objp, tree->gtField.gtFldOffset);
7640                 copy->gtField.gtFldMayOverlap = tree->gtField.gtFldMayOverlap;
7641             }
7642             else if (tree->OperIs(GT_ADD, GT_SUB))
7643             {
7644                 GenTree* op1 = tree->gtOp.gtOp1;
7645                 GenTree* op2 = tree->gtOp.gtOp2;
7646
7647                 if (op1->OperIsLeaf() && op2->OperIsLeaf())
7648                 {
7649                     op1 = gtClone(op1);
7650                     if (op1 == nullptr)
7651                     {
7652                         return nullptr;
7653                     }
7654                     op2 = gtClone(op2);
7655                     if (op2 == nullptr)
7656                     {
7657                         return nullptr;
7658                     }
7659
7660                     copy = gtNewOperNode(tree->OperGet(), tree->TypeGet(), op1, op2);
7661                 }
7662                 else
7663                 {
7664                     return nullptr;
7665                 }
7666             }
7667             else if (tree->gtOper == GT_ADDR)
7668             {
7669                 GenTree* op1 = gtClone(tree->gtOp.gtOp1);
7670                 if (op1 == nullptr)
7671                 {
7672                     return nullptr;
7673                 }
7674                 copy = gtNewOperNode(GT_ADDR, tree->TypeGet(), op1);
7675             }
7676             else
7677             {
7678                 return nullptr;
7679             }
7680
7681             break;
7682     }
7683
7684     copy->gtFlags |= tree->gtFlags & ~GTF_NODE_MASK;
7685 #if defined(DEBUG)
7686     copy->gtDebugFlags |= tree->gtDebugFlags & ~GTF_DEBUG_NODE_MASK;
7687 #endif // defined(DEBUG)
7688
7689     return copy;
7690 }
7691
7692 //------------------------------------------------------------------------
7693 // gtCloneExpr: Create a copy of `tree`, adding flags `addFlags`, mapping
7694 //              local `varNum` to int constant `varVal` if it appears at
7695 //              the root, and mapping uses of local `deepVarNum` to constant
7696 //              `deepVarVal` if they occur beyond the root.
7697 //
7698 // Arguments:
7699 //    tree - GenTree to create a copy of
7700 //    addFlags - GTF_* flags to add to the copied tree nodes
7701 //    varNum - lclNum to replace at the root, or ~0 for no root replacement
7702 //    varVal - If replacing at root, replace local `varNum` with IntCns `varVal`
7703 //    deepVarNum - lclNum to replace uses of beyond the root, or ~0 for no replacement
7704 //    deepVarVal - If replacing beyond root, replace `deepVarNum` with IntCns `deepVarVal`
7705 //
7706 // Return Value:
7707 //    A copy of the given tree with the replacements and added flags specified.
7708 //
7709 // Notes:
7710 //    Top-level callers should generally call the overload that doesn't have
7711 //    the explicit `deepVarNum` and `deepVarVal` parameters; those are used in
7712 //    recursive invocations to avoid replacing defs.
7713
7714 GenTree* Compiler::gtCloneExpr(
7715     GenTree* tree, unsigned addFlags, unsigned varNum, int varVal, unsigned deepVarNum, int deepVarVal)
7716 {
7717     if (tree == nullptr)
7718     {
7719         return nullptr;
7720     }
7721
7722     /* Figure out what kind of a node we have */
7723
7724     genTreeOps oper = tree->OperGet();
7725     unsigned   kind = tree->OperKind();
7726     GenTree*   copy;
7727
7728     /* Is this a constant or leaf node? */
7729
7730     if (kind & (GTK_CONST | GTK_LEAF))
7731     {
7732         switch (oper)
7733         {
7734             case GT_CNS_INT:
7735
7736 #if defined(LATE_DISASM)
7737                 if (tree->IsIconHandle())
7738                 {
7739                     copy = gtNewIconHandleNode(tree->gtIntCon.gtIconVal, tree->gtFlags, tree->gtIntCon.gtFieldSeq);
7740                     copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
7741                     copy->gtType                       = tree->gtType;
7742                 }
7743                 else
7744 #endif
7745                 {
7746                     copy                               = gtNewIconNode(tree->gtIntCon.gtIconVal, tree->gtType);
7747                     copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
7748                     copy->gtIntCon.gtFieldSeq          = tree->gtIntCon.gtFieldSeq;
7749                 }
7750                 goto DONE;
7751
7752             case GT_CNS_LNG:
7753                 copy = gtNewLconNode(tree->gtLngCon.gtLconVal);
7754                 goto DONE;
7755
7756             case GT_CNS_DBL:
7757                 copy         = gtNewDconNode(tree->gtDblCon.gtDconVal);
7758                 copy->gtType = tree->gtType; // keep the same type
7759                 goto DONE;
7760
7761             case GT_CNS_STR:
7762                 copy = gtNewSconNode(tree->gtStrCon.gtSconCPX, tree->gtStrCon.gtScpHnd);
7763                 goto DONE;
7764
7765             case GT_LCL_VAR:
7766
7767                 if (tree->gtLclVarCommon.gtLclNum == varNum)
7768                 {
7769                     copy = gtNewIconNode(varVal, tree->gtType);
7770                     if (tree->gtFlags & GTF_VAR_ARR_INDEX)
7771                     {
7772                         copy->LabelIndex(this);
7773                     }
7774                 }
7775                 else
7776                 {
7777                     // Remember that the LclVar node has been cloned. The flag will
7778                     // be set on 'copy' as well.
7779                     tree->gtFlags |= GTF_VAR_CLONED;
7780                     copy = gtNewLclvNode(tree->gtLclVar.gtLclNum, tree->gtType, tree->gtLclVar.gtLclILoffs);
7781                     copy->AsLclVarCommon()->SetSsaNum(tree->AsLclVarCommon()->GetSsaNum());
7782                 }
7783                 copy->gtFlags = tree->gtFlags;
7784                 goto DONE;
7785
7786             case GT_LCL_FLD:
7787                 if (tree->gtLclFld.gtLclNum == varNum)
7788                 {
7789                     IMPL_LIMITATION("replacing GT_LCL_FLD with a constant");
7790                 }
7791                 else
7792                 {
7793                     // Remember that the LclVar node has been cloned. The flag will
7794                     // be set on 'copy' as well.
7795                     tree->gtFlags |= GTF_VAR_CLONED;
7796                     copy = new (this, GT_LCL_FLD)
7797                         GenTreeLclFld(tree->TypeGet(), tree->gtLclFld.gtLclNum, tree->gtLclFld.gtLclOffs);
7798                     copy->gtLclFld.gtFieldSeq = tree->gtLclFld.gtFieldSeq;
7799                     copy->gtFlags             = tree->gtFlags;
7800                 }
7801                 goto DONE;
7802
7803             case GT_CLS_VAR:
7804                 copy = new (this, GT_CLS_VAR)
7805                     GenTreeClsVar(tree->TypeGet(), tree->gtClsVar.gtClsVarHnd, tree->gtClsVar.gtFieldSeq);
7806                 goto DONE;
7807
7808             case GT_RET_EXPR:
7809                 // GT_RET_EXPR is unique node, that contains a link to a gtInlineCandidate node,
7810                 // that is part of another statement. We cannot clone both here and cannot
7811                 // create another GT_RET_EXPR that points to the same gtInlineCandidate.
7812                 NO_WAY("Cloning of GT_RET_EXPR node not supported");
7813                 goto DONE;
7814
7815             case GT_MEMORYBARRIER:
7816                 copy = new (this, GT_MEMORYBARRIER) GenTree(GT_MEMORYBARRIER, TYP_VOID);
7817                 goto DONE;
7818
7819             case GT_ARGPLACE:
7820                 copy = gtNewArgPlaceHolderNode(tree->gtType, tree->gtArgPlace.gtArgPlaceClsHnd);
7821                 goto DONE;
7822
7823             case GT_REG_VAR:
7824                 NO_WAY("Cloning of GT_REG_VAR node not supported");
7825                 goto DONE;
7826
7827             case GT_FTN_ADDR:
7828                 copy = new (this, oper) GenTreeFptrVal(tree->gtType, tree->gtFptrVal.gtFptrMethod);
7829
7830 #ifdef FEATURE_READYTORUN_COMPILER
7831                 copy->gtFptrVal.gtEntryPoint = tree->gtFptrVal.gtEntryPoint;
7832 #endif
7833                 goto DONE;
7834
7835             case GT_CATCH_ARG:
7836             case GT_NO_OP:
7837                 copy = new (this, oper) GenTree(oper, tree->gtType);
7838                 goto DONE;
7839
7840 #if !FEATURE_EH_FUNCLETS
7841             case GT_END_LFIN:
7842 #endif // !FEATURE_EH_FUNCLETS
7843             case GT_JMP:
7844                 copy = new (this, oper) GenTreeVal(oper, tree->gtType, tree->gtVal.gtVal1);
7845                 goto DONE;
7846
7847             case GT_LABEL:
7848                 copy = new (this, oper) GenTreeLabel(tree->gtLabel.gtLabBB);
7849                 goto DONE;
7850
7851             default:
7852                 NO_WAY("Cloning of node not supported");
7853                 goto DONE;
7854         }
7855     }
7856
7857     /* Is it a 'simple' unary/binary operator? */
7858
7859     if (kind & GTK_SMPOP)
7860     {
7861         /* If necessary, make sure we allocate a "fat" tree node */
7862         CLANG_FORMAT_COMMENT_ANCHOR;
7863
7864 #if SMALL_TREE_NODES
7865         switch (oper)
7866         {
7867             /* These nodes sometimes get bashed to "fat" ones */
7868
7869             case GT_MUL:
7870             case GT_DIV:
7871             case GT_MOD:
7872
7873             case GT_UDIV:
7874             case GT_UMOD:
7875
7876                 //  In the implementation of gtNewLargeOperNode you have
7877                 //  to give an oper that will create a small node,
7878                 //  otherwise it asserts.
7879                 //
7880                 if (GenTree::s_gtNodeSizes[oper] == TREE_NODE_SZ_SMALL)
7881                 {
7882                     copy = gtNewLargeOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1,
7883                                               tree->OperIsBinary() ? tree->gtOp.gtOp2 : nullptr);
7884                 }
7885                 else // Always a large tree
7886                 {
7887                     if (tree->OperIsBinary())
7888                     {
7889                         copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2);
7890                     }
7891                     else
7892                     {
7893                         copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1);
7894                     }
7895                 }
7896                 break;
7897
7898             case GT_CAST:
7899                 copy =
7900                     new (this, LargeOpOpcode()) GenTreeCast(tree->TypeGet(), tree->gtCast.CastOp(), tree->IsUnsigned(),
7901                                                             tree->gtCast.gtCastType DEBUGARG(/*largeNode*/ TRUE));
7902                 break;
7903
7904             // The nodes below this are not bashed, so they can be allocated at their individual sizes.
7905
7906             case GT_LIST:
7907                 assert((tree->gtOp.gtOp2 == nullptr) || tree->gtOp.gtOp2->OperIsList());
7908                 copy             = new (this, GT_LIST) GenTreeArgList(tree->gtOp.gtOp1);
7909                 copy->gtOp.gtOp2 = tree->gtOp.gtOp2;
7910                 break;
7911
7912             case GT_FIELD_LIST:
7913                 copy = new (this, GT_FIELD_LIST) GenTreeFieldList(tree->gtOp.gtOp1, tree->AsFieldList()->gtFieldOffset,
7914                                                                   tree->AsFieldList()->gtFieldType, nullptr);
7915                 copy->gtOp.gtOp2 = tree->gtOp.gtOp2;
7916                 copy->gtFlags    = (copy->gtFlags & ~GTF_FIELD_LIST_HEAD) | (tree->gtFlags & GTF_FIELD_LIST_HEAD);
7917                 break;
7918
7919             case GT_INDEX:
7920             {
7921                 GenTreeIndex* asInd = tree->AsIndex();
7922                 copy                = new (this, GT_INDEX)
7923                     GenTreeIndex(asInd->TypeGet(), asInd->Arr(), asInd->Index(), asInd->gtIndElemSize);
7924                 copy->AsIndex()->gtStructElemClass = asInd->gtStructElemClass;
7925             }
7926             break;
7927
7928             case GT_INDEX_ADDR:
7929             {
7930                 GenTreeIndexAddr* asIndAddr = tree->AsIndexAddr();
7931
7932                 copy = new (this, GT_INDEX_ADDR)
7933                     GenTreeIndexAddr(asIndAddr->Arr(), asIndAddr->Index(), asIndAddr->gtElemType,
7934                                      asIndAddr->gtStructElemClass, asIndAddr->gtElemSize, asIndAddr->gtLenOffset,
7935                                      asIndAddr->gtElemOffset);
7936                 copy->AsIndexAddr()->gtIndRngFailBB = asIndAddr->gtIndRngFailBB;
7937                 copy->AsIndexAddr()->gtStkDepth     = asIndAddr->gtStkDepth;
7938             }
7939             break;
7940
7941             case GT_ALLOCOBJ:
7942             {
7943                 GenTreeAllocObj* asAllocObj = tree->AsAllocObj();
7944                 copy = new (this, GT_ALLOCOBJ) GenTreeAllocObj(tree->TypeGet(), asAllocObj->gtNewHelper,
7945                                                                asAllocObj->gtAllocObjClsHnd, asAllocObj->gtOp1);
7946             }
7947             break;
7948
7949             case GT_RUNTIMELOOKUP:
7950             {
7951                 GenTreeRuntimeLookup* asRuntimeLookup = tree->AsRuntimeLookup();
7952
7953                 copy = new (this, GT_RUNTIMELOOKUP)
7954                     GenTreeRuntimeLookup(asRuntimeLookup->gtHnd, asRuntimeLookup->gtHndType, asRuntimeLookup->gtOp1);
7955             }
7956             break;
7957
7958             case GT_ARR_LENGTH:
7959                 copy = gtNewArrLen(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtArrLen.ArrLenOffset());
7960                 break;
7961
7962             case GT_ARR_INDEX:
7963                 copy = new (this, GT_ARR_INDEX)
7964                     GenTreeArrIndex(tree->TypeGet(),
7965                                     gtCloneExpr(tree->gtArrIndex.ArrObj(), addFlags, deepVarNum, deepVarVal),
7966                                     gtCloneExpr(tree->gtArrIndex.IndexExpr(), addFlags, deepVarNum, deepVarVal),
7967                                     tree->gtArrIndex.gtCurrDim, tree->gtArrIndex.gtArrRank,
7968                                     tree->gtArrIndex.gtArrElemType);
7969                 break;
7970
7971             case GT_QMARK:
7972                 copy = new (this, GT_QMARK) GenTreeQmark(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2, this);
7973 #ifdef LEGACY_BACKEND
7974                 VarSetOps::AssignAllowUninitRhs(this, copy->gtQmark.gtThenLiveSet, tree->gtQmark.gtThenLiveSet);
7975                 VarSetOps::AssignAllowUninitRhs(this, copy->gtQmark.gtElseLiveSet, tree->gtQmark.gtElseLiveSet);
7976 #endif
7977                 break;
7978
7979             case GT_OBJ:
7980                 copy = new (this, GT_OBJ)
7981                     GenTreeObj(tree->TypeGet(), tree->gtOp.gtOp1, tree->AsObj()->gtClass, tree->gtBlk.gtBlkSize);
7982                 copy->AsObj()->CopyGCInfo(tree->AsObj());
7983                 copy->gtBlk.gtBlkOpGcUnsafe = tree->gtBlk.gtBlkOpGcUnsafe;
7984                 break;
7985
7986             case GT_BLK:
7987                 copy = new (this, GT_BLK) GenTreeBlk(GT_BLK, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtBlk.gtBlkSize);
7988                 copy->gtBlk.gtBlkOpGcUnsafe = tree->gtBlk.gtBlkOpGcUnsafe;
7989                 break;
7990
7991             case GT_DYN_BLK:
7992                 copy = new (this, GT_DYN_BLK) GenTreeDynBlk(tree->gtOp.gtOp1, tree->gtDynBlk.gtDynamicSize);
7993                 copy->gtBlk.gtBlkOpGcUnsafe = tree->gtBlk.gtBlkOpGcUnsafe;
7994                 break;
7995
7996             case GT_BOX:
7997                 copy = new (this, GT_BOX)
7998                     GenTreeBox(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtBox.gtAsgStmtWhenInlinedBoxValue,
7999                                tree->gtBox.gtCopyStmtWhenInlinedBoxValue);
8000                 break;
8001
8002             case GT_INTRINSIC:
8003                 copy = new (this, GT_INTRINSIC)
8004                     GenTreeIntrinsic(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2,
8005                                      tree->gtIntrinsic.gtIntrinsicId, tree->gtIntrinsic.gtMethodHandle);
8006 #ifdef FEATURE_READYTORUN_COMPILER
8007                 copy->gtIntrinsic.gtEntryPoint = tree->gtIntrinsic.gtEntryPoint;
8008 #endif
8009                 break;
8010
8011             case GT_LEA:
8012             {
8013                 GenTreeAddrMode* addrModeOp = tree->AsAddrMode();
8014                 copy                        = new (this, GT_LEA)
8015                     GenTreeAddrMode(addrModeOp->TypeGet(), addrModeOp->Base(), addrModeOp->Index(), addrModeOp->gtScale,
8016                                     static_cast<unsigned>(addrModeOp->Offset()));
8017             }
8018             break;
8019
8020             case GT_COPY:
8021             case GT_RELOAD:
8022             {
8023                 copy = new (this, oper) GenTreeCopyOrReload(oper, tree->TypeGet(), tree->gtGetOp1());
8024             }
8025             break;
8026
8027 #ifdef FEATURE_SIMD
8028             case GT_SIMD:
8029             {
8030                 GenTreeSIMD* simdOp = tree->AsSIMD();
8031                 copy                = gtNewSIMDNode(simdOp->TypeGet(), simdOp->gtGetOp1(), simdOp->gtGetOp2IfPresent(),
8032                                      simdOp->gtSIMDIntrinsicID, simdOp->gtSIMDBaseType, simdOp->gtSIMDSize);
8033             }
8034             break;
8035 #endif
8036
8037 #ifdef FEATURE_HW_INTRINSICS
8038             case GT_HWIntrinsic:
8039             {
8040                 GenTreeHWIntrinsic* hwintrinsicOp = tree->AsHWIntrinsic();
8041                 copy                              = new (this, GT_HWIntrinsic)
8042                     GenTreeHWIntrinsic(hwintrinsicOp->TypeGet(), hwintrinsicOp->gtGetOp1(),
8043                                        hwintrinsicOp->gtGetOp2IfPresent(), hwintrinsicOp->gtHWIntrinsicId,
8044                                        hwintrinsicOp->gtSIMDBaseType, hwintrinsicOp->gtSIMDSize);
8045             }
8046             break;
8047 #endif
8048
8049             default:
8050                 assert(!GenTree::IsExOp(tree->OperKind()) && tree->OperIsSimple());
8051                 // We're in the SimpleOp case, so it's always unary or binary.
8052                 if (GenTree::OperIsUnary(tree->OperGet()))
8053                 {
8054                     copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, /*doSimplifications*/ false);
8055                 }
8056                 else
8057                 {
8058                     assert(GenTree::OperIsBinary(tree->OperGet()));
8059                     copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2);
8060                 }
8061                 break;
8062         }
8063 #else
8064         // We're in the SimpleOp case, so it's always unary or binary.
8065         copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2);
8066 #endif
8067
8068         // Some flags are conceptually part of the gtOper, and should be copied immediately.
8069         if (tree->gtOverflowEx())
8070         {
8071             copy->gtFlags |= GTF_OVERFLOW;
8072         }
8073
8074         if (tree->gtOp.gtOp1)
8075         {
8076             if (tree->gtOper == GT_ASG)
8077             {
8078                 // Don't replace varNum if it appears as the LHS of an assign.
8079                 copy->gtOp.gtOp1 = gtCloneExpr(tree->gtOp.gtOp1, addFlags, -1, 0, deepVarNum, deepVarVal);
8080             }
8081             else
8082             {
8083                 copy->gtOp.gtOp1 = gtCloneExpr(tree->gtOp.gtOp1, addFlags, deepVarNum, deepVarVal);
8084             }
8085         }
8086
8087         if (tree->gtGetOp2IfPresent())
8088         {
8089             copy->gtOp.gtOp2 = gtCloneExpr(tree->gtOp.gtOp2, addFlags, deepVarNum, deepVarVal);
8090         }
8091
8092         /* Flags */
8093         addFlags |= tree->gtFlags;
8094
8095         // Copy any node annotations, if necessary.
8096         switch (tree->gtOper)
8097         {
8098             case GT_ASG:
8099             {
8100                 IndirectAssignmentAnnotation* pIndirAnnot = nullptr;
8101                 if (m_indirAssignMap != nullptr && GetIndirAssignMap()->Lookup(tree, &pIndirAnnot))
8102                 {
8103                     IndirectAssignmentAnnotation* pNewIndirAnnot = new (this, CMK_Unknown)
8104                         IndirectAssignmentAnnotation(pIndirAnnot->m_lclNum, pIndirAnnot->m_fieldSeq,
8105                                                      pIndirAnnot->m_isEntire);
8106                     GetIndirAssignMap()->Set(copy, pNewIndirAnnot);
8107                 }
8108             }
8109             break;
8110
8111             case GT_STOREIND:
8112             case GT_IND:
8113             case GT_OBJ:
8114             case GT_STORE_OBJ:
8115             {
8116                 ArrayInfo arrInfo;
8117                 if (!tree->AsIndir()->gtOp1->OperIs(GT_INDEX_ADDR) && TryGetArrayInfo(tree->AsIndir(), &arrInfo))
8118                 {
8119                     GetArrayInfoMap()->Set(copy, arrInfo);
8120                 }
8121             }
8122             break;
8123
8124             default:
8125                 break;
8126         }
8127
8128 #ifdef DEBUG
8129         /* GTF_NODE_MASK should not be propagated from 'tree' to 'copy' */
8130         addFlags &= ~GTF_NODE_MASK;
8131 #endif
8132
8133         // Effects flags propagate upwards.
8134         if (copy->gtOp.gtOp1 != nullptr)
8135         {
8136             copy->gtFlags |= (copy->gtOp.gtOp1->gtFlags & GTF_ALL_EFFECT);
8137         }
8138         if (copy->gtGetOp2IfPresent() != nullptr)
8139         {
8140             copy->gtFlags |= (copy->gtGetOp2()->gtFlags & GTF_ALL_EFFECT);
8141         }
8142
8143 #ifdef LEGACY_BACKEND
8144         // The early morph for TailCall creates a GT_NOP with GTF_REG_VAL flag set
8145         // Thus we have to copy the gtRegNum/gtRegPair value if we clone it here.
8146         //
8147         if (tree->InReg())
8148         {
8149             copy->CopyReg(tree);
8150         }
8151 #endif // LEGACY_BACKEND
8152
8153         goto DONE;
8154     }
8155
8156     /* See what kind of a special operator we have here */
8157
8158     switch (oper)
8159     {
8160         case GT_STMT:
8161             copy = gtCloneExpr(tree->gtStmt.gtStmtExpr, addFlags, deepVarNum, deepVarVal);
8162             copy = gtNewStmt(copy, tree->gtStmt.gtStmtILoffsx);
8163             goto DONE;
8164
8165         case GT_CALL:
8166
8167             copy = new (this, GT_CALL) GenTreeCall(tree->TypeGet());
8168
8169             copy->gtCall.gtCallObjp = tree->gtCall.gtCallObjp
8170                                           ? gtCloneExpr(tree->gtCall.gtCallObjp, addFlags, deepVarNum, deepVarVal)
8171                                           : nullptr;
8172             copy->gtCall.gtCallArgs =
8173                 tree->gtCall.gtCallArgs
8174                     ? gtCloneExpr(tree->gtCall.gtCallArgs, addFlags, deepVarNum, deepVarVal)->AsArgList()
8175                     : nullptr;
8176             copy->gtCall.gtCallMoreFlags = tree->gtCall.gtCallMoreFlags;
8177             copy->gtCall.gtCallLateArgs =
8178                 tree->gtCall.gtCallLateArgs
8179                     ? gtCloneExpr(tree->gtCall.gtCallLateArgs, addFlags, deepVarNum, deepVarVal)->AsArgList()
8180                     : nullptr;
8181
8182 #if !FEATURE_FIXED_OUT_ARGS
8183             copy->gtCall.regArgList      = tree->gtCall.regArgList;
8184             copy->gtCall.regArgListCount = tree->gtCall.regArgListCount;
8185 #endif
8186
8187             // The call sig comes from the EE and doesn't change throughout the compilation process, meaning
8188             // we only really need one physical copy of it. Therefore a shallow pointer copy will suffice.
8189             // (Note that this still holds even if the tree we are cloning was created by an inlinee compiler,
8190             // because the inlinee still uses the inliner's memory allocator anyway.)
8191             copy->gtCall.callSig = tree->gtCall.callSig;
8192
8193             copy->gtCall.gtCallType    = tree->gtCall.gtCallType;
8194             copy->gtCall.gtReturnType  = tree->gtCall.gtReturnType;
8195             copy->gtCall.gtControlExpr = tree->gtCall.gtControlExpr;
8196
8197             /* Copy the union */
8198             if (tree->gtCall.gtCallType == CT_INDIRECT)
8199             {
8200                 copy->gtCall.gtCallCookie =
8201                     tree->gtCall.gtCallCookie ? gtCloneExpr(tree->gtCall.gtCallCookie, addFlags, deepVarNum, deepVarVal)
8202                                               : nullptr;
8203                 copy->gtCall.gtCallAddr = tree->gtCall.gtCallAddr
8204                                               ? gtCloneExpr(tree->gtCall.gtCallAddr, addFlags, deepVarNum, deepVarVal)
8205                                               : nullptr;
8206             }
8207             else if (tree->gtCall.IsVirtualStub())
8208             {
8209                 copy->gtCall.gtCallMethHnd      = tree->gtCall.gtCallMethHnd;
8210                 copy->gtCall.gtStubCallStubAddr = tree->gtCall.gtStubCallStubAddr;
8211             }
8212             else
8213             {
8214                 copy->gtCall.gtCallMethHnd         = tree->gtCall.gtCallMethHnd;
8215                 copy->gtCall.gtInlineCandidateInfo = tree->gtCall.gtInlineCandidateInfo;
8216             }
8217
8218             if (tree->gtCall.fgArgInfo)
8219             {
8220                 // Create and initialize the fgArgInfo for our copy of the call tree
8221                 copy->gtCall.fgArgInfo = new (this, CMK_Unknown) fgArgInfo(copy->AsCall(), tree->AsCall());
8222             }
8223             else
8224             {
8225                 copy->gtCall.fgArgInfo = nullptr;
8226             }
8227             copy->gtCall.gtRetClsHnd = tree->gtCall.gtRetClsHnd;
8228
8229 #if FEATURE_MULTIREG_RET
8230             copy->gtCall.gtReturnTypeDesc = tree->gtCall.gtReturnTypeDesc;
8231 #endif
8232
8233 #ifdef LEGACY_BACKEND
8234             copy->gtCall.gtCallRegUsedMask = tree->gtCall.gtCallRegUsedMask;
8235 #endif // LEGACY_BACKEND
8236
8237 #ifdef FEATURE_READYTORUN_COMPILER
8238             copy->gtCall.setEntryPoint(tree->gtCall.gtEntryPoint);
8239 #endif
8240
8241 #ifdef DEBUG
8242             copy->gtCall.gtInlineObservation = tree->gtCall.gtInlineObservation;
8243 #endif
8244
8245             copy->AsCall()->CopyOtherRegFlags(tree->AsCall());
8246             break;
8247
8248         case GT_FIELD:
8249
8250             copy = gtNewFieldRef(tree->TypeGet(), tree->gtField.gtFldHnd, nullptr, tree->gtField.gtFldOffset);
8251
8252             copy->gtField.gtFldObj = tree->gtField.gtFldObj
8253                                          ? gtCloneExpr(tree->gtField.gtFldObj, addFlags, deepVarNum, deepVarVal)
8254                                          : nullptr;
8255             copy->gtField.gtFldMayOverlap = tree->gtField.gtFldMayOverlap;
8256 #ifdef FEATURE_READYTORUN_COMPILER
8257             copy->gtField.gtFieldLookup = tree->gtField.gtFieldLookup;
8258 #endif
8259
8260             break;
8261
8262         case GT_ARR_ELEM:
8263         {
8264             GenTree* inds[GT_ARR_MAX_RANK];
8265             for (unsigned dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
8266             {
8267                 inds[dim] = gtCloneExpr(tree->gtArrElem.gtArrInds[dim], addFlags, deepVarNum, deepVarVal);
8268             }
8269             copy = new (this, GT_ARR_ELEM)
8270                 GenTreeArrElem(tree->TypeGet(), gtCloneExpr(tree->gtArrElem.gtArrObj, addFlags, deepVarNum, deepVarVal),
8271                                tree->gtArrElem.gtArrRank, tree->gtArrElem.gtArrElemSize, tree->gtArrElem.gtArrElemType,
8272                                &inds[0]);
8273         }
8274         break;
8275
8276         case GT_ARR_OFFSET:
8277         {
8278             copy = new (this, GT_ARR_OFFSET)
8279                 GenTreeArrOffs(tree->TypeGet(), gtCloneExpr(tree->gtArrOffs.gtOffset, addFlags, deepVarNum, deepVarVal),
8280                                gtCloneExpr(tree->gtArrOffs.gtIndex, addFlags, deepVarNum, deepVarVal),
8281                                gtCloneExpr(tree->gtArrOffs.gtArrObj, addFlags, deepVarNum, deepVarVal),
8282                                tree->gtArrOffs.gtCurrDim, tree->gtArrOffs.gtArrRank, tree->gtArrOffs.gtArrElemType);
8283         }
8284         break;
8285
8286         case GT_CMPXCHG:
8287             copy = new (this, GT_CMPXCHG)
8288                 GenTreeCmpXchg(tree->TypeGet(),
8289                                gtCloneExpr(tree->gtCmpXchg.gtOpLocation, addFlags, deepVarNum, deepVarVal),
8290                                gtCloneExpr(tree->gtCmpXchg.gtOpValue, addFlags, deepVarNum, deepVarVal),
8291                                gtCloneExpr(tree->gtCmpXchg.gtOpComparand, addFlags, deepVarNum, deepVarVal));
8292             break;
8293
8294         case GT_ARR_BOUNDS_CHECK:
8295 #ifdef FEATURE_SIMD
8296         case GT_SIMD_CHK:
8297 #endif // FEATURE_SIMD
8298 #ifdef FEATURE_HW_INTRINSICS
8299         case GT_HW_INTRINSIC_CHK:
8300 #endif // FEATURE_HW_INTRINSICS
8301             copy = new (this, oper)
8302                 GenTreeBoundsChk(oper, tree->TypeGet(),
8303                                  gtCloneExpr(tree->gtBoundsChk.gtIndex, addFlags, deepVarNum, deepVarVal),
8304                                  gtCloneExpr(tree->gtBoundsChk.gtArrLen, addFlags, deepVarNum, deepVarVal),
8305                                  tree->gtBoundsChk.gtThrowKind);
8306             copy->gtBoundsChk.gtIndRngFailBB = tree->gtBoundsChk.gtIndRngFailBB;
8307             copy->gtBoundsChk.gtStkDepth     = tree->gtBoundsChk.gtStkDepth;
8308             break;
8309
8310         case GT_STORE_DYN_BLK:
8311         case GT_DYN_BLK:
8312             copy = new (this, oper)
8313                 GenTreeDynBlk(gtCloneExpr(tree->gtDynBlk.Addr(), addFlags, deepVarNum, deepVarVal),
8314                               gtCloneExpr(tree->gtDynBlk.gtDynamicSize, addFlags, deepVarNum, deepVarVal));
8315             break;
8316
8317         default:
8318 #ifdef DEBUG
8319             gtDispTree(tree);
8320 #endif
8321             NO_WAY("unexpected operator");
8322     }
8323
8324 DONE:
8325
8326     // If it has a zero-offset field seq, copy annotation.
8327     if (tree->TypeGet() == TYP_BYREF)
8328     {
8329         FieldSeqNode* fldSeq = nullptr;
8330         if (GetZeroOffsetFieldMap()->Lookup(tree, &fldSeq))
8331         {
8332             GetZeroOffsetFieldMap()->Set(copy, fldSeq);
8333         }
8334     }
8335
8336     copy->gtVNPair = tree->gtVNPair; // A cloned tree gets the orginal's Value number pair
8337
8338     /* We assume the FP stack level will be identical */
8339
8340     copy->gtCopyFPlvl(tree);
8341
8342     /* Compute the flags for the copied node. Note that we can do this only
8343        if we didnt gtFoldExpr(copy) */
8344
8345     if (copy->gtOper == oper)
8346     {
8347         addFlags |= tree->gtFlags;
8348
8349 #ifdef DEBUG
8350         /* GTF_NODE_MASK should not be propagated from 'tree' to 'copy' */
8351         addFlags &= ~GTF_NODE_MASK;
8352 #endif
8353         // Some other flags depend on the context of the expression, and should not be preserved.
8354         // For example, GTF_RELOP_QMARK:
8355         if (copy->OperKind() & GTK_RELOP)
8356         {
8357             addFlags &= ~GTF_RELOP_QMARK;
8358         }
8359         // On the other hand, if we're creating such a context, restore this flag.
8360         if (copy->OperGet() == GT_QMARK)
8361         {
8362             copy->gtOp.gtOp1->gtFlags |= GTF_RELOP_QMARK;
8363         }
8364
8365         copy->gtFlags |= addFlags;
8366
8367         // Update side effect flags since they may be different from the source side effect flags.
8368         // For example, we may have replaced some locals with constants and made indirections non-throwing.
8369         gtUpdateNodeSideEffects(copy);
8370     }
8371
8372     /* GTF_COLON_COND should be propagated from 'tree' to 'copy' */
8373     copy->gtFlags |= (tree->gtFlags & GTF_COLON_COND);
8374
8375 #if defined(DEBUG)
8376     // Non-node debug flags should be propagated from 'tree' to 'copy'
8377     copy->gtDebugFlags |= (tree->gtDebugFlags & ~GTF_DEBUG_NODE_MASK);
8378 #endif
8379
8380     /* Make sure to copy back fields that may have been initialized */
8381
8382     copy->CopyRawCosts(tree);
8383     copy->gtRsvdRegs = tree->gtRsvdRegs;
8384     copy->CopyReg(tree);
8385     return copy;
8386 }
8387
8388 //------------------------------------------------------------------------
8389 // gtReplaceTree: Replace a tree with a new tree.
8390 //
8391 // Arguments:
8392 //    stmt            - The top-level root stmt of the tree being replaced.
8393 //                      Must not be null.
8394 //    tree            - The tree being replaced. Must not be null.
8395 //    replacementTree - The replacement tree. Must not be null.
8396 //
8397 // Return Value:
8398 //    The tree node that replaces the old tree.
8399 //
8400 // Assumptions:
8401 //    The sequencing of the stmt has been done.
8402 //
8403 // Notes:
8404 //    The caller must ensure that the original statement has been sequenced,
8405 //    and the side effect flags are updated on the statement nodes,
8406 //    but this method will sequence 'replacementTree', and insert it into the
8407 //    proper place in the statement sequence.
8408
8409 GenTree* Compiler::gtReplaceTree(GenTree* stmt, GenTree* tree, GenTree* replacementTree)
8410 {
8411     assert(fgStmtListThreaded);
8412     assert(tree != nullptr);
8413     assert(stmt != nullptr);
8414     assert(replacementTree != nullptr);
8415
8416     GenTree** treePtr    = nullptr;
8417     GenTree*  treeParent = tree->gtGetParent(&treePtr);
8418
8419     assert(treeParent != nullptr || tree == stmt->gtStmt.gtStmtExpr);
8420
8421     if (treePtr == nullptr)
8422     {
8423         // Replace the stmt expr and rebuild the linear order for "stmt".
8424         assert(treeParent == nullptr);
8425         assert(fgOrder != FGOrderLinear);
8426         stmt->gtStmt.gtStmtExpr = tree;
8427         fgSetStmtSeq(stmt);
8428     }
8429     else
8430     {
8431         assert(treeParent != nullptr);
8432
8433         // Check to see if the node to be replaced is a call argument and if so,
8434         // set `treeParent` to the call node.
8435         GenTree* cursor = treeParent;
8436         while ((cursor != nullptr) && (cursor->OperGet() == GT_LIST))
8437         {
8438             cursor = cursor->gtNext;
8439         }
8440
8441         if ((cursor != nullptr) && (cursor->OperGet() == GT_CALL))
8442         {
8443             treeParent = cursor;
8444         }
8445
8446 #ifdef DEBUG
8447         GenTree** useEdge;
8448         assert(treeParent->TryGetUse(tree, &useEdge));
8449         assert(useEdge == treePtr);
8450 #endif // DEBUG
8451
8452         GenTree* treeFirstNode = fgGetFirstNode(tree);
8453         GenTree* treeLastNode  = tree;
8454         GenTree* treePrevNode  = treeFirstNode->gtPrev;
8455         GenTree* treeNextNode  = treeLastNode->gtNext;
8456
8457         treeParent->ReplaceOperand(treePtr, replacementTree);
8458
8459         // Build the linear order for "replacementTree".
8460         fgSetTreeSeq(replacementTree, treePrevNode);
8461
8462         // Restore linear-order Prev and Next for "replacementTree".
8463         if (treePrevNode != nullptr)
8464         {
8465             treeFirstNode         = fgGetFirstNode(replacementTree);
8466             treeFirstNode->gtPrev = treePrevNode;
8467             treePrevNode->gtNext  = treeFirstNode;
8468         }
8469         else
8470         {
8471             // Update the linear oder start of "stmt" if treeFirstNode
8472             // appears to have replaced the original first node.
8473             assert(treeFirstNode == stmt->gtStmt.gtStmtList);
8474             stmt->gtStmt.gtStmtList = fgGetFirstNode(replacementTree);
8475         }
8476
8477         if (treeNextNode != nullptr)
8478         {
8479             treeLastNode         = replacementTree;
8480             treeLastNode->gtNext = treeNextNode;
8481             treeNextNode->gtPrev = treeLastNode;
8482         }
8483     }
8484
8485     return replacementTree;
8486 }
8487
8488 //------------------------------------------------------------------------
8489 // gtUpdateSideEffects: Update the side effects of a tree and its ancestors
8490 //
8491 // Arguments:
8492 //    stmt            - The tree's statement
8493 //    tree            - Tree to update the side effects for
8494 //
8495 // Note: If tree's order hasn't been established, the method updates side effect
8496 //       flags on all statement's nodes.
8497
8498 void Compiler::gtUpdateSideEffects(GenTree* stmt, GenTree* tree)
8499 {
8500     if (fgStmtListThreaded)
8501     {
8502         gtUpdateTreeAncestorsSideEffects(tree);
8503     }
8504     else
8505     {
8506         gtUpdateStmtSideEffects(stmt);
8507     }
8508 }
8509
8510 //------------------------------------------------------------------------
8511 // gtUpdateTreeAncestorsSideEffects: Update the side effects of a tree and its ancestors
8512 //                                   when statement order has been established.
8513 //
8514 // Arguments:
8515 //    tree            - Tree to update the side effects for
8516
8517 void Compiler::gtUpdateTreeAncestorsSideEffects(GenTree* tree)
8518 {
8519     assert(fgStmtListThreaded);
8520     while (tree != nullptr)
8521     {
8522         gtUpdateNodeSideEffects(tree);
8523         tree = tree->gtGetParent(nullptr);
8524     }
8525 }
8526
8527 //------------------------------------------------------------------------
8528 // gtUpdateStmtSideEffects: Update the side effects for statement tree nodes.
8529 //
8530 // Arguments:
8531 //    stmt            - The statement to update side effects on
8532
void Compiler::gtUpdateStmtSideEffects(GenTree* stmt)
{
    // Re-derive GTF_EXCEPT/GTF_ASG for every node in the statement: the
    // pre-order callback recomputes each node's own oper-based flags, and the
    // post-order callback then propagates child flags up to each parent.
    fgWalkTree(&stmt->gtStmt.gtStmtExpr, fgUpdateSideEffectsPre, fgUpdateSideEffectsPost);
}
8537
8538 //------------------------------------------------------------------------
8539 // gtUpdateNodeOperSideEffects: Update the side effects based on the node operation.
8540 //
8541 // Arguments:
8542 //    tree            - Tree to update the side effects on
8543 //
8544 // Notes:
8545 //    This method currently only updates GTF_EXCEPT and GTF_ASG flags. The other side effect
8546 //    flags may remain unnecessarily (conservatively) set.
8547 //    The caller of this method is expected to update the flags based on the children's flags.
8548
8549 void Compiler::gtUpdateNodeOperSideEffects(GenTree* tree)
8550 {
8551     if (tree->OperMayThrow(this))
8552     {
8553         tree->gtFlags |= GTF_EXCEPT;
8554     }
8555     else
8556     {
8557         tree->gtFlags &= ~GTF_EXCEPT;
8558         if (tree->OperIsIndirOrArrLength())
8559         {
8560             tree->gtFlags |= GTF_IND_NONFAULTING;
8561         }
8562     }
8563
8564     if (tree->OperRequiresAsgFlag())
8565     {
8566         tree->gtFlags |= GTF_ASG;
8567     }
8568     else
8569     {
8570         tree->gtFlags &= ~GTF_ASG;
8571     }
8572 }
8573
8574 //------------------------------------------------------------------------
8575 // gtUpdateNodeSideEffects: Update the side effects based on the node operation and
//                          children's side effects.
8577 //
8578 // Arguments:
8579 //    tree            - Tree to update the side effects on
8580 //
8581 // Notes:
8582 //    This method currently only updates GTF_EXCEPT and GTF_ASG flags. The other side effect
8583 //    flags may remain unnecessarily (conservatively) set.
8584
8585 void Compiler::gtUpdateNodeSideEffects(GenTree* tree)
8586 {
8587     gtUpdateNodeOperSideEffects(tree);
8588     unsigned nChildren = tree->NumChildren();
8589     for (unsigned childNum = 0; childNum < nChildren; childNum++)
8590     {
8591         GenTree* child = tree->GetChild(childNum);
8592         if (child != nullptr)
8593         {
8594             tree->gtFlags |= (child->gtFlags & GTF_ALL_EFFECT);
8595         }
8596     }
8597 }
8598
8599 //------------------------------------------------------------------------
8600 // fgUpdateSideEffectsPre: Update the side effects based on the tree operation.
8601 //
8602 // Arguments:
8603 //    pTree            - Pointer to the tree to update the side effects
8604 //    fgWalkPre        - Walk data
8605 //
8606 // Notes:
8607 //    This method currently only updates GTF_EXCEPT and GTF_ASG flags. The other side effect
8608 //    flags may remain unnecessarily (conservatively) set.
8609
Compiler::fgWalkResult Compiler::fgUpdateSideEffectsPre(GenTree** pTree, fgWalkData* fgWalkPre)
{
    // Recompute this node's own oper-based flags; fgUpdateSideEffectsPost will
    // OR the children's effects into the parents afterwards.
    fgWalkPre->compiler->gtUpdateNodeOperSideEffects(*pTree);

    return WALK_CONTINUE;
}
8616
8617 //------------------------------------------------------------------------
8618 // fgUpdateSideEffectsPost: Update the side effects of the parent based on the tree's flags.
8619 //
8620 // Arguments:
8621 //    pTree            - Pointer to the tree
8622 //    fgWalkPost       - Walk data
8623 //
8624 // Notes:
8625 //    The routine is used for updating the stale side effect flags for ancestor
8626 //    nodes starting from treeParent up to the top-level stmt expr.
8627
8628 Compiler::fgWalkResult Compiler::fgUpdateSideEffectsPost(GenTree** pTree, fgWalkData* fgWalkPost)
8629 {
8630     GenTree* tree   = *pTree;
8631     GenTree* parent = fgWalkPost->parent;
8632     if (parent != nullptr)
8633     {
8634         parent->gtFlags |= (tree->gtFlags & GTF_ALL_EFFECT);
8635     }
8636     return WALK_CONTINUE;
8637 }
8638
8639 /*****************************************************************************
8640  *
8641  *  Compares two trees and returns true when both trees are the same.
8642  *  Instead of fully comparing the two trees this method can just return false.
8643  *  Thus callers should not assume that the trees are different when false is returned.
8644  *  Only when true is returned can the caller perform code optimizations.
8645  *  The current implementation only compares a limited set of LEAF/CONST node
 *  and returns false for all other trees.
8647  */
8648 bool Compiler::gtCompareTree(GenTree* op1, GenTree* op2)
8649 {
8650     /* Make sure that both trees are of the same GT node kind */
8651     if (op1->OperGet() != op2->OperGet())
8652     {
8653         return false;
8654     }
8655
8656     /* Make sure that both trees are returning the same type */
8657     if (op1->gtType != op2->gtType)
8658     {
8659         return false;
8660     }
8661
8662     /* Figure out what kind of a node we have */
8663
8664     genTreeOps oper = op1->OperGet();
8665     unsigned   kind = op1->OperKind();
8666
8667     /* Is this a constant or leaf node? */
8668
8669     if (kind & (GTK_CONST | GTK_LEAF))
8670     {
8671         switch (oper)
8672         {
8673             case GT_CNS_INT:
8674                 if ((op1->gtIntCon.gtIconVal == op2->gtIntCon.gtIconVal) && GenTree::SameIconHandleFlag(op1, op2))
8675                 {
8676                     return true;
8677                 }
8678                 break;
8679
8680             case GT_CNS_LNG:
8681                 if (op1->gtLngCon.gtLconVal == op2->gtLngCon.gtLconVal)
8682                 {
8683                     return true;
8684                 }
8685                 break;
8686
8687             case GT_CNS_STR:
8688                 if (op1->gtStrCon.gtSconCPX == op2->gtStrCon.gtSconCPX)
8689                 {
8690                     return true;
8691                 }
8692                 break;
8693
8694             case GT_LCL_VAR:
8695                 if (op1->gtLclVarCommon.gtLclNum == op2->gtLclVarCommon.gtLclNum)
8696                 {
8697                     return true;
8698                 }
8699                 break;
8700
8701             case GT_CLS_VAR:
8702                 if (op1->gtClsVar.gtClsVarHnd == op2->gtClsVar.gtClsVarHnd)
8703                 {
8704                     return true;
8705                 }
8706                 break;
8707
8708             default:
8709                 // we return false for these unhandled 'oper' kinds
8710                 break;
8711         }
8712     }
8713     return false;
8714 }
8715
8716 GenTree* Compiler::gtGetThisArg(GenTreeCall* call)
8717 {
8718     if (call->gtCallObjp != nullptr)
8719     {
8720         if (call->gtCallObjp->gtOper != GT_NOP && call->gtCallObjp->gtOper != GT_ASG)
8721         {
8722             if (!(call->gtCallObjp->gtFlags & GTF_LATE_ARG))
8723             {
8724                 return call->gtCallObjp;
8725             }
8726         }
8727
8728         if (call->gtCallLateArgs)
8729         {
8730             regNumber      thisReg         = REG_ARG_0;
8731             unsigned       argNum          = 0;
8732             fgArgTabEntry* thisArgTabEntry = gtArgEntryByArgNum(call, argNum);
8733             GenTree*       result          = thisArgTabEntry->node;
8734
8735 #if !FEATURE_FIXED_OUT_ARGS
8736             GenTree* lateArgs = call->gtCallLateArgs;
8737             regList  list     = call->regArgList;
8738             int      index    = 0;
8739             while (lateArgs != NULL)
8740             {
8741                 assert(lateArgs->gtOper == GT_LIST);
8742                 assert(index < call->regArgListCount);
8743                 regNumber curArgReg = list[index];
8744                 if (curArgReg == thisReg)
8745                 {
8746                     if (optAssertionPropagatedCurrentStmt)
8747                         result = lateArgs->gtOp.gtOp1;
8748
8749                     assert(result == lateArgs->gtOp.gtOp1);
8750                 }
8751
8752                 lateArgs = lateArgs->gtOp.gtOp2;
8753                 index++;
8754             }
8755 #endif
8756             return result;
8757         }
8758     }
8759     return nullptr;
8760 }
8761
bool GenTree::gtSetFlags() const
{
    //
    // When FEATURE_SET_FLAGS (_TARGET_ARM_) is active the method returns true
    //    when the gtFlags has the flag GTF_SET_FLAGS set;
    // otherwise the architecture is assumed to have instructions that typically
    //    set the flags, and this method will return true.
    //
    //    Exceptions: GT_IND (load/store) is not allowed to set the flags
    //                and on XARCH the GT_MUL/GT_DIV and all overflow instructions
    //                do not set the condition flags
    //
    // Precondition we have a GTK_SMPOP
    //
    // Only integral/machine-sized (or TYP_VOID, e.g. compares) nodes can set flags.
    if (!varTypeIsIntegralOrI(TypeGet()) && (TypeGet() != TYP_VOID))
    {
        return false;
    }

#if defined(LEGACY_BACKEND) && !FEATURE_SET_FLAGS && defined(_TARGET_XARCH_)
    // Return true if/when the codegen for this node will set the flags
    //
    //
    if ((gtOper == GT_IND) || (gtOper == GT_MUL) || (gtOper == GT_DIV))
    {
        return false;
    }
    else if (gtOverflowEx())
    {
        return false;
    }
    else
    {
        return true;
    }
#else // !(defined(LEGACY_BACKEND) && !FEATURE_SET_FLAGS && defined(_TARGET_XARCH_))

#if FEATURE_SET_FLAGS && defined(LEGACY_BACKEND)
    assert(OperIsSimple());
#endif
    if (((gtFlags & GTF_SET_FLAGS) != 0) && (gtOper != GT_IND))
    {
        // GTF_SET_FLAGS is not valid on GT_IND and is overlaid with GTF_NONFAULTING_IND
        return true;
    }
    else
    {
        return false;
    }
#endif // !(defined(LEGACY_BACKEND) && !FEATURE_SET_FLAGS && defined(_TARGET_XARCH_))
}
8813
// gtRequestSetFlags: Try to mark this node with GTF_SET_FLAGS so that codegen
// must set the condition flags. Returns true only if the flag was actually set.
bool GenTree::gtRequestSetFlags()
{
    bool result = false;

#if FEATURE_SET_FLAGS
    // This method is a Nop unless FEATURE_SET_FLAGS is defined

    // In order to set GTF_SET_FLAGS
    //              we must have a GTK_SMPOP
    //          and we have a integer or machine size type (not floating point or TYP_LONG on 32-bit)
    //
    if (!OperIsSimple())
        return false;

    if (!varTypeIsIntegralOrI(TypeGet()))
        return false;

    switch (gtOper)
    {
        case GT_IND:
        case GT_ARR_LENGTH:
            // These will turn into simple load from memory instructions
            // and we can't force the setting of the flags on load from memory
            break;

        case GT_MUL:
        case GT_DIV:
            // These instructions don't set the flags (on x86/x64)
            //
            break;

        default:
            // Otherwise we can set the flags for this gtOper
            // and codegen must set the condition flags.
            //
            gtFlags |= GTF_SET_FLAGS;
            result = true;
            break;
    }
#endif // FEATURE_SET_FLAGS

    // Codegen for this tree must set the condition flags if
    // this method returns true.
    //
    return result;
}
8860
8861 /*****************************************************************************/
// CopyTo: shallow-copy the full contents of "gt" into this node — the named
// base fields first, then the remaining (largest-subtype) payload via memcpy.
void GenTree::CopyTo(class Compiler* comp, const GenTree& gt)
{
    SetOperRaw(gt.OperGet());

    gtType          = gt.gtType;
    gtAssertionInfo = gt.gtAssertionInfo;

    gtRegNum = gt.gtRegNum; // one union member.
    CopyCosts(&gt);

    gtFlags  = gt.gtFlags;
    gtVNPair = gt.gtVNPair;

    gtRsvdRegs = gt.gtRsvdRegs;

#ifdef LEGACY_BACKEND
    gtUsedRegs = gt.gtUsedRegs;
#endif // LEGACY_BACKEND

#if FEATURE_STACK_FP_X87
    gtFPlvl = gt.gtFPlvl;
#endif // FEATURE_STACK_FP_X87

    gtNext = gt.gtNext;
    gtPrev = gt.gtPrev;
#ifdef DEBUG
    gtTreeID = gt.gtTreeID;
    gtSeqNum = gt.gtSeqNum;
#endif
    // Largest node subtype: copy everything past the GenTree base in one shot.
    void* remDst = reinterpret_cast<char*>(this) + sizeof(GenTree);
    void* remSrc = reinterpret_cast<char*>(const_cast<GenTree*>(&gt)) + sizeof(GenTree);
    memcpy(remDst, remSrc, TREE_NODE_SZ_LARGE - sizeof(GenTree));
}
8896
8897 void GenTree::CopyToSmall(const GenTree& gt)
8898 {
8899     // Small node size is defined by GenTreeOp.
8900     void* remDst = reinterpret_cast<char*>(this) + sizeof(GenTree);
8901     void* remSrc = reinterpret_cast<char*>(const_cast<GenTree*>(&gt)) + sizeof(GenTree);
8902     memcpy(remDst, remSrc, TREE_NODE_SZ_SMALL - sizeof(GenTree));
8903 }
8904
// NumChildren: count the operands of this node. The numbering produced here
// must stay in sync with GetChild(), which maps child indices back to operands.
unsigned GenTree::NumChildren()
{
    if (OperIsConst() || OperIsLeaf())
    {
        return 0;
    }
    else if (OperIsUnary())
    {
        // GT_NOP/GT_RETURN/GT_RETFILT may legally have a null operand.
        if (OperGet() == GT_NOP || OperGet() == GT_RETURN || OperGet() == GT_RETFILT)
        {
            if (gtOp.gtOp1 == nullptr)
            {
                return 0;
            }
            else
            {
                return 1;
            }
        }
        else
        {
            return 1;
        }
    }
    else if (OperIsBinary())
    {
        // All binary operators except LEA have at least one arg; the second arg may sometimes be null, however.
        if (OperGet() == GT_LEA)
        {
            unsigned childCount = 0;
            if (gtOp.gtOp1 != nullptr)
            {
                childCount++;
            }
            if (gtOp.gtOp2 != nullptr)
            {
                childCount++;
            }
            return childCount;
        }
#ifdef FEATURE_HW_INTRINSICS
        // GT_HWIntrinsic require special handling
        if (OperGet() == GT_HWIntrinsic)
        {
            // A HW intrinsic may have no operands at all.
            if (gtOp.gtOp1 == nullptr)
            {
                return 0;
            }
        }
#endif
        // Special case for assignment of dynamic block.
        // This is here to duplicate the former case where the size may be evaluated prior to the
        // source and destination addresses. In order to do this, we treat the size as a child of the
        // assignment.
        // TODO-1stClassStructs-Cleanup: Remove all this special casing, and ensure that the diffs are reasonable.
        if ((OperGet() == GT_ASG) && (gtOp.gtOp1->OperGet() == GT_DYN_BLK) && (gtOp.gtOp1->AsDynBlk()->gtEvalSizeFirst))
        {
            return 3;
        }
        assert(gtOp.gtOp1 != nullptr);
        if (gtOp.gtOp2 == nullptr)
        {
            return 1;
        }
        else
        {
            return 2;
        }
    }
    else
    {
        // Special
        switch (OperGet())
        {
            case GT_CMPXCHG:
                return 3;

            case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
            case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
            case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
                return 2;

            case GT_FIELD:
            case GT_STMT:
                return 1;

            case GT_ARR_ELEM:
                // The array object plus one index tree per rank.
                return 1 + AsArrElem()->gtArrRank;

            // This really has two children, but if the size is evaluated first, we treat it as a child of the
            // parent assignment.
            case GT_DYN_BLK:
                if (AsDynBlk()->gtEvalSizeFirst)
                {
                    return 1;
                }
                else
                {
                    return 2;
                }

            case GT_ARR_OFFSET:
            case GT_STORE_DYN_BLK:
                return 3;

            case GT_CALL:
            {
                // Count only the non-null optional components of the call.
                GenTreeCall* call = AsCall();
                unsigned     res  = 0; // arg list(s) (including late args).
                if (call->gtCallObjp != nullptr)
                {
                    res++; // Add objp?
                }
                if (call->gtCallArgs != nullptr)
                {
                    res++; // Add args?
                }
                if (call->gtCallLateArgs != nullptr)
                {
                    res++; // Add late args?
                }
                if (call->gtControlExpr != nullptr)
                {
                    res++;
                }

                if (call->gtCallType == CT_INDIRECT)
                {
                    if (call->gtCallCookie != nullptr)
                    {
                        res++;
                    }
                    if (call->gtCallAddr != nullptr)
                    {
                        res++;
                    }
                }
                return res;
            }
            case GT_NONE:
                return 0;
            default:
                unreached();
        }
    }
}
9055
// GetChild: return operand number "childNum" of this node, using the same
// numbering scheme as NumChildren() (the two must stay in sync).
GenTree* GenTree::GetChild(unsigned childNum)
{
    assert(childNum < NumChildren()); // Precondition.
    assert(NumChildren() <= MAX_CHILDREN);
    assert(!(OperIsConst() || OperIsLeaf()));
    if (OperIsUnary())
    {
        return AsUnOp()->gtOp1;
    }
    // Special case for assignment of dynamic block.
    // This code is here to duplicate the former case where the size may be evaluated prior to the
    // source and destination addresses. In order to do this, we treat the size as a child of the
    // assignment.
    // TODO-1stClassStructs: Revisit the need to duplicate former behavior, so that we can remove
    // these special cases.
    if ((OperGet() == GT_ASG) && (gtOp.gtOp1->OperGet() == GT_DYN_BLK) && (childNum == 2))
    {
        return gtOp.gtOp1->AsDynBlk()->gtDynamicSize;
    }
    else if (OperIsBinary())
    {
        if (OperIsAddrMode())
        {
            // If this is the first (0th) child, only return op1 if it is non-null
            // Otherwise, we return gtOp2.
            if (childNum == 0 && AsOp()->gtOp1 != nullptr)
            {
                return AsOp()->gtOp1;
            }
            return AsOp()->gtOp2;
        }
        // TODO-Cleanup: Consider handling ReverseOps here, and then we wouldn't have to handle it in
        // fgGetFirstNode().  However, it seems that it causes loop hoisting behavior to change.
        if (childNum == 0)
        {
            return AsOp()->gtOp1;
        }
        else
        {
            return AsOp()->gtOp2;
        }
    }
    else
    {
        // Special
        switch (OperGet())
        {
            case GT_CMPXCHG:
                switch (childNum)
                {
                    case 0:
                        return AsCmpXchg()->gtOpLocation;
                    case 1:
                        return AsCmpXchg()->gtOpValue;
                    case 2:
                        return AsCmpXchg()->gtOpComparand;
                    default:
                        unreached();
                }
            case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
            case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
            case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
                switch (childNum)
                {
                    case 0:
                        return AsBoundsChk()->gtIndex;
                    case 1:
                        return AsBoundsChk()->gtArrLen;
                    default:
                        unreached();
                }

            case GT_STORE_DYN_BLK:
                switch (childNum)
                {
                    case 0:
                        return AsDynBlk()->Addr();
                    case 1:
                        return AsDynBlk()->Data();
                    case 2:
                        return AsDynBlk()->gtDynamicSize;
                    default:
                        unreached();
                }
            case GT_DYN_BLK:
                switch (childNum)
                {
                    case 0:
                        return AsDynBlk()->Addr();
                    case 1:
                        // If the size were evaluated first it would be a child of
                        // the parent assignment, handled above.
                        assert(!AsDynBlk()->gtEvalSizeFirst);
                        return AsDynBlk()->gtDynamicSize;
                    default:
                        unreached();
                }

            case GT_FIELD:
                return AsField()->gtFldObj;

            case GT_STMT:
                return AsStmt()->gtStmtExpr;

            case GT_ARR_ELEM:
                if (childNum == 0)
                {
                    return AsArrElem()->gtArrObj;
                }
                else
                {
                    return AsArrElem()->gtArrInds[childNum - 1];
                }

            case GT_ARR_OFFSET:
                switch (childNum)
                {
                    case 0:
                        return AsArrOffs()->gtOffset;
                    case 1:
                        return AsArrOffs()->gtIndex;
                    case 2:
                        return AsArrOffs()->gtArrObj;
                    default:
                        unreached();
                }

            case GT_CALL:
            {
                // The if chain below assumes that all possible children are non-null.
                // If some are null, "virtually skip them."
                // If there isn't "virtually skip it."
                GenTreeCall* call = AsCall();

                if (call->gtCallObjp == nullptr)
                {
                    childNum++;
                }
                if (childNum >= 1 && call->gtCallArgs == nullptr)
                {
                    childNum++;
                }
                if (childNum >= 2 && call->gtCallLateArgs == nullptr)
                {
                    childNum++;
                }
                if (childNum >= 3 && call->gtControlExpr == nullptr)
                {
                    childNum++;
                }
                if (call->gtCallType == CT_INDIRECT)
                {
                    if (childNum >= 4 && call->gtCallCookie == nullptr)
                    {
                        childNum++;
                    }
                }

                if (childNum == 0)
                {
                    return call->gtCallObjp;
                }
                else if (childNum == 1)
                {
                    return call->gtCallArgs;
                }
                else if (childNum == 2)
                {
                    return call->gtCallLateArgs;
                }
                else if (childNum == 3)
                {
                    return call->gtControlExpr;
                }
                else
                {
                    assert(call->gtCallType == CT_INDIRECT);
                    if (childNum == 4)
                    {
                        return call->gtCallCookie;
                    }
                    else
                    {
                        assert(childNum == 5);
                        return call->gtCallAddr;
                    }
                }
            }
            case GT_NONE:
                unreached();
            default:
                unreached();
        }
    }
}
9253
// Default constructor: produces the "end" iterator (state -1, no node/edge).
GenTreeUseEdgeIterator::GenTreeUseEdgeIterator()
    : m_advance(nullptr), m_node(nullptr), m_edge(nullptr), m_argList(nullptr), m_state(-1)
{
}
9258
// Begin-iterator constructor: positions m_edge at the node's first operand use
// edge and sets m_advance to the function that will produce subsequent edges.
// A state of -1 means the iterator is already at the end (no operands).
GenTreeUseEdgeIterator::GenTreeUseEdgeIterator(GenTree* node)
    : m_advance(nullptr), m_node(node), m_edge(nullptr), m_argList(nullptr), m_state(0)
{
    assert(m_node != nullptr);

    // NOTE: the switch statement below must be updated when introducing new nodes.

    switch (m_node->OperGet())
    {
        // Leaf nodes
        case GT_LCL_VAR:
        case GT_LCL_FLD:
        case GT_LCL_VAR_ADDR:
        case GT_LCL_FLD_ADDR:
        case GT_CATCH_ARG:
        case GT_LABEL:
        case GT_FTN_ADDR:
        case GT_RET_EXPR:
        case GT_CNS_INT:
        case GT_CNS_LNG:
        case GT_CNS_DBL:
        case GT_CNS_STR:
        case GT_MEMORYBARRIER:
        case GT_JMP:
        case GT_JCC:
        case GT_SETCC:
        case GT_NO_OP:
        case GT_START_NONGC:
        case GT_PROF_HOOK:
#if !FEATURE_EH_FUNCLETS
        case GT_END_LFIN:
#endif // !FEATURE_EH_FUNCLETS
        case GT_PHI_ARG:
#ifndef LEGACY_BACKEND
        case GT_JMPTABLE:
#endif // LEGACY_BACKEND
        case GT_REG_VAR:
        case GT_CLS_VAR:
        case GT_CLS_VAR_ADDR:
        case GT_ARGPLACE:
        case GT_PHYSREG:
        case GT_EMITNOP:
        case GT_PINVOKE_PROLOG:
        case GT_PINVOKE_EPILOG:
        case GT_IL_OFFSET:
            // No operands: start out as the end iterator.
            m_state = -1;
            return;

        // Standard unary operators
        case GT_STORE_LCL_VAR:
        case GT_STORE_LCL_FLD:
        case GT_NOT:
        case GT_NEG:
        case GT_COPY:
        case GT_RELOAD:
        case GT_ARR_LENGTH:
        case GT_CAST:
        case GT_BITCAST:
        case GT_CKFINITE:
        case GT_LCLHEAP:
        case GT_ADDR:
        case GT_IND:
        case GT_OBJ:
        case GT_BLK:
        case GT_BOX:
        case GT_ALLOCOBJ:
        case GT_RUNTIMELOOKUP:
        case GT_INIT_VAL:
        case GT_JTRUE:
        case GT_SWITCH:
        case GT_NULLCHECK:
        case GT_PUTARG_REG:
        case GT_PUTARG_STK:
#if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
        case GT_PUTARG_SPLIT:
#endif // !LEGACY_BACKEND && _TARGET_ARM_
        case GT_RETURNTRAP:
            // Exactly one operand: yield it, then terminate.
            m_edge = &m_node->AsUnOp()->gtOp1;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::Terminate;
            return;

        // Unary operators with an optional operand
        case GT_NOP:
        case GT_RETURN:
        case GT_RETFILT:
            if (m_node->AsUnOp()->gtOp1 == nullptr)
            {
                assert(m_node->NullOp1Legal());
                m_state = -1;
            }
            else
            {
                m_edge    = &m_node->AsUnOp()->gtOp1;
                m_advance = &GenTreeUseEdgeIterator::Terminate;
            }
            return;

        // Variadic nodes
        case GT_PHI:
            SetEntryStateForList(m_node->AsUnOp()->gtOp1);
            return;

        case GT_FIELD_LIST:
            SetEntryStateForList(m_node);
            return;

#ifdef FEATURE_SIMD
        case GT_SIMD:
            // SIMDIntrinsicInitN carries its operands as a GT_LIST.
            if (m_node->AsSIMD()->gtSIMDIntrinsicID == SIMDIntrinsicInitN)
            {
                SetEntryStateForList(m_node->AsSIMD()->gtOp1);
            }
            else
            {
                SetEntryStateForBinOp();
            }
            return;
#endif // FEATURE_SIMD

#ifdef FEATURE_HW_INTRINSICS
        case GT_HWIntrinsic:
            // A HW intrinsic may have zero operands, a GT_LIST of operands, or
            // ordinary unary/binary operands.
            if (m_node->AsHWIntrinsic()->gtOp1 == nullptr)
            {
                assert(m_node->NullOp1Legal());
                m_state = -1;
            }
            else if (m_node->AsHWIntrinsic()->gtOp1->OperIsList())
            {
                SetEntryStateForList(m_node->AsHWIntrinsic()->gtOp1);
            }
            else
            {
                SetEntryStateForBinOp();
            }
            return;
#endif // FEATURE_HW_INTRINSICS

        // LEA, which may have no first operand
        case GT_LEA:
            if (m_node->AsAddrMode()->gtOp1 == nullptr)
            {
                m_edge    = &m_node->AsAddrMode()->gtOp2;
                m_advance = &GenTreeUseEdgeIterator::Terminate;
            }
            else
            {
                SetEntryStateForBinOp();
            }
            return;

        // Special nodes
        case GT_CMPXCHG:
            m_edge = &m_node->AsCmpXchg()->gtOpLocation;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::AdvanceCmpXchg;
            return;

        case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
        case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
            m_edge = &m_node->AsBoundsChk()->gtIndex;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::AdvanceBoundsChk;
            return;

        case GT_FIELD:
            if (m_node->AsField()->gtFldObj == nullptr)
            {
                m_state = -1;
            }
            else
            {
                m_edge    = &m_node->AsField()->gtFldObj;
                m_advance = &GenTreeUseEdgeIterator::Terminate;
            }
            return;

        case GT_STMT:
            if (m_node->AsStmt()->gtStmtExpr == nullptr)
            {
                m_state = -1;
            }
            else
            {
                m_edge    = &m_node->AsStmt()->gtStmtExpr;
                m_advance = &GenTreeUseEdgeIterator::Terminate;
            }
            return;

        case GT_ARR_ELEM:
            m_edge = &m_node->AsArrElem()->gtArrObj;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::AdvanceArrElem;
            return;

        case GT_ARR_OFFSET:
            m_edge = &m_node->AsArrOffs()->gtOffset;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::AdvanceArrOffset;
            return;

        case GT_DYN_BLK:
        {
            // The dynamic size may be evaluated before or after the address.
            GenTreeDynBlk* const dynBlock = m_node->AsDynBlk();
            m_edge                        = dynBlock->gtEvalSizeFirst ? &dynBlock->gtDynamicSize : &dynBlock->gtOp1;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::AdvanceDynBlk;
        }
            return;

        case GT_STORE_DYN_BLK:
        {
            // First edge depends on both gtEvalSizeFirst and GTF_REVERSE_OPS.
            GenTreeDynBlk* const dynBlock = m_node->AsDynBlk();
            if (dynBlock->gtEvalSizeFirst)
            {
                m_edge = &dynBlock->gtDynamicSize;
            }
            else
            {
                m_edge = dynBlock->IsReverseOp() ? &dynBlock->gtOp2 : &dynBlock->gtOp1;
            }
            assert(*m_edge != nullptr);

            m_advance = &GenTreeUseEdgeIterator::AdvanceStoreDynBlk;
        }
            return;

        case GT_CALL:
            AdvanceCall<CALL_INSTANCE>();
            return;

        // Binary nodes
        default:
            assert(m_node->OperIsBinary());
            SetEntryStateForBinOp();
            return;
    }
}
9502
9503 //------------------------------------------------------------------------
9504 // GenTreeUseEdgeIterator::AdvanceCmpXchg: produces the next operand of a CmpXchg node and advances the state.
9505 //
9506 void GenTreeUseEdgeIterator::AdvanceCmpXchg()
9507 {
9508     switch (m_state)
9509     {
9510         case 0:
9511             m_edge  = &m_node->AsCmpXchg()->gtOpValue;
9512             m_state = 1;
9513             break;
9514         case 1:
9515             m_edge    = &m_node->AsCmpXchg()->gtOpComparand;
9516             m_advance = &GenTreeUseEdgeIterator::Terminate;
9517             break;
9518         default:
9519             unreached();
9520     }
9521
9522     assert(*m_edge != nullptr);
9523 }
9524
9525 //------------------------------------------------------------------------
9526 // GenTreeUseEdgeIterator::AdvanceBoundsChk: produces the next operand of a BoundsChk node and advances the state.
9527 //
9528 void GenTreeUseEdgeIterator::AdvanceBoundsChk()
9529 {
9530     m_edge = &m_node->AsBoundsChk()->gtArrLen;
9531     assert(*m_edge != nullptr);
9532     m_advance = &GenTreeUseEdgeIterator::Terminate;
9533 }
9534
9535 //------------------------------------------------------------------------
9536 // GenTreeUseEdgeIterator::AdvanceArrElem: produces the next operand of a ArrElem node and advances the state.
9537 //
9538 // Because these nodes are variadic, this function uses `m_state` to index into the list of array indices.
9539 //
9540 void GenTreeUseEdgeIterator::AdvanceArrElem()
9541 {
9542     if (m_state < m_node->AsArrElem()->gtArrRank)
9543     {
9544         m_edge = &m_node->AsArrElem()->gtArrInds[m_state];
9545         assert(*m_edge != nullptr);
9546         m_state++;
9547     }
9548     else
9549     {
9550         m_state = -1;
9551     }
9552 }
9553
9554 //------------------------------------------------------------------------
9555 // GenTreeUseEdgeIterator::AdvanceArrOffset: produces the next operand of a ArrOffset node and advances the state.
9556 //
9557 void GenTreeUseEdgeIterator::AdvanceArrOffset()
9558 {
9559     switch (m_state)
9560     {
9561         case 0:
9562             m_edge  = &m_node->AsArrOffs()->gtIndex;
9563             m_state = 1;
9564             break;
9565         case 1:
9566             m_edge    = &m_node->AsArrOffs()->gtArrObj;
9567             m_advance = &GenTreeUseEdgeIterator::Terminate;
9568             break;
9569         default:
9570             unreached();
9571     }
9572
9573     assert(*m_edge != nullptr);
9574 }
9575
9576 //------------------------------------------------------------------------
9577 // GenTreeUseEdgeIterator::AdvanceDynBlk: produces the next operand of a DynBlk node and advances the state.
9578 //
9579 void GenTreeUseEdgeIterator::AdvanceDynBlk()
9580 {
9581     GenTreeDynBlk* const dynBlock = m_node->AsDynBlk();
9582
9583     m_edge = dynBlock->gtEvalSizeFirst ? &dynBlock->gtOp1 : &dynBlock->gtDynamicSize;
9584     assert(*m_edge != nullptr);
9585     m_advance = &GenTreeUseEdgeIterator::Terminate;
9586 }
9587
9588 //------------------------------------------------------------------------
9589 // GenTreeUseEdgeIterator::AdvanceStoreDynBlk: produces the next operand of a StoreDynBlk node and advances the state.
9590 //
9591 // These nodes are moderately complicated but rare enough that templating this function is probably not
9592 // worth the extra complexity.
9593 //
9594 void GenTreeUseEdgeIterator::AdvanceStoreDynBlk()
9595 {
9596     GenTreeDynBlk* const dynBlock = m_node->AsDynBlk();
9597     if (dynBlock->gtEvalSizeFirst)
9598     {
9599         switch (m_state)
9600         {
9601             case 0:
9602                 m_edge  = dynBlock->IsReverseOp() ? &dynBlock->gtOp2 : &dynBlock->gtOp1;
9603                 m_state = 1;
9604                 break;
9605             case 1:
9606                 m_edge    = dynBlock->IsReverseOp() ? &dynBlock->gtOp1 : &dynBlock->gtOp2;
9607                 m_advance = &GenTreeUseEdgeIterator::Terminate;
9608                 break;
9609             default:
9610                 unreached();
9611         }
9612     }
9613     else
9614     {
9615         switch (m_state)
9616         {
9617             case 0:
9618                 m_edge  = dynBlock->IsReverseOp() ? &dynBlock->gtOp1 : &dynBlock->gtOp2;
9619                 m_state = 1;
9620                 break;
9621             case 1:
9622                 m_edge    = &dynBlock->gtDynamicSize;
9623                 m_advance = &GenTreeUseEdgeIterator::Terminate;
9624                 break;
9625             default:
9626                 unreached();
9627         }
9628     }
9629
9630     assert(*m_edge != nullptr);
9631 }
9632
9633 //------------------------------------------------------------------------
9634 // GenTreeUseEdgeIterator::AdvanceBinOp: produces the next operand of a binary node and advances the state.
9635 //
9636 // This function must be instantiated s.t. `ReverseOperands` is `true` iff the node is marked with the
9637 // `GTF_REVERSE_OPS` flag.
9638 //
9639 template <bool ReverseOperands>
9640 void           GenTreeUseEdgeIterator::AdvanceBinOp()
9641 {
9642     assert(ReverseOperands == ((m_node->gtFlags & GTF_REVERSE_OPS) != 0));
9643
9644     m_edge = !ReverseOperands ? &m_node->AsOp()->gtOp2 : &m_node->AsOp()->gtOp1;
9645     assert(*m_edge != nullptr);
9646     m_advance = &GenTreeUseEdgeIterator::Terminate;
9647 }
9648
9649 //------------------------------------------------------------------------
9650 // GenTreeUseEdgeIterator::SetEntryStateForBinOp: produces the first operand of a binary node and chooses
9651 //                                                the appropriate advance function.
9652 //
9653 void GenTreeUseEdgeIterator::SetEntryStateForBinOp()
9654 {
9655     assert(m_node != nullptr);
9656     assert(m_node->OperIsBinary());
9657
9658     GenTreeOp* const node = m_node->AsOp();
9659
9660     if (node->gtOp2 == nullptr)
9661     {
9662         assert(node->gtOp1 != nullptr);
9663         assert(node->NullOp2Legal());
9664         m_edge    = &node->gtOp1;
9665         m_advance = &GenTreeUseEdgeIterator::Terminate;
9666     }
9667     else if ((node->gtFlags & GTF_REVERSE_OPS) != 0)
9668     {
9669         m_edge    = &m_node->AsOp()->gtOp2;
9670         m_advance = &GenTreeUseEdgeIterator::AdvanceBinOp<true>;
9671     }
9672     else
9673     {
9674         m_edge    = &m_node->AsOp()->gtOp1;
9675         m_advance = &GenTreeUseEdgeIterator::AdvanceBinOp<false>;
9676     }
9677 }
9678
9679 //------------------------------------------------------------------------
9680 // GenTreeUseEdgeIterator::AdvanceList: produces the next operand of a variadic node and advances the state.
9681 //
9682 // This function does not use `m_state` for anything meaningful; it simply walks the `m_argList` until
9683 // there are no further entries.
9684 //
9685 void GenTreeUseEdgeIterator::AdvanceList()
9686 {
9687     assert(m_state == 0);
9688
9689     if (m_argList == nullptr)
9690     {
9691         m_state = -1;
9692     }
9693     else
9694     {
9695         GenTreeArgList* listNode = m_argList->AsArgList();
9696         m_edge                   = &listNode->gtOp1;
9697         m_argList                = listNode->Rest();
9698     }
9699 }
9700
9701 //------------------------------------------------------------------------
9702 // GenTreeUseEdgeIterator::SetEntryStateForList: produces the first operand of a list node.
9703 //
9704 void GenTreeUseEdgeIterator::SetEntryStateForList(GenTree* list)
9705 {
9706     m_argList = list;
9707     m_advance = &GenTreeUseEdgeIterator::AdvanceList;
9708     AdvanceList();
9709 }
9710
9711 //------------------------------------------------------------------------
9712 // GenTreeUseEdgeIterator::AdvanceCall: produces the next operand of a call node and advances the state.
9713 //
9714 // This function is a bit tricky: in order to avoid doing unnecessary work, it is instantiated with the
9715 // state number the iterator will be in when it is called. For example, `AdvanceCall<CALL_INSTANCE>`
9716 // is the instantiation used when the iterator is at the `CALL_INSTANCE` state (i.e. the entry state).
9717 // This sort of templating allows each state to avoid processing earlier states without unnecessary
9718 // duplication of code.
9719 //
9720 // Note that this method expands the argument lists (`gtCallArgs` and `gtCallLateArgs`) into their
9721 // component operands.
9722 //
9723 template <int state>
9724 void          GenTreeUseEdgeIterator::AdvanceCall()
9725 {
9726     GenTreeCall* const call = m_node->AsCall();
9727
9728     switch (state)
9729     {
9730         case CALL_INSTANCE:
9731             m_argList = call->gtCallArgs;
9732             m_advance = &GenTreeUseEdgeIterator::AdvanceCall<CALL_ARGS>;
9733             if (call->gtCallObjp != nullptr)
9734             {
9735                 m_edge = &call->gtCallObjp;
9736                 return;
9737             }
9738             __fallthrough;
9739
9740         case CALL_ARGS:
9741             if (m_argList != nullptr)
9742             {
9743                 GenTreeArgList* argNode = m_argList->AsArgList();
9744                 m_edge                  = &argNode->gtOp1;
9745                 m_argList               = argNode->Rest();
9746                 return;
9747             }
9748             m_argList = call->gtCallLateArgs;
9749             m_advance = &GenTreeUseEdgeIterator::AdvanceCall<CALL_LATE_ARGS>;
9750             __fallthrough;
9751
9752         case CALL_LATE_ARGS:
9753             if (m_argList != nullptr)
9754             {
9755                 GenTreeArgList* argNode = m_argList->AsArgList();
9756                 m_edge                  = &argNode->gtOp1;
9757                 m_argList               = argNode->Rest();
9758                 return;
9759             }
9760             m_advance = &GenTreeUseEdgeIterator::AdvanceCall<CALL_CONTROL_EXPR>;
9761             __fallthrough;
9762
9763         case CALL_CONTROL_EXPR:
9764             if (call->gtControlExpr != nullptr)
9765             {
9766                 if (call->gtCallType == CT_INDIRECT)
9767                 {
9768                     m_advance = &GenTreeUseEdgeIterator::AdvanceCall<CALL_COOKIE>;
9769                 }
9770                 else
9771                 {
9772                     m_advance = &GenTreeUseEdgeIterator::Terminate;
9773                 }
9774                 m_edge = &call->gtControlExpr;
9775                 return;
9776             }
9777             else if (call->gtCallType != CT_INDIRECT)
9778             {
9779                 m_state = -1;
9780                 return;
9781             }
9782             __fallthrough;
9783
9784         case CALL_COOKIE:
9785             assert(call->gtCallType == CT_INDIRECT);
9786
9787             m_advance = &GenTreeUseEdgeIterator::AdvanceCall<CALL_ADDRESS>;
9788             if (call->gtCallCookie != nullptr)
9789             {
9790                 m_edge = &call->gtCallCookie;
9791                 return;
9792             }
9793             __fallthrough;
9794
9795         case CALL_ADDRESS:
9796             assert(call->gtCallType == CT_INDIRECT);
9797
9798             m_advance = &GenTreeUseEdgeIterator::Terminate;
9799             if (call->gtCallAddr != nullptr)
9800             {
9801                 m_edge = &call->gtCallAddr;
9802             }
9803             return;
9804
9805         default:
9806             unreached();
9807     }
9808 }
9809
9810 //------------------------------------------------------------------------
9811 // GenTreeUseEdgeIterator::Terminate: advances the iterator to the terminal state.
9812 //
9813 void GenTreeUseEdgeIterator::Terminate()
9814 {
9815     m_state = -1;
9816 }
9817
9818 //------------------------------------------------------------------------
9819 // GenTreeUseEdgeIterator::operator++: advances the iterator to the next operand.
9820 //
9821 GenTreeUseEdgeIterator& GenTreeUseEdgeIterator::operator++()
9822 {
9823     // If we've reached the terminal state, do nothing.
9824     if (m_state != -1)
9825     {
9826         (this->*m_advance)();
9827     }
9828
9829     return *this;
9830 }
9831
// Returns an iterator positioned at this node's first use edge (pointer to an operand slot).
GenTreeUseEdgeIterator GenTree::UseEdgesBegin()
{
    return GenTreeUseEdgeIterator(this);
}
9836
// Returns the terminal (end) use-edge iterator; the default constructor produces the -1 state.
GenTreeUseEdgeIterator GenTree::UseEdgesEnd()
{
    return GenTreeUseEdgeIterator();
}
9841
// Returns a begin/end iterator pair for range-based iteration over this node's use edges.
IteratorPair<GenTreeUseEdgeIterator> GenTree::UseEdges()
{
    return MakeIteratorPair(UseEdgesBegin(), UseEdgesEnd());
}
9846
// Returns an iterator positioned at this node's first operand (the node a use edge points to).
GenTreeOperandIterator GenTree::OperandsBegin()
{
    return GenTreeOperandIterator(this);
}
9851
// Returns the terminal (end) operand iterator.
GenTreeOperandIterator GenTree::OperandsEnd()
{
    return GenTreeOperandIterator();
}
9856
// Returns a begin/end iterator pair for range-based iteration over this node's operands.
IteratorPair<GenTreeOperandIterator> GenTree::Operands()
{
    return MakeIteratorPair(OperandsBegin(), OperandsEnd());
}
9861
9862 bool GenTree::Precedes(GenTree* other)
9863 {
9864     assert(other != nullptr);
9865
9866     for (GenTree* node = gtNext; node != nullptr; node = node->gtNext)
9867     {
9868         if (node == other)
9869         {
9870             return true;
9871         }
9872     }
9873
9874     return false;
9875 }
9876
9877 #ifdef DEBUG
9878
//------------------------------------------------------------------------
// GenTree::gtDispFlags: prints a one-character column for each displayable node flag.
//
// Arguments:
//    flags      - the node's gtFlags value
//    debugFlags - the node's gtDebugFlags value
//
// Return Value:
//    The number of characters printed, so callers can pad the dump columns consistently.
//
/* static */ int GenTree::gtDispFlags(unsigned flags, unsigned debugFlags)
{
    int charsDisplayed = 11; // 11 is the "baseline" number of flag characters displayed

#ifdef LEGACY_BACKEND
    printf("%c", (flags & GTF_ASG) ? 'A' : '-');
#else  // !LEGACY_BACKEND
    // RyuJIT also reports containment ('c') in the assignment column when GTF_ASG is clear.
    printf("%c", (flags & GTF_ASG) ? 'A' : (IsContained(flags) ? 'c' : '-'));
#endif // LEGACY_BACKEND
    printf("%c", (flags & GTF_CALL) ? 'C' : '-');
    printf("%c", (flags & GTF_EXCEPT) ? 'X' : '-');
    printf("%c", (flags & GTF_GLOB_REF) ? 'G' : '-');
    printf("%c", (debugFlags & GTF_DEBUG_NODE_MORPHED) ? '+' : // First print '+' if GTF_DEBUG_NODE_MORPHED is set
                     (flags & GTF_ORDER_SIDEEFF) ? 'O' : '-'); // otherwise print 'O' or '-'
    printf("%c", (flags & GTF_COLON_COND) ? '?' : '-');
    printf("%c", (flags & GTF_DONT_CSE) ? 'N' :           // N is for No cse
                     (flags & GTF_MAKE_CSE) ? 'H' : '-'); // H is for Hoist this expr
    printf("%c", (flags & GTF_REVERSE_OPS) ? 'R' : '-');
    printf("%c", (flags & GTF_UNSIGNED) ? 'U' : (flags & GTF_BOOLEAN) ? 'B' : '-');
#if FEATURE_SET_FLAGS
    // This column only exists on targets that track condition-flag setting.
    printf("%c", (flags & GTF_SET_FLAGS) ? 'S' : '-');
    ++charsDisplayed;
#endif
    printf("%c", (flags & GTF_LATE_ARG) ? 'L' : '-');
    // 'z' = already spilled, 'Z' = marked for spill.
    printf("%c", (flags & GTF_SPILLED) ? 'z' : (flags & GTF_SPILL) ? 'Z' : '-');

    return charsDisplayed;
}
9907
9908 /*****************************************************************************/
9909
//------------------------------------------------------------------------
// Compiler::gtDispNodeName: prints the operator name of a node, decorated with
// oper-specific detail (call kind, array rank/dimension, address mode shape,
// bounds-check kind, overflow marker, block size).
//
// Arguments:
//    tree - the node whose name is printed
//
// The formatted name is built into a local buffer and then printed left-padded
// to at least 10 characters so dump columns line up.
//
void Compiler::gtDispNodeName(GenTree* tree)
{
    /* print the node name */

    const char* name;

    assert(tree);
    if (tree->gtOper < GT_COUNT)
    {
        name = GenTree::OpName(tree->OperGet());
    }
    else
    {
        // Out-of-range opcode: fall back to a sentinel rather than indexing off the table.
        name = "<ERROR>";
    }
    char  buf[32];
    char* bufp = &buf[0];

    if ((tree->gtOper == GT_CNS_INT) && tree->IsIconHandle())
    {
        // Handle constants get an "(h)" suffix.
        sprintf_s(bufp, sizeof(buf), " %s(h)%c", name, 0);
    }
    else if (tree->gtOper == GT_PUTARG_STK)
    {
        // Show the outgoing-arg stack offset.
        sprintf_s(bufp, sizeof(buf), " %s [+0x%02x]%c", name, tree->AsPutArgStk()->getArgOffset(), 0);
    }
    else if (tree->gtOper == GT_CALL)
    {
        // Calls get a multi-part suffix: call kind, calling-convention type, and flags.
        const char* callType = "CALL";
        const char* gtfType  = "";
        const char* ctType   = "";
        char        gtfTypeBuf[100];

        if (tree->gtCall.gtCallType == CT_USER_FUNC)
        {
            if (tree->gtCall.IsVirtual())
            {
                callType = "CALLV";
            }
        }
        else if (tree->gtCall.gtCallType == CT_HELPER)
        {
            ctType = " help";
        }
        else if (tree->gtCall.gtCallType == CT_INDIRECT)
        {
            ctType = " ind";
        }
        else
        {
            assert(!"Unknown gtCallType");
        }

        if (tree->gtFlags & GTF_CALL_NULLCHECK)
        {
            gtfType = " nullcheck";
        }
        if (tree->gtCall.IsVirtualVtable())
        {
            gtfType = " ind";
        }
        else if (tree->gtCall.IsVirtualStub())
        {
            gtfType = " stub";
        }
#ifdef FEATURE_READYTORUN_COMPILER
        else if (tree->gtCall.IsR2RRelativeIndir())
        {
            gtfType = " r2r_ind";
        }
#endif // FEATURE_READYTORUN_COMPILER
        else if (tree->gtFlags & GTF_CALL_UNMANAGED)
        {
            // Unmanaged calls accumulate several optional markers into gtfTypeBuf.
            char* gtfTypeBufWalk = gtfTypeBuf;
            gtfTypeBufWalk += SimpleSprintf_s(gtfTypeBufWalk, gtfTypeBuf, sizeof(gtfTypeBuf), " unman");
            if (tree->gtFlags & GTF_CALL_POP_ARGS)
            {
                gtfTypeBufWalk += SimpleSprintf_s(gtfTypeBufWalk, gtfTypeBuf, sizeof(gtfTypeBuf), " popargs");
            }
            if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_UNMGD_THISCALL)
            {
                gtfTypeBufWalk += SimpleSprintf_s(gtfTypeBufWalk, gtfTypeBuf, sizeof(gtfTypeBuf), " thiscall");
            }
            gtfType = gtfTypeBuf;
        }

        sprintf_s(bufp, sizeof(buf), " %s%s%s%c", callType, ctType, gtfType, 0);
    }
    else if (tree->gtOper == GT_ARR_ELEM)
    {
        // Print "[,,...]" with rank-1 commas to show the array rank.
        bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), " %s[", name);
        for (unsigned rank = tree->gtArrElem.gtArrRank - 1; rank; rank--)
        {
            bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), ",");
        }
        SimpleSprintf_s(bufp, buf, sizeof(buf), "]");
    }
    else if (tree->gtOper == GT_ARR_OFFSET || tree->gtOper == GT_ARR_INDEX)
    {
        bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), " %s[", name);
        unsigned char currDim;
        unsigned char rank;
        if (tree->gtOper == GT_ARR_OFFSET)
        {
            currDim = tree->gtArrOffs.gtCurrDim;
            rank    = tree->gtArrOffs.gtArrRank;
        }
        else
        {
            currDim = tree->gtArrIndex.gtCurrDim;
            rank    = tree->gtArrIndex.gtArrRank;
        }

        for (unsigned char dim = 0; dim < rank; dim++)
        {
            // Use a defacto standard i,j,k for the dimensions.
            // Note that we only support up to rank 3 arrays with these nodes, so we won't run out of characters.
            char dimChar = '*';
            if (dim == currDim)
            {
                dimChar = 'i' + dim;
            }
            else if (dim > currDim)
            {
                dimChar = ' ';
            }

            bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "%c", dimChar);
            if (dim != rank - 1)
            {
                bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), ",");
            }
        }
        SimpleSprintf_s(bufp, buf, sizeof(buf), "]");
    }
    else if (tree->gtOper == GT_LEA)
    {
        // Print the address-mode shape, e.g. "LEA(b+(i*4)+8)".
        GenTreeAddrMode* lea = tree->AsAddrMode();
        bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), " %s(", name);
        if (lea->Base() != nullptr)
        {
            bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "b+");
        }
        if (lea->Index() != nullptr)
        {
            bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "(i*%d)+", lea->gtScale);
        }
        bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "%d)", lea->Offset());
    }
    else if (tree->gtOper == GT_ARR_BOUNDS_CHECK)
    {
        // Suffix indicates which exception the check throws on failure.
        switch (tree->gtBoundsChk.gtThrowKind)
        {
            case SCK_RNGCHK_FAIL:
                sprintf_s(bufp, sizeof(buf), " %s_Rng", name);
                break;
            case SCK_ARG_EXCPN:
                sprintf_s(bufp, sizeof(buf), " %s_Arg", name);
                break;
            case SCK_ARG_RNG_EXCPN:
                sprintf_s(bufp, sizeof(buf), " %s_ArgRng", name);
                break;
            default:
                unreached();
        }
    }
    else if (tree->gtOverflowEx())
    {
        sprintf_s(bufp, sizeof(buf), " %s_ovfl%c", name, 0);
    }
    else if (tree->OperIsBlk() && !tree->OperIsDynBlk())
    {
        // Fixed-size block ops show their size in bytes.
        sprintf_s(bufp, sizeof(buf), " %s(%d)", name, tree->AsBlk()->gtBlkSize);
    }
    else
    {
        sprintf_s(bufp, sizeof(buf), " %s%c", name, 0);
    }

    // Pad short names so the dump columns stay aligned.
    if (strlen(buf) < 10)
    {
        printf(" %-10s", buf);
    }
    else
    {
        printf(" %s", buf);
    }
}
10098
10099 void Compiler::gtDispVN(GenTree* tree)
10100 {
10101     if (tree->gtVNPair.GetLiberal() != ValueNumStore::NoVN)
10102     {
10103         assert(tree->gtVNPair.GetConservative() != ValueNumStore::NoVN);
10104         printf(" ");
10105         vnpPrint(tree->gtVNPair, 0);
10106     }
10107 }
10108
10109 //------------------------------------------------------------------------
10110 // gtDispNode: Print a tree to jitstdout.
10111 //
10112 // Arguments:
10113 //    tree - the tree to be printed
10114 //    indentStack - the specification for the current level of indentation & arcs
10115 //    msg         - a contextual method (i.e. from the parent) to print
10116 //
10117 // Return Value:
10118 //    None.
10119 //
10120 // Notes:
10121 //    'indentStack' may be null, in which case no indentation or arcs are printed
10122 //    'msg' may be null
10123
10124 void Compiler::gtDispNode(GenTree* tree, IndentStack* indentStack, __in __in_z __in_opt const char* msg, bool isLIR)
10125 {
10126     bool printPointer = true; // always true..
10127     bool printFlags   = true; // always true..
10128     bool printCost    = true; // always true..
10129
10130     int msgLength = 25;
10131
10132     GenTree* prev;
10133
10134     if (tree->gtSeqNum)
10135     {
10136         printf("N%03u ", tree->gtSeqNum);
10137         if (tree->gtCostsInitialized)
10138         {
10139             printf("(%3u,%3u) ", tree->gtCostEx, tree->gtCostSz);
10140         }
10141         else
10142         {
10143             printf("(???"
10144                    ",???"
10145                    ") "); // This probably indicates a bug: the node has a sequence number, but not costs.
10146         }
10147     }
10148     else
10149     {
10150         if (tree->gtOper == GT_STMT)
10151         {
10152             prev = tree->gtStmt.gtStmtExpr;
10153         }
10154         else
10155         {
10156             prev = tree;
10157         }
10158
10159         bool     hasSeqNum = true;
10160         unsigned dotNum    = 0;
10161         do
10162         {
10163             dotNum++;
10164             prev = prev->gtPrev;
10165
10166             if ((prev == nullptr) || (prev == tree))
10167             {
10168                 hasSeqNum = false;
10169                 break;
10170             }
10171
10172             assert(prev);
10173         } while (prev->gtSeqNum == 0);
10174
10175         // If we have an indent stack, don't add additional characters,
10176         // as it will mess up the alignment.
10177         bool displayDotNum = tree->gtOper != GT_STMT && hasSeqNum && (indentStack == nullptr);
10178         if (displayDotNum)
10179         {
10180             printf("N%03u.%02u ", prev->gtSeqNum, dotNum);
10181         }
10182         else
10183         {
10184             printf("     ");
10185         }
10186
10187         if (tree->gtCostsInitialized)
10188         {
10189             printf("(%3u,%3u) ", tree->gtCostEx, tree->gtCostSz);
10190         }
10191         else
10192         {
10193             if (displayDotNum)
10194             {
10195                 // Do better alignment in this case
10196                 printf("       ");
10197             }
10198             else
10199             {
10200                 printf("          ");
10201             }
10202         }
10203     }
10204
10205     if (optValnumCSE_phase)
10206     {
10207         if (IS_CSE_INDEX(tree->gtCSEnum))
10208         {
10209             printf("CSE #%02d (%s)", GET_CSE_INDEX(tree->gtCSEnum), (IS_CSE_USE(tree->gtCSEnum) ? "use" : "def"));
10210         }
10211         else
10212         {
10213             printf("             ");
10214         }
10215     }
10216
10217     /* Print the node ID */
10218     printTreeID(tree);
10219     printf(" ");
10220
10221     if (tree->gtOper >= GT_COUNT)
10222     {
10223         printf(" **** ILLEGAL NODE ****");
10224         return;
10225     }
10226
10227     if (printFlags)
10228     {
10229         /* First print the flags associated with the node */
10230         switch (tree->gtOper)
10231         {
10232             case GT_LEA:
10233             case GT_BLK:
10234             case GT_OBJ:
10235             case GT_DYN_BLK:
10236             case GT_STORE_BLK:
10237             case GT_STORE_OBJ:
10238             case GT_STORE_DYN_BLK:
10239
10240             case GT_IND:
10241                 // We prefer printing V or U
10242                 if ((tree->gtFlags & (GTF_IND_VOLATILE | GTF_IND_UNALIGNED)) == 0)
10243                 {
10244                     if (tree->gtFlags & GTF_IND_TGTANYWHERE)
10245                     {
10246                         printf("*");
10247                         --msgLength;
10248                         break;
10249                     }
10250                     if (tree->gtFlags & GTF_IND_INVARIANT)
10251                     {
10252                         printf("#");
10253                         --msgLength;
10254                         break;
10255                     }
10256                     if (tree->gtFlags & GTF_IND_ARR_INDEX)
10257                     {
10258                         printf("a");
10259                         --msgLength;
10260                         break;
10261                     }
10262                     if (tree->gtFlags & GTF_IND_NONFAULTING)
10263                     {
10264                         printf("x");
10265                         --msgLength;
10266                         break;
10267                     }
10268                 }
10269                 __fallthrough;
10270
10271             case GT_INDEX:
10272             case GT_INDEX_ADDR:
10273
10274                 if ((tree->gtFlags & (GTF_IND_VOLATILE | GTF_IND_UNALIGNED)) == 0) // We prefer printing V or U over R
10275                 {
10276                     if (tree->gtFlags & GTF_INX_REFARR_LAYOUT)
10277                     {
10278                         printf("R");
10279                         --msgLength;
10280                         break;
10281                     } // R means RefArray
10282                 }
10283                 __fallthrough;
10284
10285             case GT_FIELD:
10286             case GT_CLS_VAR:
10287                 if (tree->gtFlags & GTF_IND_VOLATILE)
10288                 {
10289                     printf("V");
10290                     --msgLength;
10291                     break;
10292                 }
10293                 if (tree->gtFlags & GTF_IND_UNALIGNED)
10294                 {
10295                     printf("U");
10296                     --msgLength;
10297                     break;
10298                 }
10299                 goto DASH;
10300
10301             case GT_ASG:
10302                 if (tree->OperIsInitBlkOp())
10303                 {
10304                     printf("I");
10305                     --msgLength;
10306                     break;
10307                 }
10308                 goto DASH;
10309
10310             case GT_CALL:
10311                 if (tree->gtFlags & GTF_CALL_INLINE_CANDIDATE)
10312                 {
10313                     printf("I");
10314                     --msgLength;
10315                     break;
10316                 }
10317                 if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_RETBUFFARG)
10318                 {
10319                     printf("S");
10320                     --msgLength;
10321                     break;
10322                 }
10323                 if (tree->gtFlags & GTF_CALL_HOISTABLE)
10324                 {
10325                     printf("H");
10326                     --msgLength;
10327                     break;
10328                 }
10329
10330                 goto DASH;
10331
10332             case GT_MUL:
10333 #if !defined(_TARGET_64BIT_) && !defined(LEGACY_BACKEND)
10334             case GT_MUL_LONG:
10335 #endif
10336                 if (tree->gtFlags & GTF_MUL_64RSLT)
10337                 {
10338                     printf("L");
10339                     --msgLength;
10340                     break;
10341                 }
10342                 goto DASH;
10343
10344             case GT_ADDR:
10345                 if (tree->gtFlags & GTF_ADDR_ONSTACK)
10346                 {
10347                     printf("L");
10348                     --msgLength;
10349                     break;
10350                 } // L means LclVar
10351                 goto DASH;
10352
10353             case GT_LCL_FLD:
10354             case GT_LCL_VAR:
10355             case GT_LCL_VAR_ADDR:
10356             case GT_LCL_FLD_ADDR:
10357             case GT_STORE_LCL_FLD:
10358             case GT_STORE_LCL_VAR:
10359             case GT_REG_VAR:
10360                 if (tree->gtFlags & GTF_VAR_USEASG)
10361                 {
10362                     printf("U");
10363                     --msgLength;
10364                     break;
10365                 }
10366                 if (tree->gtFlags & GTF_VAR_DEF)
10367                 {
10368                     printf("D");
10369                     --msgLength;
10370                     break;
10371                 }
10372                 if (tree->gtFlags & GTF_VAR_CAST)
10373                 {
10374                     printf("C");
10375                     --msgLength;
10376                     break;
10377                 }
10378                 if (tree->gtFlags & GTF_VAR_ARR_INDEX)
10379                 {
10380                     printf("i");
10381                     --msgLength;
10382                     break;
10383                 }
10384                 goto DASH;
10385
10386             case GT_EQ:
10387             case GT_NE:
10388             case GT_LT:
10389             case GT_LE:
10390             case GT_GE:
10391             case GT_GT:
10392 #ifndef LEGACY_BACKEND
10393             case GT_TEST_EQ:
10394             case GT_TEST_NE:
10395 #endif
10396                 if (tree->gtFlags & GTF_RELOP_NAN_UN)
10397                 {
10398                     printf("N");
10399                     --msgLength;
10400                     break;
10401                 }
10402                 if (tree->gtFlags & GTF_RELOP_JMP_USED)
10403                 {
10404                     printf("J");
10405                     --msgLength;
10406                     break;
10407                 }
10408                 if (tree->gtFlags & GTF_RELOP_QMARK)
10409                 {
10410                     printf("Q");
10411                     --msgLength;
10412                     break;
10413                 }
10414                 goto DASH;
10415
10416             case GT_JCMP:
10417                 printf((tree->gtFlags & GTF_JCMP_TST) ? "T" : "C");
10418                 printf((tree->gtFlags & GTF_JCMP_EQ) ? "EQ" : "NE");
10419                 goto DASH;
10420
10421             case GT_FIELD_LIST:
10422                 if (tree->gtFlags & GTF_FIELD_LIST_HEAD)
10423                 {
10424                     printf("H");
10425                     --msgLength;
10426                     break;
10427                 }
10428                 goto DASH;
10429
10430             default:
10431             DASH:
10432                 printf("-");
10433                 --msgLength;
10434                 break;
10435         }
10436
10437         /* Then print the general purpose flags */
10438         unsigned flags = tree->gtFlags;
10439
10440         if (tree->OperIsBinary())
10441         {
10442             genTreeOps oper = tree->OperGet();
10443
10444             // Check for GTF_ADDRMODE_NO_CSE flag on add/mul/shl Binary Operators
10445             if ((oper == GT_ADD) || (oper == GT_MUL) || (oper == GT_LSH))
10446             {
10447                 if ((tree->gtFlags & GTF_ADDRMODE_NO_CSE) != 0)
10448                 {
10449                     flags |= GTF_DONT_CSE; // Force the GTF_ADDRMODE_NO_CSE flag to print out like GTF_DONT_CSE
10450                 }
10451             }
10452         }
10453         else // !tree->OperIsBinary()
10454         {
10455             // the GTF_REVERSE flag only applies to binary operations
10456             flags &= ~GTF_REVERSE_OPS; // we use this value for GTF_VAR_ARR_INDEX above
10457         }
10458
10459         msgLength -= GenTree::gtDispFlags(flags, tree->gtDebugFlags);
10460 /*
10461     printf("%c", (flags & GTF_ASG           ) ? 'A' : '-');
10462     printf("%c", (flags & GTF_CALL          ) ? 'C' : '-');
10463     printf("%c", (flags & GTF_EXCEPT        ) ? 'X' : '-');
10464     printf("%c", (flags & GTF_GLOB_REF      ) ? 'G' : '-');
10465     printf("%c", (flags & GTF_ORDER_SIDEEFF ) ? 'O' : '-');
10466     printf("%c", (flags & GTF_COLON_COND    ) ? '?' : '-');
10467     printf("%c", (flags & GTF_DONT_CSE      ) ? 'N' :        // N is for No cse
10468                  (flags & GTF_MAKE_CSE      ) ? 'H' : '-');  // H is for Hoist this expr
10469     printf("%c", (flags & GTF_REVERSE_OPS   ) ? 'R' : '-');
10470     printf("%c", (flags & GTF_UNSIGNED      ) ? 'U' :
10471                  (flags & GTF_BOOLEAN       ) ? 'B' : '-');
10472     printf("%c", (flags & GTF_SET_FLAGS     ) ? 'S' : '-');
10473     printf("%c", (flags & GTF_SPILLED       ) ? 'z' : '-');
10474     printf("%c", (flags & GTF_SPILL         ) ? 'Z' : '-');
10475 */
10476
10477 #if FEATURE_STACK_FP_X87
10478         BYTE fpLvl = (BYTE)tree->gtFPlvl;
10479         if (IsUninitialized(fpLvl) || fpLvl == 0x00)
10480         {
10481             printf("-");
10482         }
10483         else
10484         {
10485             printf("%1u", tree->gtFPlvl);
10486         }
10487 #endif // FEATURE_STACK_FP_X87
10488     }
10489
10490     // If we're printing a node for LIR, we use the space normally associated with the message
10491     // to display the node's temp name (if any)
10492     const bool hasOperands = tree->OperandsBegin() != tree->OperandsEnd();
10493     if (isLIR)
10494     {
10495         assert(msg == nullptr);
10496
10497         // If the tree does not have any operands, we do not display the indent stack. This gives us
10498         // two additional characters for alignment.
10499         if (!hasOperands)
10500         {
10501             msgLength += 1;
10502         }
10503
10504         if (tree->IsValue())
10505         {
10506             const size_t bufLength = msgLength - 1;
10507             msg                    = reinterpret_cast<char*>(alloca(bufLength * sizeof(char)));
10508             sprintf_s(const_cast<char*>(msg), bufLength, "t%d = %s", tree->gtTreeID, hasOperands ? "" : " ");
10509         }
10510     }
10511
10512     /* print the msg associated with the node */
10513
10514     if (msg == nullptr)
10515     {
10516         msg = "";
10517     }
10518     if (msgLength < 0)
10519     {
10520         msgLength = 0;
10521     }
10522
10523     printf(isLIR ? " %+*s" : " %-*s", msgLength, msg);
10524
10525     /* Indent the node accordingly */
10526     if (!isLIR || hasOperands)
10527     {
10528         printIndent(indentStack);
10529     }
10530
10531     gtDispNodeName(tree);
10532
10533     assert(tree == nullptr || tree->gtOper < GT_COUNT);
10534
10535     if (tree)
10536     {
10537         /* print the type of the node */
10538         if (tree->gtOper != GT_CAST)
10539         {
10540             printf(" %-6s", varTypeName(tree->TypeGet()));
10541             if (tree->gtOper == GT_LCL_VAR || tree->gtOper == GT_STORE_LCL_VAR)
10542             {
10543                 LclVarDsc* varDsc = &lvaTable[tree->gtLclVarCommon.gtLclNum];
10544                 if (varDsc->lvAddrExposed)
10545                 {
10546                     printf("(AX)"); // Variable has address exposed.
10547                 }
10548
10549                 if (varDsc->lvUnusedStruct)
10550                 {
10551                     assert(varDsc->lvPromoted);
10552                     printf("(U)"); // Unused struct
10553                 }
10554                 else if (varDsc->lvPromoted)
10555                 {
10556                     if (varTypeIsPromotable(varDsc))
10557                     {
10558                         printf("(P)"); // Promoted struct
10559                     }
10560                     else
10561                     {
10562                         // Promoted implicit by-refs can have this state during
10563                         // global morph while they are being rewritten
10564                         assert(fgGlobalMorph);
10565                         printf("(P?!)"); // Promoted struct
10566                     }
10567                 }
10568             }
10569
10570             if (tree->gtOper == GT_STMT)
10571             {
10572                 if (opts.compDbgInfo)
10573                 {
10574                     IL_OFFSET endIL = tree->gtStmt.gtStmtLastILoffs;
10575
10576                     printf("(IL ");
10577                     if (tree->gtStmt.gtStmtILoffsx == BAD_IL_OFFSET)
10578                     {
10579                         printf("  ???");
10580                     }
10581                     else
10582                     {
10583                         printf("0x%03X", jitGetILoffs(tree->gtStmt.gtStmtILoffsx));
10584                     }
10585                     printf("...");
10586                     if (endIL == BAD_IL_OFFSET)
10587                     {
10588                         printf("  ???");
10589                     }
10590                     else
10591                     {
10592                         printf("0x%03X", endIL);
10593                     }
10594                     printf(")");
10595                 }
10596             }
10597
10598             if (tree->IsArgPlaceHolderNode() && (tree->gtArgPlace.gtArgPlaceClsHnd != nullptr))
10599             {
10600                 printf(" => [clsHnd=%08X]", dspPtr(tree->gtArgPlace.gtArgPlaceClsHnd));
10601             }
10602
10603             if (tree->gtOper == GT_RUNTIMELOOKUP)
10604             {
10605 #ifdef _TARGET_64BIT_
10606                 printf(" 0x%llx", dspPtr(tree->gtRuntimeLookup.gtHnd));
10607 #else
10608                 printf(" 0x%x", dspPtr(tree->gtRuntimeLookup.gtHnd));
10609 #endif
10610
10611                 switch (tree->gtRuntimeLookup.gtHndType)
10612                 {
10613                     case CORINFO_HANDLETYPE_CLASS:
10614                         printf(" class");
10615                         break;
10616                     case CORINFO_HANDLETYPE_METHOD:
10617                         printf(" method");
10618                         break;
10619                     case CORINFO_HANDLETYPE_FIELD:
10620                         printf(" field");
10621                         break;
10622                     default:
10623                         printf(" unknown");
10624                         break;
10625                 }
10626             }
10627         }
10628
10629         // for tracking down problems in reguse prediction or liveness tracking
10630
10631         if (verbose && 0)
10632         {
10633             printf(" RR=");
10634             dspRegMask(tree->gtRsvdRegs);
10635 #ifdef LEGACY_BACKEND
10636             printf(",UR=");
10637             dspRegMask(tree->gtUsedRegs);
10638 #endif // LEGACY_BACKEND
10639             printf("\n");
10640         }
10641     }
10642 }
10643
//------------------------------------------------------------------------
// gtDispRegVal: Print the register(s) assigned to 'tree', if any, as part
// of the node dump.
//
// Arguments:
//    tree - the node whose register assignment is printed
//
// Notes:
//    Nodes with no register tag print nothing. For multi-reg call nodes
//    (and copies/reloads of them), the registers beyond the first are
//    appended as a comma-separated list.
void Compiler::gtDispRegVal(GenTree* tree)
{
    switch (tree->GetRegTag())
    {
        // Don't display NOREG; the absence of this tag will imply this state
        // case GenTree::GT_REGTAG_NONE:       printf(" NOREG");   break;

        case GenTree::GT_REGTAG_REG:
            printf(" REG %s", compRegVarName(tree->gtRegNum));
            break;

#if CPU_LONG_USES_REGPAIR
        case GenTree::GT_REGTAG_REGPAIR:
            printf(" PAIR %s", compRegPairName(tree->gtRegPair));
            break;
#endif

        default:
            break;
    }

    if (tree->IsMultiRegCall())
    {
        // 0th reg is gtRegNum, which is already printed above.
        // Print the remaining regs of a multi-reg call node.
        GenTreeCall* call     = tree->AsCall();
        unsigned     regCount = call->GetReturnTypeDesc()->TryGetReturnRegCount();
        for (unsigned i = 1; i < regCount; ++i)
        {
            printf(",%s", compRegVarName(call->GetRegNumByIdx(i)));
        }
    }
    else if (tree->IsCopyOrReloadOfMultiRegCall())
    {
        // A copy/reload wraps the call; its own per-index regs mirror the
        // call's return-reg count, so print indices 1..regCount-1.
        GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
        GenTreeCall*         call         = tree->gtGetOp1()->AsCall();
        unsigned             regCount     = call->GetReturnTypeDesc()->TryGetReturnRegCount();
        for (unsigned i = 1; i < regCount; ++i)
        {
            printf(",%s", compRegVarName(copyOrReload->GetRegNumByIdx(i)));
        }
    }

#if FEATURE_MULTIREG_RET
    if (tree->IsCopyOrReload())
    {
        // Print any additional regs of a copy/reload node; REG_NA marks the
        // end of the used entries.
        for (int i = 1; i < MAX_RET_REG_COUNT; i++)
        {
            regNumber reg = (regNumber)tree->AsCopyOrReload()->GetRegNumByIdx(i);
            if (reg == REG_NA)
            {
                break;
            }
            printf(",%s", compRegVarName(reg));
        }
    }
#endif

#if !defined(LEGACY_BACKEND) && defined(_TARGET_ARM_)
    // On ARM, multi-reg ops may carry a second register in gtOtherReg.
    if (tree->OperIsMultiRegOp() && (tree->AsMultiRegOp()->gtOtherReg != REG_NA))
    {
        printf(",%s", compRegVarName(tree->AsMultiRegOp()->gtOtherReg));
    }
#endif

#ifdef LEGACY_BACKEND
    // "RV" marks a node whose value currently lives in a register.
    if (tree->InReg())
    {
        printf(" RV");
    }
#endif
}
10716
// We usually don't expect to print anything longer than this string.
10718 #define LONGEST_COMMON_LCL_VAR_DISPLAY "V99 PInvokeFrame"
10719 #define LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH (sizeof(LONGEST_COMMON_LCL_VAR_DISPLAY))
10720 #define BUF_SIZE (LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH * 2)
10721
//------------------------------------------------------------------------
// gtGetLclVarNameInfo: Classify local var 'lclNum' for debug display.
//
// Arguments:
//    lclNum    - the local variable number
//    ilKindOut - [out] a kind prefix such as "arg", "loc", "tmp", "cse",
//                "rat", or nullptr when a full name is available instead
//    ilNameOut - [out] a full display name (e.g. "this", "RetBuf",
//                "OutArgs"), or nullptr when only a kind/number applies
//    ilNumOut  - [out] the IL-relative number to print after the kind
//
// Notes:
//    At most one of *ilKindOut / *ilNameOut is set non-null. The mapping
//    first checks the special ICorDebugInfo pseudo-numbers, then the
//    compiler-introduced temps (CSE, rationalizer, frame bookkeeping
//    variables), and finally falls back to arg/loc numbering.
void Compiler::gtGetLclVarNameInfo(unsigned lclNum, const char** ilKindOut, const char** ilNameOut, unsigned* ilNumOut)
{
    const char* ilKind = nullptr;
    const char* ilName = nullptr;

    unsigned ilNum = compMap2ILvarNum(lclNum);

    // Specially-mapped IL pseudo-numbers get fixed display names.
    if (ilNum == (unsigned)ICorDebugInfo::RETBUF_ILNUM)
    {
        ilName = "RetBuf";
    }
    else if (ilNum == (unsigned)ICorDebugInfo::VARARGS_HND_ILNUM)
    {
        ilName = "VarArgHandle";
    }
    else if (ilNum == (unsigned)ICorDebugInfo::TYPECTXT_ILNUM)
    {
        ilName = "TypeCtx";
    }
    else if (ilNum == (unsigned)ICorDebugInfo::UNKNOWN_ILNUM)
    {
        // No IL mapping: this is a compiler-introduced local.
#if FEATURE_ANYCSE
        if (lclNumIsTrueCSE(lclNum))
        {
            ilKind = "cse";
            ilNum  = lclNum - optCSEstart;
        }
        else if (lclNum >= optCSEstart)
        {
            // Currently any new LclVar's introduced after the CSE phase
            // are believed to be created by the "rationalizer" that is what is meant by the "rat" prefix.
            ilKind = "rat";
            ilNum  = lclNum - (optCSEstart + optCSEcount);
        }
        else
#endif // FEATURE_ANYCSE
        {
            // Check the various well-known frame-bookkeeping locals.
            if (lclNum == info.compLvFrameListRoot)
            {
                ilName = "FramesRoot";
            }
            else if (lclNum == lvaInlinedPInvokeFrameVar)
            {
                ilName = "PInvokeFrame";
            }
            else if (lclNum == lvaGSSecurityCookie)
            {
                ilName = "GsCookie";
            }
#if FEATURE_FIXED_OUT_ARGS
            else if (lclNum == lvaPInvokeFrameRegSaveVar)
            {
                ilName = "PInvokeFrameRegSave";
            }
            else if (lclNum == lvaOutgoingArgSpaceVar)
            {
                ilName = "OutArgs";
            }
#endif // FEATURE_FIXED_OUT_ARGS
#ifdef _TARGET_ARM_
            else if (lclNum == lvaPromotedStructAssemblyScratchVar)
            {
                ilName = "PromotedStructScratch";
            }
#endif // _TARGET_ARM_
#if !FEATURE_EH_FUNCLETS
            else if (lclNum == lvaShadowSPslotsVar)
            {
                ilName = "EHSlots";
            }
#endif // !FEATURE_EH_FUNCLETS
            else if (lclNum == lvaLocAllocSPvar)
            {
                ilName = "LocAllocSP";
            }
#if FEATURE_EH_FUNCLETS
            else if (lclNum == lvaPSPSym)
            {
                ilName = "PSPSym";
            }
#endif // FEATURE_EH_FUNCLETS
            else
            {
                // Otherwise an anonymous temp, numbered past the IL locals.
                ilKind = "tmp";
                if (compIsForInlining())
                {
                    ilNum = lclNum - impInlineInfo->InlinerCompiler->info.compLocalsCount;
                }
                else
                {
                    ilNum = lclNum - info.compLocalsCount;
                }
            }
        }
    }
    else if (lclNum < (compIsForInlining() ? impInlineInfo->InlinerCompiler->info.compArgsCount : info.compArgsCount))
    {
        // An incoming argument: "this" for the instance pointer, "arg" otherwise.
        if (ilNum == 0 && !info.compIsStatic)
        {
            ilName = "this";
        }
        else
        {
            ilKind = "arg";
        }
    }
    else
    {
        // An IL local; struct fields of promoted locals get no "loc" prefix.
        if (!lvaTable[lclNum].lvIsStructField)
        {
            ilKind = "loc";
        }
        // Convert to a number relative to the first IL local (past the IL args).
        if (compIsForInlining())
        {
            ilNum -= impInlineInfo->InlinerCompiler->info.compILargsCount;
        }
        else
        {
            ilNum -= info.compILargsCount;
        }
    }

    *ilKindOut = ilKind;
    *ilNameOut = ilName;
    *ilNumOut  = ilNum;
}
10848
10849 /*****************************************************************************/
10850 int Compiler::gtGetLclVarName(unsigned lclNum, char* buf, unsigned buf_remaining)
10851 {
10852     char*    bufp_next    = buf;
10853     unsigned charsPrinted = 0;
10854     int      sprintf_result;
10855
10856     sprintf_result = sprintf_s(bufp_next, buf_remaining, "V%02u", lclNum);
10857
10858     if (sprintf_result < 0)
10859     {
10860         return sprintf_result;
10861     }
10862
10863     charsPrinted += sprintf_result;
10864     bufp_next += sprintf_result;
10865     buf_remaining -= sprintf_result;
10866
10867     const char* ilKind = nullptr;
10868     const char* ilName = nullptr;
10869     unsigned    ilNum  = 0;
10870
10871     gtGetLclVarNameInfo(lclNum, &ilKind, &ilName, &ilNum);
10872
10873     if (ilName != nullptr)
10874     {
10875         sprintf_result = sprintf_s(bufp_next, buf_remaining, " %s", ilName);
10876         if (sprintf_result < 0)
10877         {
10878             return sprintf_result;
10879         }
10880         charsPrinted += sprintf_result;
10881         bufp_next += sprintf_result;
10882         buf_remaining -= sprintf_result;
10883     }
10884     else if (ilKind != nullptr)
10885     {
10886         sprintf_result = sprintf_s(bufp_next, buf_remaining, " %s%d", ilKind, ilNum);
10887         if (sprintf_result < 0)
10888         {
10889             return sprintf_result;
10890         }
10891         charsPrinted += sprintf_result;
10892         bufp_next += sprintf_result;
10893         buf_remaining -= sprintf_result;
10894     }
10895
10896     assert(charsPrinted > 0);
10897     assert(buf_remaining > 0);
10898
10899     return (int)charsPrinted;
10900 }
10901
10902 /*****************************************************************************
10903  * Get the local var name, and create a copy of the string that can be used in debug output.
10904  */
10905 char* Compiler::gtGetLclVarName(unsigned lclNum)
10906 {
10907     char buf[BUF_SIZE];
10908     int  charsPrinted = gtGetLclVarName(lclNum, buf, _countof(buf));
10909     if (charsPrinted < 0)
10910     {
10911         return nullptr;
10912     }
10913
10914     char* retBuf = new (this, CMK_DebugOnly) char[charsPrinted + 1];
10915     strcpy_s(retBuf, charsPrinted + 1, buf);
10916     return retBuf;
10917 }
10918
10919 /*****************************************************************************/
10920 void Compiler::gtDispLclVar(unsigned lclNum, bool padForBiggestDisp)
10921 {
10922     char buf[BUF_SIZE];
10923     int  charsPrinted = gtGetLclVarName(lclNum, buf, _countof(buf));
10924
10925     if (charsPrinted < 0)
10926     {
10927         return;
10928     }
10929
10930     printf("%s", buf);
10931
10932     if (padForBiggestDisp && (charsPrinted < LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH))
10933     {
10934         printf("%*c", LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH - charsPrinted, ' ');
10935     }
10936 }
10937
10938 /*****************************************************************************/
10939 void Compiler::gtDispConst(GenTree* tree)
10940 {
10941     assert(tree->OperKind() & GTK_CONST);
10942
10943     switch (tree->gtOper)
10944     {
10945         case GT_CNS_INT:
10946             if (tree->IsIconHandle(GTF_ICON_STR_HDL))
10947             {
10948                 const wchar_t* str = eeGetCPString(tree->gtIntCon.gtIconVal);
10949                 if (str != nullptr)
10950                 {
10951                     printf(" 0x%X \"%S\"", dspPtr(tree->gtIntCon.gtIconVal), str);
10952                 }
10953                 else
10954                 {
10955                     // Note that eGetCPString isn't currently implemented on Linux/ARM
10956                     // and instead always returns nullptr
10957                     printf(" 0x%X [ICON_STR_HDL]", dspPtr(tree->gtIntCon.gtIconVal));
10958                 }
10959             }
10960             else
10961             {
10962                 ssize_t dspIconVal = tree->IsIconHandle() ? dspPtr(tree->gtIntCon.gtIconVal) : tree->gtIntCon.gtIconVal;
10963
10964                 if (tree->TypeGet() == TYP_REF)
10965                 {
10966                     assert(tree->gtIntCon.gtIconVal == 0);
10967                     printf(" null");
10968                 }
10969                 else if ((tree->gtIntCon.gtIconVal > -1000) && (tree->gtIntCon.gtIconVal < 1000))
10970                 {
10971                     printf(" %ld", dspIconVal);
10972 #ifdef _TARGET_64BIT_
10973                 }
10974                 else if ((tree->gtIntCon.gtIconVal & 0xFFFFFFFF00000000LL) != 0)
10975                 {
10976                     printf(" 0x%llx", dspIconVal);
10977 #endif
10978                 }
10979                 else
10980                 {
10981                     printf(" 0x%X", dspIconVal);
10982                 }
10983
10984                 if (tree->IsIconHandle())
10985                 {
10986                     switch (tree->GetIconHandleFlag())
10987                     {
10988                         case GTF_ICON_SCOPE_HDL:
10989                             printf(" scope");
10990                             break;
10991                         case GTF_ICON_CLASS_HDL:
10992                             printf(" class");
10993                             break;
10994                         case GTF_ICON_METHOD_HDL:
10995                             printf(" method");
10996                             break;
10997                         case GTF_ICON_FIELD_HDL:
10998                             printf(" field");
10999                             break;
11000                         case GTF_ICON_STATIC_HDL:
11001                             printf(" static");
11002                             break;
11003                         case GTF_ICON_STR_HDL:
11004                             unreached(); // This case is handled above
11005                             break;
11006                         case GTF_ICON_PSTR_HDL:
11007                             printf(" pstr");
11008                             break;
11009                         case GTF_ICON_PTR_HDL:
11010                             printf(" ptr");
11011                             break;
11012                         case GTF_ICON_VARG_HDL:
11013                             printf(" vararg");
11014                             break;
11015                         case GTF_ICON_PINVKI_HDL:
11016                             printf(" pinvoke");
11017                             break;
11018                         case GTF_ICON_TOKEN_HDL:
11019                             printf(" token");
11020                             break;
11021                         case GTF_ICON_TLS_HDL:
11022                             printf(" tls");
11023                             break;
11024                         case GTF_ICON_FTN_ADDR:
11025                             printf(" ftn");
11026                             break;
11027                         case GTF_ICON_CIDMID_HDL:
11028                             printf(" cid/mid");
11029                             break;
11030                         case GTF_ICON_BBC_PTR:
11031                             printf(" bbc");
11032                             break;
11033                         default:
11034                             printf(" UNKNOWN");
11035                             break;
11036                     }
11037                 }
11038
11039                 if ((tree->gtFlags & GTF_ICON_FIELD_OFF) != 0)
11040                 {
11041                     printf(" field offset");
11042                 }
11043
11044 #ifdef FEATURE_SIMD
11045                 if ((tree->gtFlags & GTF_ICON_SIMD_COUNT) != 0)
11046                 {
11047                     printf(" Vector<T>.Count");
11048                 }
11049 #endif
11050
11051                 if ((tree->IsReuseRegVal()) != 0)
11052                 {
11053                     printf(" reuse reg val");
11054                 }
11055             }
11056
11057             gtDispFieldSeq(tree->gtIntCon.gtFieldSeq);
11058
11059             break;
11060
11061         case GT_CNS_LNG:
11062             printf(" 0x%016I64x", tree->gtLngCon.gtLconVal);
11063             break;
11064
11065         case GT_CNS_DBL:
11066             if (*((__int64*)&tree->gtDblCon.gtDconVal) == (__int64)I64(0x8000000000000000))
11067             {
11068                 printf(" -0.00000");
11069             }
11070             else
11071             {
11072                 printf(" %#.17g", tree->gtDblCon.gtDconVal);
11073             }
11074             break;
11075         case GT_CNS_STR:
11076             printf("<string constant>");
11077             break;
11078         default:
11079             assert(!"unexpected constant node");
11080     }
11081
11082     gtDispRegVal(tree);
11083 }
11084
11085 void Compiler::gtDispFieldSeq(FieldSeqNode* pfsn)
11086 {
11087     if (pfsn == FieldSeqStore::NotAField() || (pfsn == nullptr))
11088     {
11089         return;
11090     }
11091
11092     // Otherwise...
11093     printf(" Fseq[");
11094     while (pfsn != nullptr)
11095     {
11096         assert(pfsn != FieldSeqStore::NotAField()); // Can't exist in a field sequence list except alone
11097         CORINFO_FIELD_HANDLE fldHnd = pfsn->m_fieldHnd;
11098         // First check the "pseudo" field handles...
11099         if (fldHnd == FieldSeqStore::FirstElemPseudoField)
11100         {
11101             printf("#FirstElem");
11102         }
11103         else if (fldHnd == FieldSeqStore::ConstantIndexPseudoField)
11104         {
11105             printf("#ConstantIndex");
11106         }
11107         else
11108         {
11109             printf("%s", eeGetFieldName(fldHnd));
11110         }
11111         pfsn = pfsn->m_next;
11112         if (pfsn != nullptr)
11113         {
11114             printf(", ");
11115         }
11116     }
11117     printf("]");
11118 }
11119
11120 //------------------------------------------------------------------------
11121 // gtDispLeaf: Print a single leaf node to jitstdout.
11122 //
11123 // Arguments:
11124 //    tree - the tree to be printed
11125 //    indentStack - the specification for the current level of indentation & arcs
11126 //
11127 // Return Value:
11128 //    None.
11129 //
11130 // Notes:
11131 //    'indentStack' may be null, in which case no indentation or arcs are printed
11132
11133 void Compiler::gtDispLeaf(GenTree* tree, IndentStack* indentStack)
11134 {
11135     if (tree->OperKind() & GTK_CONST)
11136     {
11137         gtDispConst(tree);
11138         return;
11139     }
11140
11141     bool isLclFld = false;
11142
11143     switch (tree->gtOper)
11144     {
11145         unsigned   varNum;
11146         LclVarDsc* varDsc;
11147
11148         case GT_LCL_FLD:
11149         case GT_LCL_FLD_ADDR:
11150         case GT_STORE_LCL_FLD:
11151             isLclFld = true;
11152             __fallthrough;
11153
11154         case GT_PHI_ARG:
11155         case GT_LCL_VAR:
11156         case GT_LCL_VAR_ADDR:
11157         case GT_STORE_LCL_VAR:
11158             printf(" ");
11159             varNum = tree->gtLclVarCommon.gtLclNum;
11160             varDsc = &lvaTable[varNum];
11161             gtDispLclVar(varNum);
11162             if (tree->gtLclVarCommon.HasSsaName())
11163             {
11164                 if (tree->gtFlags & GTF_VAR_USEASG)
11165                 {
11166                     assert(tree->gtFlags & GTF_VAR_DEF);
11167                     printf("ud:%d->%d", tree->gtLclVarCommon.gtSsaNum, GetSsaNumForLocalVarDef(tree));
11168                 }
11169                 else
11170                 {
11171                     printf("%s:%d", (tree->gtFlags & GTF_VAR_DEF) ? "d" : "u", tree->gtLclVarCommon.gtSsaNum);
11172                 }
11173             }
11174
11175             if (isLclFld)
11176             {
11177                 printf("[+%u]", tree->gtLclFld.gtLclOffs);
11178                 gtDispFieldSeq(tree->gtLclFld.gtFieldSeq);
11179             }
11180
11181             if (varDsc->lvRegister)
11182             {
11183                 printf(" ");
11184                 varDsc->PrintVarReg();
11185             }
11186 #ifndef LEGACY_BACKEND
11187             else if (tree->InReg())
11188             {
11189 #if CPU_LONG_USES_REGPAIR
11190                 if (isRegPairType(tree->TypeGet()))
11191                     printf(" %s", compRegPairName(tree->gtRegPair));
11192                 else
11193 #endif
11194                     printf(" %s", compRegVarName(tree->gtRegNum));
11195             }
11196 #endif // !LEGACY_BACKEND
11197
11198             if (varDsc->lvPromoted)
11199             {
11200                 if (!varTypeIsPromotable(varDsc) && !varDsc->lvUnusedStruct)
11201                 {
11202                     // Promoted implicit byrefs can get in this state while they are being rewritten
11203                     // in global morph.
11204                     assert(fgGlobalMorph);
11205                 }
11206                 else
11207                 {
11208                     CORINFO_CLASS_HANDLE typeHnd = varDsc->lvVerTypeInfo.GetClassHandle();
11209                     CORINFO_FIELD_HANDLE fldHnd;
11210
11211                     for (unsigned i = varDsc->lvFieldLclStart; i < varDsc->lvFieldLclStart + varDsc->lvFieldCnt; ++i)
11212                     {
11213                         LclVarDsc*  fieldVarDsc = &lvaTable[i];
11214                         const char* fieldName;
11215 #if !defined(_TARGET_64BIT_)
11216                         if (varTypeIsLong(varDsc))
11217                         {
11218                             fieldName = (i == 0) ? "lo" : "hi";
11219                         }
11220                         else
11221 #endif // !defined(_TARGET_64BIT_)
11222                         {
11223                             fldHnd    = info.compCompHnd->getFieldInClass(typeHnd, fieldVarDsc->lvFldOrdinal);
11224                             fieldName = eeGetFieldName(fldHnd);
11225                         }
11226
11227                         printf("\n");
11228                         printf("                                                  ");
11229                         printIndent(indentStack);
11230                         printf("    %-6s V%02u.%s (offs=0x%02x) -> ", varTypeName(fieldVarDsc->TypeGet()),
11231                                tree->gtLclVarCommon.gtLclNum, fieldName, fieldVarDsc->lvFldOffset);
11232                         gtDispLclVar(i);
11233
11234                         if (fieldVarDsc->lvRegister)
11235                         {
11236                             printf(" ");
11237                             fieldVarDsc->PrintVarReg();
11238                         }
11239
11240                         if (fieldVarDsc->lvTracked && fgLocalVarLivenessDone && // Includes local variable liveness
11241                             ((tree->gtFlags & GTF_VAR_DEATH) != 0))
11242                         {
11243                             printf(" (last use)");
11244                         }
11245                     }
11246                 }
11247             }
11248             else // a normal not-promoted lclvar
11249             {
11250                 if (varDsc->lvTracked && fgLocalVarLivenessDone && ((tree->gtFlags & GTF_VAR_DEATH) != 0))
11251                 {
11252                     printf(" (last use)");
11253                 }
11254             }
11255             break;
11256
11257         case GT_REG_VAR:
11258             printf(" ");
11259             gtDispLclVar(tree->gtRegVar.gtLclNum);
11260             if (isFloatRegType(tree->gtType))
11261             {
11262                 assert(tree->gtRegVar.gtRegNum == tree->gtRegNum);
11263                 printf(" FPV%u", tree->gtRegNum);
11264             }
11265             else
11266             {
11267                 printf(" %s", compRegVarName(tree->gtRegVar.gtRegNum));
11268             }
11269
11270             varNum = tree->gtRegVar.gtLclNum;
11271             varDsc = &lvaTable[varNum];
11272
11273             if (varDsc->lvTracked && fgLocalVarLivenessDone && ((tree->gtFlags & GTF_VAR_DEATH) != 0))
11274             {
11275                 printf(" (last use)");
11276             }
11277
11278             break;
11279
11280         case GT_JMP:
11281         {
11282             const char* methodName;
11283             const char* className;
11284
11285             methodName = eeGetMethodName((CORINFO_METHOD_HANDLE)tree->gtVal.gtVal1, &className);
11286             printf(" %s.%s\n", className, methodName);
11287         }
11288         break;
11289
11290         case GT_CLS_VAR:
11291             printf(" Hnd=%#x", dspPtr(tree->gtClsVar.gtClsVarHnd));
11292             gtDispFieldSeq(tree->gtClsVar.gtFieldSeq);
11293             break;
11294
11295         case GT_CLS_VAR_ADDR:
11296             printf(" Hnd=%#x", dspPtr(tree->gtClsVar.gtClsVarHnd));
11297             break;
11298
11299         case GT_LABEL:
11300             if (tree->gtLabel.gtLabBB)
11301             {
11302                 printf(" dst=BB%02u", tree->gtLabel.gtLabBB->bbNum);
11303             }
11304             else
11305             {
11306                 printf(" dst=<null>");
11307             }
11308
11309             break;
11310
11311         case GT_FTN_ADDR:
11312         {
11313             const char* methodName;
11314             const char* className;
11315
11316             methodName = eeGetMethodName((CORINFO_METHOD_HANDLE)tree->gtFptrVal.gtFptrMethod, &className);
11317             printf(" %s.%s\n", className, methodName);
11318         }
11319         break;
11320
11321 #if !FEATURE_EH_FUNCLETS
11322         case GT_END_LFIN:
11323             printf(" endNstLvl=%d", tree->gtVal.gtVal1);
11324             break;
11325 #endif // !FEATURE_EH_FUNCLETS
11326
11327         // Vanilla leaves. No qualifying information available. So do nothing
11328
11329         case GT_NO_OP:
11330         case GT_START_NONGC:
11331         case GT_PROF_HOOK:
11332         case GT_CATCH_ARG:
11333         case GT_MEMORYBARRIER:
11334         case GT_ARGPLACE:
11335         case GT_PINVOKE_PROLOG:
11336 #ifndef LEGACY_BACKEND
11337         case GT_JMPTABLE:
11338 #endif // !LEGACY_BACKEND
11339             break;
11340
11341         case GT_RET_EXPR:
11342             printf("(inl return from call ");
11343             printTreeID(tree->gtRetExpr.gtInlineCandidate);
11344             printf(")");
11345             break;
11346
11347         case GT_PHYSREG:
11348             printf(" %s", getRegName(tree->gtPhysReg.gtSrcReg, varTypeIsFloating(tree)));
11349             break;
11350
11351         case GT_IL_OFFSET:
11352             printf(" IL offset: ");
11353             if (tree->gtStmt.gtStmtILoffsx == BAD_IL_OFFSET)
11354             {
11355                 printf("???");
11356             }
11357             else
11358             {
11359                 printf("0x%x", jitGetILoffs(tree->gtStmt.gtStmtILoffsx));
11360             }
11361             break;
11362
11363         case GT_JCC:
11364         case GT_SETCC:
11365             printf(" cond=%s", GenTree::OpName(tree->AsCC()->gtCondition));
11366             break;
11367         case GT_JCMP:
11368             printf(" cond=%s%s", (tree->gtFlags & GTF_JCMP_TST) ? "TEST_" : "",
11369                    (tree->gtFlags & GTF_JCMP_EQ) ? "EQ" : "NE");
11370
11371         default:
11372             assert(!"don't know how to display tree leaf node");
11373     }
11374
11375     gtDispRegVal(tree);
11376 }
11377
11378 //------------------------------------------------------------------------
// gtDispChild: Print a child node to jitstdout.
11380 //
11381 // Arguments:
11382 //    tree - the tree to be printed
11383 //    indentStack - the specification for the current level of indentation & arcs
11384 //    arcType     - the type of arc to use for this child
//    msg         - a contextual message (i.e. from the parent) to print
11386 //    topOnly     - a boolean indicating whether to print the children, or just the top node
11387 //
11388 // Return Value:
11389 //    None.
11390 //
11391 // Notes:
11392 //    'indentStack' may be null, in which case no indentation or arcs are printed
11393 //    'msg' has a default value of null
11394 //    'topOnly' is an optional argument that defaults to false
11395
void Compiler::gtDispChild(GenTree*             child,
                           IndentStack*         indentStack,
                           IndentInfo           arcType,
                           __in_opt const char* msg,     /* = nullptr  */
                           bool                 topOnly) /* = false */
{
    // Temporarily extend the indent stack with the caller-requested arc so the
    // child is drawn one level deeper, print the subtree, then restore the
    // stack to its prior depth for the caller's remaining children.
    indentStack->Push(arcType);
    gtDispTree(child, indentStack, msg, topOnly);
    indentStack->Pop();
}
11406
#ifdef FEATURE_SIMD
// Intrinsic Id to name map.
// Built with the X-macro technique: simdintrinsiclist.h expands one
// SIMD_INTRINSIC(...) entry per intrinsic, and the macro below keeps only the
// display-name argument, so the array can be indexed by the intrinsic id
// (see the GT_SIMD display code, which indexes it with gtSIMDIntrinsicID).
extern const char* const simdIntrinsicNames[] = {
#define SIMD_INTRINSIC(mname, inst, id, name, r, ac, arg1, arg2, arg3, t1, t2, t3, t4, t5, t6, t7, t8, t9, t10) name,
#include "simdintrinsiclist.h"
};
#endif // FEATURE_SIMD

#ifdef FEATURE_HW_INTRINSICS
// Maps a NamedIntrinsic id to its printable name; declared here, defined elsewhere.
extern const char* getHWIntrinsicName(NamedIntrinsic intrinsic);
#endif // FEATURE_HW_INTRINSICS
11418
11419 /*****************************************************************************/
11420
11421 void Compiler::gtDispTree(GenTree*     tree,
11422                           IndentStack* indentStack,                 /* = nullptr */
11423                           __in __in_z __in_opt const char* msg,     /* = nullptr  */
11424                           bool                             topOnly, /* = false */
11425                           bool                             isLIR)   /* = false */
11426 {
11427     if (tree == nullptr)
11428     {
11429         printf(" [%08X] <NULL>\n", tree);
11430         printf(""); // null string means flush
11431         return;
11432     }
11433
11434     if (indentStack == nullptr)
11435     {
11436         indentStack = new (this, CMK_DebugOnly) IndentStack(this);
11437     }
11438
11439     if (IsUninitialized(tree))
11440     {
11441         /* Value used to initalize nodes */
11442         printf("Uninitialized tree node!");
11443         return;
11444     }
11445
11446     if (tree->gtOper >= GT_COUNT)
11447     {
11448         gtDispNode(tree, indentStack, msg, isLIR);
11449         printf("Bogus operator!");
11450         return;
11451     }
11452
11453     /* Is tree a leaf node? */
11454
11455     if (tree->OperIsLeaf() || tree->OperIsLocalStore()) // local stores used to be leaves
11456     {
11457         gtDispNode(tree, indentStack, msg, isLIR);
11458         gtDispLeaf(tree, indentStack);
11459         gtDispVN(tree);
11460         printf("\n");
11461         if (tree->OperIsLocalStore() && !topOnly)
11462         {
11463             gtDispChild(tree->gtOp.gtOp1, indentStack, IINone);
11464         }
11465         return;
11466     }
11467
11468     // Determine what kind of arc to propagate.
11469     IndentInfo myArc    = IINone;
11470     IndentInfo lowerArc = IINone;
11471     if (indentStack->Depth() > 0)
11472     {
11473         myArc = indentStack->Pop();
11474         switch (myArc)
11475         {
11476             case IIArcBottom:
11477                 indentStack->Push(IIArc);
11478                 lowerArc = IINone;
11479                 break;
11480             case IIArc:
11481                 indentStack->Push(IIArc);
11482                 lowerArc = IIArc;
11483                 break;
11484             case IIArcTop:
11485                 indentStack->Push(IINone);
11486                 lowerArc = IIArc;
11487                 break;
11488             case IIEmbedded:
11489                 indentStack->Push(IIEmbedded);
11490                 lowerArc = IIEmbedded;
11491                 break;
11492             case IINone:
11493                 indentStack->Push(IINone);
11494                 lowerArc = IINone;
11495                 break;
11496             default:
11497                 unreached();
11498                 break;
11499         }
11500     }
11501
11502     // Special case formatting for PHI nodes -- arg lists like calls.
11503
11504     if (tree->OperGet() == GT_PHI)
11505     {
11506         gtDispNode(tree, indentStack, msg, isLIR);
11507         gtDispVN(tree);
11508         printf("\n");
11509
11510         if (!topOnly)
11511         {
11512             if (tree->gtOp.gtOp1 != nullptr)
11513             {
11514                 IndentInfo arcType = IIArcTop;
11515                 for (GenTreeArgList* args = tree->gtOp.gtOp1->AsArgList(); args != nullptr; args = args->Rest())
11516                 {
11517                     if (args->Rest() == nullptr)
11518                     {
11519                         arcType = IIArcBottom;
11520                     }
11521                     gtDispChild(args->Current(), indentStack, arcType);
11522                     arcType = IIArc;
11523                 }
11524             }
11525         }
11526         return;
11527     }
11528
11529     /* Is it a 'simple' unary/binary operator? */
11530
11531     const char* childMsg = nullptr;
11532
11533     if (tree->OperIsSimple())
11534     {
11535         if (!topOnly)
11536         {
11537             if (tree->gtGetOp2IfPresent())
11538             {
11539                 // Label the childMsgs of the GT_COLON operator
11540                 // op2 is the then part
11541
11542                 if (tree->gtOper == GT_COLON)
11543                 {
11544                     childMsg = "then";
11545                 }
11546                 gtDispChild(tree->gtOp.gtOp2, indentStack, IIArcTop, childMsg, topOnly);
11547             }
11548         }
11549
11550         // Now, get the right type of arc for this node
11551         if (myArc != IINone)
11552         {
11553             indentStack->Pop();
11554             indentStack->Push(myArc);
11555         }
11556
11557         gtDispNode(tree, indentStack, msg, isLIR);
11558
11559         // Propagate lowerArc to the lower children.
11560         if (indentStack->Depth() > 0)
11561         {
11562             (void)indentStack->Pop();
11563             indentStack->Push(lowerArc);
11564         }
11565
11566         if (tree->gtOper == GT_CAST)
11567         {
11568             /* Format a message that explains the effect of this GT_CAST */
11569
11570             var_types fromType  = genActualType(tree->gtCast.CastOp()->TypeGet());
11571             var_types toType    = tree->CastToType();
11572             var_types finalType = tree->TypeGet();
11573
11574             /* if GTF_UNSIGNED is set then force fromType to an unsigned type */
11575             if (tree->gtFlags & GTF_UNSIGNED)
11576             {
11577                 fromType = genUnsignedType(fromType);
11578             }
11579
11580             if (finalType != toType)
11581             {
11582                 printf(" %s <-", varTypeName(finalType));
11583             }
11584
11585             printf(" %s <- %s", varTypeName(toType), varTypeName(fromType));
11586         }
11587
11588         if (tree->gtOper == GT_OBJ && (tree->gtFlags & GTF_VAR_DEATH))
11589         {
11590             printf(" (last use)");
11591         }
11592         if (tree->OperIsBlkOp())
11593         {
11594             if (tree->OperIsCopyBlkOp())
11595             {
11596                 printf(" (copy)");
11597             }
11598             else if (tree->OperIsInitBlkOp())
11599             {
11600                 printf(" (init)");
11601             }
11602             if (tree->OperIsStoreBlk() && (tree->AsBlk()->gtBlkOpKind != GenTreeBlk::BlkOpKindInvalid))
11603             {
11604                 switch (tree->AsBlk()->gtBlkOpKind)
11605                 {
11606                     case GenTreeBlk::BlkOpKindRepInstr:
11607                         printf(" (RepInstr)");
11608                         break;
11609                     case GenTreeBlk::BlkOpKindUnroll:
11610                         printf(" (Unroll)");
11611                         break;
11612                     case GenTreeBlk::BlkOpKindHelper:
11613                         printf(" (Helper)");
11614                         break;
11615                     default:
11616                         unreached();
11617                 }
11618             }
11619         }
11620         else if (tree->OperIsFieldList())
11621         {
11622             printf(" %s at offset %d", varTypeName(tree->AsFieldList()->gtFieldType),
11623                    tree->AsFieldList()->gtFieldOffset);
11624         }
11625 #if FEATURE_PUT_STRUCT_ARG_STK
11626         else if (tree->OperGet() == GT_PUTARG_STK)
11627         {
11628             printf(" (%d slots)", tree->AsPutArgStk()->gtNumSlots);
11629             if (tree->AsPutArgStk()->gtPutArgStkKind != GenTreePutArgStk::Kind::Invalid)
11630             {
11631                 switch (tree->AsPutArgStk()->gtPutArgStkKind)
11632                 {
11633                     case GenTreePutArgStk::Kind::RepInstr:
11634                         printf(" (RepInstr)");
11635                         break;
11636                     case GenTreePutArgStk::Kind::Unroll:
11637                         printf(" (Unroll)");
11638                         break;
11639                     case GenTreePutArgStk::Kind::Push:
11640                         printf(" (Push)");
11641                         break;
11642                     case GenTreePutArgStk::Kind::PushAllSlots:
11643                         printf(" (PushAllSlots)");
11644                         break;
11645                     default:
11646                         unreached();
11647                 }
11648             }
11649         }
11650 #endif // FEATURE_PUT_STRUCT_ARG_STK
11651
11652         IndirectAssignmentAnnotation* pIndirAnnote;
11653         if (tree->gtOper == GT_ASG && GetIndirAssignMap()->Lookup(tree, &pIndirAnnote))
11654         {
11655             printf("  indir assign of V%02d:", pIndirAnnote->m_lclNum);
11656             if (pIndirAnnote->m_isEntire)
11657             {
11658                 printf("d:%d", pIndirAnnote->m_defSsaNum);
11659             }
11660             else
11661             {
11662                 printf("ud:%d->%d", pIndirAnnote->m_useSsaNum, pIndirAnnote->m_defSsaNum);
11663             }
11664         }
11665
11666         if (tree->gtOper == GT_INTRINSIC)
11667         {
11668             switch (tree->gtIntrinsic.gtIntrinsicId)
11669             {
11670                 case CORINFO_INTRINSIC_Sin:
11671                     printf(" sin");
11672                     break;
11673                 case CORINFO_INTRINSIC_Cos:
11674                     printf(" cos");
11675                     break;
11676                 case CORINFO_INTRINSIC_Cbrt:
11677                     printf(" cbrt");
11678                     break;
11679                 case CORINFO_INTRINSIC_Sqrt:
11680                     printf(" sqrt");
11681                     break;
11682                 case CORINFO_INTRINSIC_Abs:
11683                     printf(" abs");
11684                     break;
11685                 case CORINFO_INTRINSIC_Round:
11686                     printf(" round");
11687                     break;
11688                 case CORINFO_INTRINSIC_Cosh:
11689                     printf(" cosh");
11690                     break;
11691                 case CORINFO_INTRINSIC_Sinh:
11692                     printf(" sinh");
11693                     break;
11694                 case CORINFO_INTRINSIC_Tan:
11695                     printf(" tan");
11696                     break;
11697                 case CORINFO_INTRINSIC_Tanh:
11698                     printf(" tanh");
11699                     break;
11700                 case CORINFO_INTRINSIC_Asin:
11701                     printf(" asin");
11702                     break;
11703                 case CORINFO_INTRINSIC_Asinh:
11704                     printf(" asinh");
11705                     break;
11706                 case CORINFO_INTRINSIC_Acos:
11707                     printf(" acos");
11708                     break;
11709                 case CORINFO_INTRINSIC_Acosh:
11710                     printf(" acosh");
11711                     break;
11712                 case CORINFO_INTRINSIC_Atan:
11713                     printf(" atan");
11714                     break;
11715                 case CORINFO_INTRINSIC_Atan2:
11716                     printf(" atan2");
11717                     break;
11718                 case CORINFO_INTRINSIC_Atanh:
11719                     printf(" atanh");
11720                     break;
11721                 case CORINFO_INTRINSIC_Log10:
11722                     printf(" log10");
11723                     break;
11724                 case CORINFO_INTRINSIC_Pow:
11725                     printf(" pow");
11726                     break;
11727                 case CORINFO_INTRINSIC_Exp:
11728                     printf(" exp");
11729                     break;
11730                 case CORINFO_INTRINSIC_Ceiling:
11731                     printf(" ceiling");
11732                     break;
11733                 case CORINFO_INTRINSIC_Floor:
11734                     printf(" floor");
11735                     break;
11736                 case CORINFO_INTRINSIC_Object_GetType:
11737                     printf(" objGetType");
11738                     break;
11739
11740                 default:
11741                     unreached();
11742             }
11743         }
11744
11745 #ifdef FEATURE_SIMD
11746         if (tree->gtOper == GT_SIMD)
11747         {
11748             printf(" %s %s", varTypeName(tree->gtSIMD.gtSIMDBaseType),
11749                    simdIntrinsicNames[tree->gtSIMD.gtSIMDIntrinsicID]);
11750         }
11751 #endif // FEATURE_SIMD
11752
11753 #ifdef FEATURE_HW_INTRINSICS
11754         if (tree->gtOper == GT_HWIntrinsic)
11755         {
11756             printf(" %s %s",
11757                    tree->gtHWIntrinsic.gtSIMDBaseType == TYP_UNKNOWN ? ""
11758                                                                      : varTypeName(tree->gtHWIntrinsic.gtSIMDBaseType),
11759                    getHWIntrinsicName(tree->gtHWIntrinsic.gtHWIntrinsicId));
11760         }
11761 #endif // FEATURE_HW_INTRINSICS
11762
11763         gtDispRegVal(tree);
11764         gtDispVN(tree);
11765         printf("\n");
11766
11767         if (!topOnly && tree->gtOp.gtOp1)
11768         {
11769
11770             // Label the child of the GT_COLON operator
11771             // op1 is the else part
11772
11773             if (tree->gtOper == GT_COLON)
11774             {
11775                 childMsg = "else";
11776             }
11777             else if (tree->gtOper == GT_QMARK)
11778             {
11779                 childMsg = "   if";
11780             }
11781             gtDispChild(tree->gtOp.gtOp1, indentStack, IIArcBottom, childMsg, topOnly);
11782         }
11783
11784         return;
11785     }
11786
11787     // Now, get the right type of arc for this node
11788     if (myArc != IINone)
11789     {
11790         indentStack->Pop();
11791         indentStack->Push(myArc);
11792     }
11793     gtDispNode(tree, indentStack, msg, isLIR);
11794
11795     // Propagate lowerArc to the lower children.
11796     if (indentStack->Depth() > 0)
11797     {
11798         (void)indentStack->Pop();
11799         indentStack->Push(lowerArc);
11800     }
11801
11802     // See what kind of a special operator we have here, and handle its special children.
11803
11804     switch (tree->gtOper)
11805     {
11806         case GT_FIELD:
11807             printf(" %s", eeGetFieldName(tree->gtField.gtFldHnd), 0);
11808
11809             if (tree->gtField.gtFldObj && !topOnly)
11810             {
11811                 gtDispVN(tree);
11812                 printf("\n");
11813                 gtDispChild(tree->gtField.gtFldObj, indentStack, IIArcBottom);
11814             }
11815             else
11816             {
11817                 gtDispRegVal(tree);
11818                 gtDispVN(tree);
11819                 printf("\n");
11820             }
11821             break;
11822
11823         case GT_CALL:
11824         {
11825             GenTreeCall* call = tree->AsCall();
11826             assert(call->gtFlags & GTF_CALL);
11827             unsigned numChildren = call->NumChildren();
11828             GenTree* lastChild   = nullptr;
11829             if (numChildren != 0)
11830             {
11831                 lastChild = call->GetChild(numChildren - 1);
11832             }
11833
11834             if (call->gtCallType != CT_INDIRECT)
11835             {
11836                 const char* methodName;
11837                 const char* className;
11838
11839                 methodName = eeGetMethodName(call->gtCallMethHnd, &className);
11840
11841                 printf(" %s.%s", className, methodName);
11842             }
11843
11844             if ((call->gtFlags & GTF_CALL_UNMANAGED) && (call->gtCallMoreFlags & GTF_CALL_M_FRAME_VAR_DEATH))
11845             {
11846                 printf(" (FramesRoot last use)");
11847             }
11848
11849             if (((call->gtFlags & GTF_CALL_INLINE_CANDIDATE) != 0) && (call->gtInlineCandidateInfo != nullptr) &&
11850                 (call->gtInlineCandidateInfo->exactContextHnd != nullptr))
11851             {
11852                 printf(" (exactContextHnd=0x%p)", dspPtr(call->gtInlineCandidateInfo->exactContextHnd));
11853             }
11854
11855             gtDispVN(call);
11856             if (call->IsMultiRegCall())
11857             {
11858                 gtDispRegVal(call);
11859             }
11860             printf("\n");
11861
11862             if (!topOnly)
11863             {
11864                 char  buf[64];
11865                 char* bufp;
11866
11867                 bufp = &buf[0];
11868
11869                 if ((call->gtCallObjp != nullptr) && (call->gtCallObjp->gtOper != GT_NOP) &&
11870                     (!call->gtCallObjp->IsArgPlaceHolderNode()))
11871                 {
11872                     if (call->gtCallObjp->gtOper == GT_ASG)
11873                     {
11874                         sprintf_s(bufp, sizeof(buf), "this SETUP%c", 0);
11875                     }
11876                     else
11877                     {
11878                         sprintf_s(bufp, sizeof(buf), "this in %s%c", compRegVarName(REG_ARG_0), 0);
11879                     }
11880                     gtDispChild(call->gtCallObjp, indentStack, (call->gtCallObjp == lastChild) ? IIArcBottom : IIArc,
11881                                 bufp, topOnly);
11882                 }
11883
11884                 if (call->gtCallArgs)
11885                 {
11886                     gtDispArgList(call, indentStack);
11887                 }
11888
11889                 if (call->gtCallType == CT_INDIRECT)
11890                 {
11891                     gtDispChild(call->gtCallAddr, indentStack, (call->gtCallAddr == lastChild) ? IIArcBottom : IIArc,
11892                                 "calli tgt", topOnly);
11893                 }
11894
11895                 if (call->gtControlExpr != nullptr)
11896                 {
11897                     gtDispChild(call->gtControlExpr, indentStack,
11898                                 (call->gtControlExpr == lastChild) ? IIArcBottom : IIArc, "control expr", topOnly);
11899                 }
11900
11901 #if !FEATURE_FIXED_OUT_ARGS
11902                 regList list = call->regArgList;
11903 #endif
11904                 /* process the late argument list */
11905                 int lateArgIndex = 0;
11906                 for (GenTreeArgList* lateArgs = call->gtCallLateArgs; lateArgs;
11907                      (lateArgIndex++, lateArgs = lateArgs->Rest()))
11908                 {
11909                     GenTree* argx;
11910
11911                     argx = lateArgs->Current();
11912
11913                     IndentInfo arcType = (lateArgs->Rest() == nullptr) ? IIArcBottom : IIArc;
11914                     gtGetLateArgMsg(call, argx, lateArgIndex, -1, bufp, sizeof(buf));
11915                     gtDispChild(argx, indentStack, arcType, bufp, topOnly);
11916                 }
11917             }
11918         }
11919         break;
11920
11921         case GT_STMT:
11922             printf("\n");
11923
11924             if (!topOnly)
11925             {
11926                 gtDispChild(tree->gtStmt.gtStmtExpr, indentStack, IIArcBottom);
11927             }
11928             break;
11929
11930         case GT_ARR_ELEM:
11931             gtDispVN(tree);
11932             printf("\n");
11933
11934             if (!topOnly)
11935             {
11936                 gtDispChild(tree->gtArrElem.gtArrObj, indentStack, IIArc, nullptr, topOnly);
11937
11938                 unsigned dim;
11939                 for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
11940                 {
11941                     IndentInfo arcType = ((dim + 1) == tree->gtArrElem.gtArrRank) ? IIArcBottom : IIArc;
11942                     gtDispChild(tree->gtArrElem.gtArrInds[dim], indentStack, arcType, nullptr, topOnly);
11943                 }
11944             }
11945             break;
11946
11947         case GT_ARR_OFFSET:
11948             gtDispVN(tree);
11949             printf("\n");
11950             if (!topOnly)
11951             {
11952                 gtDispChild(tree->gtArrOffs.gtOffset, indentStack, IIArc, nullptr, topOnly);
11953                 gtDispChild(tree->gtArrOffs.gtIndex, indentStack, IIArc, nullptr, topOnly);
11954                 gtDispChild(tree->gtArrOffs.gtArrObj, indentStack, IIArcBottom, nullptr, topOnly);
11955             }
11956             break;
11957
11958         case GT_CMPXCHG:
11959             gtDispVN(tree);
11960             printf("\n");
11961             if (!topOnly)
11962             {
11963                 gtDispChild(tree->gtCmpXchg.gtOpLocation, indentStack, IIArc, nullptr, topOnly);
11964                 gtDispChild(tree->gtCmpXchg.gtOpValue, indentStack, IIArc, nullptr, topOnly);
11965                 gtDispChild(tree->gtCmpXchg.gtOpComparand, indentStack, IIArcBottom, nullptr, topOnly);
11966             }
11967             break;
11968
11969         case GT_ARR_BOUNDS_CHECK:
11970 #ifdef FEATURE_SIMD
11971         case GT_SIMD_CHK:
11972 #endif // FEATURE_SIMD
11973 #ifdef FEATURE_HW_INTRINSICS
11974         case GT_HW_INTRINSIC_CHK:
11975 #endif // FEATURE_HW_INTRINSICS
11976             gtDispVN(tree);
11977             printf("\n");
11978             if (!topOnly)
11979             {
11980                 gtDispChild(tree->gtBoundsChk.gtIndex, indentStack, IIArc, nullptr, topOnly);
11981                 gtDispChild(tree->gtBoundsChk.gtArrLen, indentStack, IIArcBottom, nullptr, topOnly);
11982             }
11983             break;
11984
11985         case GT_STORE_DYN_BLK:
11986         case GT_DYN_BLK:
11987             if (tree->OperIsCopyBlkOp())
11988             {
11989                 printf(" (copy)");
11990             }
11991             else if (tree->OperIsInitBlkOp())
11992             {
11993                 printf(" (init)");
11994             }
11995             gtDispVN(tree);
11996             printf("\n");
11997             if (!topOnly)
11998             {
11999                 if (tree->gtDynBlk.Data() != nullptr)
12000                 {
12001                     gtDispChild(tree->gtDynBlk.Data(), indentStack, IIArc, nullptr, topOnly);
12002                 }
12003                 gtDispChild(tree->gtDynBlk.Addr(), indentStack, IIArc, nullptr, topOnly);
12004                 gtDispChild(tree->gtDynBlk.gtDynamicSize, indentStack, IIArcBottom, nullptr, topOnly);
12005             }
12006             break;
12007
12008         default:
12009             printf("<DON'T KNOW HOW TO DISPLAY THIS NODE> :");
12010             printf(""); // null string means flush
12011             break;
12012     }
12013 }
12014
12015 //------------------------------------------------------------------------
12016 // gtGetArgMsg: Construct a message about the given argument
12017 //
12018 // Arguments:
12019 //    call      - The call for which 'arg' is an argument
12020 //    arg       - The argument for which a message should be constructed
12021 //    argNum    - The ordinal number of the arg in the argument list
12022 //    listCount - When printing in LIR form this is the count for a GT_FIELD_LIST
12023 //                or -1 if we are not printing in LIR form
12024 //    bufp      - A pointer to the buffer into which the message is written
12025 //    bufLength - The length of the buffer pointed to by bufp
12026 //
12027 // Return Value:
12028 //    No return value, but bufp is written.
12029 //
12030 // Assumptions:
12031 //    'call' must be a call node
12032 //    'arg' must be an argument to 'call' (else gtArgEntryByNode will assert)
12033
12034 void Compiler::gtGetArgMsg(
12035     GenTreeCall* call, GenTree* arg, unsigned argNum, int listCount, char* bufp, unsigned bufLength)
12036 {
12037     if (call->gtCallLateArgs != nullptr)
12038     {
12039         fgArgTabEntry* curArgTabEntry = gtArgEntryByArgNum(call, argNum);
12040         assert(curArgTabEntry);
12041
12042         if (arg->gtFlags & GTF_LATE_ARG)
12043         {
12044             sprintf_s(bufp, bufLength, "arg%d SETUP%c", argNum, 0);
12045         }
12046         else
12047         {
12048 #ifdef _TARGET_ARM_
12049             if (curArgTabEntry->isSplit)
12050             {
12051                 regNumber firstReg = curArgTabEntry->regNum;
12052                 if (listCount == -1)
12053                 {
12054                     if (curArgTabEntry->numRegs == 1)
12055                     {
12056                         sprintf_s(bufp, bufLength, "arg%d %s out+%02x%c", argNum, compRegVarName(firstReg),
12057                                   (curArgTabEntry->slotNum) * TARGET_POINTER_SIZE, 0);
12058                     }
12059                     else
12060                     {
12061                         regNumber lastReg   = REG_STK;
12062                         char      separator = (curArgTabEntry->numRegs == 2) ? ',' : '-';
12063                         if (curArgTabEntry->isHfaRegArg)
12064                         {
12065                             unsigned lastRegNum = genMapFloatRegNumToRegArgNum(firstReg) + curArgTabEntry->numRegs - 1;
12066                             lastReg             = genMapFloatRegArgNumToRegNum(lastRegNum);
12067                         }
12068                         else
12069                         {
12070                             unsigned lastRegNum = genMapIntRegNumToRegArgNum(firstReg) + curArgTabEntry->numRegs - 1;
12071                             lastReg             = genMapIntRegArgNumToRegNum(lastRegNum);
12072                         }
12073                         sprintf_s(bufp, bufLength, "arg%d %s%c%s out+%02x%c", argNum, compRegVarName(firstReg),
12074                                   separator, compRegVarName(lastReg), (curArgTabEntry->slotNum) * TARGET_POINTER_SIZE,
12075                                   0);
12076                     }
12077                 }
12078                 else
12079                 {
12080                     unsigned curArgNum = BAD_VAR_NUM;
12081                     bool     isFloat   = curArgTabEntry->isHfaRegArg;
12082                     if (isFloat)
12083                     {
12084                         curArgNum = genMapFloatRegNumToRegArgNum(firstReg) + listCount;
12085                     }
12086                     else
12087                     {
12088                         curArgNum = genMapIntRegNumToRegArgNum(firstReg) + listCount;
12089                     }
12090
12091                     if (!isFloat && curArgNum < MAX_REG_ARG)
12092                     {
12093                         regNumber curReg = genMapIntRegArgNumToRegNum(curArgNum);
12094                         sprintf_s(bufp, bufLength, "arg%d m%d %s%c", argNum, listCount, compRegVarName(curReg), 0);
12095                     }
12096                     else if (isFloat && curArgNum < MAX_FLOAT_REG_ARG)
12097                     {
12098                         regNumber curReg = genMapFloatRegArgNumToRegNum(curArgNum);
12099                         sprintf_s(bufp, bufLength, "arg%d m%d %s%c", argNum, listCount, compRegVarName(curReg), 0);
12100                     }
12101                     else
12102                     {
12103                         unsigned stackSlot = listCount - curArgTabEntry->numRegs;
12104                         sprintf_s(bufp, bufLength, "arg%d m%d out+%s%c", argNum, listCount, stackSlot, 0);
12105                     }
12106                 }
12107                 return;
12108             }
12109 #endif // _TARGET_ARM_
12110 #if FEATURE_FIXED_OUT_ARGS
12111             if (listCount == -1)
12112             {
12113                 sprintf_s(bufp, bufLength, "arg%d out+%02x%c", argNum, curArgTabEntry->slotNum * TARGET_POINTER_SIZE,
12114                           0);
12115             }
12116             else // listCount is 0,1,2 or 3
12117             {
12118                 assert(listCount <= MAX_ARG_REG_COUNT);
12119                 sprintf_s(bufp, bufLength, "arg%d out+%02x%c", argNum,
12120                           (curArgTabEntry->slotNum + listCount) * TARGET_POINTER_SIZE, 0);
12121             }
12122 #else
12123             sprintf_s(bufp, bufLength, "arg%d on STK%c", argNum, 0);
12124 #endif
12125         }
12126     }
12127     else
12128     {
12129         sprintf_s(bufp, bufLength, "arg%d%c", argNum, 0);
12130     }
12131 }
12132
12133 //------------------------------------------------------------------------
12134 // gtGetLateArgMsg: Construct a message about the given argument
12135 //
12136 // Arguments:
12137 //    call         - The call for which 'arg' is an argument
12138 //    argx         - The argument for which a message should be constructed
12139 //    lateArgIndex - The ordinal number of the arg in the lastArg  list
12140 //    listCount    - When printing in LIR form this is the count for a multireg GT_FIELD_LIST
12141 //                   or -1 if we are not printing in LIR form
12142 //    bufp         - A pointer to the buffer into which the message is written
12143 //    bufLength    - The length of the buffer pointed to by bufp
12144 //
12145 // Return Value:
12146 //    No return value, but bufp is written.
12147 //
12148 // Assumptions:
12149 //    'call' must be a call node
12150 //    'arg' must be an argument to 'call' (else gtArgEntryByNode will assert)
12151
12152 void Compiler::gtGetLateArgMsg(
12153     GenTreeCall* call, GenTree* argx, int lateArgIndex, int listCount, char* bufp, unsigned bufLength)
12154 {
12155     assert(!argx->IsArgPlaceHolderNode()); // No place holders nodes are in gtCallLateArgs;
12156
12157     fgArgTabEntry* curArgTabEntry = gtArgEntryByLateArgIndex(call, lateArgIndex);
12158     assert(curArgTabEntry);
12159     regNumber argReg = curArgTabEntry->regNum;
12160
12161 #if !FEATURE_FIXED_OUT_ARGS
12162     assert(lateArgIndex < call->regArgListCount);
12163     assert(argReg == call->regArgList[lateArgIndex]);
12164 #else
12165     if (argReg == REG_STK)
12166     {
12167         sprintf_s(bufp, bufLength, "arg%d in out+%02x%c", curArgTabEntry->argNum,
12168                   curArgTabEntry->slotNum * TARGET_POINTER_SIZE, 0);
12169     }
12170     else
12171 #endif
12172     {
12173         if (gtArgIsThisPtr(curArgTabEntry))
12174         {
12175             sprintf_s(bufp, bufLength, "this in %s%c", compRegVarName(argReg), 0);
12176         }
12177 #ifdef _TARGET_ARM_
12178         else if (curArgTabEntry->isSplit)
12179         {
12180             regNumber firstReg = curArgTabEntry->regNum;
12181             unsigned  argNum   = curArgTabEntry->argNum;
12182             if (listCount == -1)
12183             {
12184                 if (curArgTabEntry->numRegs == 1)
12185                 {
12186                     sprintf_s(bufp, bufLength, "arg%d %s out+%02x%c", argNum, compRegVarName(firstReg),
12187                               (curArgTabEntry->slotNum) * TARGET_POINTER_SIZE, 0);
12188                 }
12189                 else
12190                 {
12191                     regNumber lastReg   = REG_STK;
12192                     char      separator = (curArgTabEntry->numRegs == 2) ? ',' : '-';
12193                     if (curArgTabEntry->isHfaRegArg)
12194                     {
12195                         unsigned lastRegNum = genMapFloatRegNumToRegArgNum(firstReg) + curArgTabEntry->numRegs - 1;
12196                         lastReg             = genMapFloatRegArgNumToRegNum(lastRegNum);
12197                     }
12198                     else
12199                     {
12200                         unsigned lastRegNum = genMapIntRegNumToRegArgNum(firstReg) + curArgTabEntry->numRegs - 1;
12201                         lastReg             = genMapIntRegArgNumToRegNum(lastRegNum);
12202                     }
12203                     sprintf_s(bufp, bufLength, "arg%d %s%c%s out+%02x%c", argNum, compRegVarName(firstReg), separator,
12204                               compRegVarName(lastReg), (curArgTabEntry->slotNum) * TARGET_POINTER_SIZE, 0);
12205                 }
12206             }
12207             else
12208             {
12209                 unsigned curArgNum = BAD_VAR_NUM;
12210                 bool     isFloat   = curArgTabEntry->isHfaRegArg;
12211                 if (isFloat)
12212                 {
12213                     curArgNum = genMapFloatRegNumToRegArgNum(firstReg) + listCount;
12214                 }
12215                 else
12216                 {
12217                     curArgNum = genMapIntRegNumToRegArgNum(firstReg) + listCount;
12218                 }
12219
12220                 if (!isFloat && curArgNum < MAX_REG_ARG)
12221                 {
12222                     regNumber curReg = genMapIntRegArgNumToRegNum(curArgNum);
12223                     sprintf_s(bufp, bufLength, "arg%d m%d %s%c", argNum, listCount, compRegVarName(curReg), 0);
12224                 }
12225                 else if (isFloat && curArgNum < MAX_FLOAT_REG_ARG)
12226                 {
12227                     regNumber curReg = genMapFloatRegArgNumToRegNum(curArgNum);
12228                     sprintf_s(bufp, bufLength, "arg%d m%d %s%c", argNum, listCount, compRegVarName(curReg), 0);
12229                 }
12230                 else
12231                 {
12232                     unsigned stackSlot = listCount - curArgTabEntry->numRegs;
12233                     sprintf_s(bufp, bufLength, "arg%d m%d out+%s%c", argNum, listCount, stackSlot, 0);
12234                 }
12235             }
12236             return;
12237         }
12238 #endif // _TARGET_ARM_
12239         else
12240         {
12241 #if FEATURE_MULTIREG_ARGS
12242             if (curArgTabEntry->numRegs >= 2)
12243             {
12244                 regNumber otherRegNum;
12245 #if defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
12246                 assert(curArgTabEntry->numRegs == 2);
12247                 otherRegNum = curArgTabEntry->otherRegNum;
12248 #else
12249                 otherRegNum = (regNumber)(((unsigned)curArgTabEntry->regNum) + curArgTabEntry->numRegs - 1);
12250 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
12251
12252                 if (listCount == -1)
12253                 {
12254                     char seperator = (curArgTabEntry->numRegs == 2) ? ',' : '-';
12255
12256                     sprintf_s(bufp, bufLength, "arg%d %s%c%s%c", curArgTabEntry->argNum, compRegVarName(argReg),
12257                               seperator, compRegVarName(otherRegNum), 0);
12258                 }
12259                 else // listCount is 0,1,2 or 3
12260                 {
12261                     assert(listCount <= MAX_ARG_REG_COUNT);
12262                     regNumber curReg = (listCount == 1) ? otherRegNum : (regNumber)((unsigned)(argReg) + listCount);
12263                     sprintf_s(bufp, bufLength, "arg%d m%d %s%c", curArgTabEntry->argNum, listCount,
12264                               compRegVarName(curReg), 0);
12265                 }
12266             }
12267             else
12268 #endif
12269             {
12270                 sprintf_s(bufp, bufLength, "arg%d in %s%c", curArgTabEntry->argNum, compRegVarName(argReg), 0);
12271             }
12272         }
12273     }
12274 }
12275
12276 //------------------------------------------------------------------------
12277 // gtDispArgList: Dump the tree for a call arg list
12278 //
12279 // Arguments:
12280 //    call         - The call to dump arguments for
12281 //    indentStack  - the specification for the current level of indentation & arcs
12282 //
12283 // Return Value:
12284 //    None.
12285 //
12286 void Compiler::gtDispArgList(GenTreeCall* call, IndentStack* indentStack)
12287 {
12288     GenTree*  args      = call->gtCallArgs;
12289     unsigned  argnum    = 0;
12290     const int BufLength = 256;
12291     char      buf[BufLength];
12292     char*     bufp        = &buf[0];
12293     unsigned  numChildren = call->NumChildren();
12294     assert(numChildren != 0);
12295     bool argListIsLastChild = (args == call->GetChild(numChildren - 1));
12296
12297     IndentInfo arcType = IIArc;
12298     if (call->gtCallObjp != nullptr)
12299     {
12300         argnum++;
12301     }
12302
12303     while (args != nullptr)
12304     {
12305         assert(args->gtOper == GT_LIST);
12306         GenTree* arg = args->gtOp.gtOp1;
12307         if (!arg->IsNothingNode() && !arg->IsArgPlaceHolderNode())
12308         {
12309             gtGetArgMsg(call, arg, argnum, -1, bufp, BufLength);
12310             if (argListIsLastChild && (args->gtOp.gtOp2 == nullptr))
12311             {
12312                 arcType = IIArcBottom;
12313             }
12314             gtDispChild(arg, indentStack, arcType, bufp, false);
12315         }
12316         args = args->gtOp.gtOp2;
12317         argnum++;
12318     }
12319 }
12320
//------------------------------------------------------------------------
// gtDispTreeList: Dump each tree in a gtNext-linked list of trees
//
// Arguments:
//    tree         - The first tree in the gtNext-linked list to dump
//    indentStack  - the specification for the current level of indentation & arcs
//
// Return Value:
//    None.
12333
12334 void Compiler::gtDispTreeList(GenTree* tree, IndentStack* indentStack /* = nullptr */)
12335 {
12336     for (/*--*/; tree != nullptr; tree = tree->gtNext)
12337     {
12338         gtDispTree(tree, indentStack);
12339         printf("\n");
12340     }
12341 }
12342
12343 //------------------------------------------------------------------------
12344 // Compiler::gtDispRange: dumps a range of LIR.
12345 //
12346 // Arguments:
12347 //    range - the range of LIR to display.
12348 //
12349 void Compiler::gtDispRange(LIR::ReadOnlyRange const& range)
12350 {
12351     for (GenTree* node : range)
12352     {
12353         gtDispLIRNode(node);
12354     }
12355 }
12356
12357 //------------------------------------------------------------------------
12358 // Compiler::gtDispTreeRange: dumps the LIR range that contains all of the
12359 //                            nodes in the dataflow tree rooted at a given
12360 //                            node.
12361 //
12362 // Arguments:
12363 //    containingRange - the LIR range that contains the root node.
12364 //    tree - the root of the dataflow tree.
12365 //
12366 void Compiler::gtDispTreeRange(LIR::Range& containingRange, GenTree* tree)
12367 {
12368     bool unused;
12369     gtDispRange(containingRange.GetTreeRange(tree, &unused));
12370 }
12371
12372 //------------------------------------------------------------------------
12373 // Compiler::gtDispLIRNode: dumps a single LIR node.
12374 //
12375 // Arguments:
12376 //    node - the LIR node to dump.
12377 //    prefixMsg - an optional prefix for each line of output.
12378 //
void Compiler::gtDispLIRNode(GenTree* node, const char* prefixMsg /* = nullptr */)
{
    // Local helper that prints one operand line: alignment padding, the
    // indentation arcs, then the operand's node id, type, and the
    // caller-supplied role message (e.g. which argument slot it fills).
    auto displayOperand = [](GenTree* operand, const char* message, IndentInfo operandArc, IndentStack& indentStack,
                             size_t prefixIndent) {
        assert(operand != nullptr);
        assert(message != nullptr);

        if (prefixIndent != 0)
        {
            printf("%*s", (int)prefixIndent, "");
        }

        // 49 spaces for alignment
        printf("%-49s", "");
#if FEATURE_SET_FLAGS
        // additional flag enlarges the flag field by one character
        printf(" ");
#endif

        indentStack.Push(operandArc);
        indentStack.print();
        indentStack.Pop();
        operandArc = IIArc;

        printf("  t%-5d %-6s %s\n", operand->gtTreeID, varTypeName(operand->TypeGet()), message);
    };

    IndentStack indentStack(this);

    // When a prefix message is supplied, indent the operand lines by its length
    // so they stay aligned under the prefixed node line.
    size_t prefixIndent = 0;
    if (prefixMsg != nullptr)
    {
        prefixIndent = strlen(prefixMsg);
    }

    const int bufLength = 256;
    char      buf[bufLength];

    const bool nodeIsCall = node->IsCall();

    // Visit operands
    IndentInfo operandArc = IIArcTop;
    for (GenTree* operand : node->Operands())
    {
        if (operand->IsArgPlaceHolderNode() || !operand->IsValue())
        {
            // Either of these situations may happen with calls.
            continue;
        }

        if (nodeIsCall)
        {
            // Label each call operand with its role: 'this', call target,
            // control expression, cookie, or the argument slot it occupies.
            GenTreeCall* call = node->AsCall();
            if (operand == call->gtCallObjp)
            {
                sprintf_s(buf, sizeof(buf), "this in %s", compRegVarName(REG_ARG_0));
                displayOperand(operand, buf, operandArc, indentStack, prefixIndent);
            }
            else if (operand == call->gtCallAddr)
            {
                displayOperand(operand, "calli tgt", operandArc, indentStack, prefixIndent);
            }
            else if (operand == call->gtControlExpr)
            {
                displayOperand(operand, "control expr", operandArc, indentStack, prefixIndent);
            }
            else if (operand == call->gtCallCookie)
            {
                displayOperand(operand, "cookie", operandArc, indentStack, prefixIndent);
            }
            else
            {
                fgArgTabEntry* curArgTabEntry = gtArgEntryByNode(call, operand);
                assert(curArgTabEntry);

                if (operand->OperGet() == GT_LIST)
                {
                    // A GT_LIST operand is a multi-piece argument: print each
                    // element with its index within the list.
                    int listIndex = 0;
                    for (GenTreeArgList* element = operand->AsArgList(); element != nullptr; element = element->Rest())
                    {
                        operand = element->Current();
                        // lateArgInx == -1 means the arg is not in the late arg list.
                        if (curArgTabEntry->lateArgInx == (unsigned)-1)
                        {
                            gtGetArgMsg(call, operand, curArgTabEntry->argNum, listIndex, buf, sizeof(buf));
                        }
                        else
                        {
                            gtGetLateArgMsg(call, operand, curArgTabEntry->lateArgInx, listIndex, buf, sizeof(buf));
                        }

                        displayOperand(operand, buf, operandArc, indentStack, prefixIndent);
                        operandArc = IIArc;
                    }
                }
                else
                {
                    if (curArgTabEntry->lateArgInx == (unsigned)-1)
                    {
                        gtGetArgMsg(call, operand, curArgTabEntry->argNum, -1, buf, sizeof(buf));
                    }
                    else
                    {
                        gtGetLateArgMsg(call, operand, curArgTabEntry->lateArgInx, -1, buf, sizeof(buf));
                    }

                    displayOperand(operand, buf, operandArc, indentStack, prefixIndent);
                }
            }
        }
        else if (node->OperIsDynBlkOp())
        {
            // Dynamic block ops have an address, a data operand, and a size.
            if (operand == node->AsBlk()->Addr())
            {
                displayOperand(operand, "lhs", operandArc, indentStack, prefixIndent);
            }
            else if (operand == node->AsBlk()->Data())
            {
                displayOperand(operand, "rhs", operandArc, indentStack, prefixIndent);
            }
            else
            {
                assert(operand == node->AsDynBlk()->gtDynamicSize);
                displayOperand(operand, "size", operandArc, indentStack, prefixIndent);
            }
        }
        else if (node->OperGet() == GT_DYN_BLK)
        {
            // A bare GT_DYN_BLK has only an address and a size (no data operand).
            if (operand == node->AsBlk()->Addr())
            {
                displayOperand(operand, "lhs", operandArc, indentStack, prefixIndent);
            }
            else
            {
                assert(operand == node->AsDynBlk()->gtDynamicSize);
                displayOperand(operand, "size", operandArc, indentStack, prefixIndent);
            }
        }
        else if (node->OperIsAssignment())
        {
            if (operand == node->gtGetOp1())
            {
                displayOperand(operand, "lhs", operandArc, indentStack, prefixIndent);
            }
            else
            {
                displayOperand(operand, "rhs", operandArc, indentStack, prefixIndent);
            }
        }
        else
        {
            displayOperand(operand, "", operandArc, indentStack, prefixIndent);
        }

        // Only the first displayed operand carries the top arc.
        operandArc = IIArc;
    }

    // Visit the operator

    if (prefixMsg != nullptr)
    {
        printf("%s", prefixMsg);
    }

    const bool topOnly = true;
    const bool isLIR   = true;
    gtDispTree(node, &indentStack, nullptr, topOnly, isLIR);
}
12546
12547 /*****************************************************************************/
12548 #endif // DEBUG
12549
12550 /*****************************************************************************
12551  *
12552  *  Check if the given node can be folded,
12553  *  and call the methods to perform the folding
12554  */
12555
GenTree* Compiler::gtFoldExpr(GenTree* tree)
{
    unsigned kind = tree->OperKind();

    /* We must have a simple operation to fold */

    // If we're in CSE, it's not safe to perform tree
    // folding given that it will potentially
    // change the set of considered CSE candidates.
    if (optValnumCSE_phase)
    {
        return tree;
    }

    if (!(kind & GTK_SMPOP))
    {
        return tree;
    }

    GenTree* op1 = tree->gtOp.gtOp1;

    /* Filter out non-foldable trees that can have constant children */

    assert(kind & (GTK_UNOP | GTK_BINOP));
    switch (tree->gtOper)
    {
        // These may wrap constant children but must not be replaced by them:
        // returns must remain returns, and an indirection of a constant
        // address is still a memory access.
        case GT_RETFILT:
        case GT_RETURN:
        case GT_IND:
            return tree;
        default:
            break;
    }

    /* try to fold the current node */

    if ((kind & GTK_UNOP) && op1)
    {
        if (op1->OperKind() & GTK_CONST)
        {
            return gtFoldExprConst(tree);
        }
    }
    else if ((kind & GTK_BINOP) && op1 && tree->gtOp.gtOp2 &&
             // Don't take out conditionals for debugging
             !((opts.compDbgCode || opts.MinOpts()) && tree->OperIsCompare()))
    {
        GenTree* op2 = tree->gtOp.gtOp2;

        // The atomic operations are exempted here because they are never computable statically;
        // one of their arguments is an address.
        if (((op1->OperKind() & op2->OperKind()) & GTK_CONST) && !tree->OperIsAtomicOp())
        {
            /* both nodes are constants - fold the expression */
            return gtFoldExprConst(tree);
        }
        else if ((op1->OperKind() | op2->OperKind()) & GTK_CONST)
        {
            /* at least one is a constant - see if we have a
             * special operator that can use only one constant
             * to fold - e.g. booleans */

            return gtFoldExprSpecial(tree);
        }
        else if (tree->OperIsCompare())
        {
            /* comparisons of two local variables can sometimes be folded */

            return gtFoldExprCompare(tree);
        }
        else if (op2->OperGet() == GT_COLON)
        {
            // QMARK/COLON: if both arms of the colon are identical trees, the
            // condition's value is irrelevant and the qmark can be collapsed
            // (keeping any side effects of the condition).
            assert(tree->OperGet() == GT_QMARK);

            GenTree* colon_op1 = op2->gtOp.gtOp1;
            GenTree* colon_op2 = op2->gtOp.gtOp2;

            if (gtCompareTree(colon_op1, colon_op2))
            {
                // Both sides of the GT_COLON are the same tree

                GenTree* sideEffList = nullptr;
                gtExtractSideEffList(op1, &sideEffList);

                fgUpdateRefCntForExtract(op1, sideEffList);   // Decrement refcounts for op1, Keeping any side-effects
                fgUpdateRefCntForExtract(colon_op1, nullptr); // Decrement refcounts for colon_op1

                // Clear colon flags only if the qmark itself is not conditionaly executed
                if ((tree->gtFlags & GTF_COLON_COND) == 0)
                {
                    fgWalkTreePre(&colon_op2, gtClearColonCond);
                }

                if (sideEffList == nullptr)
                {
                    // No side-effects, just return colon_op2
                    return colon_op2;
                }
                else
                {
#ifdef DEBUG
                    if (verbose)
                    {
                        printf("\nIdentical GT_COLON trees with side effects! Extracting side effects...\n");
                        gtDispTree(sideEffList);
                        printf("\n");
                    }
#endif
                    // Change the GT_COLON into a GT_COMMA node with the side-effects
                    op2->ChangeOper(GT_COMMA);
                    op2->gtFlags |= (sideEffList->gtFlags & GTF_ALL_EFFECT);
                    op2->gtOp.gtOp1 = sideEffList;
                    return op2;
                }
            }
        }
    }

    /* Return the original node (folded/bashed or not) */

    return tree;
}
12678
12679 //------------------------------------------------------------------------
12680 // gtFoldExprCall: see if a call is foldable
12681 //
12682 // Arguments:
12683 //    call - call to examine
12684 //
12685 // Returns:
12686 //    The original call if no folding happened.
12687 //    An alternative tree if folding happens.
12688 //
12689 // Notes:
12690 //    Checks for calls to Type.op_Equality, Type.op_Inequality, and
12691 //    Enum.HasFlag, and if the call is to one of these,
12692 //    attempts to optimize.
12693
12694 GenTree* Compiler::gtFoldExprCall(GenTreeCall* call)
12695 {
12696     // Can only fold calls to special intrinsics.
12697     if ((call->gtCallMoreFlags & GTF_CALL_M_SPECIAL_INTRINSIC) == 0)
12698     {
12699         return call;
12700     }
12701
12702     // Defer folding if not optimizing.
12703     if (opts.compDbgCode || opts.MinOpts())
12704     {
12705         return call;
12706     }
12707
12708     // Fetch id of the intrinsic.
12709     const CorInfoIntrinsics methodID = info.compCompHnd->getIntrinsicID(call->gtCallMethHnd);
12710
12711     switch (methodID)
12712     {
12713         case CORINFO_INTRINSIC_TypeEQ:
12714         case CORINFO_INTRINSIC_TypeNEQ:
12715         {
12716             noway_assert(call->TypeGet() == TYP_INT);
12717             GenTree* op1 = call->gtCallArgs->gtOp.gtOp1;
12718             GenTree* op2 = call->gtCallArgs->gtOp.gtOp2->gtOp.gtOp1;
12719
12720             // If either operand is known to be a RuntimeType, this can be folded
12721             GenTree* result = gtFoldTypeEqualityCall(methodID, op1, op2);
12722             if (result != nullptr)
12723             {
12724                 return result;
12725             }
12726             break;
12727         }
12728
12729         default:
12730             break;
12731     }
12732
12733     // Check for a new-style jit intrinsic.
12734     const NamedIntrinsic ni = lookupNamedIntrinsic(call->gtCallMethHnd);
12735
12736     if (ni == NI_System_Enum_HasFlag)
12737     {
12738         GenTree* thisOp = call->gtCallObjp;
12739         GenTree* flagOp = call->gtCallArgs->gtOp.gtOp1;
12740         GenTree* result = gtOptimizeEnumHasFlag(thisOp, flagOp);
12741
12742         if (result != nullptr)
12743         {
12744             return result;
12745         }
12746     }
12747
12748     return call;
12749 }
12750
12751 //------------------------------------------------------------------------
12752 // gtFoldTypeEqualityCall: see if a (potential) type equality call is foldable
12753 //
12754 // Arguments:
12755 //    methodID -- type equality intrinsic ID
12756 //    op1 -- first argument to call
12757 //    op2 -- second argument to call
12758 //
12759 // Returns:
//    nullptr if no folding happened.
12761 //    An alternative tree if folding happens.
12762 //
12763 // Notes:
//    If either operand is known to be a RuntimeType, then the type
12765 //    equality methods will simply check object identity and so we can
12766 //    fold the call into a simple compare of the call's operands.
12767
12768 GenTree* Compiler::gtFoldTypeEqualityCall(CorInfoIntrinsics methodID, GenTree* op1, GenTree* op2)
12769 {
12770     // The method must be be a type equality intrinsic
12771     assert(methodID == CORINFO_INTRINSIC_TypeEQ || methodID == CORINFO_INTRINSIC_TypeNEQ);
12772
12773     if ((gtGetTypeProducerKind(op1) == TPK_Unknown) && (gtGetTypeProducerKind(op2) == TPK_Unknown))
12774     {
12775         return nullptr;
12776     }
12777
12778     const genTreeOps simpleOp = (methodID == CORINFO_INTRINSIC_TypeEQ) ? GT_EQ : GT_NE;
12779
12780     JITDUMP("\nFolding call to Type:op_%s to a simple compare via %s\n",
12781             methodID == CORINFO_INTRINSIC_TypeEQ ? "Equality" : "Inequality", GenTree::OpName(simpleOp));
12782
12783     GenTree* compare = gtNewOperNode(simpleOp, TYP_INT, op1, op2);
12784
12785     return compare;
12786 }
12787
12788 /*****************************************************************************
12789  *
12790  *  Some comparisons can be folded:
12791  *
12792  *    locA        == locA
12793  *    classVarA   == classVarA
12794  *    locA + locB == locB + locA
12795  *
12796  */
12797
12798 GenTree* Compiler::gtFoldExprCompare(GenTree* tree)
12799 {
12800     GenTree* op1 = tree->gtOp.gtOp1;
12801     GenTree* op2 = tree->gtOp.gtOp2;
12802
12803     assert(tree->OperIsCompare());
12804
12805     /* Filter out cases that cannot be folded here */
12806
12807     /* Do not fold floats or doubles (e.g. NaN != Nan) */
12808
12809     if (varTypeIsFloating(op1->TypeGet()))
12810     {
12811         return tree;
12812     }
12813
12814     /* Currently we can only fold when the two subtrees exactly match */
12815
12816     if ((tree->gtFlags & GTF_SIDE_EFFECT) || GenTree::Compare(op1, op2, true) == false)
12817     {
12818         return tree; /* return unfolded tree */
12819     }
12820
12821     GenTree* cons;
12822
12823     switch (tree->gtOper)
12824     {
12825         case GT_EQ:
12826         case GT_LE:
12827         case GT_GE:
12828             cons = gtNewIconNode(true); /* Folds to GT_CNS_INT(true) */
12829             break;
12830
12831         case GT_NE:
12832         case GT_LT:
12833         case GT_GT:
12834             cons = gtNewIconNode(false); /* Folds to GT_CNS_INT(false) */
12835             break;
12836
12837         default:
12838             assert(!"Unexpected relOp");
12839             return tree;
12840     }
12841
12842     /* The node has beeen folded into 'cons' */
12843
12844     if (fgGlobalMorph)
12845     {
12846         fgMorphTreeDone(cons);
12847     }
12848     else
12849     {
12850         cons->gtNext = tree->gtNext;
12851         cons->gtPrev = tree->gtPrev;
12852     }
12853     if (lvaLocalVarRefCounted)
12854     {
12855         lvaRecursiveDecRefCounts(tree);
12856     }
12857     return cons;
12858 }
12859
12860 //------------------------------------------------------------------------
12861 // gtFoldTypeCompare: see if a type comparison can be further simplified
12862 //
12863 // Arguments:
12864 //    tree -- tree possibly comparing types
12865 //
12866 // Returns:
12867 //    An alternative tree if folding happens.
12868 //    Original tree otherwise.
12869 //
12870 // Notes:
12871 //    Checks for
12872 //        typeof(...) == obj.GetType()
12873 //        typeof(...) == typeof(...)
12874 //
12875 //    And potentially optimizes away the need to obtain actual
12876 //    RuntimeType objects to do the comparison.
12877
GenTree* Compiler::gtFoldTypeCompare(GenTree* tree)
{
    // Only handle EQ and NE
    // (maybe relop vs null someday)
    const genTreeOps oper = tree->OperGet();
    if ((oper != GT_EQ) && (oper != GT_NE))
    {
        return tree;
    }

    // Screen for the right kinds of operands
    GenTree* const         op1     = tree->gtOp.gtOp1;
    const TypeProducerKind op1Kind = gtGetTypeProducerKind(op1);
    if (op1Kind == TPK_Unknown)
    {
        return tree;
    }

    GenTree* const         op2     = tree->gtOp.gtOp2;
    const TypeProducerKind op2Kind = gtGetTypeProducerKind(op2);
    if (op2Kind == TPK_Unknown)
    {
        return tree;
    }

    // We must have a handle on one side or the other here to optimize,
    // otherwise we can't be sure that optimizing is sound.
    const bool op1IsFromHandle = (op1Kind == TPK_Handle);
    const bool op2IsFromHandle = (op2Kind == TPK_Handle);

    if (!(op1IsFromHandle || op2IsFromHandle))
    {
        return tree;
    }

    // If both types are created via handles, we can simply compare
    // handles (or the indirection cells for handles) instead of the
    // types that they'd create.
    if (op1IsFromHandle && op2IsFromHandle)
    {
        JITDUMP("Optimizing compare of types-from-handles to instead compare handles\n");
        // Each side is a type-producing call; its first argument is the
        // tree that supplies the class handle.
        GenTree*             op1ClassFromHandle = tree->gtOp.gtOp1->gtCall.gtCallArgs->gtOp.gtOp1;
        GenTree*             op2ClassFromHandle = tree->gtOp.gtOp2->gtCall.gtCallArgs->gtOp.gtOp1;
        GenTree*             op1TunneledHandle  = nullptr;
        GenTree*             op2TunneledHandle  = nullptr;
        CORINFO_CLASS_HANDLE cls1Hnd            = nullptr;
        CORINFO_CLASS_HANDLE cls2Hnd            = nullptr;
        // Number of GT_RUNTIMELOOKUPs feeding this compare. If the compare
        // folds to a constant those lookups become dead, and we must credit
        // back their uses of the generics context (see below).
        unsigned             runtimeLookupCount = 0;

        // Try and find class handle for op1
        if ((op1ClassFromHandle->gtOper == GT_CNS_INT) && (op1ClassFromHandle->gtType == TYP_I_IMPL))
        {
            assert(op1ClassFromHandle->IsIconHandle(GTF_ICON_CLASS_HDL));
            cls1Hnd = (CORINFO_CLASS_HANDLE)op1ClassFromHandle->gtIntCon.gtCompileTimeHandle;
        }
        else if (op1ClassFromHandle->OperGet() == GT_RUNTIMELOOKUP)
        {
            cls1Hnd = op1ClassFromHandle->AsRuntimeLookup()->GetClassHandle();
            runtimeLookupCount++;
        }
        // Tunnel through indirs we may see when prejitting
        else if (op1ClassFromHandle->gtOper == GT_IND)
        {
            // The handle indirs we can optimize will be marked as non-faulting.
            // Certain others (eg from refanytype) may not be.
            if (op1ClassFromHandle->gtFlags & GTF_IND_NONFAULTING)
            {
                GenTree* op1HandleLiteral = op1ClassFromHandle->gtOp.gtOp1;

                // If, after tunneling, we have a constant handle,
                // remember the class and the value tree for later.
                if ((op1HandleLiteral->gtOper == GT_CNS_INT) && (op1HandleLiteral->gtType == TYP_I_IMPL))
                {
                    JITDUMP("tunneling through indir on op1\n");
                    op1TunneledHandle = op1HandleLiteral;

                    // These handle constants should be class handles.
                    assert(op1TunneledHandle->IsIconHandle(GTF_ICON_CLASS_HDL));
                    cls1Hnd = (CORINFO_CLASS_HANDLE)op1TunneledHandle->gtIntCon.gtCompileTimeHandle;
                }
            }
        }

        // Try and find class handle for op2 (same three cases as op1 above)
        if ((op2ClassFromHandle->gtOper == GT_CNS_INT) && (op2ClassFromHandle->gtType == TYP_I_IMPL))
        {
            assert(op2ClassFromHandle->IsIconHandle(GTF_ICON_CLASS_HDL));
            cls2Hnd = (CORINFO_CLASS_HANDLE)op2ClassFromHandle->gtIntCon.gtCompileTimeHandle;
        }
        else if (op2ClassFromHandle->OperGet() == GT_RUNTIMELOOKUP)
        {
            cls2Hnd = op2ClassFromHandle->AsRuntimeLookup()->GetClassHandle();
            runtimeLookupCount++;
        }
        // Tunnel through indirs we may see when prejitting
        else if (op2ClassFromHandle->gtOper == GT_IND)
        {
            // The handle indirs we can optimize will be marked as non-faulting.
            // Certain others (eg from refanytype) may not be.
            if (op2ClassFromHandle->gtFlags & GTF_IND_NONFAULTING)
            {
                GenTree* op2HandleLiteral = op2ClassFromHandle->gtOp.gtOp1;

                // If, after tunneling, we have a constant handle,
                // remember the class and the value tree for later.
                if ((op2HandleLiteral->gtOper == GT_CNS_INT) && (op2HandleLiteral->gtType == TYP_I_IMPL))
                {
                    JITDUMP("tunneling through indir on op2\n");
                    op2TunneledHandle = op2HandleLiteral;

                    // These handle constants should be class handles.
                    assert(op2TunneledHandle->IsIconHandle(GTF_ICON_CLASS_HDL));
                    cls2Hnd = (CORINFO_CLASS_HANDLE)op2TunneledHandle->gtIntCon.gtCompileTimeHandle;
                }
            }
        }

        // If we have class handles, try and resolve the type equality test completely.
        if ((cls1Hnd != nullptr) && (cls2Hnd != nullptr))
        {
            JITDUMP("Asking runtime to compare %p (%s) and %p (%s) for equality\n", dspPtr(cls1Hnd),
                    info.compCompHnd->getClassName(cls1Hnd), dspPtr(cls2Hnd), info.compCompHnd->getClassName(cls2Hnd));
            TypeCompareState s = info.compCompHnd->compareTypesForEquality(cls1Hnd, cls2Hnd);

            if (s != TypeCompareState::May)
            {
                // Type comparison result is known.
                const bool typesAreEqual = (s == TypeCompareState::Must);
                const bool operatorIsEQ  = (oper == GT_EQ);
                // Note: '^' binds tighter than '?:', so this reads as
                // ((operatorIsEQ ^ typesAreEqual) ? 0 : 1) -- i.e. the
                // compare yields 1 exactly when the operator agrees with
                // the known type equality (EQ & equal, or NE & not equal).
                const int  compareResult = operatorIsEQ ^ typesAreEqual ? 0 : 1;
                JITDUMP("Runtime reports comparison is known at jit time: %u\n", compareResult);
                GenTree* result = gtNewIconNode(compareResult);

                // Any runtime lookups that fed into this compare are
                // now dead code, so they no longer require the runtime context.
                assert(lvaGenericsContextUseCount >= runtimeLookupCount);
                lvaGenericsContextUseCount -= runtimeLookupCount;
                return result;
            }
        }

        JITDUMP("Could not find handle for %s%s\n", (cls1Hnd == nullptr) ? " cls1" : "",
                (cls2Hnd == nullptr) ? " cls2" : "");

        // We can't answer the equality comparison definitively at jit
        // time, but can still simplify the comparison.
        //
        // If we successfully tunneled through both operands, compare
        // the tunneled values, otherwise compare the original values.
        GenTree* compare = nullptr;
        if ((op1TunneledHandle != nullptr) && (op2TunneledHandle != nullptr))
        {
            compare = gtNewOperNode(oper, TYP_INT, op1TunneledHandle, op2TunneledHandle);
        }
        else
        {
            compare = gtNewOperNode(oper, TYP_INT, op1ClassFromHandle, op2ClassFromHandle);
        }

        // Drop any now-irrelevant flags
        compare->gtFlags |= tree->gtFlags & (GTF_RELOP_JMP_USED | GTF_RELOP_QMARK | GTF_DONT_CSE);

        return compare;
    }

    // Just one operand creates a type from a handle.
    //
    // If the other operand is fetching the type from an object,
    // we can sometimes optimize the type compare into a simpler
    // method table comparison.
    //
    // TODO: if other operand is null...
    if (op1Kind != TPK_GetType && op2Kind != TPK_GetType)
    {
        return tree;
    }

    GenTree* const opHandle = op1IsFromHandle ? op1 : op2;
    GenTree* const opOther  = op1IsFromHandle ? op2 : op1;

    // Tunnel through the handle operand to get at the class handle involved.
    GenTree* const       opHandleArgument = opHandle->gtCall.gtCallArgs->gtOp.gtOp1;
    GenTree*             opHandleLiteral  = opHandleArgument;
    CORINFO_CLASS_HANDLE clsHnd           = nullptr;

    // Unwrap any GT_NOP node used to prevent constant folding
    if ((opHandleLiteral->gtOper == GT_NOP) && (opHandleLiteral->gtType == TYP_I_IMPL))
    {
        opHandleLiteral = opHandleLiteral->gtOp.gtOp1;
    }

    // For runtime lookups we can get at the handle directly
    if (opHandleLiteral->gtOper == GT_RUNTIMELOOKUP)
    {
        clsHnd = opHandleLiteral->AsRuntimeLookup()->GetClassHandle();
    }
    else
    {
        // Tunnel through prejit indirs if necessary
        if (opHandleLiteral->gtOper == GT_IND)
        {
            // Handle indirs should be marked as nonfaulting.
            assert((opHandleLiteral->gtFlags & GTF_IND_NONFAULTING) != 0);
            opHandleLiteral = opHandleLiteral->gtOp.gtOp1;
        }

        if ((opHandleLiteral->gtOper == GT_CNS_INT) && (opHandleLiteral->gtType == TYP_I_IMPL))
        {
            assert(opHandleLiteral->IsIconHandle(GTF_ICON_CLASS_HDL));
            clsHnd = CORINFO_CLASS_HANDLE(opHandleLiteral->gtIntCon.gtCompileTimeHandle);
        }
    }

    // If we couldn't find the class handle, give up.
    if (clsHnd == nullptr)
    {
        return tree;
    }

    // Ask the VM if this type can be equality tested by a simple method
    // table comparison.
    if (!info.compCompHnd->canInlineTypeCheckWithObjectVTable(clsHnd))
    {
        return tree;
    }

    // We're good to go.
    JITDUMP("Optimizing compare of obj.GetType()"
            " and type-from-handle to compare method table pointer\n");

    // opHandleArgument is the method table we're looking for.
    GenTree* const knownMT = opHandleArgument;

    // Fetch object method table from the object itself.
    GenTree* objOp = nullptr;

    // Note we may see intrinsified or regular calls to GetType
    if (opOther->OperGet() == GT_INTRINSIC)
    {
        objOp = opOther->gtUnOp.gtOp1;
    }
    else
    {
        assert(opOther->OperGet() == GT_CALL);
        objOp = opOther->gtCall.gtCallObjp;
    }

    GenTree* const objMT = gtNewOperNode(GT_IND, TYP_I_IMPL, objOp);

    // Update various flags
    // The indir may fault if the object is null, so it is an exception point.
    objMT->gtFlags |= GTF_EXCEPT;
    compCurBB->bbFlags |= BBF_HAS_VTABREF;
    optMethodFlags |= OMF_HAS_VTABLEREF;

    // Compare the two method tables
    GenTree* const compare = gtNewOperNode(oper, TYP_INT, objMT, knownMT);

    // Drop any now irrelevant flags
    compare->gtFlags |= tree->gtFlags & (GTF_RELOP_JMP_USED | GTF_RELOP_QMARK | GTF_DONT_CSE);

    // And we're done
    return compare;
}
13141
13142 /*****************************************************************************
13143  *
13144  *  Some binary operators can be folded even if they have only one
13145  *  operand constant - e.g. boolean operators, add with 0
13146  *  multiply with 1, etc
13147  */
13148
GenTree* Compiler::gtFoldExprSpecial(GenTree* tree)
{
    GenTree*   op1  = tree->gtOp.gtOp1;
    GenTree*   op2  = tree->gtOp.gtOp2;
    genTreeOps oper = tree->OperGet();

    GenTree* op;   // the non-constant operand (becomes the fold result)
    GenTree* cons; // the constant operand
    ssize_t  val;  // the constant's value

    assert(tree->OperKind() & GTK_BINOP);

    /* Filter out operators that cannot be folded here */
    if (oper == GT_CAST)
    {
        return tree;
    }

    /* We only consider TYP_INT for folding
     * Do not fold pointer arithmetic (e.g. addressing modes!) */

    if (oper != GT_QMARK && !varTypeIsIntOrI(tree->gtType))
    {
        return tree;
    }

    /* Find out which is the constant node */

    if (op1->IsCnsIntOrI())
    {
        op   = op2;
        cons = op1;
    }
    else if (op2->IsCnsIntOrI())
    {
        op   = op1;
        cons = op2;
    }
    else
    {
        // Neither operand is a constant; nothing to do here.
        return tree;
    }

    /* Get the constant value */

    val = cons->gtIntConCommon.IconValue();

    /* Here op is the non-constant operand and val is the constant value */

    switch (oper)
    {
        case GT_EQ:
        case GT_NE:
        case GT_GT:

            // Optimize boxed value classes; these are always false.  This IL is
            // generated when a generic value is tested against null:
            //     <T> ... foo(T x) { ... if ((object)x == null) ...
            if (val == 0 && op->IsBoxedValue())
            {
                JITDUMP("\nAttempting to optimize BOX(valueType) %s null [%06u]\n", GenTree::OpName(oper),
                        dspTreeID(tree));

                // We don't expect GT_GT with signed compares, and we
                // can't predict the result if we do see it, since the
                // boxed object addr could have its high bit set.
                if ((oper == GT_GT) && !tree->IsUnsigned())
                {
                    JITDUMP(" bailing; unexpected signed compare via GT_GT\n");
                }
                else
                {
                    // The tree under the box must be side effect free
                    // since we will drop it if we optimize.
                    assert(!gtTreeHasSideEffects(op->gtBox.gtOp.gtOp1, GTF_SIDE_EFFECT));

                    // See if we can optimize away the box and related statements.
                    GenTree* boxSourceTree = gtTryRemoveBoxUpstreamEffects(op);
                    bool     didOptimize   = (boxSourceTree != nullptr);

                    // If optimization succeeded, remove the box.
                    if (didOptimize)
                    {
                        // Set up the result of the compare.
                        int compareResult = 0;
                        if (oper == GT_GT)
                        {
                            // GT_GT(null, box) == false
                            // GT_GT(box, null) == true
                            // (op1 == op) means the box is the first operand.
                            compareResult = (op1 == op);
                        }
                        else if (oper == GT_EQ)
                        {
                            // GT_EQ(box, null) == false
                            // GT_EQ(null, box) == false
                            compareResult = 0;
                        }
                        else
                        {
                            assert(oper == GT_NE);
                            // GT_NE(box, null) == true
                            // GT_NE(null, box) == true
                            compareResult = 1;
                        }

                        JITDUMP("\nSuccess: replacing BOX(valueType) %s null with %d\n", GenTree::OpName(oper),
                                compareResult);

                        op = gtNewIconNode(compareResult);

                        if (fgGlobalMorph)
                        {
                            fgMorphTreeDone(op);
                        }
                        else
                        {
                            // Keep the linear (gtNext/gtPrev) links consistent
                            // when we're not in global morph.
                            op->gtNext = tree->gtNext;
                            op->gtPrev = tree->gtPrev;
                        }

                        return op;
                    }
                }
            }

            break;

        case GT_ADD:
#ifdef LEGACY_BACKEND
        case GT_ASG_ADD:
#endif
            // x + 0 == x
            if (val == 0)
            {
                goto DONE_FOLD;
            }
            break;

        case GT_MUL:
#ifdef LEGACY_BACKEND
        case GT_ASG_MUL:
#endif
            // x * 1 == x
            if (val == 1)
            {
                goto DONE_FOLD;
            }
            else if (val == 0)
            {
                /* Multiply by zero - return the 'zero' node, but not if side effects */
                if (!(op->gtFlags & GTF_SIDE_EFFECT))
                {
                    if (lvaLocalVarRefCounted)
                    {
                        // 'op' is being dropped; release its local var ref counts.
                        lvaRecursiveDecRefCounts(op);
                    }
                    op = cons;
                    goto DONE_FOLD;
                }
            }
            break;

        case GT_DIV:
        case GT_UDIV:
#ifdef LEGACY_BACKEND
        case GT_ASG_DIV:
#endif
            // x / 1 == x; only when the constant is the divisor (op2).
            if ((op2 == cons) && (val == 1) && !(op1->OperKind() & GTK_CONST))
            {
                goto DONE_FOLD;
            }
            break;

        case GT_SUB:
#ifdef LEGACY_BACKEND
        case GT_ASG_SUB:
#endif
            // x - 0 == x; only when the constant is the subtrahend (op2).
            if ((op2 == cons) && (val == 0) && !(op1->OperKind() & GTK_CONST))
            {
                goto DONE_FOLD;
            }
            break;

        case GT_AND:
            if (val == 0)
            {
                /* AND with zero - return the 'zero' node, but not if side effects */

                if (!(op->gtFlags & GTF_SIDE_EFFECT))
                {
                    if (lvaLocalVarRefCounted)
                    {
                        lvaRecursiveDecRefCounts(op);
                    }
                    op = cons;
                    goto DONE_FOLD;
                }
            }
            else
            {
                /* The GTF_BOOLEAN flag is set for nodes that are part
                 * of a boolean expression, thus all their children
                 * are known to evaluate to only 0 or 1 */

                if (tree->gtFlags & GTF_BOOLEAN)
                {

                    /* The constant value must be 1
                     * AND with 1 stays the same */
                    assert(val == 1);
                    goto DONE_FOLD;
                }
            }
            break;

        case GT_OR:
            // x | 0 == x
            if (val == 0)
            {
                goto DONE_FOLD;
            }
            else if (tree->gtFlags & GTF_BOOLEAN)
            {
                /* The constant value must be 1 - OR with 1 is 1 */

                assert(val == 1);

                /* OR with one - return the 'one' node, but not if side effects */

                if (!(op->gtFlags & GTF_SIDE_EFFECT))
                {
                    if (lvaLocalVarRefCounted)
                    {
                        lvaRecursiveDecRefCounts(op);
                    }
                    op = cons;
                    goto DONE_FOLD;
                }
            }
            break;

        case GT_LSH:
        case GT_RSH:
        case GT_RSZ:
        case GT_ROL:
        case GT_ROR:
#ifdef LEGACY_BACKEND
        case GT_ASG_LSH:
        case GT_ASG_RSH:
        case GT_ASG_RSZ:
#endif
            if (val == 0)
            {
                if (op2 == cons)
                {
                    // x <shift-op> 0 == x
                    goto DONE_FOLD;
                }
                else if (!(op->gtFlags & GTF_SIDE_EFFECT))
                {
                    // 0 <shift-op> x == 0, provided dropping x is safe.
                    if (lvaLocalVarRefCounted)
                    {
                        lvaRecursiveDecRefCounts(op);
                    }
                    op = cons;
                    goto DONE_FOLD;
                }
            }
            break;

        case GT_QMARK:
        {
            // A QMARK with a constant condition selects one arm of the
            // GT_COLON statically; the other arm is discarded.
            assert(op1 == cons && op2 == op && op2->gtOper == GT_COLON);
            assert(op2->gtOp.gtOp1 && op2->gtOp.gtOp2);

            assert(val == 0 || val == 1);

            GenTree* opToDelete;
            if (val)
            {
                op         = op2->AsColon()->ThenNode();
                opToDelete = op2->AsColon()->ElseNode();
            }
            else
            {
                op         = op2->AsColon()->ElseNode();
                opToDelete = op2->AsColon()->ThenNode();
            }
            if (lvaLocalVarRefCounted)
            {
                // The discarded arm's local var ref counts must be released.
                lvaRecursiveDecRefCounts(opToDelete);
            }

            // Clear colon flags only if the qmark itself is not conditionally executed
            if ((tree->gtFlags & GTF_COLON_COND) == 0)
            {
                fgWalkTreePre(&op, gtClearColonCond);
            }
        }

            goto DONE_FOLD;

        default:
            break;
    }

    /* The node is not foldable */

    return tree;

DONE_FOLD:

    /* The node has been folded into 'op' */

    // If there was an assignment update, we just morphed it into
    // a use, update the flags appropriately
    if (op->gtOper == GT_LCL_VAR)
    {
        assert(tree->OperIsAssignment() || (op->gtFlags & (GTF_VAR_USEASG | GTF_VAR_DEF)) == 0);

        op->gtFlags &= ~(GTF_VAR_USEASG | GTF_VAR_DEF);
    }

    // Splice 'op' into the linear order in place of 'tree'.
    op->gtNext = tree->gtNext;
    op->gtPrev = tree->gtPrev;

    return op;
}
13474
13475 //------------------------------------------------------------------------
13476 // gtTryRemoveBoxUpstreamEffects: given an unused value type box,
13477 //    try and remove the upstream allocation and unnecessary parts of
13478 //    the copy.
13479 //
13480 // Arguments:
13481 //    op  - the box node to optimize
13482 //    options - controls whether and how trees are modified
13483 //        (see notes)
13484 //
13485 // Return Value:
13486 //    A tree representing the original value to box, if removal
13487 //    is successful/possible (but see note). nullptr if removal fails.
13488 //
13489 // Notes:
13490 //    Value typed box gets special treatment because it has associated
13491 //    side effects that can be removed if the box result is not used.
13492 //
13493 //    By default (options == BR_REMOVE_AND_NARROW) this method will
//    try and remove unnecessary trees and will try and reduce remaining
13495 //    operations to the minimal set, possibly narrowing the width of
13496 //    loads from the box source if it is a struct.
13497 //
13498 //    To perform a trial removal, pass BR_DONT_REMOVE. This can be
13499 //    useful to determine if this optimization should only be
13500 //    performed if some other conditions hold true.
13501 //
13502 //    To remove but not alter the access to the box source, pass
13503 //    BR_REMOVE_BUT_NOT_NARROW.
13504 //
13505 //    To remove and return the tree for the type handle used for
13506 //    the boxed newobj, pass BR_REMOVE_BUT_NOT_NARROW_WANT_TYPE_HANDLE.
13507 //    This can be useful when the only part of the box that is "live"
13508 //    is its type.
13509 //
//    If removal fails, it is possible that a subsequent pass may be
13511 //    able to optimize.  Blocking side effects may now be minimized
13512 //    (null or bounds checks might have been removed) or might be
13513 //    better known (inline return placeholder updated with the actual
13514 //    return expression). So the box is perhaps best left as is to
13515 //    help trigger this re-examination.
13516
13517 GenTree* Compiler::gtTryRemoveBoxUpstreamEffects(GenTree* op, BoxRemovalOptions options)
13518 {
13519     assert(op->IsBoxedValue());
13520
13521     // grab related parts for the optimization
13522     GenTreeBox* box      = op->AsBox();
13523     GenTree*    asgStmt  = box->gtAsgStmtWhenInlinedBoxValue;
13524     GenTree*    copyStmt = box->gtCopyStmtWhenInlinedBoxValue;
13525
13526     assert(asgStmt->gtOper == GT_STMT);
13527     assert(copyStmt->gtOper == GT_STMT);
13528
13529     JITDUMP("gtTryRemoveBoxUpstreamEffects: %s to %s of BOX (valuetype)"
13530             " [%06u] (assign/newobj [%06u] copy [%06u])\n",
13531             (options == BR_DONT_REMOVE) ? "checking if it is possible" : "attempting",
13532             (options == BR_MAKE_LOCAL_COPY) ? "make local unboxed version" : "remove side effects", dspTreeID(op),
13533             dspTreeID(asgStmt), dspTreeID(copyStmt));
13534
13535     // If we don't recognize the form of the assign, bail.
13536     GenTree* asg = asgStmt->gtStmt.gtStmtExpr;
13537     if (asg->gtOper != GT_ASG)
13538     {
13539         JITDUMP(" bailing; unexpected assignment op %s\n", GenTree::OpName(asg->gtOper));
13540         return nullptr;
13541     }
13542
13543     // If we're eventually going to return the type handle, remember it now.
13544     GenTree* boxTypeHandle = nullptr;
13545     if ((options == BR_REMOVE_AND_NARROW_WANT_TYPE_HANDLE) || (options == BR_DONT_REMOVE_WANT_TYPE_HANDLE))
13546     {
13547         GenTree*   asgSrc     = asg->gtOp.gtOp2;
13548         genTreeOps asgSrcOper = asgSrc->OperGet();
13549
13550         // Allocation may be via AllocObj or via helper call, depending
13551         // on when this is invoked and whether the jit is using AllocObj
13552         // for R2R allocations.
13553         if (asgSrcOper == GT_ALLOCOBJ)
13554         {
13555             GenTreeAllocObj* allocObj = asgSrc->AsAllocObj();
13556             boxTypeHandle             = allocObj->gtOp.gtOp1;
13557         }
13558         else if (asgSrcOper == GT_CALL)
13559         {
13560             GenTreeCall* newobjCall = asgSrc->AsCall();
13561             GenTree*     newobjArgs = newobjCall->gtCallArgs;
13562
13563             // In R2R expansions the handle may not be an explicit operand to the helper,
13564             // so we can't remove the box.
13565             if (newobjArgs == nullptr)
13566             {
13567                 assert(newobjCall->IsHelperCall(this, CORINFO_HELP_READYTORUN_NEW));
13568                 JITDUMP(" bailing; newobj via R2R helper\n");
13569                 return nullptr;
13570             }
13571
13572             boxTypeHandle = newobjArgs->AsArgList()->Current();
13573         }
13574         else
13575         {
13576             unreached();
13577         }
13578
13579         assert(boxTypeHandle != nullptr);
13580     }
13581
13582     // If we don't recognize the form of the copy, bail.
13583     GenTree* copy = copyStmt->gtStmt.gtStmtExpr;
13584     if (copy->gtOper != GT_ASG)
13585     {
13586         // GT_RET_EXPR is a tolerable temporary failure.
13587         // The jit will revisit this optimization after
13588         // inlining is done.
13589         if (copy->gtOper == GT_RET_EXPR)
13590         {
13591             JITDUMP(" bailing; must wait for replacement of copy %s\n", GenTree::OpName(copy->gtOper));
13592         }
13593         else
13594         {
13595             // Anything else is a missed case we should
13596             // figure out how to handle.  One known case
13597             // is GT_COMMAs enclosing the GT_ASG we are
13598             // looking for.
13599             JITDUMP(" bailing; unexpected copy op %s\n", GenTree::OpName(copy->gtOper));
13600         }
13601         return nullptr;
13602     }
13603
13604     // Handle case where we are optimizing the box into a local copy
13605     if (options == BR_MAKE_LOCAL_COPY)
13606     {
13607         // Drill into the box to get at the box temp local and the box type
13608         GenTree* boxTemp = box->BoxOp();
13609         assert(boxTemp->IsLocal());
13610         const unsigned boxTempLcl = boxTemp->AsLclVar()->GetLclNum();
13611         assert(lvaTable[boxTempLcl].lvType == TYP_REF);
13612         CORINFO_CLASS_HANDLE boxClass = lvaTable[boxTempLcl].lvClassHnd;
13613         assert(boxClass != nullptr);
13614
13615         // Verify that the copyDst has the expected shape
13616         // (blk|obj|ind (add (boxTempLcl, ptr-size)))
13617         //
13618         // The shape here is constrained to the patterns we produce
13619         // over in impImportAndPushBox for the inlined box case.
13620         GenTree* copyDst = copy->gtOp.gtOp1;
13621
13622         if (!copyDst->OperIs(GT_BLK, GT_IND, GT_OBJ))
13623         {
13624             JITDUMP("Unexpected copy dest operator %s\n", GenTree::OpName(copyDst->gtOper));
13625             return nullptr;
13626         }
13627
13628         GenTree* copyDstAddr = copyDst->gtOp.gtOp1;
13629         if (copyDstAddr->OperGet() != GT_ADD)
13630         {
13631             JITDUMP("Unexpected copy dest address tree\n");
13632             return nullptr;
13633         }
13634
13635         GenTree* copyDstAddrOp1 = copyDstAddr->gtOp.gtOp1;
13636         if ((copyDstAddrOp1->OperGet() != GT_LCL_VAR) || (copyDstAddrOp1->gtLclVarCommon.gtLclNum != boxTempLcl))
13637         {
13638             JITDUMP("Unexpected copy dest address 1st addend\n");
13639             return nullptr;
13640         }
13641
13642         GenTree* copyDstAddrOp2 = copyDstAddr->gtOp.gtOp2;
13643         if (!copyDstAddrOp2->IsIntegralConst(TARGET_POINTER_SIZE))
13644         {
13645             JITDUMP("Unexpected copy dest address 2nd addend\n");
13646             return nullptr;
13647         }
13648
13649         // Screening checks have all passed. Do the transformation.
13650         //
13651         // Retype the box temp to be a struct
13652         JITDUMP("Retyping box temp V%02u to struct %s\n", boxTempLcl, eeGetClassName(boxClass));
13653         lvaTable[boxTempLcl].lvType   = TYP_UNDEF;
13654         const bool isUnsafeValueClass = false;
13655         lvaSetStruct(boxTempLcl, boxClass, isUnsafeValueClass);
13656
13657         // Remove the newobj and assigment to box temp
13658         JITDUMP("Bashing NEWOBJ [%06u] to NOP\n", dspTreeID(asg));
13659         asg->gtBashToNOP();
13660
13661         // Update the copy from the value to be boxed to the box temp
13662         GenTree* newDst     = gtNewOperNode(GT_ADDR, TYP_BYREF, gtNewLclvNode(boxTempLcl, TYP_STRUCT));
13663         copyDst->gtOp.gtOp1 = newDst;
13664
13665         // Return the address of the now-struct typed box temp
13666         GenTree* retValue = gtNewOperNode(GT_ADDR, TYP_BYREF, gtNewLclvNode(boxTempLcl, TYP_STRUCT));
13667
13668         return retValue;
13669     }
13670
13671     // If the copy is a struct copy, make sure we know how to isolate
13672     // any source side effects.
13673     GenTree* copySrc = copy->gtOp.gtOp2;
13674
13675     // If the copy source is from a pending inline, wait for it to resolve.
13676     if (copySrc->gtOper == GT_RET_EXPR)
13677     {
13678         JITDUMP(" bailing; must wait for replacement of copy source %s\n", GenTree::OpName(copySrc->gtOper));
13679         return nullptr;
13680     }
13681
13682     bool hasSrcSideEffect = false;
13683     bool isStructCopy     = false;
13684
13685     if (gtTreeHasSideEffects(copySrc, GTF_SIDE_EFFECT))
13686     {
13687         hasSrcSideEffect = true;
13688
13689         if (copySrc->gtType == TYP_STRUCT)
13690         {
13691             isStructCopy = true;
13692
13693             if ((copySrc->gtOper != GT_OBJ) && (copySrc->gtOper != GT_IND) && (copySrc->gtOper != GT_FIELD))
13694             {
13695                 // We don't know how to handle other cases, yet.
13696                 JITDUMP(" bailing; unexpected copy source struct op with side effect %s\n",
13697                         GenTree::OpName(copySrc->gtOper));
13698                 return nullptr;
13699             }
13700         }
13701     }
13702
13703     // If this was a trial removal, we're done.
13704     if (options == BR_DONT_REMOVE)
13705     {
13706         return copySrc;
13707     }
13708
13709     if (options == BR_DONT_REMOVE_WANT_TYPE_HANDLE)
13710     {
13711         return boxTypeHandle;
13712     }
13713
13714     // Otherwise, proceed with the optimization.
13715     //
13716     // Change the assignment expression to a NOP.
13717     JITDUMP("\nBashing NEWOBJ [%06u] to NOP\n", dspTreeID(asg));
13718     asg->gtBashToNOP();
13719
13720     // Change the copy expression so it preserves key
13721     // source side effects.
13722     JITDUMP("\nBashing COPY [%06u]", dspTreeID(copy));
13723
13724     if (!hasSrcSideEffect)
13725     {
13726         // If there were no copy source side effects just bash
13727         // the copy to a NOP.
13728         copy->gtBashToNOP();
13729         JITDUMP(" to NOP; no source side effects.\n");
13730     }
13731     else if (!isStructCopy)
13732     {
13733         // For scalar types, go ahead and produce the
13734         // value as the copy is fairly cheap and likely
13735         // the optimizer can trim things down to just the
13736         // minimal side effect parts.
13737         copyStmt->gtStmt.gtStmtExpr = copySrc;
13738         JITDUMP(" to scalar read via [%06u]\n", dspTreeID(copySrc));
13739     }
13740     else
13741     {
13742         // For struct types read the first byte of the
13743         // source struct; there's no need to read the
13744         // entire thing, and no place to put it.
13745         assert(copySrc->gtOper == GT_OBJ || copySrc->gtOper == GT_IND || copySrc->gtOper == GT_FIELD);
13746         copyStmt->gtStmt.gtStmtExpr = copySrc;
13747
13748         if (options == BR_REMOVE_AND_NARROW || options == BR_REMOVE_AND_NARROW_WANT_TYPE_HANDLE)
13749         {
13750             JITDUMP(" to read first byte of struct via modified [%06u]\n", dspTreeID(copySrc));
13751             copySrc->ChangeOper(GT_IND);
13752             copySrc->gtType = TYP_BYTE;
13753         }
13754         else
13755         {
13756             JITDUMP(" to read entire struct via modified [%06u]\n", dspTreeID(copySrc));
13757         }
13758     }
13759
13760     if (fgStmtListThreaded)
13761     {
13762         fgSetStmtSeq(asgStmt);
13763         fgSetStmtSeq(copyStmt);
13764     }
13765
13766     // Box effects were successfully optimized.
13767
13768     if (options == BR_REMOVE_AND_NARROW_WANT_TYPE_HANDLE)
13769     {
13770         return boxTypeHandle;
13771     }
13772     else
13773     {
13774         return copySrc;
13775     }
13776 }
13777
13778 //------------------------------------------------------------------------
13779 // gtOptimizeEnumHasFlag: given the operands for a call to Enum.HasFlag,
13780 //    try and optimize the call to a simple and/compare tree.
13781 //
13782 // Arguments:
13783 //    thisOp  - first argument to the call
13784 //    flagOp  - second argument to the call
13785 //
13786 // Return Value:
//    A new cmp/and tree if successful. nullptr on failure.
13788 //
13789 // Notes:
13790 //    If successful, may allocate new temps and modify connected
13791 //    statements.
13792
GenTree* Compiler::gtOptimizeEnumHasFlag(GenTree* thisOp, GenTree* flagOp)
{
    JITDUMP("Considering optimizing call to Enum.HasFlag....\n");

    // Operands must be boxes: the optimization works by undoing the
    // boxing of both operands and operating on the raw enum values.
    if (!thisOp->IsBoxedValue() || !flagOp->IsBoxedValue())
    {
        JITDUMP("bailing, need both inputs to be BOXes\n");
        return nullptr;
    }

    // Operands must have same type. First, fetch the class handle of 'this'.
    bool                 isExactThis   = false;
    bool                 isNonNullThis = false;
    CORINFO_CLASS_HANDLE thisHnd       = gtGetClassHandle(thisOp, &isExactThis, &isNonNullThis);

    if (thisHnd == nullptr)
    {
        JITDUMP("bailing, can't find type for 'this' operand\n");
        return nullptr;
    }

    // A boxed thisOp should have exact type and non-null instance
    assert(isExactThis);
    assert(isNonNullThis);

    bool                 isExactFlag   = false;
    bool                 isNonNullFlag = false;
    CORINFO_CLASS_HANDLE flagHnd       = gtGetClassHandle(flagOp, &isExactFlag, &isNonNullFlag);

    if (flagHnd == nullptr)
    {
        JITDUMP("bailing, can't find type for 'flag' operand\n");
        return nullptr;
    }

    // A boxed flagOp should have exact type and non-null instance
    assert(isExactFlag);
    assert(isNonNullFlag);

    // Enum.HasFlag throws when the operand types differ; we only optimize
    // the case where they match, leaving the call (and its throw behavior)
    // in place otherwise.
    if (flagHnd != thisHnd)
    {
        JITDUMP("bailing, operand types differ\n");
        return nullptr;
    }

    // If we have a shared type instance we can't safely check type
    // equality, so bail.
    DWORD classAttribs = info.compCompHnd->getClassAttribs(thisHnd);
    if (classAttribs & CORINFO_FLG_SHAREDINST)
    {
        JITDUMP("bailing, have shared instance type\n");
        return nullptr;
    }

    // Simulate removing the box for thisOp. We need to know that it can
    // be safely removed before we can optimize, because once we start
    // removing boxes for real (below) we cannot back out.
    GenTree* thisVal = gtTryRemoveBoxUpstreamEffects(thisOp, BR_DONT_REMOVE);
    if (thisVal == nullptr)
    {
        // Note we may fail here if the this operand comes from
        // a call. We should be able to retry this post-inlining.
        JITDUMP("bailing, can't undo box of 'this' operand\n");
        return nullptr;
    }

    // Remove the flag box for real. Note the 'this' box has not been
    // touched yet, so bailing out here leaves the IR intact.
    GenTree* flagVal = gtTryRemoveBoxUpstreamEffects(flagOp, BR_REMOVE_BUT_NOT_NARROW);
    if (flagVal == nullptr)
    {
        // Note we may fail here if the flag operand comes from
        // a call. We should be able to retry this post-inlining.
        JITDUMP("bailing, can't undo box of 'flag' operand\n");
        return nullptr;
    }

    // Yes, both boxes can be cleaned up. Optimize.
    JITDUMP("Optimizing call to Enum.HasFlag\n");

    // Undo the boxing of thisOp and prepare to operate directly
    // on the original enum values.
    thisVal = gtTryRemoveBoxUpstreamEffects(thisOp, BR_REMOVE_BUT_NOT_NARROW);

    // Our trial removal above should guarantee successful removal here.
    assert(thisVal != nullptr);

    // We should have a consistent view of the type
    var_types type = thisVal->TypeGet();
    assert(type == flagVal->TypeGet());

    // The thisVal and flagVal trees come from earlier statements.
    //
    // Unless they are invariant values, we need to evaluate them both
    // to temps at those points to safely transmit the values here.
    //
    // Also we need to use the flag twice, so we need two trees for it.
    GenTree* thisValOpt     = nullptr;
    GenTree* flagValOpt     = nullptr;
    GenTree* flagValOptCopy = nullptr;

    if (thisVal->IsIntegralConst())
    {
        // Invariant value: just clone it for use in the new tree.
        thisValOpt = gtClone(thisVal);
        assert(thisValOpt != nullptr);
    }
    else
    {
        // Repurpose the box's copy statement to assign the unboxed value
        // to a fresh temp at its original evaluation point, then read the
        // temp here.
        const unsigned thisTmp         = lvaGrabTemp(true DEBUGARG("Enum:HasFlag this temp"));
        GenTree*       thisAsg         = gtNewTempAssign(thisTmp, thisVal);
        GenTree*       thisAsgStmt     = thisOp->AsBox()->gtCopyStmtWhenInlinedBoxValue;
        thisAsgStmt->gtStmt.gtStmtExpr = thisAsg;
        thisValOpt                     = gtNewLclvNode(thisTmp, type);
    }

    if (flagVal->IsIntegralConst())
    {
        // Invariant value: clone twice, since the flag appears in both
        // the AND and the compare of the replacement tree.
        flagValOpt = gtClone(flagVal);
        assert(flagValOpt != nullptr);
        flagValOptCopy = gtClone(flagVal);
        assert(flagValOptCopy != nullptr);
    }
    else
    {
        // As for 'this' above: capture the flag value in a temp at its
        // original evaluation point, and read the temp twice here.
        const unsigned flagTmp         = lvaGrabTemp(true DEBUGARG("Enum:HasFlag flag temp"));
        GenTree*       flagAsg         = gtNewTempAssign(flagTmp, flagVal);
        GenTree*       flagAsgStmt     = flagOp->AsBox()->gtCopyStmtWhenInlinedBoxValue;
        flagAsgStmt->gtStmt.gtStmtExpr = flagAsg;
        flagValOpt                     = gtNewLclvNode(flagTmp, type);
        flagValOptCopy                 = gtNewLclvNode(flagTmp, type);
    }

    // Turn the call into (thisValTmp & flagTmp) == flagTmp.
    GenTree* andTree = gtNewOperNode(GT_AND, type, thisValOpt, flagValOpt);
    GenTree* cmpTree = gtNewOperNode(GT_EQ, TYP_INT, andTree, flagValOptCopy);

    JITDUMP("Optimized call to Enum.HasFlag\n");

    return cmpTree;
}
13931
13932 /*****************************************************************************
13933  *
13934  *  Fold the given constant tree.
13935  */
13936
13937 #ifdef _PREFAST_
13938 #pragma warning(push)
13939 #pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
13940 #endif
13941 GenTree* Compiler::gtFoldExprConst(GenTree* tree)
13942 {
13943     unsigned kind = tree->OperKind();
13944
13945     SSIZE_T       i1, i2, itemp;
13946     INT64         lval1, lval2, ltemp;
13947     float         f1, f2;
13948     double        d1, d2;
13949     var_types     switchType;
13950     FieldSeqNode* fieldSeq = FieldSeqStore::NotAField(); // default unless we override it when folding
13951
13952     assert(kind & (GTK_UNOP | GTK_BINOP));
13953
13954     GenTree* op1 = tree->gtOp.gtOp1;
13955     GenTree* op2 = tree->gtGetOp2IfPresent();
13956
13957     if (!opts.OptEnabled(CLFLG_CONSTANTFOLD))
13958     {
13959         return tree;
13960     }
13961
13962     if (tree->OperGet() == GT_NOP)
13963     {
13964         return tree;
13965     }
13966
13967 #ifdef FEATURE_SIMD
13968     if (tree->OperGet() == GT_SIMD)
13969     {
13970         return tree;
13971     }
13972 #endif // FEATURE_SIMD
13973
13974     if (tree->gtOper == GT_ALLOCOBJ)
13975     {
13976         return tree;
13977     }
13978
13979     if (tree->gtOper == GT_RUNTIMELOOKUP)
13980     {
13981         return tree;
13982     }
13983
13984     if (kind & GTK_UNOP)
13985     {
13986         assert(op1->OperKind() & GTK_CONST);
13987
13988         switch (op1->gtType)
13989         {
13990             case TYP_INT:
13991
13992                 /* Fold constant INT unary operator */
13993                 assert(op1->gtIntCon.ImmedValCanBeFolded(this, tree->OperGet()));
13994                 i1 = (int)op1->gtIntCon.gtIconVal;
13995
13996                 // If we fold a unary oper, then the folded constant
13997                 // is considered a ConstantIndexField if op1 was one
13998                 //
13999
14000                 if ((op1->gtIntCon.gtFieldSeq != nullptr) && op1->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
14001                 {
14002                     fieldSeq = op1->gtIntCon.gtFieldSeq;
14003                 }
14004
14005                 switch (tree->gtOper)
14006                 {
14007                     case GT_NOT:
14008                         i1 = ~i1;
14009                         break;
14010
14011                     case GT_NEG:
14012 #ifdef LEGACY_BACKEND
14013                     case GT_CHS:
14014 #endif
14015                         i1 = -i1;
14016                         break;
14017
14018                     case GT_CAST:
14019                         // assert (genActualType(tree->CastToType()) == tree->gtType);
14020                         switch (tree->CastToType())
14021                         {
14022                             case TYP_BYTE:
14023                                 itemp = INT32(INT8(i1));
14024                                 goto CHK_OVF;
14025
14026                             case TYP_SHORT:
14027                                 itemp = INT32(INT16(i1));
14028                             CHK_OVF:
14029                                 if (tree->gtOverflow() && ((itemp != i1) || ((tree->gtFlags & GTF_UNSIGNED) && i1 < 0)))
14030                                 {
14031                                     goto INT_OVF;
14032                                 }
14033                                 i1 = itemp;
14034                                 goto CNS_INT;
14035
14036                             case TYP_USHORT:
14037                                 itemp = INT32(UINT16(i1));
14038                                 if (tree->gtOverflow())
14039                                 {
14040                                     if (itemp != i1)
14041                                     {
14042                                         goto INT_OVF;
14043                                     }
14044                                 }
14045                                 i1 = itemp;
14046                                 goto CNS_INT;
14047
14048                             case TYP_BOOL:
14049                             case TYP_UBYTE:
14050                                 itemp = INT32(UINT8(i1));
14051                                 if (tree->gtOverflow())
14052                                 {
14053                                     if (itemp != i1)
14054                                     {
14055                                         goto INT_OVF;
14056                                     }
14057                                 }
14058                                 i1 = itemp;
14059                                 goto CNS_INT;
14060
14061                             case TYP_UINT:
14062                                 if (!(tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && i1 < 0)
14063                                 {
14064                                     goto INT_OVF;
14065                                 }
14066                                 goto CNS_INT;
14067
14068                             case TYP_INT:
14069                                 if ((tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && i1 < 0)
14070                                 {
14071                                     goto INT_OVF;
14072                                 }
14073                                 goto CNS_INT;
14074
14075                             case TYP_ULONG:
14076                                 if (tree->IsUnsigned())
14077                                 {
14078                                     lval1 = UINT64(UINT32(i1));
14079                                 }
14080                                 else
14081                                 {
14082                                     if (tree->gtOverflow() && (i1 < 0))
14083                                     {
14084                                         goto LNG_OVF;
14085                                     }
14086                                     lval1 = UINT64(INT32(i1));
14087                                 }
14088                                 goto CNS_LONG;
14089
14090                             case TYP_LONG:
14091                                 if (tree->IsUnsigned())
14092                                 {
14093                                     lval1 = INT64(UINT32(i1));
14094                                 }
14095                                 else
14096                                 {
14097                                     lval1 = INT64(INT32(i1));
14098                                 }
14099                                 goto CNS_LONG;
14100
14101                             case TYP_FLOAT:
14102                                 if (tree->gtFlags & GTF_UNSIGNED)
14103                                 {
14104                                     f1 = forceCastToFloat(UINT32(i1));
14105                                 }
14106                                 else
14107                                 {
14108                                     f1 = forceCastToFloat(INT32(i1));
14109                                 }
14110                                 d1 = f1;
14111                                 goto CNS_DOUBLE;
14112
14113                             case TYP_DOUBLE:
14114                                 if (tree->gtFlags & GTF_UNSIGNED)
14115                                 {
14116                                     d1 = (double)UINT32(i1);
14117                                 }
14118                                 else
14119                                 {
14120                                     d1 = (double)INT32(i1);
14121                                 }
14122                                 goto CNS_DOUBLE;
14123
14124                             default:
14125                                 assert(!"BAD_TYP");
14126                                 break;
14127                         }
14128                         return tree;
14129
14130                     default:
14131                         return tree;
14132                 }
14133
14134                 goto CNS_INT;
14135
14136             case TYP_LONG:
14137
14138                 /* Fold constant LONG unary operator */
14139
14140                 assert(op1->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()));
14141                 lval1 = op1->gtIntConCommon.LngValue();
14142
14143                 switch (tree->gtOper)
14144                 {
14145                     case GT_NOT:
14146                         lval1 = ~lval1;
14147                         break;
14148
14149                     case GT_NEG:
14150 #ifdef LEGACY_BACKEND
14151                     case GT_CHS:
14152 #endif
14153                         lval1 = -lval1;
14154                         break;
14155
14156                     case GT_CAST:
14157                         assert(genActualType(tree->CastToType()) == tree->gtType);
14158                         switch (tree->CastToType())
14159                         {
14160                             case TYP_BYTE:
14161                                 i1 = INT32(INT8(lval1));
14162                                 goto CHECK_INT_OVERFLOW;
14163
14164                             case TYP_SHORT:
14165                                 i1 = INT32(INT16(lval1));
14166                                 goto CHECK_INT_OVERFLOW;
14167
14168                             case TYP_USHORT:
14169                                 i1 = INT32(UINT16(lval1));
14170                                 goto CHECK_UINT_OVERFLOW;
14171
14172                             case TYP_UBYTE:
14173                                 i1 = INT32(UINT8(lval1));
14174                                 goto CHECK_UINT_OVERFLOW;
14175
14176                             case TYP_INT:
14177                                 i1 = INT32(lval1);
14178
14179                             CHECK_INT_OVERFLOW:
14180                                 if (tree->gtOverflow())
14181                                 {
14182                                     if (i1 != lval1)
14183                                     {
14184                                         goto INT_OVF;
14185                                     }
14186                                     if ((tree->gtFlags & GTF_UNSIGNED) && i1 < 0)
14187                                     {
14188                                         goto INT_OVF;
14189                                     }
14190                                 }
14191                                 goto CNS_INT;
14192
14193                             case TYP_UINT:
14194                                 i1 = UINT32(lval1);
14195
14196                             CHECK_UINT_OVERFLOW:
14197                                 if (tree->gtOverflow() && UINT32(i1) != lval1)
14198                                 {
14199                                     goto INT_OVF;
14200                                 }
14201                                 goto CNS_INT;
14202
14203                             case TYP_ULONG:
14204                                 if (!(tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && lval1 < 0)
14205                                 {
14206                                     goto LNG_OVF;
14207                                 }
14208                                 goto CNS_LONG;
14209
14210                             case TYP_LONG:
14211                                 if ((tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && lval1 < 0)
14212                                 {
14213                                     goto LNG_OVF;
14214                                 }
14215                                 goto CNS_LONG;
14216
14217                             case TYP_FLOAT:
14218                             case TYP_DOUBLE:
14219                                 if ((tree->gtFlags & GTF_UNSIGNED) && lval1 < 0)
14220                                 {
14221                                     d1 = FloatingPointUtils::convertUInt64ToDouble((unsigned __int64)lval1);
14222                                 }
14223                                 else
14224                                 {
14225                                     d1 = (double)lval1;
14226                                 }
14227
14228                                 if (tree->CastToType() == TYP_FLOAT)
14229                                 {
14230                                     f1 = forceCastToFloat(d1); // truncate precision
14231                                     d1 = f1;
14232                                 }
14233                                 goto CNS_DOUBLE;
14234                             default:
14235                                 assert(!"BAD_TYP");
14236                                 break;
14237                         }
14238                         return tree;
14239
14240                     default:
14241                         return tree;
14242                 }
14243
14244                 goto CNS_LONG;
14245
14246             case TYP_FLOAT:
14247             case TYP_DOUBLE:
14248                 assert(op1->gtOper == GT_CNS_DBL);
14249
14250                 /* Fold constant DOUBLE unary operator */
14251
14252                 d1 = op1->gtDblCon.gtDconVal;
14253
14254                 switch (tree->gtOper)
14255                 {
14256                     case GT_NEG:
14257 #ifdef LEGACY_BACKEND
14258                     case GT_CHS:
14259 #endif
14260                         d1 = -d1;
14261                         break;
14262
14263                     case GT_CAST:
14264
14265                         if (tree->gtOverflowEx())
14266                         {
14267                             return tree;
14268                         }
14269
14270                         assert(genActualType(tree->CastToType()) == tree->gtType);
14271
14272                         if ((op1->gtType == TYP_FLOAT && !_finite(forceCastToFloat(d1))) ||
14273                             (op1->gtType == TYP_DOUBLE && !_finite(d1)))
14274                         {
14275                             // The floating point constant is not finite.  The ECMA spec says, in
14276                             // III 3.27, that "...if overflow occurs converting a floating point type
14277                             // to an integer, ..., the value returned is unspecified."  However, it would
14278                             // at least be desirable to have the same value returned for casting an overflowing
14279                             // constant to an int as would obtained by passing that constant as a parameter
14280                             // then casting that parameter to an int type.  We will assume that the C compiler's
14281                             // cast logic will yield the desired result (and trust testing to tell otherwise).
14282                             // Cross-compilation is an issue here; if that becomes an important scenario, we should
14283                             // capture the target-specific values of overflow casts to the various integral types as
14284                             // constants in a target-specific function.
14285                             CLANG_FORMAT_COMMENT_ANCHOR;
14286
14287                             // Don't fold conversions of +inf/-inf to integral value on all platforms
14288                             // as the value returned by JIT helper doesn't match with the C compiler's cast result.
14289                             // We want the behavior to be same with or without folding.
14290                             return tree;
14291                         }
14292
14293                         if (d1 <= -1.0 && varTypeIsUnsigned(tree->CastToType()))
14294                         {
14295                             // Don't fold conversions of these cases becasue the result is unspecified per ECMA spec
14296                             // and the native math doing the fold doesn't match the run-time computation on all
14297                             // platforms.
14298                             // We want the behavior to be same with or without folding.
14299                             return tree;
14300                         }
14301
14302                         switch (tree->CastToType())
14303                         {
14304                             case TYP_BYTE:
14305                                 i1 = INT32(INT8(d1));
14306                                 goto CNS_INT;
14307
14308                             case TYP_SHORT:
14309                                 i1 = INT32(INT16(d1));
14310                                 goto CNS_INT;
14311
14312                             case TYP_USHORT:
14313                                 i1 = INT32(UINT16(d1));
14314                                 goto CNS_INT;
14315
14316                             case TYP_UBYTE:
14317                                 i1 = INT32(UINT8(d1));
14318                                 goto CNS_INT;
14319
14320                             case TYP_INT:
14321                                 i1 = INT32(d1);
14322                                 goto CNS_INT;
14323
14324                             case TYP_UINT:
14325                                 i1 = forceCastToUInt32(d1);
14326                                 goto CNS_INT;
14327
14328                             case TYP_LONG:
14329                                 lval1 = INT64(d1);
14330                                 goto CNS_LONG;
14331
14332                             case TYP_ULONG:
14333                                 lval1 = FloatingPointUtils::convertDoubleToUInt64(d1);
14334                                 goto CNS_LONG;
14335
14336                             case TYP_FLOAT:
14337                                 d1 = forceCastToFloat(d1);
14338                                 goto CNS_DOUBLE;
14339
14340                             case TYP_DOUBLE:
14341                                 if (op1->gtType == TYP_FLOAT)
14342                                 {
14343                                     d1 = forceCastToFloat(d1); // truncate precision
14344                                 }
14345                                 goto CNS_DOUBLE; // redundant cast
14346
14347                             default:
14348                                 assert(!"BAD_TYP");
14349                                 break;
14350                         }
14351                         return tree;
14352
14353                     default:
14354                         return tree;
14355                 }
14356                 goto CNS_DOUBLE;
14357
14358             default:
14359                 /* not a foldable typ - e.g. RET const */
14360                 return tree;
14361         }
14362     }
14363
14364     /* We have a binary operator */
14365
14366     assert(kind & GTK_BINOP);
14367     assert(op2);
14368     assert(op1->OperKind() & GTK_CONST);
14369     assert(op2->OperKind() & GTK_CONST);
14370
14371     if (tree->gtOper == GT_COMMA)
14372     {
14373         return op2;
14374     }
14375
14376     if (tree->OperIsAnyList())
14377     {
14378         return tree;
14379     }
14380
14381     switchType = op1->gtType;
14382
14383     // Normally we will just switch on op1 types, but for the case where
14384     //  only op2 is a GC type and op1 is not a GC type, we use the op2 type.
14385     //  This makes us handle this as a case of folding for GC type.
14386     //
14387     if (varTypeIsGC(op2->gtType) && !varTypeIsGC(op1->gtType))
14388     {
14389         switchType = op2->gtType;
14390     }
14391
14392     switch (switchType)
14393     {
14394
14395         /*-------------------------------------------------------------------------
14396          * Fold constant REF of BYREF binary operator
14397          * These can only be comparisons or null pointers
14398          */
14399
14400         case TYP_REF:
14401
14402             /* String nodes are an RVA at this point */
14403
14404             if (op1->gtOper == GT_CNS_STR || op2->gtOper == GT_CNS_STR)
14405             {
14406                 return tree;
14407             }
14408
14409             __fallthrough;
14410
14411         case TYP_BYREF:
14412
14413             i1 = op1->gtIntConCommon.IconValue();
14414             i2 = op2->gtIntConCommon.IconValue();
14415
14416             switch (tree->gtOper)
14417             {
14418                 case GT_EQ:
14419                     i1 = (i1 == i2);
14420                     goto FOLD_COND;
14421
14422                 case GT_NE:
14423                     i1 = (i1 != i2);
14424                     goto FOLD_COND;
14425
14426                 case GT_ADD:
14427                     noway_assert(tree->gtType != TYP_REF);
14428                     // We only fold a GT_ADD that involves a null reference.
14429                     if (((op1->TypeGet() == TYP_REF) && (i1 == 0)) || ((op2->TypeGet() == TYP_REF) && (i2 == 0)))
14430                     {
14431 #ifdef DEBUG
14432                         if (verbose)
14433                         {
14434                             printf("\nFolding operator with constant nodes into a constant:\n");
14435                             gtDispTree(tree);
14436                         }
14437 #endif
14438                         // Fold into GT_IND of null byref
14439                         tree->ChangeOperConst(GT_CNS_INT);
14440                         tree->gtType              = TYP_BYREF;
14441                         tree->gtIntCon.gtIconVal  = 0;
14442                         tree->gtIntCon.gtFieldSeq = FieldSeqStore::NotAField();
14443                         if (vnStore != nullptr)
14444                         {
14445                             fgValueNumberTreeConst(tree);
14446                         }
14447 #ifdef DEBUG
14448                         if (verbose)
14449                         {
14450                             printf("\nFolded to null byref:\n");
14451                             gtDispTree(tree);
14452                         }
14453 #endif
14454                         goto DONE;
14455                     }
14456
14457                 default:
14458                     break;
14459             }
14460
14461             return tree;
14462
14463         /*-------------------------------------------------------------------------
14464          * Fold constant INT binary operator
14465          */
14466
14467         case TYP_INT:
14468
14469             if (tree->OperIsCompare() && (tree->gtType == TYP_BYTE))
14470             {
14471                 tree->gtType = TYP_INT;
14472             }
14473
14474             assert(tree->gtType == TYP_INT || varTypeIsGC(tree->TypeGet()) || tree->gtOper == GT_MKREFANY);
14475
14476             // No GC pointer types should be folded here...
14477             //
14478             assert(!varTypeIsGC(op1->gtType) && !varTypeIsGC(op2->gtType));
14479
14480             assert(op1->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()));
14481             assert(op2->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()));
14482
14483             i1 = op1->gtIntConCommon.IconValue();
14484             i2 = op2->gtIntConCommon.IconValue();
14485
14486             switch (tree->gtOper)
14487             {
14488                 case GT_EQ:
14489                     i1 = (INT32(i1) == INT32(i2));
14490                     break;
14491                 case GT_NE:
14492                     i1 = (INT32(i1) != INT32(i2));
14493                     break;
14494
14495                 case GT_LT:
14496                     if (tree->gtFlags & GTF_UNSIGNED)
14497                     {
14498                         i1 = (UINT32(i1) < UINT32(i2));
14499                     }
14500                     else
14501                     {
14502                         i1 = (INT32(i1) < INT32(i2));
14503                     }
14504                     break;
14505
14506                 case GT_LE:
14507                     if (tree->gtFlags & GTF_UNSIGNED)
14508                     {
14509                         i1 = (UINT32(i1) <= UINT32(i2));
14510                     }
14511                     else
14512                     {
14513                         i1 = (INT32(i1) <= INT32(i2));
14514                     }
14515                     break;
14516
14517                 case GT_GE:
14518                     if (tree->gtFlags & GTF_UNSIGNED)
14519                     {
14520                         i1 = (UINT32(i1) >= UINT32(i2));
14521                     }
14522                     else
14523                     {
14524                         i1 = (INT32(i1) >= INT32(i2));
14525                     }
14526                     break;
14527
14528                 case GT_GT:
14529                     if (tree->gtFlags & GTF_UNSIGNED)
14530                     {
14531                         i1 = (UINT32(i1) > UINT32(i2));
14532                     }
14533                     else
14534                     {
14535                         i1 = (INT32(i1) > INT32(i2));
14536                     }
14537                     break;
14538
14539                 case GT_ADD:
14540                     itemp = i1 + i2;
14541                     if (tree->gtOverflow())
14542                     {
14543                         if (tree->gtFlags & GTF_UNSIGNED)
14544                         {
14545                             if (INT64(UINT32(itemp)) != INT64(UINT32(i1)) + INT64(UINT32(i2)))
14546                             {
14547                                 goto INT_OVF;
14548                             }
14549                         }
14550                         else
14551                         {
14552                             if (INT64(INT32(itemp)) != INT64(INT32(i1)) + INT64(INT32(i2)))
14553                             {
14554                                 goto INT_OVF;
14555                             }
14556                         }
14557                     }
14558                     i1       = itemp;
14559                     fieldSeq = GetFieldSeqStore()->Append(op1->gtIntCon.gtFieldSeq, op2->gtIntCon.gtFieldSeq);
14560                     break;
14561                 case GT_SUB:
14562                     itemp = i1 - i2;
14563                     if (tree->gtOverflow())
14564                     {
14565                         if (tree->gtFlags & GTF_UNSIGNED)
14566                         {
14567                             if (INT64(UINT32(itemp)) != ((INT64)((UINT32)i1) - (INT64)((UINT32)i2)))
14568                             {
14569                                 goto INT_OVF;
14570                             }
14571                         }
14572                         else
14573                         {
14574                             if (INT64(INT32(itemp)) != INT64(INT32(i1)) - INT64(INT32(i2)))
14575                             {
14576                                 goto INT_OVF;
14577                             }
14578                         }
14579                     }
14580                     i1 = itemp;
14581                     break;
14582                 case GT_MUL:
14583                     itemp = i1 * i2;
14584                     if (tree->gtOverflow())
14585                     {
14586                         if (tree->gtFlags & GTF_UNSIGNED)
14587                         {
14588                             if (INT64(UINT32(itemp)) != ((INT64)((UINT32)i1) * (INT64)((UINT32)i2)))
14589                             {
14590                                 goto INT_OVF;
14591                             }
14592                         }
14593                         else
14594                         {
14595                             if (INT64(INT32(itemp)) != INT64(INT32(i1)) * INT64(INT32(i2)))
14596                             {
14597                                 goto INT_OVF;
14598                             }
14599                         }
14600                     }
14601                     // For the very particular case of the "constant array index" pseudo-field, we
14602                     // assume that multiplication is by the field width, and preserves that field.
14603                     // This could obviously be made more robust by a more complicated set of annotations...
14604                     if ((op1->gtIntCon.gtFieldSeq != nullptr) && op1->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
14605                     {
14606                         assert(op2->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField());
14607                         fieldSeq = op1->gtIntCon.gtFieldSeq;
14608                     }
14609                     else if ((op2->gtIntCon.gtFieldSeq != nullptr) &&
14610                              op2->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
14611                     {
14612                         assert(op1->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField());
14613                         fieldSeq = op2->gtIntCon.gtFieldSeq;
14614                     }
14615                     i1 = itemp;
14616                     break;
14617
14618                 case GT_OR:
14619                     i1 |= i2;
14620                     break;
14621                 case GT_XOR:
14622                     i1 ^= i2;
14623                     break;
14624                 case GT_AND:
14625                     i1 &= i2;
14626                     break;
14627
14628                 case GT_LSH:
14629                     i1 <<= (i2 & 0x1f);
14630                     break;
14631                 case GT_RSH:
14632                     i1 >>= (i2 & 0x1f);
14633                     break;
14634                 case GT_RSZ:
14635                     /* logical shift -> make it unsigned to not propagate the sign bit */
14636                     i1 = UINT32(i1) >> (i2 & 0x1f);
14637                     break;
14638                 case GT_ROL:
14639                     i1 = (i1 << (i2 & 0x1f)) | (UINT32(i1) >> ((32 - i2) & 0x1f));
14640                     break;
14641                 case GT_ROR:
14642                     i1 = (i1 << ((32 - i2) & 0x1f)) | (UINT32(i1) >> (i2 & 0x1f));
14643                     break;
14644
14645                 /* DIV and MOD can generate an INT 0 - if division by 0
14646                  * or overflow - when dividing MIN by -1 */
14647
14648                 case GT_DIV:
14649                 case GT_MOD:
14650                 case GT_UDIV:
14651                 case GT_UMOD:
14652                     if (INT32(i2) == 0)
14653                     {
14654                         // Division by zero:
14655                         // We have to evaluate this expression and throw an exception
14656                         return tree;
14657                     }
14658                     else if ((INT32(i2) == -1) && (UINT32(i1) == 0x80000000))
14659                     {
14660                         // Overflow Division:
14661                         // We have to evaluate this expression and throw an exception
14662                         return tree;
14663                     }
14664
14665                     if (tree->gtOper == GT_DIV)
14666                     {
14667                         i1 = INT32(i1) / INT32(i2);
14668                     }
14669                     else if (tree->gtOper == GT_MOD)
14670                     {
14671                         i1 = INT32(i1) % INT32(i2);
14672                     }
14673                     else if (tree->gtOper == GT_UDIV)
14674                     {
14675                         i1 = UINT32(i1) / UINT32(i2);
14676                     }
14677                     else
14678                     {
14679                         assert(tree->gtOper == GT_UMOD);
14680                         i1 = UINT32(i1) % UINT32(i2);
14681                     }
14682                     break;
14683
14684                 default:
14685                     return tree;
14686             }
14687
14688         /* We get here after folding to a GT_CNS_INT type
14689          * change the node to the new type / value and make sure the node sizes are OK */
14690         CNS_INT:
14691         FOLD_COND:
14692
14693 #ifdef DEBUG
14694             if (verbose)
14695             {
14696                 printf("\nFolding operator with constant nodes into a constant:\n");
14697                 gtDispTree(tree);
14698             }
14699 #endif
14700
14701 #ifdef _TARGET_64BIT_
14702             // Some operations are performed as 64 bit instead of 32 bit so the upper 32 bits
14703             // need to be discarded. Since constant values are stored as ssize_t and the node
14704             // has TYP_INT the result needs to be sign extended rather than zero extended.
14705             i1 = INT32(i1);
14706 #endif // _TARGET_64BIT_
14707
14708             /* Also all conditional folding jumps here since the node hanging from
14709              * GT_JTRUE has to be a GT_CNS_INT - value 0 or 1 */
14710
14711             tree->ChangeOperConst(GT_CNS_INT);
14712             tree->gtType              = TYP_INT;
14713             tree->gtIntCon.gtIconVal  = i1;
14714             tree->gtIntCon.gtFieldSeq = fieldSeq;
14715             if (vnStore != nullptr)
14716             {
14717                 fgValueNumberTreeConst(tree);
14718             }
14719 #ifdef DEBUG
14720             if (verbose)
14721             {
14722                 printf("Bashed to int constant:\n");
14723                 gtDispTree(tree);
14724             }
14725 #endif
14726             goto DONE;
14727
14728         /* This operation is going to cause an overflow exception. Morph into
14729            an overflow helper. Put a dummy constant value for code generation.
14730
14731            We could remove all subsequent trees in the current basic block,
14732            unless this node is a child of GT_COLON
14733
14734            NOTE: Since the folded value is not constant we should not change the
14735                  "tree" node - otherwise we confuse the logic that checks if the folding
14736                  was successful - instead use one of the operands, e.g. op1
14737          */
14738
14739         LNG_OVF:
14740             // Don't fold overflow operations if not global morph phase.
14741             // The reason for this is that this optimization is replacing a gentree node
14742             // with another new gentree node. Say a GT_CALL(arglist) has one 'arg'
14743             // involving overflow arithmetic.  During assertion prop, it is possible
14744             // that the 'arg' could be constant folded and the result could lead to an
14745             // overflow.  In such a case 'arg' will get replaced with GT_COMMA node
14746             // but fgMorphArgs() - see the logic around "if(lateArgsComputed)" - doesn't
14747             // update args table. For this reason this optimization is enabled only
14748             // for global morphing phase.
14749             //
14750             // X86/Arm32 legacy codegen note: This is not an issue on x86 for the reason that
14751             // it doesn't use arg table for calls.  In addition x86/arm32 legacy codegen doesn't
14752             // expect long constants to show up as an operand of overflow cast operation.
14753             //
14754             // TODO-CQ: Once fgMorphArgs() is fixed this restriction could be removed.
14755             CLANG_FORMAT_COMMENT_ANCHOR;
14756
14757 #ifndef LEGACY_BACKEND
14758             if (!fgGlobalMorph)
14759             {
14760                 assert(tree->gtOverflow());
14761                 return tree;
14762             }
14763 #endif // !LEGACY_BACKEND
14764
14765             op1 = gtNewLconNode(0);
14766             if (vnStore != nullptr)
14767             {
14768                 op1->gtVNPair.SetBoth(vnStore->VNZeroForType(TYP_LONG));
14769             }
14770             goto OVF;
14771
14772         INT_OVF:
14773 #ifndef LEGACY_BACKEND
14774             // Don't fold overflow operations if not global morph phase.
14775             // The reason for this is that this optimization is replacing a gentree node
14776             // with another new gentree node. Say a GT_CALL(arglist) has one 'arg'
14777             // involving overflow arithmetic.  During assertion prop, it is possible
14778             // that the 'arg' could be constant folded and the result could lead to an
14779             // overflow.  In such a case 'arg' will get replaced with GT_COMMA node
14780             // but fgMorphArgs() - see the logic around "if(lateArgsComputed)" - doesn't
14781             // update args table. For this reason this optimization is enabled only
14782             // for global morphing phase.
14783             //
14784             // X86/Arm32 legacy codegen note: This is not an issue on x86 for the reason that
14785             // it doesn't use arg table for calls.  In addition x86/arm32 legacy codegen doesn't
14786             // expect long constants to show up as an operand of overflow cast operation.
14787             //
14788             // TODO-CQ: Once fgMorphArgs() is fixed this restriction could be removed.
14789
14790             if (!fgGlobalMorph)
14791             {
14792                 assert(tree->gtOverflow());
14793                 return tree;
14794             }
14795 #endif // !LEGACY_BACKEND
14796
14797             op1 = gtNewIconNode(0);
14798             if (vnStore != nullptr)
14799             {
14800                 op1->gtVNPair.SetBoth(vnStore->VNZeroForType(TYP_INT));
14801             }
14802             goto OVF;
14803
14804         OVF:
14805 #ifdef DEBUG
14806             if (verbose)
14807             {
14808                 printf("\nFolding binary operator with constant nodes into a comma throw:\n");
14809                 gtDispTree(tree);
14810             }
14811 #endif
14812             /* We will change the cast to a GT_COMMA and attach the exception helper as gtOp.gtOp1.
14813              * The constant expression zero becomes op2. */
14814
14815             assert(tree->gtOverflow());
14816             assert(tree->gtOper == GT_ADD || tree->gtOper == GT_SUB || tree->gtOper == GT_CAST ||
14817                    tree->gtOper == GT_MUL);
14818             assert(op1);
14819
14820             op2 = op1;
14821             op1 = gtNewHelperCallNode(CORINFO_HELP_OVERFLOW, TYP_VOID,
14822                                       gtNewArgList(gtNewIconNode(compCurBB->bbTryIndex)));
14823
14824             if (vnStore != nullptr)
14825             {
14826                 op1->gtVNPair =
14827                     vnStore->VNPWithExc(ValueNumPair(ValueNumStore::VNForVoid(), ValueNumStore::VNForVoid()),
14828                                         vnStore->VNPExcSetSingleton(vnStore->VNPairForFunc(TYP_REF, VNF_OverflowExc)));
14829             }
14830
14831             tree = gtNewOperNode(GT_COMMA, tree->gtType, op1, op2);
14832
14833             return tree;
14834
14835         /*-------------------------------------------------------------------------
14836          * Fold constant LONG binary operator
14837          */
14838
14839         case TYP_LONG:
14840
14841             // No GC pointer types should be folded here...
14842             //
14843             assert(!varTypeIsGC(op1->gtType) && !varTypeIsGC(op2->gtType));
14844
14845             // op1 is known to be a TYP_LONG, op2 is normally a TYP_LONG, unless we have a shift operator in which case
14846             // it is a TYP_INT
14847             //
14848             assert((op2->gtType == TYP_LONG) || (op2->gtType == TYP_INT));
14849
14850             assert(op1->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()));
14851             assert(op2->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()));
14852
14853             lval1 = op1->gtIntConCommon.LngValue();
14854
14855             // For the shift operators we can have a op2 that is a TYP_INT and thus will be GT_CNS_INT
14856             if (op2->OperGet() == GT_CNS_INT)
14857             {
14858                 lval2 = op2->gtIntConCommon.IconValue();
14859             }
14860             else
14861             {
14862                 lval2 = op2->gtIntConCommon.LngValue();
14863             }
14864
14865             switch (tree->gtOper)
14866             {
14867                 case GT_EQ:
14868                     i1 = (lval1 == lval2);
14869                     goto FOLD_COND;
14870                 case GT_NE:
14871                     i1 = (lval1 != lval2);
14872                     goto FOLD_COND;
14873
14874                 case GT_LT:
14875                     if (tree->gtFlags & GTF_UNSIGNED)
14876                     {
14877                         i1 = (UINT64(lval1) < UINT64(lval2));
14878                     }
14879                     else
14880                     {
14881                         i1 = (lval1 < lval2);
14882                     }
14883                     goto FOLD_COND;
14884
14885                 case GT_LE:
14886                     if (tree->gtFlags & GTF_UNSIGNED)
14887                     {
14888                         i1 = (UINT64(lval1) <= UINT64(lval2));
14889                     }
14890                     else
14891                     {
14892                         i1 = (lval1 <= lval2);
14893                     }
14894                     goto FOLD_COND;
14895
14896                 case GT_GE:
14897                     if (tree->gtFlags & GTF_UNSIGNED)
14898                     {
14899                         i1 = (UINT64(lval1) >= UINT64(lval2));
14900                     }
14901                     else
14902                     {
14903                         i1 = (lval1 >= lval2);
14904                     }
14905                     goto FOLD_COND;
14906
14907                 case GT_GT:
14908                     if (tree->gtFlags & GTF_UNSIGNED)
14909                     {
14910                         i1 = (UINT64(lval1) > UINT64(lval2));
14911                     }
14912                     else
14913                     {
14914                         i1 = (lval1 > lval2);
14915                     }
14916                     goto FOLD_COND;
14917
14918                 case GT_ADD:
14919                     ltemp = lval1 + lval2;
14920
14921                 LNG_ADD_CHKOVF:
14922                     /* For the SIGNED case - If there is one positive and one negative operand, there can be no overflow
14923                      * If both are positive, the result has to be positive, and similarly for negatives.
14924                      *
14925                      * For the UNSIGNED case - If a UINT32 operand is bigger than the result then OVF */
14926
14927                     if (tree->gtOverflow())
14928                     {
14929                         if (tree->gtFlags & GTF_UNSIGNED)
14930                         {
14931                             if ((UINT64(lval1) > UINT64(ltemp)) || (UINT64(lval2) > UINT64(ltemp)))
14932                             {
14933                                 goto LNG_OVF;
14934                             }
14935                         }
14936                         else if (((lval1 < 0) == (lval2 < 0)) && ((lval1 < 0) != (ltemp < 0)))
14937                         {
14938                             goto LNG_OVF;
14939                         }
14940                     }
14941                     lval1 = ltemp;
14942                     break;
14943
14944                 case GT_SUB:
14945                     ltemp = lval1 - lval2;
14946                     if (tree->gtOverflow())
14947                     {
14948                         if (tree->gtFlags & GTF_UNSIGNED)
14949                         {
14950                             if (UINT64(lval2) > UINT64(lval1))
14951                             {
14952                                 goto LNG_OVF;
14953                             }
14954                         }
14955                         else
14956                         {
14957                             /* If both operands are +ve or both are -ve, there can be no
14958                                overflow. Else use the logic for : lval1 + (-lval2) */
14959
14960                             if ((lval1 < 0) != (lval2 < 0))
14961                             {
14962                                 if (lval2 == INT64_MIN)
14963                                 {
14964                                     goto LNG_OVF;
14965                                 }
14966                                 lval2 = -lval2;
14967                                 goto LNG_ADD_CHKOVF;
14968                             }
14969                         }
14970                     }
14971                     lval1 = ltemp;
14972                     break;
14973
14974                 case GT_MUL:
14975                     ltemp = lval1 * lval2;
14976
14977                     if (tree->gtOverflow() && lval2 != 0)
14978                     {
14979
14980                         if (tree->gtFlags & GTF_UNSIGNED)
14981                         {
14982                             UINT64 ultemp = ltemp;
14983                             UINT64 ulval1 = lval1;
14984                             UINT64 ulval2 = lval2;
14985                             if ((ultemp / ulval2) != ulval1)
14986                             {
14987                                 goto LNG_OVF;
14988                             }
14989                         }
14990                         else
14991                         {
14992                             // This does a multiply and then reverses it.  This test works great except for MIN_INT *
14993                             //-1.  In that case we mess up the sign on ltemp.  Make sure to double check the sign.
14994                             // if either is 0, then no overflow
14995                             if (lval1 != 0) // lval2 checked above.
14996                             {
14997                                 if (((lval1 < 0) == (lval2 < 0)) && (ltemp < 0))
14998                                 {
14999                                     goto LNG_OVF;
15000                                 }
15001                                 if (((lval1 < 0) != (lval2 < 0)) && (ltemp > 0))
15002                                 {
15003                                     goto LNG_OVF;
15004                                 }
15005
15006                                 // TODO-Amd64-Unix: Remove the code that disables optimizations for this method when the
15007                                 // clang
15008                                 // optimizer is fixed and/or the method implementation is refactored in a simpler code.
15009                                 // There is a bug in the clang-3.5 optimizer. The issue is that in release build the
15010                                 // optimizer is mistyping (or just wrongly decides to use 32 bit operation for a corner
15011                                 // case of MIN_LONG) the args of the (ltemp / lval2) to int (it does a 32 bit div
15012                                 // operation instead of 64 bit.). For the case of lval1 and lval2 equal to MIN_LONG
15013                                 // (0x8000000000000000) this results in raising a SIGFPE.
15014                                 // Optimizations disabled for now. See compiler.h.
15015                                 if ((ltemp / lval2) != lval1)
15016                                 {
15017                                     goto LNG_OVF;
15018                                 }
15019                             }
15020                         }
15021                     }
15022
15023                     lval1 = ltemp;
15024                     break;
15025
15026                 case GT_OR:
15027                     lval1 |= lval2;
15028                     break;
15029                 case GT_XOR:
15030                     lval1 ^= lval2;
15031                     break;
15032                 case GT_AND:
15033                     lval1 &= lval2;
15034                     break;
15035
15036                 case GT_LSH:
15037                     lval1 <<= (lval2 & 0x3f);
15038                     break;
15039                 case GT_RSH:
15040                     lval1 >>= (lval2 & 0x3f);
15041                     break;
15042                 case GT_RSZ:
15043                     /* logical shift -> make it unsigned to not propagate the sign bit */
15044                     lval1 = UINT64(lval1) >> (lval2 & 0x3f);
15045                     break;
15046                 case GT_ROL:
15047                     lval1 = (lval1 << (lval2 & 0x3f)) | (UINT64(lval1) >> ((64 - lval2) & 0x3f));
15048                     break;
15049                 case GT_ROR:
15050                     lval1 = (lval1 << ((64 - lval2) & 0x3f)) | (UINT64(lval1) >> (lval2 & 0x3f));
15051                     break;
15052
15053                 // Both DIV and IDIV on x86 raise an exception for min_int (and min_long) / -1.  So we preserve
15054                 // that behavior here.
15055                 case GT_DIV:
15056                     if (!lval2)
15057                     {
15058                         return tree;
15059                     }
15060
15061                     if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
15062                     {
15063                         return tree;
15064                     }
15065                     lval1 /= lval2;
15066                     break;
15067
15068                 case GT_MOD:
15069                     if (!lval2)
15070                     {
15071                         return tree;
15072                     }
15073                     if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
15074                     {
15075                         return tree;
15076                     }
15077                     lval1 %= lval2;
15078                     break;
15079
15080                 case GT_UDIV:
15081                     if (!lval2)
15082                     {
15083                         return tree;
15084                     }
15085                     if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
15086                     {
15087                         return tree;
15088                     }
15089                     lval1 = UINT64(lval1) / UINT64(lval2);
15090                     break;
15091
15092                 case GT_UMOD:
15093                     if (!lval2)
15094                     {
15095                         return tree;
15096                     }
15097                     if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
15098                     {
15099                         return tree;
15100                     }
15101                     lval1 = UINT64(lval1) % UINT64(lval2);
15102                     break;
15103                 default:
15104                     return tree;
15105             }
15106
15107         CNS_LONG:
15108
15109             if (fieldSeq != FieldSeqStore::NotAField())
15110             {
15111                 return tree;
15112             }
15113
15114 #ifdef DEBUG
15115             if (verbose)
15116             {
15117                 printf("\nFolding long operator with constant nodes into a constant:\n");
15118                 gtDispTree(tree);
15119             }
15120 #endif
15121             assert((GenTree::s_gtNodeSizes[GT_CNS_NATIVELONG] == TREE_NODE_SZ_SMALL) ||
15122                    (tree->gtDebugFlags & GTF_DEBUG_NODE_LARGE));
15123
15124             tree->ChangeOperConst(GT_CNS_NATIVELONG);
15125             tree->gtIntConCommon.SetLngValue(lval1);
15126             if (vnStore != nullptr)
15127             {
15128                 fgValueNumberTreeConst(tree);
15129             }
15130
15131 #ifdef DEBUG
15132             if (verbose)
15133             {
15134                 printf("Bashed to long constant:\n");
15135                 gtDispTree(tree);
15136             }
15137 #endif
15138             goto DONE;
15139
15140         /*-------------------------------------------------------------------------
15141          * Fold constant FLOAT or DOUBLE binary operator
15142          */
15143
15144         case TYP_FLOAT:
15145         case TYP_DOUBLE:
15146
15147             if (tree->gtOverflowEx())
15148             {
15149                 return tree;
15150             }
15151
15152             assert(op1->gtOper == GT_CNS_DBL);
15153             d1 = op1->gtDblCon.gtDconVal;
15154
15155             assert(varTypeIsFloating(op2->gtType));
15156             assert(op2->gtOper == GT_CNS_DBL);
15157             d2 = op2->gtDblCon.gtDconVal;
15158
15159             /* Special case - check if we have NaN operands.
15160              * For comparisons if not an unordered operation always return 0.
15161              * For unordered operations (i.e. the GTF_RELOP_NAN_UN flag is set)
15162              * the result is always true - return 1. */
15163
15164             if (_isnan(d1) || _isnan(d2))
15165             {
15166 #ifdef DEBUG
15167                 if (verbose)
15168                 {
15169                     printf("Double operator(s) is NaN\n");
15170                 }
15171 #endif
15172                 if (tree->OperKind() & GTK_RELOP)
15173                 {
15174                     if (tree->gtFlags & GTF_RELOP_NAN_UN)
15175                     {
15176                         /* Unordered comparison with NaN always succeeds */
15177                         i1 = 1;
15178                         goto FOLD_COND;
15179                     }
15180                     else
15181                     {
15182                         /* Normal comparison with NaN always fails */
15183                         i1 = 0;
15184                         goto FOLD_COND;
15185                     }
15186                 }
15187             }
15188
15189             switch (tree->gtOper)
15190             {
15191                 case GT_EQ:
15192                     i1 = (d1 == d2);
15193                     goto FOLD_COND;
15194                 case GT_NE:
15195                     i1 = (d1 != d2);
15196                     goto FOLD_COND;
15197
15198                 case GT_LT:
15199                     i1 = (d1 < d2);
15200                     goto FOLD_COND;
15201                 case GT_LE:
15202                     i1 = (d1 <= d2);
15203                     goto FOLD_COND;
15204                 case GT_GE:
15205                     i1 = (d1 >= d2);
15206                     goto FOLD_COND;
15207                 case GT_GT:
15208                     i1 = (d1 > d2);
15209                     goto FOLD_COND;
15210
15211 #if FEATURE_STACK_FP_X87
15212                 case GT_ADD:
15213                     d1 += d2;
15214                     break;
15215                 case GT_SUB:
15216                     d1 -= d2;
15217                     break;
15218                 case GT_MUL:
15219                     d1 *= d2;
15220                     break;
15221                 case GT_DIV:
15222                     if (!d2)
15223                         return tree;
15224                     d1 /= d2;
15225                     break;
15226 #else  //! FEATURE_STACK_FP_X87
15227                 // non-x86 arch: floating point arithmetic should be done in declared
15228                 // precision while doing constant folding. For this reason though TYP_FLOAT
15229                 // constants are stored as double constants, while performing float arithmetic,
15230                 // double constants should be converted to float.  Here is an example case
15231                 // where performing arithmetic in double precision would lead to incorrect
15232                 // results.
15233                 //
15234                 // Example:
15235                 // float a = float.MaxValue;
15236                 // float b = a*a;   This will produce +inf in single precision and 1.1579207543382391e+077 in double
15237                 //                  precision.
15238                 // flaot c = b/b;   This will produce NaN in single precision and 1 in double precision.
15239                 case GT_ADD:
15240                     if (op1->TypeGet() == TYP_FLOAT)
15241                     {
15242                         f1 = forceCastToFloat(d1);
15243                         f2 = forceCastToFloat(d2);
15244                         d1 = f1 + f2;
15245                     }
15246                     else
15247                     {
15248                         d1 += d2;
15249                     }
15250                     break;
15251
15252                 case GT_SUB:
15253                     if (op1->TypeGet() == TYP_FLOAT)
15254                     {
15255                         f1 = forceCastToFloat(d1);
15256                         f2 = forceCastToFloat(d2);
15257                         d1 = f1 - f2;
15258                     }
15259                     else
15260                     {
15261                         d1 -= d2;
15262                     }
15263                     break;
15264
15265                 case GT_MUL:
15266                     if (op1->TypeGet() == TYP_FLOAT)
15267                     {
15268                         f1 = forceCastToFloat(d1);
15269                         f2 = forceCastToFloat(d2);
15270                         d1 = f1 * f2;
15271                     }
15272                     else
15273                     {
15274                         d1 *= d2;
15275                     }
15276                     break;
15277
15278                 case GT_DIV:
15279                     if (!d2)
15280                     {
15281                         return tree;
15282                     }
15283                     if (op1->TypeGet() == TYP_FLOAT)
15284                     {
15285                         f1 = forceCastToFloat(d1);
15286                         f2 = forceCastToFloat(d2);
15287                         d1 = f1 / f2;
15288                     }
15289                     else
15290                     {
15291                         d1 /= d2;
15292                     }
15293                     break;
15294 #endif //! FEATURE_STACK_FP_X87
15295
15296                 default:
15297                     return tree;
15298             }
15299
15300         CNS_DOUBLE:
15301
15302 #ifdef DEBUG
15303             if (verbose)
15304             {
15305                 printf("\nFolding fp operator with constant nodes into a fp constant:\n");
15306                 gtDispTree(tree);
15307             }
15308 #endif
15309
15310             assert((GenTree::s_gtNodeSizes[GT_CNS_DBL] == TREE_NODE_SZ_SMALL) ||
15311                    (tree->gtDebugFlags & GTF_DEBUG_NODE_LARGE));
15312
15313             tree->ChangeOperConst(GT_CNS_DBL);
15314             tree->gtDblCon.gtDconVal = d1;
15315             if (vnStore != nullptr)
15316             {
15317                 fgValueNumberTreeConst(tree);
15318             }
15319 #ifdef DEBUG
15320             if (verbose)
15321             {
15322                 printf("Bashed to fp constant:\n");
15323                 gtDispTree(tree);
15324             }
15325 #endif
15326             goto DONE;
15327
15328         default:
15329             /* not a foldable typ */
15330             return tree;
15331     }
15332
15333 //-------------------------------------------------------------------------
15334
15335 DONE:
15336
15337     /* Make sure no side effect flags are set on this constant node */
15338
15339     tree->gtFlags &= ~GTF_ALL_EFFECT;
15340
15341     return tree;
15342 }
15343 #ifdef _PREFAST_
15344 #pragma warning(pop)
15345 #endif
15346
15347 //------------------------------------------------------------------------
15348 // gtNewTempAssign: Create an assignment of the given value to a temp.
15349 //
15350 // Arguments:
15351 //    tmp - local number for a compiler temp
15352 //    val - value to assign to the temp
15353 //
15354 // Return Value:
15355 //    Normally a new assignment node.
15356 //    However may return a nop node if val is simply a reference to the temp.
15357 //
15358 // Notes:
15359 //    Self-assignments may be represented via NOPs.
15360 //
15361 //    May update the type of the temp, if it was previously unknown.
15362 //
15363 //    May set compFloatingPointUsed.
15364 //
15365
GenTree* Compiler::gtNewTempAssign(unsigned tmp, GenTree* val)
{
    // Self-assignment is a nop.
    if (val->OperGet() == GT_LCL_VAR && val->gtLclVarCommon.gtLclNum == tmp)
    {
        return gtNewNothingNode();
    }

    LclVarDsc* varDsc = lvaTable + tmp;

    // Storing a TYP_BYREF value into a native-int temp: retype the value tree
    // (and address-of-local subtrees within it) to TYP_I_IMPL so the types agree.
    if (varDsc->TypeGet() == TYP_I_IMPL && val->TypeGet() == TYP_BYREF)
    {
        impBashVarAddrsToI(val);
    }

    var_types valTyp = val->TypeGet();
    // For a normalize-on-load local, use its real (small) type rather than the
    // widened TYP_INT, and retype the node to match.
    if (val->OperGet() == GT_LCL_VAR && lvaTable[val->gtLclVar.gtLclNum].lvNormalizeOnLoad())
    {
        valTyp      = lvaGetRealType(val->gtLclVar.gtLclNum);
        val->gtType = valTyp;
    }
    var_types dstTyp = varDsc->TypeGet();

    /* If the variable's lvType is not yet set then set it here */
    if (dstTyp == TYP_UNDEF)
    {
        varDsc->lvType = dstTyp = genActualType(valTyp);
        if (varTypeIsGC(dstTyp))
        {
            varDsc->lvStructGcCount = 1;
        }
#if FEATURE_SIMD
        else if (varTypeIsSIMD(dstTyp))
        {
            varDsc->lvSIMDType = 1;
        }
#endif
    }

#ifdef DEBUG
    /* Make sure the actual types match               */
    if (genActualType(valTyp) != genActualType(dstTyp))
    {
        // Plus some other exceptions that are apparently legal:
        // 1) TYP_REF or BYREF = TYP_I_IMPL
        bool ok = false;
        if (varTypeIsGC(dstTyp) && (valTyp == TYP_I_IMPL))
        {
            ok = true;
        }
        // 2) TYP_DOUBLE = TYP_FLOAT or TYP_FLOAT = TYP_DOUBLE
        else if (varTypeIsFloating(dstTyp) && varTypeIsFloating(valTyp))
        {
            ok = true;
        }

        if (!ok)
        {
            gtDispTree(val);
            assert(!"Incompatible types for gtNewTempAssign");
        }
    }
#endif

    // Floating Point assignments can be created during inlining
    // see "Zero init inlinee locals:" in fgInlinePrependStatements
    // thus we may need to set compFloatingPointUsed to true here.
    //
    if (varTypeIsFloating(dstTyp) && (compFloatingPointUsed == false))
    {
        compFloatingPointUsed = true;
    }

    /* Create the assignment node */

    GenTree* asg;
    GenTree* dest = gtNewLclvNode(tmp, dstTyp);
    dest->gtFlags |= GTF_VAR_DEF;

    // With first-class structs, we should be propagating the class handle on all non-primitive
    // struct types. We don't have a convenient way to do that for all SIMD temps, since some
    // internal trees use SIMD types that are not used by the input IL. In this case, we allow
    // a null type handle and derive the necessary information about the type from its varType.
    CORINFO_CLASS_HANDLE structHnd = gtGetStructHandleIfPresent(val);
    if (varTypeIsStruct(valTyp) && ((structHnd != NO_CLASS_HANDLE) || (varTypeIsSIMD(valTyp))))
    {
        // The GT_OBJ may be be a child of a GT_COMMA.
        GenTree* valx = val->gtEffectiveVal(/*commaOnly*/ true);

        if (valx->gtOper == GT_OBJ)
        {
            // A GT_OBJ source requires a known struct layout on the temp.
            assert(structHnd != nullptr);
            lvaSetStruct(tmp, structHnd, false);
        }
        // Block CSE of both sides of the struct copy; impAssignStruct builds the store.
        dest->gtFlags |= GTF_DONT_CSE;
        valx->gtFlags |= GTF_DONT_CSE;
        asg = impAssignStruct(dest, val, structHnd, (unsigned)CHECK_SPILL_NONE);
    }
    else
    {
        asg = gtNewAssignNode(dest, val);
    }

#ifndef LEGACY_BACKEND
    // In rationalized (LIR) form the assignment must be a GT_STORE_LCL_VAR instead.
    if (compRationalIRForm)
    {
        Rationalizer::RewriteAssignmentIntoStoreLcl(asg->AsOp());
    }
#endif // !LEGACY_BACKEND

    return asg;
}
15478
15479 /*****************************************************************************
15480  *
15481  *  Create a helper call to access a COM field (iff 'assg' is non-zero this is
15482  *  an assignment and 'assg' is the new value).
15483  */
15484
GenTree* Compiler::gtNewRefCOMfield(GenTree*                objPtr,
                                    CORINFO_RESOLVED_TOKEN* pResolvedToken,
                                    CORINFO_ACCESS_FLAGS    access,
                                    CORINFO_FIELD_INFO*     pFieldInfo,
                                    var_types               lclTyp,
                                    CORINFO_CLASS_HANDLE    structType,
                                    GenTree*                assg)
{
    assert(pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER ||
           pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_ADDR_HELPER ||
           pFieldInfo->fieldAccessor == CORINFO_FIELD_STATIC_ADDR_HELPER);

    /* If we can't access it directly, we need to call a helper function */
    GenTreeArgList* args       = nullptr;
    var_types       helperType = TYP_BYREF;

    // For the (non-ADDR) instance helper the helper itself performs the get/set,
    // so the value (for a set) is passed as an argument and the helper return
    // type is adjusted accordingly. The ADDR helpers just return an address.
    if (pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER)
    {
        if (access & CORINFO_ACCESS_SET)
        {
            assert(assg != nullptr);
            // helper needs pointer to struct, not struct itself
            if (pFieldInfo->helper == CORINFO_HELP_SETFIELDSTRUCT)
            {
                assert(structType != nullptr);
                assg = impGetStructAddr(assg, structType, (unsigned)CHECK_SPILL_ALL, true);
            }
            // Widen/narrow the value to the exact field type for FP fields.
            else if (lclTyp == TYP_DOUBLE && assg->TypeGet() == TYP_FLOAT)
            {
                assg = gtNewCastNode(TYP_DOUBLE, assg, false, TYP_DOUBLE);
            }
            else if (lclTyp == TYP_FLOAT && assg->TypeGet() == TYP_DOUBLE)
            {
                assg = gtNewCastNode(TYP_FLOAT, assg, false, TYP_FLOAT);
            }

            args       = gtNewArgList(assg);
            helperType = TYP_VOID;
        }
        else if (access & CORINFO_ACCESS_GET)
        {
            helperType = lclTyp;

            // The calling convention for the helper does not take into
            // account optimization of primitive structs.
            if ((pFieldInfo->helper == CORINFO_HELP_GETFIELDSTRUCT) && !varTypeIsStruct(lclTyp))
            {
                helperType = TYP_STRUCT;
            }
        }
    }

    // The struct get/set helpers also take the field's class handle.
    if (pFieldInfo->helper == CORINFO_HELP_GETFIELDSTRUCT || pFieldInfo->helper == CORINFO_HELP_SETFIELDSTRUCT)
    {
        assert(pFieldInfo->structType != nullptr);
        args = gtNewListNode(gtNewIconEmbClsHndNode(pFieldInfo->structType), args);
    }

    GenTree* fieldHnd = impTokenToHandle(pResolvedToken);
    if (fieldHnd == nullptr)
    { // compDonotInline()
        return nullptr;
    }

    // Note: arguments are prepended, so the final order is
    // (objPtr?, fieldHnd, structClsHnd?, value?).
    args = gtNewListNode(fieldHnd, args);

    // If it's a static field, we shouldn't have an object node
    // If it's an instance field, we have an object node
    assert((pFieldInfo->fieldAccessor != CORINFO_FIELD_STATIC_ADDR_HELPER) ^ (objPtr == nullptr));

    if (objPtr != nullptr)
    {
        args = gtNewListNode(objPtr, args);
    }

    GenTree* tree = gtNewHelperCallNode(pFieldInfo->helper, genActualType(helperType), args);

    if (pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER)
    {
        if (access & CORINFO_ACCESS_GET)
        {
            if (pFieldInfo->helper == CORINFO_HELP_GETFIELDSTRUCT)
            {
                if (!varTypeIsStruct(lclTyp))
                {
                    // get the result as primitive type
                    tree = impGetStructAddr(tree, structType, (unsigned)CHECK_SPILL_ALL, true);
                    tree = gtNewOperNode(GT_IND, lclTyp, tree);
                }
            }
            else if (varTypeIsIntegral(lclTyp) && genTypeSize(lclTyp) < genTypeSize(TYP_INT))
            {
                // The helper does not extend the small return types.
                tree = gtNewCastNode(genActualType(lclTyp), tree, false, lclTyp);
            }
        }
    }
    else
    {
        // The ADDR helpers return the field's address; load or store through it here.
        // OK, now do the indirection
        if (access & CORINFO_ACCESS_GET)
        {
            if (varTypeIsStruct(lclTyp))
            {
                tree = gtNewObjNode(structType, tree);
            }
            else
            {
                tree = gtNewOperNode(GT_IND, lclTyp, tree);
            }
            tree->gtFlags |= (GTF_EXCEPT | GTF_GLOB_REF);
        }
        else if (access & CORINFO_ACCESS_SET)
        {
            if (varTypeIsStruct(lclTyp))
            {
                tree = impAssignStructPtr(tree, assg, structType, (unsigned)CHECK_SPILL_ALL);
            }
            else
            {
                tree = gtNewOperNode(GT_IND, lclTyp, tree);
                tree->gtFlags |= (GTF_EXCEPT | GTF_GLOB_REF | GTF_IND_TGTANYWHERE);
                tree = gtNewAssignNode(tree, assg);
            }
        }
    }

    return (tree);
}
15614
15615 /*****************************************************************************
15616  *
15617  *  Return true if the given node (excluding children trees) contains side effects.
15618  *  Note that it does not recurse, and children need to be handled separately.
15619  *  It may return false even if the node has GTF_SIDE_EFFECT (because of its children).
15620  *
15621  *  Similar to OperMayThrow() (but handles GT_CALLs specially), but considers
15622  *  assignments too.
15623  */
15624
15625 bool Compiler::gtNodeHasSideEffects(GenTree* tree, unsigned flags)
15626 {
15627     if (flags & GTF_ASG)
15628     {
15629         if (tree->OperIsAssignment())
15630         {
15631             return true;
15632         }
15633     }
15634
15635     // Are there only GTF_CALL side effects remaining? (and no other side effect kinds)
15636     if (flags & GTF_CALL)
15637     {
15638         if (tree->OperGet() == GT_CALL)
15639         {
15640             GenTreeCall* const call             = tree->AsCall();
15641             const bool         ignoreExceptions = (flags & GTF_EXCEPT) == 0;
15642             const bool         ignoreCctors     = (flags & GTF_IS_IN_CSE) != 0; // We can CSE helpers that run cctors.
15643             if (!call->HasSideEffects(this, ignoreExceptions, ignoreCctors))
15644             {
15645                 // If this call is otherwise side effect free, check its arguments.
15646                 for (GenTreeArgList* args = call->gtCallArgs; args != nullptr; args = args->Rest())
15647                 {
15648                     if (gtTreeHasSideEffects(args->Current(), flags))
15649                     {
15650                         return true;
15651                     }
15652                 }
15653                 // I'm a little worried that args that assign to temps that are late args will look like
15654                 // side effects...but better to be conservative for now.
15655                 for (GenTreeArgList* args = call->gtCallLateArgs; args != nullptr; args = args->Rest())
15656                 {
15657                     if (gtTreeHasSideEffects(args->Current(), flags))
15658                     {
15659                         return true;
15660                     }
15661                 }
15662
15663                 // Otherwise:
15664                 return false;
15665             }
15666
15667             // Otherwise the GT_CALL is considered to have side-effects.
15668             return true;
15669         }
15670     }
15671
15672     if (flags & GTF_EXCEPT)
15673     {
15674         if (tree->OperMayThrow(this))
15675         {
15676             return true;
15677         }
15678     }
15679
15680     // Expressions declared as CSE by (e.g.) hoisting code are considered to have relevant side
15681     // effects (if we care about GTF_MAKE_CSE).
15682     if ((flags & GTF_MAKE_CSE) && (tree->gtFlags & GTF_MAKE_CSE))
15683     {
15684         return true;
15685     }
15686
15687     return false;
15688 }
15689
15690 /*****************************************************************************
15691  * Returns true if the expr tree has any side effects.
15692  */
15693
bool Compiler::gtTreeHasSideEffects(GenTree* tree, unsigned flags /* = GTF_SIDE_EFFECT*/)
{
    // These are the side effect flags that we care about for this tree
    unsigned sideEffectFlags = tree->gtFlags & flags;

    // Does this tree have any Side-effect flags set that we care about?
    if (sideEffectFlags == 0)
    {
        // no it doesn't..
        return false;
    }

    // Only when GTF_CALL is the *only* relevant side-effect flag present do we
    // look deeper; any other flag (GTF_ASG, GTF_EXCEPT, ...) conservatively
    // counts as a side effect.
    if (sideEffectFlags == GTF_CALL)
    {
        if (tree->OperGet() == GT_CALL)
        {
            // Generally all trees that contain GT_CALL nodes are considered to have side-effects.
            //
            if (tree->gtCall.gtCallType == CT_HELPER)
            {
                // If this node is a helper call we may not care about the side-effects.
                // Note that gtNodeHasSideEffects checks the side effects of the helper itself
                // as well as the side effects of its arguments.
                return gtNodeHasSideEffects(tree, flags);
            }
        }
        else if (tree->OperGet() == GT_INTRINSIC)
        {
            // A GT_INTRINSIC node can carry GTF_CALL (presumably because it may be
            // expanded into a call); check the node and its operands individually.
            if (gtNodeHasSideEffects(tree, flags))
            {
                return true;
            }

            if (gtNodeHasSideEffects(tree->gtOp.gtOp1, flags))
            {
                return true;
            }

            if ((tree->gtOp.gtOp2 != nullptr) && gtNodeHasSideEffects(tree->gtOp.gtOp2, flags))
            {
                return true;
            }

            return false;
        }
    }

    // Some relevant side-effect flag is set: report a side effect.
    return true;
}
15743
//------------------------------------------------------------------------
// gtBuildCommaList: Prepend 'expr' onto a GT_COMMA list of side effects.
//
// Arguments:
//    list - the current comma list (nullptr if the list hasn't started)
//    expr - the side-effecting expression to put at the front
//
// Return Value:
//    'expr' itself when 'list' is null; otherwise a new TYP_VOID GT_COMMA
//    with 'expr' as op1 and 'list' as op2, with effect flags and value
//    numbers propagated.
//
GenTree* Compiler::gtBuildCommaList(GenTree* list, GenTree* expr)
{
    // 'list' starts off as null,
    //        and when it is null we haven't started the list yet.
    //
    if (list != nullptr)
    {
        // Create a GT_COMMA that appends 'expr' in front of the remaining set of expressions in (*list)
        GenTree* result = gtNewOperNode(GT_COMMA, TYP_VOID, expr, list);

        // Set the flags in the comma node
        result->gtFlags |= (list->gtFlags & GTF_ALL_EFFECT);
        result->gtFlags |= (expr->gtFlags & GTF_ALL_EFFECT);

        // 'list' and 'expr' should have valuenumbers defined for both or for neither one (unless we are remorphing,
        // in which case a prior transform involving either node may have discarded or otherwise invalidated the value
        // numbers).
        assert((list->gtVNPair.BothDefined() == expr->gtVNPair.BothDefined()) || !fgGlobalMorph);

        // Set the ValueNumber 'gtVNPair' for the new GT_COMMA node
        //
        if (list->gtVNPair.BothDefined() && expr->gtVNPair.BothDefined())
        {
            // The result of a GT_COMMA node is op2, the normal value number is op2vnp
            // But we also need to include the union of side effects from op1 and op2.
            // we compute this value into exceptions_vnp.
            ValueNumPair op1vnp;
            ValueNumPair op1Xvnp = ValueNumStore::VNPForEmptyExcSet();
            ValueNumPair op2vnp;
            ValueNumPair op2Xvnp = ValueNumStore::VNPForEmptyExcSet();

            // expr is op1 of the new comma, list is op2.
            vnStore->VNPUnpackExc(expr->gtVNPair, &op1vnp, &op1Xvnp);
            vnStore->VNPUnpackExc(list->gtVNPair, &op2vnp, &op2Xvnp);

            ValueNumPair exceptions_vnp = ValueNumStore::VNPForEmptyExcSet();

            exceptions_vnp = vnStore->VNPExcSetUnion(exceptions_vnp, op1Xvnp);
            exceptions_vnp = vnStore->VNPExcSetUnion(exceptions_vnp, op2Xvnp);

            result->gtVNPair = vnStore->VNPWithExc(op2vnp, exceptions_vnp);
        }

        return result;
    }
    else
    {
        // The 'expr' will start the list of expressions
        return expr;
    }
}
15794
15795 /*****************************************************************************
15796  *
15797  *  Extracts side effects from the given expression
15798  *  and appends them to a given list (actually a GT_COMMA list)
15799  *  If ignore root is specified, the method doesn't treat the top
15800  *  level tree node as having side-effect.
15801  */
15802
void Compiler::gtExtractSideEffList(GenTree*  expr,
                                    GenTree** pList,
                                    unsigned  flags /* = GTF_SIDE_EFFECT*/,
                                    bool      ignoreRoot /* = false */)
{
    assert(expr);
    assert(expr->gtOper != GT_STMT);

    /* If no side effect in the expression return */

    if (!gtTreeHasSideEffects(expr, flags))
    {
        return;
    }

    genTreeOps oper = expr->OperGet();
    unsigned   kind = expr->OperKind();

    // Look for any side effects that we care about
    //
    if (!ignoreRoot && gtNodeHasSideEffects(expr, flags))
    {
        // Add the side effect to the list and return
        //
        *pList = gtBuildCommaList(*pList, expr);
        return;
    }

    // A leaf with no relevant root side effect contributes nothing.
    if (kind & GTK_LEAF)
    {
        return;
    }

    if (oper == GT_LOCKADD || oper == GT_XADD || oper == GT_XCHG || oper == GT_CMPXCHG)
    {
        // XADD both adds to the memory location and also fetches the old value.  If we only need the side
        // effect of this instruction, change it into a GT_LOCKADD node (the add only)
        if (oper == GT_XADD)
        {
            expr->SetOperRaw(GT_LOCKADD);
            assert(genActualType(expr->gtType) == genActualType(expr->gtOp.gtOp2->gtType));
            expr->gtType = TYP_VOID;
        }

        // These operations are kind of important to keep
        *pList = gtBuildCommaList(*pList, expr);
        return;
    }

    if (kind & GTK_SMPOP)
    {
        GenTree* op1 = expr->gtOp.gtOp1;
        GenTree* op2 = expr->gtGetOp2IfPresent();

        if (flags & GTF_EXCEPT)
        {
            // Special case - GT_ADDR of GT_IND nodes of TYP_STRUCT
            // have to be kept together

            if (oper == GT_ADDR && op1->OperIsIndir() && op1->gtType == TYP_STRUCT)
            {
                *pList = gtBuildCommaList(*pList, expr);

#ifdef DEBUG
                if (verbose)
                {
                    printf("Keep the GT_ADDR and GT_IND together:\n");
                }
#endif
                return;
            }
        }

        /* Continue searching for side effects in the subtrees of the expression
         * NOTE: Be careful to preserve the right ordering - side effects are prepended
         * to the list, so the later-executed operand is processed first. */

        /* Continue searching for side effects in the subtrees of the expression
         * NOTE: Be careful to preserve the right ordering
         * as side effects are prepended to the list */

        if (expr->gtFlags & GTF_REVERSE_OPS)
        {
            assert(oper != GT_COMMA);
            if (op1)
            {
                gtExtractSideEffList(op1, pList, flags);
            }
            if (op2)
            {
                gtExtractSideEffList(op2, pList, flags);
            }
        }
        else
        {
            if (op2)
            {
                gtExtractSideEffList(op2, pList, flags);
            }
            if (op1)
            {
                gtExtractSideEffList(op1, pList, flags);
            }
        }
    }

    if (expr->OperGet() == GT_CALL)
    {
        // Generally all GT_CALL nodes are considered to have side-effects.
        // So if we get here it must be a Helper call that we decided does
        // not have side effects that we needed to keep
        //
        assert(expr->gtCall.gtCallType == CT_HELPER);

        // We can remove this Helper call, but there still could be
        // side-effects in the arguments that we may need to keep
        //
        GenTree* args;
        for (args = expr->gtCall.gtCallArgs; args; args = args->gtOp.gtOp2)
        {
            assert(args->OperIsList());
            gtExtractSideEffList(args->Current(), pList, flags);
        }
        for (args = expr->gtCall.gtCallLateArgs; args; args = args->gtOp.gtOp2)
        {
            assert(args->OperIsList());
            gtExtractSideEffList(args->Current(), pList, flags);
        }
    }

    // Bounds checks: the check node itself is dropped but its operands may
    // still carry side effects.
    if (expr->OperGet() == GT_ARR_BOUNDS_CHECK
#ifdef FEATURE_SIMD
        || expr->OperGet() == GT_SIMD_CHK
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        || expr->OperGet() == GT_HW_INTRINSIC_CHK
#endif // FEATURE_HW_INTRINSICS
        )
    {
        gtExtractSideEffList(expr->AsBoundsChk()->gtIndex, pList, flags);
        gtExtractSideEffList(expr->AsBoundsChk()->gtArrLen, pList, flags);
    }

    // Dynamic block ops: recurse into data (if any), address, and size operands.
    if (expr->OperGet() == GT_DYN_BLK || expr->OperGet() == GT_STORE_DYN_BLK)
    {
        if (expr->AsDynBlk()->Data() != nullptr)
        {
            gtExtractSideEffList(expr->AsDynBlk()->Data(), pList, flags);
        }
        gtExtractSideEffList(expr->AsDynBlk()->Addr(), pList, flags);
        gtExtractSideEffList(expr->AsDynBlk()->gtDynamicSize, pList, flags);
    }
}
15956
15957 /*****************************************************************************
15958  *
15959  *  For debugging only - displays a tree node list and makes sure all the
15960  *  links are correctly set.
15961  */
15962
15963 #ifdef DEBUG
15964
15965 void dispNodeList(GenTree* list, bool verbose)
15966 {
15967     GenTree* last = nullptr;
15968     GenTree* next;
15969
15970     if (!list)
15971     {
15972         return;
15973     }
15974
15975     for (;;)
15976     {
15977         next = list->gtNext;
15978
15979         if (verbose)
15980         {
15981             printf("%08X -> %08X -> %08X\n", last, list, next);
15982         }
15983
15984         assert(!last || last->gtNext == list);
15985
15986         assert(next == nullptr || next->gtPrev == list);
15987
15988         if (!next)
15989         {
15990             break;
15991         }
15992
15993         last = list;
15994         list = next;
15995     }
15996     printf(""); // null string means flush
15997 }
15998
15999 /*****************************************************************************
16000  * Callback to assert that the nodes of a qmark-colon subtree are marked
16001  */
16002
16003 /* static */
16004 Compiler::fgWalkResult Compiler::gtAssertColonCond(GenTree** pTree, fgWalkData* data)
16005 {
16006     assert(data->pCallbackData == nullptr);
16007
16008     assert((*pTree)->gtFlags & GTF_COLON_COND);
16009
16010     return WALK_CONTINUE;
16011 }
16012 #endif // DEBUG
16013
16014 /*****************************************************************************
16015  * Callback to mark the nodes of a qmark-colon subtree that are conditionally
16016  * executed.
16017  */
16018
16019 /* static */
16020 Compiler::fgWalkResult Compiler::gtMarkColonCond(GenTree** pTree, fgWalkData* data)
16021 {
16022     assert(data->pCallbackData == nullptr);
16023
16024     (*pTree)->gtFlags |= GTF_COLON_COND;
16025
16026     return WALK_CONTINUE;
16027 }
16028
16029 /*****************************************************************************
16030  * Callback to clear the conditionally executed flags of nodes that no longer
16031    will be conditionally executed. Note that when we find another colon we must
16032    stop, as the nodes below this one WILL be conditionally executed. This callback
16033    is called when folding a qmark condition (ie the condition is constant).
16034  */
16035
16036 /* static */
16037 Compiler::fgWalkResult Compiler::gtClearColonCond(GenTree** pTree, fgWalkData* data)
16038 {
16039     GenTree* tree = *pTree;
16040
16041     assert(data->pCallbackData == nullptr);
16042
16043     if (tree->OperGet() == GT_COLON)
16044     {
16045         // Nodes below this will be conditionally executed.
16046         return WALK_SKIP_SUBTREES;
16047     }
16048
16049     tree->gtFlags &= ~GTF_COLON_COND;
16050     return WALK_CONTINUE;
16051 }
16052
// Walker context for gtFindLinkCB/gtFindLink: 'nodeToFind' is the node being
// searched for; 'result' receives the address of the pointer (edge) that
// references it, or stays null if the node is not found.
struct FindLinkData
{
    GenTree*  nodeToFind; // the node whose referencing edge is sought
    GenTree** result;     // [out] address of the edge pointing at nodeToFind
};
16058
16059 /*****************************************************************************
16060  *
16061  *  Callback used by the tree walker to implement fgFindLink()
16062  */
16063 static Compiler::fgWalkResult gtFindLinkCB(GenTree** pTree, Compiler::fgWalkData* cbData)
16064 {
16065     FindLinkData* data = (FindLinkData*)cbData->pCallbackData;
16066     if (*pTree == data->nodeToFind)
16067     {
16068         data->result = pTree;
16069         return Compiler::WALK_ABORT;
16070     }
16071
16072     return Compiler::WALK_CONTINUE;
16073 }
16074
16075 GenTree** Compiler::gtFindLink(GenTree* stmt, GenTree* node)
16076 {
16077     assert(stmt->gtOper == GT_STMT);
16078
16079     FindLinkData data = {node, nullptr};
16080
16081     fgWalkResult result = fgWalkTreePre(&stmt->gtStmt.gtStmtExpr, gtFindLinkCB, &data);
16082
16083     if (result == WALK_ABORT)
16084     {
16085         assert(data.nodeToFind == *data.result);
16086         return data.result;
16087     }
16088     else
16089     {
16090         return nullptr;
16091     }
16092 }
16093
16094 /*****************************************************************************
16095  *
16096  *  Callback that checks if a tree node has oper type GT_CATCH_ARG
16097  */
16098
16099 static Compiler::fgWalkResult gtFindCatchArg(GenTree** pTree, Compiler::fgWalkData* /* data */)
16100 {
16101     return ((*pTree)->OperGet() == GT_CATCH_ARG) ? Compiler::WALK_ABORT : Compiler::WALK_CONTINUE;
16102 }
16103
16104 /*****************************************************************************/
16105 bool Compiler::gtHasCatchArg(GenTree* tree)
16106 {
16107     if (((tree->gtFlags & GTF_ORDER_SIDEEFF) != 0) && (fgWalkTreePre(&tree, gtFindCatchArg) == WALK_ABORT))
16108     {
16109         return true;
16110     }
16111     return false;
16112 }
16113
16114 //------------------------------------------------------------------------
16115 // gtHasCallOnStack:
16116 //
16117 // Arguments:
16118 //    parentStack: a context (stack of parent nodes)
16119 //
16120 // Return Value:
16121 //     returns true if any of the parent nodes are a GT_CALL
16122 //
16123 // Assumptions:
16124 //    We have a stack of parent nodes. This generally requires that
16125 //    we are performing a recursive tree walk using struct fgWalkData
16126 //
16127 //------------------------------------------------------------------------
16128 /* static */ bool Compiler::gtHasCallOnStack(GenTreeStack* parentStack)
16129 {
16130     for (int i = 0; i < parentStack->Height(); i++)
16131     {
16132         GenTree* node = parentStack->Index(i);
16133         if (node->OperGet() == GT_CALL)
16134         {
16135             return true;
16136         }
16137     }
16138     return false;
16139 }
16140
16141 //------------------------------------------------------------------------
16142 // gtCheckQuirkAddrExposedLclVar:
16143 //
16144 // Arguments:
16145 //    tree: an address taken GenTree node that is a GT_LCL_VAR
16146 //    parentStack: a context (stack of parent nodes)
16147 //    The 'parentStack' is used to ensure that we are in an argument context.
16148 //
16149 // Return Value:
16150 //    None
16151 //
16152 // Notes:
16153 //    When allocation size of this LclVar is 32-bits we will quirk the size to 64-bits
16154 //    because some PInvoke signatures incorrectly specify a ByRef to an INT32
16155 //    when they actually write a SIZE_T or INT64. There are cases where overwriting
16156 //    these extra 4 bytes corrupts some data (such as a saved register) that leads to A/V
16157 //    Wheras previously the JIT64 codegen did not lead to an A/V
16158 //
16159 // Assumptions:
16160 //    'tree' is known to be address taken and that we have a stack
16161 //    of parent nodes. Both of these generally requires that
16162 //    we are performing a recursive tree walk using struct fgWalkData
16163 //------------------------------------------------------------------------
16164 void Compiler::gtCheckQuirkAddrExposedLclVar(GenTree* tree, GenTreeStack* parentStack)
16165 {
16166 #ifdef _TARGET_64BIT_
16167     // We only need to Quirk for _TARGET_64BIT_
16168
16169     // Do we have a parent node that is a Call?
16170     if (!Compiler::gtHasCallOnStack(parentStack))
16171     {
16172         // No, so we don't apply the Quirk
16173         return;
16174     }
16175     noway_assert(tree->gtOper == GT_LCL_VAR);
16176     unsigned   lclNum  = tree->gtLclVarCommon.gtLclNum;
16177     LclVarDsc* varDsc  = &lvaTable[lclNum];
16178     var_types  vartype = varDsc->TypeGet();
16179
16180     if (varDsc->lvIsParam)
16181     {
16182         // We can't Quirk the size of an incoming parameter
16183         return;
16184     }
16185
16186     // We may need to Quirk the storage size for this LCL_VAR
16187     if (genActualType(vartype) == TYP_INT)
16188     {
16189         varDsc->lvQuirkToLong = true;
16190 #ifdef DEBUG
16191         if (verbose)
16192         {
16193             printf("\nAdding a Quirk for the storage size of LvlVar V%02d:", lclNum);
16194             printf(" (%s ==> %s)\n", varTypeName(vartype), varTypeName(TYP_LONG));
16195         }
16196 #endif // DEBUG
16197     }
16198 #endif
16199 }
16200
16201 //------------------------------------------------------------------------
16202 // gtGetTypeProducerKind: determine if a tree produces a runtime type, and
16203 //    if so, how.
16204 //
16205 // Arguments:
16206 //    tree - tree to examine
16207 //
16208 // Return Value:
16209 //    TypeProducerKind for the tree.
16210 //
16211 // Notes:
16212 //    Checks to see if this tree returns a RuntimeType value, and if so,
16213 //    how that value is determined.
16214 //
16215 //    Currently handles these cases
16216 //    1) The result of Object::GetType
16217 //    2) The result of typeof(...)
16218 //    3) A null reference
16219 //    4) Tree is otherwise known to have type RuntimeType
16220 //
16221 //    The null reference case is surprisingly common because operator
16222 //    overloading turns the otherwise innocuous
16223 //
16224 //        Type t = ....;
16225 //        if (t == null)
16226 //
16227 //    into a method call.
16228
Compiler::TypeProducerKind Compiler::gtGetTypeProducerKind(GenTree* tree)
{
    if (tree->gtOper == GT_CALL)
    {
        if (tree->gtCall.gtCallType == CT_HELPER)
        {
            // typeof(...) pattern: a handle-to-RuntimeType helper call.
            if (gtIsTypeHandleToRuntimeTypeHelper(tree->AsCall()))
            {
                return TPK_Handle;
            }
        }
        else if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_SPECIAL_INTRINSIC)
        {
            // A call recognized by the VM as Object.GetType.
            if (info.compCompHnd->getIntrinsicID(tree->gtCall.gtCallMethHnd) == CORINFO_INTRINSIC_Object_GetType)
            {
                return TPK_GetType;
            }
        }
    }
    else if ((tree->gtOper == GT_INTRINSIC) && (tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Object_GetType))
    {
        // Object.GetType expanded as a JIT intrinsic node.
        return TPK_GetType;
    }
    else if ((tree->gtOper == GT_CNS_INT) && (tree->gtIntCon.gtIconVal == 0))
    {
        // A null reference constant (common due to operator overloading on Type).
        return TPK_Null;
    }
    else
    {
        // Fall back to the tree's statically-known class handle: if it is
        // exactly RuntimeType, the tree produces a type by some other means.
        bool                 isExact   = false;
        bool                 isNonNull = false;
        CORINFO_CLASS_HANDLE clsHnd    = gtGetClassHandle(tree, &isExact, &isNonNull);

        if (clsHnd == info.compCompHnd->getBuiltinClass(CLASSID_RUNTIME_TYPE))
        {
            return TPK_Other;
        }
    }
    return TPK_Unknown;
}
16269
16270 //------------------------------------------------------------------------
16271 // gtIsTypeHandleToRuntimeTypeHelperCall -- see if tree is constructing
16272 //    a RuntimeType from a handle
16273 //
16274 // Arguments:
16275 //    tree - tree to examine
16276 //
16277 // Return Value:
16278 //    True if so
16279
16280 bool Compiler::gtIsTypeHandleToRuntimeTypeHelper(GenTreeCall* call)
16281 {
16282     return call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE) ||
16283            call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE_MAYBENULL);
16284 }
16285
16286 bool Compiler::gtIsActiveCSE_Candidate(GenTree* tree)
16287 {
16288     return (optValnumCSE_phase && IS_CSE_INDEX(tree->gtCSEnum));
16289 }
16290
16291 /*****************************************************************************/
16292
// Walker context for ComplexityExceedsWalker: tracks the number of nodes
// visited so far and the threshold at which the walk should abort.
struct ComplexityStruct
{
    unsigned m_numNodes;  // running count of visited nodes
    unsigned m_nodeLimit; // walk aborts once the count exceeds this limit
    ComplexityStruct(unsigned nodeLimit) : m_numNodes(0), m_nodeLimit(nodeLimit)
    {
    }
};
16301
16302 static Compiler::fgWalkResult ComplexityExceedsWalker(GenTree** pTree, Compiler::fgWalkData* data)
16303 {
16304     ComplexityStruct* pComplexity = (ComplexityStruct*)data->pCallbackData;
16305     if (++pComplexity->m_numNodes > pComplexity->m_nodeLimit)
16306     {
16307         return Compiler::WALK_ABORT;
16308     }
16309     else
16310     {
16311         return Compiler::WALK_CONTINUE;
16312     }
16313 }
16314
16315 bool Compiler::gtComplexityExceeds(GenTree** tree, unsigned limit)
16316 {
16317     ComplexityStruct complexity(limit);
16318     if (fgWalkTreePre(tree, &ComplexityExceedsWalker, &complexity) == WALK_ABORT)
16319     {
16320         return true;
16321     }
16322     else
16323     {
16324         return false;
16325     }
16326 }
16327
16328 bool GenTree::IsPhiNode()
16329 {
16330     return (OperGet() == GT_PHI_ARG) || (OperGet() == GT_PHI) || IsPhiDefn();
16331 }
16332
bool GenTree::IsPhiDefn()
{
    // A phi definition is either a GT_ASG whose RHS (gtOp2) is a GT_PHI, or a
    // GT_STORE_LCL_VAR whose data operand (gtOp1) is a GT_PHI.
    bool res = ((OperGet() == GT_ASG) && (gtOp.gtOp2 != nullptr) && (gtOp.gtOp2->OperGet() == GT_PHI)) ||
               ((OperGet() == GT_STORE_LCL_VAR) && (gtOp.gtOp1 != nullptr) && (gtOp.gtOp1->OperGet() == GT_PHI));
    // For the GT_ASG form the destination must be a simple GT_LCL_VAR.
    assert(!res || OperGet() == GT_STORE_LCL_VAR || gtOp.gtOp1->OperGet() == GT_LCL_VAR);
    return res;
}
16340
16341 bool GenTree::IsPhiDefnStmt()
16342 {
16343     if (OperGet() != GT_STMT)
16344     {
16345         return false;
16346     }
16347     GenTree* asg = gtStmt.gtStmtExpr;
16348     return asg->IsPhiDefn();
16349 }
16350
16351 // IsPartialLclFld: Check for a GT_LCL_FLD whose type is a different size than the lclVar.
16352 //
16353 // Arguments:
16354 //    comp      - the Compiler object.
16355 //
16356 // Return Value:
16357 //    Returns "true" iff 'this' is a GT_LCL_FLD or GT_STORE_LCL_FLD on which the type
16358 //    is not the same size as the type of the GT_LCL_VAR
16359
bool GenTree::IsPartialLclFld(Compiler* comp)
{
    // A GT_LCL_FLD whose access size differs from the local's exact size.
    // NOTE(review): the header comment above mentions GT_STORE_LCL_FLD as
    // well, but only GT_LCL_FLD is checked here -- confirm whether the store
    // form should also match.
    return ((gtOper == GT_LCL_FLD) &&
            (comp->lvaTable[this->gtLclVarCommon.gtLclNum].lvExactSize != genTypeSize(gtType)));
}
16365
//------------------------------------------------------------------------
// DefinesLocal: Determine whether this node defines (stores to) a local.
//
// Arguments:
//    comp        - the Compiler instance
//    pLclVarTree - [out] set to the defined local's node when true is returned
//    pIsEntire   - [out, optional] set to true when the store covers the
//                  entire local
//
// Return Value:
//    True if this is an assignment or block store whose destination is
//    (or is based on the address of) a local variable.
//
bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire)
{
    GenTreeBlk* blkNode = nullptr;
    if (OperIsAssignment())
    {
        if (gtOp.gtOp1->IsLocal())
        {
            // Direct store to a local: entire unless it's a partial GT_LCL_FLD.
            GenTreeLclVarCommon* lclVarTree = gtOp.gtOp1->AsLclVarCommon();
            *pLclVarTree                    = lclVarTree;
            if (pIsEntire != nullptr)
            {
                if (lclVarTree->IsPartialLclFld(comp))
                {
                    *pIsEntire = false;
                }
                else
                {
                    *pIsEntire = true;
                }
            }
            return true;
        }
        else if (gtOp.gtOp1->OperGet() == GT_IND)
        {
            // Indirect store: check whether the address is based on a local.
            GenTree* indArg = gtOp.gtOp1->gtOp.gtOp1;
            return indArg->DefinesLocalAddr(comp, genTypeSize(gtOp.gtOp1->TypeGet()), pLclVarTree, pIsEntire);
        }
        else if (gtOp.gtOp1->OperIsBlk())
        {
            blkNode = gtOp.gtOp1->AsBlk();
        }
    }
    else if (OperIsBlk())
    {
        blkNode = this->AsBlk();
    }
    if (blkNode != nullptr)
    {
        GenTree* destAddr = blkNode->Addr();
        unsigned width    = blkNode->gtBlkSize;
        // Do we care about whether this assigns the entire variable?
        if (pIsEntire != nullptr && width == 0)
        {
            // A zero block size means a dynamically-sized block; try to
            // recover a constant size from the GT_DYN_BLK's size operand.
            assert(blkNode->gtOper == GT_DYN_BLK);
            GenTree* blockWidth = blkNode->AsDynBlk()->gtDynamicSize;
            if (blockWidth->IsCnsIntOrI())
            {
                if (blockWidth->IsIconHandle())
                {
                    // If it's a handle, it must be a class handle.  We only create such block operations
                    // for initialization of struct types, so the type of the argument(s) will match this
                    // type, by construction, and be "entire".
                    assert(blockWidth->IsIconHandle(GTF_ICON_CLASS_HDL));
                    width = comp->info.compCompHnd->getClassSize(
                        CORINFO_CLASS_HANDLE(blockWidth->gtIntConCommon.IconValue()));
                }
                else
                {
                    ssize_t swidth = blockWidth->AsIntConCommon()->IconValue();
                    assert(swidth >= 0);
                    // cpblk of size zero exists in the wild (in yacc-generated code in SQL) and is valid IL.
                    if (swidth == 0)
                    {
                        return false;
                    }
                    width = unsigned(swidth);
                }
            }
        }
        return destAddr->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
    }
    // Otherwise...
    return false;
}
16440
//------------------------------------------------------------------------
// DefinesLocalAddr: Returns true if this GenTree defines a result which is
// based on the address of a local, and if so reports that local.
//
// Arguments:
//    comp        - the Compiler instance
//    width       - width in bytes of the store through this address; used to
//                  decide whether the entire local is covered
//    pLclVarTree - [out] set to the addressed local's node when true is returned
//    pIsEntire   - [out, optional] true when 'width' covers the local's full
//                  (normalize-on-store adjusted) storage width
//
bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire)
{
    if (OperGet() == GT_ADDR || OperGet() == GT_LCL_VAR_ADDR)
    {
        GenTree* addrArg = this;
        if (OperGet() == GT_ADDR)
        {
            addrArg = gtOp.gtOp1;
        }

        if (addrArg->IsLocal() || addrArg->OperIsLocalAddr())
        {
            GenTreeLclVarCommon* addrArgLcl = addrArg->AsLclVarCommon();
            *pLclVarTree                    = addrArgLcl;
            if (pIsEntire != nullptr)
            {
                unsigned lclOffset = 0;
                if (addrArg->OperIsLocalField())
                {
                    lclOffset = addrArg->gtLclFld.gtLclOffs;
                }

                if (lclOffset != 0)
                {
                    // We aren't updating the bytes at [0..lclOffset-1] so *pIsEntire should be set to false
                    *pIsEntire = false;
                }
                else
                {
                    unsigned lclNum   = addrArgLcl->GetLclNum();
                    unsigned varWidth = comp->lvaLclExactSize(lclNum);
                    if (comp->lvaTable[lclNum].lvNormalizeOnStore())
                    {
                        // It's normalize on store, so use the full storage width -- writing to low bytes won't
                        // necessarily yield a normalized value.
                        varWidth = genTypeStSz(var_types(comp->lvaTable[lclNum].lvType)) * sizeof(int);
                    }
                    *pIsEntire = (varWidth == width);
                }
            }
            return true;
        }
        else if (addrArg->OperGet() == GT_IND)
        {
            // A GT_ADDR of a GT_IND can both be optimized away, recurse using the child of the GT_IND
            return addrArg->gtOp.gtOp1->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
        }
    }
    else if (OperGet() == GT_ADD)
    {
        if (gtOp.gtOp1->IsCnsIntOrI())
        {
            // If we just adding a zero then we allow an IsEntire match against width
            //  otherwise we change width to zero to disallow an IsEntire Match
            return gtOp.gtOp2->DefinesLocalAddr(comp, gtOp.gtOp1->IsIntegralConst(0) ? width : 0, pLclVarTree,
                                                pIsEntire);
        }
        else if (gtOp.gtOp2->IsCnsIntOrI())
        {
            // If we just adding a zero then we allow an IsEntire match against width
            //  otherwise we change width to zero to disallow an IsEntire Match
            return gtOp.gtOp1->DefinesLocalAddr(comp, gtOp.gtOp2->IsIntegralConst(0) ? width : 0, pLclVarTree,
                                                pIsEntire);
        }
    }
    // Post rationalization we could have GT_IND(GT_LEA(..)) trees.
    else if (OperGet() == GT_LEA)
    {
        // This method gets invoked during liveness computation and therefore it is critical
        // that we don't miss 'use' of any local.  The below logic is making the assumption
        // that in case of LEA(base, index, offset) - only base can be a GT_LCL_VAR_ADDR
        // and index is not.
        CLANG_FORMAT_COMMENT_ANCHOR;

#ifdef DEBUG
        // Verify the assumption above: the index operand must not be local-based.
        GenTree* index = gtOp.gtOp2;
        if (index != nullptr)
        {
            assert(!index->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire));
        }
#endif // DEBUG

        // base
        GenTree* base = gtOp.gtOp1;
        if (base != nullptr)
        {
            // Lea could have an Indir as its base.
            if (base->OperGet() == GT_IND)
            {
                base = base->gtOp.gtOp1->gtEffectiveVal(/*commas only*/ true);
            }
            return base->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
        }
    }
    // Otherwise...
    return false;
}
16539
16540 //------------------------------------------------------------------------
16541 // IsLocalExpr: Determine if this is a LclVarCommon node and return some
16542 //              additional info about it in the two out parameters.
16543 //
16544 // Arguments:
16545 //    comp        - The Compiler instance
16546 //    pLclVarTree - An "out" argument that returns the local tree as a
16547 //                  LclVarCommon, if it is indeed local.
16548 //    pFldSeq     - An "out" argument that returns the value numbering field
16549 //                  sequence for the node, if any.
16550 //
16551 // Return Value:
16552 //    Returns true, and sets the out arguments accordingly, if this is
16553 //    a LclVarCommon node.
16554
16555 bool GenTree::IsLocalExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq)
16556 {
16557     if (IsLocal()) // Note that this covers "GT_LCL_FLD."
16558     {
16559         *pLclVarTree = AsLclVarCommon();
16560         if (OperGet() == GT_LCL_FLD)
16561         {
16562             // Otherwise, prepend this field to whatever we've already accumulated outside in.
16563             *pFldSeq = comp->GetFieldSeqStore()->Append(AsLclFld()->gtFieldSeq, *pFldSeq);
16564         }
16565         return true;
16566     }
16567     else
16568     {
16569         return false;
16570     }
16571 }
16572
16573 // If this tree evaluates some sum of a local address and some constants,
16574 // return the node for the local being addressed
16575
16576 GenTreeLclVarCommon* GenTree::IsLocalAddrExpr()
16577 {
16578     if (OperGet() == GT_ADDR)
16579     {
16580         return gtOp.gtOp1->IsLocal() ? gtOp.gtOp1->AsLclVarCommon() : nullptr;
16581     }
16582     else if (OperIsLocalAddr())
16583     {
16584         return this->AsLclVarCommon();
16585     }
16586     else if (OperGet() == GT_ADD)
16587     {
16588         if (gtOp.gtOp1->OperGet() == GT_CNS_INT)
16589         {
16590             return gtOp.gtOp2->IsLocalAddrExpr();
16591         }
16592         else if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
16593         {
16594             return gtOp.gtOp1->IsLocalAddrExpr();
16595         }
16596     }
16597     // Otherwise...
16598     return nullptr;
16599 }
16600
// Determines whether this tree computes a local's address (possibly plus
// constant field offsets), reporting the local and the accumulated field
// sequence through the out parameters.
bool GenTree::IsLocalAddrExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq)
{
    if (OperGet() == GT_ADDR)
    {
        // GT_ADDR is only expected in HIR (pre-rationalized) form.
        assert(!comp->compRationalIRForm);
        GenTree* addrArg = gtOp.gtOp1;
        if (addrArg->IsLocal()) // Note that this covers "GT_LCL_FLD."
        {
            *pLclVarTree = addrArg->AsLclVarCommon();
            if (addrArg->OperGet() == GT_LCL_FLD)
            {
                // Otherwise, prepend this field to whatever we've already accumulated outside in.
                *pFldSeq = comp->GetFieldSeqStore()->Append(addrArg->AsLclFld()->gtFieldSeq, *pFldSeq);
            }
            return true;
        }
        else
        {
            return false;
        }
    }
    else if (OperIsLocalAddr())
    {
        *pLclVarTree = this->AsLclVarCommon();
        if (this->OperGet() == GT_LCL_FLD_ADDR)
        {
            *pFldSeq = comp->GetFieldSeqStore()->Append(this->AsLclFld()->gtFieldSeq, *pFldSeq);
        }
        return true;
    }
    else if (OperGet() == GT_ADD)
    {
        if (gtOp.gtOp1->OperGet() == GT_CNS_INT)
        {
            // A constant without a field sequence means this is not a
            // recognizable field-offset addition.
            if (gtOp.gtOp1->AsIntCon()->gtFieldSeq == nullptr)
            {
                return false;
            }
            // Otherwise, prepend this field to whatever we've already accumulated outside in.
            *pFldSeq = comp->GetFieldSeqStore()->Append(gtOp.gtOp1->AsIntCon()->gtFieldSeq, *pFldSeq);
            return gtOp.gtOp2->IsLocalAddrExpr(comp, pLclVarTree, pFldSeq);
        }
        else if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
        {
            if (gtOp.gtOp2->AsIntCon()->gtFieldSeq == nullptr)
            {
                return false;
            }
            // Otherwise, prepend this field to whatever we've already accumulated outside in.
            *pFldSeq = comp->GetFieldSeqStore()->Append(gtOp.gtOp2->AsIntCon()->gtFieldSeq, *pFldSeq);
            return gtOp.gtOp1->IsLocalAddrExpr(comp, pLclVarTree, pFldSeq);
        }
    }
    // Otherwise...
    return false;
}
16657
16658 //------------------------------------------------------------------------
16659 // IsLclVarUpdateTree: Determine whether this is an assignment tree of the
16660 //                     form Vn = Vn 'oper' 'otherTree' where Vn is a lclVar
16661 //
16662 // Arguments:
16663 //    pOtherTree - An "out" argument in which 'otherTree' will be returned.
16664 //    pOper      - An "out" argument in which 'oper' will be returned.
16665 //
16666 // Return Value:
16667 //    If the tree is of the above form, the lclNum of the variable being
16668 //    updated is returned, and 'pOtherTree' and 'pOper' are set.
16669 //    Otherwise, returns BAD_VAR_NUM.
16670 //
16671 // Notes:
16672 //    'otherTree' can have any shape.
16673 //     We avoid worrying about whether the op is commutative by only considering the
16674 //     first operand of the rhs. It is expected that most trees of this form will
16675 //     already have the lclVar on the lhs.
16676 //     TODO-CQ: Evaluate whether there are missed opportunities due to this, or
16677 //     whether gtSetEvalOrder will already have put the lclVar on the lhs in
16678 //     the cases of interest.
16679
unsigned GenTree::IsLclVarUpdateTree(GenTree** pOtherTree, genTreeOps* pOper)
{
    unsigned lclNum = BAD_VAR_NUM;
    if (OperIsAssignment())
    {
        GenTree* lhs = gtOp.gtOp1;
        if (lhs->OperGet() == GT_LCL_VAR)
        {
            unsigned lhsLclNum = lhs->AsLclVarCommon()->gtLclNum;
            if (gtOper == GT_ASG)
            {
                // Match "Vn = Vn oper other": only the first operand of the
                // RHS is checked against the LHS local.
                GenTree* rhs = gtOp.gtOp2;
                if (rhs->OperIsBinary() && (rhs->gtOp.gtOp1->gtOper == GT_LCL_VAR) &&
                    (rhs->gtOp.gtOp1->AsLclVarCommon()->gtLclNum == lhsLclNum))
                {
                    lclNum      = lhsLclNum;
                    *pOtherTree = rhs->gtOp.gtOp2;
                    *pOper      = rhs->gtOper;
                }
            }
#ifdef LEGACY_BACKEND
            else
            {
                // Legacy op-assign form (Vn op= other): recover the
                // underlying operator from the assignment oper.
                lclNum      = lhsLclNum;
                *pOper      = GenTree::OpAsgToOper(gtOper);
                *pOtherTree = gtOp.gtOp2;
            }
#endif
        }
    }
    return lclNum;
}
16712
16713 //------------------------------------------------------------------------
16714 // canBeContained: check whether this tree node may be a subcomponent of its parent for purposes
16715 //                 of code generation.
16716 //
16717 // Return value: returns true if it is possible to contain this node and false otherwise.
16718 bool GenTree::canBeContained() const
16719 {
16720     assert(IsLIR());
16721
16722     if (gtHasReg())
16723     {
16724         return false;
16725     }
16726
16727     // It is not possible for nodes that do not produce values or that are not containable values
16728     // to be contained.
16729     if (((OperKind() & (GTK_NOVALUE | GTK_NOCONTAIN)) != 0) || (OperIsHWIntrinsic() && !isContainableHWIntrinsic()))
16730     {
16731         return false;
16732     }
16733
16734     return true;
16735 }
16736
16737 //------------------------------------------------------------------------
16738 // isContained: check whether this tree node is a subcomponent of its parent for codegen purposes
16739 //
16740 // Return Value:
16741 //    Returns true if there is no code generated explicitly for this node.
16742 //    Essentially, it will be rolled into the code generation for the parent.
16743 //
16744 // Assumptions:
16745 //    This method relies upon the value of the GTF_CONTAINED flag.
16746 //    Therefore this method is only valid after Lowering.
16747 //    Also note that register allocation or other subsequent phases may cause
16748 //    nodes to become contained (or not) and therefore this property may change.
16749 //
bool GenTree::isContained() const
{
#ifdef LEGACY_BACKEND
    // The legacy backend does not model containment.
    return false;
#else // !LEGACY_BACKEND
    assert(IsLIR());
    const bool isMarkedContained = ((gtFlags & GTF_CONTAINED) != 0);

#ifdef DEBUG
    // Sanity: a node that cannot be contained must not be marked contained.
    if (!canBeContained())
    {
        assert(!isMarkedContained);
    }

    // these actually produce a register (the flags reg, we just don't model it)
    // and are a separate instruction from the branch that consumes the result.
    // They can only produce a result if the child is a SIMD equality comparison.
    else if (OperKind() & GTK_RELOP)
    {
        // We have to cast away const-ness since AsOp() method is non-const.
        GenTree* childNode = const_cast<GenTree*>(this)->AsOp()->gtOp1;
        assert((isMarkedContained == false) || childNode->IsSIMDEqualityOrInequality());
    }

    // these either produce a result in register or set flags reg.
    else if (IsSIMDEqualityOrInequality())
    {
        assert(!isMarkedContained);
    }

    // if it's contained it can't be unused.
    if (isMarkedContained)
    {
        assert(!IsUnusedValue());
    }
#endif // DEBUG
    return isMarkedContained;
#endif // !LEGACY_BACKEND
}
16789
16790 // return true if node is contained and an indir
16791 bool GenTree::isContainedIndir() const
16792 {
16793     return isIndir() && isContained();
16794 }
16795
16796 bool GenTree::isIndirAddrMode()
16797 {
16798     return isIndir() && AsIndir()->Addr()->OperIsAddrMode() && AsIndir()->Addr()->isContained();
16799 }
16800
16801 bool GenTree::isIndir() const
16802 {
16803     return OperGet() == GT_IND || OperGet() == GT_STOREIND;
16804 }
16805
16806 bool GenTreeIndir::HasBase()
16807 {
16808     return Base() != nullptr;
16809 }
16810
16811 bool GenTreeIndir::HasIndex()
16812 {
16813     return Index() != nullptr;
16814 }
16815
16816 GenTree* GenTreeIndir::Base()
16817 {
16818     GenTree* addr = Addr();
16819
16820     if (isIndirAddrMode())
16821     {
16822         GenTree* result = addr->AsAddrMode()->Base();
16823         if (result != nullptr)
16824         {
16825             result = result->gtEffectiveVal();
16826         }
16827         return result;
16828     }
16829     else
16830     {
16831         return addr; // TODO: why do we return 'addr' here, but we return 'nullptr' in the equivalent Index() case?
16832     }
16833 }
16834
16835 GenTree* GenTreeIndir::Index()
16836 {
16837     if (isIndirAddrMode())
16838     {
16839         GenTree* result = Addr()->AsAddrMode()->Index();
16840         if (result != nullptr)
16841         {
16842             result = result->gtEffectiveVal();
16843         }
16844         return result;
16845     }
16846     else
16847     {
16848         return nullptr;
16849     }
16850 }
16851
16852 unsigned GenTreeIndir::Scale()
16853 {
16854     if (HasIndex())
16855     {
16856         return Addr()->AsAddrMode()->gtScale;
16857     }
16858     else
16859     {
16860         return 1;
16861     }
16862 }
16863
16864 ssize_t GenTreeIndir::Offset()
16865 {
16866     if (isIndirAddrMode())
16867     {
16868         return Addr()->AsAddrMode()->Offset();
16869     }
16870     else if (Addr()->gtOper == GT_CLS_VAR_ADDR)
16871     {
16872         return static_cast<ssize_t>(reinterpret_cast<intptr_t>(Addr()->gtClsVar.gtClsVarHnd));
16873     }
16874     else if (Addr()->IsCnsIntOrI() && Addr()->isContained())
16875     {
16876         return Addr()->AsIntConCommon()->IconValue();
16877     }
16878     else
16879     {
16880         return 0;
16881     }
16882 }
16883
16884 //------------------------------------------------------------------------
16885 // GenTreeIntConCommon::ImmedValNeedsReloc: does this immediate value needs recording a relocation with the VM?
16886 //
16887 // Arguments:
16888 //    comp - Compiler instance
16889 //
16890 // Return Value:
16891 //    True if this immediate value requires us to record a relocation for it; false otherwise.
16892
16893 bool GenTreeIntConCommon::ImmedValNeedsReloc(Compiler* comp)
16894 {
16895     return comp->opts.compReloc && (gtOper == GT_CNS_INT) && IsIconHandle();
16896 }
16897
16898 //------------------------------------------------------------------------
16899 // ImmedValCanBeFolded: can this immediate value be folded for op?
16900 //
16901 // Arguments:
16902 //    comp - Compiler instance
16903 //    op - Tree operator
16904 //
16905 // Return Value:
16906 //    True if this immediate value can be folded for op; false otherwise.
16907
16908 bool GenTreeIntConCommon::ImmedValCanBeFolded(Compiler* comp, genTreeOps op)
16909 {
16910     // In general, immediate values that need relocations can't be folded.
16911     // There are cases where we do want to allow folding of handle comparisons
16912     // (e.g., typeof(T) == typeof(int)).
16913     return !ImmedValNeedsReloc(comp) || (op == GT_EQ) || (op == GT_NE);
16914 }
16915
16916 #ifdef _TARGET_AMD64_
16917 // Returns true if this absolute address fits within the base of an addr mode.
16918 // On Amd64 this effectively means, whether an absolute indirect address can
16919 // be encoded as 32-bit offset relative to IP or zero.
16920 bool GenTreeIntConCommon::FitsInAddrBase(Compiler* comp)
16921 {
16922 #ifndef LEGACY_BACKEND
16923 #ifdef DEBUG
16924     // Early out if PC-rel encoding of absolute addr is disabled.
16925     if (!comp->opts.compEnablePCRelAddr)
16926     {
16927         return false;
16928     }
16929 #endif
16930 #endif //! LEGACY_BACKEND
16931
16932     if (comp->opts.compReloc)
16933     {
16934         // During Ngen JIT is always asked to generate relocatable code.
16935         // Hence JIT will try to encode only icon handles as pc-relative offsets.
16936         return IsIconHandle() && (IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue()));
16937     }
16938     else
16939     {
16940         // During Jitting, we are allowed to generate non-relocatable code.
16941         // On Amd64 we can encode an absolute indirect addr as an offset relative to zero or RIP.
16942         // An absolute indir addr that can fit within 32-bits can ben encoded as an offset relative
16943         // to zero. All other absolute indir addr could be attempted to be encoded as RIP relative
16944         // based on reloc hint provided by VM.  RIP relative encoding is preferred over relative
16945         // to zero, because the former is one byte smaller than the latter.  For this reason
16946         // we check for reloc hint first and then whether addr fits in 32-bits next.
16947         //
16948         // VM starts off with an initial state to allow both data and code address to be encoded as
16949         // pc-relative offsets.  Hence JIT will attempt to encode all absolute addresses as pc-relative
16950         // offsets.  It is possible while jitting a method, an address could not be encoded as a
16951         // pc-relative offset.  In that case VM will note the overflow and will trigger re-jitting
16952         // of the method with reloc hints turned off for all future methods. Second time around
16953         // jitting will succeed since JIT will not attempt to encode data addresses as pc-relative
16954         // offsets.  Note that JIT will always attempt to relocate code addresses (.e.g call addr).
16955         // After an overflow, VM will assume any relocation recorded is for a code address and will
16956         // emit jump thunk if it cannot be encoded as pc-relative offset.
16957         return (IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue())) || FitsInI32();
16958     }
16959 }
16960
16961 // Returns true if this icon value is encoded as addr needs recording a relocation with VM
16962 bool GenTreeIntConCommon::AddrNeedsReloc(Compiler* comp)
16963 {
16964     if (comp->opts.compReloc)
16965     {
16966         // During Ngen JIT is always asked to generate relocatable code.
16967         // Hence JIT will try to encode only icon handles as pc-relative offsets.
16968         return IsIconHandle() && (IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue()));
16969     }
16970     else
16971     {
16972         return IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue());
16973     }
16974 }
16975
16976 #elif defined(_TARGET_X86_)
16977 // Returns true if this absolute address fits within the base of an addr mode.
16978 // On x86 all addresses are 4-bytes and can be directly encoded in an addr mode.
16979 bool GenTreeIntConCommon::FitsInAddrBase(Compiler* comp)
16980 {
16981 #ifndef LEGACY_BACKEND
16982 #ifdef DEBUG
16983     // Early out if PC-rel encoding of absolute addr is disabled.
16984     if (!comp->opts.compEnablePCRelAddr)
16985     {
16986         return false;
16987     }
16988 #endif
16989 #endif //! LEGACY_BACKEND
16990
16991     return IsCnsIntOrI();
16992 }
16993
16994 // Returns true if this icon value is encoded as addr needs recording a relocation with VM
16995 bool GenTreeIntConCommon::AddrNeedsReloc(Compiler* comp)
16996 {
16997     // If generating relocatable code, icons should be reported for recording relocatons.
16998     return comp->opts.compReloc && IsIconHandle();
16999 }
17000 #endif //_TARGET_X86_
17001
//------------------------------------------------------------------------
// IsFieldAddr: attempt to recognize "this" tree as the address of a field.
//
// Arguments:
//    comp    - Compiler instance
//    pObj    - [out] set to the object reference tree for an instance field,
//              or nullptr for a static field
//    pStatic - [out] set to the static base-address tree (possibly "this")
//              for a static field, or nullptr for an instance field
//    pFldSeq - [in/out] field sequence accumulated so far; fields recognized
//              here are prepended (outside-in)
//
// Return Value:
//    true if the tree was recognized as a field address; false otherwise.
//
bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pObj, GenTree** pStatic, FieldSeqNode** pFldSeq)
{
    FieldSeqNode* newFldSeq    = nullptr;
    GenTree*      baseAddr     = nullptr;
    bool          mustBeStatic = false;

    FieldSeqNode* statStructFldSeq = nullptr;
    if (TypeGet() == TYP_REF)
    {
        // Recognize struct static field patterns...
        if (OperGet() == GT_IND)
        {
            GenTree*       addr = gtOp.gtOp1;
            GenTreeIntCon* icon = nullptr;
            if (addr->OperGet() == GT_CNS_INT)
            {
                icon = addr->AsIntCon();
            }
            else if (addr->OperGet() == GT_ADD)
            {
                // op1 should never be a field sequence (or any other kind of handle)
                assert((addr->gtOp.gtOp1->gtOper != GT_CNS_INT) || !addr->gtOp.gtOp1->IsIconHandle());
                if (addr->gtOp.gtOp2->OperGet() == GT_CNS_INT)
                {
                    icon = addr->gtOp.gtOp2->AsIntCon();
                }
            }
            if (icon != nullptr && !icon->IsIconHandle(GTF_ICON_STR_HDL) // String handles are a source of TYP_REFs.
                && icon->gtFieldSeq != nullptr &&
                icon->gtFieldSeq->m_next == nullptr // A static field should be a singleton
                // TODO-Review: A pseudoField here indicates an issue - this requires investigation
                // See test case src\ddsuites\src\clr\x86\CoreMangLib\Dev\Globalization\CalendarRegressions.exe
                && !(FieldSeqStore::IsPseudoField(icon->gtFieldSeq->m_fieldHnd)) &&
                icon->gtFieldSeq != FieldSeqStore::NotAField()) // Ignore non-fields.
            {
                statStructFldSeq = icon->gtFieldSeq;
            }
            else
            {
                addr = addr->gtEffectiveVal();

                // Perhaps it's a direct indirection of a helper call or a cse with a zero offset annotation.
                if ((addr->OperGet() == GT_CALL) || (addr->OperGet() == GT_LCL_VAR))
                {
                    FieldSeqNode* zeroFieldSeq = nullptr;
                    if (comp->GetZeroOffsetFieldMap()->Lookup(addr, &zeroFieldSeq))
                    {
                        if (zeroFieldSeq->m_next == nullptr)
                        {
                            statStructFldSeq = zeroFieldSeq;
                        }
                    }
                }
            }
        }
        else if (OperGet() == GT_CLS_VAR)
        {
            GenTreeClsVar* clsVar = AsClsVar();
            if (clsVar->gtFieldSeq != nullptr && clsVar->gtFieldSeq->m_next == nullptr)
            {
                statStructFldSeq = clsVar->gtFieldSeq;
            }
        }
        else if (OperIsLocal())
        {
            // If we have a GT_LCL_VAR, it can be result of a CSE substitution
            // If it is then the CSE assignment will have a ValueNum that
            // describes the RHS of the CSE assignment.
            //
            // The CSE could be a pointer to a boxed struct
            //
            GenTreeLclVarCommon* lclVar = AsLclVarCommon();
            ValueNum             vn     = gtVNPair.GetLiberal();
            if (vn != ValueNumStore::NoVN)
            {
                // Is the ValueNum a MapSelect involving a SharedStatic helper?
                VNFuncApp funcApp1;
                if (comp->vnStore->GetVNFunc(vn, &funcApp1) && (funcApp1.m_func == VNF_MapSelect) &&
                    (comp->vnStore->IsSharedStatic(funcApp1.m_args[1])))
                {
                    ValueNum mapVN = funcApp1.m_args[0];
                    // Is this new 'mapVN' ValueNum, a MapSelect involving a handle?
                    VNFuncApp funcApp2;
                    if (comp->vnStore->GetVNFunc(mapVN, &funcApp2) && (funcApp2.m_func == VNF_MapSelect) &&
                        (comp->vnStore->IsVNHandle(funcApp2.m_args[1])))
                    {
                        ValueNum fldHndVN = funcApp2.m_args[1];
                        // Is this new 'fldHndVN' VNhandle a FieldHandle?
                        unsigned flags = comp->vnStore->GetHandleFlags(fldHndVN);
                        if (flags == GTF_ICON_FIELD_HDL)
                        {
                            CORINFO_FIELD_HANDLE fieldHnd =
                                CORINFO_FIELD_HANDLE(comp->vnStore->ConstantValue<ssize_t>(fldHndVN));

                            // Record this field sequence in 'statStructFldSeq' as it is likely to be a Boxed Struct
                            // field access.
                            statStructFldSeq = comp->GetFieldSeqStore()->CreateSingleton(fieldHnd);
                        }
                    }
                }
            }
        }

        if (statStructFldSeq != nullptr)
        {
            assert(statStructFldSeq->m_next == nullptr);
            // Is this a pointer to a boxed struct?
            if (comp->gtIsStaticFieldPtrToBoxedStruct(TYP_REF, statStructFldSeq->m_fieldHnd))
            {
                *pFldSeq = comp->GetFieldSeqStore()->Append(statStructFldSeq, *pFldSeq);
                *pObj    = nullptr;
                *pStatic = this;
                return true;
            }
        }

        // Otherwise...
        // A TYP_REF tree that is not a recognized static pattern is the object of an instance field.
        *pObj    = this;
        *pStatic = nullptr;
        return true;
    }
    else if (OperGet() == GT_ADD)
    {
        // If one operator is a field sequence/handle, the other operator must not also be a field sequence/handle.
        if ((gtOp.gtOp1->OperGet() == GT_CNS_INT) && gtOp.gtOp1->IsIconHandle())
        {
            assert((gtOp.gtOp2->gtOper != GT_CNS_INT) || !gtOp.gtOp2->IsIconHandle());
            newFldSeq = gtOp.gtOp1->AsIntCon()->gtFieldSeq;
            baseAddr  = gtOp.gtOp2;
        }
        else if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
        {
            assert((gtOp.gtOp1->gtOper != GT_CNS_INT) || !gtOp.gtOp1->IsIconHandle());
            newFldSeq = gtOp.gtOp2->AsIntCon()->gtFieldSeq;
            baseAddr  = gtOp.gtOp1;
        }
    }
    else
    {
        // Check if "this" has a zero-offset annotation.
        if (!comp->GetZeroOffsetFieldMap()->Lookup(this, &newFldSeq))
        {
            // If not, this is not a field address.
            return false;
        }
        else
        {
            baseAddr     = this;
            mustBeStatic = true;
        }
    }

    // If not we don't have a field seq, it's not a field address.
    if (newFldSeq == nullptr || newFldSeq == FieldSeqStore::NotAField())
    {
        return false;
    }

    // Prepend this field to whatever we've already accumulated (outside-in).
    *pFldSeq = comp->GetFieldSeqStore()->Append(newFldSeq, *pFldSeq);

    // Is it a static or instance field?
    if (!FieldSeqStore::IsPseudoField(newFldSeq->m_fieldHnd) &&
        comp->info.compCompHnd->isFieldStatic(newFldSeq->m_fieldHnd))
    {
        // It is a static field.  We're done.
        *pObj    = nullptr;
        *pStatic = baseAddr;
        return true;
    }
    else if ((baseAddr != nullptr) && !mustBeStatic)
    {
        // It's an instance field...but it must be for a struct field, since we've not yet encountered
        // a "TYP_REF" address.  Analyze the reset of the address.
        return baseAddr->gtEffectiveVal()->IsFieldAddr(comp, pObj, pStatic, pFldSeq);
    }

    // Otherwise...
    return false;
}
17182
17183 bool Compiler::gtIsStaticFieldPtrToBoxedStruct(var_types fieldNodeType, CORINFO_FIELD_HANDLE fldHnd)
17184 {
17185     if (fieldNodeType != TYP_REF)
17186     {
17187         return false;
17188     }
17189     CORINFO_CLASS_HANDLE fldCls = nullptr;
17190     noway_assert(fldHnd != nullptr);
17191     CorInfoType cit      = info.compCompHnd->getFieldType(fldHnd, &fldCls);
17192     var_types   fieldTyp = JITtype2varType(cit);
17193     return fieldTyp != TYP_REF;
17194 }
17195
//------------------------------------------------------------------------
// gtGetStructHandleIfPresent: look up the struct class handle for a tree.
//
// Arguments:
//    tree - the tree of interest
//
// Return Value:
//    The class handle describing the struct type of "tree", or NO_CLASS_HANDLE
//    when the node kind does not record one.
//
CORINFO_CLASS_HANDLE Compiler::gtGetStructHandleIfPresent(GenTree* tree)
{
    CORINFO_CLASS_HANDLE structHnd = NO_CLASS_HANDLE;
    tree                           = tree->gtEffectiveVal();
    if (varTypeIsStruct(tree->gtType))
    {
        switch (tree->gtOper)
        {
            default:
                break;
            case GT_MKREFANY:
                structHnd = impGetRefAnyClass();
                break;
            case GT_OBJ:
                structHnd = tree->gtObj.gtClass;
                break;
            case GT_CALL:
                structHnd = tree->gtCall.gtRetClsHnd;
                break;
            case GT_RET_EXPR:
                structHnd = tree->gtRetExpr.gtRetClsHnd;
                break;
            case GT_ARGPLACE:
                structHnd = tree->gtArgPlace.gtArgPlaceClsHnd;
                break;
            case GT_INDEX:
                structHnd = tree->gtIndex.gtStructElemClass;
                break;
            case GT_INDEX_ADDR:
                structHnd = tree->AsIndexAddr()->gtStructElemClass;
                break;
            case GT_FIELD:
                // getFieldType returns the struct handle through its out parameter.
                info.compCompHnd->getFieldType(tree->gtField.gtFldHnd, &structHnd);
                break;
            case GT_ASG:
                // The struct type of an assignment is that of its destination.
                structHnd = gtGetStructHandleIfPresent(tree->gtGetOp1());
                break;
            case GT_LCL_FLD:
#ifdef FEATURE_SIMD
                if (varTypeIsSIMD(tree))
                {
                    structHnd = gtGetStructHandleForSIMD(tree->gtType, TYP_FLOAT);
                }
#endif
                break;
            case GT_LCL_VAR:
                structHnd = lvaTable[tree->AsLclVarCommon()->gtLclNum].lvVerTypeInfo.GetClassHandle();
                break;
            case GT_RETURN:
                structHnd = gtGetStructHandleIfPresent(tree->gtOp.gtOp1);
                break;
            case GT_IND:
#ifdef FEATURE_SIMD
                if (varTypeIsSIMD(tree))
                {
                    structHnd = gtGetStructHandleForSIMD(tree->gtType, TYP_FLOAT);
                }
                else
#endif
                {
                    // If this indirection is an array element access, recover the element type.
                    ArrayInfo arrInfo;
                    if (TryGetArrayInfo(tree->AsIndir(), &arrInfo))
                    {
                        structHnd = EncodeElemType(arrInfo.m_elemType, arrInfo.m_elemStructType);
                    }
                }
                break;
#ifdef FEATURE_SIMD
            case GT_SIMD:
                structHnd = gtGetStructHandleForSIMD(tree->gtType, tree->AsSIMD()->gtSIMDBaseType);
                break;
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
            case GT_HWIntrinsic:
                structHnd = gtGetStructHandleForHWSIMD(tree->gtType, tree->AsHWIntrinsic()->gtSIMDBaseType);
                break;
#endif
                // NOTE(review): this trailing break appears redundant - every case above already breaks.
                break;
        }
    }
    return structHnd;
}
17278
17279 CORINFO_CLASS_HANDLE Compiler::gtGetStructHandle(GenTree* tree)
17280 {
17281     CORINFO_CLASS_HANDLE structHnd = gtGetStructHandleIfPresent(tree);
17282     assert(structHnd != NO_CLASS_HANDLE);
17283     return structHnd;
17284 }
17285
17286 //------------------------------------------------------------------------
17287 // gtGetClassHandle: find class handle for a ref type
17288 //
17289 // Arguments:
17290 //    tree -- tree to find handle for
17291 //    isExact   [out] -- whether handle is exact type
17292 //    isNonNull [out] -- whether tree value is known not to be null
17293 //
17294 // Return Value:
17295 //    nullptr if class handle is unknown,
17296 //        otherwise the class handle.
17297 //    isExact set true if tree type is known to be exactly the handle type,
17298 //        otherwise actual type may be a subtype.
17299 //    isNonNull set true if tree value is known not to be null,
17300 //        otherwise a null value is possible.
17301
CORINFO_CLASS_HANDLE Compiler::gtGetClassHandle(GenTree* tree, bool* isExact, bool* isNonNull)
{
    // Set default values for our out params.
    *isNonNull                    = false;
    *isExact                      = false;
    CORINFO_CLASS_HANDLE objClass = nullptr;

    // Bail out if we're just importing and not generating code, since
    // the jit uses TYP_REF for CORINFO_TYPE_VAR locals and args, but
    // these may not be ref types.
    if (compIsForImportOnly())
    {
        return objClass;
    }

    // Bail out if the tree is not a ref type.
    var_types treeType = tree->TypeGet();
    if (treeType != TYP_REF)
    {
        return objClass;
    }

    // Tunnel through commas.
    GenTree*         obj   = tree->gtEffectiveVal(false);
    const genTreeOps objOp = obj->OperGet();

    // Dispatch on the kind of node producing the ref; each case below knows
    // where (if anywhere) the class type is recorded for that node kind.
    switch (objOp)
    {
        case GT_COMMA:
        {
            // gtEffectiveVal above means we shouldn't see commas here.
            assert(!"unexpected GT_COMMA");
            break;
        }

        case GT_LCL_VAR:
        {
            // For locals, pick up type info from the local table.
            const unsigned objLcl = obj->AsLclVar()->GetLclNum();

            objClass = lvaTable[objLcl].lvClassHnd;
            *isExact = lvaTable[objLcl].lvClassIsExact;
            break;
        }

        case GT_FIELD:
        {
            // For fields, get the type from the field handle.
            CORINFO_FIELD_HANDLE fieldHnd = obj->gtField.gtFldHnd;

            if (fieldHnd != nullptr)
            {
                CORINFO_CLASS_HANDLE fieldClass   = nullptr;
                CorInfoType          fieldCorType = info.compCompHnd->getFieldType(fieldHnd, &fieldClass);
                if (fieldCorType == CORINFO_TYPE_CLASS)
                {
                    objClass = fieldClass;
                }
            }

            break;
        }

        case GT_RET_EXPR:
        {
            // If we see a RET_EXPR, recurse through to examine the
            // return value expression.
            GenTree* retExpr = tree->gtRetExpr.gtInlineCandidate;
            objClass         = gtGetClassHandle(retExpr, isExact, isNonNull);
            break;
        }

        case GT_CALL:
        {
            GenTreeCall* call = tree->AsCall();
            if (call->IsInlineCandidate())
            {
                // For inline candidates, we've already cached the return
                // type class handle in the inline info.
                InlineCandidateInfo* inlInfo = call->gtInlineCandidateInfo;
                assert(inlInfo != nullptr);

                // Grab it as our first cut at a return type.
                assert(inlInfo->methInfo.args.retType == CORINFO_TYPE_CLASS);
                objClass = inlInfo->methInfo.args.retTypeClass;

                // If the method is shared, the above may not capture
                // the most precise return type information (that is,
                // it may represent a shared return type and as such,
                // have instances of __Canon). See if we can use the
                // context to get at something more definite.
                //
                // For now, we do this here on demand rather than when
                // processing the call, but we could/should apply
                // similar sharpening to the argument and local types
                // of the inlinee.
                const unsigned retClassFlags = info.compCompHnd->getClassAttribs(objClass);
                if (retClassFlags & CORINFO_FLG_SHAREDINST)
                {
                    CORINFO_CONTEXT_HANDLE context = inlInfo->exactContextHnd;

                    if (context != nullptr)
                    {
                        CORINFO_CLASS_HANDLE exactClass = nullptr;

                        // The context handle encodes either a class or a method
                        // in its low flag bits; decode accordingly.
                        if (((size_t)context & CORINFO_CONTEXTFLAGS_MASK) == CORINFO_CONTEXTFLAGS_CLASS)
                        {
                            exactClass = (CORINFO_CLASS_HANDLE)((size_t)context & ~CORINFO_CONTEXTFLAGS_MASK);
                        }
                        else
                        {
                            CORINFO_METHOD_HANDLE exactMethod =
                                (CORINFO_METHOD_HANDLE)((size_t)context & ~CORINFO_CONTEXTFLAGS_MASK);
                            exactClass = info.compCompHnd->getMethodClass(exactMethod);
                        }

                        // Grab the signature in this context.
                        CORINFO_SIG_INFO sig;
                        eeGetMethodSig(call->gtCallMethHnd, &sig, exactClass);
                        assert(sig.retType == CORINFO_TYPE_CLASS);
                        objClass = sig.retTypeClass;
                    }
                }
            }
            else if (call->gtCallType == CT_USER_FUNC)
            {
                // For user calls, we can fetch the approximate return
                // type info from the method handle. Unfortunately
                // we've lost the exact context, so this is the best
                // we can do for now.
                CORINFO_METHOD_HANDLE method     = call->gtCallMethHnd;
                CORINFO_CLASS_HANDLE  exactClass = nullptr;
                CORINFO_SIG_INFO      sig;
                eeGetMethodSig(method, &sig, exactClass);
                if (sig.retType == CORINFO_TYPE_VOID)
                {
                    // This is a constructor call.
                    const unsigned methodFlags = info.compCompHnd->getMethodAttribs(method);
                    assert((methodFlags & CORINFO_FLG_CONSTRUCTOR) != 0);
                    objClass   = info.compCompHnd->getMethodClass(method);
                    *isExact   = true;
                    *isNonNull = true;
                }
                else
                {
                    assert(sig.retType == CORINFO_TYPE_CLASS);
                    objClass = sig.retTypeClass;
                }
            }

            break;
        }

        case GT_CNS_STR:
        {
            // For literal strings, we know the class and that the
            // value is not null.
            objClass   = impGetStringClass();
            *isExact   = true;
            *isNonNull = true;
            break;
        }

        case GT_IND:
        {
            // indir(addr(lcl)) --> lcl
            //
            // This comes up during constrained callvirt on ref types.
            GenTreeIndir* indir = obj->AsIndir();
            if (indir->HasBase() && !indir->HasIndex())
            {
                GenTree*             base = indir->Base();
                GenTreeLclVarCommon* lcl  = base->IsLocalAddrExpr();

                if ((lcl != nullptr) && (base->OperGet() != GT_ADD))
                {
                    const unsigned objLcl = lcl->GetLclNum();
                    objClass              = lvaTable[objLcl].lvClassHnd;
                    *isExact              = lvaTable[objLcl].lvClassIsExact;
                }
            }
            break;
        }

        case GT_BOX:
        {
            // Box should just wrap a local var reference which has
            // the type we're looking for. Also box only represents a
            // non-nullable value type so result cannot be null.
            GenTreeBox* box     = obj->AsBox();
            GenTree*    boxTemp = box->BoxOp();
            assert(boxTemp->IsLocal());
            const unsigned boxTempLcl = boxTemp->AsLclVar()->GetLclNum();
            objClass                  = lvaTable[boxTempLcl].lvClassHnd;
            *isExact                  = lvaTable[boxTempLcl].lvClassIsExact;
            *isNonNull                = true;
            break;
        }

        default:
        {
            // Unhandled node kinds: class is unknown.
            break;
        }
    }

    return objClass;
}
17509
17510 void GenTree::ParseArrayAddress(
17511     Compiler* comp, ArrayInfo* arrayInfo, GenTree** pArr, ValueNum* pInxVN, FieldSeqNode** pFldSeq)
17512 {
17513     *pArr                = nullptr;
17514     ValueNum      inxVN  = ValueNumStore::NoVN;
17515     ssize_t       offset = 0;
17516     FieldSeqNode* fldSeq = nullptr;
17517
17518     ParseArrayAddressWork(comp, 1, pArr, &inxVN, &offset, &fldSeq);
17519
17520     // If we didn't find an array reference (perhaps it is the constant null?) we will give up.
17521     if (*pArr == nullptr)
17522     {
17523         return;
17524     }
17525
17526     // OK, new we have to figure out if any part of the "offset" is a constant contribution to the index.
17527     // First, sum the offsets of any fields in fldSeq.
17528     unsigned      fieldOffsets = 0;
17529     FieldSeqNode* fldSeqIter   = fldSeq;
17530     // Also, find the first non-pseudo field...
17531     assert(*pFldSeq == nullptr);
17532     while (fldSeqIter != nullptr)
17533     {
17534         if (fldSeqIter == FieldSeqStore::NotAField())
17535         {
17536             // TODO-Review: A NotAField here indicates a failure to properly maintain the field sequence
17537             // See test case self_host_tests_x86\jit\regression\CLR-x86-JIT\v1-m12-beta2\ b70992\ b70992.exe
17538             // Safest thing to do here is to drop back to MinOpts
17539             CLANG_FORMAT_COMMENT_ANCHOR;
17540
17541 #ifdef DEBUG
17542             if (comp->opts.optRepeat)
17543             {
17544                 // We don't guarantee preserving these annotations through the entire optimizer, so
17545                 // just conservatively return null if under optRepeat.
17546                 *pArr = nullptr;
17547                 return;
17548             }
17549 #endif // DEBUG
17550             noway_assert(!"fldSeqIter is NotAField() in ParseArrayAddress");
17551         }
17552
17553         if (!FieldSeqStore::IsPseudoField(fldSeqIter->m_fieldHnd))
17554         {
17555             if (*pFldSeq == nullptr)
17556             {
17557                 *pFldSeq = fldSeqIter;
17558             }
17559             CORINFO_CLASS_HANDLE fldCls = nullptr;
17560             noway_assert(fldSeqIter->m_fieldHnd != nullptr);
17561             CorInfoType cit = comp->info.compCompHnd->getFieldType(fldSeqIter->m_fieldHnd, &fldCls);
17562             fieldOffsets += comp->compGetTypeSize(cit, fldCls);
17563         }
17564         fldSeqIter = fldSeqIter->m_next;
17565     }
17566
17567     // Is there some portion of the "offset" beyond the first-elem offset and the struct field suffix we just computed?
17568     if (!FitsIn<ssize_t>(fieldOffsets + arrayInfo->m_elemOffset) || !FitsIn<ssize_t>(arrayInfo->m_elemSize))
17569     {
17570         // This seems unlikely, but no harm in being safe...
17571         *pInxVN = comp->GetValueNumStore()->VNForExpr(nullptr, TYP_INT);
17572         return;
17573     }
17574     // Otherwise...
17575     ssize_t offsetAccountedFor = static_cast<ssize_t>(fieldOffsets + arrayInfo->m_elemOffset);
17576     ssize_t elemSize           = static_cast<ssize_t>(arrayInfo->m_elemSize);
17577
17578     ssize_t constIndOffset = offset - offsetAccountedFor;
17579     // This should be divisible by the element size...
17580     assert((constIndOffset % elemSize) == 0);
17581     ssize_t constInd = constIndOffset / elemSize;
17582
17583     ValueNumStore* vnStore = comp->GetValueNumStore();
17584
17585     if (inxVN == ValueNumStore::NoVN)
17586     {
17587         // Must be a constant index.
17588         *pInxVN = vnStore->VNForPtrSizeIntCon(constInd);
17589     }
17590     else
17591     {
17592         //
17593         // Perform ((inxVN / elemSizeVN) + vnForConstInd)
17594         //
17595
17596         // The value associated with the index value number (inxVN) is the offset into the array,
17597         // which has been scaled by element size. We need to recover the array index from that offset
17598         if (vnStore->IsVNConstant(inxVN))
17599         {
17600             ssize_t index = vnStore->CoercedConstantValue<ssize_t>(inxVN);
17601             noway_assert(elemSize > 0 && ((index % elemSize) == 0));
17602             *pInxVN = vnStore->VNForPtrSizeIntCon((index / elemSize) + constInd);
17603         }
17604         else
17605         {
17606             bool canFoldDiv = false;
17607
17608             // If the index VN is a MUL by elemSize, see if we can eliminate it instead of adding
17609             // the division by elemSize.
17610             VNFuncApp funcApp;
17611             if (vnStore->GetVNFunc(inxVN, &funcApp) && funcApp.m_func == (VNFunc)GT_MUL)
17612             {
17613                 ValueNum vnForElemSize = vnStore->VNForLongCon(elemSize);
17614
17615                 // One of the multiply operand is elemSize, so the resulting
17616                 // index VN should simply be the other operand.
17617                 if (funcApp.m_args[1] == vnForElemSize)
17618                 {
17619                     *pInxVN    = funcApp.m_args[0];
17620                     canFoldDiv = true;
17621                 }
17622                 else if (funcApp.m_args[0] == vnForElemSize)
17623                 {
17624                     *pInxVN    = funcApp.m_args[1];
17625                     canFoldDiv = true;
17626                 }
17627             }
17628
17629             // Perform ((inxVN / elemSizeVN) + vnForConstInd)
17630             if (!canFoldDiv)
17631             {
17632                 ValueNum vnForElemSize = vnStore->VNForPtrSizeIntCon(elemSize);
17633                 ValueNum vnForScaledInx =
17634                     vnStore->VNForFunc(TYP_I_IMPL, GetVNFuncForOper(GT_DIV, false), inxVN, vnForElemSize);
17635                 *pInxVN = vnForScaledInx;
17636             }
17637
17638             if (constInd != 0)
17639             {
17640                 ValueNum vnForConstInd = comp->GetValueNumStore()->VNForPtrSizeIntCon(constInd);
17641                 *pInxVN                = comp->GetValueNumStore()->VNForFunc(TYP_I_IMPL,
17642                                                               GetVNFuncForOper(GT_ADD, (gtFlags & GTF_UNSIGNED) != 0),
17643                                                               *pInxVN, vnForConstInd);
17644             }
17645         }
17646     }
17647 }
17648
17649 void GenTree::ParseArrayAddressWork(
17650     Compiler* comp, ssize_t inputMul, GenTree** pArr, ValueNum* pInxVN, ssize_t* pOffset, FieldSeqNode** pFldSeq)
17651 {
17652     if (TypeGet() == TYP_REF)
17653     {
17654         // This must be the array pointer.
17655         *pArr = this;
17656         assert(inputMul == 1); // Can't multiply the array pointer by anything.
17657     }
17658     else
17659     {
17660         switch (OperGet())
17661         {
17662             case GT_CNS_INT:
17663                 *pFldSeq = comp->GetFieldSeqStore()->Append(*pFldSeq, gtIntCon.gtFieldSeq);
17664                 *pOffset += (inputMul * gtIntCon.gtIconVal);
17665                 return;
17666
17667             case GT_ADD:
17668             case GT_SUB:
17669                 gtOp.gtOp1->ParseArrayAddressWork(comp, inputMul, pArr, pInxVN, pOffset, pFldSeq);
17670                 if (OperGet() == GT_SUB)
17671                 {
17672                     inputMul = -inputMul;
17673                 }
17674                 gtOp.gtOp2->ParseArrayAddressWork(comp, inputMul, pArr, pInxVN, pOffset, pFldSeq);
17675                 return;
17676
17677             case GT_MUL:
17678             {
17679                 // If one op is a constant, continue parsing down.
17680                 ssize_t  subMul   = 0;
17681                 GenTree* nonConst = nullptr;
17682                 if (gtOp.gtOp1->IsCnsIntOrI())
17683                 {
17684                     // If the other arg is an int constant, and is a "not-a-field", choose
17685                     // that as the multiplier, thus preserving constant index offsets...
17686                     if (gtOp.gtOp2->OperGet() == GT_CNS_INT &&
17687                         gtOp.gtOp2->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField())
17688                     {
17689                         subMul   = gtOp.gtOp2->gtIntConCommon.IconValue();
17690                         nonConst = gtOp.gtOp1;
17691                     }
17692                     else
17693                     {
17694                         subMul   = gtOp.gtOp1->gtIntConCommon.IconValue();
17695                         nonConst = gtOp.gtOp2;
17696                     }
17697                 }
17698                 else if (gtOp.gtOp2->IsCnsIntOrI())
17699                 {
17700                     subMul   = gtOp.gtOp2->gtIntConCommon.IconValue();
17701                     nonConst = gtOp.gtOp1;
17702                 }
17703                 if (nonConst != nullptr)
17704                 {
17705                     nonConst->ParseArrayAddressWork(comp, inputMul * subMul, pArr, pInxVN, pOffset, pFldSeq);
17706                     return;
17707                 }
17708                 // Otherwise, exit the switch, treat as a contribution to the index.
17709             }
17710             break;
17711
17712             case GT_LSH:
17713                 // If one op is a constant, continue parsing down.
17714                 if (gtOp.gtOp2->IsCnsIntOrI())
17715                 {
17716                     ssize_t subMul = 1 << gtOp.gtOp2->gtIntConCommon.IconValue();
17717                     gtOp.gtOp1->ParseArrayAddressWork(comp, inputMul * subMul, pArr, pInxVN, pOffset, pFldSeq);
17718                     return;
17719                 }
17720                 // Otherwise, exit the switch, treat as a contribution to the index.
17721                 break;
17722
17723             case GT_COMMA:
17724                 // We don't care about exceptions for this purpose.
17725                 if ((gtOp.gtOp1->OperGet() == GT_ARR_BOUNDS_CHECK) || gtOp.gtOp1->IsNothingNode())
17726                 {
17727                     gtOp.gtOp2->ParseArrayAddressWork(comp, inputMul, pArr, pInxVN, pOffset, pFldSeq);
17728                     return;
17729                 }
17730                 break;
17731
17732             default:
17733                 break;
17734         }
17735         // If we didn't return above, must be a constribution to the non-constant part of the index VN.
17736         ValueNum vn = comp->GetValueNumStore()->VNNormVal(gtVNPair.GetLiberal()); // We don't care about exceptions for
17737                                                                                   // this purpose.
17738         if (inputMul != 1)
17739         {
17740             ValueNum mulVN = comp->GetValueNumStore()->VNForLongCon(inputMul);
17741             vn             = comp->GetValueNumStore()->VNForFunc(TypeGet(), GetVNFuncForOper(GT_MUL, false), mulVN, vn);
17742         }
17743         if (*pInxVN == ValueNumStore::NoVN)
17744         {
17745             *pInxVN = vn;
17746         }
17747         else
17748         {
17749             *pInxVN = comp->GetValueNumStore()->VNForFunc(TypeGet(), GetVNFuncForOper(GT_ADD, false), *pInxVN, vn);
17750         }
17751     }
17752 }
17753
17754 bool GenTree::ParseArrayElemForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq)
17755 {
17756     if (OperIsIndir())
17757     {
17758         if (gtFlags & GTF_IND_ARR_INDEX)
17759         {
17760             bool b = comp->GetArrayInfoMap()->Lookup(this, arrayInfo);
17761             assert(b);
17762             return true;
17763         }
17764
17765         // Otherwise...
17766         GenTree* addr = AsIndir()->Addr();
17767         return addr->ParseArrayElemAddrForm(comp, arrayInfo, pFldSeq);
17768     }
17769     else
17770     {
17771         return false;
17772     }
17773 }
17774
17775 bool GenTree::ParseArrayElemAddrForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq)
17776 {
17777     switch (OperGet())
17778     {
17779         case GT_ADD:
17780         {
17781             GenTree* arrAddr = nullptr;
17782             GenTree* offset  = nullptr;
17783             if (gtOp.gtOp1->TypeGet() == TYP_BYREF)
17784             {
17785                 arrAddr = gtOp.gtOp1;
17786                 offset  = gtOp.gtOp2;
17787             }
17788             else if (gtOp.gtOp2->TypeGet() == TYP_BYREF)
17789             {
17790                 arrAddr = gtOp.gtOp2;
17791                 offset  = gtOp.gtOp1;
17792             }
17793             else
17794             {
17795                 return false;
17796             }
17797             if (!offset->ParseOffsetForm(comp, pFldSeq))
17798             {
17799                 return false;
17800             }
17801             return arrAddr->ParseArrayElemAddrForm(comp, arrayInfo, pFldSeq);
17802         }
17803
17804         case GT_ADDR:
17805         {
17806             GenTree* addrArg = gtOp.gtOp1;
17807             if (addrArg->OperGet() != GT_IND)
17808             {
17809                 return false;
17810             }
17811             else
17812             {
17813                 // The "Addr" node might be annotated with a zero-offset field sequence.
17814                 FieldSeqNode* zeroOffsetFldSeq = nullptr;
17815                 if (comp->GetZeroOffsetFieldMap()->Lookup(this, &zeroOffsetFldSeq))
17816                 {
17817                     *pFldSeq = comp->GetFieldSeqStore()->Append(*pFldSeq, zeroOffsetFldSeq);
17818                 }
17819                 return addrArg->ParseArrayElemForm(comp, arrayInfo, pFldSeq);
17820             }
17821         }
17822
17823         default:
17824             return false;
17825     }
17826 }
17827
17828 bool GenTree::ParseOffsetForm(Compiler* comp, FieldSeqNode** pFldSeq)
17829 {
17830     switch (OperGet())
17831     {
17832         case GT_CNS_INT:
17833         {
17834             GenTreeIntCon* icon = AsIntCon();
17835             *pFldSeq            = comp->GetFieldSeqStore()->Append(*pFldSeq, icon->gtFieldSeq);
17836             return true;
17837         }
17838
17839         case GT_ADD:
17840             if (!gtOp.gtOp1->ParseOffsetForm(comp, pFldSeq))
17841             {
17842                 return false;
17843             }
17844             return gtOp.gtOp2->ParseOffsetForm(comp, pFldSeq);
17845
17846         default:
17847             return false;
17848     }
17849 }
17850
17851 void GenTree::LabelIndex(Compiler* comp, bool isConst)
17852 {
17853     switch (OperGet())
17854     {
17855         case GT_CNS_INT:
17856             // If we got here, this is a contribution to the constant part of the index.
17857             if (isConst)
17858             {
17859                 gtIntCon.gtFieldSeq =
17860                     comp->GetFieldSeqStore()->CreateSingleton(FieldSeqStore::ConstantIndexPseudoField);
17861             }
17862             return;
17863
17864         case GT_LCL_VAR:
17865             gtFlags |= GTF_VAR_ARR_INDEX;
17866             return;
17867
17868         case GT_ADD:
17869         case GT_SUB:
17870             gtOp.gtOp1->LabelIndex(comp, isConst);
17871             gtOp.gtOp2->LabelIndex(comp, isConst);
17872             break;
17873
17874         case GT_CAST:
17875             gtOp.gtOp1->LabelIndex(comp, isConst);
17876             break;
17877
17878         case GT_ARR_LENGTH:
17879             gtFlags |= GTF_ARRLEN_ARR_IDX;
17880             return;
17881
17882         default:
17883             // For all other operators, peel off one constant; and then label the other if it's also a constant.
17884             if (OperIsArithmetic() || OperIsCompare())
17885             {
17886                 if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
17887                 {
17888                     gtOp.gtOp1->LabelIndex(comp, isConst);
17889                     break;
17890                 }
17891                 else if (gtOp.gtOp1->OperGet() == GT_CNS_INT)
17892                 {
17893                     gtOp.gtOp2->LabelIndex(comp, isConst);
17894                     break;
17895                 }
17896                 // Otherwise continue downward on both, labeling vars.
17897                 gtOp.gtOp1->LabelIndex(comp, false);
17898                 gtOp.gtOp2->LabelIndex(comp, false);
17899             }
17900             break;
17901     }
17902 }
17903
// Distinguished sentinel object: callers compare addresses against NotAField() (see
// Append below), so the value of the below field doesn't matter; it exists only to
// provide a distinguished address.
//
// static
FieldSeqNode FieldSeqStore::s_notAField(nullptr, nullptr);
17908
// FieldSeqStore methods.

// Constructor: sets up the allocator and the canonicalization map used to intern
// field-sequence nodes so that equal sequences share a single node.
FieldSeqStore::FieldSeqStore(CompAllocator* alloc) : m_alloc(alloc), m_canonMap(new (alloc) FieldSeqNodeCanonMap(alloc))
{
}
17913
17914 FieldSeqNode* FieldSeqStore::CreateSingleton(CORINFO_FIELD_HANDLE fieldHnd)
17915 {
17916     FieldSeqNode  fsn(fieldHnd, nullptr);
17917     FieldSeqNode* res = nullptr;
17918     if (m_canonMap->Lookup(fsn, &res))
17919     {
17920         return res;
17921     }
17922     else
17923     {
17924         res  = reinterpret_cast<FieldSeqNode*>(m_alloc->Alloc(sizeof(FieldSeqNode)));
17925         *res = fsn;
17926         m_canonMap->Set(fsn, res);
17927         return res;
17928     }
17929 }
17930
17931 FieldSeqNode* FieldSeqStore::Append(FieldSeqNode* a, FieldSeqNode* b)
17932 {
17933     if (a == nullptr)
17934     {
17935         return b;
17936     }
17937     else if (a == NotAField())
17938     {
17939         return NotAField();
17940     }
17941     else if (b == nullptr)
17942     {
17943         return a;
17944     }
17945     else if (b == NotAField())
17946     {
17947         return NotAField();
17948         // Extremely special case for ConstantIndex pseudo-fields -- appending consecutive such
17949         // together collapse to one.
17950     }
17951     else if (a->m_next == nullptr && a->m_fieldHnd == ConstantIndexPseudoField &&
17952              b->m_fieldHnd == ConstantIndexPseudoField)
17953     {
17954         return b;
17955     }
17956     else
17957     {
17958         FieldSeqNode* tmp = Append(a->m_next, b);
17959         FieldSeqNode  fsn(a->m_fieldHnd, tmp);
17960         FieldSeqNode* res = nullptr;
17961         if (m_canonMap->Lookup(fsn, &res))
17962         {
17963             return res;
17964         }
17965         else
17966         {
17967             res  = reinterpret_cast<FieldSeqNode*>(m_alloc->Alloc(sizeof(FieldSeqNode)));
17968             *res = fsn;
17969             m_canonMap->Set(fsn, res);
17970             return res;
17971         }
17972     }
17973 }
17974
// Static vars.
int FieldSeqStore::FirstElemPseudoFieldStruct;
int FieldSeqStore::ConstantIndexPseudoFieldStruct;

// The pseudo-field handles are simply the addresses of the two dummy ints above;
// they only need to be distinct from each other and from every real field handle.
CORINFO_FIELD_HANDLE FieldSeqStore::FirstElemPseudoField =
    (CORINFO_FIELD_HANDLE)&FieldSeqStore::FirstElemPseudoFieldStruct;
CORINFO_FIELD_HANDLE FieldSeqStore::ConstantIndexPseudoField =
    (CORINFO_FIELD_HANDLE)&FieldSeqStore::ConstantIndexPseudoFieldStruct;
17983
// Returns true if this node is the FirstElem pseudo-field.
bool FieldSeqNode::IsFirstElemFieldSeq()
{
    // this must be non-null per ISO C++
    return m_fieldHnd == FieldSeqStore::FirstElemPseudoField;
}
17989
// Returns true if this node is the ConstantIndex pseudo-field.
bool FieldSeqNode::IsConstantIndexFieldSeq()
{
    // this must be non-null per ISO C++
    return m_fieldHnd == FieldSeqStore::ConstantIndexPseudoField;
}
17995
// Returns true if this node is either of the two pseudo-fields (FirstElem or
// ConstantIndex); false for real fields and for a null receiver.
// NOTE(review): unlike the methods above, this one is written to tolerate a null
// "this". Testing "this == nullptr" inside a member function is undefined behavior
// per ISO C++, and an optimizing compiler is entitled to delete the check; consider
// making this a static helper that takes the node as an explicit argument.
bool FieldSeqNode::IsPseudoField()
{
    if (this == nullptr)
    {
        return false;
    }
    return m_fieldHnd == FieldSeqStore::FirstElemPseudoField || m_fieldHnd == FieldSeqStore::ConstantIndexPseudoField;
}
18004
18005 #ifdef FEATURE_SIMD
18006 GenTreeSIMD* Compiler::gtNewSIMDNode(
18007     var_types type, GenTree* op1, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
18008 {
18009     assert(op1 != nullptr);
18010     SetOpLclRelatedToSIMDIntrinsic(op1);
18011
18012     return new (this, GT_SIMD) GenTreeSIMD(type, op1, simdIntrinsicID, baseType, size);
18013 }
18014
18015 GenTreeSIMD* Compiler::gtNewSIMDNode(
18016     var_types type, GenTree* op1, GenTree* op2, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
18017 {
18018     assert(op1 != nullptr);
18019     SetOpLclRelatedToSIMDIntrinsic(op1);
18020     SetOpLclRelatedToSIMDIntrinsic(op2);
18021
18022     return new (this, GT_SIMD) GenTreeSIMD(type, op1, op2, simdIntrinsicID, baseType, size);
18023 }
18024
18025 //-------------------------------------------------------------------
18026 // SetOpLclRelatedToSIMDIntrinsic: Determine if the tree has a local var that needs to be set
18027 // as used by a SIMD intrinsic, and if so, set that local var appropriately.
18028 //
18029 // Arguments:
18030 //     op - The tree, to be an operand of a new GT_SIMD node, to check.
18031 //
18032 void Compiler::SetOpLclRelatedToSIMDIntrinsic(GenTree* op)
18033 {
18034     if (op != nullptr)
18035     {
18036         if (op->OperIsLocal())
18037         {
18038             setLclRelatedToSIMDIntrinsic(op);
18039         }
18040         else if ((op->OperGet() == GT_OBJ) && (op->gtOp.gtOp1->OperGet() == GT_ADDR) &&
18041                  op->gtOp.gtOp1->gtOp.gtOp1->OperIsLocal())
18042         {
18043             setLclRelatedToSIMDIntrinsic(op->gtOp.gtOp1->gtOp.gtOp1);
18044         }
18045     }
18046 }
18047
18048 bool GenTree::isCommutativeSIMDIntrinsic()
18049 {
18050     assert(gtOper == GT_SIMD);
18051     switch (AsSIMD()->gtSIMDIntrinsicID)
18052     {
18053         case SIMDIntrinsicAdd:
18054         case SIMDIntrinsicBitwiseAnd:
18055         case SIMDIntrinsicBitwiseOr:
18056         case SIMDIntrinsicBitwiseXor:
18057         case SIMDIntrinsicEqual:
18058         case SIMDIntrinsicMax:
18059         case SIMDIntrinsicMin:
18060         case SIMDIntrinsicMul:
18061         case SIMDIntrinsicOpEquality:
18062         case SIMDIntrinsicOpInEquality:
18063             return true;
18064         default:
18065             return false;
18066     }
18067 }
18068 #endif // FEATURE_SIMD
18069
18070 #ifdef FEATURE_HW_INTRINSICS
18071 bool GenTree::isCommutativeHWIntrinsic() const
18072 {
18073     assert(gtOper == GT_HWIntrinsic);
18074
18075 #ifdef _TARGET_XARCH_
18076     HWIntrinsicFlag flags = Compiler::flagsOfHWIntrinsic(AsHWIntrinsic()->gtHWIntrinsicId);
18077     return ((flags & HW_Flag_Commutative) != 0);
18078 #else
18079     return false;
18080 #endif // _TARGET_XARCH_
18081 }
18082
18083 bool GenTree::isContainableHWIntrinsic() const
18084 {
18085     assert(gtOper == GT_HWIntrinsic);
18086
18087 #ifdef _TARGET_XARCH_
18088     HWIntrinsicFlag flags = Compiler::flagsOfHWIntrinsic(AsHWIntrinsic()->gtHWIntrinsicId);
18089     return ((flags & HW_Flag_NoContainment) == 0);
18090 #else
18091     return false;
18092 #endif // _TARGET_XARCH_
18093 }
18094
18095 bool GenTree::isRMWHWIntrinsic(Compiler* comp)
18096 {
18097     assert(gtOper == GT_HWIntrinsic);
18098     assert(comp != nullptr);
18099
18100 #ifdef _TARGET_XARCH_
18101     if (!comp->canUseVexEncoding())
18102     {
18103         HWIntrinsicFlag flags = Compiler::flagsOfHWIntrinsic(AsHWIntrinsic()->gtHWIntrinsicId);
18104         return ((flags & HW_Flag_NoRMWSemantics) == 0);
18105     }
18106
18107     switch (AsHWIntrinsic()->gtHWIntrinsicId)
18108     {
18109         case NI_SSE42_Crc32:
18110             return true;
18111
18112         default:
18113             return false;
18114     }
18115 #else
18116     return false;
18117 #endif // _TARGET_XARCH_
18118 }
18119
18120 GenTreeHWIntrinsic* Compiler::gtNewSimdHWIntrinsicNode(var_types      type,
18121                                                        NamedIntrinsic hwIntrinsicID,
18122                                                        var_types      baseType,
18123                                                        unsigned       size)
18124 {
18125     return new (this, GT_HWIntrinsic) GenTreeHWIntrinsic(type, hwIntrinsicID, baseType, size);
18126 }
18127
18128 GenTreeHWIntrinsic* Compiler::gtNewSimdHWIntrinsicNode(
18129     var_types type, GenTree* op1, NamedIntrinsic hwIntrinsicID, var_types baseType, unsigned simdSize)
18130 {
18131     SetOpLclRelatedToSIMDIntrinsic(op1);
18132
18133     return new (this, GT_HWIntrinsic) GenTreeHWIntrinsic(type, op1, hwIntrinsicID, baseType, simdSize);
18134 }
18135
18136 GenTreeHWIntrinsic* Compiler::gtNewSimdHWIntrinsicNode(
18137     var_types type, GenTree* op1, GenTree* op2, NamedIntrinsic hwIntrinsicID, var_types baseType, unsigned simdSize)
18138 {
18139     SetOpLclRelatedToSIMDIntrinsic(op1);
18140     SetOpLclRelatedToSIMDIntrinsic(op2);
18141
18142     return new (this, GT_HWIntrinsic) GenTreeHWIntrinsic(type, op1, op2, hwIntrinsicID, baseType, simdSize);
18143 }
18144
18145 GenTreeHWIntrinsic* Compiler::gtNewSimdHWIntrinsicNode(var_types      type,
18146                                                        GenTree*       op1,
18147                                                        GenTree*       op2,
18148                                                        GenTree*       op3,
18149                                                        NamedIntrinsic hwIntrinsicID,
18150                                                        var_types      baseType,
18151                                                        unsigned       size)
18152 {
18153     SetOpLclRelatedToSIMDIntrinsic(op1);
18154     SetOpLclRelatedToSIMDIntrinsic(op2);
18155     SetOpLclRelatedToSIMDIntrinsic(op3);
18156
18157     return new (this, GT_HWIntrinsic)
18158         GenTreeHWIntrinsic(type, gtNewArgList(op1, op2, op3), hwIntrinsicID, baseType, size);
18159 }
18160
18161 GenTreeHWIntrinsic* Compiler::gtNewSimdHWIntrinsicNode(var_types      type,
18162                                                        GenTree*       op1,
18163                                                        GenTree*       op2,
18164                                                        GenTree*       op3,
18165                                                        GenTree*       op4,
18166                                                        NamedIntrinsic hwIntrinsicID,
18167                                                        var_types      baseType,
18168                                                        unsigned       size)
18169 {
18170     SetOpLclRelatedToSIMDIntrinsic(op1);
18171     SetOpLclRelatedToSIMDIntrinsic(op2);
18172     SetOpLclRelatedToSIMDIntrinsic(op3);
18173     SetOpLclRelatedToSIMDIntrinsic(op4);
18174
18175     return new (this, GT_HWIntrinsic)
18176         GenTreeHWIntrinsic(type, gtNewArgList(op1, op2, op3, op4), hwIntrinsicID, baseType, size);
18177 }
18178
18179 GenTreeHWIntrinsic* Compiler::gtNewScalarHWIntrinsicNode(var_types type, GenTree* op1, NamedIntrinsic hwIntrinsicID)
18180 {
18181     SetOpLclRelatedToSIMDIntrinsic(op1);
18182
18183     return new (this, GT_HWIntrinsic) GenTreeHWIntrinsic(type, op1, hwIntrinsicID, TYP_UNKNOWN, 0);
18184 }
18185
18186 GenTreeHWIntrinsic* Compiler::gtNewScalarHWIntrinsicNode(var_types      type,
18187                                                          GenTree*       op1,
18188                                                          GenTree*       op2,
18189                                                          NamedIntrinsic hwIntrinsicID)
18190 {
18191     SetOpLclRelatedToSIMDIntrinsic(op1);
18192     SetOpLclRelatedToSIMDIntrinsic(op2);
18193
18194     return new (this, GT_HWIntrinsic) GenTreeHWIntrinsic(type, op1, op2, hwIntrinsicID, TYP_UNKNOWN, 0);
18195 }
18196
18197 //---------------------------------------------------------------------------------------
18198 // gtNewMustThrowException:
18199 //    create a throw node (calling into JIT helper) that must be thrown.
18200 //    The result would be a comma node: COMMA(jithelperthrow(void), x) where x's type should be specified.
18201 //
18202 // Arguments
18203 //    helper      -  JIT helper ID
18204 //    type        -  return type of the node
18205 //
18206 // Return Value
18207 //    pointer to the throw node
18208 //
18209 GenTree* Compiler::gtNewMustThrowException(unsigned helper, var_types type, CORINFO_CLASS_HANDLE clsHnd)
18210 {
18211     GenTreeCall* node = gtNewHelperCallNode(helper, TYP_VOID);
18212     node->gtCallMoreFlags |= GTF_CALL_M_DOES_NOT_RETURN;
18213     if (type != TYP_VOID)
18214     {
18215         unsigned dummyTemp = lvaGrabTemp(true DEBUGARG("dummy temp of must thrown exception"));
18216         if (type == TYP_STRUCT)
18217         {
18218             lvaSetStruct(dummyTemp, clsHnd, false);
18219             type = lvaTable[dummyTemp].lvType; // struct type is normalized
18220         }
18221         else
18222         {
18223             lvaTable[dummyTemp].lvType = type;
18224         }
18225         GenTree* dummyNode = gtNewLclvNode(dummyTemp, type);
18226         return gtNewOperNode(GT_COMMA, type, node, dummyNode);
18227     }
18228     return node;
18229 }
18230
18231 // Returns true for the HW Instrinsic instructions that have MemoryLoad semantics, false otherwise
18232 bool GenTreeHWIntrinsic::OperIsMemoryLoad()
18233 {
18234 #ifdef _TARGET_XARCH_
18235     // Some xarch instructions have MemoryLoad sematics
18236     HWIntrinsicCategory category = Compiler::categoryOfHWIntrinsic(gtHWIntrinsicId);
18237     if (category == HW_Category_MemoryLoad)
18238     {
18239         return true;
18240     }
18241     else if (category == HW_Category_IMM)
18242     {
18243         // Some AVX instructions here also have MemoryLoad sematics
18244
18245         // Do we have 3 operands?
18246         if (Compiler::numArgsOfHWIntrinsic(this) != 3)
18247         {
18248             return false;
18249         }
18250         else // We have 3 operands/args
18251         {
18252             GenTreeArgList* argList = gtOp.gtOp1->AsArgList();
18253
18254             if ((gtHWIntrinsicId == NI_AVX_InsertVector128 || gtHWIntrinsicId == NI_AVX2_InsertVector128) &&
18255                 (argList->Current()->TypeGet() == TYP_I_IMPL)) // Is the type of the first arg TYP_I_IMPL?
18256             {
18257                 // This is Avx/Avx2.InsertVector128
18258                 return true;
18259             }
18260         }
18261     }
18262 #endif // _TARGET_XARCH_
18263     return false;
18264 }
18265
18266 // Returns true for the HW Instrinsic instructions that have MemoryStore semantics, false otherwise
18267 bool GenTreeHWIntrinsic::OperIsMemoryStore()
18268 {
18269 #ifdef _TARGET_XARCH_
18270     // Some xarch instructions have MemoryStore sematics
18271     HWIntrinsicCategory category = Compiler::categoryOfHWIntrinsic(gtHWIntrinsicId);
18272     if (category == HW_Category_MemoryStore)
18273     {
18274         return true;
18275     }
18276     else if (category == HW_Category_IMM)
18277     {
18278         // Some AVX instructions here also have MemoryStore sematics
18279
18280         // Do we have 3 operands?
18281         if (Compiler::numArgsOfHWIntrinsic(this) != 3)
18282         {
18283             return false;
18284         }
18285         else // We have 3 operands/args
18286         {
18287             if ((gtHWIntrinsicId == NI_AVX_ExtractVector128 || gtHWIntrinsicId == NI_AVX2_ExtractVector128))
18288             {
18289                 // This is Avx/Avx2.ExtractVector128
18290                 return true;
18291             }
18292         }
18293     }
18294 #endif // _TARGET_XARCH_
18295     return false;
18296 }
18297
18298 // Returns true for the HW Instrinsic instructions that have MemoryLoad semantics, false otherwise
18299 bool GenTreeHWIntrinsic::OperIsMemoryLoadOrStore()
18300 {
18301 #ifdef _TARGET_XARCH_
18302     // Some xarch instructions have MemoryLoad sematics
18303     HWIntrinsicCategory category = Compiler::categoryOfHWIntrinsic(gtHWIntrinsicId);
18304     if ((category == HW_Category_MemoryLoad) || (category == HW_Category_MemoryStore))
18305     {
18306         return true;
18307     }
18308     else if (category == HW_Category_IMM)
18309     {
18310         // Some AVX instructions here also have MemoryLoad or MemoryStore sematics
18311
18312         // Do we have 3 operands?
18313         if (Compiler::numArgsOfHWIntrinsic(this) != 3)
18314         {
18315             return false;
18316         }
18317         else // We have 3 operands/args
18318         {
18319             GenTreeArgList* argList = gtOp.gtOp1->AsArgList();
18320
18321             if ((gtHWIntrinsicId == NI_AVX_InsertVector128 || gtHWIntrinsicId == NI_AVX2_InsertVector128) &&
18322                 (argList->Current()->TypeGet() == TYP_I_IMPL)) // Is the type of the first arg TYP_I_IMPL?
18323             {
18324                 // This is Avx/Avx2.InsertVector128
18325                 return true;
18326             }
18327             else if ((gtHWIntrinsicId == NI_AVX_ExtractVector128 || gtHWIntrinsicId == NI_AVX2_ExtractVector128))
18328             {
18329                 // This is Avx/Avx2.ExtractVector128
18330                 return true;
18331             }
18332         }
18333     }
18334 #endif // _TARGET_XARCH_
18335     return false;
18336 }
18337
18338 #endif // FEATURE_HW_INTRINSICS
18339
18340 //---------------------------------------------------------------------------------------
18341 // InitializeStructReturnType:
18342 //    Initialize the Return Type Descriptor for a method that returns a struct type
18343 //
18344 // Arguments
18345 //    comp        -  Compiler Instance
18346 //    retClsHnd   -  VM handle to the struct type returned by the method
18347 //
18348 // Return Value
18349 //    None
18350 //
void ReturnTypeDesc::InitializeStructReturnType(Compiler* comp, CORINFO_CLASS_HANDLE retClsHnd)
{
    // A ReturnTypeDesc may be initialized only once.
    assert(!m_inited);

#if FEATURE_MULTIREG_RET

    assert(retClsHnd != NO_CLASS_HANDLE);
    unsigned structSize = comp->info.compCompHnd->getClassSize(retClsHnd);

    // Ask the Compiler how this struct is returned under the current target's ABI;
    // the answer drives which m_regType slots get filled in below.
    Compiler::structPassingKind howToReturnStruct;
    var_types                   returnType = comp->getReturnTypeForStruct(retClsHnd, &howToReturnStruct, structSize);

    switch (howToReturnStruct)
    {
        case Compiler::SPK_PrimitiveType:
        {
            // The struct is returned in a single register of a primitive type.
            assert(returnType != TYP_UNKNOWN);
            assert(!varTypeIsStruct(returnType));
            m_regType[0] = returnType;
            break;
        }

        case Compiler::SPK_ByValueAsHfa:
        {
            // Homogeneous Floating-point Aggregate: one FP register per element.
            assert(varTypeIsStruct(returnType));
            var_types hfaType = comp->GetHfaType(retClsHnd);

            // We should have an hfa struct type
            assert(varTypeIsFloating(hfaType));

            // Note that the retail build issues a warning about a potential division by zero without this Max function
            unsigned elemSize = Max((unsigned)1, EA_SIZE_IN_BYTES(emitActualTypeSize(hfaType)));

            // The size of this struct should be evenly divisible by elemSize
            assert((structSize % elemSize) == 0);

            unsigned hfaCount = (structSize / elemSize);
            for (unsigned i = 0; i < hfaCount; ++i)
            {
                m_regType[i] = hfaType;
            }

            // Returning an HFA means the method uses floating-point registers.
            if (comp->compFloatingPointUsed == false)
            {
                comp->compFloatingPointUsed = true;
            }
            break;
        }

        case Compiler::SPK_ByValue:
        {
            assert(varTypeIsStruct(returnType));

#ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING

            // System V AMD64: the VM classifies the struct into up to two "eightbytes",
            // each returned in its own register whose type we record per slot.
            SYSTEMV_AMD64_CORINFO_STRUCT_REG_PASSING_DESCRIPTOR structDesc;
            comp->eeGetSystemVAmd64PassStructInRegisterDescriptor(retClsHnd, &structDesc);

            assert(structDesc.passedInRegisters);
            for (int i = 0; i < structDesc.eightByteCount; i++)
            {
                assert(i < MAX_RET_REG_COUNT);
                m_regType[i] = comp->GetEightByteType(structDesc, i);
            }

#elif defined(_TARGET_ARM64_)

            // a non-HFA struct returned using two registers
            //
            assert((structSize > TARGET_POINTER_SIZE) && (structSize <= (2 * TARGET_POINTER_SIZE)));

            // Query the GC layout so each register slot gets the correct
            // TYP_REF/TYP_BYREF/TYP_I_IMPL type for GC reporting.
            BYTE gcPtrs[2] = {TYPE_GC_NONE, TYPE_GC_NONE};
            comp->info.compCompHnd->getClassGClayout(retClsHnd, &gcPtrs[0]);
            for (unsigned i = 0; i < 2; ++i)
            {
                m_regType[i] = comp->getJitGCType(gcPtrs[i]);
            }

#else //  _TARGET_XXX_

            // This target needs support here!
            //
            NYI("Unsupported TARGET returning a TYP_STRUCT in InitializeStructReturnType");

#endif // FEATURE_UNIX_AMD64_STRUCT_PASSING

            break; // for case SPK_ByValue
        }

        case Compiler::SPK_ByReference:

            // We are returning using the return buffer argument
            // There are no return registers
            break;

        default:

            unreached(); // By the contract of getReturnTypeForStruct we should never get here.

    } // end of switch (howToReturnStruct)

#endif //  FEATURE_MULTIREG_RET

#ifdef DEBUG
    m_inited = true;
#endif
}
18458
18459 //---------------------------------------------------------------------------------------
18460 // InitializeLongReturnType:
18461 //    Initialize the Return Type Descriptor for a method that returns a TYP_LONG
18462 //
18463 // Arguments
18464 //    comp        -  Compiler Instance
18465 //
18466 // Return Value
18467 //    None
18468 //
18469 void ReturnTypeDesc::InitializeLongReturnType(Compiler* comp)
18470 {
18471 #if defined(_TARGET_X86_) || defined(_TARGET_ARM_)
18472
18473     // Setups up a ReturnTypeDesc for returning a long using two registers
18474     //
18475     assert(MAX_RET_REG_COUNT >= 2);
18476     m_regType[0] = TYP_INT;
18477     m_regType[1] = TYP_INT;
18478
18479 #else // not (_TARGET_X86_ or _TARGET_ARM_)
18480
18481     m_regType[0] = TYP_LONG;
18482
18483 #endif // _TARGET_X86_ or _TARGET_ARM_
18484
18485 #ifdef DEBUG
18486     m_inited = true;
18487 #endif
18488 }
18489
18490 //-------------------------------------------------------------------
18491 // GetABIReturnReg:  Return ith return register as per target ABI
18492 //
18493 // Arguments:
18494 //     idx   -   Index of the return register.
18495 //               The first return register has an index of 0 and so on.
18496 //
18497 // Return Value:
18498 //     Returns ith return register as per target ABI.
18499 //
18500 // Notes:
18501 //     x86 and ARM return long in multiple registers.
18502 //     ARM and ARM64 return HFA struct in multiple registers.
18503 //
regNumber ReturnTypeDesc::GetABIReturnReg(unsigned idx)
{
    unsigned count = GetReturnRegCount();
    assert(idx < count);

    regNumber resultReg = REG_NA;

#ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
    // System V AMD64: integral eightbytes go to RAX/RDX, floating eightbytes
    // to XMM0/XMM1. Which physical register slot idx maps to depends on the
    // types of the *earlier* slots, hence the regType0 checks below.
    var_types regType0 = GetReturnRegType(0);

    if (idx == 0)
    {
        if (varTypeIsIntegralOrI(regType0))
        {
            resultReg = REG_INTRET;
        }
        else
        {
            noway_assert(varTypeIsFloating(regType0));
            resultReg = REG_FLOATRET;
        }
    }
    else if (idx == 1)
    {
        var_types regType1 = GetReturnRegType(1);

        if (varTypeIsIntegralOrI(regType1))
        {
            if (varTypeIsIntegralOrI(regType0))
            {
                // Second of two integral slots -> second integer return register.
                resultReg = REG_INTRET_1;
            }
            else
            {
                // Slot 0 took a float register, so this is the first integer slot.
                resultReg = REG_INTRET;
            }
        }
        else
        {
            noway_assert(varTypeIsFloating(regType1));

            if (varTypeIsFloating(regType0))
            {
                // Second of two floating slots -> second float return register.
                resultReg = REG_FLOATRET_1;
            }
            else
            {
                // Slot 0 took an integer register, so this is the first float slot.
                resultReg = REG_FLOATRET;
            }
        }
    }

#elif defined(_TARGET_X86_)

    // x86: only multi-reg case handled here is a long in EDX:EAX.
    if (idx == 0)
    {
        resultReg = REG_LNGRET_LO;
    }
    else if (idx == 1)
    {
        resultReg = REG_LNGRET_HI;
    }

#elif defined(_TARGET_ARM_)

    var_types regType = GetReturnRegType(idx);
    if (varTypeIsIntegralOrI(regType))
    {
        // Ints are returned in one return register.
        // Longs are returned in two return registers.
        if (idx == 0)
        {
            resultReg = REG_LNGRET_LO;
        }
        else if (idx == 1)
        {
            resultReg = REG_LNGRET_HI;
        }
    }
    else
    {
        // Floats are returned in one return register (f0).
        // Doubles are returned in one return register (d0).
        // Structs are returned in four registers with HFAs.
        assert(idx < MAX_RET_REG_COUNT); // Up to 4 return registers for HFA's
        if (regType == TYP_DOUBLE)
        {
            // Doubles occupy even/odd float register pairs, so stride by 2.
            resultReg = (regNumber)((unsigned)(REG_FLOATRET) + idx * 2); // d0, d1, d2 or d3
        }
        else
        {
            resultReg = (regNumber)((unsigned)(REG_FLOATRET) + idx); // f0, f1, f2 or f3
        }
    }

#elif defined(_TARGET_ARM64_)

    var_types regType = GetReturnRegType(idx);
    if (varTypeIsIntegralOrI(regType))
    {
        noway_assert(idx < 2);                              // Up to 2 return registers for 16-byte structs
        resultReg = (idx == 0) ? REG_INTRET : REG_INTRET_1; // X0 or X1
    }
    else
    {
        noway_assert(idx < 4);                                   // Up to 4 return registers for HFA's
        resultReg = (regNumber)((unsigned)(REG_FLOATRET) + idx); // V0, V1, V2 or V3
    }

#endif // TARGET_XXX

    // Every supported target must have assigned a real register above.
    assert(resultReg != REG_NA);
    return resultReg;
}
18618
18619 //--------------------------------------------------------------------------------
18620 // GetABIReturnRegs: get the mask of return registers as per target arch ABI.
18621 //
18622 // Arguments:
18623 //    None
18624 //
18625 // Return Value:
18626 //    reg mask of return registers in which the return type is returned.
18627 //
18628 // Note:
18629 //    This routine can be used when the caller is not particular about the order
18630 //    of return registers and wants to know the set of return registers.
18631 //
// Non-static: queries this descriptor's return-register types via GetABIReturnReg.
18633 regMaskTP ReturnTypeDesc::GetABIReturnRegs()
18634 {
18635     regMaskTP resultMask = RBM_NONE;
18636
18637     unsigned count = GetReturnRegCount();
18638     for (unsigned i = 0; i < count; ++i)
18639     {
18640         resultMask |= genRegMask(GetABIReturnReg(i));
18641     }
18642
18643     return resultMask;
18644 }
18645
18646 #ifndef LEGACY_BACKEND
18647
18648 //------------------------------------------------------------------------
18649 // The following functions manage the gtRsvdRegs set of temporary registers
18650 // created by LSRA during code generation.
18651
18652 //------------------------------------------------------------------------
18653 // AvailableTempRegCount: return the number of available temporary registers in the (optional) given set
18654 // (typically, RBM_ALLINT or RBM_ALLFLOAT).
18655 //
18656 // Arguments:
18657 //    mask - (optional) Check for available temporary registers only in this set.
18658 //
18659 // Return Value:
18660 //    Count of available temporary registers in given set.
18661 //
18662 unsigned GenTree::AvailableTempRegCount(regMaskTP mask /* = (regMaskTP)-1 */) const
18663 {
18664     return genCountBits(gtRsvdRegs & mask);
18665 }
18666
18667 //------------------------------------------------------------------------
18668 // GetSingleTempReg: There is expected to be exactly one available temporary register
18669 // in the given mask in the gtRsvdRegs set. Get that register. No future calls to get
18670 // a temporary register are expected. Removes the register from the set, but only in
18671 // DEBUG to avoid doing unnecessary work in non-DEBUG builds.
18672 //
18673 // Arguments:
18674 //    mask - (optional) Get an available temporary register only in this set.
18675 //
18676 // Return Value:
18677 //    Available temporary register in given mask.
18678 //
18679 regNumber GenTree::GetSingleTempReg(regMaskTP mask /* = (regMaskTP)-1 */)
18680 {
18681     regMaskTP availableSet = gtRsvdRegs & mask;
18682     assert(genCountBits(availableSet) == 1);
18683     regNumber tempReg = genRegNumFromMask(availableSet);
18684     INDEBUG(gtRsvdRegs &= ~availableSet;) // Remove the register from the set, so it can't be used again.
18685     return tempReg;
18686 }
18687
18688 //------------------------------------------------------------------------
18689 // ExtractTempReg: Find the lowest number temporary register from the gtRsvdRegs set
18690 // that is also in the optional given mask (typically, RBM_ALLINT or RBM_ALLFLOAT),
18691 // and return it. Remove this register from the temporary register set, so it won't
18692 // be returned again.
18693 //
18694 // Arguments:
18695 //    mask - (optional) Extract an available temporary register only in this set.
18696 //
18697 // Return Value:
18698 //    Available temporary register in given mask.
18699 //
18700 regNumber GenTree::ExtractTempReg(regMaskTP mask /* = (regMaskTP)-1 */)
18701 {
18702     regMaskTP availableSet = gtRsvdRegs & mask;
18703     assert(genCountBits(availableSet) >= 1);
18704     regMaskTP tempRegMask = genFindLowestBit(availableSet);
18705     gtRsvdRegs &= ~tempRegMask;
18706     return genRegNumFromMask(tempRegMask);
18707 }
18708
18709 #endif // !LEGACY_BACKEND