Merge pull request #23876 from briansull/jit-dump
[platform/upstream/coreclr.git] / src / jit / gentree.cpp
1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
4
5 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
7 XX                                                                           XX
8 XX                               GenTree                                     XX
9 XX                                                                           XX
10 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
11 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
12 */
13
14 #include "jitpch.h"
15 #include "hwintrinsic.h"
16 #include "simd.h"
17
18 #ifdef _MSC_VER
19 #pragma hdrstop
20 #endif
21
22 /*****************************************************************************/
23
// Per-oper operator-kind flags (GTK_*), generated from gtlist.h.
// The GTK_COMMUTE bit is added in when the oper's 'cm' (commutative) flag is 1.
const unsigned short GenTree::gtOperKindTable[] = {
#define GTNODE(en, st, cm, ok) ok + GTK_COMMUTE *cm,
#include "gtlist.h"
};
28
29 /*****************************************************************************
30  *
31  *  The types of different GenTree nodes
32  */
33
34 #ifdef DEBUG
35
36 #define INDENT_SIZE 3
37
38 //--------------------------------------------
39 //
40 // IndentStack: This struct is used, along with its related enums and strings,
//    to control both the indentation and the printing of arcs.
42 //
43 // Notes:
44 //    The mode of printing is set in the Constructor, using its 'compiler' argument.
45 //    Currently it only prints arcs when fgOrder == fgOrderLinear.
46 //    The type of arc to print is specified by the IndentInfo enum, and is controlled
47 //    by the caller of the Push() method.
48
// Indices into the indent-string tables below; each names one glyph used
// when IndentStack::print() draws the tree-dump indentation and arcs.
enum IndentChars
{
    ICVertical,     // continuation of an arc at an outer level (IIArc, not innermost)
    ICBottom,       // corner glyph starting an IIArcBottom arc
    ICTop,          // corner glyph starting an IIArcTop arc
    ICMiddle,       // tee glyph starting an innermost IIArc arc
    ICDash,         // horizontal segment following an arc corner
    ICEmbedded,     // prefix glyph for IIEmbedded levels
    ICTerminal,     // printed after all indent levels, just before the node text
    ICError,        // glyph used for IIError levels
    IndentCharCount // number of entries; sizes the tables below
};
61
// clang-format off
// Sets of strings for different dumping options            vert             bot             top             mid             dash       embedded    terminal    error
// (unicodeIndents uses UTF-8 box-drawing characters; asciiIndents is the plain-ASCII fallback.)
static const char*  emptyIndents[IndentCharCount]   = {     " ",             " ",            " ",            " ",            " ",           "{",      "",        "?"  };
static const char*  asciiIndents[IndentCharCount]   = {     "|",            "\\",            "/",            "+",            "-",           "{",      "*",       "?"  };
static const char*  unicodeIndents[IndentCharCount] = { "\xe2\x94\x82", "\xe2\x94\x94", "\xe2\x94\x8c", "\xe2\x94\x9c", "\xe2\x94\x80",     "{", "\xe2\x96\x8c", "?"  };
// clang-format on
68
69 typedef ArrayStack<Compiler::IndentInfo> IndentInfoStack;
70 struct IndentStack
71 {
72     IndentInfoStack stack;
73     const char**    indents;
74
75     // Constructor for IndentStack.  Uses 'compiler' to determine the mode of printing.
76     IndentStack(Compiler* compiler) : stack(compiler->getAllocator(CMK_DebugOnly))
77     {
78         if (compiler->asciiTrees)
79         {
80             indents = asciiIndents;
81         }
82         else
83         {
84             indents = unicodeIndents;
85         }
86     }
87
88     // Return the depth of the current indentation.
89     unsigned Depth()
90     {
91         return stack.Height();
92     }
93
94     // Push a new indentation onto the stack, of the given type.
95     void Push(Compiler::IndentInfo info)
96     {
97         stack.Push(info);
98     }
99
100     // Pop the most recent indentation type off the stack.
101     Compiler::IndentInfo Pop()
102     {
103         return stack.Pop();
104     }
105
106     // Print the current indentation and arcs.
107     void print()
108     {
109         unsigned indentCount = Depth();
110         for (unsigned i = 0; i < indentCount; i++)
111         {
112             unsigned index = indentCount - 1 - i;
113             switch (stack.Index(index))
114             {
115                 case Compiler::IndentInfo::IINone:
116                     printf("   ");
117                     break;
118                 case Compiler::IndentInfo::IIEmbedded:
119                     printf("%s  ", indents[ICEmbedded]);
120                     break;
121                 case Compiler::IndentInfo::IIArc:
122                     if (index == 0)
123                     {
124                         printf("%s%s%s", indents[ICMiddle], indents[ICDash], indents[ICDash]);
125                     }
126                     else
127                     {
128                         printf("%s  ", indents[ICVertical]);
129                     }
130                     break;
131                 case Compiler::IndentInfo::IIArcBottom:
132                     printf("%s%s%s", indents[ICBottom], indents[ICDash], indents[ICDash]);
133                     break;
134                 case Compiler::IndentInfo::IIArcTop:
135                     printf("%s%s%s", indents[ICTop], indents[ICDash], indents[ICDash]);
136                     break;
137                 case Compiler::IndentInfo::IIError:
138                     printf("%s%s%s", indents[ICError], indents[ICDash], indents[ICDash]);
139                     break;
140                 default:
141                     unreached();
142             }
143         }
144         printf("%s", indents[ICTerminal]);
145     }
146 };
147
148 //------------------------------------------------------------------------
149 // printIndent: This is a static method which simply invokes the 'print'
150 //    method on its 'indentStack' argument.
151 //
152 // Arguments:
153 //    indentStack - specifies the information for the indentation & arcs to be printed
154 //
155 // Notes:
156 //    This method exists to localize the checking for the case where indentStack is null.
157
158 static void printIndent(IndentStack* indentStack)
159 {
160     if (indentStack == nullptr)
161     {
162         return;
163     }
164     indentStack->print();
165 }
166
167 #endif
168
169 #if defined(DEBUG) || NODEBASH_STATS || MEASURE_NODE_SIZE || COUNT_AST_OPERS
170
// Stringized oper enum names, one entry per GTNODE in gtlist.h;
// indexed by genTreeOps (see GenTree::OpName).
static const char* opNames[] = {
#define GTNODE(en, st, cm, ok) #en,
#include "gtlist.h"
};
175
176 const char* GenTree::OpName(genTreeOps op)
177 {
178     assert((unsigned)op < _countof(opNames));
179
180     return opNames[op];
181 }
182
183 #endif
184
185 #if MEASURE_NODE_SIZE
186
// Stringized node-struct type names ('st' field), one entry per GTNODE in
// gtlist.h; indexed by genTreeOps (see GenTree::OpStructName).
static const char* opStructNames[] = {
#define GTNODE(en, st, cm, ok) #st,
#include "gtlist.h"
};
191
192 const char* GenTree::OpStructName(genTreeOps op)
193 {
194     assert((unsigned)op < _countof(opStructNames));
195
196     return opStructNames[op];
197 }
198
199 #endif
200
201 //
202 //  We allocate tree nodes in 2 different sizes:
203 //  - TREE_NODE_SZ_SMALL for most nodes
204 //  - TREE_NODE_SZ_LARGE for the few nodes (such as calls) that have
205 //    more fields and take up a lot more space.
206 //
207
/* GT_COUNT'th oper is overloaded as 'undefined oper', so allocate storage for GT_COUNT'th oper also */
/* static */
// Per-oper allocation size (TREE_NODE_SZ_SMALL or TREE_NODE_SZ_LARGE);
// filled in by InitNodeSize().
unsigned char GenTree::s_gtNodeSizes[GT_COUNT + 1];

#if NODEBASH_STATS || MEASURE_NODE_SIZE || COUNT_AST_OPERS

// Actual C++ size of each oper's node struct (sizeof), as opposed to the
// small/large allocation size recorded in s_gtNodeSizes.
unsigned char GenTree::s_gtTrueSizes[GT_COUNT + 1]{
#define GTNODE(en, st, cm, ok) sizeof(st),
#include "gtlist.h"
};

#endif // NODEBASH_STATS || MEASURE_NODE_SIZE || COUNT_AST_OPERS

#if COUNT_AST_OPERS
// Running per-oper node allocation counts.
LONG GenTree::s_gtNodeCounts[GT_COUNT + 1] = {0};
#endif // COUNT_AST_OPERS
224
//------------------------------------------------------------------------
// InitNodeSize: Initialize s_gtNodeSizes[], the per-oper allocation sizes.
//
// Notes:
//    Every oper defaults to TREE_NODE_SZ_SMALL; opers whose node structs do
//    not fit in a small node are then explicitly marked large. The
//    static_asserts below pin each node struct to its expected size class.
//
/* static */
void GenTree::InitNodeSize()
{
    /* Set all sizes to 'small' first */

    // Iterates through GT_COUNT inclusive: GT_COUNT is overloaded as the
    // 'undefined oper' and gets a slot too (see s_gtNodeSizes declaration).
    for (unsigned op = 0; op <= GT_COUNT; op++)
    {
        GenTree::s_gtNodeSizes[op] = TREE_NODE_SZ_SMALL;
    }

    // Now set all of the appropriate entries to 'large'
    CLANG_FORMAT_COMMENT_ANCHOR;

// clang-format off
#if defined(FEATURE_HFA) || defined(UNIX_AMD64_ABI)
    // On ARM32, ARM64 and System V for struct returning
    // there is code that does GT_ASG-tree.CopyObj call.
    // CopyObj is a large node and the GT_ASG is small, which triggers an exception.
    GenTree::s_gtNodeSizes[GT_ASG]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_RETURN]           = TREE_NODE_SZ_LARGE;
#endif // defined(FEATURE_HFA) || defined(UNIX_AMD64_ABI)

    GenTree::s_gtNodeSizes[GT_CALL]             = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_CAST]             = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_FTN_ADDR]         = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_BOX]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_INDEX]            = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_INDEX_ADDR]       = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_ARR_BOUNDS_CHECK] = TREE_NODE_SZ_LARGE;
#ifdef FEATURE_SIMD
    GenTree::s_gtNodeSizes[GT_SIMD_CHK] = TREE_NODE_SZ_LARGE;
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
    GenTree::s_gtNodeSizes[GT_HW_INTRINSIC_CHK] = TREE_NODE_SZ_LARGE;
#endif // FEATURE_HW_INTRINSICS

    GenTree::s_gtNodeSizes[GT_ARR_ELEM]         = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_ARR_INDEX]        = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_ARR_OFFSET]       = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_RET_EXPR]         = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_OBJ]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_FIELD]            = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_STMT]             = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_CMPXCHG]          = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_QMARK]            = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_LEA]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_STORE_OBJ]        = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_DYN_BLK]          = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_STORE_DYN_BLK]    = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_INTRINSIC]        = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_ALLOCOBJ]         = TREE_NODE_SZ_LARGE;
#if USE_HELPERS_FOR_INT_DIV
    GenTree::s_gtNodeSizes[GT_DIV]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_UDIV]             = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_MOD]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_UMOD]             = TREE_NODE_SZ_LARGE;
#endif
#ifdef FEATURE_PUT_STRUCT_ARG_STK
    // TODO-Throughput: This should not need to be a large node. The object info should be
    // obtained from the child node.
    GenTree::s_gtNodeSizes[GT_PUTARG_STK]       = TREE_NODE_SZ_LARGE;
#if FEATURE_ARG_SPLIT
    GenTree::s_gtNodeSizes[GT_PUTARG_SPLIT]     = TREE_NODE_SZ_LARGE;
#endif // FEATURE_ARG_SPLIT
#endif // FEATURE_PUT_STRUCT_ARG_STK

    assert(GenTree::s_gtNodeSizes[GT_RETURN] == GenTree::s_gtNodeSizes[GT_ASG]);

    // This list of assertions should come to contain all GenTree subtypes that are declared
    // "small".
    assert(sizeof(GenTreeLclFld) <= GenTree::s_gtNodeSizes[GT_LCL_FLD]);
    assert(sizeof(GenTreeLclVar) <= GenTree::s_gtNodeSizes[GT_LCL_VAR]);

    static_assert_no_msg(sizeof(GenTree)             <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeUnOp)         <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeOp)           <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeVal)          <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeIntConCommon) <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreePhysReg)      <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeIntCon)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeLngCon)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeDblCon)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeStrCon)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeLclVarCommon) <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeLclVar)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeLclFld)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeCC)           <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeCast)         <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeBox)          <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeField)        <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeArgList)      <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeFieldList)    <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeColon)        <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeCall)         <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeCmpXchg)      <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeFptrVal)      <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeQmark)        <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeIntrinsic)    <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeIndex)        <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeArrLen)       <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeBoundsChk)    <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeArrElem)      <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeArrIndex)     <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeArrOffs)      <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeIndir)        <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeStoreInd)     <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeAddrMode)     <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeObj)          <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeBlk)          <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeRetExpr)      <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeStmt)         <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeClsVar)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeArgPlace)     <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreePhiArg)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeAllocObj)     <= TREE_NODE_SZ_LARGE); // *** large node
#ifndef FEATURE_PUT_STRUCT_ARG_STK
    static_assert_no_msg(sizeof(GenTreePutArgStk)    <= TREE_NODE_SZ_SMALL);
#else  // FEATURE_PUT_STRUCT_ARG_STK
    // TODO-Throughput: This should not need to be a large node. The object info should be
    // obtained from the child node.
    static_assert_no_msg(sizeof(GenTreePutArgStk)    <= TREE_NODE_SZ_LARGE);
#if FEATURE_ARG_SPLIT
    static_assert_no_msg(sizeof(GenTreePutArgSplit)  <= TREE_NODE_SZ_LARGE);
#endif // FEATURE_ARG_SPLIT
#endif // FEATURE_PUT_STRUCT_ARG_STK

#ifdef FEATURE_SIMD
    static_assert_no_msg(sizeof(GenTreeSIMD)         <= TREE_NODE_SZ_SMALL);
#endif // FEATURE_SIMD

#ifdef FEATURE_HW_INTRINSICS
    static_assert_no_msg(sizeof(GenTreeHWIntrinsic)  <= TREE_NODE_SZ_SMALL);
#endif // FEATURE_HW_INTRINSICS
    // clang-format on
}
360
361 size_t GenTree::GetNodeSize() const
362 {
363     return GenTree::s_gtNodeSizes[gtOper];
364 }
365
366 #ifdef DEBUG
367 bool GenTree::IsNodeProperlySized() const
368 {
369     size_t size;
370
371     if (gtDebugFlags & GTF_DEBUG_NODE_SMALL)
372     {
373         size = TREE_NODE_SZ_SMALL;
374     }
375     else
376     {
377         assert(gtDebugFlags & GTF_DEBUG_NODE_LARGE);
378         size = TREE_NODE_SZ_LARGE;
379     }
380
381     return GenTree::s_gtNodeSizes[gtOper] <= size;
382 }
383 #endif
384
//------------------------------------------------------------------------
// ReplaceWith: replace this with the src node. The source must be an isolated node
//              and cannot be used after the replacement.
//
// Arguments:
//    src  - source tree, that replaces this.
//    comp - the compiler instance to transfer annotations for arrays.
//
// Notes:
//    The replacement is a raw memcpy of 'src' over this node (including the
//    vtable pointer), so the destination allocation must be at least as large
//    as the source node; this node's gtPrev/gtNext links are preserved.
//
void GenTree::ReplaceWith(GenTree* src, Compiler* comp)
{
    // The source may be big only if the target is also a big node
    assert((gtDebugFlags & GTF_DEBUG_NODE_LARGE) || GenTree::s_gtNodeSizes[src->gtOper] == TREE_NODE_SZ_SMALL);

    // The check is effective only if nodes have been already threaded.
    assert((src->gtPrev == nullptr) && (src->gtNext == nullptr));

    RecordOperBashing(OperGet(), src->OperGet()); // nop unless NODEBASH_STATS is enabled

    // Save this node's linear-order links so the memcpy doesn't clobber them.
    GenTree* prev = gtPrev;
    GenTree* next = gtNext;
    // The VTable pointer is copied intentionally here
    memcpy((void*)this, (void*)src, src->GetNodeSize());
    this->gtPrev = prev;
    this->gtNext = next;

#ifdef DEBUG
    // Reset the sequence number copied from 'src'; this node must be re-sequenced.
    gtSeqNum = 0;
#endif
    // Transfer any annotations.
    if (src->OperGet() == GT_IND && src->gtFlags & GTF_IND_ARR_INDEX)
    {
        // The array-info map was keyed by 'src'; re-key the entry to 'this'.
        ArrayInfo arrInfo;
        bool      b = comp->GetArrayInfoMap()->Lookup(src, &arrInfo);
        assert(b);
        comp->GetArrayInfoMap()->Set(this, arrInfo);
    }
    DEBUG_DESTROY_NODE(src);
}
423
424 /*****************************************************************************
425  *
426  *  When 'NODEBASH_STATS' is enabled in "jit.h" we record all instances of
427  *  an existing GenTree node having its operator changed. This can be useful
428  *  for two (related) things - to see what is being bashed (and what isn't),
429  *  and to verify that the existing choices for what nodes are marked 'large'
430  *  are reasonable (to minimize "wasted" space).
431  *
432  *  And yes, the hash function / logic is simplistic, but it is conflict-free
433  *  and transparent for what we need.
434  */
435
436 #if NODEBASH_STATS
437
// Number of buckets in the bashing-stats table below.
#define BASH_HASH_SIZE 211

// Map an (old, new) oper pair to a bucket index in [0, BASH_HASH_SIZE).
// The large multipliers spread the pairs; see the note above about the
// hash being deliberately simplistic.
inline unsigned hashme(genTreeOps op1, genTreeOps op2)
{
    return ((op1 * 104729) ^ (op2 * 56569)) % BASH_HASH_SIZE;
}
444
// One bucket of the oper-bashing statistics table.
struct BashHashDsc
{
    unsigned __int32 bhFullHash; // the hash value (unique for all old->new pairs)
    unsigned __int32 bhCount;    // the same old->new bashings seen so far
    unsigned __int8  bhOperOld;  // original gtOper
    unsigned __int8  bhOperNew;  // new      gtOper
};

// The bucket table (zero-initialized); indexed by hashme(old, new).
static BashHashDsc BashHash[BASH_HASH_SIZE];
454
455 void GenTree::RecordOperBashing(genTreeOps operOld, genTreeOps operNew)
456 {
457     unsigned     hash = hashme(operOld, operNew);
458     BashHashDsc* desc = BashHash + hash;
459
460     if (desc->bhFullHash != hash)
461     {
462         noway_assert(desc->bhCount == 0); // if this ever fires, need fix the hash fn
463         desc->bhFullHash = hash;
464     }
465
466     desc->bhCount += 1;
467     desc->bhOperOld = operOld;
468     desc->bhOperNew = operNew;
469 }
470
471 void GenTree::ReportOperBashing(FILE* f)
472 {
473     unsigned total = 0;
474
475     fflush(f);
476
477     fprintf(f, "\n");
478     fprintf(f, "Bashed gtOper stats:\n");
479     fprintf(f, "\n");
480     fprintf(f, "    Old operator        New operator     #bytes old->new      Count\n");
481     fprintf(f, "    ---------------------------------------------------------------\n");
482
483     for (unsigned h = 0; h < BASH_HASH_SIZE; h++)
484     {
485         unsigned count = BashHash[h].bhCount;
486         if (count == 0)
487             continue;
488
489         unsigned opOld = BashHash[h].bhOperOld;
490         unsigned opNew = BashHash[h].bhOperNew;
491
492         fprintf(f, "    GT_%-13s -> GT_%-13s [size: %3u->%3u] %c %7u\n", OpName((genTreeOps)opOld),
493                 OpName((genTreeOps)opNew), s_gtTrueSizes[opOld], s_gtTrueSizes[opNew],
494                 (s_gtTrueSizes[opOld] < s_gtTrueSizes[opNew]) ? 'X' : ' ', count);
495         total += count;
496     }
497     fprintf(f, "\n");
498     fprintf(f, "Total bashings: %u\n", total);
499     fprintf(f, "\n");
500
501     fflush(f);
502 }
503
504 #endif // NODEBASH_STATS
505
506 /*****************************************************************************/
507
508 #if MEASURE_NODE_SIZE
509
510 void GenTree::DumpNodeSizes(FILE* fp)
511 {
512     // Dump the sizes of the various GenTree flavors
513
514     fprintf(fp, "Small tree node size = %3u bytes\n", TREE_NODE_SZ_SMALL);
515     fprintf(fp, "Large tree node size = %3u bytes\n", TREE_NODE_SZ_LARGE);
516     fprintf(fp, "\n");
517
518     // Verify that node sizes are set kosherly and dump sizes
519     for (unsigned op = GT_NONE + 1; op < GT_COUNT; op++)
520     {
521         unsigned needSize = s_gtTrueSizes[op];
522         unsigned nodeSize = s_gtNodeSizes[op];
523
524         const char* structNm = OpStructName((genTreeOps)op);
525         const char* operName = OpName((genTreeOps)op);
526
527         bool repeated = false;
528
529         // Have we seen this struct flavor before?
530         for (unsigned mop = GT_NONE + 1; mop < op; mop++)
531         {
532             if (strcmp(structNm, OpStructName((genTreeOps)mop)) == 0)
533             {
534                 repeated = true;
535                 break;
536             }
537         }
538
539         // Don't repeat the same GenTree flavor unless we have an error
540         if (!repeated || needSize > nodeSize)
541         {
542             unsigned sizeChar = '?';
543
544             if (nodeSize == TREE_NODE_SZ_SMALL)
545                 sizeChar = 'S';
546             else if (nodeSize == TREE_NODE_SZ_LARGE)
547                 sizeChar = 'L';
548
549             fprintf(fp, "GT_%-16s ... %-19s = %3u bytes (%c)", operName, structNm, needSize, sizeChar);
550             if (needSize > nodeSize)
551             {
552                 fprintf(fp, " -- ERROR -- allocation is only %u bytes!", nodeSize);
553             }
554             else if (needSize <= TREE_NODE_SZ_SMALL && nodeSize == TREE_NODE_SZ_LARGE)
555             {
556                 fprintf(fp, " ... could be small");
557             }
558
559             fprintf(fp, "\n");
560         }
561     }
562 }
563
564 #endif // MEASURE_NODE_SIZE
565
566 /*****************************************************************************
567  *
568  *  Walk all basic blocks and call the given function pointer for all tree
569  *  nodes contained therein.
570  */
571
572 void Compiler::fgWalkAllTreesPre(fgWalkPreFn* visitor, void* pCallBackData)
573 {
574     for (BasicBlock* block = fgFirstBB; block != nullptr; block = block->bbNext)
575     {
576         for (GenTreeStmt* stmt = block->firstStmt(); stmt != nullptr; stmt = stmt->getNextStmt())
577         {
578             fgWalkTreePre(&stmt->gtStmtExpr, visitor, pCallBackData);
579         }
580     }
581 }
582
583 //-----------------------------------------------------------
584 // CopyReg: Copy the _gtRegNum/gtRegTag fields.
585 //
586 // Arguments:
587 //     from   -  GenTree node from which to copy
588 //
589 // Return Value:
590 //     None
591 void GenTree::CopyReg(GenTree* from)
592 {
593     _gtRegNum = from->_gtRegNum;
594     INDEBUG(gtRegTag = from->gtRegTag;)
595
596     // Also copy multi-reg state if this is a call node
597     if (IsCall())
598     {
599         assert(from->IsCall());
600         this->AsCall()->CopyOtherRegs(from->AsCall());
601     }
602     else if (IsCopyOrReload())
603     {
604         this->AsCopyOrReload()->CopyOtherRegs(from->AsCopyOrReload());
605     }
606 }
607
608 //------------------------------------------------------------------
609 // gtHasReg: Whether node beeen assigned a register by LSRA
610 //
611 // Arguments:
612 //    None
613 //
614 // Return Value:
615 //    Returns true if the node was assigned a register.
616 //
617 //    In case of multi-reg call nodes, it is considered
618 //    having a reg if regs are allocated for all its
619 //    return values.
620 //
621 //    In case of GT_COPY or GT_RELOAD of a multi-reg call,
622 //    GT_COPY/GT_RELOAD is considered having a reg if it
623 //    has a reg assigned to any of its positions.
624 //
625 // Assumption:
626 //    In order for this to work properly, gtClearReg must be called
627 //    prior to setting the register value.
628 //
629 bool GenTree::gtHasReg() const
630 {
631     bool hasReg;
632
633     if (IsMultiRegCall())
634     {
635         // Have to cast away const-ness because GetReturnTypeDesc() is a non-const method
636         GenTree*     tree     = const_cast<GenTree*>(this);
637         GenTreeCall* call     = tree->AsCall();
638         unsigned     regCount = call->GetReturnTypeDesc()->GetReturnRegCount();
639         hasReg                = false;
640
641         // A Multi-reg call node is said to have regs, if it has
642         // reg assigned to each of its result registers.
643         for (unsigned i = 0; i < regCount; ++i)
644         {
645             hasReg = (call->GetRegNumByIdx(i) != REG_NA);
646             if (!hasReg)
647             {
648                 break;
649             }
650         }
651     }
652     else if (IsCopyOrReloadOfMultiRegCall())
653     {
654         GenTree*             tree         = const_cast<GenTree*>(this);
655         GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
656         GenTreeCall*         call         = copyOrReload->gtGetOp1()->AsCall();
657         unsigned             regCount     = call->GetReturnTypeDesc()->GetReturnRegCount();
658         hasReg                            = false;
659
660         // A Multi-reg copy or reload node is said to have regs,
661         // if it has valid regs in any of the positions.
662         for (unsigned i = 0; i < regCount; ++i)
663         {
664             hasReg = (copyOrReload->GetRegNumByIdx(i) != REG_NA);
665             if (hasReg)
666             {
667                 break;
668             }
669         }
670     }
671     else
672     {
673         hasReg = (gtRegNum != REG_NA);
674     }
675
676     return hasReg;
677 }
678
//-----------------------------------------------------------------------------
// GetRegisterDstCount: Get the number of registers defined by the node.
//
// Arguments:
//    None
//
// Return Value:
//    The number of registers that this node defines.
//
// Notes:
//    This should not be called on a contained node.
//    This does not look at the actual register assignments, if any, and so
//    is valid after Lowering.
//
int GenTree::GetRegisterDstCount() const
{
    assert(!isContained());
    if (!IsMultiRegNode())
    {
        // Single-reg (or no-reg) node: defines one register iff it produces a value.
        return (IsValue()) ? 1 : 0;
    }
    else if (IsMultiRegCall())
    {
        // temporarily cast away const-ness as AsCall() method is not declared const
        GenTree* temp = const_cast<GenTree*>(this);
        return temp->AsCall()->GetReturnTypeDesc()->GetReturnRegCount();
    }
    else if (IsCopyOrReload())
    {
        // A copy/reload defines as many registers as its source does.
        return gtGetOp1()->GetRegisterDstCount();
    }
#if FEATURE_ARG_SPLIT
    else if (OperIsPutArgSplit())
    {
        return (const_cast<GenTree*>(this))->AsPutArgSplit()->gtNumRegs;
    }
#endif
#if !defined(_TARGET_64BIT_)
    else if (OperIsMultiRegOp())
    {
        // A MultiRegOp is a GT_MUL_LONG, GT_PUTARG_REG, or GT_BITCAST.
        // For the latter two (ARM-only), they only have multiple registers if they produce a long value
        // (GT_MUL_LONG always produces a long value).
        CLANG_FORMAT_COMMENT_ANCHOR;
#ifdef _TARGET_ARM_
        return (TypeGet() == TYP_LONG) ? 2 : 1;
#else
        assert(OperIs(GT_MUL_LONG));
        return 2;
#endif
    }
#endif
    assert(!"Unexpected multi-reg node");
    return 0;
}
734
//---------------------------------------------------------------
// gtGetRegMask: Get the reg mask of the node.
//
// Arguments:
//    None
//
// Return Value:
//    Reg Mask of GenTree node.
//
regMaskTP GenTree::gtGetRegMask() const
{
    regMaskTP resultMask;

    if (IsMultiRegCall())
    {
        // temporarily cast away const-ness as AsCall() method is not declared const
        // The mask is the primary register plus all "other" return registers.
        resultMask    = genRegMask(gtRegNum);
        GenTree* temp = const_cast<GenTree*>(this);
        resultMask |= temp->AsCall()->GetOtherRegMask();
    }
    else if (IsCopyOrReloadOfMultiRegCall())
    {
        // A multi-reg copy or reload, will have valid regs for only those
        // positions that need to be copied or reloaded.  Hence we need
        // to consider only those registers for computing reg mask.

        GenTree*             tree         = const_cast<GenTree*>(this);
        GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
        GenTreeCall*         call         = copyOrReload->gtGetOp1()->AsCall();
        unsigned             regCount     = call->GetReturnTypeDesc()->GetReturnRegCount();

        resultMask = RBM_NONE;
        for (unsigned i = 0; i < regCount; ++i)
        {
            regNumber reg = copyOrReload->GetRegNumByIdx(i);
            if (reg != REG_NA)
            {
                resultMask |= genRegMask(reg);
            }
        }
    }
#if FEATURE_ARG_SPLIT
    else if (OperIsPutArgSplit())
    {
        // A split argument node defines gtNumRegs registers, all of which
        // must have been assigned (asserted below).
        GenTree*            tree     = const_cast<GenTree*>(this);
        GenTreePutArgSplit* splitArg = tree->AsPutArgSplit();
        unsigned            regCount = splitArg->gtNumRegs;

        resultMask = RBM_NONE;
        for (unsigned i = 0; i < regCount; ++i)
        {
            regNumber reg = splitArg->GetRegNumByIdx(i);
            assert(reg != REG_NA);
            resultMask |= genRegMask(reg);
        }
    }
#endif // FEATURE_ARG_SPLIT
    else
    {
        // Ordinary single-reg node.
        resultMask = genRegMask(gtRegNum);
    }

    return resultMask;
}
799
800 //---------------------------------------------------------------
801 // GetOtherRegMask: Get the reg mask of gtOtherRegs of call node
802 //
803 // Arguments:
804 //    None
805 //
806 // Return Value:
807 //    Reg mask of gtOtherRegs of call node.
808 //
809 regMaskTP GenTreeCall::GetOtherRegMask() const
810 {
811     regMaskTP resultMask = RBM_NONE;
812
813 #if FEATURE_MULTIREG_RET
814     for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
815     {
816         if (gtOtherRegs[i] != REG_NA)
817         {
818             resultMask |= genRegMask((regNumber)gtOtherRegs[i]);
819             continue;
820         }
821         break;
822     }
823 #endif
824
825     return resultMask;
826 }
827
//-------------------------------------------------------------------------
// IsPure:
//    Returns true if this call is pure. For now, this uses the same
//    definition of "pure" that is that used by HelperCallProperties: a
//    pure call does not read or write any aliased (e.g. heap) memory or
//    have other global side effects (e.g. class constructors, finalizers),
//    but is allowed to throw an exception.
//
//    NOTE: this call currently only returns true if the call target is a
//    helper method that is known to be pure. No other analysis is
//    performed.
//
// Arguments:
//    compiler - the compiler context.
//
// Returns:
//    True if the call is pure; false otherwise.
//
bool GenTreeCall::IsPure(Compiler* compiler) const
{
    // Only helper calls can be recognized as pure; everything else is
    // conservatively treated as impure.
    return (gtCallType == CT_HELPER) &&
           compiler->s_helperCallProperties.IsPure(compiler->eeGetHelperNum(gtCallMethHnd));
}
851
852 //-------------------------------------------------------------------------
853 // HasSideEffects:
854 //    Returns true if this call has any side effects. All non-helpers are considered to have side-effects. Only helpers
855 //    that do not mutate the heap, do not run constructors, may not throw, and are either a) pure or b) non-finalizing
856 //    allocation functions are considered side-effect-free.
857 //
858 // Arguments:
859 //     compiler         - the compiler instance
860 //     ignoreExceptions - when `true`, ignores exception side effects
861 //     ignoreCctors     - when `true`, ignores class constructor side effects
862 //
863 // Return Value:
864 //      true if this call has any side-effects; false otherwise.
865 bool GenTreeCall::HasSideEffects(Compiler* compiler, bool ignoreExceptions, bool ignoreCctors) const
866 {
867     // Generally all GT_CALL nodes are considered to have side-effects, but we may have extra information about helper
868     // calls that can prove them side-effect-free.
869     if (gtCallType != CT_HELPER)
870     {
871         return true;
872     }
873
874     CorInfoHelpFunc       helper           = compiler->eeGetHelperNum(gtCallMethHnd);
875     HelperCallProperties& helperProperties = compiler->s_helperCallProperties;
876
877     // We definitely care about the side effects if MutatesHeap is true
878     if (helperProperties.MutatesHeap(helper))
879     {
880         return true;
881     }
882
883     // Unless we have been instructed to ignore cctors (CSE, for example, ignores cctors), consider them side effects.
884     if (!ignoreCctors && helperProperties.MayRunCctor(helper))
885     {
886         return true;
887     }
888
889     // If we also care about exceptions then check if the helper can throw
890     if (!ignoreExceptions && !helperProperties.NoThrow(helper))
891     {
892         return true;
893     }
894
895     // If this is not a Pure helper call or an allocator (that will not need to run a finalizer)
896     // then this call has side effects.
897     return !helperProperties.IsPure(helper) &&
898            (!helperProperties.IsAllocator(helper) || ((gtCallMoreFlags & GTF_CALL_M_ALLOC_SIDE_EFFECTS) != 0));
899 }
900
901 //-------------------------------------------------------------------------
902 // HasNonStandardAddedArgs: Return true if the method has non-standard args added to the call
903 // argument list during argument morphing (fgMorphArgs), e.g., passed in R10 or R11 on AMD64.
904 // See also GetNonStandardAddedArgCount().
905 //
906 // Arguments:
907 //     compiler - the compiler instance
908 //
909 // Return Value:
910 //      true if there are any such args, false otherwise.
911 //
912 bool GenTreeCall::HasNonStandardAddedArgs(Compiler* compiler) const
913 {
914     return GetNonStandardAddedArgCount(compiler) != 0;
915 }
916
917 //-------------------------------------------------------------------------
918 // GetNonStandardAddedArgCount: Get the count of non-standard arguments that have been added
919 // during call argument morphing (fgMorphArgs). Do not count non-standard args that are already
920 // counted in the argument list prior to morphing.
921 //
922 // This function is used to help map the caller and callee arguments during tail call setup.
923 //
924 // Arguments:
925 //     compiler - the compiler instance
926 //
927 // Return Value:
928 //      The count of args, as described.
929 //
930 // Notes:
931 //      It would be more general to have fgMorphArgs set a bit on the call node when such
932 //      args are added to a call, and a bit on each such arg, and then have this code loop
933 //      over the call args when the special call bit is set, counting the args with the special
934 //      arg bit. This seems pretty heavyweight, though. Instead, this logic needs to be kept
935 //      in sync with fgMorphArgs.
936 //
937 int GenTreeCall::GetNonStandardAddedArgCount(Compiler* compiler) const
938 {
939     if (IsUnmanaged() && !compiler->opts.ShouldUsePInvokeHelpers())
940     {
941         // R11 = PInvoke cookie param
942         return 1;
943     }
944     else if (IsVirtualStub())
945     {
946         // R11 = Virtual stub param
947         return 1;
948     }
949     else if ((gtCallType == CT_INDIRECT) && (gtCallCookie != nullptr))
950     {
951         // R10 = PInvoke target param
952         // R11 = PInvoke cookie param
953         return 2;
954     }
955     return 0;
956 }
957
958 //-------------------------------------------------------------------------
959 // TreatAsHasRetBufArg:
960 //
961 // Arguments:
962 //     compiler, the compiler instance so that we can call eeGetHelperNum
963 //
964 // Return Value:
965 //     Returns true if we treat the call as if it has a retBuf argument
966 //     This method may actually have a retBuf argument
967 //     or it could be a JIT helper that we are still transforming during
968 //     the importer phase.
969 //
970 // Notes:
971 //     On ARM64 marking the method with the GTF_CALL_M_RETBUFFARG flag
972 //     will make HasRetBufArg() return true, but will also force the
973 //     use of register x8 to pass the RetBuf argument.
974 //
975 //     These two Jit Helpers that we handle here by returning true
976 //     aren't actually defined to return a struct, so they don't expect
977 //     their RetBuf to be passed in x8, instead they  expect it in x0.
978 //
979 bool GenTreeCall::TreatAsHasRetBufArg(Compiler* compiler) const
980 {
981     if (HasRetBufArg())
982     {
983         return true;
984     }
985     else
986     {
987         // If we see a Jit helper call that returns a TYP_STRUCT we will
988         // transform it as if it has a Return Buffer Argument
989         //
990         if (IsHelperCall() && (gtReturnType == TYP_STRUCT))
991         {
992             // There are two possible helper calls that use this path:
993             //  CORINFO_HELP_GETFIELDSTRUCT and CORINFO_HELP_UNBOX_NULLABLE
994             //
995             CorInfoHelpFunc helpFunc = compiler->eeGetHelperNum(gtCallMethHnd);
996
997             if (helpFunc == CORINFO_HELP_GETFIELDSTRUCT)
998             {
999                 return true;
1000             }
1001             else if (helpFunc == CORINFO_HELP_UNBOX_NULLABLE)
1002             {
1003                 return true;
1004             }
1005             else
1006             {
1007                 assert(!"Unexpected JIT helper in TreatAsHasRetBufArg");
1008             }
1009         }
1010     }
1011     return false;
1012 }
1013
1014 //-------------------------------------------------------------------------
1015 // IsHelperCall: Determine if this GT_CALL node is a specific helper call.
1016 //
1017 // Arguments:
1018 //     compiler - the compiler instance so that we can call eeFindHelper
1019 //
1020 // Return Value:
1021 //     Returns true if this GT_CALL node is a call to the specified helper.
1022 //
1023 bool GenTreeCall::IsHelperCall(Compiler* compiler, unsigned helper) const
1024 {
1025     return IsHelperCall(compiler->eeFindHelper(helper));
1026 }
1027
//------------------------------------------------------------------------
// GenTreeCall::ReplaceCallOperand:
//    Replaces a given operand to a call node and updates the call
//    argument table if necessary.
//
// Arguments:
//    useEdge - the use edge that points to the operand to be replaced.
//    replacement - the replacement node.
//
void GenTreeCall::ReplaceCallOperand(GenTree** useEdge, GenTree* replacement)
{
    assert(useEdge != nullptr);
    assert(replacement != nullptr);
    // NOTE(review): this assert has a side effect - TryGetUse re-points 'useEdge' at the
    // call's own use edge for the operand. If 'assert' compiles away in non-DEBUG builds,
    // the caller-supplied 'useEdge' is used as-is; confirm callers always pass the actual
    // use edge.
    assert(TryGetUse(*useEdge, &useEdge));

    GenTree* originalOperand = *useEdge;
    *useEdge                 = replacement;

    // The operand is a call argument exactly when it is neither the control
    // expression nor (for indirect calls) the cookie or call-target address.
    const bool isArgument =
        (replacement != gtControlExpr) &&
        ((gtCallType != CT_INDIRECT) || ((replacement != gtCallCookie) && (replacement != gtCallAddr)));

    if (isArgument)
    {
        if ((originalOperand->gtFlags & GTF_LATE_ARG) != 0)
        {
            // Propagate the late-arg flag to the replacement node.
            replacement->gtFlags |= GTF_LATE_ARG;
        }
        else
        {
            assert((replacement->gtFlags & GTF_LATE_ARG) == 0);

            // Keep the argument table entry pointing at the new node.
            fgArgTabEntry* fp = Compiler::gtArgEntryByNode(this, originalOperand);
            assert(fp->node == originalOperand);
            fp->node = replacement;
        }
    }
}
1066
1067 //-------------------------------------------------------------------------
1068 // AreArgsComplete: Determine if this GT_CALL node's arguments have been processed.
1069 //
1070 // Return Value:
1071 //     Returns true if fgMorphArgs has processed the arguments.
1072 //
1073 bool GenTreeCall::AreArgsComplete() const
1074 {
1075     if (fgArgInfo == nullptr)
1076     {
1077         return false;
1078     }
1079     if (fgArgInfo->AreArgsComplete())
1080     {
1081         assert((gtCallLateArgs != nullptr) || !fgArgInfo->HasRegArgs());
1082         return true;
1083     }
1084     assert(gtCallArgs == nullptr);
1085     return false;
1086 }
1087
1088 #if !defined(FEATURE_PUT_STRUCT_ARG_STK)
1089 unsigned GenTreePutArgStk::getArgSize()
1090 {
1091     return genTypeSize(genActualType(gtOp1->gtType));
1092 }
1093 #endif // !defined(FEATURE_PUT_STRUCT_ARG_STK)
1094
/*****************************************************************************
 *
 *  Returns non-zero if the two trees are identical.
 *
 *  The comparison is structural: operators, types, overflow state, the
 *  GTF_UNSIGNED flag, and any operator-specific extra fields must match,
 *  and the operands are compared recursively. Where possible the recursion
 *  is converted to iteration via the AGAIN label to limit stack depth.
 *  When 'swapOK' is true, a commutative binary operator may match with its
 *  operands swapped, provided none of the four operands have side effects.
 */

bool GenTree::Compare(GenTree* op1, GenTree* op2, bool swapOK)
{
    genTreeOps oper;
    unsigned   kind;

//  printf("tree1:\n"); gtDispTree(op1);
//  printf("tree2:\n"); gtDispTree(op2);

AGAIN:

    // Two null trees are equal; a null tree never equals a non-null one,
    // and a tree is trivially equal to itself.
    if (op1 == nullptr)
    {
        return (op2 == nullptr);
    }
    if (op2 == nullptr)
    {
        return false;
    }
    if (op1 == op2)
    {
        return true;
    }

    assert(op1->gtOper != GT_STMT);
    assert(op2->gtOper != GT_STMT);

    oper = op1->OperGet();

    /* The operators must be equal */

    if (oper != op2->gtOper)
    {
        return false;
    }

    /* The types must be equal */

    if (op1->gtType != op2->gtType)
    {
        return false;
    }

    /* Overflow must be equal */
    if (op1->gtOverflowEx() != op2->gtOverflowEx())
    {
        return false;
    }

    /* Sensible flags must be equal */
    if ((op1->gtFlags & (GTF_UNSIGNED)) != (op2->gtFlags & (GTF_UNSIGNED)))
    {
        return false;
    }

    /* Figure out what kind of nodes we're comparing */

    kind = op1->OperKind();

    /* Is this a constant node? */

    if (kind & GTK_CONST)
    {
        // Only integer constants are currently matched by value; other
        // constant kinds (see the disabled cases below) never compare equal.
        switch (oper)
        {
            case GT_CNS_INT:
                if (op1->gtIntCon.gtIconVal == op2->gtIntCon.gtIconVal)
                {
                    return true;
                }
                break;
#if 0
            // TODO-CQ: Enable this in the future
        case GT_CNS_LNG:
            if  (op1->gtLngCon.gtLconVal == op2->gtLngCon.gtLconVal)
                return true;
            break;

        case GT_CNS_DBL:
            if  (op1->gtDblCon.gtDconVal == op2->gtDblCon.gtDconVal)
                return true;
            break;
#endif
            default:
                break;
        }

        return false;
    }

    /* Is this a leaf node? */

    if (kind & GTK_LEAF)
    {
        // Leaves compare by their identifying payload (local number, field
        // offset, handle, ...); a 'break' out of the switch means not equal.
        switch (oper)
        {
            case GT_LCL_VAR:
                if (op1->gtLclVarCommon.gtLclNum != op2->gtLclVarCommon.gtLclNum)
                {
                    break;
                }

                return true;

            case GT_LCL_FLD:
                if (op1->gtLclFld.gtLclNum != op2->gtLclFld.gtLclNum ||
                    op1->gtLclFld.gtLclOffs != op2->gtLclFld.gtLclOffs)
                {
                    break;
                }

                return true;

            case GT_CLS_VAR:
                if (op1->gtClsVar.gtClsVarHnd != op2->gtClsVar.gtClsVarHnd)
                {
                    break;
                }

                return true;

            case GT_LABEL:
                return true;

            case GT_ARGPLACE:
                if ((op1->gtType == TYP_STRUCT) &&
                    (op1->gtArgPlace.gtArgPlaceClsHnd != op2->gtArgPlace.gtArgPlaceClsHnd))
                {
                    break;
                }
                return true;

            default:
                break;
        }

        return false;
    }

    /* Is it a 'simple' unary/binary operator? */

    if (kind & GTK_UNOP)
    {
        if (IsExOp(kind))
        {
            // ExOp operators extend unary operator with extra, non-GenTree* members.  In many cases,
            // these should be included in the comparison.
            switch (oper)
            {
                case GT_ARR_LENGTH:
                    if (op1->gtArrLen.ArrLenOffset() != op2->gtArrLen.ArrLenOffset())
                    {
                        return false;
                    }
                    break;
                case GT_CAST:
                    if (op1->gtCast.gtCastType != op2->gtCast.gtCastType)
                    {
                        return false;
                    }
                    break;
                case GT_OBJ:
                    if (op1->AsObj()->gtClass != op2->AsObj()->gtClass)
                    {
                        return false;
                    }
                    break;

                // For the ones below no extra argument matters for comparison.
                case GT_BOX:
                case GT_RUNTIMELOOKUP:
                    break;

                default:
                    assert(!"unexpected unary ExOp operator");
            }
        }
        // Compare the single operand. Note that swapOK is not propagated here,
        // so the parameter's default applies to the recursive call.
        return Compare(op1->gtOp.gtOp1, op2->gtOp.gtOp1);
    }

    if (kind & GTK_BINOP)
    {
        if (IsExOp(kind))
        {
            // ExOp operators extend unary operator with extra, non-GenTree* members.  In many cases,
            // these should be included in the hash code.
            switch (oper)
            {
                case GT_INTRINSIC:
                    if (op1->gtIntrinsic.gtIntrinsicId != op2->gtIntrinsic.gtIntrinsicId)
                    {
                        return false;
                    }
                    break;
                case GT_LEA:
                    if (op1->gtAddrMode.gtScale != op2->gtAddrMode.gtScale)
                    {
                        return false;
                    }
                    if (op1->gtAddrMode.Offset() != op2->gtAddrMode.Offset())
                    {
                        return false;
                    }
                    break;
                case GT_INDEX:
                    if (op1->gtIndex.gtIndElemSize != op2->gtIndex.gtIndElemSize)
                    {
                        return false;
                    }
                    break;
                case GT_INDEX_ADDR:
                    if (op1->AsIndexAddr()->gtElemSize != op2->AsIndexAddr()->gtElemSize)
                    {
                        return false;
                    }
                    break;
#ifdef FEATURE_SIMD
                case GT_SIMD:
                    if ((op1->AsSIMD()->gtSIMDIntrinsicID != op2->AsSIMD()->gtSIMDIntrinsicID) ||
                        (op1->AsSIMD()->gtSIMDBaseType != op2->AsSIMD()->gtSIMDBaseType) ||
                        (op1->AsSIMD()->gtSIMDSize != op2->AsSIMD()->gtSIMDSize))
                    {
                        return false;
                    }
                    break;
#endif // FEATURE_SIMD

#ifdef FEATURE_HW_INTRINSICS
                case GT_HWIntrinsic:
                    if ((op1->AsHWIntrinsic()->gtHWIntrinsicId != op2->AsHWIntrinsic()->gtHWIntrinsicId) ||
                        (op1->AsHWIntrinsic()->gtSIMDBaseType != op2->AsHWIntrinsic()->gtSIMDBaseType) ||
                        (op1->AsHWIntrinsic()->gtSIMDSize != op2->AsHWIntrinsic()->gtSIMDSize) ||
                        (op1->AsHWIntrinsic()->gtIndexBaseType != op2->AsHWIntrinsic()->gtIndexBaseType))
                    {
                        return false;
                    }
                    break;
#endif

                // For the ones below no extra argument matters for comparison.
                case GT_QMARK:
                    break;

                default:
                    assert(!"unexpected binary ExOp operator");
            }
        }

        if (op1->gtOp.gtOp2)
        {
            // First operands did not match directly; for a commutative operator
            // whose four operands are all side-effect-free, try the swapped pairing.
            if (!Compare(op1->gtOp.gtOp1, op2->gtOp.gtOp1, swapOK))
            {
                if (swapOK && OperIsCommutative(oper) &&
                    ((op1->gtOp.gtOp1->gtFlags | op1->gtOp.gtOp2->gtFlags | op2->gtOp.gtOp1->gtFlags |
                      op2->gtOp.gtOp2->gtFlags) &
                     GTF_ALL_EFFECT) == 0)
                {
                    if (Compare(op1->gtOp.gtOp1, op2->gtOp.gtOp2, swapOK))
                    {
                        op1 = op1->gtOp.gtOp2;
                        op2 = op2->gtOp.gtOp1;
                        goto AGAIN;
                    }
                }

                return false;
            }

            // Iterate (rather than recurse) on the second operands.
            op1 = op1->gtOp.gtOp2;
            op2 = op2->gtOp.gtOp2;

            goto AGAIN;
        }
        else
        {

            op1 = op1->gtOp.gtOp1;
            op2 = op2->gtOp.gtOp1;

            if (!op1)
            {
                return (op2 == nullptr);
            }
            if (!op2)
            {
                return false;
            }

            goto AGAIN;
        }
    }

    /* See what kind of a special operator we have here */

    switch (oper)
    {
        case GT_FIELD:
            if (op1->gtField.gtFldHnd != op2->gtField.gtFldHnd)
            {
                break;
            }

            // Equal handles: the trees match if both have no object operand,
            // or both have object operands that compare equal (iteratively).
            op1 = op1->gtField.gtFldObj;
            op2 = op2->gtField.gtFldObj;

            if (op1 || op2)
            {
                if (op1 && op2)
                {
                    goto AGAIN;
                }
            }

            return true;

        case GT_CALL:

            if (op1->gtCall.gtCallType != op2->gtCall.gtCallType)
            {
                return false;
            }

            if (op1->gtCall.gtCallType != CT_INDIRECT)
            {
                if (op1->gtCall.gtCallMethHnd != op2->gtCall.gtCallMethHnd)
                {
                    return false;
                }

#ifdef FEATURE_READYTORUN_COMPILER
                if (op1->gtCall.gtEntryPoint.addr != op2->gtCall.gtEntryPoint.addr)
                {
                    return false;
                }
#endif
            }
            else
            {
                if (!Compare(op1->gtCall.gtCallAddr, op2->gtCall.gtCallAddr))
                {
                    return false;
                }
            }

            // All four operand lists of the call must match.
            if (Compare(op1->gtCall.gtCallLateArgs, op2->gtCall.gtCallLateArgs) &&
                Compare(op1->gtCall.gtCallArgs, op2->gtCall.gtCallArgs) &&
                Compare(op1->gtCall.gtControlExpr, op2->gtCall.gtControlExpr) &&
                Compare(op1->gtCall.gtCallObjp, op2->gtCall.gtCallObjp))
            {
                return true;
            }
            break;

        case GT_ARR_ELEM:

            if (op1->gtArrElem.gtArrRank != op2->gtArrElem.gtArrRank)
            {
                return false;
            }

            // NOTE: gtArrElemSize may need to be handled

            unsigned dim;
            for (dim = 0; dim < op1->gtArrElem.gtArrRank; dim++)
            {
                if (!Compare(op1->gtArrElem.gtArrInds[dim], op2->gtArrElem.gtArrInds[dim]))
                {
                    return false;
                }
            }

            // Iterate on the array object operand.
            op1 = op1->gtArrElem.gtArrObj;
            op2 = op2->gtArrElem.gtArrObj;
            goto AGAIN;

        case GT_ARR_OFFSET:
            if (op1->gtArrOffs.gtCurrDim != op2->gtArrOffs.gtCurrDim ||
                op1->gtArrOffs.gtArrRank != op2->gtArrOffs.gtArrRank)
            {
                return false;
            }
            return (Compare(op1->gtArrOffs.gtOffset, op2->gtArrOffs.gtOffset) &&
                    Compare(op1->gtArrOffs.gtIndex, op2->gtArrOffs.gtIndex) &&
                    Compare(op1->gtArrOffs.gtArrObj, op2->gtArrOffs.gtArrObj));

        case GT_CMPXCHG:
            return Compare(op1->gtCmpXchg.gtOpLocation, op2->gtCmpXchg.gtOpLocation) &&
                   Compare(op1->gtCmpXchg.gtOpValue, op2->gtCmpXchg.gtOpValue) &&
                   Compare(op1->gtCmpXchg.gtOpComparand, op2->gtCmpXchg.gtOpComparand);

        case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
        case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
            return Compare(op1->gtBoundsChk.gtIndex, op2->gtBoundsChk.gtIndex) &&
                   Compare(op1->gtBoundsChk.gtArrLen, op2->gtBoundsChk.gtArrLen) &&
                   (op1->gtBoundsChk.gtThrowKind == op2->gtBoundsChk.gtThrowKind);

        case GT_STORE_DYN_BLK:
        case GT_DYN_BLK:
            return Compare(op1->gtDynBlk.Addr(), op2->gtDynBlk.Addr()) &&
                   Compare(op1->gtDynBlk.Data(), op2->gtDynBlk.Data()) &&
                   Compare(op1->gtDynBlk.gtDynamicSize, op2->gtDynBlk.gtDynamicSize);

        default:
            assert(!"unexpected operator");
    }

    return false;
}
1512
/*****************************************************************************
 *
 *  Returns non-zero if the given tree contains a use of a local #lclNum.
 *
 *  Note: 'lclNum' is an ssize_t because for GT_FIELD nodes it is compared
 *  against the field handle rather than a local variable number.
 *  When 'defOnly' is true, plain uses are ignored and only definitions
 *  (targets of GT_ASG) are counted.
 *  Recursion is converted to iteration via the AGAIN label where possible.
 */

bool Compiler::gtHasRef(GenTree* tree, ssize_t lclNum, bool defOnly)
{
    genTreeOps oper;
    unsigned   kind;

AGAIN:

    assert(tree);

    oper = tree->OperGet();
    kind = tree->OperKind();

    assert(oper != GT_STMT);

    /* Is this a constant node? */

    if (kind & GTK_CONST)
    {
        return false;
    }

    /* Is this a leaf node? */

    if (kind & GTK_LEAF)
    {
        if (oper == GT_LCL_VAR)
        {
            // A plain local-var reference is a use, so it only counts when
            // we are not restricted to definitions.
            if (tree->gtLclVarCommon.gtLclNum == (unsigned)lclNum)
            {
                if (!defOnly)
                {
                    return true;
                }
            }
        }
        else if (oper == GT_RET_EXPR)
        {
            // Look through the return expression placeholder into the
            // inline candidate's tree.
            return gtHasRef(tree->gtRetExpr.gtInlineCandidate, lclNum, defOnly);
        }

        return false;
    }

    /* Is it a 'simple' unary/binary operator? */

    if (kind & GTK_SMPOP)
    {
        // Recurse on op1 and iterate on op2 (when present) to bound the
        // recursion depth.
        if (tree->gtGetOp2IfPresent())
        {
            if (gtHasRef(tree->gtOp.gtOp1, lclNum, defOnly))
            {
                return true;
            }

            tree = tree->gtOp.gtOp2;
            goto AGAIN;
        }
        else
        {
            tree = tree->gtOp.gtOp1;

            if (!tree)
            {
                return false;
            }

            if (oper == GT_ASG)
            {
                // 'tree' is the gtOp1 of an assignment node. So we can handle
                // the case where defOnly is either true or false.

                if (tree->gtOper == GT_LCL_VAR && tree->gtLclVarCommon.gtLclNum == (unsigned)lclNum)
                {
                    return true;
                }
                else if (tree->gtOper == GT_FIELD && lclNum == (ssize_t)tree->gtField.gtFldHnd)
                {
                    return true;
                }
            }

            goto AGAIN;
        }
    }

    /* See what kind of a special operator we have here */

    switch (oper)
    {
        case GT_FIELD:
            // Here 'lclNum' is interpreted as a field handle (see header note).
            if (lclNum == (ssize_t)tree->gtField.gtFldHnd)
            {
                if (!defOnly)
                {
                    return true;
                }
            }

            tree = tree->gtField.gtFldObj;
            if (tree)
            {
                goto AGAIN;
            }
            break;

        case GT_CALL:

            if (tree->gtCall.gtCallObjp)
            {
                if (gtHasRef(tree->gtCall.gtCallObjp, lclNum, defOnly))
                {
                    return true;
                }
            }

            if (tree->gtCall.gtCallArgs)
            {
                if (gtHasRef(tree->gtCall.gtCallArgs, lclNum, defOnly))
                {
                    return true;
                }
            }

            if (tree->gtCall.gtCallLateArgs)
            {
                if (gtHasRef(tree->gtCall.gtCallLateArgs, lclNum, defOnly))
                {
                    return true;
                }
            }

            if (tree->gtCall.gtControlExpr)
            {
                if (gtHasRef(tree->gtCall.gtControlExpr, lclNum, defOnly))
                {
                    return true;
                }
            }

            if (tree->gtCall.gtCallType == CT_INDIRECT)
            {
                // pinvoke-calli cookie is a constant, or constant indirection
                assert(tree->gtCall.gtCallCookie == nullptr || tree->gtCall.gtCallCookie->gtOper == GT_CNS_INT ||
                       tree->gtCall.gtCallCookie->gtOper == GT_IND);

                tree = tree->gtCall.gtCallAddr;
            }
            else
            {
                tree = nullptr;
            }

            if (tree)
            {
                goto AGAIN;
            }

            break;

        case GT_ARR_ELEM:
            if (gtHasRef(tree->gtArrElem.gtArrObj, lclNum, defOnly))
            {
                return true;
            }

            unsigned dim;
            for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
            {
                if (gtHasRef(tree->gtArrElem.gtArrInds[dim], lclNum, defOnly))
                {
                    return true;
                }
            }

            break;

        case GT_ARR_OFFSET:
            if (gtHasRef(tree->gtArrOffs.gtOffset, lclNum, defOnly) ||
                gtHasRef(tree->gtArrOffs.gtIndex, lclNum, defOnly) ||
                gtHasRef(tree->gtArrOffs.gtArrObj, lclNum, defOnly))
            {
                return true;
            }
            break;

        case GT_CMPXCHG:
            if (gtHasRef(tree->gtCmpXchg.gtOpLocation, lclNum, defOnly))
            {
                return true;
            }
            if (gtHasRef(tree->gtCmpXchg.gtOpValue, lclNum, defOnly))
            {
                return true;
            }
            if (gtHasRef(tree->gtCmpXchg.gtOpComparand, lclNum, defOnly))
            {
                return true;
            }
            break;

        case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
        case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
            if (gtHasRef(tree->gtBoundsChk.gtIndex, lclNum, defOnly))
            {
                return true;
            }
            if (gtHasRef(tree->gtBoundsChk.gtArrLen, lclNum, defOnly))
            {
                return true;
            }
            break;

        case GT_STORE_DYN_BLK:
            // The store form additionally has a data operand; then it shares
            // the address/size checks with GT_DYN_BLK below.
            if (gtHasRef(tree->gtDynBlk.Data(), lclNum, defOnly))
            {
                return true;
            }
            __fallthrough;
        case GT_DYN_BLK:
            if (gtHasRef(tree->gtDynBlk.Addr(), lclNum, defOnly))
            {
                return true;
            }
            if (gtHasRef(tree->gtDynBlk.gtDynamicSize, lclNum, defOnly))
            {
                return true;
            }
            break;

        default:
#ifdef DEBUG
            gtDispTree(tree);
#endif
            assert(!"unexpected operator");
    }

    return false;
}
1761
// AddrTakenDsc: callback state for the gtHasLocalsWithAddrOp tree walk.
struct AddrTakenDsc
{
    Compiler* comp;            // compiler instance (set by gtHasLocalsWithAddrOp)
    bool      hasAddrTakenLcl; // out: set true when the walk finds a local with
                               // lvHasLdAddrOp or lvAddrExposed
};
1767
1768 /* static */
1769 Compiler::fgWalkResult Compiler::gtHasLocalsWithAddrOpCB(GenTree** pTree, fgWalkData* data)
1770 {
1771     GenTree*  tree = *pTree;
1772     Compiler* comp = data->compiler;
1773
1774     if (tree->gtOper == GT_LCL_VAR)
1775     {
1776         unsigned   lclNum = tree->gtLclVarCommon.gtLclNum;
1777         LclVarDsc* varDsc = &comp->lvaTable[lclNum];
1778
1779         if (varDsc->lvHasLdAddrOp || varDsc->lvAddrExposed)
1780         {
1781             ((AddrTakenDsc*)data->pCallbackData)->hasAddrTakenLcl = true;
1782             return WALK_ABORT;
1783         }
1784     }
1785
1786     return WALK_CONTINUE;
1787 }
1788
1789 /*****************************************************************************
1790  *
1791  *  Return true if this tree contains locals with lvHasLdAddrOp or lvAddrExposed
1792  *  flag(s) set.
1793  */
1794
1795 bool Compiler::gtHasLocalsWithAddrOp(GenTree* tree)
1796 {
1797     AddrTakenDsc desc;
1798
1799     desc.comp            = this;
1800     desc.hasAddrTakenLcl = false;
1801
1802     fgWalkTreePre(&tree, gtHasLocalsWithAddrOpCB, &desc);
1803
1804     return desc.hasAddrTakenLcl;
1805 }
1806
1807 #ifdef DEBUG
1808
1809 /*****************************************************************************
1810  *
1811  *  Helper used to compute hash values for trees.
1812  */
1813
// genTreeHashAdd: fold a new 32-bit value into a running tree-hash.
// Scales the old hash by 1.5 (old + old/2) and XORs in the new value.
inline unsigned genTreeHashAdd(unsigned old, unsigned add)
{
    const unsigned scaled = old + (old >> 1);
    return scaled ^ add;
}
1818
1819 inline unsigned genTreeHashAdd(unsigned old, void* add)
1820 {
1821     return genTreeHashAdd(old, (unsigned)(size_t)add);
1822 }
1823
1824 /*****************************************************************************
1825  *
1826  *  Given an arbitrary expression tree, compute a hash value for it.
1827  */
1828
unsigned Compiler::gtHashValue(GenTree* tree)
{
    genTreeOps oper;
    unsigned   kind;

    // Running hash; every visited node's operator (and relevant payload) is mixed in.
    unsigned hash = 0;

    GenTree* temp;

// One operand of each node is followed iteratively via 'goto AGAIN' (avoiding
// recursion along a chain); all other operands recurse through gtHashValue().
AGAIN:
    assert(tree);
    assert(tree->gtOper != GT_STMT);

    /* Figure out what kind of a node we have */

    oper = tree->OperGet();
    kind = tree->OperKind();

    /* Include the operator value in the hash */

    hash = genTreeHashAdd(hash, oper);

    /* Is this a constant or leaf node? */

    if (kind & (GTK_CONST | GTK_LEAF))
    {
        size_t add;

        switch (oper)
        {
            UINT64 bits;
            case GT_LCL_VAR:
                add = tree->gtLclVar.gtLclNum;
                break;
            case GT_LCL_FLD:
                hash = genTreeHashAdd(hash, tree->gtLclFld.gtLclNum);
                add  = tree->gtLclFld.gtLclOffs;
                break;

            case GT_CNS_INT:
                add = tree->gtIntCon.gtIconVal;
                break;
            case GT_CNS_LNG:
                bits = (UINT64)tree->gtLngCon.gtLconVal;
#ifdef _HOST_64BIT_
                add = bits;
#else // 32-bit host
                add = genTreeHashAdd(uhi32(bits), ulo32(bits));
#endif
                break;
            case GT_CNS_DBL:
                // Hash the raw bit pattern of the double, not its numeric value.
                bits = *(UINT64*)(&tree->gtDblCon.gtDconVal);
#ifdef _HOST_64BIT_
                add = bits;
#else // 32-bit host
                add = genTreeHashAdd(uhi32(bits), ulo32(bits));
#endif
                break;
            case GT_CNS_STR:
                add = tree->gtStrCon.gtSconCPX;
                break;

            case GT_JMP:
                add = tree->gtVal.gtVal1;
                break;

            default:
                // Leaf nodes with no distinguishing payload contribute only their operator.
                add = 0;
                break;
        }

        // clang-format off
        // narrow 'add' into a 32-bit 'val'
        unsigned val;
#ifdef _HOST_64BIT_
        val = genTreeHashAdd(uhi32(add), ulo32(add));
#else // 32-bit host
        val = add;
#endif
        // clang-format on

        hash = genTreeHashAdd(hash, val);
        goto DONE;
    }

    /* Is it a 'simple' unary/binary operator? */

    GenTree* op1;

    if (kind & GTK_UNOP)
    {
        op1 = tree->gtOp.gtOp1;
        /* Special case: no sub-operand at all */

        if (GenTree::IsExOp(kind))
        {
            // ExOp operators extend operators with extra, non-GenTree* members.  In many cases,
            // these should be included in the hash code.
            switch (oper)
            {
                case GT_ARR_LENGTH:
                    hash += tree->gtArrLen.ArrLenOffset();
                    break;
                case GT_CAST:
                    hash ^= tree->gtCast.gtCastType;
                    break;
                case GT_INDEX:
                    hash += tree->gtIndex.gtIndElemSize;
                    break;
                case GT_INDEX_ADDR:
                    hash += tree->AsIndexAddr()->gtElemSize;
                    break;
                case GT_ALLOCOBJ:
                    hash = genTreeHashAdd(hash, static_cast<unsigned>(
                                                    reinterpret_cast<uintptr_t>(tree->gtAllocObj.gtAllocObjClsHnd)));
                    hash = genTreeHashAdd(hash, tree->gtAllocObj.gtNewHelper);
                    break;
                case GT_RUNTIMELOOKUP:
                    hash =
                        genTreeHashAdd(hash,
                                       static_cast<unsigned>(reinterpret_cast<uintptr_t>(tree->gtRuntimeLookup.gtHnd)));
                    break;

                case GT_OBJ:
                    hash =
                        genTreeHashAdd(hash, static_cast<unsigned>(reinterpret_cast<uintptr_t>(tree->gtObj.gtClass)));
                    break;
                // For the ones below no extra argument matters for comparison.
                case GT_BOX:
                    break;

                default:
                    assert(!"unexpected unary ExOp operator");
            }
        }

        if (!op1)
        {
            goto DONE;
        }

        // Continue iteratively with the single operand.
        tree = op1;
        goto AGAIN;
    }

    if (kind & GTK_BINOP)
    {
        if (GenTree::IsExOp(kind))
        {
            // ExOp operators extend operators with extra, non-GenTree* members.  In many cases,
            // these should be included in the hash code.
            switch (oper)
            {
                case GT_INTRINSIC:
                    hash += tree->gtIntrinsic.gtIntrinsicId;
                    break;
                case GT_LEA:
                    hash += static_cast<unsigned>(tree->gtAddrMode.Offset() << 3) + tree->gtAddrMode.gtScale;
                    break;

                case GT_BLK:
                case GT_STORE_BLK:
                    hash += tree->gtBlk.gtBlkSize;
                    break;

                case GT_OBJ:
                case GT_STORE_OBJ:
                    hash ^= PtrToUlong(tree->AsObj()->gtClass);
                    break;

                case GT_DYN_BLK:
                case GT_STORE_DYN_BLK:
                    hash += gtHashValue(tree->AsDynBlk()->gtDynamicSize);
                    break;

                // For the ones below no extra argument matters for comparison.
                case GT_ARR_INDEX:
                case GT_QMARK:
                case GT_INDEX:
                case GT_INDEX_ADDR:
                    break;

#ifdef FEATURE_SIMD
                case GT_SIMD:
                    hash += tree->gtSIMD.gtSIMDIntrinsicID;
                    hash += tree->gtSIMD.gtSIMDBaseType;
                    hash += tree->gtSIMD.gtSIMDSize;
                    break;
#endif // FEATURE_SIMD

#ifdef FEATURE_HW_INTRINSICS
                case GT_HWIntrinsic:
                    hash += tree->gtHWIntrinsic.gtHWIntrinsicId;
                    hash += tree->gtHWIntrinsic.gtSIMDBaseType;
                    hash += tree->gtHWIntrinsic.gtSIMDSize;
                    hash += tree->gtHWIntrinsic.gtIndexBaseType;
                    break;
#endif // FEATURE_HW_INTRINSICS

                default:
                    assert(!"unexpected binary ExOp operator");
            }
        }

        op1          = tree->gtOp.gtOp1;
        GenTree* op2 = tree->gtOp.gtOp2;

        /* Is there a second sub-operand? */

        if (!op2)
        {
            /* Special case: no sub-operands at all */

            if (!op1)
            {
                goto DONE;
            }

            /* This is a unary operator */

            tree = op1;
            goto AGAIN;
        }

        /* This is a binary operator */

        // Recurse on op1, then continue iteratively with op2.
        unsigned hsh1 = gtHashValue(op1);

        /* Add op1's hash to the running value and continue with op2 */

        hash = genTreeHashAdd(hash, hsh1);

        tree = op2;
        goto AGAIN;
    }

    /* See what kind of a special operator we have here */
    switch (tree->gtOper)
    {
        case GT_FIELD:
            if (tree->gtField.gtFldObj)
            {
                temp = tree->gtField.gtFldObj;
                assert(temp);
                hash = genTreeHashAdd(hash, gtHashValue(temp));
            }
            break;

        // NOTE(review): GT_STMT is asserted against at AGAIN above, so this case
        // looks unreachable from a top-level call - confirm whether it is legacy.
        case GT_STMT:
            temp = tree->gtStmt.gtStmtExpr;
            assert(temp);
            hash = genTreeHashAdd(hash, gtHashValue(temp));
            break;

        case GT_ARR_ELEM:

            hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrElem.gtArrObj));

            unsigned dim;
            for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
            {
                hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrElem.gtArrInds[dim]));
            }

            break;

        case GT_ARR_OFFSET:
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrOffs.gtOffset));
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrOffs.gtIndex));
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrOffs.gtArrObj));
            break;

        case GT_CALL:

            if (tree->gtCall.gtCallObjp && tree->gtCall.gtCallObjp->gtOper != GT_NOP)
            {
                temp = tree->gtCall.gtCallObjp;
                assert(temp);
                hash = genTreeHashAdd(hash, gtHashValue(temp));
            }

            if (tree->gtCall.gtCallArgs)
            {
                temp = tree->gtCall.gtCallArgs;
                assert(temp);
                hash = genTreeHashAdd(hash, gtHashValue(temp));
            }

            if (tree->gtCall.gtCallType == CT_INDIRECT)
            {
                temp = tree->gtCall.gtCallAddr;
                assert(temp);
                hash = genTreeHashAdd(hash, gtHashValue(temp));
            }
            else
            {
                // Direct call: mix in the callee handle instead of an address tree.
                hash = genTreeHashAdd(hash, tree->gtCall.gtCallMethHnd);
            }

            if (tree->gtCall.gtCallLateArgs)
            {
                temp = tree->gtCall.gtCallLateArgs;
                assert(temp);
                hash = genTreeHashAdd(hash, gtHashValue(temp));
            }
            break;

        case GT_CMPXCHG:
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtCmpXchg.gtOpLocation));
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtCmpXchg.gtOpValue));
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtCmpXchg.gtOpComparand));
            break;

        case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
        case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtBoundsChk.gtIndex));
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtBoundsChk.gtArrLen));
            hash = genTreeHashAdd(hash, tree->gtBoundsChk.gtThrowKind);
            break;

        case GT_STORE_DYN_BLK:
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtDynBlk.Data()));
            __fallthrough;
        case GT_DYN_BLK:
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtDynBlk.Addr()));
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtDynBlk.gtDynamicSize));
            break;

        default:
#ifdef DEBUG
            gtDispTree(tree);
#endif
            assert(!"unexpected operator");
            break;
    }

DONE:

    return hash;
}
2174
2175 #endif // DEBUG
2176
2177 /*****************************************************************************
2178  *
2179  *  Return a relational operator that is the reverse of the given one.
2180  */
2181
2182 /* static */
2183 genTreeOps GenTree::ReverseRelop(genTreeOps relop)
2184 {
2185     static const genTreeOps reverseOps[] = {
2186         GT_NE,      // GT_EQ
2187         GT_EQ,      // GT_NE
2188         GT_GE,      // GT_LT
2189         GT_GT,      // GT_LE
2190         GT_LT,      // GT_GE
2191         GT_LE,      // GT_GT
2192         GT_TEST_NE, // GT_TEST_EQ
2193         GT_TEST_EQ, // GT_TEST_NE
2194     };
2195
2196     assert(reverseOps[GT_EQ - GT_EQ] == GT_NE);
2197     assert(reverseOps[GT_NE - GT_EQ] == GT_EQ);
2198
2199     assert(reverseOps[GT_LT - GT_EQ] == GT_GE);
2200     assert(reverseOps[GT_LE - GT_EQ] == GT_GT);
2201     assert(reverseOps[GT_GE - GT_EQ] == GT_LT);
2202     assert(reverseOps[GT_GT - GT_EQ] == GT_LE);
2203
2204     assert(reverseOps[GT_TEST_EQ - GT_EQ] == GT_TEST_NE);
2205     assert(reverseOps[GT_TEST_NE - GT_EQ] == GT_TEST_EQ);
2206
2207     assert(OperIsCompare(relop));
2208     assert(relop >= GT_EQ && (unsigned)(relop - GT_EQ) < sizeof(reverseOps));
2209
2210     return reverseOps[relop - GT_EQ];
2211 }
2212
2213 /*****************************************************************************
2214  *
2215  *  Return a relational operator that will work for swapped operands.
2216  */
2217
2218 /* static */
2219 genTreeOps GenTree::SwapRelop(genTreeOps relop)
2220 {
2221     static const genTreeOps swapOps[] = {
2222         GT_EQ,      // GT_EQ
2223         GT_NE,      // GT_NE
2224         GT_GT,      // GT_LT
2225         GT_GE,      // GT_LE
2226         GT_LE,      // GT_GE
2227         GT_LT,      // GT_GT
2228         GT_TEST_EQ, // GT_TEST_EQ
2229         GT_TEST_NE, // GT_TEST_NE
2230     };
2231
2232     assert(swapOps[GT_EQ - GT_EQ] == GT_EQ);
2233     assert(swapOps[GT_NE - GT_EQ] == GT_NE);
2234
2235     assert(swapOps[GT_LT - GT_EQ] == GT_GT);
2236     assert(swapOps[GT_LE - GT_EQ] == GT_GE);
2237     assert(swapOps[GT_GE - GT_EQ] == GT_LE);
2238     assert(swapOps[GT_GT - GT_EQ] == GT_LT);
2239
2240     assert(swapOps[GT_TEST_EQ - GT_EQ] == GT_TEST_EQ);
2241     assert(swapOps[GT_TEST_NE - GT_EQ] == GT_TEST_NE);
2242
2243     assert(OperIsCompare(relop));
2244     assert(relop >= GT_EQ && (unsigned)(relop - GT_EQ) < sizeof(swapOps));
2245
2246     return swapOps[relop - GT_EQ];
2247 }
2248
2249 /*****************************************************************************
2250  *
2251  *  Reverse the meaning of the given test condition.
2252  */
2253
2254 GenTree* Compiler::gtReverseCond(GenTree* tree)
2255 {
2256     if (tree->OperIsCompare())
2257     {
2258         tree->SetOper(GenTree::ReverseRelop(tree->OperGet()));
2259
2260         // Flip the GTF_RELOP_NAN_UN bit
2261         //     a ord b   === (a != NaN && b != NaN)
2262         //     a unord b === (a == NaN || b == NaN)
2263         // => !(a ord b) === (a unord b)
2264         if (varTypeIsFloating(tree->gtOp.gtOp1->TypeGet()))
2265         {
2266             tree->gtFlags ^= GTF_RELOP_NAN_UN;
2267         }
2268     }
2269     else if (tree->OperIs(GT_JCC, GT_SETCC))
2270     {
2271         GenTreeCC* cc   = tree->AsCC();
2272         cc->gtCondition = GenCondition::Reverse(cc->gtCondition);
2273     }
2274     else if (tree->OperIs(GT_JCMP))
2275     {
2276         // Flip the GTF_JCMP_EQ
2277         //
2278         // This causes switching
2279         //     cbz <=> cbnz
2280         //     tbz <=> tbnz
2281         tree->gtFlags ^= GTF_JCMP_EQ;
2282     }
2283     else
2284     {
2285         tree = gtNewOperNode(GT_NOT, TYP_INT, tree);
2286     }
2287
2288     return tree;
2289 }
2290
2291 /*****************************************************************************/
2292
2293 #ifdef DEBUG
2294
2295 bool GenTree::gtIsValid64RsltMul()
2296 {
2297     if ((gtOper != GT_MUL) || !(gtFlags & GTF_MUL_64RSLT))
2298     {
2299         return false;
2300     }
2301
2302     GenTree* op1 = gtOp.gtOp1;
2303     GenTree* op2 = gtOp.gtOp2;
2304
2305     if (TypeGet() != TYP_LONG || op1->TypeGet() != TYP_LONG || op2->TypeGet() != TYP_LONG)
2306     {
2307         return false;
2308     }
2309
2310     if (gtOverflow())
2311     {
2312         return false;
2313     }
2314
2315     // op1 has to be conv.i8(i4Expr)
2316     if ((op1->gtOper != GT_CAST) || (genActualType(op1->CastFromType()) != TYP_INT))
2317     {
2318         return false;
2319     }
2320
2321     // op2 has to be conv.i8(i4Expr)
2322     if ((op2->gtOper != GT_CAST) || (genActualType(op2->CastFromType()) != TYP_INT))
2323     {
2324         return false;
2325     }
2326
2327     // The signedness of both casts must be the same
2328     if (((op1->gtFlags & GTF_UNSIGNED) != 0) != ((op2->gtFlags & GTF_UNSIGNED) != 0))
2329     {
2330         return false;
2331     }
2332
2333     // Do unsigned mul iff both the casts are unsigned
2334     if (((op1->gtFlags & GTF_UNSIGNED) != 0) != ((gtFlags & GTF_UNSIGNED) != 0))
2335     {
2336         return false;
2337     }
2338
2339     return true;
2340 }
2341
2342 #endif // DEBUG
2343
//------------------------------------------------------------------------------
// gtSetListOrder : Figure out the evaluation order for a list of values.
//
//
// Arguments:
//    list  - List to figure out the evaluation order for
//    isListCallArgs - True iff the list is a list of call arguments
//    callArgsInRegs -  True iff the list is a list of call arguments and they are passed in registers
//
// Return Value:
//    The evaluation 'level' computed for the list. As a side effect, the
//    gtCostEx/gtCostSz fields of each list node are set (via SetCosts).

unsigned Compiler::gtSetListOrder(GenTree* list, bool isListCallArgs, bool callArgsInRegs)
{
    assert((list != nullptr) && list->OperIsAnyList());
    assert(!callArgsInRegs || isListCallArgs);

    ArrayStack<GenTree*> listNodes(getAllocator(CMK_ArrayStack));

    // Push the list nodes front-to-back so they can be processed back-to-front:
    // levels and costs propagate from the tail of the list toward the head.
    do
    {
        listNodes.Push(list);
        list = list->gtOp.gtOp2;
    } while ((list != nullptr) && (list->OperIsAnyList()));

    unsigned nxtlvl = (list == nullptr) ? 0 : gtSetEvalOrder(list);
    while (!listNodes.Empty())
    {
        list = listNodes.Pop();
        assert(list && list->OperIsAnyList());
        GenTree* next = list->gtOp.gtOp2;

        unsigned level = 0;

        // TODO: Do we have to compute costs differently for argument lists and
        // all other lists?
        // https://github.com/dotnet/coreclr/issues/7095
        unsigned costSz = (isListCallArgs || (next == nullptr)) ? 0 : 1;
        unsigned costEx = (isListCallArgs || (next == nullptr)) ? 0 : 1;

        if (next != nullptr)
        {
            if (isListCallArgs)
            {
                if (level < nxtlvl)
                {
                    level = nxtlvl;
                }
            }
            costEx += next->gtCostEx;
            costSz += next->gtCostSz;
        }

        GenTree* op1 = list->gtOp.gtOp1;
        unsigned lvl = gtSetEvalOrder(op1);

        // Swap the level counts
        if (list->gtFlags & GTF_REVERSE_OPS)
        {
            unsigned tmpl;

            tmpl   = lvl;
            lvl    = nxtlvl;
            nxtlvl = tmpl;
        }

        // TODO: Do we have to compute levels differently for argument lists and
        // all other lists?
        // https://github.com/dotnet/coreclr/issues/7095
        if (isListCallArgs)
        {
            if (level < lvl)
            {
                level = lvl;
            }
        }
        else
        {
            if (lvl < 1)
            {
                level = nxtlvl;
            }
            else if (lvl == nxtlvl)
            {
                // Equal levels on both sides: evaluating either first needs
                // one more temporary, so bump the level.
                level = lvl + 1;
            }
            else
            {
                level = lvl;
            }
        }

        if (op1->gtCostEx != 0)
        {
            costEx += op1->gtCostEx;
            costEx += (callArgsInRegs || !isListCallArgs) ? 0 : IND_COST_EX;
        }

        if (op1->gtCostSz != 0)
        {
            costSz += op1->gtCostSz;
#ifdef _TARGET_XARCH_
            if (callArgsInRegs) // push is smaller than mov to reg
#endif
            {
                costSz += 1;
            }
        }

        list->SetCosts(costEx, costSz);

        nxtlvl = level;
    }

    return nxtlvl;
}
2460
2461 //-----------------------------------------------------------------------------
2462 // gtWalkOp: Traverse and mark an address expression
2463 //
2464 // Arguments:
2465 //    op1WB - An out parameter which is either the address expression, or one
2466 //            of its operands.
2467 //    op2WB - An out parameter which starts as either null or one of the operands
2468 //            of the address expression.
2469 //    base  - The base address of the addressing mode, or null if 'constOnly' is false
2470 //    constOnly - True if we will only traverse into ADDs with constant op2.
2471 //
2472 // This routine is a helper routine for gtSetEvalOrder() and is used to identify the
2473 // base and index nodes, which will be validated against those identified by
2474 // genCreateAddrMode().
2475 // It also marks the ADD nodes involved in the address expression with the
2476 // GTF_ADDRMODE_NO_CSE flag which prevents them from being considered for CSE's.
2477 //
2478 // Its two output parameters are modified under the following conditions:
2479 //
2480 // It is called once with the original address expression as 'op1WB', and
2481 // with 'constOnly' set to false. On this first invocation, *op1WB is always
2482 // an ADD node, and it will consider the operands of the ADD even if its op2 is
2483 // not a constant. However, when it encounters a non-constant or the base in the
2484 // op2 position, it stops iterating. That operand is returned in the 'op2WB' out
2485 // parameter, and will be considered on the third invocation of this method if
2486 // it is an ADD.
2487 //
2488 // It is called the second time with the two operands of the original expression, in
2489 // the original order, and the third time in reverse order. For these invocations
2490 // 'constOnly' is true, so it will only traverse cascaded ADD nodes if they have a
2491 // constant op2.
2492 //
2493 // The result, after three invocations, is that the values of the two out parameters
2494 // correspond to the base and index in some fashion. This method doesn't attempt
2495 // to determine or validate the scale or offset, if any.
2496 //
2497 // Assumptions (presumed to be ensured by genCreateAddrMode()):
2498 //    If an ADD has a constant operand, it is in the op2 position.
2499 //
2500 // Notes:
2501 //    This method, and its invocation sequence, are quite confusing, and since they
2502 //    were not originally well-documented, this specification is a possibly-imperfect
2503 //    reconstruction.
2504 //    The motivation for the handling of the NOP case is unclear.
2505 //    Note that 'op2WB' is only modified in the initial (!constOnly) case,
2506 //    or if a NOP is encountered in the op1 position.
2507 //
void Compiler::gtWalkOp(GenTree** op1WB, GenTree** op2WB, GenTree* base, bool constOnly)
{
    GenTree* op1 = *op1WB;
    GenTree* op2 = *op2WB;

    // Look through any value-preserving wrapper nodes (see gtEffectiveVal).
    op1 = op1->gtEffectiveVal();

    // Now we look for op1's with non-overflow GT_ADDs [of constants]
    while ((op1->gtOper == GT_ADD) && (!op1->gtOverflow()) && (!constOnly || (op1->gtOp.gtOp2->IsCnsIntOrI())))
    {
        // mark it with GTF_ADDRMODE_NO_CSE
        op1->gtFlags |= GTF_ADDRMODE_NO_CSE;

        if (!constOnly)
        {
            op2 = op1->gtOp.gtOp2;
        }
        // Descend into the ADD's first operand.
        op1 = op1->gtOp.gtOp1;

        // If op1 is a GT_NOP then swap op1 and op2.
        // (Why? Also, presumably op2 is not a GT_NOP in this case?)
        if (op1->gtOper == GT_NOP)
        {
            GenTree* tmp;

            tmp = op1;
            op1 = op2;
            op2 = tmp;
        }

        // On the initial (!constOnly) invocation, stop once op2 is the base
        // or a non-constant; that operand is handed back via *op2WB.
        if (!constOnly && ((op2 == base) || (!op2->IsCnsIntOrI())))
        {
            break;
        }

        op1 = op1->gtEffectiveVal();
    }

    // Publish the final traversal positions to the caller.
    *op1WB = op1;
    *op2WB = op2;
}
2549
2550 #ifdef DEBUG
2551 /*****************************************************************************
2552  * This is a workaround. It is to help implement an assert in gtSetEvalOrder() that the values
2553  * gtWalkOp() leaves in op1 and op2 correspond with the values of adr, idx, mul, and cns
2554  * that are returned by genCreateAddrMode(). It's essentially impossible to determine
2555  * what gtWalkOp() *should* return for all possible trees. This simply loosens one assert
2556  * to handle the following case:
2557
2558          indir     int
2559                     const(h)  int    4 field
2560                  +         byref
2561                     lclVar    byref  V00 this               <-- op2
2562               comma     byref                           <-- adr (base)
2563                  indir     byte
2564                     lclVar    byref  V00 this
2565            +         byref
2566                  const     int    2                     <-- mul == 4
2567               <<        int                                 <-- op1
2568                  lclVar    int    V01 arg1              <-- idx
2569
2570  * Here, we are planning to generate the address mode [edx+4*eax], where eax = idx and edx = the GT_COMMA expression.
2571  * To check adr equivalence with op2, we need to walk down the GT_ADD tree just like gtWalkOp() does.
2572  */
2573 GenTree* Compiler::gtWalkOpEffectiveVal(GenTree* op)
2574 {
2575     for (;;)
2576     {
2577         op = op->gtEffectiveVal();
2578
2579         if ((op->gtOper != GT_ADD) || op->gtOverflow() || !op->gtOp.gtOp2->IsCnsIntOrI())
2580         {
2581             break;
2582         }
2583
2584         op = op->gtOp.gtOp1;
2585     }
2586
2587     return op;
2588 }
2589 #endif // DEBUG
2590
2591 /*****************************************************************************
2592  *
2593  *  Given a tree, set the gtCostEx and gtCostSz fields which
2594  *  are used to measure the relative costs of the codegen of the tree
2595  *
2596  */
2597
// gtPrepareCost: Populate the gtCostEx/gtCostSz fields of 'tree' by running
// gtSetEvalOrder over it.
void Compiler::gtPrepareCost(GenTree* tree)
{
    gtSetEvalOrder(tree);
}
2602
2603 bool Compiler::gtIsLikelyRegVar(GenTree* tree)
2604 {
2605     if (tree->gtOper != GT_LCL_VAR)
2606     {
2607         return false;
2608     }
2609
2610     assert(tree->gtLclVar.gtLclNum < lvaTableCnt);
2611     LclVarDsc* varDsc = lvaTable + tree->gtLclVar.gtLclNum;
2612
2613     if (varDsc->lvDoNotEnregister)
2614     {
2615         return false;
2616     }
2617
2618     // Be pessimistic if ref counts are not yet set up.
2619     //
2620     // Perhaps we should be optimistic though.
2621     // See notes in GitHub issue 18969.
2622     if (!lvaLocalVarRefCounted())
2623     {
2624         return false;
2625     }
2626
2627     if (varDsc->lvRefCntWtd() < (BB_UNITY_WEIGHT * 3))
2628     {
2629         return false;
2630     }
2631
2632 #ifdef _TARGET_X86_
2633     if (varTypeIsFloating(tree->TypeGet()))
2634         return false;
2635     if (varTypeIsLong(tree->TypeGet()))
2636         return false;
2637 #endif
2638
2639     return true;
2640 }
2641
2642 //------------------------------------------------------------------------
2643 // gtCanSwapOrder: Returns true iff the secondNode can be swapped with firstNode.
2644 //
2645 // Arguments:
2646 //    firstNode  - An operand of a tree that can have GTF_REVERSE_OPS set.
2647 //    secondNode - The other operand of the tree.
2648 //
2649 // Return Value:
2650 //    Returns a boolean indicating whether it is safe to reverse the execution
2651 //    order of the two trees, considering any exception, global effects, or
2652 //    ordering constraints.
2653 //
2654 bool Compiler::gtCanSwapOrder(GenTree* firstNode, GenTree* secondNode)
2655 {
2656     // Relative of order of global / side effects can't be swapped.
2657
2658     bool canSwap = true;
2659
2660     if (optValnumCSE_phase)
2661     {
2662         canSwap = optCSE_canSwap(firstNode, secondNode);
2663     }
2664
2665     // We cannot swap in the presence of special side effects such as GT_CATCH_ARG.
2666
2667     if (canSwap && (firstNode->gtFlags & GTF_ORDER_SIDEEFF))
2668     {
2669         canSwap = false;
2670     }
2671
2672     // When strict side effect order is disabled we allow GTF_REVERSE_OPS to be set
2673     // when one or both sides contains a GTF_CALL or GTF_EXCEPT.
2674     // Currently only the C and C++ languages allow non strict side effect order.
2675
2676     unsigned strictEffects = GTF_GLOB_EFFECT;
2677
2678     if (canSwap && (firstNode->gtFlags & strictEffects))
2679     {
2680         // op1 has side efects that can't be reordered.
2681         // Check for some special cases where we still may be able to swap.
2682
2683         if (secondNode->gtFlags & strictEffects)
2684         {
2685             // op2 has also has non reorderable side effects - can't swap.
2686             canSwap = false;
2687         }
2688         else
2689         {
2690             // No side effects in op2 - we can swap iff op1 has no way of modifying op2,
2691             // i.e. through byref assignments or calls or op2 is a constant.
2692
2693             if (firstNode->gtFlags & strictEffects & GTF_PERSISTENT_SIDE_EFFECTS)
2694             {
2695                 // We have to be conservative - can swap iff op2 is constant.
2696                 if (!secondNode->OperIsConst())
2697                 {
2698                     canSwap = false;
2699                 }
2700             }
2701         }
2702     }
2703     return canSwap;
2704 }
2705
2706 //------------------------------------------------------------------------
2707 // Given an address expression, compute its costs and addressing mode opportunities,
2708 // and mark addressing mode candidates as GTF_DONT_CSE.
2709 //
2710 // Arguments:
2711 //    addr   - The address expression
2712 //    costEx - The execution cost of this address expression (in/out arg to be updated)
//    costSz - The size cost of this address expression (in/out arg to be updated)
2714 //    type   - The type of the value being referenced by the parent of this address expression.
2715 //
2716 // Return Value:
2717 //    Returns true if it finds an addressing mode.
2718 //
2719 // Notes:
2720 //    TODO-Throughput - Consider actually instantiating these early, to avoid
2721 //    having to re-run the algorithm that looks for them (might also improve CQ).
2722 //
bool Compiler::gtMarkAddrMode(GenTree* addr, int* pCostEx, int* pCostSz, var_types type)
{
    // These are "out" parameters on the call to genCreateAddrMode():
    bool rev; // This will be true if the operands will need to be reversed. At this point we
              // don't care about this because we're not yet instantiating this addressing mode.
#if SCALED_ADDR_MODES
    unsigned mul; // This is the index (scale) value for the addressing mode
#endif
    ssize_t  cns;  // This is the constant offset
    GenTree* base; // This is the base of the address.
    GenTree* idx;  // This is the index.

    if (codeGen->genCreateAddrMode(addr, false /*fold*/, &rev, &base, &idx,
#if SCALED_ADDR_MODES
                                   &mul,
#endif // SCALED_ADDR_MODES
                                   &cns))
    {
        // We can form a complex addressing mode, so mark each of the interior
        // nodes with GTF_ADDRMODE_NO_CSE and calculate a more accurate cost.

        addr->gtFlags |= GTF_ADDRMODE_NO_CSE;
#ifdef _TARGET_XARCH_
        // addrmodeCount is the count of items that we used to form
        // an addressing mode.  The maximum value is 4 when we have
        // all of these:   { base, idx, cns, mul }
        //
        unsigned addrmodeCount = 0;
        if (base)
        {
            *pCostEx += base->gtCostEx;
            *pCostSz += base->gtCostSz;
            addrmodeCount++;
        }

        if (idx)
        {
            *pCostEx += idx->gtCostEx;
            *pCostSz += idx->gtCostSz;
            addrmodeCount++;
        }

        if (cns)
        {
            // A displacement that fits in 8 bits is encoded more compactly than a 32-bit one.
            if (((signed char)cns) == ((int)cns))
            {
                *pCostSz += 1;
            }
            else
            {
                *pCostSz += 4;
            }
            addrmodeCount++;
        }
        if (mul)
        {
            addrmodeCount++;
        }
        // When we form a complex addressing mode we can reduce the costs
        // associated with the interior GT_ADD and GT_LSH nodes:
        //
        //                      GT_ADD      -- reduce this interior GT_ADD by (-3,-3)
        //                      /   \       --
        //                  GT_ADD  'cns'   -- reduce this interior GT_ADD by (-2,-2)
        //                  /   \           --
        //               'base'  GT_LSH     -- reduce this interior GT_LSH by (-1,-1)
        //                      /   \       --
        //                   'idx'  'mul'
        //
        if (addrmodeCount > 1)
        {
            // The number of interior GT_ADD and GT_LSH will always be one less than addrmodeCount
            //
            addrmodeCount--;

            GenTree* tmp = addr;
            while (addrmodeCount > 0)
            {
                // decrement the gtCosts for the interior GT_ADD or GT_LSH node by the remaining
                // addrmodeCount
                tmp->SetCosts(tmp->gtCostEx - addrmodeCount, tmp->gtCostSz - addrmodeCount);

                addrmodeCount--;
                if (addrmodeCount > 0)
                {
                    // Descend into whichever child is another interior node of the address mode.
                    GenTree* tmpOp1 = tmp->gtOp.gtOp1;
                    GenTree* tmpOp2 = tmp->gtGetOp2();
                    assert(tmpOp2 != nullptr);

                    if ((tmpOp1 != base) && (tmpOp1->OperGet() == GT_ADD))
                    {
                        tmp = tmpOp1;
                    }
                    else if (tmpOp2->OperGet() == GT_LSH)
                    {
                        tmp = tmpOp2;
                    }
                    else if (tmpOp1->OperGet() == GT_LSH)
                    {
                        tmp = tmpOp1;
                    }
                    else if (tmpOp2->OperGet() == GT_ADD)
                    {
                        tmp = tmpOp2;
                    }
                    else
                    {
                        // We can very rarely encounter a tree that has a GT_COMMA node
                        // that is difficult to walk, so we just early out without decrementing.
                        addrmodeCount = 0;
                    }
                }
            }
        }
#elif defined _TARGET_ARM_
        if (base)
        {
            *pCostEx += base->gtCostEx;
            *pCostSz += base->gtCostSz;
            if ((base->gtOper == GT_LCL_VAR) && ((idx == NULL) || (cns == 0)))
            {
                *pCostSz -= 1;
            }
        }

        if (idx)
        {
            *pCostEx += idx->gtCostEx;
            *pCostSz += idx->gtCostSz;
            if (mul > 0)
            {
                *pCostSz += 2;
            }
        }

        if (cns)
        {
            if (cns >= 128) // small offsets fit into a 16-bit instruction
            {
                if (cns < 4096) // medium offsets require a 32-bit instruction
                {
                    if (!varTypeIsFloating(type))
                    {
                        *pCostSz += 2;
                    }
                }
                else
                {
                    *pCostEx += 2; // Very large offsets require movw/movt instructions
                    *pCostSz += 8;
                }
            }
        }
#elif defined _TARGET_ARM64_
        if (base)
        {
            *pCostEx += base->gtCostEx;
            *pCostSz += base->gtCostSz;
        }

        if (idx)
        {
            *pCostEx += idx->gtCostEx;
            *pCostSz += idx->gtCostSz;
        }

        if (cns != 0)
        {
            // Offsets beyond this range presumably need an extra instruction to
            // materialize (the scaled immediate form no longer fits) - hence the bump.
            if (cns >= (4096 * genTypeSize(type)))
            {
                *pCostEx += 1;
                *pCostSz += 4;
            }
        }
#else
#error "Unknown _TARGET_"
#endif

        // These invariants are established by genCreateAddrMode(): the root is a
        // non-overflow GT_ADD, and a scale of exactly 1 is never produced
        // (it is represented by using 'base' instead of 'idx' - see below).
        assert(addr->gtOper == GT_ADD);
        assert(!addr->gtOverflow());
        assert(mul != 1);

        // If we have an addressing mode, we have one of:
        //   [base             + cns]
        //   [       idx * mul      ]  // mul >= 2, else we would use base instead of idx
        //   [       idx * mul + cns]  // mul >= 2, else we would use base instead of idx
        //   [base + idx * mul      ]  // mul can be 0, 2, 4, or 8
        //   [base + idx * mul + cns]  // mul can be 0, 2, 4, or 8
        // Note that mul == 0 is semantically equivalent to mul == 1.
        // Note that cns can be zero.
        CLANG_FORMAT_COMMENT_ANCHOR;

#if SCALED_ADDR_MODES
        assert((base != nullptr) || (idx != nullptr && mul >= 2));
#else
        assert(base != NULL);
#endif

        INDEBUG(GenTree* op1Save = addr);

        // Walk 'addr' identifying non-overflow ADDs that will be part of the address mode.
        // Note that we will be modifying 'op1' and 'op2' so that eventually they should
        // map to the base and index.
        GenTree* op1 = addr;
        GenTree* op2 = nullptr;
        gtWalkOp(&op1, &op2, base, false);

        // op1 and op2 are now descendants of the root GT_ADD of the addressing mode.
        assert(op1 != op1Save);
        assert(op2 != nullptr);

        // Walk the operands again (the third operand is unused in this case).
        // This time we will only consider adds with constant op2's, since
        // we have already found either a non-ADD op1 or a non-constant op2.
        gtWalkOp(&op1, &op2, nullptr, true);

#if defined(_TARGET_XARCH_)
        // For XARCH we will fold GT_ADDs in the op2 position into the addressing mode, so we call
        // gtWalkOp on both operands of the original GT_ADD.
        // This is not done for ARMARCH. Though the stated reason is that we don't try to create a
        // scaled index, in fact we actually do create them (even base + index*scale + offset).

        // At this point, 'op2' may itself be an ADD of a constant that should be folded
        // into the addressing mode.
        // Walk op2 looking for non-overflow GT_ADDs of constants.
        gtWalkOp(&op2, &op1, nullptr, true);
#endif // defined(_TARGET_XARCH_)

        // OK we are done walking the tree
        // Now assert that op1 and op2 correspond with base and idx
        // in one of the several acceptable ways.

        // Note that sometimes op1/op2 is equal to idx/base
        // and other times op1/op2 is a GT_COMMA node with
        // an effective value that is idx/base

        if (mul > 1)
        {
            // Scaled index case: one of op1/op2 should be the GT_LSH (or GT_MUL)
            // that produces the scaled index; mark it (and any inner GT_MUL) no-CSE.
            if ((op1 != base) && (op1->gtOper == GT_LSH))
            {
                op1->gtFlags |= GTF_ADDRMODE_NO_CSE;
                if (op1->gtOp.gtOp1->gtOper == GT_MUL)
                {
                    op1->gtOp.gtOp1->gtFlags |= GTF_ADDRMODE_NO_CSE;
                }
                assert((base == nullptr) || (op2 == base) || (op2->gtEffectiveVal() == base->gtEffectiveVal()) ||
                       (gtWalkOpEffectiveVal(op2) == gtWalkOpEffectiveVal(base)));
            }
            else
            {
                assert(op2);
                assert(op2->gtOper == GT_LSH || op2->gtOper == GT_MUL);
                op2->gtFlags |= GTF_ADDRMODE_NO_CSE;
                // We may have eliminated multiple shifts and multiplies in the addressing mode,
                // so navigate down through them to get to "idx".
                GenTree* op2op1 = op2->gtOp.gtOp1;
                while ((op2op1->gtOper == GT_LSH || op2op1->gtOper == GT_MUL) && op2op1 != idx)
                {
                    op2op1->gtFlags |= GTF_ADDRMODE_NO_CSE;
                    op2op1 = op2op1->gtOp.gtOp1;
                }
                assert(op1->gtEffectiveVal() == base);
                assert(op2op1 == idx);
            }
        }
        else
        {
            // Unscaled case (mul == 0, semantically a scale of 1).
            assert(mul == 0);

            if ((op1 == idx) || (op1->gtEffectiveVal() == idx))
            {
                if (idx != nullptr)
                {
                    if ((op1->gtOper == GT_MUL) || (op1->gtOper == GT_LSH))
                    {
                        if ((op1->gtOp.gtOp1->gtOper == GT_NOP) ||
                            (op1->gtOp.gtOp1->gtOper == GT_MUL && op1->gtOp.gtOp1->gtOp.gtOp1->gtOper == GT_NOP))
                        {
                            op1->gtFlags |= GTF_ADDRMODE_NO_CSE;
                            if (op1->gtOp.gtOp1->gtOper == GT_MUL)
                            {
                                op1->gtOp.gtOp1->gtFlags |= GTF_ADDRMODE_NO_CSE;
                            }
                        }
                    }
                }
                assert((op2 == base) || (op2->gtEffectiveVal() == base));
            }
            else if ((op1 == base) || (op1->gtEffectiveVal() == base))
            {
                if (idx != nullptr)
                {
                    assert(op2);
                    if ((op2->gtOper == GT_MUL) || (op2->gtOper == GT_LSH))
                    {
                        if ((op2->gtOp.gtOp1->gtOper == GT_NOP) ||
                            (op2->gtOp.gtOp1->gtOper == GT_MUL && op2->gtOp.gtOp1->gtOp.gtOp1->gtOper == GT_NOP))
                        {
                            op2->gtFlags |= GTF_ADDRMODE_NO_CSE;
                            if (op2->gtOp.gtOp1->gtOper == GT_MUL)
                            {
                                op2->gtOp.gtOp1->gtFlags |= GTF_ADDRMODE_NO_CSE;
                            }
                        }
                    }
                    assert((op2 == idx) || (op2->gtEffectiveVal() == idx));
                }
            }
            else
            {
                // op1 isn't base or idx. Is this possible? Or should there be an assert?
            }
        }
        return true;

    } // end  if  (genCreateAddrMode(...))
    return false;
}
3041
3042 /*****************************************************************************
3043  *
3044  *  Given a tree, figure out the order in which its sub-operands should be
3045  *  evaluated. If the second operand of a binary operator is more expensive
3046  *  than the first operand, then try to swap the operand trees. Updates the
3047  *  GTF_REVERSE_OPS bit if necessary in this case.
3048  *
3049  *  Returns the Sethi 'complexity' estimate for this tree (the higher
3050  *  the number, the higher is the tree's resources requirement).
3051  *
3052  *  This function sets:
3053  *      1. gtCostEx to the execution complexity estimate
3054  *      2. gtCostSz to the code size estimate
3055  *      3. Sometimes sets GTF_ADDRMODE_NO_CSE on nodes in the tree.
3056  *      4. DEBUG-only: clears GTF_DEBUG_NODE_MORPHED.
3057  */
3058
3059 #ifdef _PREFAST_
3060 #pragma warning(push)
3061 #pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
3062 #endif
3063 unsigned Compiler::gtSetEvalOrder(GenTree* tree)
3064 {
3065     assert(tree);
3066     assert(tree->gtOper != GT_STMT);
3067
3068 #ifdef DEBUG
3069     /* Clear the GTF_DEBUG_NODE_MORPHED flag as well */
3070     tree->gtDebugFlags &= ~GTF_DEBUG_NODE_MORPHED;
3071 #endif
3072
3073     /* Is this a FP value? */
3074
3075     bool isflt = varTypeIsFloating(tree->TypeGet());
3076
3077     /* Figure out what kind of a node we have */
3078
3079     const genTreeOps oper = tree->OperGet();
3080     const unsigned   kind = tree->OperKind();
3081
3082     /* Assume no fixed registers will be trashed */
3083
3084     unsigned level;
3085     int      costEx;
3086     int      costSz;
3087
3088 #ifdef DEBUG
3089     costEx = -1;
3090     costSz = -1;
3091 #endif
3092
3093     /* Is this a constant or a leaf node? */
3094
3095     if (kind & (GTK_LEAF | GTK_CONST))
3096     {
3097         switch (oper)
3098         {
3099 #ifdef _TARGET_ARM_
3100             case GT_CNS_LNG:
3101                 costSz = 9;
3102                 costEx = 4;
3103                 goto COMMON_CNS;
3104
3105             case GT_CNS_STR:
3106                 // Uses movw/movt
3107                 costSz = 7;
3108                 costEx = 3;
3109                 goto COMMON_CNS;
3110
3111             case GT_CNS_INT:
3112             {
3113                 // If the constant is a handle then it will need to have a relocation
3114                 //  applied to it.
3115                 // Any constant that requires a reloc must use the movw/movt sequence
3116                 //
3117                 GenTreeIntConCommon* con = tree->AsIntConCommon();
3118
3119                 if (con->ImmedValNeedsReloc(this) ||
3120                     !codeGen->validImmForInstr(INS_mov, (target_ssize_t)tree->gtIntCon.gtIconVal))
3121                 {
3122                     // Uses movw/movt
3123                     costSz = 7;
3124                     costEx = 3;
3125                 }
3126                 else if (((unsigned)tree->gtIntCon.gtIconVal) <= 0x00ff)
3127                 {
3128                     // mov  Rd, <const8>
3129                     costSz = 1;
3130                     costEx = 1;
3131                 }
3132                 else
3133                 {
3134                     // Uses movw/mvn
3135                     costSz = 3;
3136                     costEx = 1;
3137                 }
3138                 goto COMMON_CNS;
3139             }
3140
3141 #elif defined _TARGET_XARCH_
3142
3143             case GT_CNS_LNG:
3144                 costSz = 10;
3145                 costEx = 3;
3146                 goto COMMON_CNS;
3147
3148             case GT_CNS_STR:
3149                 costSz = 4;
3150                 costEx = 1;
3151                 goto COMMON_CNS;
3152
3153             case GT_CNS_INT:
3154             {
3155                 // If the constant is a handle then it will need to have a relocation
3156                 //  applied to it.
3157                 //
3158                 GenTreeIntConCommon* con = tree->AsIntConCommon();
3159
3160                 bool iconNeedsReloc = con->ImmedValNeedsReloc(this);
3161
3162                 if (!iconNeedsReloc && con->FitsInI8())
3163                 {
3164                     costSz = 1;
3165                     costEx = 1;
3166                 }
3167 #if defined(_TARGET_AMD64_)
3168                 else if (iconNeedsReloc || !con->FitsInI32())
3169                 {
3170                     costSz = 10;
3171                     costEx = 3;
3172                 }
3173 #endif // _TARGET_AMD64_
3174                 else
3175                 {
3176                     costSz = 4;
3177                     costEx = 1;
3178                 }
3179                 goto COMMON_CNS;
3180             }
3181
3182 #elif defined(_TARGET_ARM64_)
3183             case GT_CNS_LNG:
3184             case GT_CNS_STR:
3185             case GT_CNS_INT:
3186                 // TODO-ARM64-NYI: Need cost estimates.
3187                 costSz = 1;
3188                 costEx = 1;
3189                 goto COMMON_CNS;
3190
3191 #else
3192             case GT_CNS_LNG:
3193             case GT_CNS_STR:
3194             case GT_CNS_INT:
3195 #error "Unknown _TARGET_"
3196 #endif
3197
3198             COMMON_CNS:
3199                 /*
3200                     Note that some code below depends on constants always getting
3201                     moved to be the second operand of a binary operator. This is
3202                     easily accomplished by giving constants a level of 0, which
3203                     we do on the next line. If you ever decide to change this, be
3204                     aware that unless you make other arrangements for integer
3205                     constants to be moved, stuff will break.
3206                  */
3207
3208                 level = 0;
3209                 break;
3210
3211             case GT_CNS_DBL:
3212                 level = 0;
3213                 /* We use fldz and fld1 to load 0.0 and 1.0, but all other  */
3214                 /* floating point constants are loaded using an indirection */
3215                 if ((*((__int64*)&(tree->gtDblCon.gtDconVal)) == 0) ||
3216                     (*((__int64*)&(tree->gtDblCon.gtDconVal)) == I64(0x3ff0000000000000)))
3217                 {
3218                     costEx = 1;
3219                     costSz = 1;
3220                 }
3221                 else
3222                 {
3223                     costEx = IND_COST_EX;
3224                     costSz = 4;
3225                 }
3226                 break;
3227
3228             case GT_LCL_VAR:
3229                 level = 1;
3230                 if (gtIsLikelyRegVar(tree))
3231                 {
3232                     costEx = 1;
3233                     costSz = 1;
3234                     /* Sign-extend and zero-extend are more expensive to load */
3235                     if (lvaTable[tree->gtLclVar.gtLclNum].lvNormalizeOnLoad())
3236                     {
3237                         costEx += 1;
3238                         costSz += 1;
3239                     }
3240                 }
3241                 else
3242                 {
3243                     costEx = IND_COST_EX;
3244                     costSz = 2;
3245                     /* Sign-extend and zero-extend are more expensive to load */
3246                     if (varTypeIsSmall(tree->TypeGet()))
3247                     {
3248                         costEx += 1;
3249                         costSz += 1;
3250                     }
3251                 }
3252 #if defined(_TARGET_AMD64_)
3253                 // increase costSz for floating point locals
3254                 if (isflt)
3255                 {
3256                     costSz += 1;
3257                     if (!gtIsLikelyRegVar(tree))
3258                     {
3259                         costSz += 1;
3260                     }
3261                 }
3262 #endif
3263                 break;
3264
3265             case GT_CLS_VAR:
3266 #ifdef _TARGET_ARM_
3267                 // We generate movw/movt/ldr
3268                 level  = 1;
3269                 costEx = 3 + IND_COST_EX; // 6
3270                 costSz = 4 + 4 + 2;       // 10
3271                 break;
3272 #endif
3273             case GT_LCL_FLD:
3274                 level  = 1;
3275                 costEx = IND_COST_EX;
3276                 costSz = 4;
3277                 if (varTypeIsSmall(tree->TypeGet()))
3278                 {
3279                     costEx += 1;
3280                     costSz += 1;
3281                 }
3282                 break;
3283
3284             case GT_PHI_ARG:
3285             case GT_ARGPLACE:
3286                 level  = 0;
3287                 costEx = 0;
3288                 costSz = 0;
3289                 break;
3290
3291             default:
3292                 level  = 1;
3293                 costEx = 1;
3294                 costSz = 1;
3295                 break;
3296         }
3297         goto DONE;
3298     }
3299
3300     /* Is it a 'simple' unary/binary operator? */
3301
3302     if (kind & GTK_SMPOP)
3303     {
3304         int      lvlb; // preference for op2
3305         unsigned lvl2; // scratch variable
3306
3307         GenTree* op1 = tree->gtOp.gtOp1;
3308         GenTree* op2 = tree->gtGetOp2IfPresent();
3309
3310         costEx = 0;
3311         costSz = 0;
3312
3313         if (tree->OperIsAddrMode())
3314         {
3315             if (op1 == nullptr)
3316             {
3317                 op1 = op2;
3318                 op2 = nullptr;
3319             }
3320         }
3321
3322         /* Check for a nilary operator */
3323
3324         if (op1 == nullptr)
3325         {
3326             assert(op2 == nullptr);
3327
3328             level = 0;
3329
3330             goto DONE;
3331         }
3332
3333         /* Is this a unary operator? */
3334
3335         if (op2 == nullptr)
3336         {
3337             /* Process the operand of the operator */
3338
3339             /* Most Unary ops have costEx of 1 */
3340             costEx = 1;
3341             costSz = 1;
3342
3343             level = gtSetEvalOrder(op1);
3344
3345             /* Special handling for some operators */
3346
3347             switch (oper)
3348             {
3349                 case GT_JTRUE:
3350                     costEx = 2;
3351                     costSz = 2;
3352                     break;
3353
3354                 case GT_SWITCH:
3355                     costEx = 10;
3356                     costSz = 5;
3357                     break;
3358
3359                 case GT_CAST:
3360 #if defined(_TARGET_ARM_)
3361                     costEx = 1;
3362                     costSz = 1;
3363                     if (isflt || varTypeIsFloating(op1->TypeGet()))
3364                     {
3365                         costEx = 3;
3366                         costSz = 4;
3367                     }
3368 #elif defined(_TARGET_ARM64_)
3369                     costEx = 1;
3370                     costSz = 2;
3371                     if (isflt || varTypeIsFloating(op1->TypeGet()))
3372                     {
3373                         costEx = 2;
3374                         costSz = 4;
3375                     }
3376 #elif defined(_TARGET_XARCH_)
3377                     costEx = 1;
3378                     costSz = 2;
3379
3380                     if (isflt || varTypeIsFloating(op1->TypeGet()))
3381                     {
3382                         /* cast involving floats always go through memory */
3383                         costEx = IND_COST_EX * 2;
3384                         costSz = 6;
3385                     }
3386 #else
3387 #error "Unknown _TARGET_"
3388 #endif
3389
3390                     /* Overflow casts are a lot more expensive */
3391                     if (tree->gtOverflow())
3392                     {
3393                         costEx += 6;
3394                         costSz += 6;
3395                     }
3396
3397                     break;
3398
3399                 case GT_LIST:
3400                 case GT_FIELD_LIST:
3401                 case GT_NOP:
3402                     costEx = 0;
3403                     costSz = 0;
3404                     break;
3405
3406                 case GT_INTRINSIC:
3407                     // GT_INTRINSIC intrinsics Sin, Cos, Sqrt, Abs ... have higher costs.
3408                     // TODO: tune these costs target specific as some of these are
3409                     // target intrinsics and would cost less to generate code.
3410                     switch (tree->gtIntrinsic.gtIntrinsicId)
3411                     {
3412                         default:
3413                             assert(!"missing case for gtIntrinsicId");
3414                             costEx = 12;
3415                             costSz = 12;
3416                             break;
3417
3418                         case CORINFO_INTRINSIC_Sin:
3419                         case CORINFO_INTRINSIC_Cos:
3420                         case CORINFO_INTRINSIC_Sqrt:
3421                         case CORINFO_INTRINSIC_Cbrt:
3422                         case CORINFO_INTRINSIC_Cosh:
3423                         case CORINFO_INTRINSIC_Sinh:
3424                         case CORINFO_INTRINSIC_Tan:
3425                         case CORINFO_INTRINSIC_Tanh:
3426                         case CORINFO_INTRINSIC_Asin:
3427                         case CORINFO_INTRINSIC_Asinh:
3428                         case CORINFO_INTRINSIC_Acos:
3429                         case CORINFO_INTRINSIC_Acosh:
3430                         case CORINFO_INTRINSIC_Atan:
3431                         case CORINFO_INTRINSIC_Atanh:
3432                         case CORINFO_INTRINSIC_Atan2:
3433                         case CORINFO_INTRINSIC_Log10:
3434                         case CORINFO_INTRINSIC_Pow:
3435                         case CORINFO_INTRINSIC_Exp:
3436                         case CORINFO_INTRINSIC_Ceiling:
3437                         case CORINFO_INTRINSIC_Floor:
3438                         case CORINFO_INTRINSIC_Object_GetType:
3439                             // Giving intrinsics a large fixed execution cost is because we'd like to CSE
3440                             // them, even if they are implemented by calls. This is different from modeling
3441                             // user calls since we never CSE user calls.
3442                             costEx = 36;
3443                             costSz = 4;
3444                             break;
3445
3446                         case CORINFO_INTRINSIC_Abs:
3447                             costEx = 5;
3448                             costSz = 15;
3449                             break;
3450
3451                         case CORINFO_INTRINSIC_Round:
3452                             costEx = 3;
3453                             costSz = 4;
3454                             break;
3455                     }
3456                     level++;
3457                     break;
3458
3459                 case GT_NOT:
3460                 case GT_NEG:
3461                     // We need to ensure that -x is evaluated before x or else
3462                     // we get burned while adjusting genFPstkLevel in x*-x where
3463                     // the rhs x is the last use of the enregistered x.
3464                     //
3465                     // Even in the integer case we want to prefer to
3466                     // evaluate the side without the GT_NEG node, all other things
3467                     // being equal.  Also a GT_NOT requires a scratch register
3468
3469                     level++;
3470                     break;
3471
3472                 case GT_ADDR:
3473
3474                     costEx = 0;
3475                     costSz = 1;
3476
3477                     // If we have a GT_ADDR of an GT_IND we can just copy the costs from indOp1
3478                     if (op1->OperGet() == GT_IND)
3479                     {
3480                         GenTree* indOp1 = op1->gtOp.gtOp1;
3481                         costEx          = indOp1->gtCostEx;
3482                         costSz          = indOp1->gtCostSz;
3483                     }
3484                     break;
3485
3486                 case GT_ARR_LENGTH:
3487                     level++;
3488
3489                     /* Array Len should be the same as an indirections, which have a costEx of IND_COST_EX */
3490                     costEx = IND_COST_EX - 1;
3491                     costSz = 2;
3492                     break;
3493
3494                 case GT_MKREFANY:
3495                 case GT_OBJ:
3496                     // We estimate the cost of a GT_OBJ or GT_MKREFANY to be two loads (GT_INDs)
3497                     costEx = 2 * IND_COST_EX;
3498                     costSz = 2 * 2;
3499                     break;
3500
3501                 case GT_BOX:
3502                     // We estimate the cost of a GT_BOX to be two stores (GT_INDs)
3503                     costEx = 2 * IND_COST_EX;
3504                     costSz = 2 * 2;
3505                     break;
3506
3507 #if defined(FEATURE_HW_INTRINSICS) && defined(_TARGET_XARCH_)
3508                 case GT_HWIntrinsic:
3509                 {
3510                     if (tree->AsHWIntrinsic()->OperIsMemoryLoadOrStore())
3511                     {
3512                         costEx = IND_COST_EX;
3513                         costSz = 2;
3514                         // See if we can form a complex addressing mode.
3515
3516                         GenTree* addr = op1->gtEffectiveVal();
3517
3518                         if (addr->OperIs(GT_ADD) && gtMarkAddrMode(addr, &costEx, &costSz, tree->TypeGet()))
3519                         {
3520                             goto DONE;
3521                         }
3522                     }
3523                 }
3524                 break;
3525 #endif // FEATURE_HW_INTRINSICS && _TARGET_XARCH_
3526
3527                 case GT_BLK:
3528                 case GT_IND:
3529
3530                     /* An indirection should always have a non-zero level.
3531                      * Only constant leaf nodes have level 0.
3532                      */
3533
3534                     if (level == 0)
3535                     {
3536                         level = 1;
3537                     }
3538
3539                     /* Indirections have a costEx of IND_COST_EX */
3540                     costEx = IND_COST_EX;
3541                     costSz = 2;
3542
3543                     /* If we have to sign-extend or zero-extend, bump the cost */
3544                     if (varTypeIsSmall(tree->TypeGet()))
3545                     {
3546                         costEx += 1;
3547                         costSz += 1;
3548                     }
3549
3550                     if (isflt)
3551                     {
3552                         if (tree->TypeGet() == TYP_DOUBLE)
3553                         {
3554                             costEx += 1;
3555                         }
3556 #ifdef _TARGET_ARM_
3557                         costSz += 2;
3558 #endif // _TARGET_ARM_
3559                     }
3560
3561                     // Can we form an addressing mode with this indirection?
3562                     // TODO-CQ: Consider changing this to op1->gtEffectiveVal() to take into account
3563                     // addressing modes hidden under a comma node.
3564
3565                     if (op1->gtOper == GT_ADD)
3566                     {
3567                         // See if we can form a complex addressing mode.
3568
3569                         GenTree* addr = op1->gtEffectiveVal();
3570
3571                         bool doAddrMode = true;
3572                         // See if we can form a complex addressing mode.
3573                         // Always use an addrMode for an array index indirection.
3574                         // TODO-1stClassStructs: Always do this, but first make sure it's
3575                         // done in Lowering as well.
3576                         if ((tree->gtFlags & GTF_IND_ARR_INDEX) == 0)
3577                         {
3578                             if (tree->TypeGet() == TYP_STRUCT)
3579                             {
3580                                 doAddrMode = false;
3581                             }
3582                             else if (varTypeIsStruct(tree))
3583                             {
3584                                 // This is a heuristic attempting to match prior behavior when indirections
3585                                 // under a struct assignment would not be considered for addressing modes.
3586                                 if (compCurStmt != nullptr)
3587                                 {
3588                                     GenTree* expr = compCurStmt->gtStmtExpr;
3589                                     if ((expr->OperGet() == GT_ASG) &&
3590                                         ((expr->gtGetOp1() == tree) || (expr->gtGetOp2() == tree)))
3591                                     {
3592                                         doAddrMode = false;
3593                                     }
3594                                 }
3595                             }
3596                         }
3597                         if (doAddrMode && gtMarkAddrMode(addr, &costEx, &costSz, tree->TypeGet()))
3598                         {
3599                             goto DONE;
3600                         }
3601                     } // end if  (op1->gtOper == GT_ADD)
3602                     else if (gtIsLikelyRegVar(op1))
3603                     {
3604                     /* Indirection of an enregistered LCL_VAR, don't increase costEx/costSz */
3605                         goto DONE;
3606                     }
3607 #ifdef _TARGET_XARCH_
3608                     else if (op1->IsCnsIntOrI())
3609                     {
3610                         // Indirection of a CNS_INT, subtract 1 from costEx
3611                         // makes costEx 3 for x86 and 4 for amd64
3612                         //
3613                         costEx += (op1->gtCostEx - 1);
3614                         costSz += op1->gtCostSz;
3615                         goto DONE;
3616                     }
3617 #endif
3618                     break;
3619
3620                 default:
3621                     break;
3622             }
3623             costEx += op1->gtCostEx;
3624             costSz += op1->gtCostSz;
3625             goto DONE;
3626         }
3627
3628         /* Binary operator - check for certain special cases */
3629
3630         lvlb = 0;
3631
3632         /* Default Binary ops have a cost of 1,1 */
3633         costEx = 1;
3634         costSz = 1;
3635
3636 #ifdef _TARGET_ARM_
3637         if (isflt)
3638         {
3639             costSz += 2;
3640         }
3641 #endif
3642 #ifndef _TARGET_64BIT_
3643         if (varTypeIsLong(op1->TypeGet()))
3644         {
3645             /* Operations on longs are more expensive */
3646             costEx += 3;
3647             costSz += 3;
3648         }
3649 #endif
3650         switch (oper)
3651         {
3652             case GT_MOD:
3653             case GT_UMOD:
3654
3655                 /* Modulo by a power of 2 is easy */
3656
3657                 if (op2->IsCnsIntOrI())
3658                 {
3659                     size_t ival = op2->gtIntConCommon.IconValue();
3660
3661                     if (ival > 0 && ival == genFindLowestBit(ival))
3662                     {
3663                         break;
3664                     }
3665                 }
3666
3667                 __fallthrough;
3668
3669             case GT_DIV:
3670             case GT_UDIV:
3671
3672                 if (isflt)
3673                 {
3674                     /* fp division is very expensive to execute */
3675                     costEx = 36; // TYP_DOUBLE
3676                     costSz += 3;
3677                 }
3678                 else
3679                 {
3680                     /* integer division is also very expensive */
3681                     costEx = 20;
3682                     costSz += 2;
3683
3684                     // Encourage the first operand to be evaluated (into EAX/EDX) first
3685                     lvlb -= 3;
3686                 }
3687                 break;
3688
3689             case GT_MUL:
3690
3691                 if (isflt)
3692                 {
3693                     /* FP multiplication instructions are more expensive */
3694                     costEx += 4;
3695                     costSz += 3;
3696                 }
3697                 else
3698                 {
3699                     /* Integer multiplication instructions are more expensive */
3700                     costEx += 3;
3701                     costSz += 2;
3702
3703                     if (tree->gtOverflow())
3704                     {
3705                         /* Overflow checks are more expensive */
3706                         costEx += 3;
3707                         costSz += 3;
3708                     }
3709
3710 #ifdef _TARGET_X86_
3711                     if ((tree->gtType == TYP_LONG) || tree->gtOverflow())
3712                     {
3713                         /* We use imulEAX for TYP_LONG and overflow multiplications */
3714                         // Encourage the first operand to be evaluated (into EAX/EDX) first
3715                         lvlb -= 4;
3716
3717                         /* The 64-bit imul instruction costs more */
3718                         costEx += 4;
3719                     }
3720 #endif //  _TARGET_X86_
3721                 }
3722                 break;
3723
3724             case GT_ADD:
3725             case GT_SUB:
3726                 if (isflt)
3727                 {
3728                     /* FP instructions are a bit more expensive */
3729                     costEx += 4;
3730                     costSz += 3;
3731                     break;
3732                 }
3733
3734                 /* Overflow checks are more expensive */
3735                 if (tree->gtOverflow())
3736                 {
3737                     costEx += 3;
3738                     costSz += 3;
3739                 }
3740                 break;
3741
3742             case GT_COMMA:
3743
3744                 /* Comma tosses the result of the left operand */
3745                 gtSetEvalOrder(op1);
3746                 level = gtSetEvalOrder(op2);
3747
3748                 /* GT_COMMA cost is the sum of op1 and op2 costs */
3749                 costEx = (op1->gtCostEx + op2->gtCostEx);
3750                 costSz = (op1->gtCostSz + op2->gtCostSz);
3751
3752                 goto DONE;
3753
3754             case GT_COLON:
3755
3756                 level = gtSetEvalOrder(op1);
3757                 lvl2  = gtSetEvalOrder(op2);
3758
3759                 if (level < lvl2)
3760                 {
3761                     level = lvl2;
3762                 }
3763                 else if (level == lvl2)
3764                 {
3765                     level += 1;
3766                 }
3767
3768                 costEx = op1->gtCostEx + op2->gtCostEx;
3769                 costSz = op1->gtCostSz + op2->gtCostSz;
3770
3771                 goto DONE;
3772
3773             case GT_LIST:
3774             case GT_FIELD_LIST:
3775             {
3776                 const bool isListCallArgs = false;
3777                 const bool callArgsInRegs = false;
3778                 return gtSetListOrder(tree, isListCallArgs, callArgsInRegs);
3779             }
3780
3781             case GT_ASG:
3782                 /* Assignments need a bit of special handling */
3783                 /* Process the target */
3784                 level = gtSetEvalOrder(op1);
3785
3786                 if (gtIsLikelyRegVar(op1))
3787                 {
3788                     assert(lvlb == 0);
3789                     lvl2 = gtSetEvalOrder(op2);
3790
3791                     /* Assignment to an enregistered LCL_VAR */
3792                     costEx = op2->gtCostEx;
3793                     costSz = max(3, op2->gtCostSz); // 3 is an estimate for a reg-reg assignment
3794                     goto DONE_OP1_AFTER_COST;
3795                 }
3796                 goto DONE_OP1;
3797
3798             default:
3799                 break;
3800         }
3801
3802         /* Process the sub-operands */
3803
3804         level = gtSetEvalOrder(op1);
3805         if (lvlb < 0)
3806         {
3807             level -= lvlb; // lvlb is negative, so this increases level
3808             lvlb = 0;
3809         }
3810
3811     DONE_OP1:
3812         assert(lvlb >= 0);
3813         lvl2 = gtSetEvalOrder(op2) + lvlb;
3814
3815         costEx += (op1->gtCostEx + op2->gtCostEx);
3816         costSz += (op1->gtCostSz + op2->gtCostSz);
3817
3818     DONE_OP1_AFTER_COST:
3819
3820         bool bReverseInAssignment = false;
3821         if (oper == GT_ASG)
3822         {
3823             GenTree* op1Val = op1;
3824
3825             // Skip over the GT_IND/GT_ADDR tree (if one exists)
3826             //
3827             if ((op1->gtOper == GT_IND) && (op1->gtOp.gtOp1->gtOper == GT_ADDR))
3828             {
3829                 op1Val = op1->gtOp.gtOp1->gtOp.gtOp1;
3830             }
3831
3832             switch (op1Val->gtOper)
3833             {
3834                 case GT_IND:
3835                 case GT_BLK:
3836                 case GT_OBJ:
3837                 case GT_DYN_BLK:
3838
3839                     // In an indirection, the destination address is evaluated prior to the source.
3840                     // If we have any side effects on the target indirection,
3841                     // we have to evaluate op1 first.
3842                     // However, if the LHS is a lclVar address, SSA relies on using evaluation order for its
3843                     // renaming, and therefore the RHS must be evaluated first.
3844                     // If we have an assignment involving a lclVar address, the LHS may be marked as having
3845                     // side-effects.
3846                     // However the side-effects won't require that we evaluate the LHS address first:
3847                     // - The GTF_GLOB_REF might have been conservatively set on a FIELD of a local.
3848                     // - The local might be address-exposed, but that side-effect happens at the actual assignment (not
3849                     //   when its address is "evaluated") so it doesn't change the side effect to "evaluate" the address
3850                     //   after the RHS (note that in this case it won't be renamed by SSA anyway, but the reordering is
3851                     //   safe).
3852                     //
3853                     if (op1Val->AsIndir()->Addr()->IsLocalAddrExpr())
3854                     {
3855                         bReverseInAssignment = true;
3856                         tree->gtFlags |= GTF_REVERSE_OPS;
3857                         break;
3858                     }
3859                     if (op1Val->AsIndir()->Addr()->gtFlags & GTF_ALL_EFFECT)
3860                     {
3861                         break;
3862                     }
3863
3864                     // In case op2 assigns to a local var that is used in op1Val, we have to evaluate op1Val first.
3865                     if (op2->gtFlags & GTF_ASG)
3866                     {
3867                         break;
3868                     }
3869
3870                     // If op2 is simple then evaluate op1 first
3871
3872                     if (op2->OperKind() & GTK_LEAF)
3873                     {
3874                         break;
3875                     }
3876
3877                 // fall through and set GTF_REVERSE_OPS
3878
3879                 case GT_LCL_VAR:
3880                 case GT_LCL_FLD:
3881
3882                     // We evaluate op2 before op1
3883                     bReverseInAssignment = true;
3884                     tree->gtFlags |= GTF_REVERSE_OPS;
3885                     break;
3886
3887                 default:
3888                     break;
3889             }
3890         }
3891         else if (kind & GTK_RELOP)
3892         {
3893             /* Float compares remove both operands from the FP stack */
3894             /* Also FP comparison uses EAX for flags */
3895
3896             if (varTypeIsFloating(op1->TypeGet()))
3897             {
3898                 level++;
3899                 lvl2++;
3900             }
3901             if ((tree->gtFlags & GTF_RELOP_JMP_USED) == 0)
3902             {
3903                 /* Using a setcc instruction is more expensive */
3904                 costEx += 3;
3905             }
3906         }
3907
3908         /* Check for other interesting cases */
3909
3910         switch (oper)
3911         {
3912             case GT_LSH:
3913             case GT_RSH:
3914             case GT_RSZ:
3915             case GT_ROL:
3916             case GT_ROR:
3917                 /* Variable sized shifts are more expensive and use REG_SHIFT */
3918
3919                 if (!op2->IsCnsIntOrI())
3920                 {
3921                     costEx += 3;
3922 #ifndef _TARGET_64BIT_
3923                     // Variable sized LONG shifts require the use of a helper call
3924                     //
3925                     if (tree->gtType == TYP_LONG)
3926                     {
3927                         level += 5;
3928                         lvl2 += 5;
3929                         costEx += 3 * IND_COST_EX;
3930                         costSz += 4;
3931                     }
3932 #endif // !_TARGET_64BIT_
3933                 }
3934                 break;
3935
3936             case GT_INTRINSIC:
3937
3938                 switch (tree->gtIntrinsic.gtIntrinsicId)
3939                 {
3940                     case CORINFO_INTRINSIC_Atan2:
3941                     case CORINFO_INTRINSIC_Pow:
3942                         // These math intrinsics are actually implemented by user calls.
3943                         // Increase the Sethi 'complexity' by two to reflect the argument
3944                         // register requirement.
3945                         level += 2;
3946                         break;
3947                     default:
3948                         assert(!"Unknown binary GT_INTRINSIC operator");
3949                         break;
3950                 }
3951
3952                 break;
3953
3954             default:
3955                 break;
3956         }
3957
3958         /* We need to evaluate constants later, as many places in codegen
3959            can't handle op1 being a constant. This is normally naturally
3960            enforced as constants have the lowest level (0). However,
3961            sometimes we end up with a tree like "cns1 < nop(cns2)". In
3962            such cases, both sides have a level of 0. So encourage constants
3963            to be evaluated last in such cases */
3964
3965         if ((level == 0) && (level == lvl2) && (op1->OperKind() & GTK_CONST) &&
3966             (tree->OperIsCommutative() || tree->OperIsCompare()))
3967         {
3968             lvl2++;
3969         }
3970
3971         /* We try to swap operands if the second one is more expensive */
3972         bool     tryToSwap;
3973         GenTree* opA;
3974         GenTree* opB;
3975
3976         if (tree->gtFlags & GTF_REVERSE_OPS)
3977         {
3978             opA = op2;
3979             opB = op1;
3980         }
3981         else
3982         {
3983             opA = op1;
3984             opB = op2;
3985         }
3986
3987         if (fgOrder == FGOrderLinear)
3988         {
3989             // Don't swap anything if we're in linear order; we're really just interested in the costs.
3990             tryToSwap = false;
3991         }
3992         else if (bReverseInAssignment)
3993         {
3994             // Assignments are special, we want the reverseops flags
3995             // so if possible it was set above.
3996             tryToSwap = false;
3997         }
3998         else if ((oper == GT_INTRINSIC) && IsIntrinsicImplementedByUserCall(tree->AsIntrinsic()->gtIntrinsicId))
3999         {
4000             // We do not swap operand execution order for intrinsics that are implemented by user calls
4001             // because of trickiness around ensuring the execution order does not change during rationalization.
4002             tryToSwap = false;
4003         }
4004         else
4005         {
4006             if (tree->gtFlags & GTF_REVERSE_OPS)
4007             {
4008                 tryToSwap = (level > lvl2);
4009             }
4010             else
4011             {
4012                 tryToSwap = (level < lvl2);
4013             }
4014
4015             // Try to force extra swapping when in the stress mode:
4016             if (compStressCompile(STRESS_REVERSE_FLAG, 60) && ((tree->gtFlags & GTF_REVERSE_OPS) == 0) &&
4017                 ((op2->OperKind() & GTK_CONST) == 0))
4018             {
4019                 tryToSwap = true;
4020             }
4021         }
4022
4023         if (tryToSwap)
4024         {
4025             bool canSwap = gtCanSwapOrder(opA, opB);
4026
4027             if (canSwap)
4028             {
4029                 /* Can we swap the order by commuting the operands? */
4030
4031                 switch (oper)
4032                 {
4033                     case GT_EQ:
4034                     case GT_NE:
4035                     case GT_LT:
4036                     case GT_LE:
4037                     case GT_GE:
4038                     case GT_GT:
4039                         if (GenTree::SwapRelop(oper) != oper)
4040                         {
4041                             tree->SetOper(GenTree::SwapRelop(oper), GenTree::PRESERVE_VN);
4042                         }
4043
4044                         __fallthrough;
4045
4046                     case GT_ADD:
4047                     case GT_MUL:
4048
4049                     case GT_OR:
4050                     case GT_XOR:
4051                     case GT_AND:
4052
4053                         /* Swap the operands */
4054
4055                         tree->gtOp.gtOp1 = op2;
4056                         tree->gtOp.gtOp2 = op1;
4057                         break;
4058
4059                     case GT_QMARK:
4060                     case GT_COLON:
4061                     case GT_MKREFANY:
4062                         break;
4063
4064                     case GT_LIST:
4065                     case GT_FIELD_LIST:
4066                         break;
4067
4068                     default:
4069
4070                         /* Mark the operand's evaluation order to be swapped */
4071                         if (tree->gtFlags & GTF_REVERSE_OPS)
4072                         {
4073                             tree->gtFlags &= ~GTF_REVERSE_OPS;
4074                         }
4075                         else
4076                         {
4077                             tree->gtFlags |= GTF_REVERSE_OPS;
4078                         }
4079
4080                         break;
4081                 }
4082             }
4083         }
4084
4085         /* Swap the level counts */
4086         if (tree->gtFlags & GTF_REVERSE_OPS)
4087         {
4088             unsigned tmpl;
4089
4090             tmpl  = level;
4091             level = lvl2;
4092             lvl2  = tmpl;
4093         }
4094
4095         /* Compute the sethi number for this binary operator */
4096
4097         if (level < 1)
4098         {
4099             level = lvl2;
4100         }
4101         else if (level == lvl2)
4102         {
4103             level += 1;
4104         }
4105
4106         goto DONE;
4107     }
4108
4109     /* See what kind of a special operator we have here */
4110
4111     switch (oper)
4112     {
4113         unsigned lvl2; // Scratch variable
4114
4115         case GT_CALL:
4116
4117             assert(tree->gtFlags & GTF_CALL);
4118
4119             level  = 0;
4120             costEx = 5;
4121             costSz = 2;
4122
4123             /* Evaluate the 'this' argument, if present */
4124
4125             if (tree->gtCall.gtCallObjp)
4126             {
4127                 GenTree* thisVal = tree->gtCall.gtCallObjp;
4128
4129                 lvl2 = gtSetEvalOrder(thisVal);
4130                 if (level < lvl2)
4131                 {
4132                     level = lvl2;
4133                 }
4134                 costEx += thisVal->gtCostEx;
4135                 costSz += thisVal->gtCostSz + 1;
4136             }
4137
4138             /* Evaluate the arguments, right to left */
4139
4140             if (tree->gtCall.gtCallArgs)
4141             {
4142                 const bool isListCallArgs = true;
4143                 const bool callArgsInRegs = false;
4144                 lvl2                      = gtSetListOrder(tree->gtCall.gtCallArgs, isListCallArgs, callArgsInRegs);
4145                 if (level < lvl2)
4146                 {
4147                     level = lvl2;
4148                 }
4149                 costEx += tree->gtCall.gtCallArgs->gtCostEx;
4150                 costSz += tree->gtCall.gtCallArgs->gtCostSz;
4151             }
4152
4153             /* Evaluate the temp register arguments list
4154              * This is a "hidden" list and its only purpose is to
4155              * extend the life of temps until we make the call */
4156
4157             if (tree->gtCall.gtCallLateArgs)
4158             {
4159                 const bool isListCallArgs = true;
4160                 const bool callArgsInRegs = true;
4161                 lvl2                      = gtSetListOrder(tree->gtCall.gtCallLateArgs, isListCallArgs, callArgsInRegs);
4162                 if (level < lvl2)
4163                 {
4164                     level = lvl2;
4165                 }
4166                 costEx += tree->gtCall.gtCallLateArgs->gtCostEx;
4167                 costSz += tree->gtCall.gtCallLateArgs->gtCostSz;
4168             }
4169
4170             if (tree->gtCall.gtCallType == CT_INDIRECT)
4171             {
4172                 // pinvoke-calli cookie is a constant, or constant indirection
4173                 assert(tree->gtCall.gtCallCookie == nullptr || tree->gtCall.gtCallCookie->gtOper == GT_CNS_INT ||
4174                        tree->gtCall.gtCallCookie->gtOper == GT_IND);
4175
4176                 GenTree* indirect = tree->gtCall.gtCallAddr;
4177
4178                 lvl2 = gtSetEvalOrder(indirect);
4179                 if (level < lvl2)
4180                 {
4181                     level = lvl2;
4182                 }
4183                 costEx += indirect->gtCostEx + IND_COST_EX;
4184                 costSz += indirect->gtCostSz;
4185             }
4186             else
4187             {
4188 #ifdef _TARGET_ARM_
4189                 if (tree->gtCall.IsVirtualStub())
4190                 {
4191                     // We generate movw/movt/ldr
4192                     costEx += (1 + IND_COST_EX);
4193                     costSz += 8;
4194                     if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT)
4195                     {
4196                         // Must use R12 for the ldr target -- REG_JUMP_THUNK_PARAM
4197                         costSz += 2;
4198                     }
4199                 }
4200                 else if (!opts.jitFlags->IsSet(JitFlags::JIT_FLAG_PREJIT))
4201                 {
4202                     costEx += 2;
4203                     costSz += 6;
4204                 }
4205                 costSz += 2;
4206 #endif
4207 #ifdef _TARGET_XARCH_
4208                 costSz += 3;
4209 #endif
4210             }
4211
4212             level += 1;
4213
4214             /* Virtual calls are a bit more expensive */
4215             if (tree->gtCall.IsVirtual())
4216             {
4217                 costEx += 2 * IND_COST_EX;
4218                 costSz += 2;
4219             }
4220
4221             level += 5;
4222             costEx += 3 * IND_COST_EX;
4223             break;
4224
4225         case GT_ARR_ELEM:
4226
4227             level  = gtSetEvalOrder(tree->gtArrElem.gtArrObj);
4228             costEx = tree->gtArrElem.gtArrObj->gtCostEx;
4229             costSz = tree->gtArrElem.gtArrObj->gtCostSz;
4230
4231             unsigned dim;
4232             for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
4233             {
4234                 lvl2 = gtSetEvalOrder(tree->gtArrElem.gtArrInds[dim]);
4235                 if (level < lvl2)
4236                 {
4237                     level = lvl2;
4238                 }
4239                 costEx += tree->gtArrElem.gtArrInds[dim]->gtCostEx;
4240                 costSz += tree->gtArrElem.gtArrInds[dim]->gtCostSz;
4241             }
4242
4243             level += tree->gtArrElem.gtArrRank;
4244             costEx += 2 + (tree->gtArrElem.gtArrRank * (IND_COST_EX + 1));
4245             costSz += 2 + (tree->gtArrElem.gtArrRank * 2);
4246             break;
4247
4248         case GT_ARR_OFFSET:
4249             level  = gtSetEvalOrder(tree->gtArrOffs.gtOffset);
4250             costEx = tree->gtArrOffs.gtOffset->gtCostEx;
4251             costSz = tree->gtArrOffs.gtOffset->gtCostSz;
4252             lvl2   = gtSetEvalOrder(tree->gtArrOffs.gtIndex);
4253             level  = max(level, lvl2);
4254             costEx += tree->gtArrOffs.gtIndex->gtCostEx;
4255             costSz += tree->gtArrOffs.gtIndex->gtCostSz;
4256             lvl2  = gtSetEvalOrder(tree->gtArrOffs.gtArrObj);
4257             level = max(level, lvl2);
4258             costEx += tree->gtArrOffs.gtArrObj->gtCostEx;
4259             costSz += tree->gtArrOffs.gtArrObj->gtCostSz;
4260             break;
4261
4262         case GT_CMPXCHG:
4263
4264             level  = gtSetEvalOrder(tree->gtCmpXchg.gtOpLocation);
4265             costSz = tree->gtCmpXchg.gtOpLocation->gtCostSz;
4266
4267             lvl2 = gtSetEvalOrder(tree->gtCmpXchg.gtOpValue);
4268             if (level < lvl2)
4269             {
4270                 level = lvl2;
4271             }
4272             costSz += tree->gtCmpXchg.gtOpValue->gtCostSz;
4273
4274             lvl2 = gtSetEvalOrder(tree->gtCmpXchg.gtOpComparand);
4275             if (level < lvl2)
4276             {
4277                 level = lvl2;
4278             }
4279             costSz += tree->gtCmpXchg.gtOpComparand->gtCostSz;
4280
4281             costEx = MAX_COST; // Seriously, what could be more expensive than lock cmpxchg?
4282             costSz += 5;       // size of lock cmpxchg [reg+C], reg
4283             break;
4284
4285         case GT_ARR_BOUNDS_CHECK:
4286 #ifdef FEATURE_SIMD
4287         case GT_SIMD_CHK:
4288 #endif // FEATURE_SIMD
4289 #ifdef FEATURE_HW_INTRINSICS
4290         case GT_HW_INTRINSIC_CHK:
4291 #endif // FEATURE_HW_INTRINSICS
4292
4293             costEx = 4; // cmp reg,reg and jae throw (not taken)
4294             costSz = 7; // jump to cold section
4295
4296             level = gtSetEvalOrder(tree->gtBoundsChk.gtIndex);
4297             costEx += tree->gtBoundsChk.gtIndex->gtCostEx;
4298             costSz += tree->gtBoundsChk.gtIndex->gtCostSz;
4299
4300             lvl2 = gtSetEvalOrder(tree->gtBoundsChk.gtArrLen);
4301             if (level < lvl2)
4302             {
4303                 level = lvl2;
4304             }
4305             costEx += tree->gtBoundsChk.gtArrLen->gtCostEx;
4306             costSz += tree->gtBoundsChk.gtArrLen->gtCostSz;
4307
4308             break;
4309
4310         case GT_STORE_DYN_BLK:
4311         case GT_DYN_BLK:
4312         {
4313             costEx = 0;
4314             costSz = 0;
4315             level  = 0;
4316             if (oper == GT_STORE_DYN_BLK)
4317             {
4318                 lvl2  = gtSetEvalOrder(tree->gtDynBlk.Data());
4319                 level = max(level, lvl2);
4320                 costEx += tree->gtDynBlk.Data()->gtCostEx;
4321                 costSz += tree->gtDynBlk.Data()->gtCostSz;
4322             }
4323             lvl2               = gtSetEvalOrder(tree->gtDynBlk.Addr());
4324             level              = max(level, lvl2);
4325             costEx             = tree->gtDynBlk.Addr()->gtCostEx;
4326             costSz             = tree->gtDynBlk.Addr()->gtCostSz;
4327             unsigned sizeLevel = gtSetEvalOrder(tree->gtDynBlk.gtDynamicSize);
4328
4329             // Determine whether the size node should be evaluated first.
4330             // We would like to do this if the sizeLevel is larger than the current level,
4331             // but we have to ensure that we obey ordering constraints.
4332             if (tree->AsDynBlk()->gtEvalSizeFirst != (level < sizeLevel))
4333             {
4334                 bool canChange = true;
4335
4336                 GenTree* sizeNode = tree->AsDynBlk()->gtDynamicSize;
4337                 GenTree* dst      = tree->AsDynBlk()->Addr();
4338                 GenTree* src      = tree->AsDynBlk()->Data();
4339
4340                 if (tree->AsDynBlk()->gtEvalSizeFirst)
4341                 {
4342                     canChange = gtCanSwapOrder(sizeNode, dst);
4343                     if (canChange && (src != nullptr))
4344                     {
4345                         canChange = gtCanSwapOrder(sizeNode, src);
4346                     }
4347                 }
4348                 else
4349                 {
4350                     canChange = gtCanSwapOrder(dst, sizeNode);
4351                     if (canChange && (src != nullptr))
4352                     {
4353                         gtCanSwapOrder(src, sizeNode);
4354                     }
4355                 }
4356                 if (canChange)
4357                 {
4358                     tree->AsDynBlk()->gtEvalSizeFirst = (level < sizeLevel);
4359                 }
4360             }
4361             level = max(level, sizeLevel);
4362             costEx += tree->gtDynBlk.gtDynamicSize->gtCostEx;
4363             costSz += tree->gtDynBlk.gtDynamicSize->gtCostSz;
4364         }
4365         break;
4366
4367         case GT_INDEX_ADDR:
4368             costEx = 6; // cmp reg,reg; jae throw; mov reg, [addrmode]  (not taken)
4369             costSz = 9; // jump to cold section
4370
4371             level = gtSetEvalOrder(tree->AsIndexAddr()->Index());
4372             costEx += tree->AsIndexAddr()->Index()->gtCostEx;
4373             costSz += tree->AsIndexAddr()->Index()->gtCostSz;
4374
4375             lvl2 = gtSetEvalOrder(tree->AsIndexAddr()->Arr());
4376             if (level < lvl2)
4377             {
4378                 level = lvl2;
4379             }
4380             costEx += tree->AsIndexAddr()->Arr()->gtCostEx;
4381             costSz += tree->AsIndexAddr()->Arr()->gtCostSz;
4382             break;
4383
4384         default:
4385 #ifdef DEBUG
4386             if (verbose)
4387             {
4388                 printf("unexpected operator in this tree:\n");
4389                 gtDispTree(tree);
4390             }
4391 #endif
4392             NO_WAY("unexpected operator");
4393     }
4394
4395 DONE:
4396
4397 #ifdef FEATURE_HW_INTRINSICS
4398     if ((oper == GT_HWIntrinsic) && (tree->gtGetOp1() == nullptr))
4399     {
4400         // We can have nullary HWIntrinsic nodes, and we must have non-zero cost.
4401         costEx = 1;
4402         costSz = 1;
4403     }
4404 #endif // FEATURE_HW_INTRINSICS
4405
4406     // Some path through this function must have set the costs.
4407     assert(costEx != -1);
4408     assert(costSz != -1);
4409
4410     tree->SetCosts(costEx, costSz);
4411
4412     return level;
4413 }
4414 #ifdef _PREFAST_
4415 #pragma warning(pop)
4416 #endif
4417
4418 /*****************************************************************************
4419  *
4420  *  If the given tree is an integer constant that can be used
4421  *  in a scaled index address mode as a multiplier (e.g. "[4*index]"), then return
4422  *  the scale factor: 2, 4, or 8. Otherwise, return 0. Note that we never return 1,
4423  *  to match the behavior of GetScaleIndexShf().
4424  */
4425
4426 unsigned GenTree::GetScaleIndexMul()
4427 {
4428     if (IsCnsIntOrI() && jitIsScaleIndexMul(gtIntConCommon.IconValue()) && gtIntConCommon.IconValue() != 1)
4429     {
4430         return (unsigned)gtIntConCommon.IconValue();
4431     }
4432
4433     return 0;
4434 }
4435
4436 /*****************************************************************************
4437  *
4438  *  If the given tree is the right-hand side of a left shift (that is,
4439  *  'y' in the tree 'x' << 'y'), and it is an integer constant that can be used
4440  *  in a scaled index address mode as a multiplier (e.g. "[4*index]"), then return
4441  *  the scale factor: 2, 4, or 8. Otherwise, return 0.
4442  */
4443
4444 unsigned GenTree::GetScaleIndexShf()
4445 {
4446     if (IsCnsIntOrI() && jitIsScaleIndexShift(gtIntConCommon.IconValue()))
4447     {
4448         return (unsigned)(1 << gtIntConCommon.IconValue());
4449     }
4450
4451     return 0;
4452 }
4453
4454 /*****************************************************************************
4455  *
4456  *  If the given tree is a scaled index (i.e. "op * 4" or "op << 2"), returns
4457  *  the multiplier: 2, 4, or 8; otherwise returns 0. Note that "1" is never
4458  *  returned.
4459  */
4460
4461 unsigned GenTree::GetScaledIndex()
4462 {
4463     // with (!opts.OptEnabled(CLFLG_CONSTANTFOLD) we can have
4464     //   CNS_INT * CNS_INT
4465     //
4466     if (gtOp.gtOp1->IsCnsIntOrI())
4467     {
4468         return 0;
4469     }
4470
4471     switch (gtOper)
4472     {
4473         case GT_MUL:
4474             return gtOp.gtOp2->GetScaleIndexMul();
4475
4476         case GT_LSH:
4477             return gtOp.gtOp2->GetScaleIndexShf();
4478
4479         default:
4480             assert(!"GenTree::GetScaledIndex() called with illegal gtOper");
4481             break;
4482     }
4483
4484     return 0;
4485 }
4486
4487 /*****************************************************************************
4488  *
4489  *  Returns true if "addr" is a GT_ADD node, at least one of whose arguments is an integer (<= 32 bit)
4490  *  constant.  If it returns true, it sets "*offset" to (one of the) constant value(s), and
4491  *  "*addr" to the other argument.
4492  */
4493
4494 bool GenTree::IsAddWithI32Const(GenTree** addr, int* offset)
4495 {
4496     if (OperGet() == GT_ADD)
4497     {
4498         if (gtOp.gtOp1->IsIntCnsFitsInI32())
4499         {
4500             *offset = (int)gtOp.gtOp1->gtIntCon.gtIconVal;
4501             *addr   = gtOp.gtOp2;
4502             return true;
4503         }
4504         else if (gtOp.gtOp2->IsIntCnsFitsInI32())
4505         {
4506             *offset = (int)gtOp.gtOp2->gtIntCon.gtIconVal;
4507             *addr   = gtOp.gtOp1;
4508             return true;
4509         }
4510     }
4511     // Otherwise...
4512     return false;
4513 }
4514
4515 //------------------------------------------------------------------------
4516 // gtGetChildPointer: If 'parent' is the parent of this node, return the pointer
4517 //    to the child node so that it can be modified; otherwise, return nullptr.
4518 //
4519 // Arguments:
4520 //    parent - The possible parent of this node
4521 //
4522 // Return Value:
4523 //    If "child" is a child of "parent", returns a pointer to the child node in the parent
4524 //    (i.e. a pointer to a GenTree pointer).
4525 //    Otherwise, returns nullptr.
4526 //
4527 // Assumptions:
4528 //    'parent' must be non-null
4529 //
4530 // Notes:
4531 //    When FEATURE_MULTIREG_ARGS is defined we can get here with GT_OBJ tree.
4532 //    This happens when we have a struct that is passed in multiple registers.
4533 //
4534 //    Also note that when UNIX_AMD64_ABI is defined the GT_LDOBJ
4535 //    later gets converted to a GT_FIELD_LIST with two GT_LCL_FLDs in Lower/LowerXArch.
4536 //
4537
4538 GenTree** GenTree::gtGetChildPointer(GenTree* parent) const
4539
4540 {
4541     switch (parent->OperGet())
4542     {
4543         default:
4544             if (!parent->OperIsSimple())
4545             {
4546                 return nullptr;
4547             }
4548             if (this == parent->gtOp.gtOp1)
4549             {
4550                 return &(parent->gtOp.gtOp1);
4551             }
4552             if (this == parent->gtOp.gtOp2)
4553             {
4554                 return &(parent->gtOp.gtOp2);
4555             }
4556             break;
4557
4558         case GT_CMPXCHG:
4559             if (this == parent->gtCmpXchg.gtOpLocation)
4560             {
4561                 return &(parent->gtCmpXchg.gtOpLocation);
4562             }
4563             if (this == parent->gtCmpXchg.gtOpValue)
4564             {
4565                 return &(parent->gtCmpXchg.gtOpValue);
4566             }
4567             if (this == parent->gtCmpXchg.gtOpComparand)
4568             {
4569                 return &(parent->gtCmpXchg.gtOpComparand);
4570             }
4571             break;
4572
4573         case GT_ARR_BOUNDS_CHECK:
4574 #ifdef FEATURE_SIMD
4575         case GT_SIMD_CHK:
4576 #endif // FEATURE_SIMD
4577 #ifdef FEATURE_HW_INTRINSICS
4578         case GT_HW_INTRINSIC_CHK:
4579 #endif // FEATURE_HW_INTRINSICS
4580             if (this == parent->gtBoundsChk.gtIndex)
4581             {
4582                 return &(parent->gtBoundsChk.gtIndex);
4583             }
4584             if (this == parent->gtBoundsChk.gtArrLen)
4585             {
4586                 return &(parent->gtBoundsChk.gtArrLen);
4587             }
4588             break;
4589
4590         case GT_ARR_ELEM:
4591             if (this == parent->gtArrElem.gtArrObj)
4592             {
4593                 return &(parent->gtArrElem.gtArrObj);
4594             }
4595             for (int i = 0; i < GT_ARR_MAX_RANK; i++)
4596             {
4597                 if (this == parent->gtArrElem.gtArrInds[i])
4598                 {
4599                     return &(parent->gtArrElem.gtArrInds[i]);
4600                 }
4601             }
4602             break;
4603
4604         case GT_ARR_OFFSET:
4605             if (this == parent->gtArrOffs.gtOffset)
4606             {
4607                 return &(parent->gtArrOffs.gtOffset);
4608             }
4609             if (this == parent->gtArrOffs.gtIndex)
4610             {
4611                 return &(parent->gtArrOffs.gtIndex);
4612             }
4613             if (this == parent->gtArrOffs.gtArrObj)
4614             {
4615                 return &(parent->gtArrOffs.gtArrObj);
4616             }
4617             break;
4618
4619         case GT_STORE_DYN_BLK:
4620         case GT_DYN_BLK:
4621             if (this == parent->gtDynBlk.gtOp1)
4622             {
4623                 return &(parent->gtDynBlk.gtOp1);
4624             }
4625             if (this == parent->gtDynBlk.gtOp2)
4626             {
4627                 return &(parent->gtDynBlk.gtOp2);
4628             }
4629             if (this == parent->gtDynBlk.gtDynamicSize)
4630             {
4631                 return &(parent->gtDynBlk.gtDynamicSize);
4632             }
4633             break;
4634
4635         case GT_FIELD:
4636             if (this == parent->AsField()->gtFldObj)
4637             {
4638                 return &(parent->AsField()->gtFldObj);
4639             }
4640             break;
4641
4642         case GT_RET_EXPR:
4643             if (this == parent->gtRetExpr.gtInlineCandidate)
4644             {
4645                 return &(parent->gtRetExpr.gtInlineCandidate);
4646             }
4647             break;
4648
4649         case GT_CALL:
4650         {
4651             GenTreeCall* call = parent->AsCall();
4652
4653             if (this == call->gtCallObjp)
4654             {
4655                 return &(call->gtCallObjp);
4656             }
4657             if (this == call->gtCallArgs)
4658             {
4659                 return reinterpret_cast<GenTree**>(&(call->gtCallArgs));
4660             }
4661             if (this == call->gtCallLateArgs)
4662             {
4663                 return reinterpret_cast<GenTree**>(&(call->gtCallLateArgs));
4664             }
4665             if (this == call->gtControlExpr)
4666             {
4667                 return &(call->gtControlExpr);
4668             }
4669             if (call->gtCallType == CT_INDIRECT)
4670             {
4671                 if (this == call->gtCallCookie)
4672                 {
4673                     return &(call->gtCallCookie);
4674                 }
4675                 if (this == call->gtCallAddr)
4676                 {
4677                     return &(call->gtCallAddr);
4678                 }
4679             }
4680         }
4681         break;
4682
4683         case GT_STMT:
4684             noway_assert(!"Illegal node for gtGetChildPointer()");
4685             unreached();
4686     }
4687
4688     return nullptr;
4689 }
4690
//------------------------------------------------------------------------
// TryGetUse: If 'def' is a direct operand of this node, capture the use edge
//    (the field in this node that points to 'def') so it can be modified.
//
// Arguments:
//    def - The operand to search for; must be non-null
//    use - [out] Set to the address of the field that points to 'def', if found
//
// Return Value:
//    True if 'def' is an operand of this node (and '*use' was written);
//    false otherwise.
//
bool GenTree::TryGetUse(GenTree* def, GenTree*** use)
{
    assert(def != nullptr);
    assert(use != nullptr);

    switch (OperGet())
    {
        // Leaf nodes: no operands, so 'def' can never be a use.
        // Note that GT_RET_EXPR is treated as a leaf here - its inline
        // candidate is not considered a use edge.
        case GT_LCL_VAR:
        case GT_LCL_FLD:
        case GT_LCL_VAR_ADDR:
        case GT_LCL_FLD_ADDR:
        case GT_CATCH_ARG:
        case GT_LABEL:
        case GT_FTN_ADDR:
        case GT_RET_EXPR:
        case GT_CNS_INT:
        case GT_CNS_LNG:
        case GT_CNS_DBL:
        case GT_CNS_STR:
        case GT_MEMORYBARRIER:
        case GT_JMP:
        case GT_JCC:
        case GT_SETCC:
        case GT_NO_OP:
        case GT_START_NONGC:
        case GT_START_PREEMPTGC:
        case GT_PROF_HOOK:
#if !FEATURE_EH_FUNCLETS
        case GT_END_LFIN:
#endif // !FEATURE_EH_FUNCLETS
        case GT_PHI_ARG:
        case GT_JMPTABLE:
        case GT_CLS_VAR:
        case GT_CLS_VAR_ADDR:
        case GT_ARGPLACE:
        case GT_PHYSREG:
        case GT_EMITNOP:
        case GT_PINVOKE_PROLOG:
        case GT_PINVOKE_EPILOG:
        case GT_IL_OFFSET:
            return false;

        // Standard unary operators: the only possible use edge is gtOp1.
        case GT_STORE_LCL_VAR:
        case GT_STORE_LCL_FLD:
        case GT_NOT:
        case GT_NEG:
        case GT_COPY:
        case GT_RELOAD:
        case GT_ARR_LENGTH:
        case GT_CAST:
        case GT_BITCAST:
        case GT_CKFINITE:
        case GT_LCLHEAP:
        case GT_ADDR:
        case GT_IND:
        case GT_OBJ:
        case GT_BLK:
        case GT_BOX:
        case GT_ALLOCOBJ:
        case GT_RUNTIMELOOKUP:
        case GT_INIT_VAL:
        case GT_JTRUE:
        case GT_SWITCH:
        case GT_NULLCHECK:
        case GT_PUTARG_REG:
        case GT_PUTARG_STK:
        case GT_RETURNTRAP:
        case GT_NOP:
        case GT_RETURN:
        case GT_RETFILT:
        case GT_BSWAP:
        case GT_BSWAP16:
            if (def == this->AsUnOp()->gtOp1)
            {
                *use = &this->AsUnOp()->gtOp1;
                return true;
            }
            return false;

        // Variadic nodes: operands live in an arg list chained off gtOp1.
        case GT_PHI:
            assert(this->AsUnOp()->gtOp1 != nullptr);
            return this->AsUnOp()->gtOp1->TryGetUseList(def, use);

        case GT_FIELD_LIST:
            return TryGetUseList(def, use);

#if FEATURE_ARG_SPLIT
        case GT_PUTARG_SPLIT:
            // May wrap either a field list or a single operand.
            if (this->AsUnOp()->gtOp1->gtOper == GT_FIELD_LIST)
            {
                return this->AsUnOp()->gtOp1->TryGetUseList(def, use);
            }
            if (def == this->AsUnOp()->gtOp1)
            {
                *use = &this->AsUnOp()->gtOp1;
                return true;
            }
            return false;
#endif // FEATURE_ARG_SPLIT

#ifdef FEATURE_SIMD
        case GT_SIMD:
            // SIMDIntrinsicInitN carries its operands as a list; others are binary.
            if (this->AsSIMD()->gtSIMDIntrinsicID == SIMDIntrinsicInitN)
            {
                assert(this->AsSIMD()->gtOp1 != nullptr);
                return this->AsSIMD()->gtOp1->TryGetUseList(def, use);
            }

            return TryGetUseBinOp(def, use);
#endif // FEATURE_SIMD

#ifdef FEATURE_HW_INTRINSICS
        case GT_HWIntrinsic:
            // HW intrinsics with more than two operands use a list in gtOp1.
            if ((this->AsHWIntrinsic()->gtOp1 != nullptr) && this->AsHWIntrinsic()->gtOp1->OperIsList())
            {
                return this->AsHWIntrinsic()->gtOp1->TryGetUseList(def, use);
            }

            return TryGetUseBinOp(def, use);
#endif // FEATURE_HW_INTRINSICS

        // Special nodes: each mirrors the operand layout used in gtGetChildPointer.
        case GT_CMPXCHG:
        {
            GenTreeCmpXchg* const cmpXchg = this->AsCmpXchg();
            if (def == cmpXchg->gtOpLocation)
            {
                *use = &cmpXchg->gtOpLocation;
                return true;
            }
            if (def == cmpXchg->gtOpValue)
            {
                *use = &cmpXchg->gtOpValue;
                return true;
            }
            if (def == cmpXchg->gtOpComparand)
            {
                *use = &cmpXchg->gtOpComparand;
                return true;
            }
            return false;
        }

        case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
        case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
        {
            GenTreeBoundsChk* const boundsChk = this->AsBoundsChk();
            if (def == boundsChk->gtIndex)
            {
                *use = &boundsChk->gtIndex;
                return true;
            }
            if (def == boundsChk->gtArrLen)
            {
                *use = &boundsChk->gtArrLen;
                return true;
            }
            return false;
        }

        case GT_FIELD:
            if (def == this->AsField()->gtFldObj)
            {
                *use = &this->AsField()->gtFldObj;
                return true;
            }
            return false;

        case GT_STMT:
            if (def == this->AsStmt()->gtStmtExpr)
            {
                *use = &this->AsStmt()->gtStmtExpr;
                return true;
            }
            return false;

        case GT_ARR_ELEM:
        {
            GenTreeArrElem* const arrElem = this->AsArrElem();
            if (def == arrElem->gtArrObj)
            {
                *use = &arrElem->gtArrObj;
                return true;
            }
            for (unsigned i = 0; i < arrElem->gtArrRank; i++)
            {
                if (def == arrElem->gtArrInds[i])
                {
                    *use = &arrElem->gtArrInds[i];
                    return true;
                }
            }
            return false;
        }

        case GT_ARR_OFFSET:
        {
            GenTreeArrOffs* const arrOffs = this->AsArrOffs();
            if (def == arrOffs->gtOffset)
            {
                *use = &arrOffs->gtOffset;
                return true;
            }
            if (def == arrOffs->gtIndex)
            {
                *use = &arrOffs->gtIndex;
                return true;
            }
            if (def == arrOffs->gtArrObj)
            {
                *use = &arrOffs->gtArrObj;
                return true;
            }
            return false;
        }

        case GT_DYN_BLK:
        {
            // Note: GT_DYN_BLK has no gtOp2 use, unlike GT_STORE_DYN_BLK below.
            GenTreeDynBlk* const dynBlock = this->AsDynBlk();
            if (def == dynBlock->gtOp1)
            {
                *use = &dynBlock->gtOp1;
                return true;
            }
            if (def == dynBlock->gtDynamicSize)
            {
                *use = &dynBlock->gtDynamicSize;
                return true;
            }
            return false;
        }

        case GT_STORE_DYN_BLK:
        {
            GenTreeDynBlk* const dynBlock = this->AsDynBlk();
            if (def == dynBlock->gtOp1)
            {
                *use = &dynBlock->gtOp1;
                return true;
            }
            if (def == dynBlock->gtOp2)
            {
                *use = &dynBlock->gtOp2;
                return true;
            }
            if (def == dynBlock->gtDynamicSize)
            {
                *use = &dynBlock->gtDynamicSize;
                return true;
            }
            return false;
        }

        case GT_CALL:
        {
            GenTreeCall* const call = this->AsCall();
            if (def == call->gtCallObjp)
            {
                *use = &call->gtCallObjp;
                return true;
            }
            if (def == call->gtControlExpr)
            {
                *use = &call->gtControlExpr;
                return true;
            }
            // Cookie and target address only exist for indirect calls.
            if (call->gtCallType == CT_INDIRECT)
            {
                if (def == call->gtCallCookie)
                {
                    *use = &call->gtCallCookie;
                    return true;
                }
                if (def == call->gtCallAddr)
                {
                    *use = &call->gtCallAddr;
                    return true;
                }
            }
            if ((call->gtCallArgs != nullptr) && call->gtCallArgs->TryGetUseList(def, use))
            {
                return true;
            }

            return (call->gtCallLateArgs != nullptr) && call->gtCallLateArgs->TryGetUseList(def, use);
        }

        // Binary nodes
        default:
            assert(this->OperIsBinary());
            return TryGetUseBinOp(def, use);
    }
}
4992
4993 bool GenTree::TryGetUseList(GenTree* def, GenTree*** use)
4994 {
4995     assert(def != nullptr);
4996     assert(use != nullptr);
4997
4998     for (GenTreeArgList* node = this->AsArgList(); node != nullptr; node = node->Rest())
4999     {
5000         if (def == node->gtOp1)
5001         {
5002             *use = &node->gtOp1;
5003             return true;
5004         }
5005     }
5006     return false;
5007 }
5008
5009 bool GenTree::TryGetUseBinOp(GenTree* def, GenTree*** use)
5010 {
5011     assert(def != nullptr);
5012     assert(use != nullptr);
5013     assert(this->OperIsBinary());
5014
5015     GenTreeOp* const binOp = this->AsOp();
5016     if (def == binOp->gtOp1)
5017     {
5018         *use = &binOp->gtOp1;
5019         return true;
5020     }
5021     if (def == binOp->gtOp2)
5022     {
5023         *use = &binOp->gtOp2;
5024         return true;
5025     }
5026     return false;
5027 }
5028
5029 //------------------------------------------------------------------------
5030 // GenTree::ReplaceOperand:
5031 //    Replace a given operand to this node with a new operand. If the
5032 //    current node is a call node, this will also udpate the call
5033 //    argument table if necessary.
5034 //
5035 // Arguments:
5036 //    useEdge - the use edge that points to the operand to be replaced.
5037 //    replacement - the replacement node.
5038 //
5039 void GenTree::ReplaceOperand(GenTree** useEdge, GenTree* replacement)
5040 {
5041     assert(useEdge != nullptr);
5042     assert(replacement != nullptr);
5043     assert(TryGetUse(*useEdge, &useEdge));
5044
5045     if (OperGet() == GT_CALL)
5046     {
5047         AsCall()->ReplaceCallOperand(useEdge, replacement);
5048     }
5049     else
5050     {
5051         *useEdge = replacement;
5052     }
5053 }
5054
5055 //------------------------------------------------------------------------
5056 // gtGetParent: Get the parent of this node, and optionally capture the
5057 //    pointer to the child so that it can be modified.
5058 //
5059 // Arguments:
5060
5061 //    parentChildPointer - A pointer to a GenTree** (yes, that's three
5062 //                         levels, i.e. GenTree ***), which if non-null,
5063 //                         will be set to point to the field in the parent
5064 //                         that points to this node.
5065 //
5066 //    Return value       - The parent of this node.
5067 //
5068 //    Notes:
5069 //
5070 //    This requires that the execution order must be defined (i.e. gtSetEvalOrder() has been called).
5071 //    To enable the child to be replaced, it accepts an argument, parentChildPointer that, if non-null,
5072 //    will be set to point to the child pointer in the parent that points to this node.
5073
5074 GenTree* GenTree::gtGetParent(GenTree*** parentChildPtrPtr) const
5075 {
5076     // Find the parent node; it must be after this node in the execution order.
5077     GenTree** parentChildPtr = nullptr;
5078     GenTree*  parent;
5079     for (parent = gtNext; parent != nullptr; parent = parent->gtNext)
5080     {
5081         parentChildPtr = gtGetChildPointer(parent);
5082         if (parentChildPtr != nullptr)
5083         {
5084             break;
5085         }
5086     }
5087     if (parentChildPtrPtr != nullptr)
5088     {
5089         *parentChildPtrPtr = parentChildPtr;
5090     }
5091     return parent;
5092 }
5093
5094 //------------------------------------------------------------------------------
5095 // OperRequiresAsgFlag : Check whether the operation requires GTF_ASG flag regardless
5096 //                       of the children's flags.
5097 //
5098
5099 bool GenTree::OperRequiresAsgFlag()
5100 {
5101     if (OperIs(GT_ASG) || OperIs(GT_XADD, GT_XCHG, GT_LOCKADD, GT_CMPXCHG, GT_MEMORYBARRIER))
5102     {
5103         return true;
5104     }
5105 #ifdef FEATURE_HW_INTRINSICS
5106     if (gtOper == GT_HWIntrinsic)
5107     {
5108         GenTreeHWIntrinsic* hwIntrinsicNode = this->AsHWIntrinsic();
5109         if (hwIntrinsicNode->OperIsMemoryStore())
5110         {
5111             // A MemoryStore operation is an assignment
5112             return true;
5113         }
5114     }
5115 #endif // FEATURE_HW_INTRINSICS
5116     return false;
5117 }
5118
5119 //------------------------------------------------------------------------------
5120 // OperRequiresCallFlag : Check whether the operation requires GTF_CALL flag regardless
5121 //                        of the children's flags.
5122 //
5123
5124 bool GenTree::OperRequiresCallFlag(Compiler* comp)
5125 {
5126     switch (gtOper)
5127     {
5128         case GT_CALL:
5129             return true;
5130
5131         case GT_INTRINSIC:
5132             return comp->IsIntrinsicImplementedByUserCall(this->AsIntrinsic()->gtIntrinsicId);
5133
5134 #if FEATURE_FIXED_OUT_ARGS && !defined(_TARGET_64BIT_)
5135         case GT_LSH:
5136         case GT_RSH:
5137         case GT_RSZ:
5138
5139             // Variable shifts of a long end up being helper calls, so mark the tree as such in morph.
5140             // This is potentially too conservative, since they'll get treated as having side effects.
5141             // It is important to mark them as calls so if they are part of an argument list,
5142             // they will get sorted and processed properly (for example, it is important to handle
5143             // all nested calls before putting struct arguments in the argument registers). We
5144             // could mark the trees just before argument processing, but it would require a full
5145             // tree walk of the argument tree, so we just do it when morphing, instead, even though we'll
5146             // mark non-argument trees (that will still get converted to calls, anyway).
5147             return (this->TypeGet() == TYP_LONG) && (gtGetOp2()->OperGet() != GT_CNS_INT);
5148 #endif // FEATURE_FIXED_OUT_ARGS && !_TARGET_64BIT_
5149
5150         default:
5151             return false;
5152     }
5153 }
5154
5155 //------------------------------------------------------------------------------
5156 // OperIsImplicitIndir : Check whether the operation contains an implicit
5157 //                       indirection.
5158 // Arguments:
5159 //    this      -  a GenTree node
5160 //
5161 // Return Value:
5162 //    True if the given node contains an implicit indirection
5163 //
5164 // Note that for the GT_HWIntrinsic node we have to examine the
5165 // details of the node to determine its result.
5166 //
5167
5168 bool GenTree::OperIsImplicitIndir() const
5169 {
5170     switch (gtOper)
5171     {
5172         case GT_LOCKADD:
5173         case GT_XADD:
5174         case GT_XCHG:
5175         case GT_CMPXCHG:
5176         case GT_BLK:
5177         case GT_OBJ:
5178         case GT_DYN_BLK:
5179         case GT_STORE_BLK:
5180         case GT_STORE_OBJ:
5181         case GT_STORE_DYN_BLK:
5182         case GT_BOX:
5183         case GT_ARR_INDEX:
5184         case GT_ARR_ELEM:
5185         case GT_ARR_OFFSET:
5186             return true;
5187 #ifdef FEATURE_HW_INTRINSICS
5188         case GT_HWIntrinsic:
5189         {
5190             GenTreeHWIntrinsic* hwIntrinsicNode = (const_cast<GenTree*>(this))->AsHWIntrinsic();
5191             return hwIntrinsicNode->OperIsMemoryLoadOrStore();
5192         }
5193 #endif // FEATURE_HW_INTRINSICS
5194         default:
5195             return false;
5196     }
5197 }
5198
5199 //------------------------------------------------------------------------------
5200 // OperMayThrow : Check whether the operation may throw.
5201 //
5202 //
5203 // Arguments:
5204 //    comp      -  Compiler instance
5205 //
5206 // Return Value:
5207 //    True if the given operator may cause an exception
5208
5209 bool GenTree::OperMayThrow(Compiler* comp)
5210 {
5211     GenTree* op;
5212
5213     switch (gtOper)
5214     {
5215         case GT_MOD:
5216         case GT_DIV:
5217         case GT_UMOD:
5218         case GT_UDIV:
5219
5220             /* Division with a non-zero, non-minus-one constant does not throw an exception */
5221
5222             op = gtOp.gtOp2;
5223
5224             if (varTypeIsFloating(op->TypeGet()))
5225             {
5226                 return false; // Floating point division does not throw.
5227             }
5228
5229             // For integers only division by 0 or by -1 can throw
5230             if (op->IsIntegralConst() && !op->IsIntegralConst(0) && !op->IsIntegralConst(-1))
5231             {
5232                 return false;
5233             }
5234             return true;
5235
5236         case GT_INTRINSIC:
5237             // If this is an intrinsic that represents the object.GetType(), it can throw an NullReferenceException.
5238             // Report it as may throw.
5239             // Note: Some of the rest of the existing intrinsics could potentially throw an exception (for example
5240             //       the array and string element access ones). They are handled differently than the GetType intrinsic
5241             //       and are not marked with GTF_EXCEPT. If these are revisited at some point to be marked as
5242             //       GTF_EXCEPT,
5243             //       the code below might need to be specialized to handle them properly.
5244             if ((this->gtFlags & GTF_EXCEPT) != 0)
5245             {
5246                 return true;
5247             }
5248
5249             break;
5250
5251         case GT_CALL:
5252
5253             CorInfoHelpFunc helper;
5254             helper = comp->eeGetHelperNum(this->AsCall()->gtCallMethHnd);
5255             return ((helper == CORINFO_HELP_UNDEF) || !comp->s_helperCallProperties.NoThrow(helper));
5256
5257         case GT_IND:
5258         case GT_BLK:
5259         case GT_OBJ:
5260         case GT_DYN_BLK:
5261         case GT_STORE_BLK:
5262         case GT_NULLCHECK:
5263             return (((this->gtFlags & GTF_IND_NONFAULTING) == 0) && comp->fgAddrCouldBeNull(this->AsIndir()->Addr()));
5264
5265         case GT_ARR_LENGTH:
5266             return (((this->gtFlags & GTF_IND_NONFAULTING) == 0) &&
5267                     comp->fgAddrCouldBeNull(this->AsArrLen()->ArrRef()));
5268
5269         case GT_ARR_ELEM:
5270             return comp->fgAddrCouldBeNull(this->gtArrElem.gtArrObj);
5271
5272         case GT_ARR_BOUNDS_CHECK:
5273         case GT_ARR_INDEX:
5274         case GT_ARR_OFFSET:
5275         case GT_LCLHEAP:
5276         case GT_CKFINITE:
5277 #ifdef FEATURE_SIMD
5278         case GT_SIMD_CHK:
5279 #endif // FEATURE_SIMD
5280 #ifdef FEATURE_HW_INTRINSICS
5281         case GT_HW_INTRINSIC_CHK:
5282 #endif // FEATURE_HW_INTRINSICS
5283         case GT_INDEX_ADDR:
5284             return true;
5285
5286 #ifdef FEATURE_HW_INTRINSICS
5287         case GT_HWIntrinsic:
5288         {
5289             GenTreeHWIntrinsic* hwIntrinsicNode = this->AsHWIntrinsic();
5290             assert(hwIntrinsicNode != nullptr);
5291             if (hwIntrinsicNode->OperIsMemoryLoadOrStore())
5292             {
5293                 // This operation contains an implicit indirection
5294                 //   it could throw a null reference exception.
5295                 //
5296                 return true;
5297             }
5298         }
5299 #endif // FEATURE_HW_INTRINSICS
5300
5301         default:
5302             break;
5303     }
5304
5305     /* Overflow arithmetic operations also throw exceptions */
5306
5307     if (gtOverflowEx())
5308     {
5309         return true;
5310     }
5311
5312     return false;
5313 }
5314
5315 #if DEBUGGABLE_GENTREE
5316 // static
5317 GenTree::VtablePtr GenTree::s_vtablesForOpers[] = {nullptr};
5318 GenTree::VtablePtr GenTree::s_vtableForOp       = nullptr;
5319
//------------------------------------------------------------------------
// GetVtableForOper: Return the C++ vtable pointer that debug builds install
//    on GenTree nodes so each node reports the most-derived GenTree subtype
//    for its oper.
//
// Arguments:
//    oper - the operator whose node vtable is wanted
//
// Return Value:
//    The vtable pointer of the most-derived GenTree subtype used for 'oper'.
//
// Notes:
//    Results are memoized in s_vtablesForOpers. Simple unary/binary opers
//    (the 'default' case) all share a single GenTreeOp vtable, cached
//    separately in s_vtableForOp.
//
GenTree::VtablePtr GenTree::GetVtableForOper(genTreeOps oper)
{
    noway_assert(oper < GT_COUNT);

    // First, check a cache.

    if (s_vtablesForOpers[oper] != nullptr)
    {
        return s_vtablesForOpers[oper];
    }

    // Otherwise, look up the correct vtable entry. Note that we want the most derived GenTree subtype
    // for an oper. E.g., GT_LCL_VAR is defined in GTSTRUCT_3 as GenTreeLclVar and in GTSTRUCT_N as
    // GenTreeLclVarCommon. We want the GenTreeLclVar vtable, since nothing should actually be
    // instantiated as a GenTreeLclVarCommon.

    VtablePtr res = nullptr;
    switch (oper)
    {

// clang-format off

// Each GTSTRUCT_n expansion constructs a stack instance of the corresponding
// GenTree subtype and reads the vtable pointer out of its object header.
#define GTSTRUCT_0(nm, tag)                             /*handle explicitly*/
#define GTSTRUCT_1(nm, tag)                             \
        case tag:                                       \
        {                                               \
            GenTree##nm gt;                             \
            res = *reinterpret_cast<VtablePtr*>(&gt);   \
        }                                               \
        break;
#define GTSTRUCT_2(nm, tag, tag2)                       \
        case tag:                                       \
        case tag2:                                      \
        {                                               \
            GenTree##nm gt;                             \
            res = *reinterpret_cast<VtablePtr*>(&gt);   \
        }                                               \
        break;
#define GTSTRUCT_3(nm, tag, tag2, tag3)                 \
        case tag:                                       \
        case tag2:                                      \
        case tag3:                                      \
        {                                               \
            GenTree##nm gt;                             \
            res = *reinterpret_cast<VtablePtr*>(&gt);   \
        }                                               \
        break;
#define GTSTRUCT_4(nm, tag, tag2, tag3, tag4)           \
        case tag:                                       \
        case tag2:                                      \
        case tag3:                                      \
        case tag4:                                      \
        {                                               \
            GenTree##nm gt;                             \
            res = *reinterpret_cast<VtablePtr*>(&gt);   \
        }                                               \
        break;
#define GTSTRUCT_N(nm, ...)                             /*handle explicitly*/
#define GTSTRUCT_2_SPECIAL(nm, tag, tag2)               /*handle explicitly*/
#define GTSTRUCT_3_SPECIAL(nm, tag, tag2, tag3)         /*handle explicitly*/
#include "gtstructs.h"

        // clang-format on

        // Handle the special cases.
        // The following opers are in GTSTRUCT_N but no other place (namely, no subtypes).

        case GT_STORE_BLK:
        case GT_BLK:
        {
            GenTreeBlk gt;
            res = *reinterpret_cast<VtablePtr*>(&gt);
        }
        break;

        case GT_IND:
        case GT_NULLCHECK:
        {
            GenTreeIndir gt;
            res = *reinterpret_cast<VtablePtr*>(&gt);
        }
        break;

        // Handle GT_LIST (but not GT_FIELD_LIST, which is also in a GTSTRUCT_1).

        case GT_LIST:
        {
            GenTreeArgList gt;
            res = *reinterpret_cast<VtablePtr*>(&gt);
        }
        break;

        // We don't need to handle GTSTRUCT_N for LclVarCommon, since all those allowed opers are specified
        // in their proper subtype. Similarly for GenTreeIndir.

        default:
        {
            // Should be unary or binary op.
            if (s_vtableForOp == nullptr)
            {
                unsigned opKind = OperKind(oper);
                assert(!IsExOp(opKind));
                assert(OperIsSimple(oper) || OperIsLeaf(oper));
                // Need to provide non-null operands.
                GenTreeIntCon dummyOp(TYP_INT, 0);
                GenTreeOp     gt(oper, TYP_INT, &dummyOp, ((opKind & GTK_UNOP) ? nullptr : &dummyOp));
                s_vtableForOp = *reinterpret_cast<VtablePtr*>(&gt);
            }
            res = s_vtableForOp;
            break;
        }
    }
    s_vtablesForOpers[oper] = res;
    return res;
}
5435
5436 void GenTree::SetVtableForOper(genTreeOps oper)
5437 {
5438     *reinterpret_cast<VtablePtr*>(this) = GetVtableForOper(oper);
5439 }
5440 #endif // DEBUGGABLE_GENTREE
5441
5442 GenTree* Compiler::gtNewOperNode(genTreeOps oper, var_types type, GenTree* op1, GenTree* op2)
5443 {
5444     assert(op1 != nullptr);
5445     assert(op2 != nullptr);
5446
5447     // We should not be allocating nodes that extend GenTreeOp with this;
5448     // should call the appropriate constructor for the extended type.
5449     assert(!GenTree::IsExOp(GenTree::OperKind(oper)));
5450
5451     GenTree* node = new (this, oper) GenTreeOp(oper, type, op1, op2);
5452
5453     return node;
5454 }
5455
5456 GenTree* Compiler::gtNewQmarkNode(var_types type, GenTree* cond, GenTree* colon)
5457 {
5458     compQmarkUsed = true;
5459     cond->gtFlags |= GTF_RELOP_QMARK;
5460     GenTree* result = new (this, GT_QMARK) GenTreeQmark(type, cond, colon, this);
5461 #ifdef DEBUG
5462     if (compQmarkRationalized)
5463     {
5464         fgCheckQmarkAllowedForm(result);
5465     }
5466 #endif
5467     return result;
5468 }
5469
5470 GenTreeQmark::GenTreeQmark(var_types type, GenTree* cond, GenTree* colonOp, Compiler* comp)
5471     : GenTreeOp(GT_QMARK, type, cond, colonOp)
5472 {
5473     // These must follow a specific form.
5474     assert(cond != nullptr && cond->TypeGet() == TYP_INT);
5475     assert(colonOp != nullptr && colonOp->OperGet() == GT_COLON);
5476 }
5477
5478 GenTreeIntCon* Compiler::gtNewIconNode(ssize_t value, var_types type)
5479 {
5480     return new (this, GT_CNS_INT) GenTreeIntCon(type, value);
5481 }
5482
5483 // return a new node representing the value in a physical register
5484 GenTree* Compiler::gtNewPhysRegNode(regNumber reg, var_types type)
5485 {
5486     assert(genIsValidIntReg(reg) || (reg == REG_SPBASE));
5487     GenTree* result = new (this, GT_PHYSREG) GenTreePhysReg(reg, type);
5488     return result;
5489 }
5490
5491 GenTree* Compiler::gtNewJmpTableNode()
5492 {
5493     return new (this, GT_JMPTABLE) GenTree(GT_JMPTABLE, TYP_I_IMPL);
5494 }
5495
5496 /*****************************************************************************
5497  *
5498  *  Converts an annotated token into an icon flags (so that we will later be
5499  *  able to tell the type of the handle that will be embedded in the icon
5500  *  node)
5501  */
5502
5503 unsigned Compiler::gtTokenToIconFlags(unsigned token)
5504 {
5505     unsigned flags = 0;
5506
5507     switch (TypeFromToken(token))
5508     {
5509         case mdtTypeRef:
5510         case mdtTypeDef:
5511         case mdtTypeSpec:
5512             flags = GTF_ICON_CLASS_HDL;
5513             break;
5514
5515         case mdtMethodDef:
5516             flags = GTF_ICON_METHOD_HDL;
5517             break;
5518
5519         case mdtFieldDef:
5520             flags = GTF_ICON_FIELD_HDL;
5521             break;
5522
5523         default:
5524             flags = GTF_ICON_TOKEN_HDL;
5525             break;
5526     }
5527
5528     return flags;
5529 }
5530
5531 //-----------------------------------------------------------------------------------------
5532 // gtNewIndOfIconHandleNode: Creates an indirection GenTree node of a constant handle
5533 //
5534 // Arguments:
5535 //    indType     - The type returned by the indirection node
5536 //    addr        - The constant address to read from
5537 //    iconFlags   - The GTF_ICON flag value that specifies the kind of handle that we have
5538 //    isInvariant - The indNode should also be marked as invariant
5539 //
5540 // Return Value:
5541 //    Returns a GT_IND node representing value at the address provided by 'value'
5542 //
5543 // Notes:
5544 //    The GT_IND node is marked as non-faulting
5545 //    If the indType is GT_REF we also mark the indNode as GTF_GLOB_REF
5546 //
5547
5548 GenTree* Compiler::gtNewIndOfIconHandleNode(var_types indType, size_t addr, unsigned iconFlags, bool isInvariant)
5549 {
5550     GenTree* addrNode = gtNewIconHandleNode(addr, iconFlags);
5551     GenTree* indNode  = gtNewOperNode(GT_IND, indType, addrNode);
5552
5553     // This indirection won't cause an exception.
5554     //
5555     indNode->gtFlags |= GTF_IND_NONFAULTING;
5556
5557     // String Literal handles are indirections that return a TYP_REF.
5558     // They are pointers into the GC heap and they are not invariant
5559     // as the address is a reportable GC-root and as such it can be
5560     // modified during a GC collection
5561     //
5562     if (indType == TYP_REF)
5563     {
5564         // This indirection points into the gloabal heap
5565         indNode->gtFlags |= GTF_GLOB_REF;
5566     }
5567     if (isInvariant)
5568     {
5569         // This indirection also is invariant.
5570         indNode->gtFlags |= GTF_IND_INVARIANT;
5571     }
5572     return indNode;
5573 }
5574
5575 /*****************************************************************************
5576  *
5577  *  Allocates a integer constant entry that represents a HANDLE to something.
5578  *  It may not be allowed to embed HANDLEs directly into the JITed code (for eg,
5579  *  as arguments to JIT helpers). Get a corresponding value that can be embedded.
5580  *  If the handle needs to be accessed via an indirection, pValue points to it.
5581  */
5582
5583 GenTree* Compiler::gtNewIconEmbHndNode(void* value, void* pValue, unsigned iconFlags, void* compileTimeHandle)
5584 {
5585     GenTree* iconNode;
5586     GenTree* handleNode;
5587
5588     if (value != nullptr)
5589     {
5590         // When 'value' is non-null, pValue is required to be null
5591         assert(pValue == nullptr);
5592
5593         // use 'value' to construct an integer constant node
5594         iconNode = gtNewIconHandleNode((size_t)value, iconFlags);
5595
5596         // 'value' is the handle
5597         handleNode = iconNode;
5598     }
5599     else
5600     {
5601         // When 'value' is null, pValue is required to be non-null
5602         assert(pValue != nullptr);
5603
5604         // use 'pValue' to construct an integer constant node
5605         iconNode = gtNewIconHandleNode((size_t)pValue, iconFlags);
5606
5607         // 'pValue' is an address of a location that contains the handle
5608
5609         // construct the indirection of 'pValue'
5610         handleNode = gtNewOperNode(GT_IND, TYP_I_IMPL, iconNode);
5611
5612         // This indirection won't cause an exception.
5613         handleNode->gtFlags |= GTF_IND_NONFAULTING;
5614 #if 0
5615         // It should also be invariant, but marking it as such leads to bad diffs.
5616
5617         // This indirection also is invariant.
5618         handleNode->gtFlags |= GTF_IND_INVARIANT;
5619 #endif
5620     }
5621
5622     iconNode->gtIntCon.gtCompileTimeHandle = (size_t)compileTimeHandle;
5623
5624     return handleNode;
5625 }
5626
5627 /*****************************************************************************/
5628 GenTree* Compiler::gtNewStringLiteralNode(InfoAccessType iat, void* pValue)
5629 {
5630     GenTree* tree = nullptr;
5631
5632     switch (iat)
5633     {
5634         case IAT_VALUE: // constructStringLiteral in CoreRT case can return IAT_VALUE
5635             tree         = gtNewIconEmbHndNode(pValue, nullptr, GTF_ICON_STR_HDL, nullptr);
5636             tree->gtType = TYP_REF;
5637             tree         = gtNewOperNode(GT_NOP, TYP_REF, tree); // prevents constant folding
5638             break;
5639
5640         case IAT_PVALUE: // The value needs to be accessed via an indirection
5641             // Create an indirection
5642             tree = gtNewIndOfIconHandleNode(TYP_REF, (size_t)pValue, GTF_ICON_STR_HDL, false);
5643             break;
5644
5645         case IAT_PPVALUE: // The value needs to be accessed via a double indirection
5646             // Create the first indirection
5647             tree = gtNewIndOfIconHandleNode(TYP_I_IMPL, (size_t)pValue, GTF_ICON_PSTR_HDL, true);
5648
5649             // Create the second indirection
5650             tree = gtNewOperNode(GT_IND, TYP_REF, tree);
5651             // This indirection won't cause an exception.
5652             tree->gtFlags |= GTF_IND_NONFAULTING;
5653             // This indirection points into the gloabal heap (it is String Object)
5654             tree->gtFlags |= GTF_GLOB_REF;
5655             break;
5656
5657         default:
5658             noway_assert(!"Unexpected InfoAccessType");
5659     }
5660
5661     return tree;
5662 }
5663
5664 /*****************************************************************************/
5665
5666 GenTree* Compiler::gtNewLconNode(__int64 value)
5667 {
5668 #ifdef _TARGET_64BIT_
5669     GenTree* node = new (this, GT_CNS_INT) GenTreeIntCon(TYP_LONG, value);
5670 #else
5671     GenTree* node = new (this, GT_CNS_LNG) GenTreeLngCon(value);
5672 #endif
5673
5674     return node;
5675 }
5676
5677 GenTree* Compiler::gtNewDconNode(double value, var_types type)
5678 {
5679     GenTree* node = new (this, GT_CNS_DBL) GenTreeDblCon(value, type);
5680
5681     return node;
5682 }
5683
5684 GenTree* Compiler::gtNewSconNode(int CPX, CORINFO_MODULE_HANDLE scpHandle)
5685 {
5686     // 'GT_CNS_STR' nodes later get transformed into 'GT_CALL'
5687     assert(GenTree::s_gtNodeSizes[GT_CALL] > GenTree::s_gtNodeSizes[GT_CNS_STR]);
5688     GenTree* node = new (this, GT_CALL) GenTreeStrCon(CPX, scpHandle DEBUGARG(/*largeNode*/ true));
5689     return node;
5690 }
5691
5692 GenTree* Compiler::gtNewZeroConNode(var_types type)
5693 {
5694     GenTree* zero;
5695     switch (type)
5696     {
5697         case TYP_INT:
5698             zero = gtNewIconNode(0);
5699             break;
5700
5701         case TYP_BYREF:
5702             __fallthrough;
5703
5704         case TYP_REF:
5705             zero         = gtNewIconNode(0);
5706             zero->gtType = type;
5707             break;
5708
5709         case TYP_LONG:
5710             zero = gtNewLconNode(0);
5711             break;
5712
5713         case TYP_FLOAT:
5714             zero         = gtNewDconNode(0.0);
5715             zero->gtType = type;
5716             break;
5717
5718         case TYP_DOUBLE:
5719             zero = gtNewDconNode(0.0);
5720             break;
5721
5722         default:
5723             noway_assert(!"Bad type in gtNewZeroConNode");
5724             zero = nullptr;
5725             break;
5726     }
5727     return zero;
5728 }
5729
5730 GenTree* Compiler::gtNewOneConNode(var_types type)
5731 {
5732     GenTree* one;
5733     switch (type)
5734     {
5735         case TYP_INT:
5736         case TYP_UINT:
5737             one = gtNewIconNode(1);
5738             break;
5739
5740         case TYP_LONG:
5741         case TYP_ULONG:
5742             one = gtNewLconNode(1);
5743             break;
5744
5745         case TYP_FLOAT:
5746         case TYP_DOUBLE:
5747             one         = gtNewDconNode(1.0);
5748             one->gtType = type;
5749             break;
5750
5751         default:
5752             noway_assert(!"Bad type in gtNewOneConNode");
5753             one = nullptr;
5754             break;
5755     }
5756     return one;
5757 }
5758
5759 #ifdef FEATURE_SIMD
5760 //---------------------------------------------------------------------
5761 // gtNewSIMDVectorZero: create a GT_SIMD node for Vector<T>.Zero
5762 //
5763 // Arguments:
5764 //    simdType  -  simd vector type
5765 //    baseType  -  element type of vector
5766 //    size      -  size of vector in bytes
5767 GenTree* Compiler::gtNewSIMDVectorZero(var_types simdType, var_types baseType, unsigned size)
5768 {
5769     baseType         = genActualType(baseType);
5770     GenTree* initVal = gtNewZeroConNode(baseType);
5771     initVal->gtType  = baseType;
5772     return gtNewSIMDNode(simdType, initVal, nullptr, SIMDIntrinsicInit, baseType, size);
5773 }
5774
5775 //---------------------------------------------------------------------
5776 // gtNewSIMDVectorOne: create a GT_SIMD node for Vector<T>.One
5777 //
5778 // Arguments:
5779 //    simdType  -  simd vector type
5780 //    baseType  -  element type of vector
5781 //    size      -  size of vector in bytes
5782 GenTree* Compiler::gtNewSIMDVectorOne(var_types simdType, var_types baseType, unsigned size)
5783 {
5784     GenTree* initVal;
5785     if (varTypeIsSmallInt(baseType))
5786     {
5787         unsigned baseSize = genTypeSize(baseType);
5788         int      val;
5789         if (baseSize == 1)
5790         {
5791             val = 0x01010101;
5792         }
5793         else
5794         {
5795             val = 0x00010001;
5796         }
5797         initVal = gtNewIconNode(val);
5798     }
5799     else
5800     {
5801         initVal = gtNewOneConNode(baseType);
5802     }
5803
5804     baseType        = genActualType(baseType);
5805     initVal->gtType = baseType;
5806     return gtNewSIMDNode(simdType, initVal, nullptr, SIMDIntrinsicInit, baseType, size);
5807 }
5808 #endif // FEATURE_SIMD
5809
// gtNewIndCallNode: Create an indirect (CT_INDIRECT) call node. The call
// target expression 'addr' is passed through the method-handle parameter of
// gtNewCallNode, which stores it for indirect calls.
GenTreeCall* Compiler::gtNewIndCallNode(GenTree* addr, var_types type, GenTreeArgList* args, IL_OFFSETX ilOffset)
{
    return gtNewCallNode(CT_INDIRECT, (CORINFO_METHOD_HANDLE)addr, type, args, ilOffset);
}
5814
//------------------------------------------------------------------------
// gtNewCallNode: Allocate and initialize a new GT_CALL node.
//
// Arguments:
//    callType - the kind of call (CT_INDIRECT or a direct-call kind)
//    callHnd  - the callee method handle (for CT_INDIRECT callers pass the
//               call-target tree cast to a handle; see gtNewIndCallNode)
//    type     - the call's return type
//    args     - the argument list, or nullptr if there are no arguments
//    ilOffset - IL offset of the call site, recorded for managed-retval
//               sequence points when emitting debuggable code
//
// Return Value:
//    The new call node with all fields initialized to their defaults.
//
GenTreeCall* Compiler::gtNewCallNode(
    gtCallTypes callType, CORINFO_METHOD_HANDLE callHnd, var_types type, GenTreeArgList* args, IL_OFFSETX ilOffset)
{
    GenTreeCall* node = new (this, GT_CALL) GenTreeCall(genActualType(type));

    // Mark the node as a call that references the global heap, and
    // propagate any effect flags from the arguments.
    node->gtFlags |= (GTF_CALL | GTF_GLOB_REF);
    if (args)
    {
        node->gtFlags |= (args->gtFlags & GTF_ALL_EFFECT);
    }
    node->gtCallType      = callType;
    node->gtCallMethHnd   = callHnd;
    node->gtCallArgs      = args;
    node->gtCallObjp      = nullptr;
    node->fgArgInfo       = nullptr;
    node->callSig         = nullptr;
    node->gtRetClsHnd     = nullptr;
    node->gtControlExpr   = nullptr;
    node->gtCallMoreFlags = 0;

    // gtCallCookie and gtInlineCandidateInfo overlay each other, so only the
    // field relevant to this call kind is cleared.
    if (callType == CT_INDIRECT)
    {
        node->gtCallCookie = nullptr;
    }
    else
    {
        node->gtInlineCandidateInfo = nullptr;
    }
    node->gtCallLateArgs = nullptr;
    node->gtReturnType   = type;

#ifdef FEATURE_READYTORUN_COMPILER
    node->gtEntryPoint.addr       = nullptr;
    node->gtEntryPoint.accessType = IAT_VALUE;
#endif

#if defined(DEBUG) || defined(INLINE_DATA)
    // These get updated after call node is built.
    node->gtInlineObservation = InlineObservation::CALLEE_UNUSED_INITIAL;
    node->gtRawILOffset       = BAD_IL_OFFSET;
#endif

    // Spec: Managed Retval sequence points needs to be generated while generating debug info for debuggable code.
    //
    // Implementation note: if not generating MRV info genCallSite2ILOffsetMap will be NULL and
    // codegen will pass BAD_IL_OFFSET as IL offset of a call node to emitter, which will cause emitter
    // not to emit IP mapping entry.
    if (opts.compDbgCode && opts.compDbgInfo)
    {
        // Managed Retval - IL offset of the call.  This offset is used to emit a
        // CALL_INSTRUCTION type sequence point while emitting corresponding native call.
        //
        // TODO-Cleanup:
        // a) (Opt) We need not store this offset if the method doesn't return a
        // value.  Rather it can be made BAD_IL_OFFSET to prevent a sequence
        // point being emitted.
        //
        // b) (Opt) Add new sequence points only if requested by debugger through
        // a new boundary type - ICorDebugInfo::BoundaryTypes
        if (genCallSite2ILOffsetMap == nullptr)
        {
            genCallSite2ILOffsetMap = new (getAllocator()) CallSiteILOffsetTable(getAllocator());
        }

        // Make sure that there are no duplicate entries for a given call node
        assert(!genCallSite2ILOffsetMap->Lookup(node));
        genCallSite2ILOffsetMap->Set(node, ilOffset);
    }

    // Initialize gtOtherRegs
    node->ClearOtherRegs();

    // Initialize spill flags of gtOtherRegs
    node->ClearOtherRegFlags();

#if defined(_TARGET_X86_) || defined(_TARGET_ARM_)
    // Initialize the multi-reg long return info if necessary
    if (varTypeIsLong(node))
    {
        // The return type will remain as the incoming long type
        node->gtReturnType = node->gtType;

        // Initialize Return type descriptor of call node
        ReturnTypeDesc* retTypeDesc = node->GetReturnTypeDesc();
        retTypeDesc->InitializeLongReturnType(this);

        // must be a long returned in two registers
        assert(retTypeDesc->GetReturnRegCount() == 2);
    }
#endif // defined(_TARGET_X86_) || defined(_TARGET_ARM_)

    return node;
}
5908
5909 GenTree* Compiler::gtNewLclvNode(unsigned lnum, var_types type DEBUGARG(IL_OFFSETX ILoffs))
5910 {
5911     // We need to ensure that all struct values are normalized.
5912     // It might be nice to assert this in general, but we have assignments of int to long.
5913     if (varTypeIsStruct(type))
5914     {
5915         // Make an exception for implicit by-ref parameters during global morph, since
5916         // their lvType has been updated to byref but their appearances have not yet all
5917         // been rewritten and so may have struct type still.
5918         assert(type == lvaTable[lnum].lvType ||
5919                (lvaIsImplicitByRefLocal(lnum) && fgGlobalMorph && (lvaTable[lnum].lvType == TYP_BYREF)));
5920     }
5921     GenTree* node = new (this, GT_LCL_VAR) GenTreeLclVar(type, lnum DEBUGARG(ILoffs));
5922
5923     /* Cannot have this assert because the inliner uses this function
5924      * to add temporaries */
5925
5926     // assert(lnum < lvaCount);
5927
5928     return node;
5929 }
5930
5931 GenTree* Compiler::gtNewLclLNode(unsigned lnum, var_types type DEBUGARG(IL_OFFSETX ILoffs))
5932 {
5933     // We need to ensure that all struct values are normalized.
5934     // It might be nice to assert this in general, but we have assignments of int to long.
5935     if (varTypeIsStruct(type))
5936     {
5937         // Make an exception for implicit by-ref parameters during global morph, since
5938         // their lvType has been updated to byref but their appearances have not yet all
5939         // been rewritten and so may have struct type still.
5940         assert(type == lvaTable[lnum].lvType ||
5941                (lvaIsImplicitByRefLocal(lnum) && fgGlobalMorph && (lvaTable[lnum].lvType == TYP_BYREF)));
5942     }
5943     // This local variable node may later get transformed into a large node
5944     assert(GenTree::s_gtNodeSizes[GT_CALL] > GenTree::s_gtNodeSizes[GT_LCL_VAR]);
5945     GenTree* node = new (this, GT_CALL) GenTreeLclVar(type, lnum DEBUGARG(ILoffs) DEBUGARG(/*largeNode*/ true));
5946     return node;
5947 }
5948
5949 GenTreeLclFld* Compiler::gtNewLclFldNode(unsigned lnum, var_types type, unsigned offset)
5950 {
5951     GenTreeLclFld* node = new (this, GT_LCL_FLD) GenTreeLclFld(type, lnum, offset);
5952
5953     /* Cannot have this assert because the inliner uses this function
5954      * to add temporaries */
5955
5956     // assert(lnum < lvaCount);
5957
5958     node->gtFieldSeq = FieldSeqStore::NotAField();
5959     return node;
5960 }
5961
5962 GenTree* Compiler::gtNewInlineCandidateReturnExpr(GenTree* inlineCandidate, var_types type)
5963
5964 {
5965     assert(GenTree::s_gtNodeSizes[GT_RET_EXPR] == TREE_NODE_SZ_LARGE);
5966
5967     GenTree* node = new (this, GT_RET_EXPR) GenTreeRetExpr(type);
5968
5969     node->gtRetExpr.gtInlineCandidate = inlineCandidate;
5970
5971     if (varTypeIsStruct(inlineCandidate) && !inlineCandidate->OperIsBlkOp())
5972     {
5973         node->gtRetExpr.gtRetClsHnd = gtGetStructHandle(inlineCandidate);
5974     }
5975
5976     // GT_RET_EXPR node eventually might be bashed back to GT_CALL (when inlining is aborted for example).
5977     // Therefore it should carry the GTF_CALL flag so that all the rules about spilling can apply to it as well.
5978     // For example, impImportLeave or CEE_POP need to spill GT_RET_EXPR before empty the evaluation stack.
5979     node->gtFlags |= GTF_CALL;
5980
5981     return node;
5982 }
5983
5984 GenTreeArgList* Compiler::gtNewListNode(GenTree* op1, GenTreeArgList* op2)
5985 {
5986     assert((op1 != nullptr) && (op1->OperGet() != GT_LIST));
5987
5988     return new (this, GT_LIST) GenTreeArgList(op1, op2);
5989 }
5990
5991 /*****************************************************************************
5992  *
5993  *  Create a list out of one value.
5994  */
5995
5996 GenTreeArgList* Compiler::gtNewArgList(GenTree* arg)
5997 {
5998     return new (this, GT_LIST) GenTreeArgList(arg);
5999 }
6000
6001 /*****************************************************************************
6002  *
6003  *  Create a list out of the two values.
6004  */
6005
6006 GenTreeArgList* Compiler::gtNewArgList(GenTree* arg1, GenTree* arg2)
6007 {
6008     return new (this, GT_LIST) GenTreeArgList(arg1, gtNewArgList(arg2));
6009 }
6010
6011 /*****************************************************************************
6012  *
6013  *  Create a list out of the three values.
6014  */
6015
6016 GenTreeArgList* Compiler::gtNewArgList(GenTree* arg1, GenTree* arg2, GenTree* arg3)
6017 {
6018     return new (this, GT_LIST) GenTreeArgList(arg1, gtNewArgList(arg2, arg3));
6019 }
6020
6021 /*****************************************************************************
6022  *
6023  *  Create a list out of the three values.
6024  */
6025
6026 GenTreeArgList* Compiler::gtNewArgList(GenTree* arg1, GenTree* arg2, GenTree* arg3, GenTree* arg4)
6027 {
6028     return new (this, GT_LIST) GenTreeArgList(arg1, gtNewArgList(arg2, arg3, arg4));
6029 }
6030
6031 /*****************************************************************************
6032  *
6033  *  Given a GT_CALL node, access the fgArgInfo and find the entry
6034  *  that has the matching argNum and return the fgArgTableEntryPtr
6035  */
6036
6037 fgArgTabEntry* Compiler::gtArgEntryByArgNum(GenTreeCall* call, unsigned argNum)
6038 {
6039     fgArgInfo* argInfo = call->fgArgInfo;
6040     noway_assert(argInfo != nullptr);
6041     return argInfo->GetArgEntry(argNum);
6042 }
6043
6044 /*****************************************************************************
6045  *
6046  *  Given a GT_CALL node, access the fgArgInfo and find the entry
6047  *  that has the matching node and return the fgArgTableEntryPtr
6048  */
6049
6050 fgArgTabEntry* Compiler::gtArgEntryByNode(GenTreeCall* call, GenTree* node)
6051 {
6052     fgArgInfo* argInfo = call->fgArgInfo;
6053     noway_assert(argInfo != nullptr);
6054
6055     unsigned        argCount       = argInfo->ArgCount();
6056     fgArgTabEntry** argTable       = argInfo->ArgTable();
6057     fgArgTabEntry*  curArgTabEntry = nullptr;
6058
6059     for (unsigned i = 0; i < argCount; i++)
6060     {
6061         curArgTabEntry = argTable[i];
6062
6063         if (curArgTabEntry->node == node)
6064         {
6065             return curArgTabEntry;
6066         }
6067         else if (curArgTabEntry->parent != nullptr)
6068         {
6069             assert(curArgTabEntry->parent->OperIsList());
6070             if (curArgTabEntry->parent->Current() == node)
6071             {
6072                 return curArgTabEntry;
6073             }
6074         }
6075         else // (curArgTabEntry->parent == NULL)
6076         {
6077             if (call->gtCallObjp == node)
6078             {
6079                 return curArgTabEntry;
6080             }
6081         }
6082     }
6083     noway_assert(!"gtArgEntryByNode: node not found");
6084     return nullptr;
6085 }
6086
6087 /*****************************************************************************
6088  *
6089  *  Find and return the entry with the given "lateArgInx".  Requires that one is found
6090  *  (asserts this).
6091  */
6092 fgArgTabEntry* Compiler::gtArgEntryByLateArgIndex(GenTreeCall* call, unsigned lateArgInx)
6093 {
6094     fgArgInfo* argInfo = call->fgArgInfo;
6095     noway_assert(argInfo != nullptr);
6096     assert(lateArgInx != UINT_MAX);
6097
6098     unsigned        argCount       = argInfo->ArgCount();
6099     fgArgTabEntry** argTable       = argInfo->ArgTable();
6100     fgArgTabEntry*  curArgTabEntry = nullptr;
6101
6102     for (unsigned i = 0; i < argCount; i++)
6103     {
6104         curArgTabEntry = argTable[i];
6105         if (curArgTabEntry->isLateArg() && curArgTabEntry->lateArgInx == lateArgInx)
6106         {
6107             return curArgTabEntry;
6108         }
6109     }
6110     noway_assert(!"gtArgEntryByNode: node not found");
6111     return nullptr;
6112 }
6113
6114 //------------------------------------------------------------------------
6115 // gtArgNodeByLateArgInx: Given a call instruction, find the argument with the given
6116 //                        late arg index (i.e. the given position in the gtCallLateArgs list).
6117 // Arguments:
6118 //    call - the call node
6119 //    lateArgInx - the index into the late args list
6120 //
6121 // Return value:
6122 //    The late argument node.
6123 //
6124 GenTree* Compiler::gtArgNodeByLateArgInx(GenTreeCall* call, unsigned lateArgInx)
6125 {
6126     GenTree* argx     = nullptr;
6127     unsigned regIndex = 0;
6128
6129     for (GenTreeArgList *list = call->gtCall.gtCallLateArgs; list != nullptr; regIndex++, list = list->Rest())
6130     {
6131         argx = list->Current();
6132         assert(!argx->IsArgPlaceHolderNode()); // No placeholder nodes are in gtCallLateArgs;
6133         if (regIndex == lateArgInx)
6134         {
6135             break;
6136         }
6137     }
6138     noway_assert(argx != nullptr);
6139     return argx;
6140 }
6141
6142 /*****************************************************************************
6143  *
6144  *  Given an fgArgTabEntry*, return true if it is the 'this' pointer argument.
6145  */
6146 bool Compiler::gtArgIsThisPtr(fgArgTabEntry* argEntry)
6147 {
6148     return (argEntry->parent == nullptr);
6149 }
6150
6151 /*****************************************************************************
6152  *
6153  *  Create a node that will assign 'src' to 'dst'.
6154  */
6155
6156 GenTree* Compiler::gtNewAssignNode(GenTree* dst, GenTree* src)
6157 {
6158     /* Mark the target as being assigned */
6159
6160     if ((dst->gtOper == GT_LCL_VAR) || (dst->OperGet() == GT_LCL_FLD))
6161     {
6162         dst->gtFlags |= GTF_VAR_DEF;
6163         if (dst->IsPartialLclFld(this))
6164         {
6165             // We treat these partial writes as combined uses and defs.
6166             dst->gtFlags |= GTF_VAR_USEASG;
6167         }
6168     }
6169     dst->gtFlags |= GTF_DONT_CSE;
6170
6171     /* Create the assignment node */
6172
6173     GenTree* asg = gtNewOperNode(GT_ASG, dst->TypeGet(), dst, src);
6174
6175     /* Mark the expression as containing an assignment */
6176
6177     asg->gtFlags |= GTF_ASG;
6178
6179     return asg;
6180 }
6181
//------------------------------------------------------------------------
// gtNewObjNode: Creates a new Obj node.
//
// Arguments:
//    structHnd - The class handle of the struct type.
//    addr      - The address of the struct.
//
// Return Value:
//    Returns a node representing the struct value at the given address.
//
// Assumptions:
//    Any entry and exit conditions, such as required preconditions of
//    data structures, memory to be freed by caller, etc.
//
// Notes:
//    It will currently return a GT_OBJ node for any struct type, but may
//    return a GT_IND or a non-indirection for a scalar type.
//    The node will not yet have its GC info initialized. This is because
//    we may not need this info if this is an r-value.

GenTree* Compiler::gtNewObjNode(CORINFO_CLASS_HANDLE structHnd, GenTree* addr)
{
    var_types nodeType = impNormStructType(structHnd);
    assert(varTypeIsStruct(nodeType));
    unsigned size = info.compCompHnd->getClassSize(structHnd);

    // It would be convenient to set the GC info at this time, but we don't actually require
    // it unless this is going to be a destination.
    // NOTE(review): given the assert above, this scalar branch appears unreachable in debug
    // builds; it remains as a release-build fallback should impNormStructType return a
    // non-struct type — confirm before removing.
    if (!varTypeIsStruct(nodeType))
    {
        // If 'addr' is '&local' of the same scalar type, just use the local directly.
        if ((addr->gtOper == GT_ADDR) && (addr->gtGetOp1()->TypeGet() == nodeType))
        {
            return addr->gtGetOp1();
        }
        else
        {
            // Otherwise read the scalar through an indirection.
            return gtNewOperNode(GT_IND, nodeType, addr);
        }
    }
    GenTreeBlk* newBlkOrObjNode = new (this, GT_OBJ) GenTreeObj(nodeType, addr, structHnd, size);

    // An Obj is not a global reference, if it is known to be a local struct.
    if ((addr->gtFlags & GTF_GLOB_REF) == 0)
    {
        GenTreeLclVarCommon* lclNode = addr->IsLocalAddrExpr();
        if (lclNode != nullptr)
        {
            // A local address can't fault.
            newBlkOrObjNode->gtFlags |= GTF_IND_NONFAULTING;
            // Implicit byref locals are accessed through a pointer and may still
            // reference caller memory, so only clear GTF_GLOB_REF for true locals.
            if (!lvaIsImplicitByRefLocal(lclNode->gtLclNum))
            {
                newBlkOrObjNode->gtFlags &= ~GTF_GLOB_REF;
            }
        }
    }
    return newBlkOrObjNode;
}
6238
6239 //------------------------------------------------------------------------
6240 // gtSetObjGcInfo: Set the GC info on an object node
6241 //
6242 // Arguments:
6243 //    objNode - The object node of interest
6244
6245 void Compiler::gtSetObjGcInfo(GenTreeObj* objNode)
6246 {
6247     CORINFO_CLASS_HANDLE structHnd  = objNode->gtClass;
6248     var_types            nodeType   = objNode->TypeGet();
6249     unsigned             size       = objNode->gtBlkSize;
6250     unsigned             slots      = 0;
6251     unsigned             gcPtrCount = 0;
6252     BYTE*                gcPtrs     = nullptr;
6253
6254     assert(varTypeIsStruct(nodeType));
6255     assert(size == info.compCompHnd->getClassSize(structHnd));
6256     assert(nodeType == impNormStructType(structHnd));
6257
6258     if (nodeType == TYP_STRUCT)
6259     {
6260         if (size >= TARGET_POINTER_SIZE)
6261         {
6262             // Get the GC fields info
6263             var_types simdBaseType; // Dummy argument
6264             slots    = roundUp(size, TARGET_POINTER_SIZE) / TARGET_POINTER_SIZE;
6265             gcPtrs   = new (this, CMK_ASTNode) BYTE[slots];
6266             nodeType = impNormStructType(structHnd, gcPtrs, &gcPtrCount, &simdBaseType);
6267         }
6268     }
6269     objNode->SetGCInfo(gcPtrs, gcPtrCount, slots);
6270     assert(objNode->gtType == nodeType);
6271 }
6272
6273 //------------------------------------------------------------------------
6274 // gtNewStructVal: Return a node that represents a struct value
6275 //
6276 // Arguments:
6277 //    structHnd - The class for the struct
6278 //    addr      - The address of the struct
6279 //
6280 // Return Value:
6281 //    A block, object or local node that represents the struct value pointed to by 'addr'.
6282
6283 GenTree* Compiler::gtNewStructVal(CORINFO_CLASS_HANDLE structHnd, GenTree* addr)
6284 {
6285     if (addr->gtOper == GT_ADDR)
6286     {
6287         GenTree* val = addr->gtGetOp1();
6288         if (val->OperGet() == GT_LCL_VAR)
6289         {
6290             unsigned   lclNum = addr->gtGetOp1()->AsLclVarCommon()->gtLclNum;
6291             LclVarDsc* varDsc = &(lvaTable[lclNum]);
6292             if (varTypeIsStruct(varDsc) && (varDsc->lvVerTypeInfo.GetClassHandle() == structHnd) &&
6293                 !lvaIsImplicitByRefLocal(lclNum))
6294             {
6295                 return addr->gtGetOp1();
6296             }
6297         }
6298     }
6299     return gtNewObjNode(structHnd, addr);
6300 }
6301
6302 //------------------------------------------------------------------------
6303 // gtNewBlockVal: Return a node that represents a possibly untyped block value
6304 //
6305 // Arguments:
6306 //    addr      - The address of the block
6307 //    size      - The size of the block
6308 //
6309 // Return Value:
6310 //    A block, object or local node that represents the block value pointed to by 'addr'.
6311
6312 GenTree* Compiler::gtNewBlockVal(GenTree* addr, unsigned size)
6313 {
6314     // By default we treat this as an opaque struct type with known size.
6315     var_types blkType = TYP_STRUCT;
6316     if ((addr->gtOper == GT_ADDR) && (addr->gtGetOp1()->OperGet() == GT_LCL_VAR))
6317     {
6318         GenTree* val = addr->gtGetOp1();
6319 #if FEATURE_SIMD
6320         if (varTypeIsSIMD(val))
6321         {
6322             if (genTypeSize(val->TypeGet()) == size)
6323             {
6324                 blkType = val->TypeGet();
6325                 return addr->gtGetOp1();
6326             }
6327         }
6328         else
6329 #endif // FEATURE_SIMD
6330             if (val->TypeGet() == TYP_STRUCT)
6331         {
6332             GenTreeLclVarCommon* lcl    = addr->gtGetOp1()->AsLclVarCommon();
6333             LclVarDsc*           varDsc = &(lvaTable[lcl->gtLclNum]);
6334             if ((varDsc->TypeGet() == TYP_STRUCT) && (varDsc->lvExactSize == size))
6335             {
6336                 return addr->gtGetOp1();
6337             }
6338         }
6339     }
6340     return new (this, GT_BLK) GenTreeBlk(GT_BLK, blkType, addr, size);
6341 }
6342
6343 // Creates a new assignment node for a CpObj.
6344 // Parameters (exactly the same as MSIL CpObj):
6345 //
6346 //  dstAddr    - The target to copy the struct to
6347 //  srcAddr    - The source to copy the struct from
6348 //  structHnd  - A class token that represents the type of object being copied. May be null
6349 //               if FEATURE_SIMD is enabled and the source has a SIMD type.
6350 //  isVolatile - Is this marked as volatile memory?
6351
6352 GenTree* Compiler::gtNewCpObjNode(GenTree* dstAddr, GenTree* srcAddr, CORINFO_CLASS_HANDLE structHnd, bool isVolatile)
6353 {
6354     GenTree* lhs = gtNewStructVal(structHnd, dstAddr);
6355     GenTree* src = nullptr;
6356     unsigned size;
6357
6358     if (lhs->OperIsBlk())
6359     {
6360         size = lhs->AsBlk()->gtBlkSize;
6361         if (lhs->OperGet() == GT_OBJ)
6362         {
6363             gtSetObjGcInfo(lhs->AsObj());
6364         }
6365     }
6366     else
6367     {
6368         size = genTypeSize(lhs->gtType);
6369     }
6370
6371     if (srcAddr->OperGet() == GT_ADDR)
6372     {
6373         src = srcAddr->gtOp.gtOp1;
6374     }
6375     else
6376     {
6377         src = gtNewOperNode(GT_IND, lhs->TypeGet(), srcAddr);
6378     }
6379
6380     GenTree* result = gtNewBlkOpNode(lhs, src, size, isVolatile, true);
6381     return result;
6382 }
6383
6384 //------------------------------------------------------------------------
6385 // FixupInitBlkValue: Fixup the init value for an initBlk operation
6386 //
6387 // Arguments:
6388 //    asgType - The type of assignment that the initBlk is being transformed into
6389 //
6390 // Return Value:
6391 //    Modifies the constant value on this node to be the appropriate "fill"
6392 //    value for the initblk.
6393 //
6394 // Notes:
6395 //    The initBlk MSIL instruction takes a byte value, which must be
6396 //    extended to the size of the assignment when an initBlk is transformed
6397 //    to an assignment of a primitive type.
6398 //    This performs the appropriate extension.
6399
6400 void GenTreeIntCon::FixupInitBlkValue(var_types asgType)
6401 {
6402     assert(varTypeIsIntegralOrI(asgType));
6403     unsigned size = genTypeSize(asgType);
6404     if (size > 1)
6405     {
6406         size_t cns = gtIconVal;
6407         cns        = cns & 0xFF;
6408         cns |= cns << 8;
6409         if (size >= 4)
6410         {
6411             cns |= cns << 16;
6412 #ifdef _TARGET_64BIT_
6413             if (size == 8)
6414             {
6415                 cns |= cns << 32;
6416             }
6417 #endif // _TARGET_64BIT_
6418
6419             // Make the type match for evaluation types.
6420             gtType = asgType;
6421
6422             // if we are initializing a GC type the value being assigned must be zero (null).
6423             assert(!varTypeIsGC(asgType) || (cns == 0));
6424         }
6425
6426         gtIconVal = cns;
6427     }
6428 }
6429
//
//------------------------------------------------------------------------
// gtBlockOpInit: Initializes a BlkOp GenTree
//
// Arguments:
//    result     - an assignment node that is to be initialized.
//    dst        - the target (destination) we want to either initialize or copy to.
//    src        - the init value for InitBlk or the source struct for CpBlk/CpObj.
//    isVolatile - specifies whether this node is a volatile memory operation.
//
// Assumptions:
//    'result' is an assignment that is newly constructed.
//    If 'dst' is TYP_STRUCT, then it must be a block node or lclVar.
//
// Notes:
//    This procedure centralizes all the logic to both enforce proper structure and
//    to properly construct any InitBlk/CpBlk node.

void Compiler::gtBlockOpInit(GenTree* result, GenTree* dst, GenTree* srcOrFillVal, bool isVolatile)
{
    // Non-block assignments (scalar types) need no special initialization.
    if (!result->OperIsBlkOp())
    {
        assert(dst->TypeGet() != TYP_STRUCT);
        return;
    }
#ifdef DEBUG
    // If the copy involves GC pointers, the caller must have already set
    // the node additional members (gtGcPtrs, gtGcPtrCount, gtSlots) on the dst.
    if ((dst->gtOper == GT_OBJ) && dst->AsBlk()->HasGCPtr())
    {
        GenTreeObj* objNode = dst->AsObj();
        assert(objNode->gtGcPtrs != nullptr);
        assert(!IsUninitialized(objNode->gtGcPtrs));
        assert(!IsUninitialized(objNode->gtGcPtrCount));
        assert(!IsUninitialized(objNode->gtSlots) && objNode->gtSlots > 0);

        // Every per-slot GC descriptor must be one of the known CorInfoGCType values.
        for (unsigned i = 0; i < objNode->gtGcPtrCount; ++i)
        {
            CorInfoGCType t = (CorInfoGCType)objNode->gtGcPtrs[i];
            switch (t)
            {
                case TYPE_GC_NONE:
                case TYPE_GC_REF:
                case TYPE_GC_BYREF:
                case TYPE_GC_OTHER:
                    break;
                default:
                    unreached();
            }
        }
    }
#endif // DEBUG

    /* In the case of CpBlk, we want to avoid generating
    * nodes where the source and destination are the same
    * because of two reasons, first, is useless, second
    * it introduces issues in liveness and also copying
    * memory from an overlapping memory location is
    * undefined both as per the ECMA standard and also
    * the memcpy semantics specify that.
    *
    * NOTE: In this case we'll only detect the case for addr of a local
    * and a local itself, any other complex expressions won't be
    * caught.
    *
    * TODO-Cleanup: though having this logic is goodness (i.e. avoids self-assignment
    * of struct vars very early), it was added because fgInterBlockLocalVarLiveness()
    * isn't handling self-assignment of struct variables correctly.  This issue may not
    * surface if struct promotion is ON (which is the case on x86/arm).  But still the
    * fundamental issue exists that needs to be addressed.
    */
    if (result->OperIsCopyBlkOp())
    {
        GenTree* currSrc = srcOrFillVal;
        GenTree* currDst = dst;

        // Strip 'Blk(ADDR(x))' down to 'x' on both sides so the lclVar comparison below can match.
        if (currSrc->OperIsBlk() && (currSrc->AsBlk()->Addr()->OperGet() == GT_ADDR))
        {
            currSrc = currSrc->AsBlk()->Addr()->gtGetOp1();
        }
        if (currDst->OperIsBlk() && (currDst->AsBlk()->Addr()->OperGet() == GT_ADDR))
        {
            currDst = currDst->AsBlk()->Addr()->gtGetOp1();
        }

        // Self-assignment of the same local: drop the whole operation.
        if (currSrc->OperGet() == GT_LCL_VAR && currDst->OperGet() == GT_LCL_VAR &&
            currSrc->gtLclVarCommon.gtLclNum == currDst->gtLclVarCommon.gtLclNum)
        {
            // Make this a NOP
            // TODO-Cleanup: probably doesn't matter, but could do this earlier and avoid creating a GT_ASG
            result->gtBashToNOP();
            return;
        }
    }

    // Propagate all effect flags from children
    result->gtFlags |= dst->gtFlags & GTF_ALL_EFFECT;
    result->gtFlags |= result->gtOp.gtOp2->gtFlags & GTF_ALL_EFFECT;

    result->gtFlags |= (dst->gtFlags & GTF_EXCEPT) | (srcOrFillVal->gtFlags & GTF_EXCEPT);

    if (isVolatile)
    {
        result->gtFlags |= GTF_BLK_VOLATILE;
    }

#ifdef FEATURE_SIMD
    if (result->OperIsCopyBlkOp() && varTypeIsSIMD(srcOrFillVal))
    {
        // If the source is a GT_SIMD node of SIMD type, then the dst lclvar struct
        // should be labeled as simd intrinsic related struct.
        // This is done so that the morpher can transform any field accesses into
        // intrinsics, thus avoiding conflicting access methods (fields vs. whole-register).

        // Strip 'Indir(ADDR(x))' to look at the underlying source node.
        GenTree* src = srcOrFillVal;
        if (src->OperIsIndir() && (src->AsIndir()->Addr()->OperGet() == GT_ADDR))
        {
            src = src->AsIndir()->Addr()->gtGetOp1();
        }
#ifdef FEATURE_HW_INTRINSICS
        if ((src->OperGet() == GT_SIMD) || (src->OperGet() == GT_HWIntrinsic))
#else
        if (src->OperGet() == GT_SIMD)
#endif // FEATURE_HW_INTRINSICS
        {
            // Strip the destination down to the local it writes, if any.
            if (dst->OperIsBlk() && (dst->AsIndir()->Addr()->OperGet() == GT_ADDR))
            {
                dst = dst->AsIndir()->Addr()->gtGetOp1();
            }

            if (dst->OperIsLocal() && varTypeIsStruct(dst))
            {
                setLclRelatedToSIMDIntrinsic(dst);
            }
        }
    }
#endif // FEATURE_SIMD
}
6568
6569 //------------------------------------------------------------------------
6570 // gtNewBlkOpNode: Creates a GenTree for a block (struct) assignment.
6571 //
6572 // Arguments:
6573 //    dst           - Destination or target to copy to / initialize the buffer.
6574 //    srcOrFillVall - the size of the buffer to copy/initialize or zero, in the case of CpObj.
6575 //    size          - The size of the buffer or a class token (in the case of CpObj).
6576 //    isVolatile    - Whether this is a volatile memory operation or not.
6577 //    isCopyBlock   - True if this is a block copy (rather than a block init).
6578 //
6579 // Return Value:
6580 //    Returns the newly constructed and initialized block operation.
6581 //
6582 // Notes:
6583 //    If size is zero, the dst must be a GT_OBJ with the class handle.
6584 //    'dst' must be a block node or lclVar.
6585 //
6586 GenTree* Compiler::gtNewBlkOpNode(GenTree* dst, GenTree* srcOrFillVal, unsigned size, bool isVolatile, bool isCopyBlock)
6587 {
6588     assert(dst->OperIsBlk() || dst->OperIsLocal());
6589     if (isCopyBlock)
6590     {
6591         srcOrFillVal->gtFlags |= GTF_DONT_CSE;
6592         if (srcOrFillVal->OperIsIndir() && (srcOrFillVal->gtGetOp1()->gtOper == GT_ADDR))
6593         {
6594             srcOrFillVal = srcOrFillVal->gtGetOp1()->gtGetOp1();
6595         }
6596     }
6597     else
6598     {
6599         // InitBlk
6600         assert(varTypeIsIntegral(srcOrFillVal));
6601         if (varTypeIsStruct(dst))
6602         {
6603             if (!srcOrFillVal->IsIntegralConst(0))
6604             {
6605                 srcOrFillVal = gtNewOperNode(GT_INIT_VAL, TYP_INT, srcOrFillVal);
6606             }
6607         }
6608     }
6609
6610     GenTree* result = gtNewAssignNode(dst, srcOrFillVal);
6611     gtBlockOpInit(result, dst, srcOrFillVal, isVolatile);
6612     return result;
6613 }
6614
6615 //------------------------------------------------------------------------
6616 // gtNewPutArgReg: Creates a new PutArgReg node.
6617 //
6618 // Arguments:
6619 //    type   - The actual type of the argument
6620 //    arg    - The argument node
6621 //    argReg - The register that the argument will be passed in
6622 //
6623 // Return Value:
6624 //    Returns the newly created PutArgReg node.
6625 //
6626 // Notes:
6627 //    The node is generated as GenTreeMultiRegOp on RyuJIT/armel, GenTreeOp on all the other archs.
6628 //
6629 GenTree* Compiler::gtNewPutArgReg(var_types type, GenTree* arg, regNumber argReg)
6630 {
6631     assert(arg != nullptr);
6632
6633     GenTree* node = nullptr;
6634 #if defined(_TARGET_ARM_)
6635     // A PUTARG_REG could be a MultiRegOp on arm since we could move a double register to two int registers.
6636     node = new (this, GT_PUTARG_REG) GenTreeMultiRegOp(GT_PUTARG_REG, type, arg, nullptr);
6637     if (type == TYP_LONG)
6638     {
6639         node->AsMultiRegOp()->gtOtherReg = REG_NEXT(argReg);
6640     }
6641 #else
6642     node          = gtNewOperNode(GT_PUTARG_REG, type, arg);
6643 #endif
6644     node->gtRegNum = argReg;
6645
6646     return node;
6647 }
6648
6649 //------------------------------------------------------------------------
6650 // gtNewBitCastNode: Creates a new BitCast node.
6651 //
6652 // Arguments:
6653 //    type   - The actual type of the argument
6654 //    arg    - The argument node
6655 //    argReg - The register that the argument will be passed in
6656 //
6657 // Return Value:
6658 //    Returns the newly created BitCast node.
6659 //
6660 // Notes:
6661 //    The node is generated as GenTreeMultiRegOp on RyuJIT/arm, as GenTreeOp on all the other archs.
6662 //
6663 GenTree* Compiler::gtNewBitCastNode(var_types type, GenTree* arg)
6664 {
6665     assert(arg != nullptr);
6666
6667     GenTree* node = nullptr;
6668 #if defined(_TARGET_ARM_)
6669     // A BITCAST could be a MultiRegOp on arm since we could move a double register to two int registers.
6670     node = new (this, GT_BITCAST) GenTreeMultiRegOp(GT_BITCAST, type, arg, nullptr);
6671 #else
6672     node          = gtNewOperNode(GT_BITCAST, type, arg);
6673 #endif
6674
6675     return node;
6676 }
6677
//------------------------------------------------------------------------
// gtNewAllocObjNode: Helper to create an object allocation node.
//
// Arguments:
//    pResolvedToken   - Resolved token for the object being allocated
//    useParent     -    true iff the token represents a child of the object's class
//
// Return Value:
//    Returns GT_ALLOCOBJ node that will be later morphed into an
//    allocation helper call or local variable allocation on the stack.
//    Returns nullptr when backing out of an inline (see below).

GenTreeAllocObj* Compiler::gtNewAllocObjNode(CORINFO_RESOLVED_TOKEN* pResolvedToken, BOOL useParent)
{
    const BOOL      mustRestoreHandle     = TRUE;
    BOOL* const     pRuntimeLookup        = nullptr;
    bool            usingReadyToRunHelper = false;
    CorInfoHelpFunc helper                = CORINFO_HELP_UNDEF;
    GenTree*        opHandle = impTokenToHandle(pResolvedToken, pRuntimeLookup, mustRestoreHandle, useParent);

#ifdef FEATURE_READYTORUN_COMPILER
    CORINFO_CONST_LOOKUP lookup = {};

    if (opts.IsReadyToRun())
    {
        // Ask the EE for an R2R allocation helper; on success 'lookup' holds its entry point.
        helper                                        = CORINFO_HELP_READYTORUN_NEW;
        CORINFO_LOOKUP_KIND* const pGenericLookupKind = nullptr;
        usingReadyToRunHelper =
            info.compCompHnd->getReadyToRunHelper(pResolvedToken, pGenericLookupKind, helper, &lookup);
    }
#endif

    if (!usingReadyToRunHelper)
    {
        // A null class handle here means impTokenToHandle failed mid-inline.
        if (opHandle == nullptr)
        {
            // We must be backing out of an inline.
            assert(compDonotInline());
            return nullptr;
        }
    }

    // Always query getNewHelper (it can have EE-visible side effects and sets
    // 'helperHasSideEffects'), but only use its helper when not on the R2R path.
    bool            helperHasSideEffects;
    CorInfoHelpFunc helperTemp =
        info.compCompHnd->getNewHelper(pResolvedToken, info.compMethodHnd, &helperHasSideEffects);

    if (!usingReadyToRunHelper)
    {
        helper = helperTemp;
    }

    // TODO: ReadyToRun: When generic dictionary lookups are necessary, replace the lookup call
    // and the newfast call with a single call to a dynamic R2R cell that will:
    //      1) Load the context
    //      2) Perform the generic dictionary lookup and caching, and generate the appropriate stub
    //      3) Allocate and return the new object for boxing
    // Reason: performance (today, we'll always use the slow helper for the R2R generics case)

    GenTreeAllocObj* allocObj =
        gtNewAllocObjNode(helper, helperHasSideEffects, pResolvedToken->hClass, TYP_REF, opHandle);

#ifdef FEATURE_READYTORUN_COMPILER
    if (usingReadyToRunHelper)
    {
        allocObj->gtEntryPoint = lookup;
    }
#endif

    return allocObj;
}
6747
/*****************************************************************************
 *
 *  Clones the given tree value and returns a copy of the given tree.
 *  If 'complexOK' is false, the cloning is only done provided the tree
 *     is not too complex (whatever that may mean);
 *  If 'complexOK' is true, we try slightly harder to clone the tree.
 *  In either case, NULL is returned if the tree cannot be cloned
 *
 *  Note that there is the function gtCloneExpr() which does a more
 *  complete job if you can't handle this function failing.
 */

GenTree* Compiler::gtClone(GenTree* tree, bool complexOK)
{
    GenTree* copy;

    switch (tree->gtOper)
    {
        // Constants and locals are always safe to duplicate.
        case GT_CNS_INT:

#if defined(LATE_DISASM)
            if (tree->IsIconHandle())
            {
                // Handle constants must preserve their handle kind and compile-time handle.
                copy = gtNewIconHandleNode(tree->gtIntCon.gtIconVal, tree->gtFlags, tree->gtIntCon.gtFieldSeq);
                copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
                copy->gtType                       = tree->gtType;
            }
            else
#endif
            {
                copy = new (this, GT_CNS_INT)
                    GenTreeIntCon(tree->gtType, tree->gtIntCon.gtIconVal, tree->gtIntCon.gtFieldSeq);
                copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
            }
            break;

        case GT_CNS_LNG:
            copy = gtNewLconNode(tree->gtLngCon.gtLconVal);
            break;

        case GT_LCL_VAR:
            // Remember that the LclVar node has been cloned. The flag will be set
            // on 'copy' as well.
            tree->gtFlags |= GTF_VAR_CLONED;
            copy = gtNewLclvNode(tree->gtLclVarCommon.gtLclNum, tree->gtType DEBUGARG(tree->gtLclVar.gtLclILoffs));
            break;

        case GT_LCL_FLD:
        case GT_LCL_FLD_ADDR:
            // Remember that the LclVar node has been cloned. The flag will be set
            // on 'copy' as well.
            tree->gtFlags |= GTF_VAR_CLONED;
            copy = new (this, tree->gtOper)
                GenTreeLclFld(tree->gtOper, tree->TypeGet(), tree->gtLclFld.gtLclNum, tree->gtLclFld.gtLclOffs);
            copy->gtLclFld.gtFieldSeq = tree->gtLclFld.gtFieldSeq;
            break;

        case GT_CLS_VAR:
            copy = new (this, GT_CLS_VAR)
                GenTreeClsVar(tree->gtType, tree->gtClsVar.gtClsVarHnd, tree->gtClsVar.gtFieldSeq);
            break;

        default:
            // Anything else is only cloned when the caller opted into 'complex' cloning,
            // and even then only a few simple shapes are supported.
            if (!complexOK)
            {
                return nullptr;
            }

            if (tree->gtOper == GT_FIELD)
            {
                GenTree* objp;

                // copied from line 9850

                // Clone the object operand (if any); fail the whole clone if it fails.
                objp = nullptr;
                if (tree->gtField.gtFldObj)
                {
                    objp = gtClone(tree->gtField.gtFldObj, false);
                    if (!objp)
                    {
                        return objp;
                    }
                }

                copy = gtNewFieldRef(tree->TypeGet(), tree->gtField.gtFldHnd, objp, tree->gtField.gtFldOffset);
                copy->gtField.gtFldMayOverlap = tree->gtField.gtFldMayOverlap;
#ifdef FEATURE_READYTORUN_COMPILER
                copy->gtField.gtFieldLookup = tree->gtField.gtFieldLookup;
#endif
            }
            else if (tree->OperIs(GT_ADD, GT_SUB))
            {
                // ADD/SUB is cloned only when both operands are leaves.
                GenTree* op1 = tree->gtOp.gtOp1;
                GenTree* op2 = tree->gtOp.gtOp2;

                if (op1->OperIsLeaf() && op2->OperIsLeaf())
                {
                    op1 = gtClone(op1);
                    if (op1 == nullptr)
                    {
                        return nullptr;
                    }
                    op2 = gtClone(op2);
                    if (op2 == nullptr)
                    {
                        return nullptr;
                    }

                    copy = gtNewOperNode(tree->OperGet(), tree->TypeGet(), op1, op2);
                }
                else
                {
                    return nullptr;
                }
            }
            else if (tree->gtOper == GT_ADDR)
            {
                GenTree* op1 = gtClone(tree->gtOp.gtOp1);
                if (op1 == nullptr)
                {
                    return nullptr;
                }
                copy = gtNewOperNode(GT_ADDR, tree->TypeGet(), op1);
            }
            else
            {
                return nullptr;
            }

            break;
    }

    // Copy over the tree's flags, except the per-node-identity bits.
    copy->gtFlags |= tree->gtFlags & ~GTF_NODE_MASK;
#if defined(DEBUG)
    copy->gtDebugFlags |= tree->gtDebugFlags & ~GTF_DEBUG_NODE_MASK;
#endif // defined(DEBUG)

    return copy;
}
6887
6888 //------------------------------------------------------------------------
6889 // gtCloneExpr: Create a copy of `tree`, adding flags `addFlags`, mapping
6890 //              local `varNum` to int constant `varVal` if it appears at
6891 //              the root, and mapping uses of local `deepVarNum` to constant
6892 //              `deepVarVal` if they occur beyond the root.
6893 //
6894 // Arguments:
6895 //    tree - GenTree to create a copy of
6896 //    addFlags - GTF_* flags to add to the copied tree nodes
6897 //    varNum - lclNum to replace at the root, or ~0 for no root replacement
6898 //    varVal - If replacing at root, replace local `varNum` with IntCns `varVal`
6899 //    deepVarNum - lclNum to replace uses of beyond the root, or ~0 for no replacement
6900 //    deepVarVal - If replacing beyond root, replace `deepVarNum` with IntCns `deepVarVal`
6901 //
6902 // Return Value:
6903 //    A copy of the given tree with the replacements and added flags specified.
6904 //
6905 // Notes:
6906 //    Top-level callers should generally call the overload that doesn't have
6907 //    the explicit `deepVarNum` and `deepVarVal` parameters; those are used in
6908 //    recursive invocations to avoid replacing defs.
6909
6910 GenTree* Compiler::gtCloneExpr(
6911     GenTree* tree, unsigned addFlags, unsigned varNum, int varVal, unsigned deepVarNum, int deepVarVal)
6912 {
6913     if (tree == nullptr)
6914     {
6915         return nullptr;
6916     }
6917
6918     /* Figure out what kind of a node we have */
6919
6920     genTreeOps oper = tree->OperGet();
6921     unsigned   kind = tree->OperKind();
6922     GenTree*   copy;
6923
6924     /* Is this a constant or leaf node? */
6925
6926     if (kind & (GTK_CONST | GTK_LEAF))
6927     {
6928         switch (oper)
6929         {
6930             case GT_CNS_INT:
6931
6932 #if defined(LATE_DISASM)
6933                 if (tree->IsIconHandle())
6934                 {
6935                     copy = gtNewIconHandleNode(tree->gtIntCon.gtIconVal, tree->gtFlags, tree->gtIntCon.gtFieldSeq);
6936                     copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
6937                     copy->gtType                       = tree->gtType;
6938                 }
6939                 else
6940 #endif
6941                 {
6942                     copy                               = gtNewIconNode(tree->gtIntCon.gtIconVal, tree->gtType);
6943                     copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
6944                     copy->gtIntCon.gtFieldSeq          = tree->gtIntCon.gtFieldSeq;
6945                 }
6946                 goto DONE;
6947
6948             case GT_CNS_LNG:
6949                 copy = gtNewLconNode(tree->gtLngCon.gtLconVal);
6950                 goto DONE;
6951
6952             case GT_CNS_DBL:
6953                 copy         = gtNewDconNode(tree->gtDblCon.gtDconVal);
6954                 copy->gtType = tree->gtType; // keep the same type
6955                 goto DONE;
6956
6957             case GT_CNS_STR:
6958                 copy = gtNewSconNode(tree->gtStrCon.gtSconCPX, tree->gtStrCon.gtScpHnd);
6959                 goto DONE;
6960
6961             case GT_LCL_VAR:
6962
6963                 if (tree->gtLclVarCommon.gtLclNum == varNum)
6964                 {
6965                     copy = gtNewIconNode(varVal, tree->gtType);
6966                     if (tree->gtFlags & GTF_VAR_ARR_INDEX)
6967                     {
6968                         copy->LabelIndex(this);
6969                     }
6970                 }
6971                 else
6972                 {
6973                     // Remember that the LclVar node has been cloned. The flag will
6974                     // be set on 'copy' as well.
6975                     tree->gtFlags |= GTF_VAR_CLONED;
6976                     copy = gtNewLclvNode(tree->gtLclVar.gtLclNum, tree->gtType DEBUGARG(tree->gtLclVar.gtLclILoffs));
6977                     copy->AsLclVarCommon()->SetSsaNum(tree->AsLclVarCommon()->GetSsaNum());
6978                 }
6979                 copy->gtFlags = tree->gtFlags;
6980                 goto DONE;
6981
6982             case GT_LCL_FLD:
6983                 if (tree->gtLclFld.gtLclNum == varNum)
6984                 {
6985                     IMPL_LIMITATION("replacing GT_LCL_FLD with a constant");
6986                 }
6987                 else
6988                 {
6989                     // Remember that the LclVar node has been cloned. The flag will
6990                     // be set on 'copy' as well.
6991                     tree->gtFlags |= GTF_VAR_CLONED;
6992                     copy = new (this, GT_LCL_FLD)
6993                         GenTreeLclFld(tree->TypeGet(), tree->gtLclFld.gtLclNum, tree->gtLclFld.gtLclOffs);
6994                     copy->gtLclFld.gtFieldSeq = tree->gtLclFld.gtFieldSeq;
6995                     copy->gtFlags             = tree->gtFlags;
6996                 }
6997                 goto DONE;
6998
6999             case GT_CLS_VAR:
7000                 copy = new (this, GT_CLS_VAR)
7001                     GenTreeClsVar(tree->TypeGet(), tree->gtClsVar.gtClsVarHnd, tree->gtClsVar.gtFieldSeq);
7002                 goto DONE;
7003
7004             case GT_RET_EXPR:
7005                 // GT_RET_EXPR is unique node, that contains a link to a gtInlineCandidate node,
7006                 // that is part of another statement. We cannot clone both here and cannot
7007                 // create another GT_RET_EXPR that points to the same gtInlineCandidate.
7008                 NO_WAY("Cloning of GT_RET_EXPR node not supported");
7009                 goto DONE;
7010
7011             case GT_MEMORYBARRIER:
7012                 copy = new (this, GT_MEMORYBARRIER) GenTree(GT_MEMORYBARRIER, TYP_VOID);
7013                 goto DONE;
7014
7015             case GT_ARGPLACE:
7016                 copy = gtNewArgPlaceHolderNode(tree->gtType, tree->gtArgPlace.gtArgPlaceClsHnd);
7017                 goto DONE;
7018
7019             case GT_FTN_ADDR:
7020                 copy = new (this, oper) GenTreeFptrVal(tree->gtType, tree->gtFptrVal.gtFptrMethod);
7021
7022 #ifdef FEATURE_READYTORUN_COMPILER
7023                 copy->gtFptrVal.gtEntryPoint = tree->gtFptrVal.gtEntryPoint;
7024 #endif
7025                 goto DONE;
7026
7027             case GT_CATCH_ARG:
7028             case GT_NO_OP:
7029             case GT_LABEL:
7030                 copy = new (this, oper) GenTree(oper, tree->gtType);
7031                 goto DONE;
7032
7033 #if !FEATURE_EH_FUNCLETS
7034             case GT_END_LFIN:
7035 #endif // !FEATURE_EH_FUNCLETS
7036             case GT_JMP:
7037                 copy = new (this, oper) GenTreeVal(oper, tree->gtType, tree->gtVal.gtVal1);
7038                 goto DONE;
7039
7040             default:
7041                 NO_WAY("Cloning of node not supported");
7042                 goto DONE;
7043         }
7044     }
7045
7046     /* Is it a 'simple' unary/binary operator? */
7047
7048     if (kind & GTK_SMPOP)
7049     {
7050         /* If necessary, make sure we allocate a "fat" tree node */
7051         CLANG_FORMAT_COMMENT_ANCHOR;
7052
7053         switch (oper)
7054         {
7055             /* These nodes sometimes get bashed to "fat" ones */
7056
7057             case GT_MUL:
7058             case GT_DIV:
7059             case GT_MOD:
7060
7061             case GT_UDIV:
7062             case GT_UMOD:
7063
7064                 //  In the implementation of gtNewLargeOperNode you have
7065                 //  to give an oper that will create a small node,
7066                 //  otherwise it asserts.
7067                 //
7068                 if (GenTree::s_gtNodeSizes[oper] == TREE_NODE_SZ_SMALL)
7069                 {
7070                     copy = gtNewLargeOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1,
7071                                               tree->OperIsBinary() ? tree->gtOp.gtOp2 : nullptr);
7072                 }
7073                 else // Always a large tree
7074                 {
7075                     if (tree->OperIsBinary())
7076                     {
7077                         copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2);
7078                     }
7079                     else
7080                     {
7081                         copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1);
7082                     }
7083                 }
7084                 break;
7085
7086             case GT_CAST:
7087                 copy =
7088                     new (this, LargeOpOpcode()) GenTreeCast(tree->TypeGet(), tree->gtCast.CastOp(), tree->IsUnsigned(),
7089                                                             tree->gtCast.gtCastType DEBUGARG(/*largeNode*/ TRUE));
7090                 break;
7091
7092             // The nodes below this are not bashed, so they can be allocated at their individual sizes.
7093
7094             case GT_LIST:
7095                 assert((tree->gtOp.gtOp2 == nullptr) || tree->gtOp.gtOp2->OperIsList());
7096                 copy             = new (this, GT_LIST) GenTreeArgList(tree->gtOp.gtOp1);
7097                 copy->gtOp.gtOp2 = tree->gtOp.gtOp2;
7098                 break;
7099
7100             case GT_FIELD_LIST:
7101                 copy = new (this, GT_FIELD_LIST) GenTreeFieldList(tree->gtOp.gtOp1, tree->AsFieldList()->gtFieldOffset,
7102                                                                   tree->AsFieldList()->gtFieldType, nullptr);
7103                 copy->gtOp.gtOp2 = tree->gtOp.gtOp2;
7104                 copy->gtFlags    = (copy->gtFlags & ~GTF_FIELD_LIST_HEAD) | (tree->gtFlags & GTF_FIELD_LIST_HEAD);
7105                 break;
7106
7107             case GT_INDEX:
7108             {
7109                 GenTreeIndex* asInd = tree->AsIndex();
7110                 copy                = new (this, GT_INDEX)
7111                     GenTreeIndex(asInd->TypeGet(), asInd->Arr(), asInd->Index(), asInd->gtIndElemSize);
7112                 copy->AsIndex()->gtStructElemClass = asInd->gtStructElemClass;
7113             }
7114             break;
7115
7116             case GT_INDEX_ADDR:
7117             {
7118                 GenTreeIndexAddr* asIndAddr = tree->AsIndexAddr();
7119
7120                 copy = new (this, GT_INDEX_ADDR)
7121                     GenTreeIndexAddr(asIndAddr->Arr(), asIndAddr->Index(), asIndAddr->gtElemType,
7122                                      asIndAddr->gtStructElemClass, asIndAddr->gtElemSize, asIndAddr->gtLenOffset,
7123                                      asIndAddr->gtElemOffset);
7124                 copy->AsIndexAddr()->gtIndRngFailBB = asIndAddr->gtIndRngFailBB;
7125             }
7126             break;
7127
7128             case GT_ALLOCOBJ:
7129             {
7130                 GenTreeAllocObj* asAllocObj = tree->AsAllocObj();
7131                 copy                        = new (this, GT_ALLOCOBJ)
7132                     GenTreeAllocObj(tree->TypeGet(), asAllocObj->gtNewHelper, asAllocObj->gtHelperHasSideEffects,
7133                                     asAllocObj->gtAllocObjClsHnd, asAllocObj->gtOp1);
7134             }
7135             break;
7136
7137             case GT_RUNTIMELOOKUP:
7138             {
7139                 GenTreeRuntimeLookup* asRuntimeLookup = tree->AsRuntimeLookup();
7140
7141                 copy = new (this, GT_RUNTIMELOOKUP)
7142                     GenTreeRuntimeLookup(asRuntimeLookup->gtHnd, asRuntimeLookup->gtHndType, asRuntimeLookup->gtOp1);
7143             }
7144             break;
7145
7146             case GT_ARR_LENGTH:
7147                 copy = gtNewArrLen(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtArrLen.ArrLenOffset());
7148                 break;
7149
7150             case GT_ARR_INDEX:
7151                 copy = new (this, GT_ARR_INDEX)
7152                     GenTreeArrIndex(tree->TypeGet(),
7153                                     gtCloneExpr(tree->gtArrIndex.ArrObj(), addFlags, deepVarNum, deepVarVal),
7154                                     gtCloneExpr(tree->gtArrIndex.IndexExpr(), addFlags, deepVarNum, deepVarVal),
7155                                     tree->gtArrIndex.gtCurrDim, tree->gtArrIndex.gtArrRank,
7156                                     tree->gtArrIndex.gtArrElemType);
7157                 break;
7158
7159             case GT_QMARK:
7160                 copy = new (this, GT_QMARK) GenTreeQmark(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2, this);
7161                 break;
7162
7163             case GT_OBJ:
7164                 copy = new (this, GT_OBJ)
7165                     GenTreeObj(tree->TypeGet(), tree->gtOp.gtOp1, tree->AsObj()->gtClass, tree->gtBlk.gtBlkSize);
7166                 copy->AsObj()->CopyGCInfo(tree->AsObj());
7167                 copy->gtBlk.gtBlkOpGcUnsafe = tree->gtBlk.gtBlkOpGcUnsafe;
7168                 break;
7169
7170             case GT_BLK:
7171                 copy = new (this, GT_BLK) GenTreeBlk(GT_BLK, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtBlk.gtBlkSize);
7172                 copy->gtBlk.gtBlkOpGcUnsafe = tree->gtBlk.gtBlkOpGcUnsafe;
7173                 break;
7174
7175             case GT_DYN_BLK:
7176                 copy = new (this, GT_DYN_BLK) GenTreeDynBlk(tree->gtOp.gtOp1, tree->gtDynBlk.gtDynamicSize);
7177                 copy->gtBlk.gtBlkOpGcUnsafe = tree->gtBlk.gtBlkOpGcUnsafe;
7178                 break;
7179
7180             case GT_BOX:
7181                 copy = new (this, GT_BOX)
7182                     GenTreeBox(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtBox.gtAsgStmtWhenInlinedBoxValue,
7183                                tree->gtBox.gtCopyStmtWhenInlinedBoxValue);
7184                 break;
7185
7186             case GT_INTRINSIC:
7187                 copy = new (this, GT_INTRINSIC)
7188                     GenTreeIntrinsic(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2,
7189                                      tree->gtIntrinsic.gtIntrinsicId, tree->gtIntrinsic.gtMethodHandle);
7190 #ifdef FEATURE_READYTORUN_COMPILER
7191                 copy->gtIntrinsic.gtEntryPoint = tree->gtIntrinsic.gtEntryPoint;
7192 #endif
7193                 break;
7194
7195             case GT_LEA:
7196             {
7197                 GenTreeAddrMode* addrModeOp = tree->AsAddrMode();
7198                 copy                        = new (this, GT_LEA)
7199                     GenTreeAddrMode(addrModeOp->TypeGet(), addrModeOp->Base(), addrModeOp->Index(), addrModeOp->gtScale,
7200                                     static_cast<unsigned>(addrModeOp->Offset()));
7201             }
7202             break;
7203
7204             case GT_COPY:
7205             case GT_RELOAD:
7206             {
7207                 copy = new (this, oper) GenTreeCopyOrReload(oper, tree->TypeGet(), tree->gtGetOp1());
7208             }
7209             break;
7210
7211 #ifdef FEATURE_SIMD
7212             case GT_SIMD:
7213             {
7214                 GenTreeSIMD* simdOp = tree->AsSIMD();
7215                 copy                = gtNewSIMDNode(simdOp->TypeGet(), simdOp->gtGetOp1(), simdOp->gtGetOp2IfPresent(),
7216                                      simdOp->gtSIMDIntrinsicID, simdOp->gtSIMDBaseType, simdOp->gtSIMDSize);
7217             }
7218             break;
7219 #endif
7220
7221 #ifdef FEATURE_HW_INTRINSICS
7222             case GT_HWIntrinsic:
7223             {
7224                 GenTreeHWIntrinsic* hwintrinsicOp = tree->AsHWIntrinsic();
7225                 copy                              = new (this, GT_HWIntrinsic)
7226                     GenTreeHWIntrinsic(hwintrinsicOp->TypeGet(), hwintrinsicOp->gtGetOp1(),
7227                                        hwintrinsicOp->gtGetOp2IfPresent(), hwintrinsicOp->gtHWIntrinsicId,
7228                                        hwintrinsicOp->gtSIMDBaseType, hwintrinsicOp->gtSIMDSize);
7229                 copy->AsHWIntrinsic()->gtIndexBaseType = hwintrinsicOp->gtIndexBaseType;
7230             }
7231             break;
7232 #endif
7233
7234             default:
7235                 assert(!GenTree::IsExOp(tree->OperKind()) && tree->OperIsSimple());
7236                 // We're in the SimpleOp case, so it's always unary or binary.
7237                 if (GenTree::OperIsUnary(tree->OperGet()))
7238                 {
7239                     copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, /*doSimplifications*/ false);
7240                 }
7241                 else
7242                 {
7243                     assert(GenTree::OperIsBinary(tree->OperGet()));
7244                     copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2);
7245                 }
7246                 break;
7247         }
7248
7249         // Some flags are conceptually part of the gtOper, and should be copied immediately.
7250         if (tree->gtOverflowEx())
7251         {
7252             copy->gtFlags |= GTF_OVERFLOW;
7253         }
7254
7255         if (tree->gtOp.gtOp1)
7256         {
7257             if (tree->gtOper == GT_ASG)
7258             {
7259                 // Don't replace varNum if it appears as the LHS of an assign.
7260                 copy->gtOp.gtOp1 = gtCloneExpr(tree->gtOp.gtOp1, addFlags, -1, 0, deepVarNum, deepVarVal);
7261             }
7262             else
7263             {
7264                 copy->gtOp.gtOp1 = gtCloneExpr(tree->gtOp.gtOp1, addFlags, deepVarNum, deepVarVal);
7265             }
7266         }
7267
7268         if (tree->gtGetOp2IfPresent())
7269         {
7270             copy->gtOp.gtOp2 = gtCloneExpr(tree->gtOp.gtOp2, addFlags, deepVarNum, deepVarVal);
7271         }
7272
7273         /* Flags */
7274         addFlags |= tree->gtFlags;
7275
7276         // Copy any node annotations, if necessary.
7277         switch (tree->gtOper)
7278         {
7279             case GT_STOREIND:
7280             case GT_IND:
7281             case GT_OBJ:
7282             case GT_STORE_OBJ:
7283             {
7284                 ArrayInfo arrInfo;
7285                 if (!tree->AsIndir()->gtOp1->OperIs(GT_INDEX_ADDR) && TryGetArrayInfo(tree->AsIndir(), &arrInfo))
7286                 {
7287                     GetArrayInfoMap()->Set(copy, arrInfo);
7288                 }
7289             }
7290             break;
7291
7292             default:
7293                 break;
7294         }
7295
7296 #ifdef DEBUG
7297         /* GTF_NODE_MASK should not be propagated from 'tree' to 'copy' */
7298         addFlags &= ~GTF_NODE_MASK;
7299 #endif
7300
7301         // Effects flags propagate upwards.
7302         if (copy->gtOp.gtOp1 != nullptr)
7303         {
7304             copy->gtFlags |= (copy->gtOp.gtOp1->gtFlags & GTF_ALL_EFFECT);
7305         }
7306         if (copy->gtGetOp2IfPresent() != nullptr)
7307         {
7308             copy->gtFlags |= (copy->gtGetOp2()->gtFlags & GTF_ALL_EFFECT);
7309         }
7310
7311         goto DONE;
7312     }
7313
7314     /* See what kind of a special operator we have here */
7315
7316     switch (oper)
7317     {
7318         case GT_STMT:
7319             copy = gtCloneExpr(tree->gtStmt.gtStmtExpr, addFlags, deepVarNum, deepVarVal);
7320             copy = gtNewStmt(copy, tree->gtStmt.gtStmtILoffsx);
7321             goto DONE;
7322
7323         case GT_CALL:
7324
7325             // We can't safely clone calls that have GT_RET_EXPRs via gtCloneExpr.
7326             // You must use gtCloneCandidateCall for these calls (and then do appropriate other fixup)
7327             if (tree->gtCall.IsInlineCandidate() || tree->gtCall.IsGuardedDevirtualizationCandidate())
7328             {
7329                 NO_WAY("Cloning of calls with associated GT_RET_EXPR nodes is not supported");
7330             }
7331
7332             copy = gtCloneExprCallHelper(tree->AsCall(), addFlags, deepVarNum, deepVarVal);
7333             break;
7334
7335         case GT_FIELD:
7336
7337             copy = gtNewFieldRef(tree->TypeGet(), tree->gtField.gtFldHnd, nullptr, tree->gtField.gtFldOffset);
7338
7339             copy->gtField.gtFldObj = tree->gtField.gtFldObj
7340                                          ? gtCloneExpr(tree->gtField.gtFldObj, addFlags, deepVarNum, deepVarVal)
7341                                          : nullptr;
7342             copy->gtField.gtFldMayOverlap = tree->gtField.gtFldMayOverlap;
7343 #ifdef FEATURE_READYTORUN_COMPILER
7344             copy->gtField.gtFieldLookup = tree->gtField.gtFieldLookup;
7345 #endif
7346
7347             break;
7348
7349         case GT_ARR_ELEM:
7350         {
7351             GenTree* inds[GT_ARR_MAX_RANK];
7352             for (unsigned dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
7353             {
7354                 inds[dim] = gtCloneExpr(tree->gtArrElem.gtArrInds[dim], addFlags, deepVarNum, deepVarVal);
7355             }
7356             copy = new (this, GT_ARR_ELEM)
7357                 GenTreeArrElem(tree->TypeGet(), gtCloneExpr(tree->gtArrElem.gtArrObj, addFlags, deepVarNum, deepVarVal),
7358                                tree->gtArrElem.gtArrRank, tree->gtArrElem.gtArrElemSize, tree->gtArrElem.gtArrElemType,
7359                                &inds[0]);
7360         }
7361         break;
7362
7363         case GT_ARR_OFFSET:
7364         {
7365             copy = new (this, GT_ARR_OFFSET)
7366                 GenTreeArrOffs(tree->TypeGet(), gtCloneExpr(tree->gtArrOffs.gtOffset, addFlags, deepVarNum, deepVarVal),
7367                                gtCloneExpr(tree->gtArrOffs.gtIndex, addFlags, deepVarNum, deepVarVal),
7368                                gtCloneExpr(tree->gtArrOffs.gtArrObj, addFlags, deepVarNum, deepVarVal),
7369                                tree->gtArrOffs.gtCurrDim, tree->gtArrOffs.gtArrRank, tree->gtArrOffs.gtArrElemType);
7370         }
7371         break;
7372
7373         case GT_CMPXCHG:
7374             copy = new (this, GT_CMPXCHG)
7375                 GenTreeCmpXchg(tree->TypeGet(),
7376                                gtCloneExpr(tree->gtCmpXchg.gtOpLocation, addFlags, deepVarNum, deepVarVal),
7377                                gtCloneExpr(tree->gtCmpXchg.gtOpValue, addFlags, deepVarNum, deepVarVal),
7378                                gtCloneExpr(tree->gtCmpXchg.gtOpComparand, addFlags, deepVarNum, deepVarVal));
7379             break;
7380
7381         case GT_ARR_BOUNDS_CHECK:
7382 #ifdef FEATURE_SIMD
7383         case GT_SIMD_CHK:
7384 #endif // FEATURE_SIMD
7385 #ifdef FEATURE_HW_INTRINSICS
7386         case GT_HW_INTRINSIC_CHK:
7387 #endif // FEATURE_HW_INTRINSICS
7388             copy = new (this, oper)
7389                 GenTreeBoundsChk(oper, tree->TypeGet(),
7390                                  gtCloneExpr(tree->gtBoundsChk.gtIndex, addFlags, deepVarNum, deepVarVal),
7391                                  gtCloneExpr(tree->gtBoundsChk.gtArrLen, addFlags, deepVarNum, deepVarVal),
7392                                  tree->gtBoundsChk.gtThrowKind);
7393             copy->gtBoundsChk.gtIndRngFailBB = tree->gtBoundsChk.gtIndRngFailBB;
7394             break;
7395
7396         case GT_STORE_DYN_BLK:
7397         case GT_DYN_BLK:
7398             copy = new (this, oper)
7399                 GenTreeDynBlk(gtCloneExpr(tree->gtDynBlk.Addr(), addFlags, deepVarNum, deepVarVal),
7400                               gtCloneExpr(tree->gtDynBlk.gtDynamicSize, addFlags, deepVarNum, deepVarVal));
7401             break;
7402
7403         default:
7404 #ifdef DEBUG
7405             gtDispTree(tree);
7406 #endif
7407             NO_WAY("unexpected operator");
7408     }
7409
7410 DONE:
7411
7412     // If it has a zero-offset field seq, copy annotation.
7413     if (tree->TypeGet() == TYP_BYREF)
7414     {
7415         FieldSeqNode* fldSeq = nullptr;
7416         if (GetZeroOffsetFieldMap()->Lookup(tree, &fldSeq))
7417         {
7418             GetZeroOffsetFieldMap()->Set(copy, fldSeq);
7419         }
7420     }
7421
7422     copy->gtVNPair = tree->gtVNPair; // A cloned tree gets the orginal's Value number pair
7423
7424     /* Compute the flags for the copied node. Note that we can do this only
7425        if we didnt gtFoldExpr(copy) */
7426
7427     if (copy->gtOper == oper)
7428     {
7429         addFlags |= tree->gtFlags;
7430
7431 #ifdef DEBUG
7432         /* GTF_NODE_MASK should not be propagated from 'tree' to 'copy' */
7433         addFlags &= ~GTF_NODE_MASK;
7434 #endif
7435         // Some other flags depend on the context of the expression, and should not be preserved.
7436         // For example, GTF_RELOP_QMARK:
7437         if (copy->OperKind() & GTK_RELOP)
7438         {
7439             addFlags &= ~GTF_RELOP_QMARK;
7440         }
7441         // On the other hand, if we're creating such a context, restore this flag.
7442         if (copy->OperGet() == GT_QMARK)
7443         {
7444             copy->gtOp.gtOp1->gtFlags |= GTF_RELOP_QMARK;
7445         }
7446
7447         copy->gtFlags |= addFlags;
7448
7449         // Update side effect flags since they may be different from the source side effect flags.
7450         // For example, we may have replaced some locals with constants and made indirections non-throwing.
7451         gtUpdateNodeSideEffects(copy);
7452     }
7453
7454     /* GTF_COLON_COND should be propagated from 'tree' to 'copy' */
7455     copy->gtFlags |= (tree->gtFlags & GTF_COLON_COND);
7456
7457 #if defined(DEBUG)
7458     // Non-node debug flags should be propagated from 'tree' to 'copy'
7459     copy->gtDebugFlags |= (tree->gtDebugFlags & ~GTF_DEBUG_NODE_MASK);
7460 #endif
7461
7462     /* Make sure to copy back fields that may have been initialized */
7463
7464     copy->CopyRawCosts(tree);
7465     copy->gtRsvdRegs = tree->gtRsvdRegs;
7466     copy->CopyReg(tree);
7467     return copy;
7468 }
7469
7470 //------------------------------------------------------------------------
7471 // gtCloneExprCallHelper: clone a call tree
7472 //
7473 // Notes:
7474 //    Do not invoke this method directly, instead call either gtCloneExpr
7475 //    or gtCloneCandidateCall, as appropriate.
7476 //
7477 // Arguments:
7478 //    tree - the call to clone
7479 //    addFlags - GTF_* flags to add to the copied tree nodes
7480 //    deepVarNum - lclNum to replace uses of beyond the root, or BAD_VAR_NUM for no replacement
7481 //    deepVarVal - If replacing beyond root, replace `deepVarNum` with IntCns `deepVarVal`
7482 //
7483 // Returns:
7484 //    Cloned copy of call and all subtrees.
7485
GenTreeCall* Compiler::gtCloneExprCallHelper(GenTreeCall* tree, unsigned addFlags, unsigned deepVarNum, int deepVarVal)
{
    // Allocate the shell of the cloned call; all fields are filled in below.
    GenTreeCall* copy = new (this, GT_CALL) GenTreeCall(tree->TypeGet());

    // Deep-clone the argument trees ('this' pointer, early args, late args),
    // applying the same addFlags/deepVarNum/deepVarVal substitution as gtCloneExpr.
    copy->gtCallObjp = tree->gtCallObjp ? gtCloneExpr(tree->gtCallObjp, addFlags, deepVarNum, deepVarVal) : nullptr;
    copy->gtCallArgs =
        tree->gtCallArgs ? gtCloneExpr(tree->gtCallArgs, addFlags, deepVarNum, deepVarVal)->AsArgList() : nullptr;
    copy->gtCallMoreFlags = tree->gtCallMoreFlags;
    copy->gtCallLateArgs  = tree->gtCallLateArgs
                               ? gtCloneExpr(tree->gtCallLateArgs, addFlags, deepVarNum, deepVarVal)->AsArgList()
                               : nullptr;

#if !FEATURE_FIXED_OUT_ARGS
    // Targets without a fixed outgoing arg area track register args separately.
    copy->regArgList      = tree->regArgList;
    copy->regArgListCount = tree->regArgListCount;
#endif

    // The call sig comes from the EE and doesn't change throughout the compilation process, meaning
    // we only really need one physical copy of it. Therefore a shallow pointer copy will suffice.
    // (Note that this still holds even if the tree we are cloning was created by an inlinee compiler,
    // because the inlinee still uses the inliner's memory allocator anyway.)
    copy->callSig = tree->callSig;

    copy->gtCallType    = tree->gtCallType;
    copy->gtReturnType  = tree->gtReturnType;
    // NOTE(review): gtControlExpr is shallow-copied, not cloned — presumably the
    // control expression (if any) is shared or set up later; confirm with callers.
    copy->gtControlExpr = tree->gtControlExpr;

    /* Copy the union */
    // The union member that is live depends on the call type, so each case
    // copies only the fields valid for that kind of call.
    if (tree->gtCallType == CT_INDIRECT)
    {
        // Indirect call: clone the cookie and target-address trees.
        copy->gtCallCookie =
            tree->gtCallCookie ? gtCloneExpr(tree->gtCallCookie, addFlags, deepVarNum, deepVarVal) : nullptr;
        copy->gtCallAddr = tree->gtCallAddr ? gtCloneExpr(tree->gtCallAddr, addFlags, deepVarNum, deepVarVal) : nullptr;
    }
    else if (tree->IsVirtualStub())
    {
        // Virtual stub dispatch: method handle plus the stub's address.
        copy->gtCallMethHnd      = tree->gtCallMethHnd;
        copy->gtStubCallStubAddr = tree->gtStubCallStubAddr;
    }
    else
    {
        // Direct (or other virtual) call. Candidate info is deliberately NOT
        // copied — cloning candidate calls goes through gtCloneCandidateCall.
        copy->gtCallMethHnd         = tree->gtCallMethHnd;
        copy->gtInlineCandidateInfo = nullptr;
    }

    if (tree->fgArgInfo)
    {
        // Create and initialize the fgArgInfo for our copy of the call tree
        copy->fgArgInfo = new (this, CMK_Unknown) fgArgInfo(copy, tree);
    }
    else
    {
        copy->fgArgInfo = nullptr;
    }

    copy->gtRetClsHnd = tree->gtRetClsHnd;

#if FEATURE_MULTIREG_RET
    copy->gtReturnTypeDesc = tree->gtReturnTypeDesc;
#endif

#ifdef FEATURE_READYTORUN_COMPILER
    copy->setEntryPoint(tree->gtEntryPoint);
#endif

#if defined(DEBUG) || defined(INLINE_DATA)
    // Diagnostic-only bookkeeping; not needed for codegen correctness.
    copy->gtInlineObservation = tree->gtInlineObservation;
    copy->gtRawILOffset       = tree->gtCall.gtRawILOffset;
#endif

    copy->CopyOtherRegFlags(tree);

    return copy;
}
7560
7561 //------------------------------------------------------------------------
7562 // gtCloneCandidateCall: clone a call that is an inline or guarded
7563 //    devirtualization candidate (~ any call that can have a GT_RET_EXPR)
7564 //
7565 // Notes:
7566 //    If the call really is a candidate, the caller must take additional steps
7567 //    after cloning to re-establish candidate info and the relationship between
7568 //    the candidate and any associated GT_RET_EXPR.
7569 //
7570 // Arguments:
7571 //    call - the call to clone
7572 //
7573 // Returns:
7574 //    Cloned copy of call and all subtrees.
7575
7576 GenTreeCall* Compiler::gtCloneCandidateCall(GenTreeCall* call)
7577 {
7578     assert(call->IsInlineCandidate() || call->IsGuardedDevirtualizationCandidate());
7579
7580     GenTreeCall* result = gtCloneExprCallHelper(call);
7581
7582     // There is some common post-processing in gtCloneExpr that we reproduce
7583     // here, for the fields that make sense for candidate calls.
7584     result->gtFlags |= call->gtFlags;
7585
7586 #if defined(DEBUG)
7587     result->gtDebugFlags |= (call->gtDebugFlags & ~GTF_DEBUG_NODE_MASK);
7588 #endif
7589
7590     result->CopyReg(call);
7591
7592     return result;
7593 }
7594
7595 //------------------------------------------------------------------------
7596 // gtReplaceTree: Replace a tree with a new tree.
7597 //
7598 // Arguments:
7599 //    stmt            - The top-level root stmt of the tree being replaced.
7600 //                      Must not be null.
7601 //    tree            - The tree being replaced. Must not be null.
7602 //    replacementTree - The replacement tree. Must not be null.
7603 //
7604 // Return Value:
7605 //    The tree node that replaces the old tree.
7606 //
7607 // Assumptions:
7608 //    The sequencing of the stmt has been done.
7609 //
7610 // Notes:
7611 //    The caller must ensure that the original statement has been sequenced,
7612 //    and the side effect flags are updated on the statement nodes,
7613 //    but this method will sequence 'replacementTree', and insert it into the
7614 //    proper place in the statement sequence.
7615
7616 GenTree* Compiler::gtReplaceTree(GenTreeStmt* stmt, GenTree* tree, GenTree* replacementTree)
7617 {
7618     assert(fgStmtListThreaded);
7619     assert(tree != nullptr);
7620     assert(stmt != nullptr);
7621     assert(replacementTree != nullptr);
7622
7623     GenTree** treePtr    = nullptr;
7624     GenTree*  treeParent = tree->gtGetParent(&treePtr);
7625
7626     assert(treeParent != nullptr || tree == stmt->gtStmtExpr);
7627
7628     if (treePtr == nullptr)
7629     {
7630         // Replace the stmt expr and rebuild the linear order for "stmt".
7631         assert(treeParent == nullptr);
7632         assert(fgOrder != FGOrderLinear);
7633         stmt->gtStmtExpr = tree;
7634         fgSetStmtSeq(stmt);
7635     }
7636     else
7637     {
7638         assert(treeParent != nullptr);
7639
7640         // Check to see if the node to be replaced is a call argument and if so,
7641         // set `treeParent` to the call node.
7642         GenTree* cursor = treeParent;
7643         while ((cursor != nullptr) && (cursor->OperGet() == GT_LIST))
7644         {
7645             cursor = cursor->gtNext;
7646         }
7647
7648         if ((cursor != nullptr) && (cursor->OperGet() == GT_CALL))
7649         {
7650             treeParent = cursor;
7651         }
7652
7653 #ifdef DEBUG
7654         GenTree** useEdge;
7655         assert(treeParent->TryGetUse(tree, &useEdge));
7656         assert(useEdge == treePtr);
7657 #endif // DEBUG
7658
7659         GenTree* treeFirstNode = fgGetFirstNode(tree);
7660         GenTree* treeLastNode  = tree;
7661         GenTree* treePrevNode  = treeFirstNode->gtPrev;
7662         GenTree* treeNextNode  = treeLastNode->gtNext;
7663
7664         treeParent->ReplaceOperand(treePtr, replacementTree);
7665
7666         // Build the linear order for "replacementTree".
7667         fgSetTreeSeq(replacementTree, treePrevNode);
7668
7669         // Restore linear-order Prev and Next for "replacementTree".
7670         if (treePrevNode != nullptr)
7671         {
7672             treeFirstNode         = fgGetFirstNode(replacementTree);
7673             treeFirstNode->gtPrev = treePrevNode;
7674             treePrevNode->gtNext  = treeFirstNode;
7675         }
7676         else
7677         {
7678             // Update the linear oder start of "stmt" if treeFirstNode
7679             // appears to have replaced the original first node.
7680             assert(treeFirstNode == stmt->gtStmtList);
7681             stmt->gtStmtList = fgGetFirstNode(replacementTree);
7682         }
7683
7684         if (treeNextNode != nullptr)
7685         {
7686             treeLastNode         = replacementTree;
7687             treeLastNode->gtNext = treeNextNode;
7688             treeNextNode->gtPrev = treeLastNode;
7689         }
7690     }
7691
7692     return replacementTree;
7693 }
7694
7695 //------------------------------------------------------------------------
7696 // gtUpdateSideEffects: Update the side effects of a tree and its ancestors
7697 //
7698 // Arguments:
7699 //    stmt            - The tree's statement
7700 //    tree            - Tree to update the side effects for
7701 //
7702 // Note: If tree's order hasn't been established, the method updates side effect
7703 //       flags on all statement's nodes.
7704
7705 void Compiler::gtUpdateSideEffects(GenTreeStmt* stmt, GenTree* tree)
7706 {
7707     if (fgStmtListThreaded)
7708     {
7709         gtUpdateTreeAncestorsSideEffects(tree);
7710     }
7711     else
7712     {
7713         gtUpdateStmtSideEffects(stmt);
7714     }
7715 }
7716
7717 //------------------------------------------------------------------------
7718 // gtUpdateTreeAncestorsSideEffects: Update the side effects of a tree and its ancestors
7719 //                                   when statement order has been established.
7720 //
7721 // Arguments:
7722 //    tree            - Tree to update the side effects for
7723
7724 void Compiler::gtUpdateTreeAncestorsSideEffects(GenTree* tree)
7725 {
7726     assert(fgStmtListThreaded);
7727     while (tree != nullptr)
7728     {
7729         gtUpdateNodeSideEffects(tree);
7730         tree = tree->gtGetParent(nullptr);
7731     }
7732 }
7733
//------------------------------------------------------------------------
// gtUpdateStmtSideEffects: Update the side effects for statement tree nodes.
//
// Arguments:
//    stmt            - The statement to update side effects on
//
// Notes:
//    Walks the full statement expression: the pre-order callback recomputes each
//    node's own operation flags, and the post-order callback propagates each
//    child's effect flags up to its parent.

void Compiler::gtUpdateStmtSideEffects(GenTreeStmt* stmt)
{
    fgWalkTree(&stmt->gtStmtExpr, fgUpdateSideEffectsPre, fgUpdateSideEffectsPost);
}
7744
7745 //------------------------------------------------------------------------
7746 // gtUpdateNodeOperSideEffects: Update the side effects based on the node operation.
7747 //
7748 // Arguments:
7749 //    tree            - Tree to update the side effects on
7750 //
7751 // Notes:
7752 //    This method currently only updates GTF_EXCEPT, GTF_ASG, and GTF_CALL flags.
7753 //    The other side effect flags may remain unnecessarily (conservatively) set.
7754 //    The caller of this method is expected to update the flags based on the children's flags.
7755
7756 void Compiler::gtUpdateNodeOperSideEffects(GenTree* tree)
7757 {
7758     if (tree->OperMayThrow(this))
7759     {
7760         tree->gtFlags |= GTF_EXCEPT;
7761     }
7762     else
7763     {
7764         tree->gtFlags &= ~GTF_EXCEPT;
7765         if (tree->OperIsIndirOrArrLength())
7766         {
7767             tree->gtFlags |= GTF_IND_NONFAULTING;
7768         }
7769     }
7770
7771     if (tree->OperRequiresAsgFlag())
7772     {
7773         tree->gtFlags |= GTF_ASG;
7774     }
7775     else
7776     {
7777         tree->gtFlags &= ~GTF_ASG;
7778     }
7779
7780     if (tree->OperRequiresCallFlag(this))
7781     {
7782         tree->gtFlags |= GTF_CALL;
7783     }
7784     else
7785     {
7786         tree->gtFlags &= ~GTF_CALL;
7787     }
7788 }
7789
7790 //------------------------------------------------------------------------
7791 // gtUpdateNodeSideEffects: Update the side effects based on the node operation and
7792 //                          children's side efects.
7793 //
7794 // Arguments:
7795 //    tree            - Tree to update the side effects on
7796 //
7797 // Notes:
7798 //    This method currently only updates GTF_EXCEPT and GTF_ASG flags. The other side effect
7799 //    flags may remain unnecessarily (conservatively) set.
7800
7801 void Compiler::gtUpdateNodeSideEffects(GenTree* tree)
7802 {
7803     gtUpdateNodeOperSideEffects(tree);
7804     unsigned nChildren = tree->NumChildren();
7805     for (unsigned childNum = 0; childNum < nChildren; childNum++)
7806     {
7807         GenTree* child = tree->GetChild(childNum);
7808         if (child != nullptr)
7809         {
7810             tree->gtFlags |= (child->gtFlags & GTF_ALL_EFFECT);
7811         }
7812     }
7813 }
7814
//------------------------------------------------------------------------
// fgUpdateSideEffectsPre: Update the side effects based on the tree operation.
//
// Arguments:
//    pTree            - Pointer to the tree to update the side effects
//    fgWalkPre        - Walk data
//
// Return Value:
//    WALK_CONTINUE, always — the walk is never aborted from here.
//
// Notes:
//    Pre-order callback used by gtUpdateStmtSideEffects; it only recomputes the
//    node's own operation flags (children are folded in by the post callback).
//    This method currently only updates GTF_EXCEPT and GTF_ASG flags. The other side effect
//    flags may remain unnecessarily (conservatively) set.

Compiler::fgWalkResult Compiler::fgUpdateSideEffectsPre(GenTree** pTree, fgWalkData* fgWalkPre)
{
    fgWalkPre->compiler->gtUpdateNodeOperSideEffects(*pTree);

    return WALK_CONTINUE;
}
7832
7833 //------------------------------------------------------------------------
7834 // fgUpdateSideEffectsPost: Update the side effects of the parent based on the tree's flags.
7835 //
7836 // Arguments:
7837 //    pTree            - Pointer to the tree
7838 //    fgWalkPost       - Walk data
7839 //
7840 // Notes:
7841 //    The routine is used for updating the stale side effect flags for ancestor
7842 //    nodes starting from treeParent up to the top-level stmt expr.
7843
7844 Compiler::fgWalkResult Compiler::fgUpdateSideEffectsPost(GenTree** pTree, fgWalkData* fgWalkPost)
7845 {
7846     GenTree* tree   = *pTree;
7847     GenTree* parent = fgWalkPost->parent;
7848     if (parent != nullptr)
7849     {
7850         parent->gtFlags |= (tree->gtFlags & GTF_ALL_EFFECT);
7851     }
7852     return WALK_CONTINUE;
7853 }
7854
7855 /*****************************************************************************
7856  *
7857  *  Compares two trees and returns true when both trees are the same.
7858  *  Instead of fully comparing the two trees this method can just return false.
7859  *  Thus callers should not assume that the trees are different when false is returned.
7860  *  Only when true is returned can the caller perform code optimizations.
7861  *  The current implementation only compares a limited set of LEAF/CONST node
7862  *  and returns false for all othere trees.
7863  */
7864 bool Compiler::gtCompareTree(GenTree* op1, GenTree* op2)
7865 {
7866     /* Make sure that both trees are of the same GT node kind */
7867     if (op1->OperGet() != op2->OperGet())
7868     {
7869         return false;
7870     }
7871
7872     /* Make sure that both trees are returning the same type */
7873     if (op1->gtType != op2->gtType)
7874     {
7875         return false;
7876     }
7877
7878     /* Figure out what kind of a node we have */
7879
7880     genTreeOps oper = op1->OperGet();
7881     unsigned   kind = op1->OperKind();
7882
7883     /* Is this a constant or leaf node? */
7884
7885     if (kind & (GTK_CONST | GTK_LEAF))
7886     {
7887         switch (oper)
7888         {
7889             case GT_CNS_INT:
7890                 if ((op1->gtIntCon.gtIconVal == op2->gtIntCon.gtIconVal) && GenTree::SameIconHandleFlag(op1, op2))
7891                 {
7892                     return true;
7893                 }
7894                 break;
7895
7896             case GT_CNS_LNG:
7897                 if (op1->gtLngCon.gtLconVal == op2->gtLngCon.gtLconVal)
7898                 {
7899                     return true;
7900                 }
7901                 break;
7902
7903             case GT_CNS_STR:
7904                 if (op1->gtStrCon.gtSconCPX == op2->gtStrCon.gtSconCPX)
7905                 {
7906                     return true;
7907                 }
7908                 break;
7909
7910             case GT_LCL_VAR:
7911                 if (op1->gtLclVarCommon.gtLclNum == op2->gtLclVarCommon.gtLclNum)
7912                 {
7913                     return true;
7914                 }
7915                 break;
7916
7917             case GT_CLS_VAR:
7918                 if (op1->gtClsVar.gtClsVarHnd == op2->gtClsVar.gtClsVarHnd)
7919                 {
7920                     return true;
7921                 }
7922                 break;
7923
7924             default:
7925                 // we return false for these unhandled 'oper' kinds
7926                 break;
7927         }
7928     }
7929     return false;
7930 }
7931
//------------------------------------------------------------------------
// gtGetThisArg: Return the "this" pointer argument of a call, if any.
//
// Arguments:
//    call - The call to inspect
//
// Return Value:
//    The node for the 'this' argument, or nullptr if the call has no
//    'this' argument (or it has no late args to find it in).

GenTree* Compiler::gtGetThisArg(GenTreeCall* call)
{
    if (call->gtCallObjp != nullptr)
    {
        if (call->gtCallObjp->gtOper != GT_NOP && call->gtCallObjp->gtOper != GT_ASG)
        {
            if (!(call->gtCallObjp->gtFlags & GTF_LATE_ARG))
            {
                // 'this' still sits in the regular arg position; return it directly.
                return call->gtCallObjp;
            }
        }

        if (call->gtCallLateArgs)
        {
            // 'this' was moved to the late-arg list; look it up as argument number 0.
            unsigned       argNum          = 0;
            fgArgTabEntry* thisArgTabEntry = gtArgEntryByArgNum(call, argNum);
            GenTree*       result          = thisArgTabEntry->node;

#if !FEATURE_FIXED_OUT_ARGS && defined(DEBUG)
            // Debug-only cross-check: walk the late-arg list and assert that the node
            // assigned to the first argument register (REG_ARG_0) is the same 'result'.
            regNumber thisReg  = REG_ARG_0;
            GenTree*  lateArgs = call->gtCallLateArgs;
            regList   list     = call->regArgList;
            int       index    = 0;
            while (lateArgs != NULL)
            {
                assert(lateArgs->gtOper == GT_LIST);
                assert(index < call->regArgListCount);
                regNumber curArgReg = list[index];
                if (curArgReg == thisReg)
                {
                    assert(result == lateArgs->gtOp.gtOp1);
                }

                lateArgs = lateArgs->gtOp.gtOp2;
                index++;
            }
#endif // !FEATURE_FIXED_OUT_ARGS && defined(DEBUG)
            return result;
        }
    }
    return nullptr;
}
7974
7975 bool GenTree::gtSetFlags() const
7976 {
7977     //
7978     // When FEATURE_SET_FLAGS (_TARGET_ARM_) is active the method returns true
7979     //    when the gtFlags has the flag GTF_SET_FLAGS set
7980     // otherwise the architecture will be have instructions that typically set
7981     //    the flags and this method will return true.
7982     //
7983     //    Exceptions: GT_IND (load/store) is not allowed to set the flags
7984     //                and on XARCH the GT_MUL/GT_DIV and all overflow instructions
7985     //                do not set the condition flags
7986     //
7987     // Precondition we have a GTK_SMPOP
7988     //
7989     if (!varTypeIsIntegralOrI(TypeGet()) && (TypeGet() != TYP_VOID))
7990     {
7991         return false;
7992     }
7993
7994     if (((gtFlags & GTF_SET_FLAGS) != 0) && (gtOper != GT_IND))
7995     {
7996         // GTF_SET_FLAGS is not valid on GT_IND and is overlaid with GTF_NONFAULTING_IND
7997         return true;
7998     }
7999     else
8000     {
8001         return false;
8002     }
8003 }
8004
//------------------------------------------------------------------------
// gtRequestSetFlags: Try to mark this node so that codegen sets the CPU condition flags.
//
// Return Value:
//    true if GTF_SET_FLAGS was set on this node — codegen must then set the
//    condition flags; false if the request could not be honored.
//
// Notes:
//    This method is a no-op (always returns false) unless FEATURE_SET_FLAGS is defined.

bool GenTree::gtRequestSetFlags()
{
    bool result = false;

#if FEATURE_SET_FLAGS
    // This method is a Nop unless FEATURE_SET_FLAGS is defined

    // In order to set GTF_SET_FLAGS
    //              we must have a GTK_SMPOP
    //          and we have a integer or machine size type (not floating point or TYP_LONG on 32-bit)
    //
    if (!OperIsSimple())
        return false;

    if (!varTypeIsIntegralOrI(TypeGet()))
        return false;

    switch (gtOper)
    {
        case GT_IND:
        case GT_ARR_LENGTH:
            // These will turn into simple load from memory instructions
            // and we can't force the setting of the flags on load from memory
            break;

        case GT_MUL:
        case GT_DIV:
            // These instructions don't set the flags (on x86/x64)
            //
            break;

        default:
            // Otherwise we can set the flags for this gtOper
            // and codegen must set the condition flags.
            //
            gtFlags |= GTF_SET_FLAGS;
            result = true;
            break;
    }
#endif // FEATURE_SET_FLAGS

    // Codegen for this tree must set the condition flags if
    // this method returns true.
    //
    return result;
}
8051
//------------------------------------------------------------------------
// NumChildren: Return the number of children of this node.
//
// Return Value:
//    The count of child nodes, using the same ordering and null-skipping
//    conventions as GetChild() — the two methods must stay in sync.

unsigned GenTree::NumChildren()
{
    if (OperIsConst() || OperIsLeaf())
    {
        return 0;
    }
    else if (OperIsUnary())
    {
        // GT_NOP, GT_RETURN and GT_RETFILT are the unary opers with a legal null operand.
        if (OperGet() == GT_NOP || OperGet() == GT_RETURN || OperGet() == GT_RETFILT)
        {
            if (gtOp.gtOp1 == nullptr)
            {
                return 0;
            }
            else
            {
                return 1;
            }
        }
        else
        {
            return 1;
        }
    }
    else if (OperIsBinary())
    {
        // All binary operators except LEA have at least one arg; the second arg may sometimes be null, however.
        if (OperGet() == GT_LEA)
        {
            unsigned childCount = 0;
            if (gtOp.gtOp1 != nullptr)
            {
                childCount++;
            }
            if (gtOp.gtOp2 != nullptr)
            {
                childCount++;
            }
            return childCount;
        }
#ifdef FEATURE_HW_INTRINSICS
        // GT_HWIntrinsic require special handling
        if (OperGet() == GT_HWIntrinsic)
        {
            // A hardware intrinsic may have no operands at all.
            if (gtOp.gtOp1 == nullptr)
            {
                return 0;
            }
        }
#endif
        assert(gtOp.gtOp1 != nullptr);
        if (gtOp.gtOp2 == nullptr)
        {
            return 1;
        }
        else
        {
            return 2;
        }
    }
    else
    {
        // Special
        switch (OperGet())
        {
            case GT_CMPXCHG:
                return 3;

            case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
            case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
            case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
                return 2;

            case GT_FIELD:
            case GT_STMT:
                return 1;

            case GT_ARR_ELEM:
                // The array object plus one index node per rank.
                return 1 + AsArrElem()->gtArrRank;

            case GT_DYN_BLK:
                return 2;

            case GT_ARR_OFFSET:
            case GT_STORE_DYN_BLK:
                return 3;

            case GT_CALL:
            {
                // Calls count only their non-null operands.
                GenTreeCall* call = AsCall();
                unsigned     res  = 0; // arg list(s) (including late args).
                if (call->gtCallObjp != nullptr)
                {
                    res++; // Add objp?
                }
                if (call->gtCallArgs != nullptr)
                {
                    res++; // Add args?
                }
                if (call->gtCallLateArgs != nullptr)
                {
                    res++; // Add late args?
                }
                if (call->gtControlExpr != nullptr)
                {
                    res++;
                }

                // Indirect calls may additionally have a cookie and a call address.
                if (call->gtCallType == CT_INDIRECT)
                {
                    if (call->gtCallCookie != nullptr)
                    {
                        res++;
                    }
                    if (call->gtCallAddr != nullptr)
                    {
                        res++;
                    }
                }
                return res;
            }
            case GT_NONE:
                return 0;
            default:
                unreached();
        }
    }
}
8184
//------------------------------------------------------------------------
// GetChild: Return the child of this node with the given index.
//
// Arguments:
//    childNum - Zero-based index of the child; must be less than NumChildren().
//
// Return Value:
//    The childNum'th child node, using the same ordering and null-skipping
//    conventions as NumChildren() — the two methods must stay in sync.

GenTree* GenTree::GetChild(unsigned childNum)
{
    assert(childNum < NumChildren()); // Precondition.
    assert(NumChildren() <= MAX_CHILDREN);
    assert(!(OperIsConst() || OperIsLeaf()));
    if (OperIsUnary())
    {
        return AsUnOp()->gtOp1;
    }
    // Special case for assignment of dynamic block.
    // This code is here to duplicate the former case where the size may be evaluated prior to the
    // source and destination addresses. In order to do this, we treat the size as a child of the
    // assignment.
    // TODO-1stClassStructs: Revisit the need to duplicate former behavior, so that we can remove
    // these special cases.
    if ((OperGet() == GT_ASG) && (gtOp.gtOp1->OperGet() == GT_DYN_BLK) && (childNum == 2))
    {
        return gtOp.gtOp1->AsDynBlk()->gtDynamicSize;
    }
    else if (OperIsBinary())
    {
        if (OperIsAddrMode())
        {
            // If this is the first (0th) child, only return op1 if it is non-null
            // Otherwise, we return gtOp2.
            if (childNum == 0 && AsOp()->gtOp1 != nullptr)
            {
                return AsOp()->gtOp1;
            }
            return AsOp()->gtOp2;
        }
        // TODO-Cleanup: Consider handling ReverseOps here, and then we wouldn't have to handle it in
        // fgGetFirstNode().  However, it seems that it causes loop hoisting behavior to change.
        if (childNum == 0)
        {
            return AsOp()->gtOp1;
        }
        else
        {
            return AsOp()->gtOp2;
        }
    }
    else
    {
        // Special
        switch (OperGet())
        {
            case GT_CMPXCHG:
                switch (childNum)
                {
                    case 0:
                        return AsCmpXchg()->gtOpLocation;
                    case 1:
                        return AsCmpXchg()->gtOpValue;
                    case 2:
                        return AsCmpXchg()->gtOpComparand;
                    default:
                        unreached();
                }
            case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
            case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
            case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
                switch (childNum)
                {
                    case 0:
                        return AsBoundsChk()->gtIndex;
                    case 1:
                        return AsBoundsChk()->gtArrLen;
                    default:
                        unreached();
                }

            case GT_STORE_DYN_BLK:
                switch (childNum)
                {
                    case 0:
                        return AsDynBlk()->Addr();
                    case 1:
                        return AsDynBlk()->Data();
                    case 2:
                        return AsDynBlk()->gtDynamicSize;
                    default:
                        unreached();
                }
            case GT_DYN_BLK:
                // Child order depends on whether the size is evaluated before the address.
                switch (childNum)
                {
                    case 0:
                        return AsDynBlk()->gtEvalSizeFirst ? AsDynBlk()->gtDynamicSize : AsDynBlk()->Addr();
                    case 1:
                        return AsDynBlk()->gtEvalSizeFirst ? AsDynBlk()->Addr() : AsDynBlk()->gtDynamicSize;
                    default:
                        unreached();
                }

            case GT_FIELD:
                return AsField()->gtFldObj;

            case GT_STMT:
                return AsStmt()->gtStmtExpr;

            case GT_ARR_ELEM:
                // Child 0 is the array object; children 1..rank are the index nodes.
                if (childNum == 0)
                {
                    return AsArrElem()->gtArrObj;
                }
                else
                {
                    return AsArrElem()->gtArrInds[childNum - 1];
                }

            case GT_ARR_OFFSET:
                switch (childNum)
                {
                    case 0:
                        return AsArrOffs()->gtOffset;
                    case 1:
                        return AsArrOffs()->gtIndex;
                    case 2:
                        return AsArrOffs()->gtArrObj;
                    default:
                        unreached();
                }

            case GT_CALL:
            {
                // The if chain below assumes that all possible children are non-null.
                // If some are null, "virtually skip them."
                // If there isn't "virtually skip it."
                GenTreeCall* call = AsCall();

                // Each null operand bumps childNum forward so that the index
                // matches the count produced by NumChildren().
                if (call->gtCallObjp == nullptr)
                {
                    childNum++;
                }
                if (childNum >= 1 && call->gtCallArgs == nullptr)
                {
                    childNum++;
                }
                if (childNum >= 2 && call->gtCallLateArgs == nullptr)
                {
                    childNum++;
                }
                if (childNum >= 3 && call->gtControlExpr == nullptr)
                {
                    childNum++;
                }
                if (call->gtCallType == CT_INDIRECT)
                {
                    if (childNum >= 4 && call->gtCallCookie == nullptr)
                    {
                        childNum++;
                    }
                }

                if (childNum == 0)
                {
                    return call->gtCallObjp;
                }
                else if (childNum == 1)
                {
                    return call->gtCallArgs;
                }
                else if (childNum == 2)
                {
                    return call->gtCallLateArgs;
                }
                else if (childNum == 3)
                {
                    return call->gtControlExpr;
                }
                else
                {
                    // Only indirect calls have the cookie and call-address children.
                    assert(call->gtCallType == CT_INDIRECT);
                    if (childNum == 4)
                    {
                        return call->gtCallCookie;
                    }
                    else
                    {
                        assert(childNum == 5);
                        return call->gtCallAddr;
                    }
                }
            }
            case GT_NONE:
                unreached();
            default:
                unreached();
        }
    }
}
8381
//------------------------------------------------------------------------
// GenTreeUseEdgeIterator: Construct an "end" iterator — no node, m_state of -1.
//
GenTreeUseEdgeIterator::GenTreeUseEdgeIterator()
    : m_advance(nullptr), m_node(nullptr), m_edge(nullptr), m_argList(nullptr), m_state(-1)
{
}
8386
//------------------------------------------------------------------------
// GenTreeUseEdgeIterator: Construct an iterator positioned at the first use edge of 'node'.
//
// Arguments:
//    node - The node whose use edges are to be enumerated; must not be null.
//
// Notes:
//    For leaf nodes the iterator is immediately placed in the terminal state
//    (m_state == -1). Otherwise m_edge is set to the first use edge and
//    m_advance to the function that will produce the subsequent edges.

GenTreeUseEdgeIterator::GenTreeUseEdgeIterator(GenTree* node)
    : m_advance(nullptr), m_node(node), m_edge(nullptr), m_argList(nullptr), m_state(0)
{
    assert(m_node != nullptr);

    // NOTE: the switch statement below must be updated when introducing new nodes.

    switch (m_node->OperGet())
    {
        // Leaf nodes
        case GT_LCL_VAR:
        case GT_LCL_FLD:
        case GT_LCL_VAR_ADDR:
        case GT_LCL_FLD_ADDR:
        case GT_CATCH_ARG:
        case GT_LABEL:
        case GT_FTN_ADDR:
        case GT_RET_EXPR:
        case GT_CNS_INT:
        case GT_CNS_LNG:
        case GT_CNS_DBL:
        case GT_CNS_STR:
        case GT_MEMORYBARRIER:
        case GT_JMP:
        case GT_JCC:
        case GT_SETCC:
        case GT_NO_OP:
        case GT_START_NONGC:
        case GT_START_PREEMPTGC:
        case GT_PROF_HOOK:
#if !FEATURE_EH_FUNCLETS
        case GT_END_LFIN:
#endif // !FEATURE_EH_FUNCLETS
        case GT_PHI_ARG:
        case GT_JMPTABLE:
        case GT_CLS_VAR:
        case GT_CLS_VAR_ADDR:
        case GT_ARGPLACE:
        case GT_PHYSREG:
        case GT_EMITNOP:
        case GT_PINVOKE_PROLOG:
        case GT_PINVOKE_EPILOG:
        case GT_IL_OFFSET:
            // No operands: terminate immediately.
            m_state = -1;
            return;

        // Standard unary operators
        case GT_STORE_LCL_VAR:
        case GT_STORE_LCL_FLD:
        case GT_NOT:
        case GT_NEG:
        case GT_COPY:
        case GT_RELOAD:
        case GT_ARR_LENGTH:
        case GT_CAST:
        case GT_BITCAST:
        case GT_CKFINITE:
        case GT_LCLHEAP:
        case GT_ADDR:
        case GT_IND:
        case GT_OBJ:
        case GT_BLK:
        case GT_BOX:
        case GT_ALLOCOBJ:
        case GT_RUNTIMELOOKUP:
        case GT_INIT_VAL:
        case GT_JTRUE:
        case GT_SWITCH:
        case GT_NULLCHECK:
        case GT_PUTARG_REG:
        case GT_PUTARG_STK:
        case GT_BSWAP:
        case GT_BSWAP16:
#if FEATURE_ARG_SPLIT
        case GT_PUTARG_SPLIT:
#endif // FEATURE_ARG_SPLIT
        case GT_RETURNTRAP:
            // Exactly one (non-null) operand.
            m_edge = &m_node->AsUnOp()->gtOp1;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::Terminate;
            return;

        // Unary operators with an optional operand
        case GT_NOP:
        case GT_RETURN:
        case GT_RETFILT:
            if (m_node->AsUnOp()->gtOp1 == nullptr)
            {
                assert(m_node->NullOp1Legal());
                m_state = -1;
            }
            else
            {
                m_edge    = &m_node->AsUnOp()->gtOp1;
                m_advance = &GenTreeUseEdgeIterator::Terminate;
            }
            return;

        // Variadic nodes
        case GT_PHI:
            SetEntryStateForList(m_node->AsUnOp()->gtOp1);
            return;

        case GT_FIELD_LIST:
            SetEntryStateForList(m_node);
            return;

#ifdef FEATURE_SIMD
        case GT_SIMD:
            // SIMDIntrinsicInitN carries its operands as a GT_LIST in op1.
            if (m_node->AsSIMD()->gtSIMDIntrinsicID == SIMDIntrinsicInitN)
            {
                SetEntryStateForList(m_node->AsSIMD()->gtOp1);
            }
            else
            {
                SetEntryStateForBinOp();
            }
            return;
#endif // FEATURE_SIMD

#ifdef FEATURE_HW_INTRINSICS
        case GT_HWIntrinsic:
            // HW intrinsics can have zero operands, a GT_LIST of operands, or one/two operands.
            if (m_node->AsHWIntrinsic()->gtOp1 == nullptr)
            {
                assert(m_node->NullOp1Legal());
                m_state = -1;
            }
            else if (m_node->AsHWIntrinsic()->gtOp1->OperIsList())
            {
                SetEntryStateForList(m_node->AsHWIntrinsic()->gtOp1);
            }
            else
            {
                SetEntryStateForBinOp();
            }
            return;
#endif // FEATURE_HW_INTRINSICS

        // LEA, which may have no first operand
        case GT_LEA:
            if (m_node->AsAddrMode()->gtOp1 == nullptr)
            {
                m_edge    = &m_node->AsAddrMode()->gtOp2;
                m_advance = &GenTreeUseEdgeIterator::Terminate;
            }
            else
            {
                SetEntryStateForBinOp();
            }
            return;

        // Special nodes
        case GT_CMPXCHG:
            m_edge = &m_node->AsCmpXchg()->gtOpLocation;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::AdvanceCmpXchg;
            return;

        case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
        case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
            m_edge = &m_node->AsBoundsChk()->gtIndex;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::AdvanceBoundsChk;
            return;

        case GT_FIELD:
            // The object operand of a field access is optional (static fields).
            if (m_node->AsField()->gtFldObj == nullptr)
            {
                m_state = -1;
            }
            else
            {
                m_edge    = &m_node->AsField()->gtFldObj;
                m_advance = &GenTreeUseEdgeIterator::Terminate;
            }
            return;

        case GT_STMT:
            if (m_node->AsStmt()->gtStmtExpr == nullptr)
            {
                m_state = -1;
            }
            else
            {
                m_edge    = &m_node->AsStmt()->gtStmtExpr;
                m_advance = &GenTreeUseEdgeIterator::Terminate;
            }
            return;

        case GT_ARR_ELEM:
            m_edge = &m_node->AsArrElem()->gtArrObj;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::AdvanceArrElem;
            return;

        case GT_ARR_OFFSET:
            m_edge = &m_node->AsArrOffs()->gtOffset;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::AdvanceArrOffset;
            return;

        case GT_DYN_BLK:
        {
            // First edge depends on whether the size is evaluated before the address.
            GenTreeDynBlk* const dynBlock = m_node->AsDynBlk();
            m_edge                        = dynBlock->gtEvalSizeFirst ? &dynBlock->gtDynamicSize : &dynBlock->gtOp1;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::AdvanceDynBlk;
        }
            return;

        case GT_STORE_DYN_BLK:
        {
            // First edge depends on gtEvalSizeFirst and on the reverse-op flag.
            GenTreeDynBlk* const dynBlock = m_node->AsDynBlk();
            if (dynBlock->gtEvalSizeFirst)
            {
                m_edge = &dynBlock->gtDynamicSize;
            }
            else
            {
                m_edge = dynBlock->IsReverseOp() ? &dynBlock->gtOp2 : &dynBlock->gtOp1;
            }
            assert(*m_edge != nullptr);

            m_advance = &GenTreeUseEdgeIterator::AdvanceStoreDynBlk;
        }
            return;

        case GT_CALL:
            AdvanceCall<CALL_INSTANCE>();
            return;

        // Binary nodes
        default:
            assert(m_node->OperIsBinary());
            SetEntryStateForBinOp();
            return;
    }
}
8630
8631 //------------------------------------------------------------------------
8632 // GenTreeUseEdgeIterator::AdvanceCmpXchg: produces the next operand of a CmpXchg node and advances the state.
8633 //
8634 void GenTreeUseEdgeIterator::AdvanceCmpXchg()
8635 {
8636     switch (m_state)
8637     {
8638         case 0:
8639             m_edge  = &m_node->AsCmpXchg()->gtOpValue;
8640             m_state = 1;
8641             break;
8642         case 1:
8643             m_edge    = &m_node->AsCmpXchg()->gtOpComparand;
8644             m_advance = &GenTreeUseEdgeIterator::Terminate;
8645             break;
8646         default:
8647             unreached();
8648     }
8649
8650     assert(*m_edge != nullptr);
8651 }
8652
8653 //------------------------------------------------------------------------
8654 // GenTreeUseEdgeIterator::AdvanceBoundsChk: produces the next operand of a BoundsChk node and advances the state.
8655 //
8656 void GenTreeUseEdgeIterator::AdvanceBoundsChk()
8657 {
8658     m_edge = &m_node->AsBoundsChk()->gtArrLen;
8659     assert(*m_edge != nullptr);
8660     m_advance = &GenTreeUseEdgeIterator::Terminate;
8661 }
8662
8663 //------------------------------------------------------------------------
8664 // GenTreeUseEdgeIterator::AdvanceArrElem: produces the next operand of a ArrElem node and advances the state.
8665 //
8666 // Because these nodes are variadic, this function uses `m_state` to index into the list of array indices.
8667 //
8668 void GenTreeUseEdgeIterator::AdvanceArrElem()
8669 {
8670     if (m_state < m_node->AsArrElem()->gtArrRank)
8671     {
8672         m_edge = &m_node->AsArrElem()->gtArrInds[m_state];
8673         assert(*m_edge != nullptr);
8674         m_state++;
8675     }
8676     else
8677     {
8678         m_state = -1;
8679     }
8680 }
8681
8682 //------------------------------------------------------------------------
8683 // GenTreeUseEdgeIterator::AdvanceArrOffset: produces the next operand of a ArrOffset node and advances the state.
8684 //
8685 void GenTreeUseEdgeIterator::AdvanceArrOffset()
8686 {
8687     switch (m_state)
8688     {
8689         case 0:
8690             m_edge  = &m_node->AsArrOffs()->gtIndex;
8691             m_state = 1;
8692             break;
8693         case 1:
8694             m_edge    = &m_node->AsArrOffs()->gtArrObj;
8695             m_advance = &GenTreeUseEdgeIterator::Terminate;
8696             break;
8697         default:
8698             unreached();
8699     }
8700
8701     assert(*m_edge != nullptr);
8702 }
8703
8704 //------------------------------------------------------------------------
8705 // GenTreeUseEdgeIterator::AdvanceDynBlk: produces the next operand of a DynBlk node and advances the state.
8706 //
8707 void GenTreeUseEdgeIterator::AdvanceDynBlk()
8708 {
8709     GenTreeDynBlk* const dynBlock = m_node->AsDynBlk();
8710
8711     m_edge = dynBlock->gtEvalSizeFirst ? &dynBlock->gtOp1 : &dynBlock->gtDynamicSize;
8712     assert(*m_edge != nullptr);
8713     m_advance = &GenTreeUseEdgeIterator::Terminate;
8714 }
8715
8716 //------------------------------------------------------------------------
8717 // GenTreeUseEdgeIterator::AdvanceStoreDynBlk: produces the next operand of a StoreDynBlk node and advances the state.
8718 //
8719 // These nodes are moderately complicated but rare enough that templating this function is probably not
8720 // worth the extra complexity.
8721 //
8722 void GenTreeUseEdgeIterator::AdvanceStoreDynBlk()
8723 {
8724     GenTreeDynBlk* const dynBlock = m_node->AsDynBlk();
8725     if (dynBlock->gtEvalSizeFirst)
8726     {
8727         switch (m_state)
8728         {
8729             case 0:
8730                 m_edge  = dynBlock->IsReverseOp() ? &dynBlock->gtOp2 : &dynBlock->gtOp1;
8731                 m_state = 1;
8732                 break;
8733             case 1:
8734                 m_edge    = dynBlock->IsReverseOp() ? &dynBlock->gtOp1 : &dynBlock->gtOp2;
8735                 m_advance = &GenTreeUseEdgeIterator::Terminate;
8736                 break;
8737             default:
8738                 unreached();
8739         }
8740     }
8741     else
8742     {
8743         switch (m_state)
8744         {
8745             case 0:
8746                 m_edge  = dynBlock->IsReverseOp() ? &dynBlock->gtOp1 : &dynBlock->gtOp2;
8747                 m_state = 1;
8748                 break;
8749             case 1:
8750                 m_edge    = &dynBlock->gtDynamicSize;
8751                 m_advance = &GenTreeUseEdgeIterator::Terminate;
8752                 break;
8753             default:
8754                 unreached();
8755         }
8756     }
8757
8758     assert(*m_edge != nullptr);
8759 }
8760
8761 //------------------------------------------------------------------------
8762 // GenTreeUseEdgeIterator::AdvanceBinOp: produces the next operand of a binary node and advances the state.
8763 //
8764 // This function must be instantiated s.t. `ReverseOperands` is `true` iff the node is marked with the
8765 // `GTF_REVERSE_OPS` flag.
8766 //
8767 template <bool ReverseOperands>
8768 void           GenTreeUseEdgeIterator::AdvanceBinOp()
8769 {
8770     assert(ReverseOperands == ((m_node->gtFlags & GTF_REVERSE_OPS) != 0));
8771
8772     m_edge = !ReverseOperands ? &m_node->AsOp()->gtOp2 : &m_node->AsOp()->gtOp1;
8773     assert(*m_edge != nullptr);
8774     m_advance = &GenTreeUseEdgeIterator::Terminate;
8775 }
8776
8777 //------------------------------------------------------------------------
8778 // GenTreeUseEdgeIterator::SetEntryStateForBinOp: produces the first operand of a binary node and chooses
8779 //                                                the appropriate advance function.
8780 //
8781 void GenTreeUseEdgeIterator::SetEntryStateForBinOp()
8782 {
8783     assert(m_node != nullptr);
8784     assert(m_node->OperIsBinary());
8785
8786     GenTreeOp* const node = m_node->AsOp();
8787
8788     if (node->gtOp2 == nullptr)
8789     {
8790         assert(node->gtOp1 != nullptr);
8791         assert(node->NullOp2Legal());
8792         m_edge    = &node->gtOp1;
8793         m_advance = &GenTreeUseEdgeIterator::Terminate;
8794     }
8795     else if ((node->gtFlags & GTF_REVERSE_OPS) != 0)
8796     {
8797         m_edge    = &m_node->AsOp()->gtOp2;
8798         m_advance = &GenTreeUseEdgeIterator::AdvanceBinOp<true>;
8799     }
8800     else
8801     {
8802         m_edge    = &m_node->AsOp()->gtOp1;
8803         m_advance = &GenTreeUseEdgeIterator::AdvanceBinOp<false>;
8804     }
8805 }
8806
8807 //------------------------------------------------------------------------
8808 // GenTreeUseEdgeIterator::AdvanceList: produces the next operand of a variadic node and advances the state.
8809 //
8810 // This function does not use `m_state` for anything meaningful; it simply walks the `m_argList` until
8811 // there are no further entries.
8812 //
8813 void GenTreeUseEdgeIterator::AdvanceList()
8814 {
8815     assert(m_state == 0);
8816
8817     if (m_argList == nullptr)
8818     {
8819         m_state = -1;
8820     }
8821     else
8822     {
8823         GenTreeArgList* listNode = m_argList->AsArgList();
8824         m_edge                   = &listNode->gtOp1;
8825         m_argList                = listNode->Rest();
8826     }
8827 }
8828
8829 //------------------------------------------------------------------------
8830 // GenTreeUseEdgeIterator::SetEntryStateForList: produces the first operand of a list node.
8831 //
8832 void GenTreeUseEdgeIterator::SetEntryStateForList(GenTree* list)
8833 {
8834     m_argList = list;
8835     m_advance = &GenTreeUseEdgeIterator::AdvanceList;
8836     AdvanceList();
8837 }
8838
8839 //------------------------------------------------------------------------
8840 // GenTreeUseEdgeIterator::AdvanceCall: produces the next operand of a call node and advances the state.
8841 //
8842 // This function is a bit tricky: in order to avoid doing unnecessary work, it is instantiated with the
8843 // state number the iterator will be in when it is called. For example, `AdvanceCall<CALL_INSTANCE>`
8844 // is the instantiation used when the iterator is at the `CALL_INSTANCE` state (i.e. the entry state).
8845 // This sort of templating allows each state to avoid processing earlier states without unnecessary
8846 // duplication of code.
8847 //
8848 // Note that this method expands the argument lists (`gtCallArgs` and `gtCallLateArgs`) into their
8849 // component operands.
8850 //
8851 template <int state>
8852 void          GenTreeUseEdgeIterator::AdvanceCall()
8853 {
8854     GenTreeCall* const call = m_node->AsCall();
8855
8856     switch (state)
8857     {
8858         case CALL_INSTANCE:
8859             m_argList = call->gtCallArgs;
8860             m_advance = &GenTreeUseEdgeIterator::AdvanceCall<CALL_ARGS>;
8861             if (call->gtCallObjp != nullptr)
8862             {
8863                 m_edge = &call->gtCallObjp;
8864                 return;
8865             }
8866             __fallthrough;
8867
8868         case CALL_ARGS:
8869             if (m_argList != nullptr)
8870             {
8871                 GenTreeArgList* argNode = m_argList->AsArgList();
8872                 m_edge                  = &argNode->gtOp1;
8873                 m_argList               = argNode->Rest();
8874                 return;
8875             }
8876             m_argList = call->gtCallLateArgs;
8877             m_advance = &GenTreeUseEdgeIterator::AdvanceCall<CALL_LATE_ARGS>;
8878             __fallthrough;
8879
8880         case CALL_LATE_ARGS:
8881             if (m_argList != nullptr)
8882             {
8883                 GenTreeArgList* argNode = m_argList->AsArgList();
8884                 m_edge                  = &argNode->gtOp1;
8885                 m_argList               = argNode->Rest();
8886                 return;
8887             }
8888             m_advance = &GenTreeUseEdgeIterator::AdvanceCall<CALL_CONTROL_EXPR>;
8889             __fallthrough;
8890
8891         case CALL_CONTROL_EXPR:
8892             if (call->gtControlExpr != nullptr)
8893             {
8894                 if (call->gtCallType == CT_INDIRECT)
8895                 {
8896                     m_advance = &GenTreeUseEdgeIterator::AdvanceCall<CALL_COOKIE>;
8897                 }
8898                 else
8899                 {
8900                     m_advance = &GenTreeUseEdgeIterator::Terminate;
8901                 }
8902                 m_edge = &call->gtControlExpr;
8903                 return;
8904             }
8905             else if (call->gtCallType != CT_INDIRECT)
8906             {
8907                 m_state = -1;
8908                 return;
8909             }
8910             __fallthrough;
8911
8912         case CALL_COOKIE:
8913             assert(call->gtCallType == CT_INDIRECT);
8914
8915             m_advance = &GenTreeUseEdgeIterator::AdvanceCall<CALL_ADDRESS>;
8916             if (call->gtCallCookie != nullptr)
8917             {
8918                 m_edge = &call->gtCallCookie;
8919                 return;
8920             }
8921             __fallthrough;
8922
8923         case CALL_ADDRESS:
8924             assert(call->gtCallType == CT_INDIRECT);
8925
8926             m_advance = &GenTreeUseEdgeIterator::Terminate;
8927             if (call->gtCallAddr != nullptr)
8928             {
8929                 m_edge = &call->gtCallAddr;
8930             }
8931             return;
8932
8933         default:
8934             unreached();
8935     }
8936 }
8937
8938 //------------------------------------------------------------------------
8939 // GenTreeUseEdgeIterator::Terminate: advances the iterator to the terminal state.
8940 //
8941 void GenTreeUseEdgeIterator::Terminate()
8942 {
8943     m_state = -1;
8944 }
8945
8946 //------------------------------------------------------------------------
8947 // GenTreeUseEdgeIterator::operator++: advances the iterator to the next operand.
8948 //
8949 GenTreeUseEdgeIterator& GenTreeUseEdgeIterator::operator++()
8950 {
8951     // If we've reached the terminal state, do nothing.
8952     if (m_state != -1)
8953     {
8954         (this->*m_advance)();
8955     }
8956
8957     return *this;
8958 }
8959
GenTreeUseEdgeIterator GenTree::UseEdgesBegin()
{
    // Iterator positioned at this node's first use edge.
    return GenTreeUseEdgeIterator(this);
}
8964
GenTreeUseEdgeIterator GenTree::UseEdgesEnd()
{
    // A default-constructed iterator serves as the terminal ("end") iterator.
    return GenTreeUseEdgeIterator();
}
8969
IteratorPair<GenTreeUseEdgeIterator> GenTree::UseEdges()
{
    // Convenience pair enabling range-based iteration over this node's use edges.
    return MakeIteratorPair(UseEdgesBegin(), UseEdgesEnd());
}
8974
GenTreeOperandIterator GenTree::OperandsBegin()
{
    // Iterator positioned at this node's first operand.
    return GenTreeOperandIterator(this);
}
8979
GenTreeOperandIterator GenTree::OperandsEnd()
{
    // A default-constructed iterator serves as the terminal ("end") iterator.
    return GenTreeOperandIterator();
}
8984
IteratorPair<GenTreeOperandIterator> GenTree::Operands()
{
    // Convenience pair enabling range-based iteration over this node's operands.
    return MakeIteratorPair(OperandsBegin(), OperandsEnd());
}
8989
8990 bool GenTree::Precedes(GenTree* other)
8991 {
8992     assert(other != nullptr);
8993
8994     for (GenTree* node = gtNext; node != nullptr; node = node->gtNext)
8995     {
8996         if (node == other)
8997         {
8998             return true;
8999         }
9000     }
9001
9002     return false;
9003 }
9004
9005 #ifdef DEBUG
9006
9007 /* static */ int GenTree::gtDispFlags(unsigned flags, unsigned debugFlags)
9008 {
9009     int charsDisplayed = 11; // 11 is the "baseline" number of flag characters displayed
9010
9011     printf("%c", (flags & GTF_ASG) ? 'A' : (IsContained(flags) ? 'c' : '-'));
9012     printf("%c", (flags & GTF_CALL) ? 'C' : '-');
9013     printf("%c", (flags & GTF_EXCEPT) ? 'X' : '-');
9014     printf("%c", (flags & GTF_GLOB_REF) ? 'G' : '-');
9015     printf("%c", (debugFlags & GTF_DEBUG_NODE_MORPHED) ? '+' : // First print '+' if GTF_DEBUG_NODE_MORPHED is set
9016                      (flags & GTF_ORDER_SIDEEFF) ? 'O' : '-'); // otherwise print 'O' or '-'
9017     printf("%c", (flags & GTF_COLON_COND) ? '?' : '-');
9018     printf("%c", (flags & GTF_DONT_CSE) ? 'N' :           // N is for No cse
9019                      (flags & GTF_MAKE_CSE) ? 'H' : '-'); // H is for Hoist this expr
9020     printf("%c", (flags & GTF_REVERSE_OPS) ? 'R' : '-');
9021     printf("%c", (flags & GTF_UNSIGNED) ? 'U' : (flags & GTF_BOOLEAN) ? 'B' : '-');
9022 #if FEATURE_SET_FLAGS
9023     printf("%c", (flags & GTF_SET_FLAGS) ? 'S' : '-');
9024     ++charsDisplayed;
9025 #endif
9026     printf("%c", (flags & GTF_LATE_ARG) ? 'L' : '-');
9027     printf("%c", (flags & GTF_SPILLED) ? 'z' : (flags & GTF_SPILL) ? 'Z' : '-');
9028
9029     return charsDisplayed;
9030 }
9031
9032 /*****************************************************************************/
9033
//------------------------------------------------------------------------
// gtDispNodeName: prints the name of a tree node (plus any oper-specific
//                 decorations) to jitstdout, padded for column alignment.
//
void Compiler::gtDispNodeName(GenTree* tree)
{
    /* print the node name */

    const char* name;

    assert(tree);
    if (tree->gtOper < GT_COUNT)
    {
        name = GenTree::OpName(tree->OperGet());
    }
    else
    {
        // Display (rather than assert on) an out-of-range oper so dumps still work.
        name = "<ERROR>";
    }
    char  buf[32];
    char* bufp = &buf[0];

    if ((tree->gtOper == GT_CNS_INT) && tree->IsIconHandle())
    {
        // Handle constants are tagged with "(h)".
        sprintf_s(bufp, sizeof(buf), " %s(h)%c", name, 0);
    }
    else if (tree->gtOper == GT_PUTARG_STK)
    {
        // Show the stack offset of the outgoing argument.
        sprintf_s(bufp, sizeof(buf), " %s [+0x%02x]%c", name, tree->AsPutArgStk()->getArgOffset(), 0);
    }
    else if (tree->gtOper == GT_CALL)
    {
        const char* callType = "CALL";
        const char* gtfType  = "";
        const char* ctType   = "";
        char        gtfTypeBuf[100];

        if (tree->gtCall.gtCallType == CT_USER_FUNC)
        {
            if (tree->gtCall.IsVirtual())
            {
                callType = "CALLV";
            }
        }
        else if (tree->gtCall.gtCallType == CT_HELPER)
        {
            ctType = " help";
        }
        else if (tree->gtCall.gtCallType == CT_INDIRECT)
        {
            ctType = " ind";
        }
        else
        {
            assert(!"Unknown gtCallType");
        }

        if (tree->gtFlags & GTF_CALL_NULLCHECK)
        {
            gtfType = " nullcheck";
        }
        if (tree->gtCall.IsVirtualVtable())
        {
            gtfType = " ind";
        }
        else if (tree->gtCall.IsVirtualStub())
        {
            gtfType = " stub";
        }
#ifdef FEATURE_READYTORUN_COMPILER
        else if (tree->gtCall.IsR2RRelativeIndir())
        {
            gtfType = " r2r_ind";
        }
#endif // FEATURE_READYTORUN_COMPILER
        else if (tree->gtFlags & GTF_CALL_UNMANAGED)
        {
            // Unmanaged calls may carry several modifiers; build the string incrementally.
            char* gtfTypeBufWalk = gtfTypeBuf;
            gtfTypeBufWalk += SimpleSprintf_s(gtfTypeBufWalk, gtfTypeBuf, sizeof(gtfTypeBuf), " unman");
            if (tree->gtFlags & GTF_CALL_POP_ARGS)
            {
                gtfTypeBufWalk += SimpleSprintf_s(gtfTypeBufWalk, gtfTypeBuf, sizeof(gtfTypeBuf), " popargs");
            }
            if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_UNMGD_THISCALL)
            {
                gtfTypeBufWalk += SimpleSprintf_s(gtfTypeBufWalk, gtfTypeBuf, sizeof(gtfTypeBuf), " thiscall");
            }
            gtfType = gtfTypeBuf;
        }

        sprintf_s(bufp, sizeof(buf), " %s%s%s%c", callType, ctType, gtfType, 0);
    }
    else if (tree->gtOper == GT_ARR_ELEM)
    {
        // Print as "name[,,]" with one comma per dimension beyond the first.
        bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), " %s[", name);
        for (unsigned rank = tree->gtArrElem.gtArrRank - 1; rank; rank--)
        {
            bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), ",");
        }
        SimpleSprintf_s(bufp, buf, sizeof(buf), "]");
    }
    else if (tree->gtOper == GT_ARR_OFFSET || tree->gtOper == GT_ARR_INDEX)
    {
        bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), " %s[", name);
        unsigned char currDim;
        unsigned char rank;
        if (tree->gtOper == GT_ARR_OFFSET)
        {
            currDim = tree->gtArrOffs.gtCurrDim;
            rank    = tree->gtArrOffs.gtArrRank;
        }
        else
        {
            currDim = tree->gtArrIndex.gtCurrDim;
            rank    = tree->gtArrIndex.gtArrRank;
        }

        for (unsigned char dim = 0; dim < rank; dim++)
        {
            // Use a defacto standard i,j,k for the dimensions.
            // Note that we only support up to rank 3 arrays with these nodes, so we won't run out of characters.
            // '*' marks dimensions before the current one; ' ' marks those after it.
            char dimChar = '*';
            if (dim == currDim)
            {
                dimChar = 'i' + dim;
            }
            else if (dim > currDim)
            {
                dimChar = ' ';
            }

            bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "%c", dimChar);
            if (dim != rank - 1)
            {
                bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), ",");
            }
        }
        SimpleSprintf_s(bufp, buf, sizeof(buf), "]");
    }
    else if (tree->gtOper == GT_LEA)
    {
        // Print the address mode as "name(b+(i*scale)+offset)", omitting absent parts.
        GenTreeAddrMode* lea = tree->AsAddrMode();
        bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), " %s(", name);
        if (lea->Base() != nullptr)
        {
            bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "b+");
        }
        if (lea->Index() != nullptr)
        {
            bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "(i*%d)+", lea->gtScale);
        }
        bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "%d)", lea->Offset());
    }
    else if (tree->gtOper == GT_ARR_BOUNDS_CHECK)
    {
        // Append a suffix indicating which exception the check throws on failure.
        switch (tree->gtBoundsChk.gtThrowKind)
        {
            case SCK_RNGCHK_FAIL:
                sprintf_s(bufp, sizeof(buf), " %s_Rng", name);
                break;
            case SCK_ARG_EXCPN:
                sprintf_s(bufp, sizeof(buf), " %s_Arg", name);
                break;
            case SCK_ARG_RNG_EXCPN:
                sprintf_s(bufp, sizeof(buf), " %s_ArgRng", name);
                break;
            default:
                unreached();
        }
    }
    else if (tree->gtOverflowEx())
    {
        // Operators with overflow checking enabled get an "_ovfl" suffix.
        sprintf_s(bufp, sizeof(buf), " %s_ovfl%c", name, 0);
    }
    else if (tree->OperIsBlk() && !tree->OperIsDynBlk())
    {
        // Fixed-size block nodes display their size in parentheses.
        sprintf_s(bufp, sizeof(buf), " %s(%d)", name, tree->AsBlk()->gtBlkSize);
    }
    else
    {
        sprintf_s(bufp, sizeof(buf), " %s%c", name, 0);
    }

    // Pad short names so dump columns line up.
    if (strlen(buf) < 10)
    {
        printf(" %-10s", buf);
    }
    else
    {
        printf(" %s", buf);
    }
}
9222
9223 //------------------------------------------------------------------------
9224 // gtDispZeroFieldSeq: If this node has a zero fieldSeq annotation
9225 //                      then print this Field Sequence
9226 //
9227 void Compiler::gtDispZeroFieldSeq(GenTree* tree)
9228 {
9229     NodeToFieldSeqMap* map = GetZeroOffsetFieldMap();
9230
9231     // THe most common case is having no entries in this map
9232     if (map->GetCount() > 0)
9233     {
9234         FieldSeqNode* fldSeq = nullptr;
9235         if (map->Lookup(tree, &fldSeq))
9236         {
9237             printf(" Zero");
9238             gtDispFieldSeq(fldSeq);
9239         }
9240     }
9241 }
9242
9243 //------------------------------------------------------------------------
9244 // gtDispVN: Utility function that prints a tree's ValueNumber: gtVNPair
9245 //
9246 void Compiler::gtDispVN(GenTree* tree)
9247 {
9248     if (tree->gtVNPair.GetLiberal() != ValueNumStore::NoVN)
9249     {
9250         assert(tree->gtVNPair.GetConservative() != ValueNumStore::NoVN);
9251         printf(" ");
9252         vnpPrint(tree->gtVNPair, 0);
9253     }
9254 }
9255
9256 //------------------------------------------------------------------------
9257 // gtDispCommonEndLine
9258 //     Utility function that prints the following node information
9259 //       1: The associated zero field sequence (if any)
9260 //       2. The register assigned to this node (if any)
9261 //       2. The value number assigned (if any)
9262 //       3. A newline character
9263 //
9264 void Compiler::gtDispCommonEndLine(GenTree* tree)
9265 {
9266     gtDispZeroFieldSeq(tree);
9267     gtDispRegVal(tree);
9268     gtDispVN(tree);
9269     printf("\n");
9270 }
9271
9272 //------------------------------------------------------------------------
9273 // gtDispNode: Print a tree to jitstdout.
9274 //
9275 // Arguments:
9276 //    tree - the tree to be printed
9277 //    indentStack - the specification for the current level of indentation & arcs
9278 //    msg         - a contextual method (i.e. from the parent) to print
9279 //
9280 // Return Value:
9281 //    None.
9282 //
9283 // Notes:
9284 //    'indentStack' may be null, in which case no indentation or arcs are printed
9285 //    'msg' may be null
9286
9287 void Compiler::gtDispNode(GenTree* tree, IndentStack* indentStack, __in __in_z __in_opt const char* msg, bool isLIR)
9288 {
9289     bool printPointer = true; // always true..
9290     bool printFlags   = true; // always true..
9291     bool printCost    = true; // always true..
9292
9293     int msgLength = 25;
9294
9295     GenTree* prev;
9296
9297     if (tree->gtSeqNum)
9298     {
9299         printf("N%03u ", tree->gtSeqNum);
9300         if (tree->gtCostsInitialized)
9301         {
9302             printf("(%3u,%3u) ", tree->gtCostEx, tree->gtCostSz);
9303         }
9304         else
9305         {
9306             printf("(???"
9307                    ",???"
9308                    ") "); // This probably indicates a bug: the node has a sequence number, but not costs.
9309         }
9310     }
9311     else
9312     {
9313         if (tree->gtOper == GT_STMT)
9314         {
9315             prev = tree->gtStmt.gtStmtExpr;
9316         }
9317         else
9318         {
9319             prev = tree;
9320         }
9321
9322         bool     hasSeqNum = true;
9323         unsigned dotNum    = 0;
9324         do
9325         {
9326             dotNum++;
9327             prev = prev->gtPrev;
9328
9329             if ((prev == nullptr) || (prev == tree))
9330             {
9331                 hasSeqNum = false;
9332                 break;
9333             }
9334
9335             assert(prev);
9336         } while (prev->gtSeqNum == 0);
9337
9338         // If we have an indent stack, don't add additional characters,
9339         // as it will mess up the alignment.
9340         bool displayDotNum = tree->gtOper != GT_STMT && hasSeqNum && (indentStack == nullptr);
9341         if (displayDotNum)
9342         {
9343             printf("N%03u.%02u ", prev->gtSeqNum, dotNum);
9344         }
9345         else
9346         {
9347             printf("     ");
9348         }
9349
9350         if (tree->gtCostsInitialized)
9351         {
9352             printf("(%3u,%3u) ", tree->gtCostEx, tree->gtCostSz);
9353         }
9354         else
9355         {
9356             if (displayDotNum)
9357             {
9358                 // Do better alignment in this case
9359                 printf("       ");
9360             }
9361             else
9362             {
9363                 printf("          ");
9364             }
9365         }
9366     }
9367
9368     if (optValnumCSE_phase)
9369     {
9370         if (IS_CSE_INDEX(tree->gtCSEnum))
9371         {
9372             printf("CSE #%02d (%s)", GET_CSE_INDEX(tree->gtCSEnum), (IS_CSE_USE(tree->gtCSEnum) ? "use" : "def"));
9373         }
9374         else
9375         {
9376             printf("             ");
9377         }
9378     }
9379
9380     /* Print the node ID */
9381     printTreeID(tree);
9382     printf(" ");
9383
9384     if (tree->gtOper >= GT_COUNT)
9385     {
9386         printf(" **** ILLEGAL NODE ****");
9387         return;
9388     }
9389
9390     if (printFlags)
9391     {
9392         /* First print the flags associated with the node */
9393         switch (tree->gtOper)
9394         {
9395             case GT_LEA:
9396             case GT_BLK:
9397             case GT_OBJ:
9398             case GT_DYN_BLK:
9399             case GT_STORE_BLK:
9400             case GT_STORE_OBJ:
9401             case GT_STORE_DYN_BLK:
9402
9403             case GT_IND:
9404                 // We prefer printing V or U
9405                 if ((tree->gtFlags & (GTF_IND_VOLATILE | GTF_IND_UNALIGNED)) == 0)
9406                 {
9407                     if (tree->gtFlags & GTF_IND_TGTANYWHERE)
9408                     {
9409                         printf("*");
9410                         --msgLength;
9411                         break;
9412                     }
9413                     if (tree->gtFlags & GTF_IND_TGT_NOT_HEAP)
9414                     {
9415                         printf("s");
9416                         --msgLength;
9417                         break;
9418                     }
9419                     if (tree->gtFlags & GTF_IND_INVARIANT)
9420                     {
9421                         printf("#");
9422                         --msgLength;
9423                         break;
9424                     }
9425                     if (tree->gtFlags & GTF_IND_ARR_INDEX)
9426                     {
9427                         printf("a");
9428                         --msgLength;
9429                         break;
9430                     }
9431                     if (tree->gtFlags & GTF_IND_NONFAULTING)
9432                     {
9433                         printf("n"); // print a n for non-faulting
9434                         --msgLength;
9435                         break;
9436                     }
9437                     if (tree->gtFlags & GTF_IND_ASG_LHS)
9438                     {
9439                         printf("D"); // print a D for definition
9440                         --msgLength;
9441                         break;
9442                     }
9443                 }
9444                 __fallthrough;
9445
9446             case GT_INDEX:
9447             case GT_INDEX_ADDR:
9448
9449                 if ((tree->gtFlags & (GTF_IND_VOLATILE | GTF_IND_UNALIGNED)) == 0) // We prefer printing V or U over R
9450                 {
9451                     if (tree->gtFlags & GTF_INX_REFARR_LAYOUT)
9452                     {
9453                         printf("R");
9454                         --msgLength;
9455                         break;
9456                     } // R means RefArray
9457                 }
9458                 __fallthrough;
9459
9460             case GT_FIELD:
9461             case GT_CLS_VAR:
9462                 if (tree->gtFlags & GTF_IND_VOLATILE)
9463                 {
9464                     printf("V");
9465                     --msgLength;
9466                     break;
9467                 }
9468                 if (tree->gtFlags & GTF_IND_UNALIGNED)
9469                 {
9470                     printf("U");
9471                     --msgLength;
9472                     break;
9473                 }
9474                 goto DASH;
9475
9476             case GT_ASG:
9477                 if (tree->OperIsInitBlkOp())
9478                 {
9479                     printf("I");
9480                     --msgLength;
9481                     break;
9482                 }
9483                 goto DASH;
9484
9485             case GT_CALL:
9486                 if (tree->gtCall.IsInlineCandidate())
9487                 {
9488                     if (tree->gtCall.IsGuardedDevirtualizationCandidate())
9489                     {
9490                         printf("&");
9491                     }
9492                     else
9493                     {
9494                         printf("I");
9495                     }
9496                     --msgLength;
9497                     break;
9498                 }
9499                 else if (tree->gtCall.IsGuardedDevirtualizationCandidate())
9500                 {
9501                     printf("G");
9502                     --msgLength;
9503                     break;
9504                 }
9505                 if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_RETBUFFARG)
9506                 {
9507                     printf("S");
9508                     --msgLength;
9509                     break;
9510                 }
9511                 if (tree->gtFlags & GTF_CALL_HOISTABLE)
9512                 {
9513                     printf("H");
9514                     --msgLength;
9515                     break;
9516                 }
9517
9518                 goto DASH;
9519
9520             case GT_MUL:
9521 #if !defined(_TARGET_64BIT_)
9522             case GT_MUL_LONG:
9523 #endif
9524                 if (tree->gtFlags & GTF_MUL_64RSLT)
9525                 {
9526                     printf("L");
9527                     --msgLength;
9528                     break;
9529                 }
9530                 goto DASH;
9531
9532             case GT_LCL_FLD:
9533             case GT_LCL_VAR:
9534             case GT_LCL_VAR_ADDR:
9535             case GT_LCL_FLD_ADDR:
9536             case GT_STORE_LCL_FLD:
9537             case GT_STORE_LCL_VAR:
9538                 if (tree->gtFlags & GTF_VAR_USEASG)
9539                 {
9540                     printf("U");
9541                     --msgLength;
9542                     break;
9543                 }
9544                 if (tree->gtFlags & GTF_VAR_DEF)
9545                 {
9546                     printf("D");
9547                     --msgLength;
9548                     break;
9549                 }
9550                 if (tree->gtFlags & GTF_VAR_CAST)
9551                 {
9552                     printf("C");
9553                     --msgLength;
9554                     break;
9555                 }
9556                 if (tree->gtFlags & GTF_VAR_ARR_INDEX)
9557                 {
9558                     printf("i");
9559                     --msgLength;
9560                     break;
9561                 }
9562                 goto DASH;
9563
9564             case GT_EQ:
9565             case GT_NE:
9566             case GT_LT:
9567             case GT_LE:
9568             case GT_GE:
9569             case GT_GT:
9570             case GT_TEST_EQ:
9571             case GT_TEST_NE:
9572                 if (tree->gtFlags & GTF_RELOP_NAN_UN)
9573                 {
9574                     printf("N");
9575                     --msgLength;
9576                     break;
9577                 }
9578                 if (tree->gtFlags & GTF_RELOP_JMP_USED)
9579                 {
9580                     printf("J");
9581                     --msgLength;
9582                     break;
9583                 }
9584                 if (tree->gtFlags & GTF_RELOP_QMARK)
9585                 {
9586                     printf("Q");
9587                     --msgLength;
9588                     break;
9589                 }
9590                 goto DASH;
9591
9592             case GT_JCMP:
9593                 printf((tree->gtFlags & GTF_JCMP_TST) ? "T" : "C");
9594                 printf((tree->gtFlags & GTF_JCMP_EQ) ? "EQ" : "NE");
9595                 goto DASH;
9596
9597             case GT_FIELD_LIST:
9598                 if (tree->gtFlags & GTF_FIELD_LIST_HEAD)
9599                 {
9600                     printf("H");
9601                     --msgLength;
9602                     break;
9603                 }
9604                 goto DASH;
9605
9606             default:
9607             DASH:
9608                 printf("-");
9609                 --msgLength;
9610                 break;
9611         }
9612
9613         /* Then print the general purpose flags */
9614         unsigned flags = tree->gtFlags;
9615
9616         if (tree->OperIsBinary())
9617         {
9618             genTreeOps oper = tree->OperGet();
9619
9620             // Check for GTF_ADDRMODE_NO_CSE flag on add/mul/shl Binary Operators
9621             if ((oper == GT_ADD) || (oper == GT_MUL) || (oper == GT_LSH))
9622             {
9623                 if ((tree->gtFlags & GTF_ADDRMODE_NO_CSE) != 0)
9624                 {
9625                     flags |= GTF_DONT_CSE; // Force the GTF_ADDRMODE_NO_CSE flag to print out like GTF_DONT_CSE
9626                 }
9627             }
9628         }
9629         else // !tree->OperIsBinary()
9630         {
9631             // the GTF_REVERSE flag only applies to binary operations
9632             flags &= ~GTF_REVERSE_OPS; // we use this value for GTF_VAR_ARR_INDEX above
9633         }
9634
9635         msgLength -= GenTree::gtDispFlags(flags, tree->gtDebugFlags);
9636         /*
9637             printf("%c", (flags & GTF_ASG           ) ? 'A' : '-');
9638             printf("%c", (flags & GTF_CALL          ) ? 'C' : '-');
9639             printf("%c", (flags & GTF_EXCEPT        ) ? 'X' : '-');
9640             printf("%c", (flags & GTF_GLOB_REF      ) ? 'G' : '-');
9641             printf("%c", (flags & GTF_ORDER_SIDEEFF ) ? 'O' : '-');
9642             printf("%c", (flags & GTF_COLON_COND    ) ? '?' : '-');
9643             printf("%c", (flags & GTF_DONT_CSE      ) ? 'N' :        // N is for No cse
9644                          (flags & GTF_MAKE_CSE      ) ? 'H' : '-');  // H is for Hoist this expr
9645             printf("%c", (flags & GTF_REVERSE_OPS   ) ? 'R' : '-');
9646             printf("%c", (flags & GTF_UNSIGNED      ) ? 'U' :
9647                          (flags & GTF_BOOLEAN       ) ? 'B' : '-');
9648             printf("%c", (flags & GTF_SET_FLAGS     ) ? 'S' : '-');
9649             printf("%c", (flags & GTF_SPILLED       ) ? 'z' : '-');
9650             printf("%c", (flags & GTF_SPILL         ) ? 'Z' : '-');
9651         */
9652     }
9653
9654     // If we're printing a node for LIR, we use the space normally associated with the message
9655     // to display the node's temp name (if any)
9656     const bool hasOperands = tree->OperandsBegin() != tree->OperandsEnd();
9657     if (isLIR)
9658     {
9659         assert(msg == nullptr);
9660
9661         // If the tree does not have any operands, we do not display the indent stack. This gives us
9662         // two additional characters for alignment.
9663         if (!hasOperands)
9664         {
9665             msgLength += 1;
9666         }
9667
9668         if (tree->IsValue())
9669         {
9670             const size_t bufLength = msgLength - 1;
9671             msg                    = reinterpret_cast<char*>(alloca(bufLength * sizeof(char)));
9672             sprintf_s(const_cast<char*>(msg), bufLength, "t%d = %s", tree->gtTreeID, hasOperands ? "" : " ");
9673         }
9674     }
9675
9676     /* print the msg associated with the node */
9677
9678     if (msg == nullptr)
9679     {
9680         msg = "";
9681     }
9682     if (msgLength < 0)
9683     {
9684         msgLength = 0;
9685     }
9686
9687     printf(isLIR ? " %+*s" : " %-*s", msgLength, msg);
9688
9689     /* Indent the node accordingly */
9690     if (!isLIR || hasOperands)
9691     {
9692         printIndent(indentStack);
9693     }
9694
9695     gtDispNodeName(tree);
9696
9697     assert(tree == nullptr || tree->gtOper < GT_COUNT);
9698
9699     if (tree)
9700     {
9701         /* print the type of the node */
9702         if (tree->gtOper != GT_CAST)
9703         {
9704             printf(" %-6s", varTypeName(tree->TypeGet()));
9705             if (tree->gtOper == GT_LCL_VAR || tree->gtOper == GT_STORE_LCL_VAR)
9706             {
9707                 LclVarDsc* varDsc = &lvaTable[tree->gtLclVarCommon.gtLclNum];
9708                 if (varDsc->lvAddrExposed)
9709                 {
9710                     printf("(AX)"); // Variable has address exposed.
9711                 }
9712
9713                 if (varDsc->lvUnusedStruct)
9714                 {
9715                     assert(varDsc->lvPromoted);
9716                     printf("(U)"); // Unused struct
9717                 }
9718                 else if (varDsc->lvPromoted)
9719                 {
9720                     if (varTypeIsPromotable(varDsc))
9721                     {
9722                         printf("(P)"); // Promoted struct
9723                     }
9724                     else
9725                     {
9726                         // Promoted implicit by-refs can have this state during
9727                         // global morph while they are being rewritten
9728                         assert(fgGlobalMorph);
9729                         printf("(P?!)"); // Promoted struct
9730                     }
9731                 }
9732             }
9733
9734             if (tree->gtOper == GT_STMT)
9735             {
9736                 if (opts.compDbgInfo)
9737                 {
9738                     GenTreeStmt* stmt  = tree->AsStmt();
9739                     IL_OFFSET    endIL = stmt->gtStmtLastILoffs;
9740
9741                     printf("(IL ");
9742                     if (stmt->gtStmtILoffsx == BAD_IL_OFFSET)
9743                     {
9744                         printf("  ???");
9745                     }
9746                     else
9747                     {
9748                         printf("0x%03X", jitGetILoffs(stmt->gtStmtILoffsx));
9749                     }
9750                     printf("...");
9751                     if (endIL == BAD_IL_OFFSET)
9752                     {
9753                         printf("  ???");
9754                     }
9755                     else
9756                     {
9757                         printf("0x%03X", endIL);
9758                     }
9759                     printf(")");
9760                 }
9761             }
9762
9763             if (tree->IsArgPlaceHolderNode() && (tree->gtArgPlace.gtArgPlaceClsHnd != nullptr))
9764             {
9765                 printf(" => [clsHnd=%08X]", dspPtr(tree->gtArgPlace.gtArgPlaceClsHnd));
9766             }
9767
9768             if (tree->gtOper == GT_RUNTIMELOOKUP)
9769             {
9770 #ifdef _TARGET_64BIT_
9771                 printf(" 0x%llx", dspPtr(tree->gtRuntimeLookup.gtHnd));
9772 #else
9773                 printf(" 0x%x", dspPtr(tree->gtRuntimeLookup.gtHnd));
9774 #endif
9775
9776                 switch (tree->gtRuntimeLookup.gtHndType)
9777                 {
9778                     case CORINFO_HANDLETYPE_CLASS:
9779                         printf(" class");
9780                         break;
9781                     case CORINFO_HANDLETYPE_METHOD:
9782                         printf(" method");
9783                         break;
9784                     case CORINFO_HANDLETYPE_FIELD:
9785                         printf(" field");
9786                         break;
9787                     default:
9788                         printf(" unknown");
9789                         break;
9790                 }
9791             }
9792         }
9793
9794         // for tracking down problems in reguse prediction or liveness tracking
9795
9796         if (verbose && 0)
9797         {
9798             printf(" RR=");
9799             dspRegMask(tree->gtRsvdRegs);
9800             printf("\n");
9801         }
9802     }
9803 }
9804
9805 void Compiler::gtDispRegVal(GenTree* tree)
9806 {
9807     switch (tree->GetRegTag())
9808     {
9809         // Don't display NOREG; the absence of this tag will imply this state
9810         // case GenTree::GT_REGTAG_NONE:       printf(" NOREG");   break;
9811
9812         case GenTree::GT_REGTAG_REG:
9813             printf(" REG %s", compRegVarName(tree->gtRegNum));
9814             break;
9815
9816         default:
9817             break;
9818     }
9819
9820     if (tree->IsMultiRegCall())
9821     {
9822         // 0th reg is gtRegNum, which is already printed above.
9823         // Print the remaining regs of a multi-reg call node.
9824         GenTreeCall* call     = tree->AsCall();
9825         unsigned     regCount = call->GetReturnTypeDesc()->TryGetReturnRegCount();
9826         for (unsigned i = 1; i < regCount; ++i)
9827         {
9828             printf(",%s", compRegVarName(call->GetRegNumByIdx(i)));
9829         }
9830     }
9831     else if (tree->IsCopyOrReloadOfMultiRegCall())
9832     {
9833         GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
9834         GenTreeCall*         call         = tree->gtGetOp1()->AsCall();
9835         unsigned             regCount     = call->GetReturnTypeDesc()->TryGetReturnRegCount();
9836         for (unsigned i = 1; i < regCount; ++i)
9837         {
9838             printf(",%s", compRegVarName(copyOrReload->GetRegNumByIdx(i)));
9839         }
9840     }
9841
9842 #if FEATURE_MULTIREG_RET
9843     if (tree->IsCopyOrReload())
9844     {
9845         for (int i = 1; i < MAX_RET_REG_COUNT; i++)
9846         {
9847             regNumber reg = (regNumber)tree->AsCopyOrReload()->GetRegNumByIdx(i);
9848             if (reg == REG_NA)
9849             {
9850                 break;
9851             }
9852             printf(",%s", compRegVarName(reg));
9853         }
9854     }
9855 #endif
9856
9857 #if defined(_TARGET_ARM_)
9858     if (tree->OperIsMultiRegOp() && (tree->AsMultiRegOp()->gtOtherReg != REG_NA))
9859     {
9860         printf(",%s", compRegVarName(tree->AsMultiRegOp()->gtOtherReg));
9861     }
9862 #endif
9863 }
9864
9865 // We usually/commonly don't expect to print anything longer than this string,
9866 #define LONGEST_COMMON_LCL_VAR_DISPLAY "V99 PInvokeFrame"
9867 #define LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH (sizeof(LONGEST_COMMON_LCL_VAR_DISPLAY))
9868 #define BUF_SIZE (LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH * 2)
9869
//------------------------------------------------------------------------
// gtGetLclVarNameInfo: Classify a local variable number for dump purposes.
//
// Arguments:
//    lclNum    - the JIT-side local variable number
//    ilKindOut - [out] a short kind prefix ("cse", "rat", "tmp", "arg", "loc"),
//                or nullptr when a full name is available instead
//    ilNameOut - [out] a full display name ("this", "RetBuf", "OutArgs", ...),
//                or nullptr when only a kind/number pair applies
//    ilNumOut  - [out] the number to print after the kind prefix
//
// Notes:
//    Exactly one of *ilKindOut / *ilNameOut is normally non-null; callers
//    format either " <name>" or " <kind><num>" accordingly.
//
void Compiler::gtGetLclVarNameInfo(unsigned lclNum, const char** ilKindOut, const char** ilNameOut, unsigned* ilNumOut)
{
    const char* ilKind = nullptr;
    const char* ilName = nullptr;

    // Map the JIT local number back to an IL variable number (or a special
    // ICorDebugInfo marker for JIT-introduced locals).
    unsigned ilNum = compMap2ILvarNum(lclNum);

    if (ilNum == (unsigned)ICorDebugInfo::RETBUF_ILNUM)
    {
        ilName = "RetBuf";
    }
    else if (ilNum == (unsigned)ICorDebugInfo::VARARGS_HND_ILNUM)
    {
        ilName = "VarArgHandle";
    }
    else if (ilNum == (unsigned)ICorDebugInfo::TYPECTXT_ILNUM)
    {
        ilName = "TypeCtx";
    }
    else if (ilNum == (unsigned)ICorDebugInfo::UNKNOWN_ILNUM)
    {
        // Not an IL variable: a temp introduced by the JIT itself.
#if FEATURE_ANYCSE
        if (lclNumIsTrueCSE(lclNum))
        {
            ilKind = "cse";
            ilNum  = lclNum - optCSEstart;
        }
        else if (lclNum >= optCSEstart)
        {
            // Currently any new LclVar's introduced after the CSE phase
            // are believed to be created by the "rationalizer" that is what is meant by the "rat" prefix.
            ilKind = "rat";
            ilNum  = lclNum - (optCSEstart + optCSEcount);
        }
        else
#endif // FEATURE_ANYCSE
        {
            // Check the well-known special locals before falling back to "tmp".
            if (lclNum == info.compLvFrameListRoot)
            {
                ilName = "FramesRoot";
            }
            else if (lclNum == lvaInlinedPInvokeFrameVar)
            {
                ilName = "PInvokeFrame";
            }
            else if (lclNum == lvaGSSecurityCookie)
            {
                ilName = "GsCookie";
            }
#if FEATURE_FIXED_OUT_ARGS
            else if (lclNum == lvaPInvokeFrameRegSaveVar)
            {
                ilName = "PInvokeFrameRegSave";
            }
            else if (lclNum == lvaOutgoingArgSpaceVar)
            {
                ilName = "OutArgs";
            }
#endif // FEATURE_FIXED_OUT_ARGS
#ifdef _TARGET_ARM_
            else if (lclNum == lvaPromotedStructAssemblyScratchVar)
            {
                ilName = "PromotedStructScratch";
            }
#endif // _TARGET_ARM_
#if !FEATURE_EH_FUNCLETS
            else if (lclNum == lvaShadowSPslotsVar)
            {
                ilName = "EHSlots";
            }
#endif // !FEATURE_EH_FUNCLETS
#ifdef JIT32_GCENCODER
            else if (lclNum == lvaLocAllocSPvar)
            {
                ilName = "LocAllocSP";
            }
#endif // JIT32_GCENCODER
#if FEATURE_EH_FUNCLETS
            else if (lclNum == lvaPSPSym)
            {
                ilName = "PSPSym";
            }
#endif // FEATURE_EH_FUNCLETS
            else
            {
                // A generic JIT temp; number it relative to the user-visible locals.
                ilKind = "tmp";
                if (compIsForInlining())
                {
                    ilNum = lclNum - impInlineInfo->InlinerCompiler->info.compLocalsCount;
                }
                else
                {
                    ilNum = lclNum - info.compLocalsCount;
                }
            }
        }
    }
    else if (lclNum < (compIsForInlining() ? impInlineInfo->InlinerCompiler->info.compArgsCount : info.compArgsCount))
    {
        // An incoming argument of the (root) method being compiled.
        if (ilNum == 0 && !info.compIsStatic)
        {
            ilName = "this";
        }
        else
        {
            ilKind = "arg";
        }
    }
    else
    {
        // An IL local. Struct fields keep a null kind (they are displayed
        // through their parent); the IL number is rebased past the args.
        if (!lvaTable[lclNum].lvIsStructField)
        {
            ilKind = "loc";
        }
        if (compIsForInlining())
        {
            ilNum -= impInlineInfo->InlinerCompiler->info.compILargsCount;
        }
        else
        {
            ilNum -= info.compILargsCount;
        }
    }

    *ilKindOut = ilKind;
    *ilNameOut = ilName;
    *ilNumOut  = ilNum;
}
9998
9999 /*****************************************************************************/
10000 int Compiler::gtGetLclVarName(unsigned lclNum, char* buf, unsigned buf_remaining)
10001 {
10002     char*    bufp_next    = buf;
10003     unsigned charsPrinted = 0;
10004     int      sprintf_result;
10005
10006     sprintf_result = sprintf_s(bufp_next, buf_remaining, "V%02u", lclNum);
10007
10008     if (sprintf_result < 0)
10009     {
10010         return sprintf_result;
10011     }
10012
10013     charsPrinted += sprintf_result;
10014     bufp_next += sprintf_result;
10015     buf_remaining -= sprintf_result;
10016
10017     const char* ilKind = nullptr;
10018     const char* ilName = nullptr;
10019     unsigned    ilNum  = 0;
10020
10021     gtGetLclVarNameInfo(lclNum, &ilKind, &ilName, &ilNum);
10022
10023     if (ilName != nullptr)
10024     {
10025         sprintf_result = sprintf_s(bufp_next, buf_remaining, " %s", ilName);
10026         if (sprintf_result < 0)
10027         {
10028             return sprintf_result;
10029         }
10030         charsPrinted += sprintf_result;
10031         bufp_next += sprintf_result;
10032         buf_remaining -= sprintf_result;
10033     }
10034     else if (ilKind != nullptr)
10035     {
10036         sprintf_result = sprintf_s(bufp_next, buf_remaining, " %s%d", ilKind, ilNum);
10037         if (sprintf_result < 0)
10038         {
10039             return sprintf_result;
10040         }
10041         charsPrinted += sprintf_result;
10042         bufp_next += sprintf_result;
10043         buf_remaining -= sprintf_result;
10044     }
10045
10046     assert(charsPrinted > 0);
10047     assert(buf_remaining > 0);
10048
10049     return (int)charsPrinted;
10050 }
10051
10052 /*****************************************************************************
10053  * Get the local var name, and create a copy of the string that can be used in debug output.
10054  */
10055 char* Compiler::gtGetLclVarName(unsigned lclNum)
10056 {
10057     char buf[BUF_SIZE];
10058     int  charsPrinted = gtGetLclVarName(lclNum, buf, _countof(buf));
10059     if (charsPrinted < 0)
10060     {
10061         return nullptr;
10062     }
10063
10064     char* retBuf = new (this, CMK_DebugOnly) char[charsPrinted + 1];
10065     strcpy_s(retBuf, charsPrinted + 1, buf);
10066     return retBuf;
10067 }
10068
10069 /*****************************************************************************/
10070 void Compiler::gtDispLclVar(unsigned lclNum, bool padForBiggestDisp)
10071 {
10072     char buf[BUF_SIZE];
10073     int  charsPrinted = gtGetLclVarName(lclNum, buf, _countof(buf));
10074
10075     if (charsPrinted < 0)
10076     {
10077         return;
10078     }
10079
10080     printf("%s", buf);
10081
10082     if (padForBiggestDisp && (charsPrinted < LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH))
10083     {
10084         printf("%*c", LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH - charsPrinted, ' ');
10085     }
10086 }
10087
10088 /*****************************************************************************/
//------------------------------------------------------------------------
// gtDispConst: Print the value portion of a constant node to jitstdout.
//
// Arguments:
//    tree - the constant node (GTK_CONST) to display
//
void Compiler::gtDispConst(GenTree* tree)
{
    assert(tree->OperKind() & GTK_CONST);

    switch (tree->gtOper)
    {
        case GT_CNS_INT:
            if (tree->IsIconHandle(GTF_ICON_STR_HDL))
            {
                // String handles are displayed with the string contents when available.
                const wchar_t* str = eeGetCPString(tree->gtIntCon.gtIconVal);
                if (str != nullptr)
                {
                    printf(" 0x%X \"%S\"", dspPtr(tree->gtIntCon.gtIconVal), str);
                }
                else
                {
                    // Note that eGetCPString isn't currently implemented on Linux/ARM
                    // and instead always returns nullptr
                    printf(" 0x%X [ICON_STR_HDL]", dspPtr(tree->gtIntCon.gtIconVal));
                }
            }
            else
            {
                // Handle values go through dspPtr() so they can be masked in diffable dumps.
                ssize_t dspIconVal = tree->IsIconHandle() ? dspPtr(tree->gtIntCon.gtIconVal) : tree->gtIntCon.gtIconVal;

                if (tree->TypeGet() == TYP_REF)
                {
                    // The only TYP_REF constant is null.
                    assert(tree->gtIntCon.gtIconVal == 0);
                    printf(" null");
                }
                else if ((tree->gtIntCon.gtIconVal > -1000) && (tree->gtIntCon.gtIconVal < 1000))
                {
                    // Small values print in decimal; larger ones in hex.
                    // NOTE(review): dspIconVal is ssize_t, so "%ld"/"%X" below are
                    // narrower than the argument on LP64/LLP64 targets — harmless for
                    // these value ranges, but worth confirming against PAL printf.
                    printf(" %ld", dspIconVal);
#ifdef _TARGET_64BIT_
                }
                else if ((tree->gtIntCon.gtIconVal & 0xFFFFFFFF00000000LL) != 0)
                {
                    // Values with bits above 32 need the 64-bit hex format.
                    printf(" 0x%llx", dspIconVal);
#endif
                }
                else
                {
                    printf(" 0x%X", dspIconVal);
                }

                // Annotate handle constants with the kind of handle.
                if (tree->IsIconHandle())
                {
                    switch (tree->GetIconHandleFlag())
                    {
                        case GTF_ICON_SCOPE_HDL:
                            printf(" scope");
                            break;
                        case GTF_ICON_CLASS_HDL:
                            printf(" class");
                            break;
                        case GTF_ICON_METHOD_HDL:
                            printf(" method");
                            break;
                        case GTF_ICON_FIELD_HDL:
                            printf(" field");
                            break;
                        case GTF_ICON_STATIC_HDL:
                            printf(" static");
                            break;
                        case GTF_ICON_STR_HDL:
                            unreached(); // This case is handled above
                            break;
                        case GTF_ICON_PSTR_HDL:
                            printf(" pstr");
                            break;
                        case GTF_ICON_PTR_HDL:
                            printf(" ptr");
                            break;
                        case GTF_ICON_VARG_HDL:
                            printf(" vararg");
                            break;
                        case GTF_ICON_PINVKI_HDL:
                            printf(" pinvoke");
                            break;
                        case GTF_ICON_TOKEN_HDL:
                            printf(" token");
                            break;
                        case GTF_ICON_TLS_HDL:
                            printf(" tls");
                            break;
                        case GTF_ICON_FTN_ADDR:
                            printf(" ftn");
                            break;
                        case GTF_ICON_CIDMID_HDL:
                            printf(" cid/mid");
                            break;
                        case GTF_ICON_BBC_PTR:
                            printf(" bbc");
                            break;
                        default:
                            printf(" UNKNOWN");
                            break;
                    }
                }

                if ((tree->gtFlags & GTF_ICON_FIELD_OFF) != 0)
                {
                    printf(" field offset");
                }

#ifdef FEATURE_SIMD
                if ((tree->gtFlags & GTF_ICON_SIMD_COUNT) != 0)
                {
                    printf(" Vector<T>.Count");
                }
#endif

                if ((tree->IsReuseRegVal()) != 0)
                {
                    printf(" reuse reg val");
                }
            }

            // Print any field sequence attached to the constant (e.g. field offsets).
            gtDispFieldSeq(tree->gtIntCon.gtFieldSeq);

            break;

        case GT_CNS_LNG:
            printf(" 0x%016I64x", tree->gtLngCon.gtLconVal);
            break;

        case GT_CNS_DBL:
            // Negative zero needs special-casing; "%g" would print it as plain 0.
            if (*((__int64*)&tree->gtDblCon.gtDconVal) == (__int64)I64(0x8000000000000000))
            {
                printf(" -0.00000");
            }
            else
            {
                printf(" %#.17g", tree->gtDblCon.gtDconVal);
            }
            break;
        case GT_CNS_STR:
            printf("<string constant>");
            break;
        default:
            assert(!"unexpected constant node");
    }
}
10232
10233 void Compiler::gtDispFieldSeq(FieldSeqNode* pfsn)
10234 {
10235     if (pfsn == FieldSeqStore::NotAField() || (pfsn == nullptr))
10236     {
10237         return;
10238     }
10239
10240     // Otherwise...
10241     printf(" Fseq[");
10242     while (pfsn != nullptr)
10243     {
10244         assert(pfsn != FieldSeqStore::NotAField()); // Can't exist in a field sequence list except alone
10245         CORINFO_FIELD_HANDLE fldHnd = pfsn->m_fieldHnd;
10246         // First check the "pseudo" field handles...
10247         if (fldHnd == FieldSeqStore::FirstElemPseudoField)
10248         {
10249             printf("#FirstElem");
10250         }
10251         else if (fldHnd == FieldSeqStore::ConstantIndexPseudoField)
10252         {
10253             printf("#ConstantIndex");
10254         }
10255         else
10256         {
10257             printf("%s", eeGetFieldName(fldHnd));
10258         }
10259         pfsn = pfsn->m_next;
10260         if (pfsn != nullptr)
10261         {
10262             printf(", ");
10263         }
10264     }
10265     printf("]");
10266 }
10267
10268 //------------------------------------------------------------------------
10269 // gtDispLeaf: Print a single leaf node to jitstdout.
10270 //
10271 // Arguments:
10272 //    tree - the tree to be printed
10273 //    indentStack - the specification for the current level of indentation & arcs
10274 //
10275 // Return Value:
10276 //    None.
10277 //
10278 // Notes:
10279 //    'indentStack' may be null, in which case no indentation or arcs are printed
10280
10281 void Compiler::gtDispLeaf(GenTree* tree, IndentStack* indentStack)
10282 {
10283     if (tree->OperKind() & GTK_CONST)
10284     {
10285         gtDispConst(tree);
10286         return;
10287     }
10288
10289     bool isLclFld = false;
10290
10291     switch (tree->gtOper)
10292     {
10293         unsigned   varNum;
10294         LclVarDsc* varDsc;
10295
10296         case GT_LCL_FLD:
10297         case GT_LCL_FLD_ADDR:
10298         case GT_STORE_LCL_FLD:
10299             isLclFld = true;
10300             __fallthrough;
10301
10302         case GT_PHI_ARG:
10303         case GT_LCL_VAR:
10304         case GT_LCL_VAR_ADDR:
10305         case GT_STORE_LCL_VAR:
10306             printf(" ");
10307             varNum = tree->gtLclVarCommon.gtLclNum;
10308             varDsc = &lvaTable[varNum];
10309             gtDispLclVar(varNum);
10310             if (tree->gtLclVarCommon.HasSsaName())
10311             {
10312                 if (tree->gtFlags & GTF_VAR_USEASG)
10313                 {
10314                     assert(tree->gtFlags & GTF_VAR_DEF);
10315                     printf("ud:%d->%d", tree->gtLclVarCommon.gtSsaNum, GetSsaNumForLocalVarDef(tree));
10316                 }
10317                 else
10318                 {
10319                     printf("%s:%d", (tree->gtFlags & GTF_VAR_DEF) ? "d" : "u", tree->gtLclVarCommon.gtSsaNum);
10320                 }
10321             }
10322
10323             if (isLclFld)
10324             {
10325                 printf("[+%u]", tree->gtLclFld.gtLclOffs);
10326                 gtDispFieldSeq(tree->gtLclFld.gtFieldSeq);
10327             }
10328
10329             if (varDsc->lvRegister)
10330             {
10331                 printf(" ");
10332                 varDsc->PrintVarReg();
10333             }
10334             else if (tree->InReg())
10335             {
10336                 printf(" %s", compRegVarName(tree->gtRegNum));
10337             }
10338
10339             if (varDsc->lvPromoted)
10340             {
10341                 if (!varTypeIsPromotable(varDsc) && !varDsc->lvUnusedStruct)
10342                 {
10343                     // Promoted implicit byrefs can get in this state while they are being rewritten
10344                     // in global morph.
10345                     assert(fgGlobalMorph);
10346                 }
10347                 else
10348                 {
10349                     CORINFO_CLASS_HANDLE typeHnd = varDsc->lvVerTypeInfo.GetClassHandle();
10350                     CORINFO_FIELD_HANDLE fldHnd;
10351
10352                     for (unsigned i = varDsc->lvFieldLclStart; i < varDsc->lvFieldLclStart + varDsc->lvFieldCnt; ++i)
10353                     {
10354                         LclVarDsc*  fieldVarDsc = &lvaTable[i];
10355                         const char* fieldName;
10356 #if !defined(_TARGET_64BIT_)
10357                         if (varTypeIsLong(varDsc))
10358                         {
10359                             fieldName = (i == 0) ? "lo" : "hi";
10360                         }
10361                         else
10362 #endif // !defined(_TARGET_64BIT_)
10363                         {
10364                             fldHnd    = info.compCompHnd->getFieldInClass(typeHnd, fieldVarDsc->lvFldOrdinal);
10365                             fieldName = eeGetFieldName(fldHnd);
10366                         }
10367
10368                         printf("\n");
10369                         printf("                                                  ");
10370                         printIndent(indentStack);
10371                         printf("    %-6s V%02u.%s (offs=0x%02x) -> ", varTypeName(fieldVarDsc->TypeGet()),
10372                                tree->gtLclVarCommon.gtLclNum, fieldName, fieldVarDsc->lvFldOffset);
10373                         gtDispLclVar(i);
10374
10375                         if (fieldVarDsc->lvRegister)
10376                         {
10377                             printf(" ");
10378                             fieldVarDsc->PrintVarReg();
10379                         }
10380
10381                         if (fieldVarDsc->lvTracked && fgLocalVarLivenessDone && // Includes local variable liveness
10382                             ((tree->gtFlags & GTF_VAR_DEATH) != 0))
10383                         {
10384                             printf(" (last use)");
10385                         }
10386                     }
10387                 }
10388             }
10389             else // a normal not-promoted lclvar
10390             {
10391                 if (varDsc->lvTracked && fgLocalVarLivenessDone && ((tree->gtFlags & GTF_VAR_DEATH) != 0))
10392                 {
10393                     printf(" (last use)");
10394                 }
10395             }
10396             break;
10397
10398         case GT_JMP:
10399         {
10400             const char* methodName;
10401             const char* className;
10402
10403             methodName = eeGetMethodName((CORINFO_METHOD_HANDLE)tree->gtVal.gtVal1, &className);
10404             printf(" %s.%s\n", className, methodName);
10405         }
10406         break;
10407
10408         case GT_CLS_VAR:
10409             printf(" Hnd=%#x", dspPtr(tree->gtClsVar.gtClsVarHnd));
10410             gtDispFieldSeq(tree->gtClsVar.gtFieldSeq);
10411             break;
10412
10413         case GT_CLS_VAR_ADDR:
10414             printf(" Hnd=%#x", dspPtr(tree->gtClsVar.gtClsVarHnd));
10415             break;
10416
10417         case GT_LABEL:
10418             break;
10419
10420         case GT_FTN_ADDR:
10421         {
10422             const char* methodName;
10423             const char* className;
10424
10425             methodName = eeGetMethodName((CORINFO_METHOD_HANDLE)tree->gtFptrVal.gtFptrMethod, &className);
10426             printf(" %s.%s\n", className, methodName);
10427         }
10428         break;
10429
10430 #if !FEATURE_EH_FUNCLETS
10431         case GT_END_LFIN:
10432             printf(" endNstLvl=%d", tree->gtVal.gtVal1);
10433             break;
10434 #endif // !FEATURE_EH_FUNCLETS
10435
10436         // Vanilla leaves. No qualifying information available. So do nothing
10437
10438         case GT_NO_OP:
10439         case GT_START_NONGC:
10440         case GT_START_PREEMPTGC:
10441         case GT_PROF_HOOK:
10442         case GT_CATCH_ARG:
10443         case GT_MEMORYBARRIER:
10444         case GT_ARGPLACE:
10445         case GT_PINVOKE_PROLOG:
10446         case GT_JMPTABLE:
10447             break;
10448
10449         case GT_RET_EXPR:
10450             printf("(inl return from call ");
10451             printTreeID(tree->gtRetExpr.gtInlineCandidate);
10452             printf(")");
10453             break;
10454
10455         case GT_PHYSREG:
10456             printf(" %s", getRegName(tree->gtPhysReg.gtSrcReg, varTypeIsFloating(tree)));
10457             break;
10458
10459         case GT_IL_OFFSET:
10460             printf(" IL offset: ");
10461             if (tree->gtStmt.gtStmtILoffsx == BAD_IL_OFFSET)
10462             {
10463                 printf("???");
10464             }
10465             else
10466             {
10467                 printf("0x%x", jitGetILoffs(tree->gtStmt.gtStmtILoffsx));
10468             }
10469             break;
10470
10471         case GT_JCC:
10472         case GT_SETCC:
10473             printf(" cond=%s", tree->AsCC()->gtCondition.Name());
10474             break;
10475         case GT_JCMP:
10476             printf(" cond=%s%s", (tree->gtFlags & GTF_JCMP_TST) ? "TEST_" : "",
10477                    (tree->gtFlags & GTF_JCMP_EQ) ? "EQ" : "NE");
10478
10479         default:
10480             assert(!"don't know how to display tree leaf node");
10481     }
10482 }
10483
10484 //------------------------------------------------------------------------
10485 // gtDispLeaf: Print a child node to jitstdout.
10486 //
10487 // Arguments:
10488 //    tree - the tree to be printed
10489 //    indentStack - the specification for the current level of indentation & arcs
10490 //    arcType     - the type of arc to use for this child
10491 //    msg         - a contextual method (i.e. from the parent) to print
10492 //    topOnly     - a boolean indicating whether to print the children, or just the top node
10493 //
10494 // Return Value:
10495 //    None.
10496 //
10497 // Notes:
10498 //    'indentStack' may be null, in which case no indentation or arcs are printed
10499 //    'msg' has a default value of null
10500 //    'topOnly' is an optional argument that defaults to false
10501
10502 void Compiler::gtDispChild(GenTree*             child,
10503                            IndentStack*         indentStack,
10504                            IndentInfo           arcType,
10505                            __in_opt const char* msg,     /* = nullptr  */
10506                            bool                 topOnly) /* = false */
10507 {
10508     indentStack->Push(arcType);
10509     gtDispTree(child, indentStack, msg, topOnly);
10510     indentStack->Pop();
10511 }
10512
#ifdef FEATURE_SIMD
// Intrinsic Id to name map: generated from the X-macro list in simdintrinsiclist.h
// and indexed by SIMD intrinsic id (see the use in gtDispTree for GT_SIMD nodes).
extern const char* const simdIntrinsicNames[] = {
#define SIMD_INTRINSIC(mname, inst, id, name, r, ac, arg1, arg2, arg3, t1, t2, t3, t4, t5, t6, t7, t8, t9, t10) name,
#include "simdintrinsiclist.h"
};
#endif // FEATURE_SIMD
10520
10521 /*****************************************************************************/
10522
10523 void Compiler::gtDispTree(GenTree*     tree,
10524                           IndentStack* indentStack,                 /* = nullptr */
10525                           __in __in_z __in_opt const char* msg,     /* = nullptr  */
10526                           bool                             topOnly, /* = false */
10527                           bool                             isLIR)   /* = false */
10528 {
10529     if (tree == nullptr)
10530     {
10531         printf(" [%08X] <NULL>\n", tree);
10532         printf(""); // null string means flush
10533         return;
10534     }
10535
10536     if (indentStack == nullptr)
10537     {
10538         indentStack = new (this, CMK_DebugOnly) IndentStack(this);
10539     }
10540
10541     if (IsUninitialized(tree))
10542     {
10543         /* Value used to initalize nodes */
10544         printf("Uninitialized tree node!\n");
10545         return;
10546     }
10547
10548     if (tree->gtOper >= GT_COUNT)
10549     {
10550         gtDispNode(tree, indentStack, msg, isLIR);
10551         printf("Bogus operator!\n");
10552         return;
10553     }
10554
10555     /* Is tree a leaf node? */
10556
10557     if (tree->OperIsLeaf() || tree->OperIsLocalStore()) // local stores used to be leaves
10558     {
10559         gtDispNode(tree, indentStack, msg, isLIR);
10560         gtDispLeaf(tree, indentStack);
10561         gtDispCommonEndLine(tree);
10562
10563         if (tree->OperIsLocalStore() && !topOnly)
10564         {
10565             gtDispChild(tree->gtOp.gtOp1, indentStack, IINone);
10566         }
10567         return;
10568     }
10569
10570     // Determine what kind of arc to propagate.
10571     IndentInfo myArc    = IINone;
10572     IndentInfo lowerArc = IINone;
10573     if (indentStack->Depth() > 0)
10574     {
10575         myArc = indentStack->Pop();
10576         switch (myArc)
10577         {
10578             case IIArcBottom:
10579                 indentStack->Push(IIArc);
10580                 lowerArc = IINone;
10581                 break;
10582             case IIArc:
10583                 indentStack->Push(IIArc);
10584                 lowerArc = IIArc;
10585                 break;
10586             case IIArcTop:
10587                 indentStack->Push(IINone);
10588                 lowerArc = IIArc;
10589                 break;
10590             case IIEmbedded:
10591                 indentStack->Push(IIEmbedded);
10592                 lowerArc = IIEmbedded;
10593                 break;
10594             case IINone:
10595                 indentStack->Push(IINone);
10596                 lowerArc = IINone;
10597                 break;
10598             default:
10599                 unreached();
10600                 break;
10601         }
10602     }
10603
10604     // Special case formatting for PHI nodes -- arg lists like calls.
10605
10606     if (tree->OperGet() == GT_PHI)
10607     {
10608         gtDispNode(tree, indentStack, msg, isLIR);
10609         gtDispCommonEndLine(tree);
10610
10611         if (!topOnly)
10612         {
10613             if (tree->gtOp.gtOp1 != nullptr)
10614             {
10615                 IndentInfo arcType = IIArcTop;
10616                 for (GenTreeArgList* args = tree->gtOp.gtOp1->AsArgList(); args != nullptr; args = args->Rest())
10617                 {
10618                     if (args->Rest() == nullptr)
10619                     {
10620                         arcType = IIArcBottom;
10621                     }
10622                     gtDispChild(args->Current(), indentStack, arcType);
10623                     arcType = IIArc;
10624                 }
10625             }
10626         }
10627         return;
10628     }
10629
10630     /* Is it a 'simple' unary/binary operator? */
10631
10632     const char* childMsg = nullptr;
10633
10634     if (tree->OperIsSimple())
10635     {
10636         if (!topOnly)
10637         {
10638             if (tree->gtGetOp2IfPresent())
10639             {
10640                 // Label the childMsgs of the GT_COLON operator
10641                 // op2 is the then part
10642
10643                 if (tree->gtOper == GT_COLON)
10644                 {
10645                     childMsg = "then";
10646                 }
10647                 gtDispChild(tree->gtOp.gtOp2, indentStack, IIArcTop, childMsg, topOnly);
10648             }
10649         }
10650
10651         // Now, get the right type of arc for this node
10652         if (myArc != IINone)
10653         {
10654             indentStack->Pop();
10655             indentStack->Push(myArc);
10656         }
10657
10658         gtDispNode(tree, indentStack, msg, isLIR);
10659
10660         // Propagate lowerArc to the lower children.
10661         if (indentStack->Depth() > 0)
10662         {
10663             (void)indentStack->Pop();
10664             indentStack->Push(lowerArc);
10665         }
10666
10667         if (tree->gtOper == GT_CAST)
10668         {
10669             /* Format a message that explains the effect of this GT_CAST */
10670
10671             var_types fromType  = genActualType(tree->gtCast.CastOp()->TypeGet());
10672             var_types toType    = tree->CastToType();
10673             var_types finalType = tree->TypeGet();
10674
10675             /* if GTF_UNSIGNED is set then force fromType to an unsigned type */
10676             if (tree->gtFlags & GTF_UNSIGNED)
10677             {
10678                 fromType = genUnsignedType(fromType);
10679             }
10680
10681             if (finalType != toType)
10682             {
10683                 printf(" %s <-", varTypeName(finalType));
10684             }
10685
10686             printf(" %s <- %s", varTypeName(toType), varTypeName(fromType));
10687         }
10688
10689         if (tree->gtOper == GT_OBJ && (tree->gtFlags & GTF_VAR_DEATH))
10690         {
10691             printf(" (last use)");
10692         }
10693         if (tree->OperIsBlkOp())
10694         {
10695             if (tree->OperIsCopyBlkOp())
10696             {
10697                 printf(" (copy)");
10698             }
10699             else if (tree->OperIsInitBlkOp())
10700             {
10701                 printf(" (init)");
10702             }
10703             if (tree->OperIsStoreBlk() && (tree->AsBlk()->gtBlkOpKind != GenTreeBlk::BlkOpKindInvalid))
10704             {
10705                 switch (tree->AsBlk()->gtBlkOpKind)
10706                 {
10707                     case GenTreeBlk::BlkOpKindRepInstr:
10708                         printf(" (RepInstr)");
10709                         break;
10710                     case GenTreeBlk::BlkOpKindUnroll:
10711                         printf(" (Unroll)");
10712                         break;
10713                     case GenTreeBlk::BlkOpKindHelper:
10714                         printf(" (Helper)");
10715                         break;
10716                     default:
10717                         unreached();
10718                 }
10719             }
10720         }
10721         else if (tree->OperIsFieldList())
10722         {
10723             printf(" %s at offset %d", varTypeName(tree->AsFieldList()->gtFieldType),
10724                    tree->AsFieldList()->gtFieldOffset);
10725         }
10726 #if FEATURE_PUT_STRUCT_ARG_STK
10727         else if (tree->OperGet() == GT_PUTARG_STK)
10728         {
10729             printf(" (%d slots)", tree->AsPutArgStk()->gtNumSlots);
10730             if (tree->AsPutArgStk()->gtPutArgStkKind != GenTreePutArgStk::Kind::Invalid)
10731             {
10732                 switch (tree->AsPutArgStk()->gtPutArgStkKind)
10733                 {
10734                     case GenTreePutArgStk::Kind::RepInstr:
10735                         printf(" (RepInstr)");
10736                         break;
10737                     case GenTreePutArgStk::Kind::Unroll:
10738                         printf(" (Unroll)");
10739                         break;
10740                     case GenTreePutArgStk::Kind::Push:
10741                         printf(" (Push)");
10742                         break;
10743                     case GenTreePutArgStk::Kind::PushAllSlots:
10744                         printf(" (PushAllSlots)");
10745                         break;
10746                     default:
10747                         unreached();
10748                 }
10749             }
10750         }
10751 #endif // FEATURE_PUT_STRUCT_ARG_STK
10752
10753         if (tree->gtOper == GT_INTRINSIC)
10754         {
10755             switch (tree->gtIntrinsic.gtIntrinsicId)
10756             {
10757                 case CORINFO_INTRINSIC_Sin:
10758                     printf(" sin");
10759                     break;
10760                 case CORINFO_INTRINSIC_Cos:
10761                     printf(" cos");
10762                     break;
10763                 case CORINFO_INTRINSIC_Cbrt:
10764                     printf(" cbrt");
10765                     break;
10766                 case CORINFO_INTRINSIC_Sqrt:
10767                     printf(" sqrt");
10768                     break;
10769                 case CORINFO_INTRINSIC_Abs:
10770                     printf(" abs");
10771                     break;
10772                 case CORINFO_INTRINSIC_Round:
10773                     printf(" round");
10774                     break;
10775                 case CORINFO_INTRINSIC_Cosh:
10776                     printf(" cosh");
10777                     break;
10778                 case CORINFO_INTRINSIC_Sinh:
10779                     printf(" sinh");
10780                     break;
10781                 case CORINFO_INTRINSIC_Tan:
10782                     printf(" tan");
10783                     break;
10784                 case CORINFO_INTRINSIC_Tanh:
10785                     printf(" tanh");
10786                     break;
10787                 case CORINFO_INTRINSIC_Asin:
10788                     printf(" asin");
10789                     break;
10790                 case CORINFO_INTRINSIC_Asinh:
10791                     printf(" asinh");
10792                     break;
10793                 case CORINFO_INTRINSIC_Acos:
10794                     printf(" acos");
10795                     break;
10796                 case CORINFO_INTRINSIC_Acosh:
10797                     printf(" acosh");
10798                     break;
10799                 case CORINFO_INTRINSIC_Atan:
10800                     printf(" atan");
10801                     break;
10802                 case CORINFO_INTRINSIC_Atan2:
10803                     printf(" atan2");
10804                     break;
10805                 case CORINFO_INTRINSIC_Atanh:
10806                     printf(" atanh");
10807                     break;
10808                 case CORINFO_INTRINSIC_Log10:
10809                     printf(" log10");
10810                     break;
10811                 case CORINFO_INTRINSIC_Pow:
10812                     printf(" pow");
10813                     break;
10814                 case CORINFO_INTRINSIC_Exp:
10815                     printf(" exp");
10816                     break;
10817                 case CORINFO_INTRINSIC_Ceiling:
10818                     printf(" ceiling");
10819                     break;
10820                 case CORINFO_INTRINSIC_Floor:
10821                     printf(" floor");
10822                     break;
10823                 case CORINFO_INTRINSIC_Object_GetType:
10824                     printf(" objGetType");
10825                     break;
10826
10827                 default:
10828                     unreached();
10829             }
10830         }
10831
10832 #ifdef FEATURE_SIMD
10833         if (tree->gtOper == GT_SIMD)
10834         {
10835             printf(" %s %s", varTypeName(tree->gtSIMD.gtSIMDBaseType),
10836                    simdIntrinsicNames[tree->gtSIMD.gtSIMDIntrinsicID]);
10837         }
10838 #endif // FEATURE_SIMD
10839
10840 #ifdef FEATURE_HW_INTRINSICS
10841         if (tree->gtOper == GT_HWIntrinsic)
10842         {
10843             printf(" %s %s",
10844                    tree->gtHWIntrinsic.gtSIMDBaseType == TYP_UNKNOWN ? ""
10845                                                                      : varTypeName(tree->gtHWIntrinsic.gtSIMDBaseType),
10846                    HWIntrinsicInfo::lookupName(tree->gtHWIntrinsic.gtHWIntrinsicId));
10847         }
10848 #endif // FEATURE_HW_INTRINSICS
10849
10850         gtDispCommonEndLine(tree);
10851
10852         if (!topOnly && tree->gtOp.gtOp1)
10853         {
10854
10855             // Label the child of the GT_COLON operator
10856             // op1 is the else part
10857
10858             if (tree->gtOper == GT_COLON)
10859             {
10860                 childMsg = "else";
10861             }
10862             else if (tree->gtOper == GT_QMARK)
10863             {
10864                 childMsg = "   if";
10865             }
10866             gtDispChild(tree->gtOp.gtOp1, indentStack, IIArcBottom, childMsg, topOnly);
10867         }
10868
10869         return;
10870     }
10871
10872     // Now, get the right type of arc for this node
10873     if (myArc != IINone)
10874     {
10875         indentStack->Pop();
10876         indentStack->Push(myArc);
10877     }
10878     gtDispNode(tree, indentStack, msg, isLIR);
10879
10880     // Propagate lowerArc to the lower children.
10881     if (indentStack->Depth() > 0)
10882     {
10883         (void)indentStack->Pop();
10884         indentStack->Push(lowerArc);
10885     }
10886
10887     // See what kind of a special operator we have here, and handle its special children.
10888
10889     switch (tree->gtOper)
10890     {
10891         case GT_FIELD:
10892             if (FieldSeqStore::IsPseudoField(tree->gtField.gtFldHnd))
10893             {
10894                 printf(" #PseudoField:0x%x", tree->gtField.gtFldOffset);
10895             }
10896             else
10897             {
10898                 printf(" %s", eeGetFieldName(tree->gtField.gtFldHnd), 0);
10899             }
10900
10901             gtDispCommonEndLine(tree);
10902
10903             if (tree->gtField.gtFldObj && !topOnly)
10904             {
10905                 gtDispChild(tree->gtField.gtFldObj, indentStack, IIArcBottom);
10906             }
10907
10908             break;
10909
10910         case GT_CALL:
10911         {
10912             GenTreeCall* call = tree->AsCall();
10913             assert(call->gtFlags & GTF_CALL);
10914             unsigned numChildren = call->NumChildren();
10915             GenTree* lastChild   = nullptr;
10916             if (numChildren != 0)
10917             {
10918                 lastChild = call->GetChild(numChildren - 1);
10919             }
10920
10921             if (call->gtCallType != CT_INDIRECT)
10922             {
10923                 const char* methodName;
10924                 const char* className;
10925
10926                 methodName = eeGetMethodName(call->gtCallMethHnd, &className);
10927
10928                 printf(" %s.%s", className, methodName);
10929             }
10930
10931             if ((call->gtFlags & GTF_CALL_UNMANAGED) && (call->gtCallMoreFlags & GTF_CALL_M_FRAME_VAR_DEATH))
10932             {
10933                 printf(" (FramesRoot last use)");
10934             }
10935
10936             if (((call->gtFlags & GTF_CALL_INLINE_CANDIDATE) != 0) && (call->gtInlineCandidateInfo != nullptr) &&
10937                 (call->gtInlineCandidateInfo->exactContextHnd != nullptr))
10938             {
10939                 printf(" (exactContextHnd=0x%p)", dspPtr(call->gtInlineCandidateInfo->exactContextHnd));
10940             }
10941
10942             gtDispCommonEndLine(tree);
10943
10944             if (!topOnly)
10945             {
10946                 char  buf[64];
10947                 char* bufp;
10948
10949                 bufp = &buf[0];
10950
10951                 if ((call->gtCallObjp != nullptr) && (call->gtCallObjp->gtOper != GT_NOP) &&
10952                     (!call->gtCallObjp->IsArgPlaceHolderNode()))
10953                 {
10954                     if (call->gtCallObjp->gtOper == GT_ASG)
10955                     {
10956                         sprintf_s(bufp, sizeof(buf), "this SETUP%c", 0);
10957                     }
10958                     else
10959                     {
10960                         sprintf_s(bufp, sizeof(buf), "this in %s%c", compRegVarName(REG_ARG_0), 0);
10961                     }
10962                     gtDispChild(call->gtCallObjp, indentStack, (call->gtCallObjp == lastChild) ? IIArcBottom : IIArc,
10963                                 bufp, topOnly);
10964                 }
10965
10966                 if (call->gtCallArgs)
10967                 {
10968                     gtDispArgList(call, indentStack);
10969                 }
10970
10971                 if (call->gtCallType == CT_INDIRECT)
10972                 {
10973                     gtDispChild(call->gtCallAddr, indentStack, (call->gtCallAddr == lastChild) ? IIArcBottom : IIArc,
10974                                 "calli tgt", topOnly);
10975                 }
10976
10977                 if (call->gtControlExpr != nullptr)
10978                 {
10979                     gtDispChild(call->gtControlExpr, indentStack,
10980                                 (call->gtControlExpr == lastChild) ? IIArcBottom : IIArc, "control expr", topOnly);
10981                 }
10982
10983 #if !FEATURE_FIXED_OUT_ARGS
10984                 regList list = call->regArgList;
10985 #endif
10986                 /* process the late argument list */
10987                 int lateArgIndex = 0;
10988                 for (GenTreeArgList* lateArgs = call->gtCallLateArgs; lateArgs;
10989                      (lateArgIndex++, lateArgs = lateArgs->Rest()))
10990                 {
10991                     GenTree* argx;
10992
10993                     argx = lateArgs->Current();
10994
10995                     IndentInfo arcType = (lateArgs->Rest() == nullptr) ? IIArcBottom : IIArc;
10996                     gtGetLateArgMsg(call, argx, lateArgIndex, -1, bufp, sizeof(buf));
10997                     gtDispChild(argx, indentStack, arcType, bufp, topOnly);
10998                 }
10999             }
11000         }
11001         break;
11002
11003         case GT_STMT:
11004             printf("\n");
11005
11006             if (!topOnly)
11007             {
11008                 gtDispChild(tree->gtStmt.gtStmtExpr, indentStack, IIArcBottom);
11009             }
11010             break;
11011
11012         case GT_ARR_ELEM:
11013             gtDispCommonEndLine(tree);
11014
11015             if (!topOnly)
11016             {
11017                 gtDispChild(tree->gtArrElem.gtArrObj, indentStack, IIArc, nullptr, topOnly);
11018
11019                 unsigned dim;
11020                 for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
11021                 {
11022                     IndentInfo arcType = ((dim + 1) == tree->gtArrElem.gtArrRank) ? IIArcBottom : IIArc;
11023                     gtDispChild(tree->gtArrElem.gtArrInds[dim], indentStack, arcType, nullptr, topOnly);
11024                 }
11025             }
11026             break;
11027
11028         case GT_ARR_OFFSET:
11029             gtDispCommonEndLine(tree);
11030
11031             if (!topOnly)
11032             {
11033                 gtDispChild(tree->gtArrOffs.gtOffset, indentStack, IIArc, nullptr, topOnly);
11034                 gtDispChild(tree->gtArrOffs.gtIndex, indentStack, IIArc, nullptr, topOnly);
11035                 gtDispChild(tree->gtArrOffs.gtArrObj, indentStack, IIArcBottom, nullptr, topOnly);
11036             }
11037             break;
11038
11039         case GT_CMPXCHG:
11040             gtDispCommonEndLine(tree);
11041
11042             if (!topOnly)
11043             {
11044                 gtDispChild(tree->gtCmpXchg.gtOpLocation, indentStack, IIArc, nullptr, topOnly);
11045                 gtDispChild(tree->gtCmpXchg.gtOpValue, indentStack, IIArc, nullptr, topOnly);
11046                 gtDispChild(tree->gtCmpXchg.gtOpComparand, indentStack, IIArcBottom, nullptr, topOnly);
11047             }
11048             break;
11049
11050         case GT_ARR_BOUNDS_CHECK:
11051 #ifdef FEATURE_SIMD
11052         case GT_SIMD_CHK:
11053 #endif // FEATURE_SIMD
11054 #ifdef FEATURE_HW_INTRINSICS
11055         case GT_HW_INTRINSIC_CHK:
11056 #endif // FEATURE_HW_INTRINSICS
11057             gtDispCommonEndLine(tree);
11058
11059             if (!topOnly)
11060             {
11061                 gtDispChild(tree->gtBoundsChk.gtIndex, indentStack, IIArc, nullptr, topOnly);
11062                 gtDispChild(tree->gtBoundsChk.gtArrLen, indentStack, IIArcBottom, nullptr, topOnly);
11063             }
11064             break;
11065
11066         case GT_STORE_DYN_BLK:
11067         case GT_DYN_BLK:
11068             if (tree->OperIsCopyBlkOp())
11069             {
11070                 printf(" (copy)");
11071             }
11072             else if (tree->OperIsInitBlkOp())
11073             {
11074                 printf(" (init)");
11075             }
11076             gtDispCommonEndLine(tree);
11077
11078             if (!topOnly)
11079             {
11080                 if (tree->gtDynBlk.Data() != nullptr)
11081                 {
11082                     gtDispChild(tree->gtDynBlk.Data(), indentStack, IIArc, nullptr, topOnly);
11083                 }
11084                 gtDispChild(tree->gtDynBlk.Addr(), indentStack, IIArc, nullptr, topOnly);
11085                 gtDispChild(tree->gtDynBlk.gtDynamicSize, indentStack, IIArcBottom, nullptr, topOnly);
11086             }
11087             break;
11088
11089         default:
11090             printf("<DON'T KNOW HOW TO DISPLAY THIS NODE> :");
11091             printf(""); // null string means flush
11092             break;
11093     }
11094 }
11095
11096 //------------------------------------------------------------------------
11097 // gtGetArgMsg: Construct a message about the given argument
11098 //
11099 // Arguments:
11100 //    call      - The call for which 'arg' is an argument
11101 //    arg       - The argument for which a message should be constructed
11102 //    argNum    - The ordinal number of the arg in the argument list
11103 //    listCount - When printing in LIR form this is the count for a GT_FIELD_LIST
11104 //                or -1 if we are not printing in LIR form
11105 //    bufp      - A pointer to the buffer into which the message is written
11106 //    bufLength - The length of the buffer pointed to by bufp
11107 //
11108 // Return Value:
11109 //    No return value, but bufp is written.
11110 //
11111 // Assumptions:
11112 //    'call' must be a call node
11113 //    'arg' must be an argument to 'call' (else gtArgEntryByNode will assert)
11114
11115 void Compiler::gtGetArgMsg(
11116     GenTreeCall* call, GenTree* arg, unsigned argNum, int listCount, char* bufp, unsigned bufLength)
11117 {
11118     if (call->gtCallLateArgs != nullptr)
11119     {
11120         fgArgTabEntry* curArgTabEntry = gtArgEntryByArgNum(call, argNum);
11121         assert(curArgTabEntry);
11122
11123         if (arg->gtFlags & GTF_LATE_ARG)
11124         {
11125             sprintf_s(bufp, bufLength, "arg%d SETUP%c", argNum, 0);
11126         }
11127         else
11128         {
11129 #ifdef _TARGET_ARM_
11130             if (curArgTabEntry->isSplit)
11131             {
11132                 regNumber firstReg = curArgTabEntry->regNum;
11133                 if (listCount == -1)
11134                 {
11135                     if (curArgTabEntry->numRegs == 1)
11136                     {
11137                         sprintf_s(bufp, bufLength, "arg%d %s out+%02x%c", argNum, compRegVarName(firstReg),
11138                                   (curArgTabEntry->slotNum) * TARGET_POINTER_SIZE, 0);
11139                     }
11140                     else
11141                     {
11142                         regNumber lastReg   = REG_STK;
11143                         char      separator = (curArgTabEntry->numRegs == 2) ? ',' : '-';
11144                         if (curArgTabEntry->isHfaRegArg)
11145                         {
11146                             unsigned lastRegNum = genMapFloatRegNumToRegArgNum(firstReg) + curArgTabEntry->numRegs - 1;
11147                             lastReg             = genMapFloatRegArgNumToRegNum(lastRegNum);
11148                         }
11149                         else
11150                         {
11151                             unsigned lastRegNum = genMapIntRegNumToRegArgNum(firstReg) + curArgTabEntry->numRegs - 1;
11152                             lastReg             = genMapIntRegArgNumToRegNum(lastRegNum);
11153                         }
11154                         sprintf_s(bufp, bufLength, "arg%d %s%c%s out+%02x%c", argNum, compRegVarName(firstReg),
11155                                   separator, compRegVarName(lastReg), (curArgTabEntry->slotNum) * TARGET_POINTER_SIZE,
11156                                   0);
11157                     }
11158                 }
11159                 else
11160                 {
11161                     unsigned curArgNum = BAD_VAR_NUM;
11162                     bool     isFloat   = curArgTabEntry->isHfaRegArg;
11163                     if (isFloat)
11164                     {
11165                         curArgNum = genMapFloatRegNumToRegArgNum(firstReg) + listCount;
11166                     }
11167                     else
11168                     {
11169                         curArgNum = genMapIntRegNumToRegArgNum(firstReg) + listCount;
11170                     }
11171
11172                     if (!isFloat && curArgNum < MAX_REG_ARG)
11173                     {
11174                         regNumber curReg = genMapIntRegArgNumToRegNum(curArgNum);
11175                         sprintf_s(bufp, bufLength, "arg%d m%d %s%c", argNum, listCount, compRegVarName(curReg), 0);
11176                     }
11177                     else if (isFloat && curArgNum < MAX_FLOAT_REG_ARG)
11178                     {
11179                         regNumber curReg = genMapFloatRegArgNumToRegNum(curArgNum);
11180                         sprintf_s(bufp, bufLength, "arg%d m%d %s%c", argNum, listCount, compRegVarName(curReg), 0);
11181                     }
11182                     else
11183                     {
11184                         unsigned stackSlot = listCount - curArgTabEntry->numRegs;
11185                         sprintf_s(bufp, bufLength, "arg%d m%d out+%02x%c", argNum, listCount,
11186                                   stackSlot * TARGET_POINTER_SIZE, 0);
11187                     }
11188                 }
11189                 return;
11190             }
11191 #endif // _TARGET_ARM_
11192 #if FEATURE_FIXED_OUT_ARGS
11193             if (listCount == -1)
11194             {
11195                 sprintf_s(bufp, bufLength, "arg%d out+%02x%c", argNum, curArgTabEntry->slotNum * TARGET_POINTER_SIZE,
11196                           0);
11197             }
11198             else // listCount is 0,1,2 or 3
11199             {
11200                 assert(listCount <= MAX_ARG_REG_COUNT);
11201                 sprintf_s(bufp, bufLength, "arg%d out+%02x%c", argNum,
11202                           (curArgTabEntry->slotNum + listCount) * TARGET_POINTER_SIZE, 0);
11203             }
11204 #else
11205             sprintf_s(bufp, bufLength, "arg%d on STK%c", argNum, 0);
11206 #endif
11207         }
11208     }
11209     else
11210     {
11211         sprintf_s(bufp, bufLength, "arg%d%c", argNum, 0);
11212     }
11213 }
11214
11215 //------------------------------------------------------------------------
11216 // gtGetLateArgMsg: Construct a message about the given argument
11217 //
11218 // Arguments:
11219 //    call         - The call for which 'arg' is an argument
11220 //    argx         - The argument for which a message should be constructed
//    lateArgIndex - The ordinal number of the arg in the lateArgs list
11222 //    listCount    - When printing in LIR form this is the count for a multireg GT_FIELD_LIST
11223 //                   or -1 if we are not printing in LIR form
11224 //    bufp         - A pointer to the buffer into which the message is written
11225 //    bufLength    - The length of the buffer pointed to by bufp
11226 //
11227 // Return Value:
11228 //    No return value, but bufp is written.
11229 //
11230 // Assumptions:
11231 //    'call' must be a call node
//    'argx' must be an argument to 'call' (else gtArgEntryByLateArgIndex will assert)
11233
11234 void Compiler::gtGetLateArgMsg(
11235     GenTreeCall* call, GenTree* argx, int lateArgIndex, int listCount, char* bufp, unsigned bufLength)
11236 {
11237     assert(!argx->IsArgPlaceHolderNode()); // No place holders nodes are in gtCallLateArgs;
11238
11239     fgArgTabEntry* curArgTabEntry = gtArgEntryByLateArgIndex(call, lateArgIndex);
11240     assert(curArgTabEntry);
11241     regNumber argReg = curArgTabEntry->regNum;
11242
11243 #if !FEATURE_FIXED_OUT_ARGS
11244     assert(lateArgIndex < call->regArgListCount);
11245     assert(argReg == call->regArgList[lateArgIndex]);
11246 #else
11247     if (argReg == REG_STK)
11248     {
11249         sprintf_s(bufp, bufLength, "arg%d in out+%02x%c", curArgTabEntry->argNum,
11250                   curArgTabEntry->slotNum * TARGET_POINTER_SIZE, 0);
11251     }
11252     else
11253 #endif
11254     {
11255         if (gtArgIsThisPtr(curArgTabEntry))
11256         {
11257             sprintf_s(bufp, bufLength, "this in %s%c", compRegVarName(argReg), 0);
11258         }
11259 #ifdef _TARGET_ARM_
11260         else if (curArgTabEntry->isSplit)
11261         {
11262             regNumber firstReg = curArgTabEntry->regNum;
11263             unsigned  argNum   = curArgTabEntry->argNum;
11264             if (listCount == -1)
11265             {
11266                 if (curArgTabEntry->numRegs == 1)
11267                 {
11268                     sprintf_s(bufp, bufLength, "arg%d %s out+%02x%c", argNum, compRegVarName(firstReg),
11269                               (curArgTabEntry->slotNum) * TARGET_POINTER_SIZE, 0);
11270                 }
11271                 else
11272                 {
11273                     regNumber lastReg   = REG_STK;
11274                     char      separator = (curArgTabEntry->numRegs == 2) ? ',' : '-';
11275                     if (curArgTabEntry->isHfaRegArg)
11276                     {
11277                         unsigned lastRegNum = genMapFloatRegNumToRegArgNum(firstReg) + curArgTabEntry->numRegs - 1;
11278                         lastReg             = genMapFloatRegArgNumToRegNum(lastRegNum);
11279                     }
11280                     else
11281                     {
11282                         unsigned lastRegNum = genMapIntRegNumToRegArgNum(firstReg) + curArgTabEntry->numRegs - 1;
11283                         lastReg             = genMapIntRegArgNumToRegNum(lastRegNum);
11284                     }
11285                     sprintf_s(bufp, bufLength, "arg%d %s%c%s out+%02x%c", argNum, compRegVarName(firstReg), separator,
11286                               compRegVarName(lastReg), (curArgTabEntry->slotNum) * TARGET_POINTER_SIZE, 0);
11287                 }
11288             }
11289             else
11290             {
11291                 unsigned curArgNum = BAD_VAR_NUM;
11292                 bool     isFloat   = curArgTabEntry->isHfaRegArg;
11293                 if (isFloat)
11294                 {
11295                     curArgNum = genMapFloatRegNumToRegArgNum(firstReg) + listCount;
11296                 }
11297                 else
11298                 {
11299                     curArgNum = genMapIntRegNumToRegArgNum(firstReg) + listCount;
11300                 }
11301
11302                 if (!isFloat && curArgNum < MAX_REG_ARG)
11303                 {
11304                     regNumber curReg = genMapIntRegArgNumToRegNum(curArgNum);
11305                     sprintf_s(bufp, bufLength, "arg%d m%d %s%c", argNum, listCount, compRegVarName(curReg), 0);
11306                 }
11307                 else if (isFloat && curArgNum < MAX_FLOAT_REG_ARG)
11308                 {
11309                     regNumber curReg = genMapFloatRegArgNumToRegNum(curArgNum);
11310                     sprintf_s(bufp, bufLength, "arg%d m%d %s%c", argNum, listCount, compRegVarName(curReg), 0);
11311                 }
11312                 else
11313                 {
11314                     unsigned stackSlot = listCount - curArgTabEntry->numRegs;
11315                     sprintf_s(bufp, bufLength, "arg%d m%d out+%02x%c", argNum, listCount,
11316                               stackSlot * TARGET_POINTER_SIZE, 0);
11317                 }
11318             }
11319             return;
11320         }
11321 #endif // _TARGET_ARM_
11322         else
11323         {
11324 #if FEATURE_MULTIREG_ARGS
11325             if (curArgTabEntry->numRegs >= 2)
11326             {
11327                 // listCount could be -1 but it is signed, so this comparison is OK.
11328                 assert(listCount <= MAX_ARG_REG_COUNT);
11329                 char separator = (curArgTabEntry->numRegs == 2) ? ',' : '-';
11330                 sprintf_s(bufp, bufLength, "arg%d %s%c%s%c", curArgTabEntry->argNum, compRegVarName(argReg), separator,
11331                           compRegVarName(curArgTabEntry->getRegNum(curArgTabEntry->numRegs - 1)), 0);
11332             }
11333             else
11334 #endif
11335             {
11336                 sprintf_s(bufp, bufLength, "arg%d in %s%c", curArgTabEntry->argNum, compRegVarName(argReg), 0);
11337             }
11338         }
11339     }
11340 }
11341
11342 //------------------------------------------------------------------------
11343 // gtDispArgList: Dump the tree for a call arg list
11344 //
11345 // Arguments:
11346 //    call         - The call to dump arguments for
11347 //    indentStack  - the specification for the current level of indentation & arcs
11348 //
11349 // Return Value:
11350 //    None.
11351 //
11352 void Compiler::gtDispArgList(GenTreeCall* call, IndentStack* indentStack)
11353 {
11354     GenTree*  args      = call->gtCallArgs;
11355     unsigned  argnum    = 0;
11356     const int BufLength = 256;
11357     char      buf[BufLength];
11358     char*     bufp        = &buf[0];
11359     unsigned  numChildren = call->NumChildren();
11360     assert(numChildren != 0);
11361     bool argListIsLastChild = (args == call->GetChild(numChildren - 1));
11362
11363     IndentInfo arcType = IIArc;
11364     if (call->gtCallObjp != nullptr)
11365     {
11366         argnum++;
11367     }
11368
11369     while (args != nullptr)
11370     {
11371         assert(args->gtOper == GT_LIST);
11372         GenTree* arg = args->gtOp.gtOp1;
11373         if (!arg->IsNothingNode() && !arg->IsArgPlaceHolderNode())
11374         {
11375             gtGetArgMsg(call, arg, argnum, -1, bufp, BufLength);
11376             if (argListIsLastChild && (args->gtOp.gtOp2 == nullptr))
11377             {
11378                 arcType = IIArcBottom;
11379             }
11380             gtDispChild(arg, indentStack, arcType, bufp, false);
11381         }
11382         args = args->gtOp.gtOp2;
11383         argnum++;
11384     }
11385 }
11386
//------------------------------------------------------------------------
// gtDispTreeList: Dump a list of trees linked via their gtNext pointers
//
// Arguments:
//    tree         - The first tree in the gtNext-linked list to dump
//    indentStack  - the specification for the current level of indentation & arcs
//
// Return Value:
//    None.

void Compiler::gtDispTreeList(GenTree* tree, IndentStack* indentStack /* = nullptr */)
{
    // Dump each tree in the chain, separated by a blank line.
    for (/*--*/; tree != nullptr; tree = tree->gtNext)
    {
        gtDispTree(tree, indentStack);
        printf("\n");
    }
}
11408
11409 //------------------------------------------------------------------------
11410 // Compiler::gtDispRange: dumps a range of LIR.
11411 //
11412 // Arguments:
11413 //    range - the range of LIR to display.
11414 //
11415 void Compiler::gtDispRange(LIR::ReadOnlyRange const& range)
11416 {
11417     for (GenTree* node : range)
11418     {
11419         gtDispLIRNode(node);
11420     }
11421 }
11422
11423 //------------------------------------------------------------------------
11424 // Compiler::gtDispTreeRange: dumps the LIR range that contains all of the
11425 //                            nodes in the dataflow tree rooted at a given
11426 //                            node.
11427 //
11428 // Arguments:
11429 //    containingRange - the LIR range that contains the root node.
11430 //    tree - the root of the dataflow tree.
11431 //
11432 void Compiler::gtDispTreeRange(LIR::Range& containingRange, GenTree* tree)
11433 {
11434     bool unused;
11435     gtDispRange(containingRange.GetTreeRange(tree, &unused));
11436 }
11437
11438 //------------------------------------------------------------------------
11439 // Compiler::gtDispLIRNode: dumps a single LIR node.
11440 //
11441 // Arguments:
11442 //    node - the LIR node to dump.
11443 //    prefixMsg - an optional prefix for each line of output.
11444 //
11445 void Compiler::gtDispLIRNode(GenTree* node, const char* prefixMsg /* = nullptr */)
11446 {
11447     auto displayOperand = [](GenTree* operand, const char* message, IndentInfo operandArc, IndentStack& indentStack,
11448                              size_t prefixIndent) {
11449         assert(operand != nullptr);
11450         assert(message != nullptr);
11451
11452         if (prefixIndent != 0)
11453         {
11454             printf("%*s", (int)prefixIndent, "");
11455         }
11456
11457         // 49 spaces for alignment
11458         printf("%-49s", "");
11459 #if FEATURE_SET_FLAGS
11460         // additional flag enlarges the flag field by one character
11461         printf(" ");
11462 #endif
11463
11464         indentStack.Push(operandArc);
11465         indentStack.print();
11466         indentStack.Pop();
11467         operandArc = IIArc;
11468
11469         printf("  t%-5d %-6s %s\n", operand->gtTreeID, varTypeName(operand->TypeGet()), message);
11470     };
11471
11472     IndentStack indentStack(this);
11473
11474     size_t prefixIndent = 0;
11475     if (prefixMsg != nullptr)
11476     {
11477         prefixIndent = strlen(prefixMsg);
11478     }
11479
11480     const int bufLength = 256;
11481     char      buf[bufLength];
11482
11483     const bool nodeIsCall = node->IsCall();
11484
11485     // Visit operands
11486     IndentInfo operandArc = IIArcTop;
11487     for (GenTree* operand : node->Operands())
11488     {
11489         if (operand->IsArgPlaceHolderNode() || !operand->IsValue())
11490         {
11491             // Either of these situations may happen with calls.
11492             continue;
11493         }
11494
11495         if (nodeIsCall)
11496         {
11497             GenTreeCall* call = node->AsCall();
11498             if (operand == call->gtCallObjp)
11499             {
11500                 sprintf_s(buf, sizeof(buf), "this in %s", compRegVarName(REG_ARG_0));
11501                 displayOperand(operand, buf, operandArc, indentStack, prefixIndent);
11502             }
11503             else if (operand == call->gtCallAddr)
11504             {
11505                 displayOperand(operand, "calli tgt", operandArc, indentStack, prefixIndent);
11506             }
11507             else if (operand == call->gtControlExpr)
11508             {
11509                 displayOperand(operand, "control expr", operandArc, indentStack, prefixIndent);
11510             }
11511             else if (operand == call->gtCallCookie)
11512             {
11513                 displayOperand(operand, "cookie", operandArc, indentStack, prefixIndent);
11514             }
11515             else
11516             {
11517                 fgArgTabEntry* curArgTabEntry = gtArgEntryByNode(call, operand);
11518                 assert(curArgTabEntry);
11519
11520                 if (operand->OperGet() == GT_LIST)
11521                 {
11522                     int listIndex = 0;
11523                     for (GenTreeArgList* element = operand->AsArgList(); element != nullptr; element = element->Rest())
11524                     {
11525                         operand = element->Current();
11526                         if (curArgTabEntry->lateArgInx == (unsigned)-1)
11527                         {
11528                             gtGetArgMsg(call, operand, curArgTabEntry->argNum, listIndex, buf, sizeof(buf));
11529                         }
11530                         else
11531                         {
11532                             gtGetLateArgMsg(call, operand, curArgTabEntry->lateArgInx, listIndex, buf, sizeof(buf));
11533                         }
11534
11535                         displayOperand(operand, buf, operandArc, indentStack, prefixIndent);
11536                         operandArc = IIArc;
11537                     }
11538                 }
11539                 else
11540                 {
11541                     if (!curArgTabEntry->isLateArg())
11542                     {
11543                         gtGetArgMsg(call, operand, curArgTabEntry->argNum, -1, buf, sizeof(buf));
11544                     }
11545                     else
11546                     {
11547                         gtGetLateArgMsg(call, operand, curArgTabEntry->lateArgInx, -1, buf, sizeof(buf));
11548                     }
11549
11550                     displayOperand(operand, buf, operandArc, indentStack, prefixIndent);
11551                 }
11552             }
11553         }
11554         else if (node->OperIsDynBlkOp())
11555         {
11556             if (operand == node->AsBlk()->Addr())
11557             {
11558                 displayOperand(operand, "lhs", operandArc, indentStack, prefixIndent);
11559             }
11560             else if (operand == node->AsBlk()->Data())
11561             {
11562                 displayOperand(operand, "rhs", operandArc, indentStack, prefixIndent);
11563             }
11564             else
11565             {
11566                 assert(operand == node->AsDynBlk()->gtDynamicSize);
11567                 displayOperand(operand, "size", operandArc, indentStack, prefixIndent);
11568             }
11569         }
11570         else if (node->OperGet() == GT_DYN_BLK)
11571         {
11572             if (operand == node->AsBlk()->Addr())
11573             {
11574                 displayOperand(operand, "lhs", operandArc, indentStack, prefixIndent);
11575             }
11576             else
11577             {
11578                 assert(operand == node->AsDynBlk()->gtDynamicSize);
11579                 displayOperand(operand, "size", operandArc, indentStack, prefixIndent);
11580             }
11581         }
11582         else if (node->OperIs(GT_ASG))
11583         {
11584             if (operand == node->gtGetOp1())
11585             {
11586                 displayOperand(operand, "lhs", operandArc, indentStack, prefixIndent);
11587             }
11588             else
11589             {
11590                 displayOperand(operand, "rhs", operandArc, indentStack, prefixIndent);
11591             }
11592         }
11593         else
11594         {
11595             displayOperand(operand, "", operandArc, indentStack, prefixIndent);
11596         }
11597
11598         operandArc = IIArc;
11599     }
11600
11601     // Visit the operator
11602
11603     if (prefixMsg != nullptr)
11604     {
11605         printf("%s", prefixMsg);
11606     }
11607
11608     const bool topOnly = true;
11609     const bool isLIR   = true;
11610     gtDispTree(node, &indentStack, nullptr, topOnly, isLIR);
11611 }
11612
11613 /*****************************************************************************/
11614 #endif // DEBUG
11615
/*****************************************************************************
 *
 *  Check if the given node can be folded,
 *  and call the methods to perform the folding
 *
 *  Returns the folded tree, or the original tree if no folding was done.
 */

GenTree* Compiler::gtFoldExpr(GenTree* tree)
{
    unsigned kind = tree->OperKind();

    /* We must have a simple operation to fold */

    // If we're in CSE, it's not safe to perform tree
    // folding given that it will potentially
    // change considered CSE candidates.
    if (optValnumCSE_phase)
    {
        return tree;
    }

    if (!(kind & GTK_SMPOP))
    {
        return tree;
    }

    GenTree* op1 = tree->gtOp.gtOp1;

    /* Filter out non-foldable trees that can have constant children */

    assert(kind & (GTK_UNOP | GTK_BINOP));
    switch (tree->gtOper)
    {
        case GT_RETFILT:
        case GT_RETURN:
        case GT_IND:
            return tree;
        default:
            break;
    }

    /* try to fold the current node */

    if ((kind & GTK_UNOP) && op1)
    {
        if (op1->OperKind() & GTK_CONST)
        {
            return gtFoldExprConst(tree);
        }
    }
    else if ((kind & GTK_BINOP) && op1 && tree->gtOp.gtOp2 &&
             // Don't take out conditionals for debugging
             (opts.OptimizationEnabled() || !tree->OperIsCompare()))
    {
        GenTree* op2 = tree->gtOp.gtOp2;

        // The atomic operations are exempted here because they are never computable statically;
        // one of their arguments is an address.
        if (((op1->OperKind() & op2->OperKind()) & GTK_CONST) && !tree->OperIsAtomicOp())
        {
            /* both nodes are constants - fold the expression */
            return gtFoldExprConst(tree);
        }
        else if ((op1->OperKind() | op2->OperKind()) & GTK_CONST)
        {
            /* at least one is a constant - see if we have a
             * special operator that can use only one constant
             * to fold - e.g. booleans */

            return gtFoldExprSpecial(tree);
        }
        else if (tree->OperIsCompare())
        {
            /* comparisons of two local variables can sometimes be folded */

            return gtFoldExprCompare(tree);
        }
        else if (op2->OperGet() == GT_COLON)
        {
            // A GT_COLON can only appear as the second operand of a GT_QMARK.
            assert(tree->OperGet() == GT_QMARK);

            GenTree* colon_op1 = op2->gtOp.gtOp1;
            GenTree* colon_op2 = op2->gtOp.gtOp2;

            if (gtCompareTree(colon_op1, colon_op2))
            {
                // Both sides of the GT_COLON are the same tree

                GenTree* sideEffList = nullptr;
                gtExtractSideEffList(op1, &sideEffList);

                // Clear colon flags only if the qmark itself is not conditionally executed
                if ((tree->gtFlags & GTF_COLON_COND) == 0)
                {
                    fgWalkTreePre(&colon_op2, gtClearColonCond);
                }

                if (sideEffList == nullptr)
                {
                    // No side-effects, just return colon_op2
                    return colon_op2;
                }
                else
                {
#ifdef DEBUG
                    if (verbose)
                    {
                        printf("\nIdentical GT_COLON trees with side effects! Extracting side effects...\n");
                        gtDispTree(sideEffList);
                        printf("\n");
                    }
#endif
                    // Change the GT_COLON into a GT_COMMA node with the side-effects
                    op2->ChangeOper(GT_COMMA);
                    op2->gtFlags |= (sideEffList->gtFlags & GTF_ALL_EFFECT);
                    op2->gtOp.gtOp1 = sideEffList;
                    return op2;
                }
            }
        }
    }

    /* Return the original node (folded/bashed or not) */

    return tree;
}
11741
11742 //------------------------------------------------------------------------
11743 // gtFoldExprCall: see if a call is foldable
11744 //
11745 // Arguments:
11746 //    call - call to examine
11747 //
11748 // Returns:
11749 //    The original call if no folding happened.
11750 //    An alternative tree if folding happens.
11751 //
11752 // Notes:
11753 //    Checks for calls to Type.op_Equality, Type.op_Inequality, and
11754 //    Enum.HasFlag, and if the call is to one of these,
11755 //    attempts to optimize.
11756
11757 GenTree* Compiler::gtFoldExprCall(GenTreeCall* call)
11758 {
11759     // Can only fold calls to special intrinsics.
11760     if ((call->gtCallMoreFlags & GTF_CALL_M_SPECIAL_INTRINSIC) == 0)
11761     {
11762         return call;
11763     }
11764
11765     // Defer folding if not optimizing.
11766     if (opts.OptimizationDisabled())
11767     {
11768         return call;
11769     }
11770
11771     // Fetch id of the intrinsic.
11772     const CorInfoIntrinsics methodID = info.compCompHnd->getIntrinsicID(call->gtCallMethHnd);
11773
11774     switch (methodID)
11775     {
11776         case CORINFO_INTRINSIC_TypeEQ:
11777         case CORINFO_INTRINSIC_TypeNEQ:
11778         {
11779             noway_assert(call->TypeGet() == TYP_INT);
11780             GenTree* op1 = call->gtCallArgs->gtOp.gtOp1;
11781             GenTree* op2 = call->gtCallArgs->gtOp.gtOp2->gtOp.gtOp1;
11782
11783             // If either operand is known to be a RuntimeType, this can be folded
11784             GenTree* result = gtFoldTypeEqualityCall(methodID, op1, op2);
11785             if (result != nullptr)
11786             {
11787                 return result;
11788             }
11789             break;
11790         }
11791
11792         default:
11793             break;
11794     }
11795
11796     // Check for a new-style jit intrinsic.
11797     const NamedIntrinsic ni = lookupNamedIntrinsic(call->gtCallMethHnd);
11798
11799     if (ni == NI_System_Enum_HasFlag)
11800     {
11801         GenTree* thisOp = call->gtCallObjp;
11802         GenTree* flagOp = call->gtCallArgs->gtOp.gtOp1;
11803         GenTree* result = gtOptimizeEnumHasFlag(thisOp, flagOp);
11804
11805         if (result != nullptr)
11806         {
11807             return result;
11808         }
11809     }
11810
11811     return call;
11812 }
11813
11814 //------------------------------------------------------------------------
11815 // gtFoldTypeEqualityCall: see if a (potential) type equality call is foldable
11816 //
11817 // Arguments:
11818 //    methodID -- type equality intrinsic ID
11819 //    op1 -- first argument to call
11820 //    op2 -- second argument to call
11821 //
11822 // Returns:
11823 //    nulltpr if no folding happened.
11824 //    An alternative tree if folding happens.
11825 //
11826 // Notes:
11827 //    If either operand is known to be a a RuntimeType, then the type
11828 //    equality methods will simply check object identity and so we can
11829 //    fold the call into a simple compare of the call's operands.
11830
11831 GenTree* Compiler::gtFoldTypeEqualityCall(CorInfoIntrinsics methodID, GenTree* op1, GenTree* op2)
11832 {
11833     // The method must be be a type equality intrinsic
11834     assert(methodID == CORINFO_INTRINSIC_TypeEQ || methodID == CORINFO_INTRINSIC_TypeNEQ);
11835
11836     if ((gtGetTypeProducerKind(op1) == TPK_Unknown) && (gtGetTypeProducerKind(op2) == TPK_Unknown))
11837     {
11838         return nullptr;
11839     }
11840
11841     const genTreeOps simpleOp = (methodID == CORINFO_INTRINSIC_TypeEQ) ? GT_EQ : GT_NE;
11842
11843     JITDUMP("\nFolding call to Type:op_%s to a simple compare via %s\n",
11844             methodID == CORINFO_INTRINSIC_TypeEQ ? "Equality" : "Inequality", GenTree::OpName(simpleOp));
11845
11846     GenTree* compare = gtNewOperNode(simpleOp, TYP_INT, op1, op2);
11847
11848     return compare;
11849 }
11850
11851 /*****************************************************************************
11852  *
11853  *  Some comparisons can be folded:
11854  *
11855  *    locA        == locA
11856  *    classVarA   == classVarA
11857  *    locA + locB == locB + locA
11858  *
11859  */
11860
11861 GenTree* Compiler::gtFoldExprCompare(GenTree* tree)
11862 {
11863     GenTree* op1 = tree->gtOp.gtOp1;
11864     GenTree* op2 = tree->gtOp.gtOp2;
11865
11866     assert(tree->OperIsCompare());
11867
11868     /* Filter out cases that cannot be folded here */
11869
11870     /* Do not fold floats or doubles (e.g. NaN != Nan) */
11871
11872     if (varTypeIsFloating(op1->TypeGet()))
11873     {
11874         return tree;
11875     }
11876
11877     /* Currently we can only fold when the two subtrees exactly match */
11878
11879     if ((tree->gtFlags & GTF_SIDE_EFFECT) || GenTree::Compare(op1, op2, true) == false)
11880     {
11881         return tree; /* return unfolded tree */
11882     }
11883
11884     GenTree* cons;
11885
11886     switch (tree->gtOper)
11887     {
11888         case GT_EQ:
11889         case GT_LE:
11890         case GT_GE:
11891             cons = gtNewIconNode(true); /* Folds to GT_CNS_INT(true) */
11892             break;
11893
11894         case GT_NE:
11895         case GT_LT:
11896         case GT_GT:
11897             cons = gtNewIconNode(false); /* Folds to GT_CNS_INT(false) */
11898             break;
11899
11900         default:
11901             assert(!"Unexpected relOp");
11902             return tree;
11903     }
11904
11905     /* The node has beeen folded into 'cons' */
11906
11907     if (fgGlobalMorph)
11908     {
11909         fgMorphTreeDone(cons);
11910     }
11911     else
11912     {
11913         cons->gtNext = tree->gtNext;
11914         cons->gtPrev = tree->gtPrev;
11915     }
11916
11917     return cons;
11918 }
11919
11920 //------------------------------------------------------------------------
11921 // gtCreateHandleCompare: generate a type handle comparison
11922 //
11923 // Arguments:
11924 //    oper -- comparison operation (equal/not equal)
11925 //    op1 -- first operand
11926 //    op2 -- second operand
11927 //    typeCheckInliningResult -- indicates how the comparison should happen
11928 //
11929 // Returns:
11930 //    Type comparison tree
11931 //
11932
11933 GenTree* Compiler::gtCreateHandleCompare(genTreeOps             oper,
11934                                          GenTree*               op1,
11935                                          GenTree*               op2,
11936                                          CorInfoInlineTypeCheck typeCheckInliningResult)
11937 {
11938     // If we can compare pointers directly, just emit the binary operation
11939     if (typeCheckInliningResult == CORINFO_INLINE_TYPECHECK_PASS)
11940     {
11941         return gtNewOperNode(oper, TYP_INT, op1, op2);
11942     }
11943
11944     assert(typeCheckInliningResult == CORINFO_INLINE_TYPECHECK_USE_HELPER);
11945
11946     // Emit a call to a runtime helper
11947     GenTreeArgList* helperArgs = gtNewArgList(op1, op2);
11948     GenTree*        ret        = gtNewHelperCallNode(CORINFO_HELP_ARE_TYPES_EQUIVALENT, TYP_INT, helperArgs);
11949     if (oper == GT_EQ)
11950     {
11951         ret = gtNewOperNode(GT_NE, TYP_INT, ret, gtNewIconNode(0, TYP_INT));
11952     }
11953     else
11954     {
11955         assert(oper == GT_NE);
11956         ret = gtNewOperNode(GT_EQ, TYP_INT, ret, gtNewIconNode(0, TYP_INT));
11957     }
11958
11959     return ret;
11960 }
11961
11962 //------------------------------------------------------------------------
11963 // gtFoldTypeCompare: see if a type comparison can be further simplified
11964 //
11965 // Arguments:
11966 //    tree -- tree possibly comparing types
11967 //
11968 // Returns:
11969 //    An alternative tree if folding happens.
11970 //    Original tree otherwise.
11971 //
11972 // Notes:
11973 //    Checks for
11974 //        typeof(...) == obj.GetType()
11975 //        typeof(...) == typeof(...)
11976 //
11977 //    And potentially optimizes away the need to obtain actual
11978 //    RuntimeType objects to do the comparison.
11979
11980 GenTree* Compiler::gtFoldTypeCompare(GenTree* tree)
11981 {
11982     // Only handle EQ and NE
11983     // (maybe relop vs null someday)
11984     const genTreeOps oper = tree->OperGet();
11985     if ((oper != GT_EQ) && (oper != GT_NE))
11986     {
11987         return tree;
11988     }
11989
11990     // Screen for the right kinds of operands
11991     GenTree* const         op1     = tree->gtOp.gtOp1;
11992     const TypeProducerKind op1Kind = gtGetTypeProducerKind(op1);
11993     if (op1Kind == TPK_Unknown)
11994     {
11995         return tree;
11996     }
11997
11998     GenTree* const         op2     = tree->gtOp.gtOp2;
11999     const TypeProducerKind op2Kind = gtGetTypeProducerKind(op2);
12000     if (op2Kind == TPK_Unknown)
12001     {
12002         return tree;
12003     }
12004
12005     // We must have a handle on one side or the other here to optimize,
12006     // otherwise we can't be sure that optimizing is sound.
12007     const bool op1IsFromHandle = (op1Kind == TPK_Handle);
12008     const bool op2IsFromHandle = (op2Kind == TPK_Handle);
12009
12010     if (!(op1IsFromHandle || op2IsFromHandle))
12011     {
12012         return tree;
12013     }
12014
12015     // If both types are created via handles, we can simply compare
12016     // handles (or the indirection cells for handles) instead of the
12017     // types that they'd create.
12018     if (op1IsFromHandle && op2IsFromHandle)
12019     {
12020         JITDUMP("Optimizing compare of types-from-handles to instead compare handles\n");
12021         GenTree*             op1ClassFromHandle = tree->gtOp.gtOp1->gtCall.gtCallArgs->gtOp.gtOp1;
12022         GenTree*             op2ClassFromHandle = tree->gtOp.gtOp2->gtCall.gtCallArgs->gtOp.gtOp1;
12023         GenTree*             op1TunneledHandle  = nullptr;
12024         GenTree*             op2TunneledHandle  = nullptr;
12025         CORINFO_CLASS_HANDLE cls1Hnd            = NO_CLASS_HANDLE;
12026         CORINFO_CLASS_HANDLE cls2Hnd            = NO_CLASS_HANDLE;
12027         unsigned             runtimeLookupCount = 0;
12028
12029         // Try and find class handles from op1 and op2
12030         cls1Hnd = gtGetHelperArgClassHandle(op1ClassFromHandle, &runtimeLookupCount, &op1TunneledHandle);
12031         cls2Hnd = gtGetHelperArgClassHandle(op2ClassFromHandle, &runtimeLookupCount, &op2TunneledHandle);
12032
12033         // If we have both class handles, try and resolve the type equality test completely.
12034         bool resolveFailed = false;
12035
12036         if ((cls1Hnd != NO_CLASS_HANDLE) && (cls2Hnd != NO_CLASS_HANDLE))
12037         {
12038             JITDUMP("Asking runtime to compare %p (%s) and %p (%s) for equality\n", dspPtr(cls1Hnd),
12039                     info.compCompHnd->getClassName(cls1Hnd), dspPtr(cls2Hnd), info.compCompHnd->getClassName(cls2Hnd));
12040             TypeCompareState s = info.compCompHnd->compareTypesForEquality(cls1Hnd, cls2Hnd);
12041
12042             if (s != TypeCompareState::May)
12043             {
12044                 // Type comparison result is known.
12045                 const bool typesAreEqual = (s == TypeCompareState::Must);
12046                 const bool operatorIsEQ  = (oper == GT_EQ);
12047                 const int  compareResult = operatorIsEQ ^ typesAreEqual ? 0 : 1;
12048                 JITDUMP("Runtime reports comparison is known at jit time: %u\n", compareResult);
12049                 GenTree* result = gtNewIconNode(compareResult);
12050
12051                 // Any runtime lookups that fed into this compare are
12052                 // now dead code, so they no longer require the runtime context.
12053                 assert(lvaGenericsContextUseCount >= runtimeLookupCount);
12054                 lvaGenericsContextUseCount -= runtimeLookupCount;
12055                 return result;
12056             }
12057             else
12058             {
12059                 resolveFailed = true;
12060             }
12061         }
12062
12063         if (resolveFailed)
12064         {
12065             JITDUMP("Runtime reports comparison is NOT known at jit time\n");
12066         }
12067         else
12068         {
12069             JITDUMP("Could not find handle for %s%s\n", (cls1Hnd == NO_CLASS_HANDLE) ? " cls1" : "",
12070                     (cls2Hnd == NO_CLASS_HANDLE) ? " cls2" : "");
12071         }
12072
12073         // We can't answer the equality comparison definitively at jit
        // time, but can still simplify the comparison.
12075         //
12076         // Find out how we can compare the two handles.
12077         // NOTE: We're potentially passing NO_CLASS_HANDLE, but the runtime knows what to do with it here.
12078         CorInfoInlineTypeCheck inliningKind =
12079             info.compCompHnd->canInlineTypeCheck(cls1Hnd, CORINFO_INLINE_TYPECHECK_SOURCE_TOKEN);
12080
12081         // If the first type needs helper, check the other type: it might be okay with a simple compare.
12082         if (inliningKind == CORINFO_INLINE_TYPECHECK_USE_HELPER)
12083         {
12084             inliningKind = info.compCompHnd->canInlineTypeCheck(cls2Hnd, CORINFO_INLINE_TYPECHECK_SOURCE_TOKEN);
12085         }
12086
12087         assert(inliningKind == CORINFO_INLINE_TYPECHECK_PASS || inliningKind == CORINFO_INLINE_TYPECHECK_USE_HELPER);
12088
12089         // If we successfully tunneled through both operands, compare
12090         // the tunneled values, otherwise compare the original values.
12091         GenTree* compare;
12092         if ((op1TunneledHandle != nullptr) && (op2TunneledHandle != nullptr))
12093         {
12094             compare = gtCreateHandleCompare(oper, op1TunneledHandle, op2TunneledHandle, inliningKind);
12095         }
12096         else
12097         {
12098             compare = gtCreateHandleCompare(oper, op1ClassFromHandle, op2ClassFromHandle, inliningKind);
12099         }
12100
        // Drop any now-irrelevant flags
12102         compare->gtFlags |= tree->gtFlags & (GTF_RELOP_JMP_USED | GTF_RELOP_QMARK | GTF_DONT_CSE);
12103
12104         return compare;
12105     }
12106
12107     // Just one operand creates a type from a handle.
12108     //
12109     // If the other operand is fetching the type from an object,
12110     // we can sometimes optimize the type compare into a simpler
12111     // method table comparison.
12112     //
12113     // TODO: if other operand is null...
12114     if ((op1Kind != TPK_GetType) && (op2Kind != TPK_GetType))
12115     {
12116         return tree;
12117     }
12118
12119     GenTree* const opHandle = op1IsFromHandle ? op1 : op2;
12120     GenTree* const opOther  = op1IsFromHandle ? op2 : op1;
12121
12122     // Tunnel through the handle operand to get at the class handle involved.
12123     GenTree* const       opHandleArgument = opHandle->gtCall.gtCallArgs->gtOp.gtOp1;
12124     CORINFO_CLASS_HANDLE clsHnd           = gtGetHelperArgClassHandle(opHandleArgument);
12125
12126     // If we couldn't find the class handle, give up.
12127     if (clsHnd == NO_CLASS_HANDLE)
12128     {
12129         return tree;
12130     }
12131
12132     // Ask the VM if this type can be equality tested by a simple method
12133     // table comparison.
12134     CorInfoInlineTypeCheck typeCheckInliningResult =
12135         info.compCompHnd->canInlineTypeCheck(clsHnd, CORINFO_INLINE_TYPECHECK_SOURCE_VTABLE);
12136     if (typeCheckInliningResult == CORINFO_INLINE_TYPECHECK_NONE)
12137     {
12138         return tree;
12139     }
12140
12141     // We're good to go.
12142     JITDUMP("Optimizing compare of obj.GetType()"
12143             " and type-from-handle to compare method table pointer\n");
12144
12145     // opHandleArgument is the method table we're looking for.
12146     GenTree* const knownMT = opHandleArgument;
12147
12148     // Fetch object method table from the object itself.
12149     GenTree* objOp = nullptr;
12150
12151     // Note we may see intrinsified or regular calls to GetType
12152     if (opOther->OperGet() == GT_INTRINSIC)
12153     {
12154         objOp = opOther->gtUnOp.gtOp1;
12155     }
12156     else
12157     {
12158         assert(opOther->OperGet() == GT_CALL);
12159         objOp = opOther->gtCall.gtCallObjp;
12160     }
12161
12162     GenTree* const objMT = gtNewOperNode(GT_IND, TYP_I_IMPL, objOp);
12163
12164     // Update various flags
12165     objMT->gtFlags |= GTF_EXCEPT;
12166     compCurBB->bbFlags |= BBF_HAS_VTABREF;
12167     optMethodFlags |= OMF_HAS_VTABLEREF;
12168
12169     // Compare the two method tables
12170     GenTree* const compare = gtCreateHandleCompare(oper, objMT, knownMT, typeCheckInliningResult);
12171
12172     // Drop any now irrelevant flags
12173     compare->gtFlags |= tree->gtFlags & (GTF_RELOP_JMP_USED | GTF_RELOP_QMARK | GTF_DONT_CSE);
12174
12175     // And we're done
12176     return compare;
12177 }
12178
12179 //------------------------------------------------------------------------
12180 // gtGetHelperArgClassHandle: find the compile time class handle from
12181 //   a helper call argument tree
12182 //
12183 // Arguments:
12184 //    tree - tree that passes the handle to the helper
12185 //    runtimeLookupCount [optional, in/out] - incremented if tree was a runtime lookup
12186 //    handleTree [optional, out] - set to the literal operand tree for indirect handles
12187 //
12188 // Returns:
12189 //    The compile time class handle if known.
12190 //
12191 CORINFO_CLASS_HANDLE Compiler::gtGetHelperArgClassHandle(GenTree*  tree,
12192                                                          unsigned* runtimeLookupCount,
12193                                                          GenTree** handleTree)
12194 {
12195     CORINFO_CLASS_HANDLE result = NO_CLASS_HANDLE;
12196
12197     // Walk through any wrapping nop.
12198     if ((tree->gtOper == GT_NOP) && (tree->gtType == TYP_I_IMPL))
12199     {
12200         tree = tree->gtOp.gtOp1;
12201     }
12202
12203     // The handle could be a literal constant
12204     if ((tree->OperGet() == GT_CNS_INT) && (tree->TypeGet() == TYP_I_IMPL))
12205     {
12206         assert(tree->IsIconHandle(GTF_ICON_CLASS_HDL));
12207         result = (CORINFO_CLASS_HANDLE)tree->gtIntCon.gtCompileTimeHandle;
12208     }
12209     // Or the result of a runtime lookup
12210     else if (tree->OperGet() == GT_RUNTIMELOOKUP)
12211     {
12212         result = tree->AsRuntimeLookup()->GetClassHandle();
12213
12214         if (runtimeLookupCount != nullptr)
12215         {
12216             *runtimeLookupCount = *runtimeLookupCount + 1;
12217         }
12218     }
12219     // Or something reached indirectly
12220     else if (tree->gtOper == GT_IND)
12221     {
12222         // The handle indirs we are looking for will be marked as non-faulting.
12223         // Certain others (eg from refanytype) may not be.
12224         if (tree->gtFlags & GTF_IND_NONFAULTING)
12225         {
12226             GenTree* handleTreeInternal = tree->gtOp.gtOp1;
12227
12228             if ((handleTreeInternal->OperGet() == GT_CNS_INT) && (handleTreeInternal->TypeGet() == TYP_I_IMPL))
12229             {
12230                 // These handle constants should be class handles.
12231                 assert(handleTreeInternal->IsIconHandle(GTF_ICON_CLASS_HDL));
12232                 result = (CORINFO_CLASS_HANDLE)handleTreeInternal->gtIntCon.gtCompileTimeHandle;
12233
12234                 if (handleTree != nullptr)
12235                 {
12236                     *handleTree = handleTreeInternal;
12237                 }
12238             }
12239         }
12240     }
12241
12242     return result;
12243 }
12244
12245 /*****************************************************************************
12246  *
12247  *  Some binary operators can be folded even if they have only one
12248  *  operand constant - e.g. boolean operators, add with 0
12249  *  multiply with 1, etc
12250  */
12251
GenTree* Compiler::gtFoldExprSpecial(GenTree* tree)
{
    GenTree*   op1  = tree->gtOp.gtOp1;
    GenTree*   op2  = tree->gtOp.gtOp2;
    genTreeOps oper = tree->OperGet();

    GenTree* op;
    GenTree* cons;
    ssize_t  val;

    assert(tree->OperKind() & GTK_BINOP);

    /* Filter out operators that cannot be folded here */
    if (oper == GT_CAST)
    {
        return tree;
    }

    /* We only consider TYP_INT for folding
     * Do not fold pointer arithmetic (e.g. addressing modes!) */

    if (oper != GT_QMARK && !varTypeIsIntOrI(tree->gtType))
    {
        return tree;
    }

    /* Find out which is the constant node */

    if (op1->IsCnsIntOrI())
    {
        op   = op2;
        cons = op1;
    }
    else if (op2->IsCnsIntOrI())
    {
        op   = op1;
        cons = op2;
    }
    else
    {
        // Neither operand is an int constant; nothing to do here.
        return tree;
    }

    /* Get the constant value */

    val = cons->gtIntConCommon.IconValue();

    /* Here 'op' is the non-constant operand, 'cons' is the constant
       operand, and 'val' is the constant's value */

    switch (oper)
    {
        case GT_EQ:
        case GT_NE:
        case GT_GT:

            // Optimize boxed value classes; these are always false.  This IL is
            // generated when a generic value is tested against null:
            //     <T> ... foo(T x) { ... if ((object)x == null) ...
            if (val == 0 && op->IsBoxedValue())
            {
                JITDUMP("\nAttempting to optimize BOX(valueType) %s null [%06u]\n", GenTree::OpName(oper),
                        dspTreeID(tree));

                // We don't expect GT_GT with signed compares, and we
                // can't predict the result if we do see it, since the
                // boxed object addr could have its high bit set.
                if ((oper == GT_GT) && !tree->IsUnsigned())
                {
                    JITDUMP(" bailing; unexpected signed compare via GT_GT\n");
                }
                else
                {
                    // The tree under the box must be side effect free
                    // since we will drop it if we optimize.
                    assert(!gtTreeHasSideEffects(op->gtBox.gtOp.gtOp1, GTF_SIDE_EFFECT));

                    // See if we can optimize away the box and related statements.
                    GenTree* boxSourceTree = gtTryRemoveBoxUpstreamEffects(op);
                    bool     didOptimize   = (boxSourceTree != nullptr);

                    // If optimization succeeded, remove the box.
                    if (didOptimize)
                    {
                        // Set up the result of the compare.
                        int compareResult = 0;
                        if (oper == GT_GT)
                        {
                            // GT_GT(null, box) == false
                            // GT_GT(box, null) == true
                            //
                            // (op1 == op) is 1 exactly when the box is the first operand.
                            compareResult = (op1 == op);
                        }
                        else if (oper == GT_EQ)
                        {
                            // GT_EQ(box, null) == false
                            // GT_EQ(null, box) == false
                            compareResult = 0;
                        }
                        else
                        {
                            assert(oper == GT_NE);
                            // GT_NE(box, null) == true
                            // GT_NE(null, box) == true
                            compareResult = 1;
                        }

                        JITDUMP("\nSuccess: replacing BOX(valueType) %s null with %d\n", GenTree::OpName(oper),
                                compareResult);

                        op = gtNewIconNode(compareResult);

                        if (fgGlobalMorph)
                        {
                            fgMorphTreeDone(op);
                        }
                        else
                        {
                            // Splice the new constant into the linear order
                            // links previously occupied by 'tree'.
                            op->gtNext = tree->gtNext;
                            op->gtPrev = tree->gtPrev;
                        }

                        return op;
                    }
                }
            }

            break;

        case GT_ADD:
            // Adding zero is an identity (GT_ADD is commutative, so it
            // does not matter which operand is the constant).
            if (val == 0)
            {
                goto DONE_FOLD;
            }
            break;

        case GT_MUL:
            // Multiplying by one is an identity.
            if (val == 1)
            {
                goto DONE_FOLD;
            }
            else if (val == 0)
            {
                /* Multiply by zero - return the 'zero' node, but not if side effects */
                if (!(op->gtFlags & GTF_SIDE_EFFECT))
                {
                    op = cons;
                    goto DONE_FOLD;
                }
            }
            break;

        case GT_DIV:
        case GT_UDIV:
            // Dividing by one is an identity, but only when the divisor is
            // the constant (and the dividend is not itself a constant).
            if ((op2 == cons) && (val == 1) && !(op1->OperKind() & GTK_CONST))
            {
                goto DONE_FOLD;
            }
            break;

        case GT_SUB:
            // Subtracting zero is an identity, but only when zero is the
            // second operand (and the first is not itself a constant).
            if ((op2 == cons) && (val == 0) && !(op1->OperKind() & GTK_CONST))
            {
                goto DONE_FOLD;
            }
            break;

        case GT_AND:
            if (val == 0)
            {
                /* AND with zero - return the 'zero' node, but not if side effects */

                if (!(op->gtFlags & GTF_SIDE_EFFECT))
                {
                    op = cons;
                    goto DONE_FOLD;
                }
            }
            else
            {
                /* The GTF_BOOLEAN flag is set for nodes that are part
                 * of a boolean expression, thus all their children
                 * are known to evaluate to only 0 or 1 */

                if (tree->gtFlags & GTF_BOOLEAN)
                {

                    /* The constant value must be 1
                     * AND with 1 stays the same */
                    assert(val == 1);
                    goto DONE_FOLD;
                }
            }
            break;

        case GT_OR:
            if (val == 0)
            {
                // OR with zero is an identity.
                goto DONE_FOLD;
            }
            else if (tree->gtFlags & GTF_BOOLEAN)
            {
                /* The constant value must be 1 - OR with 1 is 1 */

                assert(val == 1);

                /* OR with one - return the 'one' node, but not if side effects */

                if (!(op->gtFlags & GTF_SIDE_EFFECT))
                {
                    op = cons;
                    goto DONE_FOLD;
                }
            }
            break;

        case GT_LSH:
        case GT_RSH:
        case GT_RSZ:
        case GT_ROL:
        case GT_ROR:
            if (val == 0)
            {
                if (op2 == cons)
                {
                    // Shift/rotate by zero is an identity.
                    goto DONE_FOLD;
                }
                else if (!(op->gtFlags & GTF_SIDE_EFFECT))
                {
                    // Shifting/rotating a zero yields zero; return the
                    // 'zero' node, but not if side effects.
                    op = cons;
                    goto DONE_FOLD;
                }
            }
            break;

        case GT_QMARK:
        {
            assert(op1 == cons && op2 == op && op2->gtOper == GT_COLON);
            assert(op2->gtOp.gtOp1 && op2->gtOp.gtOp2);

            assert(val == 0 || val == 1);

            // The condition is a constant, so select the live arm of the
            // colon and remember the dead arm.
            GenTree* opToDelete;
            if (val)
            {
                op         = op2->AsColon()->ThenNode();
                opToDelete = op2->AsColon()->ElseNode();
            }
            else
            {
                op         = op2->AsColon()->ElseNode();
                opToDelete = op2->AsColon()->ThenNode();
            }

            // Clear colon flags only if the qmark itself is not conditionally executed
            if ((tree->gtFlags & GTF_COLON_COND) == 0)
            {
                fgWalkTreePre(&op, gtClearColonCond);
            }
        }

            goto DONE_FOLD;

        default:
            break;
    }

    /* The node is not foldable */

    return tree;

DONE_FOLD:

    /* The node has been folded into 'op' */

    // If there was an assignment update, we just morphed it into
    // a use, update the flags appropriately
    if (op->gtOper == GT_LCL_VAR)
    {
        assert(tree->OperIs(GT_ASG) || (op->gtFlags & (GTF_VAR_USEASG | GTF_VAR_DEF)) == 0);

        op->gtFlags &= ~(GTF_VAR_USEASG | GTF_VAR_DEF);
    }

    // Splice the surviving operand into the linear order links
    // previously occupied by 'tree'.
    op->gtNext = tree->gtNext;
    op->gtPrev = tree->gtPrev;

    return op;
}
12540
12541 //------------------------------------------------------------------------
12542 // gtTryRemoveBoxUpstreamEffects: given an unused value type box,
12543 //    try and remove the upstream allocation and unnecessary parts of
12544 //    the copy.
12545 //
12546 // Arguments:
12547 //    op  - the box node to optimize
12548 //    options - controls whether and how trees are modified
12549 //        (see notes)
12550 //
12551 // Return Value:
12552 //    A tree representing the original value to box, if removal
12553 //    is successful/possible (but see note). nullptr if removal fails.
12554 //
12555 // Notes:
12556 //    Value typed box gets special treatment because it has associated
12557 //    side effects that can be removed if the box result is not used.
12558 //
12559 //    By default (options == BR_REMOVE_AND_NARROW) this method will
//    try and remove unnecessary trees and will try and reduce remaining
12561 //    operations to the minimal set, possibly narrowing the width of
12562 //    loads from the box source if it is a struct.
12563 //
12564 //    To perform a trial removal, pass BR_DONT_REMOVE. This can be
12565 //    useful to determine if this optimization should only be
12566 //    performed if some other conditions hold true.
12567 //
12568 //    To remove but not alter the access to the box source, pass
12569 //    BR_REMOVE_BUT_NOT_NARROW.
12570 //
12571 //    To remove and return the tree for the type handle used for
12572 //    the boxed newobj, pass BR_REMOVE_BUT_NOT_NARROW_WANT_TYPE_HANDLE.
12573 //    This can be useful when the only part of the box that is "live"
12574 //    is its type.
12575 //
//    If removal fails, it is possible that a subsequent pass may be
12577 //    able to optimize.  Blocking side effects may now be minimized
12578 //    (null or bounds checks might have been removed) or might be
12579 //    better known (inline return placeholder updated with the actual
12580 //    return expression). So the box is perhaps best left as is to
12581 //    help trigger this re-examination.
12582
GenTree* Compiler::gtTryRemoveBoxUpstreamEffects(GenTree* op, BoxRemovalOptions options)
{
    assert(op->IsBoxedValue());

    // grab related parts for the optimization
    GenTreeBox*  box      = op->AsBox();
    GenTreeStmt* asgStmt  = box->gtAsgStmtWhenInlinedBoxValue;
    GenTreeStmt* copyStmt = box->gtCopyStmtWhenInlinedBoxValue;

    JITDUMP("gtTryRemoveBoxUpstreamEffects: %s to %s of BOX (valuetype)"
            " [%06u] (assign/newobj [%06u] copy [%06u])\n",
            (options == BR_DONT_REMOVE) ? "checking if it is possible" : "attempting",
            (options == BR_MAKE_LOCAL_COPY) ? "make local unboxed version" : "remove side effects", dspTreeID(op),
            dspTreeID(asgStmt), dspTreeID(copyStmt));

    // If we don't recognize the form of the assign, bail.
    GenTree* asg = asgStmt->gtStmtExpr;
    if (asg->gtOper != GT_ASG)
    {
        JITDUMP(" bailing; unexpected assignment op %s\n", GenTree::OpName(asg->gtOper));
        return nullptr;
    }

    // If we're eventually going to return the type handle, remember it now.
    GenTree* boxTypeHandle = nullptr;
    if ((options == BR_REMOVE_AND_NARROW_WANT_TYPE_HANDLE) || (options == BR_DONT_REMOVE_WANT_TYPE_HANDLE))
    {
        GenTree*   asgSrc     = asg->gtOp.gtOp2;
        genTreeOps asgSrcOper = asgSrc->OperGet();

        // Allocation may be via AllocObj or via helper call, depending
        // on when this is invoked and whether the jit is using AllocObj
        // for R2R allocations.
        if (asgSrcOper == GT_ALLOCOBJ)
        {
            // For AllocObj the type handle is the allocation node's operand.
            GenTreeAllocObj* allocObj = asgSrc->AsAllocObj();
            boxTypeHandle             = allocObj->gtOp.gtOp1;
        }
        else if (asgSrcOper == GT_CALL)
        {
            GenTreeCall* newobjCall = asgSrc->AsCall();
            GenTree*     newobjArgs = newobjCall->gtCallArgs;

            // In R2R expansions the handle may not be an explicit operand to the helper,
            // so we can't remove the box.
            if (newobjArgs == nullptr)
            {
                assert(newobjCall->IsHelperCall(this, CORINFO_HELP_READYTORUN_NEW));
                JITDUMP(" bailing; newobj via R2R helper\n");
                return nullptr;
            }

            // For a helper call the type handle is the first argument.
            boxTypeHandle = newobjArgs->AsArgList()->Current();
        }
        else
        {
            unreached();
        }

        assert(boxTypeHandle != nullptr);
    }

    // If we don't recognize the form of the copy, bail.
    GenTree* copy = copyStmt->gtStmtExpr;
    if (copy->gtOper != GT_ASG)
    {
        // GT_RET_EXPR is a tolerable temporary failure.
        // The jit will revisit this optimization after
        // inlining is done.
        if (copy->gtOper == GT_RET_EXPR)
        {
            JITDUMP(" bailing; must wait for replacement of copy %s\n", GenTree::OpName(copy->gtOper));
        }
        else
        {
            // Anything else is a missed case we should
            // figure out how to handle.  One known case
            // is GT_COMMAs enclosing the GT_ASG we are
            // looking for.
            JITDUMP(" bailing; unexpected copy op %s\n", GenTree::OpName(copy->gtOper));
        }
        return nullptr;
    }

    // Handle case where we are optimizing the box into a local copy
    if (options == BR_MAKE_LOCAL_COPY)
    {
        // Drill into the box to get at the box temp local and the box type
        GenTree* boxTemp = box->BoxOp();
        assert(boxTemp->IsLocal());
        const unsigned boxTempLcl = boxTemp->AsLclVar()->GetLclNum();
        assert(lvaTable[boxTempLcl].lvType == TYP_REF);
        CORINFO_CLASS_HANDLE boxClass = lvaTable[boxTempLcl].lvClassHnd;
        assert(boxClass != nullptr);

        // Verify that the copyDst has the expected shape
        // (blk|obj|ind (add (boxTempLcl, ptr-size)))
        //
        // The shape here is constrained to the patterns we produce
        // over in impImportAndPushBox for the inlined box case.
        GenTree* copyDst = copy->gtOp.gtOp1;

        if (!copyDst->OperIs(GT_BLK, GT_IND, GT_OBJ))
        {
            JITDUMP("Unexpected copy dest operator %s\n", GenTree::OpName(copyDst->gtOper));
            return nullptr;
        }

        GenTree* copyDstAddr = copyDst->gtOp.gtOp1;
        if (copyDstAddr->OperGet() != GT_ADD)
        {
            JITDUMP("Unexpected copy dest address tree\n");
            return nullptr;
        }

        // First addend must be the box temp local itself.
        GenTree* copyDstAddrOp1 = copyDstAddr->gtOp.gtOp1;
        if ((copyDstAddrOp1->OperGet() != GT_LCL_VAR) || (copyDstAddrOp1->gtLclVarCommon.gtLclNum != boxTempLcl))
        {
            JITDUMP("Unexpected copy dest address 1st addend\n");
            return nullptr;
        }

        // Second addend must be the pointer-size offset past the method table slot.
        GenTree* copyDstAddrOp2 = copyDstAddr->gtOp.gtOp2;
        if (!copyDstAddrOp2->IsIntegralConst(TARGET_POINTER_SIZE))
        {
            JITDUMP("Unexpected copy dest address 2nd addend\n");
            return nullptr;
        }

        // Screening checks have all passed. Do the transformation.
        //
        // Retype the box temp to be a struct
        JITDUMP("Retyping box temp V%02u to struct %s\n", boxTempLcl, eeGetClassName(boxClass));
        // Reset the type first; lvaSetStruct below re-types the local from boxClass.
        lvaTable[boxTempLcl].lvType   = TYP_UNDEF;
        const bool isUnsafeValueClass = false;
        lvaSetStruct(boxTempLcl, boxClass, isUnsafeValueClass);
        var_types boxTempType = lvaTable[boxTempLcl].lvType;

        // Remove the newobj and assignment to box temp
        JITDUMP("Bashing NEWOBJ [%06u] to NOP\n", dspTreeID(asg));
        asg->gtBashToNOP();

        // Update the copy from the value to be boxed to the box temp
        GenTree* newDst     = gtNewOperNode(GT_ADDR, TYP_BYREF, gtNewLclvNode(boxTempLcl, boxTempType));
        copyDst->gtOp.gtOp1 = newDst;

        // Return the address of the now-struct typed box temp
        GenTree* retValue = gtNewOperNode(GT_ADDR, TYP_BYREF, gtNewLclvNode(boxTempLcl, boxTempType));

        return retValue;
    }

    // If the copy is a struct copy, make sure we know how to isolate
    // any source side effects.
    GenTree* copySrc = copy->gtOp.gtOp2;

    // If the copy source is from a pending inline, wait for it to resolve.
    if (copySrc->gtOper == GT_RET_EXPR)
    {
        JITDUMP(" bailing; must wait for replacement of copy source %s\n", GenTree::OpName(copySrc->gtOper));
        return nullptr;
    }

    bool hasSrcSideEffect = false;
    bool isStructCopy     = false;

    if (gtTreeHasSideEffects(copySrc, GTF_SIDE_EFFECT))
    {
        hasSrcSideEffect = true;

        if (varTypeIsStruct(copySrc->gtType))
        {
            isStructCopy = true;

            if ((copySrc->gtOper != GT_OBJ) && (copySrc->gtOper != GT_IND) && (copySrc->gtOper != GT_FIELD))
            {
                // We don't know how to handle other cases, yet.
                JITDUMP(" bailing; unexpected copy source struct op with side effect %s\n",
                        GenTree::OpName(copySrc->gtOper));
                return nullptr;
            }
        }
    }

    // If this was a trial removal, we're done.
    if (options == BR_DONT_REMOVE)
    {
        return copySrc;
    }

    if (options == BR_DONT_REMOVE_WANT_TYPE_HANDLE)
    {
        return boxTypeHandle;
    }

    // Otherwise, proceed with the optimization.
    //
    // Change the assignment expression to a NOP.
    JITDUMP("\nBashing NEWOBJ [%06u] to NOP\n", dspTreeID(asg));
    asg->gtBashToNOP();

    // Change the copy expression so it preserves key
    // source side effects.
    JITDUMP("\nBashing COPY [%06u]", dspTreeID(copy));

    if (!hasSrcSideEffect)
    {
        // If there were no copy source side effects just bash
        // the copy to a NOP.
        copy->gtBashToNOP();
        JITDUMP(" to NOP; no source side effects.\n");
    }
    else if (!isStructCopy)
    {
        // For scalar types, go ahead and produce the
        // value as the copy is fairly cheap and likely
        // the optimizer can trim things down to just the
        // minimal side effect parts.
        copyStmt->gtStmtExpr = copySrc;
        JITDUMP(" to scalar read via [%06u]\n", dspTreeID(copySrc));
    }
    else
    {
        // For struct types read the first byte of the
        // source struct; there's no need to read the
        // entire thing, and no place to put it.
        assert(copySrc->gtOper == GT_OBJ || copySrc->gtOper == GT_IND || copySrc->gtOper == GT_FIELD);
        copyStmt->gtStmtExpr = copySrc;

        if (options == BR_REMOVE_AND_NARROW || options == BR_REMOVE_AND_NARROW_WANT_TYPE_HANDLE)
        {
            JITDUMP(" to read first byte of struct via modified [%06u]\n", dspTreeID(copySrc));
            copySrc->ChangeOper(GT_IND);
            copySrc->gtType = TYP_BYTE;
        }
        else
        {
            JITDUMP(" to read entire struct via modified [%06u]\n", dspTreeID(copySrc));
        }
    }

    // The bashed statements need their linear order re-derived when
    // the statement list is threaded.
    if (fgStmtListThreaded)
    {
        fgSetStmtSeq(asgStmt);
        fgSetStmtSeq(copyStmt);
    }

    // Box effects were successfully optimized.

    if (options == BR_REMOVE_AND_NARROW_WANT_TYPE_HANDLE)
    {
        return boxTypeHandle;
    }
    else
    {
        return copySrc;
    }
}
12841
12842 //------------------------------------------------------------------------
12843 // gtOptimizeEnumHasFlag: given the operands for a call to Enum.HasFlag,
12844 //    try and optimize the call to a simple and/compare tree.
12845 //
12846 // Arguments:
12847 //    thisOp  - first argument to the call
12848 //    flagOp  - second argument to the call
12849 //
12850 // Return Value:
//    A new cmp/and tree if successful. nullptr on failure.
12852 //
12853 // Notes:
12854 //    If successful, may allocate new temps and modify connected
12855 //    statements.
12856
12857 GenTree* Compiler::gtOptimizeEnumHasFlag(GenTree* thisOp, GenTree* flagOp)
12858 {
12859     JITDUMP("Considering optimizing call to Enum.HasFlag....\n");
12860
12861     // Operands must be boxes
12862     if (!thisOp->IsBoxedValue() || !flagOp->IsBoxedValue())
12863     {
12864         JITDUMP("bailing, need both inputs to be BOXes\n");
12865         return nullptr;
12866     }
12867
12868     // Operands must have same type
12869     bool                 isExactThis   = false;
12870     bool                 isNonNullThis = false;
12871     CORINFO_CLASS_HANDLE thisHnd       = gtGetClassHandle(thisOp, &isExactThis, &isNonNullThis);
12872
12873     if (thisHnd == nullptr)
12874     {
12875         JITDUMP("bailing, can't find type for 'this' operand\n");
12876         return nullptr;
12877     }
12878
12879     // A boxed thisOp should have exact type and non-null instance
12880     assert(isExactThis);
12881     assert(isNonNullThis);
12882
12883     bool                 isExactFlag   = false;
12884     bool                 isNonNullFlag = false;
12885     CORINFO_CLASS_HANDLE flagHnd       = gtGetClassHandle(flagOp, &isExactFlag, &isNonNullFlag);
12886
12887     if (flagHnd == nullptr)
12888     {
12889         JITDUMP("bailing, can't find type for 'flag' operand\n");
12890         return nullptr;
12891     }
12892
12893     // A boxed flagOp should have exact type and non-null instance
12894     assert(isExactFlag);
12895     assert(isNonNullFlag);
12896
12897     if (flagHnd != thisHnd)
12898     {
12899         JITDUMP("bailing, operand types differ\n");
12900         return nullptr;
12901     }
12902
12903     // If we have a shared type instance we can't safely check type
12904     // equality, so bail.
12905     DWORD classAttribs = info.compCompHnd->getClassAttribs(thisHnd);
12906     if (classAttribs & CORINFO_FLG_SHAREDINST)
12907     {
12908         JITDUMP("bailing, have shared instance type\n");
12909         return nullptr;
12910     }
12911
12912     // Simulate removing the box for thisOP. We need to know that it can
12913     // be safely removed before we can optimize.
12914     GenTree* thisVal = gtTryRemoveBoxUpstreamEffects(thisOp, BR_DONT_REMOVE);
12915     if (thisVal == nullptr)
12916     {
12917         // Note we may fail here if the this operand comes from
12918         // a call. We should be able to retry this post-inlining.
12919         JITDUMP("bailing, can't undo box of 'this' operand\n");
12920         return nullptr;
12921     }
12922
12923     GenTree* flagVal = gtTryRemoveBoxUpstreamEffects(flagOp, BR_REMOVE_BUT_NOT_NARROW);
12924     if (flagVal == nullptr)
12925     {
12926         // Note we may fail here if the flag operand comes from
12927         // a call. We should be able to retry this post-inlining.
12928         JITDUMP("bailing, can't undo box of 'flag' operand\n");
12929         return nullptr;
12930     }
12931
12932     // Yes, both boxes can be cleaned up. Optimize.
12933     JITDUMP("Optimizing call to Enum.HasFlag\n");
12934
12935     // Undo the boxing of thisOp and prepare to operate directly
12936     // on the original enum values.
12937     thisVal = gtTryRemoveBoxUpstreamEffects(thisOp, BR_REMOVE_BUT_NOT_NARROW);
12938
12939     // Our trial removal above should guarantee successful removal here.
12940     assert(thisVal != nullptr);
12941
12942     // We should have a consistent view of the type
12943     var_types type = thisVal->TypeGet();
12944     assert(type == flagVal->TypeGet());
12945
12946     // The thisVal and flagVal trees come from earlier statements.
12947     //
12948     // Unless they are invariant values, we need to evaluate them both
12949     // to temps at those points to safely transmit the values here.
12950     //
12951     // Also we need to use the flag twice, so we need two trees for it.
12952     GenTree* thisValOpt     = nullptr;
12953     GenTree* flagValOpt     = nullptr;
12954     GenTree* flagValOptCopy = nullptr;
12955
12956     if (thisVal->IsIntegralConst())
12957     {
12958         thisValOpt = gtClone(thisVal);
12959         assert(thisValOpt != nullptr);
12960     }
12961     else
12962     {
12963         const unsigned thisTmp     = lvaGrabTemp(true DEBUGARG("Enum:HasFlag this temp"));
12964         GenTree*       thisAsg     = gtNewTempAssign(thisTmp, thisVal);
12965         GenTreeStmt*   thisAsgStmt = thisOp->AsBox()->gtCopyStmtWhenInlinedBoxValue;
12966         thisAsgStmt->gtStmtExpr    = thisAsg;
12967         thisValOpt                 = gtNewLclvNode(thisTmp, type);
12968     }
12969
12970     if (flagVal->IsIntegralConst())
12971     {
12972         flagValOpt = gtClone(flagVal);
12973         assert(flagValOpt != nullptr);
12974         flagValOptCopy = gtClone(flagVal);
12975         assert(flagValOptCopy != nullptr);
12976     }
12977     else
12978     {
12979         const unsigned flagTmp     = lvaGrabTemp(true DEBUGARG("Enum:HasFlag flag temp"));
12980         GenTree*       flagAsg     = gtNewTempAssign(flagTmp, flagVal);
12981         GenTreeStmt*   flagAsgStmt = flagOp->AsBox()->gtCopyStmtWhenInlinedBoxValue;
12982         flagAsgStmt->gtStmtExpr    = flagAsg;
12983         flagValOpt                 = gtNewLclvNode(flagTmp, type);
12984         flagValOptCopy             = gtNewLclvNode(flagTmp, type);
12985     }
12986
12987     // Turn the call into (thisValTmp & flagTmp) == flagTmp.
12988     GenTree* andTree = gtNewOperNode(GT_AND, type, thisValOpt, flagValOpt);
12989     GenTree* cmpTree = gtNewOperNode(GT_EQ, TYP_INT, andTree, flagValOptCopy);
12990
12991     JITDUMP("Optimized call to Enum.HasFlag\n");
12992
12993     return cmpTree;
12994 }
12995
12996 /*****************************************************************************
12997  *
12998  *  Fold the given constant tree.
12999  */
13000
13001 #ifdef _PREFAST_
13002 #pragma warning(push)
13003 #pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
13004 #endif
13005 GenTree* Compiler::gtFoldExprConst(GenTree* tree)
13006 {
13007     unsigned kind = tree->OperKind();
13008
13009     SSIZE_T       i1, i2, itemp;
13010     INT64         lval1, lval2, ltemp;
13011     float         f1, f2;
13012     double        d1, d2;
13013     var_types     switchType;
13014     FieldSeqNode* fieldSeq = FieldSeqStore::NotAField(); // default unless we override it when folding
13015
13016     assert(kind & (GTK_UNOP | GTK_BINOP));
13017
13018     GenTree* op1 = tree->gtOp.gtOp1;
13019     GenTree* op2 = tree->gtGetOp2IfPresent();
13020
13021     if (!opts.OptEnabled(CLFLG_CONSTANTFOLD))
13022     {
13023         return tree;
13024     }
13025
13026     if (tree->OperGet() == GT_NOP)
13027     {
13028         return tree;
13029     }
13030
13031 #ifdef FEATURE_SIMD
13032     if (tree->OperGet() == GT_SIMD)
13033     {
13034         return tree;
13035     }
13036 #endif // FEATURE_SIMD
13037
13038     if (tree->gtOper == GT_ALLOCOBJ)
13039     {
13040         return tree;
13041     }
13042
13043     if (tree->gtOper == GT_RUNTIMELOOKUP)
13044     {
13045         return tree;
13046     }
13047
13048     if (kind & GTK_UNOP)
13049     {
13050         assert(op1->OperKind() & GTK_CONST);
13051
13052         switch (op1->gtType)
13053         {
13054             case TYP_INT:
13055
13056                 /* Fold constant INT unary operator */
13057
13058                 if (!op1->gtIntCon.ImmedValCanBeFolded(this, tree->OperGet()))
13059                 {
13060                     return tree;
13061                 }
13062
13063                 i1 = (int)op1->gtIntCon.gtIconVal;
13064
13065                 // If we fold a unary oper, then the folded constant
13066                 // is considered a ConstantIndexField if op1 was one
13067                 //
13068
13069                 if ((op1->gtIntCon.gtFieldSeq != nullptr) && op1->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
13070                 {
13071                     fieldSeq = op1->gtIntCon.gtFieldSeq;
13072                 }
13073
13074                 switch (tree->gtOper)
13075                 {
13076                     case GT_NOT:
13077                         i1 = ~i1;
13078                         break;
13079
13080                     case GT_NEG:
13081                         i1 = -i1;
13082                         break;
13083
13084                     case GT_BSWAP:
13085                         i1 = ((i1 >> 24) & 0xFF) | ((i1 >> 8) & 0xFF00) | ((i1 << 8) & 0xFF0000) |
13086                              ((i1 << 24) & 0xFF000000);
13087                         break;
13088
13089                     case GT_BSWAP16:
13090                         i1 = ((i1 >> 8) & 0xFF) | ((i1 << 8) & 0xFF00);
13091                         break;
13092
13093                     case GT_CAST:
13094                         // assert (genActualType(tree->CastToType()) == tree->gtType);
13095                         switch (tree->CastToType())
13096                         {
13097                             case TYP_BYTE:
13098                                 itemp = INT32(INT8(i1));
13099                                 goto CHK_OVF;
13100
13101                             case TYP_SHORT:
13102                                 itemp = INT32(INT16(i1));
13103                             CHK_OVF:
13104                                 if (tree->gtOverflow() && ((itemp != i1) || ((tree->gtFlags & GTF_UNSIGNED) && i1 < 0)))
13105                                 {
13106                                     goto INT_OVF;
13107                                 }
13108                                 i1 = itemp;
13109                                 goto CNS_INT;
13110
13111                             case TYP_USHORT:
13112                                 itemp = INT32(UINT16(i1));
13113                                 if (tree->gtOverflow())
13114                                 {
13115                                     if (itemp != i1)
13116                                     {
13117                                         goto INT_OVF;
13118                                     }
13119                                 }
13120                                 i1 = itemp;
13121                                 goto CNS_INT;
13122
13123                             case TYP_BOOL:
13124                             case TYP_UBYTE:
13125                                 itemp = INT32(UINT8(i1));
13126                                 if (tree->gtOverflow())
13127                                 {
13128                                     if (itemp != i1)
13129                                     {
13130                                         goto INT_OVF;
13131                                     }
13132                                 }
13133                                 i1 = itemp;
13134                                 goto CNS_INT;
13135
13136                             case TYP_UINT:
13137                                 if (!(tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && i1 < 0)
13138                                 {
13139                                     goto INT_OVF;
13140                                 }
13141                                 goto CNS_INT;
13142
13143                             case TYP_INT:
13144                                 if ((tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && i1 < 0)
13145                                 {
13146                                     goto INT_OVF;
13147                                 }
13148                                 goto CNS_INT;
13149
13150                             case TYP_ULONG:
13151                                 if (tree->IsUnsigned())
13152                                 {
13153                                     lval1 = UINT64(UINT32(i1));
13154                                 }
13155                                 else
13156                                 {
13157                                     if (tree->gtOverflow() && (i1 < 0))
13158                                     {
13159                                         goto LNG_OVF;
13160                                     }
13161                                     lval1 = UINT64(INT32(i1));
13162                                 }
13163                                 goto CNS_LONG;
13164
13165                             case TYP_LONG:
13166                                 if (tree->IsUnsigned())
13167                                 {
13168                                     lval1 = INT64(UINT32(i1));
13169                                 }
13170                                 else
13171                                 {
13172                                     lval1 = INT64(INT32(i1));
13173                                 }
13174                                 goto CNS_LONG;
13175
13176                             case TYP_FLOAT:
13177                                 if (tree->gtFlags & GTF_UNSIGNED)
13178                                 {
13179                                     f1 = forceCastToFloat(UINT32(i1));
13180                                 }
13181                                 else
13182                                 {
13183                                     f1 = forceCastToFloat(INT32(i1));
13184                                 }
13185                                 d1 = f1;
13186                                 goto CNS_DOUBLE;
13187
13188                             case TYP_DOUBLE:
13189                                 if (tree->gtFlags & GTF_UNSIGNED)
13190                                 {
13191                                     d1 = (double)UINT32(i1);
13192                                 }
13193                                 else
13194                                 {
13195                                     d1 = (double)INT32(i1);
13196                                 }
13197                                 goto CNS_DOUBLE;
13198
13199                             default:
13200                                 assert(!"BAD_TYP");
13201                                 break;
13202                         }
13203                         return tree;
13204
13205                     default:
13206                         return tree;
13207                 }
13208
13209                 goto CNS_INT;
13210
13211             case TYP_LONG:
13212
13213                 /* Fold constant LONG unary operator */
13214
13215                 if (!op1->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()))
13216                 {
13217                     return tree;
13218                 }
13219
13220                 lval1 = op1->gtIntConCommon.LngValue();
13221
13222                 switch (tree->gtOper)
13223                 {
13224                     case GT_NOT:
13225                         lval1 = ~lval1;
13226                         break;
13227
13228                     case GT_NEG:
13229                         lval1 = -lval1;
13230                         break;
13231
13232                     case GT_BSWAP:
13233                         lval1 = ((lval1 >> 56) & 0xFF) | ((lval1 >> 40) & 0xFF00) | ((lval1 >> 24) & 0xFF0000) |
13234                                 ((lval1 >> 8) & 0xFF000000) | ((lval1 << 8) & 0xFF00000000) |
13235                                 ((lval1 << 24) & 0xFF0000000000) | ((lval1 << 40) & 0xFF000000000000) |
13236                                 ((lval1 << 56) & 0xFF00000000000000);
13237                         break;
13238
13239                     case GT_CAST:
13240                         assert(genActualType(tree->CastToType()) == tree->gtType);
13241                         switch (tree->CastToType())
13242                         {
13243                             case TYP_BYTE:
13244                                 i1 = INT32(INT8(lval1));
13245                                 goto CHECK_INT_OVERFLOW;
13246
13247                             case TYP_SHORT:
13248                                 i1 = INT32(INT16(lval1));
13249                                 goto CHECK_INT_OVERFLOW;
13250
13251                             case TYP_USHORT:
13252                                 i1 = INT32(UINT16(lval1));
13253                                 goto CHECK_UINT_OVERFLOW;
13254
13255                             case TYP_UBYTE:
13256                                 i1 = INT32(UINT8(lval1));
13257                                 goto CHECK_UINT_OVERFLOW;
13258
13259                             case TYP_INT:
13260                                 i1 = INT32(lval1);
13261
13262                             CHECK_INT_OVERFLOW:
13263                                 if (tree->gtOverflow())
13264                                 {
13265                                     if (i1 != lval1)
13266                                     {
13267                                         goto INT_OVF;
13268                                     }
13269                                     if ((tree->gtFlags & GTF_UNSIGNED) && i1 < 0)
13270                                     {
13271                                         goto INT_OVF;
13272                                     }
13273                                 }
13274                                 goto CNS_INT;
13275
13276                             case TYP_UINT:
13277                                 i1 = UINT32(lval1);
13278
13279                             CHECK_UINT_OVERFLOW:
13280                                 if (tree->gtOverflow() && UINT32(i1) != lval1)
13281                                 {
13282                                     goto INT_OVF;
13283                                 }
13284                                 goto CNS_INT;
13285
13286                             case TYP_ULONG:
13287                                 if (!(tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && lval1 < 0)
13288                                 {
13289                                     goto LNG_OVF;
13290                                 }
13291                                 goto CNS_LONG;
13292
13293                             case TYP_LONG:
13294                                 if ((tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && lval1 < 0)
13295                                 {
13296                                     goto LNG_OVF;
13297                                 }
13298                                 goto CNS_LONG;
13299
13300                             case TYP_FLOAT:
13301                             case TYP_DOUBLE:
13302                                 if ((tree->gtFlags & GTF_UNSIGNED) && lval1 < 0)
13303                                 {
13304                                     d1 = FloatingPointUtils::convertUInt64ToDouble((unsigned __int64)lval1);
13305                                 }
13306                                 else
13307                                 {
13308                                     d1 = (double)lval1;
13309                                 }
13310
13311                                 if (tree->CastToType() == TYP_FLOAT)
13312                                 {
13313                                     f1 = forceCastToFloat(d1); // truncate precision
13314                                     d1 = f1;
13315                                 }
13316                                 goto CNS_DOUBLE;
13317                             default:
13318                                 assert(!"BAD_TYP");
13319                                 break;
13320                         }
13321                         return tree;
13322
13323                     default:
13324                         return tree;
13325                 }
13326
13327                 goto CNS_LONG;
13328
13329             case TYP_FLOAT:
13330             case TYP_DOUBLE:
13331                 assert(op1->gtOper == GT_CNS_DBL);
13332
13333                 /* Fold constant DOUBLE unary operator */
13334
13335                 d1 = op1->gtDblCon.gtDconVal;
13336
13337                 switch (tree->gtOper)
13338                 {
13339                     case GT_NEG:
13340                         d1 = -d1;
13341                         break;
13342
13343                     case GT_CAST:
13344
13345                         if (tree->gtOverflowEx())
13346                         {
13347                             return tree;
13348                         }
13349
13350                         assert(genActualType(tree->CastToType()) == tree->gtType);
13351
13352                         if ((op1->gtType == TYP_FLOAT && !_finite(forceCastToFloat(d1))) ||
13353                             (op1->gtType == TYP_DOUBLE && !_finite(d1)))
13354                         {
13355                             // The floating point constant is not finite.  The ECMA spec says, in
13356                             // III 3.27, that "...if overflow occurs converting a floating point type
13357                             // to an integer, ..., the value returned is unspecified."  However, it would
13358                             // at least be desirable to have the same value returned for casting an overflowing
13359                             // constant to an int as would obtained by passing that constant as a parameter
13360                             // then casting that parameter to an int type.  We will assume that the C compiler's
13361                             // cast logic will yield the desired result (and trust testing to tell otherwise).
13362                             // Cross-compilation is an issue here; if that becomes an important scenario, we should
13363                             // capture the target-specific values of overflow casts to the various integral types as
13364                             // constants in a target-specific function.
13365                             CLANG_FORMAT_COMMENT_ANCHOR;
13366
13367                             // Don't fold conversions of +inf/-inf to integral value on all platforms
13368                             // as the value returned by JIT helper doesn't match with the C compiler's cast result.
13369                             // We want the behavior to be same with or without folding.
13370                             return tree;
13371                         }
13372
13373                         if (d1 <= -1.0 && varTypeIsUnsigned(tree->CastToType()))
13374                         {
                            // Don't fold conversions of these cases because the result is unspecified per ECMA spec
13376                             // and the native math doing the fold doesn't match the run-time computation on all
13377                             // platforms.
13378                             // We want the behavior to be same with or without folding.
13379                             return tree;
13380                         }
13381
13382                         switch (tree->CastToType())
13383                         {
13384                             case TYP_BYTE:
13385                                 i1 = INT32(INT8(d1));
13386                                 goto CNS_INT;
13387
13388                             case TYP_SHORT:
13389                                 i1 = INT32(INT16(d1));
13390                                 goto CNS_INT;
13391
13392                             case TYP_USHORT:
13393                                 i1 = INT32(UINT16(d1));
13394                                 goto CNS_INT;
13395
13396                             case TYP_UBYTE:
13397                                 i1 = INT32(UINT8(d1));
13398                                 goto CNS_INT;
13399
13400                             case TYP_INT:
13401                                 i1 = INT32(d1);
13402                                 goto CNS_INT;
13403
13404                             case TYP_UINT:
13405                                 i1 = forceCastToUInt32(d1);
13406                                 goto CNS_INT;
13407
13408                             case TYP_LONG:
13409                                 lval1 = INT64(d1);
13410                                 goto CNS_LONG;
13411
13412                             case TYP_ULONG:
13413                                 lval1 = FloatingPointUtils::convertDoubleToUInt64(d1);
13414                                 goto CNS_LONG;
13415
13416                             case TYP_FLOAT:
13417                                 d1 = forceCastToFloat(d1);
13418                                 goto CNS_DOUBLE;
13419
13420                             case TYP_DOUBLE:
13421                                 if (op1->gtType == TYP_FLOAT)
13422                                 {
13423                                     d1 = forceCastToFloat(d1); // truncate precision
13424                                 }
13425                                 goto CNS_DOUBLE; // redundant cast
13426
13427                             default:
13428                                 assert(!"BAD_TYP");
13429                                 break;
13430                         }
13431                         return tree;
13432
13433                     default:
13434                         return tree;
13435                 }
13436                 goto CNS_DOUBLE;
13437
13438             default:
13439                 /* not a foldable typ - e.g. RET const */
13440                 return tree;
13441         }
13442     }
13443
13444     /* We have a binary operator */
13445
13446     assert(kind & GTK_BINOP);
13447     assert(op2);
13448     assert(op1->OperKind() & GTK_CONST);
13449     assert(op2->OperKind() & GTK_CONST);
13450
13451     if (tree->gtOper == GT_COMMA)
13452     {
13453         return op2;
13454     }
13455
13456     if (tree->OperIsAnyList())
13457     {
13458         return tree;
13459     }
13460
13461     switchType = op1->gtType;
13462
13463     // Normally we will just switch on op1 types, but for the case where
13464     //  only op2 is a GC type and op1 is not a GC type, we use the op2 type.
13465     //  This makes us handle this as a case of folding for GC type.
13466     //
13467     if (varTypeIsGC(op2->gtType) && !varTypeIsGC(op1->gtType))
13468     {
13469         switchType = op2->gtType;
13470     }
13471
13472     switch (switchType)
13473     {
13474
13475         /*-------------------------------------------------------------------------
13476          * Fold constant REF of BYREF binary operator
13477          * These can only be comparisons or null pointers
13478          */
13479
13480         case TYP_REF:
13481
13482             /* String nodes are an RVA at this point */
13483
13484             if (op1->gtOper == GT_CNS_STR || op2->gtOper == GT_CNS_STR)
13485             {
13486                 return tree;
13487             }
13488
13489             __fallthrough;
13490
13491         case TYP_BYREF:
13492
13493             i1 = op1->gtIntConCommon.IconValue();
13494             i2 = op2->gtIntConCommon.IconValue();
13495
13496             switch (tree->gtOper)
13497             {
13498                 case GT_EQ:
13499                     i1 = (i1 == i2);
13500                     goto FOLD_COND;
13501
13502                 case GT_NE:
13503                     i1 = (i1 != i2);
13504                     goto FOLD_COND;
13505
13506                 case GT_ADD:
13507                     noway_assert(tree->gtType != TYP_REF);
13508                     // We only fold a GT_ADD that involves a null reference.
13509                     if (((op1->TypeGet() == TYP_REF) && (i1 == 0)) || ((op2->TypeGet() == TYP_REF) && (i2 == 0)))
13510                     {
13511 #ifdef DEBUG
13512                         if (verbose)
13513                         {
13514                             printf("\nFolding operator with constant nodes into a constant:\n");
13515                             gtDispTree(tree);
13516                         }
13517 #endif
13518                         // Fold into GT_IND of null byref
13519                         tree->ChangeOperConst(GT_CNS_INT);
13520                         tree->gtType              = TYP_BYREF;
13521                         tree->gtIntCon.gtIconVal  = 0;
13522                         tree->gtIntCon.gtFieldSeq = FieldSeqStore::NotAField();
13523                         if (vnStore != nullptr)
13524                         {
13525                             fgValueNumberTreeConst(tree);
13526                         }
13527 #ifdef DEBUG
13528                         if (verbose)
13529                         {
13530                             printf("\nFolded to null byref:\n");
13531                             gtDispTree(tree);
13532                         }
13533 #endif
13534                         goto DONE;
13535                     }
13536
13537                 default:
13538                     break;
13539             }
13540
13541             return tree;
13542
13543         /*-------------------------------------------------------------------------
13544          * Fold constant INT binary operator
13545          */
13546
13547         case TYP_INT:
13548
13549             if (tree->OperIsCompare() && (tree->gtType == TYP_BYTE))
13550             {
13551                 tree->gtType = TYP_INT;
13552             }
13553
13554             assert(tree->gtType == TYP_INT || varTypeIsGC(tree->TypeGet()) || tree->gtOper == GT_MKREFANY);
13555
13556             // No GC pointer types should be folded here...
13557             //
13558             assert(!varTypeIsGC(op1->gtType) && !varTypeIsGC(op2->gtType));
13559
13560             if (!op1->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()))
13561             {
13562                 return tree;
13563             }
13564
13565             if (!op2->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()))
13566             {
13567                 return tree;
13568             }
13569
13570             i1 = op1->gtIntConCommon.IconValue();
13571             i2 = op2->gtIntConCommon.IconValue();
13572
13573             switch (tree->gtOper)
13574             {
13575                 case GT_EQ:
13576                     i1 = (INT32(i1) == INT32(i2));
13577                     break;
13578                 case GT_NE:
13579                     i1 = (INT32(i1) != INT32(i2));
13580                     break;
13581
13582                 case GT_LT:
13583                     if (tree->gtFlags & GTF_UNSIGNED)
13584                     {
13585                         i1 = (UINT32(i1) < UINT32(i2));
13586                     }
13587                     else
13588                     {
13589                         i1 = (INT32(i1) < INT32(i2));
13590                     }
13591                     break;
13592
13593                 case GT_LE:
13594                     if (tree->gtFlags & GTF_UNSIGNED)
13595                     {
13596                         i1 = (UINT32(i1) <= UINT32(i2));
13597                     }
13598                     else
13599                     {
13600                         i1 = (INT32(i1) <= INT32(i2));
13601                     }
                    break;

                case GT_GE:
                    // GTF_UNSIGNED on the relop node (not the operand types) selects the
                    // unsigned flavor of the comparison.
                    if (tree->gtFlags & GTF_UNSIGNED)
                    {
                        i1 = (UINT32(i1) >= UINT32(i2));
                    }
                    else
                    {
                        i1 = (INT32(i1) >= INT32(i2));
                    }
                    break;

                case GT_GT:
                    if (tree->gtFlags & GTF_UNSIGNED)
                    {
                        i1 = (UINT32(i1) > UINT32(i2));
                    }
                    else
                    {
                        i1 = (INT32(i1) > INT32(i2));
                    }
                    break;

                case GT_ADD:
                    itemp = i1 + i2;
                    if (tree->gtOverflow())
                    {
                        // Re-evaluate the sum in 64 bits; if the truncated 32-bit result
                        // disagrees, the checked operation overflows and we morph the tree
                        // into an overflow throw (see INT_OVF below).
                        if (tree->gtFlags & GTF_UNSIGNED)
                        {
                            if (INT64(UINT32(itemp)) != INT64(UINT32(i1)) + INT64(UINT32(i2)))
                            {
                                goto INT_OVF;
                            }
                        }
                        else
                        {
                            if (INT64(INT32(itemp)) != INT64(INT32(i1)) + INT64(INT32(i2)))
                            {
                                goto INT_OVF;
                            }
                        }
                    }
                    i1       = itemp;
                    // The sum of two constants carries the combined field sequence of both operands.
                    fieldSeq = GetFieldSeqStore()->Append(op1->gtIntCon.gtFieldSeq, op2->gtIntCon.gtFieldSeq);
                    break;
                case GT_SUB:
                    itemp = i1 - i2;
                    if (tree->gtOverflow())
                    {
                        // Same 64-bit re-evaluation trick as GT_ADD above.
                        if (tree->gtFlags & GTF_UNSIGNED)
                        {
                            if (INT64(UINT32(itemp)) != ((INT64)((UINT32)i1) - (INT64)((UINT32)i2)))
                            {
                                goto INT_OVF;
                            }
                        }
                        else
                        {
                            if (INT64(INT32(itemp)) != INT64(INT32(i1)) - INT64(INT32(i2)))
                            {
                                goto INT_OVF;
                            }
                        }
                    }
                    i1 = itemp;
                    break;
                case GT_MUL:
                    itemp = i1 * i2;
                    if (tree->gtOverflow())
                    {
                        // Same 64-bit re-evaluation trick as GT_ADD above.
                        if (tree->gtFlags & GTF_UNSIGNED)
                        {
                            if (INT64(UINT32(itemp)) != ((INT64)((UINT32)i1) * (INT64)((UINT32)i2)))
                            {
                                goto INT_OVF;
                            }
                        }
                        else
                        {
                            if (INT64(INT32(itemp)) != INT64(INT32(i1)) * INT64(INT32(i2)))
                            {
                                goto INT_OVF;
                            }
                        }
                    }
                    // For the very particular case of the "constant array index" pseudo-field, we
                    // assume that multiplication is by the field width, and preserves that field.
                    // This could obviously be made more robust by a more complicated set of annotations...
                    if ((op1->gtIntCon.gtFieldSeq != nullptr) && op1->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
                    {
                        assert(op2->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField());
                        fieldSeq = op1->gtIntCon.gtFieldSeq;
                    }
                    else if ((op2->gtIntCon.gtFieldSeq != nullptr) &&
                             op2->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
                    {
                        assert(op1->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField());
                        fieldSeq = op2->gtIntCon.gtFieldSeq;
                    }
                    i1 = itemp;
                    break;

                case GT_OR:
                    i1 |= i2;
                    break;
                case GT_XOR:
                    i1 ^= i2;
                    break;
                case GT_AND:
                    i1 &= i2;
                    break;

                case GT_LSH:
                    // Note: for 32-bit operands the shift count is taken mod 32 (masked to 5 bits).
                    i1 <<= (i2 & 0x1f);
                    break;
                case GT_RSH:
                    i1 >>= (i2 & 0x1f);
                    break;
                case GT_RSZ:
                    /* logical shift -> make it unsigned to not propagate the sign bit */
                    i1 = UINT32(i1) >> (i2 & 0x1f);
                    break;
                case GT_ROL:
                    i1 = (i1 << (i2 & 0x1f)) | (UINT32(i1) >> ((32 - i2) & 0x1f));
                    break;
                case GT_ROR:
                    i1 = (i1 << ((32 - i2) & 0x1f)) | (UINT32(i1) >> (i2 & 0x1f));
                    break;

                /* DIV and MOD can generate an INT 0 - if division by 0
                 * or overflow - when dividing MIN by -1 */

                case GT_DIV:
                case GT_MOD:
                case GT_UDIV:
                case GT_UMOD:
                    if (INT32(i2) == 0)
                    {
                        // Division by zero:
                        // We have to evaluate this expression and throw an exception
                        return tree;
                    }
                    else if ((INT32(i2) == -1) && (UINT32(i1) == 0x80000000))
                    {
                        // Overflow Division (INT32_MIN / -1):
                        // We have to evaluate this expression and throw an exception
                        return tree;
                    }

                    if (tree->gtOper == GT_DIV)
                    {
                        i1 = INT32(i1) / INT32(i2);
                    }
                    else if (tree->gtOper == GT_MOD)
                    {
                        i1 = INT32(i1) % INT32(i2);
                    }
                    else if (tree->gtOper == GT_UDIV)
                    {
                        i1 = UINT32(i1) / UINT32(i2);
                    }
                    else
                    {
                        assert(tree->gtOper == GT_UMOD);
                        i1 = UINT32(i1) % UINT32(i2);
                    }
                    break;

                default:
                    // Not a foldable integer operator.
                    return tree;
            }
13774
        /* We get here after folding to a GT_CNS_INT type
         * change the node to the new type / value and make sure the node sizes are OK */
        CNS_INT:
        FOLD_COND:

#ifdef DEBUG
            if (verbose)
            {
                printf("\nFolding operator with constant nodes into a constant:\n");
                gtDispTree(tree);
            }
#endif

#ifdef _TARGET_64BIT_
            // Some operations are performed as 64 bit instead of 32 bit so the upper 32 bits
            // need to be discarded. Since constant values are stored as ssize_t and the node
            // has TYP_INT the result needs to be sign extended rather than zero extended.
            i1 = INT32(i1);
#endif // _TARGET_64BIT_

            /* Also all conditional folding jumps here since the node hanging from
             * GT_JTRUE has to be a GT_CNS_INT - value 0 or 1 */

            // Morph the node in place into a GT_CNS_INT carrying the computed value
            // and the accumulated field sequence.
            tree->ChangeOperConst(GT_CNS_INT);
            tree->gtType              = TYP_INT;
            tree->gtIntCon.gtIconVal  = i1;
            tree->gtIntCon.gtFieldSeq = fieldSeq;
            if (vnStore != nullptr)
            {
                // Keep the value number in sync with the new constant value.
                fgValueNumberTreeConst(tree);
            }
#ifdef DEBUG
            if (verbose)
            {
                printf("Bashed to int constant:\n");
                gtDispTree(tree);
            }
#endif
            goto DONE;
13814
        /* This operation is going to cause an overflow exception. Morph into
           an overflow helper. Put a dummy constant value for code generation.

           We could remove all subsequent trees in the current basic block,
           unless this node is a child of GT_COLON

           NOTE: Since the folded value is not constant we should not change the
                 "tree" node - otherwise we confuse the logic that checks if the folding
                 was successful - instead use one of the operands, e.g. op1
         */

        LNG_OVF:
            // Don't fold overflow operations if not global morph phase.
            // The reason for this is that this optimization is replacing a gentree node
            // with another new gentree node. Say a GT_CALL(arglist) has one 'arg'
            // involving overflow arithmetic.  During assertion prop, it is possible
            // that the 'arg' could be constant folded and the result could lead to an
            // overflow.  In such a case 'arg' will get replaced with GT_COMMA node
            // but fgMorphArgs() - see the logic around "if(lateArgsComputed)" - doesn't
            // update args table. For this reason this optimization is enabled only
            // for global morphing phase.
            //
            // TODO-CQ: Once fgMorphArgs() is fixed this restriction could be removed.
            CLANG_FORMAT_COMMENT_ANCHOR;

            if (!fgGlobalMorph)
            {
                assert(tree->gtOverflow());
                return tree;
            }

            // Dummy zero of the overflowing (long) type; becomes op2 of the COMMA at OVF.
            op1 = gtNewLconNode(0);
            if (vnStore != nullptr)
            {
                op1->gtVNPair.SetBoth(vnStore->VNZeroForType(TYP_LONG));
            }
            goto OVF;

        INT_OVF:
            // Don't fold overflow operations if not global morph phase.
            // The reason for this is that this optimization is replacing a gentree node
            // with another new gentree node. Say a GT_CALL(arglist) has one 'arg'
            // involving overflow arithmetic.  During assertion prop, it is possible
            // that the 'arg' could be constant folded and the result could lead to an
            // overflow.  In such a case 'arg' will get replaced with GT_COMMA node
            // but fgMorphArgs() - see the logic around "if(lateArgsComputed)" - doesn't
            // update args table. For this reason this optimization is enabled only
            // for global morphing phase.
            //
            // TODO-CQ: Once fgMorphArgs() is fixed this restriction could be removed.

            if (!fgGlobalMorph)
            {
                assert(tree->gtOverflow());
                return tree;
            }

            // Dummy zero of the overflowing (int) type; becomes op2 of the COMMA at OVF.
            op1 = gtNewIconNode(0);
            if (vnStore != nullptr)
            {
                op1->gtVNPair.SetBoth(vnStore->VNZeroForType(TYP_INT));
            }
            goto OVF;

        OVF:
#ifdef DEBUG
            if (verbose)
            {
                printf("\nFolding binary operator with constant nodes into a comma throw:\n");
                gtDispTree(tree);
            }
#endif
            /* We will change the cast to a GT_COMMA and attach the exception helper as gtOp.gtOp1.
             * The constant expression zero becomes op2. */

            assert(tree->gtOverflow());
            assert(tree->gtOper == GT_ADD || tree->gtOper == GT_SUB || tree->gtOper == GT_CAST ||
                   tree->gtOper == GT_MUL);
            assert(op1);

            op2 = op1;
            op1 = gtNewHelperCallNode(CORINFO_HELP_OVERFLOW, TYP_VOID,
                                      gtNewArgList(gtNewIconNode(compCurBB->bbTryIndex)));

            // op1 is a call to the JIT helper that throws an Overflow exception
            // attach the ExcSet for VNF_OverflowExc(Void) to this call

            if (vnStore != nullptr)
            {
                op1->gtVNPair =
                    vnStore->VNPWithExc(ValueNumPair(ValueNumStore::VNForVoid(), ValueNumStore::VNForVoid()),
                                        vnStore->VNPExcSetSingleton(
                                            vnStore->VNPairForFunc(TYP_REF, VNF_OverflowExc, vnStore->VNPForVoid())));
            }

            // The result is COMMA(throw-helper-call, dummy-zero-constant), replacing the
            // overflowing expression while preserving its type.
            tree = gtNewOperNode(GT_COMMA, tree->gtType, op1, op2);

            return tree;
13913
        /*-------------------------------------------------------------------------
         * Fold constant LONG binary operator
         */

        case TYP_LONG:

            // No GC pointer types should be folded here...
            //
            assert(!varTypeIsGC(op1->gtType) && !varTypeIsGC(op2->gtType));

            // op1 is known to be a TYP_LONG, op2 is normally a TYP_LONG, unless we have a shift operator in which case
            // it is a TYP_INT
            //
            assert((op2->gtType == TYP_LONG) || (op2->gtType == TYP_INT));

            if (!op1->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()))
            {
                return tree;
            }

            if (!op2->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()))
            {
                return tree;
            }

            lval1 = op1->gtIntConCommon.LngValue();

            // For the shift operators we can have a op2 that is a TYP_INT and thus will be GT_CNS_INT
            if (op2->OperGet() == GT_CNS_INT)
            {
                lval2 = op2->gtIntConCommon.IconValue();
            }
            else
            {
                lval2 = op2->gtIntConCommon.LngValue();
            }

            switch (tree->gtOper)
            {
                case GT_EQ:
                    i1 = (lval1 == lval2);
                    goto FOLD_COND;
                case GT_NE:
                    i1 = (lval1 != lval2);
                    goto FOLD_COND;

                case GT_LT:
                    // GTF_UNSIGNED on the relop node selects the unsigned comparison.
                    if (tree->gtFlags & GTF_UNSIGNED)
                    {
                        i1 = (UINT64(lval1) < UINT64(lval2));
                    }
                    else
                    {
                        i1 = (lval1 < lval2);
                    }
                    goto FOLD_COND;

                case GT_LE:
                    if (tree->gtFlags & GTF_UNSIGNED)
                    {
                        i1 = (UINT64(lval1) <= UINT64(lval2));
                    }
                    else
                    {
                        i1 = (lval1 <= lval2);
                    }
                    goto FOLD_COND;

                case GT_GE:
                    if (tree->gtFlags & GTF_UNSIGNED)
                    {
                        i1 = (UINT64(lval1) >= UINT64(lval2));
                    }
                    else
                    {
                        i1 = (lval1 >= lval2);
                    }
                    goto FOLD_COND;

                case GT_GT:
                    if (tree->gtFlags & GTF_UNSIGNED)
                    {
                        i1 = (UINT64(lval1) > UINT64(lval2));
                    }
                    else
                    {
                        i1 = (lval1 > lval2);
                    }
                    goto FOLD_COND;

                case GT_ADD:
                    ltemp = lval1 + lval2;

                LNG_ADD_CHKOVF:
                    /* For the SIGNED case - If there is one positive and one negative operand, there can be no overflow
                     * If both are positive, the result has to be positive, and similarly for negatives.
                     *
                     * For the UNSIGNED case - If a UINT64 operand is bigger than the result then OVF */

                    if (tree->gtOverflow())
                    {
                        if (tree->gtFlags & GTF_UNSIGNED)
                        {
                            if ((UINT64(lval1) > UINT64(ltemp)) || (UINT64(lval2) > UINT64(ltemp)))
                            {
                                goto LNG_OVF;
                            }
                        }
                        else if (((lval1 < 0) == (lval2 < 0)) && ((lval1 < 0) != (ltemp < 0)))
                        {
                            goto LNG_OVF;
                        }
                    }
                    lval1 = ltemp;
                    break;

                case GT_SUB:
                    ltemp = lval1 - lval2;
                    if (tree->gtOverflow())
                    {
                        if (tree->gtFlags & GTF_UNSIGNED)
                        {
                            if (UINT64(lval2) > UINT64(lval1))
                            {
                                goto LNG_OVF;
                            }
                        }
                        else
                        {
                            /* If both operands are +ve or both are -ve, there can be no
                               overflow. Else use the logic for : lval1 + (-lval2) */

                            if ((lval1 < 0) != (lval2 < 0))
                            {
                                // -INT64_MIN is not representable, so negating it would
                                // not give a usable addend - it must overflow.
                                if (lval2 == INT64_MIN)
                                {
                                    goto LNG_OVF;
                                }
                                lval2 = -lval2;
                                goto LNG_ADD_CHKOVF;
                            }
                        }
                    }
                    lval1 = ltemp;
                    break;

                case GT_MUL:
                    ltemp = lval1 * lval2;

                    // lval2 != 0 guards the divisions in the overflow checks below.
                    if (tree->gtOverflow() && lval2 != 0)
                    {

                        if (tree->gtFlags & GTF_UNSIGNED)
                        {
                            UINT64 ultemp = ltemp;
                            UINT64 ulval1 = lval1;
                            UINT64 ulval2 = lval2;
                            if ((ultemp / ulval2) != ulval1)
                            {
                                goto LNG_OVF;
                            }
                        }
                        else
                        {
                            // This does a multiply and then reverses it.  This test works great except for MIN_INT *
                            //-1.  In that case we mess up the sign on ltmp.  Make sure to double check the sign.
                            // if either is 0, then no overflow
                            if (lval1 != 0) // lval2 checked above.
                            {
                                if (((lval1 < 0) == (lval2 < 0)) && (ltemp < 0))
                                {
                                    goto LNG_OVF;
                                }
                                if (((lval1 < 0) != (lval2 < 0)) && (ltemp > 0))
                                {
                                    goto LNG_OVF;
                                }

                                // TODO-Amd64-Unix: Remove the code that disables optimizations for this method when the
                                // clang
                                // optimizer is fixed and/or the method implementation is refactored in a simpler code.
                                // There is a bug in the clang-3.5 optimizer. The issue is that in release build the
                                // optimizer is mistyping (or just wrongly decides to use 32 bit operation for a corner
                                // case of MIN_LONG) the args of the (ltemp / lval2) to int (it does a 32 bit div
                                // operation instead of 64 bit.). For the case of lval1 and lval2 equal to MIN_LONG
                                // (0x8000000000000000) this results in raising a SIGFPE.
                                // Optimizations disabled for now. See compiler.h.
                                if ((ltemp / lval2) != lval1)
                                {
                                    goto LNG_OVF;
                                }
                            }
                        }
                    }

                    lval1 = ltemp;
                    break;

                case GT_OR:
                    lval1 |= lval2;
                    break;
                case GT_XOR:
                    lval1 ^= lval2;
                    break;
                case GT_AND:
                    lval1 &= lval2;
                    break;

                case GT_LSH:
                    // Note: for 64-bit operands the shift count is taken mod 64 (masked to 6 bits).
                    lval1 <<= (lval2 & 0x3f);
                    break;
                case GT_RSH:
                    lval1 >>= (lval2 & 0x3f);
                    break;
                case GT_RSZ:
                    /* logical shift -> make it unsigned to not propagate the sign bit */
                    lval1 = UINT64(lval1) >> (lval2 & 0x3f);
                    break;
                case GT_ROL:
                    lval1 = (lval1 << (lval2 & 0x3f)) | (UINT64(lval1) >> ((64 - lval2) & 0x3f));
                    break;
                case GT_ROR:
                    lval1 = (lval1 << ((64 - lval2) & 0x3f)) | (UINT64(lval1) >> (lval2 & 0x3f));
                    break;

                // Both DIV and IDIV on x86 raise an exception for min_int (and min_long) / -1.  So we preserve
                // that behavior here.
                case GT_DIV:
                    if (!lval2)
                    {
                        return tree;
                    }

                    if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
                    {
                        return tree;
                    }
                    lval1 /= lval2;
                    break;

                case GT_MOD:
                    if (!lval2)
                    {
                        return tree;
                    }
                    if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
                    {
                        return tree;
                    }
                    lval1 %= lval2;
                    break;

                case GT_UDIV:
                    if (!lval2)
                    {
                        return tree;
                    }
                    // (The MIN_LONG / -1 early-out is conservatively applied to the
                    // unsigned cases as well.)
                    if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
                    {
                        return tree;
                    }
                    lval1 = UINT64(lval1) / UINT64(lval2);
                    break;

                case GT_UMOD:
                    if (!lval2)
                    {
                        return tree;
                    }
                    if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
                    {
                        return tree;
                    }
                    lval1 = UINT64(lval1) % UINT64(lval2);
                    break;
                default:
                    // Not a foldable long operator.
                    return tree;
            }

        CNS_LONG:

            if (fieldSeq != FieldSeqStore::NotAField())
            {
                return tree;
            }

#ifdef DEBUG
            if (verbose)
            {
                printf("\nFolding long operator with constant nodes into a constant:\n");
                gtDispTree(tree);
            }
#endif
            assert((GenTree::s_gtNodeSizes[GT_CNS_NATIVELONG] == TREE_NODE_SZ_SMALL) ||
                   (tree->gtDebugFlags & GTF_DEBUG_NODE_LARGE));

            // Morph the node in place into a long constant carrying the computed value.
            tree->ChangeOperConst(GT_CNS_NATIVELONG);
            tree->gtIntConCommon.SetLngValue(lval1);
            if (vnStore != nullptr)
            {
                // Keep the value number in sync with the new constant value.
                fgValueNumberTreeConst(tree);
            }

#ifdef DEBUG
            if (verbose)
            {
                printf("Bashed to long constant:\n");
                gtDispTree(tree);
            }
#endif
            goto DONE;
14225
        /*-------------------------------------------------------------------------
         * Fold constant FLOAT or DOUBLE binary operator
         */

        case TYP_FLOAT:
        case TYP_DOUBLE:

            if (tree->gtOverflowEx())
            {
                return tree;
            }

            assert(op1->gtOper == GT_CNS_DBL);
            d1 = op1->gtDblCon.gtDconVal;

            assert(varTypeIsFloating(op2->gtType));
            assert(op2->gtOper == GT_CNS_DBL);
            d2 = op2->gtDblCon.gtDconVal;

            /* Special case - check if we have NaN operands.
             * For comparisons if not an unordered operation always return 0.
             * For unordered operations (i.e. the GTF_RELOP_NAN_UN flag is set)
             * the result is always true - return 1. */

            if (_isnan(d1) || _isnan(d2))
            {
#ifdef DEBUG
                if (verbose)
                {
                    printf("Double operator(s) is NaN\n");
                }
#endif
                if (tree->OperKind() & GTK_RELOP)
                {
                    if (tree->gtFlags & GTF_RELOP_NAN_UN)
                    {
                        /* Unordered comparison with NaN always succeeds */
                        i1 = 1;
                        goto FOLD_COND;
                    }
                    else
                    {
                        /* Normal comparison with NaN always fails */
                        i1 = 0;
                        goto FOLD_COND;
                    }
                }
            }

            switch (tree->gtOper)
            {
                case GT_EQ:
                    i1 = (d1 == d2);
                    goto FOLD_COND;
                case GT_NE:
                    i1 = (d1 != d2);
                    goto FOLD_COND;

                case GT_LT:
                    i1 = (d1 < d2);
                    goto FOLD_COND;
                case GT_LE:
                    i1 = (d1 <= d2);
                    goto FOLD_COND;
                case GT_GE:
                    i1 = (d1 >= d2);
                    goto FOLD_COND;
                case GT_GT:
                    i1 = (d1 > d2);
                    goto FOLD_COND;

                // Floating point arithmetic should be done in declared
                // precision while doing constant folding. For this reason though TYP_FLOAT
                // constants are stored as double constants, while performing float arithmetic,
                // double constants should be converted to float.  Here is an example case
                // where performing arithmetic in double precision would lead to incorrect
                // results.
                //
                // Example:
                // float a = float.MaxValue;
                // float b = a*a;   This will produce +inf in single precision and 1.1579207543382391e+077 in double
                //                  precision.
                // float c = b/b;   This will produce NaN in single precision and 1 in double precision.
                case GT_ADD:
                    if (op1->TypeGet() == TYP_FLOAT)
                    {
                        f1 = forceCastToFloat(d1);
                        f2 = forceCastToFloat(d2);
                        d1 = forceCastToFloat(f1 + f2);
                    }
                    else
                    {
                        d1 += d2;
                    }
                    break;

                case GT_SUB:
                    if (op1->TypeGet() == TYP_FLOAT)
                    {
                        f1 = forceCastToFloat(d1);
                        f2 = forceCastToFloat(d2);
                        d1 = forceCastToFloat(f1 - f2);
                    }
                    else
                    {
                        d1 -= d2;
                    }
                    break;

                case GT_MUL:
                    if (op1->TypeGet() == TYP_FLOAT)
                    {
                        f1 = forceCastToFloat(d1);
                        f2 = forceCastToFloat(d2);
                        d1 = forceCastToFloat(f1 * f2);
                    }
                    else
                    {
                        d1 *= d2;
                    }
                    break;

                case GT_DIV:
                    // Division by a zero constant is not folded; the expression is
                    // left to be evaluated at run time.
                    if (!d2)
                    {
                        return tree;
                    }
                    if (op1->TypeGet() == TYP_FLOAT)
                    {
                        f1 = forceCastToFloat(d1);
                        f2 = forceCastToFloat(d2);
                        d1 = forceCastToFloat(f1 / f2);
                    }
                    else
                    {
                        d1 /= d2;
                    }
                    break;

                default:
                    // Not a foldable floating-point operator.
                    return tree;
            }

        CNS_DOUBLE:

#ifdef DEBUG
            if (verbose)
            {
                printf("\nFolding fp operator with constant nodes into a fp constant:\n");
                gtDispTree(tree);
            }
#endif

            assert((GenTree::s_gtNodeSizes[GT_CNS_DBL] == TREE_NODE_SZ_SMALL) ||
                   (tree->gtDebugFlags & GTF_DEBUG_NODE_LARGE));

            // Morph the node in place into a double constant carrying the computed value.
            tree->ChangeOperConst(GT_CNS_DBL);
            tree->gtDblCon.gtDconVal = d1;
            if (vnStore != nullptr)
            {
                // Keep the value number in sync with the new constant value.
                fgValueNumberTreeConst(tree);
            }
#ifdef DEBUG
            if (verbose)
            {
                printf("Bashed to fp constant:\n");
                gtDispTree(tree);
            }
#endif
            goto DONE;

        default:
            /* not a foldable typ */
            return tree;
    }

//-------------------------------------------------------------------------

DONE:

    /* Make sure no side effect flags are set on this constant node */

    tree->gtFlags &= ~GTF_ALL_EFFECT;

    return tree;
}
#ifdef _PREFAST_
#pragma warning(pop)
#endif
14415
14416 //------------------------------------------------------------------------
14417 // gtNewTempAssign: Create an assignment of the given value to a temp.
14418 //
14419 // Arguments:
14420 //    tmp         - local number for a compiler temp
14421 //    val         - value to assign to the temp
14422 //    pAfterStmt  - statement to insert any additional statements after
14423 //    ilOffset    - il offset for new statements
14424 //    block       - block to insert any additional statements in
14425 //
14426 // Return Value:
14427 //    Normally a new assignment node.
14428 //    However may return a nop node if val is simply a reference to the temp.
14429 //
14430 // Notes:
14431 //    Self-assignments may be represented via NOPs.
14432 //
14433 //    May update the type of the temp, if it was previously unknown.
14434 //
14435 //    May set compFloatingPointUsed.
14436
GenTree* Compiler::gtNewTempAssign(
    unsigned tmp, GenTree* val, GenTreeStmt** pAfterStmt, IL_OFFSETX ilOffset, BasicBlock* block)
{
    // Self-assignment is a nop.
    if (val->OperGet() == GT_LCL_VAR && val->gtLclVarCommon.gtLclNum == tmp)
    {
        return gtNewNothingNode();
    }

    LclVarDsc* varDsc = lvaTable + tmp;

    // When a TYP_BYREF value is being stored into a TYP_I_IMPL temp, let
    // impBashVarAddrsToI adjust the type of the value tree.
    if (varDsc->TypeGet() == TYP_I_IMPL && val->TypeGet() == TYP_BYREF)
    {
        impBashVarAddrsToI(val);
    }

    // For a normalize-on-load local, use its declared (real) type rather
    // than the node's current type.
    var_types valTyp = val->TypeGet();
    if (val->OperGet() == GT_LCL_VAR && lvaTable[val->gtLclVar.gtLclNum].lvNormalizeOnLoad())
    {
        valTyp      = lvaGetRealType(val->gtLclVar.gtLclNum);
        val->gtType = valTyp;
    }
    var_types dstTyp = varDsc->TypeGet();

    /* If the variable's lvType is not yet set then set it here */
    if (dstTyp == TYP_UNDEF)
    {
        varDsc->lvType = dstTyp = genActualType(valTyp);
        if (varTypeIsGC(dstTyp))
        {
            varDsc->lvStructGcCount = 1;
        }
#if FEATURE_SIMD
        else if (varTypeIsSIMD(dstTyp))
        {
            varDsc->lvSIMDType = 1;
        }
#endif
    }

#ifdef DEBUG
    /* Make sure the actual types match               */
    if (genActualType(valTyp) != genActualType(dstTyp))
    {
        // Plus some other exceptions that are apparently legal:
        // 1) TYP_REF or BYREF = TYP_I_IMPL
        bool ok = false;
        if (varTypeIsGC(dstTyp) && (valTyp == TYP_I_IMPL))
        {
            ok = true;
        }
        // 2) TYP_DOUBLE = TYP_FLOAT or TYP_FLOAT = TYP_DOUBLE
        else if (varTypeIsFloating(dstTyp) && varTypeIsFloating(valTyp))
        {
            ok = true;
        }
        // 3) TYP_BYREF = TYP_REF when object stack allocation is enabled
        else if (JitConfig.JitObjectStackAllocation() && (dstTyp == TYP_BYREF) && (valTyp == TYP_REF))
        {
            ok = true;
        }

        if (!ok)
        {
            gtDispTree(val);
            assert(!"Incompatible types for gtNewTempAssign");
        }
    }
#endif

    // Floating Point assignments can be created during inlining
    // see "Zero init inlinee locals:" in fgInlinePrependStatements
    // thus we may need to set compFloatingPointUsed to true here.
    //
    if (varTypeIsFloating(dstTyp) && (compFloatingPointUsed == false))
    {
        compFloatingPointUsed = true;
    }

    /* Create the assignment node */

    GenTree* asg;
    GenTree* dest = gtNewLclvNode(tmp, dstTyp);
    dest->gtFlags |= GTF_VAR_DEF;

    // With first-class structs, we should be propagating the class handle on all non-primitive
    // struct types. We don't have a convenient way to do that for all SIMD temps, since some
    // internal trees use SIMD types that are not used by the input IL. In this case, we allow
    // a null type handle and derive the necessary information about the type from its varType.
    CORINFO_CLASS_HANDLE structHnd = gtGetStructHandleIfPresent(val);
    if (varTypeIsStruct(valTyp) && ((structHnd != NO_CLASS_HANDLE) || (varTypeIsSIMD(valTyp))))
    {
        // The struct value may be a child of a GT_COMMA.
        GenTree* valx = val->gtEffectiveVal(/*commaOnly*/ true);

        if (structHnd != NO_CLASS_HANDLE)
        {
            lvaSetStruct(tmp, structHnd, false);
        }
        else
        {
            assert(valx->gtOper != GT_OBJ);
        }
        // Mark both sides of the struct assignment as ineligible for CSE.
        dest->gtFlags |= GTF_DONT_CSE;
        valx->gtFlags |= GTF_DONT_CSE;
        asg = impAssignStruct(dest, val, structHnd, (unsigned)CHECK_SPILL_NONE, pAfterStmt, ilOffset, block);
    }
    else
    {
        asg = gtNewAssignNode(dest, val);
    }

    // In rationalized form, rewrite the GT_ASG into a store-local node.
    if (compRationalIRForm)
    {
        Rationalizer::RewriteAssignmentIntoStoreLcl(asg->AsOp());
    }

    return asg;
}
14556
14557 /*****************************************************************************
14558  *
14559  *  Create a helper call to access a COM field (iff 'assg' is non-zero this is
14560  *  an assignment and 'assg' is the new value).
14561  */
14562
GenTree* Compiler::gtNewRefCOMfield(GenTree*                objPtr,
                                    CORINFO_RESOLVED_TOKEN* pResolvedToken,
                                    CORINFO_ACCESS_FLAGS    access,
                                    CORINFO_FIELD_INFO*     pFieldInfo,
                                    var_types               lclTyp,
                                    CORINFO_CLASS_HANDLE    structType,
                                    GenTree*                assg)
{
    assert(pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER ||
           pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_ADDR_HELPER ||
           pFieldInfo->fieldAccessor == CORINFO_FIELD_STATIC_ADDR_HELPER);

    /* If we can't access it directly, we need to call a helper function */
    GenTreeArgList* args       = nullptr;
    var_types       helperType = TYP_BYREF;

    if (pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER)
    {
        if (access & CORINFO_ACCESS_SET)
        {
            assert(assg != nullptr);
            // helper needs pointer to struct, not struct itself
            if (pFieldInfo->helper == CORINFO_HELP_SETFIELDSTRUCT)
            {
                assert(structType != nullptr);
                assg = impGetStructAddr(assg, structType, (unsigned)CHECK_SPILL_ALL, true);
            }
            // Insert a cast so the new value matches the field's float/double type.
            else if (lclTyp == TYP_DOUBLE && assg->TypeGet() == TYP_FLOAT)
            {
                assg = gtNewCastNode(TYP_DOUBLE, assg, false, TYP_DOUBLE);
            }
            else if (lclTyp == TYP_FLOAT && assg->TypeGet() == TYP_DOUBLE)
            {
                assg = gtNewCastNode(TYP_FLOAT, assg, false, TYP_FLOAT);
            }

            // The new value is the last argument of the "set" helper, which returns void.
            args       = gtNewArgList(assg);
            helperType = TYP_VOID;
        }
        else if (access & CORINFO_ACCESS_GET)
        {
            helperType = lclTyp;

            // The calling convention for the helper does not take into
            // account optimization of primitive structs.
            if ((pFieldInfo->helper == CORINFO_HELP_GETFIELDSTRUCT) && !varTypeIsStruct(lclTyp))
            {
                helperType = TYP_STRUCT;
            }
        }
    }

    // The struct-field helpers also take the struct's class handle as an argument.
    if (pFieldInfo->helper == CORINFO_HELP_GETFIELDSTRUCT || pFieldInfo->helper == CORINFO_HELP_SETFIELDSTRUCT)
    {
        assert(pFieldInfo->structType != nullptr);
        args = gtNewListNode(gtNewIconEmbClsHndNode(pFieldInfo->structType), args);
    }

    GenTree* fieldHnd = impTokenToHandle(pResolvedToken);
    if (fieldHnd == nullptr)
    { // compDonotInline()
        return nullptr;
    }

    args = gtNewListNode(fieldHnd, args);

    // If it's a static field, we shouldn't have an object node
    // If it's an instance field, we have an object node
    assert((pFieldInfo->fieldAccessor != CORINFO_FIELD_STATIC_ADDR_HELPER) ^ (objPtr == nullptr));

    if (objPtr != nullptr)
    {
        args = gtNewListNode(objPtr, args);
    }

    GenTreeCall* call = gtNewHelperCallNode(pFieldInfo->helper, genActualType(helperType), args);

#if FEATURE_MULTIREG_RET
    if (varTypeIsStruct(call))
    {
        // Initialize Return type descriptor of call node.
        ReturnTypeDesc* retTypeDesc = call->GetReturnTypeDesc();
        retTypeDesc->InitializeStructReturnType(this, structType);
    }
#endif // FEATURE_MULTIREG_RET

    GenTree* result = call;

    // Post-process the helper's return value as required by the access kind.
    if (pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER)
    {
        if (access & CORINFO_ACCESS_GET)
        {
            if (pFieldInfo->helper == CORINFO_HELP_GETFIELDSTRUCT)
            {
                if (!varTypeIsStruct(lclTyp))
                {
                    // get the result as primitive type
                    result = impGetStructAddr(result, structType, (unsigned)CHECK_SPILL_ALL, true);
                    result = gtNewOperNode(GT_IND, lclTyp, result);
                }
            }
            else if (varTypeIsIntegral(lclTyp) && genTypeSize(lclTyp) < genTypeSize(TYP_INT))
            {
                // The helper does not extend the small return types.
                result = gtNewCastNode(genActualType(lclTyp), result, false, lclTyp);
            }
        }
    }
    else
    {
        // OK, now do the indirection
        if (access & CORINFO_ACCESS_GET)
        {
            if (varTypeIsStruct(lclTyp))
            {
                result = gtNewObjNode(structType, result);
            }
            else
            {
                result = gtNewOperNode(GT_IND, lclTyp, result);
            }
            result->gtFlags |= (GTF_EXCEPT | GTF_GLOB_REF);
        }
        else if (access & CORINFO_ACCESS_SET)
        {
            if (varTypeIsStruct(lclTyp))
            {
                result = impAssignStructPtr(result, assg, structType, (unsigned)CHECK_SPILL_ALL);
            }
            else
            {
                result = gtNewOperNode(GT_IND, lclTyp, result);
                result->gtFlags |= (GTF_EXCEPT | GTF_GLOB_REF | GTF_IND_TGTANYWHERE);
                result = gtNewAssignNode(result, assg);
            }
        }
    }

    return result;
}
14703
14704 /*****************************************************************************
14705  *
14706  *  Return true if the given node (excluding children trees) contains side effects.
14707  *  Note that it does not recurse, and children need to be handled separately.
14708  *  It may return false even if the node has GTF_SIDE_EFFECT (because of its children).
14709  *
14710  *  Similar to OperMayThrow() (but handles GT_CALLs specially), but considers
14711  *  assignments too.
14712  */
14713
bool Compiler::gtNodeHasSideEffects(GenTree* tree, unsigned flags)
{
    if (flags & GTF_ASG)
    {
        // TODO-Cleanup: This only checks for GT_ASG but according to OperRequiresAsgFlag there
        // are many more opers that are considered to have an assignment side effect: atomic ops
        // (GT_CMPXCHG & co.), GT_MEMORYBARRIER (not classified as an atomic op) and HW intrinsic
        // memory stores. Atomic ops have special handling in gtExtractSideEffList but the others
        // will simply be dropped if they are ever subject to an "extract side effects" operation.
        // It is possible that the reason no bugs have yet been observed in this area is that the
        // other nodes are likely to always be tree roots.
        if (tree->OperIs(GT_ASG))
        {
            return true;
        }
    }

    // Are there only GTF_CALL side effects remaining? (and no other side effect kinds)
    if (flags & GTF_CALL)
    {
        if (tree->OperGet() == GT_CALL)
        {
            GenTreeCall* const call             = tree->AsCall();
            const bool         ignoreExceptions = (flags & GTF_EXCEPT) == 0;
            const bool         ignoreCctors     = (flags & GTF_IS_IN_CSE) != 0; // We can CSE helpers that run cctors.
            if (!call->HasSideEffects(this, ignoreExceptions, ignoreCctors))
            {
                // If this call is otherwise side effect free, check its arguments.
                for (GenTreeArgList* args = call->gtCallArgs; args != nullptr; args = args->Rest())
                {
                    if (gtTreeHasSideEffects(args->Current(), flags))
                    {
                        return true;
                    }
                }
                // I'm a little worried that args that assign to temps that are late args will look like
                // side effects...but better to be conservative for now.
                for (GenTreeArgList* args = call->gtCallLateArgs; args != nullptr; args = args->Rest())
                {
                    if (gtTreeHasSideEffects(args->Current(), flags))
                    {
                        return true;
                    }
                }

                // Otherwise:
                return false;
            }

            // Otherwise the GT_CALL is considered to have side-effects.
            return true;
        }
    }

    // If the caller cares about exceptions, any node that may throw is a side effect.
    if (flags & GTF_EXCEPT)
    {
        if (tree->OperMayThrow(this))
        {
            return true;
        }
    }

    // Expressions declared as CSE by (e.g.) hoisting code are considered to have relevant side
    // effects (if we care about GTF_MAKE_CSE).
    if ((flags & GTF_MAKE_CSE) && (tree->gtFlags & GTF_MAKE_CSE))
    {
        return true;
    }

    return false;
}
14785
14786 /*****************************************************************************
14787  * Returns true if the expr tree has any side effects.
14788  */
14789
14790 bool Compiler::gtTreeHasSideEffects(GenTree* tree, unsigned flags /* = GTF_SIDE_EFFECT*/)
14791 {
14792     // These are the side effect flags that we care about for this tree
14793     unsigned sideEffectFlags = tree->gtFlags & flags;
14794
14795     // Does this tree have any Side-effect flags set that we care about?
14796     if (sideEffectFlags == 0)
14797     {
14798         // no it doesn't..
14799         return false;
14800     }
14801
14802     if (sideEffectFlags == GTF_CALL)
14803     {
14804         if (tree->OperGet() == GT_CALL)
14805         {
14806             // Generally all trees that contain GT_CALL nodes are considered to have side-effects.
14807             //
14808             if (tree->gtCall.gtCallType == CT_HELPER)
14809             {
14810                 // If this node is a helper call we may not care about the side-effects.
14811                 // Note that gtNodeHasSideEffects checks the side effects of the helper itself
14812                 // as well as the side effects of its arguments.
14813                 return gtNodeHasSideEffects(tree, flags);
14814             }
14815         }
14816         else if (tree->OperGet() == GT_INTRINSIC)
14817         {
14818             if (gtNodeHasSideEffects(tree, flags))
14819             {
14820                 return true;
14821             }
14822
14823             if (gtNodeHasSideEffects(tree->gtOp.gtOp1, flags))
14824             {
14825                 return true;
14826             }
14827
14828             if ((tree->gtOp.gtOp2 != nullptr) && gtNodeHasSideEffects(tree->gtOp.gtOp2, flags))
14829             {
14830                 return true;
14831             }
14832
14833             return false;
14834         }
14835     }
14836
14837     return true;
14838 }
14839
14840 GenTree* Compiler::gtBuildCommaList(GenTree* list, GenTree* expr)
14841 {
14842     // 'list' starts off as null,
14843     //        and when it is null we haven't started the list yet.
14844     //
14845     if (list != nullptr)
14846     {
14847         // Create a GT_COMMA that appends 'expr' in front of the remaining set of expressions in (*list)
14848         GenTree* result = gtNewOperNode(GT_COMMA, TYP_VOID, expr, list);
14849
14850         // Set the flags in the comma node
14851         result->gtFlags |= (list->gtFlags & GTF_ALL_EFFECT);
14852         result->gtFlags |= (expr->gtFlags & GTF_ALL_EFFECT);
14853
14854         // 'list' and 'expr' should have valuenumbers defined for both or for neither one (unless we are remorphing,
14855         // in which case a prior transform involving either node may have discarded or otherwise invalidated the value
14856         // numbers).
14857         assert((list->gtVNPair.BothDefined() == expr->gtVNPair.BothDefined()) || !fgGlobalMorph);
14858
14859         // Set the ValueNumber 'gtVNPair' for the new GT_COMMA node
14860         //
14861         if (list->gtVNPair.BothDefined() && expr->gtVNPair.BothDefined())
14862         {
14863             // The result of a GT_COMMA node is op2, the normal value number is op2vnp
14864             // But we also need to include the union of side effects from op1 and op2.
14865             // we compute this value into exceptions_vnp.
14866             ValueNumPair op1vnp;
14867             ValueNumPair op1Xvnp = ValueNumStore::VNPForEmptyExcSet();
14868             ValueNumPair op2vnp;
14869             ValueNumPair op2Xvnp = ValueNumStore::VNPForEmptyExcSet();
14870
14871             vnStore->VNPUnpackExc(expr->gtVNPair, &op1vnp, &op1Xvnp);
14872             vnStore->VNPUnpackExc(list->gtVNPair, &op2vnp, &op2Xvnp);
14873
14874             ValueNumPair exceptions_vnp = ValueNumStore::VNPForEmptyExcSet();
14875
14876             exceptions_vnp = vnStore->VNPExcSetUnion(exceptions_vnp, op1Xvnp);
14877             exceptions_vnp = vnStore->VNPExcSetUnion(exceptions_vnp, op2Xvnp);
14878
14879             result->gtVNPair = vnStore->VNPWithExc(op2vnp, exceptions_vnp);
14880         }
14881
14882         return result;
14883     }
14884     else
14885     {
14886         // The 'expr' will start the list of expressions
14887         return expr;
14888     }
14889 }
14890
14891 //------------------------------------------------------------------------
14892 // gtExtractSideEffList: Extracts side effects from the given expression.
14893 //
14894 // Arguments:
14895 //    expr       - the expression tree to extract side effects from
14896 //    pList      - pointer to a (possibly null) GT_COMMA list that
14897 //                 will contain the extracted side effects
14898 //    flags      - side effect flags to be considered
14899 //    ignoreRoot - ignore side effects on the expression root node
14900 //
14901 // Notes:
14902 //    Side effects are prepended to the GT_COMMA list such that op1 of
14903 //    each comma node holds the side effect tree and op2 points to the
14904 //    next comma node. The original side effect execution order is preserved.
14905 //
void Compiler::gtExtractSideEffList(GenTree*  expr,
                                    GenTree** pList,
                                    unsigned  flags /* = GTF_SIDE_EFFECT*/,
                                    bool      ignoreRoot /* = false */)
{
    // Visitor that collects the side effect nodes of a tree in execution order.
    class SideEffectExtractor final : public GenTreeVisitor<SideEffectExtractor>
    {
    public:
        const unsigned       m_flags;       // side effect flags the caller cares about
        ArrayStack<GenTree*> m_sideEffects; // collected side effect nodes, in execution order

        enum
        {
            DoPreOrder        = true,
            UseExecutionOrder = true
        };

        SideEffectExtractor(Compiler* compiler, unsigned flags)
            : GenTreeVisitor(compiler), m_flags(flags), m_sideEffects(compiler->getAllocator(CMK_SideEffects))
        {
        }

        fgWalkResult PreOrderVisit(GenTree** use, GenTree* user)
        {
            GenTree* node = *use;

            bool treeHasSideEffects = m_compiler->gtTreeHasSideEffects(node, m_flags);

            if (treeHasSideEffects)
            {
                if (m_compiler->gtNodeHasSideEffects(node, m_flags))
                {
                    // The node itself is a side effect: keep its entire subtree.
                    m_sideEffects.Push(node);
                    return Compiler::WALK_SKIP_SUBTREES;
                }

                // TODO-Cleanup: These have GTF_ASG set but for some reason gtNodeHasSideEffects ignores
                // them. See the related gtNodeHasSideEffects comment as well.
                // Also, these nodes must always be preserved, no matter what side effect flags are passed
                // in. But then it should never be the case that gtExtractSideEffList gets called without
                // specifying GTF_ASG so there doesn't seem to be any reason to be inconsistent with
                // gtNodeHasSideEffects and make this check unconditionally.
                if (node->OperIsAtomicOp())
                {
                    m_sideEffects.Push(node);
                    return Compiler::WALK_SKIP_SUBTREES;
                }

                if ((m_flags & GTF_EXCEPT) != 0)
                {
                    // Special case - GT_ADDR of GT_IND nodes of TYP_STRUCT have to be kept together.
                    if (node->OperIs(GT_ADDR) && node->gtGetOp1()->OperIsIndir() &&
                        (node->gtGetOp1()->TypeGet() == TYP_STRUCT))
                    {
#ifdef DEBUG
                        if (m_compiler->verbose)
                        {
                            printf("Keep the GT_ADDR and GT_IND together:\n");
                        }
#endif
                        m_sideEffects.Push(node);
                        return Compiler::WALK_SKIP_SUBTREES;
                    }
                }

                // Generally all GT_CALL nodes are considered to have side-effects.
                // So if we get here it must be a helper call that we decided it does
                // not have side effects that we needed to keep.
                assert(!node->OperIs(GT_CALL) || (node->AsCall()->gtCallType == CT_HELPER));
            }

            if ((m_flags & GTF_IS_IN_CSE) != 0)
            {
                // If we're doing CSE then we also need to unmark CSE nodes. This will fail for CSE defs,
                // those need to be extracted as if they're side effects.
                if (!UnmarkCSE(node))
                {
                    m_sideEffects.Push(node);
                    return Compiler::WALK_SKIP_SUBTREES;
                }

                // The existence of CSE defs and uses is not propagated up the tree like side
                // effects are. We need to continue visiting the tree as if it has side effects.
                treeHasSideEffects = true;
            }

            return treeHasSideEffects ? Compiler::WALK_CONTINUE : Compiler::WALK_SKIP_SUBTREES;
        }

    private:
        // UnmarkCSE: try to clear the CSE info of 'node'.
        // Returns true if the CSE info was cleared; false if the node is a
        // CSE def that must be preserved (extracted as if a side effect).
        bool UnmarkCSE(GenTree* node)
        {
            assert(m_compiler->optValnumCSE_phase);

            if (m_compiler->optUnmarkCSE(node))
            {
                // The call to optUnmarkCSE(node) should have cleared any CSE info.
                assert(!IS_CSE_INDEX(node->gtCSEnum));
                return true;
            }
            else
            {
                assert(IS_CSE_DEF(node->gtCSEnum));
#ifdef DEBUG
                if (m_compiler->verbose)
                {
                    printf("Preserving the CSE def #%02d at ", GET_CSE_INDEX(node->gtCSEnum));
                    m_compiler->printTreeID(node);
                }
#endif
                return false;
            }
        }
    };

    assert(!expr->OperIs(GT_STMT));

    SideEffectExtractor extractor(this, flags);

    if (ignoreRoot)
    {
        // Skip the root node itself: walk each of its operands instead.
        for (GenTree* op : expr->Operands())
        {
            extractor.WalkTree(&op, nullptr);
        }
    }
    else
    {
        extractor.WalkTree(&expr, nullptr);
    }

    GenTree* list = *pList;

    // The extractor returns side effects in execution order but gtBuildCommaList prepends
    // to the comma-based side effect list so we have to build the list in reverse order.
    // This is also why the list cannot be built while traversing the tree.
    // The number of side effects is usually small (<= 4), less than the ArrayStack's
    // built-in size, so memory allocation is avoided.
    while (!extractor.m_sideEffects.Empty())
    {
        list = gtBuildCommaList(list, extractor.m_sideEffects.Pop());
    }

    *pList = list;
}
15051
15052 /*****************************************************************************
15053  *
15054  *  For debugging only - displays a tree node list and makes sure all the
15055  *  links are correctly set.
15056  */
15057
15058 #ifdef DEBUG
15059
15060 void dispNodeList(GenTree* list, bool verbose)
15061 {
15062     GenTree* last = nullptr;
15063     GenTree* next;
15064
15065     if (!list)
15066     {
15067         return;
15068     }
15069
15070     for (;;)
15071     {
15072         next = list->gtNext;
15073
15074         if (verbose)
15075         {
15076             printf("%08X -> %08X -> %08X\n", last, list, next);
15077         }
15078
15079         assert(!last || last->gtNext == list);
15080
15081         assert(next == nullptr || next->gtPrev == list);
15082
15083         if (!next)
15084         {
15085             break;
15086         }
15087
15088         last = list;
15089         list = next;
15090     }
15091     printf(""); // null string means flush
15092 }
15093
15094 /*****************************************************************************
15095  * Callback to assert that the nodes of a qmark-colon subtree are marked
15096  */
15097
15098 /* static */
15099 Compiler::fgWalkResult Compiler::gtAssertColonCond(GenTree** pTree, fgWalkData* data)
15100 {
15101     assert(data->pCallbackData == nullptr);
15102
15103     assert((*pTree)->gtFlags & GTF_COLON_COND);
15104
15105     return WALK_CONTINUE;
15106 }
15107 #endif // DEBUG
15108
15109 /*****************************************************************************
15110  * Callback to mark the nodes of a qmark-colon subtree that are conditionally
15111  * executed.
15112  */
15113
15114 /* static */
15115 Compiler::fgWalkResult Compiler::gtMarkColonCond(GenTree** pTree, fgWalkData* data)
15116 {
15117     assert(data->pCallbackData == nullptr);
15118
15119     (*pTree)->gtFlags |= GTF_COLON_COND;
15120
15121     return WALK_CONTINUE;
15122 }
15123
15124 /*****************************************************************************
15125  * Callback to clear the conditionally executed flags of nodes that no longer
15126    will be conditionally executed. Note that when we find another colon we must
15127    stop, as the nodes below this one WILL be conditionally executed. This callback
15128    is called when folding a qmark condition (ie the condition is constant).
15129  */
15130
15131 /* static */
15132 Compiler::fgWalkResult Compiler::gtClearColonCond(GenTree** pTree, fgWalkData* data)
15133 {
15134     GenTree* tree = *pTree;
15135
15136     assert(data->pCallbackData == nullptr);
15137
15138     if (tree->OperGet() == GT_COLON)
15139     {
15140         // Nodes below this will be conditionally executed.
15141         return WALK_SKIP_SUBTREES;
15142     }
15143
15144     tree->gtFlags &= ~GTF_COLON_COND;
15145     return WALK_CONTINUE;
15146 }
15147
// FindLinkData: callback payload used by gtFindLinkCB/gtFindLink.
struct FindLinkData
{
    GenTree*  nodeToFind; // the node whose use (parent link) we are searching for
    GenTree** result;     // on success, the address of the pointer to 'nodeToFind'
};
15153
15154 /*****************************************************************************
15155  *
15156  *  Callback used by the tree walker to implement fgFindLink()
15157  */
15158 static Compiler::fgWalkResult gtFindLinkCB(GenTree** pTree, Compiler::fgWalkData* cbData)
15159 {
15160     FindLinkData* data = (FindLinkData*)cbData->pCallbackData;
15161     if (*pTree == data->nodeToFind)
15162     {
15163         data->result = pTree;
15164         return Compiler::WALK_ABORT;
15165     }
15166
15167     return Compiler::WALK_CONTINUE;
15168 }
15169
15170 GenTree** Compiler::gtFindLink(GenTreeStmt* stmt, GenTree* node)
15171 {
15172     FindLinkData data = {node, nullptr};
15173
15174     fgWalkResult result = fgWalkTreePre(&stmt->gtStmtExpr, gtFindLinkCB, &data);
15175
15176     if (result == WALK_ABORT)
15177     {
15178         assert(data.nodeToFind == *data.result);
15179         return data.result;
15180     }
15181     else
15182     {
15183         return nullptr;
15184     }
15185 }
15186
15187 /*****************************************************************************
15188  *
15189  *  Callback that checks if a tree node has oper type GT_CATCH_ARG
15190  */
15191
15192 static Compiler::fgWalkResult gtFindCatchArg(GenTree** pTree, Compiler::fgWalkData* /* data */)
15193 {
15194     return ((*pTree)->OperGet() == GT_CATCH_ARG) ? Compiler::WALK_ABORT : Compiler::WALK_CONTINUE;
15195 }
15196
15197 /*****************************************************************************/
15198 bool Compiler::gtHasCatchArg(GenTree* tree)
15199 {
15200     if (((tree->gtFlags & GTF_ORDER_SIDEEFF) != 0) && (fgWalkTreePre(&tree, gtFindCatchArg) == WALK_ABORT))
15201     {
15202         return true;
15203     }
15204     return false;
15205 }
15206
15207 //------------------------------------------------------------------------
15208 // gtHasCallOnStack:
15209 //
15210 // Arguments:
15211 //    parentStack: a context (stack of parent nodes)
15212 //
15213 // Return Value:
15214 //     returns true if any of the parent nodes are a GT_CALL
15215 //
15216 // Assumptions:
15217 //    We have a stack of parent nodes. This generally requires that
15218 //    we are performing a recursive tree walk using struct fgWalkData
15219 //
15220 //------------------------------------------------------------------------
15221 /* static */ bool Compiler::gtHasCallOnStack(GenTreeStack* parentStack)
15222 {
15223     for (int i = 0; i < parentStack->Height(); i++)
15224     {
15225         GenTree* node = parentStack->Index(i);
15226         if (node->OperGet() == GT_CALL)
15227         {
15228             return true;
15229         }
15230     }
15231     return false;
15232 }
15233
15234 //------------------------------------------------------------------------
15235 // gtGetTypeProducerKind: determine if a tree produces a runtime type, and
15236 //    if so, how.
15237 //
15238 // Arguments:
15239 //    tree - tree to examine
15240 //
15241 // Return Value:
15242 //    TypeProducerKind for the tree.
15243 //
15244 // Notes:
15245 //    Checks to see if this tree returns a RuntimeType value, and if so,
15246 //    how that value is determined.
15247 //
15248 //    Currently handles these cases
15249 //    1) The result of Object::GetType
15250 //    2) The result of typeof(...)
15251 //    3) A null reference
15252 //    4) Tree is otherwise known to have type RuntimeType
15253 //
15254 //    The null reference case is surprisingly common because operator
15255 //    overloading turns the otherwise innocuous
15256 //
15257 //        Type t = ....;
15258 //        if (t == null)
15259 //
15260 //    into a method call.
15261
15262 Compiler::TypeProducerKind Compiler::gtGetTypeProducerKind(GenTree* tree)
15263 {
15264     if (tree->gtOper == GT_CALL)
15265     {
15266         if (tree->gtCall.gtCallType == CT_HELPER)
15267         {
15268             if (gtIsTypeHandleToRuntimeTypeHelper(tree->AsCall()))
15269             {
15270                 return TPK_Handle;
15271             }
15272         }
15273         else if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_SPECIAL_INTRINSIC)
15274         {
15275             if (info.compCompHnd->getIntrinsicID(tree->gtCall.gtCallMethHnd) == CORINFO_INTRINSIC_Object_GetType)
15276             {
15277                 return TPK_GetType;
15278             }
15279         }
15280     }
15281     else if ((tree->gtOper == GT_INTRINSIC) && (tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Object_GetType))
15282     {
15283         return TPK_GetType;
15284     }
15285     else if ((tree->gtOper == GT_CNS_INT) && (tree->gtIntCon.gtIconVal == 0))
15286     {
15287         return TPK_Null;
15288     }
15289     else
15290     {
15291         bool                 isExact   = false;
15292         bool                 isNonNull = false;
15293         CORINFO_CLASS_HANDLE clsHnd    = gtGetClassHandle(tree, &isExact, &isNonNull);
15294
15295         if (clsHnd != NO_CLASS_HANDLE && clsHnd == info.compCompHnd->getBuiltinClass(CLASSID_RUNTIME_TYPE))
15296         {
15297             return TPK_Other;
15298         }
15299     }
15300     return TPK_Unknown;
15301 }
15302
15303 //------------------------------------------------------------------------
15304 // gtIsTypeHandleToRuntimeTypeHelperCall -- see if tree is constructing
15305 //    a RuntimeType from a handle
15306 //
15307 // Arguments:
15308 //    tree - tree to examine
15309 //
15310 // Return Value:
15311 //    True if so
15312
15313 bool Compiler::gtIsTypeHandleToRuntimeTypeHelper(GenTreeCall* call)
15314 {
15315     return call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE) ||
15316            call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE_MAYBENULL);
15317 }
15318
15319 //------------------------------------------------------------------------
15320 // gtIsTypeHandleToRuntimeTypeHandleHelperCall -- see if tree is constructing
15321 //    a RuntimeTypeHandle from a handle
15322 //
15323 // Arguments:
15324 //    tree - tree to examine
15325 //    pHelper - optional pointer to a variable that receives the type of the helper
15326 //
15327 // Return Value:
15328 //    True if so
15329
15330 bool Compiler::gtIsTypeHandleToRuntimeTypeHandleHelper(GenTreeCall* call, CorInfoHelpFunc* pHelper)
15331 {
15332     CorInfoHelpFunc helper = CORINFO_HELP_UNDEF;
15333
15334     if (call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPEHANDLE))
15335     {
15336         helper = CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPEHANDLE;
15337     }
15338     else if (call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPEHANDLE_MAYBENULL))
15339     {
15340         helper = CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPEHANDLE_MAYBENULL;
15341     }
15342
15343     if (pHelper != nullptr)
15344     {
15345         *pHelper = helper;
15346     }
15347
15348     return helper != CORINFO_HELP_UNDEF;
15349 }
15350
15351 bool Compiler::gtIsActiveCSE_Candidate(GenTree* tree)
15352 {
15353     return (optValnumCSE_phase && IS_CSE_INDEX(tree->gtCSEnum));
15354 }
15355
/*****************************************************************************/

// ComplexityStruct: callback payload for gtComplexityExceeds() and its
// tree-walk callback ComplexityExceedsWalker. Counts visited nodes and
// holds the limit at which the walk should abort.
struct ComplexityStruct
{
    unsigned m_numNodes;  // number of nodes visited so far
    unsigned m_nodeLimit; // abort the walk once the count exceeds this limit
    ComplexityStruct(unsigned nodeLimit) : m_numNodes(0), m_nodeLimit(nodeLimit)
    {
    }
};
15366
15367 static Compiler::fgWalkResult ComplexityExceedsWalker(GenTree** pTree, Compiler::fgWalkData* data)
15368 {
15369     ComplexityStruct* pComplexity = (ComplexityStruct*)data->pCallbackData;
15370     if (++pComplexity->m_numNodes > pComplexity->m_nodeLimit)
15371     {
15372         return Compiler::WALK_ABORT;
15373     }
15374     else
15375     {
15376         return Compiler::WALK_CONTINUE;
15377     }
15378 }
15379
15380 bool Compiler::gtComplexityExceeds(GenTree** tree, unsigned limit)
15381 {
15382     ComplexityStruct complexity(limit);
15383     if (fgWalkTreePre(tree, &ComplexityExceedsWalker, &complexity) == WALK_ABORT)
15384     {
15385         return true;
15386     }
15387     else
15388     {
15389         return false;
15390     }
15391 }
15392
15393 bool GenTree::IsPhiNode()
15394 {
15395     return (OperGet() == GT_PHI_ARG) || (OperGet() == GT_PHI) || IsPhiDefn();
15396 }
15397
// IsPhiDefn: Check whether this node is a PHI definition, i.e. either a
// GT_ASG whose RHS is a GT_PHI (tree form), or a GT_STORE_LCL_VAR whose
// source operand is a GT_PHI (LIR form).
bool GenTree::IsPhiDefn()
{
    bool res = ((OperGet() == GT_ASG) && (gtOp.gtOp2 != nullptr) && (gtOp.gtOp2->OperGet() == GT_PHI)) ||
               ((OperGet() == GT_STORE_LCL_VAR) && (gtOp.gtOp1 != nullptr) && (gtOp.gtOp1->OperGet() == GT_PHI));
    // In the GT_ASG form, the destination of a PHI definition is expected to be a GT_LCL_VAR.
    assert(!res || OperGet() == GT_STORE_LCL_VAR || gtOp.gtOp1->OperGet() == GT_LCL_VAR);
    return res;
}
15405
15406 bool GenTree::IsPhiDefnStmt()
15407 {
15408     if (OperGet() != GT_STMT)
15409     {
15410         return false;
15411     }
15412     GenTree* asg = gtStmt.gtStmtExpr;
15413     return asg->IsPhiDefn();
15414 }
15415
15416 // IsPartialLclFld: Check for a GT_LCL_FLD whose type is a different size than the lclVar.
15417 //
15418 // Arguments:
15419 //    comp      - the Compiler object.
15420 //
15421 // Return Value:
15422 //    Returns "true" iff 'this' is a GT_LCL_FLD or GT_STORE_LCL_FLD on which the type
15423 //    is not the same size as the type of the GT_LCL_VAR
15424
15425 bool GenTree::IsPartialLclFld(Compiler* comp)
15426 {
15427     return ((gtOper == GT_LCL_FLD) &&
15428             (comp->lvaTable[this->gtLclVarCommon.gtLclNum].lvExactSize != genTypeSize(gtType)));
15429 }
15430
//------------------------------------------------------------------------
// DefinesLocal: Determine whether this tree defines (stores to) a local.
//
// Arguments:
//    comp        - the Compiler object
//    pLclVarTree - [out] set to the local var/fld node being defined
//    pIsEntire   - [out, optional] set to true if the store covers the entire
//                  local, false if it covers only part of it; may be nullptr
//                  when the caller does not care
//
// Return Value:
//    true if this node is a GT_ASG or block store whose destination is (or is
//    based on the address of) a local; the out parameters are then set.
//
bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire)
{
    GenTreeBlk* blkNode = nullptr;
    if (OperIs(GT_ASG))
    {
        if (gtOp.gtOp1->IsLocal())
        {
            // Direct store to a GT_LCL_VAR/GT_LCL_FLD destination.
            GenTreeLclVarCommon* lclVarTree = gtOp.gtOp1->AsLclVarCommon();
            *pLclVarTree                    = lclVarTree;
            if (pIsEntire != nullptr)
            {
                // A partial GT_LCL_FLD store does not define the whole local.
                if (lclVarTree->IsPartialLclFld(comp))
                {
                    *pIsEntire = false;
                }
                else
                {
                    *pIsEntire = true;
                }
            }
            return true;
        }
        else if (gtOp.gtOp1->OperGet() == GT_IND)
        {
            // Indirect store: the address operand may be based on a local.
            GenTree* indArg = gtOp.gtOp1->gtOp.gtOp1;
            return indArg->DefinesLocalAddr(comp, genTypeSize(gtOp.gtOp1->TypeGet()), pLclVarTree, pIsEntire);
        }
        else if (gtOp.gtOp1->OperIsBlk())
        {
            blkNode = gtOp.gtOp1->AsBlk();
        }
    }
    else if (OperIsBlk())
    {
        blkNode = this->AsBlk();
    }
    if (blkNode != nullptr)
    {
        // Block store: resolve the destination address and the store width.
        GenTree* destAddr = blkNode->Addr();
        unsigned width    = blkNode->gtBlkSize;
        // Do we care about whether this assigns the entire variable?
        if (pIsEntire != nullptr && width == 0)
        {
            assert(blkNode->gtOper == GT_DYN_BLK);
            GenTree* blockWidth = blkNode->AsDynBlk()->gtDynamicSize;
            if (blockWidth->IsCnsIntOrI())
            {
                if (blockWidth->IsIconHandle())
                {
                    // If it's a handle, it must be a class handle.  We only create such block operations
                    // for initialization of struct types, so the type of the argument(s) will match this
                    // type, by construction, and be "entire".
                    assert(blockWidth->IsIconHandle(GTF_ICON_CLASS_HDL));
                    width = comp->info.compCompHnd->getClassSize(
                        CORINFO_CLASS_HANDLE(blockWidth->gtIntConCommon.IconValue()));
                }
                else
                {
                    ssize_t swidth = blockWidth->AsIntConCommon()->IconValue();
                    assert(swidth >= 0);
                    // cpblk of size zero exists in the wild (in yacc-generated code in SQL) and is valid IL.
                    if (swidth == 0)
                    {
                        return false;
                    }
                    width = unsigned(swidth);
                }
            }
        }
        return destAddr->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
    }
    // Otherwise...
    return false;
}
15505
//------------------------------------------------------------------------
// DefinesLocalAddr: Determine whether this tree computes a value based on the
//    address of a local; if so, report which local (and, optionally, whether a
//    store of 'width' bytes through that address would define the entire local).
//
// Arguments:
//    comp        - the Compiler object
//    width       - the width in bytes of the prospective store through this address
//    pLclVarTree - [out] set to the local node whose address is involved
//    pIsEntire   - [out, optional] set to whether 'width' covers the whole local
//
// Return Value:
//    true if this tree's value is based on the address of a local.
//
bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire)
{
    if (OperGet() == GT_ADDR || OperGet() == GT_LCL_VAR_ADDR)
    {
        GenTree* addrArg = this;
        if (OperGet() == GT_ADDR)
        {
            addrArg = gtOp.gtOp1;
        }

        if (addrArg->IsLocal() || addrArg->OperIsLocalAddr())
        {
            GenTreeLclVarCommon* addrArgLcl = addrArg->AsLclVarCommon();
            *pLclVarTree                    = addrArgLcl;
            if (pIsEntire != nullptr)
            {
                unsigned lclOffset = 0;
                if (addrArg->OperIsLocalField())
                {
                    lclOffset = addrArg->gtLclFld.gtLclOffs;
                }

                if (lclOffset != 0)
                {
                    // We aren't updating the bytes at [0..lclOffset-1] so *pIsEntire should be set to false
                    *pIsEntire = false;
                }
                else
                {
                    unsigned lclNum   = addrArgLcl->GetLclNum();
                    unsigned varWidth = comp->lvaLclExactSize(lclNum);
                    if (comp->lvaTable[lclNum].lvNormalizeOnStore())
                    {
                        // It's normalize on store, so use the full storage width -- writing to low bytes won't
                        // necessarily yield a normalized value.
                        varWidth = genTypeStSz(var_types(comp->lvaTable[lclNum].lvType)) * sizeof(int);
                    }
                    *pIsEntire = (varWidth == width);
                }
            }
            return true;
        }
        else if (addrArg->OperGet() == GT_IND)
        {
            // A GT_ADDR of a GT_IND can both be optimized away, recurse using the child of the GT_IND
            return addrArg->gtOp.gtOp1->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
        }
    }
    else if (OperGet() == GT_ADD)
    {
        if (gtOp.gtOp1->IsCnsIntOrI())
        {
            // If we just adding a zero then we allow an IsEntire match against width
            //  otherwise we change width to zero to disallow an IsEntire Match
            return gtOp.gtOp2->DefinesLocalAddr(comp, gtOp.gtOp1->IsIntegralConst(0) ? width : 0, pLclVarTree,
                                                pIsEntire);
        }
        else if (gtOp.gtOp2->IsCnsIntOrI())
        {
            // If we just adding a zero then we allow an IsEntire match against width
            //  otherwise we change width to zero to disallow an IsEntire Match
            return gtOp.gtOp1->DefinesLocalAddr(comp, gtOp.gtOp2->IsIntegralConst(0) ? width : 0, pLclVarTree,
                                                pIsEntire);
        }
    }
    // Post rationalization we could have GT_IND(GT_LEA(..)) trees.
    else if (OperGet() == GT_LEA)
    {
        // This method gets invoked during liveness computation and therefore it is critical
        // that we don't miss 'use' of any local.  The below logic is making the assumption
        // that in case of LEA(base, index, offset) - only base can be a GT_LCL_VAR_ADDR
        // and index is not.
        CLANG_FORMAT_COMMENT_ANCHOR;

#ifdef DEBUG
        GenTree* index = gtOp.gtOp2;
        if (index != nullptr)
        {
            assert(!index->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire));
        }
#endif // DEBUG

        // base
        GenTree* base = gtOp.gtOp1;
        if (base != nullptr)
        {
            // Lea could have an Indir as its base.
            if (base->OperGet() == GT_IND)
            {
                base = base->gtOp.gtOp1->gtEffectiveVal(/*commas only*/ true);
            }
            return base->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
        }
    }
    // Otherwise...
    return false;
}
15604
15605 //------------------------------------------------------------------------
15606 // IsLocalExpr: Determine if this is a LclVarCommon node and return some
15607 //              additional info about it in the two out parameters.
15608 //
15609 // Arguments:
15610 //    comp        - The Compiler instance
15611 //    pLclVarTree - An "out" argument that returns the local tree as a
15612 //                  LclVarCommon, if it is indeed local.
15613 //    pFldSeq     - An "out" argument that returns the value numbering field
15614 //                  sequence for the node, if any.
15615 //
15616 // Return Value:
15617 //    Returns true, and sets the out arguments accordingly, if this is
15618 //    a LclVarCommon node.
15619
15620 bool GenTree::IsLocalExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq)
15621 {
15622     if (IsLocal()) // Note that this covers "GT_LCL_FLD."
15623     {
15624         *pLclVarTree = AsLclVarCommon();
15625         if (OperGet() == GT_LCL_FLD)
15626         {
15627             // Otherwise, prepend this field to whatever we've already accumulated outside in.
15628             *pFldSeq = comp->GetFieldSeqStore()->Append(AsLclFld()->gtFieldSeq, *pFldSeq);
15629         }
15630         return true;
15631     }
15632     else
15633     {
15634         return false;
15635     }
15636 }
15637
15638 // If this tree evaluates some sum of a local address and some constants,
15639 // return the node for the local being addressed
15640
15641 GenTreeLclVarCommon* GenTree::IsLocalAddrExpr()
15642 {
15643     if (OperGet() == GT_ADDR)
15644     {
15645         return gtOp.gtOp1->IsLocal() ? gtOp.gtOp1->AsLclVarCommon() : nullptr;
15646     }
15647     else if (OperIsLocalAddr())
15648     {
15649         return this->AsLclVarCommon();
15650     }
15651     else if (OperGet() == GT_ADD)
15652     {
15653         if (gtOp.gtOp1->OperGet() == GT_CNS_INT)
15654         {
15655             return gtOp.gtOp2->IsLocalAddrExpr();
15656         }
15657         else if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
15658         {
15659             return gtOp.gtOp1->IsLocalAddrExpr();
15660         }
15661     }
15662     // Otherwise...
15663     return nullptr;
15664 }
15665
//------------------------------------------------------------------------
// IsLocalAddrExpr: Determine whether this tree computes the address of a local
//    (possibly plus field-sequence constants), accumulating the field sequence.
//
// Arguments:
//    comp        - the Compiler instance
//    pLclVarTree - [out] set to the local node whose address is taken
//    pFldSeq     - [in/out] field sequence accumulated so far; field sequences
//                  found here are prepended to it
//
// Return Value:
//    true if this tree is a local address expression; the out params are then set.
//
bool GenTree::IsLocalAddrExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq)
{
    if (OperGet() == GT_ADDR)
    {
        // GT_ADDR nodes are not expected in rationalized (LIR) form.
        assert(!comp->compRationalIRForm);
        GenTree* addrArg = gtOp.gtOp1;
        if (addrArg->IsLocal()) // Note that this covers "GT_LCL_FLD."
        {
            *pLclVarTree = addrArg->AsLclVarCommon();
            if (addrArg->OperGet() == GT_LCL_FLD)
            {
                // Otherwise, prepend this field to whatever we've already accumulated outside in.
                *pFldSeq = comp->GetFieldSeqStore()->Append(addrArg->AsLclFld()->gtFieldSeq, *pFldSeq);
            }
            return true;
        }
        else
        {
            return false;
        }
    }
    else if (OperIsLocalAddr())
    {
        *pLclVarTree = this->AsLclVarCommon();
        if (this->OperGet() == GT_LCL_FLD_ADDR)
        {
            *pFldSeq = comp->GetFieldSeqStore()->Append(this->AsLclFld()->gtFieldSeq, *pFldSeq);
        }
        return true;
    }
    else if (OperGet() == GT_ADD)
    {
        if (gtOp.gtOp1->OperGet() == GT_CNS_INT)
        {
            // A constant addend without a field sequence disqualifies the tree.
            if (gtOp.gtOp1->AsIntCon()->gtFieldSeq == nullptr)
            {
                return false;
            }
            // Otherwise, prepend this field to whatever we've already accumulated outside in.
            *pFldSeq = comp->GetFieldSeqStore()->Append(gtOp.gtOp1->AsIntCon()->gtFieldSeq, *pFldSeq);
            return gtOp.gtOp2->IsLocalAddrExpr(comp, pLclVarTree, pFldSeq);
        }
        else if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
        {
            if (gtOp.gtOp2->AsIntCon()->gtFieldSeq == nullptr)
            {
                return false;
            }
            // Otherwise, prepend this field to whatever we've already accumulated outside in.
            *pFldSeq = comp->GetFieldSeqStore()->Append(gtOp.gtOp2->AsIntCon()->gtFieldSeq, *pFldSeq);
            return gtOp.gtOp1->IsLocalAddrExpr(comp, pLclVarTree, pFldSeq);
        }
    }
    // Otherwise...
    return false;
}
15722
15723 //------------------------------------------------------------------------
15724 // IsLclVarUpdateTree: Determine whether this is an assignment tree of the
15725 //                     form Vn = Vn 'oper' 'otherTree' where Vn is a lclVar
15726 //
15727 // Arguments:
15728 //    pOtherTree - An "out" argument in which 'otherTree' will be returned.
15729 //    pOper      - An "out" argument in which 'oper' will be returned.
15730 //
15731 // Return Value:
15732 //    If the tree is of the above form, the lclNum of the variable being
15733 //    updated is returned, and 'pOtherTree' and 'pOper' are set.
15734 //    Otherwise, returns BAD_VAR_NUM.
15735 //
15736 // Notes:
15737 //    'otherTree' can have any shape.
15738 //     We avoid worrying about whether the op is commutative by only considering the
15739 //     first operand of the rhs. It is expected that most trees of this form will
15740 //     already have the lclVar on the lhs.
15741 //     TODO-CQ: Evaluate whether there are missed opportunities due to this, or
15742 //     whether gtSetEvalOrder will already have put the lclVar on the lhs in
15743 //     the cases of interest.
15744
15745 unsigned GenTree::IsLclVarUpdateTree(GenTree** pOtherTree, genTreeOps* pOper)
15746 {
15747     unsigned lclNum = BAD_VAR_NUM;
15748     if (OperIs(GT_ASG))
15749     {
15750         GenTree* lhs = gtOp.gtOp1;
15751         if (lhs->OperGet() == GT_LCL_VAR)
15752         {
15753             unsigned lhsLclNum = lhs->AsLclVarCommon()->gtLclNum;
15754             GenTree* rhs       = gtOp.gtOp2;
15755             if (rhs->OperIsBinary() && (rhs->gtOp.gtOp1->gtOper == GT_LCL_VAR) &&
15756                 (rhs->gtOp.gtOp1->AsLclVarCommon()->gtLclNum == lhsLclNum))
15757             {
15758                 lclNum      = lhsLclNum;
15759                 *pOtherTree = rhs->gtOp.gtOp2;
15760                 *pOper      = rhs->gtOper;
15761             }
15762         }
15763     }
15764     return lclNum;
15765 }
15766
15767 //------------------------------------------------------------------------
15768 // canBeContained: check whether this tree node may be a subcomponent of its parent for purposes
15769 //                 of code generation.
15770 //
15771 // Return value: returns true if it is possible to contain this node and false otherwise.
15772 bool GenTree::canBeContained() const
15773 {
15774     assert(IsLIR());
15775
15776     if (gtHasReg())
15777     {
15778         return false;
15779     }
15780
15781     // It is not possible for nodes that do not produce values or that are not containable values
15782     // to be contained.
15783     if (((OperKind() & (GTK_NOVALUE | GTK_NOCONTAIN)) != 0) || (OperIsHWIntrinsic() && !isContainableHWIntrinsic()))
15784     {
15785         return false;
15786     }
15787
15788     return true;
15789 }
15790
//------------------------------------------------------------------------
// isContained: check whether this tree node is a subcomponent of its parent for codegen purposes
//
// Return Value:
//    Returns true if there is no code generated explicitly for this node.
//    Essentially, it will be rolled into the code generation for the parent.
//
// Assumptions:
//    This method relies upon the value of the GTF_CONTAINED flag.
//    Therefore this method is only valid after Lowering.
//    Also note that register allocation or other subsequent phases may cause
//    nodes to become contained (or not) and therefore this property may change.
//
bool GenTree::isContained() const
{
    assert(IsLIR());
    const bool isMarkedContained = ((gtFlags & GTF_CONTAINED) != 0);

#ifdef DEBUG
    // Sanity-check the flag against the containment rules.
    if (!canBeContained())
    {
        assert(!isMarkedContained);
    }

    // these actually produce a register (the flags reg, we just don't model it)
    // and are a separate instruction from the branch that consumes the result.
    // They can only produce a result if the child is a SIMD equality comparison.
    else if (OperKind() & GTK_RELOP)
    {
        // We have to cast away const-ness since AsOp() method is non-const.
        GenTree* childNode = const_cast<GenTree*>(this)->AsOp()->gtOp1;
        assert((isMarkedContained == false) || childNode->IsSIMDEqualityOrInequality());
    }

    // these either produce a result in register or set flags reg.
    else if (IsSIMDEqualityOrInequality())
    {
        assert(!isMarkedContained);
    }

    // if it's contained it can't be unused.
    if (isMarkedContained)
    {
        assert(!IsUnusedValue());
    }
#endif // DEBUG
    return isMarkedContained;
}
15839
15840 // return true if node is contained and an indir
15841 bool GenTree::isContainedIndir() const
15842 {
15843     return isIndir() && isContained();
15844 }
15845
15846 bool GenTree::isIndirAddrMode()
15847 {
15848     return isIndir() && AsIndir()->Addr()->OperIsAddrMode() && AsIndir()->Addr()->isContained();
15849 }
15850
15851 bool GenTree::isIndir() const
15852 {
15853     return OperGet() == GT_IND || OperGet() == GT_STOREIND;
15854 }
15855
15856 bool GenTreeIndir::HasBase()
15857 {
15858     return Base() != nullptr;
15859 }
15860
15861 bool GenTreeIndir::HasIndex()
15862 {
15863     return Index() != nullptr;
15864 }
15865
15866 GenTree* GenTreeIndir::Base()
15867 {
15868     GenTree* addr = Addr();
15869
15870     if (isIndirAddrMode())
15871     {
15872         GenTree* result = addr->AsAddrMode()->Base();
15873         if (result != nullptr)
15874         {
15875             result = result->gtEffectiveVal();
15876         }
15877         return result;
15878     }
15879     else
15880     {
15881         return addr; // TODO: why do we return 'addr' here, but we return 'nullptr' in the equivalent Index() case?
15882     }
15883 }
15884
15885 GenTree* GenTreeIndir::Index()
15886 {
15887     if (isIndirAddrMode())
15888     {
15889         GenTree* result = Addr()->AsAddrMode()->Index();
15890         if (result != nullptr)
15891         {
15892             result = result->gtEffectiveVal();
15893         }
15894         return result;
15895     }
15896     else
15897     {
15898         return nullptr;
15899     }
15900 }
15901
15902 unsigned GenTreeIndir::Scale()
15903 {
15904     if (HasIndex())
15905     {
15906         return Addr()->AsAddrMode()->gtScale;
15907     }
15908     else
15909     {
15910         return 1;
15911     }
15912 }
15913
15914 ssize_t GenTreeIndir::Offset()
15915 {
15916     if (isIndirAddrMode())
15917     {
15918         return Addr()->AsAddrMode()->Offset();
15919     }
15920     else if (Addr()->gtOper == GT_CLS_VAR_ADDR)
15921     {
15922         return static_cast<ssize_t>(reinterpret_cast<intptr_t>(Addr()->gtClsVar.gtClsVarHnd));
15923     }
15924     else if (Addr()->IsCnsIntOrI() && Addr()->isContained())
15925     {
15926         return Addr()->AsIntConCommon()->IconValue();
15927     }
15928     else
15929     {
15930         return 0;
15931     }
15932 }
15933
15934 //------------------------------------------------------------------------
15935 // GenTreeIntConCommon::ImmedValNeedsReloc: does this immediate value needs recording a relocation with the VM?
15936 //
15937 // Arguments:
15938 //    comp - Compiler instance
15939 //
15940 // Return Value:
15941 //    True if this immediate value requires us to record a relocation for it; false otherwise.
15942
15943 bool GenTreeIntConCommon::ImmedValNeedsReloc(Compiler* comp)
15944 {
15945     return comp->opts.compReloc && (gtOper == GT_CNS_INT) && IsIconHandle();
15946 }
15947
15948 //------------------------------------------------------------------------
15949 // ImmedValCanBeFolded: can this immediate value be folded for op?
15950 //
15951 // Arguments:
15952 //    comp - Compiler instance
15953 //    op - Tree operator
15954 //
15955 // Return Value:
15956 //    True if this immediate value can be folded for op; false otherwise.
15957
15958 bool GenTreeIntConCommon::ImmedValCanBeFolded(Compiler* comp, genTreeOps op)
15959 {
15960     // In general, immediate values that need relocations can't be folded.
15961     // There are cases where we do want to allow folding of handle comparisons
15962     // (e.g., typeof(T) == typeof(int)).
15963     return !ImmedValNeedsReloc(comp) || (op == GT_EQ) || (op == GT_NE);
15964 }
15965
15966 #ifdef _TARGET_AMD64_
15967 // Returns true if this absolute address fits within the base of an addr mode.
15968 // On Amd64 this effectively means, whether an absolute indirect address can
15969 // be encoded as 32-bit offset relative to IP or zero.
15970 bool GenTreeIntConCommon::FitsInAddrBase(Compiler* comp)
15971 {
15972 #ifdef DEBUG
15973     // Early out if PC-rel encoding of absolute addr is disabled.
15974     if (!comp->opts.compEnablePCRelAddr)
15975     {
15976         return false;
15977     }
15978 #endif
15979
15980     if (comp->opts.compReloc)
15981     {
15982         // During Ngen JIT is always asked to generate relocatable code.
15983         // Hence JIT will try to encode only icon handles as pc-relative offsets.
15984         return IsIconHandle() && (IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue()));
15985     }
15986     else
15987     {
15988         // During Jitting, we are allowed to generate non-relocatable code.
15989         // On Amd64 we can encode an absolute indirect addr as an offset relative to zero or RIP.
15990         // An absolute indir addr that can fit within 32-bits can ben encoded as an offset relative
15991         // to zero. All other absolute indir addr could be attempted to be encoded as RIP relative
15992         // based on reloc hint provided by VM.  RIP relative encoding is preferred over relative
15993         // to zero, because the former is one byte smaller than the latter.  For this reason
15994         // we check for reloc hint first and then whether addr fits in 32-bits next.
15995         //
15996         // VM starts off with an initial state to allow both data and code address to be encoded as
15997         // pc-relative offsets.  Hence JIT will attempt to encode all absolute addresses as pc-relative
15998         // offsets.  It is possible while jitting a method, an address could not be encoded as a
15999         // pc-relative offset.  In that case VM will note the overflow and will trigger re-jitting
16000         // of the method with reloc hints turned off for all future methods. Second time around
16001         // jitting will succeed since JIT will not attempt to encode data addresses as pc-relative
16002         // offsets.  Note that JIT will always attempt to relocate code addresses (.e.g call addr).
16003         // After an overflow, VM will assume any relocation recorded is for a code address and will
16004         // emit jump thunk if it cannot be encoded as pc-relative offset.
16005         return (IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue())) || FitsInI32();
16006     }
16007 }
16008
16009 // Returns true if this icon value is encoded as addr needs recording a relocation with VM
16010 bool GenTreeIntConCommon::AddrNeedsReloc(Compiler* comp)
16011 {
16012     if (comp->opts.compReloc)
16013     {
16014         // During Ngen JIT is always asked to generate relocatable code.
16015         // Hence JIT will try to encode only icon handles as pc-relative offsets.
16016         return IsIconHandle() && (IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue()));
16017     }
16018     else
16019     {
16020         return IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue());
16021     }
16022 }
16023
16024 #elif defined(_TARGET_X86_)
// Returns true if this absolute address fits within the base of an addr mode.
// On x86 all addresses are 4-bytes and can be directly encoded in an addr mode.
bool GenTreeIntConCommon::FitsInAddrBase(Compiler* comp)
{
#ifdef DEBUG
    // Early out if PC-rel encoding of absolute addr is disabled.
    if (!comp->opts.compEnablePCRelAddr)
    {
        return false;
    }
#endif

    // Any integer constant can be used directly as an address-mode base on x86.
    return IsCnsIntOrI();
}
16039
16040 // Returns true if this icon value is encoded as addr needs recording a relocation with VM
16041 bool GenTreeIntConCommon::AddrNeedsReloc(Compiler* comp)
16042 {
16043     // If generating relocatable code, icons should be reported for recording relocatons.
16044     return comp->opts.compReloc && IsIconHandle();
16045 }
16046 #endif //_TARGET_X86_
16047
//------------------------------------------------------------------------
// IsFieldAddr: Check whether this tree is recognizable as a field address.
//
// Arguments:
//    comp    - The compiler instance
//    pObj    - [out] Set to this node when an instance-field (TYP_REF) address
//              is recognized, otherwise set to nullptr
//    pStatic - [out] Set to the base of a static-field address when one is
//              recognized, otherwise set to nullptr
//    pFldSeq - [in/out] The field sequence accumulated so far; any field
//              sequence recognized here is prepended (outside-in)
//
// Return Value:
//    true if this tree was recognized as a field address; the out parameters
//    are only meaningful when true is returned.
//
// Notes:
//    For GT_ADD trees this recurses (via gtEffectiveVal()->IsFieldAddr) on the
//    base-address operand to peel off successive struct-field offsets.
bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pObj, GenTree** pStatic, FieldSeqNode** pFldSeq)
{
    FieldSeqNode* newFldSeq    = nullptr;
    GenTree*      baseAddr     = nullptr;
    bool          mustBeStatic = false;

    // Singleton field sequence for a recognized static struct field, if any.
    FieldSeqNode* statStructFldSeq = nullptr;
    if (TypeGet() == TYP_REF)
    {
        // Recognize struct static field patterns...
        if (OperGet() == GT_IND)
        {
            // Look for a constant (possibly added to a base) under the indirection;
            // its attached field sequence may identify a static field.
            GenTree*       addr = gtOp.gtOp1;
            GenTreeIntCon* icon = nullptr;
            if (addr->OperGet() == GT_CNS_INT)
            {
                icon = addr->AsIntCon();
            }
            else if (addr->OperGet() == GT_ADD)
            {
                // op1 should never be a field sequence (or any other kind of handle)
                assert((addr->gtOp.gtOp1->gtOper != GT_CNS_INT) || !addr->gtOp.gtOp1->IsIconHandle());
                if (addr->gtOp.gtOp2->OperGet() == GT_CNS_INT)
                {
                    icon = addr->gtOp.gtOp2->AsIntCon();
                }
            }
            if (icon != nullptr && !icon->IsIconHandle(GTF_ICON_STR_HDL) // String handles are a source of TYP_REFs.
                && icon->gtFieldSeq != nullptr &&
                icon->gtFieldSeq->m_next == nullptr // A static field should be a singleton
                // TODO-Review: A pseudoField here indicates an issue - this requires investigation
                // See test case src\ddsuites\src\clr\x86\CoreMangLib\Dev\Globalization\CalendarRegressions.exe
                && !(FieldSeqStore::IsPseudoField(icon->gtFieldSeq->m_fieldHnd)) &&
                icon->gtFieldSeq != FieldSeqStore::NotAField()) // Ignore non-fields.
            {
                statStructFldSeq = icon->gtFieldSeq;
            }
            else
            {
                addr = addr->gtEffectiveVal();

                // Perhaps it's a direct indirection of a helper call or a cse with a zero offset annotation.
                if ((addr->OperGet() == GT_CALL) || (addr->OperGet() == GT_LCL_VAR))
                {
                    FieldSeqNode* zeroFieldSeq = nullptr;
                    if (comp->GetZeroOffsetFieldMap()->Lookup(addr, &zeroFieldSeq))
                    {
                        // Only accept a singleton field sequence here as well.
                        if (zeroFieldSeq->m_next == nullptr)
                        {
                            statStructFldSeq = zeroFieldSeq;
                        }
                    }
                }
            }
        }
        else if (OperGet() == GT_CLS_VAR)
        {
            // A class-variable node carries its field sequence directly.
            GenTreeClsVar* clsVar = AsClsVar();
            if (clsVar->gtFieldSeq != nullptr && clsVar->gtFieldSeq->m_next == nullptr)
            {
                statStructFldSeq = clsVar->gtFieldSeq;
            }
        }
        else if (OperIsLocal())
        {
            // If we have a GT_LCL_VAR, it can be result of a CSE substitution
            // If it is then the CSE assignment will have a ValueNum that
            // describes the RHS of the CSE assignment.
            //
            // The CSE could be a pointer to a boxed struct
            //
            GenTreeLclVarCommon* lclVar = AsLclVarCommon();
            ValueNum             vn     = gtVNPair.GetLiberal();
            if (vn != ValueNumStore::NoVN)
            {
                // Is the ValueNum a MapSelect involving a SharedStatic helper?
                VNFuncApp funcApp1;
                if (comp->vnStore->GetVNFunc(vn, &funcApp1) && (funcApp1.m_func == VNF_MapSelect) &&
                    (comp->vnStore->IsSharedStatic(funcApp1.m_args[1])))
                {
                    ValueNum mapVN = funcApp1.m_args[0];
                    // Is this new 'mapVN' ValueNum, a MapSelect involving a handle?
                    VNFuncApp funcApp2;
                    if (comp->vnStore->GetVNFunc(mapVN, &funcApp2) && (funcApp2.m_func == VNF_MapSelect) &&
                        (comp->vnStore->IsVNHandle(funcApp2.m_args[1])))
                    {
                        ValueNum fldHndVN = funcApp2.m_args[1];
                        // Is this new 'fldHndVN' VNhandle a FieldHandle?
                        unsigned flags = comp->vnStore->GetHandleFlags(fldHndVN);
                        if (flags == GTF_ICON_FIELD_HDL)
                        {
                            CORINFO_FIELD_HANDLE fieldHnd =
                                CORINFO_FIELD_HANDLE(comp->vnStore->ConstantValue<ssize_t>(fldHndVN));

                            // Record this field sequence in 'statStructFldSeq' as it is likely to be a Boxed Struct
                            // field access.
                            statStructFldSeq = comp->GetFieldSeqStore()->CreateSingleton(fieldHnd);
                        }
                    }
                }
            }
        }

        if (statStructFldSeq != nullptr)
        {
            assert(statStructFldSeq->m_next == nullptr);
            // Is this a pointer to a boxed struct?
            if (comp->gtIsStaticFieldPtrToBoxedStruct(TYP_REF, statStructFldSeq->m_fieldHnd))
            {
                // Report it as a static field address: prepend the field and
                // use this node itself as the static base.
                *pFldSeq = comp->GetFieldSeqStore()->Append(statStructFldSeq, *pFldSeq);
                *pObj    = nullptr;
                *pStatic = this;
                return true;
            }
        }

        // Otherwise... any TYP_REF node is treated as an instance-field object base.
        *pObj    = this;
        *pStatic = nullptr;
        return true;
    }
    else if (OperGet() == GT_ADD)
    {
        // If one operator is a field sequence/handle, the other operator must not also be a field sequence/handle.
        if ((gtOp.gtOp1->OperGet() == GT_CNS_INT) && gtOp.gtOp1->IsIconHandle())
        {
            assert((gtOp.gtOp2->gtOper != GT_CNS_INT) || !gtOp.gtOp2->IsIconHandle());
            newFldSeq = gtOp.gtOp1->AsIntCon()->gtFieldSeq;
            baseAddr  = gtOp.gtOp2;
        }
        else if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
        {
            assert((gtOp.gtOp1->gtOper != GT_CNS_INT) || !gtOp.gtOp1->IsIconHandle());
            newFldSeq = gtOp.gtOp2->AsIntCon()->gtFieldSeq;
            baseAddr  = gtOp.gtOp1;
        }
    }
    else
    {
        // Check if "this" has a zero-offset annotation.
        if (!comp->GetZeroOffsetFieldMap()->Lookup(this, &newFldSeq))
        {
            // If not, this is not a field address.
            return false;
        }
        else
        {
            // A zero-offset annotation with no visible base can only describe
            // a static field.
            baseAddr     = this;
            mustBeStatic = true;
        }
    }

    // If not we don't have a field seq, it's not a field address.
    if (newFldSeq == nullptr || newFldSeq == FieldSeqStore::NotAField())
    {
        return false;
    }

    // Prepend this field to whatever we've already accumulated (outside-in).
    *pFldSeq = comp->GetFieldSeqStore()->Append(newFldSeq, *pFldSeq);

    // Is it a static or instance field?
    if (!FieldSeqStore::IsPseudoField(newFldSeq->m_fieldHnd) &&
        comp->info.compCompHnd->isFieldStatic(newFldSeq->m_fieldHnd))
    {
        // It is a static field.  We're done.
        *pObj    = nullptr;
        *pStatic = baseAddr;
        return true;
    }
    else if ((baseAddr != nullptr) && !mustBeStatic)
    {
        // It's an instance field...but it must be for a struct field, since we've not yet encountered
        // a "TYP_REF" address.  Analyze the reset of the address.
        return baseAddr->gtEffectiveVal()->IsFieldAddr(comp, pObj, pStatic, pFldSeq);
    }

    // Otherwise...
    return false;
}
16228
16229 bool Compiler::gtIsStaticFieldPtrToBoxedStruct(var_types fieldNodeType, CORINFO_FIELD_HANDLE fldHnd)
16230 {
16231     if (fieldNodeType != TYP_REF)
16232     {
16233         return false;
16234     }
16235     noway_assert(fldHnd != nullptr);
16236     CorInfoType cit      = info.compCompHnd->getFieldType(fldHnd);
16237     var_types   fieldTyp = JITtype2varType(cit);
16238     return fieldTyp != TYP_REF;
16239 }
16240
16241 #ifdef FEATURE_SIMD
16242 //------------------------------------------------------------------------
16243 // gtGetSIMDZero: Get a zero value of the appropriate SIMD type.
16244 //
16245 // Arguments:
16246 //    var_types - The simdType
16247 //    baseType  - The base type we need
16248 //    simdHandle - The handle for the SIMD type
16249 //
16250 // Return Value:
16251 //    A node generating the appropriate Zero, if we are able to discern it,
16252 //    otherwise null (note that this shouldn't happen, but callers should
16253 //    be tolerant of this case).
16254
16255 GenTree* Compiler::gtGetSIMDZero(var_types simdType, var_types baseType, CORINFO_CLASS_HANDLE simdHandle)
16256 {
16257     bool found    = false;
16258     bool isHWSIMD = true;
16259     noway_assert(m_simdHandleCache != nullptr);
16260
16261     // First, determine whether this is Vector<T>.
16262     if (simdType == getSIMDVectorType())
16263     {
16264         switch (baseType)
16265         {
16266             case TYP_FLOAT:
16267                 found = (simdHandle == m_simdHandleCache->SIMDFloatHandle);
16268                 break;
16269             case TYP_DOUBLE:
16270                 found = (simdHandle == m_simdHandleCache->SIMDDoubleHandle);
16271                 break;
16272             case TYP_INT:
16273                 found = (simdHandle == m_simdHandleCache->SIMDIntHandle);
16274                 break;
16275             case TYP_USHORT:
16276                 found = (simdHandle == m_simdHandleCache->SIMDUShortHandle);
16277                 break;
16278             case TYP_UBYTE:
16279                 found = (simdHandle == m_simdHandleCache->SIMDUByteHandle);
16280                 break;
16281             case TYP_SHORT:
16282                 found = (simdHandle == m_simdHandleCache->SIMDShortHandle);
16283                 break;
16284             case TYP_BYTE:
16285                 found = (simdHandle == m_simdHandleCache->SIMDByteHandle);
16286                 break;
16287             case TYP_LONG:
16288                 found = (simdHandle == m_simdHandleCache->SIMDLongHandle);
16289                 break;
16290             case TYP_UINT:
16291                 found = (simdHandle == m_simdHandleCache->SIMDUIntHandle);
16292                 break;
16293             case TYP_ULONG:
16294                 found = (simdHandle == m_simdHandleCache->SIMDULongHandle);
16295                 break;
16296             default:
16297                 break;
16298         }
16299         if (found)
16300         {
16301             isHWSIMD = false;
16302         }
16303     }
16304
16305     if (!found)
16306     {
16307         // We must still have isHWSIMD set to true, and the only non-HW types left are the fixed types.
16308         switch (simdType)
16309         {
16310             case TYP_SIMD8:
16311                 switch (baseType)
16312                 {
16313                     case TYP_FLOAT:
16314                         if (simdHandle == m_simdHandleCache->SIMDVector2Handle)
16315                         {
16316                             isHWSIMD = false;
16317                         }
16318 #if defined(_TARGET_ARM64_) && defined(FEATURE_HW_INTRINSICS)
16319                         else
16320                         {
16321                             assert(simdHandle == m_simdHandleCache->Vector64FloatHandle);
16322                         }
16323                         break;
16324                     case TYP_INT:
16325                         assert(simdHandle == m_simdHandleCache->Vector64IntHandle);
16326                         break;
16327                     case TYP_USHORT:
16328                         assert(simdHandle == m_simdHandleCache->Vector64UShortHandle);
16329                         break;
16330                     case TYP_UBYTE:
16331                         assert(simdHandle == m_simdHandleCache->Vector64UByteHandle);
16332                         break;
16333                     case TYP_SHORT:
16334                         assert(simdHandle == m_simdHandleCache->Vector64ShortHandle);
16335                         break;
16336                     case TYP_BYTE:
16337                         assert(simdHandle == m_simdHandleCache->Vector64ByteHandle);
16338                         break;
16339                     case TYP_UINT:
16340                         assert(simdHandle == m_simdHandleCache->Vector64UIntHandle);
16341                         break;
16342 #endif // defined(_TARGET_ARM64_) && defined(FEATURE_HW_INTRINSICS)
16343                     default:
16344                         break;
16345                 }
16346                 break;
16347
16348             case TYP_SIMD12:
16349                 assert((baseType == TYP_FLOAT) && (simdHandle == m_simdHandleCache->SIMDVector3Handle));
16350                 isHWSIMD = false;
16351                 break;
16352
16353             case TYP_SIMD16:
16354                 switch (baseType)
16355                 {
16356                     case TYP_FLOAT:
16357                         if (simdHandle == m_simdHandleCache->SIMDVector4Handle)
16358                         {
16359                             isHWSIMD = false;
16360                         }
16361 #if defined(FEATURE_HW_INTRINSICS)
16362                         else
16363                         {
16364                             assert(simdHandle == m_simdHandleCache->Vector128FloatHandle);
16365                         }
16366                         break;
16367                     case TYP_DOUBLE:
16368                         assert(simdHandle == m_simdHandleCache->Vector128DoubleHandle);
16369                         break;
16370                     case TYP_INT:
16371                         assert(simdHandle == m_simdHandleCache->Vector128IntHandle);
16372                         break;
16373                     case TYP_USHORT:
16374                         assert(simdHandle == m_simdHandleCache->Vector128UShortHandle);
16375                         break;
16376                     case TYP_UBYTE:
16377                         assert(simdHandle == m_simdHandleCache->Vector128UByteHandle);
16378                         break;
16379                     case TYP_SHORT:
16380                         assert(simdHandle == m_simdHandleCache->Vector128ShortHandle);
16381                         break;
16382                     case TYP_BYTE:
16383                         assert(simdHandle == m_simdHandleCache->Vector128ByteHandle);
16384                         break;
16385                     case TYP_LONG:
16386                         assert(simdHandle == m_simdHandleCache->Vector128LongHandle);
16387                         break;
16388                     case TYP_UINT:
16389                         assert(simdHandle == m_simdHandleCache->Vector128UIntHandle);
16390                         break;
16391                     case TYP_ULONG:
16392                         assert(simdHandle == m_simdHandleCache->Vector128ULongHandle);
16393                         break;
16394 #endif // defined(FEATURE_HW_INTRINSICS)
16395
16396                     default:
16397                         break;
16398                 }
16399                 break;
16400
16401 #if defined(_TARGET_XARCH4_) && defined(FEATURE_HW_INTRINSICS)
16402             case TYP_SIMD32:
16403                 switch (baseType)
16404                 {
16405                     case TYP_FLOAT:
16406                         assert(simdHandle == m_simdHandleCache->Vector256FloatHandle);
16407                         break;
16408                     case TYP_DOUBLE:
16409                         assert(simdHandle == m_simdHandleCache->Vector256DoubleHandle);
16410                         break;
16411                     case TYP_INT:
16412                         assert(simdHandle == m_simdHandleCache->Vector256IntHandle);
16413                         break;
16414                     case TYP_USHORT:
16415                         assert(simdHandle == m_simdHandleCache->Vector256UShortHandle);
16416                         break;
16417                     case TYP_UBYTE:
16418                         assert(simdHandle == m_simdHandleCache->Vector256UByteHandle);
16419                         break;
16420                     case TYP_SHORT:
16421                         assert(simdHandle == m_simdHandleCache->Vector256ShortHandle);
16422                         break;
16423                     case TYP_BYTE:
16424                         assert(simdHandle == m_simdHandleCache->Vector256ByteHandle);
16425                         break;
16426                     case TYP_LONG:
16427                         assert(simdHandle == m_simdHandleCache->Vector256LongHandle);
16428                         break;
16429                     case TYP_UINT:
16430                         assert(simdHandle == m_simdHandleCache->Vector256UIntHandle);
16431                         break;
16432                     case TYP_ULONG:
16433                         assert(simdHandle == m_simdHandleCache->Vector256ULongHandle);
16434                         break;
16435                     default:
16436                         break;
16437                 }
16438                 break;
16439 #endif // _TARGET_XARCH_ && FEATURE_HW_INTRINSICS
16440             default:
16441                 break;
16442         }
16443     }
16444
16445     unsigned size = genTypeSize(simdType);
16446     if (isHWSIMD)
16447     {
16448 #if defined(_TARGET_XARCH_) && defined(FEATURE_HW_INTRINSICS)
16449         switch (simdType)
16450         {
16451             case TYP_SIMD16:
16452                 if (compSupports(InstructionSet_SSE))
16453                 {
16454                     // We only return the HWIntrinsicNode if SSE is supported, since it is possible for
16455                     // the user to disable the SSE HWIntrinsic support via the COMPlus configuration knobs
16456                     // even though the hardware vector types are still available.
16457                     return gtNewSimdHWIntrinsicNode(simdType, NI_Vector128_Zero, baseType, size);
16458                 }
16459                 return nullptr;
16460             case TYP_SIMD32:
16461                 if (compSupports(InstructionSet_AVX))
16462                 {
16463                     // We only return the HWIntrinsicNode if AVX is supported, since it is possible for
16464                     // the user to disable the AVX HWIntrinsic support via the COMPlus configuration knobs
16465                     // even though the hardware vector types are still available.
16466                     return gtNewSimdHWIntrinsicNode(simdType, NI_Vector256_Zero, baseType, size);
16467                 }
16468                 return nullptr;
16469             default:
16470                 break;
16471         }
16472 #endif // _TARGET_XARCH_ && FEATURE_HW_INTRINSICS
16473         JITDUMP("Coudn't find the matching SIMD type for %s<%s> in gtGetSIMDZero\n", varTypeName(simdType),
16474                 varTypeName(baseType));
16475     }
16476     else
16477     {
16478         return gtNewSIMDVectorZero(simdType, baseType, size);
16479     }
16480     return nullptr;
16481 }
16482 #endif // FEATURE_SIMD
16483
//------------------------------------------------------------------------
// gtGetStructHandleIfPresent: Try to find the class handle of the struct
//    value produced by a tree.
//
// Arguments:
//    tree - The tree whose struct class handle is sought
//
// Return Value:
//    The struct's class handle when it can be determined from the tree's
//    oper (and, for indirections, from array info or a trailing field
//    sequence), otherwise NO_CLASS_HANDLE.
CORINFO_CLASS_HANDLE Compiler::gtGetStructHandleIfPresent(GenTree* tree)
{
    CORINFO_CLASS_HANDLE structHnd = NO_CLASS_HANDLE;
    // Tunnel through GT_COMMA etc. to the node that produces the value.
    tree                           = tree->gtEffectiveVal();
    if (varTypeIsStruct(tree->gtType))
    {
        switch (tree->gtOper)
        {
            default:
                break;
            case GT_MKREFANY:
                structHnd = impGetRefAnyClass();
                break;
            case GT_OBJ:
                structHnd = tree->gtObj.gtClass;
                break;
            case GT_CALL:
                structHnd = tree->gtCall.gtRetClsHnd;
                break;
            case GT_RET_EXPR:
                structHnd = tree->gtRetExpr.gtRetClsHnd;
                break;
            case GT_ARGPLACE:
                structHnd = tree->gtArgPlace.gtArgPlaceClsHnd;
                break;
            case GT_INDEX:
                structHnd = tree->gtIndex.gtStructElemClass;
                break;
            case GT_INDEX_ADDR:
                structHnd = tree->AsIndexAddr()->gtStructElemClass;
                break;
            case GT_FIELD:
                // getFieldType returns the struct handle through its out param.
                info.compCompHnd->getFieldType(tree->gtField.gtFldHnd, &structHnd);
                break;
            case GT_ASG:
                // An assignment produces the value of its LHS.
                structHnd = gtGetStructHandleIfPresent(tree->gtGetOp1());
                break;
            case GT_LCL_FLD:
#ifdef FEATURE_SIMD
                // A SIMD-typed local field has no field handle; synthesize the
                // handle from the SIMD type (TYP_FLOAT is used as the assumed
                // base type here).
                if (varTypeIsSIMD(tree))
                {
                    structHnd = gtGetStructHandleForSIMD(tree->gtType, TYP_FLOAT);
                }
#endif
                break;
            case GT_LCL_VAR:
                structHnd = lvaTable[tree->AsLclVarCommon()->gtLclNum].lvVerTypeInfo.GetClassHandle();
                break;
            case GT_RETURN:
                structHnd = gtGetStructHandleIfPresent(tree->gtOp.gtOp1);
                break;
            case GT_IND:
#ifdef FEATURE_SIMD
                if (varTypeIsSIMD(tree))
                {
                    structHnd = gtGetStructHandleForSIMD(tree->gtType, TYP_FLOAT);
#ifdef FEATURE_HW_INTRINSICS
                    // Fall back to the hardware-intrinsic vector types if the
                    // managed SIMD types didn't match.
                    if (structHnd == NO_CLASS_HANDLE)
                    {
                        structHnd = gtGetStructHandleForHWSIMD(tree->gtType, TYP_FLOAT);
                    }
#endif
                }
                else
#endif
                {
                    // Attempt to find a handle for this expression.
                    // We can do this for an array element indirection, or for a field indirection.
                    ArrayInfo arrInfo;
                    if (TryGetArrayInfo(tree->AsIndir(), &arrInfo))
                    {
                        structHnd = EncodeElemType(arrInfo.m_elemType, arrInfo.m_elemStructType);
                    }
                    else
                    {
                        GenTree* addr = tree->AsIndir()->Addr();
                        if ((addr->OperGet() == GT_ADD) && addr->gtGetOp2()->OperIs(GT_CNS_INT))
                        {
                            FieldSeqNode* fieldSeq = addr->gtGetOp2()->AsIntCon()->gtFieldSeq;

                            if (fieldSeq != nullptr)
                            {
                                // Walk to the innermost (last) field in the sequence;
                                // that is the field this indirection actually reads.
                                while (fieldSeq->m_next != nullptr)
                                {
                                    fieldSeq = fieldSeq->m_next;
                                }
                                if (fieldSeq != FieldSeqStore::NotAField() && !fieldSeq->IsPseudoField())
                                {
                                    CORINFO_FIELD_HANDLE fieldHnd = fieldSeq->m_fieldHnd;
                                    CorInfoType fieldCorType = info.compCompHnd->getFieldType(fieldHnd, &structHnd);
                                    assert(fieldCorType == CORINFO_TYPE_VALUECLASS);
                                }
                            }
                        }
                    }
                }
                break;
#ifdef FEATURE_SIMD
            case GT_SIMD:
                structHnd = gtGetStructHandleForSIMD(tree->gtType, tree->AsSIMD()->gtSIMDBaseType);
                break;
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
            case GT_HWIntrinsic:
                structHnd = gtGetStructHandleForHWSIMD(tree->gtType, tree->AsHWIntrinsic()->gtSIMDBaseType);
                break;
#endif
                break;
        }
    }
    return structHnd;
}
16596
16597 CORINFO_CLASS_HANDLE Compiler::gtGetStructHandle(GenTree* tree)
16598 {
16599     CORINFO_CLASS_HANDLE structHnd = gtGetStructHandleIfPresent(tree);
16600     assert(structHnd != NO_CLASS_HANDLE);
16601     return structHnd;
16602 }
16603
16604 //------------------------------------------------------------------------
16605 // gtGetClassHandle: find class handle for a ref type
16606 //
16607 // Arguments:
16608 //    tree -- tree to find handle for
16609 //    pIsExact   [out] -- whether handle is exact type
16610 //    pIsNonNull [out] -- whether tree value is known not to be null
16611 //
16612 // Return Value:
16613 //    nullptr if class handle is unknown,
16614 //        otherwise the class handle.
16615 //    *pIsExact set true if tree type is known to be exactly the handle type,
16616 //        otherwise actual type may be a subtype.
16617 //    *pIsNonNull set true if tree value is known not to be null,
16618 //        otherwise a null value is possible.
16619
16620 CORINFO_CLASS_HANDLE Compiler::gtGetClassHandle(GenTree* tree, bool* pIsExact, bool* pIsNonNull)
16621 {
16622     // Set default values for our out params.
16623     *pIsNonNull                   = false;
16624     *pIsExact                     = false;
16625     CORINFO_CLASS_HANDLE objClass = nullptr;
16626
16627     // Bail out if we're just importing and not generating code, since
16628     // the jit uses TYP_REF for CORINFO_TYPE_VAR locals and args, but
16629     // these may not be ref types.
16630     if (compIsForImportOnly())
16631     {
16632         return objClass;
16633     }
16634
16635     // Bail out if the tree is not a ref type.
16636     var_types treeType = tree->TypeGet();
16637     if (treeType != TYP_REF)
16638     {
16639         return objClass;
16640     }
16641
16642     // Tunnel through commas.
16643     GenTree*         obj   = tree->gtEffectiveVal(false);
16644     const genTreeOps objOp = obj->OperGet();
16645
16646     switch (objOp)
16647     {
16648         case GT_COMMA:
16649         {
16650             // gtEffectiveVal above means we shouldn't see commas here.
16651             assert(!"unexpected GT_COMMA");
16652             break;
16653         }
16654
16655         case GT_LCL_VAR:
16656         {
16657             // For locals, pick up type info from the local table.
16658             const unsigned objLcl = obj->AsLclVar()->GetLclNum();
16659
16660             objClass  = lvaTable[objLcl].lvClassHnd;
16661             *pIsExact = lvaTable[objLcl].lvClassIsExact;
16662             break;
16663         }
16664
16665         case GT_FIELD:
16666         {
16667             // For fields, get the type from the field handle.
16668             CORINFO_FIELD_HANDLE fieldHnd = obj->gtField.gtFldHnd;
16669
16670             if (fieldHnd != nullptr)
16671             {
16672                 objClass = gtGetFieldClassHandle(fieldHnd, pIsExact, pIsNonNull);
16673             }
16674
16675             break;
16676         }
16677
16678         case GT_RET_EXPR:
16679         {
16680             // If we see a RET_EXPR, recurse through to examine the
16681             // return value expression.
16682             GenTree* retExpr = tree->gtRetExpr.gtInlineCandidate;
16683             objClass         = gtGetClassHandle(retExpr, pIsExact, pIsNonNull);
16684             break;
16685         }
16686
16687         case GT_CALL:
16688         {
16689             GenTreeCall* call = tree->AsCall();
16690             if (call->IsInlineCandidate())
16691             {
16692                 // For inline candidates, we've already cached the return
16693                 // type class handle in the inline info.
16694                 InlineCandidateInfo* inlInfo = call->gtInlineCandidateInfo;
16695                 assert(inlInfo != nullptr);
16696
16697                 // Grab it as our first cut at a return type.
16698                 assert(inlInfo->methInfo.args.retType == CORINFO_TYPE_CLASS);
16699                 objClass = inlInfo->methInfo.args.retTypeClass;
16700
16701                 // If the method is shared, the above may not capture
16702                 // the most precise return type information (that is,
16703                 // it may represent a shared return type and as such,
16704                 // have instances of __Canon). See if we can use the
16705                 // context to get at something more definite.
16706                 //
16707                 // For now, we do this here on demand rather than when
16708                 // processing the call, but we could/should apply
16709                 // similar sharpening to the argument and local types
16710                 // of the inlinee.
16711                 const unsigned retClassFlags = info.compCompHnd->getClassAttribs(objClass);
16712                 if (retClassFlags & CORINFO_FLG_SHAREDINST)
16713                 {
16714                     CORINFO_CONTEXT_HANDLE context = inlInfo->exactContextHnd;
16715
16716                     if (context != nullptr)
16717                     {
16718                         CORINFO_CLASS_HANDLE exactClass = nullptr;
16719
16720                         if (((size_t)context & CORINFO_CONTEXTFLAGS_MASK) == CORINFO_CONTEXTFLAGS_CLASS)
16721                         {
16722                             exactClass = (CORINFO_CLASS_HANDLE)((size_t)context & ~CORINFO_CONTEXTFLAGS_MASK);
16723                         }
16724                         else
16725                         {
16726                             CORINFO_METHOD_HANDLE exactMethod =
16727                                 (CORINFO_METHOD_HANDLE)((size_t)context & ~CORINFO_CONTEXTFLAGS_MASK);
16728                             exactClass = info.compCompHnd->getMethodClass(exactMethod);
16729                         }
16730
16731                         // Grab the signature in this context.
16732                         CORINFO_SIG_INFO sig;
16733                         eeGetMethodSig(call->gtCallMethHnd, &sig, exactClass);
16734                         assert(sig.retType == CORINFO_TYPE_CLASS);
16735                         objClass = sig.retTypeClass;
16736                     }
16737                 }
16738             }
16739             else if (call->gtCallType == CT_USER_FUNC)
16740             {
16741                 // For user calls, we can fetch the approximate return
16742                 // type info from the method handle. Unfortunately
16743                 // we've lost the exact context, so this is the best
16744                 // we can do for now.
16745                 CORINFO_METHOD_HANDLE method     = call->gtCallMethHnd;
16746                 CORINFO_CLASS_HANDLE  exactClass = nullptr;
16747                 CORINFO_SIG_INFO      sig;
16748                 eeGetMethodSig(method, &sig, exactClass);
16749                 if (sig.retType == CORINFO_TYPE_VOID)
16750                 {
16751                     // This is a constructor call.
16752                     const unsigned methodFlags = info.compCompHnd->getMethodAttribs(method);
16753                     assert((methodFlags & CORINFO_FLG_CONSTRUCTOR) != 0);
16754                     objClass    = info.compCompHnd->getMethodClass(method);
16755                     *pIsExact   = true;
16756                     *pIsNonNull = true;
16757                 }
16758                 else
16759                 {
16760                     assert(sig.retType == CORINFO_TYPE_CLASS);
16761                     objClass = sig.retTypeClass;
16762                 }
16763             }
16764             else if (call->gtCallType == CT_HELPER)
16765             {
16766                 objClass = gtGetHelperCallClassHandle(call, pIsExact, pIsNonNull);
16767             }
16768
16769             break;
16770         }
16771
16772         case GT_INTRINSIC:
16773         {
16774             GenTreeIntrinsic* intrinsic = obj->AsIntrinsic();
16775
16776             if (intrinsic->gtIntrinsicId == CORINFO_INTRINSIC_Object_GetType)
16777             {
16778                 CORINFO_CLASS_HANDLE runtimeType = info.compCompHnd->getBuiltinClass(CLASSID_RUNTIME_TYPE);
16779                 assert(runtimeType != NO_CLASS_HANDLE);
16780
16781                 objClass    = runtimeType;
16782                 *pIsExact   = false;
16783                 *pIsNonNull = true;
16784             }
16785
16786             break;
16787         }
16788
16789         case GT_CNS_STR:
16790         {
16791             // For literal strings, we know the class and that the
16792             // value is not null.
16793             objClass    = impGetStringClass();
16794             *pIsExact   = true;
16795             *pIsNonNull = true;
16796             break;
16797         }
16798
16799         case GT_IND:
16800         {
16801             GenTreeIndir* indir = obj->AsIndir();
16802
16803             if (indir->HasBase() && !indir->HasIndex())
16804             {
16805                 // indir(addr(lcl)) --> lcl
16806                 //
16807                 // This comes up during constrained callvirt on ref types.
16808
16809                 GenTree*             base = indir->Base();
16810                 GenTreeLclVarCommon* lcl  = base->IsLocalAddrExpr();
16811
16812                 if ((lcl != nullptr) && (base->OperGet() != GT_ADD))
16813                 {
16814                     const unsigned objLcl = lcl->GetLclNum();
16815                     objClass              = lvaTable[objLcl].lvClassHnd;
16816                     *pIsExact             = lvaTable[objLcl].lvClassIsExact;
16817                 }
16818                 else if (base->OperGet() == GT_ARR_ELEM)
16819                 {
16820                     // indir(arr_elem(...)) -> array element type
16821
16822                     GenTree* array = base->AsArrElem()->gtArrObj;
16823
16824                     objClass    = gtGetArrayElementClassHandle(array);
16825                     *pIsExact   = false;
16826                     *pIsNonNull = false;
16827                 }
16828                 else if (base->OperGet() == GT_ADD)
16829                 {
16830                     // This could be a static field access.
16831                     //
16832                     // See if op1 is a static field base helper call
16833                     // and if so, op2 will have the field info.
16834                     GenTree* op1 = base->gtOp.gtOp1;
16835                     GenTree* op2 = base->gtOp.gtOp2;
16836
16837                     const bool op1IsStaticFieldBase = gtIsStaticGCBaseHelperCall(op1);
16838
16839                     if (op1IsStaticFieldBase && (op2->OperGet() == GT_CNS_INT))
16840                     {
16841                         FieldSeqNode* fieldSeq = op2->AsIntCon()->gtFieldSeq;
16842
16843                         if (fieldSeq != nullptr)
16844                         {
16845                             while (fieldSeq->m_next != nullptr)
16846                             {
16847                                 fieldSeq = fieldSeq->m_next;
16848                             }
16849
16850                             assert(!fieldSeq->IsPseudoField());
16851
16852                             // No benefit to calling gtGetFieldClassHandle here, as
16853                             // the exact field being accessed can vary.
16854                             CORINFO_FIELD_HANDLE fieldHnd     = fieldSeq->m_fieldHnd;
16855                             CORINFO_CLASS_HANDLE fieldClass   = nullptr;
16856                             CorInfoType          fieldCorType = info.compCompHnd->getFieldType(fieldHnd, &fieldClass);
16857
16858                             assert(fieldCorType == CORINFO_TYPE_CLASS);
16859                             objClass = fieldClass;
16860                         }
16861                     }
16862                 }
16863             }
16864
16865             break;
16866         }
16867
16868         case GT_BOX:
16869         {
16870             // Box should just wrap a local var reference which has
16871             // the type we're looking for. Also box only represents a
16872             // non-nullable value type so result cannot be null.
16873             GenTreeBox* box     = obj->AsBox();
16874             GenTree*    boxTemp = box->BoxOp();
16875             assert(boxTemp->IsLocal());
16876             const unsigned boxTempLcl = boxTemp->AsLclVar()->GetLclNum();
16877             objClass                  = lvaTable[boxTempLcl].lvClassHnd;
16878             *pIsExact                 = lvaTable[boxTempLcl].lvClassIsExact;
16879             *pIsNonNull               = true;
16880             break;
16881         }
16882
16883         case GT_INDEX:
16884         {
16885             GenTree* array = obj->AsIndex()->Arr();
16886
16887             objClass    = gtGetArrayElementClassHandle(array);
16888             *pIsExact   = false;
16889             *pIsNonNull = false;
16890             break;
16891         }
16892
16893         default:
16894         {
16895             break;
16896         }
16897     }
16898
16899     return objClass;
16900 }
16901
16902 //------------------------------------------------------------------------
16903 // gtGetHelperCallClassHandle: find class handle for return value of a
16904 //   helper call
16905 //
16906 // Arguments:
16907 //    call - helper call to examine
16908 //    pIsExact - [OUT] true if type is known exactly
16909 //    pIsNonNull - [OUT] true if return value is not null
16910 //
16911 // Return Value:
16912 //    nullptr if helper call result is not a ref class, or the class handle
16913 //    is unknown, otherwise the class handle.
16914
16915 CORINFO_CLASS_HANDLE Compiler::gtGetHelperCallClassHandle(GenTreeCall* call, bool* pIsExact, bool* pIsNonNull)
16916 {
16917     assert(call->gtCallType == CT_HELPER);
16918
16919     *pIsNonNull                    = false;
16920     *pIsExact                      = false;
16921     CORINFO_CLASS_HANDLE  objClass = nullptr;
16922     const CorInfoHelpFunc helper   = eeGetHelperNum(call->gtCallMethHnd);
16923
16924     switch (helper)
16925     {
16926         case CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE:
16927         case CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE_MAYBENULL:
16928         {
16929             // Note for some runtimes these helpers return exact types.
16930             //
16931             // But in those cases the types are also sealed, so there's no
16932             // need to claim exactness here.
16933             const bool           helperResultNonNull = (helper == CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE);
16934             CORINFO_CLASS_HANDLE runtimeType         = info.compCompHnd->getBuiltinClass(CLASSID_RUNTIME_TYPE);
16935
16936             assert(runtimeType != NO_CLASS_HANDLE);
16937
16938             objClass    = runtimeType;
16939             *pIsNonNull = helperResultNonNull;
16940             break;
16941         }
16942
16943         case CORINFO_HELP_CHKCASTCLASS:
16944         case CORINFO_HELP_CHKCASTANY:
16945         case CORINFO_HELP_CHKCASTARRAY:
16946         case CORINFO_HELP_CHKCASTINTERFACE:
16947         case CORINFO_HELP_CHKCASTCLASS_SPECIAL:
16948         case CORINFO_HELP_ISINSTANCEOFINTERFACE:
16949         case CORINFO_HELP_ISINSTANCEOFARRAY:
16950         case CORINFO_HELP_ISINSTANCEOFCLASS:
16951         case CORINFO_HELP_ISINSTANCEOFANY:
16952         {
16953             // Fetch the class handle from the helper call arglist
16954             GenTreeArgList*      args    = call->gtCallArgs;
16955             GenTree*             typeArg = args->Current();
16956             CORINFO_CLASS_HANDLE castHnd = gtGetHelperArgClassHandle(typeArg);
16957
16958             // We generally assume the type being cast to is the best type
16959             // for the result, unless it is an interface type.
16960             //
16961             // TODO-CQ: when we have default interface methods then
16962             // this might not be the best assumption. We could also
16963             // explore calling something like mergeClasses to identify
16964             // the more specific class. A similar issue arises when
16965             // typing the temp in impCastClassOrIsInstToTree, when we
16966             // expand the cast inline.
16967             if (castHnd != nullptr)
16968             {
16969                 DWORD attrs = info.compCompHnd->getClassAttribs(castHnd);
16970
16971                 if ((attrs & CORINFO_FLG_INTERFACE) != 0)
16972                 {
16973                     castHnd = nullptr;
16974                 }
16975             }
16976
16977             // If we don't have a good estimate for the type we can use the
16978             // type from the value being cast instead.
16979             if (castHnd == nullptr)
16980             {
16981                 GenTree* valueArg = args->Rest()->Current();
16982                 castHnd           = gtGetClassHandle(valueArg, pIsExact, pIsNonNull);
16983             }
16984
16985             // We don't know at jit time if the cast will succeed or fail, but if it
16986             // fails at runtime then an exception is thrown for cast helpers, or the
16987             // result is set null for instance helpers.
16988             //
16989             // So it safe to claim the result has the cast type.
16990             // Note we don't know for sure that it is exactly this type.
16991             if (castHnd != nullptr)
16992             {
16993                 objClass = castHnd;
16994             }
16995
16996             break;
16997         }
16998
16999         default:
17000             break;
17001     }
17002
17003     return objClass;
17004 }
17005
17006 //------------------------------------------------------------------------
17007 // gtGetArrayElementClassHandle: find class handle for elements of an array
17008 // of ref types
17009 //
17010 // Arguments:
17011 //    array -- array to find handle for
17012 //
17013 // Return Value:
17014 //    nullptr if element class handle is unknown, otherwise the class handle.
17015
17016 CORINFO_CLASS_HANDLE Compiler::gtGetArrayElementClassHandle(GenTree* array)
17017 {
17018     bool                 isArrayExact   = false;
17019     bool                 isArrayNonNull = false;
17020     CORINFO_CLASS_HANDLE arrayClassHnd  = gtGetClassHandle(array, &isArrayExact, &isArrayNonNull);
17021
17022     if (arrayClassHnd != nullptr)
17023     {
17024         // We know the class of the reference
17025         DWORD attribs = info.compCompHnd->getClassAttribs(arrayClassHnd);
17026
17027         if ((attribs & CORINFO_FLG_ARRAY) != 0)
17028         {
17029             // We know for sure it is an array
17030             CORINFO_CLASS_HANDLE elemClassHnd  = nullptr;
17031             CorInfoType          arrayElemType = info.compCompHnd->getChildType(arrayClassHnd, &elemClassHnd);
17032
17033             if (arrayElemType == CORINFO_TYPE_CLASS)
17034             {
17035                 // We know it is an array of ref types
17036                 return elemClassHnd;
17037             }
17038         }
17039     }
17040
17041     return nullptr;
17042 }
17043
17044 //------------------------------------------------------------------------
17045 // gtGetFieldClassHandle: find class handle for a field
17046 //
17047 // Arguments:
17048 //    fieldHnd - field handle for field in question
17049 //    pIsExact - [OUT] true if type is known exactly
17050 //    pIsNonNull - [OUT] true if field value is not null
17051 //
17052 // Return Value:
//    nullptr if the field is not a ref class, or the class handle
17054 //    is unknown, otherwise the class handle.
17055 //
17056 //    May examine runtime state of static field instances.
17057
17058 CORINFO_CLASS_HANDLE Compiler::gtGetFieldClassHandle(CORINFO_FIELD_HANDLE fieldHnd, bool* pIsExact, bool* pIsNonNull)
17059 {
17060     CORINFO_CLASS_HANDLE fieldClass   = nullptr;
17061     CorInfoType          fieldCorType = info.compCompHnd->getFieldType(fieldHnd, &fieldClass);
17062
17063     if (fieldCorType == CORINFO_TYPE_CLASS)
17064     {
17065         // Optionally, look at the actual type of the field's value
17066         bool queryForCurrentClass = true;
17067         INDEBUG(queryForCurrentClass = (JitConfig.JitQueryCurrentStaticFieldClass() > 0););
17068
17069         if (queryForCurrentClass)
17070         {
17071
17072 #if DEBUG
17073             const char* fieldClassName = nullptr;
17074             const char* fieldName      = eeGetFieldName(fieldHnd, &fieldClassName);
17075             JITDUMP("Querying runtime about current class of field %s.%s (declared as %s)\n", fieldClassName, fieldName,
17076                     eeGetClassName(fieldClass));
17077 #endif // DEBUG
17078
17079             // Is this a fully initialized init-only static field?
17080             //
17081             // Note we're not asking for speculative results here, yet.
17082             CORINFO_CLASS_HANDLE currentClass = info.compCompHnd->getStaticFieldCurrentClass(fieldHnd);
17083
17084             if (currentClass != NO_CLASS_HANDLE)
17085             {
17086                 // Yes! We know the class exactly and can rely on this to always be true.
17087                 fieldClass  = currentClass;
17088                 *pIsExact   = true;
17089                 *pIsNonNull = true;
17090                 JITDUMP("Runtime reports field is init-only and initialized and has class %s\n",
17091                         eeGetClassName(fieldClass));
17092             }
17093             else
17094             {
17095                 JITDUMP("Field's current class not available\n");
17096             }
17097         }
17098     }
17099
17100     return fieldClass;
17101 }
17102
17103 //------------------------------------------------------------------------
// gtIsStaticGCBaseHelperCall: true if tree is fetching the gc static base
17105 //    for a subsequent static field access
17106 //
17107 // Arguments:
17108 //    tree - tree to consider
17109 //
17110 // Return Value:
17111 //    true if the tree is a suitable helper call
17112 //
17113 // Notes:
17114 //    Excludes R2R helpers as they specify the target field in a way
17115 //    that is opaque to the jit.
17116
17117 bool Compiler::gtIsStaticGCBaseHelperCall(GenTree* tree)
17118 {
17119     if (tree->OperGet() != GT_CALL)
17120     {
17121         return false;
17122     }
17123
17124     GenTreeCall* call = tree->AsCall();
17125
17126     if (call->gtCallType != CT_HELPER)
17127     {
17128         return false;
17129     }
17130
17131     const CorInfoHelpFunc helper = eeGetHelperNum(call->gtCallMethHnd);
17132
17133     switch (helper)
17134     {
17135         // We are looking for a REF type so only need to check for the GC base helpers
17136         case CORINFO_HELP_GETGENERICS_GCSTATIC_BASE:
17137         case CORINFO_HELP_GETSHARED_GCSTATIC_BASE:
17138         case CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR:
17139         case CORINFO_HELP_GETSHARED_GCSTATIC_BASE_DYNAMICCLASS:
17140         case CORINFO_HELP_GETGENERICS_GCTHREADSTATIC_BASE:
17141         case CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE:
17142         case CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE_NOCTOR:
17143         case CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE_DYNAMICCLASS:
17144             return true;
17145         default:
17146             break;
17147     }
17148
17149     return false;
17150 }
17151
//------------------------------------------------------------------------
// ParseArrayAddress: Given a tree that computes an array element address,
//    recover the array object, a value number for the element index, and
//    any struct field sequence applied on top of the element.
//
// Arguments:
//    comp      - compiler instance
//    arrayInfo - element size/offset info for this array access
//    pArr      - [OUT] the array object tree; nullptr if none was found
//    pInxVN    - [OUT] value number of the (unscaled) element index
//    pFldSeq   - [OUT] field sequence for any struct field accesses
//
// Notes:
//    ParseArrayAddressWork accumulates the raw constant byte offset and
//    index VN; this method then subtracts the first-element offset and
//    struct field offsets and divides by the element size to recover the
//    logical index.
void GenTree::ParseArrayAddress(
    Compiler* comp, ArrayInfo* arrayInfo, GenTree** pArr, ValueNum* pInxVN, FieldSeqNode** pFldSeq)
{
    *pArr                 = nullptr;
    ValueNum       inxVN  = ValueNumStore::NoVN;
    target_ssize_t offset = 0;
    FieldSeqNode*  fldSeq = nullptr;

    ParseArrayAddressWork(comp, 1, pArr, &inxVN, &offset, &fldSeq);

    // If we didn't find an array reference (perhaps it is the constant null?) we will give up.
    if (*pArr == nullptr)
    {
        return;
    }

    // OK, now we have to figure out if any part of the "offset" is a constant contribution to the index.
    // First, sum the offsets of any fields in fldSeq.
    unsigned      fieldOffsets = 0;
    FieldSeqNode* fldSeqIter   = fldSeq;
    // Also, find the first non-pseudo field...
    assert(*pFldSeq == nullptr);
    while (fldSeqIter != nullptr)
    {
        if (fldSeqIter == FieldSeqStore::NotAField())
        {
            // TODO-Review: A NotAField here indicates a failure to properly maintain the field sequence
            // See test case self_host_tests_x86\jit\regression\CLR-x86-JIT\v1-m12-beta2\ b70992\ b70992.exe
            // Safest thing to do here is to drop back to MinOpts
            CLANG_FORMAT_COMMENT_ANCHOR;

#ifdef DEBUG
            if (comp->opts.optRepeat)
            {
                // We don't guarantee preserving these annotations through the entire optimizer, so
                // just conservatively return null if under optRepeat.
                *pArr = nullptr;
                return;
            }
#endif // DEBUG
            noway_assert(!"fldSeqIter is NotAField() in ParseArrayAddress");
        }

        if (!FieldSeqStore::IsPseudoField(fldSeqIter->m_fieldHnd))
        {
            // Report the outermost real field to the caller.
            if (*pFldSeq == nullptr)
            {
                *pFldSeq = fldSeqIter;
            }
            CORINFO_CLASS_HANDLE fldCls = nullptr;
            noway_assert(fldSeqIter->m_fieldHnd != nullptr);
            CorInfoType cit = comp->info.compCompHnd->getFieldType(fldSeqIter->m_fieldHnd, &fldCls);
            fieldOffsets += comp->compGetTypeSize(cit, fldCls);
        }
        fldSeqIter = fldSeqIter->m_next;
    }

    // Is there some portion of the "offset" beyond the first-elem offset and the struct field suffix we just computed?
    if (!FitsIn<target_ssize_t>(fieldOffsets + arrayInfo->m_elemOffset) ||
        !FitsIn<target_ssize_t>(arrayInfo->m_elemSize))
    {
        // This seems unlikely, but no harm in being safe...
        *pInxVN = comp->GetValueNumStore()->VNForExpr(nullptr, TYP_INT);
        return;
    }
    // Otherwise...
    target_ssize_t offsetAccountedFor = static_cast<target_ssize_t>(fieldOffsets + arrayInfo->m_elemOffset);
    target_ssize_t elemSize           = static_cast<target_ssize_t>(arrayInfo->m_elemSize);

    target_ssize_t constIndOffset = offset - offsetAccountedFor;
    // This should be divisible by the element size...
    assert((constIndOffset % elemSize) == 0);
    target_ssize_t constInd = constIndOffset / elemSize;

    ValueNumStore* vnStore = comp->GetValueNumStore();

    if (inxVN == ValueNumStore::NoVN)
    {
        // Must be a constant index.
        *pInxVN = vnStore->VNForPtrSizeIntCon(constInd);
    }
    else
    {
        //
        // Perform ((inxVN / elemSizeVN) + vnForConstInd)
        //

        // The value associated with the index value number (inxVN) is the offset into the array,
        // which has been scaled by element size. We need to recover the array index from that offset
        if (vnStore->IsVNConstant(inxVN))
        {
            target_ssize_t index = vnStore->CoercedConstantValue<target_ssize_t>(inxVN);
            noway_assert(elemSize > 0 && ((index % elemSize) == 0));
            *pInxVN = vnStore->VNForPtrSizeIntCon((index / elemSize) + constInd);
        }
        else
        {
            bool canFoldDiv = false;

            // If the index VN is a MUL by elemSize, see if we can eliminate it instead of adding
            // the division by elemSize.
            VNFuncApp funcApp;
            if (vnStore->GetVNFunc(inxVN, &funcApp) && funcApp.m_func == (VNFunc)GT_MUL)
            {
                ValueNum vnForElemSize = vnStore->VNForLongCon(elemSize);

                // One of the multiply operand is elemSize, so the resulting
                // index VN should simply be the other operand.
                if (funcApp.m_args[1] == vnForElemSize)
                {
                    *pInxVN    = funcApp.m_args[0];
                    canFoldDiv = true;
                }
                else if (funcApp.m_args[0] == vnForElemSize)
                {
                    *pInxVN    = funcApp.m_args[1];
                    canFoldDiv = true;
                }
            }

            // Perform ((inxVN / elemSizeVN) + vnForConstInd)
            if (!canFoldDiv)
            {
                ValueNum vnForElemSize = vnStore->VNForPtrSizeIntCon(elemSize);
                ValueNum vnForScaledInx =
                    vnStore->VNForFunc(TYP_I_IMPL, GetVNFuncForOper(GT_DIV, VOK_Default), inxVN, vnForElemSize);
                *pInxVN = vnForScaledInx;
            }

            if (constInd != 0)
            {
                ValueNum vnForConstInd = comp->GetValueNumStore()->VNForPtrSizeIntCon(constInd);
                VNFunc   vnFunc        = GetVNFuncForOper(GT_ADD, VOK_Default);

                *pInxVN = comp->GetValueNumStore()->VNForFunc(TYP_I_IMPL, vnFunc, *pInxVN, vnForConstInd);
            }
        }
    }
}
17291
//------------------------------------------------------------------------
// ParseArrayAddressWork: Recursive worker for ParseArrayAddress. Walks an
//    address expression, identifying the TYP_REF array pointer and folding
//    constant contributions into *pOffset (scaled by the running multiplier
//    "inputMul") while accumulating non-constant contributions into *pInxVN.
//
// Arguments:
//    comp     - compiler instance
//    inputMul - multiplier applied to this subtree's contribution
//               (negated below a GT_SUB's second operand)
//    pArr     - [OUT] set to the array object tree when found
//    pInxVN   - [IN/OUT] accumulated VN for the non-constant index part
//    pOffset  - [IN/OUT] accumulated constant byte offset
//    pFldSeq  - [IN/OUT] accumulated field sequence from constant nodes
void GenTree::ParseArrayAddressWork(Compiler*       comp,
                                    target_ssize_t  inputMul,
                                    GenTree**       pArr,
                                    ValueNum*       pInxVN,
                                    target_ssize_t* pOffset,
                                    FieldSeqNode**  pFldSeq)
{
    if (TypeGet() == TYP_REF)
    {
        // This must be the array pointer.
        *pArr = this;
        assert(inputMul == 1); // Can't multiply the array pointer by anything.
    }
    else
    {
        switch (OperGet())
        {
            case GT_CNS_INT:
                // Constant: contributes (inputMul * value) to the offset and
                // carries any field sequence annotation.
                *pFldSeq = comp->GetFieldSeqStore()->Append(*pFldSeq, gtIntCon.gtFieldSeq);
                assert(!gtIntCon.ImmedValNeedsReloc(comp));
                // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntCon::gtIconVal had target_ssize_t
                // type.
                *pOffset += (inputMul * (target_ssize_t)(gtIntCon.gtIconVal));
                return;

            case GT_ADD:
            case GT_SUB:
                // Recurse into both operands; a subtrahend contributes with
                // a negated multiplier.
                gtOp.gtOp1->ParseArrayAddressWork(comp, inputMul, pArr, pInxVN, pOffset, pFldSeq);
                if (OperGet() == GT_SUB)
                {
                    inputMul = -inputMul;
                }
                gtOp.gtOp2->ParseArrayAddressWork(comp, inputMul, pArr, pInxVN, pOffset, pFldSeq);
                return;

            case GT_MUL:
            {
                // If one op is a constant, continue parsing down.
                target_ssize_t subMul   = 0;
                GenTree*       nonConst = nullptr;
                if (gtOp.gtOp1->IsCnsIntOrI())
                {
                    // If the other arg is an int constant, and is a "not-a-field", choose
                    // that as the multiplier, thus preserving constant index offsets...
                    if (gtOp.gtOp2->OperGet() == GT_CNS_INT &&
                        gtOp.gtOp2->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField())
                    {
                        assert(!gtOp.gtOp2->gtIntCon.ImmedValNeedsReloc(comp));
                        // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntConCommon::gtIconVal had
                        // target_ssize_t type.
                        subMul   = (target_ssize_t)gtOp.gtOp2->gtIntConCommon.IconValue();
                        nonConst = gtOp.gtOp1;
                    }
                    else
                    {
                        assert(!gtOp.gtOp1->gtIntCon.ImmedValNeedsReloc(comp));
                        // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntConCommon::gtIconVal had
                        // target_ssize_t type.
                        subMul   = (target_ssize_t)gtOp.gtOp1->gtIntConCommon.IconValue();
                        nonConst = gtOp.gtOp2;
                    }
                }
                else if (gtOp.gtOp2->IsCnsIntOrI())
                {
                    assert(!gtOp.gtOp2->gtIntCon.ImmedValNeedsReloc(comp));
                    // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntConCommon::gtIconVal had
                    // target_ssize_t type.
                    subMul   = (target_ssize_t)gtOp.gtOp2->gtIntConCommon.IconValue();
                    nonConst = gtOp.gtOp1;
                }
                if (nonConst != nullptr)
                {
                    // Fold the constant factor into the running multiplier.
                    nonConst->ParseArrayAddressWork(comp, inputMul * subMul, pArr, pInxVN, pOffset, pFldSeq);
                    return;
                }
                // Otherwise, exit the switch, treat as a contribution to the index.
            }
            break;

            case GT_LSH:
                // If one op is a constant, continue parsing down.
                if (gtOp.gtOp2->IsCnsIntOrI())
                {
                    assert(!gtOp.gtOp2->gtIntCon.ImmedValNeedsReloc(comp));
                    // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntCon::gtIconVal had target_ssize_t
                    // type.
                    target_ssize_t subMul = target_ssize_t{1} << (target_ssize_t)gtOp.gtOp2->gtIntConCommon.IconValue();
                    gtOp.gtOp1->ParseArrayAddressWork(comp, inputMul * subMul, pArr, pInxVN, pOffset, pFldSeq);
                    return;
                }
                // Otherwise, exit the switch, treat as a contribution to the index.
                break;

            case GT_COMMA:
                // We don't care about exceptions for this purpose.
                if ((gtOp.gtOp1->OperGet() == GT_ARR_BOUNDS_CHECK) || gtOp.gtOp1->IsNothingNode())
                {
                    gtOp.gtOp2->ParseArrayAddressWork(comp, inputMul, pArr, pInxVN, pOffset, pFldSeq);
                    return;
                }
                break;

            default:
                break;
        }
        // If we didn't return above, must be a contribution to the non-constant part of the index VN.
        ValueNum vn = comp->GetValueNumStore()->VNLiberalNormalValue(gtVNPair);
        if (inputMul != 1)
        {
            ValueNum mulVN = comp->GetValueNumStore()->VNForLongCon(inputMul);
            vn = comp->GetValueNumStore()->VNForFunc(TypeGet(), GetVNFuncForOper(GT_MUL, VOK_Default), mulVN, vn);
        }
        if (*pInxVN == ValueNumStore::NoVN)
        {
            *pInxVN = vn;
        }
        else
        {
            *pInxVN =
                comp->GetValueNumStore()->VNForFunc(TypeGet(), GetVNFuncForOper(GT_ADD, VOK_Default), *pInxVN, vn);
        }
    }
}
17415
17416 bool GenTree::ParseArrayElemForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq)
17417 {
17418     if (OperIsIndir())
17419     {
17420         if (gtFlags & GTF_IND_ARR_INDEX)
17421         {
17422             bool b = comp->GetArrayInfoMap()->Lookup(this, arrayInfo);
17423             assert(b);
17424             return true;
17425         }
17426
17427         // Otherwise...
17428         GenTree* addr = AsIndir()->Addr();
17429         return addr->ParseArrayElemAddrForm(comp, arrayInfo, pFldSeq);
17430     }
17431     else
17432     {
17433         return false;
17434     }
17435 }
17436
17437 bool GenTree::ParseArrayElemAddrForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq)
17438 {
17439     switch (OperGet())
17440     {
17441         case GT_ADD:
17442         {
17443             GenTree* arrAddr = nullptr;
17444             GenTree* offset  = nullptr;
17445             if (gtOp.gtOp1->TypeGet() == TYP_BYREF)
17446             {
17447                 arrAddr = gtOp.gtOp1;
17448                 offset  = gtOp.gtOp2;
17449             }
17450             else if (gtOp.gtOp2->TypeGet() == TYP_BYREF)
17451             {
17452                 arrAddr = gtOp.gtOp2;
17453                 offset  = gtOp.gtOp1;
17454             }
17455             else
17456             {
17457                 return false;
17458             }
17459             if (!offset->ParseOffsetForm(comp, pFldSeq))
17460             {
17461                 return false;
17462             }
17463             return arrAddr->ParseArrayElemAddrForm(comp, arrayInfo, pFldSeq);
17464         }
17465
17466         case GT_ADDR:
17467         {
17468             GenTree* addrArg = gtOp.gtOp1;
17469             if (addrArg->OperGet() != GT_IND)
17470             {
17471                 return false;
17472             }
17473             else
17474             {
17475                 // The "Addr" node might be annotated with a zero-offset field sequence.
17476                 FieldSeqNode* zeroOffsetFldSeq = nullptr;
17477                 if (comp->GetZeroOffsetFieldMap()->Lookup(this, &zeroOffsetFldSeq))
17478                 {
17479                     *pFldSeq = comp->GetFieldSeqStore()->Append(*pFldSeq, zeroOffsetFldSeq);
17480                 }
17481                 return addrArg->ParseArrayElemForm(comp, arrayInfo, pFldSeq);
17482             }
17483         }
17484
17485         default:
17486             return false;
17487     }
17488 }
17489
17490 bool GenTree::ParseOffsetForm(Compiler* comp, FieldSeqNode** pFldSeq)
17491 {
17492     switch (OperGet())
17493     {
17494         case GT_CNS_INT:
17495         {
17496             GenTreeIntCon* icon = AsIntCon();
17497             *pFldSeq            = comp->GetFieldSeqStore()->Append(*pFldSeq, icon->gtFieldSeq);
17498             return true;
17499         }
17500
17501         case GT_ADD:
17502             if (!gtOp.gtOp1->ParseOffsetForm(comp, pFldSeq))
17503             {
17504                 return false;
17505             }
17506             return gtOp.gtOp2->ParseOffsetForm(comp, pFldSeq);
17507
17508         default:
17509             return false;
17510     }
17511 }
17512
//------------------------------------------------------------------------
// LabelIndex: Walk an array-index expression, annotating the nodes that make it up.
//
// Arguments:
//    comp    - compiler instance
//    isConst - true if, so far, the subtree being walked may contribute to the
//              constant part of the index; cleared when recursing into operands
//              that cannot be proven constant
//
// Notes:
//    Constants reached while 'isConst' is true are given a ConstantIndexPseudoField
//    field sequence; locals get GTF_VAR_ARR_INDEX and array lengths get
//    GTF_ARRLEN_ARR_IDX to mark their participation in an array index.
//
void GenTree::LabelIndex(Compiler* comp, bool isConst)
{
    switch (OperGet())
    {
        case GT_CNS_INT:
            // If we got here, this is a contribution to the constant part of the index.
            if (isConst)
            {
                gtIntCon.gtFieldSeq =
                    comp->GetFieldSeqStore()->CreateSingleton(FieldSeqStore::ConstantIndexPseudoField);
            }
            return;

        case GT_LCL_VAR:
            // A local variable contributing (non-constantly) to the index.
            gtFlags |= GTF_VAR_ARR_INDEX;
            return;

        case GT_ADD:
        case GT_SUB:
            // Both operands may contribute; propagate the current const-ness.
            gtOp.gtOp1->LabelIndex(comp, isConst);
            gtOp.gtOp2->LabelIndex(comp, isConst);
            break;

        case GT_CAST:
            // A cast is transparent for labeling purposes.
            gtOp.gtOp1->LabelIndex(comp, isConst);
            break;

        case GT_ARR_LENGTH:
            // An array length feeding an index computation.
            gtFlags |= GTF_ARRLEN_ARR_IDX;
            return;

        default:
            // For all other operators, peel off one constant; and then label the other if it's also a constant.
            if (OperIsArithmetic() || OperIsCompare())
            {
                if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
                {
                    gtOp.gtOp1->LabelIndex(comp, isConst);
                    break;
                }
                else if (gtOp.gtOp1->OperGet() == GT_CNS_INT)
                {
                    gtOp.gtOp2->LabelIndex(comp, isConst);
                    break;
                }
                // Otherwise continue downward on both, labeling vars.
                gtOp.gtOp1->LabelIndex(comp, false);
                gtOp.gtOp2->LabelIndex(comp, false);
            }
            break;
    }
}
17565
// The distinguished "not a field" sentinel. Note that the value of the below field
// doesn't matter; it exists only to provide a distinguished address, compared against
// via NotAField().
//
// static
FieldSeqNode FieldSeqStore::s_notAField(nullptr, nullptr);
17570
// FieldSeqStore methods.

// Constructor: remembers the allocator and creates the canonicalization map,
// which ensures that structurally-equal field sequences share one node.
FieldSeqStore::FieldSeqStore(CompAllocator alloc) : m_alloc(alloc), m_canonMap(new (alloc) FieldSeqNodeCanonMap(alloc))
{
}
17575
17576 FieldSeqNode* FieldSeqStore::CreateSingleton(CORINFO_FIELD_HANDLE fieldHnd)
17577 {
17578     FieldSeqNode  fsn(fieldHnd, nullptr);
17579     FieldSeqNode* res = nullptr;
17580     if (m_canonMap->Lookup(fsn, &res))
17581     {
17582         return res;
17583     }
17584     else
17585     {
17586         res  = m_alloc.allocate<FieldSeqNode>(1);
17587         *res = fsn;
17588         m_canonMap->Set(fsn, res);
17589         return res;
17590     }
17591 }
17592
//------------------------------------------------------------------------
// Append: Concatenate two canonical field sequences, returning the canonical result.
//
// Arguments:
//    a - first sequence; nullptr represents the empty sequence
//    b - second sequence; nullptr represents the empty sequence
//
// Return Value:
//    The canonicalized concatenation of 'a' and 'b'. NotAField() is absorbing:
//    if either input is NotAField(), so is the result.
//
FieldSeqNode* FieldSeqStore::Append(FieldSeqNode* a, FieldSeqNode* b)
{
    if (a == nullptr)
    {
        return b;
    }
    else if (a == NotAField())
    {
        return NotAField();
    }
    else if (b == nullptr)
    {
        return a;
    }
    else if (b == NotAField())
    {
        return NotAField();
    }
    // Extremely special case for ConstantIndex pseudo-fields -- appending consecutive such
    // together collapse to one.
    else if (a->m_next == nullptr && a->m_fieldHnd == ConstantIndexPseudoField &&
             b->m_fieldHnd == ConstantIndexPseudoField)
    {
        return b;
    }
    else
    {
        // Recursively append a's tail to 'b', then canonicalize a node for a's head.
        FieldSeqNode* tmp = Append(a->m_next, b);
        FieldSeqNode  fsn(a->m_fieldHnd, tmp);
        FieldSeqNode* res = nullptr;
        if (m_canonMap->Lookup(fsn, &res))
        {
            return res;
        }
        else
        {
            res  = m_alloc.allocate<FieldSeqNode>(1);
            *res = fsn;
            m_canonMap->Set(fsn, res);
            return res;
        }
    }
}
17636
// Static vars. The two ints below exist only to provide distinguished addresses for the
// pseudo-field handles; their values are never read.
int FieldSeqStore::FirstElemPseudoFieldStruct;
int FieldSeqStore::ConstantIndexPseudoFieldStruct;

// Pseudo-field handle representing the first element of an array.
CORINFO_FIELD_HANDLE FieldSeqStore::FirstElemPseudoField =
    (CORINFO_FIELD_HANDLE)&FieldSeqStore::FirstElemPseudoFieldStruct;
// Pseudo-field handle representing a constant array index contribution.
CORINFO_FIELD_HANDLE FieldSeqStore::ConstantIndexPseudoField =
    (CORINFO_FIELD_HANDLE)&FieldSeqStore::ConstantIndexPseudoFieldStruct;
17645
17646 bool FieldSeqNode::IsFirstElemFieldSeq()
17647 {
17648     // this must be non-null per ISO C++
17649     return m_fieldHnd == FieldSeqStore::FirstElemPseudoField;
17650 }
17651
17652 bool FieldSeqNode::IsConstantIndexFieldSeq()
17653 {
17654     // this must be non-null per ISO C++
17655     return m_fieldHnd == FieldSeqStore::ConstantIndexPseudoField;
17656 }
17657
// Returns true if this node is one of the two pseudo-field markers
// (FirstElemPseudoField or ConstantIndexPseudoField); false for real fields
// and for a null receiver.
bool FieldSeqNode::IsPseudoField()
{
    // NOTE(review): calling a member function on a null 'this' is undefined behavior
    // per ISO C++; callers evidently rely on this null check, so it cannot simply be
    // removed here -- the callers should be changed to check for null first.
    if (this == nullptr)
    {
        return false;
    }
    return m_fieldHnd == FieldSeqStore::FirstElemPseudoField || m_fieldHnd == FieldSeqStore::ConstantIndexPseudoField;
}
17666
17667 #ifdef FEATURE_SIMD
// gtNewSIMDNode: Create a new unary GT_SIMD node, first marking any local
// referenced by 'op1' as used by a SIMD intrinsic.
GenTreeSIMD* Compiler::gtNewSIMDNode(
    var_types type, GenTree* op1, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
{
    assert(op1 != nullptr);
    SetOpLclRelatedToSIMDIntrinsic(op1);

    return new (this, GT_SIMD) GenTreeSIMD(type, op1, simdIntrinsicID, baseType, size);
}
17676
// gtNewSIMDNode: Create a new binary GT_SIMD node, first marking any locals
// referenced by the operands as used by a SIMD intrinsic.
// Note: op2 may be null; SetOpLclRelatedToSIMDIntrinsic tolerates that.
GenTreeSIMD* Compiler::gtNewSIMDNode(
    var_types type, GenTree* op1, GenTree* op2, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
{
    assert(op1 != nullptr);
    SetOpLclRelatedToSIMDIntrinsic(op1);
    SetOpLclRelatedToSIMDIntrinsic(op2);

    return new (this, GT_SIMD) GenTreeSIMD(type, op1, op2, simdIntrinsicID, baseType, size);
}
17686
17687 //-------------------------------------------------------------------
17688 // SetOpLclRelatedToSIMDIntrinsic: Determine if the tree has a local var that needs to be set
17689 // as used by a SIMD intrinsic, and if so, set that local var appropriately.
17690 //
17691 // Arguments:
17692 //     op - The tree, to be an operand of a new GT_SIMD node, to check.
17693 //
17694 void Compiler::SetOpLclRelatedToSIMDIntrinsic(GenTree* op)
17695 {
17696     if (op != nullptr)
17697     {
17698         if (op->OperIsLocal())
17699         {
17700             setLclRelatedToSIMDIntrinsic(op);
17701         }
17702         else if ((op->OperGet() == GT_OBJ) && (op->gtOp.gtOp1->OperGet() == GT_ADDR) &&
17703                  op->gtOp.gtOp1->gtOp.gtOp1->OperIsLocal())
17704         {
17705             setLclRelatedToSIMDIntrinsic(op->gtOp.gtOp1->gtOp.gtOp1);
17706         }
17707     }
17708 }
17709
// isCommutativeSIMDIntrinsic: Returns true if this GT_SIMD node's intrinsic
// is commutative (its operands may be swapped without changing the result).
bool GenTree::isCommutativeSIMDIntrinsic()
{
    assert(gtOper == GT_SIMD);
    switch (AsSIMD()->gtSIMDIntrinsicID)
    {
        case SIMDIntrinsicAdd:
        case SIMDIntrinsicBitwiseAnd:
        case SIMDIntrinsicBitwiseOr:
        case SIMDIntrinsicBitwiseXor:
        case SIMDIntrinsicEqual:
        case SIMDIntrinsicMax:
        case SIMDIntrinsicMin:
        case SIMDIntrinsicMul:
        case SIMDIntrinsicOpEquality:
        case SIMDIntrinsicOpInEquality:
            return true;
        default:
            return false;
    }
}
17730 #endif // FEATURE_SIMD
17731
17732 #ifdef FEATURE_HW_INTRINSICS
// isCommutativeHWIntrinsic: Returns true if this GT_HWIntrinsic node's intrinsic
// is commutative. Only xarch has commutative HW intrinsics; other targets return false.
bool GenTree::isCommutativeHWIntrinsic() const
{
    assert(gtOper == GT_HWIntrinsic);

#ifdef _TARGET_XARCH_
    return HWIntrinsicInfo::IsCommutative(AsHWIntrinsic()->gtHWIntrinsicId);
#else
    return false;
#endif // _TARGET_XARCH_
}
17743
// isContainableHWIntrinsic: Returns true if this GT_HWIntrinsic node may be
// contained (folded into its consumer's instruction encoding). On xarch this
// is true for the simple vector load intrinsics; other targets return false.
bool GenTree::isContainableHWIntrinsic() const
{
    assert(gtOper == GT_HWIntrinsic);

#ifdef _TARGET_XARCH_
    switch (AsHWIntrinsic()->gtHWIntrinsicId)
    {
        case NI_SSE_LoadAlignedVector128:
        case NI_SSE_LoadScalarVector128:
        case NI_SSE_LoadVector128:
        case NI_SSE2_LoadAlignedVector128:
        case NI_SSE2_LoadScalarVector128:
        case NI_SSE2_LoadVector128:
        case NI_AVX_LoadAlignedVector256:
        case NI_AVX_LoadVector256:
        {
            return true;
        }

        default:
        {
            return false;
        }
    }
#else
    return false;
#endif // _TARGET_XARCH_
}
17772
// isRMWHWIntrinsic: Returns true if this GT_HWIntrinsic node has read-modify-write
// semantics (the destination register is also a source). Without VEX encoding, any
// intrinsic the tables mark as RMW qualifies; with VEX, only the listed intrinsics do.
bool GenTree::isRMWHWIntrinsic(Compiler* comp)
{
    assert(gtOper == GT_HWIntrinsic);
    assert(comp != nullptr);

#ifdef _TARGET_XARCH_
    if (!comp->canUseVexEncoding())
    {
        // Legacy (non-VEX) encodings are two-operand, so table-driven RMW info applies.
        return HWIntrinsicInfo::HasRMWSemantics(AsHWIntrinsic()->gtHWIntrinsicId);
    }

    switch (AsHWIntrinsic()->gtHWIntrinsicId)
    {
        // TODO-XArch-Cleanup: Move this switch block to be table driven.

        case NI_SSE42_Crc32:
        case NI_SSE42_X64_Crc32:
        case NI_FMA_MultiplyAdd:
        case NI_FMA_MultiplyAddNegated:
        case NI_FMA_MultiplyAddNegatedScalar:
        case NI_FMA_MultiplyAddScalar:
        case NI_FMA_MultiplyAddSubtract:
        case NI_FMA_MultiplySubtract:
        case NI_FMA_MultiplySubtractAdd:
        case NI_FMA_MultiplySubtractNegated:
        case NI_FMA_MultiplySubtractNegatedScalar:
        case NI_FMA_MultiplySubtractScalar:
        {
            return true;
        }

        default:
        {
            return false;
        }
    }
#else
    return false;
#endif // _TARGET_XARCH_
}
17813
// gtNewSimdHWIntrinsicNode: Create a new zero-operand SIMD GT_HWIntrinsic node.
GenTreeHWIntrinsic* Compiler::gtNewSimdHWIntrinsicNode(var_types      type,
                                                       NamedIntrinsic hwIntrinsicID,
                                                       var_types      baseType,
                                                       unsigned       size)
{
    return new (this, GT_HWIntrinsic) GenTreeHWIntrinsic(type, hwIntrinsicID, baseType, size);
}
17821
// gtNewSimdHWIntrinsicNode: Create a new one-operand SIMD GT_HWIntrinsic node,
// first marking any local referenced by 'op1' as used by a SIMD intrinsic.
GenTreeHWIntrinsic* Compiler::gtNewSimdHWIntrinsicNode(
    var_types type, GenTree* op1, NamedIntrinsic hwIntrinsicID, var_types baseType, unsigned simdSize)
{
    SetOpLclRelatedToSIMDIntrinsic(op1);

    return new (this, GT_HWIntrinsic) GenTreeHWIntrinsic(type, op1, hwIntrinsicID, baseType, simdSize);
}
17829
// gtNewSimdHWIntrinsicNode: Create a new two-operand SIMD GT_HWIntrinsic node,
// first marking any locals referenced by the operands as used by a SIMD intrinsic.
GenTreeHWIntrinsic* Compiler::gtNewSimdHWIntrinsicNode(
    var_types type, GenTree* op1, GenTree* op2, NamedIntrinsic hwIntrinsicID, var_types baseType, unsigned simdSize)
{
    SetOpLclRelatedToSIMDIntrinsic(op1);
    SetOpLclRelatedToSIMDIntrinsic(op2);

    return new (this, GT_HWIntrinsic) GenTreeHWIntrinsic(type, op1, op2, hwIntrinsicID, baseType, simdSize);
}
17838
// gtNewSimdHWIntrinsicNode: Create a new three-operand SIMD GT_HWIntrinsic node.
// The operands are wrapped in an argument list; any locals they reference are
// first marked as used by a SIMD intrinsic.
GenTreeHWIntrinsic* Compiler::gtNewSimdHWIntrinsicNode(var_types      type,
                                                       GenTree*       op1,
                                                       GenTree*       op2,
                                                       GenTree*       op3,
                                                       NamedIntrinsic hwIntrinsicID,
                                                       var_types      baseType,
                                                       unsigned       size)
{
    SetOpLclRelatedToSIMDIntrinsic(op1);
    SetOpLclRelatedToSIMDIntrinsic(op2);
    SetOpLclRelatedToSIMDIntrinsic(op3);

    return new (this, GT_HWIntrinsic)
        GenTreeHWIntrinsic(type, gtNewArgList(op1, op2, op3), hwIntrinsicID, baseType, size);
}
17854
// gtNewSimdHWIntrinsicNode: Create a new four-operand SIMD GT_HWIntrinsic node.
// The operands are wrapped in an argument list; any locals they reference are
// first marked as used by a SIMD intrinsic.
GenTreeHWIntrinsic* Compiler::gtNewSimdHWIntrinsicNode(var_types      type,
                                                       GenTree*       op1,
                                                       GenTree*       op2,
                                                       GenTree*       op3,
                                                       GenTree*       op4,
                                                       NamedIntrinsic hwIntrinsicID,
                                                       var_types      baseType,
                                                       unsigned       size)
{
    SetOpLclRelatedToSIMDIntrinsic(op1);
    SetOpLclRelatedToSIMDIntrinsic(op2);
    SetOpLclRelatedToSIMDIntrinsic(op3);
    SetOpLclRelatedToSIMDIntrinsic(op4);

    return new (this, GT_HWIntrinsic)
        GenTreeHWIntrinsic(type, gtNewArgList(op1, op2, op3, op4), hwIntrinsicID, baseType, size);
}
17872
// gtNewScalarHWIntrinsicNode: Create a new one-operand scalar GT_HWIntrinsic node
// (baseType TYP_UNKNOWN, simd size 0).
GenTreeHWIntrinsic* Compiler::gtNewScalarHWIntrinsicNode(var_types type, GenTree* op1, NamedIntrinsic hwIntrinsicID)
{
    SetOpLclRelatedToSIMDIntrinsic(op1);

    return new (this, GT_HWIntrinsic) GenTreeHWIntrinsic(type, op1, hwIntrinsicID, TYP_UNKNOWN, 0);
}
17879
// gtNewScalarHWIntrinsicNode: Create a new two-operand scalar GT_HWIntrinsic node
// (baseType TYP_UNKNOWN, simd size 0).
GenTreeHWIntrinsic* Compiler::gtNewScalarHWIntrinsicNode(var_types      type,
                                                         GenTree*       op1,
                                                         GenTree*       op2,
                                                         NamedIntrinsic hwIntrinsicID)
{
    SetOpLclRelatedToSIMDIntrinsic(op1);
    SetOpLclRelatedToSIMDIntrinsic(op2);

    return new (this, GT_HWIntrinsic) GenTreeHWIntrinsic(type, op1, op2, hwIntrinsicID, TYP_UNKNOWN, 0);
}
17890
// gtNewScalarHWIntrinsicNode: Create a new three-operand scalar GT_HWIntrinsic node
// (baseType TYP_UNKNOWN, simd size 0); the operands are wrapped in an argument list.
GenTreeHWIntrinsic* Compiler::gtNewScalarHWIntrinsicNode(
    var_types type, GenTree* op1, GenTree* op2, GenTree* op3, NamedIntrinsic hwIntrinsicID)
{
    SetOpLclRelatedToSIMDIntrinsic(op1);
    SetOpLclRelatedToSIMDIntrinsic(op2);
    SetOpLclRelatedToSIMDIntrinsic(op3);

    return new (this, GT_HWIntrinsic)
        GenTreeHWIntrinsic(type, gtNewArgList(op1, op2, op3), hwIntrinsicID, TYP_UNKNOWN, 0);
}
17901
17902 //---------------------------------------------------------------------------------------
17903 // gtNewMustThrowException:
17904 //    create a throw node (calling into JIT helper) that must be thrown.
17905 //    The result would be a comma node: COMMA(jithelperthrow(void), x) where x's type should be specified.
17906 //
17907 // Arguments
17908 //    helper      -  JIT helper ID
17909 //    type        -  return type of the node
17910 //
17911 // Return Value
17912 //    pointer to the throw node
17913 //
17914 GenTree* Compiler::gtNewMustThrowException(unsigned helper, var_types type, CORINFO_CLASS_HANDLE clsHnd)
17915 {
17916     GenTreeCall* node = gtNewHelperCallNode(helper, TYP_VOID);
17917     node->gtCallMoreFlags |= GTF_CALL_M_DOES_NOT_RETURN;
17918     if (type != TYP_VOID)
17919     {
17920         unsigned dummyTemp = lvaGrabTemp(true DEBUGARG("dummy temp of must thrown exception"));
17921         if (type == TYP_STRUCT)
17922         {
17923             lvaSetStruct(dummyTemp, clsHnd, false);
17924             type = lvaTable[dummyTemp].lvType; // struct type is normalized
17925         }
17926         else
17927         {
17928             lvaTable[dummyTemp].lvType = type;
17929         }
17930         GenTree* dummyNode = gtNewLclvNode(dummyTemp, type);
17931         return gtNewOperNode(GT_COMMA, type, node, dummyNode);
17932     }
17933     return node;
17934 }
17935
// Returns true for the HW Intrinsic instructions that have MemoryLoad semantics, false otherwise
bool GenTreeHWIntrinsic::OperIsMemoryLoad()
{
#ifdef _TARGET_XARCH_
    // Some xarch instructions have MemoryLoad semantics
    HWIntrinsicCategory category = HWIntrinsicInfo::lookupCategory(gtHWIntrinsicId);
    if (category == HW_Category_MemoryLoad)
    {
        return true;
    }
    else if (HWIntrinsicInfo::MaybeMemoryLoad(gtHWIntrinsicId))
    {
        // Some intrinsics (without HW_Category_MemoryLoad) also have MemoryLoad semantics

        if (category == HW_Category_SIMDScalar)
        {
            // Avx2.BroadcastScalarToVector128/256 have vector and pointer overloads both, e.g.,
            // Vector128<byte> BroadcastScalarToVector128(Vector128<byte> value)
            // Vector128<byte> BroadcastScalarToVector128(byte* source)
            // So, we need to check the argument's type is memory-reference or Vector128
            assert(HWIntrinsicInfo::lookupNumArgs(this) == 1);
            return (gtHWIntrinsicId == NI_AVX2_BroadcastScalarToVector128 ||
                    gtHWIntrinsicId == NI_AVX2_BroadcastScalarToVector256) &&
                   gtOp.gtOp1->TypeGet() != TYP_SIMD16;
        }
        else if (category == HW_Category_IMM)
        {
            // Do we have less than 3 operands?
            if (HWIntrinsicInfo::lookupNumArgs(this) < 3)
            {
                return false;
            }
            else if (HWIntrinsicInfo::isAVX2GatherIntrinsic(gtHWIntrinsicId))
            {
                // AVX2 gather intrinsics read from memory through their index/base operands.
                return true;
            }
        }
    }
#endif // _TARGET_XARCH_
    return false;
}
17977
// Returns true for the HW Intrinsic instructions that have MemoryStore semantics, false otherwise
bool GenTreeHWIntrinsic::OperIsMemoryStore()
{
#ifdef _TARGET_XARCH_
    // Some xarch instructions have MemoryStore semantics
    HWIntrinsicCategory category = HWIntrinsicInfo::lookupCategory(gtHWIntrinsicId);
    if (category == HW_Category_MemoryStore)
    {
        return true;
    }
    else if (HWIntrinsicInfo::MaybeMemoryStore(gtHWIntrinsicId) &&
             (category == HW_Category_IMM || category == HW_Category_Scalar))
    {
        // Some intrinsics (without HW_Category_MemoryStore) also have MemoryStore semantics

        // Bmi2/Bmi2.X64.MultiplyNoFlags may return the lower half result by a out argument
        // unsafe ulong MultiplyNoFlags(ulong left, ulong right, ulong* low)
        //
        // So, the 3-argument form is MemoryStore
        if (HWIntrinsicInfo::lookupNumArgs(this) == 3)
        {
            switch (gtHWIntrinsicId)
            {
                case NI_BMI2_MultiplyNoFlags:
                case NI_BMI2_X64_MultiplyNoFlags:
                    return true;
                default:
                    return false;
            }
        }
    }
#endif // _TARGET_XARCH_
    return false;
}
18012
18013 // Returns true for the HW Instrinsic instructions that have MemoryLoad semantics, false otherwise
18014 bool GenTreeHWIntrinsic::OperIsMemoryLoadOrStore()
18015 {
18016 #ifdef _TARGET_XARCH_
18017     return OperIsMemoryLoad() || OperIsMemoryStore();
18018 #endif // _TARGET_XARCH_
18019     return false;
18020 }
18021
18022 #endif // FEATURE_HW_INTRINSICS
18023
18024 //---------------------------------------------------------------------------------------
18025 // InitializeStructReturnType:
18026 //    Initialize the Return Type Descriptor for a method that returns a struct type
18027 //
18028 // Arguments
18029 //    comp        -  Compiler Instance
18030 //    retClsHnd   -  VM handle to the struct type returned by the method
18031 //
18032 // Return Value
18033 //    None
18034 //
18035 void ReturnTypeDesc::InitializeStructReturnType(Compiler* comp, CORINFO_CLASS_HANDLE retClsHnd)
18036 {
18037     assert(!m_inited);
18038
18039 #if FEATURE_MULTIREG_RET
18040
18041     assert(retClsHnd != NO_CLASS_HANDLE);
18042     unsigned structSize = comp->info.compCompHnd->getClassSize(retClsHnd);
18043
18044     Compiler::structPassingKind howToReturnStruct;
18045     var_types                   returnType = comp->getReturnTypeForStruct(retClsHnd, &howToReturnStruct, structSize);
18046
18047     switch (howToReturnStruct)
18048     {
18049         case Compiler::SPK_EnclosingType:
18050             m_isEnclosingType = true;
18051             __fallthrough;
18052
18053         case Compiler::SPK_PrimitiveType:
18054         {
18055             assert(returnType != TYP_UNKNOWN);
18056             assert(!varTypeIsStruct(returnType));
18057             m_regType[0] = returnType;
18058             break;
18059         }
18060
18061         case Compiler::SPK_ByValueAsHfa:
18062         {
18063             assert(varTypeIsStruct(returnType));
18064             var_types hfaType = comp->GetHfaType(retClsHnd);
18065
18066             // We should have an hfa struct type
18067             assert(varTypeIsFloating(hfaType));
18068
18069             // Note that the retail build issues a warning about a potential divsion by zero without this Max function
18070             unsigned elemSize = Max((unsigned)1, EA_SIZE_IN_BYTES(emitActualTypeSize(hfaType)));
18071
18072             // The size of this struct should be evenly divisible by elemSize
18073             assert((structSize % elemSize) == 0);
18074
18075             unsigned hfaCount = (structSize / elemSize);
18076             for (unsigned i = 0; i < hfaCount; ++i)
18077             {
18078                 m_regType[i] = hfaType;
18079             }
18080
18081             if (comp->compFloatingPointUsed == false)
18082             {
18083                 comp->compFloatingPointUsed = true;
18084             }
18085             break;
18086         }
18087
18088         case Compiler::SPK_ByValue:
18089         {
18090             assert(varTypeIsStruct(returnType));
18091
18092 #ifdef UNIX_AMD64_ABI
18093
18094             SYSTEMV_AMD64_CORINFO_STRUCT_REG_PASSING_DESCRIPTOR structDesc;
18095             comp->eeGetSystemVAmd64PassStructInRegisterDescriptor(retClsHnd, &structDesc);
18096
18097             assert(structDesc.passedInRegisters);
18098             for (int i = 0; i < structDesc.eightByteCount; i++)
18099             {
18100                 assert(i < MAX_RET_REG_COUNT);
18101                 m_regType[i] = comp->GetEightByteType(structDesc, i);
18102             }
18103
18104 #elif defined(_TARGET_ARM64_)
18105
18106             // a non-HFA struct returned using two registers
18107             //
18108             assert((structSize > TARGET_POINTER_SIZE) && (structSize <= (2 * TARGET_POINTER_SIZE)));
18109
18110             BYTE gcPtrs[2] = {TYPE_GC_NONE, TYPE_GC_NONE};
18111             comp->info.compCompHnd->getClassGClayout(retClsHnd, &gcPtrs[0]);
18112             for (unsigned i = 0; i < 2; ++i)
18113             {
18114                 m_regType[i] = comp->getJitGCType(gcPtrs[i]);
18115             }
18116
18117 #else //  _TARGET_XXX_
18118
18119             // This target needs support here!
18120             //
18121             NYI("Unsupported TARGET returning a TYP_STRUCT in InitializeStructReturnType");
18122
18123 #endif // UNIX_AMD64_ABI
18124
18125             break; // for case SPK_ByValue
18126         }
18127
18128         case Compiler::SPK_ByReference:
18129
18130             // We are returning using the return buffer argument
18131             // There are no return registers
18132             break;
18133
18134         default:
18135
18136             unreached(); // By the contract of getReturnTypeForStruct we should never get here.
18137
18138     } // end of switch (howToReturnStruct)
18139
18140 #endif //  FEATURE_MULTIREG_RET
18141
18142 #ifdef DEBUG
18143     m_inited = true;
18144 #endif
18145 }
18146
18147 //---------------------------------------------------------------------------------------
18148 // InitializeLongReturnType:
18149 //    Initialize the Return Type Descriptor for a method that returns a TYP_LONG
18150 //
18151 // Arguments
18152 //    comp        -  Compiler Instance
18153 //
18154 // Return Value
18155 //    None
18156 //
18157 void ReturnTypeDesc::InitializeLongReturnType(Compiler* comp)
18158 {
18159 #if defined(_TARGET_X86_) || defined(_TARGET_ARM_)
18160
18161     // Setups up a ReturnTypeDesc for returning a long using two registers
18162     //
18163     assert(MAX_RET_REG_COUNT >= 2);
18164     m_regType[0] = TYP_INT;
18165     m_regType[1] = TYP_INT;
18166
18167 #else // not (_TARGET_X86_ or _TARGET_ARM_)
18168
18169     m_regType[0] = TYP_LONG;
18170
18171 #endif // _TARGET_X86_ or _TARGET_ARM_
18172
18173 #ifdef DEBUG
18174     m_inited = true;
18175 #endif
18176 }
18177
18178 //-------------------------------------------------------------------
18179 // GetABIReturnReg:  Return ith return register as per target ABI
18180 //
18181 // Arguments:
18182 //     idx   -   Index of the return register.
18183 //               The first return register has an index of 0 and so on.
18184 //
18185 // Return Value:
18186 //     Returns ith return register as per target ABI.
18187 //
18188 // Notes:
18189 //     x86 and ARM return long in multiple registers.
18190 //     ARM and ARM64 return HFA struct in multiple registers.
18191 //
18192 regNumber ReturnTypeDesc::GetABIReturnReg(unsigned idx)
18193 {
18194     unsigned count = GetReturnRegCount();
18195     assert(idx < count);
18196
18197     regNumber resultReg = REG_NA;
18198
18199 #ifdef UNIX_AMD64_ABI
18200     var_types regType0 = GetReturnRegType(0);
18201
18202     if (idx == 0)
18203     {
18204         if (varTypeIsIntegralOrI(regType0))
18205         {
18206             resultReg = REG_INTRET;
18207         }
18208         else
18209         {
18210             noway_assert(varTypeIsFloating(regType0));
18211             resultReg = REG_FLOATRET;
18212         }
18213     }
18214     else if (idx == 1)
18215     {
18216         var_types regType1 = GetReturnRegType(1);
18217
18218         if (varTypeIsIntegralOrI(regType1))
18219         {
18220             if (varTypeIsIntegralOrI(regType0))
18221             {
18222                 resultReg = REG_INTRET_1;
18223             }
18224             else
18225             {
18226                 resultReg = REG_INTRET;
18227             }
18228         }
18229         else
18230         {
18231             noway_assert(varTypeIsFloating(regType1));
18232
18233             if (varTypeIsFloating(regType0))
18234             {
18235                 resultReg = REG_FLOATRET_1;
18236             }
18237             else
18238             {
18239                 resultReg = REG_FLOATRET;
18240             }
18241         }
18242     }
18243
18244 #elif defined(_TARGET_X86_)
18245
18246     if (idx == 0)
18247     {
18248         resultReg = REG_LNGRET_LO;
18249     }
18250     else if (idx == 1)
18251     {
18252         resultReg = REG_LNGRET_HI;
18253     }
18254
18255 #elif defined(_TARGET_ARM_)
18256
18257     var_types regType = GetReturnRegType(idx);
18258     if (varTypeIsIntegralOrI(regType))
18259     {
18260         // Ints are returned in one return register.
18261         // Longs are returned in two return registers.
18262         if (idx == 0)
18263         {
18264             resultReg = REG_LNGRET_LO;
18265         }
18266         else if (idx == 1)
18267         {
18268             resultReg = REG_LNGRET_HI;
18269         }
18270     }
18271     else
18272     {
18273         // Floats are returned in one return register (f0).
18274         // Doubles are returned in one return register (d0).
18275         // Structs are returned in four registers with HFAs.
18276         assert(idx < MAX_RET_REG_COUNT); // Up to 4 return registers for HFA's
18277         if (regType == TYP_DOUBLE)
18278         {
18279             resultReg = (regNumber)((unsigned)(REG_FLOATRET) + idx * 2); // d0, d1, d2 or d3
18280         }
18281         else
18282         {
18283             resultReg = (regNumber)((unsigned)(REG_FLOATRET) + idx); // f0, f1, f2 or f3
18284         }
18285     }
18286
18287 #elif defined(_TARGET_ARM64_)
18288
18289     var_types regType = GetReturnRegType(idx);
18290     if (varTypeIsIntegralOrI(regType))
18291     {
18292         noway_assert(idx < 2);                              // Up to 2 return registers for 16-byte structs
18293         resultReg = (idx == 0) ? REG_INTRET : REG_INTRET_1; // X0 or X1
18294     }
18295     else
18296     {
18297         noway_assert(idx < 4);                                   // Up to 4 return registers for HFA's
18298         resultReg = (regNumber)((unsigned)(REG_FLOATRET) + idx); // V0, V1, V2 or V3
18299     }
18300
18301 #endif // TARGET_XXX
18302
18303     assert(resultReg != REG_NA);
18304     return resultReg;
18305 }
18306
18307 //--------------------------------------------------------------------------------
18308 // GetABIReturnRegs: get the mask of return registers as per target arch ABI.
18309 //
18310 // Arguments:
18311 //    None
18312 //
18313 // Return Value:
18314 //    reg mask of return registers in which the return type is returned.
18315 //
18316 // Note:
18317 //    This routine can be used when the caller is not particular about the order
18318 //    of return registers and wants to know the set of return registers.
18319 //
18320 // static
18321 regMaskTP ReturnTypeDesc::GetABIReturnRegs()
18322 {
18323     regMaskTP resultMask = RBM_NONE;
18324
18325     unsigned count = GetReturnRegCount();
18326     for (unsigned i = 0; i < count; ++i)
18327     {
18328         resultMask |= genRegMask(GetABIReturnReg(i));
18329     }
18330
18331     return resultMask;
18332 }
18333
18334 //------------------------------------------------------------------------
18335 // The following functions manage the gtRsvdRegs set of temporary registers
18336 // created by LSRA during code generation.
18337
18338 //------------------------------------------------------------------------
18339 // AvailableTempRegCount: return the number of available temporary registers in the (optional) given set
18340 // (typically, RBM_ALLINT or RBM_ALLFLOAT).
18341 //
18342 // Arguments:
18343 //    mask - (optional) Check for available temporary registers only in this set.
18344 //
18345 // Return Value:
18346 //    Count of available temporary registers in given set.
18347 //
18348 unsigned GenTree::AvailableTempRegCount(regMaskTP mask /* = (regMaskTP)-1 */) const
18349 {
18350     return genCountBits(gtRsvdRegs & mask);
18351 }
18352
18353 //------------------------------------------------------------------------
18354 // GetSingleTempReg: There is expected to be exactly one available temporary register
18355 // in the given mask in the gtRsvdRegs set. Get that register. No future calls to get
18356 // a temporary register are expected. Removes the register from the set, but only in
18357 // DEBUG to avoid doing unnecessary work in non-DEBUG builds.
18358 //
18359 // Arguments:
18360 //    mask - (optional) Get an available temporary register only in this set.
18361 //
18362 // Return Value:
18363 //    Available temporary register in given mask.
18364 //
18365 regNumber GenTree::GetSingleTempReg(regMaskTP mask /* = (regMaskTP)-1 */)
18366 {
18367     regMaskTP availableSet = gtRsvdRegs & mask;
18368     assert(genCountBits(availableSet) == 1);
18369     regNumber tempReg = genRegNumFromMask(availableSet);
18370     INDEBUG(gtRsvdRegs &= ~availableSet;) // Remove the register from the set, so it can't be used again.
18371     return tempReg;
18372 }
18373
18374 //------------------------------------------------------------------------
18375 // ExtractTempReg: Find the lowest number temporary register from the gtRsvdRegs set
18376 // that is also in the optional given mask (typically, RBM_ALLINT or RBM_ALLFLOAT),
18377 // and return it. Remove this register from the temporary register set, so it won't
18378 // be returned again.
18379 //
18380 // Arguments:
18381 //    mask - (optional) Extract an available temporary register only in this set.
18382 //
18383 // Return Value:
18384 //    Available temporary register in given mask.
18385 //
18386 regNumber GenTree::ExtractTempReg(regMaskTP mask /* = (regMaskTP)-1 */)
18387 {
18388     regMaskTP availableSet = gtRsvdRegs & mask;
18389     assert(genCountBits(availableSet) >= 1);
18390     regMaskTP tempRegMask = genFindLowestBit(availableSet);
18391     gtRsvdRegs &= ~tempRegMask;
18392     return genRegNumFromMask(tempRegMask);
18393 }