fix BroadcastScalarToVector128/256 and simplify MoveMask
[platform/upstream/coreclr.git] / src / jit / gentree.cpp
1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
4
5 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
7 XX                                                                           XX
8 XX                               GenTree                                     XX
9 XX                                                                           XX
10 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
11 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
12 */
13
14 #include "jitpch.h"
15 #include "hwintrinsic.h"
16 #include "simd.h"
17
18 #ifdef _MSC_VER
19 #pragma hdrstop
20 #endif
21
22 /*****************************************************************************/
23
// Table of GTK_* operator-kind masks, indexed by genTreeOps value.
// Generated from gtlist.h: each entry is the node's 'ok' kind bits, with
// GTK_COMMUTE added when the 'cm' (commutative) flag is 1.
const unsigned short GenTree::gtOperKindTable[] = {
#define GTNODE(en, st, cm, ok) ok + GTK_COMMUTE *cm,
#include "gtlist.h"
};
28
29 /*****************************************************************************
30  *
31  *  The types of different GenTree nodes
32  */
33
34 #ifdef DEBUG
35
#define INDENT_SIZE 3

//--------------------------------------------
//
// IndentStack: This struct is used, along with its related enums and strings,
//    to control both the indentation and the printing of arcs.
//
// Notes:
//    The mode of printing is set in the Constructor, using its 'compiler' argument.
//    Currently it only prints arcs when fgOrder == fgOrderLinear.
//    The type of arc to print is specified by the IndentInfo enum, and is controlled
//    by the caller of the Push() method.

// Indices into the per-mode glyph tables below; each names the glyph used for
// one kind of indent/arc character.
enum IndentChars
{
    ICVertical, // vertical connector between stacked arcs
    ICBottom,   // bottom corner of an arc
    ICTop,      // top corner of an arc
    ICMiddle,   // tee connecting a middle arc
    ICDash,     // horizontal arc segment
    ICEmbedded, // marker for an embedded statement
    ICTerminal, // glyph printed immediately before the node itself
    ICError,    // glyph used for an inconsistent indent state
    IndentCharCount
};

// clang-format off
// Sets of strings for different dumping options            vert             bot             top             mid             dash       embedded    terminal    error
static const char*  emptyIndents[IndentCharCount]   = {     " ",             " ",            " ",            " ",            " ",           "{",      "",        "?"  };
static const char*  asciiIndents[IndentCharCount]   = {     "|",            "\\",            "/",            "+",            "-",           "{",      "*",       "?"  };
static const char*  unicodeIndents[IndentCharCount] = { "\xe2\x94\x82", "\xe2\x94\x94", "\xe2\x94\x8c", "\xe2\x94\x9c", "\xe2\x94\x80",     "{", "\xe2\x96\x8c", "?"  };
// clang-format on
68
69 typedef ArrayStack<Compiler::IndentInfo> IndentInfoStack;
70 struct IndentStack
71 {
72     IndentInfoStack stack;
73     const char**    indents;
74
75     // Constructor for IndentStack.  Uses 'compiler' to determine the mode of printing.
76     IndentStack(Compiler* compiler) : stack(compiler->getAllocator(CMK_DebugOnly))
77     {
78         if (compiler->asciiTrees)
79         {
80             indents = asciiIndents;
81         }
82         else
83         {
84             indents = unicodeIndents;
85         }
86     }
87
88     // Return the depth of the current indentation.
89     unsigned Depth()
90     {
91         return stack.Height();
92     }
93
94     // Push a new indentation onto the stack, of the given type.
95     void Push(Compiler::IndentInfo info)
96     {
97         stack.Push(info);
98     }
99
100     // Pop the most recent indentation type off the stack.
101     Compiler::IndentInfo Pop()
102     {
103         return stack.Pop();
104     }
105
106     // Print the current indentation and arcs.
107     void print()
108     {
109         unsigned indentCount = Depth();
110         for (unsigned i = 0; i < indentCount; i++)
111         {
112             unsigned index = indentCount - 1 - i;
113             switch (stack.Index(index))
114             {
115                 case Compiler::IndentInfo::IINone:
116                     printf("   ");
117                     break;
118                 case Compiler::IndentInfo::IIEmbedded:
119                     printf("%s  ", indents[ICEmbedded]);
120                     break;
121                 case Compiler::IndentInfo::IIArc:
122                     if (index == 0)
123                     {
124                         printf("%s%s%s", indents[ICMiddle], indents[ICDash], indents[ICDash]);
125                     }
126                     else
127                     {
128                         printf("%s  ", indents[ICVertical]);
129                     }
130                     break;
131                 case Compiler::IndentInfo::IIArcBottom:
132                     printf("%s%s%s", indents[ICBottom], indents[ICDash], indents[ICDash]);
133                     break;
134                 case Compiler::IndentInfo::IIArcTop:
135                     printf("%s%s%s", indents[ICTop], indents[ICDash], indents[ICDash]);
136                     break;
137                 case Compiler::IndentInfo::IIError:
138                     printf("%s%s%s", indents[ICError], indents[ICDash], indents[ICDash]);
139                     break;
140                 default:
141                     unreached();
142             }
143         }
144         printf("%s", indents[ICTerminal]);
145     }
146 };
147
148 //------------------------------------------------------------------------
149 // printIndent: This is a static method which simply invokes the 'print'
150 //    method on its 'indentStack' argument.
151 //
152 // Arguments:
153 //    indentStack - specifies the information for the indentation & arcs to be printed
154 //
155 // Notes:
156 //    This method exists to localize the checking for the case where indentStack is null.
157
158 static void printIndent(IndentStack* indentStack)
159 {
160     if (indentStack == nullptr)
161     {
162         return;
163     }
164     indentStack->print();
165 }
166
167 #endif
168
#if defined(DEBUG) || NODEBASH_STATS || MEASURE_NODE_SIZE || COUNT_AST_OPERS

// Printable name string for each operator, generated from gtlist.h by
// stringizing the 'en' (enum name) argument of each GTNODE entry.
static const char* opNames[] = {
#define GTNODE(en, st, cm, ok) #en,
#include "gtlist.h"
};

// OpName: return the printable name for the given operator.
const char* GenTree::OpName(genTreeOps op)
{
    assert((unsigned)op < _countof(opNames));

    return opNames[op];
}

#endif
184
#if MEASURE_NODE_SIZE

// Name of the GenTree struct flavor ('st' argument) behind each operator,
// generated from gtlist.h.
static const char* opStructNames[] = {
#define GTNODE(en, st, cm, ok) #st,
#include "gtlist.h"
};

// OpStructName: return the struct-flavor name for the given operator.
const char* GenTree::OpStructName(genTreeOps op)
{
    assert((unsigned)op < _countof(opStructNames));

    return opStructNames[op];
}

#endif
200
//
//  We allocate tree nodes in 2 different sizes:
//  - TREE_NODE_SZ_SMALL for most nodes
//  - TREE_NODE_SZ_LARGE for the few nodes (such as calls) that have
//    more fields and take up a lot more space.
//

/* GT_COUNT'th oper is overloaded as 'undefined oper', so allocate storage for GT_COUNT'th oper also */
/* static */
// Allocated size class (small or large) per operator; filled in by InitNodeSize().
unsigned char GenTree::s_gtNodeSizes[GT_COUNT + 1];

#if NODEBASH_STATS || MEASURE_NODE_SIZE || COUNT_AST_OPERS

// Actual sizeof() of the struct flavor behind each operator, from gtlist.h.
unsigned char GenTree::s_gtTrueSizes[GT_COUNT + 1]{
#define GTNODE(en, st, cm, ok) sizeof(st),
#include "gtlist.h"
};

#endif // NODEBASH_STATS || MEASURE_NODE_SIZE || COUNT_AST_OPERS

#if COUNT_AST_OPERS
// Per-operator node counters.
LONG GenTree::s_gtNodeCounts[GT_COUNT + 1] = {0};
#endif // COUNT_AST_OPERS
224
/* static */
//------------------------------------------------------------------------
// InitNodeSize: populate s_gtNodeSizes, recording for every operator whether
//    its node is allocated small (TREE_NODE_SZ_SMALL) or large
//    (TREE_NODE_SZ_LARGE), and statically verify that each GenTree subtype
//    fits in the size class chosen for it.
void GenTree::InitNodeSize()
{
    /* Set all sizes to 'small' first */

    for (unsigned op = 0; op <= GT_COUNT; op++)
    {
        GenTree::s_gtNodeSizes[op] = TREE_NODE_SZ_SMALL;
    }

    // Now set all of the appropriate entries to 'large'
    CLANG_FORMAT_COMMENT_ANCHOR;

// clang-format off
#if defined(FEATURE_HFA) || defined(UNIX_AMD64_ABI)
    // On ARM32, ARM64 and System V for struct returning
    // there is code that does GT_ASG-tree.CopyObj call.
    // CopyObj is a large node and the GT_ASG is small, which triggers an exception.
    GenTree::s_gtNodeSizes[GT_ASG]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_RETURN]           = TREE_NODE_SZ_LARGE;
#endif // defined(FEATURE_HFA) || defined(UNIX_AMD64_ABI)

    GenTree::s_gtNodeSizes[GT_CALL]             = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_CAST]             = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_FTN_ADDR]         = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_BOX]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_INDEX]            = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_INDEX_ADDR]       = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_ARR_BOUNDS_CHECK] = TREE_NODE_SZ_LARGE;
#ifdef FEATURE_SIMD
    GenTree::s_gtNodeSizes[GT_SIMD_CHK] = TREE_NODE_SZ_LARGE;
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
    GenTree::s_gtNodeSizes[GT_HW_INTRINSIC_CHK] = TREE_NODE_SZ_LARGE;
#endif // FEATURE_HW_INTRINSICS

    GenTree::s_gtNodeSizes[GT_ARR_ELEM]         = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_ARR_INDEX]        = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_ARR_OFFSET]       = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_RET_EXPR]         = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_OBJ]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_FIELD]            = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_STMT]             = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_CMPXCHG]          = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_QMARK]            = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_LEA]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_STORE_OBJ]        = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_DYN_BLK]          = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_STORE_DYN_BLK]    = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_INTRINSIC]        = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_ALLOCOBJ]         = TREE_NODE_SZ_LARGE;
#if USE_HELPERS_FOR_INT_DIV
    GenTree::s_gtNodeSizes[GT_DIV]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_UDIV]             = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_MOD]              = TREE_NODE_SZ_LARGE;
    GenTree::s_gtNodeSizes[GT_UMOD]             = TREE_NODE_SZ_LARGE;
#endif
#ifdef FEATURE_PUT_STRUCT_ARG_STK
    // TODO-Throughput: This should not need to be a large node. The object info should be
    // obtained from the child node.
    GenTree::s_gtNodeSizes[GT_PUTARG_STK]       = TREE_NODE_SZ_LARGE;
#if FEATURE_ARG_SPLIT
    GenTree::s_gtNodeSizes[GT_PUTARG_SPLIT]     = TREE_NODE_SZ_LARGE;
#endif // FEATURE_ARG_SPLIT
#endif // FEATURE_PUT_STRUCT_ARG_STK

    // GT_RETURN and GT_ASG must agree in size (see the FEATURE_HFA/UNIX_AMD64_ABI note above).
    assert(GenTree::s_gtNodeSizes[GT_RETURN] == GenTree::s_gtNodeSizes[GT_ASG]);

    // This list of assertions should come to contain all GenTree subtypes that are declared
    // "small".
    assert(sizeof(GenTreeLclFld) <= GenTree::s_gtNodeSizes[GT_LCL_FLD]);
    assert(sizeof(GenTreeLclVar) <= GenTree::s_gtNodeSizes[GT_LCL_VAR]);

    static_assert_no_msg(sizeof(GenTree)             <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeUnOp)         <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeOp)           <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeVal)          <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeIntConCommon) <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreePhysReg)      <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeIntCon)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeLngCon)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeDblCon)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeStrCon)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeLclVarCommon) <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeLclVar)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeLclFld)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeCC)           <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeCast)         <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeBox)          <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeField)        <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeArgList)      <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeFieldList)    <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeColon)        <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeCall)         <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeCmpXchg)      <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeFptrVal)      <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeQmark)        <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeIntrinsic)    <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeIndex)        <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeArrLen)       <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeBoundsChk)    <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeArrElem)      <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeArrIndex)     <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeArrOffs)      <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeIndir)        <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeStoreInd)     <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeAddrMode)     <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeObj)          <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeBlk)          <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeRetExpr)      <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeStmt)         <= TREE_NODE_SZ_LARGE); // *** large node
    static_assert_no_msg(sizeof(GenTreeClsVar)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeArgPlace)     <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreePhiArg)       <= TREE_NODE_SZ_SMALL);
    static_assert_no_msg(sizeof(GenTreeAllocObj)     <= TREE_NODE_SZ_LARGE); // *** large node
#ifndef FEATURE_PUT_STRUCT_ARG_STK
    static_assert_no_msg(sizeof(GenTreePutArgStk)    <= TREE_NODE_SZ_SMALL);
#else  // FEATURE_PUT_STRUCT_ARG_STK
    // TODO-Throughput: This should not need to be a large node. The object info should be
    // obtained from the child node.
    static_assert_no_msg(sizeof(GenTreePutArgStk)    <= TREE_NODE_SZ_LARGE);
#if FEATURE_ARG_SPLIT
    static_assert_no_msg(sizeof(GenTreePutArgSplit)  <= TREE_NODE_SZ_LARGE);
#endif // FEATURE_ARG_SPLIT
#endif // FEATURE_PUT_STRUCT_ARG_STK

#ifdef FEATURE_SIMD
    static_assert_no_msg(sizeof(GenTreeSIMD)         <= TREE_NODE_SZ_SMALL);
#endif // FEATURE_SIMD

#ifdef FEATURE_HW_INTRINSICS
    static_assert_no_msg(sizeof(GenTreeHWIntrinsic)  <= TREE_NODE_SZ_SMALL);
#endif // FEATURE_HW_INTRINSICS
    // clang-format on
}
360
361 size_t GenTree::GetNodeSize() const
362 {
363     return GenTree::s_gtNodeSizes[gtOper];
364 }
365
#ifdef DEBUG
// IsNodeProperlySized: check that the node's true operator size does not
// exceed the size class it was allocated with (per its debug flags).
bool GenTree::IsNodeProperlySized() const
{
    const bool allocatedSmall = (gtDebugFlags & GTF_DEBUG_NODE_SMALL) != 0;
    if (!allocatedSmall)
    {
        // Every node must carry exactly one of the two size flags.
        assert(gtDebugFlags & GTF_DEBUG_NODE_LARGE);
    }

    const size_t allocatedSize = allocatedSmall ? TREE_NODE_SZ_SMALL : TREE_NODE_SZ_LARGE;
    return GenTree::s_gtNodeSizes[gtOper] <= allocatedSize;
}
#endif
384
//------------------------------------------------------------------------
// ReplaceWith: replace this with the src node. The source must be an isolated node
//              and cannot be used after the replacement.
//
// Arguments:
//    src  - source tree, that replaces this.
//    comp - the compiler instance to transfer annotations for arrays.
//
void GenTree::ReplaceWith(GenTree* src, Compiler* comp)
{
    // The source may be big only if the target is also a big node
    assert((gtDebugFlags & GTF_DEBUG_NODE_LARGE) || GenTree::s_gtNodeSizes[src->gtOper] == TREE_NODE_SZ_SMALL);

    // The check is effective only if nodes have been already threaded.
    assert((src->gtPrev == nullptr) && (src->gtNext == nullptr));

    RecordOperBashing(OperGet(), src->OperGet()); // nop unless NODEBASH_STATS is enabled

    // Preserve this node's linear-order links: the memcpy below would
    // otherwise overwrite them with src's (null) links.
    GenTree* prev = gtPrev;
    GenTree* next = gtNext;
    // The VTable pointer is copied intentionally here
    memcpy((void*)this, (void*)src, src->GetNodeSize());
    this->gtPrev = prev;
    this->gtNext = next;

#ifdef DEBUG
    // Clear the (debug-only) sequence number copied from src.
    gtSeqNum = 0;
#endif
    // Transfer any annotations.
    if (src->OperGet() == GT_IND && src->gtFlags & GTF_IND_ARR_INDEX)
    {
        // Re-key the array-info map entry from src to this node.
        ArrayInfo arrInfo;
        bool      b = comp->GetArrayInfoMap()->Lookup(src, &arrInfo);
        assert(b);
        comp->GetArrayInfoMap()->Set(this, arrInfo);
    }
    DEBUG_DESTROY_NODE(src);
}
423
424 /*****************************************************************************
425  *
426  *  When 'NODEBASH_STATS' is enabled in "jit.h" we record all instances of
427  *  an existing GenTree node having its operator changed. This can be useful
428  *  for two (related) things - to see what is being bashed (and what isn't),
429  *  and to verify that the existing choices for what nodes are marked 'large'
430  *  are reasonable (to minimize "wasted" space).
431  *
432  *  And yes, the hash function / logic is simplistic, but it is conflict-free
433  *  and transparent for what we need.
434  */
435
436 #if NODEBASH_STATS
437
#define BASH_HASH_SIZE 211

// hashme: map an (old oper, new oper) pair to a slot in the BashHash table.
inline unsigned hashme(genTreeOps op1, genTreeOps op2)
{
    return ((op1 * 104729) ^ (op2 * 56569)) % BASH_HASH_SIZE;
}
444
// One slot of the oper-bashing histogram.
struct BashHashDsc
{
    unsigned __int32 bhFullHash; // the hash value (unique for all old->new pairs)
    unsigned __int32 bhCount;    // the same old->new bashings seen so far
    unsigned __int8  bhOperOld;  // original gtOper
    unsigned __int8  bhOperNew;  // new      gtOper
};

// Zero-initialized histogram table, indexed by hashme().
static BashHashDsc BashHash[BASH_HASH_SIZE];
454
455 void GenTree::RecordOperBashing(genTreeOps operOld, genTreeOps operNew)
456 {
457     unsigned     hash = hashme(operOld, operNew);
458     BashHashDsc* desc = BashHash + hash;
459
460     if (desc->bhFullHash != hash)
461     {
462         noway_assert(desc->bhCount == 0); // if this ever fires, need fix the hash fn
463         desc->bhFullHash = hash;
464     }
465
466     desc->bhCount += 1;
467     desc->bhOperOld = operOld;
468     desc->bhOperNew = operNew;
469 }
470
//------------------------------------------------------------------------
// ReportOperBashing: dump the accumulated oper-bashing histogram to 'f'.
//
// Each non-empty hash slot is printed as old -> new with the true struct
// sizes of both opers; an 'X' marks bashings where the old size is smaller
// than the new size (the node grew).
void GenTree::ReportOperBashing(FILE* f)
{
    unsigned total = 0;

    fflush(f);

    fprintf(f, "\n");
    fprintf(f, "Bashed gtOper stats:\n");
    fprintf(f, "\n");
    fprintf(f, "    Old operator        New operator     #bytes old->new      Count\n");
    fprintf(f, "    ---------------------------------------------------------------\n");

    for (unsigned h = 0; h < BASH_HASH_SIZE; h++)
    {
        // Skip slots that recorded no bashings.
        unsigned count = BashHash[h].bhCount;
        if (count == 0)
            continue;

        unsigned opOld = BashHash[h].bhOperOld;
        unsigned opNew = BashHash[h].bhOperNew;

        fprintf(f, "    GT_%-13s -> GT_%-13s [size: %3u->%3u] %c %7u\n", OpName((genTreeOps)opOld),
                OpName((genTreeOps)opNew), s_gtTrueSizes[opOld], s_gtTrueSizes[opNew],
                (s_gtTrueSizes[opOld] < s_gtTrueSizes[opNew]) ? 'X' : ' ', count);
        total += count;
    }
    fprintf(f, "\n");
    fprintf(f, "Total bashings: %u\n", total);
    fprintf(f, "\n");

    fflush(f);
}
503
504 #endif // NODEBASH_STATS
505
506 /*****************************************************************************/
507
508 #if MEASURE_NODE_SIZE
509
510 void GenTree::DumpNodeSizes(FILE* fp)
511 {
512     // Dump the sizes of the various GenTree flavors
513
514     fprintf(fp, "Small tree node size = %3u bytes\n", TREE_NODE_SZ_SMALL);
515     fprintf(fp, "Large tree node size = %3u bytes\n", TREE_NODE_SZ_LARGE);
516     fprintf(fp, "\n");
517
518     // Verify that node sizes are set kosherly and dump sizes
519     for (unsigned op = GT_NONE + 1; op < GT_COUNT; op++)
520     {
521         unsigned needSize = s_gtTrueSizes[op];
522         unsigned nodeSize = s_gtNodeSizes[op];
523
524         const char* structNm = OpStructName((genTreeOps)op);
525         const char* operName = OpName((genTreeOps)op);
526
527         bool repeated = false;
528
529         // Have we seen this struct flavor before?
530         for (unsigned mop = GT_NONE + 1; mop < op; mop++)
531         {
532             if (strcmp(structNm, OpStructName((genTreeOps)mop)) == 0)
533             {
534                 repeated = true;
535                 break;
536             }
537         }
538
539         // Don't repeat the same GenTree flavor unless we have an error
540         if (!repeated || needSize > nodeSize)
541         {
542             unsigned sizeChar = '?';
543
544             if (nodeSize == TREE_NODE_SZ_SMALL)
545                 sizeChar = 'S';
546             else if (nodeSize == TREE_NODE_SZ_LARGE)
547                 sizeChar = 'L';
548
549             fprintf(fp, "GT_%-16s ... %-19s = %3u bytes (%c)", operName, structNm, needSize, sizeChar);
550             if (needSize > nodeSize)
551             {
552                 fprintf(fp, " -- ERROR -- allocation is only %u bytes!", nodeSize);
553             }
554             else if (needSize <= TREE_NODE_SZ_SMALL && nodeSize == TREE_NODE_SZ_LARGE)
555             {
556                 fprintf(fp, " ... could be small");
557             }
558
559             fprintf(fp, "\n");
560         }
561     }
562 }
563
564 #endif // MEASURE_NODE_SIZE
565
566 /*****************************************************************************
567  *
568  *  Walk all basic blocks and call the given function pointer for all tree
569  *  nodes contained therein.
570  */
571
572 void Compiler::fgWalkAllTreesPre(fgWalkPreFn* visitor, void* pCallBackData)
573 {
574     BasicBlock* block;
575
576     for (block = fgFirstBB; block; block = block->bbNext)
577     {
578         GenTree* tree;
579
580         for (tree = block->bbTreeList; tree; tree = tree->gtNext)
581         {
582             assert(tree->gtOper == GT_STMT);
583
584             fgWalkTreePre(&tree->gtStmt.gtStmtExpr, visitor, pCallBackData);
585         }
586     }
587 }
588
589 //-----------------------------------------------------------
590 // CopyReg: Copy the _gtRegNum/gtRegTag fields.
591 //
592 // Arguments:
593 //     from   -  GenTree node from which to copy
594 //
595 // Return Value:
596 //     None
597 void GenTree::CopyReg(GenTree* from)
598 {
599     _gtRegNum = from->_gtRegNum;
600     INDEBUG(gtRegTag = from->gtRegTag;)
601
602     // Also copy multi-reg state if this is a call node
603     if (IsCall())
604     {
605         assert(from->IsCall());
606         this->AsCall()->CopyOtherRegs(from->AsCall());
607     }
608     else if (IsCopyOrReload())
609     {
610         this->AsCopyOrReload()->CopyOtherRegs(from->AsCopyOrReload());
611     }
612 }
613
614 //------------------------------------------------------------------
615 // gtHasReg: Whether node beeen assigned a register by LSRA
616 //
617 // Arguments:
618 //    None
619 //
620 // Return Value:
621 //    Returns true if the node was assigned a register.
622 //
623 //    In case of multi-reg call nodes, it is considered
624 //    having a reg if regs are allocated for all its
625 //    return values.
626 //
627 //    In case of GT_COPY or GT_RELOAD of a multi-reg call,
628 //    GT_COPY/GT_RELOAD is considered having a reg if it
629 //    has a reg assigned to any of its positions.
630 //
631 // Assumption:
632 //    In order for this to work properly, gtClearReg must be called
633 //    prior to setting the register value.
634 //
635 bool GenTree::gtHasReg() const
636 {
637     bool hasReg;
638
639     if (IsMultiRegCall())
640     {
641         // Have to cast away const-ness because GetReturnTypeDesc() is a non-const method
642         GenTree*     tree     = const_cast<GenTree*>(this);
643         GenTreeCall* call     = tree->AsCall();
644         unsigned     regCount = call->GetReturnTypeDesc()->GetReturnRegCount();
645         hasReg                = false;
646
647         // A Multi-reg call node is said to have regs, if it has
648         // reg assigned to each of its result registers.
649         for (unsigned i = 0; i < regCount; ++i)
650         {
651             hasReg = (call->GetRegNumByIdx(i) != REG_NA);
652             if (!hasReg)
653             {
654                 break;
655             }
656         }
657     }
658     else if (IsCopyOrReloadOfMultiRegCall())
659     {
660         GenTree*             tree         = const_cast<GenTree*>(this);
661         GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
662         GenTreeCall*         call         = copyOrReload->gtGetOp1()->AsCall();
663         unsigned             regCount     = call->GetReturnTypeDesc()->GetReturnRegCount();
664         hasReg                            = false;
665
666         // A Multi-reg copy or reload node is said to have regs,
667         // if it has valid regs in any of the positions.
668         for (unsigned i = 0; i < regCount; ++i)
669         {
670             hasReg = (copyOrReload->GetRegNumByIdx(i) != REG_NA);
671             if (hasReg)
672             {
673                 break;
674             }
675         }
676     }
677     else
678     {
679         hasReg = (gtRegNum != REG_NA);
680     }
681
682     return hasReg;
683 }
684
//-----------------------------------------------------------------------------
// GetRegisterDstCount: Get the number of registers defined by the node.
//
// Arguments:
//    None
//
// Return Value:
//    The number of registers that this node defines.
//
// Notes:
//    This should not be called on a contained node.
//    This does not look at the actual register assignments, if any, and so
//    is valid after Lowering.
//
int GenTree::GetRegisterDstCount() const
{
    assert(!isContained());
    if (!IsMultiRegNode())
    {
        // Single-register case: one register iff the node produces a value.
        return (IsValue()) ? 1 : 0;
    }
    else if (IsMultiRegCall())
    {
        // temporarily cast away const-ness as AsCall() method is not declared const
        GenTree* temp = const_cast<GenTree*>(this);
        return temp->AsCall()->GetReturnTypeDesc()->GetReturnRegCount();
    }
    else if (IsCopyOrReload())
    {
        // A copy/reload defines as many registers as its source operand.
        return gtGetOp1()->GetRegisterDstCount();
    }
#if FEATURE_ARG_SPLIT
    else if (OperIsPutArgSplit())
    {
        return (const_cast<GenTree*>(this))->AsPutArgSplit()->gtNumRegs;
    }
#endif
#if !defined(_TARGET_64BIT_)
    else if (OperIsMultiRegOp())
    {
        // A MultiRegOp is a GT_MUL_LONG, GT_PUTARG_REG, or GT_BITCAST.
        // For the latter two (ARM-only), they only have multiple registers if they produce a long value
        // (GT_MUL_LONG always produces a long value).
        CLANG_FORMAT_COMMENT_ANCHOR;
#ifdef _TARGET_ARM_
        return (TypeGet() == TYP_LONG) ? 2 : 1;
#else
        assert(OperIs(GT_MUL_LONG));
        return 2;
#endif
    }
#endif
    assert(!"Unexpected multi-reg node");
    return 0;
}
740
//---------------------------------------------------------------
// gtGetRegMask: Get the reg mask of the node.
//
// Arguments:
//    None
//
// Return Value:
//    Reg Mask of GenTree node.
//
regMaskTP GenTree::gtGetRegMask() const
{
    regMaskTP resultMask;

    if (IsMultiRegCall())
    {
        // temporarily cast away const-ness as AsCall() method is not declared const
        // Start from the primary return register, then fold in the other return regs.
        resultMask    = genRegMask(gtRegNum);
        GenTree* temp = const_cast<GenTree*>(this);
        resultMask |= temp->AsCall()->GetOtherRegMask();
    }
    else if (IsCopyOrReloadOfMultiRegCall())
    {
        // A multi-reg copy or reload, will have valid regs for only those
        // positions that need to be copied or reloaded.  Hence we need
        // to consider only those registers for computing reg mask.

        GenTree*             tree         = const_cast<GenTree*>(this);
        GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
        GenTreeCall*         call         = copyOrReload->gtGetOp1()->AsCall();
        unsigned             regCount     = call->GetReturnTypeDesc()->GetReturnRegCount();

        resultMask = RBM_NONE;
        for (unsigned i = 0; i < regCount; ++i)
        {
            // Skip positions with no register (nothing to copy/reload there).
            regNumber reg = copyOrReload->GetRegNumByIdx(i);
            if (reg != REG_NA)
            {
                resultMask |= genRegMask(reg);
            }
        }
    }
#if FEATURE_ARG_SPLIT
    else if (OperIsPutArgSplit())
    {
        GenTree*            tree     = const_cast<GenTree*>(this);
        GenTreePutArgSplit* splitArg = tree->AsPutArgSplit();
        unsigned            regCount = splitArg->gtNumRegs;

        resultMask = RBM_NONE;
        for (unsigned i = 0; i < regCount; ++i)
        {
            // Unlike copy/reload, every position of a split arg must have a register.
            regNumber reg = splitArg->GetRegNumByIdx(i);
            assert(reg != REG_NA);
            resultMask |= genRegMask(reg);
        }
    }
#endif // FEATURE_ARG_SPLIT
    else
    {
        resultMask = genRegMask(gtRegNum);
    }

    return resultMask;
}
805
806 //---------------------------------------------------------------
807 // GetOtherRegMask: Get the reg mask of gtOtherRegs of call node
808 //
809 // Arguments:
810 //    None
811 //
812 // Return Value:
813 //    Reg mask of gtOtherRegs of call node.
814 //
815 regMaskTP GenTreeCall::GetOtherRegMask() const
816 {
817     regMaskTP resultMask = RBM_NONE;
818
819 #if FEATURE_MULTIREG_RET
820     for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
821     {
822         if (gtOtherRegs[i] != REG_NA)
823         {
824             resultMask |= genRegMask((regNumber)gtOtherRegs[i]);
825             continue;
826         }
827         break;
828     }
829 #endif
830
831     return resultMask;
832 }
833
834 //-------------------------------------------------------------------------
835 // IsPure:
836 //    Returns true if this call is pure. For now, this uses the same
837 //    definition of "pure" that is that used by HelperCallProperties: a
838 //    pure call does not read or write any aliased (e.g. heap) memory or
839 //    have other global side effects (e.g. class constructors, finalizers),
840 //    but is allowed to throw an exception.
841 //
842 //    NOTE: this call currently only returns true if the call target is a
843 //    helper method that is known to be pure. No other analysis is
844 //    performed.
845 //
846 // Arguments:
847 //    Copiler - the compiler context.
848 //
849 // Returns:
850 //    True if the call is pure; false otherwise.
851 //
852 bool GenTreeCall::IsPure(Compiler* compiler) const
853 {
854     return (gtCallType == CT_HELPER) &&
855            compiler->s_helperCallProperties.IsPure(compiler->eeGetHelperNum(gtCallMethHnd));
856 }
857
858 //-------------------------------------------------------------------------
859 // HasSideEffects:
860 //    Returns true if this call has any side effects. All non-helpers are considered to have side-effects. Only helpers
861 //    that do not mutate the heap, do not run constructors, may not throw, and are either a) pure or b) non-finalizing
862 //    allocation functions are considered side-effect-free.
863 //
864 // Arguments:
865 //     compiler         - the compiler instance
866 //     ignoreExceptions - when `true`, ignores exception side effects
867 //     ignoreCctors     - when `true`, ignores class constructor side effects
868 //
869 // Return Value:
870 //      true if this call has any side-effects; false otherwise.
871 bool GenTreeCall::HasSideEffects(Compiler* compiler, bool ignoreExceptions, bool ignoreCctors) const
872 {
873     // Generally all GT_CALL nodes are considered to have side-effects, but we may have extra information about helper
874     // calls that can prove them side-effect-free.
875     if (gtCallType != CT_HELPER)
876     {
877         return true;
878     }
879
880     CorInfoHelpFunc       helper           = compiler->eeGetHelperNum(gtCallMethHnd);
881     HelperCallProperties& helperProperties = compiler->s_helperCallProperties;
882
883     // We definitely care about the side effects if MutatesHeap is true
884     if (helperProperties.MutatesHeap(helper))
885     {
886         return true;
887     }
888
889     // Unless we have been instructed to ignore cctors (CSE, for example, ignores cctors), consider them side effects.
890     if (!ignoreCctors && helperProperties.MayRunCctor(helper))
891     {
892         return true;
893     }
894
895     // If we also care about exceptions then check if the helper can throw
896     if (!ignoreExceptions && !helperProperties.NoThrow(helper))
897     {
898         return true;
899     }
900
901     // If this is not a Pure helper call or an allocator (that will not need to run a finalizer)
902     // then this call has side effects.
903     return !helperProperties.IsPure(helper) &&
904            (!helperProperties.IsAllocator(helper) || ((gtCallMoreFlags & GTF_CALL_M_ALLOC_SIDE_EFFECTS) != 0));
905 }
906
907 //-------------------------------------------------------------------------
908 // HasNonStandardAddedArgs: Return true if the method has non-standard args added to the call
909 // argument list during argument morphing (fgMorphArgs), e.g., passed in R10 or R11 on AMD64.
910 // See also GetNonStandardAddedArgCount().
911 //
912 // Arguments:
913 //     compiler - the compiler instance
914 //
915 // Return Value:
916 //      true if there are any such args, false otherwise.
917 //
918 bool GenTreeCall::HasNonStandardAddedArgs(Compiler* compiler) const
919 {
920     return GetNonStandardAddedArgCount(compiler) != 0;
921 }
922
923 //-------------------------------------------------------------------------
924 // GetNonStandardAddedArgCount: Get the count of non-standard arguments that have been added
925 // during call argument morphing (fgMorphArgs). Do not count non-standard args that are already
926 // counted in the argument list prior to morphing.
927 //
928 // This function is used to help map the caller and callee arguments during tail call setup.
929 //
930 // Arguments:
931 //     compiler - the compiler instance
932 //
933 // Return Value:
934 //      The count of args, as described.
935 //
936 // Notes:
937 //      It would be more general to have fgMorphArgs set a bit on the call node when such
938 //      args are added to a call, and a bit on each such arg, and then have this code loop
939 //      over the call args when the special call bit is set, counting the args with the special
940 //      arg bit. This seems pretty heavyweight, though. Instead, this logic needs to be kept
941 //      in sync with fgMorphArgs.
942 //
943 int GenTreeCall::GetNonStandardAddedArgCount(Compiler* compiler) const
944 {
945     if (IsUnmanaged() && !compiler->opts.ShouldUsePInvokeHelpers())
946     {
947         // R11 = PInvoke cookie param
948         return 1;
949     }
950     else if (IsVirtualStub())
951     {
952         // R11 = Virtual stub param
953         return 1;
954     }
955     else if ((gtCallType == CT_INDIRECT) && (gtCallCookie != nullptr))
956     {
957         // R10 = PInvoke target param
958         // R11 = PInvoke cookie param
959         return 2;
960     }
961     return 0;
962 }
963
964 //-------------------------------------------------------------------------
965 // TreatAsHasRetBufArg:
966 //
967 // Arguments:
968 //     compiler, the compiler instance so that we can call eeGetHelperNum
969 //
970 // Return Value:
971 //     Returns true if we treat the call as if it has a retBuf argument
972 //     This method may actually have a retBuf argument
973 //     or it could be a JIT helper that we are still transforming during
974 //     the importer phase.
975 //
976 // Notes:
977 //     On ARM64 marking the method with the GTF_CALL_M_RETBUFFARG flag
978 //     will make HasRetBufArg() return true, but will also force the
979 //     use of register x8 to pass the RetBuf argument.
980 //
981 //     These two Jit Helpers that we handle here by returning true
982 //     aren't actually defined to return a struct, so they don't expect
983 //     their RetBuf to be passed in x8, instead they  expect it in x0.
984 //
985 bool GenTreeCall::TreatAsHasRetBufArg(Compiler* compiler) const
986 {
987     if (HasRetBufArg())
988     {
989         return true;
990     }
991     else
992     {
993         // If we see a Jit helper call that returns a TYP_STRUCT we will
994         // transform it as if it has a Return Buffer Argument
995         //
996         if (IsHelperCall() && (gtReturnType == TYP_STRUCT))
997         {
998             // There are two possible helper calls that use this path:
999             //  CORINFO_HELP_GETFIELDSTRUCT and CORINFO_HELP_UNBOX_NULLABLE
1000             //
1001             CorInfoHelpFunc helpFunc = compiler->eeGetHelperNum(gtCallMethHnd);
1002
1003             if (helpFunc == CORINFO_HELP_GETFIELDSTRUCT)
1004             {
1005                 return true;
1006             }
1007             else if (helpFunc == CORINFO_HELP_UNBOX_NULLABLE)
1008             {
1009                 return true;
1010             }
1011             else
1012             {
1013                 assert(!"Unexpected JIT helper in TreatAsHasRetBufArg");
1014             }
1015         }
1016     }
1017     return false;
1018 }
1019
1020 //-------------------------------------------------------------------------
1021 // IsHelperCall: Determine if this GT_CALL node is a specific helper call.
1022 //
1023 // Arguments:
1024 //     compiler - the compiler instance so that we can call eeFindHelper
1025 //
1026 // Return Value:
1027 //     Returns true if this GT_CALL node is a call to the specified helper.
1028 //
1029 bool GenTreeCall::IsHelperCall(Compiler* compiler, unsigned helper) const
1030 {
1031     return IsHelperCall(compiler->eeFindHelper(helper));
1032 }
1033
1034 //------------------------------------------------------------------------
1035 // GenTreeCall::ReplaceCallOperand:
1036 //    Replaces a given operand to a call node and updates the call
1037 //    argument table if necessary.
1038 //
1039 // Arguments:
1040 //    useEdge - the use edge that points to the operand to be replaced.
1041 //    replacement - the replacement node.
1042 //
1043 void GenTreeCall::ReplaceCallOperand(GenTree** useEdge, GenTree* replacement)
1044 {
1045     assert(useEdge != nullptr);
1046     assert(replacement != nullptr);
1047     assert(TryGetUse(*useEdge, &useEdge));
1048
1049     GenTree* originalOperand = *useEdge;
1050     *useEdge                 = replacement;
1051
1052     const bool isArgument =
1053         (replacement != gtControlExpr) &&
1054         ((gtCallType != CT_INDIRECT) || ((replacement != gtCallCookie) && (replacement != gtCallAddr)));
1055
1056     if (isArgument)
1057     {
1058         if ((originalOperand->gtFlags & GTF_LATE_ARG) != 0)
1059         {
1060             replacement->gtFlags |= GTF_LATE_ARG;
1061         }
1062         else
1063         {
1064             assert((replacement->gtFlags & GTF_LATE_ARG) == 0);
1065
1066             fgArgTabEntry* fp = Compiler::gtArgEntryByNode(this, originalOperand);
1067             assert(fp->node == originalOperand);
1068             fp->node = replacement;
1069         }
1070     }
1071 }
1072
1073 //-------------------------------------------------------------------------
1074 // AreArgsComplete: Determine if this GT_CALL node's arguments have been processed.
1075 //
1076 // Return Value:
1077 //     Returns true if fgMorphArgs has processed the arguments.
1078 //
1079 bool GenTreeCall::AreArgsComplete() const
1080 {
1081     if (fgArgInfo == nullptr)
1082     {
1083         return false;
1084     }
1085     if (fgArgInfo->AreArgsComplete())
1086     {
1087         assert((gtCallLateArgs != nullptr) || !fgArgInfo->HasRegArgs());
1088         return true;
1089     }
1090     assert(gtCallArgs == nullptr);
1091     return false;
1092 }
1093
1094 #if !defined(FEATURE_PUT_STRUCT_ARG_STK)
1095 unsigned GenTreePutArgStk::getArgSize()
1096 {
1097     return genTypeSize(genActualType(gtOp1->gtType));
1098 }
1099 #endif // !defined(FEATURE_PUT_STRUCT_ARG_STK)
1100
1101 /*****************************************************************************
1102  *
1103  *  Returns non-zero if the two trees are identical.
1104  */
1105
1106 bool GenTree::Compare(GenTree* op1, GenTree* op2, bool swapOK)
1107 {
1108     genTreeOps oper;
1109     unsigned   kind;
1110
1111 //  printf("tree1:\n"); gtDispTree(op1);
1112 //  printf("tree2:\n"); gtDispTree(op2);
1113
1114 AGAIN:
1115
1116     if (op1 == nullptr)
1117     {
1118         return (op2 == nullptr);
1119     }
1120     if (op2 == nullptr)
1121     {
1122         return false;
1123     }
1124     if (op1 == op2)
1125     {
1126         return true;
1127     }
1128
1129     assert(op1->gtOper != GT_STMT);
1130     assert(op2->gtOper != GT_STMT);
1131
1132     oper = op1->OperGet();
1133
1134     /* The operators must be equal */
1135
1136     if (oper != op2->gtOper)
1137     {
1138         return false;
1139     }
1140
1141     /* The types must be equal */
1142
1143     if (op1->gtType != op2->gtType)
1144     {
1145         return false;
1146     }
1147
1148     /* Overflow must be equal */
1149     if (op1->gtOverflowEx() != op2->gtOverflowEx())
1150     {
1151         return false;
1152     }
1153
1154     /* Sensible flags must be equal */
1155     if ((op1->gtFlags & (GTF_UNSIGNED)) != (op2->gtFlags & (GTF_UNSIGNED)))
1156     {
1157         return false;
1158     }
1159
1160     /* Figure out what kind of nodes we're comparing */
1161
1162     kind = op1->OperKind();
1163
1164     /* Is this a constant node? */
1165
1166     if (kind & GTK_CONST)
1167     {
1168         switch (oper)
1169         {
1170             case GT_CNS_INT:
1171                 if (op1->gtIntCon.gtIconVal == op2->gtIntCon.gtIconVal)
1172                 {
1173                     return true;
1174                 }
1175                 break;
1176 #if 0
1177             // TODO-CQ: Enable this in the future
1178         case GT_CNS_LNG:
1179             if  (op1->gtLngCon.gtLconVal == op2->gtLngCon.gtLconVal)
1180                 return true;
1181             break;
1182
1183         case GT_CNS_DBL:
1184             if  (op1->gtDblCon.gtDconVal == op2->gtDblCon.gtDconVal)
1185                 return true;
1186             break;
1187 #endif
1188             default:
1189                 break;
1190         }
1191
1192         return false;
1193     }
1194
1195     /* Is this a leaf node? */
1196
1197     if (kind & GTK_LEAF)
1198     {
1199         switch (oper)
1200         {
1201             case GT_LCL_VAR:
1202                 if (op1->gtLclVarCommon.gtLclNum != op2->gtLclVarCommon.gtLclNum)
1203                 {
1204                     break;
1205                 }
1206
1207                 return true;
1208
1209             case GT_LCL_FLD:
1210                 if (op1->gtLclFld.gtLclNum != op2->gtLclFld.gtLclNum ||
1211                     op1->gtLclFld.gtLclOffs != op2->gtLclFld.gtLclOffs)
1212                 {
1213                     break;
1214                 }
1215
1216                 return true;
1217
1218             case GT_CLS_VAR:
1219                 if (op1->gtClsVar.gtClsVarHnd != op2->gtClsVar.gtClsVarHnd)
1220                 {
1221                     break;
1222                 }
1223
1224                 return true;
1225
1226             case GT_LABEL:
1227                 return true;
1228
1229             case GT_ARGPLACE:
1230                 if ((op1->gtType == TYP_STRUCT) &&
1231                     (op1->gtArgPlace.gtArgPlaceClsHnd != op2->gtArgPlace.gtArgPlaceClsHnd))
1232                 {
1233                     break;
1234                 }
1235                 return true;
1236
1237             default:
1238                 break;
1239         }
1240
1241         return false;
1242     }
1243
1244     /* Is it a 'simple' unary/binary operator? */
1245
1246     if (kind & GTK_UNOP)
1247     {
1248         if (IsExOp(kind))
1249         {
1250             // ExOp operators extend unary operator with extra, non-GenTree* members.  In many cases,
1251             // these should be included in the comparison.
1252             switch (oper)
1253             {
1254                 case GT_ARR_LENGTH:
1255                     if (op1->gtArrLen.ArrLenOffset() != op2->gtArrLen.ArrLenOffset())
1256                     {
1257                         return false;
1258                     }
1259                     break;
1260                 case GT_CAST:
1261                     if (op1->gtCast.gtCastType != op2->gtCast.gtCastType)
1262                     {
1263                         return false;
1264                     }
1265                     break;
1266                 case GT_OBJ:
1267                     if (op1->AsObj()->gtClass != op2->AsObj()->gtClass)
1268                     {
1269                         return false;
1270                     }
1271                     break;
1272
1273                 // For the ones below no extra argument matters for comparison.
1274                 case GT_BOX:
1275                 case GT_RUNTIMELOOKUP:
1276                     break;
1277
1278                 default:
1279                     assert(!"unexpected unary ExOp operator");
1280             }
1281         }
1282         return Compare(op1->gtOp.gtOp1, op2->gtOp.gtOp1);
1283     }
1284
1285     if (kind & GTK_BINOP)
1286     {
1287         if (IsExOp(kind))
1288         {
1289             // ExOp operators extend unary operator with extra, non-GenTree* members.  In many cases,
1290             // these should be included in the hash code.
1291             switch (oper)
1292             {
1293                 case GT_INTRINSIC:
1294                     if (op1->gtIntrinsic.gtIntrinsicId != op2->gtIntrinsic.gtIntrinsicId)
1295                     {
1296                         return false;
1297                     }
1298                     break;
1299                 case GT_LEA:
1300                     if (op1->gtAddrMode.gtScale != op2->gtAddrMode.gtScale)
1301                     {
1302                         return false;
1303                     }
1304                     if (op1->gtAddrMode.Offset() != op2->gtAddrMode.Offset())
1305                     {
1306                         return false;
1307                     }
1308                     break;
1309                 case GT_INDEX:
1310                     if (op1->gtIndex.gtIndElemSize != op2->gtIndex.gtIndElemSize)
1311                     {
1312                         return false;
1313                     }
1314                     break;
1315                 case GT_INDEX_ADDR:
1316                     if (op1->AsIndexAddr()->gtElemSize != op2->AsIndexAddr()->gtElemSize)
1317                     {
1318                         return false;
1319                     }
1320                     break;
1321 #ifdef FEATURE_SIMD
1322                 case GT_SIMD:
1323                     if ((op1->AsSIMD()->gtSIMDIntrinsicID != op2->AsSIMD()->gtSIMDIntrinsicID) ||
1324                         (op1->AsSIMD()->gtSIMDBaseType != op2->AsSIMD()->gtSIMDBaseType) ||
1325                         (op1->AsSIMD()->gtSIMDSize != op2->AsSIMD()->gtSIMDSize))
1326                     {
1327                         return false;
1328                     }
1329                     break;
1330 #endif // FEATURE_SIMD
1331
1332 #ifdef FEATURE_HW_INTRINSICS
1333                 case GT_HWIntrinsic:
1334                     if ((op1->AsHWIntrinsic()->gtHWIntrinsicId != op2->AsHWIntrinsic()->gtHWIntrinsicId) ||
1335                         (op1->AsHWIntrinsic()->gtSIMDBaseType != op2->AsHWIntrinsic()->gtSIMDBaseType) ||
1336                         (op1->AsHWIntrinsic()->gtSIMDSize != op2->AsHWIntrinsic()->gtSIMDSize) ||
1337                         (op1->AsHWIntrinsic()->gtIndexBaseType != op2->AsHWIntrinsic()->gtIndexBaseType))
1338                     {
1339                         return false;
1340                     }
1341                     break;
1342 #endif
1343
1344                 // For the ones below no extra argument matters for comparison.
1345                 case GT_QMARK:
1346                     break;
1347
1348                 default:
1349                     assert(!"unexpected binary ExOp operator");
1350             }
1351         }
1352
1353         if (op1->gtOp.gtOp2)
1354         {
1355             if (!Compare(op1->gtOp.gtOp1, op2->gtOp.gtOp1, swapOK))
1356             {
1357                 if (swapOK && OperIsCommutative(oper) &&
1358                     ((op1->gtOp.gtOp1->gtFlags | op1->gtOp.gtOp2->gtFlags | op2->gtOp.gtOp1->gtFlags |
1359                       op2->gtOp.gtOp2->gtFlags) &
1360                      GTF_ALL_EFFECT) == 0)
1361                 {
1362                     if (Compare(op1->gtOp.gtOp1, op2->gtOp.gtOp2, swapOK))
1363                     {
1364                         op1 = op1->gtOp.gtOp2;
1365                         op2 = op2->gtOp.gtOp1;
1366                         goto AGAIN;
1367                     }
1368                 }
1369
1370                 return false;
1371             }
1372
1373             op1 = op1->gtOp.gtOp2;
1374             op2 = op2->gtOp.gtOp2;
1375
1376             goto AGAIN;
1377         }
1378         else
1379         {
1380
1381             op1 = op1->gtOp.gtOp1;
1382             op2 = op2->gtOp.gtOp1;
1383
1384             if (!op1)
1385             {
1386                 return (op2 == nullptr);
1387             }
1388             if (!op2)
1389             {
1390                 return false;
1391             }
1392
1393             goto AGAIN;
1394         }
1395     }
1396
1397     /* See what kind of a special operator we have here */
1398
1399     switch (oper)
1400     {
1401         case GT_FIELD:
1402             if (op1->gtField.gtFldHnd != op2->gtField.gtFldHnd)
1403             {
1404                 break;
1405             }
1406
1407             op1 = op1->gtField.gtFldObj;
1408             op2 = op2->gtField.gtFldObj;
1409
1410             if (op1 || op2)
1411             {
1412                 if (op1 && op2)
1413                 {
1414                     goto AGAIN;
1415                 }
1416             }
1417
1418             return true;
1419
1420         case GT_CALL:
1421
1422             if (op1->gtCall.gtCallType != op2->gtCall.gtCallType)
1423             {
1424                 return false;
1425             }
1426
1427             if (op1->gtCall.gtCallType != CT_INDIRECT)
1428             {
1429                 if (op1->gtCall.gtCallMethHnd != op2->gtCall.gtCallMethHnd)
1430                 {
1431                     return false;
1432                 }
1433
1434 #ifdef FEATURE_READYTORUN_COMPILER
1435                 if (op1->gtCall.gtEntryPoint.addr != op2->gtCall.gtEntryPoint.addr)
1436                 {
1437                     return false;
1438                 }
1439 #endif
1440             }
1441             else
1442             {
1443                 if (!Compare(op1->gtCall.gtCallAddr, op2->gtCall.gtCallAddr))
1444                 {
1445                     return false;
1446                 }
1447             }
1448
1449             if (Compare(op1->gtCall.gtCallLateArgs, op2->gtCall.gtCallLateArgs) &&
1450                 Compare(op1->gtCall.gtCallArgs, op2->gtCall.gtCallArgs) &&
1451                 Compare(op1->gtCall.gtControlExpr, op2->gtCall.gtControlExpr) &&
1452                 Compare(op1->gtCall.gtCallObjp, op2->gtCall.gtCallObjp))
1453             {
1454                 return true;
1455             }
1456             break;
1457
1458         case GT_ARR_ELEM:
1459
1460             if (op1->gtArrElem.gtArrRank != op2->gtArrElem.gtArrRank)
1461             {
1462                 return false;
1463             }
1464
1465             // NOTE: gtArrElemSize may need to be handled
1466
1467             unsigned dim;
1468             for (dim = 0; dim < op1->gtArrElem.gtArrRank; dim++)
1469             {
1470                 if (!Compare(op1->gtArrElem.gtArrInds[dim], op2->gtArrElem.gtArrInds[dim]))
1471                 {
1472                     return false;
1473                 }
1474             }
1475
1476             op1 = op1->gtArrElem.gtArrObj;
1477             op2 = op2->gtArrElem.gtArrObj;
1478             goto AGAIN;
1479
1480         case GT_ARR_OFFSET:
1481             if (op1->gtArrOffs.gtCurrDim != op2->gtArrOffs.gtCurrDim ||
1482                 op1->gtArrOffs.gtArrRank != op2->gtArrOffs.gtArrRank)
1483             {
1484                 return false;
1485             }
1486             return (Compare(op1->gtArrOffs.gtOffset, op2->gtArrOffs.gtOffset) &&
1487                     Compare(op1->gtArrOffs.gtIndex, op2->gtArrOffs.gtIndex) &&
1488                     Compare(op1->gtArrOffs.gtArrObj, op2->gtArrOffs.gtArrObj));
1489
1490         case GT_CMPXCHG:
1491             return Compare(op1->gtCmpXchg.gtOpLocation, op2->gtCmpXchg.gtOpLocation) &&
1492                    Compare(op1->gtCmpXchg.gtOpValue, op2->gtCmpXchg.gtOpValue) &&
1493                    Compare(op1->gtCmpXchg.gtOpComparand, op2->gtCmpXchg.gtOpComparand);
1494
1495         case GT_ARR_BOUNDS_CHECK:
1496 #ifdef FEATURE_SIMD
1497         case GT_SIMD_CHK:
1498 #endif // FEATURE_SIMD
1499 #ifdef FEATURE_HW_INTRINSICS
1500         case GT_HW_INTRINSIC_CHK:
1501 #endif // FEATURE_HW_INTRINSICS
1502             return Compare(op1->gtBoundsChk.gtIndex, op2->gtBoundsChk.gtIndex) &&
1503                    Compare(op1->gtBoundsChk.gtArrLen, op2->gtBoundsChk.gtArrLen) &&
1504                    (op1->gtBoundsChk.gtThrowKind == op2->gtBoundsChk.gtThrowKind);
1505
1506         case GT_STORE_DYN_BLK:
1507         case GT_DYN_BLK:
1508             return Compare(op1->gtDynBlk.Addr(), op2->gtDynBlk.Addr()) &&
1509                    Compare(op1->gtDynBlk.Data(), op2->gtDynBlk.Data()) &&
1510                    Compare(op1->gtDynBlk.gtDynamicSize, op2->gtDynBlk.gtDynamicSize);
1511
1512         default:
1513             assert(!"unexpected operator");
1514     }
1515
1516     return false;
1517 }
1518
1519 /*****************************************************************************
1520  *
1521  *  Returns non-zero if the given tree contains a use of a local #lclNum.
1522  */
1523
1524 bool Compiler::gtHasRef(GenTree* tree, ssize_t lclNum, bool defOnly)
1525 {
1526     genTreeOps oper;
1527     unsigned   kind;
1528
1529 AGAIN:
1530
1531     assert(tree);
1532
1533     oper = tree->OperGet();
1534     kind = tree->OperKind();
1535
1536     assert(oper != GT_STMT);
1537
1538     /* Is this a constant node? */
1539
1540     if (kind & GTK_CONST)
1541     {
1542         return false;
1543     }
1544
1545     /* Is this a leaf node? */
1546
1547     if (kind & GTK_LEAF)
1548     {
1549         if (oper == GT_LCL_VAR)
1550         {
1551             if (tree->gtLclVarCommon.gtLclNum == (unsigned)lclNum)
1552             {
1553                 if (!defOnly)
1554                 {
1555                     return true;
1556                 }
1557             }
1558         }
1559         else if (oper == GT_RET_EXPR)
1560         {
1561             return gtHasRef(tree->gtRetExpr.gtInlineCandidate, lclNum, defOnly);
1562         }
1563
1564         return false;
1565     }
1566
1567     /* Is it a 'simple' unary/binary operator? */
1568
1569     if (kind & GTK_SMPOP)
1570     {
1571         if (tree->gtGetOp2IfPresent())
1572         {
1573             if (gtHasRef(tree->gtOp.gtOp1, lclNum, defOnly))
1574             {
1575                 return true;
1576             }
1577
1578             tree = tree->gtOp.gtOp2;
1579             goto AGAIN;
1580         }
1581         else
1582         {
1583             tree = tree->gtOp.gtOp1;
1584
1585             if (!tree)
1586             {
1587                 return false;
1588             }
1589
1590             if (oper == GT_ASG)
1591             {
1592                 // 'tree' is the gtOp1 of an assignment node. So we can handle
1593                 // the case where defOnly is either true or false.
1594
1595                 if (tree->gtOper == GT_LCL_VAR && tree->gtLclVarCommon.gtLclNum == (unsigned)lclNum)
1596                 {
1597                     return true;
1598                 }
1599                 else if (tree->gtOper == GT_FIELD && lclNum == (ssize_t)tree->gtField.gtFldHnd)
1600                 {
1601                     return true;
1602                 }
1603             }
1604
1605             goto AGAIN;
1606         }
1607     }
1608
1609     /* See what kind of a special operator we have here */
1610
1611     switch (oper)
1612     {
1613         case GT_FIELD:
1614             if (lclNum == (ssize_t)tree->gtField.gtFldHnd)
1615             {
1616                 if (!defOnly)
1617                 {
1618                     return true;
1619                 }
1620             }
1621
1622             tree = tree->gtField.gtFldObj;
1623             if (tree)
1624             {
1625                 goto AGAIN;
1626             }
1627             break;
1628
1629         case GT_CALL:
1630
1631             if (tree->gtCall.gtCallObjp)
1632             {
1633                 if (gtHasRef(tree->gtCall.gtCallObjp, lclNum, defOnly))
1634                 {
1635                     return true;
1636                 }
1637             }
1638
1639             if (tree->gtCall.gtCallArgs)
1640             {
1641                 if (gtHasRef(tree->gtCall.gtCallArgs, lclNum, defOnly))
1642                 {
1643                     return true;
1644                 }
1645             }
1646
1647             if (tree->gtCall.gtCallLateArgs)
1648             {
1649                 if (gtHasRef(tree->gtCall.gtCallLateArgs, lclNum, defOnly))
1650                 {
1651                     return true;
1652                 }
1653             }
1654
1655             if (tree->gtCall.gtControlExpr)
1656             {
1657                 if (gtHasRef(tree->gtCall.gtControlExpr, lclNum, defOnly))
1658                 {
1659                     return true;
1660                 }
1661             }
1662
1663             if (tree->gtCall.gtCallType == CT_INDIRECT)
1664             {
1665                 // pinvoke-calli cookie is a constant, or constant indirection
1666                 assert(tree->gtCall.gtCallCookie == nullptr || tree->gtCall.gtCallCookie->gtOper == GT_CNS_INT ||
1667                        tree->gtCall.gtCallCookie->gtOper == GT_IND);
1668
1669                 tree = tree->gtCall.gtCallAddr;
1670             }
1671             else
1672             {
1673                 tree = nullptr;
1674             }
1675
1676             if (tree)
1677             {
1678                 goto AGAIN;
1679             }
1680
1681             break;
1682
1683         case GT_ARR_ELEM:
1684             if (gtHasRef(tree->gtArrElem.gtArrObj, lclNum, defOnly))
1685             {
1686                 return true;
1687             }
1688
1689             unsigned dim;
1690             for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
1691             {
1692                 if (gtHasRef(tree->gtArrElem.gtArrInds[dim], lclNum, defOnly))
1693                 {
1694                     return true;
1695                 }
1696             }
1697
1698             break;
1699
1700         case GT_ARR_OFFSET:
1701             if (gtHasRef(tree->gtArrOffs.gtOffset, lclNum, defOnly) ||
1702                 gtHasRef(tree->gtArrOffs.gtIndex, lclNum, defOnly) ||
1703                 gtHasRef(tree->gtArrOffs.gtArrObj, lclNum, defOnly))
1704             {
1705                 return true;
1706             }
1707             break;
1708
1709         case GT_CMPXCHG:
1710             if (gtHasRef(tree->gtCmpXchg.gtOpLocation, lclNum, defOnly))
1711             {
1712                 return true;
1713             }
1714             if (gtHasRef(tree->gtCmpXchg.gtOpValue, lclNum, defOnly))
1715             {
1716                 return true;
1717             }
1718             if (gtHasRef(tree->gtCmpXchg.gtOpComparand, lclNum, defOnly))
1719             {
1720                 return true;
1721             }
1722             break;
1723
1724         case GT_ARR_BOUNDS_CHECK:
1725 #ifdef FEATURE_SIMD
1726         case GT_SIMD_CHK:
1727 #endif // FEATURE_SIMD
1728 #ifdef FEATURE_HW_INTRINSICS
1729         case GT_HW_INTRINSIC_CHK:
1730 #endif // FEATURE_HW_INTRINSICS
1731             if (gtHasRef(tree->gtBoundsChk.gtIndex, lclNum, defOnly))
1732             {
1733                 return true;
1734             }
1735             if (gtHasRef(tree->gtBoundsChk.gtArrLen, lclNum, defOnly))
1736             {
1737                 return true;
1738             }
1739             break;
1740
1741         case GT_STORE_DYN_BLK:
1742             if (gtHasRef(tree->gtDynBlk.Data(), lclNum, defOnly))
1743             {
1744                 return true;
1745             }
1746             __fallthrough;
1747         case GT_DYN_BLK:
1748             if (gtHasRef(tree->gtDynBlk.Addr(), lclNum, defOnly))
1749             {
1750                 return true;
1751             }
1752             if (gtHasRef(tree->gtDynBlk.gtDynamicSize, lclNum, defOnly))
1753             {
1754                 return true;
1755             }
1756             break;
1757
1758         default:
1759 #ifdef DEBUG
1760             gtDispTree(tree);
1761 #endif
1762             assert(!"unexpected operator");
1763     }
1764
1765     return false;
1766 }
1767
// Walk context for gtHasLocalsWithAddrOpCB, passed via fgWalkData::pCallbackData.
struct AddrTakenDsc
{
    Compiler* comp;            // the compiler instance running the tree walk
    bool      hasAddrTakenLcl; // set to true when an address-exposed local is found
};
1773
1774 /* static */
1775 Compiler::fgWalkResult Compiler::gtHasLocalsWithAddrOpCB(GenTree** pTree, fgWalkData* data)
1776 {
1777     GenTree*  tree = *pTree;
1778     Compiler* comp = data->compiler;
1779
1780     if (tree->gtOper == GT_LCL_VAR)
1781     {
1782         unsigned   lclNum = tree->gtLclVarCommon.gtLclNum;
1783         LclVarDsc* varDsc = &comp->lvaTable[lclNum];
1784
1785         if (varDsc->lvHasLdAddrOp || varDsc->lvAddrExposed)
1786         {
1787             ((AddrTakenDsc*)data->pCallbackData)->hasAddrTakenLcl = true;
1788             return WALK_ABORT;
1789         }
1790     }
1791
1792     return WALK_CONTINUE;
1793 }
1794
1795 /*****************************************************************************
1796  *
1797  *  Return true if this tree contains locals with lvHasLdAddrOp or lvAddrExposed
1798  *  flag(s) set.
1799  */
1800
1801 bool Compiler::gtHasLocalsWithAddrOp(GenTree* tree)
1802 {
1803     AddrTakenDsc desc;
1804
1805     desc.comp            = this;
1806     desc.hasAddrTakenLcl = false;
1807
1808     fgWalkTreePre(&tree, gtHasLocalsWithAddrOpCB, &desc);
1809
1810     return desc.hasAddrTakenLcl;
1811 }
1812
1813 #ifdef DEBUG
1814
1815 /*****************************************************************************
1816  *
1817  *  Helper used to compute hash values for trees.
1818  */
1819
// Fold a new 32-bit value into a running hash: scale the old hash by 1.5
// (old + old/2) and XOR in the new value.
inline unsigned genTreeHashAdd(unsigned old, unsigned add)
{
    const unsigned scaled = old + (old / 2);
    return scaled ^ add;
}
1824
1825 inline unsigned genTreeHashAdd(unsigned old, void* add)
1826 {
1827     return genTreeHashAdd(old, (unsigned)(size_t)add);
1828 }
1829
1830 /*****************************************************************************
1831  *
1832  *  Given an arbitrary expression tree, compute a hash value for it.
1833  */
1834
//------------------------------------------------------------------------
// gtHashValue: Compute a hash value for an arbitrary expression tree.
//
// Arguments:
//    tree - The tree to hash; must not be a GT_STMT at the root.
//
// Return Value:
//    A hash code that folds in every node's operator plus the payload of
//    leaves/constants and the operator-specific "extra" (ExOp) fields.
//
// Notes:
//    DEBUG-only (this definition sits inside #ifdef DEBUG). One operand
//    chain is walked iteratively via the AGAIN label; remaining operands
//    are hashed by recursion.
//
unsigned Compiler::gtHashValue(GenTree* tree)
{
    genTreeOps oper;
    unsigned   kind;

    unsigned hash = 0;

    GenTree* temp;

AGAIN:
    assert(tree);
    assert(tree->gtOper != GT_STMT);

    /* Figure out what kind of a node we have */

    oper = tree->OperGet();
    kind = tree->OperKind();

    /* Include the operator value in the hash */

    hash = genTreeHashAdd(hash, oper);

    /* Is this a constant or leaf node? */

    if (kind & (GTK_CONST | GTK_LEAF))
    {
        // 'add' accumulates the node's payload (local number, constant bits,
        // etc.) before being narrowed and mixed into 'hash' below.
        size_t add;

        switch (oper)
        {
            UINT64 bits;
            case GT_LCL_VAR:
                add = tree->gtLclVar.gtLclNum;
                break;
            case GT_LCL_FLD:
                hash = genTreeHashAdd(hash, tree->gtLclFld.gtLclNum);
                add  = tree->gtLclFld.gtLclOffs;
                break;

            case GT_CNS_INT:
                add = tree->gtIntCon.gtIconVal;
                break;
            case GT_CNS_LNG:
                bits = (UINT64)tree->gtLngCon.gtLconVal;
#ifdef _HOST_64BIT_
                add = bits;
#else // 32-bit host
                add = genTreeHashAdd(uhi32(bits), ulo32(bits));
#endif
                break;
            case GT_CNS_DBL:
                // Hash the raw bit pattern of the double (not its numeric value).
                bits = *(UINT64*)(&tree->gtDblCon.gtDconVal);
#ifdef _HOST_64BIT_
                add = bits;
#else // 32-bit host
                add = genTreeHashAdd(uhi32(bits), ulo32(bits));
#endif
                break;
            case GT_CNS_STR:
                add = tree->gtStrCon.gtSconCPX;
                break;

            case GT_JMP:
                add = tree->gtVal.gtVal1;
                break;

            default:
                add = 0;
                break;
        }

        // clang-format off
        // narrow 'add' into a 32-bit 'val'
        unsigned val;
#ifdef _HOST_64BIT_
        val = genTreeHashAdd(uhi32(add), ulo32(add));
#else // 32-bit host
        val = add;
#endif
        // clang-format on

        hash = genTreeHashAdd(hash, val);
        goto DONE;
    }

    /* Is it a 'simple' unary/binary operator? */

    GenTree* op1;

    if (kind & GTK_UNOP)
    {
        op1 = tree->gtOp.gtOp1;
        /* Special case: no sub-operand at all */

        if (GenTree::IsExOp(kind))
        {
            // ExOp operators extend operators with extra, non-GenTree* members.  In many cases,
            // these should be included in the hash code.
            switch (oper)
            {
                case GT_ARR_LENGTH:
                    hash += tree->gtArrLen.ArrLenOffset();
                    break;
                case GT_CAST:
                    hash ^= tree->gtCast.gtCastType;
                    break;
                case GT_INDEX:
                    hash += tree->gtIndex.gtIndElemSize;
                    break;
                case GT_INDEX_ADDR:
                    hash += tree->AsIndexAddr()->gtElemSize;
                    break;
                case GT_ALLOCOBJ:
                    hash = genTreeHashAdd(hash, static_cast<unsigned>(
                                                    reinterpret_cast<uintptr_t>(tree->gtAllocObj.gtAllocObjClsHnd)));
                    hash = genTreeHashAdd(hash, tree->gtAllocObj.gtNewHelper);
                    break;
                case GT_RUNTIMELOOKUP:
                    hash =
                        genTreeHashAdd(hash,
                                       static_cast<unsigned>(reinterpret_cast<uintptr_t>(tree->gtRuntimeLookup.gtHnd)));
                    break;

                case GT_OBJ:
                    hash =
                        genTreeHashAdd(hash, static_cast<unsigned>(reinterpret_cast<uintptr_t>(tree->gtObj.gtClass)));
                    break;
                // For the ones below no extra argument matters for comparison.
                case GT_BOX:
                    break;

                default:
                    assert(!"unexpected unary ExOp operator");
            }
        }

        if (!op1)
        {
            goto DONE;
        }

        // Continue iteratively with the sole operand.
        tree = op1;
        goto AGAIN;
    }

    if (kind & GTK_BINOP)
    {
        if (GenTree::IsExOp(kind))
        {
            // ExOp operators extend operators with extra, non-GenTree* members.  In many cases,
            // these should be included in the hash code.
            switch (oper)
            {
                case GT_INTRINSIC:
                    hash += tree->gtIntrinsic.gtIntrinsicId;
                    break;
                case GT_LEA:
                    hash += static_cast<unsigned>(tree->gtAddrMode.Offset() << 3) + tree->gtAddrMode.gtScale;
                    break;

                case GT_BLK:
                case GT_STORE_BLK:
                    hash += tree->gtBlk.gtBlkSize;
                    break;

                case GT_OBJ:
                case GT_STORE_OBJ:
                    hash ^= PtrToUlong(tree->AsObj()->gtClass);
                    break;

                case GT_DYN_BLK:
                case GT_STORE_DYN_BLK:
                    hash += gtHashValue(tree->AsDynBlk()->gtDynamicSize);
                    break;

                // For the ones below no extra argument matters for comparison.
                case GT_ARR_INDEX:
                case GT_QMARK:
                case GT_INDEX:
                case GT_INDEX_ADDR:
                    break;

#ifdef FEATURE_SIMD
                case GT_SIMD:
                    hash += tree->gtSIMD.gtSIMDIntrinsicID;
                    hash += tree->gtSIMD.gtSIMDBaseType;
                    hash += tree->gtSIMD.gtSIMDSize;
                    break;
#endif // FEATURE_SIMD

#ifdef FEATURE_HW_INTRINSICS
                case GT_HWIntrinsic:
                    hash += tree->gtHWIntrinsic.gtHWIntrinsicId;
                    hash += tree->gtHWIntrinsic.gtSIMDBaseType;
                    hash += tree->gtHWIntrinsic.gtSIMDSize;
                    hash += tree->gtHWIntrinsic.gtIndexBaseType;
                    break;
#endif // FEATURE_HW_INTRINSICS

                default:
                    assert(!"unexpected binary ExOp operator");
            }
        }

        op1          = tree->gtOp.gtOp1;
        GenTree* op2 = tree->gtOp.gtOp2;

        /* Is there a second sub-operand? */

        if (!op2)
        {
            /* Special case: no sub-operands at all */

            if (!op1)
            {
                goto DONE;
            }

            /* This is a unary operator */

            tree = op1;
            goto AGAIN;
        }

        /* This is a binary operator */

        // Recurse for op1, then iterate on op2.
        unsigned hsh1 = gtHashValue(op1);

        /* Add op1's hash to the running value and continue with op2 */

        hash = genTreeHashAdd(hash, hsh1);

        tree = op2;
        goto AGAIN;
    }

    /* See what kind of a special operator we have here */
    switch (tree->gtOper)
    {
        case GT_FIELD:
            if (tree->gtField.gtFldObj)
            {
                temp = tree->gtField.gtFldObj;
                assert(temp);
                hash = genTreeHashAdd(hash, gtHashValue(temp));
            }
            break;

        case GT_STMT:
            temp = tree->gtStmt.gtStmtExpr;
            assert(temp);
            hash = genTreeHashAdd(hash, gtHashValue(temp));
            break;

        case GT_ARR_ELEM:

            hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrElem.gtArrObj));

            // Hash each dimension's index expression.
            unsigned dim;
            for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
            {
                hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrElem.gtArrInds[dim]));
            }

            break;

        case GT_ARR_OFFSET:
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrOffs.gtOffset));
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrOffs.gtIndex));
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrOffs.gtArrObj));
            break;

        case GT_CALL:

            // 'this' pointer, if present (and not a GT_NOP placeholder).
            if (tree->gtCall.gtCallObjp && tree->gtCall.gtCallObjp->gtOper != GT_NOP)
            {
                temp = tree->gtCall.gtCallObjp;
                assert(temp);
                hash = genTreeHashAdd(hash, gtHashValue(temp));
            }

            if (tree->gtCall.gtCallArgs)
            {
                temp = tree->gtCall.gtCallArgs;
                assert(temp);
                hash = genTreeHashAdd(hash, gtHashValue(temp));
            }

            // Indirect calls hash the target address tree; direct calls hash
            // the method handle.
            if (tree->gtCall.gtCallType == CT_INDIRECT)
            {
                temp = tree->gtCall.gtCallAddr;
                assert(temp);
                hash = genTreeHashAdd(hash, gtHashValue(temp));
            }
            else
            {
                hash = genTreeHashAdd(hash, tree->gtCall.gtCallMethHnd);
            }

            if (tree->gtCall.gtCallLateArgs)
            {
                temp = tree->gtCall.gtCallLateArgs;
                assert(temp);
                hash = genTreeHashAdd(hash, gtHashValue(temp));
            }
            break;

        case GT_CMPXCHG:
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtCmpXchg.gtOpLocation));
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtCmpXchg.gtOpValue));
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtCmpXchg.gtOpComparand));
            break;

        case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
        case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtBoundsChk.gtIndex));
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtBoundsChk.gtArrLen));
            hash = genTreeHashAdd(hash, tree->gtBoundsChk.gtThrowKind);
            break;

        case GT_STORE_DYN_BLK:
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtDynBlk.Data()));
            __fallthrough;
        case GT_DYN_BLK:
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtDynBlk.Addr()));
            hash = genTreeHashAdd(hash, gtHashValue(tree->gtDynBlk.gtDynamicSize));
            break;

        default:
#ifdef DEBUG
            gtDispTree(tree);
#endif
            assert(!"unexpected operator");
            break;
    }

DONE:

    return hash;
}
2180
2181 #endif // DEBUG
2182
2183 /*****************************************************************************
2184  *
2185  *  Return a relational operator that is the reverse of the given one.
2186  */
2187
2188 /* static */
2189 genTreeOps GenTree::ReverseRelop(genTreeOps relop)
2190 {
2191     static const genTreeOps reverseOps[] = {
2192         GT_NE,      // GT_EQ
2193         GT_EQ,      // GT_NE
2194         GT_GE,      // GT_LT
2195         GT_GT,      // GT_LE
2196         GT_LT,      // GT_GE
2197         GT_LE,      // GT_GT
2198         GT_TEST_NE, // GT_TEST_EQ
2199         GT_TEST_EQ, // GT_TEST_NE
2200     };
2201
2202     assert(reverseOps[GT_EQ - GT_EQ] == GT_NE);
2203     assert(reverseOps[GT_NE - GT_EQ] == GT_EQ);
2204
2205     assert(reverseOps[GT_LT - GT_EQ] == GT_GE);
2206     assert(reverseOps[GT_LE - GT_EQ] == GT_GT);
2207     assert(reverseOps[GT_GE - GT_EQ] == GT_LT);
2208     assert(reverseOps[GT_GT - GT_EQ] == GT_LE);
2209
2210     assert(reverseOps[GT_TEST_EQ - GT_EQ] == GT_TEST_NE);
2211     assert(reverseOps[GT_TEST_NE - GT_EQ] == GT_TEST_EQ);
2212
2213     assert(OperIsCompare(relop));
2214     assert(relop >= GT_EQ && (unsigned)(relop - GT_EQ) < sizeof(reverseOps));
2215
2216     return reverseOps[relop - GT_EQ];
2217 }
2218
2219 /*****************************************************************************
2220  *
2221  *  Return a relational operator that will work for swapped operands.
2222  */
2223
2224 /* static */
2225 genTreeOps GenTree::SwapRelop(genTreeOps relop)
2226 {
2227     static const genTreeOps swapOps[] = {
2228         GT_EQ,      // GT_EQ
2229         GT_NE,      // GT_NE
2230         GT_GT,      // GT_LT
2231         GT_GE,      // GT_LE
2232         GT_LE,      // GT_GE
2233         GT_LT,      // GT_GT
2234         GT_TEST_EQ, // GT_TEST_EQ
2235         GT_TEST_NE, // GT_TEST_NE
2236     };
2237
2238     assert(swapOps[GT_EQ - GT_EQ] == GT_EQ);
2239     assert(swapOps[GT_NE - GT_EQ] == GT_NE);
2240
2241     assert(swapOps[GT_LT - GT_EQ] == GT_GT);
2242     assert(swapOps[GT_LE - GT_EQ] == GT_GE);
2243     assert(swapOps[GT_GE - GT_EQ] == GT_LE);
2244     assert(swapOps[GT_GT - GT_EQ] == GT_LT);
2245
2246     assert(swapOps[GT_TEST_EQ - GT_EQ] == GT_TEST_EQ);
2247     assert(swapOps[GT_TEST_NE - GT_EQ] == GT_TEST_NE);
2248
2249     assert(OperIsCompare(relop));
2250     assert(relop >= GT_EQ && (unsigned)(relop - GT_EQ) < sizeof(swapOps));
2251
2252     return swapOps[relop - GT_EQ];
2253 }
2254
2255 /*****************************************************************************
2256  *
2257  *  Reverse the meaning of the given test condition.
2258  */
2259
2260 GenTree* Compiler::gtReverseCond(GenTree* tree)
2261 {
2262     if (tree->OperIsCompare())
2263     {
2264         tree->SetOper(GenTree::ReverseRelop(tree->OperGet()));
2265
2266         // Flip the GTF_RELOP_NAN_UN bit
2267         //     a ord b   === (a != NaN && b != NaN)
2268         //     a unord b === (a == NaN || b == NaN)
2269         // => !(a ord b) === (a unord b)
2270         if (varTypeIsFloating(tree->gtOp.gtOp1->TypeGet()))
2271         {
2272             tree->gtFlags ^= GTF_RELOP_NAN_UN;
2273         }
2274     }
2275     else if (tree->OperIs(GT_JCC, GT_SETCC))
2276     {
2277         GenTreeCC* cc   = tree->AsCC();
2278         cc->gtCondition = GenCondition::Reverse(cc->gtCondition);
2279     }
2280     else if (tree->OperIs(GT_JCMP))
2281     {
2282         // Flip the GTF_JCMP_EQ
2283         //
2284         // This causes switching
2285         //     cbz <=> cbnz
2286         //     tbz <=> tbnz
2287         tree->gtFlags ^= GTF_JCMP_EQ;
2288     }
2289     else
2290     {
2291         tree = gtNewOperNode(GT_NOT, TYP_INT, tree);
2292     }
2293
2294     return tree;
2295 }
2296
2297 /*****************************************************************************/
2298
2299 #ifdef DEBUG
2300
2301 bool GenTree::gtIsValid64RsltMul()
2302 {
2303     if ((gtOper != GT_MUL) || !(gtFlags & GTF_MUL_64RSLT))
2304     {
2305         return false;
2306     }
2307
2308     GenTree* op1 = gtOp.gtOp1;
2309     GenTree* op2 = gtOp.gtOp2;
2310
2311     if (TypeGet() != TYP_LONG || op1->TypeGet() != TYP_LONG || op2->TypeGet() != TYP_LONG)
2312     {
2313         return false;
2314     }
2315
2316     if (gtOverflow())
2317     {
2318         return false;
2319     }
2320
2321     // op1 has to be conv.i8(i4Expr)
2322     if ((op1->gtOper != GT_CAST) || (genActualType(op1->CastFromType()) != TYP_INT))
2323     {
2324         return false;
2325     }
2326
2327     // op2 has to be conv.i8(i4Expr)
2328     if ((op2->gtOper != GT_CAST) || (genActualType(op2->CastFromType()) != TYP_INT))
2329     {
2330         return false;
2331     }
2332
2333     // The signedness of both casts must be the same
2334     if (((op1->gtFlags & GTF_UNSIGNED) != 0) != ((op2->gtFlags & GTF_UNSIGNED) != 0))
2335     {
2336         return false;
2337     }
2338
2339     // Do unsigned mul iff both the casts are unsigned
2340     if (((op1->gtFlags & GTF_UNSIGNED) != 0) != ((gtFlags & GTF_UNSIGNED) != 0))
2341     {
2342         return false;
2343     }
2344
2345     return true;
2346 }
2347
2348 #endif // DEBUG
2349
2350 //------------------------------------------------------------------------------
2351 // gtSetListOrder : Figure out the evaluation order for a list of values.
2352 //
2353 //
2354 // Arguments:
2355 //    list  - List to figure out the evaluation order for
2356 //    isListCallArgs - True iff the list is a list of call arguments
2357 //    callArgsInRegs -  True iff the list is a list of call arguments and they are passed in registers
2358 //
2359 // Return Value:
//    The maximum "evaluation level" (nesting depth weight) of the list's operands.
2361
unsigned Compiler::gtSetListOrder(GenTree* list, bool isListCallArgs, bool callArgsInRegs)
{
    assert((list != nullptr) && list->OperIsAnyList());
    assert(!callArgsInRegs || isListCallArgs);

    // Push the chain of list nodes so they can be costed back-to-front
    // (deepest element first) without recursion.
    ArrayStack<GenTree*> listNodes(getAllocator(CMK_ArrayStack));

    do
    {
        listNodes.Push(list);
        list = list->gtOp.gtOp2;
    } while ((list != nullptr) && (list->OperIsAnyList()));

    // Level of the non-list tail element, if the chain ends in one.
    unsigned nxtlvl = (list == nullptr) ? 0 : gtSetEvalOrder(list);
    while (!listNodes.Empty())
    {
        list = listNodes.Pop();
        assert(list && list->OperIsAnyList());
        GenTree* next = list->gtOp.gtOp2;

        unsigned level = 0;

        // TODO: Do we have to compute costs differently for argument lists and
        // all other lists?
        // https://github.com/dotnet/coreclr/issues/7095
        unsigned costSz = (isListCallArgs || (next == nullptr)) ? 0 : 1;
        unsigned costEx = (isListCallArgs || (next == nullptr)) ? 0 : 1;

        if (next != nullptr)
        {
            if (isListCallArgs)
            {
                if (level < nxtlvl)
                {
                    level = nxtlvl;
                }
            }
            // Accumulate the cost of the rest of the list.
            costEx += next->gtCostEx;
            costSz += next->gtCostSz;
        }

        // Cost this node's own operand.
        GenTree* op1 = list->gtOp.gtOp1;
        unsigned lvl = gtSetEvalOrder(op1);

        // Swap the level counts
        if (list->gtFlags & GTF_REVERSE_OPS)
        {
            unsigned tmpl;

            tmpl   = lvl;
            lvl    = nxtlvl;
            nxtlvl = tmpl;
        }

        // TODO: Do we have to compute levels differently for argument lists and
        // all other lists?
        // https://github.com/dotnet/coreclr/issues/7095
        if (isListCallArgs)
        {
            // Call args: the list's level is the max of its operands' levels.
            if (level < lvl)
            {
                level = lvl;
            }
        }
        else
        {
            // Other lists: bump the level when both sides are equally deep.
            if (lvl < 1)
            {
                level = nxtlvl;
            }
            else if (lvl == nxtlvl)
            {
                level = lvl + 1;
            }
            else
            {
                level = lvl;
            }
        }

        if (op1->gtCostEx != 0)
        {
            costEx += op1->gtCostEx;
            // Stack-passed call arguments pay an extra store cost.
            costEx += (callArgsInRegs || !isListCallArgs) ? 0 : IND_COST_EX;
        }

        if (op1->gtCostSz != 0)
        {
            costSz += op1->gtCostSz;
#ifdef _TARGET_XARCH_
            if (callArgsInRegs) // push is smaller than mov to reg
#endif
            {
                costSz += 1;
            }
        }

        list->SetCosts(costEx, costSz);

        nxtlvl = level;
    }

    return nxtlvl;
}
2466
2467 //-----------------------------------------------------------------------------
2468 // gtWalkOp: Traverse and mark an address expression
2469 //
2470 // Arguments:
2471 //    op1WB - An out parameter which is either the address expression, or one
2472 //            of its operands.
2473 //    op2WB - An out parameter which starts as either null or one of the operands
2474 //            of the address expression.
2475 //    base  - The base address of the addressing mode, or null if 'constOnly' is false
2476 //    constOnly - True if we will only traverse into ADDs with constant op2.
2477 //
2478 // This routine is a helper routine for gtSetEvalOrder() and is used to identify the
2479 // base and index nodes, which will be validated against those identified by
2480 // genCreateAddrMode().
2481 // It also marks the ADD nodes involved in the address expression with the
2482 // GTF_ADDRMODE_NO_CSE flag which prevents them from being considered for CSE's.
2483 //
2484 // Its two output parameters are modified under the following conditions:
2485 //
2486 // It is called once with the original address expression as 'op1WB', and
2487 // with 'constOnly' set to false. On this first invocation, *op1WB is always
2488 // an ADD node, and it will consider the operands of the ADD even if its op2 is
2489 // not a constant. However, when it encounters a non-constant or the base in the
2490 // op2 position, it stops iterating. That operand is returned in the 'op2WB' out
2491 // parameter, and will be considered on the third invocation of this method if
2492 // it is an ADD.
2493 //
2494 // It is called the second time with the two operands of the original expression, in
2495 // the original order, and the third time in reverse order. For these invocations
2496 // 'constOnly' is true, so it will only traverse cascaded ADD nodes if they have a
2497 // constant op2.
2498 //
2499 // The result, after three invocations, is that the values of the two out parameters
2500 // correspond to the base and index in some fashion. This method doesn't attempt
2501 // to determine or validate the scale or offset, if any.
2502 //
2503 // Assumptions (presumed to be ensured by genCreateAddrMode()):
2504 //    If an ADD has a constant operand, it is in the op2 position.
2505 //
2506 // Notes:
2507 //    This method, and its invocation sequence, are quite confusing, and since they
2508 //    were not originally well-documented, this specification is a possibly-imperfect
2509 //    reconstruction.
2510 //    The motivation for the handling of the NOP case is unclear.
2511 //    Note that 'op2WB' is only modified in the initial (!constOnly) case,
2512 //    or if a NOP is encountered in the op1 position.
2513 //
void Compiler::gtWalkOp(GenTree** op1WB, GenTree** op2WB, GenTree* base, bool constOnly)
{
    GenTree* op1 = *op1WB;
    GenTree* op2 = *op2WB;

    // Skip any wrappers (e.g. commas) to get to the actual value.
    op1 = op1->gtEffectiveVal();

    // Now we look for op1's with non-overflow GT_ADDs [of constants]
    while ((op1->gtOper == GT_ADD) && (!op1->gtOverflow()) && (!constOnly || (op1->gtOp.gtOp2->IsCnsIntOrI())))
    {
        // mark it with GTF_ADDRMODE_NO_CSE
        op1->gtFlags |= GTF_ADDRMODE_NO_CSE;

        // On the initial (!constOnly) invocation, remember each ADD's op2
        // operand; op1 always continues down the op1 chain.
        if (!constOnly)
        {
            op2 = op1->gtOp.gtOp2;
        }
        op1 = op1->gtOp.gtOp1;

        // If op1 is a GT_NOP then swap op1 and op2.
        // (Why? Also, presumably op2 is not a GT_NOP in this case?)
        if (op1->gtOper == GT_NOP)
        {
            GenTree* tmp;

            tmp = op1;
            op1 = op2;
            op2 = tmp;
        }

        // Stop the initial walk once op2 is the base or a non-constant; that
        // operand is handed back to the caller for a later constOnly pass.
        if (!constOnly && ((op2 == base) || (!op2->IsCnsIntOrI())))
        {
            break;
        }

        op1 = op1->gtEffectiveVal();
    }

    // Return the (possibly advanced) operands to the caller.
    *op1WB = op1;
    *op2WB = op2;
}
2555
2556 #ifdef DEBUG
2557 /*****************************************************************************
2558  * This is a workaround. It is to help implement an assert in gtSetEvalOrder() that the values
2559  * gtWalkOp() leaves in op1 and op2 correspond with the values of adr, idx, mul, and cns
2560  * that are returned by genCreateAddrMode(). It's essentially impossible to determine
2561  * what gtWalkOp() *should* return for all possible trees. This simply loosens one assert
2562  * to handle the following case:
2563
2564          indir     int
2565                     const(h)  int    4 field
2566                  +         byref
2567                     lclVar    byref  V00 this               <-- op2
2568               comma     byref                           <-- adr (base)
2569                  indir     byte
2570                     lclVar    byref  V00 this
2571            +         byref
2572                  const     int    2                     <-- mul == 4
2573               <<        int                                 <-- op1
2574                  lclVar    int    V01 arg1              <-- idx
2575
2576  * Here, we are planning to generate the address mode [edx+4*eax], where eax = idx and edx = the GT_COMMA expression.
2577  * To check adr equivalence with op2, we need to walk down the GT_ADD tree just like gtWalkOp() does.
2578  */
2579 GenTree* Compiler::gtWalkOpEffectiveVal(GenTree* op)
2580 {
2581     for (;;)
2582     {
2583         op = op->gtEffectiveVal();
2584
2585         if ((op->gtOper != GT_ADD) || op->gtOverflow() || !op->gtOp.gtOp2->IsCnsIntOrI())
2586         {
2587             break;
2588         }
2589
2590         op = op->gtOp.gtOp1;
2591     }
2592
2593     return op;
2594 }
2595 #endif // DEBUG
2596
2597 /*****************************************************************************
2598  *
2599  *  Given a tree, set the gtCostEx and gtCostSz fields which
2600  *  are used to measure the relative costs of the codegen of the tree
2601  *
2602  */
2603
void Compiler::gtPrepareCost(GenTree* tree)
{
    // Costs are computed as a side effect of establishing evaluation order.
    gtSetEvalOrder(tree);
}
2608
2609 bool Compiler::gtIsLikelyRegVar(GenTree* tree)
2610 {
2611     if (tree->gtOper != GT_LCL_VAR)
2612     {
2613         return false;
2614     }
2615
2616     assert(tree->gtLclVar.gtLclNum < lvaTableCnt);
2617     LclVarDsc* varDsc = lvaTable + tree->gtLclVar.gtLclNum;
2618
2619     if (varDsc->lvDoNotEnregister)
2620     {
2621         return false;
2622     }
2623
2624     // Be pessimistic if ref counts are not yet set up.
2625     //
2626     // Perhaps we should be optimistic though.
2627     // See notes in GitHub issue 18969.
2628     if (!lvaLocalVarRefCounted())
2629     {
2630         return false;
2631     }
2632
2633     if (varDsc->lvRefCntWtd() < (BB_UNITY_WEIGHT * 3))
2634     {
2635         return false;
2636     }
2637
2638 #ifdef _TARGET_X86_
2639     if (varTypeIsFloating(tree->TypeGet()))
2640         return false;
2641     if (varTypeIsLong(tree->TypeGet()))
2642         return false;
2643 #endif
2644
2645     return true;
2646 }
2647
2648 //------------------------------------------------------------------------
2649 // gtCanSwapOrder: Returns true iff the secondNode can be swapped with firstNode.
2650 //
2651 // Arguments:
2652 //    firstNode  - An operand of a tree that can have GTF_REVERSE_OPS set.
2653 //    secondNode - The other operand of the tree.
2654 //
2655 // Return Value:
2656 //    Returns a boolean indicating whether it is safe to reverse the execution
2657 //    order of the two trees, considering any exception, global effects, or
2658 //    ordering constraints.
2659 //
2660 bool Compiler::gtCanSwapOrder(GenTree* firstNode, GenTree* secondNode)
2661 {
2662     // Relative of order of global / side effects can't be swapped.
2663
2664     bool canSwap = true;
2665
2666     if (optValnumCSE_phase)
2667     {
2668         canSwap = optCSE_canSwap(firstNode, secondNode);
2669     }
2670
2671     // We cannot swap in the presence of special side effects such as GT_CATCH_ARG.
2672
2673     if (canSwap && (firstNode->gtFlags & GTF_ORDER_SIDEEFF))
2674     {
2675         canSwap = false;
2676     }
2677
2678     // When strict side effect order is disabled we allow GTF_REVERSE_OPS to be set
2679     // when one or both sides contains a GTF_CALL or GTF_EXCEPT.
2680     // Currently only the C and C++ languages allow non strict side effect order.
2681
2682     unsigned strictEffects = GTF_GLOB_EFFECT;
2683
2684     if (canSwap && (firstNode->gtFlags & strictEffects))
2685     {
2686         // op1 has side efects that can't be reordered.
2687         // Check for some special cases where we still may be able to swap.
2688
2689         if (secondNode->gtFlags & strictEffects)
2690         {
2691             // op2 has also has non reorderable side effects - can't swap.
2692             canSwap = false;
2693         }
2694         else
2695         {
2696             // No side effects in op2 - we can swap iff op1 has no way of modifying op2,
2697             // i.e. through byref assignments or calls or op2 is a constant.
2698
2699             if (firstNode->gtFlags & strictEffects & GTF_PERSISTENT_SIDE_EFFECTS)
2700             {
2701                 // We have to be conservative - can swap iff op2 is constant.
2702                 if (!secondNode->OperIsConst())
2703                 {
2704                     canSwap = false;
2705                 }
2706             }
2707         }
2708     }
2709     return canSwap;
2710 }
2711
2712 //------------------------------------------------------------------------
2713 // Given an address expression, compute its costs and addressing mode opportunities,
2714 // and mark addressing mode candidates as GTF_DONT_CSE.
2715 //
2716 // Arguments:
2717 //    addr   - The address expression
2718 //    costEx - The execution cost of this address expression (in/out arg to be updated)
//    costSz - The size cost of this address expression (in/out arg to be updated)
2720 //    type   - The type of the value being referenced by the parent of this address expression.
2721 //
2722 // Return Value:
2723 //    Returns true if it finds an addressing mode.
2724 //
2725 // Notes:
2726 //    TODO-Throughput - Consider actually instantiating these early, to avoid
2727 //    having to re-run the algorithm that looks for them (might also improve CQ).
2728 //
bool Compiler::gtMarkAddrMode(GenTree* addr, int* pCostEx, int* pCostSz, var_types type)
{
    // These are "out" parameters on the call to genCreateAddrMode():
    bool rev; // This will be true if the operands will need to be reversed. At this point we
              // don't care about this because we're not yet instantiating this addressing mode.
#if SCALED_ADDR_MODES
    unsigned mul; // This is the index (scale) value for the addressing mode
#endif
    ssize_t  cns;  // This is the constant offset
    GenTree* base; // This is the base of the address.
    GenTree* idx;  // This is the index.

    if (codeGen->genCreateAddrMode(addr, false /*fold*/, &rev, &base, &idx,
#if SCALED_ADDR_MODES
                                   &mul,
#endif // SCALED_ADDR_MODES
                                   &cns))
    {
        // We can form a complex addressing mode, so mark each of the interior
        // nodes with GTF_ADDRMODE_NO_CSE and calculate a more accurate cost.

        addr->gtFlags |= GTF_ADDRMODE_NO_CSE;
#ifdef _TARGET_XARCH_
        // addrmodeCount is the count of items that we used to form
        // an addressing mode.  The maximum value is 4 when we have
        // all of these:   { base, idx, cns, mul }
        //
        unsigned addrmodeCount = 0;
        if (base)
        {
            *pCostEx += base->gtCostEx;
            *pCostSz += base->gtCostSz;
            addrmodeCount++;
        }

        if (idx)
        {
            *pCostEx += idx->gtCostEx;
            *pCostSz += idx->gtCostSz;
            addrmodeCount++;
        }

        if (cns)
        {
            // An offset that fits in a signed byte encodes as a 1-byte
            // displacement; otherwise a full 4-byte displacement is needed.
            if (((signed char)cns) == ((int)cns))
            {
                *pCostSz += 1;
            }
            else
            {
                *pCostSz += 4;
            }
            addrmodeCount++;
        }
        if (mul)
        {
            addrmodeCount++;
        }
        // When we form a complex addressing mode we can reduce the costs
        // associated with the interior GT_ADD and GT_LSH nodes:
        //
        //                      GT_ADD      -- reduce this interior GT_ADD by (-3,-3)
        //                      /   \       --
        //                  GT_ADD  'cns'   -- reduce this interior GT_ADD by (-2,-2)
        //                  /   \           --
        //               'base'  GT_LSH     -- reduce this interior GT_LSH by (-1,-1)
        //                      /   \       --
        //                   'idx'  'mul'
        //
        if (addrmodeCount > 1)
        {
            // The number of interior GT_ADD and GT_LSH will always be one less than addrmodeCount
            //
            addrmodeCount--;

            GenTree* tmp = addr;
            while (addrmodeCount > 0)
            {
                // decrement the gtCosts for the interior GT_ADD or GT_LSH node by the remaining
                // addrmodeCount
                tmp->SetCosts(tmp->gtCostEx - addrmodeCount, tmp->gtCostSz - addrmodeCount);

                addrmodeCount--;
                if (addrmodeCount > 0)
                {
                    // Step down to the next interior node, skipping the operand that
                    // is the 'base' (its own costs were already added above).
                    GenTree* tmpOp1 = tmp->gtOp.gtOp1;
                    GenTree* tmpOp2 = tmp->gtGetOp2();
                    assert(tmpOp2 != nullptr);

                    if ((tmpOp1 != base) && (tmpOp1->OperGet() == GT_ADD))
                    {
                        tmp = tmpOp1;
                    }
                    else if (tmpOp2->OperGet() == GT_LSH)
                    {
                        tmp = tmpOp2;
                    }
                    else if (tmpOp1->OperGet() == GT_LSH)
                    {
                        tmp = tmpOp1;
                    }
                    else if (tmpOp2->OperGet() == GT_ADD)
                    {
                        tmp = tmpOp2;
                    }
                    else
                    {
                        // We can very rarely encounter a tree that has a GT_COMMA node
                        // that is difficult to walk, so we just early out without decrementing.
                        addrmodeCount = 0;
                    }
                }
            }
        }
#elif defined _TARGET_ARM_
        if (base)
        {
            *pCostEx += base->gtCostEx;
            *pCostSz += base->gtCostSz;
            if ((base->gtOper == GT_LCL_VAR) && ((idx == NULL) || (cns == 0)))
            {
                *pCostSz -= 1;
            }
        }

        if (idx)
        {
            *pCostEx += idx->gtCostEx;
            *pCostSz += idx->gtCostSz;
            if (mul > 0)
            {
                *pCostSz += 2;
            }
        }

        if (cns)
        {
            if (cns >= 128) // small offsets fit into a 16-bit instruction
            {
                if (cns < 4096) // medium offsets require a 32-bit instruction
                {
                    if (!varTypeIsFloating(type))
                    {
                        *pCostSz += 2;
                    }
                }
                else
                {
                    *pCostEx += 2; // Very large offsets require movw/movt instructions
                    *pCostSz += 8;
                }
            }
        }
#elif defined _TARGET_ARM64_
        if (base)
        {
            *pCostEx += base->gtCostEx;
            *pCostSz += base->gtCostSz;
        }

        if (idx)
        {
            *pCostEx += idx->gtCostEx;
            *pCostSz += idx->gtCostSz;
        }

        if (cns != 0)
        {
            // Offsets beyond the scaled unsigned 12-bit immediate range need
            // an extra instruction to materialize the constant.
            if (cns >= (4096 * genTypeSize(type)))
            {
                *pCostEx += 1;
                *pCostSz += 4;
            }
        }
#else
#error "Unknown _TARGET_"
#endif

        assert(addr->gtOper == GT_ADD);
        assert(!addr->gtOverflow());
        assert(mul != 1);

        // If we have an addressing mode, we have one of:
        //   [base             + cns]
        //   [       idx * mul      ]  // mul >= 2, else we would use base instead of idx
        //   [       idx * mul + cns]  // mul >= 2, else we would use base instead of idx
        //   [base + idx * mul      ]  // mul can be 0, 2, 4, or 8
        //   [base + idx * mul + cns]  // mul can be 0, 2, 4, or 8
        // Note that mul == 0 is semantically equivalent to mul == 1.
        // Note that cns can be zero.
        CLANG_FORMAT_COMMENT_ANCHOR;

#if SCALED_ADDR_MODES
        assert((base != nullptr) || (idx != nullptr && mul >= 2));
#else
        assert(base != NULL);
#endif

        INDEBUG(GenTree* op1Save = addr);

        // Walk 'addr' identifying non-overflow ADDs that will be part of the address mode.
        // Note that we will be modifying 'op1' and 'op2' so that eventually they should
        // map to the base and index.
        GenTree* op1 = addr;
        GenTree* op2 = nullptr;
        gtWalkOp(&op1, &op2, base, false);

        // op1 and op2 are now descendants of the root GT_ADD of the addressing mode.
        assert(op1 != op1Save);
        assert(op2 != nullptr);

        // Walk the operands again (the third operand is unused in this case).
        // This time we will only consider adds with constant op2's, since
        // we have already found either a non-ADD op1 or a non-constant op2.
        gtWalkOp(&op1, &op2, nullptr, true);

#if defined(_TARGET_XARCH_)
        // For XARCH we will fold GT_ADDs in the op2 position into the addressing mode, so we call
        // gtWalkOp on both operands of the original GT_ADD.
        // This is not done for ARMARCH. Though the stated reason is that we don't try to create a
        // scaled index, in fact we actually do create them (even base + index*scale + offset).

        // At this point, 'op2' may itself be an ADD of a constant that should be folded
        // into the addressing mode.
        // Walk op2 looking for non-overflow GT_ADDs of constants.
        gtWalkOp(&op2, &op1, nullptr, true);
#endif // defined(_TARGET_XARCH_)

        // OK we are done walking the tree
        // Now assert that op1 and op2 correspond with base and idx
        // in one of the several acceptable ways.

        // Note that sometimes op1/op2 is equal to idx/base
        // and other times op1/op2 is a GT_COMMA node with
        // an effective value that is idx/base

        if (mul > 1)
        {
            // Scaled index: the shift (and any inner multiply) is folded into the
            // addressing mode, so mark those interior nodes as not CSE candidates.
            if ((op1 != base) && (op1->gtOper == GT_LSH))
            {
                op1->gtFlags |= GTF_ADDRMODE_NO_CSE;
                if (op1->gtOp.gtOp1->gtOper == GT_MUL)
                {
                    op1->gtOp.gtOp1->gtFlags |= GTF_ADDRMODE_NO_CSE;
                }
                assert((base == nullptr) || (op2 == base) || (op2->gtEffectiveVal() == base->gtEffectiveVal()) ||
                       (gtWalkOpEffectiveVal(op2) == gtWalkOpEffectiveVal(base)));
            }
            else
            {
                assert(op2);
                assert(op2->gtOper == GT_LSH || op2->gtOper == GT_MUL);
                op2->gtFlags |= GTF_ADDRMODE_NO_CSE;
                // We may have eliminated multiple shifts and multiplies in the addressing mode,
                // so navigate down through them to get to "idx".
                GenTree* op2op1 = op2->gtOp.gtOp1;
                while ((op2op1->gtOper == GT_LSH || op2op1->gtOper == GT_MUL) && op2op1 != idx)
                {
                    op2op1->gtFlags |= GTF_ADDRMODE_NO_CSE;
                    op2op1 = op2op1->gtOp.gtOp1;
                }
                assert(op1->gtEffectiveVal() == base);
                assert(op2op1 == idx);
            }
        }
        else
        {
            assert(mul == 0);

            if ((op1 == idx) || (op1->gtEffectiveVal() == idx))
            {
                if (idx != nullptr)
                {
                    if ((op1->gtOper == GT_MUL) || (op1->gtOper == GT_LSH))
                    {
                        if ((op1->gtOp.gtOp1->gtOper == GT_NOP) ||
                            (op1->gtOp.gtOp1->gtOper == GT_MUL && op1->gtOp.gtOp1->gtOp.gtOp1->gtOper == GT_NOP))
                        {
                            op1->gtFlags |= GTF_ADDRMODE_NO_CSE;
                            if (op1->gtOp.gtOp1->gtOper == GT_MUL)
                            {
                                op1->gtOp.gtOp1->gtFlags |= GTF_ADDRMODE_NO_CSE;
                            }
                        }
                    }
                }
                assert((op2 == base) || (op2->gtEffectiveVal() == base));
            }
            else if ((op1 == base) || (op1->gtEffectiveVal() == base))
            {
                if (idx != nullptr)
                {
                    assert(op2);
                    if ((op2->gtOper == GT_MUL) || (op2->gtOper == GT_LSH))
                    {
                        if ((op2->gtOp.gtOp1->gtOper == GT_NOP) ||
                            (op2->gtOp.gtOp1->gtOper == GT_MUL && op2->gtOp.gtOp1->gtOp.gtOp1->gtOper == GT_NOP))
                        {
                            op2->gtFlags |= GTF_ADDRMODE_NO_CSE;
                            if (op2->gtOp.gtOp1->gtOper == GT_MUL)
                            {
                                op2->gtOp.gtOp1->gtFlags |= GTF_ADDRMODE_NO_CSE;
                            }
                        }
                    }
                    assert((op2 == idx) || (op2->gtEffectiveVal() == idx));
                }
            }
            else
            {
                // op1 isn't base or idx. Is this possible? Or should there be an assert?
            }
        }
        return true;

    } // end  if  (genCreateAddrMode(...))
    return false;
}
3047
3048 /*****************************************************************************
3049  *
3050  *  Given a tree, figure out the order in which its sub-operands should be
3051  *  evaluated. If the second operand of a binary operator is more expensive
3052  *  than the first operand, then try to swap the operand trees. Updates the
3053  *  GTF_REVERSE_OPS bit if necessary in this case.
3054  *
3055  *  Returns the Sethi 'complexity' estimate for this tree (the higher
 *  the number, the greater the tree's resource requirements).
3057  *
3058  *  This function sets:
3059  *      1. gtCostEx to the execution complexity estimate
3060  *      2. gtCostSz to the code size estimate
3061  *      3. Sometimes sets GTF_ADDRMODE_NO_CSE on nodes in the tree.
3062  *      4. DEBUG-only: clears GTF_DEBUG_NODE_MORPHED.
3063  */
3064
3065 #ifdef _PREFAST_
3066 #pragma warning(push)
3067 #pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
3068 #endif
3069 unsigned Compiler::gtSetEvalOrder(GenTree* tree)
3070 {
3071     assert(tree);
3072     assert(tree->gtOper != GT_STMT);
3073
3074 #ifdef DEBUG
3075     /* Clear the GTF_DEBUG_NODE_MORPHED flag as well */
3076     tree->gtDebugFlags &= ~GTF_DEBUG_NODE_MORPHED;
3077 #endif
3078
3079     /* Is this a FP value? */
3080
3081     bool isflt = varTypeIsFloating(tree->TypeGet());
3082
3083     /* Figure out what kind of a node we have */
3084
3085     const genTreeOps oper = tree->OperGet();
3086     const unsigned   kind = tree->OperKind();
3087
3088     /* Assume no fixed registers will be trashed */
3089
3090     unsigned level;
3091     int      costEx;
3092     int      costSz;
3093
3094 #ifdef DEBUG
3095     costEx = -1;
3096     costSz = -1;
3097 #endif
3098
3099     /* Is this a constant or a leaf node? */
3100
3101     if (kind & (GTK_LEAF | GTK_CONST))
3102     {
3103         switch (oper)
3104         {
3105 #ifdef _TARGET_ARM_
3106             case GT_CNS_LNG:
3107                 costSz = 9;
3108                 costEx = 4;
3109                 goto COMMON_CNS;
3110
3111             case GT_CNS_STR:
3112                 // Uses movw/movt
3113                 costSz = 7;
3114                 costEx = 3;
3115                 goto COMMON_CNS;
3116
3117             case GT_CNS_INT:
3118             {
3119                 // If the constant is a handle then it will need to have a relocation
3120                 //  applied to it.
3121                 // Any constant that requires a reloc must use the movw/movt sequence
3122                 //
3123                 GenTreeIntConCommon* con = tree->AsIntConCommon();
3124
3125                 if (con->ImmedValNeedsReloc(this) ||
3126                     !codeGen->validImmForInstr(INS_mov, (target_ssize_t)tree->gtIntCon.gtIconVal))
3127                 {
3128                     // Uses movw/movt
3129                     costSz = 7;
3130                     costEx = 3;
3131                 }
3132                 else if (((unsigned)tree->gtIntCon.gtIconVal) <= 0x00ff)
3133                 {
3134                     // mov  Rd, <const8>
3135                     costSz = 1;
3136                     costEx = 1;
3137                 }
3138                 else
3139                 {
3140                     // Uses movw/mvn
3141                     costSz = 3;
3142                     costEx = 1;
3143                 }
3144                 goto COMMON_CNS;
3145             }
3146
3147 #elif defined _TARGET_XARCH_
3148
3149             case GT_CNS_LNG:
3150                 costSz = 10;
3151                 costEx = 3;
3152                 goto COMMON_CNS;
3153
3154             case GT_CNS_STR:
3155                 costSz = 4;
3156                 costEx = 1;
3157                 goto COMMON_CNS;
3158
3159             case GT_CNS_INT:
3160             {
3161                 // If the constant is a handle then it will need to have a relocation
3162                 //  applied to it.
3163                 //
3164                 GenTreeIntConCommon* con = tree->AsIntConCommon();
3165
3166                 bool iconNeedsReloc = con->ImmedValNeedsReloc(this);
3167
3168                 if (!iconNeedsReloc && con->FitsInI8())
3169                 {
3170                     costSz = 1;
3171                     costEx = 1;
3172                 }
3173 #if defined(_TARGET_AMD64_)
3174                 else if (iconNeedsReloc || !con->FitsInI32())
3175                 {
3176                     costSz = 10;
3177                     costEx = 3;
3178                 }
3179 #endif // _TARGET_AMD64_
3180                 else
3181                 {
3182                     costSz = 4;
3183                     costEx = 1;
3184                 }
3185                 goto COMMON_CNS;
3186             }
3187
3188 #elif defined(_TARGET_ARM64_)
3189             case GT_CNS_LNG:
3190             case GT_CNS_STR:
3191             case GT_CNS_INT:
3192                 // TODO-ARM64-NYI: Need cost estimates.
3193                 costSz = 1;
3194                 costEx = 1;
3195                 goto COMMON_CNS;
3196
3197 #else
3198             case GT_CNS_LNG:
3199             case GT_CNS_STR:
3200             case GT_CNS_INT:
3201 #error "Unknown _TARGET_"
3202 #endif
3203
3204             COMMON_CNS:
3205                 /*
3206                     Note that some code below depends on constants always getting
3207                     moved to be the second operand of a binary operator. This is
3208                     easily accomplished by giving constants a level of 0, which
3209                     we do on the next line. If you ever decide to change this, be
3210                     aware that unless you make other arrangements for integer
3211                     constants to be moved, stuff will break.
3212                  */
3213
3214                 level = 0;
3215                 break;
3216
3217             case GT_CNS_DBL:
3218                 level = 0;
3219                 /* We use fldz and fld1 to load 0.0 and 1.0, but all other  */
3220                 /* floating point constants are loaded using an indirection */
3221                 if ((*((__int64*)&(tree->gtDblCon.gtDconVal)) == 0) ||
3222                     (*((__int64*)&(tree->gtDblCon.gtDconVal)) == I64(0x3ff0000000000000)))
3223                 {
3224                     costEx = 1;
3225                     costSz = 1;
3226                 }
3227                 else
3228                 {
3229                     costEx = IND_COST_EX;
3230                     costSz = 4;
3231                 }
3232                 break;
3233
3234             case GT_LCL_VAR:
3235                 level = 1;
3236                 if (gtIsLikelyRegVar(tree))
3237                 {
3238                     costEx = 1;
3239                     costSz = 1;
3240                     /* Sign-extend and zero-extend are more expensive to load */
3241                     if (lvaTable[tree->gtLclVar.gtLclNum].lvNormalizeOnLoad())
3242                     {
3243                         costEx += 1;
3244                         costSz += 1;
3245                     }
3246                 }
3247                 else
3248                 {
3249                     costEx = IND_COST_EX;
3250                     costSz = 2;
3251                     /* Sign-extend and zero-extend are more expensive to load */
3252                     if (varTypeIsSmall(tree->TypeGet()))
3253                     {
3254                         costEx += 1;
3255                         costSz += 1;
3256                     }
3257                 }
3258 #if defined(_TARGET_AMD64_)
3259                 // increase costSz for floating point locals
3260                 if (isflt)
3261                 {
3262                     costSz += 1;
3263                     if (!gtIsLikelyRegVar(tree))
3264                     {
3265                         costSz += 1;
3266                     }
3267                 }
3268 #endif
3269                 break;
3270
3271             case GT_CLS_VAR:
3272 #ifdef _TARGET_ARM_
3273                 // We generate movw/movt/ldr
3274                 level  = 1;
3275                 costEx = 3 + IND_COST_EX; // 6
3276                 costSz = 4 + 4 + 2;       // 10
3277                 break;
3278 #endif
3279             case GT_LCL_FLD:
3280                 level  = 1;
3281                 costEx = IND_COST_EX;
3282                 costSz = 4;
3283                 if (varTypeIsSmall(tree->TypeGet()))
3284                 {
3285                     costEx += 1;
3286                     costSz += 1;
3287                 }
3288                 break;
3289
3290             case GT_PHI_ARG:
3291             case GT_ARGPLACE:
3292                 level  = 0;
3293                 costEx = 0;
3294                 costSz = 0;
3295                 break;
3296
3297             default:
3298                 level  = 1;
3299                 costEx = 1;
3300                 costSz = 1;
3301                 break;
3302         }
3303         goto DONE;
3304     }
3305
3306     /* Is it a 'simple' unary/binary operator? */
3307
3308     if (kind & GTK_SMPOP)
3309     {
3310         int      lvlb; // preference for op2
3311         unsigned lvl2; // scratch variable
3312
3313         GenTree* op1 = tree->gtOp.gtOp1;
3314         GenTree* op2 = tree->gtGetOp2IfPresent();
3315
3316         costEx = 0;
3317         costSz = 0;
3318
3319         if (tree->OperIsAddrMode())
3320         {
3321             if (op1 == nullptr)
3322             {
3323                 op1 = op2;
3324                 op2 = nullptr;
3325             }
3326         }
3327
3328         /* Check for a nilary operator */
3329
3330         if (op1 == nullptr)
3331         {
3332             assert(op2 == nullptr);
3333
3334             level = 0;
3335
3336             goto DONE;
3337         }
3338
3339         /* Is this a unary operator? */
3340
3341         if (op2 == nullptr)
3342         {
3343             /* Process the operand of the operator */
3344
3345             /* Most Unary ops have costEx of 1 */
3346             costEx = 1;
3347             costSz = 1;
3348
3349             level = gtSetEvalOrder(op1);
3350
3351             /* Special handling for some operators */
3352
3353             switch (oper)
3354             {
3355                 case GT_JTRUE:
3356                     costEx = 2;
3357                     costSz = 2;
3358                     break;
3359
3360                 case GT_SWITCH:
3361                     costEx = 10;
3362                     costSz = 5;
3363                     break;
3364
3365                 case GT_CAST:
3366 #if defined(_TARGET_ARM_)
3367                     costEx = 1;
3368                     costSz = 1;
3369                     if (isflt || varTypeIsFloating(op1->TypeGet()))
3370                     {
3371                         costEx = 3;
3372                         costSz = 4;
3373                     }
3374 #elif defined(_TARGET_ARM64_)
3375                     costEx = 1;
3376                     costSz = 2;
3377                     if (isflt || varTypeIsFloating(op1->TypeGet()))
3378                     {
3379                         costEx = 2;
3380                         costSz = 4;
3381                     }
3382 #elif defined(_TARGET_XARCH_)
3383                     costEx = 1;
3384                     costSz = 2;
3385
3386                     if (isflt || varTypeIsFloating(op1->TypeGet()))
3387                     {
3388                         /* cast involving floats always go through memory */
3389                         costEx = IND_COST_EX * 2;
3390                         costSz = 6;
3391                     }
3392 #else
3393 #error "Unknown _TARGET_"
3394 #endif
3395
3396                     /* Overflow casts are a lot more expensive */
3397                     if (tree->gtOverflow())
3398                     {
3399                         costEx += 6;
3400                         costSz += 6;
3401                     }
3402
3403                     break;
3404
3405                 case GT_LIST:
3406                 case GT_FIELD_LIST:
3407                 case GT_NOP:
3408                     costEx = 0;
3409                     costSz = 0;
3410                     break;
3411
3412                 case GT_INTRINSIC:
3413                     // GT_INTRINSIC intrinsics Sin, Cos, Sqrt, Abs ... have higher costs.
3414                     // TODO: tune these costs target specific as some of these are
3415                     // target intrinsics and would cost less to generate code.
3416                     switch (tree->gtIntrinsic.gtIntrinsicId)
3417                     {
3418                         default:
3419                             assert(!"missing case for gtIntrinsicId");
3420                             costEx = 12;
3421                             costSz = 12;
3422                             break;
3423
3424                         case CORINFO_INTRINSIC_Sin:
3425                         case CORINFO_INTRINSIC_Cos:
3426                         case CORINFO_INTRINSIC_Sqrt:
3427                         case CORINFO_INTRINSIC_Cbrt:
3428                         case CORINFO_INTRINSIC_Cosh:
3429                         case CORINFO_INTRINSIC_Sinh:
3430                         case CORINFO_INTRINSIC_Tan:
3431                         case CORINFO_INTRINSIC_Tanh:
3432                         case CORINFO_INTRINSIC_Asin:
3433                         case CORINFO_INTRINSIC_Asinh:
3434                         case CORINFO_INTRINSIC_Acos:
3435                         case CORINFO_INTRINSIC_Acosh:
3436                         case CORINFO_INTRINSIC_Atan:
3437                         case CORINFO_INTRINSIC_Atanh:
3438                         case CORINFO_INTRINSIC_Atan2:
3439                         case CORINFO_INTRINSIC_Log10:
3440                         case CORINFO_INTRINSIC_Pow:
3441                         case CORINFO_INTRINSIC_Exp:
3442                         case CORINFO_INTRINSIC_Ceiling:
3443                         case CORINFO_INTRINSIC_Floor:
3444                         case CORINFO_INTRINSIC_Object_GetType:
3445                             // Giving intrinsics a large fixed execution cost is because we'd like to CSE
3446                             // them, even if they are implemented by calls. This is different from modeling
3447                             // user calls since we never CSE user calls.
3448                             costEx = 36;
3449                             costSz = 4;
3450                             break;
3451
3452                         case CORINFO_INTRINSIC_Abs:
3453                             costEx = 5;
3454                             costSz = 15;
3455                             break;
3456
3457                         case CORINFO_INTRINSIC_Round:
3458                             costEx = 3;
3459                             costSz = 4;
3460                             break;
3461                     }
3462                     level++;
3463                     break;
3464
3465                 case GT_NOT:
3466                 case GT_NEG:
3467                     // We need to ensure that -x is evaluated before x or else
3468                     // we get burned while adjusting genFPstkLevel in x*-x where
3469                     // the rhs x is the last use of the enregistered x.
3470                     //
3471                     // Even in the integer case we want to prefer to
3472                     // evaluate the side without the GT_NEG node, all other things
3473                     // being equal.  Also a GT_NOT requires a scratch register
3474
3475                     level++;
3476                     break;
3477
3478                 case GT_ADDR:
3479
3480                     costEx = 0;
3481                     costSz = 1;
3482
3483                     // If we have a GT_ADDR of an GT_IND we can just copy the costs from indOp1
3484                     if (op1->OperGet() == GT_IND)
3485                     {
3486                         GenTree* indOp1 = op1->gtOp.gtOp1;
3487                         costEx          = indOp1->gtCostEx;
3488                         costSz          = indOp1->gtCostSz;
3489                     }
3490                     break;
3491
3492                 case GT_ARR_LENGTH:
3493                     level++;
3494
3495                     /* Array Len should be the same as an indirections, which have a costEx of IND_COST_EX */
3496                     costEx = IND_COST_EX - 1;
3497                     costSz = 2;
3498                     break;
3499
3500                 case GT_MKREFANY:
3501                 case GT_OBJ:
3502                     // We estimate the cost of a GT_OBJ or GT_MKREFANY to be two loads (GT_INDs)
3503                     costEx = 2 * IND_COST_EX;
3504                     costSz = 2 * 2;
3505                     break;
3506
3507                 case GT_BOX:
3508                     // We estimate the cost of a GT_BOX to be two stores (GT_INDs)
3509                     costEx = 2 * IND_COST_EX;
3510                     costSz = 2 * 2;
3511                     break;
3512
3513                 case GT_BLK:
3514                 case GT_IND:
3515
3516                     /* An indirection should always have a non-zero level.
3517                      * Only constant leaf nodes have level 0.
3518                      */
3519
3520                     if (level == 0)
3521                     {
3522                         level = 1;
3523                     }
3524
3525                     /* Indirections have a costEx of IND_COST_EX */
3526                     costEx = IND_COST_EX;
3527                     costSz = 2;
3528
3529                     /* If we have to sign-extend or zero-extend, bump the cost */
3530                     if (varTypeIsSmall(tree->TypeGet()))
3531                     {
3532                         costEx += 1;
3533                         costSz += 1;
3534                     }
3535
3536                     if (isflt)
3537                     {
3538                         if (tree->TypeGet() == TYP_DOUBLE)
3539                         {
3540                             costEx += 1;
3541                         }
3542 #ifdef _TARGET_ARM_
3543                         costSz += 2;
3544 #endif // _TARGET_ARM_
3545                     }
3546
3547                     // Can we form an addressing mode with this indirection?
3548                     // TODO-CQ: Consider changing this to op1->gtEffectiveVal() to take into account
3549                     // addressing modes hidden under a comma node.
3550
3551                     if (op1->gtOper == GT_ADD)
3552                     {
3553                         // See if we can form a complex addressing mode.
3554
3555                         GenTree* addr = op1->gtEffectiveVal();
3556
3557                         bool doAddrMode = true;
3558                         // See if we can form a complex addressing mode.
3559                         // Always use an addrMode for an array index indirection.
3560                         // TODO-1stClassStructs: Always do this, but first make sure it's
3561                         // done in Lowering as well.
3562                         if ((tree->gtFlags & GTF_IND_ARR_INDEX) == 0)
3563                         {
3564                             if (tree->TypeGet() == TYP_STRUCT)
3565                             {
3566                                 doAddrMode = false;
3567                             }
3568                             else if (varTypeIsStruct(tree))
3569                             {
3570                                 // This is a heuristic attempting to match prior behavior when indirections
3571                                 // under a struct assignment would not be considered for addressing modes.
3572                                 if (compCurStmt != nullptr)
3573                                 {
3574                                     GenTree* expr = compCurStmt->gtStmt.gtStmtExpr;
3575                                     if ((expr->OperGet() == GT_ASG) &&
3576                                         ((expr->gtGetOp1() == tree) || (expr->gtGetOp2() == tree)))
3577                                     {
3578                                         doAddrMode = false;
3579                                     }
3580                                 }
3581                             }
3582                         }
3583                         if (doAddrMode && gtMarkAddrMode(addr, &costEx, &costSz, tree->TypeGet()))
3584                         {
3585                             goto DONE;
3586                         }
3587                     } // end if  (op1->gtOper == GT_ADD)
3588                     else if (gtIsLikelyRegVar(op1))
3589                     {
                        /* Indirection of an enregistered LCL_VAR, don't increase costEx/costSz */
3591                         goto DONE;
3592                     }
3593 #ifdef _TARGET_XARCH_
3594                     else if (op1->IsCnsIntOrI())
3595                     {
3596                         // Indirection of a CNS_INT, subtract 1 from costEx
3597                         // makes costEx 3 for x86 and 4 for amd64
3598                         //
3599                         costEx += (op1->gtCostEx - 1);
3600                         costSz += op1->gtCostSz;
3601                         goto DONE;
3602                     }
3603 #endif
3604                     break;
3605
3606                 default:
3607                     break;
3608             }
3609             costEx += op1->gtCostEx;
3610             costSz += op1->gtCostSz;
3611             goto DONE;
3612         }
3613
3614         /* Binary operator - check for certain special cases */
3615
3616         lvlb = 0;
3617
3618         /* Default Binary ops have a cost of 1,1 */
3619         costEx = 1;
3620         costSz = 1;
3621
3622 #ifdef _TARGET_ARM_
3623         if (isflt)
3624         {
3625             costSz += 2;
3626         }
3627 #endif
3628 #ifndef _TARGET_64BIT_
3629         if (varTypeIsLong(op1->TypeGet()))
3630         {
3631             /* Operations on longs are more expensive */
3632             costEx += 3;
3633             costSz += 3;
3634         }
3635 #endif
3636         switch (oper)
3637         {
3638             case GT_MOD:
3639             case GT_UMOD:
3640
3641                 /* Modulo by a power of 2 is easy */
3642
3643                 if (op2->IsCnsIntOrI())
3644                 {
3645                     size_t ival = op2->gtIntConCommon.IconValue();
3646
3647                     if (ival > 0 && ival == genFindLowestBit(ival))
3648                     {
3649                         break;
3650                     }
3651                 }
3652
3653                 __fallthrough;
3654
3655             case GT_DIV:
3656             case GT_UDIV:
3657
3658                 if (isflt)
3659                 {
3660                     /* fp division is very expensive to execute */
3661                     costEx = 36; // TYP_DOUBLE
3662                     costSz += 3;
3663                 }
3664                 else
3665                 {
3666                     /* integer division is also very expensive */
3667                     costEx = 20;
3668                     costSz += 2;
3669
                    // Encourage the first operand to be evaluated (into EAX/EDX) first
3671                     lvlb -= 3;
3672                 }
3673                 break;
3674
3675             case GT_MUL:
3676
3677                 if (isflt)
3678                 {
3679                     /* FP multiplication instructions are more expensive */
3680                     costEx += 4;
3681                     costSz += 3;
3682                 }
3683                 else
3684                 {
3685                     /* Integer multiplication instructions are more expensive */
3686                     costEx += 3;
3687                     costSz += 2;
3688
3689                     if (tree->gtOverflow())
3690                     {
3691                         /* Overflow check are more expensive */
3692                         costEx += 3;
3693                         costSz += 3;
3694                     }
3695
3696 #ifdef _TARGET_X86_
3697                     if ((tree->gtType == TYP_LONG) || tree->gtOverflow())
3698                     {
3699                         /* We use imulEAX for TYP_LONG and overflow multiplications */
3700                         // Encourage the first operand to be evaluated (into EAX/EDX) first */
3701                         lvlb -= 4;
3702
3703                         /* The 64-bit imul instruction costs more */
3704                         costEx += 4;
3705                     }
3706 #endif //  _TARGET_X86_
3707                 }
3708                 break;
3709
3710             case GT_ADD:
3711             case GT_SUB:
3712                 if (isflt)
3713                 {
3714                     /* FP instructions are a bit more expensive */
3715                     costEx += 4;
3716                     costSz += 3;
3717                     break;
3718                 }
3719
3720                 /* Overflow check are more expensive */
3721                 if (tree->gtOverflow())
3722                 {
3723                     costEx += 3;
3724                     costSz += 3;
3725                 }
3726                 break;
3727
3728             case GT_COMMA:
3729
3730                 /* Comma tosses the result of the left operand */
3731                 gtSetEvalOrder(op1);
3732                 level = gtSetEvalOrder(op2);
3733
3734                 /* GT_COMMA cost is the sum of op1 and op2 costs */
3735                 costEx = (op1->gtCostEx + op2->gtCostEx);
3736                 costSz = (op1->gtCostSz + op2->gtCostSz);
3737
3738                 goto DONE;
3739
3740             case GT_COLON:
3741
3742                 level = gtSetEvalOrder(op1);
3743                 lvl2  = gtSetEvalOrder(op2);
3744
3745                 if (level < lvl2)
3746                 {
3747                     level = lvl2;
3748                 }
3749                 else if (level == lvl2)
3750                 {
3751                     level += 1;
3752                 }
3753
3754                 costEx = op1->gtCostEx + op2->gtCostEx;
3755                 costSz = op1->gtCostSz + op2->gtCostSz;
3756
3757                 goto DONE;
3758
3759             case GT_LIST:
3760             case GT_FIELD_LIST:
3761             {
3762                 const bool isListCallArgs = false;
3763                 const bool callArgsInRegs = false;
3764                 return gtSetListOrder(tree, isListCallArgs, callArgsInRegs);
3765             }
3766
3767             case GT_ASG:
3768                 /* Assignments need a bit of special handling */
3769                 /* Process the target */
3770                 level = gtSetEvalOrder(op1);
3771
3772                 if (gtIsLikelyRegVar(op1))
3773                 {
3774                     assert(lvlb == 0);
3775                     lvl2 = gtSetEvalOrder(op2);
3776
3777                     /* Assignment to an enregistered LCL_VAR */
3778                     costEx = op2->gtCostEx;
3779                     costSz = max(3, op2->gtCostSz); // 3 is an estimate for a reg-reg assignment
3780                     goto DONE_OP1_AFTER_COST;
3781                 }
3782                 goto DONE_OP1;
3783
3784             default:
3785                 break;
3786         }
3787
3788         /* Process the sub-operands */
3789
3790         level = gtSetEvalOrder(op1);
3791         if (lvlb < 0)
3792         {
3793             level -= lvlb; // lvlb is negative, so this increases level
3794             lvlb = 0;
3795         }
3796
3797     DONE_OP1:
3798         assert(lvlb >= 0);
3799         lvl2 = gtSetEvalOrder(op2) + lvlb;
3800
3801         costEx += (op1->gtCostEx + op2->gtCostEx);
3802         costSz += (op1->gtCostSz + op2->gtCostSz);
3803
3804     DONE_OP1_AFTER_COST:
3805
3806         bool bReverseInAssignment = false;
3807         if (oper == GT_ASG)
3808         {
3809             GenTree* op1Val = op1;
3810
3811             // Skip over the GT_IND/GT_ADDR tree (if one exists)
3812             //
3813             if ((op1->gtOper == GT_IND) && (op1->gtOp.gtOp1->gtOper == GT_ADDR))
3814             {
3815                 op1Val = op1->gtOp.gtOp1->gtOp.gtOp1;
3816             }
3817
3818             switch (op1Val->gtOper)
3819             {
3820                 case GT_IND:
3821                 case GT_BLK:
3822                 case GT_OBJ:
3823                 case GT_DYN_BLK:
3824
3825                     // In an indirection, the destination address is evaluated prior to the source.
3826                     // If we have any side effects on the target indirection,
3827                     // we have to evaluate op1 first.
3828                     // However, if the LHS is a lclVar address, SSA relies on using evaluation order for its
3829                     // renaming, and therefore the RHS must be evaluated first.
3830                     // If we have an assignment involving a lclVar address, the LHS may be marked as having
3831                     // side-effects.
3832                     // However the side-effects won't require that we evaluate the LHS address first:
3833                     // - The GTF_GLOB_REF might have been conservatively set on a FIELD of a local.
3834                     // - The local might be address-exposed, but that side-effect happens at the actual assignment (not
3835                     //   when its address is "evaluated") so it doesn't change the side effect to "evaluate" the address
3836                     //   after the RHS (note that in this case it won't be renamed by SSA anyway, but the reordering is
3837                     //   safe).
3838                     //
3839                     if (op1Val->AsIndir()->Addr()->IsLocalAddrExpr())
3840                     {
3841                         bReverseInAssignment = true;
3842                         tree->gtFlags |= GTF_REVERSE_OPS;
3843                         break;
3844                     }
3845                     if (op1Val->AsIndir()->Addr()->gtFlags & GTF_ALL_EFFECT)
3846                     {
3847                         break;
3848                     }
3849
3850                     // In case op2 assigns to a local var that is used in op1Val, we have to evaluate op1Val first.
3851                     if (op2->gtFlags & GTF_ASG)
3852                     {
3853                         break;
3854                     }
3855
3856                     // If op2 is simple then evaluate op1 first
3857
3858                     if (op2->OperKind() & GTK_LEAF)
3859                     {
3860                         break;
3861                     }
3862
3863                 // fall through and set GTF_REVERSE_OPS
3864
3865                 case GT_LCL_VAR:
3866                 case GT_LCL_FLD:
3867
3868                     // We evaluate op2 before op1
3869                     bReverseInAssignment = true;
3870                     tree->gtFlags |= GTF_REVERSE_OPS;
3871                     break;
3872
3873                 default:
3874                     break;
3875             }
3876         }
3877         else if (kind & GTK_RELOP)
3878         {
3879             /* Float compares remove both operands from the FP stack */
3880             /* Also FP comparison uses EAX for flags */
3881
3882             if (varTypeIsFloating(op1->TypeGet()))
3883             {
3884                 level++;
3885                 lvl2++;
3886             }
3887             if ((tree->gtFlags & GTF_RELOP_JMP_USED) == 0)
3888             {
3889                 /* Using a setcc instruction is more expensive */
3890                 costEx += 3;
3891             }
3892         }
3893
3894         /* Check for other interesting cases */
3895
3896         switch (oper)
3897         {
3898             case GT_LSH:
3899             case GT_RSH:
3900             case GT_RSZ:
3901             case GT_ROL:
3902             case GT_ROR:
3903                 /* Variable sized shifts are more expensive and use REG_SHIFT */
3904
3905                 if (!op2->IsCnsIntOrI())
3906                 {
3907                     costEx += 3;
3908 #ifndef _TARGET_64BIT_
3909                     // Variable sized LONG shifts require the use of a helper call
3910                     //
3911                     if (tree->gtType == TYP_LONG)
3912                     {
3913                         level += 5;
3914                         lvl2 += 5;
3915                         costEx += 3 * IND_COST_EX;
3916                         costSz += 4;
3917                     }
3918 #endif // !_TARGET_64BIT_
3919                 }
3920                 break;
3921
3922             case GT_INTRINSIC:
3923
3924                 switch (tree->gtIntrinsic.gtIntrinsicId)
3925                 {
3926                     case CORINFO_INTRINSIC_Atan2:
3927                     case CORINFO_INTRINSIC_Pow:
3928                         // These math intrinsics are actually implemented by user calls.
3929                         // Increase the Sethi 'complexity' by two to reflect the argument
3930                         // register requirement.
3931                         level += 2;
3932                         break;
3933                     default:
3934                         assert(!"Unknown binary GT_INTRINSIC operator");
3935                         break;
3936                 }
3937
3938                 break;
3939
3940             default:
3941                 break;
3942         }
3943
        /* We need to evaluate constants later as many places in codegen
3945            can't handle op1 being a constant. This is normally naturally
3946            enforced as constants have the least level of 0. However,
3947            sometimes we end up with a tree like "cns1 < nop(cns2)". In
3948            such cases, both sides have a level of 0. So encourage constants
3949            to be evaluated last in such cases */
3950
3951         if ((level == 0) && (level == lvl2) && (op1->OperKind() & GTK_CONST) &&
3952             (tree->OperIsCommutative() || tree->OperIsCompare()))
3953         {
3954             lvl2++;
3955         }
3956
3957         /* We try to swap operands if the second one is more expensive */
3958         bool     tryToSwap;
3959         GenTree* opA;
3960         GenTree* opB;
3961
3962         if (tree->gtFlags & GTF_REVERSE_OPS)
3963         {
3964             opA = op2;
3965             opB = op1;
3966         }
3967         else
3968         {
3969             opA = op1;
3970             opB = op2;
3971         }
3972
3973         if (fgOrder == FGOrderLinear)
3974         {
3975             // Don't swap anything if we're in linear order; we're really just interested in the costs.
3976             tryToSwap = false;
3977         }
3978         else if (bReverseInAssignment)
3979         {
3980             // Assignments are special, we want the reverseops flags
3981             // so if possible it was set above.
3982             tryToSwap = false;
3983         }
3984         else if ((oper == GT_INTRINSIC) && IsIntrinsicImplementedByUserCall(tree->AsIntrinsic()->gtIntrinsicId))
3985         {
3986             // We do not swap operand execution order for intrinsics that are implemented by user calls
3987             // because of trickiness around ensuring the execution order does not change during rationalization.
3988             tryToSwap = false;
3989         }
3990         else
3991         {
3992             if (tree->gtFlags & GTF_REVERSE_OPS)
3993             {
3994                 tryToSwap = (level > lvl2);
3995             }
3996             else
3997             {
3998                 tryToSwap = (level < lvl2);
3999             }
4000
4001             // Try to force extra swapping when in the stress mode:
4002             if (compStressCompile(STRESS_REVERSE_FLAG, 60) && ((tree->gtFlags & GTF_REVERSE_OPS) == 0) &&
4003                 ((op2->OperKind() & GTK_CONST) == 0))
4004             {
4005                 tryToSwap = true;
4006             }
4007         }
4008
4009         if (tryToSwap)
4010         {
4011             bool canSwap = gtCanSwapOrder(opA, opB);
4012
4013             if (canSwap)
4014             {
4015                 /* Can we swap the order by commuting the operands? */
4016
4017                 switch (oper)
4018                 {
4019                     case GT_EQ:
4020                     case GT_NE:
4021                     case GT_LT:
4022                     case GT_LE:
4023                     case GT_GE:
4024                     case GT_GT:
4025                         if (GenTree::SwapRelop(oper) != oper)
4026                         {
4027                             tree->SetOper(GenTree::SwapRelop(oper), GenTree::PRESERVE_VN);
4028                         }
4029
4030                         __fallthrough;
4031
4032                     case GT_ADD:
4033                     case GT_MUL:
4034
4035                     case GT_OR:
4036                     case GT_XOR:
4037                     case GT_AND:
4038
4039                         /* Swap the operands */
4040
4041                         tree->gtOp.gtOp1 = op2;
4042                         tree->gtOp.gtOp2 = op1;
4043                         break;
4044
4045                     case GT_QMARK:
4046                     case GT_COLON:
4047                     case GT_MKREFANY:
4048                         break;
4049
4050                     case GT_LIST:
4051                     case GT_FIELD_LIST:
4052                         break;
4053
4054                     default:
4055
4056                         /* Mark the operand's evaluation order to be swapped */
4057                         if (tree->gtFlags & GTF_REVERSE_OPS)
4058                         {
4059                             tree->gtFlags &= ~GTF_REVERSE_OPS;
4060                         }
4061                         else
4062                         {
4063                             tree->gtFlags |= GTF_REVERSE_OPS;
4064                         }
4065
4066                         break;
4067                 }
4068             }
4069         }
4070
4071         /* Swap the level counts */
4072         if (tree->gtFlags & GTF_REVERSE_OPS)
4073         {
4074             unsigned tmpl;
4075
4076             tmpl  = level;
4077             level = lvl2;
4078             lvl2  = tmpl;
4079         }
4080
4081         /* Compute the sethi number for this binary operator */
4082
4083         if (level < 1)
4084         {
4085             level = lvl2;
4086         }
4087         else if (level == lvl2)
4088         {
4089             level += 1;
4090         }
4091
4092         goto DONE;
4093     }
4094
4095     /* See what kind of a special operator we have here */
4096
4097     switch (oper)
4098     {
4099         unsigned lvl2; // Scratch variable
4100
4101         case GT_CALL:
4102
4103             assert(tree->gtFlags & GTF_CALL);
4104
4105             level  = 0;
4106             costEx = 5;
4107             costSz = 2;
4108
4109             /* Evaluate the 'this' argument, if present */
4110
4111             if (tree->gtCall.gtCallObjp)
4112             {
4113                 GenTree* thisVal = tree->gtCall.gtCallObjp;
4114
4115                 lvl2 = gtSetEvalOrder(thisVal);
4116                 if (level < lvl2)
4117                 {
4118                     level = lvl2;
4119                 }
4120                 costEx += thisVal->gtCostEx;
4121                 costSz += thisVal->gtCostSz + 1;
4122             }
4123
4124             /* Evaluate the arguments, right to left */
4125
4126             if (tree->gtCall.gtCallArgs)
4127             {
4128                 const bool isListCallArgs = true;
4129                 const bool callArgsInRegs = false;
4130                 lvl2                      = gtSetListOrder(tree->gtCall.gtCallArgs, isListCallArgs, callArgsInRegs);
4131                 if (level < lvl2)
4132                 {
4133                     level = lvl2;
4134                 }
4135                 costEx += tree->gtCall.gtCallArgs->gtCostEx;
4136                 costSz += tree->gtCall.gtCallArgs->gtCostSz;
4137             }
4138
4139             /* Evaluate the temp register arguments list
4140              * This is a "hidden" list and its only purpose is to
4141              * extend the life of temps until we make the call */
4142
4143             if (tree->gtCall.gtCallLateArgs)
4144             {
4145                 const bool isListCallArgs = true;
4146                 const bool callArgsInRegs = true;
4147                 lvl2                      = gtSetListOrder(tree->gtCall.gtCallLateArgs, isListCallArgs, callArgsInRegs);
4148                 if (level < lvl2)
4149                 {
4150                     level = lvl2;
4151                 }
4152                 costEx += tree->gtCall.gtCallLateArgs->gtCostEx;
4153                 costSz += tree->gtCall.gtCallLateArgs->gtCostSz;
4154             }
4155
4156             if (tree->gtCall.gtCallType == CT_INDIRECT)
4157             {
4158                 // pinvoke-calli cookie is a constant, or constant indirection
4159                 assert(tree->gtCall.gtCallCookie == nullptr || tree->gtCall.gtCallCookie->gtOper == GT_CNS_INT ||
4160                        tree->gtCall.gtCallCookie->gtOper == GT_IND);
4161
4162                 GenTree* indirect = tree->gtCall.gtCallAddr;
4163
4164                 lvl2 = gtSetEvalOrder(indirect);
4165                 if (level < lvl2)
4166                 {
4167                     level = lvl2;
4168                 }
4169                 costEx += indirect->gtCostEx + IND_COST_EX;
4170                 costSz += indirect->gtCostSz;
4171             }
4172             else
4173             {
4174 #ifdef _TARGET_ARM_
4175                 if (tree->gtCall.IsVirtualStub())
4176                 {
4177                     // We generate movw/movt/ldr
4178                     costEx += (1 + IND_COST_EX);
4179                     costSz += 8;
4180                     if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT)
4181                     {
4182                         // Must use R12 for the ldr target -- REG_JUMP_THUNK_PARAM
4183                         costSz += 2;
4184                     }
4185                 }
4186                 else if (!opts.jitFlags->IsSet(JitFlags::JIT_FLAG_PREJIT))
4187                 {
4188                     costEx += 2;
4189                     costSz += 6;
4190                 }
4191                 costSz += 2;
4192 #endif
4193 #ifdef _TARGET_XARCH_
4194                 costSz += 3;
4195 #endif
4196             }
4197
4198             level += 1;
4199
4200             /* Virtual calls are a bit more expensive */
4201             if (tree->gtCall.IsVirtual())
4202             {
4203                 costEx += 2 * IND_COST_EX;
4204                 costSz += 2;
4205             }
4206
4207             level += 5;
4208             costEx += 3 * IND_COST_EX;
4209             break;
4210
4211         case GT_ARR_ELEM:
4212
4213             level  = gtSetEvalOrder(tree->gtArrElem.gtArrObj);
4214             costEx = tree->gtArrElem.gtArrObj->gtCostEx;
4215             costSz = tree->gtArrElem.gtArrObj->gtCostSz;
4216
4217             unsigned dim;
4218             for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
4219             {
4220                 lvl2 = gtSetEvalOrder(tree->gtArrElem.gtArrInds[dim]);
4221                 if (level < lvl2)
4222                 {
4223                     level = lvl2;
4224                 }
4225                 costEx += tree->gtArrElem.gtArrInds[dim]->gtCostEx;
4226                 costSz += tree->gtArrElem.gtArrInds[dim]->gtCostSz;
4227             }
4228
4229             level += tree->gtArrElem.gtArrRank;
4230             costEx += 2 + (tree->gtArrElem.gtArrRank * (IND_COST_EX + 1));
4231             costSz += 2 + (tree->gtArrElem.gtArrRank * 2);
4232             break;
4233
4234         case GT_ARR_OFFSET:
4235             level  = gtSetEvalOrder(tree->gtArrOffs.gtOffset);
4236             costEx = tree->gtArrOffs.gtOffset->gtCostEx;
4237             costSz = tree->gtArrOffs.gtOffset->gtCostSz;
4238             lvl2   = gtSetEvalOrder(tree->gtArrOffs.gtIndex);
4239             level  = max(level, lvl2);
4240             costEx += tree->gtArrOffs.gtIndex->gtCostEx;
4241             costSz += tree->gtArrOffs.gtIndex->gtCostSz;
4242             lvl2  = gtSetEvalOrder(tree->gtArrOffs.gtArrObj);
4243             level = max(level, lvl2);
4244             costEx += tree->gtArrOffs.gtArrObj->gtCostEx;
4245             costSz += tree->gtArrOffs.gtArrObj->gtCostSz;
4246             break;
4247
4248         case GT_CMPXCHG:
4249
4250             level  = gtSetEvalOrder(tree->gtCmpXchg.gtOpLocation);
4251             costSz = tree->gtCmpXchg.gtOpLocation->gtCostSz;
4252
4253             lvl2 = gtSetEvalOrder(tree->gtCmpXchg.gtOpValue);
4254             if (level < lvl2)
4255             {
4256                 level = lvl2;
4257             }
4258             costSz += tree->gtCmpXchg.gtOpValue->gtCostSz;
4259
4260             lvl2 = gtSetEvalOrder(tree->gtCmpXchg.gtOpComparand);
4261             if (level < lvl2)
4262             {
4263                 level = lvl2;
4264             }
4265             costSz += tree->gtCmpXchg.gtOpComparand->gtCostSz;
4266
4267             costEx = MAX_COST; // Seriously, what could be more expensive than lock cmpxchg?
4268             costSz += 5;       // size of lock cmpxchg [reg+C], reg
4269             break;
4270
4271         case GT_ARR_BOUNDS_CHECK:
4272 #ifdef FEATURE_SIMD
4273         case GT_SIMD_CHK:
4274 #endif // FEATURE_SIMD
4275 #ifdef FEATURE_HW_INTRINSICS
4276         case GT_HW_INTRINSIC_CHK:
4277 #endif // FEATURE_HW_INTRINSICS
4278
4279             costEx = 4; // cmp reg,reg and jae throw (not taken)
4280             costSz = 7; // jump to cold section
4281
4282             level = gtSetEvalOrder(tree->gtBoundsChk.gtIndex);
4283             costEx += tree->gtBoundsChk.gtIndex->gtCostEx;
4284             costSz += tree->gtBoundsChk.gtIndex->gtCostSz;
4285
4286             lvl2 = gtSetEvalOrder(tree->gtBoundsChk.gtArrLen);
4287             if (level < lvl2)
4288             {
4289                 level = lvl2;
4290             }
4291             costEx += tree->gtBoundsChk.gtArrLen->gtCostEx;
4292             costSz += tree->gtBoundsChk.gtArrLen->gtCostSz;
4293
4294             break;
4295
4296         case GT_STORE_DYN_BLK:
4297         case GT_DYN_BLK:
4298         {
4299             costEx = 0;
4300             costSz = 0;
4301             level  = 0;
4302             if (oper == GT_STORE_DYN_BLK)
4303             {
4304                 lvl2  = gtSetEvalOrder(tree->gtDynBlk.Data());
4305                 level = max(level, lvl2);
4306                 costEx += tree->gtDynBlk.Data()->gtCostEx;
4307                 costSz += tree->gtDynBlk.Data()->gtCostSz;
4308             }
4309             lvl2               = gtSetEvalOrder(tree->gtDynBlk.Addr());
4310             level              = max(level, lvl2);
4311             costEx             = tree->gtDynBlk.Addr()->gtCostEx;
4312             costSz             = tree->gtDynBlk.Addr()->gtCostSz;
4313             unsigned sizeLevel = gtSetEvalOrder(tree->gtDynBlk.gtDynamicSize);
4314
4315             // Determine whether the size node should be evaluated first.
4316             // We would like to do this if the sizeLevel is larger than the current level,
4317             // but we have to ensure that we obey ordering constraints.
4318             if (tree->AsDynBlk()->gtEvalSizeFirst != (level < sizeLevel))
4319             {
4320                 bool canChange = true;
4321
4322                 GenTree* sizeNode = tree->AsDynBlk()->gtDynamicSize;
4323                 GenTree* dst      = tree->AsDynBlk()->Addr();
4324                 GenTree* src      = tree->AsDynBlk()->Data();
4325
4326                 if (tree->AsDynBlk()->gtEvalSizeFirst)
4327                 {
4328                     canChange = gtCanSwapOrder(sizeNode, dst);
4329                     if (canChange && (src != nullptr))
4330                     {
4331                         canChange = gtCanSwapOrder(sizeNode, src);
4332                     }
4333                 }
4334                 else
4335                 {
4336                     canChange = gtCanSwapOrder(dst, sizeNode);
4337                     if (canChange && (src != nullptr))
4338                     {
4339                         gtCanSwapOrder(src, sizeNode);
4340                     }
4341                 }
4342                 if (canChange)
4343                 {
4344                     tree->AsDynBlk()->gtEvalSizeFirst = (level < sizeLevel);
4345                 }
4346             }
4347             level = max(level, sizeLevel);
4348             costEx += tree->gtDynBlk.gtDynamicSize->gtCostEx;
4349             costSz += tree->gtDynBlk.gtDynamicSize->gtCostSz;
4350         }
4351         break;
4352
4353         case GT_INDEX_ADDR:
4354             costEx = 6; // cmp reg,reg; jae throw; mov reg, [addrmode]  (not taken)
4355             costSz = 9; // jump to cold section
4356
4357             level = gtSetEvalOrder(tree->AsIndexAddr()->Index());
4358             costEx += tree->AsIndexAddr()->Index()->gtCostEx;
4359             costSz += tree->AsIndexAddr()->Index()->gtCostSz;
4360
4361             lvl2 = gtSetEvalOrder(tree->AsIndexAddr()->Arr());
4362             if (level < lvl2)
4363             {
4364                 level = lvl2;
4365             }
4366             costEx += tree->AsIndexAddr()->Arr()->gtCostEx;
4367             costSz += tree->AsIndexAddr()->Arr()->gtCostSz;
4368             break;
4369
4370         default:
4371 #ifdef DEBUG
4372             if (verbose)
4373             {
4374                 printf("unexpected operator in this tree:\n");
4375                 gtDispTree(tree);
4376             }
4377 #endif
4378             NO_WAY("unexpected operator");
4379     }
4380
4381 DONE:
4382
4383 #ifdef FEATURE_HW_INTRINSICS
4384     if ((oper == GT_HWIntrinsic) && (tree->gtGetOp1() == nullptr))
4385     {
4386         // We can have nullary HWIntrinsic nodes, and we must have non-zero cost.
4387         costEx = 1;
4388         costSz = 1;
4389     }
4390 #endif // FEATURE_HW_INTRINSICS
4391
4392     // Some path through this function must have set the costs.
4393     assert(costEx != -1);
4394     assert(costSz != -1);
4395
4396     tree->SetCosts(costEx, costSz);
4397
4398     return level;
4399 }
4400 #ifdef _PREFAST_
4401 #pragma warning(pop)
4402 #endif
4403
4404 /*****************************************************************************
4405  *
4406  *  If the given tree is an integer constant that can be used
4407  *  in a scaled index address mode as a multiplier (e.g. "[4*index]"), then return
4408  *  the scale factor: 2, 4, or 8. Otherwise, return 0. Note that we never return 1,
4409  *  to match the behavior of GetScaleIndexShf().
4410  */
4411
4412 unsigned GenTree::GetScaleIndexMul()
4413 {
4414     if (IsCnsIntOrI() && jitIsScaleIndexMul(gtIntConCommon.IconValue()) && gtIntConCommon.IconValue() != 1)
4415     {
4416         return (unsigned)gtIntConCommon.IconValue();
4417     }
4418
4419     return 0;
4420 }
4421
4422 /*****************************************************************************
4423  *
4424  *  If the given tree is the right-hand side of a left shift (that is,
4425  *  'y' in the tree 'x' << 'y'), and it is an integer constant that can be used
4426  *  in a scaled index address mode as a multiplier (e.g. "[4*index]"), then return
4427  *  the scale factor: 2, 4, or 8. Otherwise, return 0.
4428  */
4429
4430 unsigned GenTree::GetScaleIndexShf()
4431 {
4432     if (IsCnsIntOrI() && jitIsScaleIndexShift(gtIntConCommon.IconValue()))
4433     {
4434         return (unsigned)(1 << gtIntConCommon.IconValue());
4435     }
4436
4437     return 0;
4438 }
4439
4440 /*****************************************************************************
4441  *
4442  *  If the given tree is a scaled index (i.e. "op * 4" or "op << 2"), returns
4443  *  the multiplier: 2, 4, or 8; otherwise returns 0. Note that "1" is never
4444  *  returned.
4445  */
4446
4447 unsigned GenTree::GetScaledIndex()
4448 {
4449     // with (!opts.OptEnabled(CLFLG_CONSTANTFOLD) we can have
4450     //   CNS_INT * CNS_INT
4451     //
4452     if (gtOp.gtOp1->IsCnsIntOrI())
4453     {
4454         return 0;
4455     }
4456
4457     switch (gtOper)
4458     {
4459         case GT_MUL:
4460             return gtOp.gtOp2->GetScaleIndexMul();
4461
4462         case GT_LSH:
4463             return gtOp.gtOp2->GetScaleIndexShf();
4464
4465         default:
4466             assert(!"GenTree::GetScaledIndex() called with illegal gtOper");
4467             break;
4468     }
4469
4470     return 0;
4471 }
4472
4473 /*****************************************************************************
4474  *
4475  *  Returns true if "addr" is a GT_ADD node, at least one of whose arguments is an integer (<= 32 bit)
4476  *  constant.  If it returns true, it sets "*offset" to (one of the) constant value(s), and
4477  *  "*addr" to the other argument.
4478  */
4479
4480 bool GenTree::IsAddWithI32Const(GenTree** addr, int* offset)
4481 {
4482     if (OperGet() == GT_ADD)
4483     {
4484         if (gtOp.gtOp1->IsIntCnsFitsInI32())
4485         {
4486             *offset = (int)gtOp.gtOp1->gtIntCon.gtIconVal;
4487             *addr   = gtOp.gtOp2;
4488             return true;
4489         }
4490         else if (gtOp.gtOp2->IsIntCnsFitsInI32())
4491         {
4492             *offset = (int)gtOp.gtOp2->gtIntCon.gtIconVal;
4493             *addr   = gtOp.gtOp1;
4494             return true;
4495         }
4496     }
4497     // Otherwise...
4498     return false;
4499 }
4500
4501 //------------------------------------------------------------------------
4502 // gtGetChildPointer: If 'parent' is the parent of this node, return the pointer
4503 //    to the child node so that it can be modified; otherwise, return nullptr.
4504 //
4505 // Arguments:
4506 //    parent - The possible parent of this node
4507 //
4508 // Return Value:
4509 //    If "child" is a child of "parent", returns a pointer to the child node in the parent
4510 //    (i.e. a pointer to a GenTree pointer).
4511 //    Otherwise, returns nullptr.
4512 //
4513 // Assumptions:
4514 //    'parent' must be non-null
4515 //
4516 // Notes:
4517 //    When FEATURE_MULTIREG_ARGS is defined we can get here with GT_OBJ tree.
4518 //    This happens when we have a struct that is passed in multiple registers.
4519 //
4520 //    Also note that when UNIX_AMD64_ABI is defined the GT_LDOBJ
4521 //    later gets converted to a GT_FIELD_LIST with two GT_LCL_FLDs in Lower/LowerXArch.
4522 //
4523
GenTree** GenTree::gtGetChildPointer(GenTree* parent) const

{
    // Dispatch on the parent's operator; each case checks the parent's child
    // slots for 'this' and returns the address of the matching slot.
    switch (parent->OperGet())
    {
        default:
            // Simple (unary/binary) operators: check the two operand slots.
            if (!parent->OperIsSimple())
            {
                return nullptr;
            }
            if (this == parent->gtOp.gtOp1)
            {
                return &(parent->gtOp.gtOp1);
            }
            if (this == parent->gtOp.gtOp2)
            {
                return &(parent->gtOp.gtOp2);
            }
            break;

        case GT_CMPXCHG:
            // Compare-exchange has three operands: location, value, comparand.
            if (this == parent->gtCmpXchg.gtOpLocation)
            {
                return &(parent->gtCmpXchg.gtOpLocation);
            }
            if (this == parent->gtCmpXchg.gtOpValue)
            {
                return &(parent->gtCmpXchg.gtOpValue);
            }
            if (this == parent->gtCmpXchg.gtOpComparand)
            {
                return &(parent->gtCmpXchg.gtOpComparand);
            }
            break;

        case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
        case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
            // All bounds-check variants share the index/length operand layout.
            if (this == parent->gtBoundsChk.gtIndex)
            {
                return &(parent->gtBoundsChk.gtIndex);
            }
            if (this == parent->gtBoundsChk.gtArrLen)
            {
                return &(parent->gtBoundsChk.gtArrLen);
            }
            break;

        case GT_ARR_ELEM:
            // Multi-dimensional array element: the array object plus one index per rank.
            if (this == parent->gtArrElem.gtArrObj)
            {
                return &(parent->gtArrElem.gtArrObj);
            }
            for (int i = 0; i < GT_ARR_MAX_RANK; i++)
            {
                if (this == parent->gtArrElem.gtArrInds[i])
                {
                    return &(parent->gtArrElem.gtArrInds[i]);
                }
            }
            break;

        case GT_ARR_OFFSET:
            if (this == parent->gtArrOffs.gtOffset)
            {
                return &(parent->gtArrOffs.gtOffset);
            }
            if (this == parent->gtArrOffs.gtIndex)
            {
                return &(parent->gtArrOffs.gtIndex);
            }
            if (this == parent->gtArrOffs.gtArrObj)
            {
                return &(parent->gtArrOffs.gtArrObj);
            }
            break;

        case GT_STORE_DYN_BLK:
        case GT_DYN_BLK:
            // Dynamic block ops carry the usual two operands plus a size operand.
            if (this == parent->gtDynBlk.gtOp1)
            {
                return &(parent->gtDynBlk.gtOp1);
            }
            if (this == parent->gtDynBlk.gtOp2)
            {
                return &(parent->gtDynBlk.gtOp2);
            }
            if (this == parent->gtDynBlk.gtDynamicSize)
            {
                return &(parent->gtDynBlk.gtDynamicSize);
            }
            break;

        case GT_FIELD:
            if (this == parent->AsField()->gtFldObj)
            {
                return &(parent->AsField()->gtFldObj);
            }
            break;

        case GT_RET_EXPR:
            if (this == parent->gtRetExpr.gtInlineCandidate)
            {
                return &(parent->gtRetExpr.gtInlineCandidate);
            }
            break;

        case GT_CALL:
        {
            // Calls have several child slots; the cookie/address slots exist
            // only for indirect calls.
            GenTreeCall* call = parent->AsCall();

            if (this == call->gtCallObjp)
            {
                return &(call->gtCallObjp);
            }
            if (this == call->gtCallArgs)
            {
                return reinterpret_cast<GenTree**>(&(call->gtCallArgs));
            }
            if (this == call->gtCallLateArgs)
            {
                return reinterpret_cast<GenTree**>(&(call->gtCallLateArgs));
            }
            if (this == call->gtControlExpr)
            {
                return &(call->gtControlExpr);
            }
            if (call->gtCallType == CT_INDIRECT)
            {
                if (this == call->gtCallCookie)
                {
                    return &(call->gtCallCookie);
                }
                if (this == call->gtCallAddr)
                {
                    return &(call->gtCallAddr);
                }
            }
        }
        break;

        case GT_STMT:
            noway_assert(!"Illegal node for gtGetChildPointer()");
            unreached();
    }

    return nullptr;
}
4676
//------------------------------------------------------------------------
// TryGetUse: Find the use edge in this node that points to 'def'.
//
// Arguments:
//    def - The node whose use edge is being looked for; must be non-null.
//    use - [out] On success, set to the address of the child pointer in
//          this node that points to 'def'; must be non-null.
//
// Return Value:
//    True if 'def' is a direct operand of this node (or, for list-carrying
//    nodes, an element of one of its operand lists); false otherwise.
//
bool GenTree::TryGetUse(GenTree* def, GenTree*** use)
{
    assert(def != nullptr);
    assert(use != nullptr);

    switch (OperGet())
    {
        // Leaf nodes: no operands, so 'def' cannot be used here.
        case GT_LCL_VAR:
        case GT_LCL_FLD:
        case GT_LCL_VAR_ADDR:
        case GT_LCL_FLD_ADDR:
        case GT_CATCH_ARG:
        case GT_LABEL:
        case GT_FTN_ADDR:
        case GT_RET_EXPR:
        case GT_CNS_INT:
        case GT_CNS_LNG:
        case GT_CNS_DBL:
        case GT_CNS_STR:
        case GT_MEMORYBARRIER:
        case GT_JMP:
        case GT_JCC:
        case GT_SETCC:
        case GT_NO_OP:
        case GT_START_NONGC:
        case GT_START_PREEMPTGC:
        case GT_PROF_HOOK:
#if !FEATURE_EH_FUNCLETS
        case GT_END_LFIN:
#endif // !FEATURE_EH_FUNCLETS
        case GT_PHI_ARG:
        case GT_JMPTABLE:
        case GT_CLS_VAR:
        case GT_CLS_VAR_ADDR:
        case GT_ARGPLACE:
        case GT_PHYSREG:
        case GT_EMITNOP:
        case GT_PINVOKE_PROLOG:
        case GT_PINVOKE_EPILOG:
        case GT_IL_OFFSET:
            return false;

        // Standard unary operators: the only possible use is gtOp1.
        case GT_STORE_LCL_VAR:
        case GT_STORE_LCL_FLD:
        case GT_NOT:
        case GT_NEG:
        case GT_COPY:
        case GT_RELOAD:
        case GT_ARR_LENGTH:
        case GT_CAST:
        case GT_BITCAST:
        case GT_CKFINITE:
        case GT_LCLHEAP:
        case GT_ADDR:
        case GT_IND:
        case GT_OBJ:
        case GT_BLK:
        case GT_BOX:
        case GT_ALLOCOBJ:
        case GT_RUNTIMELOOKUP:
        case GT_INIT_VAL:
        case GT_JTRUE:
        case GT_SWITCH:
        case GT_NULLCHECK:
        case GT_PUTARG_REG:
        case GT_PUTARG_STK:
        case GT_RETURNTRAP:
        case GT_NOP:
        case GT_RETURN:
        case GT_RETFILT:
        case GT_BSWAP:
        case GT_BSWAP16:
            if (def == this->AsUnOp()->gtOp1)
            {
                *use = &this->AsUnOp()->gtOp1;
                return true;
            }
            return false;

        // Variadic nodes: operands are carried in an arg list hanging off gtOp1.
        case GT_PHI:
            assert(this->AsUnOp()->gtOp1 != nullptr);
            return this->AsUnOp()->gtOp1->TryGetUseList(def, use);

        case GT_FIELD_LIST:
            return TryGetUseList(def, use);

#if FEATURE_ARG_SPLIT
        case GT_PUTARG_SPLIT:
            // The operand may itself be a field list; if so, search that list.
            if (this->AsUnOp()->gtOp1->gtOper == GT_FIELD_LIST)
            {
                return this->AsUnOp()->gtOp1->TryGetUseList(def, use);
            }
            if (def == this->AsUnOp()->gtOp1)
            {
                *use = &this->AsUnOp()->gtOp1;
                return true;
            }
            return false;
#endif // FEATURE_ARG_SPLIT

#ifdef FEATURE_SIMD
        case GT_SIMD:
            // SIMDIntrinsicInitN carries its operands as a list; all other SIMD
            // intrinsics behave as binary operators here.
            if (this->AsSIMD()->gtSIMDIntrinsicID == SIMDIntrinsicInitN)
            {
                assert(this->AsSIMD()->gtOp1 != nullptr);
                return this->AsSIMD()->gtOp1->TryGetUseList(def, use);
            }

            return TryGetUseBinOp(def, use);
#endif // FEATURE_SIMD

#ifdef FEATURE_HW_INTRINSICS
        case GT_HWIntrinsic:
            // HW intrinsics with more than two operands use a list in gtOp1.
            if ((this->AsHWIntrinsic()->gtOp1 != nullptr) && this->AsHWIntrinsic()->gtOp1->OperIsList())
            {
                return this->AsHWIntrinsic()->gtOp1->TryGetUseList(def, use);
            }

            return TryGetUseBinOp(def, use);
#endif // FEATURE_HW_INTRINSICS

        // Special nodes with bespoke operand layouts.
        case GT_CMPXCHG:
        {
            GenTreeCmpXchg* const cmpXchg = this->AsCmpXchg();
            if (def == cmpXchg->gtOpLocation)
            {
                *use = &cmpXchg->gtOpLocation;
                return true;
            }
            if (def == cmpXchg->gtOpValue)
            {
                *use = &cmpXchg->gtOpValue;
                return true;
            }
            if (def == cmpXchg->gtOpComparand)
            {
                *use = &cmpXchg->gtOpComparand;
                return true;
            }
            return false;
        }

        case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
        case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
        {
            GenTreeBoundsChk* const boundsChk = this->AsBoundsChk();
            if (def == boundsChk->gtIndex)
            {
                *use = &boundsChk->gtIndex;
                return true;
            }
            if (def == boundsChk->gtArrLen)
            {
                *use = &boundsChk->gtArrLen;
                return true;
            }
            return false;
        }

        case GT_FIELD:
            if (def == this->AsField()->gtFldObj)
            {
                *use = &this->AsField()->gtFldObj;
                return true;
            }
            return false;

        case GT_STMT:
            if (def == this->AsStmt()->gtStmtExpr)
            {
                *use = &this->AsStmt()->gtStmtExpr;
                return true;
            }
            return false;

        case GT_ARR_ELEM:
        {
            GenTreeArrElem* const arrElem = this->AsArrElem();
            if (def == arrElem->gtArrObj)
            {
                *use = &arrElem->gtArrObj;
                return true;
            }
            for (unsigned i = 0; i < arrElem->gtArrRank; i++)
            {
                if (def == arrElem->gtArrInds[i])
                {
                    *use = &arrElem->gtArrInds[i];
                    return true;
                }
            }
            return false;
        }

        case GT_ARR_OFFSET:
        {
            GenTreeArrOffs* const arrOffs = this->AsArrOffs();
            if (def == arrOffs->gtOffset)
            {
                *use = &arrOffs->gtOffset;
                return true;
            }
            if (def == arrOffs->gtIndex)
            {
                *use = &arrOffs->gtIndex;
                return true;
            }
            if (def == arrOffs->gtArrObj)
            {
                *use = &arrOffs->gtArrObj;
                return true;
            }
            return false;
        }

        case GT_DYN_BLK:
        {
            GenTreeDynBlk* const dynBlock = this->AsDynBlk();
            if (def == dynBlock->gtOp1)
            {
                *use = &dynBlock->gtOp1;
                return true;
            }
            if (def == dynBlock->gtDynamicSize)
            {
                *use = &dynBlock->gtDynamicSize;
                return true;
            }
            return false;
        }

        case GT_STORE_DYN_BLK:
        {
            // Unlike GT_DYN_BLK, the store form also has a data operand (gtOp2).
            GenTreeDynBlk* const dynBlock = this->AsDynBlk();
            if (def == dynBlock->gtOp1)
            {
                *use = &dynBlock->gtOp1;
                return true;
            }
            if (def == dynBlock->gtOp2)
            {
                *use = &dynBlock->gtOp2;
                return true;
            }
            if (def == dynBlock->gtDynamicSize)
            {
                *use = &dynBlock->gtDynamicSize;
                return true;
            }
            return false;
        }

        case GT_CALL:
        {
            GenTreeCall* const call = this->AsCall();
            if (def == call->gtCallObjp)
            {
                *use = &call->gtCallObjp;
                return true;
            }
            if (def == call->gtControlExpr)
            {
                *use = &call->gtControlExpr;
                return true;
            }
            if (call->gtCallType == CT_INDIRECT)
            {
                // Cookie and call-address operands exist only for indirect calls.
                if (def == call->gtCallCookie)
                {
                    *use = &call->gtCallCookie;
                    return true;
                }
                if (def == call->gtCallAddr)
                {
                    *use = &call->gtCallAddr;
                    return true;
                }
            }
            if ((call->gtCallArgs != nullptr) && call->gtCallArgs->TryGetUseList(def, use))
            {
                return true;
            }

            return (call->gtCallLateArgs != nullptr) && call->gtCallLateArgs->TryGetUseList(def, use);
        }

        // Binary nodes: everything else must be a binary operator.
        default:
            assert(this->OperIsBinary());
            return TryGetUseBinOp(def, use);
    }
}
4978
4979 bool GenTree::TryGetUseList(GenTree* def, GenTree*** use)
4980 {
4981     assert(def != nullptr);
4982     assert(use != nullptr);
4983
4984     for (GenTreeArgList* node = this->AsArgList(); node != nullptr; node = node->Rest())
4985     {
4986         if (def == node->gtOp1)
4987         {
4988             *use = &node->gtOp1;
4989             return true;
4990         }
4991     }
4992     return false;
4993 }
4994
4995 bool GenTree::TryGetUseBinOp(GenTree* def, GenTree*** use)
4996 {
4997     assert(def != nullptr);
4998     assert(use != nullptr);
4999     assert(this->OperIsBinary());
5000
5001     GenTreeOp* const binOp = this->AsOp();
5002     if (def == binOp->gtOp1)
5003     {
5004         *use = &binOp->gtOp1;
5005         return true;
5006     }
5007     if (def == binOp->gtOp2)
5008     {
5009         *use = &binOp->gtOp2;
5010         return true;
5011     }
5012     return false;
5013 }
5014
5015 //------------------------------------------------------------------------
5016 // GenTree::ReplaceOperand:
5017 //    Replace a given operand to this node with a new operand. If the
5018 //    current node is a call node, this will also udpate the call
5019 //    argument table if necessary.
5020 //
5021 // Arguments:
5022 //    useEdge - the use edge that points to the operand to be replaced.
5023 //    replacement - the replacement node.
5024 //
5025 void GenTree::ReplaceOperand(GenTree** useEdge, GenTree* replacement)
5026 {
5027     assert(useEdge != nullptr);
5028     assert(replacement != nullptr);
5029     assert(TryGetUse(*useEdge, &useEdge));
5030
5031     if (OperGet() == GT_CALL)
5032     {
5033         AsCall()->ReplaceCallOperand(useEdge, replacement);
5034     }
5035     else
5036     {
5037         *useEdge = replacement;
5038     }
5039 }
5040
5041 //------------------------------------------------------------------------
5042 // gtGetParent: Get the parent of this node, and optionally capture the
5043 //    pointer to the child so that it can be modified.
5044 //
5045 // Arguments:
5046
5047 //    parentChildPointer - A pointer to a GenTree** (yes, that's three
5048 //                         levels, i.e. GenTree ***), which if non-null,
5049 //                         will be set to point to the field in the parent
5050 //                         that points to this node.
5051 //
5052 //    Return value       - The parent of this node.
5053 //
5054 //    Notes:
5055 //
5056 //    This requires that the execution order must be defined (i.e. gtSetEvalOrder() has been called).
5057 //    To enable the child to be replaced, it accepts an argument, parentChildPointer that, if non-null,
5058 //    will be set to point to the child pointer in the parent that points to this node.
5059
5060 GenTree* GenTree::gtGetParent(GenTree*** parentChildPtrPtr) const
5061 {
5062     // Find the parent node; it must be after this node in the execution order.
5063     GenTree** parentChildPtr = nullptr;
5064     GenTree*  parent;
5065     for (parent = gtNext; parent != nullptr; parent = parent->gtNext)
5066     {
5067         parentChildPtr = gtGetChildPointer(parent);
5068         if (parentChildPtr != nullptr)
5069         {
5070             break;
5071         }
5072     }
5073     if (parentChildPtrPtr != nullptr)
5074     {
5075         *parentChildPtrPtr = parentChildPtr;
5076     }
5077     return parent;
5078 }
5079
5080 //------------------------------------------------------------------------------
5081 // OperRequiresAsgFlag : Check whether the operation requires GTF_ASG flag regardless
5082 //                       of the children's flags.
5083 //
5084
5085 bool GenTree::OperRequiresAsgFlag()
5086 {
5087     if (OperIs(GT_ASG) || OperIs(GT_XADD, GT_XCHG, GT_LOCKADD, GT_CMPXCHG, GT_MEMORYBARRIER))
5088     {
5089         return true;
5090     }
5091 #ifdef FEATURE_HW_INTRINSICS
5092     if (gtOper == GT_HWIntrinsic)
5093     {
5094         GenTreeHWIntrinsic* hwIntrinsicNode = this->AsHWIntrinsic();
5095         if (hwIntrinsicNode->OperIsMemoryStore())
5096         {
5097             // A MemoryStore operation is an assignment
5098             return true;
5099         }
5100     }
5101 #endif // FEATURE_HW_INTRINSICS
5102     return false;
5103 }
5104
5105 //------------------------------------------------------------------------------
5106 // OperRequiresCallFlag : Check whether the operation requires GTF_CALL flag regardless
5107 //                        of the children's flags.
5108 //
5109
bool GenTree::OperRequiresCallFlag(Compiler* comp)
{
    switch (gtOper)
    {
        case GT_CALL:
            return true;

        case GT_INTRINSIC:
            // Intrinsics lowered as user calls must also carry GTF_CALL.
            return comp->IsIntrinsicImplementedByUserCall(this->AsIntrinsic()->gtIntrinsicId);

#if FEATURE_FIXED_OUT_ARGS && !defined(_TARGET_64BIT_)
        case GT_LSH:
        case GT_RSH:
        case GT_RSZ:

            // Variable shifts of a long end up being helper calls, so mark the tree as such in morph.
            // This is potentially too conservative, since they'll get treated as having side effects.
            // It is important to mark them as calls so if they are part of an argument list,
            // they will get sorted and processed properly (for example, it is important to handle
            // all nested calls before putting struct arguments in the argument registers). We
            // could mark the trees just before argument processing, but it would require a full
            // tree walk of the argument tree, so we just do it when morphing, instead, even though we'll
            // mark non-argument trees (that will still get converted to calls, anyway).
            return (this->TypeGet() == TYP_LONG) && (gtGetOp2()->OperGet() != GT_CNS_INT);
#endif // FEATURE_FIXED_OUT_ARGS && !_TARGET_64BIT_

        default:
            return false;
    }
}
5140
5141 //------------------------------------------------------------------------------
5142 // OperIsImplicitIndir : Check whether the operation contains an implicit
5143 //                       indirection.
5144 // Arguments:
5145 //    this      -  a GenTree node
5146 //
5147 // Return Value:
5148 //    True if the given node contains an implicit indirection
5149 //
5150 // Note that for the GT_HWIntrinsic node we have to examine the
5151 // details of the node to determine its result.
5152 //
5153
bool GenTree::OperIsImplicitIndir() const
{
    switch (gtOper)
    {
        // All of these operators dereference memory even though they are not
        // explicit GT_IND nodes.
        case GT_LOCKADD:
        case GT_XADD:
        case GT_XCHG:
        case GT_CMPXCHG:
        case GT_BLK:
        case GT_OBJ:
        case GT_DYN_BLK:
        case GT_STORE_BLK:
        case GT_STORE_OBJ:
        case GT_STORE_DYN_BLK:
        case GT_BOX:
        case GT_ARR_INDEX:
        case GT_ARR_ELEM:
        case GT_ARR_OFFSET:
            return true;
#ifdef FEATURE_HW_INTRINSICS
        case GT_HWIntrinsic:
        {
            // Whether a HW intrinsic touches memory depends on the specific
            // intrinsic, so defer to the node itself.
            GenTreeHWIntrinsic* hwIntrinsicNode = (const_cast<GenTree*>(this))->AsHWIntrinsic();
            return hwIntrinsicNode->OperIsMemoryLoadOrStore();
        }
#endif // FEATURE_HW_INTRINSICS
        default:
            return false;
    }
}
5184
5185 //------------------------------------------------------------------------------
5186 // OperMayThrow : Check whether the operation may throw.
5187 //
5188 //
5189 // Arguments:
5190 //    comp      -  Compiler instance
5191 //
5192 // Return Value:
5193 //    True if the given operator may cause an exception
5194
bool GenTree::OperMayThrow(Compiler* comp)
{
    GenTree* op;

    switch (gtOper)
    {
        case GT_MOD:
        case GT_DIV:
        case GT_UMOD:
        case GT_UDIV:

            /* Division with a non-zero, non-minus-one constant does not throw an exception */

            op = gtOp.gtOp2;

            if (varTypeIsFloating(op->TypeGet()))
            {
                return false; // Floating point division does not throw.
            }

            // For integers only division by 0 or by -1 can throw
            if (op->IsIntegralConst() && !op->IsIntegralConst(0) && !op->IsIntegralConst(-1))
            {
                return false;
            }
            return true;

        case GT_INTRINSIC:
            // If this is an intrinsic that represents the object.GetType(), it can throw an NullReferenceException.
            // Report it as may throw.
            // Note: Some of the rest of the existing intrinsics could potentially throw an exception (for example
            //       the array and string element access ones). They are handled differently than the GetType intrinsic
            //       and are not marked with GTF_EXCEPT. If these are revisited at some point to be marked as
            //       GTF_EXCEPT,
            //       the code below might need to be specialized to handle them properly.
            if ((this->gtFlags & GTF_EXCEPT) != 0)
            {
                return true;
            }

            break;

        case GT_CALL:

            // A helper call that is known not to throw is the only call that doesn't.
            CorInfoHelpFunc helper;
            helper = comp->eeGetHelperNum(this->AsCall()->gtCallMethHnd);
            return ((helper == CORINFO_HELP_UNDEF) || !comp->s_helperCallProperties.NoThrow(helper));

        case GT_IND:
        case GT_BLK:
        case GT_OBJ:
        case GT_DYN_BLK:
        case GT_STORE_BLK:
        case GT_NULLCHECK:
            // Indirections throw only when not marked non-faulting and the
            // address could actually be null.
            return (((this->gtFlags & GTF_IND_NONFAULTING) == 0) && comp->fgAddrCouldBeNull(this->AsIndir()->Addr()));

        case GT_ARR_LENGTH:
            return (((this->gtFlags & GTF_IND_NONFAULTING) == 0) &&
                    comp->fgAddrCouldBeNull(this->AsArrLen()->ArrRef()));

        case GT_ARR_ELEM:
            return comp->fgAddrCouldBeNull(this->gtArrElem.gtArrObj);

        case GT_ARR_BOUNDS_CHECK:
        case GT_ARR_INDEX:
        case GT_ARR_OFFSET:
        case GT_LCLHEAP:
        case GT_CKFINITE:
#ifdef FEATURE_SIMD
        case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
        case GT_INDEX_ADDR:
            return true;

#ifdef FEATURE_HW_INTRINSICS
        case GT_HWIntrinsic:
        {
            GenTreeHWIntrinsic* hwIntrinsicNode = this->AsHWIntrinsic();
            assert(hwIntrinsicNode != nullptr);
            if (hwIntrinsicNode->OperIsMemoryLoadOrStore())
            {
                // This operation contains an implicit indirection
                //   it could throw a null reference exception.
                //
                return true;
            }
            // Note: non-memory HW intrinsics fall through to the overflow check below.
        }
#endif // FEATURE_HW_INTRINSICS

        default:
            break;
    }

    /* Overflow arithmetic operations also throw exceptions */

    if (gtOverflowEx())
    {
        return true;
    }

    return false;
}
5300
5301 #if DEBUGGABLE_GENTREE
5302 // static
5303 GenTree::VtablePtr GenTree::s_vtablesForOpers[] = {nullptr};
5304 GenTree::VtablePtr GenTree::s_vtableForOp       = nullptr;
5305
//------------------------------------------------------------------------
// GetVtableForOper: Returns the debug vtable pointer to install for nodes
//    with the given oper, so that debuggers show the most-derived GenTree
//    subtype for the node.
//
// Arguments:
//    oper - the oper whose node vtable is requested
//
// Return Value:
//    The vtable pointer of the most-derived GenTree subtype for 'oper'.
//    Results are cached in s_vtablesForOpers; simple unary/binary opers
//    share the single GenTreeOp vtable cached in s_vtableForOp.
//
GenTree::VtablePtr GenTree::GetVtableForOper(genTreeOps oper)
{
    noway_assert(oper < GT_COUNT);

    // First, check a cache.

    if (s_vtablesForOpers[oper] != nullptr)
    {
        return s_vtablesForOpers[oper];
    }

    // Otherwise, look up the correct vtable entry. Note that we want the most derived GenTree subtype
    // for an oper. E.g., GT_LCL_VAR is defined in GTSTRUCT_3 as GenTreeLclVar and in GTSTRUCT_N as
    // GenTreeLclVarCommon. We want the GenTreeLclVar vtable, since nothing should actually be
    // instantiated as a GenTreeLclVarCommon.

    VtablePtr res = nullptr;
    switch (oper)
    {

// clang-format off

// Each GTSTRUCT_* macro expands to case labels that construct a stack instance
// of the corresponding GenTree subtype and read its vtable pointer.
#define GTSTRUCT_0(nm, tag)                             /*handle explicitly*/
#define GTSTRUCT_1(nm, tag)                             \
        case tag:                                       \
        {                                               \
            GenTree##nm gt;                             \
            res = *reinterpret_cast<VtablePtr*>(&gt);   \
        }                                               \
        break;
#define GTSTRUCT_2(nm, tag, tag2)                       \
        case tag:                                       \
        case tag2:                                      \
        {                                               \
            GenTree##nm gt;                             \
            res = *reinterpret_cast<VtablePtr*>(&gt);   \
        }                                               \
        break;
#define GTSTRUCT_3(nm, tag, tag2, tag3)                 \
        case tag:                                       \
        case tag2:                                      \
        case tag3:                                      \
        {                                               \
            GenTree##nm gt;                             \
            res = *reinterpret_cast<VtablePtr*>(&gt);   \
        }                                               \
        break;
#define GTSTRUCT_4(nm, tag, tag2, tag3, tag4)           \
        case tag:                                       \
        case tag2:                                      \
        case tag3:                                      \
        case tag4:                                      \
        {                                               \
            GenTree##nm gt;                             \
            res = *reinterpret_cast<VtablePtr*>(&gt);   \
        }                                               \
        break;
#define GTSTRUCT_N(nm, ...)                             /*handle explicitly*/
#define GTSTRUCT_2_SPECIAL(nm, tag, tag2)               /*handle explicitly*/
#define GTSTRUCT_3_SPECIAL(nm, tag, tag2, tag3)         /*handle explicitly*/
#include "gtstructs.h"

        // clang-format on

        // Handle the special cases.
        // The following opers are in GTSTRUCT_N but no other place (namely, no subtypes).

        case GT_STORE_BLK:
        case GT_BLK:
        {
            GenTreeBlk gt;
            res = *reinterpret_cast<VtablePtr*>(&gt);
        }
        break;

        case GT_IND:
        case GT_NULLCHECK:
        {
            GenTreeIndir gt;
            res = *reinterpret_cast<VtablePtr*>(&gt);
        }
        break;

        // Handle GT_LIST (but not GT_FIELD_LIST, which is also in a GTSTRUCT_1).

        case GT_LIST:
        {
            GenTreeArgList gt;
            res = *reinterpret_cast<VtablePtr*>(&gt);
        }
        break;

        // We don't need to handle GTSTRUCT_N for LclVarCommon, since all those allowed opers are specified
        // in their proper subtype. Similarly for GenTreeIndir.

        default:
        {
            // Should be unary or binary op.
            if (s_vtableForOp == nullptr)
            {
                unsigned opKind = OperKind(oper);
                assert(!IsExOp(opKind));
                assert(OperIsSimple(oper) || OperIsLeaf(oper));
                // Need to provide non-null operands.
                GenTreeIntCon dummyOp(TYP_INT, 0);
                GenTreeOp     gt(oper, TYP_INT, &dummyOp, ((opKind & GTK_UNOP) ? nullptr : &dummyOp));
                s_vtableForOp = *reinterpret_cast<VtablePtr*>(&gt);
            }
            res = s_vtableForOp;
            break;
        }
    }
    // Cache the result so subsequent lookups for this oper are a single load.
    s_vtablesForOpers[oper] = res;
    return res;
}
5421
5422 void GenTree::SetVtableForOper(genTreeOps oper)
5423 {
5424     *reinterpret_cast<VtablePtr*>(this) = GetVtableForOper(oper);
5425 }
5426 #endif // DEBUGGABLE_GENTREE
5427
5428 GenTree* Compiler::gtNewOperNode(genTreeOps oper, var_types type, GenTree* op1, GenTree* op2)
5429 {
5430     assert(op1 != nullptr);
5431     assert(op2 != nullptr);
5432
5433     // We should not be allocating nodes that extend GenTreeOp with this;
5434     // should call the appropriate constructor for the extended type.
5435     assert(!GenTree::IsExOp(GenTree::OperKind(oper)));
5436
5437     GenTree* node = new (this, oper) GenTreeOp(oper, type, op1, op2);
5438
5439     return node;
5440 }
5441
5442 GenTree* Compiler::gtNewQmarkNode(var_types type, GenTree* cond, GenTree* colon)
5443 {
5444     compQmarkUsed = true;
5445     cond->gtFlags |= GTF_RELOP_QMARK;
5446     GenTree* result = new (this, GT_QMARK) GenTreeQmark(type, cond, colon, this);
5447 #ifdef DEBUG
5448     if (compQmarkRationalized)
5449     {
5450         fgCheckQmarkAllowedForm(result);
5451     }
5452 #endif
5453     return result;
5454 }
5455
5456 GenTreeQmark::GenTreeQmark(var_types type, GenTree* cond, GenTree* colonOp, Compiler* comp)
5457     : GenTreeOp(GT_QMARK, type, cond, colonOp)
5458 {
5459     // These must follow a specific form.
5460     assert(cond != nullptr && cond->TypeGet() == TYP_INT);
5461     assert(colonOp != nullptr && colonOp->OperGet() == GT_COLON);
5462 }
5463
5464 GenTreeIntCon* Compiler::gtNewIconNode(ssize_t value, var_types type)
5465 {
5466     return new (this, GT_CNS_INT) GenTreeIntCon(type, value);
5467 }
5468
5469 // return a new node representing the value in a physical register
5470 GenTree* Compiler::gtNewPhysRegNode(regNumber reg, var_types type)
5471 {
5472     assert(genIsValidIntReg(reg) || (reg == REG_SPBASE));
5473     GenTree* result = new (this, GT_PHYSREG) GenTreePhysReg(reg, type);
5474     return result;
5475 }
5476
5477 GenTree* Compiler::gtNewJmpTableNode()
5478 {
5479     return new (this, GT_JMPTABLE) GenTree(GT_JMPTABLE, TYP_I_IMPL);
5480 }
5481
5482 /*****************************************************************************
5483  *
5484  *  Converts an annotated token into an icon flags (so that we will later be
5485  *  able to tell the type of the handle that will be embedded in the icon
5486  *  node)
5487  */
5488
5489 unsigned Compiler::gtTokenToIconFlags(unsigned token)
5490 {
5491     unsigned flags = 0;
5492
5493     switch (TypeFromToken(token))
5494     {
5495         case mdtTypeRef:
5496         case mdtTypeDef:
5497         case mdtTypeSpec:
5498             flags = GTF_ICON_CLASS_HDL;
5499             break;
5500
5501         case mdtMethodDef:
5502             flags = GTF_ICON_METHOD_HDL;
5503             break;
5504
5505         case mdtFieldDef:
5506             flags = GTF_ICON_FIELD_HDL;
5507             break;
5508
5509         default:
5510             flags = GTF_ICON_TOKEN_HDL;
5511             break;
5512     }
5513
5514     return flags;
5515 }
5516
5517 //-----------------------------------------------------------------------------------------
5518 // gtNewIndOfIconHandleNode: Creates an indirection GenTree node of a constant handle
5519 //
5520 // Arguments:
5521 //    indType     - The type returned by the indirection node
5522 //    addr        - The constant address to read from
5523 //    iconFlags   - The GTF_ICON flag value that specifies the kind of handle that we have
5524 //    isInvariant - The indNode should also be marked as invariant
5525 //
5526 // Return Value:
5527 //    Returns a GT_IND node representing value at the address provided by 'value'
5528 //
5529 // Notes:
5530 //    The GT_IND node is marked as non-faulting
5531 //    If the indType is GT_REF we also mark the indNode as GTF_GLOB_REF
5532 //
5533
GenTree* Compiler::gtNewIndOfIconHandleNode(var_types indType, size_t addr, unsigned iconFlags, bool isInvariant)
{
    GenTree* addrNode = gtNewIconHandleNode(addr, iconFlags);
    GenTree* indNode  = gtNewOperNode(GT_IND, indType, addrNode);

    // This indirection won't cause an exception.
    //
    indNode->gtFlags |= GTF_IND_NONFAULTING;

    // String Literal handles are indirections that return a TYP_REF.
    // They are pointers into the GC heap and they are not invariant
    // as the address is a reportable GC-root and as such it can be
    // modified during a GC collection
    //
    if (indType == TYP_REF)
    {
        // This indirection points into the global heap
        indNode->gtFlags |= GTF_GLOB_REF;
    }
    if (isInvariant)
    {
        // This indirection also is invariant.
        indNode->gtFlags |= GTF_IND_INVARIANT;
    }
    return indNode;
}
5560
5561 /*****************************************************************************
5562  *
5563  *  Allocates a integer constant entry that represents a HANDLE to something.
5564  *  It may not be allowed to embed HANDLEs directly into the JITed code (for eg,
5565  *  as arguments to JIT helpers). Get a corresponding value that can be embedded.
5566  *  If the handle needs to be accessed via an indirection, pValue points to it.
5567  */
5568
5569 GenTree* Compiler::gtNewIconEmbHndNode(void* value, void* pValue, unsigned iconFlags, void* compileTimeHandle)
5570 {
5571     GenTree* iconNode;
5572     GenTree* handleNode;
5573
5574     if (value != nullptr)
5575     {
5576         // When 'value' is non-null, pValue is required to be null
5577         assert(pValue == nullptr);
5578
5579         // use 'value' to construct an integer constant node
5580         iconNode = gtNewIconHandleNode((size_t)value, iconFlags);
5581
5582         // 'value' is the handle
5583         handleNode = iconNode;
5584     }
5585     else
5586     {
5587         // When 'value' is null, pValue is required to be non-null
5588         assert(pValue != nullptr);
5589
5590         // use 'pValue' to construct an integer constant node
5591         iconNode = gtNewIconHandleNode((size_t)pValue, iconFlags);
5592
5593         // 'pValue' is an address of a location that contains the handle
5594
5595         // construct the indirection of 'pValue'
5596         handleNode = gtNewOperNode(GT_IND, TYP_I_IMPL, iconNode);
5597
5598         // This indirection won't cause an exception.
5599         handleNode->gtFlags |= GTF_IND_NONFAULTING;
5600 #if 0
5601         // It should also be invariant, but marking it as such leads to bad diffs.
5602
5603         // This indirection also is invariant.
5604         handleNode->gtFlags |= GTF_IND_INVARIANT;
5605 #endif
5606     }
5607
5608     iconNode->gtIntCon.gtCompileTimeHandle = (size_t)compileTimeHandle;
5609
5610     return handleNode;
5611 }
5612
5613 /*****************************************************************************/
// gtNewStringLiteralNode: builds a tree that produces a string literal object
// reference, using the access pattern 'iat' reported by the EE for 'pValue'.
GenTree* Compiler::gtNewStringLiteralNode(InfoAccessType iat, void* pValue)
{
    GenTree* tree = nullptr;

    switch (iat)
    {
        case IAT_VALUE: // constructStringLiteral in CoreRT case can return IAT_VALUE
            tree         = gtNewIconEmbHndNode(pValue, nullptr, GTF_ICON_STR_HDL, nullptr);
            tree->gtType = TYP_REF;
            tree         = gtNewOperNode(GT_NOP, TYP_REF, tree); // prevents constant folding
            break;

        case IAT_PVALUE: // The value needs to be accessed via an indirection
            // Create an indirection
            tree = gtNewIndOfIconHandleNode(TYP_REF, (size_t)pValue, GTF_ICON_STR_HDL, false);
            break;

        case IAT_PPVALUE: // The value needs to be accessed via a double indirection
            // Create the first indirection
            tree = gtNewIndOfIconHandleNode(TYP_I_IMPL, (size_t)pValue, GTF_ICON_PSTR_HDL, true);

            // Create the second indirection
            tree = gtNewOperNode(GT_IND, TYP_REF, tree);
            // This indirection won't cause an exception.
            tree->gtFlags |= GTF_IND_NONFAULTING;
            // This indirection points into the global heap (it is String Object)
            tree->gtFlags |= GTF_GLOB_REF;
            break;

        default:
            noway_assert(!"Unexpected InfoAccessType");
    }

    return tree;
}
5649
5650 /*****************************************************************************/
5651
5652 GenTree* Compiler::gtNewLconNode(__int64 value)
5653 {
5654 #ifdef _TARGET_64BIT_
5655     GenTree* node = new (this, GT_CNS_INT) GenTreeIntCon(TYP_LONG, value);
5656 #else
5657     GenTree* node = new (this, GT_CNS_LNG) GenTreeLngCon(value);
5658 #endif
5659
5660     return node;
5661 }
5662
5663 GenTree* Compiler::gtNewDconNode(double value, var_types type)
5664 {
5665     GenTree* node = new (this, GT_CNS_DBL) GenTreeDblCon(value, type);
5666
5667     return node;
5668 }
5669
5670 GenTree* Compiler::gtNewSconNode(int CPX, CORINFO_MODULE_HANDLE scpHandle)
5671 {
5672     // 'GT_CNS_STR' nodes later get transformed into 'GT_CALL'
5673     assert(GenTree::s_gtNodeSizes[GT_CALL] > GenTree::s_gtNodeSizes[GT_CNS_STR]);
5674     GenTree* node = new (this, GT_CALL) GenTreeStrCon(CPX, scpHandle DEBUGARG(/*largeNode*/ true));
5675     return node;
5676 }
5677
5678 GenTree* Compiler::gtNewZeroConNode(var_types type)
5679 {
5680     GenTree* zero;
5681     switch (type)
5682     {
5683         case TYP_INT:
5684             zero = gtNewIconNode(0);
5685             break;
5686
5687         case TYP_BYREF:
5688             __fallthrough;
5689
5690         case TYP_REF:
5691             zero         = gtNewIconNode(0);
5692             zero->gtType = type;
5693             break;
5694
5695         case TYP_LONG:
5696             zero = gtNewLconNode(0);
5697             break;
5698
5699         case TYP_FLOAT:
5700             zero         = gtNewDconNode(0.0);
5701             zero->gtType = type;
5702             break;
5703
5704         case TYP_DOUBLE:
5705             zero = gtNewDconNode(0.0);
5706             break;
5707
5708         default:
5709             noway_assert(!"Bad type in gtNewZeroConNode");
5710             zero = nullptr;
5711             break;
5712     }
5713     return zero;
5714 }
5715
5716 GenTree* Compiler::gtNewOneConNode(var_types type)
5717 {
5718     GenTree* one;
5719     switch (type)
5720     {
5721         case TYP_INT:
5722         case TYP_UINT:
5723             one = gtNewIconNode(1);
5724             break;
5725
5726         case TYP_LONG:
5727         case TYP_ULONG:
5728             one = gtNewLconNode(1);
5729             break;
5730
5731         case TYP_FLOAT:
5732         case TYP_DOUBLE:
5733             one         = gtNewDconNode(1.0);
5734             one->gtType = type;
5735             break;
5736
5737         default:
5738             noway_assert(!"Bad type in gtNewOneConNode");
5739             one = nullptr;
5740             break;
5741     }
5742     return one;
5743 }
5744
5745 #ifdef FEATURE_SIMD
5746 //---------------------------------------------------------------------
5747 // gtNewSIMDVectorZero: create a GT_SIMD node for Vector<T>.Zero
5748 //
5749 // Arguments:
5750 //    simdType  -  simd vector type
5751 //    baseType  -  element type of vector
5752 //    size      -  size of vector in bytes
5753 GenTree* Compiler::gtNewSIMDVectorZero(var_types simdType, var_types baseType, unsigned size)
5754 {
5755     baseType         = genActualType(baseType);
5756     GenTree* initVal = gtNewZeroConNode(baseType);
5757     initVal->gtType  = baseType;
5758     return gtNewSIMDNode(simdType, initVal, nullptr, SIMDIntrinsicInit, baseType, size);
5759 }
5760
5761 //---------------------------------------------------------------------
5762 // gtNewSIMDVectorOne: create a GT_SIMD node for Vector<T>.One
5763 //
5764 // Arguments:
5765 //    simdType  -  simd vector type
5766 //    baseType  -  element type of vector
5767 //    size      -  size of vector in bytes
5768 GenTree* Compiler::gtNewSIMDVectorOne(var_types simdType, var_types baseType, unsigned size)
5769 {
5770     GenTree* initVal;
5771     if (varTypeIsSmallInt(baseType))
5772     {
5773         unsigned baseSize = genTypeSize(baseType);
5774         int      val;
5775         if (baseSize == 1)
5776         {
5777             val = 0x01010101;
5778         }
5779         else
5780         {
5781             val = 0x00010001;
5782         }
5783         initVal = gtNewIconNode(val);
5784     }
5785     else
5786     {
5787         initVal = gtNewOneConNode(baseType);
5788     }
5789
5790     baseType        = genActualType(baseType);
5791     initVal->gtType = baseType;
5792     return gtNewSIMDNode(simdType, initVal, nullptr, SIMDIntrinsicInit, baseType, size);
5793 }
5794 #endif // FEATURE_SIMD
5795
5796 GenTreeCall* Compiler::gtNewIndCallNode(GenTree* addr, var_types type, GenTreeArgList* args, IL_OFFSETX ilOffset)
5797 {
5798     return gtNewCallNode(CT_INDIRECT, (CORINFO_METHOD_HANDLE)addr, type, args, ilOffset);
5799 }
5800
//------------------------------------------------------------------------
// gtNewCallNode: Creates a new GT_CALL node with all fields initialized.
//
// Arguments:
//    callType - the call kind (user, helper, or indirect)
//    callHnd  - the method handle; for CT_INDIRECT this carries the target address
//    type     - the signature return type
//    args     - the argument list (may be nullptr)
//    ilOffset - the IL offset, recorded for managed-retval sequence points
//               when emitting debug info
//
// Return Value:
//    The newly allocated call node.
//
GenTreeCall* Compiler::gtNewCallNode(
    gtCallTypes callType, CORINFO_METHOD_HANDLE callHnd, var_types type, GenTreeArgList* args, IL_OFFSETX ilOffset)
{
    GenTreeCall* node = new (this, GT_CALL) GenTreeCall(genActualType(type));

    node->gtFlags |= (GTF_CALL | GTF_GLOB_REF);
    if (args)
    {
        // Propagate side-effect flags from the arguments to the call.
        node->gtFlags |= (args->gtFlags & GTF_ALL_EFFECT);
    }
    node->gtCallType      = callType;
    node->gtCallMethHnd   = callHnd;
    node->gtCallArgs      = args;
    node->gtCallObjp      = nullptr;
    node->fgArgInfo       = nullptr;
    node->callSig         = nullptr;
    node->gtRetClsHnd     = nullptr;
    node->gtControlExpr   = nullptr;
    node->gtCallMoreFlags = 0;

    // gtCallCookie and gtInlineCandidateInfo share storage; initialize the
    // member that applies to this call kind.
    if (callType == CT_INDIRECT)
    {
        node->gtCallCookie = nullptr;
    }
    else
    {
        node->gtInlineCandidateInfo = nullptr;
    }
    node->gtCallLateArgs = nullptr;
    node->gtReturnType   = type;

#ifdef FEATURE_READYTORUN_COMPILER
    node->gtEntryPoint.addr       = nullptr;
    node->gtEntryPoint.accessType = IAT_VALUE;
#endif

#if defined(DEBUG) || defined(INLINE_DATA)
    // These get updated after call node is built.
    node->gtInlineObservation = InlineObservation::CALLEE_UNUSED_INITIAL;
    node->gtRawILOffset       = BAD_IL_OFFSET;
#endif

    // Spec: Managed Retval sequence points needs to be generated while generating debug info for debuggable code.
    //
    // Implementation note: if not generating MRV info genCallSite2ILOffsetMap will be NULL and
    // codegen will pass BAD_IL_OFFSET as IL offset of a call node to emitter, which will cause emitter
    // not to emit IP mapping entry.
    if (opts.compDbgCode && opts.compDbgInfo)
    {
        // Managed Retval - IL offset of the call.  This offset is used to emit a
        // CALL_INSTRUCTION type sequence point while emitting corresponding native call.
        //
        // TODO-Cleanup:
        // a) (Opt) We need not store this offset if the method doesn't return a
        // value.  Rather it can be made BAD_IL_OFFSET to prevent a sequence
        // point being emitted.
        //
        // b) (Opt) Add new sequence points only if requested by debugger through
        // a new boundary type - ICorDebugInfo::BoundaryTypes
        if (genCallSite2ILOffsetMap == nullptr)
        {
            genCallSite2ILOffsetMap = new (getAllocator()) CallSiteILOffsetTable(getAllocator());
        }

        // Make sure that there are no duplicate entries for a given call node
        assert(!genCallSite2ILOffsetMap->Lookup(node));
        genCallSite2ILOffsetMap->Set(node, ilOffset);
    }

    // Initialize gtOtherRegs
    node->ClearOtherRegs();

    // Initialize spill flags of gtOtherRegs
    node->ClearOtherRegFlags();

#if defined(_TARGET_X86_) || defined(_TARGET_ARM_)
    // Initialize the multi-reg long return info if necessary
    if (varTypeIsLong(node))
    {
        // The return type will remain as the incoming long type
        node->gtReturnType = node->gtType;

        // Initialize Return type descriptor of call node
        ReturnTypeDesc* retTypeDesc = node->GetReturnTypeDesc();
        retTypeDesc->InitializeLongReturnType(this);

        // must be a long returned in two registers
        assert(retTypeDesc->GetReturnRegCount() == 2);
    }
#endif // defined(_TARGET_X86_) || defined(_TARGET_ARM_)

    return node;
}
5894
5895 GenTree* Compiler::gtNewLclvNode(unsigned lnum, var_types type DEBUGARG(IL_OFFSETX ILoffs))
5896 {
5897     // We need to ensure that all struct values are normalized.
5898     // It might be nice to assert this in general, but we have assignments of int to long.
5899     if (varTypeIsStruct(type))
5900     {
5901         // Make an exception for implicit by-ref parameters during global morph, since
5902         // their lvType has been updated to byref but their appearances have not yet all
5903         // been rewritten and so may have struct type still.
5904         assert(type == lvaTable[lnum].lvType ||
5905                (lvaIsImplicitByRefLocal(lnum) && fgGlobalMorph && (lvaTable[lnum].lvType == TYP_BYREF)));
5906     }
5907     GenTree* node = new (this, GT_LCL_VAR) GenTreeLclVar(type, lnum DEBUGARG(ILoffs));
5908
5909     /* Cannot have this assert because the inliner uses this function
5910      * to add temporaries */
5911
5912     // assert(lnum < lvaCount);
5913
5914     return node;
5915 }
5916
5917 GenTree* Compiler::gtNewLclLNode(unsigned lnum, var_types type DEBUGARG(IL_OFFSETX ILoffs))
5918 {
5919     // We need to ensure that all struct values are normalized.
5920     // It might be nice to assert this in general, but we have assignments of int to long.
5921     if (varTypeIsStruct(type))
5922     {
5923         // Make an exception for implicit by-ref parameters during global morph, since
5924         // their lvType has been updated to byref but their appearances have not yet all
5925         // been rewritten and so may have struct type still.
5926         assert(type == lvaTable[lnum].lvType ||
5927                (lvaIsImplicitByRefLocal(lnum) && fgGlobalMorph && (lvaTable[lnum].lvType == TYP_BYREF)));
5928     }
5929     // This local variable node may later get transformed into a large node
5930     assert(GenTree::s_gtNodeSizes[GT_CALL] > GenTree::s_gtNodeSizes[GT_LCL_VAR]);
5931     GenTree* node = new (this, GT_CALL) GenTreeLclVar(type, lnum DEBUGARG(ILoffs) DEBUGARG(/*largeNode*/ true));
5932     return node;
5933 }
5934
5935 GenTreeLclFld* Compiler::gtNewLclFldNode(unsigned lnum, var_types type, unsigned offset)
5936 {
5937     GenTreeLclFld* node = new (this, GT_LCL_FLD) GenTreeLclFld(type, lnum, offset);
5938
5939     /* Cannot have this assert because the inliner uses this function
5940      * to add temporaries */
5941
5942     // assert(lnum < lvaCount);
5943
5944     node->gtFieldSeq = FieldSeqStore::NotAField();
5945     return node;
5946 }
5947
5948 GenTree* Compiler::gtNewInlineCandidateReturnExpr(GenTree* inlineCandidate, var_types type)
5949
5950 {
5951     assert(GenTree::s_gtNodeSizes[GT_RET_EXPR] == TREE_NODE_SZ_LARGE);
5952
5953     GenTree* node = new (this, GT_RET_EXPR) GenTreeRetExpr(type);
5954
5955     node->gtRetExpr.gtInlineCandidate = inlineCandidate;
5956
5957     if (varTypeIsStruct(inlineCandidate) && !inlineCandidate->OperIsBlkOp())
5958     {
5959         node->gtRetExpr.gtRetClsHnd = gtGetStructHandle(inlineCandidate);
5960     }
5961
5962     // GT_RET_EXPR node eventually might be bashed back to GT_CALL (when inlining is aborted for example).
5963     // Therefore it should carry the GTF_CALL flag so that all the rules about spilling can apply to it as well.
5964     // For example, impImportLeave or CEE_POP need to spill GT_RET_EXPR before empty the evaluation stack.
5965     node->gtFlags |= GTF_CALL;
5966
5967     return node;
5968 }
5969
5970 GenTreeArgList* Compiler::gtNewListNode(GenTree* op1, GenTreeArgList* op2)
5971 {
5972     assert((op1 != nullptr) && (op1->OperGet() != GT_LIST));
5973
5974     return new (this, GT_LIST) GenTreeArgList(op1, op2);
5975 }
5976
5977 /*****************************************************************************
5978  *
5979  *  Create a list out of one value.
5980  */
5981
5982 GenTreeArgList* Compiler::gtNewArgList(GenTree* arg)
5983 {
5984     return new (this, GT_LIST) GenTreeArgList(arg);
5985 }
5986
5987 /*****************************************************************************
5988  *
5989  *  Create a list out of the two values.
5990  */
5991
5992 GenTreeArgList* Compiler::gtNewArgList(GenTree* arg1, GenTree* arg2)
5993 {
5994     return new (this, GT_LIST) GenTreeArgList(arg1, gtNewArgList(arg2));
5995 }
5996
5997 /*****************************************************************************
5998  *
5999  *  Create a list out of the three values.
6000  */
6001
6002 GenTreeArgList* Compiler::gtNewArgList(GenTree* arg1, GenTree* arg2, GenTree* arg3)
6003 {
6004     return new (this, GT_LIST) GenTreeArgList(arg1, gtNewArgList(arg2, arg3));
6005 }
6006
6007 /*****************************************************************************
6008  *
 *  Create a list out of the four values.
6010  */
6011
6012 GenTreeArgList* Compiler::gtNewArgList(GenTree* arg1, GenTree* arg2, GenTree* arg3, GenTree* arg4)
6013 {
6014     return new (this, GT_LIST) GenTreeArgList(arg1, gtNewArgList(arg2, arg3, arg4));
6015 }
6016
6017 /*****************************************************************************
6018  *
6019  *  Given a GT_CALL node, access the fgArgInfo and find the entry
6020  *  that has the matching argNum and return the fgArgTableEntryPtr
6021  */
6022
6023 fgArgTabEntry* Compiler::gtArgEntryByArgNum(GenTreeCall* call, unsigned argNum)
6024 {
6025     fgArgInfo* argInfo = call->fgArgInfo;
6026     noway_assert(argInfo != nullptr);
6027     return argInfo->GetArgEntry(argNum);
6028 }
6029
6030 /*****************************************************************************
6031  *
6032  *  Given a GT_CALL node, access the fgArgInfo and find the entry
6033  *  that has the matching node and return the fgArgTableEntryPtr
6034  */
6035
6036 fgArgTabEntry* Compiler::gtArgEntryByNode(GenTreeCall* call, GenTree* node)
6037 {
6038     fgArgInfo* argInfo = call->fgArgInfo;
6039     noway_assert(argInfo != nullptr);
6040
6041     unsigned        argCount       = argInfo->ArgCount();
6042     fgArgTabEntry** argTable       = argInfo->ArgTable();
6043     fgArgTabEntry*  curArgTabEntry = nullptr;
6044
6045     for (unsigned i = 0; i < argCount; i++)
6046     {
6047         curArgTabEntry = argTable[i];
6048
6049         if (curArgTabEntry->node == node)
6050         {
6051             return curArgTabEntry;
6052         }
6053         else if (curArgTabEntry->parent != nullptr)
6054         {
6055             assert(curArgTabEntry->parent->OperIsList());
6056             if (curArgTabEntry->parent->Current() == node)
6057             {
6058                 return curArgTabEntry;
6059             }
6060         }
6061         else // (curArgTabEntry->parent == NULL)
6062         {
6063             if (call->gtCallObjp == node)
6064             {
6065                 return curArgTabEntry;
6066             }
6067         }
6068     }
6069     noway_assert(!"gtArgEntryByNode: node not found");
6070     return nullptr;
6071 }
6072
6073 /*****************************************************************************
6074  *
6075  *  Find and return the entry with the given "lateArgInx".  Requires that one is found
6076  *  (asserts this).
6077  */
6078 fgArgTabEntry* Compiler::gtArgEntryByLateArgIndex(GenTreeCall* call, unsigned lateArgInx)
6079 {
6080     fgArgInfo* argInfo = call->fgArgInfo;
6081     noway_assert(argInfo != nullptr);
6082     assert(lateArgInx != UINT_MAX);
6083
6084     unsigned        argCount       = argInfo->ArgCount();
6085     fgArgTabEntry** argTable       = argInfo->ArgTable();
6086     fgArgTabEntry*  curArgTabEntry = nullptr;
6087
6088     for (unsigned i = 0; i < argCount; i++)
6089     {
6090         curArgTabEntry = argTable[i];
6091         if (curArgTabEntry->isLateArg() && curArgTabEntry->lateArgInx == lateArgInx)
6092         {
6093             return curArgTabEntry;
6094         }
6095     }
6096     noway_assert(!"gtArgEntryByNode: node not found");
6097     return nullptr;
6098 }
6099
6100 //------------------------------------------------------------------------
6101 // gtArgNodeByLateArgInx: Given a call instruction, find the argument with the given
6102 //                        late arg index (i.e. the given position in the gtCallLateArgs list).
6103 // Arguments:
6104 //    call - the call node
6105 //    lateArgInx - the index into the late args list
6106 //
6107 // Return value:
6108 //    The late argument node.
6109 //
6110 GenTree* Compiler::gtArgNodeByLateArgInx(GenTreeCall* call, unsigned lateArgInx)
6111 {
6112     GenTree* argx     = nullptr;
6113     unsigned regIndex = 0;
6114
6115     for (GenTreeArgList *list = call->gtCall.gtCallLateArgs; list != nullptr; regIndex++, list = list->Rest())
6116     {
6117         argx = list->Current();
6118         assert(!argx->IsArgPlaceHolderNode()); // No placeholder nodes are in gtCallLateArgs;
6119         if (regIndex == lateArgInx)
6120         {
6121             break;
6122         }
6123     }
6124     noway_assert(argx != nullptr);
6125     return argx;
6126 }
6127
6128 /*****************************************************************************
6129  *
6130  *  Given an fgArgTabEntry*, return true if it is the 'this' pointer argument.
6131  */
6132 bool Compiler::gtArgIsThisPtr(fgArgTabEntry* argEntry)
6133 {
6134     return (argEntry->parent == nullptr);
6135 }
6136
6137 /*****************************************************************************
6138  *
6139  *  Create a node that will assign 'src' to 'dst'.
6140  */
6141
6142 GenTree* Compiler::gtNewAssignNode(GenTree* dst, GenTree* src)
6143 {
6144     /* Mark the target as being assigned */
6145
6146     if ((dst->gtOper == GT_LCL_VAR) || (dst->OperGet() == GT_LCL_FLD))
6147     {
6148         dst->gtFlags |= GTF_VAR_DEF;
6149         if (dst->IsPartialLclFld(this))
6150         {
6151             // We treat these partial writes as combined uses and defs.
6152             dst->gtFlags |= GTF_VAR_USEASG;
6153         }
6154     }
6155     dst->gtFlags |= GTF_DONT_CSE;
6156
6157     /* Create the assignment node */
6158
6159     GenTree* asg = gtNewOperNode(GT_ASG, dst->TypeGet(), dst, src);
6160
6161     /* Mark the expression as containing an assignment */
6162
6163     asg->gtFlags |= GTF_ASG;
6164
6165     return asg;
6166 }
6167
6168 //------------------------------------------------------------------------
6169 // gtNewObjNode: Creates a new Obj node.
6170 //
6171 // Arguments:
6172 //    structHnd - The class handle of the struct type.
6173 //    addr      - The address of the struct.
6174 //
6175 // Return Value:
6176 //    Returns a node representing the struct value at the given address.
6177 //
6178 // Assumptions:
6179 //    Any entry and exit conditions, such as required preconditions of
6180 //    data structures, memory to be freed by caller, etc.
6181 //
6182 // Notes:
6183 //    It will currently return a GT_OBJ node for any struct type, but may
6184 //    return a GT_IND or a non-indirection for a scalar type.
6185 //    The node will not yet have its GC info initialized. This is because
6186 //    we may not need this info if this is an r-value.
6187
GenTree* Compiler::gtNewObjNode(CORINFO_CLASS_HANDLE structHnd, GenTree* addr)
{
    var_types nodeType = impNormStructType(structHnd);
    assert(varTypeIsStruct(nodeType));
    unsigned size = info.compCompHnd->getClassSize(structHnd);

    // It would be convenient to set the GC info at this time, but we don't actually require
    // it unless this is going to be a destination.
    // NOTE(review): the assert above says nodeType is always a struct, which makes this
    // scalar path unreachable in checked builds; in release builds (asserts compiled out)
    // it could still run — confirm whether it is intentionally defensive or dead code.
    if (!varTypeIsStruct(nodeType))
    {
        // If 'addr' is ADDR(x) where x already has the scalar type, just return x.
        if ((addr->gtOper == GT_ADDR) && (addr->gtGetOp1()->TypeGet() == nodeType))
        {
            return addr->gtGetOp1();
        }
        else
        {
            return gtNewOperNode(GT_IND, nodeType, addr);
        }
    }
    GenTreeBlk* newBlkOrObjNode = new (this, GT_OBJ) GenTreeObj(nodeType, addr, structHnd, size);

    // An Obj is not a global reference, if it is known to be a local struct.
    if ((addr->gtFlags & GTF_GLOB_REF) == 0)
    {
        GenTreeLclVarCommon* lclNode = addr->IsLocalAddrExpr();
        if (lclNode != nullptr)
        {
            // A local address cannot fault.
            newBlkOrObjNode->gtFlags |= GTF_IND_NONFAULTING;
            // Implicit-by-ref locals are accessed through a pointer, so they keep GTF_GLOB_REF.
            if (!lvaIsImplicitByRefLocal(lclNode->gtLclNum))
            {
                newBlkOrObjNode->gtFlags &= ~GTF_GLOB_REF;
            }
        }
    }
    return newBlkOrObjNode;
}
6224
6225 //------------------------------------------------------------------------
6226 // gtSetObjGcInfo: Set the GC info on an object node
6227 //
6228 // Arguments:
6229 //    objNode - The object node of interest
6230
void Compiler::gtSetObjGcInfo(GenTreeObj* objNode)
{
    CORINFO_CLASS_HANDLE structHnd  = objNode->gtClass;
    var_types            nodeType   = objNode->TypeGet();
    unsigned             size       = objNode->gtBlkSize;
    unsigned             slots      = 0;
    unsigned             gcPtrCount = 0;
    BYTE*                gcPtrs     = nullptr;

    // The node's cached type, size and class handle must agree with the EE's view.
    assert(varTypeIsStruct(nodeType));
    assert(size == info.compCompHnd->getClassSize(structHnd));
    assert(nodeType == impNormStructType(structHnd));

    // Only opaque TYP_STRUCT nodes of at least pointer size can contain GC pointers;
    // anything else keeps the zero/null defaults set above.
    if (nodeType == TYP_STRUCT)
    {
        if (size >= TARGET_POINTER_SIZE)
        {
            // Get the GC fields info
            var_types simdBaseType; // Dummy argument
            slots    = roundUp(size, TARGET_POINTER_SIZE) / TARGET_POINTER_SIZE;
            gcPtrs   = new (this, CMK_ASTNode) BYTE[slots];
            nodeType = impNormStructType(structHnd, gcPtrs, &gcPtrCount, &simdBaseType);
        }
    }
    objNode->SetGCInfo(gcPtrs, gcPtrCount, slots);
    // Re-normalizing the struct type must not have changed the node's type.
    assert(objNode->gtType == nodeType);
}
6258
6259 //------------------------------------------------------------------------
6260 // gtNewStructVal: Return a node that represents a struct value
6261 //
6262 // Arguments:
6263 //    structHnd - The class for the struct
6264 //    addr      - The address of the struct
6265 //
6266 // Return Value:
6267 //    A block, object or local node that represents the struct value pointed to by 'addr'.
6268
6269 GenTree* Compiler::gtNewStructVal(CORINFO_CLASS_HANDLE structHnd, GenTree* addr)
6270 {
6271     if (addr->gtOper == GT_ADDR)
6272     {
6273         GenTree* val = addr->gtGetOp1();
6274         if (val->OperGet() == GT_LCL_VAR)
6275         {
6276             unsigned   lclNum = addr->gtGetOp1()->AsLclVarCommon()->gtLclNum;
6277             LclVarDsc* varDsc = &(lvaTable[lclNum]);
6278             if (varTypeIsStruct(varDsc) && (varDsc->lvVerTypeInfo.GetClassHandle() == structHnd) &&
6279                 !lvaIsImplicitByRefLocal(lclNum))
6280             {
6281                 return addr->gtGetOp1();
6282             }
6283         }
6284     }
6285     return gtNewObjNode(structHnd, addr);
6286 }
6287
6288 //------------------------------------------------------------------------
6289 // gtNewBlockVal: Return a node that represents a possibly untyped block value
6290 //
6291 // Arguments:
6292 //    addr      - The address of the block
6293 //    size      - The size of the block
6294 //
6295 // Return Value:
6296 //    A block, object or local node that represents the block value pointed to by 'addr'.
6297
GenTree* Compiler::gtNewBlockVal(GenTree* addr, unsigned size)
{
    // By default we treat this as an opaque struct type with known size.
    var_types blkType = TYP_STRUCT;
    if ((addr->gtOper == GT_ADDR) && (addr->gtGetOp1()->OperGet() == GT_LCL_VAR))
    {
        GenTree* val = addr->gtGetOp1();
#if FEATURE_SIMD
        // A SIMD-typed local whose size matches the block can stand in for the block value.
        if (varTypeIsSIMD(val))
        {
            if (genTypeSize(val->TypeGet()) == size)
            {
                blkType = val->TypeGet();
                return addr->gtGetOp1();
            }
        }
        else
#endif // FEATURE_SIMD
            if (val->TypeGet() == TYP_STRUCT)
        {
            GenTreeLclVarCommon* lcl    = addr->gtGetOp1()->AsLclVarCommon();
            LclVarDsc*           varDsc = &(lvaTable[lcl->gtLclNum]);
            // A TYP_STRUCT local of exactly the requested size can also be used directly.
            if ((varDsc->TypeGet() == TYP_STRUCT) && (varDsc->lvExactSize == size))
            {
                return addr->gtGetOp1();
            }
        }
    }
    // Otherwise, wrap the address in a GT_BLK of the given size.
    return new (this, GT_BLK) GenTreeBlk(GT_BLK, blkType, addr, size);
}
6328
6329 // Creates a new assignment node for a CpObj.
6330 // Parameters (exactly the same as MSIL CpObj):
6331 //
6332 //  dstAddr    - The target to copy the struct to
6333 //  srcAddr    - The source to copy the struct from
6334 //  structHnd  - A class token that represents the type of object being copied. May be null
6335 //               if FEATURE_SIMD is enabled and the source has a SIMD type.
6336 //  isVolatile - Is this marked as volatile memory?
6337
6338 GenTree* Compiler::gtNewCpObjNode(GenTree* dstAddr, GenTree* srcAddr, CORINFO_CLASS_HANDLE structHnd, bool isVolatile)
6339 {
6340     GenTree* lhs = gtNewStructVal(structHnd, dstAddr);
6341     GenTree* src = nullptr;
6342     unsigned size;
6343
6344     if (lhs->OperIsBlk())
6345     {
6346         size = lhs->AsBlk()->gtBlkSize;
6347         if (lhs->OperGet() == GT_OBJ)
6348         {
6349             gtSetObjGcInfo(lhs->AsObj());
6350         }
6351     }
6352     else
6353     {
6354         size = genTypeSize(lhs->gtType);
6355     }
6356
6357     if (srcAddr->OperGet() == GT_ADDR)
6358     {
6359         src = srcAddr->gtOp.gtOp1;
6360     }
6361     else
6362     {
6363         src = gtNewOperNode(GT_IND, lhs->TypeGet(), srcAddr);
6364     }
6365
6366     GenTree* result = gtNewBlkOpNode(lhs, src, size, isVolatile, true);
6367     return result;
6368 }
6369
6370 //------------------------------------------------------------------------
6371 // FixupInitBlkValue: Fixup the init value for an initBlk operation
6372 //
6373 // Arguments:
6374 //    asgType - The type of assignment that the initBlk is being transformed into
6375 //
6376 // Return Value:
6377 //    Modifies the constant value on this node to be the appropriate "fill"
6378 //    value for the initblk.
6379 //
6380 // Notes:
6381 //    The initBlk MSIL instruction takes a byte value, which must be
6382 //    extended to the size of the assignment when an initBlk is transformed
6383 //    to an assignment of a primitive type.
6384 //    This performs the appropriate extension.
6385
6386 void GenTreeIntCon::FixupInitBlkValue(var_types asgType)
6387 {
6388     assert(varTypeIsIntegralOrI(asgType));
6389     unsigned size = genTypeSize(asgType);
6390     if (size > 1)
6391     {
6392         size_t cns = gtIconVal;
6393         cns        = cns & 0xFF;
6394         cns |= cns << 8;
6395         if (size >= 4)
6396         {
6397             cns |= cns << 16;
6398 #ifdef _TARGET_64BIT_
6399             if (size == 8)
6400             {
6401                 cns |= cns << 32;
6402             }
6403 #endif // _TARGET_64BIT_
6404
6405             // Make the type match for evaluation types.
6406             gtType = asgType;
6407
6408             // if we are initializing a GC type the value being assigned must be zero (null).
6409             assert(!varTypeIsGC(asgType) || (cns == 0));
6410         }
6411
6412         gtIconVal = cns;
6413     }
6414 }
6415
6416 //
6417 //------------------------------------------------------------------------
6418 // gtBlockOpInit: Initializes a BlkOp GenTree
6419 //
6420 // Arguments:
6421 //    result     - an assignment node that is to be initialized.
6422 //    dst        - the target (destination) we want to either initialize or copy to.
6423 //    src        - the init value for InitBlk or the source struct for CpBlk/CpObj.
6424 //    isVolatile - specifies whether this node is a volatile memory operation.
6425 //
6426 // Assumptions:
6427 //    'result' is an assignment that is newly constructed.
6428 //    If 'dst' is TYP_STRUCT, then it must be a block node or lclVar.
6429 //
6430 // Notes:
6431 //    This procedure centralizes all the logic to both enforce proper structure and
6432 //    to properly construct any InitBlk/CpBlk node.
6433
void Compiler::gtBlockOpInit(GenTree* result, GenTree* dst, GenTree* srcOrFillVal, bool isVolatile)
{
    // Non-block assignments need no further initialization here.
    if (!result->OperIsBlkOp())
    {
        assert(dst->TypeGet() != TYP_STRUCT);
        return;
    }
#ifdef DEBUG
    // If the copy involves GC pointers, the caller must have already set
    // the node additional members (gtGcPtrs, gtGcPtrCount, gtSlots) on the dst.
    if ((dst->gtOper == GT_OBJ) && dst->AsBlk()->HasGCPtr())
    {
        GenTreeObj* objNode = dst->AsObj();
        assert(objNode->gtGcPtrs != nullptr);
        assert(!IsUninitialized(objNode->gtGcPtrs));
        assert(!IsUninitialized(objNode->gtGcPtrCount));
        assert(!IsUninitialized(objNode->gtSlots) && objNode->gtSlots > 0);

        // Every recorded GC-layout byte must be one of the known CorInfoGCType values.
        for (unsigned i = 0; i < objNode->gtGcPtrCount; ++i)
        {
            CorInfoGCType t = (CorInfoGCType)objNode->gtGcPtrs[i];
            switch (t)
            {
                case TYPE_GC_NONE:
                case TYPE_GC_REF:
                case TYPE_GC_BYREF:
                case TYPE_GC_OTHER:
                    break;
                default:
                    unreached();
            }
        }
    }
#endif // DEBUG

    /* In the case of CpBlk, we want to avoid generating
    * nodes where the source and destination are the same
    * because of two reasons, first, is useless, second
    * it introduces issues in liveness and also copying
    * memory from an overlapping memory location is
    * undefined both as per the ECMA standard and also
    * the memcpy semantics specify that.
    *
    * NOTE: In this case we'll only detect the case for addr of a local
    * and a local itself, any other complex expressions won't be
    * caught.
    *
    * TODO-Cleanup: though having this logic is goodness (i.e. avoids self-assignment
    * of struct vars very early), it was added because fgInterBlockLocalVarLiveness()
    * isn't handling self-assignment of struct variables correctly.  This issue may not
    * surface if struct promotion is ON (which is the case on x86/arm).  But still the
    * fundamental issue exists that needs to be addressed.
    */
    if (result->OperIsCopyBlkOp())
    {
        GenTree* currSrc = srcOrFillVal;
        GenTree* currDst = dst;

        // Look through BLK(ADDR(x)) wrappers on both sides to find the underlying locals.
        if (currSrc->OperIsBlk() && (currSrc->AsBlk()->Addr()->OperGet() == GT_ADDR))
        {
            currSrc = currSrc->AsBlk()->Addr()->gtGetOp1();
        }
        if (currDst->OperIsBlk() && (currDst->AsBlk()->Addr()->OperGet() == GT_ADDR))
        {
            currDst = currDst->AsBlk()->Addr()->gtGetOp1();
        }

        if (currSrc->OperGet() == GT_LCL_VAR && currDst->OperGet() == GT_LCL_VAR &&
            currSrc->gtLclVarCommon.gtLclNum == currDst->gtLclVarCommon.gtLclNum)
        {
            // Make this a NOP
            // TODO-Cleanup: probably doesn't matter, but could do this earlier and avoid creating a GT_ASG
            result->gtBashToNOP();
            return;
        }
    }

    // Propagate all effect flags from children
    result->gtFlags |= dst->gtFlags & GTF_ALL_EFFECT;
    result->gtFlags |= result->gtOp.gtOp2->gtFlags & GTF_ALL_EFFECT;

    result->gtFlags |= (dst->gtFlags & GTF_EXCEPT) | (srcOrFillVal->gtFlags & GTF_EXCEPT);

    if (isVolatile)
    {
        result->gtFlags |= GTF_BLK_VOLATILE;
    }

#ifdef FEATURE_SIMD
    if (result->OperIsCopyBlkOp() && varTypeIsSIMD(srcOrFillVal))
    {
        // If the source is a GT_SIMD node of SIMD type, then the dst lclvar struct
        // should be labeled as simd intrinsic related struct.
        // This is done so that the morpher can transform any field accesses into
        // intrinsics, thus avoiding conflicting access methods (fields vs. whole-register).

        GenTree* src = srcOrFillVal;
        if (src->OperIsIndir() && (src->AsIndir()->Addr()->OperGet() == GT_ADDR))
        {
            src = src->AsIndir()->Addr()->gtGetOp1();
        }
#ifdef FEATURE_HW_INTRINSICS
        if ((src->OperGet() == GT_SIMD) || (src->OperGet() == GT_HWIntrinsic))
#else
        if (src->OperGet() == GT_SIMD)
#endif // FEATURE_HW_INTRINSICS
        {
            // Unwrap the destination the same way to find the local being written.
            if (dst->OperIsBlk() && (dst->AsIndir()->Addr()->OperGet() == GT_ADDR))
            {
                dst = dst->AsIndir()->Addr()->gtGetOp1();
            }

            if (dst->OperIsLocal() && varTypeIsStruct(dst))
            {
                setLclRelatedToSIMDIntrinsic(dst);
            }
        }
    }
#endif // FEATURE_SIMD
}
6554
6555 //------------------------------------------------------------------------
6556 // gtNewBlkOpNode: Creates a GenTree for a block (struct) assignment.
6557 //
6558 // Arguments:
6559 //    dst           - Destination or target to copy to / initialize the buffer.
6560 //    srcOrFillVall - the size of the buffer to copy/initialize or zero, in the case of CpObj.
6561 //    size          - The size of the buffer or a class token (in the case of CpObj).
6562 //    isVolatile    - Whether this is a volatile memory operation or not.
6563 //    isCopyBlock   - True if this is a block copy (rather than a block init).
6564 //
6565 // Return Value:
6566 //    Returns the newly constructed and initialized block operation.
6567 //
6568 // Notes:
6569 //    If size is zero, the dst must be a GT_OBJ with the class handle.
6570 //    'dst' must be a block node or lclVar.
6571 //
6572 GenTree* Compiler::gtNewBlkOpNode(GenTree* dst, GenTree* srcOrFillVal, unsigned size, bool isVolatile, bool isCopyBlock)
6573 {
6574     assert(dst->OperIsBlk() || dst->OperIsLocal());
6575     if (isCopyBlock)
6576     {
6577         srcOrFillVal->gtFlags |= GTF_DONT_CSE;
6578         if (srcOrFillVal->OperIsIndir() && (srcOrFillVal->gtGetOp1()->gtOper == GT_ADDR))
6579         {
6580             srcOrFillVal = srcOrFillVal->gtGetOp1()->gtGetOp1();
6581         }
6582     }
6583     else
6584     {
6585         // InitBlk
6586         assert(varTypeIsIntegral(srcOrFillVal));
6587         if (varTypeIsStruct(dst))
6588         {
6589             if (!srcOrFillVal->IsIntegralConst(0))
6590             {
6591                 srcOrFillVal = gtNewOperNode(GT_INIT_VAL, TYP_INT, srcOrFillVal);
6592             }
6593         }
6594     }
6595
6596     GenTree* result = gtNewAssignNode(dst, srcOrFillVal);
6597     gtBlockOpInit(result, dst, srcOrFillVal, isVolatile);
6598     return result;
6599 }
6600
6601 //------------------------------------------------------------------------
6602 // gtNewPutArgReg: Creates a new PutArgReg node.
6603 //
6604 // Arguments:
6605 //    type   - The actual type of the argument
6606 //    arg    - The argument node
6607 //    argReg - The register that the argument will be passed in
6608 //
6609 // Return Value:
6610 //    Returns the newly created PutArgReg node.
6611 //
6612 // Notes:
6613 //    The node is generated as GenTreeMultiRegOp on RyuJIT/armel, GenTreeOp on all the other archs.
6614 //
GenTree* Compiler::gtNewPutArgReg(var_types type, GenTree* arg, regNumber argReg)
{
    assert(arg != nullptr);

    GenTree* node = nullptr;
#if defined(_TARGET_ARM_)
    // A PUTARG_REG could be a MultiRegOp on arm since we could move a double register to two int registers.
    node = new (this, GT_PUTARG_REG) GenTreeMultiRegOp(GT_PUTARG_REG, type, arg, nullptr);
    if (type == TYP_LONG)
    {
        // A TYP_LONG occupies a consecutive register pair; record the second register.
        node->AsMultiRegOp()->gtOtherReg = REG_NEXT(argReg);
    }
#else
    node          = gtNewOperNode(GT_PUTARG_REG, type, arg);
#endif
    // The (first) target register of the argument.
    node->gtRegNum = argReg;

    return node;
}
6634
6635 //------------------------------------------------------------------------
6636 // gtNewBitCastNode: Creates a new BitCast node.
6637 //
6638 // Arguments:
6639 //    type   - The actual type of the argument
6640 //    arg    - The argument node
6641 //    argReg - The register that the argument will be passed in
6642 //
6643 // Return Value:
6644 //    Returns the newly created BitCast node.
6645 //
6646 // Notes:
6647 //    The node is generated as GenTreeMultiRegOp on RyuJIT/arm, as GenTreeOp on all the other archs.
6648 //
GenTree* Compiler::gtNewBitCastNode(var_types type, GenTree* arg)
{
    assert(arg != nullptr);

    GenTree* node = nullptr;
#if defined(_TARGET_ARM_)
    // A BITCAST could be a MultiRegOp on arm since we could move a double register to two int registers.
    node = new (this, GT_BITCAST) GenTreeMultiRegOp(GT_BITCAST, type, arg, nullptr);
#else
    // On all other targets a plain unary GT_BITCAST node suffices.
    node          = gtNewOperNode(GT_BITCAST, type, arg);
#endif

    return node;
}
6663
6664 //------------------------------------------------------------------------
6665 // gtNewAllocObjNode: Helper to create an object allocation node.
6666 //
6667 // Arguments:
6668 //    pResolvedToken   - Resolved token for the object being allocated
6669 //    useParent     -    true iff the token represents a child of the object's class
6670 //
6671 // Return Value:
6672 //    Returns GT_ALLOCOBJ node that will be later morphed into an
6673 //    allocation helper call or local variable allocation on the stack.
6674
GenTreeAllocObj* Compiler::gtNewAllocObjNode(CORINFO_RESOLVED_TOKEN* pResolvedToken, BOOL useParent)
{
    const BOOL      mustRestoreHandle     = TRUE;
    BOOL* const     pRuntimeLookup        = nullptr;
    bool            usingReadyToRunHelper = false;
    CorInfoHelpFunc helper                = CORINFO_HELP_UNDEF;
    GenTree*        opHandle = impTokenToHandle(pResolvedToken, pRuntimeLookup, mustRestoreHandle, useParent);

#ifdef FEATURE_READYTORUN_COMPILER
    CORINFO_CONST_LOOKUP lookup = {};

    // Under ReadyToRun, prefer the R2R allocation helper; 'lookup' receives its entry point.
    if (opts.IsReadyToRun())
    {
        helper                                        = CORINFO_HELP_READYTORUN_NEW;
        CORINFO_LOOKUP_KIND* const pGenericLookupKind = nullptr;
        usingReadyToRunHelper =
            info.compCompHnd->getReadyToRunHelper(pResolvedToken, pGenericLookupKind, helper, &lookup);
    }
#endif

    if (!usingReadyToRunHelper)
    {
        if (opHandle == nullptr)
        {
            // We must be backing out of an inline.
            assert(compDonotInline());
            return nullptr;
        }
    }

    // getNewHelper is queried unconditionally because 'helperHasSideEffects' is needed in
    // both the R2R and non-R2R cases; only the helper id itself is discarded under R2R.
    bool            helperHasSideEffects;
    CorInfoHelpFunc helperTemp =
        info.compCompHnd->getNewHelper(pResolvedToken, info.compMethodHnd, &helperHasSideEffects);

    if (!usingReadyToRunHelper)
    {
        helper = helperTemp;
    }

    // TODO: ReadyToRun: When generic dictionary lookups are necessary, replace the lookup call
    // and the newfast call with a single call to a dynamic R2R cell that will:
    //      1) Load the context
    //      2) Perform the generic dictionary lookup and caching, and generate the appropriate stub
    //      3) Allocate and return the new object for boxing
    // Reason: performance (today, we'll always use the slow helper for the R2R generics case)

    GenTreeAllocObj* allocObj =
        gtNewAllocObjNode(helper, helperHasSideEffects, pResolvedToken->hClass, TYP_REF, opHandle);

#ifdef FEATURE_READYTORUN_COMPILER
    if (usingReadyToRunHelper)
    {
        allocObj->gtEntryPoint = lookup;
    }
#endif

    return allocObj;
}
6733
6734 /*****************************************************************************
6735  *
6736  *  Clones the given tree value and returns a copy of the given tree.
6737  *  If 'complexOK' is false, the cloning is only done provided the tree
6738  *     is not too complex (whatever that may mean);
6739  *  If 'complexOK' is true, we try slightly harder to clone the tree.
6740  *  In either case, NULL is returned if the tree cannot be cloned
6741  *
6742  *  Note that there is the function gtCloneExpr() which does a more
6743  *  complete job if you can't handle this function failing.
6744  */
6745
GenTree* Compiler::gtClone(GenTree* tree, bool complexOK)
{
    GenTree* copy;

    switch (tree->gtOper)
    {
        case GT_CNS_INT:

#if defined(LATE_DISASM)
            if (tree->IsIconHandle())
            {
                // Handle constants keep their handle kind flags and field sequence.
                copy = gtNewIconHandleNode(tree->gtIntCon.gtIconVal, tree->gtFlags, tree->gtIntCon.gtFieldSeq);
                copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
                copy->gtType                       = tree->gtType;
            }
            else
#endif
            {
                copy = new (this, GT_CNS_INT)
                    GenTreeIntCon(tree->gtType, tree->gtIntCon.gtIconVal, tree->gtIntCon.gtFieldSeq);
                copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
            }
            break;

        case GT_CNS_LNG:
            copy = gtNewLconNode(tree->gtLngCon.gtLconVal);
            break;

        case GT_LCL_VAR:
            // Remember that the LclVar node has been cloned. The flag will be set
            // on 'copy' as well.
            tree->gtFlags |= GTF_VAR_CLONED;
            copy = gtNewLclvNode(tree->gtLclVarCommon.gtLclNum, tree->gtType DEBUGARG(tree->gtLclVar.gtLclILoffs));
            break;

        case GT_LCL_FLD:
        case GT_LCL_FLD_ADDR:
            // Remember that the LclVar node has been cloned. The flag will be set
            // on 'copy' as well.
            tree->gtFlags |= GTF_VAR_CLONED;
            copy = new (this, tree->gtOper)
                GenTreeLclFld(tree->gtOper, tree->TypeGet(), tree->gtLclFld.gtLclNum, tree->gtLclFld.gtLclOffs);
            copy->gtLclFld.gtFieldSeq = tree->gtLclFld.gtFieldSeq;
            break;

        case GT_CLS_VAR:
            copy = new (this, GT_CLS_VAR)
                GenTreeClsVar(tree->gtType, tree->gtClsVar.gtClsVarHnd, tree->gtClsVar.gtFieldSeq);
            break;

        default:
            // Anything beyond the simple leaves above is only cloned when 'complexOK'.
            if (!complexOK)
            {
                return nullptr;
            }

            if (tree->gtOper == GT_FIELD)
            {
                GenTree* objp;

                // copied from line 9850

                // Clone the object expression (non-complex only); bail out if that fails.
                objp = nullptr;
                if (tree->gtField.gtFldObj)
                {
                    objp = gtClone(tree->gtField.gtFldObj, false);
                    if (!objp)
                    {
                        return objp;
                    }
                }

                copy = gtNewFieldRef(tree->TypeGet(), tree->gtField.gtFldHnd, objp, tree->gtField.gtFldOffset);
                copy->gtField.gtFldMayOverlap = tree->gtField.gtFldMayOverlap;
#ifdef FEATURE_READYTORUN_COMPILER
                copy->gtField.gtFieldLookup = tree->gtField.gtFieldLookup;
#endif
            }
            else if (tree->OperIs(GT_ADD, GT_SUB))
            {
                // ADD/SUB is cloned only when both operands are leaves.
                GenTree* op1 = tree->gtOp.gtOp1;
                GenTree* op2 = tree->gtOp.gtOp2;

                if (op1->OperIsLeaf() && op2->OperIsLeaf())
                {
                    op1 = gtClone(op1);
                    if (op1 == nullptr)
                    {
                        return nullptr;
                    }
                    op2 = gtClone(op2);
                    if (op2 == nullptr)
                    {
                        return nullptr;
                    }

                    copy = gtNewOperNode(tree->OperGet(), tree->TypeGet(), op1, op2);
                }
                else
                {
                    return nullptr;
                }
            }
            else if (tree->gtOper == GT_ADDR)
            {
                GenTree* op1 = gtClone(tree->gtOp.gtOp1);
                if (op1 == nullptr)
                {
                    return nullptr;
                }
                copy = gtNewOperNode(GT_ADDR, tree->TypeGet(), op1);
            }
            else
            {
                // Any other operator is considered too complex for this helper.
                return nullptr;
            }

            break;
    }

    // Copy the source node's flags, excluding the per-node bookkeeping bits.
    copy->gtFlags |= tree->gtFlags & ~GTF_NODE_MASK;
#if defined(DEBUG)
    copy->gtDebugFlags |= tree->gtDebugFlags & ~GTF_DEBUG_NODE_MASK;
#endif // defined(DEBUG)

    return copy;
}
6873
6874 //------------------------------------------------------------------------
6875 // gtCloneExpr: Create a copy of `tree`, adding flags `addFlags`, mapping
6876 //              local `varNum` to int constant `varVal` if it appears at
6877 //              the root, and mapping uses of local `deepVarNum` to constant
6878 //              `deepVarVal` if they occur beyond the root.
6879 //
6880 // Arguments:
6881 //    tree - GenTree to create a copy of
6882 //    addFlags - GTF_* flags to add to the copied tree nodes
6883 //    varNum - lclNum to replace at the root, or ~0 for no root replacement
6884 //    varVal - If replacing at root, replace local `varNum` with IntCns `varVal`
6885 //    deepVarNum - lclNum to replace uses of beyond the root, or ~0 for no replacement
6886 //    deepVarVal - If replacing beyond root, replace `deepVarNum` with IntCns `deepVarVal`
6887 //
6888 // Return Value:
6889 //    A copy of the given tree with the replacements and added flags specified.
6890 //
6891 // Notes:
6892 //    Top-level callers should generally call the overload that doesn't have
6893 //    the explicit `deepVarNum` and `deepVarVal` parameters; those are used in
6894 //    recursive invocations to avoid replacing defs.
6895
6896 GenTree* Compiler::gtCloneExpr(
6897     GenTree* tree, unsigned addFlags, unsigned varNum, int varVal, unsigned deepVarNum, int deepVarVal)
6898 {
6899     if (tree == nullptr)
6900     {
6901         return nullptr;
6902     }
6903
6904     /* Figure out what kind of a node we have */
6905
6906     genTreeOps oper = tree->OperGet();
6907     unsigned   kind = tree->OperKind();
6908     GenTree*   copy;
6909
6910     /* Is this a constant or leaf node? */
6911
6912     if (kind & (GTK_CONST | GTK_LEAF))
6913     {
6914         switch (oper)
6915         {
6916             case GT_CNS_INT:
6917
6918 #if defined(LATE_DISASM)
6919                 if (tree->IsIconHandle())
6920                 {
6921                     copy = gtNewIconHandleNode(tree->gtIntCon.gtIconVal, tree->gtFlags, tree->gtIntCon.gtFieldSeq);
6922                     copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
6923                     copy->gtType                       = tree->gtType;
6924                 }
6925                 else
6926 #endif
6927                 {
6928                     copy                               = gtNewIconNode(tree->gtIntCon.gtIconVal, tree->gtType);
6929                     copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
6930                     copy->gtIntCon.gtFieldSeq          = tree->gtIntCon.gtFieldSeq;
6931                 }
6932                 goto DONE;
6933
6934             case GT_CNS_LNG:
6935                 copy = gtNewLconNode(tree->gtLngCon.gtLconVal);
6936                 goto DONE;
6937
6938             case GT_CNS_DBL:
6939                 copy         = gtNewDconNode(tree->gtDblCon.gtDconVal);
6940                 copy->gtType = tree->gtType; // keep the same type
6941                 goto DONE;
6942
6943             case GT_CNS_STR:
6944                 copy = gtNewSconNode(tree->gtStrCon.gtSconCPX, tree->gtStrCon.gtScpHnd);
6945                 goto DONE;
6946
6947             case GT_LCL_VAR:
6948
6949                 if (tree->gtLclVarCommon.gtLclNum == varNum)
6950                 {
6951                     copy = gtNewIconNode(varVal, tree->gtType);
6952                     if (tree->gtFlags & GTF_VAR_ARR_INDEX)
6953                     {
6954                         copy->LabelIndex(this);
6955                     }
6956                 }
6957                 else
6958                 {
6959                     // Remember that the LclVar node has been cloned. The flag will
6960                     // be set on 'copy' as well.
6961                     tree->gtFlags |= GTF_VAR_CLONED;
6962                     copy = gtNewLclvNode(tree->gtLclVar.gtLclNum, tree->gtType DEBUGARG(tree->gtLclVar.gtLclILoffs));
6963                     copy->AsLclVarCommon()->SetSsaNum(tree->AsLclVarCommon()->GetSsaNum());
6964                 }
6965                 copy->gtFlags = tree->gtFlags;
6966                 goto DONE;
6967
6968             case GT_LCL_FLD:
6969                 if (tree->gtLclFld.gtLclNum == varNum)
6970                 {
6971                     IMPL_LIMITATION("replacing GT_LCL_FLD with a constant");
6972                 }
6973                 else
6974                 {
6975                     // Remember that the LclVar node has been cloned. The flag will
6976                     // be set on 'copy' as well.
6977                     tree->gtFlags |= GTF_VAR_CLONED;
6978                     copy = new (this, GT_LCL_FLD)
6979                         GenTreeLclFld(tree->TypeGet(), tree->gtLclFld.gtLclNum, tree->gtLclFld.gtLclOffs);
6980                     copy->gtLclFld.gtFieldSeq = tree->gtLclFld.gtFieldSeq;
6981                     copy->gtFlags             = tree->gtFlags;
6982                 }
6983                 goto DONE;
6984
6985             case GT_CLS_VAR:
6986                 copy = new (this, GT_CLS_VAR)
6987                     GenTreeClsVar(tree->TypeGet(), tree->gtClsVar.gtClsVarHnd, tree->gtClsVar.gtFieldSeq);
6988                 goto DONE;
6989
6990             case GT_RET_EXPR:
6991                 // GT_RET_EXPR is unique node, that contains a link to a gtInlineCandidate node,
6992                 // that is part of another statement. We cannot clone both here and cannot
6993                 // create another GT_RET_EXPR that points to the same gtInlineCandidate.
6994                 NO_WAY("Cloning of GT_RET_EXPR node not supported");
6995                 goto DONE;
6996
6997             case GT_MEMORYBARRIER:
6998                 copy = new (this, GT_MEMORYBARRIER) GenTree(GT_MEMORYBARRIER, TYP_VOID);
6999                 goto DONE;
7000
7001             case GT_ARGPLACE:
7002                 copy = gtNewArgPlaceHolderNode(tree->gtType, tree->gtArgPlace.gtArgPlaceClsHnd);
7003                 goto DONE;
7004
7005             case GT_FTN_ADDR:
7006                 copy = new (this, oper) GenTreeFptrVal(tree->gtType, tree->gtFptrVal.gtFptrMethod);
7007
7008 #ifdef FEATURE_READYTORUN_COMPILER
7009                 copy->gtFptrVal.gtEntryPoint = tree->gtFptrVal.gtEntryPoint;
7010 #endif
7011                 goto DONE;
7012
7013             case GT_CATCH_ARG:
7014             case GT_NO_OP:
7015             case GT_LABEL:
7016                 copy = new (this, oper) GenTree(oper, tree->gtType);
7017                 goto DONE;
7018
7019 #if !FEATURE_EH_FUNCLETS
7020             case GT_END_LFIN:
7021 #endif // !FEATURE_EH_FUNCLETS
7022             case GT_JMP:
7023                 copy = new (this, oper) GenTreeVal(oper, tree->gtType, tree->gtVal.gtVal1);
7024                 goto DONE;
7025
7026             default:
7027                 NO_WAY("Cloning of node not supported");
7028                 goto DONE;
7029         }
7030     }
7031
7032     /* Is it a 'simple' unary/binary operator? */
7033
7034     if (kind & GTK_SMPOP)
7035     {
7036         /* If necessary, make sure we allocate a "fat" tree node */
7037         CLANG_FORMAT_COMMENT_ANCHOR;
7038
7039         switch (oper)
7040         {
7041             /* These nodes sometimes get bashed to "fat" ones */
7042
7043             case GT_MUL:
7044             case GT_DIV:
7045             case GT_MOD:
7046
7047             case GT_UDIV:
7048             case GT_UMOD:
7049
7050                 //  In the implementation of gtNewLargeOperNode you have
7051                 //  to give an oper that will create a small node,
7052                 //  otherwise it asserts.
7053                 //
7054                 if (GenTree::s_gtNodeSizes[oper] == TREE_NODE_SZ_SMALL)
7055                 {
7056                     copy = gtNewLargeOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1,
7057                                               tree->OperIsBinary() ? tree->gtOp.gtOp2 : nullptr);
7058                 }
7059                 else // Always a large tree
7060                 {
7061                     if (tree->OperIsBinary())
7062                     {
7063                         copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2);
7064                     }
7065                     else
7066                     {
7067                         copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1);
7068                     }
7069                 }
7070                 break;
7071
7072             case GT_CAST:
7073                 copy =
7074                     new (this, LargeOpOpcode()) GenTreeCast(tree->TypeGet(), tree->gtCast.CastOp(), tree->IsUnsigned(),
7075                                                             tree->gtCast.gtCastType DEBUGARG(/*largeNode*/ TRUE));
7076                 break;
7077
7078             // The nodes below this are not bashed, so they can be allocated at their individual sizes.
7079
7080             case GT_LIST:
7081                 assert((tree->gtOp.gtOp2 == nullptr) || tree->gtOp.gtOp2->OperIsList());
7082                 copy             = new (this, GT_LIST) GenTreeArgList(tree->gtOp.gtOp1);
7083                 copy->gtOp.gtOp2 = tree->gtOp.gtOp2;
7084                 break;
7085
7086             case GT_FIELD_LIST:
7087                 copy = new (this, GT_FIELD_LIST) GenTreeFieldList(tree->gtOp.gtOp1, tree->AsFieldList()->gtFieldOffset,
7088                                                                   tree->AsFieldList()->gtFieldType, nullptr);
7089                 copy->gtOp.gtOp2 = tree->gtOp.gtOp2;
7090                 copy->gtFlags    = (copy->gtFlags & ~GTF_FIELD_LIST_HEAD) | (tree->gtFlags & GTF_FIELD_LIST_HEAD);
7091                 break;
7092
7093             case GT_INDEX:
7094             {
7095                 GenTreeIndex* asInd = tree->AsIndex();
7096                 copy                = new (this, GT_INDEX)
7097                     GenTreeIndex(asInd->TypeGet(), asInd->Arr(), asInd->Index(), asInd->gtIndElemSize);
7098                 copy->AsIndex()->gtStructElemClass = asInd->gtStructElemClass;
7099             }
7100             break;
7101
7102             case GT_INDEX_ADDR:
7103             {
7104                 GenTreeIndexAddr* asIndAddr = tree->AsIndexAddr();
7105
7106                 copy = new (this, GT_INDEX_ADDR)
7107                     GenTreeIndexAddr(asIndAddr->Arr(), asIndAddr->Index(), asIndAddr->gtElemType,
7108                                      asIndAddr->gtStructElemClass, asIndAddr->gtElemSize, asIndAddr->gtLenOffset,
7109                                      asIndAddr->gtElemOffset);
7110                 copy->AsIndexAddr()->gtIndRngFailBB = asIndAddr->gtIndRngFailBB;
7111             }
7112             break;
7113
7114             case GT_ALLOCOBJ:
7115             {
7116                 GenTreeAllocObj* asAllocObj = tree->AsAllocObj();
7117                 copy                        = new (this, GT_ALLOCOBJ)
7118                     GenTreeAllocObj(tree->TypeGet(), asAllocObj->gtNewHelper, asAllocObj->gtHelperHasSideEffects,
7119                                     asAllocObj->gtAllocObjClsHnd, asAllocObj->gtOp1);
7120             }
7121             break;
7122
7123             case GT_RUNTIMELOOKUP:
7124             {
7125                 GenTreeRuntimeLookup* asRuntimeLookup = tree->AsRuntimeLookup();
7126
7127                 copy = new (this, GT_RUNTIMELOOKUP)
7128                     GenTreeRuntimeLookup(asRuntimeLookup->gtHnd, asRuntimeLookup->gtHndType, asRuntimeLookup->gtOp1);
7129             }
7130             break;
7131
7132             case GT_ARR_LENGTH:
7133                 copy = gtNewArrLen(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtArrLen.ArrLenOffset());
7134                 break;
7135
7136             case GT_ARR_INDEX:
7137                 copy = new (this, GT_ARR_INDEX)
7138                     GenTreeArrIndex(tree->TypeGet(),
7139                                     gtCloneExpr(tree->gtArrIndex.ArrObj(), addFlags, deepVarNum, deepVarVal),
7140                                     gtCloneExpr(tree->gtArrIndex.IndexExpr(), addFlags, deepVarNum, deepVarVal),
7141                                     tree->gtArrIndex.gtCurrDim, tree->gtArrIndex.gtArrRank,
7142                                     tree->gtArrIndex.gtArrElemType);
7143                 break;
7144
7145             case GT_QMARK:
7146                 copy = new (this, GT_QMARK) GenTreeQmark(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2, this);
7147                 break;
7148
7149             case GT_OBJ:
7150                 copy = new (this, GT_OBJ)
7151                     GenTreeObj(tree->TypeGet(), tree->gtOp.gtOp1, tree->AsObj()->gtClass, tree->gtBlk.gtBlkSize);
7152                 copy->AsObj()->CopyGCInfo(tree->AsObj());
7153                 copy->gtBlk.gtBlkOpGcUnsafe = tree->gtBlk.gtBlkOpGcUnsafe;
7154                 break;
7155
7156             case GT_BLK:
7157                 copy = new (this, GT_BLK) GenTreeBlk(GT_BLK, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtBlk.gtBlkSize);
7158                 copy->gtBlk.gtBlkOpGcUnsafe = tree->gtBlk.gtBlkOpGcUnsafe;
7159                 break;
7160
7161             case GT_DYN_BLK:
7162                 copy = new (this, GT_DYN_BLK) GenTreeDynBlk(tree->gtOp.gtOp1, tree->gtDynBlk.gtDynamicSize);
7163                 copy->gtBlk.gtBlkOpGcUnsafe = tree->gtBlk.gtBlkOpGcUnsafe;
7164                 break;
7165
7166             case GT_BOX:
7167                 copy = new (this, GT_BOX)
7168                     GenTreeBox(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtBox.gtAsgStmtWhenInlinedBoxValue,
7169                                tree->gtBox.gtCopyStmtWhenInlinedBoxValue);
7170                 break;
7171
7172             case GT_INTRINSIC:
7173                 copy = new (this, GT_INTRINSIC)
7174                     GenTreeIntrinsic(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2,
7175                                      tree->gtIntrinsic.gtIntrinsicId, tree->gtIntrinsic.gtMethodHandle);
7176 #ifdef FEATURE_READYTORUN_COMPILER
7177                 copy->gtIntrinsic.gtEntryPoint = tree->gtIntrinsic.gtEntryPoint;
7178 #endif
7179                 break;
7180
7181             case GT_LEA:
7182             {
7183                 GenTreeAddrMode* addrModeOp = tree->AsAddrMode();
7184                 copy                        = new (this, GT_LEA)
7185                     GenTreeAddrMode(addrModeOp->TypeGet(), addrModeOp->Base(), addrModeOp->Index(), addrModeOp->gtScale,
7186                                     static_cast<unsigned>(addrModeOp->Offset()));
7187             }
7188             break;
7189
7190             case GT_COPY:
7191             case GT_RELOAD:
7192             {
7193                 copy = new (this, oper) GenTreeCopyOrReload(oper, tree->TypeGet(), tree->gtGetOp1());
7194             }
7195             break;
7196
7197 #ifdef FEATURE_SIMD
7198             case GT_SIMD:
7199             {
7200                 GenTreeSIMD* simdOp = tree->AsSIMD();
7201                 copy                = gtNewSIMDNode(simdOp->TypeGet(), simdOp->gtGetOp1(), simdOp->gtGetOp2IfPresent(),
7202                                      simdOp->gtSIMDIntrinsicID, simdOp->gtSIMDBaseType, simdOp->gtSIMDSize);
7203             }
7204             break;
7205 #endif
7206
7207 #ifdef FEATURE_HW_INTRINSICS
7208             case GT_HWIntrinsic:
7209             {
7210                 GenTreeHWIntrinsic* hwintrinsicOp = tree->AsHWIntrinsic();
7211                 copy                              = new (this, GT_HWIntrinsic)
7212                     GenTreeHWIntrinsic(hwintrinsicOp->TypeGet(), hwintrinsicOp->gtGetOp1(),
7213                                        hwintrinsicOp->gtGetOp2IfPresent(), hwintrinsicOp->gtHWIntrinsicId,
7214                                        hwintrinsicOp->gtSIMDBaseType, hwintrinsicOp->gtSIMDSize);
7215                 copy->AsHWIntrinsic()->gtIndexBaseType = hwintrinsicOp->gtIndexBaseType;
7216             }
7217             break;
7218 #endif
7219
7220             default:
7221                 assert(!GenTree::IsExOp(tree->OperKind()) && tree->OperIsSimple());
7222                 // We're in the SimpleOp case, so it's always unary or binary.
7223                 if (GenTree::OperIsUnary(tree->OperGet()))
7224                 {
7225                     copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, /*doSimplifications*/ false);
7226                 }
7227                 else
7228                 {
7229                     assert(GenTree::OperIsBinary(tree->OperGet()));
7230                     copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2);
7231                 }
7232                 break;
7233         }
7234
7235         // Some flags are conceptually part of the gtOper, and should be copied immediately.
7236         if (tree->gtOverflowEx())
7237         {
7238             copy->gtFlags |= GTF_OVERFLOW;
7239         }
7240
7241         if (tree->gtOp.gtOp1)
7242         {
7243             if (tree->gtOper == GT_ASG)
7244             {
7245                 // Don't replace varNum if it appears as the LHS of an assign.
7246                 copy->gtOp.gtOp1 = gtCloneExpr(tree->gtOp.gtOp1, addFlags, -1, 0, deepVarNum, deepVarVal);
7247             }
7248             else
7249             {
7250                 copy->gtOp.gtOp1 = gtCloneExpr(tree->gtOp.gtOp1, addFlags, deepVarNum, deepVarVal);
7251             }
7252         }
7253
7254         if (tree->gtGetOp2IfPresent())
7255         {
7256             copy->gtOp.gtOp2 = gtCloneExpr(tree->gtOp.gtOp2, addFlags, deepVarNum, deepVarVal);
7257         }
7258
7259         /* Flags */
7260         addFlags |= tree->gtFlags;
7261
7262         // Copy any node annotations, if necessary.
7263         switch (tree->gtOper)
7264         {
7265             case GT_STOREIND:
7266             case GT_IND:
7267             case GT_OBJ:
7268             case GT_STORE_OBJ:
7269             {
7270                 ArrayInfo arrInfo;
7271                 if (!tree->AsIndir()->gtOp1->OperIs(GT_INDEX_ADDR) && TryGetArrayInfo(tree->AsIndir(), &arrInfo))
7272                 {
7273                     GetArrayInfoMap()->Set(copy, arrInfo);
7274                 }
7275             }
7276             break;
7277
7278             default:
7279                 break;
7280         }
7281
7282 #ifdef DEBUG
7283         /* GTF_NODE_MASK should not be propagated from 'tree' to 'copy' */
7284         addFlags &= ~GTF_NODE_MASK;
7285 #endif
7286
7287         // Effects flags propagate upwards.
7288         if (copy->gtOp.gtOp1 != nullptr)
7289         {
7290             copy->gtFlags |= (copy->gtOp.gtOp1->gtFlags & GTF_ALL_EFFECT);
7291         }
7292         if (copy->gtGetOp2IfPresent() != nullptr)
7293         {
7294             copy->gtFlags |= (copy->gtGetOp2()->gtFlags & GTF_ALL_EFFECT);
7295         }
7296
7297         goto DONE;
7298     }
7299
7300     /* See what kind of a special operator we have here */
7301
7302     switch (oper)
7303     {
7304         case GT_STMT:
7305             copy = gtCloneExpr(tree->gtStmt.gtStmtExpr, addFlags, deepVarNum, deepVarVal);
7306             copy = gtNewStmt(copy, tree->gtStmt.gtStmtILoffsx);
7307             goto DONE;
7308
7309         case GT_CALL:
7310
7311             // We can't safely clone calls that have GT_RET_EXPRs via gtCloneExpr.
7312             // You must use gtCloneCandidateCall for these calls (and then do appropriate other fixup)
7313             if (tree->gtCall.IsInlineCandidate() || tree->gtCall.IsGuardedDevirtualizationCandidate())
7314             {
7315                 NO_WAY("Cloning of calls with associated GT_RET_EXPR nodes is not supported");
7316             }
7317
7318             copy = gtCloneExprCallHelper(tree->AsCall(), addFlags, deepVarNum, deepVarVal);
7319             break;
7320
7321         case GT_FIELD:
7322
7323             copy = gtNewFieldRef(tree->TypeGet(), tree->gtField.gtFldHnd, nullptr, tree->gtField.gtFldOffset);
7324
7325             copy->gtField.gtFldObj = tree->gtField.gtFldObj
7326                                          ? gtCloneExpr(tree->gtField.gtFldObj, addFlags, deepVarNum, deepVarVal)
7327                                          : nullptr;
7328             copy->gtField.gtFldMayOverlap = tree->gtField.gtFldMayOverlap;
7329 #ifdef FEATURE_READYTORUN_COMPILER
7330             copy->gtField.gtFieldLookup = tree->gtField.gtFieldLookup;
7331 #endif
7332
7333             break;
7334
7335         case GT_ARR_ELEM:
7336         {
7337             GenTree* inds[GT_ARR_MAX_RANK];
7338             for (unsigned dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
7339             {
7340                 inds[dim] = gtCloneExpr(tree->gtArrElem.gtArrInds[dim], addFlags, deepVarNum, deepVarVal);
7341             }
7342             copy = new (this, GT_ARR_ELEM)
7343                 GenTreeArrElem(tree->TypeGet(), gtCloneExpr(tree->gtArrElem.gtArrObj, addFlags, deepVarNum, deepVarVal),
7344                                tree->gtArrElem.gtArrRank, tree->gtArrElem.gtArrElemSize, tree->gtArrElem.gtArrElemType,
7345                                &inds[0]);
7346         }
7347         break;
7348
7349         case GT_ARR_OFFSET:
7350         {
7351             copy = new (this, GT_ARR_OFFSET)
7352                 GenTreeArrOffs(tree->TypeGet(), gtCloneExpr(tree->gtArrOffs.gtOffset, addFlags, deepVarNum, deepVarVal),
7353                                gtCloneExpr(tree->gtArrOffs.gtIndex, addFlags, deepVarNum, deepVarVal),
7354                                gtCloneExpr(tree->gtArrOffs.gtArrObj, addFlags, deepVarNum, deepVarVal),
7355                                tree->gtArrOffs.gtCurrDim, tree->gtArrOffs.gtArrRank, tree->gtArrOffs.gtArrElemType);
7356         }
7357         break;
7358
7359         case GT_CMPXCHG:
7360             copy = new (this, GT_CMPXCHG)
7361                 GenTreeCmpXchg(tree->TypeGet(),
7362                                gtCloneExpr(tree->gtCmpXchg.gtOpLocation, addFlags, deepVarNum, deepVarVal),
7363                                gtCloneExpr(tree->gtCmpXchg.gtOpValue, addFlags, deepVarNum, deepVarVal),
7364                                gtCloneExpr(tree->gtCmpXchg.gtOpComparand, addFlags, deepVarNum, deepVarVal));
7365             break;
7366
7367         case GT_ARR_BOUNDS_CHECK:
7368 #ifdef FEATURE_SIMD
7369         case GT_SIMD_CHK:
7370 #endif // FEATURE_SIMD
7371 #ifdef FEATURE_HW_INTRINSICS
7372         case GT_HW_INTRINSIC_CHK:
7373 #endif // FEATURE_HW_INTRINSICS
7374             copy = new (this, oper)
7375                 GenTreeBoundsChk(oper, tree->TypeGet(),
7376                                  gtCloneExpr(tree->gtBoundsChk.gtIndex, addFlags, deepVarNum, deepVarVal),
7377                                  gtCloneExpr(tree->gtBoundsChk.gtArrLen, addFlags, deepVarNum, deepVarVal),
7378                                  tree->gtBoundsChk.gtThrowKind);
7379             copy->gtBoundsChk.gtIndRngFailBB = tree->gtBoundsChk.gtIndRngFailBB;
7380             break;
7381
7382         case GT_STORE_DYN_BLK:
7383         case GT_DYN_BLK:
7384             copy = new (this, oper)
7385                 GenTreeDynBlk(gtCloneExpr(tree->gtDynBlk.Addr(), addFlags, deepVarNum, deepVarVal),
7386                               gtCloneExpr(tree->gtDynBlk.gtDynamicSize, addFlags, deepVarNum, deepVarVal));
7387             break;
7388
7389         default:
7390 #ifdef DEBUG
7391             gtDispTree(tree);
7392 #endif
7393             NO_WAY("unexpected operator");
7394     }
7395
7396 DONE:
7397
7398     // If it has a zero-offset field seq, copy annotation.
7399     if (tree->TypeGet() == TYP_BYREF)
7400     {
7401         FieldSeqNode* fldSeq = nullptr;
7402         if (GetZeroOffsetFieldMap()->Lookup(tree, &fldSeq))
7403         {
7404             GetZeroOffsetFieldMap()->Set(copy, fldSeq);
7405         }
7406     }
7407
7408     copy->gtVNPair = tree->gtVNPair; // A cloned tree gets the orginal's Value number pair
7409
7410     /* Compute the flags for the copied node. Note that we can do this only
7411        if we didnt gtFoldExpr(copy) */
7412
7413     if (copy->gtOper == oper)
7414     {
7415         addFlags |= tree->gtFlags;
7416
7417 #ifdef DEBUG
7418         /* GTF_NODE_MASK should not be propagated from 'tree' to 'copy' */
7419         addFlags &= ~GTF_NODE_MASK;
7420 #endif
7421         // Some other flags depend on the context of the expression, and should not be preserved.
7422         // For example, GTF_RELOP_QMARK:
7423         if (copy->OperKind() & GTK_RELOP)
7424         {
7425             addFlags &= ~GTF_RELOP_QMARK;
7426         }
7427         // On the other hand, if we're creating such a context, restore this flag.
7428         if (copy->OperGet() == GT_QMARK)
7429         {
7430             copy->gtOp.gtOp1->gtFlags |= GTF_RELOP_QMARK;
7431         }
7432
7433         copy->gtFlags |= addFlags;
7434
7435         // Update side effect flags since they may be different from the source side effect flags.
7436         // For example, we may have replaced some locals with constants and made indirections non-throwing.
7437         gtUpdateNodeSideEffects(copy);
7438     }
7439
7440     /* GTF_COLON_COND should be propagated from 'tree' to 'copy' */
7441     copy->gtFlags |= (tree->gtFlags & GTF_COLON_COND);
7442
7443 #if defined(DEBUG)
7444     // Non-node debug flags should be propagated from 'tree' to 'copy'
7445     copy->gtDebugFlags |= (tree->gtDebugFlags & ~GTF_DEBUG_NODE_MASK);
7446 #endif
7447
7448     /* Make sure to copy back fields that may have been initialized */
7449
7450     copy->CopyRawCosts(tree);
7451     copy->gtRsvdRegs = tree->gtRsvdRegs;
7452     copy->CopyReg(tree);
7453     return copy;
7454 }
7455
7456 //------------------------------------------------------------------------
7457 // gtCloneExprCallHelper: clone a call tree
7458 //
7459 // Notes:
7460 //    Do not invoke this method directly, instead call either gtCloneExpr
7461 //    or gtCloneCandidateCall, as appropriate.
7462 //
7463 // Arguments:
7464 //    tree - the call to clone
7465 //    addFlags - GTF_* flags to add to the copied tree nodes
7466 //    deepVarNum - lclNum to replace uses of beyond the root, or BAD_VAR_NUM for no replacement
7467 //    deepVarVal - If replacing beyond root, replace `deepVarNum` with IntCns `deepVarVal`
7468 //
7469 // Returns:
7470 //    Cloned copy of call and all subtrees.
7471
7472 GenTreeCall* Compiler::gtCloneExprCallHelper(GenTreeCall* tree, unsigned addFlags, unsigned deepVarNum, int deepVarVal)
7473 {
7474     GenTreeCall* copy = new (this, GT_CALL) GenTreeCall(tree->TypeGet());
7475
7476     copy->gtCallObjp = tree->gtCallObjp ? gtCloneExpr(tree->gtCallObjp, addFlags, deepVarNum, deepVarVal) : nullptr;
7477     copy->gtCallArgs =
7478         tree->gtCallArgs ? gtCloneExpr(tree->gtCallArgs, addFlags, deepVarNum, deepVarVal)->AsArgList() : nullptr;
7479     copy->gtCallMoreFlags = tree->gtCallMoreFlags;
7480     copy->gtCallLateArgs  = tree->gtCallLateArgs
7481                                ? gtCloneExpr(tree->gtCallLateArgs, addFlags, deepVarNum, deepVarVal)->AsArgList()
7482                                : nullptr;
7483
7484 #if !FEATURE_FIXED_OUT_ARGS
7485     copy->regArgList      = tree->regArgList;
7486     copy->regArgListCount = tree->regArgListCount;
7487 #endif
7488
7489     // The call sig comes from the EE and doesn't change throughout the compilation process, meaning
7490     // we only really need one physical copy of it. Therefore a shallow pointer copy will suffice.
7491     // (Note that this still holds even if the tree we are cloning was created by an inlinee compiler,
7492     // because the inlinee still uses the inliner's memory allocator anyway.)
7493     copy->callSig = tree->callSig;
7494
7495     copy->gtCallType    = tree->gtCallType;
7496     copy->gtReturnType  = tree->gtReturnType;
7497     copy->gtControlExpr = tree->gtControlExpr;
7498
7499     /* Copy the union */
7500     if (tree->gtCallType == CT_INDIRECT)
7501     {
7502         copy->gtCallCookie =
7503             tree->gtCallCookie ? gtCloneExpr(tree->gtCallCookie, addFlags, deepVarNum, deepVarVal) : nullptr;
7504         copy->gtCallAddr = tree->gtCallAddr ? gtCloneExpr(tree->gtCallAddr, addFlags, deepVarNum, deepVarVal) : nullptr;
7505     }
7506     else if (tree->IsVirtualStub())
7507     {
7508         copy->gtCallMethHnd      = tree->gtCallMethHnd;
7509         copy->gtStubCallStubAddr = tree->gtStubCallStubAddr;
7510     }
7511     else
7512     {
7513         copy->gtCallMethHnd         = tree->gtCallMethHnd;
7514         copy->gtInlineCandidateInfo = nullptr;
7515     }
7516
7517     if (tree->fgArgInfo)
7518     {
7519         // Create and initialize the fgArgInfo for our copy of the call tree
7520         copy->fgArgInfo = new (this, CMK_Unknown) fgArgInfo(copy, tree);
7521     }
7522     else
7523     {
7524         copy->fgArgInfo = nullptr;
7525     }
7526
7527     copy->gtRetClsHnd = tree->gtRetClsHnd;
7528
7529 #if FEATURE_MULTIREG_RET
7530     copy->gtReturnTypeDesc = tree->gtReturnTypeDesc;
7531 #endif
7532
7533 #ifdef FEATURE_READYTORUN_COMPILER
7534     copy->setEntryPoint(tree->gtEntryPoint);
7535 #endif
7536
7537 #if defined(DEBUG) || defined(INLINE_DATA)
7538     copy->gtInlineObservation = tree->gtInlineObservation;
7539     copy->gtRawILOffset       = tree->gtCall.gtRawILOffset;
7540 #endif
7541
7542     copy->CopyOtherRegFlags(tree);
7543
7544     return copy;
7545 }
7546
7547 //------------------------------------------------------------------------
7548 // gtCloneCandidateCall: clone a call that is an inline or guarded
7549 //    devirtualization candidate (~ any call that can have a GT_RET_EXPR)
7550 //
7551 // Notes:
7552 //    If the call really is a candidate, the caller must take additional steps
7553 //    after cloning to re-establish candidate info and the relationship between
7554 //    the candidate and any associated GT_RET_EXPR.
7555 //
7556 // Arguments:
7557 //    call - the call to clone
7558 //
7559 // Returns:
7560 //    Cloned copy of call and all subtrees.
7561
7562 GenTreeCall* Compiler::gtCloneCandidateCall(GenTreeCall* call)
7563 {
7564     assert(call->IsInlineCandidate() || call->IsGuardedDevirtualizationCandidate());
7565
7566     GenTreeCall* result = gtCloneExprCallHelper(call);
7567
7568     // There is some common post-processing in gtCloneExpr that we reproduce
7569     // here, for the fields that make sense for candidate calls.
7570     result->gtFlags |= call->gtFlags;
7571
7572 #if defined(DEBUG)
7573     result->gtDebugFlags |= (call->gtDebugFlags & ~GTF_DEBUG_NODE_MASK);
7574 #endif
7575
7576     result->CopyReg(call);
7577
7578     return result;
7579 }
7580
7581 //------------------------------------------------------------------------
7582 // gtReplaceTree: Replace a tree with a new tree.
7583 //
7584 // Arguments:
7585 //    stmt            - The top-level root stmt of the tree being replaced.
7586 //                      Must not be null.
7587 //    tree            - The tree being replaced. Must not be null.
7588 //    replacementTree - The replacement tree. Must not be null.
7589 //
7590 // Return Value:
7591 //    The tree node that replaces the old tree.
7592 //
7593 // Assumptions:
7594 //    The sequencing of the stmt has been done.
7595 //
7596 // Notes:
7597 //    The caller must ensure that the original statement has been sequenced,
7598 //    and the side effect flags are updated on the statement nodes,
7599 //    but this method will sequence 'replacementTree', and insert it into the
7600 //    proper place in the statement sequence.
7601
7602 GenTree* Compiler::gtReplaceTree(GenTree* stmt, GenTree* tree, GenTree* replacementTree)
7603 {
7604     assert(fgStmtListThreaded);
7605     assert(tree != nullptr);
7606     assert(stmt != nullptr);
7607     assert(replacementTree != nullptr);
7608
7609     GenTree** treePtr    = nullptr;
7610     GenTree*  treeParent = tree->gtGetParent(&treePtr);
7611
7612     assert(treeParent != nullptr || tree == stmt->gtStmt.gtStmtExpr);
7613
7614     if (treePtr == nullptr)
7615     {
7616         // Replace the stmt expr and rebuild the linear order for "stmt".
7617         assert(treeParent == nullptr);
7618         assert(fgOrder != FGOrderLinear);
7619         stmt->gtStmt.gtStmtExpr = tree;
7620         fgSetStmtSeq(stmt);
7621     }
7622     else
7623     {
7624         assert(treeParent != nullptr);
7625
7626         // Check to see if the node to be replaced is a call argument and if so,
7627         // set `treeParent` to the call node.
7628         GenTree* cursor = treeParent;
7629         while ((cursor != nullptr) && (cursor->OperGet() == GT_LIST))
7630         {
7631             cursor = cursor->gtNext;
7632         }
7633
7634         if ((cursor != nullptr) && (cursor->OperGet() == GT_CALL))
7635         {
7636             treeParent = cursor;
7637         }
7638
7639 #ifdef DEBUG
7640         GenTree** useEdge;
7641         assert(treeParent->TryGetUse(tree, &useEdge));
7642         assert(useEdge == treePtr);
7643 #endif // DEBUG
7644
7645         GenTree* treeFirstNode = fgGetFirstNode(tree);
7646         GenTree* treeLastNode  = tree;
7647         GenTree* treePrevNode  = treeFirstNode->gtPrev;
7648         GenTree* treeNextNode  = treeLastNode->gtNext;
7649
7650         treeParent->ReplaceOperand(treePtr, replacementTree);
7651
7652         // Build the linear order for "replacementTree".
7653         fgSetTreeSeq(replacementTree, treePrevNode);
7654
7655         // Restore linear-order Prev and Next for "replacementTree".
7656         if (treePrevNode != nullptr)
7657         {
7658             treeFirstNode         = fgGetFirstNode(replacementTree);
7659             treeFirstNode->gtPrev = treePrevNode;
7660             treePrevNode->gtNext  = treeFirstNode;
7661         }
7662         else
7663         {
7664             // Update the linear oder start of "stmt" if treeFirstNode
7665             // appears to have replaced the original first node.
7666             assert(treeFirstNode == stmt->gtStmt.gtStmtList);
7667             stmt->gtStmt.gtStmtList = fgGetFirstNode(replacementTree);
7668         }
7669
7670         if (treeNextNode != nullptr)
7671         {
7672             treeLastNode         = replacementTree;
7673             treeLastNode->gtNext = treeNextNode;
7674             treeNextNode->gtPrev = treeLastNode;
7675         }
7676     }
7677
7678     return replacementTree;
7679 }
7680
7681 //------------------------------------------------------------------------
7682 // gtUpdateSideEffects: Update the side effects of a tree and its ancestors
7683 //
7684 // Arguments:
7685 //    stmt            - The tree's statement
7686 //    tree            - Tree to update the side effects for
7687 //
7688 // Note: If tree's order hasn't been established, the method updates side effect
7689 //       flags on all statement's nodes.
7690
7691 void Compiler::gtUpdateSideEffects(GenTree* stmt, GenTree* tree)
7692 {
7693     if (fgStmtListThreaded)
7694     {
7695         gtUpdateTreeAncestorsSideEffects(tree);
7696     }
7697     else
7698     {
7699         gtUpdateStmtSideEffects(stmt);
7700     }
7701 }
7702
7703 //------------------------------------------------------------------------
7704 // gtUpdateTreeAncestorsSideEffects: Update the side effects of a tree and its ancestors
7705 //                                   when statement order has been established.
7706 //
7707 // Arguments:
7708 //    tree            - Tree to update the side effects for
7709
7710 void Compiler::gtUpdateTreeAncestorsSideEffects(GenTree* tree)
7711 {
7712     assert(fgStmtListThreaded);
7713     while (tree != nullptr)
7714     {
7715         gtUpdateNodeSideEffects(tree);
7716         tree = tree->gtGetParent(nullptr);
7717     }
7718 }
7719
7720 //------------------------------------------------------------------------
7721 // gtUpdateStmtSideEffects: Update the side effects for statement tree nodes.
7722 //
7723 // Arguments:
7724 //    stmt            - The statement to update side effects on
7725
7726 void Compiler::gtUpdateStmtSideEffects(GenTree* stmt)
7727 {
7728     fgWalkTree(&stmt->gtStmt.gtStmtExpr, fgUpdateSideEffectsPre, fgUpdateSideEffectsPost);
7729 }
7730
7731 //------------------------------------------------------------------------
7732 // gtUpdateNodeOperSideEffects: Update the side effects based on the node operation.
7733 //
7734 // Arguments:
7735 //    tree            - Tree to update the side effects on
7736 //
7737 // Notes:
7738 //    This method currently only updates GTF_EXCEPT, GTF_ASG, and GTF_CALL flags.
7739 //    The other side effect flags may remain unnecessarily (conservatively) set.
7740 //    The caller of this method is expected to update the flags based on the children's flags.
7741
7742 void Compiler::gtUpdateNodeOperSideEffects(GenTree* tree)
7743 {
7744     if (tree->OperMayThrow(this))
7745     {
7746         tree->gtFlags |= GTF_EXCEPT;
7747     }
7748     else
7749     {
7750         tree->gtFlags &= ~GTF_EXCEPT;
7751         if (tree->OperIsIndirOrArrLength())
7752         {
7753             tree->gtFlags |= GTF_IND_NONFAULTING;
7754         }
7755     }
7756
7757     if (tree->OperRequiresAsgFlag())
7758     {
7759         tree->gtFlags |= GTF_ASG;
7760     }
7761     else
7762     {
7763         tree->gtFlags &= ~GTF_ASG;
7764     }
7765
7766     if (tree->OperRequiresCallFlag(this))
7767     {
7768         tree->gtFlags |= GTF_CALL;
7769     }
7770     else
7771     {
7772         tree->gtFlags &= ~GTF_CALL;
7773     }
7774 }
7775
7776 //------------------------------------------------------------------------
7777 // gtUpdateNodeSideEffects: Update the side effects based on the node operation and
7778 //                          children's side efects.
7779 //
7780 // Arguments:
7781 //    tree            - Tree to update the side effects on
7782 //
7783 // Notes:
7784 //    This method currently only updates GTF_EXCEPT and GTF_ASG flags. The other side effect
7785 //    flags may remain unnecessarily (conservatively) set.
7786
7787 void Compiler::gtUpdateNodeSideEffects(GenTree* tree)
7788 {
7789     gtUpdateNodeOperSideEffects(tree);
7790     unsigned nChildren = tree->NumChildren();
7791     for (unsigned childNum = 0; childNum < nChildren; childNum++)
7792     {
7793         GenTree* child = tree->GetChild(childNum);
7794         if (child != nullptr)
7795         {
7796             tree->gtFlags |= (child->gtFlags & GTF_ALL_EFFECT);
7797         }
7798     }
7799 }
7800
7801 //------------------------------------------------------------------------
7802 // fgUpdateSideEffectsPre: Update the side effects based on the tree operation.
7803 //
7804 // Arguments:
7805 //    pTree            - Pointer to the tree to update the side effects
7806 //    fgWalkPre        - Walk data
7807 //
7808 // Notes:
7809 //    This method currently only updates GTF_EXCEPT and GTF_ASG flags. The other side effect
7810 //    flags may remain unnecessarily (conservatively) set.
7811
7812 Compiler::fgWalkResult Compiler::fgUpdateSideEffectsPre(GenTree** pTree, fgWalkData* fgWalkPre)
7813 {
7814     fgWalkPre->compiler->gtUpdateNodeOperSideEffects(*pTree);
7815
7816     return WALK_CONTINUE;
7817 }
7818
7819 //------------------------------------------------------------------------
7820 // fgUpdateSideEffectsPost: Update the side effects of the parent based on the tree's flags.
7821 //
7822 // Arguments:
7823 //    pTree            - Pointer to the tree
7824 //    fgWalkPost       - Walk data
7825 //
7826 // Notes:
7827 //    The routine is used for updating the stale side effect flags for ancestor
7828 //    nodes starting from treeParent up to the top-level stmt expr.
7829
7830 Compiler::fgWalkResult Compiler::fgUpdateSideEffectsPost(GenTree** pTree, fgWalkData* fgWalkPost)
7831 {
7832     GenTree* tree   = *pTree;
7833     GenTree* parent = fgWalkPost->parent;
7834     if (parent != nullptr)
7835     {
7836         parent->gtFlags |= (tree->gtFlags & GTF_ALL_EFFECT);
7837     }
7838     return WALK_CONTINUE;
7839 }
7840
7841 /*****************************************************************************
7842  *
7843  *  Compares two trees and returns true when both trees are the same.
7844  *  Instead of fully comparing the two trees this method can just return false.
7845  *  Thus callers should not assume that the trees are different when false is returned.
7846  *  Only when true is returned can the caller perform code optimizations.
7847  *  The current implementation only compares a limited set of LEAF/CONST node
7848  *  and returns false for all othere trees.
7849  */
7850 bool Compiler::gtCompareTree(GenTree* op1, GenTree* op2)
7851 {
7852     /* Make sure that both trees are of the same GT node kind */
7853     if (op1->OperGet() != op2->OperGet())
7854     {
7855         return false;
7856     }
7857
7858     /* Make sure that both trees are returning the same type */
7859     if (op1->gtType != op2->gtType)
7860     {
7861         return false;
7862     }
7863
7864     /* Figure out what kind of a node we have */
7865
7866     genTreeOps oper = op1->OperGet();
7867     unsigned   kind = op1->OperKind();
7868
7869     /* Is this a constant or leaf node? */
7870
7871     if (kind & (GTK_CONST | GTK_LEAF))
7872     {
7873         switch (oper)
7874         {
7875             case GT_CNS_INT:
7876                 if ((op1->gtIntCon.gtIconVal == op2->gtIntCon.gtIconVal) && GenTree::SameIconHandleFlag(op1, op2))
7877                 {
7878                     return true;
7879                 }
7880                 break;
7881
7882             case GT_CNS_LNG:
7883                 if (op1->gtLngCon.gtLconVal == op2->gtLngCon.gtLconVal)
7884                 {
7885                     return true;
7886                 }
7887                 break;
7888
7889             case GT_CNS_STR:
7890                 if (op1->gtStrCon.gtSconCPX == op2->gtStrCon.gtSconCPX)
7891                 {
7892                     return true;
7893                 }
7894                 break;
7895
7896             case GT_LCL_VAR:
7897                 if (op1->gtLclVarCommon.gtLclNum == op2->gtLclVarCommon.gtLclNum)
7898                 {
7899                     return true;
7900                 }
7901                 break;
7902
7903             case GT_CLS_VAR:
7904                 if (op1->gtClsVar.gtClsVarHnd == op2->gtClsVar.gtClsVarHnd)
7905                 {
7906                     return true;
7907                 }
7908                 break;
7909
7910             default:
7911                 // we return false for these unhandled 'oper' kinds
7912                 break;
7913         }
7914     }
7915     return false;
7916 }
7917
//------------------------------------------------------------------------
// gtGetThisArg: Return the tree for the 'this' pointer argument of a call.
//
// Arguments:
//    call - The call to inspect
//
// Return Value:
//    The 'this' argument node, or nullptr if the call has none.
//
GenTree* Compiler::gtGetThisArg(GenTreeCall* call)
{
    if (call->gtCallObjp != nullptr)
    {
        // If 'this' has not been replaced by a placeholder and moved to the
        // late-arg list, it can be returned directly.
        if (call->gtCallObjp->gtOper != GT_NOP && call->gtCallObjp->gtOper != GT_ASG)
        {
            if (!(call->gtCallObjp->gtFlags & GTF_LATE_ARG))
            {
                return call->gtCallObjp;
            }
        }

        if (call->gtCallLateArgs)
        {
            // 'this' is always argument number 0; look it up in the arg table.
            regNumber      thisReg         = REG_ARG_0;
            unsigned       argNum          = 0;
            fgArgTabEntry* thisArgTabEntry = gtArgEntryByArgNum(call, argNum);
            GenTree*       result          = thisArgTabEntry->node;

#if !FEATURE_FIXED_OUT_ARGS
            // Cross-check the arg-table result against the late-arg list by
            // finding the late arg assigned to the 'this' register.
            GenTree* lateArgs = call->gtCallLateArgs;
            regList  list     = call->regArgList;
            int      index    = 0;
            while (lateArgs != NULL)
            {
                assert(lateArgs->gtOper == GT_LIST);
                assert(index < call->regArgListCount);
                regNumber curArgReg = list[index];
                if (curArgReg == thisReg)
                {
                    // Assertion propagation may have replaced the node in the
                    // current statement; prefer the late-arg list's node then.
                    if (optAssertionPropagatedCurrentStmt)
                        result = lateArgs->gtOp.gtOp1;

                    assert(result == lateArgs->gtOp.gtOp1);
                }

                lateArgs = lateArgs->gtOp.gtOp2;
                index++;
            }
#endif
            return result;
        }
    }
    return nullptr;
}
7963
7964 bool GenTree::gtSetFlags() const
7965 {
7966     //
7967     // When FEATURE_SET_FLAGS (_TARGET_ARM_) is active the method returns true
7968     //    when the gtFlags has the flag GTF_SET_FLAGS set
7969     // otherwise the architecture will be have instructions that typically set
7970     //    the flags and this method will return true.
7971     //
7972     //    Exceptions: GT_IND (load/store) is not allowed to set the flags
7973     //                and on XARCH the GT_MUL/GT_DIV and all overflow instructions
7974     //                do not set the condition flags
7975     //
7976     // Precondition we have a GTK_SMPOP
7977     //
7978     if (!varTypeIsIntegralOrI(TypeGet()) && (TypeGet() != TYP_VOID))
7979     {
7980         return false;
7981     }
7982
7983     if (((gtFlags & GTF_SET_FLAGS) != 0) && (gtOper != GT_IND))
7984     {
7985         // GTF_SET_FLAGS is not valid on GT_IND and is overlaid with GTF_NONFAULTING_IND
7986         return true;
7987     }
7988     else
7989     {
7990         return false;
7991     }
7992 }
7993
//------------------------------------------------------------------------
// gtRequestSetFlags: Try to mark this node so that codegen sets the CPU
//    condition flags as a side effect of computing it.
//
// Return Value:
//    true if GTF_SET_FLAGS was set on this node (codegen must then honor it);
//    false otherwise. Always false when FEATURE_SET_FLAGS is not defined.
//
bool GenTree::gtRequestSetFlags()
{
    bool result = false;

#if FEATURE_SET_FLAGS
    // This method is a Nop unless FEATURE_SET_FLAGS is defined

    // In order to set GTF_SET_FLAGS
    //              we must have a GTK_SMPOP
    //          and we have a integer or machine size type (not floating point or TYP_LONG on 32-bit)
    //
    if (!OperIsSimple())
        return false;

    if (!varTypeIsIntegralOrI(TypeGet()))
        return false;

    switch (gtOper)
    {
        case GT_IND:
        case GT_ARR_LENGTH:
            // These will turn into simple load from memory instructions
            // and we can't force the setting of the flags on load from memory
            break;

        case GT_MUL:
        case GT_DIV:
            // These instructions don't set the flags (on x86/x64)
            //
            break;

        default:
            // Otherwise we can set the flags for this gtOper
            // and codegen must set the condition flags.
            //
            gtFlags |= GTF_SET_FLAGS;
            result = true;
            break;
    }
#endif // FEATURE_SET_FLAGS

    // Codegen for this tree must set the condition flags if
    // this method returns true.
    //
    return result;
}
8040
//------------------------------------------------------------------------
// NumChildren: Return the number of (non-null) children of this node.
//
// Return Value:
//    The child count; indices [0, NumChildren()) are valid for GetChild().
//
unsigned GenTree::NumChildren()
{
    if (OperIsConst() || OperIsLeaf())
    {
        return 0;
    }
    else if (OperIsUnary())
    {
        // These unary opers legally have a null op1 (see NullOp1Legal).
        if (OperGet() == GT_NOP || OperGet() == GT_RETURN || OperGet() == GT_RETFILT)
        {
            if (gtOp.gtOp1 == nullptr)
            {
                return 0;
            }
            else
            {
                return 1;
            }
        }
        else
        {
            return 1;
        }
    }
    else if (OperIsBinary())
    {
        // All binary operators except LEA have at least one arg; the second arg may sometimes be null, however.
        if (OperGet() == GT_LEA)
        {
            unsigned childCount = 0;
            if (gtOp.gtOp1 != nullptr)
            {
                childCount++;
            }
            if (gtOp.gtOp2 != nullptr)
            {
                childCount++;
            }
            return childCount;
        }
#ifdef FEATURE_HW_INTRINSICS
        // GT_HWIntrinsic nodes require special handling: op1 may be null.
        if (OperGet() == GT_HWIntrinsic)
        {
            if (gtOp.gtOp1 == nullptr)
            {
                return 0;
            }
        }
#endif
        assert(gtOp.gtOp1 != nullptr);
        if (gtOp.gtOp2 == nullptr)
        {
            return 1;
        }
        else
        {
            return 2;
        }
    }
    else
    {
        // Special (non-unary, non-binary) node kinds.
        switch (OperGet())
        {
            case GT_CMPXCHG:
                return 3;

            case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
            case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
            case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
                return 2;

            case GT_FIELD:
            case GT_STMT:
                return 1;

            case GT_ARR_ELEM:
                // The array object plus one index per rank.
                return 1 + AsArrElem()->gtArrRank;

            case GT_DYN_BLK:
                return 2;

            case GT_ARR_OFFSET:
            case GT_STORE_DYN_BLK:
                return 3;

            case GT_CALL:
            {
                // Count only the non-null call components (see GetChild for
                // the matching "virtual skip" logic).
                GenTreeCall* call = AsCall();
                unsigned     res  = 0; // arg list(s) (including late args).
                if (call->gtCallObjp != nullptr)
                {
                    res++; // Add objp?
                }
                if (call->gtCallArgs != nullptr)
                {
                    res++; // Add args?
                }
                if (call->gtCallLateArgs != nullptr)
                {
                    res++; // Add late args?
                }
                if (call->gtControlExpr != nullptr)
                {
                    res++;
                }

                if (call->gtCallType == CT_INDIRECT)
                {
                    if (call->gtCallCookie != nullptr)
                    {
                        res++;
                    }
                    if (call->gtCallAddr != nullptr)
                    {
                        res++;
                    }
                }
                return res;
            }
            case GT_NONE:
                return 0;
            default:
                unreached();
        }
    }
}
8173
//------------------------------------------------------------------------
// GetChild: Return the childNum'th child of this node.
//
// Arguments:
//    childNum - The child index; must be less than NumChildren().
//
// Return Value:
//    The requested child node.
//
GenTree* GenTree::GetChild(unsigned childNum)
{
    assert(childNum < NumChildren()); // Precondition.
    assert(NumChildren() <= MAX_CHILDREN);
    assert(!(OperIsConst() || OperIsLeaf()));
    if (OperIsUnary())
    {
        return AsUnOp()->gtOp1;
    }
    // Special case for assignment of dynamic block.
    // This code is here to duplicate the former case where the size may be evaluated prior to the
    // source and destination addresses. In order to do this, we treat the size as a child of the
    // assignment.
    // TODO-1stClassStructs: Revisit the need to duplicate former behavior, so that we can remove
    // these special cases.
    if ((OperGet() == GT_ASG) && (gtOp.gtOp1->OperGet() == GT_DYN_BLK) && (childNum == 2))
    {
        return gtOp.gtOp1->AsDynBlk()->gtDynamicSize;
    }
    else if (OperIsBinary())
    {
        if (OperIsAddrMode())
        {
            // If this is the first (0th) child, only return op1 if it is non-null
            // Otherwise, we return gtOp2.
            if (childNum == 0 && AsOp()->gtOp1 != nullptr)
            {
                return AsOp()->gtOp1;
            }
            return AsOp()->gtOp2;
        }
        // TODO-Cleanup: Consider handling ReverseOps here, and then we wouldn't have to handle it in
        // fgGetFirstNode().  However, it seems that it causes loop hoisting behavior to change.
        if (childNum == 0)
        {
            return AsOp()->gtOp1;
        }
        else
        {
            return AsOp()->gtOp2;
        }
    }
    else
    {
        // Special (non-unary, non-binary) node kinds.
        switch (OperGet())
        {
            case GT_CMPXCHG:
                switch (childNum)
                {
                    case 0:
                        return AsCmpXchg()->gtOpLocation;
                    case 1:
                        return AsCmpXchg()->gtOpValue;
                    case 2:
                        return AsCmpXchg()->gtOpComparand;
                    default:
                        unreached();
                }
            case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
            case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
            case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
                switch (childNum)
                {
                    case 0:
                        return AsBoundsChk()->gtIndex;
                    case 1:
                        return AsBoundsChk()->gtArrLen;
                    default:
                        unreached();
                }

            case GT_STORE_DYN_BLK:
                switch (childNum)
                {
                    case 0:
                        return AsDynBlk()->Addr();
                    case 1:
                        return AsDynBlk()->Data();
                    case 2:
                        return AsDynBlk()->gtDynamicSize;
                    default:
                        unreached();
                }
            case GT_DYN_BLK:
                // Child order depends on whether the size is evaluated first.
                switch (childNum)
                {
                    case 0:
                        return AsDynBlk()->gtEvalSizeFirst ? AsDynBlk()->gtDynamicSize : AsDynBlk()->Addr();
                    case 1:
                        return AsDynBlk()->gtEvalSizeFirst ? AsDynBlk()->Addr() : AsDynBlk()->gtDynamicSize;
                    default:
                        unreached();
                }

            case GT_FIELD:
                return AsField()->gtFldObj;

            case GT_STMT:
                return AsStmt()->gtStmtExpr;

            case GT_ARR_ELEM:
                // Child 0 is the array object; children 1..rank are the indices.
                if (childNum == 0)
                {
                    return AsArrElem()->gtArrObj;
                }
                else
                {
                    return AsArrElem()->gtArrInds[childNum - 1];
                }

            case GT_ARR_OFFSET:
                switch (childNum)
                {
                    case 0:
                        return AsArrOffs()->gtOffset;
                    case 1:
                        return AsArrOffs()->gtIndex;
                    case 2:
                        return AsArrOffs()->gtArrObj;
                    default:
                        unreached();
                }

            case GT_CALL:
            {
                // The if chain below assumes that all possible children are non-null.
                // If some are null, "virtually skip them."
                // If there isn't "virtually skip it."
                GenTreeCall* call = AsCall();

                if (call->gtCallObjp == nullptr)
                {
                    childNum++;
                }
                if (childNum >= 1 && call->gtCallArgs == nullptr)
                {
                    childNum++;
                }
                if (childNum >= 2 && call->gtCallLateArgs == nullptr)
                {
                    childNum++;
                }
                if (childNum >= 3 && call->gtControlExpr == nullptr)
                {
                    childNum++;
                }
                if (call->gtCallType == CT_INDIRECT)
                {
                    if (childNum >= 4 && call->gtCallCookie == nullptr)
                    {
                        childNum++;
                    }
                }

                if (childNum == 0)
                {
                    return call->gtCallObjp;
                }
                else if (childNum == 1)
                {
                    return call->gtCallArgs;
                }
                else if (childNum == 2)
                {
                    return call->gtCallLateArgs;
                }
                else if (childNum == 3)
                {
                    return call->gtControlExpr;
                }
                else
                {
                    assert(call->gtCallType == CT_INDIRECT);
                    if (childNum == 4)
                    {
                        return call->gtCallCookie;
                    }
                    else
                    {
                        assert(childNum == 5);
                        return call->gtCallAddr;
                    }
                }
            }
            case GT_NONE:
                unreached();
            default:
                unreached();
        }
    }
}
8370
//------------------------------------------------------------------------
// GenTreeUseEdgeIterator: Constructs an "end" iterator (m_state == -1).
//
GenTreeUseEdgeIterator::GenTreeUseEdgeIterator()
    : m_advance(nullptr), m_node(nullptr), m_edge(nullptr), m_argList(nullptr), m_state(-1)
{
}
8375
//------------------------------------------------------------------------
// GenTreeUseEdgeIterator: Constructs an iterator positioned at the first use
//    edge of the given node. For multi-operand nodes, m_advance is set to the
//    member function that produces each subsequent edge; a state of -1 marks
//    the end iterator.
//
// Arguments:
//    node - The node whose use edges are iterated; must be non-null.
//
GenTreeUseEdgeIterator::GenTreeUseEdgeIterator(GenTree* node)
    : m_advance(nullptr), m_node(node), m_edge(nullptr), m_argList(nullptr), m_state(0)
{
    assert(m_node != nullptr);

    // NOTE: the switch statement below must be updated when introducing new nodes.

    switch (m_node->OperGet())
    {
        // Leaf nodes
        case GT_LCL_VAR:
        case GT_LCL_FLD:
        case GT_LCL_VAR_ADDR:
        case GT_LCL_FLD_ADDR:
        case GT_CATCH_ARG:
        case GT_LABEL:
        case GT_FTN_ADDR:
        case GT_RET_EXPR:
        case GT_CNS_INT:
        case GT_CNS_LNG:
        case GT_CNS_DBL:
        case GT_CNS_STR:
        case GT_MEMORYBARRIER:
        case GT_JMP:
        case GT_JCC:
        case GT_SETCC:
        case GT_NO_OP:
        case GT_START_NONGC:
        case GT_START_PREEMPTGC:
        case GT_PROF_HOOK:
#if !FEATURE_EH_FUNCLETS
        case GT_END_LFIN:
#endif // !FEATURE_EH_FUNCLETS
        case GT_PHI_ARG:
        case GT_JMPTABLE:
        case GT_CLS_VAR:
        case GT_CLS_VAR_ADDR:
        case GT_ARGPLACE:
        case GT_PHYSREG:
        case GT_EMITNOP:
        case GT_PINVOKE_PROLOG:
        case GT_PINVOKE_EPILOG:
        case GT_IL_OFFSET:
            // No operands: this is immediately the end iterator.
            m_state = -1;
            return;

        // Standard unary operators
        case GT_STORE_LCL_VAR:
        case GT_STORE_LCL_FLD:
        case GT_NOT:
        case GT_NEG:
        case GT_COPY:
        case GT_RELOAD:
        case GT_ARR_LENGTH:
        case GT_CAST:
        case GT_BITCAST:
        case GT_CKFINITE:
        case GT_LCLHEAP:
        case GT_ADDR:
        case GT_IND:
        case GT_OBJ:
        case GT_BLK:
        case GT_BOX:
        case GT_ALLOCOBJ:
        case GT_RUNTIMELOOKUP:
        case GT_INIT_VAL:
        case GT_JTRUE:
        case GT_SWITCH:
        case GT_NULLCHECK:
        case GT_PUTARG_REG:
        case GT_PUTARG_STK:
        case GT_BSWAP:
        case GT_BSWAP16:
#if FEATURE_ARG_SPLIT
        case GT_PUTARG_SPLIT:
#endif // FEATURE_ARG_SPLIT
        case GT_RETURNTRAP:
            m_edge = &m_node->AsUnOp()->gtOp1;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::Terminate;
            return;

        // Unary operators with an optional operand
        case GT_NOP:
        case GT_RETURN:
        case GT_RETFILT:
            if (m_node->AsUnOp()->gtOp1 == nullptr)
            {
                assert(m_node->NullOp1Legal());
                m_state = -1;
            }
            else
            {
                m_edge    = &m_node->AsUnOp()->gtOp1;
                m_advance = &GenTreeUseEdgeIterator::Terminate;
            }
            return;

        // Variadic nodes
        case GT_PHI:
            SetEntryStateForList(m_node->AsUnOp()->gtOp1);
            return;

        case GT_FIELD_LIST:
            SetEntryStateForList(m_node);
            return;

#ifdef FEATURE_SIMD
        case GT_SIMD:
            // SIMDIntrinsicInitN carries its operands as a GT_LIST in op1.
            if (m_node->AsSIMD()->gtSIMDIntrinsicID == SIMDIntrinsicInitN)
            {
                SetEntryStateForList(m_node->AsSIMD()->gtOp1);
            }
            else
            {
                SetEntryStateForBinOp();
            }
            return;
#endif // FEATURE_SIMD

#ifdef FEATURE_HW_INTRINSICS
        case GT_HWIntrinsic:
            // HW intrinsics may have no operands, a GT_LIST of operands, or
            // ordinary unary/binary operands.
            if (m_node->AsHWIntrinsic()->gtOp1 == nullptr)
            {
                assert(m_node->NullOp1Legal());
                m_state = -1;
            }
            else if (m_node->AsHWIntrinsic()->gtOp1->OperIsList())
            {
                SetEntryStateForList(m_node->AsHWIntrinsic()->gtOp1);
            }
            else
            {
                SetEntryStateForBinOp();
            }
            return;
#endif // FEATURE_HW_INTRINSICS

        // LEA, which may have no first operand
        case GT_LEA:
            if (m_node->AsAddrMode()->gtOp1 == nullptr)
            {
                m_edge    = &m_node->AsAddrMode()->gtOp2;
                m_advance = &GenTreeUseEdgeIterator::Terminate;
            }
            else
            {
                SetEntryStateForBinOp();
            }
            return;

        // Special nodes
        case GT_CMPXCHG:
            m_edge = &m_node->AsCmpXchg()->gtOpLocation;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::AdvanceCmpXchg;
            return;

        case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
        case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
            m_edge = &m_node->AsBoundsChk()->gtIndex;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::AdvanceBoundsChk;
            return;

        case GT_FIELD:
            if (m_node->AsField()->gtFldObj == nullptr)
            {
                m_state = -1;
            }
            else
            {
                m_edge    = &m_node->AsField()->gtFldObj;
                m_advance = &GenTreeUseEdgeIterator::Terminate;
            }
            return;

        case GT_STMT:
            if (m_node->AsStmt()->gtStmtExpr == nullptr)
            {
                m_state = -1;
            }
            else
            {
                m_edge    = &m_node->AsStmt()->gtStmtExpr;
                m_advance = &GenTreeUseEdgeIterator::Terminate;
            }
            return;

        case GT_ARR_ELEM:
            m_edge = &m_node->AsArrElem()->gtArrObj;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::AdvanceArrElem;
            return;

        case GT_ARR_OFFSET:
            m_edge = &m_node->AsArrOffs()->gtOffset;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::AdvanceArrOffset;
            return;

        case GT_DYN_BLK:
        {
            // The first edge depends on whether the size is evaluated first.
            GenTreeDynBlk* const dynBlock = m_node->AsDynBlk();
            m_edge                        = dynBlock->gtEvalSizeFirst ? &dynBlock->gtDynamicSize : &dynBlock->gtOp1;
            assert(*m_edge != nullptr);
            m_advance = &GenTreeUseEdgeIterator::AdvanceDynBlk;
        }
            return;

        case GT_STORE_DYN_BLK:
        {
            // The first edge depends on both gtEvalSizeFirst and GTF_REVERSE_OPS.
            GenTreeDynBlk* const dynBlock = m_node->AsDynBlk();
            if (dynBlock->gtEvalSizeFirst)
            {
                m_edge = &dynBlock->gtDynamicSize;
            }
            else
            {
                m_edge = dynBlock->IsReverseOp() ? &dynBlock->gtOp2 : &dynBlock->gtOp1;
            }
            assert(*m_edge != nullptr);

            m_advance = &GenTreeUseEdgeIterator::AdvanceStoreDynBlk;
        }
            return;

        case GT_CALL:
            AdvanceCall<CALL_INSTANCE>();
            return;

        // Binary nodes
        default:
            assert(m_node->OperIsBinary());
            SetEntryStateForBinOp();
            return;
    }
}
8619
8620 //------------------------------------------------------------------------
8621 // GenTreeUseEdgeIterator::AdvanceCmpXchg: produces the next operand of a CmpXchg node and advances the state.
8622 //
8623 void GenTreeUseEdgeIterator::AdvanceCmpXchg()
8624 {
8625     switch (m_state)
8626     {
8627         case 0:
8628             m_edge  = &m_node->AsCmpXchg()->gtOpValue;
8629             m_state = 1;
8630             break;
8631         case 1:
8632             m_edge    = &m_node->AsCmpXchg()->gtOpComparand;
8633             m_advance = &GenTreeUseEdgeIterator::Terminate;
8634             break;
8635         default:
8636             unreached();
8637     }
8638
8639     assert(*m_edge != nullptr);
8640 }
8641
8642 //------------------------------------------------------------------------
8643 // GenTreeUseEdgeIterator::AdvanceBoundsChk: produces the next operand of a BoundsChk node and advances the state.
8644 //
8645 void GenTreeUseEdgeIterator::AdvanceBoundsChk()
8646 {
8647     m_edge = &m_node->AsBoundsChk()->gtArrLen;
8648     assert(*m_edge != nullptr);
8649     m_advance = &GenTreeUseEdgeIterator::Terminate;
8650 }
8651
8652 //------------------------------------------------------------------------
8653 // GenTreeUseEdgeIterator::AdvanceArrElem: produces the next operand of a ArrElem node and advances the state.
8654 //
8655 // Because these nodes are variadic, this function uses `m_state` to index into the list of array indices.
8656 //
8657 void GenTreeUseEdgeIterator::AdvanceArrElem()
8658 {
8659     if (m_state < m_node->AsArrElem()->gtArrRank)
8660     {
8661         m_edge = &m_node->AsArrElem()->gtArrInds[m_state];
8662         assert(*m_edge != nullptr);
8663         m_state++;
8664     }
8665     else
8666     {
8667         m_state = -1;
8668     }
8669 }
8670
8671 //------------------------------------------------------------------------
8672 // GenTreeUseEdgeIterator::AdvanceArrOffset: produces the next operand of a ArrOffset node and advances the state.
8673 //
8674 void GenTreeUseEdgeIterator::AdvanceArrOffset()
8675 {
8676     switch (m_state)
8677     {
8678         case 0:
8679             m_edge  = &m_node->AsArrOffs()->gtIndex;
8680             m_state = 1;
8681             break;
8682         case 1:
8683             m_edge    = &m_node->AsArrOffs()->gtArrObj;
8684             m_advance = &GenTreeUseEdgeIterator::Terminate;
8685             break;
8686         default:
8687             unreached();
8688     }
8689
8690     assert(*m_edge != nullptr);
8691 }
8692
8693 //------------------------------------------------------------------------
8694 // GenTreeUseEdgeIterator::AdvanceDynBlk: produces the next operand of a DynBlk node and advances the state.
8695 //
8696 void GenTreeUseEdgeIterator::AdvanceDynBlk()
8697 {
8698     GenTreeDynBlk* const dynBlock = m_node->AsDynBlk();
8699
8700     m_edge = dynBlock->gtEvalSizeFirst ? &dynBlock->gtOp1 : &dynBlock->gtDynamicSize;
8701     assert(*m_edge != nullptr);
8702     m_advance = &GenTreeUseEdgeIterator::Terminate;
8703 }
8704
8705 //------------------------------------------------------------------------
8706 // GenTreeUseEdgeIterator::AdvanceStoreDynBlk: produces the next operand of a StoreDynBlk node and advances the state.
8707 //
8708 // These nodes are moderately complicated but rare enough that templating this function is probably not
8709 // worth the extra complexity.
8710 //
8711 void GenTreeUseEdgeIterator::AdvanceStoreDynBlk()
8712 {
8713     GenTreeDynBlk* const dynBlock = m_node->AsDynBlk();
8714     if (dynBlock->gtEvalSizeFirst)
8715     {
8716         switch (m_state)
8717         {
8718             case 0:
8719                 m_edge  = dynBlock->IsReverseOp() ? &dynBlock->gtOp2 : &dynBlock->gtOp1;
8720                 m_state = 1;
8721                 break;
8722             case 1:
8723                 m_edge    = dynBlock->IsReverseOp() ? &dynBlock->gtOp1 : &dynBlock->gtOp2;
8724                 m_advance = &GenTreeUseEdgeIterator::Terminate;
8725                 break;
8726             default:
8727                 unreached();
8728         }
8729     }
8730     else
8731     {
8732         switch (m_state)
8733         {
8734             case 0:
8735                 m_edge  = dynBlock->IsReverseOp() ? &dynBlock->gtOp1 : &dynBlock->gtOp2;
8736                 m_state = 1;
8737                 break;
8738             case 1:
8739                 m_edge    = &dynBlock->gtDynamicSize;
8740                 m_advance = &GenTreeUseEdgeIterator::Terminate;
8741                 break;
8742             default:
8743                 unreached();
8744         }
8745     }
8746
8747     assert(*m_edge != nullptr);
8748 }
8749
8750 //------------------------------------------------------------------------
8751 // GenTreeUseEdgeIterator::AdvanceBinOp: produces the next operand of a binary node and advances the state.
8752 //
8753 // This function must be instantiated s.t. `ReverseOperands` is `true` iff the node is marked with the
8754 // `GTF_REVERSE_OPS` flag.
8755 //
8756 template <bool ReverseOperands>
8757 void           GenTreeUseEdgeIterator::AdvanceBinOp()
8758 {
8759     assert(ReverseOperands == ((m_node->gtFlags & GTF_REVERSE_OPS) != 0));
8760
8761     m_edge = !ReverseOperands ? &m_node->AsOp()->gtOp2 : &m_node->AsOp()->gtOp1;
8762     assert(*m_edge != nullptr);
8763     m_advance = &GenTreeUseEdgeIterator::Terminate;
8764 }
8765
8766 //------------------------------------------------------------------------
8767 // GenTreeUseEdgeIterator::SetEntryStateForBinOp: produces the first operand of a binary node and chooses
8768 //                                                the appropriate advance function.
8769 //
8770 void GenTreeUseEdgeIterator::SetEntryStateForBinOp()
8771 {
8772     assert(m_node != nullptr);
8773     assert(m_node->OperIsBinary());
8774
8775     GenTreeOp* const node = m_node->AsOp();
8776
8777     if (node->gtOp2 == nullptr)
8778     {
8779         assert(node->gtOp1 != nullptr);
8780         assert(node->NullOp2Legal());
8781         m_edge    = &node->gtOp1;
8782         m_advance = &GenTreeUseEdgeIterator::Terminate;
8783     }
8784     else if ((node->gtFlags & GTF_REVERSE_OPS) != 0)
8785     {
8786         m_edge    = &m_node->AsOp()->gtOp2;
8787         m_advance = &GenTreeUseEdgeIterator::AdvanceBinOp<true>;
8788     }
8789     else
8790     {
8791         m_edge    = &m_node->AsOp()->gtOp1;
8792         m_advance = &GenTreeUseEdgeIterator::AdvanceBinOp<false>;
8793     }
8794 }
8795
8796 //------------------------------------------------------------------------
8797 // GenTreeUseEdgeIterator::AdvanceList: produces the next operand of a variadic node and advances the state.
8798 //
8799 // This function does not use `m_state` for anything meaningful; it simply walks the `m_argList` until
8800 // there are no further entries.
8801 //
8802 void GenTreeUseEdgeIterator::AdvanceList()
8803 {
8804     assert(m_state == 0);
8805
8806     if (m_argList == nullptr)
8807     {
8808         m_state = -1;
8809     }
8810     else
8811     {
8812         GenTreeArgList* listNode = m_argList->AsArgList();
8813         m_edge                   = &listNode->gtOp1;
8814         m_argList                = listNode->Rest();
8815     }
8816 }
8817
8818 //------------------------------------------------------------------------
8819 // GenTreeUseEdgeIterator::SetEntryStateForList: produces the first operand of a list node.
8820 //
void GenTreeUseEdgeIterator::SetEntryStateForList(GenTree* list)
{
    m_argList = list; // the list whose elements will be produced in order
    m_advance = &GenTreeUseEdgeIterator::AdvanceList;
    AdvanceList(); // immediately produce the first element (or terminate if the list is empty)
}
8827
8828 //------------------------------------------------------------------------
8829 // GenTreeUseEdgeIterator::AdvanceCall: produces the next operand of a call node and advances the state.
8830 //
8831 // This function is a bit tricky: in order to avoid doing unnecessary work, it is instantiated with the
8832 // state number the iterator will be in when it is called. For example, `AdvanceCall<CALL_INSTANCE>`
8833 // is the instantiation used when the iterator is at the `CALL_INSTANCE` state (i.e. the entry state).
8834 // This sort of templating allows each state to avoid processing earlier states without unnecessary
8835 // duplication of code.
8836 //
8837 // Note that this method expands the argument lists (`gtCallArgs` and `gtCallLateArgs`) into their
8838 // component operands.
8839 //
template <int state>
void          GenTreeUseEdgeIterator::AdvanceCall()
{
    GenTreeCall* const call = m_node->AsCall();

    // Each case falls through to the next so a state with nothing to produce
    // immediately tries the following one (see the __fallthrough markers).
    switch (state)
    {
        // Produce the `this` pointer, if any, then move on to the args list.
        case CALL_INSTANCE:
            m_argList = call->gtCallArgs;
            m_advance = &GenTreeUseEdgeIterator::AdvanceCall<CALL_ARGS>;
            if (call->gtCallObjp != nullptr)
            {
                m_edge = &call->gtCallObjp;
                return;
            }
            __fallthrough;

        // Produce each element of `gtCallArgs`, then move on to the late args.
        case CALL_ARGS:
            if (m_argList != nullptr)
            {
                GenTreeArgList* argNode = m_argList->AsArgList();
                m_edge                  = &argNode->gtOp1;
                m_argList               = argNode->Rest();
                return;
            }
            m_argList = call->gtCallLateArgs;
            m_advance = &GenTreeUseEdgeIterator::AdvanceCall<CALL_LATE_ARGS>;
            __fallthrough;

        // Produce each element of `gtCallLateArgs`.
        case CALL_LATE_ARGS:
            if (m_argList != nullptr)
            {
                GenTreeArgList* argNode = m_argList->AsArgList();
                m_edge                  = &argNode->gtOp1;
                m_argList               = argNode->Rest();
                return;
            }
            m_advance = &GenTreeUseEdgeIterator::AdvanceCall<CALL_CONTROL_EXPR>;
            __fallthrough;

        // Produce the control expression; only indirect calls have further
        // operands (cookie and target address) beyond this point.
        case CALL_CONTROL_EXPR:
            if (call->gtControlExpr != nullptr)
            {
                if (call->gtCallType == CT_INDIRECT)
                {
                    m_advance = &GenTreeUseEdgeIterator::AdvanceCall<CALL_COOKIE>;
                }
                else
                {
                    m_advance = &GenTreeUseEdgeIterator::Terminate;
                }
                m_edge = &call->gtControlExpr;
                return;
            }
            else if (call->gtCallType != CT_INDIRECT)
            {
                m_state = -1;
                return;
            }
            __fallthrough;

        // Indirect calls only: produce the call cookie, if any.
        case CALL_COOKIE:
            assert(call->gtCallType == CT_INDIRECT);

            m_advance = &GenTreeUseEdgeIterator::AdvanceCall<CALL_ADDRESS>;
            if (call->gtCallCookie != nullptr)
            {
                m_edge = &call->gtCallCookie;
                return;
            }
            __fallthrough;

        // Indirect calls only: produce the call target address, then terminate.
        case CALL_ADDRESS:
            assert(call->gtCallType == CT_INDIRECT);

            m_advance = &GenTreeUseEdgeIterator::Terminate;
            if (call->gtCallAddr != nullptr)
            {
                m_edge = &call->gtCallAddr;
            }
            return;

        default:
            unreached();
    }
}
8926
8927 //------------------------------------------------------------------------
8928 // GenTreeUseEdgeIterator::Terminate: advances the iterator to the terminal state.
8929 //
void GenTreeUseEdgeIterator::Terminate()
{
    // -1 is the distinguished terminal state; operator++ stops advancing
    // once it is reached.
    m_state = -1;
}
8934
8935 //------------------------------------------------------------------------
8936 // GenTreeUseEdgeIterator::operator++: advances the iterator to the next operand.
8937 //
8938 GenTreeUseEdgeIterator& GenTreeUseEdgeIterator::operator++()
8939 {
8940     // If we've reached the terminal state, do nothing.
8941     if (m_state != -1)
8942     {
8943         (this->*m_advance)();
8944     }
8945
8946     return *this;
8947 }
8948
// UseEdgesBegin: returns an iterator positioned at this node's first use edge.
GenTreeUseEdgeIterator GenTree::UseEdgesBegin()
{
    return GenTreeUseEdgeIterator(this);
}
8953
// UseEdgesEnd: returns the past-the-end (terminal) use-edge iterator.
GenTreeUseEdgeIterator GenTree::UseEdgesEnd()
{
    return GenTreeUseEdgeIterator();
}
8958
// UseEdges: returns a begin/end pair suitable for range-based iteration
// over this node's use edges.
IteratorPair<GenTreeUseEdgeIterator> GenTree::UseEdges()
{
    return MakeIteratorPair(UseEdgesBegin(), UseEdgesEnd());
}
8963
// OperandsBegin: returns an iterator positioned at this node's first operand.
GenTreeOperandIterator GenTree::OperandsBegin()
{
    return GenTreeOperandIterator(this);
}
8968
// OperandsEnd: returns the past-the-end (terminal) operand iterator.
GenTreeOperandIterator GenTree::OperandsEnd()
{
    return GenTreeOperandIterator();
}
8973
// Operands: returns a begin/end pair suitable for range-based iteration
// over this node's operands.
IteratorPair<GenTreeOperandIterator> GenTree::Operands()
{
    return MakeIteratorPair(OperandsBegin(), OperandsEnd());
}
8978
8979 bool GenTree::Precedes(GenTree* other)
8980 {
8981     assert(other != nullptr);
8982
8983     for (GenTree* node = gtNext; node != nullptr; node = node->gtNext)
8984     {
8985         if (node == other)
8986         {
8987             return true;
8988         }
8989     }
8990
8991     return false;
8992 }
8993
8994 #ifdef DEBUG
8995
8996 /* static */ int GenTree::gtDispFlags(unsigned flags, unsigned debugFlags)
8997 {
8998     int charsDisplayed = 11; // 11 is the "baseline" number of flag characters displayed
8999
9000     printf("%c", (flags & GTF_ASG) ? 'A' : (IsContained(flags) ? 'c' : '-'));
9001     printf("%c", (flags & GTF_CALL) ? 'C' : '-');
9002     printf("%c", (flags & GTF_EXCEPT) ? 'X' : '-');
9003     printf("%c", (flags & GTF_GLOB_REF) ? 'G' : '-');
9004     printf("%c", (debugFlags & GTF_DEBUG_NODE_MORPHED) ? '+' : // First print '+' if GTF_DEBUG_NODE_MORPHED is set
9005                      (flags & GTF_ORDER_SIDEEFF) ? 'O' : '-'); // otherwise print 'O' or '-'
9006     printf("%c", (flags & GTF_COLON_COND) ? '?' : '-');
9007     printf("%c", (flags & GTF_DONT_CSE) ? 'N' :           // N is for No cse
9008                      (flags & GTF_MAKE_CSE) ? 'H' : '-'); // H is for Hoist this expr
9009     printf("%c", (flags & GTF_REVERSE_OPS) ? 'R' : '-');
9010     printf("%c", (flags & GTF_UNSIGNED) ? 'U' : (flags & GTF_BOOLEAN) ? 'B' : '-');
9011 #if FEATURE_SET_FLAGS
9012     printf("%c", (flags & GTF_SET_FLAGS) ? 'S' : '-');
9013     ++charsDisplayed;
9014 #endif
9015     printf("%c", (flags & GTF_LATE_ARG) ? 'L' : '-');
9016     printf("%c", (flags & GTF_SPILLED) ? 'z' : (flags & GTF_SPILL) ? 'Z' : '-');
9017
9018     return charsDisplayed;
9019 }
9020
9021 /*****************************************************************************/
9022
// gtDispNodeName: prints the node's operator name to jitstdout, decorated
// with node-specific detail (call kind, array rank/dimensions, address-mode
// shape, bounds-check kind, overflow marker, or block size).
//
// Arguments:
//    tree - the node whose name is printed; must not be null
//
void Compiler::gtDispNodeName(GenTree* tree)
{
    /* print the node name */

    const char* name;

    assert(tree);
    if (tree->gtOper < GT_COUNT)
    {
        name = GenTree::OpName(tree->OperGet());
    }
    else
    {
        name = "<ERROR>";
    }
    // The decorated name is composed into this fixed-size buffer.
    char  buf[32];
    char* bufp = &buf[0];

    if ((tree->gtOper == GT_CNS_INT) && tree->IsIconHandle())
    {
        // "(h)" marks an icon handle constant.
        sprintf_s(bufp, sizeof(buf), " %s(h)%c", name, 0);
    }
    else if (tree->gtOper == GT_PUTARG_STK)
    {
        // Show the outgoing stack offset of the argument.
        sprintf_s(bufp, sizeof(buf), " %s [+0x%02x]%c", name, tree->AsPutArgStk()->getArgOffset(), 0);
    }
    else if (tree->gtOper == GT_CALL)
    {
        const char* callType = "CALL";
        const char* gtfType  = "";
        const char* ctType   = "";
        char        gtfTypeBuf[100];

        if (tree->gtCall.gtCallType == CT_USER_FUNC)
        {
            if (tree->gtCall.IsVirtual())
            {
                callType = "CALLV";
            }
        }
        else if (tree->gtCall.gtCallType == CT_HELPER)
        {
            ctType = " help";
        }
        else if (tree->gtCall.gtCallType == CT_INDIRECT)
        {
            ctType = " ind";
        }
        else
        {
            assert(!"Unknown gtCallType");
        }

        if (tree->gtFlags & GTF_CALL_NULLCHECK)
        {
            gtfType = " nullcheck";
        }
        if (tree->gtCall.IsVirtualVtable())
        {
            gtfType = " ind";
        }
        else if (tree->gtCall.IsVirtualStub())
        {
            gtfType = " stub";
        }
#ifdef FEATURE_READYTORUN_COMPILER
        else if (tree->gtCall.IsR2RRelativeIndir())
        {
            gtfType = " r2r_ind";
        }
#endif // FEATURE_READYTORUN_COMPILER
        else if (tree->gtFlags & GTF_CALL_UNMANAGED)
        {
            // Unmanaged calls get a composite annotation built in gtfTypeBuf.
            char* gtfTypeBufWalk = gtfTypeBuf;
            gtfTypeBufWalk += SimpleSprintf_s(gtfTypeBufWalk, gtfTypeBuf, sizeof(gtfTypeBuf), " unman");
            if (tree->gtFlags & GTF_CALL_POP_ARGS)
            {
                gtfTypeBufWalk += SimpleSprintf_s(gtfTypeBufWalk, gtfTypeBuf, sizeof(gtfTypeBuf), " popargs");
            }
            if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_UNMGD_THISCALL)
            {
                gtfTypeBufWalk += SimpleSprintf_s(gtfTypeBufWalk, gtfTypeBuf, sizeof(gtfTypeBuf), " thiscall");
            }
            gtfType = gtfTypeBuf;
        }

        sprintf_s(bufp, sizeof(buf), " %s%s%s%c", callType, ctType, gtfType, 0);
    }
    else if (tree->gtOper == GT_ARR_ELEM)
    {
        // Print "name[,,...]" with one comma per dimension beyond the first.
        bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), " %s[", name);
        for (unsigned rank = tree->gtArrElem.gtArrRank - 1; rank; rank--)
        {
            bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), ",");
        }
        SimpleSprintf_s(bufp, buf, sizeof(buf), "]");
    }
    else if (tree->gtOper == GT_ARR_OFFSET || tree->gtOper == GT_ARR_INDEX)
    {
        bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), " %s[", name);
        unsigned char currDim;
        unsigned char rank;
        if (tree->gtOper == GT_ARR_OFFSET)
        {
            currDim = tree->gtArrOffs.gtCurrDim;
            rank    = tree->gtArrOffs.gtArrRank;
        }
        else
        {
            currDim = tree->gtArrIndex.gtCurrDim;
            rank    = tree->gtArrIndex.gtArrRank;
        }

        for (unsigned char dim = 0; dim < rank; dim++)
        {
            // Use a defacto standard i,j,k for the dimensions.
            // Note that we only support up to rank 3 arrays with these nodes, so we won't run out of characters.
            char dimChar = '*';
            if (dim == currDim)
            {
                dimChar = 'i' + dim;
            }
            else if (dim > currDim)
            {
                dimChar = ' ';
            }

            bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "%c", dimChar);
            if (dim != rank - 1)
            {
                bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), ",");
            }
        }
        SimpleSprintf_s(bufp, buf, sizeof(buf), "]");
    }
    else if (tree->gtOper == GT_LEA)
    {
        // Print the address-mode shape: "LEA(b+(i*scale)+offset)".
        GenTreeAddrMode* lea = tree->AsAddrMode();
        bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), " %s(", name);
        if (lea->Base() != nullptr)
        {
            bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "b+");
        }
        if (lea->Index() != nullptr)
        {
            bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "(i*%d)+", lea->gtScale);
        }
        bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "%d)", lea->Offset());
    }
    else if (tree->gtOper == GT_ARR_BOUNDS_CHECK)
    {
        // Suffix indicates the kind of exception thrown on failure.
        switch (tree->gtBoundsChk.gtThrowKind)
        {
            case SCK_RNGCHK_FAIL:
                sprintf_s(bufp, sizeof(buf), " %s_Rng", name);
                break;
            case SCK_ARG_EXCPN:
                sprintf_s(bufp, sizeof(buf), " %s_Arg", name);
                break;
            case SCK_ARG_RNG_EXCPN:
                sprintf_s(bufp, sizeof(buf), " %s_ArgRng", name);
                break;
            default:
                unreached();
        }
    }
    else if (tree->gtOverflowEx())
    {
        sprintf_s(bufp, sizeof(buf), " %s_ovfl%c", name, 0);
    }
    else if (tree->OperIsBlk() && !tree->OperIsDynBlk())
    {
        // Fixed-size block ops print their size in bytes.
        sprintf_s(bufp, sizeof(buf), " %s(%d)", name, tree->AsBlk()->gtBlkSize);
    }
    else
    {
        sprintf_s(bufp, sizeof(buf), " %s%c", name, 0);
    }

    // Pad short names to keep dump columns aligned.
    if (strlen(buf) < 10)
    {
        printf(" %-10s", buf);
    }
    else
    {
        printf(" %s", buf);
    }
}
9211
9212 void Compiler::gtDispVN(GenTree* tree)
9213 {
9214     if (tree->gtVNPair.GetLiberal() != ValueNumStore::NoVN)
9215     {
9216         assert(tree->gtVNPair.GetConservative() != ValueNumStore::NoVN);
9217         printf(" ");
9218         vnpPrint(tree->gtVNPair, 0);
9219     }
9220 }
9221
9222 //------------------------------------------------------------------------
9223 // gtDispNode: Print a tree to jitstdout.
9224 //
9225 // Arguments:
9226 //    tree - the tree to be printed
9227 //    indentStack - the specification for the current level of indentation & arcs
9228 //    msg         - a contextual method (i.e. from the parent) to print
9229 //
9230 // Return Value:
9231 //    None.
9232 //
9233 // Notes:
9234 //    'indentStack' may be null, in which case no indentation or arcs are printed
9235 //    'msg' may be null
9236
9237 void Compiler::gtDispNode(GenTree* tree, IndentStack* indentStack, __in __in_z __in_opt const char* msg, bool isLIR)
9238 {
9239     bool printPointer = true; // always true..
9240     bool printFlags   = true; // always true..
9241     bool printCost    = true; // always true..
9242
9243     int msgLength = 25;
9244
9245     GenTree* prev;
9246
9247     if (tree->gtSeqNum)
9248     {
9249         printf("N%03u ", tree->gtSeqNum);
9250         if (tree->gtCostsInitialized)
9251         {
9252             printf("(%3u,%3u) ", tree->gtCostEx, tree->gtCostSz);
9253         }
9254         else
9255         {
9256             printf("(???"
9257                    ",???"
9258                    ") "); // This probably indicates a bug: the node has a sequence number, but not costs.
9259         }
9260     }
9261     else
9262     {
9263         if (tree->gtOper == GT_STMT)
9264         {
9265             prev = tree->gtStmt.gtStmtExpr;
9266         }
9267         else
9268         {
9269             prev = tree;
9270         }
9271
9272         bool     hasSeqNum = true;
9273         unsigned dotNum    = 0;
9274         do
9275         {
9276             dotNum++;
9277             prev = prev->gtPrev;
9278
9279             if ((prev == nullptr) || (prev == tree))
9280             {
9281                 hasSeqNum = false;
9282                 break;
9283             }
9284
9285             assert(prev);
9286         } while (prev->gtSeqNum == 0);
9287
9288         // If we have an indent stack, don't add additional characters,
9289         // as it will mess up the alignment.
9290         bool displayDotNum = tree->gtOper != GT_STMT && hasSeqNum && (indentStack == nullptr);
9291         if (displayDotNum)
9292         {
9293             printf("N%03u.%02u ", prev->gtSeqNum, dotNum);
9294         }
9295         else
9296         {
9297             printf("     ");
9298         }
9299
9300         if (tree->gtCostsInitialized)
9301         {
9302             printf("(%3u,%3u) ", tree->gtCostEx, tree->gtCostSz);
9303         }
9304         else
9305         {
9306             if (displayDotNum)
9307             {
9308                 // Do better alignment in this case
9309                 printf("       ");
9310             }
9311             else
9312             {
9313                 printf("          ");
9314             }
9315         }
9316     }
9317
9318     if (optValnumCSE_phase)
9319     {
9320         if (IS_CSE_INDEX(tree->gtCSEnum))
9321         {
9322             printf("CSE #%02d (%s)", GET_CSE_INDEX(tree->gtCSEnum), (IS_CSE_USE(tree->gtCSEnum) ? "use" : "def"));
9323         }
9324         else
9325         {
9326             printf("             ");
9327         }
9328     }
9329
9330     /* Print the node ID */
9331     printTreeID(tree);
9332     printf(" ");
9333
9334     if (tree->gtOper >= GT_COUNT)
9335     {
9336         printf(" **** ILLEGAL NODE ****");
9337         return;
9338     }
9339
9340     if (printFlags)
9341     {
9342         /* First print the flags associated with the node */
9343         switch (tree->gtOper)
9344         {
9345             case GT_LEA:
9346             case GT_BLK:
9347             case GT_OBJ:
9348             case GT_DYN_BLK:
9349             case GT_STORE_BLK:
9350             case GT_STORE_OBJ:
9351             case GT_STORE_DYN_BLK:
9352
9353             case GT_IND:
9354                 // We prefer printing V or U
9355                 if ((tree->gtFlags & (GTF_IND_VOLATILE | GTF_IND_UNALIGNED)) == 0)
9356                 {
9357                     if (tree->gtFlags & GTF_IND_TGTANYWHERE)
9358                     {
9359                         printf("*");
9360                         --msgLength;
9361                         break;
9362                     }
9363                     if (tree->gtFlags & GTF_IND_TGT_NOT_HEAP)
9364                     {
9365                         printf("s");
9366                         --msgLength;
9367                         break;
9368                     }
9369                     if (tree->gtFlags & GTF_IND_INVARIANT)
9370                     {
9371                         printf("#");
9372                         --msgLength;
9373                         break;
9374                     }
9375                     if (tree->gtFlags & GTF_IND_ARR_INDEX)
9376                     {
9377                         printf("a");
9378                         --msgLength;
9379                         break;
9380                     }
9381                     if (tree->gtFlags & GTF_IND_NONFAULTING)
9382                     {
9383                         printf("n"); // print a n for non-faulting
9384                         --msgLength;
9385                         break;
9386                     }
9387                     if (tree->gtFlags & GTF_IND_ASG_LHS)
9388                     {
9389                         printf("D"); // print a D for definition
9390                         --msgLength;
9391                         break;
9392                     }
9393                 }
9394                 __fallthrough;
9395
9396             case GT_INDEX:
9397             case GT_INDEX_ADDR:
9398
9399                 if ((tree->gtFlags & (GTF_IND_VOLATILE | GTF_IND_UNALIGNED)) == 0) // We prefer printing V or U over R
9400                 {
9401                     if (tree->gtFlags & GTF_INX_REFARR_LAYOUT)
9402                     {
9403                         printf("R");
9404                         --msgLength;
9405                         break;
9406                     } // R means RefArray
9407                 }
9408                 __fallthrough;
9409
9410             case GT_FIELD:
9411             case GT_CLS_VAR:
9412                 if (tree->gtFlags & GTF_IND_VOLATILE)
9413                 {
9414                     printf("V");
9415                     --msgLength;
9416                     break;
9417                 }
9418                 if (tree->gtFlags & GTF_IND_UNALIGNED)
9419                 {
9420                     printf("U");
9421                     --msgLength;
9422                     break;
9423                 }
9424                 goto DASH;
9425
9426             case GT_ASG:
9427                 if (tree->OperIsInitBlkOp())
9428                 {
9429                     printf("I");
9430                     --msgLength;
9431                     break;
9432                 }
9433                 goto DASH;
9434
9435             case GT_CALL:
9436                 if (tree->gtCall.IsInlineCandidate())
9437                 {
9438                     if (tree->gtCall.IsGuardedDevirtualizationCandidate())
9439                     {
9440                         printf("&");
9441                     }
9442                     else
9443                     {
9444                         printf("I");
9445                     }
9446                     --msgLength;
9447                     break;
9448                 }
9449                 else if (tree->gtCall.IsGuardedDevirtualizationCandidate())
9450                 {
9451                     printf("G");
9452                     --msgLength;
9453                     break;
9454                 }
9455                 if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_RETBUFFARG)
9456                 {
9457                     printf("S");
9458                     --msgLength;
9459                     break;
9460                 }
9461                 if (tree->gtFlags & GTF_CALL_HOISTABLE)
9462                 {
9463                     printf("H");
9464                     --msgLength;
9465                     break;
9466                 }
9467
9468                 goto DASH;
9469
9470             case GT_MUL:
9471 #if !defined(_TARGET_64BIT_)
9472             case GT_MUL_LONG:
9473 #endif
9474                 if (tree->gtFlags & GTF_MUL_64RSLT)
9475                 {
9476                     printf("L");
9477                     --msgLength;
9478                     break;
9479                 }
9480                 goto DASH;
9481
9482             case GT_LCL_FLD:
9483             case GT_LCL_VAR:
9484             case GT_LCL_VAR_ADDR:
9485             case GT_LCL_FLD_ADDR:
9486             case GT_STORE_LCL_FLD:
9487             case GT_STORE_LCL_VAR:
9488                 if (tree->gtFlags & GTF_VAR_USEASG)
9489                 {
9490                     printf("U");
9491                     --msgLength;
9492                     break;
9493                 }
9494                 if (tree->gtFlags & GTF_VAR_DEF)
9495                 {
9496                     printf("D");
9497                     --msgLength;
9498                     break;
9499                 }
9500                 if (tree->gtFlags & GTF_VAR_CAST)
9501                 {
9502                     printf("C");
9503                     --msgLength;
9504                     break;
9505                 }
9506                 if (tree->gtFlags & GTF_VAR_ARR_INDEX)
9507                 {
9508                     printf("i");
9509                     --msgLength;
9510                     break;
9511                 }
9512                 goto DASH;
9513
9514             case GT_EQ:
9515             case GT_NE:
9516             case GT_LT:
9517             case GT_LE:
9518             case GT_GE:
9519             case GT_GT:
9520             case GT_TEST_EQ:
9521             case GT_TEST_NE:
9522                 if (tree->gtFlags & GTF_RELOP_NAN_UN)
9523                 {
9524                     printf("N");
9525                     --msgLength;
9526                     break;
9527                 }
9528                 if (tree->gtFlags & GTF_RELOP_JMP_USED)
9529                 {
9530                     printf("J");
9531                     --msgLength;
9532                     break;
9533                 }
9534                 if (tree->gtFlags & GTF_RELOP_QMARK)
9535                 {
9536                     printf("Q");
9537                     --msgLength;
9538                     break;
9539                 }
9540                 goto DASH;
9541
9542             case GT_JCMP:
9543                 printf((tree->gtFlags & GTF_JCMP_TST) ? "T" : "C");
9544                 printf((tree->gtFlags & GTF_JCMP_EQ) ? "EQ" : "NE");
9545                 goto DASH;
9546
9547             case GT_FIELD_LIST:
9548                 if (tree->gtFlags & GTF_FIELD_LIST_HEAD)
9549                 {
9550                     printf("H");
9551                     --msgLength;
9552                     break;
9553                 }
9554                 goto DASH;
9555
9556             default:
9557             DASH:
9558                 printf("-");
9559                 --msgLength;
9560                 break;
9561         }
9562
9563         /* Then print the general purpose flags */
9564         unsigned flags = tree->gtFlags;
9565
9566         if (tree->OperIsBinary())
9567         {
9568             genTreeOps oper = tree->OperGet();
9569
9570             // Check for GTF_ADDRMODE_NO_CSE flag on add/mul/shl Binary Operators
9571             if ((oper == GT_ADD) || (oper == GT_MUL) || (oper == GT_LSH))
9572             {
9573                 if ((tree->gtFlags & GTF_ADDRMODE_NO_CSE) != 0)
9574                 {
9575                     flags |= GTF_DONT_CSE; // Force the GTF_ADDRMODE_NO_CSE flag to print out like GTF_DONT_CSE
9576                 }
9577             }
9578         }
9579         else // !tree->OperIsBinary()
9580         {
9581             // the GTF_REVERSE flag only applies to binary operations
9582             flags &= ~GTF_REVERSE_OPS; // we use this value for GTF_VAR_ARR_INDEX above
9583         }
9584
9585         msgLength -= GenTree::gtDispFlags(flags, tree->gtDebugFlags);
9586         /*
9587             printf("%c", (flags & GTF_ASG           ) ? 'A' : '-');
9588             printf("%c", (flags & GTF_CALL          ) ? 'C' : '-');
9589             printf("%c", (flags & GTF_EXCEPT        ) ? 'X' : '-');
9590             printf("%c", (flags & GTF_GLOB_REF      ) ? 'G' : '-');
9591             printf("%c", (flags & GTF_ORDER_SIDEEFF ) ? 'O' : '-');
9592             printf("%c", (flags & GTF_COLON_COND    ) ? '?' : '-');
9593             printf("%c", (flags & GTF_DONT_CSE      ) ? 'N' :        // N is for No cse
9594                          (flags & GTF_MAKE_CSE      ) ? 'H' : '-');  // H is for Hoist this expr
9595             printf("%c", (flags & GTF_REVERSE_OPS   ) ? 'R' : '-');
9596             printf("%c", (flags & GTF_UNSIGNED      ) ? 'U' :
9597                          (flags & GTF_BOOLEAN       ) ? 'B' : '-');
9598             printf("%c", (flags & GTF_SET_FLAGS     ) ? 'S' : '-');
9599             printf("%c", (flags & GTF_SPILLED       ) ? 'z' : '-');
9600             printf("%c", (flags & GTF_SPILL         ) ? 'Z' : '-');
9601         */
9602     }
9603
9604     // If we're printing a node for LIR, we use the space normally associated with the message
9605     // to display the node's temp name (if any)
9606     const bool hasOperands = tree->OperandsBegin() != tree->OperandsEnd();
9607     if (isLIR)
9608     {
9609         assert(msg == nullptr);
9610
9611         // If the tree does not have any operands, we do not display the indent stack. This gives us
9612         // two additional characters for alignment.
9613         if (!hasOperands)
9614         {
9615             msgLength += 1;
9616         }
9617
9618         if (tree->IsValue())
9619         {
9620             const size_t bufLength = msgLength - 1;
9621             msg                    = reinterpret_cast<char*>(alloca(bufLength * sizeof(char)));
9622             sprintf_s(const_cast<char*>(msg), bufLength, "t%d = %s", tree->gtTreeID, hasOperands ? "" : " ");
9623         }
9624     }
9625
9626     /* print the msg associated with the node */
9627
9628     if (msg == nullptr)
9629     {
9630         msg = "";
9631     }
9632     if (msgLength < 0)
9633     {
9634         msgLength = 0;
9635     }
9636
9637     printf(isLIR ? " %+*s" : " %-*s", msgLength, msg);
9638
9639     /* Indent the node accordingly */
9640     if (!isLIR || hasOperands)
9641     {
9642         printIndent(indentStack);
9643     }
9644
9645     gtDispNodeName(tree);
9646
9647     assert(tree == nullptr || tree->gtOper < GT_COUNT);
9648
9649     if (tree)
9650     {
9651         /* print the type of the node */
9652         if (tree->gtOper != GT_CAST)
9653         {
9654             printf(" %-6s", varTypeName(tree->TypeGet()));
9655             if (tree->gtOper == GT_LCL_VAR || tree->gtOper == GT_STORE_LCL_VAR)
9656             {
9657                 LclVarDsc* varDsc = &lvaTable[tree->gtLclVarCommon.gtLclNum];
9658                 if (varDsc->lvAddrExposed)
9659                 {
9660                     printf("(AX)"); // Variable has address exposed.
9661                 }
9662
9663                 if (varDsc->lvUnusedStruct)
9664                 {
9665                     assert(varDsc->lvPromoted);
9666                     printf("(U)"); // Unused struct
9667                 }
9668                 else if (varDsc->lvPromoted)
9669                 {
9670                     if (varTypeIsPromotable(varDsc))
9671                     {
9672                         printf("(P)"); // Promoted struct
9673                     }
9674                     else
9675                     {
9676                         // Promoted implicit by-refs can have this state during
9677                         // global morph while they are being rewritten
9678                         assert(fgGlobalMorph);
9679                         printf("(P?!)"); // Promoted struct
9680                     }
9681                 }
9682             }
9683
9684             if (tree->gtOper == GT_STMT)
9685             {
9686                 if (opts.compDbgInfo)
9687                 {
9688                     IL_OFFSET endIL = tree->gtStmt.gtStmtLastILoffs;
9689
9690                     printf("(IL ");
9691                     if (tree->gtStmt.gtStmtILoffsx == BAD_IL_OFFSET)
9692                     {
9693                         printf("  ???");
9694                     }
9695                     else
9696                     {
9697                         printf("0x%03X", jitGetILoffs(tree->gtStmt.gtStmtILoffsx));
9698                     }
9699                     printf("...");
9700                     if (endIL == BAD_IL_OFFSET)
9701                     {
9702                         printf("  ???");
9703                     }
9704                     else
9705                     {
9706                         printf("0x%03X", endIL);
9707                     }
9708                     printf(")");
9709                 }
9710             }
9711
9712             if (tree->IsArgPlaceHolderNode() && (tree->gtArgPlace.gtArgPlaceClsHnd != nullptr))
9713             {
9714                 printf(" => [clsHnd=%08X]", dspPtr(tree->gtArgPlace.gtArgPlaceClsHnd));
9715             }
9716
9717             if (tree->gtOper == GT_RUNTIMELOOKUP)
9718             {
9719 #ifdef _TARGET_64BIT_
9720                 printf(" 0x%llx", dspPtr(tree->gtRuntimeLookup.gtHnd));
9721 #else
9722                 printf(" 0x%x", dspPtr(tree->gtRuntimeLookup.gtHnd));
9723 #endif
9724
9725                 switch (tree->gtRuntimeLookup.gtHndType)
9726                 {
9727                     case CORINFO_HANDLETYPE_CLASS:
9728                         printf(" class");
9729                         break;
9730                     case CORINFO_HANDLETYPE_METHOD:
9731                         printf(" method");
9732                         break;
9733                     case CORINFO_HANDLETYPE_FIELD:
9734                         printf(" field");
9735                         break;
9736                     default:
9737                         printf(" unknown");
9738                         break;
9739                 }
9740             }
9741         }
9742
9743         // for tracking down problems in reguse prediction or liveness tracking
9744
9745         if (verbose && 0)
9746         {
9747             printf(" RR=");
9748             dspRegMask(tree->gtRsvdRegs);
9749             printf("\n");
9750         }
9751     }
9752 }
9753
9754 void Compiler::gtDispRegVal(GenTree* tree)
9755 {
9756     switch (tree->GetRegTag())
9757     {
9758         // Don't display NOREG; the absence of this tag will imply this state
9759         // case GenTree::GT_REGTAG_NONE:       printf(" NOREG");   break;
9760
9761         case GenTree::GT_REGTAG_REG:
9762             printf(" REG %s", compRegVarName(tree->gtRegNum));
9763             break;
9764
9765         default:
9766             break;
9767     }
9768
9769     if (tree->IsMultiRegCall())
9770     {
9771         // 0th reg is gtRegNum, which is already printed above.
9772         // Print the remaining regs of a multi-reg call node.
9773         GenTreeCall* call     = tree->AsCall();
9774         unsigned     regCount = call->GetReturnTypeDesc()->TryGetReturnRegCount();
9775         for (unsigned i = 1; i < regCount; ++i)
9776         {
9777             printf(",%s", compRegVarName(call->GetRegNumByIdx(i)));
9778         }
9779     }
9780     else if (tree->IsCopyOrReloadOfMultiRegCall())
9781     {
9782         GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
9783         GenTreeCall*         call         = tree->gtGetOp1()->AsCall();
9784         unsigned             regCount     = call->GetReturnTypeDesc()->TryGetReturnRegCount();
9785         for (unsigned i = 1; i < regCount; ++i)
9786         {
9787             printf(",%s", compRegVarName(copyOrReload->GetRegNumByIdx(i)));
9788         }
9789     }
9790
9791 #if FEATURE_MULTIREG_RET
9792     if (tree->IsCopyOrReload())
9793     {
9794         for (int i = 1; i < MAX_RET_REG_COUNT; i++)
9795         {
9796             regNumber reg = (regNumber)tree->AsCopyOrReload()->GetRegNumByIdx(i);
9797             if (reg == REG_NA)
9798             {
9799                 break;
9800             }
9801             printf(",%s", compRegVarName(reg));
9802         }
9803     }
9804 #endif
9805
9806 #if defined(_TARGET_ARM_)
9807     if (tree->OperIsMultiRegOp() && (tree->AsMultiRegOp()->gtOtherReg != REG_NA))
9808     {
9809         printf(",%s", compRegVarName(tree->AsMultiRegOp()->gtOtherReg));
9810     }
9811 #endif
9812 }
9813
9814 // We usually/commonly don't expect to print anything longer than this string,
9815 #define LONGEST_COMMON_LCL_VAR_DISPLAY "V99 PInvokeFrame"
9816 #define LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH (sizeof(LONGEST_COMMON_LCL_VAR_DISPLAY))
9817 #define BUF_SIZE (LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH * 2)
9818
//------------------------------------------------------------------------
// gtGetLclVarNameInfo: Classify a local variable for debug display.
//
// Arguments:
//    lclNum    - the JIT local variable number
//    ilKindOut - [out] a short kind prefix ("cse", "rat", "tmp", "arg", "loc"),
//                or nullptr when a full symbolic name is available
//    ilNameOut - [out] a full symbolic name (e.g. "this", "RetBuf", "OutArgs"),
//                or nullptr when only a kind/number pair applies
//    ilNumOut  - [out] the number to print after the kind prefix
//
// Notes:
//    Callers format the result as either " name" or " kindNum". At most one
//    of *ilKindOut / *ilNameOut is set non-null by this function.
void Compiler::gtGetLclVarNameInfo(unsigned lclNum, const char** ilKindOut, const char** ilNameOut, unsigned* ilNumOut)
{
    const char* ilKind = nullptr;
    const char* ilName = nullptr;

    // Map the JIT local number back to an IL variable number (or one of the
    // special ICorDebugInfo marker values handled below).
    unsigned ilNum = compMap2ILvarNum(lclNum);

    if (ilNum == (unsigned)ICorDebugInfo::RETBUF_ILNUM)
    {
        ilName = "RetBuf";
    }
    else if (ilNum == (unsigned)ICorDebugInfo::VARARGS_HND_ILNUM)
    {
        ilName = "VarArgHandle";
    }
    else if (ilNum == (unsigned)ICorDebugInfo::TYPECTXT_ILNUM)
    {
        ilName = "TypeCtx";
    }
    else if (ilNum == (unsigned)ICorDebugInfo::UNKNOWN_ILNUM)
    {
        // No IL counterpart: this is a JIT-introduced temp. Figure out which
        // phase introduced it to pick a descriptive kind/name.
#if FEATURE_ANYCSE
        if (lclNumIsTrueCSE(lclNum))
        {
            ilKind = "cse";
            ilNum  = lclNum - optCSEstart;
        }
        else if (lclNum >= optCSEstart)
        {
            // Currently any new LclVar's introduced after the CSE phase
            // are believed to be created by the "rationalizer" that is what is meant by the "rat" prefix.
            ilKind = "rat";
            ilNum  = lclNum - (optCSEstart + optCSEcount);
        }
        else
#endif // FEATURE_ANYCSE
        {
            // Check the well-known special locals tracked by the compiler.
            if (lclNum == info.compLvFrameListRoot)
            {
                ilName = "FramesRoot";
            }
            else if (lclNum == lvaInlinedPInvokeFrameVar)
            {
                ilName = "PInvokeFrame";
            }
            else if (lclNum == lvaGSSecurityCookie)
            {
                ilName = "GsCookie";
            }
#if FEATURE_FIXED_OUT_ARGS
            else if (lclNum == lvaPInvokeFrameRegSaveVar)
            {
                ilName = "PInvokeFrameRegSave";
            }
            else if (lclNum == lvaOutgoingArgSpaceVar)
            {
                ilName = "OutArgs";
            }
#endif // FEATURE_FIXED_OUT_ARGS
#ifdef _TARGET_ARM_
            else if (lclNum == lvaPromotedStructAssemblyScratchVar)
            {
                ilName = "PromotedStructScratch";
            }
#endif // _TARGET_ARM_
#if !FEATURE_EH_FUNCLETS
            else if (lclNum == lvaShadowSPslotsVar)
            {
                ilName = "EHSlots";
            }
#endif // !FEATURE_EH_FUNCLETS
#ifdef JIT32_GCENCODER
            else if (lclNum == lvaLocAllocSPvar)
            {
                ilName = "LocAllocSP";
            }
#endif // JIT32_GCENCODER
#if FEATURE_EH_FUNCLETS
            else if (lclNum == lvaPSPSym)
            {
                ilName = "PSPSym";
            }
#endif // FEATURE_EH_FUNCLETS
            else
            {
                // Generic temp: number it relative to the first non-IL local.
                ilKind = "tmp";
                if (compIsForInlining())
                {
                    ilNum = lclNum - impInlineInfo->InlinerCompiler->info.compLocalsCount;
                }
                else
                {
                    ilNum = lclNum - info.compLocalsCount;
                }
            }
        }
    }
    else if (lclNum < (compIsForInlining() ? impInlineInfo->InlinerCompiler->info.compArgsCount : info.compArgsCount))
    {
        // Locals below the argument count are incoming arguments.
        if (ilNum == 0 && !info.compIsStatic)
        {
            ilName = "this";
        }
        else
        {
            ilKind = "arg";
        }
    }
    else
    {
        // Otherwise it is an IL local; struct fields keep their own display
        // (no "loc" prefix), and the number is rebased past the IL args.
        if (!lvaTable[lclNum].lvIsStructField)
        {
            ilKind = "loc";
        }
        if (compIsForInlining())
        {
            ilNum -= impInlineInfo->InlinerCompiler->info.compILargsCount;
        }
        else
        {
            ilNum -= info.compILargsCount;
        }
    }

    *ilKindOut = ilKind;
    *ilNameOut = ilName;
    *ilNumOut  = ilNum;
}
9947
9948 /*****************************************************************************/
9949 int Compiler::gtGetLclVarName(unsigned lclNum, char* buf, unsigned buf_remaining)
9950 {
9951     char*    bufp_next    = buf;
9952     unsigned charsPrinted = 0;
9953     int      sprintf_result;
9954
9955     sprintf_result = sprintf_s(bufp_next, buf_remaining, "V%02u", lclNum);
9956
9957     if (sprintf_result < 0)
9958     {
9959         return sprintf_result;
9960     }
9961
9962     charsPrinted += sprintf_result;
9963     bufp_next += sprintf_result;
9964     buf_remaining -= sprintf_result;
9965
9966     const char* ilKind = nullptr;
9967     const char* ilName = nullptr;
9968     unsigned    ilNum  = 0;
9969
9970     gtGetLclVarNameInfo(lclNum, &ilKind, &ilName, &ilNum);
9971
9972     if (ilName != nullptr)
9973     {
9974         sprintf_result = sprintf_s(bufp_next, buf_remaining, " %s", ilName);
9975         if (sprintf_result < 0)
9976         {
9977             return sprintf_result;
9978         }
9979         charsPrinted += sprintf_result;
9980         bufp_next += sprintf_result;
9981         buf_remaining -= sprintf_result;
9982     }
9983     else if (ilKind != nullptr)
9984     {
9985         sprintf_result = sprintf_s(bufp_next, buf_remaining, " %s%d", ilKind, ilNum);
9986         if (sprintf_result < 0)
9987         {
9988             return sprintf_result;
9989         }
9990         charsPrinted += sprintf_result;
9991         bufp_next += sprintf_result;
9992         buf_remaining -= sprintf_result;
9993     }
9994
9995     assert(charsPrinted > 0);
9996     assert(buf_remaining > 0);
9997
9998     return (int)charsPrinted;
9999 }
10000
10001 /*****************************************************************************
10002  * Get the local var name, and create a copy of the string that can be used in debug output.
10003  */
10004 char* Compiler::gtGetLclVarName(unsigned lclNum)
10005 {
10006     char buf[BUF_SIZE];
10007     int  charsPrinted = gtGetLclVarName(lclNum, buf, _countof(buf));
10008     if (charsPrinted < 0)
10009     {
10010         return nullptr;
10011     }
10012
10013     char* retBuf = new (this, CMK_DebugOnly) char[charsPrinted + 1];
10014     strcpy_s(retBuf, charsPrinted + 1, buf);
10015     return retBuf;
10016 }
10017
10018 /*****************************************************************************/
10019 void Compiler::gtDispLclVar(unsigned lclNum, bool padForBiggestDisp)
10020 {
10021     char buf[BUF_SIZE];
10022     int  charsPrinted = gtGetLclVarName(lclNum, buf, _countof(buf));
10023
10024     if (charsPrinted < 0)
10025     {
10026         return;
10027     }
10028
10029     printf("%s", buf);
10030
10031     if (padForBiggestDisp && (charsPrinted < LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH))
10032     {
10033         printf("%*c", LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH - charsPrinted, ' ');
10034     }
10035 }
10036
10037 /*****************************************************************************/
10038 void Compiler::gtDispConst(GenTree* tree)
10039 {
10040     assert(tree->OperKind() & GTK_CONST);
10041
10042     switch (tree->gtOper)
10043     {
10044         case GT_CNS_INT:
10045             if (tree->IsIconHandle(GTF_ICON_STR_HDL))
10046             {
10047                 const wchar_t* str = eeGetCPString(tree->gtIntCon.gtIconVal);
10048                 if (str != nullptr)
10049                 {
10050                     printf(" 0x%X \"%S\"", dspPtr(tree->gtIntCon.gtIconVal), str);
10051                 }
10052                 else
10053                 {
10054                     // Note that eGetCPString isn't currently implemented on Linux/ARM
10055                     // and instead always returns nullptr
10056                     printf(" 0x%X [ICON_STR_HDL]", dspPtr(tree->gtIntCon.gtIconVal));
10057                 }
10058             }
10059             else
10060             {
10061                 ssize_t dspIconVal = tree->IsIconHandle() ? dspPtr(tree->gtIntCon.gtIconVal) : tree->gtIntCon.gtIconVal;
10062
10063                 if (tree->TypeGet() == TYP_REF)
10064                 {
10065                     assert(tree->gtIntCon.gtIconVal == 0);
10066                     printf(" null");
10067                 }
10068                 else if ((tree->gtIntCon.gtIconVal > -1000) && (tree->gtIntCon.gtIconVal < 1000))
10069                 {
10070                     printf(" %ld", dspIconVal);
10071 #ifdef _TARGET_64BIT_
10072                 }
10073                 else if ((tree->gtIntCon.gtIconVal & 0xFFFFFFFF00000000LL) != 0)
10074                 {
10075                     printf(" 0x%llx", dspIconVal);
10076 #endif
10077                 }
10078                 else
10079                 {
10080                     printf(" 0x%X", dspIconVal);
10081                 }
10082
10083                 if (tree->IsIconHandle())
10084                 {
10085                     switch (tree->GetIconHandleFlag())
10086                     {
10087                         case GTF_ICON_SCOPE_HDL:
10088                             printf(" scope");
10089                             break;
10090                         case GTF_ICON_CLASS_HDL:
10091                             printf(" class");
10092                             break;
10093                         case GTF_ICON_METHOD_HDL:
10094                             printf(" method");
10095                             break;
10096                         case GTF_ICON_FIELD_HDL:
10097                             printf(" field");
10098                             break;
10099                         case GTF_ICON_STATIC_HDL:
10100                             printf(" static");
10101                             break;
10102                         case GTF_ICON_STR_HDL:
10103                             unreached(); // This case is handled above
10104                             break;
10105                         case GTF_ICON_PSTR_HDL:
10106                             printf(" pstr");
10107                             break;
10108                         case GTF_ICON_PTR_HDL:
10109                             printf(" ptr");
10110                             break;
10111                         case GTF_ICON_VARG_HDL:
10112                             printf(" vararg");
10113                             break;
10114                         case GTF_ICON_PINVKI_HDL:
10115                             printf(" pinvoke");
10116                             break;
10117                         case GTF_ICON_TOKEN_HDL:
10118                             printf(" token");
10119                             break;
10120                         case GTF_ICON_TLS_HDL:
10121                             printf(" tls");
10122                             break;
10123                         case GTF_ICON_FTN_ADDR:
10124                             printf(" ftn");
10125                             break;
10126                         case GTF_ICON_CIDMID_HDL:
10127                             printf(" cid/mid");
10128                             break;
10129                         case GTF_ICON_BBC_PTR:
10130                             printf(" bbc");
10131                             break;
10132                         default:
10133                             printf(" UNKNOWN");
10134                             break;
10135                     }
10136                 }
10137
10138                 if ((tree->gtFlags & GTF_ICON_FIELD_OFF) != 0)
10139                 {
10140                     printf(" field offset");
10141                 }
10142
10143 #ifdef FEATURE_SIMD
10144                 if ((tree->gtFlags & GTF_ICON_SIMD_COUNT) != 0)
10145                 {
10146                     printf(" Vector<T>.Count");
10147                 }
10148 #endif
10149
10150                 if ((tree->IsReuseRegVal()) != 0)
10151                 {
10152                     printf(" reuse reg val");
10153                 }
10154             }
10155
10156             gtDispFieldSeq(tree->gtIntCon.gtFieldSeq);
10157
10158             break;
10159
10160         case GT_CNS_LNG:
10161             printf(" 0x%016I64x", tree->gtLngCon.gtLconVal);
10162             break;
10163
10164         case GT_CNS_DBL:
10165             if (*((__int64*)&tree->gtDblCon.gtDconVal) == (__int64)I64(0x8000000000000000))
10166             {
10167                 printf(" -0.00000");
10168             }
10169             else
10170             {
10171                 printf(" %#.17g", tree->gtDblCon.gtDconVal);
10172             }
10173             break;
10174         case GT_CNS_STR:
10175             printf("<string constant>");
10176             break;
10177         default:
10178             assert(!"unexpected constant node");
10179     }
10180
10181     gtDispRegVal(tree);
10182 }
10183
10184 void Compiler::gtDispFieldSeq(FieldSeqNode* pfsn)
10185 {
10186     if (pfsn == FieldSeqStore::NotAField() || (pfsn == nullptr))
10187     {
10188         return;
10189     }
10190
10191     // Otherwise...
10192     printf(" Fseq[");
10193     while (pfsn != nullptr)
10194     {
10195         assert(pfsn != FieldSeqStore::NotAField()); // Can't exist in a field sequence list except alone
10196         CORINFO_FIELD_HANDLE fldHnd = pfsn->m_fieldHnd;
10197         // First check the "pseudo" field handles...
10198         if (fldHnd == FieldSeqStore::FirstElemPseudoField)
10199         {
10200             printf("#FirstElem");
10201         }
10202         else if (fldHnd == FieldSeqStore::ConstantIndexPseudoField)
10203         {
10204             printf("#ConstantIndex");
10205         }
10206         else
10207         {
10208             printf("%s", eeGetFieldName(fldHnd));
10209         }
10210         pfsn = pfsn->m_next;
10211         if (pfsn != nullptr)
10212         {
10213             printf(", ");
10214         }
10215     }
10216     printf("]");
10217 }
10218
10219 //------------------------------------------------------------------------
10220 // gtDispLeaf: Print a single leaf node to jitstdout.
10221 //
10222 // Arguments:
10223 //    tree - the tree to be printed
10224 //    indentStack - the specification for the current level of indentation & arcs
10225 //
10226 // Return Value:
10227 //    None.
10228 //
10229 // Notes:
10230 //    'indentStack' may be null, in which case no indentation or arcs are printed
10231
10232 void Compiler::gtDispLeaf(GenTree* tree, IndentStack* indentStack)
10233 {
10234     if (tree->OperKind() & GTK_CONST)
10235     {
10236         gtDispConst(tree);
10237         return;
10238     }
10239
10240     bool isLclFld = false;
10241
10242     switch (tree->gtOper)
10243     {
10244         unsigned   varNum;
10245         LclVarDsc* varDsc;
10246
10247         case GT_LCL_FLD:
10248         case GT_LCL_FLD_ADDR:
10249         case GT_STORE_LCL_FLD:
10250             isLclFld = true;
10251             __fallthrough;
10252
10253         case GT_PHI_ARG:
10254         case GT_LCL_VAR:
10255         case GT_LCL_VAR_ADDR:
10256         case GT_STORE_LCL_VAR:
10257             printf(" ");
10258             varNum = tree->gtLclVarCommon.gtLclNum;
10259             varDsc = &lvaTable[varNum];
10260             gtDispLclVar(varNum);
10261             if (tree->gtLclVarCommon.HasSsaName())
10262             {
10263                 if (tree->gtFlags & GTF_VAR_USEASG)
10264                 {
10265                     assert(tree->gtFlags & GTF_VAR_DEF);
10266                     printf("ud:%d->%d", tree->gtLclVarCommon.gtSsaNum, GetSsaNumForLocalVarDef(tree));
10267                 }
10268                 else
10269                 {
10270                     printf("%s:%d", (tree->gtFlags & GTF_VAR_DEF) ? "d" : "u", tree->gtLclVarCommon.gtSsaNum);
10271                 }
10272             }
10273
10274             if (isLclFld)
10275             {
10276                 printf("[+%u]", tree->gtLclFld.gtLclOffs);
10277                 gtDispFieldSeq(tree->gtLclFld.gtFieldSeq);
10278             }
10279
10280             if (varDsc->lvRegister)
10281             {
10282                 printf(" ");
10283                 varDsc->PrintVarReg();
10284             }
10285             else if (tree->InReg())
10286             {
10287                 printf(" %s", compRegVarName(tree->gtRegNum));
10288             }
10289
10290             if (varDsc->lvPromoted)
10291             {
10292                 if (!varTypeIsPromotable(varDsc) && !varDsc->lvUnusedStruct)
10293                 {
10294                     // Promoted implicit byrefs can get in this state while they are being rewritten
10295                     // in global morph.
10296                     assert(fgGlobalMorph);
10297                 }
10298                 else
10299                 {
10300                     CORINFO_CLASS_HANDLE typeHnd = varDsc->lvVerTypeInfo.GetClassHandle();
10301                     CORINFO_FIELD_HANDLE fldHnd;
10302
10303                     for (unsigned i = varDsc->lvFieldLclStart; i < varDsc->lvFieldLclStart + varDsc->lvFieldCnt; ++i)
10304                     {
10305                         LclVarDsc*  fieldVarDsc = &lvaTable[i];
10306                         const char* fieldName;
10307 #if !defined(_TARGET_64BIT_)
10308                         if (varTypeIsLong(varDsc))
10309                         {
10310                             fieldName = (i == 0) ? "lo" : "hi";
10311                         }
10312                         else
10313 #endif // !defined(_TARGET_64BIT_)
10314                         {
10315                             fldHnd    = info.compCompHnd->getFieldInClass(typeHnd, fieldVarDsc->lvFldOrdinal);
10316                             fieldName = eeGetFieldName(fldHnd);
10317                         }
10318
10319                         printf("\n");
10320                         printf("                                                  ");
10321                         printIndent(indentStack);
10322                         printf("    %-6s V%02u.%s (offs=0x%02x) -> ", varTypeName(fieldVarDsc->TypeGet()),
10323                                tree->gtLclVarCommon.gtLclNum, fieldName, fieldVarDsc->lvFldOffset);
10324                         gtDispLclVar(i);
10325
10326                         if (fieldVarDsc->lvRegister)
10327                         {
10328                             printf(" ");
10329                             fieldVarDsc->PrintVarReg();
10330                         }
10331
10332                         if (fieldVarDsc->lvTracked && fgLocalVarLivenessDone && // Includes local variable liveness
10333                             ((tree->gtFlags & GTF_VAR_DEATH) != 0))
10334                         {
10335                             printf(" (last use)");
10336                         }
10337                     }
10338                 }
10339             }
10340             else // a normal not-promoted lclvar
10341             {
10342                 if (varDsc->lvTracked && fgLocalVarLivenessDone && ((tree->gtFlags & GTF_VAR_DEATH) != 0))
10343                 {
10344                     printf(" (last use)");
10345                 }
10346             }
10347             break;
10348
10349         case GT_JMP:
10350         {
10351             const char* methodName;
10352             const char* className;
10353
10354             methodName = eeGetMethodName((CORINFO_METHOD_HANDLE)tree->gtVal.gtVal1, &className);
10355             printf(" %s.%s\n", className, methodName);
10356         }
10357         break;
10358
10359         case GT_CLS_VAR:
10360             printf(" Hnd=%#x", dspPtr(tree->gtClsVar.gtClsVarHnd));
10361             gtDispFieldSeq(tree->gtClsVar.gtFieldSeq);
10362             break;
10363
10364         case GT_CLS_VAR_ADDR:
10365             printf(" Hnd=%#x", dspPtr(tree->gtClsVar.gtClsVarHnd));
10366             break;
10367
10368         case GT_LABEL:
10369             break;
10370
10371         case GT_FTN_ADDR:
10372         {
10373             const char* methodName;
10374             const char* className;
10375
10376             methodName = eeGetMethodName((CORINFO_METHOD_HANDLE)tree->gtFptrVal.gtFptrMethod, &className);
10377             printf(" %s.%s\n", className, methodName);
10378         }
10379         break;
10380
10381 #if !FEATURE_EH_FUNCLETS
10382         case GT_END_LFIN:
10383             printf(" endNstLvl=%d", tree->gtVal.gtVal1);
10384             break;
10385 #endif // !FEATURE_EH_FUNCLETS
10386
10387         // Vanilla leaves. No qualifying information available. So do nothing
10388
10389         case GT_NO_OP:
10390         case GT_START_NONGC:
10391         case GT_START_PREEMPTGC:
10392         case GT_PROF_HOOK:
10393         case GT_CATCH_ARG:
10394         case GT_MEMORYBARRIER:
10395         case GT_ARGPLACE:
10396         case GT_PINVOKE_PROLOG:
10397         case GT_JMPTABLE:
10398             break;
10399
10400         case GT_RET_EXPR:
10401             printf("(inl return from call ");
10402             printTreeID(tree->gtRetExpr.gtInlineCandidate);
10403             printf(")");
10404             break;
10405
10406         case GT_PHYSREG:
10407             printf(" %s", getRegName(tree->gtPhysReg.gtSrcReg, varTypeIsFloating(tree)));
10408             break;
10409
10410         case GT_IL_OFFSET:
10411             printf(" IL offset: ");
10412             if (tree->gtStmt.gtStmtILoffsx == BAD_IL_OFFSET)
10413             {
10414                 printf("???");
10415             }
10416             else
10417             {
10418                 printf("0x%x", jitGetILoffs(tree->gtStmt.gtStmtILoffsx));
10419             }
10420             break;
10421
10422         case GT_JCC:
10423         case GT_SETCC:
10424             printf(" cond=%s", tree->AsCC()->gtCondition.Name());
10425             break;
10426         case GT_JCMP:
10427             printf(" cond=%s%s", (tree->gtFlags & GTF_JCMP_TST) ? "TEST_" : "",
10428                    (tree->gtFlags & GTF_JCMP_EQ) ? "EQ" : "NE");
10429
10430         default:
10431             assert(!"don't know how to display tree leaf node");
10432     }
10433
10434     gtDispRegVal(tree);
10435 }
10436
10437 //------------------------------------------------------------------------
10438 // gtDispLeaf: Print a child node to jitstdout.
10439 //
10440 // Arguments:
10441 //    tree - the tree to be printed
10442 //    indentStack - the specification for the current level of indentation & arcs
10443 //    arcType     - the type of arc to use for this child
10444 //    msg         - a contextual method (i.e. from the parent) to print
10445 //    topOnly     - a boolean indicating whether to print the children, or just the top node
10446 //
10447 // Return Value:
10448 //    None.
10449 //
10450 // Notes:
10451 //    'indentStack' may be null, in which case no indentation or arcs are printed
10452 //    'msg' has a default value of null
10453 //    'topOnly' is an optional argument that defaults to false
10454
10455 void Compiler::gtDispChild(GenTree*             child,
10456                            IndentStack*         indentStack,
10457                            IndentInfo           arcType,
10458                            __in_opt const char* msg,     /* = nullptr  */
10459                            bool                 topOnly) /* = false */
10460 {
10461     indentStack->Push(arcType);
10462     gtDispTree(child, indentStack, msg, topOnly);
10463     indentStack->Pop();
10464 }
10465
#ifdef FEATURE_SIMD
// Intrinsic Id to name map
// Printable-name table for SIMD intrinsics; entries are generated from
// simdintrinsiclist.h, so indexing by SIMDIntrinsicID matches the enum order.
extern const char* const simdIntrinsicNames[] = {
#define SIMD_INTRINSIC(mname, inst, id, name, r, ac, arg1, arg2, arg3, t1, t2, t3, t4, t5, t6, t7, t8, t9, t10) name,
#include "simdintrinsiclist.h"
};
#endif // FEATURE_SIMD
10473
10474 /*****************************************************************************/
10475
//------------------------------------------------------------------------
// gtDispTree: Print a tree node (and, unless 'topOnly', its children) to jitstdout.
//
// Arguments:
//    tree        - the tree to be printed; a null or uninitialized node is reported and skipped
//    indentStack - the indentation & arc state; may be null, in which case a fresh one is allocated
//    msg         - a contextual message (i.e. from the parent) to print; default null
//    topOnly     - print only this node, not its children; default false
//    isLIR       - print in LIR form; default false
//
void Compiler::gtDispTree(GenTree*     tree,
                          IndentStack* indentStack,                 /* = nullptr */
                          __in __in_z __in_opt const char* msg,     /* = nullptr  */
                          bool                             topOnly, /* = false */
                          bool                             isLIR)   /* = false */
{
    if (tree == nullptr)
    {
        printf(" [%08X] <NULL>\n", tree);
        printf(""); // null string means flush
        return;
    }

    if (indentStack == nullptr)
    {
        indentStack = new (this, CMK_DebugOnly) IndentStack(this);
    }

    if (IsUninitialized(tree))
    {
        /* Value used to initialize nodes */
        printf("Uninitialized tree node!");
        return;
    }

    if (tree->gtOper >= GT_COUNT)
    {
        gtDispNode(tree, indentStack, msg, isLIR);
        printf("Bogus operator!");
        return;
    }

    /* Is tree a leaf node? */

    if (tree->OperIsLeaf() || tree->OperIsLocalStore()) // local stores used to be leaves
    {
        gtDispNode(tree, indentStack, msg, isLIR);
        gtDispLeaf(tree, indentStack);
        gtDispVN(tree);
        printf("\n");
        if (tree->OperIsLocalStore() && !topOnly)
        {
            gtDispChild(tree->gtOp.gtOp1, indentStack, IINone);
        }
        return;
    }

    // Determine what kind of arc to propagate.
    // 'myArc' is the arc used for this node itself; 'lowerArc' replaces it on the
    // stack while the children below this node are printed.
    IndentInfo myArc    = IINone;
    IndentInfo lowerArc = IINone;
    if (indentStack->Depth() > 0)
    {
        myArc = indentStack->Pop();
        switch (myArc)
        {
            case IIArcBottom:
                indentStack->Push(IIArc);
                lowerArc = IINone;
                break;
            case IIArc:
                indentStack->Push(IIArc);
                lowerArc = IIArc;
                break;
            case IIArcTop:
                indentStack->Push(IINone);
                lowerArc = IIArc;
                break;
            case IIEmbedded:
                indentStack->Push(IIEmbedded);
                lowerArc = IIEmbedded;
                break;
            case IINone:
                indentStack->Push(IINone);
                lowerArc = IINone;
                break;
            default:
                unreached();
                break;
        }
    }

    // Special case formatting for PHI nodes -- arg lists like calls.

    if (tree->OperGet() == GT_PHI)
    {
        gtDispNode(tree, indentStack, msg, isLIR);
        gtDispVN(tree);
        printf("\n");

        if (!topOnly)
        {
            if (tree->gtOp.gtOp1 != nullptr)
            {
                IndentInfo arcType = IIArcTop;
                for (GenTreeArgList* args = tree->gtOp.gtOp1->AsArgList(); args != nullptr; args = args->Rest())
                {
                    if (args->Rest() == nullptr)
                    {
                        arcType = IIArcBottom;
                    }
                    gtDispChild(args->Current(), indentStack, arcType);
                    arcType = IIArc;
                }
            }
        }
        return;
    }

    /* Is it a 'simple' unary/binary operator? */

    const char* childMsg = nullptr;

    if (tree->OperIsSimple())
    {
        // op2 is printed first (above the node), op1 last (below) -- the dump
        // reads top-to-bottom as second-operand, node, first-operand.
        if (!topOnly)
        {
            if (tree->gtGetOp2IfPresent())
            {
                // Label the childMsgs of the GT_COLON operator
                // op2 is the then part

                if (tree->gtOper == GT_COLON)
                {
                    childMsg = "then";
                }
                gtDispChild(tree->gtOp.gtOp2, indentStack, IIArcTop, childMsg, topOnly);
            }
        }

        // Now, get the right type of arc for this node
        if (myArc != IINone)
        {
            indentStack->Pop();
            indentStack->Push(myArc);
        }

        gtDispNode(tree, indentStack, msg, isLIR);

        // Propagate lowerArc to the lower children.
        if (indentStack->Depth() > 0)
        {
            (void)indentStack->Pop();
            indentStack->Push(lowerArc);
        }

        if (tree->gtOper == GT_CAST)
        {
            /* Format a message that explains the effect of this GT_CAST */

            var_types fromType  = genActualType(tree->gtCast.CastOp()->TypeGet());
            var_types toType    = tree->CastToType();
            var_types finalType = tree->TypeGet();

            /* if GTF_UNSIGNED is set then force fromType to an unsigned type */
            if (tree->gtFlags & GTF_UNSIGNED)
            {
                fromType = genUnsignedType(fromType);
            }

            if (finalType != toType)
            {
                printf(" %s <-", varTypeName(finalType));
            }

            printf(" %s <- %s", varTypeName(toType), varTypeName(fromType));
        }

        if (tree->gtOper == GT_OBJ && (tree->gtFlags & GTF_VAR_DEATH))
        {
            printf(" (last use)");
        }
        if (tree->OperIsBlkOp())
        {
            if (tree->OperIsCopyBlkOp())
            {
                printf(" (copy)");
            }
            else if (tree->OperIsInitBlkOp())
            {
                printf(" (init)");
            }
            if (tree->OperIsStoreBlk() && (tree->AsBlk()->gtBlkOpKind != GenTreeBlk::BlkOpKindInvalid))
            {
                switch (tree->AsBlk()->gtBlkOpKind)
                {
                    case GenTreeBlk::BlkOpKindRepInstr:
                        printf(" (RepInstr)");
                        break;
                    case GenTreeBlk::BlkOpKindUnroll:
                        printf(" (Unroll)");
                        break;
                    case GenTreeBlk::BlkOpKindHelper:
                        printf(" (Helper)");
                        break;
                    default:
                        unreached();
                }
            }
        }
        else if (tree->OperIsFieldList())
        {
            printf(" %s at offset %d", varTypeName(tree->AsFieldList()->gtFieldType),
                   tree->AsFieldList()->gtFieldOffset);
        }
#if FEATURE_PUT_STRUCT_ARG_STK
        else if (tree->OperGet() == GT_PUTARG_STK)
        {
            printf(" (%d slots)", tree->AsPutArgStk()->gtNumSlots);
            if (tree->AsPutArgStk()->gtPutArgStkKind != GenTreePutArgStk::Kind::Invalid)
            {
                switch (tree->AsPutArgStk()->gtPutArgStkKind)
                {
                    case GenTreePutArgStk::Kind::RepInstr:
                        printf(" (RepInstr)");
                        break;
                    case GenTreePutArgStk::Kind::Unroll:
                        printf(" (Unroll)");
                        break;
                    case GenTreePutArgStk::Kind::Push:
                        printf(" (Push)");
                        break;
                    case GenTreePutArgStk::Kind::PushAllSlots:
                        printf(" (PushAllSlots)");
                        break;
                    default:
                        unreached();
                }
            }
        }
#endif // FEATURE_PUT_STRUCT_ARG_STK

        if (tree->gtOper == GT_INTRINSIC)
        {
            // Append the name of the math/object intrinsic being represented.
            switch (tree->gtIntrinsic.gtIntrinsicId)
            {
                case CORINFO_INTRINSIC_Sin:
                    printf(" sin");
                    break;
                case CORINFO_INTRINSIC_Cos:
                    printf(" cos");
                    break;
                case CORINFO_INTRINSIC_Cbrt:
                    printf(" cbrt");
                    break;
                case CORINFO_INTRINSIC_Sqrt:
                    printf(" sqrt");
                    break;
                case CORINFO_INTRINSIC_Abs:
                    printf(" abs");
                    break;
                case CORINFO_INTRINSIC_Round:
                    printf(" round");
                    break;
                case CORINFO_INTRINSIC_Cosh:
                    printf(" cosh");
                    break;
                case CORINFO_INTRINSIC_Sinh:
                    printf(" sinh");
                    break;
                case CORINFO_INTRINSIC_Tan:
                    printf(" tan");
                    break;
                case CORINFO_INTRINSIC_Tanh:
                    printf(" tanh");
                    break;
                case CORINFO_INTRINSIC_Asin:
                    printf(" asin");
                    break;
                case CORINFO_INTRINSIC_Asinh:
                    printf(" asinh");
                    break;
                case CORINFO_INTRINSIC_Acos:
                    printf(" acos");
                    break;
                case CORINFO_INTRINSIC_Acosh:
                    printf(" acosh");
                    break;
                case CORINFO_INTRINSIC_Atan:
                    printf(" atan");
                    break;
                case CORINFO_INTRINSIC_Atan2:
                    printf(" atan2");
                    break;
                case CORINFO_INTRINSIC_Atanh:
                    printf(" atanh");
                    break;
                case CORINFO_INTRINSIC_Log10:
                    printf(" log10");
                    break;
                case CORINFO_INTRINSIC_Pow:
                    printf(" pow");
                    break;
                case CORINFO_INTRINSIC_Exp:
                    printf(" exp");
                    break;
                case CORINFO_INTRINSIC_Ceiling:
                    printf(" ceiling");
                    break;
                case CORINFO_INTRINSIC_Floor:
                    printf(" floor");
                    break;
                case CORINFO_INTRINSIC_Object_GetType:
                    printf(" objGetType");
                    break;

                default:
                    unreached();
            }
        }

#ifdef FEATURE_SIMD
        if (tree->gtOper == GT_SIMD)
        {
            printf(" %s %s", varTypeName(tree->gtSIMD.gtSIMDBaseType),
                   simdIntrinsicNames[tree->gtSIMD.gtSIMDIntrinsicID]);
        }
#endif // FEATURE_SIMD

#ifdef FEATURE_HW_INTRINSICS
        if (tree->gtOper == GT_HWIntrinsic)
        {
            printf(" %s %s",
                   tree->gtHWIntrinsic.gtSIMDBaseType == TYP_UNKNOWN ? ""
                                                                     : varTypeName(tree->gtHWIntrinsic.gtSIMDBaseType),
                   HWIntrinsicInfo::lookupName(tree->gtHWIntrinsic.gtHWIntrinsicId));
        }
#endif // FEATURE_HW_INTRINSICS

        gtDispRegVal(tree);
        gtDispVN(tree);
        printf("\n");

        if (!topOnly && tree->gtOp.gtOp1)
        {

            // Label the child of the GT_COLON operator
            // op1 is the else part

            if (tree->gtOper == GT_COLON)
            {
                childMsg = "else";
            }
            else if (tree->gtOper == GT_QMARK)
            {
                childMsg = "   if";
            }
            gtDispChild(tree->gtOp.gtOp1, indentStack, IIArcBottom, childMsg, topOnly);
        }

        return;
    }

    // Now, get the right type of arc for this node
    if (myArc != IINone)
    {
        indentStack->Pop();
        indentStack->Push(myArc);
    }
    gtDispNode(tree, indentStack, msg, isLIR);

    // Propagate lowerArc to the lower children.
    if (indentStack->Depth() > 0)
    {
        (void)indentStack->Pop();
        indentStack->Push(lowerArc);
    }

    // See what kind of a special operator we have here, and handle its special children.

    switch (tree->gtOper)
    {
        case GT_FIELD:
            if (FieldSeqStore::IsPseudoField(tree->gtField.gtFldHnd))
            {
                printf(" #PseudoField:0x%x", tree->gtField.gtFldOffset);
            }
            else
            {
                // NOTE(review): the trailing 0 argument is unused by the " %s" format -- confirm it can be removed.
                printf(" %s", eeGetFieldName(tree->gtField.gtFldHnd), 0);
            }

            if (tree->gtField.gtFldObj && !topOnly)
            {
                gtDispVN(tree);
                printf("\n");
                gtDispChild(tree->gtField.gtFldObj, indentStack, IIArcBottom);
            }
            else
            {
                gtDispRegVal(tree);
                gtDispVN(tree);
                printf("\n");
            }
            break;

        case GT_CALL:
        {
            GenTreeCall* call = tree->AsCall();
            assert(call->gtFlags & GTF_CALL);
            // The last child gets the bottom arc; all others get a pass-through arc.
            unsigned numChildren = call->NumChildren();
            GenTree* lastChild   = nullptr;
            if (numChildren != 0)
            {
                lastChild = call->GetChild(numChildren - 1);
            }

            if (call->gtCallType != CT_INDIRECT)
            {
                const char* methodName;
                const char* className;

                methodName = eeGetMethodName(call->gtCallMethHnd, &className);

                printf(" %s.%s", className, methodName);
            }

            if ((call->gtFlags & GTF_CALL_UNMANAGED) && (call->gtCallMoreFlags & GTF_CALL_M_FRAME_VAR_DEATH))
            {
                printf(" (FramesRoot last use)");
            }

            if (((call->gtFlags & GTF_CALL_INLINE_CANDIDATE) != 0) && (call->gtInlineCandidateInfo != nullptr) &&
                (call->gtInlineCandidateInfo->exactContextHnd != nullptr))
            {
                printf(" (exactContextHnd=0x%p)", dspPtr(call->gtInlineCandidateInfo->exactContextHnd));
            }

            gtDispVN(call);
            if (call->IsMultiRegCall())
            {
                gtDispRegVal(call);
            }
            printf("\n");

            if (!topOnly)
            {
                char  buf[64];
                char* bufp;

                bufp = &buf[0];

                // Print the 'this' arg, regular args, indirect-call target,
                // control expression, and late args, in evaluation-list order.
                if ((call->gtCallObjp != nullptr) && (call->gtCallObjp->gtOper != GT_NOP) &&
                    (!call->gtCallObjp->IsArgPlaceHolderNode()))
                {
                    if (call->gtCallObjp->gtOper == GT_ASG)
                    {
                        sprintf_s(bufp, sizeof(buf), "this SETUP%c", 0);
                    }
                    else
                    {
                        sprintf_s(bufp, sizeof(buf), "this in %s%c", compRegVarName(REG_ARG_0), 0);
                    }
                    gtDispChild(call->gtCallObjp, indentStack, (call->gtCallObjp == lastChild) ? IIArcBottom : IIArc,
                                bufp, topOnly);
                }

                if (call->gtCallArgs)
                {
                    gtDispArgList(call, indentStack);
                }

                if (call->gtCallType == CT_INDIRECT)
                {
                    gtDispChild(call->gtCallAddr, indentStack, (call->gtCallAddr == lastChild) ? IIArcBottom : IIArc,
                                "calli tgt", topOnly);
                }

                if (call->gtControlExpr != nullptr)
                {
                    gtDispChild(call->gtControlExpr, indentStack,
                                (call->gtControlExpr == lastChild) ? IIArcBottom : IIArc, "control expr", topOnly);
                }

#if !FEATURE_FIXED_OUT_ARGS
                regList list = call->regArgList;
#endif
                /* process the late argument list */
                int lateArgIndex = 0;
                for (GenTreeArgList* lateArgs = call->gtCallLateArgs; lateArgs;
                     (lateArgIndex++, lateArgs = lateArgs->Rest()))
                {
                    GenTree* argx;

                    argx = lateArgs->Current();

                    IndentInfo arcType = (lateArgs->Rest() == nullptr) ? IIArcBottom : IIArc;
                    gtGetLateArgMsg(call, argx, lateArgIndex, -1, bufp, sizeof(buf));
                    gtDispChild(argx, indentStack, arcType, bufp, topOnly);
                }
            }
        }
        break;

        case GT_STMT:
            printf("\n");

            if (!topOnly)
            {
                gtDispChild(tree->gtStmt.gtStmtExpr, indentStack, IIArcBottom);
            }
            break;

        case GT_ARR_ELEM:
            gtDispVN(tree);
            printf("\n");

            if (!topOnly)
            {
                gtDispChild(tree->gtArrElem.gtArrObj, indentStack, IIArc, nullptr, topOnly);

                unsigned dim;
                for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
                {
                    IndentInfo arcType = ((dim + 1) == tree->gtArrElem.gtArrRank) ? IIArcBottom : IIArc;
                    gtDispChild(tree->gtArrElem.gtArrInds[dim], indentStack, arcType, nullptr, topOnly);
                }
            }
            break;

        case GT_ARR_OFFSET:
            gtDispVN(tree);
            printf("\n");
            if (!topOnly)
            {
                gtDispChild(tree->gtArrOffs.gtOffset, indentStack, IIArc, nullptr, topOnly);
                gtDispChild(tree->gtArrOffs.gtIndex, indentStack, IIArc, nullptr, topOnly);
                gtDispChild(tree->gtArrOffs.gtArrObj, indentStack, IIArcBottom, nullptr, topOnly);
            }
            break;

        case GT_CMPXCHG:
            gtDispVN(tree);
            printf("\n");
            if (!topOnly)
            {
                gtDispChild(tree->gtCmpXchg.gtOpLocation, indentStack, IIArc, nullptr, topOnly);
                gtDispChild(tree->gtCmpXchg.gtOpValue, indentStack, IIArc, nullptr, topOnly);
                gtDispChild(tree->gtCmpXchg.gtOpComparand, indentStack, IIArcBottom, nullptr, topOnly);
            }
            break;

        case GT_ARR_BOUNDS_CHECK:
#ifdef FEATURE_SIMD
        case GT_SIMD_CHK:
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
        case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
            gtDispVN(tree);
            printf("\n");
            if (!topOnly)
            {
                gtDispChild(tree->gtBoundsChk.gtIndex, indentStack, IIArc, nullptr, topOnly);
                gtDispChild(tree->gtBoundsChk.gtArrLen, indentStack, IIArcBottom, nullptr, topOnly);
            }
            break;

        case GT_STORE_DYN_BLK:
        case GT_DYN_BLK:
            if (tree->OperIsCopyBlkOp())
            {
                printf(" (copy)");
            }
            else if (tree->OperIsInitBlkOp())
            {
                printf(" (init)");
            }
            gtDispVN(tree);
            printf("\n");
            if (!topOnly)
            {
                if (tree->gtDynBlk.Data() != nullptr)
                {
                    gtDispChild(tree->gtDynBlk.Data(), indentStack, IIArc, nullptr, topOnly);
                }
                gtDispChild(tree->gtDynBlk.Addr(), indentStack, IIArc, nullptr, topOnly);
                gtDispChild(tree->gtDynBlk.gtDynamicSize, indentStack, IIArcBottom, nullptr, topOnly);
            }
            break;

        default:
            printf("<DON'T KNOW HOW TO DISPLAY THIS NODE> :");
            printf(""); // null string means flush
            break;
    }
}
11062
11063 //------------------------------------------------------------------------
11064 // gtGetArgMsg: Construct a message about the given argument
11065 //
11066 // Arguments:
11067 //    call      - The call for which 'arg' is an argument
11068 //    arg       - The argument for which a message should be constructed
11069 //    argNum    - The ordinal number of the arg in the argument list
11070 //    listCount - When printing in LIR form this is the count for a GT_FIELD_LIST
11071 //                or -1 if we are not printing in LIR form
11072 //    bufp      - A pointer to the buffer into which the message is written
11073 //    bufLength - The length of the buffer pointed to by bufp
11074 //
11075 // Return Value:
11076 //    No return value, but bufp is written.
11077 //
11078 // Assumptions:
//    'call' must be a call node
//    'argNum' must be the index of an argument of 'call' (else gtArgEntryByArgNum will assert)
11081
// Constructs a short per-argument label (e.g. "arg2 out+08") used by the call-node
// dump above each child argument. The "%c" with a trailing 0 argument appends an
// explicit NUL terminator -- a pattern used consistently by this file's formatters.
void Compiler::gtGetArgMsg(
    GenTreeCall* call, GenTree* arg, unsigned argNum, int listCount, char* bufp, unsigned bufLength)
{
    if (call->gtCallLateArgs != nullptr)
    {
        fgArgTabEntry* curArgTabEntry = gtArgEntryByArgNum(call, argNum);
        assert(curArgTabEntry);

        if (arg->gtFlags & GTF_LATE_ARG)
        {
            // The actual value is placed by a separate late arg; this node is just setup.
            sprintf_s(bufp, bufLength, "arg%d SETUP%c", argNum, 0);
        }
        else
        {
#ifdef _TARGET_ARM_
            // ARM split args are passed partly in registers and partly on the stack.
            if (curArgTabEntry->isSplit)
            {
                regNumber firstReg = curArgTabEntry->regNum;
                if (listCount == -1)
                {
                    // Not LIR form: describe the whole register range plus the outgoing stack slot.
                    if (curArgTabEntry->numRegs == 1)
                    {
                        sprintf_s(bufp, bufLength, "arg%d %s out+%02x%c", argNum, compRegVarName(firstReg),
                                  (curArgTabEntry->slotNum) * TARGET_POINTER_SIZE, 0);
                    }
                    else
                    {
                        // "r0,r1" for two regs, "r0-r2" for more.
                        regNumber lastReg   = REG_STK;
                        char      separator = (curArgTabEntry->numRegs == 2) ? ',' : '-';
                        if (curArgTabEntry->isHfaRegArg)
                        {
                            unsigned lastRegNum = genMapFloatRegNumToRegArgNum(firstReg) + curArgTabEntry->numRegs - 1;
                            lastReg             = genMapFloatRegArgNumToRegNum(lastRegNum);
                        }
                        else
                        {
                            unsigned lastRegNum = genMapIntRegNumToRegArgNum(firstReg) + curArgTabEntry->numRegs - 1;
                            lastReg             = genMapIntRegArgNumToRegNum(lastRegNum);
                        }
                        sprintf_s(bufp, bufLength, "arg%d %s%c%s out+%02x%c", argNum, compRegVarName(firstReg),
                                  separator, compRegVarName(lastReg), (curArgTabEntry->slotNum) * TARGET_POINTER_SIZE,
                                  0);
                    }
                }
                else
                {
                    // LIR form: 'listCount' selects which piece of the split arg this is.
                    unsigned curArgNum = BAD_VAR_NUM;
                    bool     isFloat   = curArgTabEntry->isHfaRegArg;
                    if (isFloat)
                    {
                        curArgNum = genMapFloatRegNumToRegArgNum(firstReg) + listCount;
                    }
                    else
                    {
                        curArgNum = genMapIntRegNumToRegArgNum(firstReg) + listCount;
                    }

                    if (!isFloat && curArgNum < MAX_REG_ARG)
                    {
                        regNumber curReg = genMapIntRegArgNumToRegNum(curArgNum);
                        sprintf_s(bufp, bufLength, "arg%d m%d %s%c", argNum, listCount, compRegVarName(curReg), 0);
                    }
                    else if (isFloat && curArgNum < MAX_FLOAT_REG_ARG)
                    {
                        regNumber curReg = genMapFloatRegArgNumToRegNum(curArgNum);
                        sprintf_s(bufp, bufLength, "arg%d m%d %s%c", argNum, listCount, compRegVarName(curReg), 0);
                    }
                    else
                    {
                        // Piece lands beyond the register portion: it's in an outgoing stack slot.
                        unsigned stackSlot = listCount - curArgTabEntry->numRegs;
                        sprintf_s(bufp, bufLength, "arg%d m%d out+%02x%c", argNum, listCount,
                                  stackSlot * TARGET_POINTER_SIZE, 0);
                    }
                }
                return;
            }
#endif // _TARGET_ARM_
#if FEATURE_FIXED_OUT_ARGS
            if (listCount == -1)
            {
                sprintf_s(bufp, bufLength, "arg%d out+%02x%c", argNum, curArgTabEntry->slotNum * TARGET_POINTER_SIZE,
                          0);
            }
            else // listCount is 0,1,2 or 3
            {
                assert(listCount <= MAX_ARG_REG_COUNT);
                sprintf_s(bufp, bufLength, "arg%d out+%02x%c", argNum,
                          (curArgTabEntry->slotNum + listCount) * TARGET_POINTER_SIZE, 0);
            }
#else
            sprintf_s(bufp, bufLength, "arg%d on STK%c", argNum, 0);
#endif
        }
    }
    else
    {
        // No late args: nothing has been reordered, so a bare "argN" label suffices.
        sprintf_s(bufp, bufLength, "arg%d%c", argNum, 0);
    }
}
11181
11182 //------------------------------------------------------------------------
11183 // gtGetLateArgMsg: Construct a message about the given argument
11184 //
11185 // Arguments:
11186 //    call         - The call for which 'arg' is an argument
11187 //    argx         - The argument for which a message should be constructed
11188 //    lateArgIndex - The ordinal number of the arg in the lastArg  list
11189 //    listCount    - When printing in LIR form this is the count for a multireg GT_FIELD_LIST
11190 //                   or -1 if we are not printing in LIR form
11191 //    bufp         - A pointer to the buffer into which the message is written
11192 //    bufLength    - The length of the buffer pointed to by bufp
11193 //
11194 // Return Value:
11195 //    No return value, but bufp is written.
11196 //
11197 // Assumptions:
11198 //    'call' must be a call node
11199 //    'arg' must be an argument to 'call' (else gtArgEntryByNode will assert)
11200
11201 void Compiler::gtGetLateArgMsg(
11202     GenTreeCall* call, GenTree* argx, int lateArgIndex, int listCount, char* bufp, unsigned bufLength)
11203 {
11204     assert(!argx->IsArgPlaceHolderNode()); // No place holders nodes are in gtCallLateArgs;
11205
11206     fgArgTabEntry* curArgTabEntry = gtArgEntryByLateArgIndex(call, lateArgIndex);
11207     assert(curArgTabEntry);
11208     regNumber argReg = curArgTabEntry->regNum;
11209
11210 #if !FEATURE_FIXED_OUT_ARGS
11211     assert(lateArgIndex < call->regArgListCount);
11212     assert(argReg == call->regArgList[lateArgIndex]);
11213 #else
11214     if (argReg == REG_STK)
11215     {
11216         sprintf_s(bufp, bufLength, "arg%d in out+%02x%c", curArgTabEntry->argNum,
11217                   curArgTabEntry->slotNum * TARGET_POINTER_SIZE, 0);
11218     }
11219     else
11220 #endif
11221     {
11222         if (gtArgIsThisPtr(curArgTabEntry))
11223         {
11224             sprintf_s(bufp, bufLength, "this in %s%c", compRegVarName(argReg), 0);
11225         }
11226 #ifdef _TARGET_ARM_
11227         else if (curArgTabEntry->isSplit)
11228         {
11229             regNumber firstReg = curArgTabEntry->regNum;
11230             unsigned  argNum   = curArgTabEntry->argNum;
11231             if (listCount == -1)
11232             {
11233                 if (curArgTabEntry->numRegs == 1)
11234                 {
11235                     sprintf_s(bufp, bufLength, "arg%d %s out+%02x%c", argNum, compRegVarName(firstReg),
11236                               (curArgTabEntry->slotNum) * TARGET_POINTER_SIZE, 0);
11237                 }
11238                 else
11239                 {
11240                     regNumber lastReg   = REG_STK;
11241                     char      separator = (curArgTabEntry->numRegs == 2) ? ',' : '-';
11242                     if (curArgTabEntry->isHfaRegArg)
11243                     {
11244                         unsigned lastRegNum = genMapFloatRegNumToRegArgNum(firstReg) + curArgTabEntry->numRegs - 1;
11245                         lastReg             = genMapFloatRegArgNumToRegNum(lastRegNum);
11246                     }
11247                     else
11248                     {
11249                         unsigned lastRegNum = genMapIntRegNumToRegArgNum(firstReg) + curArgTabEntry->numRegs - 1;
11250                         lastReg             = genMapIntRegArgNumToRegNum(lastRegNum);
11251                     }
11252                     sprintf_s(bufp, bufLength, "arg%d %s%c%s out+%02x%c", argNum, compRegVarName(firstReg), separator,
11253                               compRegVarName(lastReg), (curArgTabEntry->slotNum) * TARGET_POINTER_SIZE, 0);
11254                 }
11255             }
11256             else
11257             {
11258                 unsigned curArgNum = BAD_VAR_NUM;
11259                 bool     isFloat   = curArgTabEntry->isHfaRegArg;
11260                 if (isFloat)
11261                 {
11262                     curArgNum = genMapFloatRegNumToRegArgNum(firstReg) + listCount;
11263                 }
11264                 else
11265                 {
11266                     curArgNum = genMapIntRegNumToRegArgNum(firstReg) + listCount;
11267                 }
11268
11269                 if (!isFloat && curArgNum < MAX_REG_ARG)
11270                 {
11271                     regNumber curReg = genMapIntRegArgNumToRegNum(curArgNum);
11272                     sprintf_s(bufp, bufLength, "arg%d m%d %s%c", argNum, listCount, compRegVarName(curReg), 0);
11273                 }
11274                 else if (isFloat && curArgNum < MAX_FLOAT_REG_ARG)
11275                 {
11276                     regNumber curReg = genMapFloatRegArgNumToRegNum(curArgNum);
11277                     sprintf_s(bufp, bufLength, "arg%d m%d %s%c", argNum, listCount, compRegVarName(curReg), 0);
11278                 }
11279                 else
11280                 {
11281                     unsigned stackSlot = listCount - curArgTabEntry->numRegs;
11282                     sprintf_s(bufp, bufLength, "arg%d m%d out+%02x%c", argNum, listCount,
11283                               stackSlot * TARGET_POINTER_SIZE, 0);
11284                 }
11285             }
11286             return;
11287         }
11288 #endif // _TARGET_ARM_
11289         else
11290         {
11291 #if FEATURE_MULTIREG_ARGS
11292             if (curArgTabEntry->numRegs >= 2)
11293             {
11294                 // listCount could be -1 but it is signed, so this comparison is OK.
11295                 assert(listCount <= MAX_ARG_REG_COUNT);
11296                 char separator = (curArgTabEntry->numRegs == 2) ? ',' : '-';
11297                 sprintf_s(bufp, bufLength, "arg%d %s%c%s%c", curArgTabEntry->argNum, compRegVarName(argReg), separator,
11298                           compRegVarName(curArgTabEntry->getRegNum(curArgTabEntry->numRegs - 1)), 0);
11299             }
11300             else
11301 #endif
11302             {
11303                 sprintf_s(bufp, bufLength, "arg%d in %s%c", curArgTabEntry->argNum, compRegVarName(argReg), 0);
11304             }
11305         }
11306     }
11307 }
11308
11309 //------------------------------------------------------------------------
11310 // gtDispArgList: Dump the tree for a call arg list
11311 //
11312 // Arguments:
11313 //    call         - The call to dump arguments for
11314 //    indentStack  - the specification for the current level of indentation & arcs
11315 //
11316 // Return Value:
11317 //    None.
11318 //
11319 void Compiler::gtDispArgList(GenTreeCall* call, IndentStack* indentStack)
11320 {
11321     GenTree*  args      = call->gtCallArgs;
11322     unsigned  argnum    = 0;
11323     const int BufLength = 256;
11324     char      buf[BufLength];
11325     char*     bufp        = &buf[0];
11326     unsigned  numChildren = call->NumChildren();
11327     assert(numChildren != 0);
11328     bool argListIsLastChild = (args == call->GetChild(numChildren - 1));
11329
11330     IndentInfo arcType = IIArc;
11331     if (call->gtCallObjp != nullptr)
11332     {
11333         argnum++;
11334     }
11335
11336     while (args != nullptr)
11337     {
11338         assert(args->gtOper == GT_LIST);
11339         GenTree* arg = args->gtOp.gtOp1;
11340         if (!arg->IsNothingNode() && !arg->IsArgPlaceHolderNode())
11341         {
11342             gtGetArgMsg(call, arg, argnum, -1, bufp, BufLength);
11343             if (argListIsLastChild && (args->gtOp.gtOp2 == nullptr))
11344             {
11345                 arcType = IIArcBottom;
11346             }
11347             gtDispChild(arg, indentStack, arcType, bufp, false);
11348         }
11349         args = args->gtOp.gtOp2;
11350         argnum++;
11351     }
11352 }
11353
11354 //------------------------------------------------------------------------
11355 // gtDispArgList: Dump the tree for a call arg list
11356 //
11357 // Arguments:
11358 //    tree         - The call for which 'arg' is an argument
11359 //    indentStack  - the specification for the current level of indentation & arcs
11360 //
11361 // Return Value:
11362 //    None.
11363 //
11364 // Assumptions:
11365 //    'tree' must be a GT_LIST node
11366
11367 void Compiler::gtDispTreeList(GenTree* tree, IndentStack* indentStack /* = nullptr */)
11368 {
11369     for (/*--*/; tree != nullptr; tree = tree->gtNext)
11370     {
11371         gtDispTree(tree, indentStack);
11372         printf("\n");
11373     }
11374 }
11375
11376 //------------------------------------------------------------------------
11377 // Compiler::gtDispRange: dumps a range of LIR.
11378 //
11379 // Arguments:
11380 //    range - the range of LIR to display.
11381 //
11382 void Compiler::gtDispRange(LIR::ReadOnlyRange const& range)
11383 {
11384     for (GenTree* node : range)
11385     {
11386         gtDispLIRNode(node);
11387     }
11388 }
11389
11390 //------------------------------------------------------------------------
11391 // Compiler::gtDispTreeRange: dumps the LIR range that contains all of the
11392 //                            nodes in the dataflow tree rooted at a given
11393 //                            node.
11394 //
11395 // Arguments:
11396 //    containingRange - the LIR range that contains the root node.
11397 //    tree - the root of the dataflow tree.
11398 //
11399 void Compiler::gtDispTreeRange(LIR::Range& containingRange, GenTree* tree)
11400 {
11401     bool unused;
11402     gtDispRange(containingRange.GetTreeRange(tree, &unused));
11403 }
11404
11405 //------------------------------------------------------------------------
11406 // Compiler::gtDispLIRNode: dumps a single LIR node.
11407 //
11408 // Arguments:
11409 //    node - the LIR node to dump.
11410 //    prefixMsg - an optional prefix for each line of output.
11411 //
11412 void Compiler::gtDispLIRNode(GenTree* node, const char* prefixMsg /* = nullptr */)
11413 {
11414     auto displayOperand = [](GenTree* operand, const char* message, IndentInfo operandArc, IndentStack& indentStack,
11415                              size_t prefixIndent) {
11416         assert(operand != nullptr);
11417         assert(message != nullptr);
11418
11419         if (prefixIndent != 0)
11420         {
11421             printf("%*s", (int)prefixIndent, "");
11422         }
11423
11424         // 49 spaces for alignment
11425         printf("%-49s", "");
11426 #if FEATURE_SET_FLAGS
11427         // additional flag enlarges the flag field by one character
11428         printf(" ");
11429 #endif
11430
11431         indentStack.Push(operandArc);
11432         indentStack.print();
11433         indentStack.Pop();
11434         operandArc = IIArc;
11435
11436         printf("  t%-5d %-6s %s\n", operand->gtTreeID, varTypeName(operand->TypeGet()), message);
11437     };
11438
11439     IndentStack indentStack(this);
11440
11441     size_t prefixIndent = 0;
11442     if (prefixMsg != nullptr)
11443     {
11444         prefixIndent = strlen(prefixMsg);
11445     }
11446
11447     const int bufLength = 256;
11448     char      buf[bufLength];
11449
11450     const bool nodeIsCall = node->IsCall();
11451
11452     // Visit operands
11453     IndentInfo operandArc = IIArcTop;
11454     for (GenTree* operand : node->Operands())
11455     {
11456         if (operand->IsArgPlaceHolderNode() || !operand->IsValue())
11457         {
11458             // Either of these situations may happen with calls.
11459             continue;
11460         }
11461
11462         if (nodeIsCall)
11463         {
11464             GenTreeCall* call = node->AsCall();
11465             if (operand == call->gtCallObjp)
11466             {
11467                 sprintf_s(buf, sizeof(buf), "this in %s", compRegVarName(REG_ARG_0));
11468                 displayOperand(operand, buf, operandArc, indentStack, prefixIndent);
11469             }
11470             else if (operand == call->gtCallAddr)
11471             {
11472                 displayOperand(operand, "calli tgt", operandArc, indentStack, prefixIndent);
11473             }
11474             else if (operand == call->gtControlExpr)
11475             {
11476                 displayOperand(operand, "control expr", operandArc, indentStack, prefixIndent);
11477             }
11478             else if (operand == call->gtCallCookie)
11479             {
11480                 displayOperand(operand, "cookie", operandArc, indentStack, prefixIndent);
11481             }
11482             else
11483             {
11484                 fgArgTabEntry* curArgTabEntry = gtArgEntryByNode(call, operand);
11485                 assert(curArgTabEntry);
11486
11487                 if (operand->OperGet() == GT_LIST)
11488                 {
11489                     int listIndex = 0;
11490                     for (GenTreeArgList* element = operand->AsArgList(); element != nullptr; element = element->Rest())
11491                     {
11492                         operand = element->Current();
11493                         if (curArgTabEntry->lateArgInx == (unsigned)-1)
11494                         {
11495                             gtGetArgMsg(call, operand, curArgTabEntry->argNum, listIndex, buf, sizeof(buf));
11496                         }
11497                         else
11498                         {
11499                             gtGetLateArgMsg(call, operand, curArgTabEntry->lateArgInx, listIndex, buf, sizeof(buf));
11500                         }
11501
11502                         displayOperand(operand, buf, operandArc, indentStack, prefixIndent);
11503                         operandArc = IIArc;
11504                     }
11505                 }
11506                 else
11507                 {
11508                     if (!curArgTabEntry->isLateArg())
11509                     {
11510                         gtGetArgMsg(call, operand, curArgTabEntry->argNum, -1, buf, sizeof(buf));
11511                     }
11512                     else
11513                     {
11514                         gtGetLateArgMsg(call, operand, curArgTabEntry->lateArgInx, -1, buf, sizeof(buf));
11515                     }
11516
11517                     displayOperand(operand, buf, operandArc, indentStack, prefixIndent);
11518                 }
11519             }
11520         }
11521         else if (node->OperIsDynBlkOp())
11522         {
11523             if (operand == node->AsBlk()->Addr())
11524             {
11525                 displayOperand(operand, "lhs", operandArc, indentStack, prefixIndent);
11526             }
11527             else if (operand == node->AsBlk()->Data())
11528             {
11529                 displayOperand(operand, "rhs", operandArc, indentStack, prefixIndent);
11530             }
11531             else
11532             {
11533                 assert(operand == node->AsDynBlk()->gtDynamicSize);
11534                 displayOperand(operand, "size", operandArc, indentStack, prefixIndent);
11535             }
11536         }
11537         else if (node->OperGet() == GT_DYN_BLK)
11538         {
11539             if (operand == node->AsBlk()->Addr())
11540             {
11541                 displayOperand(operand, "lhs", operandArc, indentStack, prefixIndent);
11542             }
11543             else
11544             {
11545                 assert(operand == node->AsDynBlk()->gtDynamicSize);
11546                 displayOperand(operand, "size", operandArc, indentStack, prefixIndent);
11547             }
11548         }
11549         else if (node->OperIs(GT_ASG))
11550         {
11551             if (operand == node->gtGetOp1())
11552             {
11553                 displayOperand(operand, "lhs", operandArc, indentStack, prefixIndent);
11554             }
11555             else
11556             {
11557                 displayOperand(operand, "rhs", operandArc, indentStack, prefixIndent);
11558             }
11559         }
11560         else
11561         {
11562             displayOperand(operand, "", operandArc, indentStack, prefixIndent);
11563         }
11564
11565         operandArc = IIArc;
11566     }
11567
11568     // Visit the operator
11569
11570     if (prefixMsg != nullptr)
11571     {
11572         printf("%s", prefixMsg);
11573     }
11574
11575     const bool topOnly = true;
11576     const bool isLIR   = true;
11577     gtDispTree(node, &indentStack, nullptr, topOnly, isLIR);
11578 }
11579
11580 /*****************************************************************************/
11581 #endif // DEBUG
11582
11583 /*****************************************************************************
11584  *
11585  *  Check if the given node can be folded,
11586  *  and call the methods to perform the folding
11587  */
11588
GenTree* Compiler::gtFoldExpr(GenTree* tree)
{
    unsigned kind = tree->OperKind();

    /* We must have a simple operation to fold */

    // If we're in CSE, it's not safe to perform tree
    // folding given that it can potentially
    // change considered CSE candidates.
    if (optValnumCSE_phase)
    {
        return tree;
    }

    // Only simple operators (unary/binary) are candidates for folding.
    if (!(kind & GTK_SMPOP))
    {
        return tree;
    }

    GenTree* op1 = tree->gtOp.gtOp1;

    /* Filter out non-foldable trees that can have constant children */

    assert(kind & (GTK_UNOP | GTK_BINOP));
    switch (tree->gtOper)
    {
        case GT_RETFILT:
        case GT_RETURN:
        case GT_IND:
            return tree;
        default:
            break;
    }

    /* try to fold the current node */

    // Unary operator with a constant operand: fold to a constant.
    if ((kind & GTK_UNOP) && op1)
    {
        if (op1->OperKind() & GTK_CONST)
        {
            return gtFoldExprConst(tree);
        }
    }
    else if ((kind & GTK_BINOP) && op1 && tree->gtOp.gtOp2 &&
             // Don't take out conditionals for debugging
             (opts.OptimizationEnabled() || !tree->OperIsCompare()))
    {
        GenTree* op2 = tree->gtOp.gtOp2;

        // The atomic operations are exempted here because they are never computable statically;
        // one of their arguments is an address.
        if (((op1->OperKind() & op2->OperKind()) & GTK_CONST) && !tree->OperIsAtomicOp())
        {
            /* both nodes are constants - fold the expression */
            return gtFoldExprConst(tree);
        }
        else if ((op1->OperKind() | op2->OperKind()) & GTK_CONST)
        {
            /* at least one is a constant - see if we have a
             * special operator that can use only one constant
             * to fold - e.g. booleans */

            return gtFoldExprSpecial(tree);
        }
        else if (tree->OperIsCompare())
        {
            /* comparisons of two local variables can sometimes be folded */

            return gtFoldExprCompare(tree);
        }
        else if (op2->OperGet() == GT_COLON)
        {
            // A GT_QMARK whose GT_COLON has identical arms can be reduced to
            // one arm (plus any side effects of the condition).
            assert(tree->OperGet() == GT_QMARK);

            GenTree* colon_op1 = op2->gtOp.gtOp1;
            GenTree* colon_op2 = op2->gtOp.gtOp2;

            if (gtCompareTree(colon_op1, colon_op2))
            {
                // Both sides of the GT_COLON are the same tree

                GenTree* sideEffList = nullptr;
                gtExtractSideEffList(op1, &sideEffList);

                // Clear colon flags only if the qmark itself is not conditionaly executed
                if ((tree->gtFlags & GTF_COLON_COND) == 0)
                {
                    fgWalkTreePre(&colon_op2, gtClearColonCond);
                }

                if (sideEffList == nullptr)
                {
                    // No side-effects, just return colon_op2
                    return colon_op2;
                }
                else
                {
#ifdef DEBUG
                    if (verbose)
                    {
                        printf("\nIdentical GT_COLON trees with side effects! Extracting side effects...\n");
                        gtDispTree(sideEffList);
                        printf("\n");
                    }
#endif
                    // Change the GT_COLON into a GT_COMMA node with the side-effects
                    op2->ChangeOper(GT_COMMA);
                    op2->gtFlags |= (sideEffList->gtFlags & GTF_ALL_EFFECT);
                    op2->gtOp.gtOp1 = sideEffList;
                    return op2;
                }
            }
        }
    }

    /* Return the original node (folded/bashed or not) */

    return tree;
}
11708
11709 //------------------------------------------------------------------------
11710 // gtFoldExprCall: see if a call is foldable
11711 //
11712 // Arguments:
11713 //    call - call to examine
11714 //
11715 // Returns:
11716 //    The original call if no folding happened.
11717 //    An alternative tree if folding happens.
11718 //
11719 // Notes:
11720 //    Checks for calls to Type.op_Equality, Type.op_Inequality, and
11721 //    Enum.HasFlag, and if the call is to one of these,
11722 //    attempts to optimize.
11723
11724 GenTree* Compiler::gtFoldExprCall(GenTreeCall* call)
11725 {
11726     // Can only fold calls to special intrinsics.
11727     if ((call->gtCallMoreFlags & GTF_CALL_M_SPECIAL_INTRINSIC) == 0)
11728     {
11729         return call;
11730     }
11731
11732     // Defer folding if not optimizing.
11733     if (opts.OptimizationDisabled())
11734     {
11735         return call;
11736     }
11737
11738     // Fetch id of the intrinsic.
11739     const CorInfoIntrinsics methodID = info.compCompHnd->getIntrinsicID(call->gtCallMethHnd);
11740
11741     switch (methodID)
11742     {
11743         case CORINFO_INTRINSIC_TypeEQ:
11744         case CORINFO_INTRINSIC_TypeNEQ:
11745         {
11746             noway_assert(call->TypeGet() == TYP_INT);
11747             GenTree* op1 = call->gtCallArgs->gtOp.gtOp1;
11748             GenTree* op2 = call->gtCallArgs->gtOp.gtOp2->gtOp.gtOp1;
11749
11750             // If either operand is known to be a RuntimeType, this can be folded
11751             GenTree* result = gtFoldTypeEqualityCall(methodID, op1, op2);
11752             if (result != nullptr)
11753             {
11754                 return result;
11755             }
11756             break;
11757         }
11758
11759         default:
11760             break;
11761     }
11762
11763     // Check for a new-style jit intrinsic.
11764     const NamedIntrinsic ni = lookupNamedIntrinsic(call->gtCallMethHnd);
11765
11766     if (ni == NI_System_Enum_HasFlag)
11767     {
11768         GenTree* thisOp = call->gtCallObjp;
11769         GenTree* flagOp = call->gtCallArgs->gtOp.gtOp1;
11770         GenTree* result = gtOptimizeEnumHasFlag(thisOp, flagOp);
11771
11772         if (result != nullptr)
11773         {
11774             return result;
11775         }
11776     }
11777
11778     return call;
11779 }
11780
11781 //------------------------------------------------------------------------
11782 // gtFoldTypeEqualityCall: see if a (potential) type equality call is foldable
11783 //
11784 // Arguments:
11785 //    methodID -- type equality intrinsic ID
11786 //    op1 -- first argument to call
11787 //    op2 -- second argument to call
11788 //
11789 // Returns:
//    nullptr if no folding happened.
11791 //    An alternative tree if folding happens.
11792 //
11793 // Notes:
//    If either operand is known to be a RuntimeType, then the type
11795 //    equality methods will simply check object identity and so we can
11796 //    fold the call into a simple compare of the call's operands.
11797
11798 GenTree* Compiler::gtFoldTypeEqualityCall(CorInfoIntrinsics methodID, GenTree* op1, GenTree* op2)
11799 {
11800     // The method must be be a type equality intrinsic
11801     assert(methodID == CORINFO_INTRINSIC_TypeEQ || methodID == CORINFO_INTRINSIC_TypeNEQ);
11802
11803     if ((gtGetTypeProducerKind(op1) == TPK_Unknown) && (gtGetTypeProducerKind(op2) == TPK_Unknown))
11804     {
11805         return nullptr;
11806     }
11807
11808     const genTreeOps simpleOp = (methodID == CORINFO_INTRINSIC_TypeEQ) ? GT_EQ : GT_NE;
11809
11810     JITDUMP("\nFolding call to Type:op_%s to a simple compare via %s\n",
11811             methodID == CORINFO_INTRINSIC_TypeEQ ? "Equality" : "Inequality", GenTree::OpName(simpleOp));
11812
11813     GenTree* compare = gtNewOperNode(simpleOp, TYP_INT, op1, op2);
11814
11815     return compare;
11816 }
11817
11818 /*****************************************************************************
11819  *
11820  *  Some comparisons can be folded:
11821  *
11822  *    locA        == locA
11823  *    classVarA   == classVarA
11824  *    locA + locB == locB + locA
11825  *
11826  */
11827
11828 GenTree* Compiler::gtFoldExprCompare(GenTree* tree)
11829 {
11830     GenTree* op1 = tree->gtOp.gtOp1;
11831     GenTree* op2 = tree->gtOp.gtOp2;
11832
11833     assert(tree->OperIsCompare());
11834
11835     /* Filter out cases that cannot be folded here */
11836
11837     /* Do not fold floats or doubles (e.g. NaN != Nan) */
11838
11839     if (varTypeIsFloating(op1->TypeGet()))
11840     {
11841         return tree;
11842     }
11843
11844     /* Currently we can only fold when the two subtrees exactly match */
11845
11846     if ((tree->gtFlags & GTF_SIDE_EFFECT) || GenTree::Compare(op1, op2, true) == false)
11847     {
11848         return tree; /* return unfolded tree */
11849     }
11850
11851     GenTree* cons;
11852
11853     switch (tree->gtOper)
11854     {
11855         case GT_EQ:
11856         case GT_LE:
11857         case GT_GE:
11858             cons = gtNewIconNode(true); /* Folds to GT_CNS_INT(true) */
11859             break;
11860
11861         case GT_NE:
11862         case GT_LT:
11863         case GT_GT:
11864             cons = gtNewIconNode(false); /* Folds to GT_CNS_INT(false) */
11865             break;
11866
11867         default:
11868             assert(!"Unexpected relOp");
11869             return tree;
11870     }
11871
11872     /* The node has beeen folded into 'cons' */
11873
11874     if (fgGlobalMorph)
11875     {
11876         fgMorphTreeDone(cons);
11877     }
11878     else
11879     {
11880         cons->gtNext = tree->gtNext;
11881         cons->gtPrev = tree->gtPrev;
11882     }
11883
11884     return cons;
11885 }
11886
11887 //------------------------------------------------------------------------
11888 // gtCreateHandleCompare: generate a type handle comparison
11889 //
11890 // Arguments:
11891 //    oper -- comparison operation (equal/not equal)
11892 //    op1 -- first operand
11893 //    op2 -- second operand
11894 //    typeCheckInliningResult -- indicates how the comparison should happen
11895 //
11896 // Returns:
11897 //    Type comparison tree
11898 //
11899
11900 GenTree* Compiler::gtCreateHandleCompare(genTreeOps             oper,
11901                                          GenTree*               op1,
11902                                          GenTree*               op2,
11903                                          CorInfoInlineTypeCheck typeCheckInliningResult)
11904 {
11905     // If we can compare pointers directly, just emit the binary operation
11906     if (typeCheckInliningResult == CORINFO_INLINE_TYPECHECK_PASS)
11907     {
11908         return gtNewOperNode(oper, TYP_INT, op1, op2);
11909     }
11910
11911     assert(typeCheckInliningResult == CORINFO_INLINE_TYPECHECK_USE_HELPER);
11912
11913     // Emit a call to a runtime helper
11914     GenTreeArgList* helperArgs = gtNewArgList(op1, op2);
11915     GenTree*        ret        = gtNewHelperCallNode(CORINFO_HELP_ARE_TYPES_EQUIVALENT, TYP_INT, helperArgs);
11916     if (oper == GT_EQ)
11917     {
11918         ret = gtNewOperNode(GT_NE, TYP_INT, ret, gtNewIconNode(0, TYP_INT));
11919     }
11920     else
11921     {
11922         assert(oper == GT_NE);
11923         ret = gtNewOperNode(GT_EQ, TYP_INT, ret, gtNewIconNode(0, TYP_INT));
11924     }
11925
11926     return ret;
11927 }
11928
11929 //------------------------------------------------------------------------
11930 // gtFoldTypeCompare: see if a type comparison can be further simplified
11931 //
11932 // Arguments:
11933 //    tree -- tree possibly comparing types
11934 //
11935 // Returns:
11936 //    An alternative tree if folding happens.
11937 //    Original tree otherwise.
11938 //
11939 // Notes:
11940 //    Checks for
11941 //        typeof(...) == obj.GetType()
11942 //        typeof(...) == typeof(...)
11943 //
11944 //    And potentially optimizes away the need to obtain actual
11945 //    RuntimeType objects to do the comparison.
11946
11947 GenTree* Compiler::gtFoldTypeCompare(GenTree* tree)
11948 {
11949     // Only handle EQ and NE
11950     // (maybe relop vs null someday)
11951     const genTreeOps oper = tree->OperGet();
11952     if ((oper != GT_EQ) && (oper != GT_NE))
11953     {
11954         return tree;
11955     }
11956
11957     // Screen for the right kinds of operands
11958     GenTree* const         op1     = tree->gtOp.gtOp1;
11959     const TypeProducerKind op1Kind = gtGetTypeProducerKind(op1);
11960     if (op1Kind == TPK_Unknown)
11961     {
11962         return tree;
11963     }
11964
11965     GenTree* const         op2     = tree->gtOp.gtOp2;
11966     const TypeProducerKind op2Kind = gtGetTypeProducerKind(op2);
11967     if (op2Kind == TPK_Unknown)
11968     {
11969         return tree;
11970     }
11971
11972     // We must have a handle on one side or the other here to optimize,
11973     // otherwise we can't be sure that optimizing is sound.
11974     const bool op1IsFromHandle = (op1Kind == TPK_Handle);
11975     const bool op2IsFromHandle = (op2Kind == TPK_Handle);
11976
11977     if (!(op1IsFromHandle || op2IsFromHandle))
11978     {
11979         return tree;
11980     }
11981
11982     // If both types are created via handles, we can simply compare
11983     // handles (or the indirection cells for handles) instead of the
11984     // types that they'd create.
11985     if (op1IsFromHandle && op2IsFromHandle)
11986     {
11987         JITDUMP("Optimizing compare of types-from-handles to instead compare handles\n");
11988         GenTree*             op1ClassFromHandle = tree->gtOp.gtOp1->gtCall.gtCallArgs->gtOp.gtOp1;
11989         GenTree*             op2ClassFromHandle = tree->gtOp.gtOp2->gtCall.gtCallArgs->gtOp.gtOp1;
11990         GenTree*             op1TunneledHandle  = nullptr;
11991         GenTree*             op2TunneledHandle  = nullptr;
11992         CORINFO_CLASS_HANDLE cls1Hnd            = NO_CLASS_HANDLE;
11993         CORINFO_CLASS_HANDLE cls2Hnd            = NO_CLASS_HANDLE;
11994         unsigned             runtimeLookupCount = 0;
11995
11996         // Try and find class handles from op1 and op2
11997         cls1Hnd = gtGetHelperArgClassHandle(op1ClassFromHandle, &runtimeLookupCount, &op1TunneledHandle);
11998         cls2Hnd = gtGetHelperArgClassHandle(op2ClassFromHandle, &runtimeLookupCount, &op2TunneledHandle);
11999
12000         // If we have both class handles, try and resolve the type equality test completely.
12001         bool resolveFailed = false;
12002
12003         if ((cls1Hnd != NO_CLASS_HANDLE) && (cls2Hnd != NO_CLASS_HANDLE))
12004         {
12005             JITDUMP("Asking runtime to compare %p (%s) and %p (%s) for equality\n", dspPtr(cls1Hnd),
12006                     info.compCompHnd->getClassName(cls1Hnd), dspPtr(cls2Hnd), info.compCompHnd->getClassName(cls2Hnd));
12007             TypeCompareState s = info.compCompHnd->compareTypesForEquality(cls1Hnd, cls2Hnd);
12008
12009             if (s != TypeCompareState::May)
12010             {
12011                 // Type comparison result is known.
12012                 const bool typesAreEqual = (s == TypeCompareState::Must);
12013                 const bool operatorIsEQ  = (oper == GT_EQ);
12014                 const int  compareResult = operatorIsEQ ^ typesAreEqual ? 0 : 1;
12015                 JITDUMP("Runtime reports comparison is known at jit time: %u\n", compareResult);
12016                 GenTree* result = gtNewIconNode(compareResult);
12017
12018                 // Any runtime lookups that fed into this compare are
12019                 // now dead code, so they no longer require the runtime context.
12020                 assert(lvaGenericsContextUseCount >= runtimeLookupCount);
12021                 lvaGenericsContextUseCount -= runtimeLookupCount;
12022                 return result;
12023             }
12024             else
12025             {
12026                 resolveFailed = true;
12027             }
12028         }
12029
12030         if (resolveFailed)
12031         {
12032             JITDUMP("Runtime reports comparison is NOT known at jit time\n");
12033         }
12034         else
12035         {
12036             JITDUMP("Could not find handle for %s%s\n", (cls1Hnd == NO_CLASS_HANDLE) ? " cls1" : "",
12037                     (cls2Hnd == NO_CLASS_HANDLE) ? " cls2" : "");
12038         }
12039
12040         // We can't answer the equality comparison definitively at jit
        // time, but can still simplify the comparison.
12042         //
12043         // Find out how we can compare the two handles.
12044         // NOTE: We're potentially passing NO_CLASS_HANDLE, but the runtime knows what to do with it here.
12045         CorInfoInlineTypeCheck inliningKind =
12046             info.compCompHnd->canInlineTypeCheck(cls1Hnd, CORINFO_INLINE_TYPECHECK_SOURCE_TOKEN);
12047
12048         // If the first type needs helper, check the other type: it might be okay with a simple compare.
12049         if (inliningKind == CORINFO_INLINE_TYPECHECK_USE_HELPER)
12050         {
12051             inliningKind = info.compCompHnd->canInlineTypeCheck(cls2Hnd, CORINFO_INLINE_TYPECHECK_SOURCE_TOKEN);
12052         }
12053
12054         assert(inliningKind == CORINFO_INLINE_TYPECHECK_PASS || inliningKind == CORINFO_INLINE_TYPECHECK_USE_HELPER);
12055
12056         // If we successfully tunneled through both operands, compare
12057         // the tunneled values, otherwise compare the original values.
12058         GenTree* compare;
12059         if ((op1TunneledHandle != nullptr) && (op2TunneledHandle != nullptr))
12060         {
12061             compare = gtCreateHandleCompare(oper, op1TunneledHandle, op2TunneledHandle, inliningKind);
12062         }
12063         else
12064         {
12065             compare = gtCreateHandleCompare(oper, op1ClassFromHandle, op2ClassFromHandle, inliningKind);
12066         }
12067
        // Drop any now-irrelevant flags
12069         compare->gtFlags |= tree->gtFlags & (GTF_RELOP_JMP_USED | GTF_RELOP_QMARK | GTF_DONT_CSE);
12070
12071         return compare;
12072     }
12073
12074     // Just one operand creates a type from a handle.
12075     //
12076     // If the other operand is fetching the type from an object,
12077     // we can sometimes optimize the type compare into a simpler
12078     // method table comparison.
12079     //
12080     // TODO: if other operand is null...
12081     if ((op1Kind != TPK_GetType) && (op2Kind != TPK_GetType))
12082     {
12083         return tree;
12084     }
12085
12086     GenTree* const opHandle = op1IsFromHandle ? op1 : op2;
12087     GenTree* const opOther  = op1IsFromHandle ? op2 : op1;
12088
12089     // Tunnel through the handle operand to get at the class handle involved.
12090     GenTree* const       opHandleArgument = opHandle->gtCall.gtCallArgs->gtOp.gtOp1;
12091     CORINFO_CLASS_HANDLE clsHnd           = gtGetHelperArgClassHandle(opHandleArgument);
12092
12093     // If we couldn't find the class handle, give up.
12094     if (clsHnd == NO_CLASS_HANDLE)
12095     {
12096         return tree;
12097     }
12098
12099     // Ask the VM if this type can be equality tested by a simple method
12100     // table comparison.
12101     CorInfoInlineTypeCheck typeCheckInliningResult =
12102         info.compCompHnd->canInlineTypeCheck(clsHnd, CORINFO_INLINE_TYPECHECK_SOURCE_VTABLE);
12103     if (typeCheckInliningResult == CORINFO_INLINE_TYPECHECK_NONE)
12104     {
12105         return tree;
12106     }
12107
12108     // We're good to go.
12109     JITDUMP("Optimizing compare of obj.GetType()"
12110             " and type-from-handle to compare method table pointer\n");
12111
12112     // opHandleArgument is the method table we're looking for.
12113     GenTree* const knownMT = opHandleArgument;
12114
12115     // Fetch object method table from the object itself.
12116     GenTree* objOp = nullptr;
12117
12118     // Note we may see intrinsified or regular calls to GetType
12119     if (opOther->OperGet() == GT_INTRINSIC)
12120     {
12121         objOp = opOther->gtUnOp.gtOp1;
12122     }
12123     else
12124     {
12125         assert(opOther->OperGet() == GT_CALL);
12126         objOp = opOther->gtCall.gtCallObjp;
12127     }
12128
12129     GenTree* const objMT = gtNewOperNode(GT_IND, TYP_I_IMPL, objOp);
12130
12131     // Update various flags
12132     objMT->gtFlags |= GTF_EXCEPT;
12133     compCurBB->bbFlags |= BBF_HAS_VTABREF;
12134     optMethodFlags |= OMF_HAS_VTABLEREF;
12135
12136     // Compare the two method tables
12137     GenTree* const compare = gtCreateHandleCompare(oper, objMT, knownMT, typeCheckInliningResult);
12138
12139     // Drop any now irrelevant flags
12140     compare->gtFlags |= tree->gtFlags & (GTF_RELOP_JMP_USED | GTF_RELOP_QMARK | GTF_DONT_CSE);
12141
12142     // And we're done
12143     return compare;
12144 }
12145
12146 //------------------------------------------------------------------------
12147 // gtGetHelperArgClassHandle: find the compile time class handle from
12148 //   a helper call argument tree
12149 //
12150 // Arguments:
12151 //    tree - tree that passes the handle to the helper
12152 //    runtimeLookupCount [optional, in/out] - incremented if tree was a runtime lookup
12153 //    handleTree [optional, out] - set to the literal operand tree for indirect handles
12154 //
12155 // Returns:
12156 //    The compile time class handle if known.
12157 //
12158 CORINFO_CLASS_HANDLE Compiler::gtGetHelperArgClassHandle(GenTree*  tree,
12159                                                          unsigned* runtimeLookupCount,
12160                                                          GenTree** handleTree)
12161 {
12162     CORINFO_CLASS_HANDLE result = NO_CLASS_HANDLE;
12163
12164     // Walk through any wrapping nop.
12165     if ((tree->gtOper == GT_NOP) && (tree->gtType == TYP_I_IMPL))
12166     {
12167         tree = tree->gtOp.gtOp1;
12168     }
12169
12170     // The handle could be a literal constant
12171     if ((tree->OperGet() == GT_CNS_INT) && (tree->TypeGet() == TYP_I_IMPL))
12172     {
12173         assert(tree->IsIconHandle(GTF_ICON_CLASS_HDL));
12174         result = (CORINFO_CLASS_HANDLE)tree->gtIntCon.gtCompileTimeHandle;
12175     }
12176     // Or the result of a runtime lookup
12177     else if (tree->OperGet() == GT_RUNTIMELOOKUP)
12178     {
12179         result = tree->AsRuntimeLookup()->GetClassHandle();
12180
12181         if (runtimeLookupCount != nullptr)
12182         {
12183             *runtimeLookupCount = *runtimeLookupCount + 1;
12184         }
12185     }
12186     // Or something reached indirectly
12187     else if (tree->gtOper == GT_IND)
12188     {
12189         // The handle indirs we are looking for will be marked as non-faulting.
12190         // Certain others (eg from refanytype) may not be.
12191         if (tree->gtFlags & GTF_IND_NONFAULTING)
12192         {
12193             GenTree* handleTreeInternal = tree->gtOp.gtOp1;
12194
12195             if ((handleTreeInternal->OperGet() == GT_CNS_INT) && (handleTreeInternal->TypeGet() == TYP_I_IMPL))
12196             {
12197                 // These handle constants should be class handles.
12198                 assert(handleTreeInternal->IsIconHandle(GTF_ICON_CLASS_HDL));
12199                 result = (CORINFO_CLASS_HANDLE)handleTreeInternal->gtIntCon.gtCompileTimeHandle;
12200
12201                 if (handleTree != nullptr)
12202                 {
12203                     *handleTree = handleTreeInternal;
12204                 }
12205             }
12206         }
12207     }
12208
12209     return result;
12210 }
12211
12212 /*****************************************************************************
12213  *
12214  *  Some binary operators can be folded even if they have only one
12215  *  operand constant - e.g. boolean operators, add with 0
12216  *  multiply with 1, etc
12217  */
12218
12219 GenTree* Compiler::gtFoldExprSpecial(GenTree* tree)
12220 {
12221     GenTree*   op1  = tree->gtOp.gtOp1;
12222     GenTree*   op2  = tree->gtOp.gtOp2;
12223     genTreeOps oper = tree->OperGet();
12224
12225     GenTree* op;
12226     GenTree* cons;
12227     ssize_t  val;
12228
12229     assert(tree->OperKind() & GTK_BINOP);
12230
12231     /* Filter out operators that cannot be folded here */
12232     if (oper == GT_CAST)
12233     {
12234         return tree;
12235     }
12236
12237     /* We only consider TYP_INT for folding
12238      * Do not fold pointer arithmetic (e.g. addressing modes!) */
12239
12240     if (oper != GT_QMARK && !varTypeIsIntOrI(tree->gtType))
12241     {
12242         return tree;
12243     }
12244
12245     /* Find out which is the constant node */
12246
12247     if (op1->IsCnsIntOrI())
12248     {
12249         op   = op2;
12250         cons = op1;
12251     }
12252     else if (op2->IsCnsIntOrI())
12253     {
12254         op   = op1;
12255         cons = op2;
12256     }
12257     else
12258     {
12259         return tree;
12260     }
12261
12262     /* Get the constant value */
12263
12264     val = cons->gtIntConCommon.IconValue();
12265
12266     /* Here op is the non-constant operand, val is the constant,
12267        first is true if the constant is op1 */
12268
12269     switch (oper)
12270     {
12271         case GT_EQ:
12272         case GT_NE:
12273         case GT_GT:
12274
12275             // Optimize boxed value classes; these are always false.  This IL is
12276             // generated when a generic value is tested against null:
12277             //     <T> ... foo(T x) { ... if ((object)x == null) ...
12278             if (val == 0 && op->IsBoxedValue())
12279             {
12280                 JITDUMP("\nAttempting to optimize BOX(valueType) %s null [%06u]\n", GenTree::OpName(oper),
12281                         dspTreeID(tree));
12282
12283                 // We don't expect GT_GT with signed compares, and we
12284                 // can't predict the result if we do see it, since the
12285                 // boxed object addr could have its high bit set.
12286                 if ((oper == GT_GT) && !tree->IsUnsigned())
12287                 {
12288                     JITDUMP(" bailing; unexpected signed compare via GT_GT\n");
12289                 }
12290                 else
12291                 {
12292                     // The tree under the box must be side effect free
12293                     // since we will drop it if we optimize.
12294                     assert(!gtTreeHasSideEffects(op->gtBox.gtOp.gtOp1, GTF_SIDE_EFFECT));
12295
12296                     // See if we can optimize away the box and related statements.
12297                     GenTree* boxSourceTree = gtTryRemoveBoxUpstreamEffects(op);
12298                     bool     didOptimize   = (boxSourceTree != nullptr);
12299
12300                     // If optimization succeeded, remove the box.
12301                     if (didOptimize)
12302                     {
12303                         // Set up the result of the compare.
12304                         int compareResult = 0;
12305                         if (oper == GT_GT)
12306                         {
12307                             // GT_GT(null, box) == false
12308                             // GT_GT(box, null) == true
12309                             compareResult = (op1 == op);
12310                         }
12311                         else if (oper == GT_EQ)
12312                         {
12313                             // GT_EQ(box, null) == false
12314                             // GT_EQ(null, box) == false
12315                             compareResult = 0;
12316                         }
12317                         else
12318                         {
12319                             assert(oper == GT_NE);
12320                             // GT_NE(box, null) == true
12321                             // GT_NE(null, box) == true
12322                             compareResult = 1;
12323                         }
12324
12325                         JITDUMP("\nSuccess: replacing BOX(valueType) %s null with %d\n", GenTree::OpName(oper),
12326                                 compareResult);
12327
12328                         op = gtNewIconNode(compareResult);
12329
12330                         if (fgGlobalMorph)
12331                         {
12332                             fgMorphTreeDone(op);
12333                         }
12334                         else
12335                         {
12336                             op->gtNext = tree->gtNext;
12337                             op->gtPrev = tree->gtPrev;
12338                         }
12339
12340                         return op;
12341                     }
12342                 }
12343             }
12344
12345             break;
12346
12347         case GT_ADD:
12348             if (val == 0)
12349             {
12350                 goto DONE_FOLD;
12351             }
12352             break;
12353
12354         case GT_MUL:
12355             if (val == 1)
12356             {
12357                 goto DONE_FOLD;
12358             }
12359             else if (val == 0)
12360             {
12361                 /* Multiply by zero - return the 'zero' node, but not if side effects */
12362                 if (!(op->gtFlags & GTF_SIDE_EFFECT))
12363                 {
12364                     op = cons;
12365                     goto DONE_FOLD;
12366                 }
12367             }
12368             break;
12369
12370         case GT_DIV:
12371         case GT_UDIV:
12372             if ((op2 == cons) && (val == 1) && !(op1->OperKind() & GTK_CONST))
12373             {
12374                 goto DONE_FOLD;
12375             }
12376             break;
12377
12378         case GT_SUB:
12379             if ((op2 == cons) && (val == 0) && !(op1->OperKind() & GTK_CONST))
12380             {
12381                 goto DONE_FOLD;
12382             }
12383             break;
12384
12385         case GT_AND:
12386             if (val == 0)
12387             {
12388                 /* AND with zero - return the 'zero' node, but not if side effects */
12389
12390                 if (!(op->gtFlags & GTF_SIDE_EFFECT))
12391                 {
12392                     op = cons;
12393                     goto DONE_FOLD;
12394                 }
12395             }
12396             else
12397             {
12398                 /* The GTF_BOOLEAN flag is set for nodes that are part
12399                  * of a boolean expression, thus all their children
12400                  * are known to evaluate to only 0 or 1 */
12401
12402                 if (tree->gtFlags & GTF_BOOLEAN)
12403                 {
12404
12405                     /* The constant value must be 1
12406                      * AND with 1 stays the same */
12407                     assert(val == 1);
12408                     goto DONE_FOLD;
12409                 }
12410             }
12411             break;
12412
12413         case GT_OR:
12414             if (val == 0)
12415             {
12416                 goto DONE_FOLD;
12417             }
12418             else if (tree->gtFlags & GTF_BOOLEAN)
12419             {
12420                 /* The constant value must be 1 - OR with 1 is 1 */
12421
12422                 assert(val == 1);
12423
12424                 /* OR with one - return the 'one' node, but not if side effects */
12425
12426                 if (!(op->gtFlags & GTF_SIDE_EFFECT))
12427                 {
12428                     op = cons;
12429                     goto DONE_FOLD;
12430                 }
12431             }
12432             break;
12433
12434         case GT_LSH:
12435         case GT_RSH:
12436         case GT_RSZ:
12437         case GT_ROL:
12438         case GT_ROR:
12439             if (val == 0)
12440             {
12441                 if (op2 == cons)
12442                 {
12443                     goto DONE_FOLD;
12444                 }
12445                 else if (!(op->gtFlags & GTF_SIDE_EFFECT))
12446                 {
12447                     op = cons;
12448                     goto DONE_FOLD;
12449                 }
12450             }
12451             break;
12452
12453         case GT_QMARK:
12454         {
12455             assert(op1 == cons && op2 == op && op2->gtOper == GT_COLON);
12456             assert(op2->gtOp.gtOp1 && op2->gtOp.gtOp2);
12457
12458             assert(val == 0 || val == 1);
12459
12460             GenTree* opToDelete;
12461             if (val)
12462             {
12463                 op         = op2->AsColon()->ThenNode();
12464                 opToDelete = op2->AsColon()->ElseNode();
12465             }
12466             else
12467             {
12468                 op         = op2->AsColon()->ElseNode();
12469                 opToDelete = op2->AsColon()->ThenNode();
12470             }
12471
12472             // Clear colon flags only if the qmark itself is not conditionaly executed
12473             if ((tree->gtFlags & GTF_COLON_COND) == 0)
12474             {
12475                 fgWalkTreePre(&op, gtClearColonCond);
12476             }
12477         }
12478
12479             goto DONE_FOLD;
12480
12481         default:
12482             break;
12483     }
12484
12485     /* The node is not foldable */
12486
12487     return tree;
12488
12489 DONE_FOLD:
12490
12491     /* The node has beeen folded into 'op' */
12492
12493     // If there was an assigment update, we just morphed it into
12494     // a use, update the flags appropriately
12495     if (op->gtOper == GT_LCL_VAR)
12496     {
12497         assert(tree->OperIs(GT_ASG) || (op->gtFlags & (GTF_VAR_USEASG | GTF_VAR_DEF)) == 0);
12498
12499         op->gtFlags &= ~(GTF_VAR_USEASG | GTF_VAR_DEF);
12500     }
12501
12502     op->gtNext = tree->gtNext;
12503     op->gtPrev = tree->gtPrev;
12504
12505     return op;
12506 }
12507
12508 //------------------------------------------------------------------------
12509 // gtTryRemoveBoxUpstreamEffects: given an unused value type box,
12510 //    try and remove the upstream allocation and unnecessary parts of
12511 //    the copy.
12512 //
12513 // Arguments:
12514 //    op  - the box node to optimize
12515 //    options - controls whether and how trees are modified
12516 //        (see notes)
12517 //
12518 // Return Value:
12519 //    A tree representing the original value to box, if removal
12520 //    is successful/possible (but see note). nullptr if removal fails.
12521 //
12522 // Notes:
12523 //    Value typed box gets special treatment because it has associated
12524 //    side effects that can be removed if the box result is not used.
12525 //
12526 //    By default (options == BR_REMOVE_AND_NARROW) this method will
12527 //    try and remove unnecessary trees and will try and reduce remaning
12528 //    operations to the minimal set, possibly narrowing the width of
12529 //    loads from the box source if it is a struct.
12530 //
12531 //    To perform a trial removal, pass BR_DONT_REMOVE. This can be
12532 //    useful to determine if this optimization should only be
12533 //    performed if some other conditions hold true.
12534 //
12535 //    To remove but not alter the access to the box source, pass
12536 //    BR_REMOVE_BUT_NOT_NARROW.
12537 //
12538 //    To remove and return the tree for the type handle used for
12539 //    the boxed newobj, pass BR_REMOVE_BUT_NOT_NARROW_WANT_TYPE_HANDLE.
12540 //    This can be useful when the only part of the box that is "live"
12541 //    is its type.
12542 //
12543 //    If removal fails, is is possible that a subsequent pass may be
12544 //    able to optimize.  Blocking side effects may now be minimized
12545 //    (null or bounds checks might have been removed) or might be
12546 //    better known (inline return placeholder updated with the actual
12547 //    return expression). So the box is perhaps best left as is to
12548 //    help trigger this re-examination.
12549
12550 GenTree* Compiler::gtTryRemoveBoxUpstreamEffects(GenTree* op, BoxRemovalOptions options)
12551 {
12552     assert(op->IsBoxedValue());
12553
12554     // grab related parts for the optimization
12555     GenTreeBox* box      = op->AsBox();
12556     GenTree*    asgStmt  = box->gtAsgStmtWhenInlinedBoxValue;
12557     GenTree*    copyStmt = box->gtCopyStmtWhenInlinedBoxValue;
12558
12559     assert(asgStmt->gtOper == GT_STMT);
12560     assert(copyStmt->gtOper == GT_STMT);
12561
12562     JITDUMP("gtTryRemoveBoxUpstreamEffects: %s to %s of BOX (valuetype)"
12563             " [%06u] (assign/newobj [%06u] copy [%06u])\n",
12564             (options == BR_DONT_REMOVE) ? "checking if it is possible" : "attempting",
12565             (options == BR_MAKE_LOCAL_COPY) ? "make local unboxed version" : "remove side effects", dspTreeID(op),
12566             dspTreeID(asgStmt), dspTreeID(copyStmt));
12567
12568     // If we don't recognize the form of the assign, bail.
12569     GenTree* asg = asgStmt->gtStmt.gtStmtExpr;
12570     if (asg->gtOper != GT_ASG)
12571     {
12572         JITDUMP(" bailing; unexpected assignment op %s\n", GenTree::OpName(asg->gtOper));
12573         return nullptr;
12574     }
12575
12576     // If we're eventually going to return the type handle, remember it now.
12577     GenTree* boxTypeHandle = nullptr;
12578     if ((options == BR_REMOVE_AND_NARROW_WANT_TYPE_HANDLE) || (options == BR_DONT_REMOVE_WANT_TYPE_HANDLE))
12579     {
12580         GenTree*   asgSrc     = asg->gtOp.gtOp2;
12581         genTreeOps asgSrcOper = asgSrc->OperGet();
12582
12583         // Allocation may be via AllocObj or via helper call, depending
12584         // on when this is invoked and whether the jit is using AllocObj
12585         // for R2R allocations.
12586         if (asgSrcOper == GT_ALLOCOBJ)
12587         {
12588             GenTreeAllocObj* allocObj = asgSrc->AsAllocObj();
12589             boxTypeHandle             = allocObj->gtOp.gtOp1;
12590         }
12591         else if (asgSrcOper == GT_CALL)
12592         {
12593             GenTreeCall* newobjCall = asgSrc->AsCall();
12594             GenTree*     newobjArgs = newobjCall->gtCallArgs;
12595
12596             // In R2R expansions the handle may not be an explicit operand to the helper,
12597             // so we can't remove the box.
12598             if (newobjArgs == nullptr)
12599             {
12600                 assert(newobjCall->IsHelperCall(this, CORINFO_HELP_READYTORUN_NEW));
12601                 JITDUMP(" bailing; newobj via R2R helper\n");
12602                 return nullptr;
12603             }
12604
12605             boxTypeHandle = newobjArgs->AsArgList()->Current();
12606         }
12607         else
12608         {
12609             unreached();
12610         }
12611
12612         assert(boxTypeHandle != nullptr);
12613     }
12614
12615     // If we don't recognize the form of the copy, bail.
12616     GenTree* copy = copyStmt->gtStmt.gtStmtExpr;
12617     if (copy->gtOper != GT_ASG)
12618     {
12619         // GT_RET_EXPR is a tolerable temporary failure.
12620         // The jit will revisit this optimization after
12621         // inlining is done.
12622         if (copy->gtOper == GT_RET_EXPR)
12623         {
12624             JITDUMP(" bailing; must wait for replacement of copy %s\n", GenTree::OpName(copy->gtOper));
12625         }
12626         else
12627         {
12628             // Anything else is a missed case we should
12629             // figure out how to handle.  One known case
12630             // is GT_COMMAs enclosing the GT_ASG we are
12631             // looking for.
12632             JITDUMP(" bailing; unexpected copy op %s\n", GenTree::OpName(copy->gtOper));
12633         }
12634         return nullptr;
12635     }
12636
12637     // Handle case where we are optimizing the box into a local copy
12638     if (options == BR_MAKE_LOCAL_COPY)
12639     {
12640         // Drill into the box to get at the box temp local and the box type
12641         GenTree* boxTemp = box->BoxOp();
12642         assert(boxTemp->IsLocal());
12643         const unsigned boxTempLcl = boxTemp->AsLclVar()->GetLclNum();
12644         assert(lvaTable[boxTempLcl].lvType == TYP_REF);
12645         CORINFO_CLASS_HANDLE boxClass = lvaTable[boxTempLcl].lvClassHnd;
12646         assert(boxClass != nullptr);
12647
12648         // Verify that the copyDst has the expected shape
12649         // (blk|obj|ind (add (boxTempLcl, ptr-size)))
12650         //
12651         // The shape here is constrained to the patterns we produce
12652         // over in impImportAndPushBox for the inlined box case.
12653         GenTree* copyDst = copy->gtOp.gtOp1;
12654
12655         if (!copyDst->OperIs(GT_BLK, GT_IND, GT_OBJ))
12656         {
12657             JITDUMP("Unexpected copy dest operator %s\n", GenTree::OpName(copyDst->gtOper));
12658             return nullptr;
12659         }
12660
12661         GenTree* copyDstAddr = copyDst->gtOp.gtOp1;
12662         if (copyDstAddr->OperGet() != GT_ADD)
12663         {
12664             JITDUMP("Unexpected copy dest address tree\n");
12665             return nullptr;
12666         }
12667
12668         GenTree* copyDstAddrOp1 = copyDstAddr->gtOp.gtOp1;
12669         if ((copyDstAddrOp1->OperGet() != GT_LCL_VAR) || (copyDstAddrOp1->gtLclVarCommon.gtLclNum != boxTempLcl))
12670         {
12671             JITDUMP("Unexpected copy dest address 1st addend\n");
12672             return nullptr;
12673         }
12674
12675         GenTree* copyDstAddrOp2 = copyDstAddr->gtOp.gtOp2;
12676         if (!copyDstAddrOp2->IsIntegralConst(TARGET_POINTER_SIZE))
12677         {
12678             JITDUMP("Unexpected copy dest address 2nd addend\n");
12679             return nullptr;
12680         }
12681
12682         // Screening checks have all passed. Do the transformation.
12683         //
12684         // Retype the box temp to be a struct
12685         JITDUMP("Retyping box temp V%02u to struct %s\n", boxTempLcl, eeGetClassName(boxClass));
12686         lvaTable[boxTempLcl].lvType   = TYP_UNDEF;
12687         const bool isUnsafeValueClass = false;
12688         lvaSetStruct(boxTempLcl, boxClass, isUnsafeValueClass);
12689         var_types boxTempType = lvaTable[boxTempLcl].lvType;
12690
12691         // Remove the newobj and assigment to box temp
12692         JITDUMP("Bashing NEWOBJ [%06u] to NOP\n", dspTreeID(asg));
12693         asg->gtBashToNOP();
12694
12695         // Update the copy from the value to be boxed to the box temp
12696         GenTree* newDst     = gtNewOperNode(GT_ADDR, TYP_BYREF, gtNewLclvNode(boxTempLcl, boxTempType));
12697         copyDst->gtOp.gtOp1 = newDst;
12698
12699         // Return the address of the now-struct typed box temp
12700         GenTree* retValue = gtNewOperNode(GT_ADDR, TYP_BYREF, gtNewLclvNode(boxTempLcl, boxTempType));
12701
12702         return retValue;
12703     }
12704
12705     // If the copy is a struct copy, make sure we know how to isolate
12706     // any source side effects.
12707     GenTree* copySrc = copy->gtOp.gtOp2;
12708
12709     // If the copy source is from a pending inline, wait for it to resolve.
12710     if (copySrc->gtOper == GT_RET_EXPR)
12711     {
12712         JITDUMP(" bailing; must wait for replacement of copy source %s\n", GenTree::OpName(copySrc->gtOper));
12713         return nullptr;
12714     }
12715
12716     bool hasSrcSideEffect = false;
12717     bool isStructCopy     = false;
12718
12719     if (gtTreeHasSideEffects(copySrc, GTF_SIDE_EFFECT))
12720     {
12721         hasSrcSideEffect = true;
12722
12723         if (varTypeIsStruct(copySrc->gtType))
12724         {
12725             isStructCopy = true;
12726
12727             if ((copySrc->gtOper != GT_OBJ) && (copySrc->gtOper != GT_IND) && (copySrc->gtOper != GT_FIELD))
12728             {
12729                 // We don't know how to handle other cases, yet.
12730                 JITDUMP(" bailing; unexpected copy source struct op with side effect %s\n",
12731                         GenTree::OpName(copySrc->gtOper));
12732                 return nullptr;
12733             }
12734         }
12735     }
12736
12737     // If this was a trial removal, we're done.
12738     if (options == BR_DONT_REMOVE)
12739     {
12740         return copySrc;
12741     }
12742
12743     if (options == BR_DONT_REMOVE_WANT_TYPE_HANDLE)
12744     {
12745         return boxTypeHandle;
12746     }
12747
12748     // Otherwise, proceed with the optimization.
12749     //
12750     // Change the assignment expression to a NOP.
12751     JITDUMP("\nBashing NEWOBJ [%06u] to NOP\n", dspTreeID(asg));
12752     asg->gtBashToNOP();
12753
12754     // Change the copy expression so it preserves key
12755     // source side effects.
12756     JITDUMP("\nBashing COPY [%06u]", dspTreeID(copy));
12757
12758     if (!hasSrcSideEffect)
12759     {
12760         // If there were no copy source side effects just bash
12761         // the copy to a NOP.
12762         copy->gtBashToNOP();
12763         JITDUMP(" to NOP; no source side effects.\n");
12764     }
12765     else if (!isStructCopy)
12766     {
12767         // For scalar types, go ahead and produce the
12768         // value as the copy is fairly cheap and likely
12769         // the optimizer can trim things down to just the
12770         // minimal side effect parts.
12771         copyStmt->gtStmt.gtStmtExpr = copySrc;
12772         JITDUMP(" to scalar read via [%06u]\n", dspTreeID(copySrc));
12773     }
12774     else
12775     {
12776         // For struct types read the first byte of the
12777         // source struct; there's no need to read the
12778         // entire thing, and no place to put it.
12779         assert(copySrc->gtOper == GT_OBJ || copySrc->gtOper == GT_IND || copySrc->gtOper == GT_FIELD);
12780         copyStmt->gtStmt.gtStmtExpr = copySrc;
12781
12782         if (options == BR_REMOVE_AND_NARROW || options == BR_REMOVE_AND_NARROW_WANT_TYPE_HANDLE)
12783         {
12784             JITDUMP(" to read first byte of struct via modified [%06u]\n", dspTreeID(copySrc));
12785             copySrc->ChangeOper(GT_IND);
12786             copySrc->gtType = TYP_BYTE;
12787         }
12788         else
12789         {
12790             JITDUMP(" to read entire struct via modified [%06u]\n", dspTreeID(copySrc));
12791         }
12792     }
12793
12794     if (fgStmtListThreaded)
12795     {
12796         fgSetStmtSeq(asgStmt);
12797         fgSetStmtSeq(copyStmt);
12798     }
12799
12800     // Box effects were successfully optimized.
12801
12802     if (options == BR_REMOVE_AND_NARROW_WANT_TYPE_HANDLE)
12803     {
12804         return boxTypeHandle;
12805     }
12806     else
12807     {
12808         return copySrc;
12809     }
12810 }
12811
12812 //------------------------------------------------------------------------
12813 // gtOptimizeEnumHasFlag: given the operands for a call to Enum.HasFlag,
12814 //    try and optimize the call to a simple and/compare tree.
12815 //
12816 // Arguments:
12817 //    thisOp  - first argument to the call
12818 //    flagOp  - second argument to the call
12819 //
12820 // Return Value:
//    A new cmp/and tree if successful. nullptr on failure.
12822 //
12823 // Notes:
12824 //    If successful, may allocate new temps and modify connected
12825 //    statements.
12826
12827 GenTree* Compiler::gtOptimizeEnumHasFlag(GenTree* thisOp, GenTree* flagOp)
12828 {
12829     JITDUMP("Considering optimizing call to Enum.HasFlag....\n");
12830
12831     // Operands must be boxes
12832     if (!thisOp->IsBoxedValue() || !flagOp->IsBoxedValue())
12833     {
12834         JITDUMP("bailing, need both inputs to be BOXes\n");
12835         return nullptr;
12836     }
12837
12838     // Operands must have same type
12839     bool                 isExactThis   = false;
12840     bool                 isNonNullThis = false;
12841     CORINFO_CLASS_HANDLE thisHnd       = gtGetClassHandle(thisOp, &isExactThis, &isNonNullThis);
12842
12843     if (thisHnd == nullptr)
12844     {
12845         JITDUMP("bailing, can't find type for 'this' operand\n");
12846         return nullptr;
12847     }
12848
12849     // A boxed thisOp should have exact type and non-null instance
12850     assert(isExactThis);
12851     assert(isNonNullThis);
12852
12853     bool                 isExactFlag   = false;
12854     bool                 isNonNullFlag = false;
12855     CORINFO_CLASS_HANDLE flagHnd       = gtGetClassHandle(flagOp, &isExactFlag, &isNonNullFlag);
12856
12857     if (flagHnd == nullptr)
12858     {
12859         JITDUMP("bailing, can't find type for 'flag' operand\n");
12860         return nullptr;
12861     }
12862
12863     // A boxed flagOp should have exact type and non-null instance
12864     assert(isExactFlag);
12865     assert(isNonNullFlag);
12866
12867     if (flagHnd != thisHnd)
12868     {
12869         JITDUMP("bailing, operand types differ\n");
12870         return nullptr;
12871     }
12872
12873     // If we have a shared type instance we can't safely check type
12874     // equality, so bail.
12875     DWORD classAttribs = info.compCompHnd->getClassAttribs(thisHnd);
12876     if (classAttribs & CORINFO_FLG_SHAREDINST)
12877     {
12878         JITDUMP("bailing, have shared instance type\n");
12879         return nullptr;
12880     }
12881
12882     // Simulate removing the box for thisOP. We need to know that it can
12883     // be safely removed before we can optimize.
12884     GenTree* thisVal = gtTryRemoveBoxUpstreamEffects(thisOp, BR_DONT_REMOVE);
12885     if (thisVal == nullptr)
12886     {
12887         // Note we may fail here if the this operand comes from
12888         // a call. We should be able to retry this post-inlining.
12889         JITDUMP("bailing, can't undo box of 'this' operand\n");
12890         return nullptr;
12891     }
12892
12893     GenTree* flagVal = gtTryRemoveBoxUpstreamEffects(flagOp, BR_REMOVE_BUT_NOT_NARROW);
12894     if (flagVal == nullptr)
12895     {
12896         // Note we may fail here if the flag operand comes from
12897         // a call. We should be able to retry this post-inlining.
12898         JITDUMP("bailing, can't undo box of 'flag' operand\n");
12899         return nullptr;
12900     }
12901
12902     // Yes, both boxes can be cleaned up. Optimize.
12903     JITDUMP("Optimizing call to Enum.HasFlag\n");
12904
12905     // Undo the boxing of thisOp and prepare to operate directly
12906     // on the original enum values.
12907     thisVal = gtTryRemoveBoxUpstreamEffects(thisOp, BR_REMOVE_BUT_NOT_NARROW);
12908
12909     // Our trial removal above should guarantee successful removal here.
12910     assert(thisVal != nullptr);
12911
12912     // We should have a consistent view of the type
12913     var_types type = thisVal->TypeGet();
12914     assert(type == flagVal->TypeGet());
12915
12916     // The thisVal and flagVal trees come from earlier statements.
12917     //
12918     // Unless they are invariant values, we need to evaluate them both
12919     // to temps at those points to safely transmit the values here.
12920     //
12921     // Also we need to use the flag twice, so we need two trees for it.
12922     GenTree* thisValOpt     = nullptr;
12923     GenTree* flagValOpt     = nullptr;
12924     GenTree* flagValOptCopy = nullptr;
12925
12926     if (thisVal->IsIntegralConst())
12927     {
12928         thisValOpt = gtClone(thisVal);
12929         assert(thisValOpt != nullptr);
12930     }
12931     else
12932     {
12933         const unsigned thisTmp         = lvaGrabTemp(true DEBUGARG("Enum:HasFlag this temp"));
12934         GenTree*       thisAsg         = gtNewTempAssign(thisTmp, thisVal);
12935         GenTree*       thisAsgStmt     = thisOp->AsBox()->gtCopyStmtWhenInlinedBoxValue;
12936         thisAsgStmt->gtStmt.gtStmtExpr = thisAsg;
12937         thisValOpt                     = gtNewLclvNode(thisTmp, type);
12938     }
12939
12940     if (flagVal->IsIntegralConst())
12941     {
12942         flagValOpt = gtClone(flagVal);
12943         assert(flagValOpt != nullptr);
12944         flagValOptCopy = gtClone(flagVal);
12945         assert(flagValOptCopy != nullptr);
12946     }
12947     else
12948     {
12949         const unsigned flagTmp         = lvaGrabTemp(true DEBUGARG("Enum:HasFlag flag temp"));
12950         GenTree*       flagAsg         = gtNewTempAssign(flagTmp, flagVal);
12951         GenTree*       flagAsgStmt     = flagOp->AsBox()->gtCopyStmtWhenInlinedBoxValue;
12952         flagAsgStmt->gtStmt.gtStmtExpr = flagAsg;
12953         flagValOpt                     = gtNewLclvNode(flagTmp, type);
12954         flagValOptCopy                 = gtNewLclvNode(flagTmp, type);
12955     }
12956
12957     // Turn the call into (thisValTmp & flagTmp) == flagTmp.
12958     GenTree* andTree = gtNewOperNode(GT_AND, type, thisValOpt, flagValOpt);
12959     GenTree* cmpTree = gtNewOperNode(GT_EQ, TYP_INT, andTree, flagValOptCopy);
12960
12961     JITDUMP("Optimized call to Enum.HasFlag\n");
12962
12963     return cmpTree;
12964 }
12965
12966 /*****************************************************************************
12967  *
12968  *  Fold the given constant tree.
12969  */
12970
12971 #ifdef _PREFAST_
12972 #pragma warning(push)
12973 #pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
12974 #endif
12975 GenTree* Compiler::gtFoldExprConst(GenTree* tree)
12976 {
12977     unsigned kind = tree->OperKind();
12978
12979     SSIZE_T       i1, i2, itemp;
12980     INT64         lval1, lval2, ltemp;
12981     float         f1, f2;
12982     double        d1, d2;
12983     var_types     switchType;
12984     FieldSeqNode* fieldSeq = FieldSeqStore::NotAField(); // default unless we override it when folding
12985
12986     assert(kind & (GTK_UNOP | GTK_BINOP));
12987
12988     GenTree* op1 = tree->gtOp.gtOp1;
12989     GenTree* op2 = tree->gtGetOp2IfPresent();
12990
12991     if (!opts.OptEnabled(CLFLG_CONSTANTFOLD))
12992     {
12993         return tree;
12994     }
12995
12996     if (tree->OperGet() == GT_NOP)
12997     {
12998         return tree;
12999     }
13000
13001 #ifdef FEATURE_SIMD
13002     if (tree->OperGet() == GT_SIMD)
13003     {
13004         return tree;
13005     }
13006 #endif // FEATURE_SIMD
13007
13008     if (tree->gtOper == GT_ALLOCOBJ)
13009     {
13010         return tree;
13011     }
13012
13013     if (tree->gtOper == GT_RUNTIMELOOKUP)
13014     {
13015         return tree;
13016     }
13017
13018     if (kind & GTK_UNOP)
13019     {
13020         assert(op1->OperKind() & GTK_CONST);
13021
13022         switch (op1->gtType)
13023         {
13024             case TYP_INT:
13025
13026                 /* Fold constant INT unary operator */
13027
13028                 if (!op1->gtIntCon.ImmedValCanBeFolded(this, tree->OperGet()))
13029                 {
13030                     return tree;
13031                 }
13032
13033                 i1 = (int)op1->gtIntCon.gtIconVal;
13034
13035                 // If we fold a unary oper, then the folded constant
13036                 // is considered a ConstantIndexField if op1 was one
13037                 //
13038
13039                 if ((op1->gtIntCon.gtFieldSeq != nullptr) && op1->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
13040                 {
13041                     fieldSeq = op1->gtIntCon.gtFieldSeq;
13042                 }
13043
13044                 switch (tree->gtOper)
13045                 {
13046                     case GT_NOT:
13047                         i1 = ~i1;
13048                         break;
13049
13050                     case GT_NEG:
13051                         i1 = -i1;
13052                         break;
13053
13054                     case GT_BSWAP:
13055                         i1 = ((i1 >> 24) & 0xFF) | ((i1 >> 8) & 0xFF00) | ((i1 << 8) & 0xFF0000) |
13056                              ((i1 << 24) & 0xFF000000);
13057                         break;
13058
13059                     case GT_BSWAP16:
13060                         i1 = ((i1 >> 8) & 0xFF) | ((i1 << 8) & 0xFF00);
13061                         break;
13062
13063                     case GT_CAST:
13064                         // assert (genActualType(tree->CastToType()) == tree->gtType);
13065                         switch (tree->CastToType())
13066                         {
13067                             case TYP_BYTE:
13068                                 itemp = INT32(INT8(i1));
13069                                 goto CHK_OVF;
13070
13071                             case TYP_SHORT:
13072                                 itemp = INT32(INT16(i1));
13073                             CHK_OVF:
13074                                 if (tree->gtOverflow() && ((itemp != i1) || ((tree->gtFlags & GTF_UNSIGNED) && i1 < 0)))
13075                                 {
13076                                     goto INT_OVF;
13077                                 }
13078                                 i1 = itemp;
13079                                 goto CNS_INT;
13080
13081                             case TYP_USHORT:
13082                                 itemp = INT32(UINT16(i1));
13083                                 if (tree->gtOverflow())
13084                                 {
13085                                     if (itemp != i1)
13086                                     {
13087                                         goto INT_OVF;
13088                                     }
13089                                 }
13090                                 i1 = itemp;
13091                                 goto CNS_INT;
13092
13093                             case TYP_BOOL:
13094                             case TYP_UBYTE:
13095                                 itemp = INT32(UINT8(i1));
13096                                 if (tree->gtOverflow())
13097                                 {
13098                                     if (itemp != i1)
13099                                     {
13100                                         goto INT_OVF;
13101                                     }
13102                                 }
13103                                 i1 = itemp;
13104                                 goto CNS_INT;
13105
13106                             case TYP_UINT:
13107                                 if (!(tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && i1 < 0)
13108                                 {
13109                                     goto INT_OVF;
13110                                 }
13111                                 goto CNS_INT;
13112
13113                             case TYP_INT:
13114                                 if ((tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && i1 < 0)
13115                                 {
13116                                     goto INT_OVF;
13117                                 }
13118                                 goto CNS_INT;
13119
13120                             case TYP_ULONG:
13121                                 if (tree->IsUnsigned())
13122                                 {
13123                                     lval1 = UINT64(UINT32(i1));
13124                                 }
13125                                 else
13126                                 {
13127                                     if (tree->gtOverflow() && (i1 < 0))
13128                                     {
13129                                         goto LNG_OVF;
13130                                     }
13131                                     lval1 = UINT64(INT32(i1));
13132                                 }
13133                                 goto CNS_LONG;
13134
13135                             case TYP_LONG:
13136                                 if (tree->IsUnsigned())
13137                                 {
13138                                     lval1 = INT64(UINT32(i1));
13139                                 }
13140                                 else
13141                                 {
13142                                     lval1 = INT64(INT32(i1));
13143                                 }
13144                                 goto CNS_LONG;
13145
13146                             case TYP_FLOAT:
13147                                 if (tree->gtFlags & GTF_UNSIGNED)
13148                                 {
13149                                     f1 = forceCastToFloat(UINT32(i1));
13150                                 }
13151                                 else
13152                                 {
13153                                     f1 = forceCastToFloat(INT32(i1));
13154                                 }
13155                                 d1 = f1;
13156                                 goto CNS_DOUBLE;
13157
13158                             case TYP_DOUBLE:
13159                                 if (tree->gtFlags & GTF_UNSIGNED)
13160                                 {
13161                                     d1 = (double)UINT32(i1);
13162                                 }
13163                                 else
13164                                 {
13165                                     d1 = (double)INT32(i1);
13166                                 }
13167                                 goto CNS_DOUBLE;
13168
13169                             default:
13170                                 assert(!"BAD_TYP");
13171                                 break;
13172                         }
13173                         return tree;
13174
13175                     default:
13176                         return tree;
13177                 }
13178
13179                 goto CNS_INT;
13180
13181             case TYP_LONG:
13182
13183                 /* Fold constant LONG unary operator */
13184
13185                 if (!op1->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()))
13186                 {
13187                     return tree;
13188                 }
13189
13190                 lval1 = op1->gtIntConCommon.LngValue();
13191
13192                 switch (tree->gtOper)
13193                 {
13194                     case GT_NOT:
13195                         lval1 = ~lval1;
13196                         break;
13197
13198                     case GT_NEG:
13199                         lval1 = -lval1;
13200                         break;
13201
13202                     case GT_BSWAP:
13203                         lval1 = ((lval1 >> 56) & 0xFF) | ((lval1 >> 40) & 0xFF00) | ((lval1 >> 24) & 0xFF0000) |
13204                                 ((lval1 >> 8) & 0xFF000000) | ((lval1 << 8) & 0xFF00000000) |
13205                                 ((lval1 << 24) & 0xFF0000000000) | ((lval1 << 40) & 0xFF000000000000) |
13206                                 ((lval1 << 56) & 0xFF00000000000000);
13207                         break;
13208
13209                     case GT_CAST:
13210                         assert(genActualType(tree->CastToType()) == tree->gtType);
13211                         switch (tree->CastToType())
13212                         {
13213                             case TYP_BYTE:
13214                                 i1 = INT32(INT8(lval1));
13215                                 goto CHECK_INT_OVERFLOW;
13216
13217                             case TYP_SHORT:
13218                                 i1 = INT32(INT16(lval1));
13219                                 goto CHECK_INT_OVERFLOW;
13220
13221                             case TYP_USHORT:
13222                                 i1 = INT32(UINT16(lval1));
13223                                 goto CHECK_UINT_OVERFLOW;
13224
13225                             case TYP_UBYTE:
13226                                 i1 = INT32(UINT8(lval1));
13227                                 goto CHECK_UINT_OVERFLOW;
13228
13229                             case TYP_INT:
13230                                 i1 = INT32(lval1);
13231
13232                             CHECK_INT_OVERFLOW:
13233                                 if (tree->gtOverflow())
13234                                 {
13235                                     if (i1 != lval1)
13236                                     {
13237                                         goto INT_OVF;
13238                                     }
13239                                     if ((tree->gtFlags & GTF_UNSIGNED) && i1 < 0)
13240                                     {
13241                                         goto INT_OVF;
13242                                     }
13243                                 }
13244                                 goto CNS_INT;
13245
13246                             case TYP_UINT:
13247                                 i1 = UINT32(lval1);
13248
13249                             CHECK_UINT_OVERFLOW:
13250                                 if (tree->gtOverflow() && UINT32(i1) != lval1)
13251                                 {
13252                                     goto INT_OVF;
13253                                 }
13254                                 goto CNS_INT;
13255
13256                             case TYP_ULONG:
13257                                 if (!(tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && lval1 < 0)
13258                                 {
13259                                     goto LNG_OVF;
13260                                 }
13261                                 goto CNS_LONG;
13262
13263                             case TYP_LONG:
13264                                 if ((tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && lval1 < 0)
13265                                 {
13266                                     goto LNG_OVF;
13267                                 }
13268                                 goto CNS_LONG;
13269
13270                             case TYP_FLOAT:
13271                             case TYP_DOUBLE:
13272                                 if ((tree->gtFlags & GTF_UNSIGNED) && lval1 < 0)
13273                                 {
13274                                     d1 = FloatingPointUtils::convertUInt64ToDouble((unsigned __int64)lval1);
13275                                 }
13276                                 else
13277                                 {
13278                                     d1 = (double)lval1;
13279                                 }
13280
13281                                 if (tree->CastToType() == TYP_FLOAT)
13282                                 {
13283                                     f1 = forceCastToFloat(d1); // truncate precision
13284                                     d1 = f1;
13285                                 }
13286                                 goto CNS_DOUBLE;
13287                             default:
13288                                 assert(!"BAD_TYP");
13289                                 break;
13290                         }
13291                         return tree;
13292
13293                     default:
13294                         return tree;
13295                 }
13296
13297                 goto CNS_LONG;
13298
13299             case TYP_FLOAT:
13300             case TYP_DOUBLE:
13301                 assert(op1->gtOper == GT_CNS_DBL);
13302
13303                 /* Fold constant DOUBLE unary operator */
13304
13305                 d1 = op1->gtDblCon.gtDconVal;
13306
13307                 switch (tree->gtOper)
13308                 {
13309                     case GT_NEG:
13310                         d1 = -d1;
13311                         break;
13312
13313                     case GT_CAST:
13314
13315                         if (tree->gtOverflowEx())
13316                         {
13317                             return tree;
13318                         }
13319
13320                         assert(genActualType(tree->CastToType()) == tree->gtType);
13321
13322                         if ((op1->gtType == TYP_FLOAT && !_finite(forceCastToFloat(d1))) ||
13323                             (op1->gtType == TYP_DOUBLE && !_finite(d1)))
13324                         {
13325                             // The floating point constant is not finite.  The ECMA spec says, in
13326                             // III 3.27, that "...if overflow occurs converting a floating point type
13327                             // to an integer, ..., the value returned is unspecified."  However, it would
13328                             // at least be desirable to have the same value returned for casting an overflowing
13329                             // constant to an int as would obtained by passing that constant as a parameter
13330                             // then casting that parameter to an int type.  We will assume that the C compiler's
13331                             // cast logic will yield the desired result (and trust testing to tell otherwise).
13332                             // Cross-compilation is an issue here; if that becomes an important scenario, we should
13333                             // capture the target-specific values of overflow casts to the various integral types as
13334                             // constants in a target-specific function.
13335                             CLANG_FORMAT_COMMENT_ANCHOR;
13336
13337                             // Don't fold conversions of +inf/-inf to integral value on all platforms
13338                             // as the value returned by JIT helper doesn't match with the C compiler's cast result.
13339                             // We want the behavior to be same with or without folding.
13340                             return tree;
13341                         }
13342
13343                         if (d1 <= -1.0 && varTypeIsUnsigned(tree->CastToType()))
13344                         {
13345                             // Don't fold conversions of these cases because the result is unspecified per ECMA spec
13346                             // and the native math doing the fold doesn't match the run-time computation on all
13347                             // platforms.
13348                             // We want the behavior to be same with or without folding.
13349                             return tree;
13350                         }
13351
13352                         switch (tree->CastToType())
13353                         {
13354                             case TYP_BYTE:
13355                                 i1 = INT32(INT8(d1));
13356                                 goto CNS_INT;
13357
13358                             case TYP_SHORT:
13359                                 i1 = INT32(INT16(d1));
13360                                 goto CNS_INT;
13361
13362                             case TYP_USHORT:
13363                                 i1 = INT32(UINT16(d1));
13364                                 goto CNS_INT;
13365
13366                             case TYP_UBYTE:
13367                                 i1 = INT32(UINT8(d1));
13368                                 goto CNS_INT;
13369
13370                             case TYP_INT:
13371                                 i1 = INT32(d1);
13372                                 goto CNS_INT;
13373
13374                             case TYP_UINT:
13375                                 i1 = forceCastToUInt32(d1);
13376                                 goto CNS_INT;
13377
13378                             case TYP_LONG:
13379                                 lval1 = INT64(d1);
13380                                 goto CNS_LONG;
13381
13382                             case TYP_ULONG:
13383                                 lval1 = FloatingPointUtils::convertDoubleToUInt64(d1);
13384                                 goto CNS_LONG;
13385
13386                             case TYP_FLOAT:
13387                                 d1 = forceCastToFloat(d1);
13388                                 goto CNS_DOUBLE;
13389
13390                             case TYP_DOUBLE:
13391                                 if (op1->gtType == TYP_FLOAT)
13392                                 {
13393                                     d1 = forceCastToFloat(d1); // truncate precision
13394                                 }
13395                                 goto CNS_DOUBLE; // redundant cast
13396
13397                             default:
13398                                 assert(!"BAD_TYP");
13399                                 break;
13400                         }
13401                         return tree;
13402
13403                     default:
13404                         return tree;
13405                 }
13406                 goto CNS_DOUBLE;
13407
13408             default:
13409                 /* not a foldable typ - e.g. RET const */
13410                 return tree;
13411         }
13412     }
13413
13414     /* We have a binary operator */
13415
13416     assert(kind & GTK_BINOP);
13417     assert(op2);
13418     assert(op1->OperKind() & GTK_CONST);
13419     assert(op2->OperKind() & GTK_CONST);
13420
13421     if (tree->gtOper == GT_COMMA)
13422     {
13423         return op2;
13424     }
13425
13426     if (tree->OperIsAnyList())
13427     {
13428         return tree;
13429     }
13430
13431     switchType = op1->gtType;
13432
13433     // Normally we will just switch on op1 types, but for the case where
13434     //  only op2 is a GC type and op1 is not a GC type, we use the op2 type.
13435     //  This makes us handle this as a case of folding for GC type.
13436     //
13437     if (varTypeIsGC(op2->gtType) && !varTypeIsGC(op1->gtType))
13438     {
13439         switchType = op2->gtType;
13440     }
13441
13442     switch (switchType)
13443     {
13444
13445         /*-------------------------------------------------------------------------
13446          * Fold constant REF of BYREF binary operator
13447          * These can only be comparisons or null pointers
13448          */
13449
13450         case TYP_REF:
13451
13452             /* String nodes are an RVA at this point */
13453
13454             if (op1->gtOper == GT_CNS_STR || op2->gtOper == GT_CNS_STR)
13455             {
13456                 return tree;
13457             }
13458
13459             __fallthrough;
13460
13461         case TYP_BYREF:
13462
13463             i1 = op1->gtIntConCommon.IconValue();
13464             i2 = op2->gtIntConCommon.IconValue();
13465
13466             switch (tree->gtOper)
13467             {
13468                 case GT_EQ:
13469                     i1 = (i1 == i2);
13470                     goto FOLD_COND;
13471
13472                 case GT_NE:
13473                     i1 = (i1 != i2);
13474                     goto FOLD_COND;
13475
13476                 case GT_ADD:
13477                     noway_assert(tree->gtType != TYP_REF);
13478                     // We only fold a GT_ADD that involves a null reference.
13479                     if (((op1->TypeGet() == TYP_REF) && (i1 == 0)) || ((op2->TypeGet() == TYP_REF) && (i2 == 0)))
13480                     {
13481 #ifdef DEBUG
13482                         if (verbose)
13483                         {
13484                             printf("\nFolding operator with constant nodes into a constant:\n");
13485                             gtDispTree(tree);
13486                         }
13487 #endif
13488                         // Fold into GT_IND of null byref
13489                         tree->ChangeOperConst(GT_CNS_INT);
13490                         tree->gtType              = TYP_BYREF;
13491                         tree->gtIntCon.gtIconVal  = 0;
13492                         tree->gtIntCon.gtFieldSeq = FieldSeqStore::NotAField();
13493                         if (vnStore != nullptr)
13494                         {
13495                             fgValueNumberTreeConst(tree);
13496                         }
13497 #ifdef DEBUG
13498                         if (verbose)
13499                         {
13500                             printf("\nFolded to null byref:\n");
13501                             gtDispTree(tree);
13502                         }
13503 #endif
13504                         goto DONE;
13505                     }
13506
13507                 default:
13508                     break;
13509             }
13510
13511             return tree;
13512
13513         /*-------------------------------------------------------------------------
13514          * Fold constant INT binary operator
13515          */
13516
13517         case TYP_INT:
13518
13519             if (tree->OperIsCompare() && (tree->gtType == TYP_BYTE))
13520             {
13521                 tree->gtType = TYP_INT;
13522             }
13523
13524             assert(tree->gtType == TYP_INT || varTypeIsGC(tree->TypeGet()) || tree->gtOper == GT_MKREFANY);
13525
13526             // No GC pointer types should be folded here...
13527             //
13528             assert(!varTypeIsGC(op1->gtType) && !varTypeIsGC(op2->gtType));
13529
13530             if (!op1->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()))
13531             {
13532                 return tree;
13533             }
13534
13535             if (!op2->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()))
13536             {
13537                 return tree;
13538             }
13539
13540             i1 = op1->gtIntConCommon.IconValue();
13541             i2 = op2->gtIntConCommon.IconValue();
13542
13543             switch (tree->gtOper)
13544             {
13545                 case GT_EQ:
13546                     i1 = (INT32(i1) == INT32(i2));
13547                     break;
13548                 case GT_NE:
13549                     i1 = (INT32(i1) != INT32(i2));
13550                     break;
13551
13552                 case GT_LT:
13553                     if (tree->gtFlags & GTF_UNSIGNED)
13554                     {
13555                         i1 = (UINT32(i1) < UINT32(i2));
13556                     }
13557                     else
13558                     {
13559                         i1 = (INT32(i1) < INT32(i2));
13560                     }
13561                     break;
13562
13563                 case GT_LE:
13564                     if (tree->gtFlags & GTF_UNSIGNED)
13565                     {
13566                         i1 = (UINT32(i1) <= UINT32(i2));
13567                     }
13568                     else
13569                     {
13570                         i1 = (INT32(i1) <= INT32(i2));
13571                     }
13572                     break;
13573
13574                 case GT_GE:
13575                     if (tree->gtFlags & GTF_UNSIGNED)
13576                     {
13577                         i1 = (UINT32(i1) >= UINT32(i2));
13578                     }
13579                     else
13580                     {
13581                         i1 = (INT32(i1) >= INT32(i2));
13582                     }
13583                     break;
13584
13585                 case GT_GT:
13586                     if (tree->gtFlags & GTF_UNSIGNED)
13587                     {
13588                         i1 = (UINT32(i1) > UINT32(i2));
13589                     }
13590                     else
13591                     {
13592                         i1 = (INT32(i1) > INT32(i2));
13593                     }
13594                     break;
13595
13596                 case GT_ADD:
13597                     itemp = i1 + i2;
13598                     if (tree->gtOverflow())
13599                     {
13600                         if (tree->gtFlags & GTF_UNSIGNED)
13601                         {
13602                             if (INT64(UINT32(itemp)) != INT64(UINT32(i1)) + INT64(UINT32(i2)))
13603                             {
13604                                 goto INT_OVF;
13605                             }
13606                         }
13607                         else
13608                         {
13609                             if (INT64(INT32(itemp)) != INT64(INT32(i1)) + INT64(INT32(i2)))
13610                             {
13611                                 goto INT_OVF;
13612                             }
13613                         }
13614                     }
13615                     i1       = itemp;
13616                     fieldSeq = GetFieldSeqStore()->Append(op1->gtIntCon.gtFieldSeq, op2->gtIntCon.gtFieldSeq);
13617                     break;
13618                 case GT_SUB:
13619                     itemp = i1 - i2;
13620                     if (tree->gtOverflow())
13621                     {
13622                         if (tree->gtFlags & GTF_UNSIGNED)
13623                         {
13624                             if (INT64(UINT32(itemp)) != ((INT64)((UINT32)i1) - (INT64)((UINT32)i2)))
13625                             {
13626                                 goto INT_OVF;
13627                             }
13628                         }
13629                         else
13630                         {
13631                             if (INT64(INT32(itemp)) != INT64(INT32(i1)) - INT64(INT32(i2)))
13632                             {
13633                                 goto INT_OVF;
13634                             }
13635                         }
13636                     }
13637                     i1 = itemp;
13638                     break;
13639                 case GT_MUL:
13640                     itemp = i1 * i2;
13641                     if (tree->gtOverflow())
13642                     {
13643                         if (tree->gtFlags & GTF_UNSIGNED)
13644                         {
13645                             if (INT64(UINT32(itemp)) != ((INT64)((UINT32)i1) * (INT64)((UINT32)i2)))
13646                             {
13647                                 goto INT_OVF;
13648                             }
13649                         }
13650                         else
13651                         {
13652                             if (INT64(INT32(itemp)) != INT64(INT32(i1)) * INT64(INT32(i2)))
13653                             {
13654                                 goto INT_OVF;
13655                             }
13656                         }
13657                     }
13658                     // For the very particular case of the "constant array index" pseudo-field, we
13659                     // assume that multiplication is by the field width, and preserves that field.
13660                     // This could obviously be made more robust by a more complicated set of annotations...
13661                     if ((op1->gtIntCon.gtFieldSeq != nullptr) && op1->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
13662                     {
13663                         assert(op2->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField());
13664                         fieldSeq = op1->gtIntCon.gtFieldSeq;
13665                     }
13666                     else if ((op2->gtIntCon.gtFieldSeq != nullptr) &&
13667                              op2->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
13668                     {
13669                         assert(op1->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField());
13670                         fieldSeq = op2->gtIntCon.gtFieldSeq;
13671                     }
13672                     i1 = itemp;
13673                     break;
13674
13675                 case GT_OR:
13676                     i1 |= i2;
13677                     break;
13678                 case GT_XOR:
13679                     i1 ^= i2;
13680                     break;
13681                 case GT_AND:
13682                     i1 &= i2;
13683                     break;
13684
13685                 case GT_LSH:
13686                     i1 <<= (i2 & 0x1f);
13687                     break;
13688                 case GT_RSH:
13689                     i1 >>= (i2 & 0x1f);
13690                     break;
13691                 case GT_RSZ:
13692                     /* logical shift -> make it unsigned to not propagate the sign bit */
13693                     i1 = UINT32(i1) >> (i2 & 0x1f);
13694                     break;
13695                 case GT_ROL:
13696                     i1 = (i1 << (i2 & 0x1f)) | (UINT32(i1) >> ((32 - i2) & 0x1f));
13697                     break;
13698                 case GT_ROR:
13699                     i1 = (i1 << ((32 - i2) & 0x1f)) | (UINT32(i1) >> (i2 & 0x1f));
13700                     break;
13701
13702                 /* DIV and MOD can generate an INT 0 - if division by 0
13703                  * or overflow - when dividing MIN by -1 */
13704
13705                 case GT_DIV:
13706                 case GT_MOD:
13707                 case GT_UDIV:
13708                 case GT_UMOD:
13709                     if (INT32(i2) == 0)
13710                     {
13711                         // Division by zero:
13712                         // We have to evaluate this expression and throw an exception
13713                         return tree;
13714                     }
13715                     else if ((INT32(i2) == -1) && (UINT32(i1) == 0x80000000))
13716                     {
13717                         // Overflow Division:
13718                         // We have to evaluate this expression and throw an exception
13719                         return tree;
13720                     }
13721
13722                     if (tree->gtOper == GT_DIV)
13723                     {
13724                         i1 = INT32(i1) / INT32(i2);
13725                     }
13726                     else if (tree->gtOper == GT_MOD)
13727                     {
13728                         i1 = INT32(i1) % INT32(i2);
13729                     }
13730                     else if (tree->gtOper == GT_UDIV)
13731                     {
13732                         i1 = UINT32(i1) / UINT32(i2);
13733                     }
13734                     else
13735                     {
13736                         assert(tree->gtOper == GT_UMOD);
13737                         i1 = UINT32(i1) % UINT32(i2);
13738                     }
13739                     break;
13740
13741                 default:
13742                     return tree;
13743             }
13744
13745         /* We get here after folding to a GT_CNS_INT type
13746          * change the node to the new type / value and make sure the node sizes are OK */
13747         CNS_INT:
13748         FOLD_COND:
13749
13750 #ifdef DEBUG
13751             if (verbose)
13752             {
13753                 printf("\nFolding operator with constant nodes into a constant:\n");
13754                 gtDispTree(tree);
13755             }
13756 #endif
13757
13758 #ifdef _TARGET_64BIT_
13759             // Some operations are performed as 64 bit instead of 32 bit so the upper 32 bits
13760             // need to be discarded. Since constant values are stored as ssize_t and the node
13761             // has TYP_INT the result needs to be sign extended rather than zero extended.
13762             i1 = INT32(i1);
13763 #endif // _TARGET_64BIT_
13764
13765             /* Also all conditional folding jumps here since the node hanging from
13766              * GT_JTRUE has to be a GT_CNS_INT - value 0 or 1 */
13767
13768             tree->ChangeOperConst(GT_CNS_INT);
13769             tree->gtType              = TYP_INT;
13770             tree->gtIntCon.gtIconVal  = i1;
13771             tree->gtIntCon.gtFieldSeq = fieldSeq;
13772             if (vnStore != nullptr)
13773             {
13774                 fgValueNumberTreeConst(tree);
13775             }
13776 #ifdef DEBUG
13777             if (verbose)
13778             {
13779                 printf("Bashed to int constant:\n");
13780                 gtDispTree(tree);
13781             }
13782 #endif
13783             goto DONE;
13784
13785         /* This operation is going to cause an overflow exception. Morph into
13786            an overflow helper. Put a dummy constant value for code generation.
13787
13788            We could remove all subsequent trees in the current basic block,
13789            unless this node is a child of GT_COLON
13790
13791            NOTE: Since the folded value is not constant we should not change the
13792                  "tree" node - otherwise we confuse the logic that checks if the folding
13793                  was successful - instead use one of the operands, e.g. op1
13794          */
13795
13796         LNG_OVF:
13797             // Don't fold overflow operations if not global morph phase.
13798             // The reason for this is that this optimization is replacing a gentree node
13799             // with another new gentree node. Say a GT_CALL(arglist) has one 'arg'
13800             // involving overflow arithmetic.  During assertion prop, it is possible
13801             // that the 'arg' could be constant folded and the result could lead to an
13802             // overflow.  In such a case 'arg' will get replaced with GT_COMMA node
13803             // but fgMorphArgs() - see the logic around "if(lateArgsComputed)" - doesn't
13804             // update args table. For this reason this optimization is enabled only
13805             // for global morphing phase.
13806             //
13807             // TODO-CQ: Once fgMorphArgs() is fixed this restriction could be removed.
13808             CLANG_FORMAT_COMMENT_ANCHOR;
13809
13810             if (!fgGlobalMorph)
13811             {
13812                 assert(tree->gtOverflow());
13813                 return tree;
13814             }
13815
13816             op1 = gtNewLconNode(0);
13817             if (vnStore != nullptr)
13818             {
13819                 op1->gtVNPair.SetBoth(vnStore->VNZeroForType(TYP_LONG));
13820             }
13821             goto OVF;
13822
13823         INT_OVF:
13824             // Don't fold overflow operations if not global morph phase.
13825             // The reason for this is that this optimization is replacing a gentree node
13826             // with another new gentree node. Say a GT_CALL(arglist) has one 'arg'
13827             // involving overflow arithmetic.  During assertion prop, it is possible
13828             // that the 'arg' could be constant folded and the result could lead to an
13829             // overflow.  In such a case 'arg' will get replaced with GT_COMMA node
13830             // but fgMorphArgs() - see the logic around "if(lateArgsComputed)" - doesn't
13831             // update args table. For this reason this optimization is enabled only
13832             // for global morphing phase.
13833             //
13834             // TODO-CQ: Once fgMorphArgs() is fixed this restriction could be removed.
13835
13836             if (!fgGlobalMorph)
13837             {
13838                 assert(tree->gtOverflow());
13839                 return tree;
13840             }
13841
13842             op1 = gtNewIconNode(0);
13843             if (vnStore != nullptr)
13844             {
13845                 op1->gtVNPair.SetBoth(vnStore->VNZeroForType(TYP_INT));
13846             }
13847             goto OVF;
13848
13849         OVF:
13850 #ifdef DEBUG
13851             if (verbose)
13852             {
13853                 printf("\nFolding binary operator with constant nodes into a comma throw:\n");
13854                 gtDispTree(tree);
13855             }
13856 #endif
13857             /* We will change the cast to a GT_COMMA and attach the exception helper as gtOp.gtOp1.
13858              * The constant expression zero becomes op2. */
13859
13860             assert(tree->gtOverflow());
13861             assert(tree->gtOper == GT_ADD || tree->gtOper == GT_SUB || tree->gtOper == GT_CAST ||
13862                    tree->gtOper == GT_MUL);
13863             assert(op1);
13864
13865             op2 = op1;
13866             op1 = gtNewHelperCallNode(CORINFO_HELP_OVERFLOW, TYP_VOID,
13867                                       gtNewArgList(gtNewIconNode(compCurBB->bbTryIndex)));
13868
13869             // op1 is a call to the JIT helper that throws an Overflow exception
13870             // attach the ExcSet for VNF_OverflowExc(Void) to this call
13871
13872             if (vnStore != nullptr)
13873             {
13874                 op1->gtVNPair =
13875                     vnStore->VNPWithExc(ValueNumPair(ValueNumStore::VNForVoid(), ValueNumStore::VNForVoid()),
13876                                         vnStore->VNPExcSetSingleton(
13877                                             vnStore->VNPairForFunc(TYP_REF, VNF_OverflowExc, vnStore->VNPForVoid())));
13878             }
13879
13880             tree = gtNewOperNode(GT_COMMA, tree->gtType, op1, op2);
13881
13882             return tree;
13883
13884         /*-------------------------------------------------------------------------
13885          * Fold constant LONG binary operator
13886          */
13887
13888         case TYP_LONG:
13889
13890             // No GC pointer types should be folded here...
13891             //
13892             assert(!varTypeIsGC(op1->gtType) && !varTypeIsGC(op2->gtType));
13893
13894             // op1 is known to be a TYP_LONG, op2 is normally a TYP_LONG, unless we have a shift operator in which case
13895             // it is a TYP_INT
13896             //
13897             assert((op2->gtType == TYP_LONG) || (op2->gtType == TYP_INT));
13898
13899             if (!op1->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()))
13900             {
13901                 return tree;
13902             }
13903
13904             if (!op2->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()))
13905             {
13906                 return tree;
13907             }
13908
13909             lval1 = op1->gtIntConCommon.LngValue();
13910
13911             // For the shift operators we can have a op2 that is a TYP_INT and thus will be GT_CNS_INT
13912             if (op2->OperGet() == GT_CNS_INT)
13913             {
13914                 lval2 = op2->gtIntConCommon.IconValue();
13915             }
13916             else
13917             {
13918                 lval2 = op2->gtIntConCommon.LngValue();
13919             }
13920
13921             switch (tree->gtOper)
13922             {
13923                 case GT_EQ:
13924                     i1 = (lval1 == lval2);
13925                     goto FOLD_COND;
13926                 case GT_NE:
13927                     i1 = (lval1 != lval2);
13928                     goto FOLD_COND;
13929
13930                 case GT_LT:
13931                     if (tree->gtFlags & GTF_UNSIGNED)
13932                     {
13933                         i1 = (UINT64(lval1) < UINT64(lval2));
13934                     }
13935                     else
13936                     {
13937                         i1 = (lval1 < lval2);
13938                     }
13939                     goto FOLD_COND;
13940
13941                 case GT_LE:
13942                     if (tree->gtFlags & GTF_UNSIGNED)
13943                     {
13944                         i1 = (UINT64(lval1) <= UINT64(lval2));
13945                     }
13946                     else
13947                     {
13948                         i1 = (lval1 <= lval2);
13949                     }
13950                     goto FOLD_COND;
13951
13952                 case GT_GE:
13953                     if (tree->gtFlags & GTF_UNSIGNED)
13954                     {
13955                         i1 = (UINT64(lval1) >= UINT64(lval2));
13956                     }
13957                     else
13958                     {
13959                         i1 = (lval1 >= lval2);
13960                     }
13961                     goto FOLD_COND;
13962
13963                 case GT_GT:
13964                     if (tree->gtFlags & GTF_UNSIGNED)
13965                     {
13966                         i1 = (UINT64(lval1) > UINT64(lval2));
13967                     }
13968                     else
13969                     {
13970                         i1 = (lval1 > lval2);
13971                     }
13972                     goto FOLD_COND;
13973
13974                 case GT_ADD:
13975                     ltemp = lval1 + lval2;
13976
13977                 LNG_ADD_CHKOVF:
13978                     /* For the SIGNED case - If there is one positive and one negative operand, there can be no overflow
13979                      * If both are positive, the result has to be positive, and similarly for negatives.
13980                      *
13981                      * For the UNSIGNED case - If a UINT32 operand is bigger than the result then OVF */
13982
13983                     if (tree->gtOverflow())
13984                     {
13985                         if (tree->gtFlags & GTF_UNSIGNED)
13986                         {
13987                             if ((UINT64(lval1) > UINT64(ltemp)) || (UINT64(lval2) > UINT64(ltemp)))
13988                             {
13989                                 goto LNG_OVF;
13990                             }
13991                         }
13992                         else if (((lval1 < 0) == (lval2 < 0)) && ((lval1 < 0) != (ltemp < 0)))
13993                         {
13994                             goto LNG_OVF;
13995                         }
13996                     }
13997                     lval1 = ltemp;
13998                     break;
13999
14000                 case GT_SUB:
14001                     ltemp = lval1 - lval2;
14002                     if (tree->gtOverflow())
14003                     {
14004                         if (tree->gtFlags & GTF_UNSIGNED)
14005                         {
14006                             if (UINT64(lval2) > UINT64(lval1))
14007                             {
14008                                 goto LNG_OVF;
14009                             }
14010                         }
14011                         else
14012                         {
14013                             /* If both operands are +ve or both are -ve, there can be no
14014                                overflow. Else use the logic for : lval1 + (-lval2) */
14015
14016                             if ((lval1 < 0) != (lval2 < 0))
14017                             {
14018                                 if (lval2 == INT64_MIN)
14019                                 {
14020                                     goto LNG_OVF;
14021                                 }
14022                                 lval2 = -lval2;
14023                                 goto LNG_ADD_CHKOVF;
14024                             }
14025                         }
14026                     }
14027                     lval1 = ltemp;
14028                     break;
14029
14030                 case GT_MUL:
14031                     ltemp = lval1 * lval2;
14032
14033                     if (tree->gtOverflow() && lval2 != 0)
14034                     {
14035
14036                         if (tree->gtFlags & GTF_UNSIGNED)
14037                         {
14038                             UINT64 ultemp = ltemp;
14039                             UINT64 ulval1 = lval1;
14040                             UINT64 ulval2 = lval2;
14041                             if ((ultemp / ulval2) != ulval1)
14042                             {
14043                                 goto LNG_OVF;
14044                             }
14045                         }
14046                         else
14047                         {
14048                             // This does a multiply and then reverses it.  This test works great except for MIN_INT *
14049                             //-1.  In that case we mess up the sign on ltemp.  Make sure to double check the sign.
14050                             // if either is 0, then no overflow
14051                             if (lval1 != 0) // lval2 checked above.
14052                             {
14053                                 if (((lval1 < 0) == (lval2 < 0)) && (ltemp < 0))
14054                                 {
14055                                     goto LNG_OVF;
14056                                 }
14057                                 if (((lval1 < 0) != (lval2 < 0)) && (ltemp > 0))
14058                                 {
14059                                     goto LNG_OVF;
14060                                 }
14061
14062                                 // TODO-Amd64-Unix: Remove the code that disables optimizations for this method when the
14063                                 // clang
14064                                 // optimizer is fixed and/or the method implementation is refactored in a simpler code.
14065                                 // There is a bug in the clang-3.5 optimizer. The issue is that in release build the
14066                                 // optimizer is mistyping (or just wrongly decides to use 32 bit operation for a corner
14067                                 // case of MIN_LONG) the args of the (ltemp / lval2) to int (it does a 32 bit div
14068                                 // operation instead of 64 bit.). For the case of lval1 and lval2 equal to MIN_LONG
14069                                 // (0x8000000000000000) this results in raising a SIGFPE.
14070                                 // Optimizations disabled for now. See compiler.h.
14071                                 if ((ltemp / lval2) != lval1)
14072                                 {
14073                                     goto LNG_OVF;
14074                                 }
14075                             }
14076                         }
14077                     }
14078
14079                     lval1 = ltemp;
14080                     break;
14081
14082                 case GT_OR:
14083                     lval1 |= lval2;
14084                     break;
14085                 case GT_XOR:
14086                     lval1 ^= lval2;
14087                     break;
14088                 case GT_AND:
14089                     lval1 &= lval2;
14090                     break;
14091
14092                 case GT_LSH:
14093                     lval1 <<= (lval2 & 0x3f);
14094                     break;
14095                 case GT_RSH:
14096                     lval1 >>= (lval2 & 0x3f);
14097                     break;
14098                 case GT_RSZ:
14099                     /* logical shift -> make it unsigned to not propagate the sign bit */
14100                     lval1 = UINT64(lval1) >> (lval2 & 0x3f);
14101                     break;
14102                 case GT_ROL:
14103                     lval1 = (lval1 << (lval2 & 0x3f)) | (UINT64(lval1) >> ((64 - lval2) & 0x3f));
14104                     break;
14105                 case GT_ROR:
14106                     lval1 = (lval1 << ((64 - lval2) & 0x3f)) | (UINT64(lval1) >> (lval2 & 0x3f));
14107                     break;
14108
14109                 // Both DIV and IDIV on x86 raise an exception for min_int (and min_long) / -1.  So we preserve
14110                 // that behavior here.
14111                 case GT_DIV:
14112                     if (!lval2)
14113                     {
14114                         return tree;
14115                     }
14116
14117                     if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
14118                     {
14119                         return tree;
14120                     }
14121                     lval1 /= lval2;
14122                     break;
14123
14124                 case GT_MOD:
14125                     if (!lval2)
14126                     {
14127                         return tree;
14128                     }
14129                     if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
14130                     {
14131                         return tree;
14132                     }
14133                     lval1 %= lval2;
14134                     break;
14135
14136                 case GT_UDIV:
14137                     if (!lval2)
14138                     {
14139                         return tree;
14140                     }
14141                     if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
14142                     {
14143                         return tree;
14144                     }
14145                     lval1 = UINT64(lval1) / UINT64(lval2);
14146                     break;
14147
14148                 case GT_UMOD:
14149                     if (!lval2)
14150                     {
14151                         return tree;
14152                     }
14153                     if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
14154                     {
14155                         return tree;
14156                     }
14157                     lval1 = UINT64(lval1) % UINT64(lval2);
14158                     break;
14159                 default:
14160                     return tree;
14161             }
14162
14163         CNS_LONG:
14164
14165             if (fieldSeq != FieldSeqStore::NotAField())
14166             {
14167                 return tree;
14168             }
14169
14170 #ifdef DEBUG
14171             if (verbose)
14172             {
14173                 printf("\nFolding long operator with constant nodes into a constant:\n");
14174                 gtDispTree(tree);
14175             }
14176 #endif
14177             assert((GenTree::s_gtNodeSizes[GT_CNS_NATIVELONG] == TREE_NODE_SZ_SMALL) ||
14178                    (tree->gtDebugFlags & GTF_DEBUG_NODE_LARGE));
14179
14180             tree->ChangeOperConst(GT_CNS_NATIVELONG);
14181             tree->gtIntConCommon.SetLngValue(lval1);
14182             if (vnStore != nullptr)
14183             {
14184                 fgValueNumberTreeConst(tree);
14185             }
14186
14187 #ifdef DEBUG
14188             if (verbose)
14189             {
14190                 printf("Bashed to long constant:\n");
14191                 gtDispTree(tree);
14192             }
14193 #endif
14194             goto DONE;
14195
14196         /*-------------------------------------------------------------------------
14197          * Fold constant FLOAT or DOUBLE binary operator
14198          */
14199
14200         case TYP_FLOAT:
14201         case TYP_DOUBLE:
14202
14203             if (tree->gtOverflowEx())
14204             {
14205                 return tree;
14206             }
14207
14208             assert(op1->gtOper == GT_CNS_DBL);
14209             d1 = op1->gtDblCon.gtDconVal;
14210
14211             assert(varTypeIsFloating(op2->gtType));
14212             assert(op2->gtOper == GT_CNS_DBL);
14213             d2 = op2->gtDblCon.gtDconVal;
14214
14215             /* Special case - check if we have NaN operands.
14216              * For comparisons if not an unordered operation always return 0.
14217              * For unordered operations (i.e. the GTF_RELOP_NAN_UN flag is set)
14218              * the result is always true - return 1. */
14219
14220             if (_isnan(d1) || _isnan(d2))
14221             {
14222 #ifdef DEBUG
14223                 if (verbose)
14224                 {
14225                     printf("Double operator(s) is NaN\n");
14226                 }
14227 #endif
14228                 if (tree->OperKind() & GTK_RELOP)
14229                 {
14230                     if (tree->gtFlags & GTF_RELOP_NAN_UN)
14231                     {
14232                         /* Unordered comparison with NaN always succeeds */
14233                         i1 = 1;
14234                         goto FOLD_COND;
14235                     }
14236                     else
14237                     {
14238                         /* Normal comparison with NaN always fails */
14239                         i1 = 0;
14240                         goto FOLD_COND;
14241                     }
14242                 }
14243             }
14244
14245             switch (tree->gtOper)
14246             {
14247                 case GT_EQ:
14248                     i1 = (d1 == d2);
14249                     goto FOLD_COND;
14250                 case GT_NE:
14251                     i1 = (d1 != d2);
14252                     goto FOLD_COND;
14253
14254                 case GT_LT:
14255                     i1 = (d1 < d2);
14256                     goto FOLD_COND;
14257                 case GT_LE:
14258                     i1 = (d1 <= d2);
14259                     goto FOLD_COND;
14260                 case GT_GE:
14261                     i1 = (d1 >= d2);
14262                     goto FOLD_COND;
14263                 case GT_GT:
14264                     i1 = (d1 > d2);
14265                     goto FOLD_COND;
14266
14267                 // Floating point arithmetic should be done in declared
14268                 // precision while doing constant folding. For this reason though TYP_FLOAT
14269                 // constants are stored as double constants, while performing float arithmetic,
14270                 // double constants should be converted to float.  Here is an example case
14271                 // where performing arithmetic in double precision would lead to incorrect
14272                 // results.
14273                 //
14274                 // Example:
14275                 // float a = float.MaxValue;
14276                 // float b = a*a;   This will produce +inf in single precision and 1.1579207543382391e+077 in double
14277                 //                  precision.
14278                 // float c = b/b;   This will produce NaN in single precision and 1 in double precision.
14279                 case GT_ADD:
14280                     if (op1->TypeGet() == TYP_FLOAT)
14281                     {
14282                         f1 = forceCastToFloat(d1);
14283                         f2 = forceCastToFloat(d2);
14284                         d1 = forceCastToFloat(f1 + f2);
14285                     }
14286                     else
14287                     {
14288                         d1 += d2;
14289                     }
14290                     break;
14291
14292                 case GT_SUB:
14293                     if (op1->TypeGet() == TYP_FLOAT)
14294                     {
14295                         f1 = forceCastToFloat(d1);
14296                         f2 = forceCastToFloat(d2);
14297                         d1 = forceCastToFloat(f1 - f2);
14298                     }
14299                     else
14300                     {
14301                         d1 -= d2;
14302                     }
14303                     break;
14304
14305                 case GT_MUL:
14306                     if (op1->TypeGet() == TYP_FLOAT)
14307                     {
14308                         f1 = forceCastToFloat(d1);
14309                         f2 = forceCastToFloat(d2);
14310                         d1 = forceCastToFloat(f1 * f2);
14311                     }
14312                     else
14313                     {
14314                         d1 *= d2;
14315                     }
14316                     break;
14317
14318                 case GT_DIV:
14319                     if (!d2)
14320                     {
14321                         return tree;
14322                     }
14323                     if (op1->TypeGet() == TYP_FLOAT)
14324                     {
14325                         f1 = forceCastToFloat(d1);
14326                         f2 = forceCastToFloat(d2);
14327                         d1 = forceCastToFloat(f1 / f2);
14328                     }
14329                     else
14330                     {
14331                         d1 /= d2;
14332                     }
14333                     break;
14334
14335                 default:
14336                     return tree;
14337             }
14338
14339         CNS_DOUBLE:
14340
14341 #ifdef DEBUG
14342             if (verbose)
14343             {
14344                 printf("\nFolding fp operator with constant nodes into a fp constant:\n");
14345                 gtDispTree(tree);
14346             }
14347 #endif
14348
14349             assert((GenTree::s_gtNodeSizes[GT_CNS_DBL] == TREE_NODE_SZ_SMALL) ||
14350                    (tree->gtDebugFlags & GTF_DEBUG_NODE_LARGE));
14351
14352             tree->ChangeOperConst(GT_CNS_DBL);
14353             tree->gtDblCon.gtDconVal = d1;
14354             if (vnStore != nullptr)
14355             {
14356                 fgValueNumberTreeConst(tree);
14357             }
14358 #ifdef DEBUG
14359             if (verbose)
14360             {
14361                 printf("Bashed to fp constant:\n");
14362                 gtDispTree(tree);
14363             }
14364 #endif
14365             goto DONE;
14366
14367         default:
14368             /* not a foldable typ */
14369             return tree;
14370     }
14371
14372 //-------------------------------------------------------------------------
14373
14374 DONE:
14375
14376     /* Make sure no side effect flags are set on this constant node */
14377
14378     tree->gtFlags &= ~GTF_ALL_EFFECT;
14379
14380     return tree;
14381 }
14382 #ifdef _PREFAST_
14383 #pragma warning(pop)
14384 #endif
14385
14386 //------------------------------------------------------------------------
14387 // gtNewTempAssign: Create an assignment of the given value to a temp.
14388 //
14389 // Arguments:
14390 //    tmp         - local number for a compiler temp
14391 //    val         - value to assign to the temp
14392 //    pAfterStmt  - statement to insert any additional statements after
14393 //    ilOffset    - il offset for new statements
14394 //    block       - block to insert any additional statements in
14395 //
14396 // Return Value:
14397 //    Normally a new assignment node.
14398 //    However may return a nop node if val is simply a reference to the temp.
14399 //
14400 // Notes:
14401 //    Self-assignments may be represented via NOPs.
14402 //
14403 //    May update the type of the temp, if it was previously unknown.
14404 //
14405 //    May set compFloatingPointUsed.
14406
GenTree* Compiler::gtNewTempAssign(
    unsigned tmp, GenTree* val, GenTree** pAfterStmt, IL_OFFSETX ilOffset, BasicBlock* block)
{
    // Self-assignment is a nop.
    if (val->OperGet() == GT_LCL_VAR && val->gtLclVarCommon.gtLclNum == tmp)
    {
        return gtNewNothingNode();
    }

    LclVarDsc* varDsc = lvaTable + tmp;

    // If a TYP_BYREF value is stored into a native-int temp, retype the value
    // (and any address-of-local subtrees) to TYP_I_IMPL to keep types coherent.
    if (varDsc->TypeGet() == TYP_I_IMPL && val->TypeGet() == TYP_BYREF)
    {
        impBashVarAddrsToI(val);
    }

    var_types valTyp = val->TypeGet();
    // For normalize-on-load locals, use the local's real (small) type rather
    // than the widened type the GT_LCL_VAR node may currently carry.
    if (val->OperGet() == GT_LCL_VAR && lvaTable[val->gtLclVar.gtLclNum].lvNormalizeOnLoad())
    {
        valTyp      = lvaGetRealType(val->gtLclVar.gtLclNum);
        val->gtType = valTyp;
    }
    var_types dstTyp = varDsc->TypeGet();

    /* If the variable's lvType is not yet set then set it here */
    if (dstTyp == TYP_UNDEF)
    {
        varDsc->lvType = dstTyp = genActualType(valTyp);
        if (varTypeIsGC(dstTyp))
        {
            varDsc->lvStructGcCount = 1;
        }
#if FEATURE_SIMD
        else if (varTypeIsSIMD(dstTyp))
        {
            varDsc->lvSIMDType = 1;
        }
#endif
    }

#ifdef DEBUG
    /* Make sure the actual types match               */
    if (genActualType(valTyp) != genActualType(dstTyp))
    {
        // Plus some other exceptions that are apparently legal:
        // 1) TYP_REF or BYREF = TYP_I_IMPL
        bool ok = false;
        if (varTypeIsGC(dstTyp) && (valTyp == TYP_I_IMPL))
        {
            ok = true;
        }
        // 2) TYP_DOUBLE = TYP_FLOAT or TYP_FLOAT = TYP_DOUBLE
        else if (varTypeIsFloating(dstTyp) && varTypeIsFloating(valTyp))
        {
            ok = true;
        }
        // 3) TYP_BYREF = TYP_REF when object stack allocation is enabled
        else if (JitConfig.JitObjectStackAllocation() && (dstTyp == TYP_BYREF) && (valTyp == TYP_REF))
        {
            ok = true;
        }

        if (!ok)
        {
            gtDispTree(val);
            assert(!"Incompatible types for gtNewTempAssign");
        }
    }
#endif

    // Floating Point assignments can be created during inlining
    // see "Zero init inlinee locals:" in fgInlinePrependStatements
    // thus we may need to set compFloatingPointUsed to true here.
    //
    if (varTypeIsFloating(dstTyp) && (compFloatingPointUsed == false))
    {
        compFloatingPointUsed = true;
    }

    /* Create the assignment node */

    GenTree* asg;
    GenTree* dest = gtNewLclvNode(tmp, dstTyp);
    dest->gtFlags |= GTF_VAR_DEF;

    // With first-class structs, we should be propagating the class handle on all non-primitive
    // struct types. We don't have a convenient way to do that for all SIMD temps, since some
    // internal trees use SIMD types that are not used by the input IL. In this case, we allow
    // a null type handle and derive the necessary information about the type from its varType.
    CORINFO_CLASS_HANDLE structHnd = gtGetStructHandleIfPresent(val);
    if (varTypeIsStruct(valTyp) && ((structHnd != NO_CLASS_HANDLE) || (varTypeIsSIMD(valTyp))))
    {
        // The struct value may be a child of a GT_COMMA.
        GenTree* valx = val->gtEffectiveVal(/*commaOnly*/ true);

        if (structHnd != NO_CLASS_HANDLE)
        {
            lvaSetStruct(tmp, structHnd, false);
        }
        else
        {
            // SIMD temp with no class handle: a GT_OBJ value would require one.
            assert(valx->gtOper != GT_OBJ);
        }
        // Struct assignments must keep the destination/source addresses intact.
        dest->gtFlags |= GTF_DONT_CSE;
        valx->gtFlags |= GTF_DONT_CSE;
        asg = impAssignStruct(dest, val, structHnd, (unsigned)CHECK_SPILL_NONE, pAfterStmt, ilOffset, block);
    }
    else
    {
        asg = gtNewAssignNode(dest, val);
    }

    // In rationalized (LIR) form the assignment is represented as a store-local.
    if (compRationalIRForm)
    {
        Rationalizer::RewriteAssignmentIntoStoreLcl(asg->AsOp());
    }

    return asg;
}
14526
14527 /*****************************************************************************
14528  *
14529  *  Create a helper call to access a COM field (iff 'assg' is non-zero this is
14530  *  an assignment and 'assg' is the new value).
14531  */
14532
GenTree* Compiler::gtNewRefCOMfield(GenTree*                objPtr,
                                    CORINFO_RESOLVED_TOKEN* pResolvedToken,
                                    CORINFO_ACCESS_FLAGS    access,
                                    CORINFO_FIELD_INFO*     pFieldInfo,
                                    var_types               lclTyp,
                                    CORINFO_CLASS_HANDLE    structType,
                                    GenTree*                assg)
{
    assert(pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER ||
           pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_ADDR_HELPER ||
           pFieldInfo->fieldAccessor == CORINFO_FIELD_STATIC_ADDR_HELPER);

    /* If we can't access it directly, we need to call a helper function */
    GenTreeArgList* args       = nullptr;
    var_types       helperType = TYP_BYREF;

    if (pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER)
    {
        if (access & CORINFO_ACCESS_SET)
        {
            assert(assg != nullptr);
            // helper needs pointer to struct, not struct itself
            if (pFieldInfo->helper == CORINFO_HELP_SETFIELDSTRUCT)
            {
                assert(structType != nullptr);
                assg = impGetStructAddr(assg, structType, (unsigned)CHECK_SPILL_ALL, true);
            }
            // The helper takes the field's declared type, so insert any needed
            // float<->double conversion on the value being stored.
            else if (lclTyp == TYP_DOUBLE && assg->TypeGet() == TYP_FLOAT)
            {
                assg = gtNewCastNode(TYP_DOUBLE, assg, false, TYP_DOUBLE);
            }
            else if (lclTyp == TYP_FLOAT && assg->TypeGet() == TYP_DOUBLE)
            {
                assg = gtNewCastNode(TYP_FLOAT, assg, false, TYP_FLOAT);
            }

            args       = gtNewArgList(assg);
            helperType = TYP_VOID;
        }
        else if (access & CORINFO_ACCESS_GET)
        {
            helperType = lclTyp;

            // The calling convention for the helper does not take into
            // account optimization of primitive structs.
            if ((pFieldInfo->helper == CORINFO_HELP_GETFIELDSTRUCT) && !varTypeIsStruct(lclTyp))
            {
                helperType = TYP_STRUCT;
            }
        }
    }

    // Struct get/set helpers additionally take the struct's class handle.
    if (pFieldInfo->helper == CORINFO_HELP_GETFIELDSTRUCT || pFieldInfo->helper == CORINFO_HELP_SETFIELDSTRUCT)
    {
        assert(pFieldInfo->structType != nullptr);
        args = gtNewListNode(gtNewIconEmbClsHndNode(pFieldInfo->structType), args);
    }

    GenTree* fieldHnd = impTokenToHandle(pResolvedToken);
    if (fieldHnd == nullptr)
    { // compDonotInline()
        return nullptr;
    }

    args = gtNewListNode(fieldHnd, args);

    // If it's a static field, we shouldn't have an object node
    // If it's an instance field, we have an object node
    assert((pFieldInfo->fieldAccessor != CORINFO_FIELD_STATIC_ADDR_HELPER) ^ (objPtr == nullptr));

    if (objPtr != nullptr)
    {
        args = gtNewListNode(objPtr, args);
    }

    GenTreeCall* call = gtNewHelperCallNode(pFieldInfo->helper, genActualType(helperType), args);

#if FEATURE_MULTIREG_RET
    if (varTypeIsStruct(call))
    {
        // Initialize Return type descriptor of call node.
        ReturnTypeDesc* retTypeDesc = call->GetReturnTypeDesc();
        retTypeDesc->InitializeStructReturnType(this, structType);
    }
#endif // FEATURE_MULTIREG_RET

    GenTree* result = call;

    if (pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER)
    {
        // The value helper performed the access itself; fix up its result.
        if (access & CORINFO_ACCESS_GET)
        {
            if (pFieldInfo->helper == CORINFO_HELP_GETFIELDSTRUCT)
            {
                if (!varTypeIsStruct(lclTyp))
                {
                    // get the result as primitive type
                    result = impGetStructAddr(result, structType, (unsigned)CHECK_SPILL_ALL, true);
                    result = gtNewOperNode(GT_IND, lclTyp, result);
                }
            }
            else if (varTypeIsIntegral(lclTyp) && genTypeSize(lclTyp) < genTypeSize(TYP_INT))
            {
                // The helper does not extend the small return types.
                result = gtNewCastNode(genActualType(lclTyp), result, false, lclTyp);
            }
        }
    }
    else
    {
        // Address helpers return the field's address; dereference (or store
        // through) it here.
        // OK, now do the indirection
        if (access & CORINFO_ACCESS_GET)
        {
            if (varTypeIsStruct(lclTyp))
            {
                result = gtNewObjNode(structType, result);
            }
            else
            {
                result = gtNewOperNode(GT_IND, lclTyp, result);
            }
            result->gtFlags |= (GTF_EXCEPT | GTF_GLOB_REF);
        }
        else if (access & CORINFO_ACCESS_SET)
        {
            if (varTypeIsStruct(lclTyp))
            {
                result = impAssignStructPtr(result, assg, structType, (unsigned)CHECK_SPILL_ALL);
            }
            else
            {
                result = gtNewOperNode(GT_IND, lclTyp, result);
                result->gtFlags |= (GTF_EXCEPT | GTF_GLOB_REF | GTF_IND_TGTANYWHERE);
                result = gtNewAssignNode(result, assg);
            }
        }
    }

    return result;
}
14673
14674 /*****************************************************************************
14675  *
14676  *  Return true if the given node (excluding children trees) contains side effects.
14677  *  Note that it does not recurse, and children need to be handled separately.
14678  *  It may return false even if the node has GTF_SIDE_EFFECT (because of its children).
14679  *
14680  *  Similar to OperMayThrow() (but handles GT_CALLs specially), but considers
14681  *  assignments too.
14682  */
14683
14684 bool Compiler::gtNodeHasSideEffects(GenTree* tree, unsigned flags)
14685 {
14686     if (flags & GTF_ASG)
14687     {
14688         // TODO-Cleanup: This only checks for GT_ASG but according to OperRequiresAsgFlag there
14689         // are many more opers that are considered to have an assignment side effect: atomic ops
14690         // (GT_CMPXCHG & co.), GT_MEMORYBARRIER (not classified as an atomic op) and HW intrinsic
14691         // memory stores. Atomic ops have special handling in gtExtractSideEffList but the others
14692         // will simply be dropped is they are ever subject to an "extract side effects" operation.
14693         // It is possible that the reason no bugs have yet been observed in this area is that the
14694         // other nodes are likely to always be tree roots.
14695         if (tree->OperIs(GT_ASG))
14696         {
14697             return true;
14698         }
14699     }
14700
14701     // Are there only GTF_CALL side effects remaining? (and no other side effect kinds)
14702     if (flags & GTF_CALL)
14703     {
14704         if (tree->OperGet() == GT_CALL)
14705         {
14706             GenTreeCall* const call             = tree->AsCall();
14707             const bool         ignoreExceptions = (flags & GTF_EXCEPT) == 0;
14708             const bool         ignoreCctors     = (flags & GTF_IS_IN_CSE) != 0; // We can CSE helpers that run cctors.
14709             if (!call->HasSideEffects(this, ignoreExceptions, ignoreCctors))
14710             {
14711                 // If this call is otherwise side effect free, check its arguments.
14712                 for (GenTreeArgList* args = call->gtCallArgs; args != nullptr; args = args->Rest())
14713                 {
14714                     if (gtTreeHasSideEffects(args->Current(), flags))
14715                     {
14716                         return true;
14717                     }
14718                 }
14719                 // I'm a little worried that args that assign to temps that are late args will look like
14720                 // side effects...but better to be conservative for now.
14721                 for (GenTreeArgList* args = call->gtCallLateArgs; args != nullptr; args = args->Rest())
14722                 {
14723                     if (gtTreeHasSideEffects(args->Current(), flags))
14724                     {
14725                         return true;
14726                     }
14727                 }
14728
14729                 // Otherwise:
14730                 return false;
14731             }
14732
14733             // Otherwise the GT_CALL is considered to have side-effects.
14734             return true;
14735         }
14736     }
14737
14738     if (flags & GTF_EXCEPT)
14739     {
14740         if (tree->OperMayThrow(this))
14741         {
14742             return true;
14743         }
14744     }
14745
14746     // Expressions declared as CSE by (e.g.) hoisting code are considered to have relevant side
14747     // effects (if we care about GTF_MAKE_CSE).
14748     if ((flags & GTF_MAKE_CSE) && (tree->gtFlags & GTF_MAKE_CSE))
14749     {
14750         return true;
14751     }
14752
14753     return false;
14754 }
14755
14756 /*****************************************************************************
14757  * Returns true if the expr tree has any side effects.
14758  */
14759
14760 bool Compiler::gtTreeHasSideEffects(GenTree* tree, unsigned flags /* = GTF_SIDE_EFFECT*/)
14761 {
14762     // These are the side effect flags that we care about for this tree
14763     unsigned sideEffectFlags = tree->gtFlags & flags;
14764
14765     // Does this tree have any Side-effect flags set that we care about?
14766     if (sideEffectFlags == 0)
14767     {
14768         // no it doesn't..
14769         return false;
14770     }
14771
14772     if (sideEffectFlags == GTF_CALL)
14773     {
14774         if (tree->OperGet() == GT_CALL)
14775         {
14776             // Generally all trees that contain GT_CALL nodes are considered to have side-effects.
14777             //
14778             if (tree->gtCall.gtCallType == CT_HELPER)
14779             {
14780                 // If this node is a helper call we may not care about the side-effects.
14781                 // Note that gtNodeHasSideEffects checks the side effects of the helper itself
14782                 // as well as the side effects of its arguments.
14783                 return gtNodeHasSideEffects(tree, flags);
14784             }
14785         }
14786         else if (tree->OperGet() == GT_INTRINSIC)
14787         {
14788             if (gtNodeHasSideEffects(tree, flags))
14789             {
14790                 return true;
14791             }
14792
14793             if (gtNodeHasSideEffects(tree->gtOp.gtOp1, flags))
14794             {
14795                 return true;
14796             }
14797
14798             if ((tree->gtOp.gtOp2 != nullptr) && gtNodeHasSideEffects(tree->gtOp.gtOp2, flags))
14799             {
14800                 return true;
14801             }
14802
14803             return false;
14804         }
14805     }
14806
14807     return true;
14808 }
14809
14810 GenTree* Compiler::gtBuildCommaList(GenTree* list, GenTree* expr)
14811 {
14812     // 'list' starts off as null,
14813     //        and when it is null we haven't started the list yet.
14814     //
14815     if (list != nullptr)
14816     {
14817         // Create a GT_COMMA that appends 'expr' in front of the remaining set of expressions in (*list)
14818         GenTree* result = gtNewOperNode(GT_COMMA, TYP_VOID, expr, list);
14819
14820         // Set the flags in the comma node
14821         result->gtFlags |= (list->gtFlags & GTF_ALL_EFFECT);
14822         result->gtFlags |= (expr->gtFlags & GTF_ALL_EFFECT);
14823
14824         // 'list' and 'expr' should have valuenumbers defined for both or for neither one (unless we are remorphing,
14825         // in which case a prior transform involving either node may have discarded or otherwise invalidated the value
14826         // numbers).
14827         assert((list->gtVNPair.BothDefined() == expr->gtVNPair.BothDefined()) || !fgGlobalMorph);
14828
14829         // Set the ValueNumber 'gtVNPair' for the new GT_COMMA node
14830         //
14831         if (list->gtVNPair.BothDefined() && expr->gtVNPair.BothDefined())
14832         {
14833             // The result of a GT_COMMA node is op2, the normal value number is op2vnp
14834             // But we also need to include the union of side effects from op1 and op2.
14835             // we compute this value into exceptions_vnp.
14836             ValueNumPair op1vnp;
14837             ValueNumPair op1Xvnp = ValueNumStore::VNPForEmptyExcSet();
14838             ValueNumPair op2vnp;
14839             ValueNumPair op2Xvnp = ValueNumStore::VNPForEmptyExcSet();
14840
14841             vnStore->VNPUnpackExc(expr->gtVNPair, &op1vnp, &op1Xvnp);
14842             vnStore->VNPUnpackExc(list->gtVNPair, &op2vnp, &op2Xvnp);
14843
14844             ValueNumPair exceptions_vnp = ValueNumStore::VNPForEmptyExcSet();
14845
14846             exceptions_vnp = vnStore->VNPExcSetUnion(exceptions_vnp, op1Xvnp);
14847             exceptions_vnp = vnStore->VNPExcSetUnion(exceptions_vnp, op2Xvnp);
14848
14849             result->gtVNPair = vnStore->VNPWithExc(op2vnp, exceptions_vnp);
14850         }
14851
14852         return result;
14853     }
14854     else
14855     {
14856         // The 'expr' will start the list of expressions
14857         return expr;
14858     }
14859 }
14860
14861 //------------------------------------------------------------------------
14862 // gtExtractSideEffList: Extracts side effects from the given expression.
14863 //
14864 // Arguments:
14865 //    expr       - the expression tree to extract side effects from
14866 //    pList      - pointer to a (possibly null) GT_COMMA list that
14867 //                 will contain the extracted side effects
14868 //    flags      - side effect flags to be considered
14869 //    ignoreRoot - ignore side effects on the expression root node
14870 //
14871 // Notes:
14872 //    Side effects are prepended to the GT_COMMA list such that op1 of
14873 //    each comma node holds the side effect tree and op2 points to the
14874 //    next comma node. The original side effect execution order is preserved.
14875 //
void Compiler::gtExtractSideEffList(GenTree*  expr,
                                    GenTree** pList,
                                    unsigned  flags /* = GTF_SIDE_EFFECT*/,
                                    bool      ignoreRoot /* = false */)
{
    // Visitor that collects (in execution order) the minimal set of subtrees
    // that must be preserved to keep the side effects selected by m_flags.
    class SideEffectExtractor final : public GenTreeVisitor<SideEffectExtractor>
    {
    public:
        const unsigned       m_flags;       // side effect flags the caller cares about
        ArrayStack<GenTree*> m_sideEffects; // collected side effect trees, in execution order

        enum
        {
            DoPreOrder        = true,
            UseExecutionOrder = true
        };

        SideEffectExtractor(Compiler* compiler, unsigned flags)
            : GenTreeVisitor(compiler), m_flags(flags), m_sideEffects(compiler->getAllocator(CMK_SideEffects))
        {
        }

        fgWalkResult PreOrderVisit(GenTree** use, GenTree* user)
        {
            GenTree* node = *use;

            bool treeHasSideEffects = m_compiler->gtTreeHasSideEffects(node, m_flags);

            if (treeHasSideEffects)
            {
                // If the node itself is a side effect, keep the whole subtree.
                if (m_compiler->gtNodeHasSideEffects(node, m_flags))
                {
                    m_sideEffects.Push(node);
                    return Compiler::WALK_SKIP_SUBTREES;
                }

                // TODO-Cleanup: These have GTF_ASG set but for some reason gtNodeHasSideEffects ignores
                // them. See the related gtNodeHasSideEffects comment as well.
                // Also, these nodes must always be preserved, no matter what side effect flags are passed
                // in. But then it should never be the case that gtExtractSideEffList gets called without
                // specifying GTF_ASG so there doesn't seem to be any reason to be inconsistent with
                // gtNodeHasSideEffects and make this check unconditionally.
                if (node->OperIsAtomicOp())
                {
                    m_sideEffects.Push(node);
                    return Compiler::WALK_SKIP_SUBTREES;
                }

                if ((m_flags & GTF_EXCEPT) != 0)
                {
                    // Special case - GT_ADDR of GT_IND nodes of TYP_STRUCT have to be kept together.
                    if (node->OperIs(GT_ADDR) && node->gtGetOp1()->OperIsIndir() &&
                        (node->gtGetOp1()->TypeGet() == TYP_STRUCT))
                    {
#ifdef DEBUG
                        if (m_compiler->verbose)
                        {
                            printf("Keep the GT_ADDR and GT_IND together:\n");
                        }
#endif
                        m_sideEffects.Push(node);
                        return Compiler::WALK_SKIP_SUBTREES;
                    }
                }

                // Generally all GT_CALL nodes are considered to have side-effects.
                // So if we get here it must be a helper call that we decided it does
                // not have side effects that we needed to keep.
                assert(!node->OperIs(GT_CALL) || (node->AsCall()->gtCallType == CT_HELPER));
            }

            if ((m_flags & GTF_IS_IN_CSE) != 0)
            {
                // If we're doing CSE then we also need to unmark CSE nodes. This will fail for CSE defs,
                // those need to be extracted as if they're side effects.
                if (!UnmarkCSE(node))
                {
                    m_sideEffects.Push(node);
                    return Compiler::WALK_SKIP_SUBTREES;
                }

                // The existence of CSE defs and uses is not propagated up the tree like side
                // effects are. We need to continue visiting the tree as if it has side effects.
                treeHasSideEffects = true;
            }

            // Only descend into subtrees that may still contain interesting side effects.
            return treeHasSideEffects ? Compiler::WALK_CONTINUE : Compiler::WALK_SKIP_SUBTREES;
        }

    private:
        // UnmarkCSE: attempt to remove CSE info from 'node'; returns false for
        // CSE defs, which must be preserved by the caller.
        bool UnmarkCSE(GenTree* node)
        {
            assert(m_compiler->optValnumCSE_phase);

            if (m_compiler->optUnmarkCSE(node))
            {
                // The call to optUnmarkCSE(node) should have cleared any CSE info.
                assert(!IS_CSE_INDEX(node->gtCSEnum));
                return true;
            }
            else
            {
                assert(IS_CSE_DEF(node->gtCSEnum));
#ifdef DEBUG
                if (m_compiler->verbose)
                {
                    printf("Preserving the CSE def #%02d at ", GET_CSE_INDEX(node->gtCSEnum));
                    m_compiler->printTreeID(node);
                }
#endif
                return false;
            }
        }
    };

    assert(!expr->OperIs(GT_STMT));

    SideEffectExtractor extractor(this, flags);

    if (ignoreRoot)
    {
        // Skip the root node itself and only extract from its operands.
        for (GenTree* op : expr->Operands())
        {
            extractor.WalkTree(&op, nullptr);
        }
    }
    else
    {
        extractor.WalkTree(&expr, nullptr);
    }

    GenTree* list = *pList;

    // The extractor returns side effects in execution order but gtBuildCommaList prepends
    // to the comma-based side effect list so we have to build the list in reverse order.
    // This is also why the list cannot be built while traversing the tree.
    // The number of side effects is usually small (<= 4), less than the ArrayStack's
    // built-in size, so memory allocation is avoided.
    while (!extractor.m_sideEffects.Empty())
    {
        list = gtBuildCommaList(list, extractor.m_sideEffects.Pop());
    }

    *pList = list;
}
15021
15022 /*****************************************************************************
15023  *
15024  *  For debugging only - displays a tree node list and makes sure all the
15025  *  links are correctly set.
15026  */
15027
15028 #ifdef DEBUG
15029
15030 void dispNodeList(GenTree* list, bool verbose)
15031 {
15032     GenTree* last = nullptr;
15033     GenTree* next;
15034
15035     if (!list)
15036     {
15037         return;
15038     }
15039
15040     for (;;)
15041     {
15042         next = list->gtNext;
15043
15044         if (verbose)
15045         {
15046             printf("%08X -> %08X -> %08X\n", last, list, next);
15047         }
15048
15049         assert(!last || last->gtNext == list);
15050
15051         assert(next == nullptr || next->gtPrev == list);
15052
15053         if (!next)
15054         {
15055             break;
15056         }
15057
15058         last = list;
15059         list = next;
15060     }
15061     printf(""); // null string means flush
15062 }
15063
15064 /*****************************************************************************
15065  * Callback to assert that the nodes of a qmark-colon subtree are marked
15066  */
15067
15068 /* static */
15069 Compiler::fgWalkResult Compiler::gtAssertColonCond(GenTree** pTree, fgWalkData* data)
15070 {
15071     assert(data->pCallbackData == nullptr);
15072
15073     assert((*pTree)->gtFlags & GTF_COLON_COND);
15074
15075     return WALK_CONTINUE;
15076 }
15077 #endif // DEBUG
15078
15079 /*****************************************************************************
15080  * Callback to mark the nodes of a qmark-colon subtree that are conditionally
15081  * executed.
15082  */
15083
15084 /* static */
15085 Compiler::fgWalkResult Compiler::gtMarkColonCond(GenTree** pTree, fgWalkData* data)
15086 {
15087     assert(data->pCallbackData == nullptr);
15088
15089     (*pTree)->gtFlags |= GTF_COLON_COND;
15090
15091     return WALK_CONTINUE;
15092 }
15093
15094 /*****************************************************************************
15095  * Callback to clear the conditionally executed flags of nodes that no longer
15096    will be conditionally executed. Note that when we find another colon we must
15097    stop, as the nodes below this one WILL be conditionally executed. This callback
15098    is called when folding a qmark condition (ie the condition is constant).
15099  */
15100
15101 /* static */
15102 Compiler::fgWalkResult Compiler::gtClearColonCond(GenTree** pTree, fgWalkData* data)
15103 {
15104     GenTree* tree = *pTree;
15105
15106     assert(data->pCallbackData == nullptr);
15107
15108     if (tree->OperGet() == GT_COLON)
15109     {
15110         // Nodes below this will be conditionally executed.
15111         return WALK_SKIP_SUBTREES;
15112     }
15113
15114     tree->gtFlags &= ~GTF_COLON_COND;
15115     return WALK_CONTINUE;
15116 }
15117
// FindLinkData: callback context used by gtFindLinkCB / gtFindLink.
struct FindLinkData
{
    GenTree*  nodeToFind; // the node whose use edge we are searching for
    GenTree** result;     // address of the pointer that references nodeToFind (output)
};
15123
15124 /*****************************************************************************
15125  *
15126  *  Callback used by the tree walker to implement fgFindLink()
15127  */
15128 static Compiler::fgWalkResult gtFindLinkCB(GenTree** pTree, Compiler::fgWalkData* cbData)
15129 {
15130     FindLinkData* data = (FindLinkData*)cbData->pCallbackData;
15131     if (*pTree == data->nodeToFind)
15132     {
15133         data->result = pTree;
15134         return Compiler::WALK_ABORT;
15135     }
15136
15137     return Compiler::WALK_CONTINUE;
15138 }
15139
15140 GenTree** Compiler::gtFindLink(GenTree* stmt, GenTree* node)
15141 {
15142     assert(stmt->gtOper == GT_STMT);
15143
15144     FindLinkData data = {node, nullptr};
15145
15146     fgWalkResult result = fgWalkTreePre(&stmt->gtStmt.gtStmtExpr, gtFindLinkCB, &data);
15147
15148     if (result == WALK_ABORT)
15149     {
15150         assert(data.nodeToFind == *data.result);
15151         return data.result;
15152     }
15153     else
15154     {
15155         return nullptr;
15156     }
15157 }
15158
15159 /*****************************************************************************
15160  *
15161  *  Callback that checks if a tree node has oper type GT_CATCH_ARG
15162  */
15163
15164 static Compiler::fgWalkResult gtFindCatchArg(GenTree** pTree, Compiler::fgWalkData* /* data */)
15165 {
15166     return ((*pTree)->OperGet() == GT_CATCH_ARG) ? Compiler::WALK_ABORT : Compiler::WALK_CONTINUE;
15167 }
15168
15169 /*****************************************************************************/
15170 bool Compiler::gtHasCatchArg(GenTree* tree)
15171 {
15172     if (((tree->gtFlags & GTF_ORDER_SIDEEFF) != 0) && (fgWalkTreePre(&tree, gtFindCatchArg) == WALK_ABORT))
15173     {
15174         return true;
15175     }
15176     return false;
15177 }
15178
15179 //------------------------------------------------------------------------
15180 // gtHasCallOnStack:
15181 //
15182 // Arguments:
15183 //    parentStack: a context (stack of parent nodes)
15184 //
15185 // Return Value:
15186 //     returns true if any of the parent nodes are a GT_CALL
15187 //
15188 // Assumptions:
15189 //    We have a stack of parent nodes. This generally requires that
15190 //    we are performing a recursive tree walk using struct fgWalkData
15191 //
15192 //------------------------------------------------------------------------
15193 /* static */ bool Compiler::gtHasCallOnStack(GenTreeStack* parentStack)
15194 {
15195     for (int i = 0; i < parentStack->Height(); i++)
15196     {
15197         GenTree* node = parentStack->Index(i);
15198         if (node->OperGet() == GT_CALL)
15199         {
15200             return true;
15201         }
15202     }
15203     return false;
15204 }
15205
15206 //------------------------------------------------------------------------
15207 // gtGetTypeProducerKind: determine if a tree produces a runtime type, and
15208 //    if so, how.
15209 //
15210 // Arguments:
15211 //    tree - tree to examine
15212 //
15213 // Return Value:
15214 //    TypeProducerKind for the tree.
15215 //
15216 // Notes:
15217 //    Checks to see if this tree returns a RuntimeType value, and if so,
15218 //    how that value is determined.
15219 //
15220 //    Currently handles these cases
15221 //    1) The result of Object::GetType
15222 //    2) The result of typeof(...)
15223 //    3) A null reference
15224 //    4) Tree is otherwise known to have type RuntimeType
15225 //
15226 //    The null reference case is surprisingly common because operator
15227 //    overloading turns the otherwise innocuous
15228 //
15229 //        Type t = ....;
15230 //        if (t == null)
15231 //
15232 //    into a method call.
15233
15234 Compiler::TypeProducerKind Compiler::gtGetTypeProducerKind(GenTree* tree)
15235 {
15236     if (tree->gtOper == GT_CALL)
15237     {
15238         if (tree->gtCall.gtCallType == CT_HELPER)
15239         {
15240             if (gtIsTypeHandleToRuntimeTypeHelper(tree->AsCall()))
15241             {
15242                 return TPK_Handle;
15243             }
15244         }
15245         else if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_SPECIAL_INTRINSIC)
15246         {
15247             if (info.compCompHnd->getIntrinsicID(tree->gtCall.gtCallMethHnd) == CORINFO_INTRINSIC_Object_GetType)
15248             {
15249                 return TPK_GetType;
15250             }
15251         }
15252     }
15253     else if ((tree->gtOper == GT_INTRINSIC) && (tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Object_GetType))
15254     {
15255         return TPK_GetType;
15256     }
15257     else if ((tree->gtOper == GT_CNS_INT) && (tree->gtIntCon.gtIconVal == 0))
15258     {
15259         return TPK_Null;
15260     }
15261     else
15262     {
15263         bool                 isExact   = false;
15264         bool                 isNonNull = false;
15265         CORINFO_CLASS_HANDLE clsHnd    = gtGetClassHandle(tree, &isExact, &isNonNull);
15266
15267         if (clsHnd != NO_CLASS_HANDLE && clsHnd == info.compCompHnd->getBuiltinClass(CLASSID_RUNTIME_TYPE))
15268         {
15269             return TPK_Other;
15270         }
15271     }
15272     return TPK_Unknown;
15273 }
15274
15275 //------------------------------------------------------------------------
15276 // gtIsTypeHandleToRuntimeTypeHelperCall -- see if tree is constructing
15277 //    a RuntimeType from a handle
15278 //
15279 // Arguments:
15280 //    tree - tree to examine
15281 //
15282 // Return Value:
15283 //    True if so
15284
15285 bool Compiler::gtIsTypeHandleToRuntimeTypeHelper(GenTreeCall* call)
15286 {
15287     return call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE) ||
15288            call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE_MAYBENULL);
15289 }
15290
15291 //------------------------------------------------------------------------
15292 // gtIsTypeHandleToRuntimeTypeHandleHelperCall -- see if tree is constructing
15293 //    a RuntimeTypeHandle from a handle
15294 //
15295 // Arguments:
15296 //    tree - tree to examine
15297 //    pHelper - optional pointer to a variable that receives the type of the helper
15298 //
15299 // Return Value:
15300 //    True if so
15301
15302 bool Compiler::gtIsTypeHandleToRuntimeTypeHandleHelper(GenTreeCall* call, CorInfoHelpFunc* pHelper)
15303 {
15304     CorInfoHelpFunc helper = CORINFO_HELP_UNDEF;
15305
15306     if (call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPEHANDLE))
15307     {
15308         helper = CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPEHANDLE;
15309     }
15310     else if (call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPEHANDLE_MAYBENULL))
15311     {
15312         helper = CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPEHANDLE_MAYBENULL;
15313     }
15314
15315     if (pHelper != nullptr)
15316     {
15317         *pHelper = helper;
15318     }
15319
15320     return helper != CORINFO_HELP_UNDEF;
15321 }
15322
15323 bool Compiler::gtIsActiveCSE_Candidate(GenTree* tree)
15324 {
15325     return (optValnumCSE_phase && IS_CSE_INDEX(tree->gtCSEnum));
15326 }
15327
15328 /*****************************************************************************/
15329
// ComplexityStruct: callback state shared between gtComplexityExceeds and
// ComplexityExceedsWalker. Counts visited nodes against a fixed limit.
struct ComplexityStruct
{
    unsigned m_numNodes;  // number of nodes visited so far
    unsigned m_nodeLimit; // the walk aborts once the count exceeds this limit
    ComplexityStruct(unsigned nodeLimit) : m_numNodes(0), m_nodeLimit(nodeLimit)
    {
    }
};
15338
15339 static Compiler::fgWalkResult ComplexityExceedsWalker(GenTree** pTree, Compiler::fgWalkData* data)
15340 {
15341     ComplexityStruct* pComplexity = (ComplexityStruct*)data->pCallbackData;
15342     if (++pComplexity->m_numNodes > pComplexity->m_nodeLimit)
15343     {
15344         return Compiler::WALK_ABORT;
15345     }
15346     else
15347     {
15348         return Compiler::WALK_CONTINUE;
15349     }
15350 }
15351
15352 bool Compiler::gtComplexityExceeds(GenTree** tree, unsigned limit)
15353 {
15354     ComplexityStruct complexity(limit);
15355     if (fgWalkTreePre(tree, &ComplexityExceedsWalker, &complexity) == WALK_ABORT)
15356     {
15357         return true;
15358     }
15359     else
15360     {
15361         return false;
15362     }
15363 }
15364
15365 bool GenTree::IsPhiNode()
15366 {
15367     return (OperGet() == GT_PHI_ARG) || (OperGet() == GT_PHI) || IsPhiDefn();
15368 }
15369
15370 bool GenTree::IsPhiDefn()
15371 {
15372     bool res = ((OperGet() == GT_ASG) && (gtOp.gtOp2 != nullptr) && (gtOp.gtOp2->OperGet() == GT_PHI)) ||
15373                ((OperGet() == GT_STORE_LCL_VAR) && (gtOp.gtOp1 != nullptr) && (gtOp.gtOp1->OperGet() == GT_PHI));
15374     assert(!res || OperGet() == GT_STORE_LCL_VAR || gtOp.gtOp1->OperGet() == GT_LCL_VAR);
15375     return res;
15376 }
15377
15378 bool GenTree::IsPhiDefnStmt()
15379 {
15380     if (OperGet() != GT_STMT)
15381     {
15382         return false;
15383     }
15384     GenTree* asg = gtStmt.gtStmtExpr;
15385     return asg->IsPhiDefn();
15386 }
15387
15388 // IsPartialLclFld: Check for a GT_LCL_FLD whose type is a different size than the lclVar.
15389 //
15390 // Arguments:
15391 //    comp      - the Compiler object.
15392 //
15393 // Return Value:
15394 //    Returns "true" iff 'this' is a GT_LCL_FLD or GT_STORE_LCL_FLD on which the type
15395 //    is not the same size as the type of the GT_LCL_VAR
15396
15397 bool GenTree::IsPartialLclFld(Compiler* comp)
15398 {
15399     return ((gtOper == GT_LCL_FLD) &&
15400             (comp->lvaTable[this->gtLclVarCommon.gtLclNum].lvExactSize != genTypeSize(gtType)));
15401 }
15402
//------------------------------------------------------------------------
// DefinesLocal: Determine whether this node is a store (GT_ASG or a block
//    store) whose destination is a local variable.
//
// Arguments:
//    comp        - The Compiler instance
//    pLclVarTree - An "out" argument set to the defined local node, if any
//    pIsEntire   - Optional "out" argument; set to true when the store covers
//                  the local's entire width, false when it is a partial store
//
// Return Value:
//    Returns true (and sets the out arguments) if this node defines a local.
//
bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire)
{
    GenTreeBlk* blkNode = nullptr;
    if (OperIs(GT_ASG))
    {
        if (gtOp.gtOp1->IsLocal())
        {
            // Direct store to a GT_LCL_VAR / GT_LCL_FLD destination.
            GenTreeLclVarCommon* lclVarTree = gtOp.gtOp1->AsLclVarCommon();
            *pLclVarTree                    = lclVarTree;
            if (pIsEntire != nullptr)
            {
                // A partial GT_LCL_FLD (size differs from the local) is not "entire".
                if (lclVarTree->IsPartialLclFld(comp))
                {
                    *pIsEntire = false;
                }
                else
                {
                    *pIsEntire = true;
                }
            }
            return true;
        }
        else if (gtOp.gtOp1->OperGet() == GT_IND)
        {
            // Indirect store; check whether the address is based on a local.
            GenTree* indArg = gtOp.gtOp1->gtOp.gtOp1;
            return indArg->DefinesLocalAddr(comp, genTypeSize(gtOp.gtOp1->TypeGet()), pLclVarTree, pIsEntire);
        }
        else if (gtOp.gtOp1->OperIsBlk())
        {
            blkNode = gtOp.gtOp1->AsBlk();
        }
    }
    else if (OperIsBlk())
    {
        blkNode = this->AsBlk();
    }
    if (blkNode != nullptr)
    {
        GenTree* destAddr = blkNode->Addr();
        unsigned width    = blkNode->gtBlkSize;
        // Do we care about whether this assigns the entire variable?
        if (pIsEntire != nullptr && width == 0)
        {
            // Width 0 means the size is dynamic (GT_DYN_BLK); try to recover a
            // constant width from the dynamic-size operand.
            assert(blkNode->gtOper == GT_DYN_BLK);
            GenTree* blockWidth = blkNode->AsDynBlk()->gtDynamicSize;
            if (blockWidth->IsCnsIntOrI())
            {
                if (blockWidth->IsIconHandle())
                {
                    // If it's a handle, it must be a class handle.  We only create such block operations
                    // for initialization of struct types, so the type of the argument(s) will match this
                    // type, by construction, and be "entire".
                    assert(blockWidth->IsIconHandle(GTF_ICON_CLASS_HDL));
                    width = comp->info.compCompHnd->getClassSize(
                        CORINFO_CLASS_HANDLE(blockWidth->gtIntConCommon.IconValue()));
                }
                else
                {
                    ssize_t swidth = blockWidth->AsIntConCommon()->IconValue();
                    assert(swidth >= 0);
                    // cpblk of size zero exists in the wild (in yacc-generated code in SQL) and is valid IL.
                    if (swidth == 0)
                    {
                        return false;
                    }
                    width = unsigned(swidth);
                }
            }
        }
        return destAddr->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
    }
    // Otherwise...
    return false;
}
15477
//------------------------------------------------------------------------
// DefinesLocalAddr: Determine whether this tree produces a result which is
//    based on the address of a local, and if so return that local.
//
// Arguments:
//    comp        - The Compiler instance
//    width       - Width, in bytes, of the store whose destination address
//                  this tree computes (0 disables the "entire" match)
//    pLclVarTree - An "out" argument set to the local node
//    pIsEntire   - Optional "out" argument; set to true iff a store of
//                  'width' bytes at this address covers the whole local
//
// Return Value:
//    Returns true if this GenTree defines a result which is based on the address of a local.
//
bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire)
{
    if (OperGet() == GT_ADDR || OperGet() == GT_LCL_VAR_ADDR)
    {
        GenTree* addrArg = this;
        if (OperGet() == GT_ADDR)
        {
            addrArg = gtOp.gtOp1;
        }

        if (addrArg->IsLocal() || addrArg->OperIsLocalAddr())
        {
            GenTreeLclVarCommon* addrArgLcl = addrArg->AsLclVarCommon();
            *pLclVarTree                    = addrArgLcl;
            if (pIsEntire != nullptr)
            {
                unsigned lclOffset = 0;
                if (addrArg->OperIsLocalField())
                {
                    lclOffset = addrArg->gtLclFld.gtLclOffs;
                }

                if (lclOffset != 0)
                {
                    // We aren't updating the bytes at [0..lclOffset-1] so *pIsEntire should be set to false
                    *pIsEntire = false;
                }
                else
                {
                    unsigned lclNum   = addrArgLcl->GetLclNum();
                    unsigned varWidth = comp->lvaLclExactSize(lclNum);
                    if (comp->lvaTable[lclNum].lvNormalizeOnStore())
                    {
                        // It's normalize on store, so use the full storage width -- writing to low bytes won't
                        // necessarily yield a normalized value.
                        varWidth = genTypeStSz(var_types(comp->lvaTable[lclNum].lvType)) * sizeof(int);
                    }
                    *pIsEntire = (varWidth == width);
                }
            }
            return true;
        }
        else if (addrArg->OperGet() == GT_IND)
        {
            // A GT_ADDR of a GT_IND can both be optimized away, recurse using the child of the GT_IND
            return addrArg->gtOp.gtOp1->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
        }
    }
    else if (OperGet() == GT_ADD)
    {
        if (gtOp.gtOp1->IsCnsIntOrI())
        {
            // If we just adding a zero then we allow an IsEntire match against width
            //  otherwise we change width to zero to disallow an IsEntire Match
            return gtOp.gtOp2->DefinesLocalAddr(comp, gtOp.gtOp1->IsIntegralConst(0) ? width : 0, pLclVarTree,
                                                pIsEntire);
        }
        else if (gtOp.gtOp2->IsCnsIntOrI())
        {
            // If we just adding a zero then we allow an IsEntire match against width
            //  otherwise we change width to zero to disallow an IsEntire Match
            return gtOp.gtOp1->DefinesLocalAddr(comp, gtOp.gtOp2->IsIntegralConst(0) ? width : 0, pLclVarTree,
                                                pIsEntire);
        }
    }
    // Post rationalization we could have GT_IND(GT_LEA(..)) trees.
    else if (OperGet() == GT_LEA)
    {
        // This method gets invoked during liveness computation and therefore it is critical
        // that we don't miss 'use' of any local.  The below logic is making the assumption
        // that in case of LEA(base, index, offset) - only base can be a GT_LCL_VAR_ADDR
        // and index is not.
        CLANG_FORMAT_COMMENT_ANCHOR;

#ifdef DEBUG
        GenTree* index = gtOp.gtOp2;
        if (index != nullptr)
        {
            assert(!index->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire));
        }
#endif // DEBUG

        // base
        GenTree* base = gtOp.gtOp1;
        if (base != nullptr)
        {
            // Lea could have an Indir as its base.
            if (base->OperGet() == GT_IND)
            {
                base = base->gtOp.gtOp1->gtEffectiveVal(/*commas only*/ true);
            }
            return base->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
        }
    }
    // Otherwise...
    return false;
}
15576
15577 //------------------------------------------------------------------------
15578 // IsLocalExpr: Determine if this is a LclVarCommon node and return some
15579 //              additional info about it in the two out parameters.
15580 //
15581 // Arguments:
15582 //    comp        - The Compiler instance
15583 //    pLclVarTree - An "out" argument that returns the local tree as a
15584 //                  LclVarCommon, if it is indeed local.
15585 //    pFldSeq     - An "out" argument that returns the value numbering field
15586 //                  sequence for the node, if any.
15587 //
15588 // Return Value:
15589 //    Returns true, and sets the out arguments accordingly, if this is
15590 //    a LclVarCommon node.
15591
15592 bool GenTree::IsLocalExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq)
15593 {
15594     if (IsLocal()) // Note that this covers "GT_LCL_FLD."
15595     {
15596         *pLclVarTree = AsLclVarCommon();
15597         if (OperGet() == GT_LCL_FLD)
15598         {
15599             // Otherwise, prepend this field to whatever we've already accumulated outside in.
15600             *pFldSeq = comp->GetFieldSeqStore()->Append(AsLclFld()->gtFieldSeq, *pFldSeq);
15601         }
15602         return true;
15603     }
15604     else
15605     {
15606         return false;
15607     }
15608 }
15609
15610 // If this tree evaluates some sum of a local address and some constants,
15611 // return the node for the local being addressed
15612
15613 GenTreeLclVarCommon* GenTree::IsLocalAddrExpr()
15614 {
15615     if (OperGet() == GT_ADDR)
15616     {
15617         return gtOp.gtOp1->IsLocal() ? gtOp.gtOp1->AsLclVarCommon() : nullptr;
15618     }
15619     else if (OperIsLocalAddr())
15620     {
15621         return this->AsLclVarCommon();
15622     }
15623     else if (OperGet() == GT_ADD)
15624     {
15625         if (gtOp.gtOp1->OperGet() == GT_CNS_INT)
15626         {
15627             return gtOp.gtOp2->IsLocalAddrExpr();
15628         }
15629         else if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
15630         {
15631             return gtOp.gtOp1->IsLocalAddrExpr();
15632         }
15633     }
15634     // Otherwise...
15635     return nullptr;
15636 }
15637
//------------------------------------------------------------------------
// IsLocalAddrExpr: Determine whether this tree computes the address of a
//    local (possibly plus constant offsets), accumulating the field
//    sequence annotations found along the way.
//
// Arguments:
//    comp        - The Compiler instance
//    pLclVarTree - An "out" argument set to the addressed local node
//    pFldSeq     - In/out field sequence; field sequences found on constant
//                  add operands are prepended to it
//
// Return Value:
//    Returns true (and sets the out arguments) if this tree is a sum of a
//    local address and constants that all carry field sequences.
//
bool GenTree::IsLocalAddrExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq)
{
    if (OperGet() == GT_ADDR)
    {
        // GT_ADDR nodes should not exist in rationalized (LIR) form.
        assert(!comp->compRationalIRForm);
        GenTree* addrArg = gtOp.gtOp1;
        if (addrArg->IsLocal()) // Note that this covers "GT_LCL_FLD."
        {
            *pLclVarTree = addrArg->AsLclVarCommon();
            if (addrArg->OperGet() == GT_LCL_FLD)
            {
                // Otherwise, prepend this field to whatever we've already accumulated outside in.
                *pFldSeq = comp->GetFieldSeqStore()->Append(addrArg->AsLclFld()->gtFieldSeq, *pFldSeq);
            }
            return true;
        }
        else
        {
            return false;
        }
    }
    else if (OperIsLocalAddr())
    {
        *pLclVarTree = this->AsLclVarCommon();
        if (this->OperGet() == GT_LCL_FLD_ADDR)
        {
            *pFldSeq = comp->GetFieldSeqStore()->Append(this->AsLclFld()->gtFieldSeq, *pFldSeq);
        }
        return true;
    }
    else if (OperGet() == GT_ADD)
    {
        if (gtOp.gtOp1->OperGet() == GT_CNS_INT)
        {
            // A constant operand without a field sequence means we can't
            // describe the access; give up.
            if (gtOp.gtOp1->AsIntCon()->gtFieldSeq == nullptr)
            {
                return false;
            }
            // Otherwise, prepend this field to whatever we've already accumulated outside in.
            *pFldSeq = comp->GetFieldSeqStore()->Append(gtOp.gtOp1->AsIntCon()->gtFieldSeq, *pFldSeq);
            return gtOp.gtOp2->IsLocalAddrExpr(comp, pLclVarTree, pFldSeq);
        }
        else if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
        {
            if (gtOp.gtOp2->AsIntCon()->gtFieldSeq == nullptr)
            {
                return false;
            }
            // Otherwise, prepend this field to whatever we've already accumulated outside in.
            *pFldSeq = comp->GetFieldSeqStore()->Append(gtOp.gtOp2->AsIntCon()->gtFieldSeq, *pFldSeq);
            return gtOp.gtOp1->IsLocalAddrExpr(comp, pLclVarTree, pFldSeq);
        }
    }
    // Otherwise...
    return false;
}
15694
15695 //------------------------------------------------------------------------
15696 // IsLclVarUpdateTree: Determine whether this is an assignment tree of the
15697 //                     form Vn = Vn 'oper' 'otherTree' where Vn is a lclVar
15698 //
15699 // Arguments:
15700 //    pOtherTree - An "out" argument in which 'otherTree' will be returned.
15701 //    pOper      - An "out" argument in which 'oper' will be returned.
15702 //
15703 // Return Value:
15704 //    If the tree is of the above form, the lclNum of the variable being
15705 //    updated is returned, and 'pOtherTree' and 'pOper' are set.
15706 //    Otherwise, returns BAD_VAR_NUM.
15707 //
15708 // Notes:
15709 //    'otherTree' can have any shape.
15710 //     We avoid worrying about whether the op is commutative by only considering the
15711 //     first operand of the rhs. It is expected that most trees of this form will
15712 //     already have the lclVar on the lhs.
15713 //     TODO-CQ: Evaluate whether there are missed opportunities due to this, or
15714 //     whether gtSetEvalOrder will already have put the lclVar on the lhs in
15715 //     the cases of interest.
15716
15717 unsigned GenTree::IsLclVarUpdateTree(GenTree** pOtherTree, genTreeOps* pOper)
15718 {
15719     unsigned lclNum = BAD_VAR_NUM;
15720     if (OperIs(GT_ASG))
15721     {
15722         GenTree* lhs = gtOp.gtOp1;
15723         if (lhs->OperGet() == GT_LCL_VAR)
15724         {
15725             unsigned lhsLclNum = lhs->AsLclVarCommon()->gtLclNum;
15726             GenTree* rhs       = gtOp.gtOp2;
15727             if (rhs->OperIsBinary() && (rhs->gtOp.gtOp1->gtOper == GT_LCL_VAR) &&
15728                 (rhs->gtOp.gtOp1->AsLclVarCommon()->gtLclNum == lhsLclNum))
15729             {
15730                 lclNum      = lhsLclNum;
15731                 *pOtherTree = rhs->gtOp.gtOp2;
15732                 *pOper      = rhs->gtOper;
15733             }
15734         }
15735     }
15736     return lclNum;
15737 }
15738
15739 //------------------------------------------------------------------------
15740 // canBeContained: check whether this tree node may be a subcomponent of its parent for purposes
15741 //                 of code generation.
15742 //
15743 // Return value: returns true if it is possible to contain this node and false otherwise.
15744 bool GenTree::canBeContained() const
15745 {
15746     assert(IsLIR());
15747
15748     if (gtHasReg())
15749     {
15750         return false;
15751     }
15752
15753     // It is not possible for nodes that do not produce values or that are not containable values
15754     // to be contained.
15755     if (((OperKind() & (GTK_NOVALUE | GTK_NOCONTAIN)) != 0) || (OperIsHWIntrinsic() && !isContainableHWIntrinsic()))
15756     {
15757         return false;
15758     }
15759
15760     return true;
15761 }
15762
15763 //------------------------------------------------------------------------
15764 // isContained: check whether this tree node is a subcomponent of its parent for codegen purposes
15765 //
15766 // Return Value:
15767 //    Returns true if there is no code generated explicitly for this node.
15768 //    Essentially, it will be rolled into the code generation for the parent.
15769 //
15770 // Assumptions:
15771 //    This method relies upon the value of the GTF_CONTAINED flag.
15772 //    Therefore this method is only valid after Lowering.
15773 //    Also note that register allocation or other subsequent phases may cause
15774 //    nodes to become contained (or not) and therefore this property may change.
15775 //
15776 bool GenTree::isContained() const
15777 {
15778     assert(IsLIR());
15779     const bool isMarkedContained = ((gtFlags & GTF_CONTAINED) != 0);
15780
15781 #ifdef DEBUG
15782     if (!canBeContained())
15783     {
15784         assert(!isMarkedContained);
15785     }
15786
15787     // these actually produce a register (the flags reg, we just don't model it)
15788     // and are a separate instruction from the branch that consumes the result.
15789     // They can only produce a result if the child is a SIMD equality comparison.
15790     else if (OperKind() & GTK_RELOP)
15791     {
15792         // We have to cast away const-ness since AsOp() method is non-const.
15793         GenTree* childNode = const_cast<GenTree*>(this)->AsOp()->gtOp1;
15794         assert((isMarkedContained == false) || childNode->IsSIMDEqualityOrInequality());
15795     }
15796
15797     // these either produce a result in register or set flags reg.
15798     else if (IsSIMDEqualityOrInequality())
15799     {
15800         assert(!isMarkedContained);
15801     }
15802
15803     // if it's contained it can't be unused.
15804     if (isMarkedContained)
15805     {
15806         assert(!IsUnusedValue());
15807     }
15808 #endif // DEBUG
15809     return isMarkedContained;
15810 }
15811
15812 // return true if node is contained and an indir
15813 bool GenTree::isContainedIndir() const
15814 {
15815     return isIndir() && isContained();
15816 }
15817
15818 bool GenTree::isIndirAddrMode()
15819 {
15820     return isIndir() && AsIndir()->Addr()->OperIsAddrMode() && AsIndir()->Addr()->isContained();
15821 }
15822
15823 bool GenTree::isIndir() const
15824 {
15825     return OperGet() == GT_IND || OperGet() == GT_STOREIND;
15826 }
15827
15828 bool GenTreeIndir::HasBase()
15829 {
15830     return Base() != nullptr;
15831 }
15832
15833 bool GenTreeIndir::HasIndex()
15834 {
15835     return Index() != nullptr;
15836 }
15837
15838 GenTree* GenTreeIndir::Base()
15839 {
15840     GenTree* addr = Addr();
15841
15842     if (isIndirAddrMode())
15843     {
15844         GenTree* result = addr->AsAddrMode()->Base();
15845         if (result != nullptr)
15846         {
15847             result = result->gtEffectiveVal();
15848         }
15849         return result;
15850     }
15851     else
15852     {
15853         return addr; // TODO: why do we return 'addr' here, but we return 'nullptr' in the equivalent Index() case?
15854     }
15855 }
15856
15857 GenTree* GenTreeIndir::Index()
15858 {
15859     if (isIndirAddrMode())
15860     {
15861         GenTree* result = Addr()->AsAddrMode()->Index();
15862         if (result != nullptr)
15863         {
15864             result = result->gtEffectiveVal();
15865         }
15866         return result;
15867     }
15868     else
15869     {
15870         return nullptr;
15871     }
15872 }
15873
15874 unsigned GenTreeIndir::Scale()
15875 {
15876     if (HasIndex())
15877     {
15878         return Addr()->AsAddrMode()->gtScale;
15879     }
15880     else
15881     {
15882         return 1;
15883     }
15884 }
15885
15886 ssize_t GenTreeIndir::Offset()
15887 {
15888     if (isIndirAddrMode())
15889     {
15890         return Addr()->AsAddrMode()->Offset();
15891     }
15892     else if (Addr()->gtOper == GT_CLS_VAR_ADDR)
15893     {
15894         return static_cast<ssize_t>(reinterpret_cast<intptr_t>(Addr()->gtClsVar.gtClsVarHnd));
15895     }
15896     else if (Addr()->IsCnsIntOrI() && Addr()->isContained())
15897     {
15898         return Addr()->AsIntConCommon()->IconValue();
15899     }
15900     else
15901     {
15902         return 0;
15903     }
15904 }
15905
15906 //------------------------------------------------------------------------
15907 // GenTreeIntConCommon::ImmedValNeedsReloc: does this immediate value needs recording a relocation with the VM?
15908 //
15909 // Arguments:
15910 //    comp - Compiler instance
15911 //
15912 // Return Value:
15913 //    True if this immediate value requires us to record a relocation for it; false otherwise.
15914
15915 bool GenTreeIntConCommon::ImmedValNeedsReloc(Compiler* comp)
15916 {
15917     return comp->opts.compReloc && (gtOper == GT_CNS_INT) && IsIconHandle();
15918 }
15919
15920 //------------------------------------------------------------------------
15921 // ImmedValCanBeFolded: can this immediate value be folded for op?
15922 //
15923 // Arguments:
15924 //    comp - Compiler instance
15925 //    op - Tree operator
15926 //
15927 // Return Value:
15928 //    True if this immediate value can be folded for op; false otherwise.
15929
15930 bool GenTreeIntConCommon::ImmedValCanBeFolded(Compiler* comp, genTreeOps op)
15931 {
15932     // In general, immediate values that need relocations can't be folded.
15933     // There are cases where we do want to allow folding of handle comparisons
15934     // (e.g., typeof(T) == typeof(int)).
15935     return !ImmedValNeedsReloc(comp) || (op == GT_EQ) || (op == GT_NE);
15936 }
15937
15938 #ifdef _TARGET_AMD64_
// FitsInAddrBase: Returns true if this absolute address fits within the base of an addr mode.
// On Amd64 this effectively means, whether an absolute indirect address can
// be encoded as 32-bit offset relative to IP or zero.
//
// Arguments:
//    comp - Compiler instance (used for reloc options and VM reloc hints)
bool GenTreeIntConCommon::FitsInAddrBase(Compiler* comp)
{
#ifdef DEBUG
    // Early out if PC-rel encoding of absolute addr is disabled.
    if (!comp->opts.compEnablePCRelAddr)
    {
        return false;
    }
#endif

    if (comp->opts.compReloc)
    {
        // During Ngen JIT is always asked to generate relocatable code.
        // Hence JIT will try to encode only icon handles as pc-relative offsets.
        return IsIconHandle() && (IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue()));
    }
    else
    {
        // During Jitting, we are allowed to generate non-relocatable code.
        // On Amd64 we can encode an absolute indirect addr as an offset relative to zero or RIP.
        // An absolute indir addr that can fit within 32-bits can ben encoded as an offset relative
        // to zero. All other absolute indir addr could be attempted to be encoded as RIP relative
        // based on reloc hint provided by VM.  RIP relative encoding is preferred over relative
        // to zero, because the former is one byte smaller than the latter.  For this reason
        // we check for reloc hint first and then whether addr fits in 32-bits next.
        //
        // VM starts off with an initial state to allow both data and code address to be encoded as
        // pc-relative offsets.  Hence JIT will attempt to encode all absolute addresses as pc-relative
        // offsets.  It is possible while jitting a method, an address could not be encoded as a
        // pc-relative offset.  In that case VM will note the overflow and will trigger re-jitting
        // of the method with reloc hints turned off for all future methods. Second time around
        // jitting will succeed since JIT will not attempt to encode data addresses as pc-relative
        // offsets.  Note that JIT will always attempt to relocate code addresses (.e.g call addr).
        // After an overflow, VM will assume any relocation recorded is for a code address and will
        // emit jump thunk if it cannot be encoded as pc-relative offset.
        return (IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue())) || FitsInI32();
    }
}
15980
15981 // Returns true if this icon value is encoded as addr needs recording a relocation with VM
15982 bool GenTreeIntConCommon::AddrNeedsReloc(Compiler* comp)
15983 {
15984     if (comp->opts.compReloc)
15985     {
15986         // During Ngen JIT is always asked to generate relocatable code.
15987         // Hence JIT will try to encode only icon handles as pc-relative offsets.
15988         return IsIconHandle() && (IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue()));
15989     }
15990     else
15991     {
15992         return IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue());
15993     }
15994 }
15995
15996 #elif defined(_TARGET_X86_)
// FitsInAddrBase: Returns true if this absolute address fits within the base of an addr mode.
// On x86 all addresses are 4-bytes and can be directly encoded in an addr mode.
bool GenTreeIntConCommon::FitsInAddrBase(Compiler* comp)
{
#ifdef DEBUG
    // Early out if PC-rel encoding of absolute addr is disabled.
    if (!comp->opts.compEnablePCRelAddr)
    {
        return false;
    }
#endif

    // Any integral constant can serve as an addr-mode base on x86.
    return IsCnsIntOrI();
}
16011
16012 // Returns true if this icon value is encoded as addr needs recording a relocation with VM
16013 bool GenTreeIntConCommon::AddrNeedsReloc(Compiler* comp)
16014 {
16015     // If generating relocatable code, icons should be reported for recording relocatons.
16016     return comp->opts.compReloc && IsIconHandle();
16017 }
16018 #endif //_TARGET_X86_
16019
//------------------------------------------------------------------------
// IsFieldAddr: Check whether this tree is the address of a (static or
//    instance) field, and if so decompose it.
//
// Arguments:
//    comp    - the compiler instance
//    pObj    - [out] the object reference part of the address (nullptr for statics)
//    pStatic - [out] the static base part of the address (nullptr for instance fields)
//    pFldSeq - [in/out] the accumulated field sequence; fields recognized here
//              are prepended (the address is analyzed outside-in)
//
// Return Value:
//    true if the tree was recognized as a field address; false otherwise.
//
// Notes:
//    For GT_ADD trees the method recurses into the non-constant operand, so a
//    chain of additions with field-sequence-annotated constants is peeled one
//    field at a time.
bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pObj, GenTree** pStatic, FieldSeqNode** pFldSeq)
{
    FieldSeqNode* newFldSeq    = nullptr;
    GenTree*      baseAddr     = nullptr;
    bool          mustBeStatic = false;

    FieldSeqNode* statStructFldSeq = nullptr;
    if (TypeGet() == TYP_REF)
    {
        // Recognize struct static field patterns...
        if (OperGet() == GT_IND)
        {
            GenTree*       addr = gtOp.gtOp1;
            GenTreeIntCon* icon = nullptr;
            if (addr->OperGet() == GT_CNS_INT)
            {
                icon = addr->AsIntCon();
            }
            else if (addr->OperGet() == GT_ADD)
            {
                // op1 should never be a field sequence (or any other kind of handle)
                assert((addr->gtOp.gtOp1->gtOper != GT_CNS_INT) || !addr->gtOp.gtOp1->IsIconHandle());
                if (addr->gtOp.gtOp2->OperGet() == GT_CNS_INT)
                {
                    icon = addr->gtOp.gtOp2->AsIntCon();
                }
            }
            if (icon != nullptr && !icon->IsIconHandle(GTF_ICON_STR_HDL) // String handles are a source of TYP_REFs.
                && icon->gtFieldSeq != nullptr &&
                icon->gtFieldSeq->m_next == nullptr // A static field should be a singleton
                // TODO-Review: A pseudoField here indicates an issue - this requires investigation
                // See test case src\ddsuites\src\clr\x86\CoreMangLib\Dev\Globalization\CalendarRegressions.exe
                && !(FieldSeqStore::IsPseudoField(icon->gtFieldSeq->m_fieldHnd)) &&
                icon->gtFieldSeq != FieldSeqStore::NotAField()) // Ignore non-fields.
            {
                statStructFldSeq = icon->gtFieldSeq;
            }
            else
            {
                addr = addr->gtEffectiveVal();

                // Perhaps it's a direct indirection of a helper call or a cse with a zero offset annotation.
                if ((addr->OperGet() == GT_CALL) || (addr->OperGet() == GT_LCL_VAR))
                {
                    FieldSeqNode* zeroFieldSeq = nullptr;
                    if (comp->GetZeroOffsetFieldMap()->Lookup(addr, &zeroFieldSeq))
                    {
                        if (zeroFieldSeq->m_next == nullptr)
                        {
                            statStructFldSeq = zeroFieldSeq;
                        }
                    }
                }
            }
        }
        else if (OperGet() == GT_CLS_VAR)
        {
            GenTreeClsVar* clsVar = AsClsVar();
            if (clsVar->gtFieldSeq != nullptr && clsVar->gtFieldSeq->m_next == nullptr)
            {
                statStructFldSeq = clsVar->gtFieldSeq;
            }
        }
        else if (OperIsLocal())
        {
            // If we have a GT_LCL_VAR, it can be result of a CSE substitution
            // If it is then the CSE assignment will have a ValueNum that
            // describes the RHS of the CSE assignment.
            //
            // The CSE could be a pointer to a boxed struct
            //
            GenTreeLclVarCommon* lclVar = AsLclVarCommon();
            ValueNum             vn     = gtVNPair.GetLiberal();
            if (vn != ValueNumStore::NoVN)
            {
                // Is the ValueNum a MapSelect involving a SharedStatic helper?
                VNFuncApp funcApp1;
                if (comp->vnStore->GetVNFunc(vn, &funcApp1) && (funcApp1.m_func == VNF_MapSelect) &&
                    (comp->vnStore->IsSharedStatic(funcApp1.m_args[1])))
                {
                    ValueNum mapVN = funcApp1.m_args[0];
                    // Is this new 'mapVN' ValueNum, a MapSelect involving a handle?
                    VNFuncApp funcApp2;
                    if (comp->vnStore->GetVNFunc(mapVN, &funcApp2) && (funcApp2.m_func == VNF_MapSelect) &&
                        (comp->vnStore->IsVNHandle(funcApp2.m_args[1])))
                    {
                        ValueNum fldHndVN = funcApp2.m_args[1];
                        // Is this new 'fldHndVN' VNhandle a FieldHandle?
                        unsigned flags = comp->vnStore->GetHandleFlags(fldHndVN);
                        if (flags == GTF_ICON_FIELD_HDL)
                        {
                            CORINFO_FIELD_HANDLE fieldHnd =
                                CORINFO_FIELD_HANDLE(comp->vnStore->ConstantValue<ssize_t>(fldHndVN));

                            // Record this field sequence in 'statStructFldSeq' as it is likely to be a Boxed Struct
                            // field access.
                            statStructFldSeq = comp->GetFieldSeqStore()->CreateSingleton(fieldHnd);
                        }
                    }
                }
            }
        }

        if (statStructFldSeq != nullptr)
        {
            assert(statStructFldSeq->m_next == nullptr);
            // Is this a pointer to a boxed struct?
            if (comp->gtIsStaticFieldPtrToBoxedStruct(TYP_REF, statStructFldSeq->m_fieldHnd))
            {
                *pFldSeq = comp->GetFieldSeqStore()->Append(statStructFldSeq, *pFldSeq);
                *pObj    = nullptr;
                *pStatic = this;
                return true;
            }
        }

        // Otherwise...
        // A TYP_REF tree that is not a boxed-struct static is treated as the
        // object part of an instance field address.
        *pObj    = this;
        *pStatic = nullptr;
        return true;
    }
    else if (OperGet() == GT_ADD)
    {
        // If one operator is a field sequence/handle, the other operator must not also be a field sequence/handle.
        if ((gtOp.gtOp1->OperGet() == GT_CNS_INT) && gtOp.gtOp1->IsIconHandle())
        {
            assert((gtOp.gtOp2->gtOper != GT_CNS_INT) || !gtOp.gtOp2->IsIconHandle());
            newFldSeq = gtOp.gtOp1->AsIntCon()->gtFieldSeq;
            baseAddr  = gtOp.gtOp2;
        }
        else if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
        {
            assert((gtOp.gtOp1->gtOper != GT_CNS_INT) || !gtOp.gtOp1->IsIconHandle());
            newFldSeq = gtOp.gtOp2->AsIntCon()->gtFieldSeq;
            baseAddr  = gtOp.gtOp1;
        }
    }
    else
    {
        // Check if "this" has a zero-offset annotation.
        if (!comp->GetZeroOffsetFieldMap()->Lookup(this, &newFldSeq))
        {
            // If not, this is not a field address.
            return false;
        }
        else
        {
            baseAddr     = this;
            mustBeStatic = true;
        }
    }

    // If not we don't have a field seq, it's not a field address.
    if (newFldSeq == nullptr || newFldSeq == FieldSeqStore::NotAField())
    {
        return false;
    }

    // Prepend this field to whatever we've already accumulated (outside-in).
    *pFldSeq = comp->GetFieldSeqStore()->Append(newFldSeq, *pFldSeq);

    // Is it a static or instance field?
    if (!FieldSeqStore::IsPseudoField(newFldSeq->m_fieldHnd) &&
        comp->info.compCompHnd->isFieldStatic(newFldSeq->m_fieldHnd))
    {
        // It is a static field.  We're done.
        *pObj    = nullptr;
        *pStatic = baseAddr;
        return true;
    }
    else if ((baseAddr != nullptr) && !mustBeStatic)
    {
        // It's an instance field...but it must be for a struct field, since we've not yet encountered
        // a "TYP_REF" address.  Analyze the reset of the address.
        return baseAddr->gtEffectiveVal()->IsFieldAddr(comp, pObj, pStatic, pFldSeq);
    }

    // Otherwise...
    return false;
}
16200
16201 bool Compiler::gtIsStaticFieldPtrToBoxedStruct(var_types fieldNodeType, CORINFO_FIELD_HANDLE fldHnd)
16202 {
16203     if (fieldNodeType != TYP_REF)
16204     {
16205         return false;
16206     }
16207     noway_assert(fldHnd != nullptr);
16208     CorInfoType cit      = info.compCompHnd->getFieldType(fldHnd);
16209     var_types   fieldTyp = JITtype2varType(cit);
16210     return fieldTyp != TYP_REF;
16211 }
16212
16213 #ifdef FEATURE_SIMD
16214 //------------------------------------------------------------------------
16215 // gtGetSIMDZero: Get a zero value of the appropriate SIMD type.
16216 //
16217 // Arguments:
16218 //    var_types - The simdType
16219 //    baseType  - The base type we need
16220 //    simdHandle - The handle for the SIMD type
16221 //
16222 // Return Value:
16223 //    A node generating the appropriate Zero, if we are able to discern it,
16224 //    otherwise null (note that this shouldn't happen, but callers should
16225 //    be tolerant of this case).
16226
16227 GenTree* Compiler::gtGetSIMDZero(var_types simdType, var_types baseType, CORINFO_CLASS_HANDLE simdHandle)
16228 {
16229     bool found    = false;
16230     bool isHWSIMD = true;
16231     noway_assert(m_simdHandleCache != nullptr);
16232
16233     // First, determine whether this is Vector<T>.
16234     if (simdType == getSIMDVectorType())
16235     {
16236         switch (baseType)
16237         {
16238             case TYP_FLOAT:
16239                 found = (simdHandle == m_simdHandleCache->SIMDFloatHandle);
16240                 break;
16241             case TYP_DOUBLE:
16242                 found = (simdHandle == m_simdHandleCache->SIMDDoubleHandle);
16243                 break;
16244             case TYP_INT:
16245                 found = (simdHandle == m_simdHandleCache->SIMDIntHandle);
16246                 break;
16247             case TYP_USHORT:
16248                 found = (simdHandle == m_simdHandleCache->SIMDUShortHandle);
16249                 break;
16250             case TYP_UBYTE:
16251                 found = (simdHandle == m_simdHandleCache->SIMDUByteHandle);
16252                 break;
16253             case TYP_SHORT:
16254                 found = (simdHandle == m_simdHandleCache->SIMDShortHandle);
16255                 break;
16256             case TYP_BYTE:
16257                 found = (simdHandle == m_simdHandleCache->SIMDByteHandle);
16258                 break;
16259             case TYP_LONG:
16260                 found = (simdHandle == m_simdHandleCache->SIMDLongHandle);
16261                 break;
16262             case TYP_UINT:
16263                 found = (simdHandle == m_simdHandleCache->SIMDUIntHandle);
16264                 break;
16265             case TYP_ULONG:
16266                 found = (simdHandle == m_simdHandleCache->SIMDULongHandle);
16267                 break;
16268             default:
16269                 break;
16270         }
16271         if (found)
16272         {
16273             isHWSIMD = false;
16274         }
16275     }
16276
16277     if (!found)
16278     {
16279         // We must still have isHWSIMD set to true, and the only non-HW types left are the fixed types.
16280         switch (simdType)
16281         {
16282             case TYP_SIMD8:
16283                 switch (baseType)
16284                 {
16285                     case TYP_FLOAT:
16286                         if (simdHandle == m_simdHandleCache->SIMDVector2Handle)
16287                         {
16288                             isHWSIMD = false;
16289                         }
16290 #if defined(_TARGET_ARM64_) && defined(FEATURE_HW_INTRINSICS)
16291                         else
16292                         {
16293                             assert(simdHandle == m_simdHandleCache->Vector64FloatHandle);
16294                         }
16295                         break;
16296                     case TYP_INT:
16297                         assert(simdHandle == m_simdHandleCache->Vector64IntHandle);
16298                         break;
16299                     case TYP_USHORT:
16300                         assert(simdHandle == m_simdHandleCache->Vector64UShortHandle);
16301                         break;
16302                     case TYP_UBYTE:
16303                         assert(simdHandle == m_simdHandleCache->Vector64UByteHandle);
16304                         break;
16305                     case TYP_SHORT:
16306                         assert(simdHandle == m_simdHandleCache->Vector64ShortHandle);
16307                         break;
16308                     case TYP_BYTE:
16309                         assert(simdHandle == m_simdHandleCache->Vector64ByteHandle);
16310                         break;
16311                     case TYP_UINT:
16312                         assert(simdHandle == m_simdHandleCache->Vector64UIntHandle);
16313                         break;
16314 #endif // defined(_TARGET_ARM64_) && defined(FEATURE_HW_INTRINSICS)
16315                     default:
16316                         break;
16317                 }
16318                 break;
16319
16320             case TYP_SIMD12:
16321                 assert((baseType == TYP_FLOAT) && (simdHandle == m_simdHandleCache->SIMDVector3Handle));
16322                 isHWSIMD = false;
16323                 break;
16324
16325             case TYP_SIMD16:
16326                 switch (baseType)
16327                 {
16328                     case TYP_FLOAT:
16329                         if (simdHandle == m_simdHandleCache->SIMDVector4Handle)
16330                         {
16331                             isHWSIMD = false;
16332                         }
16333 #if defined(FEATURE_HW_INTRINSICS)
16334                         else
16335                         {
16336                             assert(simdHandle == m_simdHandleCache->Vector128FloatHandle);
16337                         }
16338                         break;
16339                     case TYP_DOUBLE:
16340                         assert(simdHandle == m_simdHandleCache->Vector128DoubleHandle);
16341                         break;
16342                     case TYP_INT:
16343                         assert(simdHandle == m_simdHandleCache->Vector128IntHandle);
16344                         break;
16345                     case TYP_USHORT:
16346                         assert(simdHandle == m_simdHandleCache->Vector128UShortHandle);
16347                         break;
16348                     case TYP_UBYTE:
16349                         assert(simdHandle == m_simdHandleCache->Vector128UByteHandle);
16350                         break;
16351                     case TYP_SHORT:
16352                         assert(simdHandle == m_simdHandleCache->Vector128ShortHandle);
16353                         break;
16354                     case TYP_BYTE:
16355                         assert(simdHandle == m_simdHandleCache->Vector128ByteHandle);
16356                         break;
16357                     case TYP_LONG:
16358                         assert(simdHandle == m_simdHandleCache->Vector128LongHandle);
16359                         break;
16360                     case TYP_UINT:
16361                         assert(simdHandle == m_simdHandleCache->Vector128UIntHandle);
16362                         break;
16363                     case TYP_ULONG:
16364                         assert(simdHandle == m_simdHandleCache->Vector128ULongHandle);
16365                         break;
16366 #endif // defined(FEATURE_HW_INTRINSICS)
16367
16368                     default:
16369                         break;
16370                 }
16371                 break;
16372
16373 #if defined(_TARGET_XARCH4_) && defined(FEATURE_HW_INTRINSICS)
16374             case TYP_SIMD32:
16375                 switch (baseType)
16376                 {
16377                     case TYP_FLOAT:
16378                         assert(simdHandle == m_simdHandleCache->Vector256FloatHandle);
16379                         break;
16380                     case TYP_DOUBLE:
16381                         assert(simdHandle == m_simdHandleCache->Vector256DoubleHandle);
16382                         break;
16383                     case TYP_INT:
16384                         assert(simdHandle == m_simdHandleCache->Vector256IntHandle);
16385                         break;
16386                     case TYP_USHORT:
16387                         assert(simdHandle == m_simdHandleCache->Vector256UShortHandle);
16388                         break;
16389                     case TYP_UBYTE:
16390                         assert(simdHandle == m_simdHandleCache->Vector256UByteHandle);
16391                         break;
16392                     case TYP_SHORT:
16393                         assert(simdHandle == m_simdHandleCache->Vector256ShortHandle);
16394                         break;
16395                     case TYP_BYTE:
16396                         assert(simdHandle == m_simdHandleCache->Vector256ByteHandle);
16397                         break;
16398                     case TYP_LONG:
16399                         assert(simdHandle == m_simdHandleCache->Vector256LongHandle);
16400                         break;
16401                     case TYP_UINT:
16402                         assert(simdHandle == m_simdHandleCache->Vector256UIntHandle);
16403                         break;
16404                     case TYP_ULONG:
16405                         assert(simdHandle == m_simdHandleCache->Vector256ULongHandle);
16406                         break;
16407                     default:
16408                         break;
16409                 }
16410                 break;
16411 #endif // _TARGET_XARCH_ && FEATURE_HW_INTRINSICS
16412             default:
16413                 break;
16414         }
16415     }
16416
16417     unsigned size = genTypeSize(simdType);
16418     if (isHWSIMD)
16419     {
16420 #if defined(_TARGET_XARCH_) && defined(FEATURE_HW_INTRINSICS)
16421         switch (simdType)
16422         {
16423             case TYP_SIMD16:
16424                 if (compSupports(InstructionSet_SSE))
16425                 {
16426                     // We only return the HWIntrinsicNode if SSE is supported, since it is possible for
16427                     // the user to disable the SSE HWIntrinsic support via the COMPlus configuration knobs
16428                     // even though the hardware vector types are still available.
16429                     return gtNewSimdHWIntrinsicNode(simdType, NI_Base_Vector128_Zero, baseType, size);
16430                 }
16431                 return nullptr;
16432             case TYP_SIMD32:
16433                 if (compSupports(InstructionSet_AVX))
16434                 {
16435                     // We only return the HWIntrinsicNode if AVX is supported, since it is possible for
16436                     // the user to disable the AVX HWIntrinsic support via the COMPlus configuration knobs
16437                     // even though the hardware vector types are still available.
16438                     return gtNewSimdHWIntrinsicNode(simdType, NI_Base_Vector256_Zero, baseType, size);
16439                 }
16440                 return nullptr;
16441             default:
16442                 break;
16443         }
16444 #endif // _TARGET_XARCH_ && FEATURE_HW_INTRINSICS
16445         JITDUMP("Coudn't find the matching SIMD type for %s<%s> in gtGetSIMDZero\n", varTypeName(simdType),
16446                 varTypeName(baseType));
16447     }
16448     else
16449     {
16450         return gtNewSIMDVectorZero(simdType, baseType, size);
16451     }
16452     return nullptr;
16453 }
16454 #endif // FEATURE_SIMD
16455
//------------------------------------------------------------------------
// gtGetStructHandleIfPresent: Try to find the struct class handle for a tree.
//
// Arguments:
//    tree - the tree whose struct handle is wanted
//
// Return Value:
//    The class handle if it can be determined from the tree's oper,
//    NO_CLASS_HANDLE otherwise (e.g. for a non-struct-typed tree, or an
//    oper that carries no class information).
CORINFO_CLASS_HANDLE Compiler::gtGetStructHandleIfPresent(GenTree* tree)
{
    CORINFO_CLASS_HANDLE structHnd = NO_CLASS_HANDLE;
    // Look through GT_COMMA (and similar) wrappers to the value-producing node.
    tree                           = tree->gtEffectiveVal();
    if (varTypeIsStruct(tree->gtType))
    {
        switch (tree->gtOper)
        {
            default:
                break;
            case GT_MKREFANY:
                structHnd = impGetRefAnyClass();
                break;
            case GT_OBJ:
                structHnd = tree->gtObj.gtClass;
                break;
            case GT_CALL:
                structHnd = tree->gtCall.gtRetClsHnd;
                break;
            case GT_RET_EXPR:
                structHnd = tree->gtRetExpr.gtRetClsHnd;
                break;
            case GT_ARGPLACE:
                structHnd = tree->gtArgPlace.gtArgPlaceClsHnd;
                break;
            case GT_INDEX:
                structHnd = tree->gtIndex.gtStructElemClass;
                break;
            case GT_INDEX_ADDR:
                structHnd = tree->AsIndexAddr()->gtStructElemClass;
                break;
            case GT_FIELD:
                // getFieldType returns the struct class via its out parameter.
                info.compCompHnd->getFieldType(tree->gtField.gtFldHnd, &structHnd);
                break;
            case GT_ASG:
                // An assignment's handle is that of its destination.
                structHnd = gtGetStructHandleIfPresent(tree->gtGetOp1());
                break;
            case GT_LCL_FLD:
#ifdef FEATURE_SIMD
                // A SIMD-typed local field has no class info of its own; synthesize
                // a handle from the SIMD type (TYP_FLOAT is used as the base type).
                if (varTypeIsSIMD(tree))
                {
                    structHnd = gtGetStructHandleForSIMD(tree->gtType, TYP_FLOAT);
                }
#endif
                break;
            case GT_LCL_VAR:
                structHnd = lvaTable[tree->AsLclVarCommon()->gtLclNum].lvVerTypeInfo.GetClassHandle();
                break;
            case GT_RETURN:
                structHnd = gtGetStructHandleIfPresent(tree->gtOp.gtOp1);
                break;
            case GT_IND:
#ifdef FEATURE_SIMD
                if (varTypeIsSIMD(tree))
                {
                    structHnd = gtGetStructHandleForSIMD(tree->gtType, TYP_FLOAT);
                }
                else
#endif
                {
                    // An indirection may be an array element access; recover the
                    // element class from the array info map if so.
                    ArrayInfo arrInfo;
                    if (TryGetArrayInfo(tree->AsIndir(), &arrInfo))
                    {
                        structHnd = EncodeElemType(arrInfo.m_elemType, arrInfo.m_elemStructType);
                    }
                }
                break;
#ifdef FEATURE_SIMD
            case GT_SIMD:
                structHnd = gtGetStructHandleForSIMD(tree->gtType, tree->AsSIMD()->gtSIMDBaseType);
                break;
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
            case GT_HWIntrinsic:
                structHnd = gtGetStructHandleForHWSIMD(tree->gtType, tree->AsHWIntrinsic()->gtSIMDBaseType);
                break;
#endif
                break;
        }
    }
    return structHnd;
}
16538
16539 CORINFO_CLASS_HANDLE Compiler::gtGetStructHandle(GenTree* tree)
16540 {
16541     CORINFO_CLASS_HANDLE structHnd = gtGetStructHandleIfPresent(tree);
16542     assert(structHnd != NO_CLASS_HANDLE);
16543     return structHnd;
16544 }
16545
16546 //------------------------------------------------------------------------
16547 // gtGetClassHandle: find class handle for a ref type
16548 //
16549 // Arguments:
16550 //    tree -- tree to find handle for
16551 //    pIsExact   [out] -- whether handle is exact type
16552 //    pIsNonNull [out] -- whether tree value is known not to be null
16553 //
16554 // Return Value:
16555 //    nullptr if class handle is unknown,
16556 //        otherwise the class handle.
16557 //    *pIsExact set true if tree type is known to be exactly the handle type,
16558 //        otherwise actual type may be a subtype.
16559 //    *pIsNonNull set true if tree value is known not to be null,
16560 //        otherwise a null value is possible.
16561
16562 CORINFO_CLASS_HANDLE Compiler::gtGetClassHandle(GenTree* tree, bool* pIsExact, bool* pIsNonNull)
16563 {
16564     // Set default values for our out params.
16565     *pIsNonNull                   = false;
16566     *pIsExact                     = false;
16567     CORINFO_CLASS_HANDLE objClass = nullptr;
16568
16569     // Bail out if we're just importing and not generating code, since
16570     // the jit uses TYP_REF for CORINFO_TYPE_VAR locals and args, but
16571     // these may not be ref types.
16572     if (compIsForImportOnly())
16573     {
16574         return objClass;
16575     }
16576
16577     // Bail out if the tree is not a ref type.
16578     var_types treeType = tree->TypeGet();
16579     if (treeType != TYP_REF)
16580     {
16581         return objClass;
16582     }
16583
16584     // Tunnel through commas.
16585     GenTree*         obj   = tree->gtEffectiveVal(false);
16586     const genTreeOps objOp = obj->OperGet();
16587
16588     switch (objOp)
16589     {
16590         case GT_COMMA:
16591         {
16592             // gtEffectiveVal above means we shouldn't see commas here.
16593             assert(!"unexpected GT_COMMA");
16594             break;
16595         }
16596
16597         case GT_LCL_VAR:
16598         {
16599             // For locals, pick up type info from the local table.
16600             const unsigned objLcl = obj->AsLclVar()->GetLclNum();
16601
16602             objClass  = lvaTable[objLcl].lvClassHnd;
16603             *pIsExact = lvaTable[objLcl].lvClassIsExact;
16604             break;
16605         }
16606
16607         case GT_FIELD:
16608         {
16609             // For fields, get the type from the field handle.
16610             CORINFO_FIELD_HANDLE fieldHnd = obj->gtField.gtFldHnd;
16611
16612             if (fieldHnd != nullptr)
16613             {
16614                 objClass = gtGetFieldClassHandle(fieldHnd, pIsExact, pIsNonNull);
16615             }
16616
16617             break;
16618         }
16619
16620         case GT_RET_EXPR:
16621         {
16622             // If we see a RET_EXPR, recurse through to examine the
16623             // return value expression.
16624             GenTree* retExpr = tree->gtRetExpr.gtInlineCandidate;
16625             objClass         = gtGetClassHandle(retExpr, pIsExact, pIsNonNull);
16626             break;
16627         }
16628
16629         case GT_CALL:
16630         {
16631             GenTreeCall* call = tree->AsCall();
16632             if (call->IsInlineCandidate())
16633             {
16634                 // For inline candidates, we've already cached the return
16635                 // type class handle in the inline info.
16636                 InlineCandidateInfo* inlInfo = call->gtInlineCandidateInfo;
16637                 assert(inlInfo != nullptr);
16638
16639                 // Grab it as our first cut at a return type.
16640                 assert(inlInfo->methInfo.args.retType == CORINFO_TYPE_CLASS);
16641                 objClass = inlInfo->methInfo.args.retTypeClass;
16642
16643                 // If the method is shared, the above may not capture
16644                 // the most precise return type information (that is,
16645                 // it may represent a shared return type and as such,
16646                 // have instances of __Canon). See if we can use the
16647                 // context to get at something more definite.
16648                 //
16649                 // For now, we do this here on demand rather than when
16650                 // processing the call, but we could/should apply
16651                 // similar sharpening to the argument and local types
16652                 // of the inlinee.
16653                 const unsigned retClassFlags = info.compCompHnd->getClassAttribs(objClass);
16654                 if (retClassFlags & CORINFO_FLG_SHAREDINST)
16655                 {
16656                     CORINFO_CONTEXT_HANDLE context = inlInfo->exactContextHnd;
16657
16658                     if (context != nullptr)
16659                     {
16660                         CORINFO_CLASS_HANDLE exactClass = nullptr;
16661
16662                         if (((size_t)context & CORINFO_CONTEXTFLAGS_MASK) == CORINFO_CONTEXTFLAGS_CLASS)
16663                         {
16664                             exactClass = (CORINFO_CLASS_HANDLE)((size_t)context & ~CORINFO_CONTEXTFLAGS_MASK);
16665                         }
16666                         else
16667                         {
16668                             CORINFO_METHOD_HANDLE exactMethod =
16669                                 (CORINFO_METHOD_HANDLE)((size_t)context & ~CORINFO_CONTEXTFLAGS_MASK);
16670                             exactClass = info.compCompHnd->getMethodClass(exactMethod);
16671                         }
16672
16673                         // Grab the signature in this context.
16674                         CORINFO_SIG_INFO sig;
16675                         eeGetMethodSig(call->gtCallMethHnd, &sig, exactClass);
16676                         assert(sig.retType == CORINFO_TYPE_CLASS);
16677                         objClass = sig.retTypeClass;
16678                     }
16679                 }
16680             }
16681             else if (call->gtCallType == CT_USER_FUNC)
16682             {
16683                 // For user calls, we can fetch the approximate return
16684                 // type info from the method handle. Unfortunately
16685                 // we've lost the exact context, so this is the best
16686                 // we can do for now.
16687                 CORINFO_METHOD_HANDLE method     = call->gtCallMethHnd;
16688                 CORINFO_CLASS_HANDLE  exactClass = nullptr;
16689                 CORINFO_SIG_INFO      sig;
16690                 eeGetMethodSig(method, &sig, exactClass);
16691                 if (sig.retType == CORINFO_TYPE_VOID)
16692                 {
16693                     // This is a constructor call.
16694                     const unsigned methodFlags = info.compCompHnd->getMethodAttribs(method);
16695                     assert((methodFlags & CORINFO_FLG_CONSTRUCTOR) != 0);
16696                     objClass    = info.compCompHnd->getMethodClass(method);
16697                     *pIsExact   = true;
16698                     *pIsNonNull = true;
16699                 }
16700                 else
16701                 {
16702                     assert(sig.retType == CORINFO_TYPE_CLASS);
16703                     objClass = sig.retTypeClass;
16704                 }
16705             }
16706             else if (call->gtCallType == CT_HELPER)
16707             {
16708                 objClass = gtGetHelperCallClassHandle(call, pIsExact, pIsNonNull);
16709             }
16710
16711             break;
16712         }
16713
16714         case GT_INTRINSIC:
16715         {
16716             GenTreeIntrinsic* intrinsic = obj->AsIntrinsic();
16717
16718             if (intrinsic->gtIntrinsicId == CORINFO_INTRINSIC_Object_GetType)
16719             {
16720                 CORINFO_CLASS_HANDLE runtimeType = info.compCompHnd->getBuiltinClass(CLASSID_RUNTIME_TYPE);
16721                 assert(runtimeType != NO_CLASS_HANDLE);
16722
16723                 objClass    = runtimeType;
16724                 *pIsExact   = false;
16725                 *pIsNonNull = true;
16726             }
16727
16728             break;
16729         }
16730
16731         case GT_CNS_STR:
16732         {
16733             // For literal strings, we know the class and that the
16734             // value is not null.
16735             objClass    = impGetStringClass();
16736             *pIsExact   = true;
16737             *pIsNonNull = true;
16738             break;
16739         }
16740
16741         case GT_IND:
16742         {
16743             GenTreeIndir* indir = obj->AsIndir();
16744
16745             if (indir->HasBase() && !indir->HasIndex())
16746             {
16747                 // indir(addr(lcl)) --> lcl
16748                 //
16749                 // This comes up during constrained callvirt on ref types.
16750
16751                 GenTree*             base = indir->Base();
16752                 GenTreeLclVarCommon* lcl  = base->IsLocalAddrExpr();
16753
16754                 if ((lcl != nullptr) && (base->OperGet() != GT_ADD))
16755                 {
16756                     const unsigned objLcl = lcl->GetLclNum();
16757                     objClass              = lvaTable[objLcl].lvClassHnd;
16758                     *pIsExact             = lvaTable[objLcl].lvClassIsExact;
16759                 }
16760                 else if (base->OperGet() == GT_ARR_ELEM)
16761                 {
16762                     // indir(arr_elem(...)) -> array element type
16763
16764                     GenTree* array = base->AsArrElem()->gtArrObj;
16765
16766                     objClass    = gtGetArrayElementClassHandle(array);
16767                     *pIsExact   = false;
16768                     *pIsNonNull = false;
16769                 }
16770                 else if (base->OperGet() == GT_ADD)
16771                 {
16772                     // This could be a static field access.
16773                     //
16774                     // See if op1 is a static field base helper call
16775                     // and if so, op2 will have the field info.
16776                     GenTree* op1 = base->gtOp.gtOp1;
16777                     GenTree* op2 = base->gtOp.gtOp2;
16778
16779                     const bool op1IsStaticFieldBase = gtIsStaticGCBaseHelperCall(op1);
16780
16781                     if (op1IsStaticFieldBase && (op2->OperGet() == GT_CNS_INT))
16782                     {
16783                         FieldSeqNode* fieldSeq = op2->AsIntCon()->gtFieldSeq;
16784
16785                         if (fieldSeq != nullptr)
16786                         {
16787                             while (fieldSeq->m_next != nullptr)
16788                             {
16789                                 fieldSeq = fieldSeq->m_next;
16790                             }
16791
16792                             assert(!fieldSeq->IsPseudoField());
16793
16794                             // No benefit to calling gtGetFieldClassHandle here, as
16795                             // the exact field being accessed can vary.
16796                             CORINFO_FIELD_HANDLE fieldHnd     = fieldSeq->m_fieldHnd;
16797                             CORINFO_CLASS_HANDLE fieldClass   = nullptr;
16798                             CorInfoType          fieldCorType = info.compCompHnd->getFieldType(fieldHnd, &fieldClass);
16799
16800                             assert(fieldCorType == CORINFO_TYPE_CLASS);
16801                             objClass = fieldClass;
16802                         }
16803                     }
16804                 }
16805             }
16806
16807             break;
16808         }
16809
16810         case GT_BOX:
16811         {
16812             // Box should just wrap a local var reference which has
16813             // the type we're looking for. Also box only represents a
16814             // non-nullable value type so result cannot be null.
16815             GenTreeBox* box     = obj->AsBox();
16816             GenTree*    boxTemp = box->BoxOp();
16817             assert(boxTemp->IsLocal());
16818             const unsigned boxTempLcl = boxTemp->AsLclVar()->GetLclNum();
16819             objClass                  = lvaTable[boxTempLcl].lvClassHnd;
16820             *pIsExact                 = lvaTable[boxTempLcl].lvClassIsExact;
16821             *pIsNonNull               = true;
16822             break;
16823         }
16824
16825         case GT_INDEX:
16826         {
16827             GenTree* array = obj->AsIndex()->Arr();
16828
16829             objClass    = gtGetArrayElementClassHandle(array);
16830             *pIsExact   = false;
16831             *pIsNonNull = false;
16832             break;
16833         }
16834
16835         default:
16836         {
16837             break;
16838         }
16839     }
16840
16841     return objClass;
16842 }
16843
16844 //------------------------------------------------------------------------
16845 // gtGetHelperCallClassHandle: find class handle for return value of a
16846 //   helper call
16847 //
16848 // Arguments:
16849 //    call - helper call to examine
16850 //    pIsExact - [OUT] true if type is known exactly
16851 //    pIsNonNull - [OUT] true if return value is not null
16852 //
16853 // Return Value:
16854 //    nullptr if helper call result is not a ref class, or the class handle
16855 //    is unknown, otherwise the class handle.
16856
16857 CORINFO_CLASS_HANDLE Compiler::gtGetHelperCallClassHandle(GenTreeCall* call, bool* pIsExact, bool* pIsNonNull)
16858 {
16859     assert(call->gtCallType == CT_HELPER);
16860
16861     *pIsNonNull                    = false;
16862     *pIsExact                      = false;
16863     CORINFO_CLASS_HANDLE  objClass = nullptr;
16864     const CorInfoHelpFunc helper   = eeGetHelperNum(call->gtCallMethHnd);
16865
16866     switch (helper)
16867     {
16868         case CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE:
16869         case CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE_MAYBENULL:
16870         {
16871             // Note for some runtimes these helpers return exact types.
16872             //
16873             // But in those cases the types are also sealed, so there's no
16874             // need to claim exactness here.
16875             const bool           helperResultNonNull = (helper == CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE);
16876             CORINFO_CLASS_HANDLE runtimeType         = info.compCompHnd->getBuiltinClass(CLASSID_RUNTIME_TYPE);
16877
16878             assert(runtimeType != NO_CLASS_HANDLE);
16879
16880             objClass    = runtimeType;
16881             *pIsNonNull = helperResultNonNull;
16882             break;
16883         }
16884
16885         case CORINFO_HELP_CHKCASTCLASS:
16886         case CORINFO_HELP_CHKCASTANY:
16887         case CORINFO_HELP_CHKCASTARRAY:
16888         case CORINFO_HELP_CHKCASTINTERFACE:
16889         case CORINFO_HELP_CHKCASTCLASS_SPECIAL:
16890         case CORINFO_HELP_ISINSTANCEOFINTERFACE:
16891         case CORINFO_HELP_ISINSTANCEOFARRAY:
16892         case CORINFO_HELP_ISINSTANCEOFCLASS:
16893         case CORINFO_HELP_ISINSTANCEOFANY:
16894         {
16895             // Fetch the class handle from the helper call arglist
16896             GenTreeArgList*      args    = call->gtCallArgs;
16897             GenTree*             typeArg = args->Current();
16898             CORINFO_CLASS_HANDLE castHnd = gtGetHelperArgClassHandle(typeArg);
16899
16900             // We generally assume the type being cast to is the best type
16901             // for the result, unless it is an interface type.
16902             //
16903             // TODO-CQ: when we have default interface methods then
16904             // this might not be the best assumption. We could also
16905             // explore calling something like mergeClasses to identify
16906             // the more specific class. A similar issue arises when
16907             // typing the temp in impCastClassOrIsInstToTree, when we
16908             // expand the cast inline.
16909             if (castHnd != nullptr)
16910             {
16911                 DWORD attrs = info.compCompHnd->getClassAttribs(castHnd);
16912
16913                 if ((attrs & CORINFO_FLG_INTERFACE) != 0)
16914                 {
16915                     castHnd = nullptr;
16916                 }
16917             }
16918
16919             // If we don't have a good estimate for the type we can use the
16920             // type from the value being cast instead.
16921             if (castHnd == nullptr)
16922             {
16923                 GenTree* valueArg = args->Rest()->Current();
16924                 castHnd           = gtGetClassHandle(valueArg, pIsExact, pIsNonNull);
16925             }
16926
16927             // We don't know at jit time if the cast will succeed or fail, but if it
16928             // fails at runtime then an exception is thrown for cast helpers, or the
16929             // result is set null for instance helpers.
16930             //
16931             // So it safe to claim the result has the cast type.
16932             // Note we don't know for sure that it is exactly this type.
16933             if (castHnd != nullptr)
16934             {
16935                 objClass = castHnd;
16936             }
16937
16938             break;
16939         }
16940
16941         default:
16942             break;
16943     }
16944
16945     return objClass;
16946 }
16947
16948 //------------------------------------------------------------------------
16949 // gtGetArrayElementClassHandle: find class handle for elements of an array
16950 // of ref types
16951 //
16952 // Arguments:
16953 //    array -- array to find handle for
16954 //
16955 // Return Value:
16956 //    nullptr if element class handle is unknown, otherwise the class handle.
16957
16958 CORINFO_CLASS_HANDLE Compiler::gtGetArrayElementClassHandle(GenTree* array)
16959 {
16960     bool                 isArrayExact   = false;
16961     bool                 isArrayNonNull = false;
16962     CORINFO_CLASS_HANDLE arrayClassHnd  = gtGetClassHandle(array, &isArrayExact, &isArrayNonNull);
16963
16964     if (arrayClassHnd != nullptr)
16965     {
16966         // We know the class of the reference
16967         DWORD attribs = info.compCompHnd->getClassAttribs(arrayClassHnd);
16968
16969         if ((attribs & CORINFO_FLG_ARRAY) != 0)
16970         {
16971             // We know for sure it is an array
16972             CORINFO_CLASS_HANDLE elemClassHnd  = nullptr;
16973             CorInfoType          arrayElemType = info.compCompHnd->getChildType(arrayClassHnd, &elemClassHnd);
16974
16975             if (arrayElemType == CORINFO_TYPE_CLASS)
16976             {
16977                 // We know it is an array of ref types
16978                 return elemClassHnd;
16979             }
16980         }
16981     }
16982
16983     return nullptr;
16984 }
16985
16986 //------------------------------------------------------------------------
16987 // gtGetFieldClassHandle: find class handle for a field
16988 //
16989 // Arguments:
16990 //    fieldHnd - field handle for field in question
16991 //    pIsExact - [OUT] true if type is known exactly
16992 //    pIsNonNull - [OUT] true if field value is not null
16993 //
16994 // Return Value:
16995 //    nullptr if helper call result is not a ref class, or the class handle
16996 //    is unknown, otherwise the class handle.
16997 //
16998 //    May examine runtime state of static field instances.
16999
17000 CORINFO_CLASS_HANDLE Compiler::gtGetFieldClassHandle(CORINFO_FIELD_HANDLE fieldHnd, bool* pIsExact, bool* pIsNonNull)
17001 {
17002     CORINFO_CLASS_HANDLE fieldClass   = nullptr;
17003     CorInfoType          fieldCorType = info.compCompHnd->getFieldType(fieldHnd, &fieldClass);
17004
17005     if (fieldCorType == CORINFO_TYPE_CLASS)
17006     {
17007         // Optionally, look at the actual type of the field's value
17008         bool queryForCurrentClass = true;
17009         INDEBUG(queryForCurrentClass = (JitConfig.JitQueryCurrentStaticFieldClass() > 0););
17010
17011         if (queryForCurrentClass)
17012         {
17013
17014 #if DEBUG
17015             const char* fieldClassName = nullptr;
17016             const char* fieldName      = eeGetFieldName(fieldHnd, &fieldClassName);
17017             JITDUMP("Querying runtime about current class of field %s.%s (declared as %s)\n", fieldClassName, fieldName,
17018                     eeGetClassName(fieldClass));
17019 #endif // DEBUG
17020
17021             // Is this a fully initialized init-only static field?
17022             //
17023             // Note we're not asking for speculative results here, yet.
17024             CORINFO_CLASS_HANDLE currentClass = info.compCompHnd->getStaticFieldCurrentClass(fieldHnd);
17025
17026             if (currentClass != NO_CLASS_HANDLE)
17027             {
17028                 // Yes! We know the class exactly and can rely on this to always be true.
17029                 fieldClass  = currentClass;
17030                 *pIsExact   = true;
17031                 *pIsNonNull = true;
17032                 JITDUMP("Runtime reports field is init-only and initialized and has class %s\n",
17033                         eeGetClassName(fieldClass));
17034             }
17035             else
17036             {
17037                 JITDUMP("Field's current class not available\n");
17038             }
17039         }
17040     }
17041
17042     return fieldClass;
17043 }
17044
17045 //------------------------------------------------------------------------
17046 // gtIsGCStaticBaseHelperCall: true if tree is fetching the gc static base
17047 //    for a subsequent static field access
17048 //
17049 // Arguments:
17050 //    tree - tree to consider
17051 //
17052 // Return Value:
17053 //    true if the tree is a suitable helper call
17054 //
17055 // Notes:
17056 //    Excludes R2R helpers as they specify the target field in a way
17057 //    that is opaque to the jit.
17058
17059 bool Compiler::gtIsStaticGCBaseHelperCall(GenTree* tree)
17060 {
17061     if (tree->OperGet() != GT_CALL)
17062     {
17063         return false;
17064     }
17065
17066     GenTreeCall* call = tree->AsCall();
17067
17068     if (call->gtCallType != CT_HELPER)
17069     {
17070         return false;
17071     }
17072
17073     const CorInfoHelpFunc helper = eeGetHelperNum(call->gtCallMethHnd);
17074
17075     switch (helper)
17076     {
17077         // We are looking for a REF type so only need to check for the GC base helpers
17078         case CORINFO_HELP_GETGENERICS_GCSTATIC_BASE:
17079         case CORINFO_HELP_GETSHARED_GCSTATIC_BASE:
17080         case CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR:
17081         case CORINFO_HELP_GETSHARED_GCSTATIC_BASE_DYNAMICCLASS:
17082         case CORINFO_HELP_GETGENERICS_GCTHREADSTATIC_BASE:
17083         case CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE:
17084         case CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE_NOCTOR:
17085         case CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE_DYNAMICCLASS:
17086             return true;
17087         default:
17088             break;
17089     }
17090
17091     return false;
17092 }
17093
//------------------------------------------------------------------------
// ParseArrayAddress: Decompose this tree (an array address expression) into
//    the array object, a value number for the element index, and the field
//    sequence for any struct field accessed within the element.
//
// Arguments:
//    comp      - compiler instance
//    arrayInfo - element size and first-element offset for this array access
//    pArr      - [OUT] the array object tree, or nullptr if none was found
//    pInxVN    - [OUT] value number of the element index
//    pFldSeq   - [OUT] first non-pseudo field of the struct field sequence
//                applied within the element (assumed nullptr on entry)
//
void GenTree::ParseArrayAddress(
    Compiler* comp, ArrayInfo* arrayInfo, GenTree** pArr, ValueNum* pInxVN, FieldSeqNode** pFldSeq)
{
    *pArr                 = nullptr;
    ValueNum       inxVN  = ValueNumStore::NoVN;
    target_ssize_t offset = 0;
    FieldSeqNode*  fldSeq = nullptr;

    // Walk the address expression, accumulating the constant offset, the
    // non-constant index VN, and any field sequence contributions.
    ParseArrayAddressWork(comp, 1, pArr, &inxVN, &offset, &fldSeq);

    // If we didn't find an array reference (perhaps it is the constant null?) we will give up.
    if (*pArr == nullptr)
    {
        return;
    }

    // OK, new we have to figure out if any part of the "offset" is a constant contribution to the index.
    // First, sum the offsets of any fields in fldSeq.
    unsigned      fieldOffsets = 0;
    FieldSeqNode* fldSeqIter   = fldSeq;
    // Also, find the first non-pseudo field...
    assert(*pFldSeq == nullptr);
    while (fldSeqIter != nullptr)
    {
        if (fldSeqIter == FieldSeqStore::NotAField())
        {
            // TODO-Review: A NotAField here indicates a failure to properly maintain the field sequence
            // See test case self_host_tests_x86\jit\regression\CLR-x86-JIT\v1-m12-beta2\ b70992\ b70992.exe
            // Safest thing to do here is to drop back to MinOpts
            CLANG_FORMAT_COMMENT_ANCHOR;

#ifdef DEBUG
            if (comp->opts.optRepeat)
            {
                // We don't guarantee preserving these annotations through the entire optimizer, so
                // just conservatively return null if under optRepeat.
                *pArr = nullptr;
                return;
            }
#endif // DEBUG
            noway_assert(!"fldSeqIter is NotAField() in ParseArrayAddress");
        }

        if (!FieldSeqStore::IsPseudoField(fldSeqIter->m_fieldHnd))
        {
            // Record only the first real field as the result sequence head.
            if (*pFldSeq == nullptr)
            {
                *pFldSeq = fldSeqIter;
            }
            CORINFO_CLASS_HANDLE fldCls = nullptr;
            noway_assert(fldSeqIter->m_fieldHnd != nullptr);
            CorInfoType cit = comp->info.compCompHnd->getFieldType(fldSeqIter->m_fieldHnd, &fldCls);
            fieldOffsets += comp->compGetTypeSize(cit, fldCls);
        }
        fldSeqIter = fldSeqIter->m_next;
    }

    // Is there some portion of the "offset" beyond the first-elem offset and the struct field suffix we just computed?
    if (!FitsIn<target_ssize_t>(fieldOffsets + arrayInfo->m_elemOffset) ||
        !FitsIn<target_ssize_t>(arrayInfo->m_elemSize))
    {
        // This seems unlikely, but no harm in being safe...
        *pInxVN = comp->GetValueNumStore()->VNForExpr(nullptr, TYP_INT);
        return;
    }
    // Otherwise...
    target_ssize_t offsetAccountedFor = static_cast<target_ssize_t>(fieldOffsets + arrayInfo->m_elemOffset);
    target_ssize_t elemSize           = static_cast<target_ssize_t>(arrayInfo->m_elemSize);

    target_ssize_t constIndOffset = offset - offsetAccountedFor;
    // This should be divisible by the element size...
    assert((constIndOffset % elemSize) == 0);
    target_ssize_t constInd = constIndOffset / elemSize;

    ValueNumStore* vnStore = comp->GetValueNumStore();

    if (inxVN == ValueNumStore::NoVN)
    {
        // Must be a constant index.
        *pInxVN = vnStore->VNForPtrSizeIntCon(constInd);
    }
    else
    {
        //
        // Perform ((inxVN / elemSizeVN) + vnForConstInd)
        //

        // The value associated with the index value number (inxVN) is the offset into the array,
        // which has been scaled by element size. We need to recover the array index from that offset
        if (vnStore->IsVNConstant(inxVN))
        {
            target_ssize_t index = vnStore->CoercedConstantValue<target_ssize_t>(inxVN);
            noway_assert(elemSize > 0 && ((index % elemSize) == 0));
            *pInxVN = vnStore->VNForPtrSizeIntCon((index / elemSize) + constInd);
        }
        else
        {
            bool canFoldDiv = false;

            // If the index VN is a MUL by elemSize, see if we can eliminate it instead of adding
            // the division by elemSize.
            VNFuncApp funcApp;
            if (vnStore->GetVNFunc(inxVN, &funcApp) && funcApp.m_func == (VNFunc)GT_MUL)
            {
                ValueNum vnForElemSize = vnStore->VNForLongCon(elemSize);

                // One of the multiply operand is elemSize, so the resulting
                // index VN should simply be the other operand.
                if (funcApp.m_args[1] == vnForElemSize)
                {
                    *pInxVN    = funcApp.m_args[0];
                    canFoldDiv = true;
                }
                else if (funcApp.m_args[0] == vnForElemSize)
                {
                    *pInxVN    = funcApp.m_args[1];
                    canFoldDiv = true;
                }
            }

            // Perform ((inxVN / elemSizeVN) + vnForConstInd)
            if (!canFoldDiv)
            {
                ValueNum vnForElemSize = vnStore->VNForPtrSizeIntCon(elemSize);
                ValueNum vnForScaledInx =
                    vnStore->VNForFunc(TYP_I_IMPL, GetVNFuncForOper(GT_DIV, VOK_Default), inxVN, vnForElemSize);
                *pInxVN = vnForScaledInx;
            }

            if (constInd != 0)
            {
                ValueNum vnForConstInd = comp->GetValueNumStore()->VNForPtrSizeIntCon(constInd);
                VNFunc   vnFunc        = GetVNFuncForOper(GT_ADD, VOK_Default);

                *pInxVN = comp->GetValueNumStore()->VNForFunc(TYP_I_IMPL, vnFunc, *pInxVN, vnForConstInd);
            }
        }
    }
}
17233
//------------------------------------------------------------------------
// ParseArrayAddressWork: Recursive worker for ParseArrayAddress. Walks the
//    address expression, multiplying contributions by 'inputMul' as it
//    descends through scaling operators, and accumulates into the outputs.
//
// Arguments:
//    comp     - compiler instance
//    inputMul - multiplier applied to this subtree's contribution
//    pArr     - [IN/OUT] set to the array object (the TYP_REF node) when found
//    pInxVN   - [IN/OUT] accumulated VN of the non-constant index portion
//    pOffset  - [IN/OUT] accumulated constant byte offset
//    pFldSeq  - [IN/OUT] accumulated field sequence from constant nodes
//
void GenTree::ParseArrayAddressWork(Compiler*       comp,
                                    target_ssize_t  inputMul,
                                    GenTree**       pArr,
                                    ValueNum*       pInxVN,
                                    target_ssize_t* pOffset,
                                    FieldSeqNode**  pFldSeq)
{
    if (TypeGet() == TYP_REF)
    {
        // This must be the array pointer.
        *pArr = this;
        assert(inputMul == 1); // Can't multiply the array pointer by anything.
    }
    else
    {
        switch (OperGet())
        {
            case GT_CNS_INT:
                // Constants contribute (scaled) to the byte offset, and carry
                // any field sequence annotation along.
                *pFldSeq = comp->GetFieldSeqStore()->Append(*pFldSeq, gtIntCon.gtFieldSeq);
                assert(!gtIntCon.ImmedValNeedsReloc(comp));
                // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntCon::gtIconVal had target_ssize_t
                // type.
                *pOffset += (inputMul * (target_ssize_t)(gtIntCon.gtIconVal));
                return;

            case GT_ADD:
            case GT_SUB:
                // Recurse into both operands; a subtrahend contributes negatively.
                gtOp.gtOp1->ParseArrayAddressWork(comp, inputMul, pArr, pInxVN, pOffset, pFldSeq);
                if (OperGet() == GT_SUB)
                {
                    inputMul = -inputMul;
                }
                gtOp.gtOp2->ParseArrayAddressWork(comp, inputMul, pArr, pInxVN, pOffset, pFldSeq);
                return;

            case GT_MUL:
            {
                // If one op is a constant, continue parsing down.
                target_ssize_t subMul   = 0;
                GenTree*       nonConst = nullptr;
                if (gtOp.gtOp1->IsCnsIntOrI())
                {
                    // If the other arg is an int constant, and is a "not-a-field", choose
                    // that as the multiplier, thus preserving constant index offsets...
                    if (gtOp.gtOp2->OperGet() == GT_CNS_INT &&
                        gtOp.gtOp2->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField())
                    {
                        assert(!gtOp.gtOp2->gtIntCon.ImmedValNeedsReloc(comp));
                        // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntConCommon::gtIconVal had
                        // target_ssize_t type.
                        subMul   = (target_ssize_t)gtOp.gtOp2->gtIntConCommon.IconValue();
                        nonConst = gtOp.gtOp1;
                    }
                    else
                    {
                        assert(!gtOp.gtOp1->gtIntCon.ImmedValNeedsReloc(comp));
                        // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntConCommon::gtIconVal had
                        // target_ssize_t type.
                        subMul   = (target_ssize_t)gtOp.gtOp1->gtIntConCommon.IconValue();
                        nonConst = gtOp.gtOp2;
                    }
                }
                else if (gtOp.gtOp2->IsCnsIntOrI())
                {
                    assert(!gtOp.gtOp2->gtIntCon.ImmedValNeedsReloc(comp));
                    // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntConCommon::gtIconVal had
                    // target_ssize_t type.
                    subMul   = (target_ssize_t)gtOp.gtOp2->gtIntConCommon.IconValue();
                    nonConst = gtOp.gtOp1;
                }
                if (nonConst != nullptr)
                {
                    // Fold the constant factor into the multiplier and recurse.
                    nonConst->ParseArrayAddressWork(comp, inputMul * subMul, pArr, pInxVN, pOffset, pFldSeq);
                    return;
                }
                // Otherwise, exit the switch, treat as a contribution to the index.
            }
            break;

            case GT_LSH:
                // If one op is a constant, continue parsing down.
                if (gtOp.gtOp2->IsCnsIntOrI())
                {
                    assert(!gtOp.gtOp2->gtIntCon.ImmedValNeedsReloc(comp));
                    // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntCon::gtIconVal had target_ssize_t
                    // type.
                    target_ssize_t subMul = target_ssize_t{1} << (target_ssize_t)gtOp.gtOp2->gtIntConCommon.IconValue();
                    gtOp.gtOp1->ParseArrayAddressWork(comp, inputMul * subMul, pArr, pInxVN, pOffset, pFldSeq);
                    return;
                }
                // Otherwise, exit the switch, treat as a contribution to the index.
                break;

            case GT_COMMA:
                // We don't care about exceptions for this purpose.
                if ((gtOp.gtOp1->OperGet() == GT_ARR_BOUNDS_CHECK) || gtOp.gtOp1->IsNothingNode())
                {
                    gtOp.gtOp2->ParseArrayAddressWork(comp, inputMul, pArr, pInxVN, pOffset, pFldSeq);
                    return;
                }
                break;

            default:
                break;
        }
        // If we didn't return above, must be a contribution to the non-constant part of the index VN.
        ValueNum vn = comp->GetValueNumStore()->VNLiberalNormalValue(gtVNPair);
        if (inputMul != 1)
        {
            // Scale the contribution by the accumulated multiplier.
            ValueNum mulVN = comp->GetValueNumStore()->VNForLongCon(inputMul);
            vn = comp->GetValueNumStore()->VNForFunc(TypeGet(), GetVNFuncForOper(GT_MUL, VOK_Default), mulVN, vn);
        }
        if (*pInxVN == ValueNumStore::NoVN)
        {
            *pInxVN = vn;
        }
        else
        {
            // Sum with whatever non-constant contributions we've seen so far.
            *pInxVN =
                comp->GetValueNumStore()->VNForFunc(TypeGet(), GetVNFuncForOper(GT_ADD, VOK_Default), *pInxVN, vn);
        }
    }
}
17357
17358 bool GenTree::ParseArrayElemForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq)
17359 {
17360     if (OperIsIndir())
17361     {
17362         if (gtFlags & GTF_IND_ARR_INDEX)
17363         {
17364             bool b = comp->GetArrayInfoMap()->Lookup(this, arrayInfo);
17365             assert(b);
17366             return true;
17367         }
17368
17369         // Otherwise...
17370         GenTree* addr = AsIndir()->Addr();
17371         return addr->ParseArrayElemAddrForm(comp, arrayInfo, pFldSeq);
17372     }
17373     else
17374     {
17375         return false;
17376     }
17377 }
17378
17379 bool GenTree::ParseArrayElemAddrForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq)
17380 {
17381     switch (OperGet())
17382     {
17383         case GT_ADD:
17384         {
17385             GenTree* arrAddr = nullptr;
17386             GenTree* offset  = nullptr;
17387             if (gtOp.gtOp1->TypeGet() == TYP_BYREF)
17388             {
17389                 arrAddr = gtOp.gtOp1;
17390                 offset  = gtOp.gtOp2;
17391             }
17392             else if (gtOp.gtOp2->TypeGet() == TYP_BYREF)
17393             {
17394                 arrAddr = gtOp.gtOp2;
17395                 offset  = gtOp.gtOp1;
17396             }
17397             else
17398             {
17399                 return false;
17400             }
17401             if (!offset->ParseOffsetForm(comp, pFldSeq))
17402             {
17403                 return false;
17404             }
17405             return arrAddr->ParseArrayElemAddrForm(comp, arrayInfo, pFldSeq);
17406         }
17407
17408         case GT_ADDR:
17409         {
17410             GenTree* addrArg = gtOp.gtOp1;
17411             if (addrArg->OperGet() != GT_IND)
17412             {
17413                 return false;
17414             }
17415             else
17416             {
17417                 // The "Addr" node might be annotated with a zero-offset field sequence.
17418                 FieldSeqNode* zeroOffsetFldSeq = nullptr;
17419                 if (comp->GetZeroOffsetFieldMap()->Lookup(this, &zeroOffsetFldSeq))
17420                 {
17421                     *pFldSeq = comp->GetFieldSeqStore()->Append(*pFldSeq, zeroOffsetFldSeq);
17422                 }
17423                 return addrArg->ParseArrayElemForm(comp, arrayInfo, pFldSeq);
17424             }
17425         }
17426
17427         default:
17428             return false;
17429     }
17430 }
17431
17432 bool GenTree::ParseOffsetForm(Compiler* comp, FieldSeqNode** pFldSeq)
17433 {
17434     switch (OperGet())
17435     {
17436         case GT_CNS_INT:
17437         {
17438             GenTreeIntCon* icon = AsIntCon();
17439             *pFldSeq            = comp->GetFieldSeqStore()->Append(*pFldSeq, icon->gtFieldSeq);
17440             return true;
17441         }
17442
17443         case GT_ADD:
17444             if (!gtOp.gtOp1->ParseOffsetForm(comp, pFldSeq))
17445             {
17446                 return false;
17447             }
17448             return gtOp.gtOp2->ParseOffsetForm(comp, pFldSeq);
17449
17450         default:
17451             return false;
17452     }
17453 }
17454
//------------------------------------------------------------------------
// LabelIndex: Annotate the nodes of this tree (an array index expression)
//    so later phases can recognize its parts.
//
// Arguments:
//    comp    - compiler instance
//    isConst - true if this subtree may still be a purely constant
//              contribution to the index
//
void GenTree::LabelIndex(Compiler* comp, bool isConst)
{
    switch (OperGet())
    {
        case GT_CNS_INT:
            // If we got here, this is a contribution to the constant part of the index.
            if (isConst)
            {
                gtIntCon.gtFieldSeq =
                    comp->GetFieldSeqStore()->CreateSingleton(FieldSeqStore::ConstantIndexPseudoField);
            }
            return;

        case GT_LCL_VAR:
            // Mark locals that feed an array index.
            gtFlags |= GTF_VAR_ARR_INDEX;
            return;

        case GT_ADD:
        case GT_SUB:
            gtOp.gtOp1->LabelIndex(comp, isConst);
            gtOp.gtOp2->LabelIndex(comp, isConst);
            break;

        case GT_CAST:
            // A cast is transparent for labeling purposes.
            gtOp.gtOp1->LabelIndex(comp, isConst);
            break;

        case GT_ARR_LENGTH:
            // Mark array length nodes used as (part of) an index.
            gtFlags |= GTF_ARRLEN_ARR_IDX;
            return;

        default:
            // For all other operators, peel off one constant; and then label the other if it's also a constant.
            if (OperIsArithmetic() || OperIsCompare())
            {
                if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
                {
                    gtOp.gtOp1->LabelIndex(comp, isConst);
                    break;
                }
                else if (gtOp.gtOp1->OperGet() == GT_CNS_INT)
                {
                    gtOp.gtOp2->LabelIndex(comp, isConst);
                    break;
                }
                // Otherwise continue downward on both, labeling vars.
                gtOp.gtOp1->LabelIndex(comp, false);
                gtOp.gtOp2->LabelIndex(comp, false);
            }
            break;
    }
}
17507
// Note that the value of the below field doesn't matter; it exists only to provide a distinguished address.
// Field sequences are compared by pointer identity, so this sentinel's address serves as "not a field".
//
// static
FieldSeqNode FieldSeqStore::s_notAField(nullptr, nullptr);
17512
17513 // FieldSeqStore methods.
// Constructor: the canonicalization map interns FieldSeqNodes so that pointer
// identity implies field-sequence identity; both map and nodes use 'alloc'.
FieldSeqStore::FieldSeqStore(CompAllocator alloc) : m_alloc(alloc), m_canonMap(new (alloc) FieldSeqNodeCanonMap(alloc))
{
}
17517
17518 FieldSeqNode* FieldSeqStore::CreateSingleton(CORINFO_FIELD_HANDLE fieldHnd)
17519 {
17520     FieldSeqNode  fsn(fieldHnd, nullptr);
17521     FieldSeqNode* res = nullptr;
17522     if (m_canonMap->Lookup(fsn, &res))
17523     {
17524         return res;
17525     }
17526     else
17527     {
17528         res  = m_alloc.allocate<FieldSeqNode>(1);
17529         *res = fsn;
17530         m_canonMap->Set(fsn, res);
17531         return res;
17532     }
17533 }
17534
17535 FieldSeqNode* FieldSeqStore::Append(FieldSeqNode* a, FieldSeqNode* b)
17536 {
17537     if (a == nullptr)
17538     {
17539         return b;
17540     }
17541     else if (a == NotAField())
17542     {
17543         return NotAField();
17544     }
17545     else if (b == nullptr)
17546     {
17547         return a;
17548     }
17549     else if (b == NotAField())
17550     {
17551         return NotAField();
17552         // Extremely special case for ConstantIndex pseudo-fields -- appending consecutive such
17553         // together collapse to one.
17554     }
17555     else if (a->m_next == nullptr && a->m_fieldHnd == ConstantIndexPseudoField &&
17556              b->m_fieldHnd == ConstantIndexPseudoField)
17557     {
17558         return b;
17559     }
17560     else
17561     {
17562         FieldSeqNode* tmp = Append(a->m_next, b);
17563         FieldSeqNode  fsn(a->m_fieldHnd, tmp);
17564         FieldSeqNode* res = nullptr;
17565         if (m_canonMap->Lookup(fsn, &res))
17566         {
17567             return res;
17568         }
17569         else
17570         {
17571             res  = m_alloc.allocate<FieldSeqNode>(1);
17572             *res = fsn;
17573             m_canonMap->Set(fsn, res);
17574             return res;
17575         }
17576     }
17577 }
17578
// Static vars.
// The two ints below are never read; their addresses serve as distinguished
// CORINFO_FIELD_HANDLE values for the "first array element" and "constant index"
// pseudo-fields used in field sequences.
int FieldSeqStore::FirstElemPseudoFieldStruct;
int FieldSeqStore::ConstantIndexPseudoFieldStruct;

CORINFO_FIELD_HANDLE FieldSeqStore::FirstElemPseudoField =
    (CORINFO_FIELD_HANDLE)&FieldSeqStore::FirstElemPseudoFieldStruct;
CORINFO_FIELD_HANDLE FieldSeqStore::ConstantIndexPseudoField =
    (CORINFO_FIELD_HANDLE)&FieldSeqStore::ConstantIndexPseudoFieldStruct;
17587
17588 bool FieldSeqNode::IsFirstElemFieldSeq()
17589 {
17590     // this must be non-null per ISO C++
17591     return m_fieldHnd == FieldSeqStore::FirstElemPseudoField;
17592 }
17593
17594 bool FieldSeqNode::IsConstantIndexFieldSeq()
17595 {
17596     // this must be non-null per ISO C++
17597     return m_fieldHnd == FieldSeqStore::ConstantIndexPseudoField;
17598 }
17599
// Returns true if this node is one of the two pseudo-fields (FirstElem or ConstantIndex).
bool FieldSeqNode::IsPseudoField()
{
    // NOTE(review): comparing "this" against nullptr is undefined behavior per ISO C++.
    // Callers evidently invoke this on possibly-null sequences, so the check is kept;
    // consider converting this to a static helper taking a FieldSeqNode* instead.
    if (this == nullptr)
    {
        return false;
    }
    return m_fieldHnd == FieldSeqStore::FirstElemPseudoField || m_fieldHnd == FieldSeqStore::ConstantIndexPseudoField;
}
17608
17609 #ifdef FEATURE_SIMD
// gtNewSIMDNode: Create a new unary GT_SIMD node, marking any local referenced
// by op1 as used by a SIMD intrinsic.
GenTreeSIMD* Compiler::gtNewSIMDNode(
    var_types type, GenTree* op1, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
{
    assert(op1 != nullptr);
    SetOpLclRelatedToSIMDIntrinsic(op1);

    return new (this, GT_SIMD) GenTreeSIMD(type, op1, simdIntrinsicID, baseType, size);
}
17618
// gtNewSIMDNode: Create a new binary GT_SIMD node, marking any locals referenced
// by op1/op2 as used by a SIMD intrinsic. op2 may be nullptr.
GenTreeSIMD* Compiler::gtNewSIMDNode(
    var_types type, GenTree* op1, GenTree* op2, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
{
    assert(op1 != nullptr);
    SetOpLclRelatedToSIMDIntrinsic(op1);
    SetOpLclRelatedToSIMDIntrinsic(op2);

    return new (this, GT_SIMD) GenTreeSIMD(type, op1, op2, simdIntrinsicID, baseType, size);
}
17628
17629 //-------------------------------------------------------------------
17630 // SetOpLclRelatedToSIMDIntrinsic: Determine if the tree has a local var that needs to be set
17631 // as used by a SIMD intrinsic, and if so, set that local var appropriately.
17632 //
17633 // Arguments:
17634 //     op - The tree, to be an operand of a new GT_SIMD node, to check.
17635 //
17636 void Compiler::SetOpLclRelatedToSIMDIntrinsic(GenTree* op)
17637 {
17638     if (op != nullptr)
17639     {
17640         if (op->OperIsLocal())
17641         {
17642             setLclRelatedToSIMDIntrinsic(op);
17643         }
17644         else if ((op->OperGet() == GT_OBJ) && (op->gtOp.gtOp1->OperGet() == GT_ADDR) &&
17645                  op->gtOp.gtOp1->gtOp.gtOp1->OperIsLocal())
17646         {
17647             setLclRelatedToSIMDIntrinsic(op->gtOp.gtOp1->gtOp.gtOp1);
17648         }
17649     }
17650 }
17651
// Returns true if this GT_SIMD node's intrinsic is commutative (its two operands
// may be swapped without changing the result).
bool GenTree::isCommutativeSIMDIntrinsic()
{
    assert(gtOper == GT_SIMD);
    switch (AsSIMD()->gtSIMDIntrinsicID)
    {
        case SIMDIntrinsicAdd:
        case SIMDIntrinsicBitwiseAnd:
        case SIMDIntrinsicBitwiseOr:
        case SIMDIntrinsicBitwiseXor:
        case SIMDIntrinsicEqual:
        case SIMDIntrinsicMax:
        case SIMDIntrinsicMin:
        case SIMDIntrinsicMul:
        case SIMDIntrinsicOpEquality:
        case SIMDIntrinsicOpInEquality:
            return true;
        default:
            return false;
    }
}
17672 #endif // FEATURE_SIMD
17673
17674 #ifdef FEATURE_HW_INTRINSICS
// Returns true if this GT_HWIntrinsic node's intrinsic is commutative.
bool GenTree::isCommutativeHWIntrinsic() const
{
    assert(gtOper == GT_HWIntrinsic);

#ifdef _TARGET_XARCH_
    // On xarch, commutativity is recorded in the intrinsic info table.
    return HWIntrinsicInfo::IsCommutative(AsHWIntrinsic()->gtHWIntrinsicId);
#else
    return false;
#endif // _TARGET_XARCH_
}
17685
// Returns true if this GT_HWIntrinsic node may be contained by its user, i.e.
// folded into the consuming instruction (only the plain vector loads qualify).
bool GenTree::isContainableHWIntrinsic() const
{
    assert(gtOper == GT_HWIntrinsic);

#ifdef _TARGET_XARCH_
    switch (AsHWIntrinsic()->gtHWIntrinsicId)
    {
        // These loads can become the memory operand of the consuming instruction.
        case NI_SSE_LoadAlignedVector128:
        case NI_SSE_LoadScalarVector128:
        case NI_SSE_LoadVector128:
        case NI_SSE2_LoadAlignedVector128:
        case NI_SSE2_LoadScalarVector128:
        case NI_SSE2_LoadVector128:
        case NI_AVX_LoadAlignedVector256:
        case NI_AVX_LoadVector256:
        {
            return true;
        }

        default:
        {
            return false;
        }
    }
#else
    return false;
#endif // _TARGET_XARCH_
}
17714
// Returns true if this GT_HWIntrinsic node has read-modify-write semantics
// (the destination register is also one of the sources) under the current
// compilation's encoding constraints.
bool GenTree::isRMWHWIntrinsic(Compiler* comp)
{
    assert(gtOper == GT_HWIntrinsic);
    assert(comp != nullptr);

#ifdef _TARGET_XARCH_
    if (!comp->canUseVexEncoding())
    {
        // Legacy (non-VEX) encodings are two-operand, so RMW-ness is a fixed
        // property of the instruction, recorded in the info table.
        return HWIntrinsicInfo::HasRMWSemantics(AsHWIntrinsic()->gtHWIntrinsicId);
    }

    // With VEX, most instructions become three-operand and are no longer RMW;
    // the cases below still overwrite one of their sources.
    switch (AsHWIntrinsic()->gtHWIntrinsicId)
    {
        // TODO-XArch-Cleanup: Move this switch block to be table driven.

        case NI_SSE42_Crc32:
        case NI_SSE42_X64_Crc32:
        case NI_FMA_MultiplyAdd:
        case NI_FMA_MultiplyAddNegated:
        case NI_FMA_MultiplyAddNegatedScalar:
        case NI_FMA_MultiplyAddScalar:
        case NI_FMA_MultiplyAddSubtract:
        case NI_FMA_MultiplySubtract:
        case NI_FMA_MultiplySubtractAdd:
        case NI_FMA_MultiplySubtractNegated:
        case NI_FMA_MultiplySubtractNegatedScalar:
        case NI_FMA_MultiplySubtractScalar:
        {
            return true;
        }

        default:
        {
            return false;
        }
    }
#else
    return false;
#endif // _TARGET_XARCH_
}
17755
// gtNewSimdHWIntrinsicNode: Create a SIMD hardware-intrinsic node with no operands.
GenTreeHWIntrinsic* Compiler::gtNewSimdHWIntrinsicNode(var_types      type,
                                                       NamedIntrinsic hwIntrinsicID,
                                                       var_types      baseType,
                                                       unsigned       size)
{
    return new (this, GT_HWIntrinsic) GenTreeHWIntrinsic(type, hwIntrinsicID, baseType, size);
}
17763
// gtNewSimdHWIntrinsicNode: Create a unary SIMD hardware-intrinsic node,
// marking any local referenced by op1 as used by a SIMD intrinsic.
GenTreeHWIntrinsic* Compiler::gtNewSimdHWIntrinsicNode(
    var_types type, GenTree* op1, NamedIntrinsic hwIntrinsicID, var_types baseType, unsigned simdSize)
{
    SetOpLclRelatedToSIMDIntrinsic(op1);

    return new (this, GT_HWIntrinsic) GenTreeHWIntrinsic(type, op1, hwIntrinsicID, baseType, simdSize);
}
17771
// gtNewSimdHWIntrinsicNode: Create a binary SIMD hardware-intrinsic node,
// marking any locals referenced by op1/op2 as used by a SIMD intrinsic.
GenTreeHWIntrinsic* Compiler::gtNewSimdHWIntrinsicNode(
    var_types type, GenTree* op1, GenTree* op2, NamedIntrinsic hwIntrinsicID, var_types baseType, unsigned simdSize)
{
    SetOpLclRelatedToSIMDIntrinsic(op1);
    SetOpLclRelatedToSIMDIntrinsic(op2);

    return new (this, GT_HWIntrinsic) GenTreeHWIntrinsic(type, op1, op2, hwIntrinsicID, baseType, simdSize);
}
17780
// gtNewSimdHWIntrinsicNode: Create a ternary SIMD hardware-intrinsic node;
// the three operands are wrapped in a GT_LIST argument list.
GenTreeHWIntrinsic* Compiler::gtNewSimdHWIntrinsicNode(var_types      type,
                                                       GenTree*       op1,
                                                       GenTree*       op2,
                                                       GenTree*       op3,
                                                       NamedIntrinsic hwIntrinsicID,
                                                       var_types      baseType,
                                                       unsigned       size)
{
    SetOpLclRelatedToSIMDIntrinsic(op1);
    SetOpLclRelatedToSIMDIntrinsic(op2);
    SetOpLclRelatedToSIMDIntrinsic(op3);

    return new (this, GT_HWIntrinsic)
        GenTreeHWIntrinsic(type, gtNewArgList(op1, op2, op3), hwIntrinsicID, baseType, size);
}
17796
// gtNewSimdHWIntrinsicNode: Create a four-operand SIMD hardware-intrinsic node;
// the operands are wrapped in a GT_LIST argument list.
GenTreeHWIntrinsic* Compiler::gtNewSimdHWIntrinsicNode(var_types      type,
                                                       GenTree*       op1,
                                                       GenTree*       op2,
                                                       GenTree*       op3,
                                                       GenTree*       op4,
                                                       NamedIntrinsic hwIntrinsicID,
                                                       var_types      baseType,
                                                       unsigned       size)
{
    SetOpLclRelatedToSIMDIntrinsic(op1);
    SetOpLclRelatedToSIMDIntrinsic(op2);
    SetOpLclRelatedToSIMDIntrinsic(op3);
    SetOpLclRelatedToSIMDIntrinsic(op4);

    return new (this, GT_HWIntrinsic)
        GenTreeHWIntrinsic(type, gtNewArgList(op1, op2, op3, op4), hwIntrinsicID, baseType, size);
}
17814
// gtNewScalarHWIntrinsicNode: Create a unary scalar hardware-intrinsic node
// (baseType TYP_UNKNOWN, simd size 0).
GenTreeHWIntrinsic* Compiler::gtNewScalarHWIntrinsicNode(var_types type, GenTree* op1, NamedIntrinsic hwIntrinsicID)
{
    SetOpLclRelatedToSIMDIntrinsic(op1);

    return new (this, GT_HWIntrinsic) GenTreeHWIntrinsic(type, op1, hwIntrinsicID, TYP_UNKNOWN, 0);
}
17821
// gtNewScalarHWIntrinsicNode: Create a binary scalar hardware-intrinsic node
// (baseType TYP_UNKNOWN, simd size 0).
GenTreeHWIntrinsic* Compiler::gtNewScalarHWIntrinsicNode(var_types      type,
                                                         GenTree*       op1,
                                                         GenTree*       op2,
                                                         NamedIntrinsic hwIntrinsicID)
{
    SetOpLclRelatedToSIMDIntrinsic(op1);
    SetOpLclRelatedToSIMDIntrinsic(op2);

    return new (this, GT_HWIntrinsic) GenTreeHWIntrinsic(type, op1, op2, hwIntrinsicID, TYP_UNKNOWN, 0);
}
17832
// gtNewScalarHWIntrinsicNode: Create a ternary scalar hardware-intrinsic node;
// the operands are wrapped in a GT_LIST argument list.
GenTreeHWIntrinsic* Compiler::gtNewScalarHWIntrinsicNode(
    var_types type, GenTree* op1, GenTree* op2, GenTree* op3, NamedIntrinsic hwIntrinsicID)
{
    SetOpLclRelatedToSIMDIntrinsic(op1);
    SetOpLclRelatedToSIMDIntrinsic(op2);
    SetOpLclRelatedToSIMDIntrinsic(op3);

    return new (this, GT_HWIntrinsic)
        GenTreeHWIntrinsic(type, gtNewArgList(op1, op2, op3), hwIntrinsicID, TYP_UNKNOWN, 0);
}
17843
17844 //---------------------------------------------------------------------------------------
17845 // gtNewMustThrowException:
17846 //    create a throw node (calling into JIT helper) that must be thrown.
17847 //    The result would be a comma node: COMMA(jithelperthrow(void), x) where x's type should be specified.
17848 //
17849 // Arguments
17850 //    helper      -  JIT helper ID
17851 //    type        -  return type of the node
17852 //
17853 // Return Value
17854 //    pointer to the throw node
17855 //
GenTree* Compiler::gtNewMustThrowException(unsigned helper, var_types type, CORINFO_CLASS_HANDLE clsHnd)
{
    GenTreeCall* node = gtNewHelperCallNode(helper, TYP_VOID);
    node->gtCallMoreFlags |= GTF_CALL_M_DOES_NOT_RETURN;
    if (type != TYP_VOID)
    {
        // The helper call never returns, but the tree must still have the requested
        // type; an uninitialized dummy local supplies the COMMA's value operand.
        unsigned dummyTemp = lvaGrabTemp(true DEBUGARG("dummy temp of must thrown exception"));
        if (type == TYP_STRUCT)
        {
            lvaSetStruct(dummyTemp, clsHnd, false);
            type = lvaTable[dummyTemp].lvType; // struct type is normalized
        }
        else
        {
            lvaTable[dummyTemp].lvType = type;
        }
        GenTree* dummyNode = gtNewLclvNode(dummyTemp, type);
        return gtNewOperNode(GT_COMMA, type, node, dummyNode);
    }
    // Void-typed: the bare helper call suffices.
    return node;
}
17877
// Returns true for the HW Instrinsic instructions that have MemoryLoad semantics, false otherwise
bool GenTreeHWIntrinsic::OperIsMemoryLoad()
{
#ifdef _TARGET_XARCH_
    // Some xarch instructions have MemoryLoad sematics
    HWIntrinsicCategory category = HWIntrinsicInfo::lookupCategory(gtHWIntrinsicId);
    if (category == HW_Category_MemoryLoad)
    {
        return true;
    }
    else if (HWIntrinsicInfo::MaybeMemoryLoad(gtHWIntrinsicId))
    {
        // Some intrinsics (without HW_Category_MemoryLoad) also have MemoryLoad semantics

        if (category == HW_Category_SIMDScalar)
        {
            // Avx2.BroadcastScalarToVector128/256 have vector and pointer overloads both, e.g.,
            // Vector128<byte> BroadcastScalarToVector128(Vector128<byte> value)
            // Vector128<byte> BroadcastScalarToVector128(byte* source)
            // So, we need to check the argument's type is memory-reference or Vector128
            assert(HWIntrinsicInfo::lookupNumArgs(this) == 1);
            return (gtHWIntrinsicId == NI_AVX2_BroadcastScalarToVector128 ||
                    gtHWIntrinsicId == NI_AVX2_BroadcastScalarToVector256) &&
                   gtOp.gtOp1->TypeGet() != TYP_SIMD16;
        }
        else if (category == HW_Category_IMM)
        {
            // Do we have less than 3 operands?
            if (HWIntrinsicInfo::lookupNumArgs(this) < 3)
            {
                return false;
            }
            else if (HWIntrinsicInfo::isAVX2GatherIntrinsic(gtHWIntrinsicId))
            {
                // AVX2 gathers take 3+ operands and load their elements from memory.
                return true;
            }
        }
    }
#endif // _TARGET_XARCH_
    return false;
}
17919
// Returns true for the HW Instrinsic instructions that have MemoryStore semantics, false otherwise
bool GenTreeHWIntrinsic::OperIsMemoryStore()
{
#ifdef _TARGET_XARCH_
    // Some xarch instructions have MemoryStore sematics
    HWIntrinsicCategory category = HWIntrinsicInfo::lookupCategory(gtHWIntrinsicId);
    if (category == HW_Category_MemoryStore)
    {
        return true;
    }
    else if (HWIntrinsicInfo::MaybeMemoryStore(gtHWIntrinsicId) &&
             (category == HW_Category_IMM || category == HW_Category_Scalar))
    {
        // Some intrinsics (without HW_Category_MemoryStore) also have MemoryStore semantics

        // Bmi2/Bmi2.X64.MultiplyNoFlags may return the lower half result by a out argument
        // unsafe ulong MultiplyNoFlags(ulong left, ulong right, ulong* low)
        //
        // So, the 3-argument form is MemoryStore
        if (HWIntrinsicInfo::lookupNumArgs(this) == 3)
        {
            switch (gtHWIntrinsicId)
            {
                case NI_BMI2_MultiplyNoFlags:
                case NI_BMI2_X64_MultiplyNoFlags:
                    return true;
                default:
                    return false;
            }
        }
    }
#endif // _TARGET_XARCH_
    return false;
}
17954
// Returns true for the HW Instrinsic instructions that have MemoryLoad or MemoryStore semantics, false otherwise
bool GenTreeHWIntrinsic::OperIsMemoryLoadOrStore()
{
#ifdef _TARGET_XARCH_
    return OperIsMemoryLoad() || OperIsMemoryStore();
#endif // _TARGET_XARCH_
    return false;
}
17963
17964 #endif // FEATURE_HW_INTRINSICS
17965
17966 //---------------------------------------------------------------------------------------
17967 // InitializeStructReturnType:
17968 //    Initialize the Return Type Descriptor for a method that returns a struct type
17969 //
17970 // Arguments
17971 //    comp        -  Compiler Instance
17972 //    retClsHnd   -  VM handle to the struct type returned by the method
17973 //
17974 // Return Value
17975 //    None
17976 //
void ReturnTypeDesc::InitializeStructReturnType(Compiler* comp, CORINFO_CLASS_HANDLE retClsHnd)
{
    assert(!m_inited);

#if FEATURE_MULTIREG_RET

    assert(retClsHnd != NO_CLASS_HANDLE);
    unsigned structSize = comp->info.compCompHnd->getClassSize(retClsHnd);

    // Ask the target ABI how this struct is returned; returnType is the normalized type, if any.
    Compiler::structPassingKind howToReturnStruct;
    var_types                   returnType = comp->getReturnTypeForStruct(retClsHnd, &howToReturnStruct, structSize);

    switch (howToReturnStruct)
    {
        case Compiler::SPK_EnclosingType:
            m_isEnclosingType = true;
            __fallthrough;

        case Compiler::SPK_PrimitiveType:
        {
            // Returned in a single register of primitive type.
            assert(returnType != TYP_UNKNOWN);
            assert(!varTypeIsStruct(returnType));
            m_regType[0] = returnType;
            break;
        }

        case Compiler::SPK_ByValueAsHfa:
        {
            assert(varTypeIsStruct(returnType));
            var_types hfaType = comp->GetHfaType(retClsHnd);

            // We should have an hfa struct type
            assert(varTypeIsFloating(hfaType));

            // Note that the retail build issues a warning about a potential divsion by zero without this Max function
            unsigned elemSize = Max((unsigned)1, EA_SIZE_IN_BYTES(emitActualTypeSize(hfaType)));

            // The size of this struct should be evenly divisible by elemSize
            assert((structSize % elemSize) == 0);

            // One floating-point return register per HFA element.
            unsigned hfaCount = (structSize / elemSize);
            for (unsigned i = 0; i < hfaCount; ++i)
            {
                m_regType[i] = hfaType;
            }

            if (comp->compFloatingPointUsed == false)
            {
                comp->compFloatingPointUsed = true;
            }
            break;
        }

        case Compiler::SPK_ByValue:
        {
            assert(varTypeIsStruct(returnType));

#ifdef UNIX_AMD64_ABI

            // SysV AMD64: classification assigns each eightbyte to an INT or SSE register.
            SYSTEMV_AMD64_CORINFO_STRUCT_REG_PASSING_DESCRIPTOR structDesc;
            comp->eeGetSystemVAmd64PassStructInRegisterDescriptor(retClsHnd, &structDesc);

            assert(structDesc.passedInRegisters);
            for (int i = 0; i < structDesc.eightByteCount; i++)
            {
                assert(i < MAX_RET_REG_COUNT);
                m_regType[i] = comp->GetEightByteType(structDesc, i);
            }

#elif defined(_TARGET_ARM64_)

            // a non-HFA struct returned using two registers
            //
            assert((structSize > TARGET_POINTER_SIZE) && (structSize <= (2 * TARGET_POINTER_SIZE)));

            // Query the GC layout so GC references land in correctly-typed registers.
            BYTE gcPtrs[2] = {TYPE_GC_NONE, TYPE_GC_NONE};
            comp->info.compCompHnd->getClassGClayout(retClsHnd, &gcPtrs[0]);
            for (unsigned i = 0; i < 2; ++i)
            {
                m_regType[i] = comp->getJitGCType(gcPtrs[i]);
            }

#else //  _TARGET_XXX_

            // This target needs support here!
            //
            NYI("Unsupported TARGET returning a TYP_STRUCT in InitializeStructReturnType");

#endif // UNIX_AMD64_ABI

            break; // for case SPK_ByValue
        }

        case Compiler::SPK_ByReference:

            // We are returning using the return buffer argument
            // There are no return registers
            break;

        default:

            unreached(); // By the contract of getReturnTypeForStruct we should never get here.

    } // end of switch (howToReturnStruct)

#endif //  FEATURE_MULTIREG_RET

#ifdef DEBUG
    m_inited = true;
#endif
}
18088
18089 //---------------------------------------------------------------------------------------
18090 // InitializeLongReturnType:
18091 //    Initialize the Return Type Descriptor for a method that returns a TYP_LONG
18092 //
18093 // Arguments
18094 //    comp        -  Compiler Instance
18095 //
18096 // Return Value
18097 //    None
18098 //
void ReturnTypeDesc::InitializeLongReturnType(Compiler* comp)
{
#if defined(_TARGET_X86_) || defined(_TARGET_ARM_)

    // Setups up a ReturnTypeDesc for returning a long using two registers
    //
    assert(MAX_RET_REG_COUNT >= 2);
    m_regType[0] = TYP_INT;
    m_regType[1] = TYP_INT;

#else // not (_TARGET_X86_ or _TARGET_ARM_)

    // 64-bit targets return a long in a single register.
    m_regType[0] = TYP_LONG;

#endif // _TARGET_X86_ or _TARGET_ARM_

#ifdef DEBUG
    m_inited = true;
#endif
}
18119
18120 //-------------------------------------------------------------------
18121 // GetABIReturnReg:  Return ith return register as per target ABI
18122 //
18123 // Arguments:
18124 //     idx   -   Index of the return register.
18125 //               The first return register has an index of 0 and so on.
18126 //
18127 // Return Value:
18128 //     Returns ith return register as per target ABI.
18129 //
18130 // Notes:
18131 //     x86 and ARM return long in multiple registers.
18132 //     ARM and ARM64 return HFA struct in multiple registers.
18133 //
regNumber ReturnTypeDesc::GetABIReturnReg(unsigned idx)
{
    unsigned count = GetReturnRegCount();
    assert(idx < count);

    regNumber resultReg = REG_NA;

#ifdef UNIX_AMD64_ABI
    var_types regType0 = GetReturnRegType(0);

    if (idx == 0)
    {
        if (varTypeIsIntegralOrI(regType0))
        {
            resultReg = REG_INTRET;
        }
        else
        {
            noway_assert(varTypeIsFloating(regType0));
            resultReg = REG_FLOATRET;
        }
    }
    else if (idx == 1)
    {
        var_types regType1 = GetReturnRegType(1);

        // The second eightbyte takes the first register of its class not
        // already consumed by the first eightbyte.
        if (varTypeIsIntegralOrI(regType1))
        {
            if (varTypeIsIntegralOrI(regType0))
            {
                resultReg = REG_INTRET_1;
            }
            else
            {
                resultReg = REG_INTRET;
            }
        }
        else
        {
            noway_assert(varTypeIsFloating(regType1));

            if (varTypeIsFloating(regType0))
            {
                resultReg = REG_FLOATRET_1;
            }
            else
            {
                resultReg = REG_FLOATRET;
            }
        }
    }

#elif defined(_TARGET_X86_)

    // x86 multi-register returns are longs, split across two registers.
    if (idx == 0)
    {
        resultReg = REG_LNGRET_LO;
    }
    else if (idx == 1)
    {
        resultReg = REG_LNGRET_HI;
    }

#elif defined(_TARGET_ARM_)

    var_types regType = GetReturnRegType(idx);
    if (varTypeIsIntegralOrI(regType))
    {
        // Ints are returned in one return register.
        // Longs are returned in two return registers.
        if (idx == 0)
        {
            resultReg = REG_LNGRET_LO;
        }
        else if (idx == 1)
        {
            resultReg = REG_LNGRET_HI;
        }
    }
    else
    {
        // Floats are returned in one return register (f0).
        // Doubles are returned in one return register (d0).
        // Structs are returned in four registers with HFAs.
        assert(idx < MAX_RET_REG_COUNT); // Up to 4 return registers for HFA's
        if (regType == TYP_DOUBLE)
        {
            resultReg = (regNumber)((unsigned)(REG_FLOATRET) + idx * 2); // d0, d1, d2 or d3
        }
        else
        {
            resultReg = (regNumber)((unsigned)(REG_FLOATRET) + idx); // f0, f1, f2 or f3
        }
    }

#elif defined(_TARGET_ARM64_)

    var_types regType = GetReturnRegType(idx);
    if (varTypeIsIntegralOrI(regType))
    {
        noway_assert(idx < 2);                              // Up to 2 return registers for 16-byte structs
        resultReg = (idx == 0) ? REG_INTRET : REG_INTRET_1; // X0 or X1
    }
    else
    {
        noway_assert(idx < 4);                                   // Up to 4 return registers for HFA's
        resultReg = (regNumber)((unsigned)(REG_FLOATRET) + idx); // V0, V1, V2 or V3
    }

#endif // TARGET_XXX

    assert(resultReg != REG_NA);
    return resultReg;
}
18248
18249 //--------------------------------------------------------------------------------
18250 // GetABIReturnRegs: get the mask of return registers as per target arch ABI.
18251 //
18252 // Arguments:
18253 //    None
18254 //
18255 // Return Value:
18256 //    reg mask of return registers in which the return type is returned.
18257 //
18258 // Note:
18259 //    This routine can be used when the caller is not particular about the order
18260 //    of return registers and wants to know the set of return registers.
18261 //
18262 // static
18263 regMaskTP ReturnTypeDesc::GetABIReturnRegs()
18264 {
18265     regMaskTP resultMask = RBM_NONE;
18266
18267     unsigned count = GetReturnRegCount();
18268     for (unsigned i = 0; i < count; ++i)
18269     {
18270         resultMask |= genRegMask(GetABIReturnReg(i));
18271     }
18272
18273     return resultMask;
18274 }
18275
18276 //------------------------------------------------------------------------
18277 // The following functions manage the gtRsvdRegs set of temporary registers
18278 // created by LSRA during code generation.
18279
18280 //------------------------------------------------------------------------
18281 // AvailableTempRegCount: return the number of available temporary registers in the (optional) given set
18282 // (typically, RBM_ALLINT or RBM_ALLFLOAT).
18283 //
18284 // Arguments:
18285 //    mask - (optional) Check for available temporary registers only in this set.
18286 //
18287 // Return Value:
18288 //    Count of available temporary registers in given set.
18289 //
18290 unsigned GenTree::AvailableTempRegCount(regMaskTP mask /* = (regMaskTP)-1 */) const
18291 {
18292     return genCountBits(gtRsvdRegs & mask);
18293 }
18294
18295 //------------------------------------------------------------------------
18296 // GetSingleTempReg: There is expected to be exactly one available temporary register
18297 // in the given mask in the gtRsvdRegs set. Get that register. No future calls to get
18298 // a temporary register are expected. Removes the register from the set, but only in
18299 // DEBUG to avoid doing unnecessary work in non-DEBUG builds.
18300 //
18301 // Arguments:
18302 //    mask - (optional) Get an available temporary register only in this set.
18303 //
18304 // Return Value:
18305 //    Available temporary register in given mask.
18306 //
regNumber GenTree::GetSingleTempReg(regMaskTP mask /* = (regMaskTP)-1 */)
{
    regMaskTP availableSet = gtRsvdRegs & mask;
    // Exactly one temporary register must be available in the requested set.
    assert(genCountBits(availableSet) == 1);
    regNumber tempReg = genRegNumFromMask(availableSet);
    INDEBUG(gtRsvdRegs &= ~availableSet;) // Remove the register from the set, so it can't be used again.
    return tempReg;
}
18315
18316 //------------------------------------------------------------------------
18317 // ExtractTempReg: Find the lowest number temporary register from the gtRsvdRegs set
18318 // that is also in the optional given mask (typically, RBM_ALLINT or RBM_ALLFLOAT),
18319 // and return it. Remove this register from the temporary register set, so it won't
18320 // be returned again.
18321 //
18322 // Arguments:
18323 //    mask - (optional) Extract an available temporary register only in this set.
18324 //
18325 // Return Value:
18326 //    Available temporary register in given mask.
18327 //
18328 regNumber GenTree::ExtractTempReg(regMaskTP mask /* = (regMaskTP)-1 */)
18329 {
18330     regMaskTP availableSet = gtRsvdRegs & mask;
18331     assert(genCountBits(availableSet) >= 1);
18332     regMaskTP tempRegMask = genFindLowestBit(availableSet);
18333     gtRsvdRegs &= ~tempRegMask;
18334     return genRegNumFromMask(tempRegMask);
18335 }