Make GT_LIST processing non-recursive to avoid StackOverflow.
[platform/upstream/coreclr.git] / src / jit / gentree.cpp
1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
4
5 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
7 XX                                                                           XX
8 XX                               GenTree                                     XX
9 XX                                                                           XX
10 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
11 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
12 */
13
14 #include "jitpch.h"
15 #include "simd.h"
16
17 #ifdef _MSC_VER
18 #pragma hdrstop
19 #endif
20
21 /*****************************************************************************/
22
23 const unsigned short GenTree::gtOperKindTable[] = {
24 #define GTNODE(en, sn, cm, ok) ok + GTK_COMMUTE *cm,
25 #include "gtlist.h"
26 };
27
28 /*****************************************************************************/
29 // static
30 genTreeOps GenTree::OpAsgToOper(genTreeOps op)
31 {
32     // Precondition.
33     assert(OperIsAssignment(op) && op != GT_ASG);
34     switch (op)
35     {
36         case GT_ASG_ADD:
37             return GT_ADD;
38         case GT_ASG_SUB:
39             return GT_SUB;
40         case GT_ASG_MUL:
41             return GT_MUL;
42         case GT_ASG_DIV:
43             return GT_DIV;
44         case GT_ASG_MOD:
45             return GT_MOD;
46
47         case GT_ASG_UDIV:
48             return GT_UDIV;
49         case GT_ASG_UMOD:
50             return GT_UMOD;
51
52         case GT_ASG_OR:
53             return GT_OR;
54         case GT_ASG_XOR:
55             return GT_XOR;
56         case GT_ASG_AND:
57             return GT_AND;
58         case GT_ASG_LSH:
59             return GT_LSH;
60         case GT_ASG_RSH:
61             return GT_RSH;
62         case GT_ASG_RSZ:
63             return GT_RSZ;
64
65         case GT_CHS:
66             return GT_NEG;
67
68         default:
69             unreached(); // Precondition implies we don't get here.
70     }
71 }
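// A minimal usage sketch (hypothetical caller): when expanding an op-assign form such
// as 'x += y' into 'x = x + y', the plain binary operator can be obtained from the
// op-assign operator via OpAsgToOper. Only the mapping itself comes from the switch
// above; the surrounding expansion is assumed.
//
//     genTreeOps binOp = GenTree::OpAsgToOper(GT_ASG_ADD); // yields GT_ADD
//     assert(binOp == GT_ADD);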
72
73 /*****************************************************************************
74  *
75  *  The types of different GenTree nodes
76  */
77
78 #ifdef DEBUG
79
80 #define INDENT_SIZE 3
81
82 //--------------------------------------------
83 //
84 // IndentStack: This struct is used, along with its related enums and strings,
85 //    to control both the indentation and the printing of arcs.
86 //
87 // Notes:
88 //    The mode of printing is set in the Constructor, using its 'compiler' argument.
89 //    Currently it only prints arcs when fgOrder == fgOrderLinear.
90 //    The type of arc to print is specified by the IndentInfo enum, and is controlled
91 //    by the caller of the Push() method.
92
93 enum IndentChars
94 {
95     ICVertical,
96     ICBottom,
97     ICTop,
98     ICMiddle,
99     ICDash,
100     ICEmbedded,
101     ICTerminal,
102     ICError,
103     IndentCharCount
104 };
105
106 // clang-format off
107 // Sets of strings for different dumping options            vert             bot             top             mid             dash       embedded    terminal    error
108 static const char*  emptyIndents[IndentCharCount]   = {     " ",             " ",            " ",            " ",            " ",           "{",      "",        "?"  };
109 static const char*  asciiIndents[IndentCharCount]   = {     "|",            "\\",            "/",            "+",            "-",           "{",      "*",       "?"  };
110 static const char*  unicodeIndents[IndentCharCount] = { "\xe2\x94\x82", "\xe2\x94\x94", "\xe2\x94\x8c", "\xe2\x94\x9c", "\xe2\x94\x80",     "{", "\xe2\x96\x8c", "?"  };
111 // clang-format on
112
113 typedef ArrayStack<Compiler::IndentInfo> IndentInfoStack;
114 struct IndentStack
115 {
116     IndentInfoStack stack;
117     const char**    indents;
118
119     // Constructor for IndentStack.  Uses 'compiler' to determine the mode of printing.
120     IndentStack(Compiler* compiler) : stack(compiler)
121     {
122         if (compiler->asciiTrees)
123         {
124             indents = asciiIndents;
125         }
126         else
127         {
128             indents = unicodeIndents;
129         }
130     }
131
132     // Return the depth of the current indentation.
133     unsigned Depth()
134     {
135         return stack.Height();
136     }
137
138     // Push a new indentation onto the stack, of the given type.
139     void Push(Compiler::IndentInfo info)
140     {
141         stack.Push(info);
142     }
143
144     // Pop the most recent indentation type off the stack.
145     Compiler::IndentInfo Pop()
146     {
147         return stack.Pop();
148     }
149
150     // Print the current indentation and arcs.
151     void print()
152     {
153         unsigned indentCount = Depth();
154         for (unsigned i = 0; i < indentCount; i++)
155         {
156             unsigned index = indentCount - 1 - i;
157             switch (stack.Index(index))
158             {
159                 case Compiler::IndentInfo::IINone:
160                     printf("   ");
161                     break;
162                 case Compiler::IndentInfo::IIEmbedded:
163                     printf("%s  ", indents[ICEmbedded]);
164                     break;
165                 case Compiler::IndentInfo::IIArc:
166                     if (index == 0)
167                     {
168                         printf("%s%s%s", indents[ICMiddle], indents[ICDash], indents[ICDash]);
169                     }
170                     else
171                     {
172                         printf("%s  ", indents[ICVertical]);
173                     }
174                     break;
175                 case Compiler::IndentInfo::IIArcBottom:
176                     printf("%s%s%s", indents[ICBottom], indents[ICDash], indents[ICDash]);
177                     break;
178                 case Compiler::IndentInfo::IIArcTop:
179                     printf("%s%s%s", indents[ICTop], indents[ICDash], indents[ICDash]);
180                     break;
181                 case Compiler::IndentInfo::IIError:
182                     printf("%s%s%s", indents[ICError], indents[ICDash], indents[ICDash]);
183                     break;
184                 default:
185                     unreached();
186             }
187         }
188         printf("%s", indents[ICTerminal]);
189     }
190 };
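// A minimal sketch of how a dumper might drive IndentStack, assuming a valid 'compiler'
// instance; the particular arc kinds pushed here are illustrative only.
//
//     IndentStack indentStack(compiler);
//     indentStack.Push(Compiler::IndentInfo::IIArc);       // arc to a child with siblings below it
//     indentStack.Push(Compiler::IndentInfo::IIArcBottom); // arc to the last (bottom) child
//     indentStack.print();                                 // emit the indentation and arcs
//     indentStack.Pop();
//     indentStack.Pop();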
191
192 //------------------------------------------------------------------------
193 // printIndent: This is a static method which simply invokes the 'print'
194 //    method on its 'indentStack' argument.
195 //
196 // Arguments:
197 //    indentStack - specifies the information for the indentation & arcs to be printed
198 //
199 // Notes:
200 //    This method exists to localize the checking for the case where indentStack is null.
201
202 static void printIndent(IndentStack* indentStack)
203 {
204     if (indentStack == nullptr)
205     {
206         return;
207     }
208     indentStack->print();
209 }
210
211 static const char* nodeNames[] = {
212 #define GTNODE(en, sn, cm, ok) sn,
213 #include "gtlist.h"
214 };
215
216 const char* GenTree::NodeName(genTreeOps op)
217 {
218     assert((unsigned)op < sizeof(nodeNames) / sizeof(nodeNames[0]));
219
220     return nodeNames[op];
221 }
222
223 static const char* opNames[] = {
224 #define GTNODE(en, sn, cm, ok) #en,
225 #include "gtlist.h"
226 };
227
228 const char* GenTree::OpName(genTreeOps op)
229 {
230     assert((unsigned)op < sizeof(opNames) / sizeof(opNames[0]));
231
232     return opNames[op];
233 }
234
235 #endif
236
237 /*****************************************************************************
238  *
239  *  When 'SMALL_TREE_NODES' is enabled, we allocate tree nodes in 2 different
240  *  sizes: 'GTF_DEBUG_NODE_SMALL' for most nodes and 'GTF_DEBUG_NODE_LARGE' for
241  *  the few nodes (such as calls and statement list nodes) that have more fields
242  *  and take up a lot more space.
243  */
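/* A sketch of the invariant this two-size scheme relies on, assuming a hypothetical
   'node' pointer is in hand: every oper's per-node size (as recorded in s_gtNodeSizes
   by InitNodeSize below) must fit in the block the node was allocated in; in DEBUG
   builds IsNodeProperlySized checks exactly this.

       GenTree* node = ...;                  // some node (hypothetical)
       size_t   size = node->GetNodeSize();  // per-oper size from s_gtNodeSizes
       assert(size <= TREE_NODE_SZ_LARGE);   // every oper fits in a large block
       assert(node->IsNodeProperlySized());  // DEBUG-only: fits its own allocation
*/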
244
245 #if SMALL_TREE_NODES
246
247 /* GT_COUNT'th oper is overloaded as 'undefined oper', so allocate storage for GT_COUNT'th oper also */
248 /* static */
249 unsigned char GenTree::s_gtNodeSizes[GT_COUNT + 1];
250
251 /* static */
252 void GenTree::InitNodeSize()
253 {
254     /* 'GT_LCL_VAR' often gets changed to 'GT_REG_VAR' */
255
256     assert(GenTree::s_gtNodeSizes[GT_LCL_VAR] >= GenTree::s_gtNodeSizes[GT_REG_VAR]);
257
258     /* Set all sizes to 'small' first */
259
260     for (unsigned op = 0; op <= GT_COUNT; op++)
261     {
262         GenTree::s_gtNodeSizes[op] = TREE_NODE_SZ_SMALL;
263     }
264
265     // Now set all of the appropriate entries to 'large'
266     CLANG_FORMAT_COMMENT_ANCHOR;
267
268 #if defined(FEATURE_HFA) || defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
269     // On ARM32, ARM64 and System V, when returning a struct, there is code that
270     // rewrites the GT_ASG tree into a CopyObj call.
271     // CopyObj is a large node and GT_ASG is small, which would otherwise trigger an exception.
272     GenTree::s_gtNodeSizes[GT_ASG]    = TREE_NODE_SZ_LARGE;
273     GenTree::s_gtNodeSizes[GT_RETURN] = TREE_NODE_SZ_LARGE;
274 #endif // defined(FEATURE_HFA) || defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
275
276     GenTree::s_gtNodeSizes[GT_CALL]             = TREE_NODE_SZ_LARGE;
277     GenTree::s_gtNodeSizes[GT_CAST]             = TREE_NODE_SZ_LARGE;
278     GenTree::s_gtNodeSizes[GT_FTN_ADDR]         = TREE_NODE_SZ_LARGE;
279     GenTree::s_gtNodeSizes[GT_BOX]              = TREE_NODE_SZ_LARGE;
280     GenTree::s_gtNodeSizes[GT_INDEX]            = TREE_NODE_SZ_LARGE;
281     GenTree::s_gtNodeSizes[GT_ARR_BOUNDS_CHECK] = TREE_NODE_SZ_LARGE;
282 #ifdef FEATURE_SIMD
283     GenTree::s_gtNodeSizes[GT_SIMD_CHK] = TREE_NODE_SZ_LARGE;
284 #endif // FEATURE_SIMD
285     GenTree::s_gtNodeSizes[GT_ARR_ELEM]   = TREE_NODE_SZ_LARGE;
286     GenTree::s_gtNodeSizes[GT_ARR_INDEX]  = TREE_NODE_SZ_LARGE;
287     GenTree::s_gtNodeSizes[GT_ARR_OFFSET] = TREE_NODE_SZ_LARGE;
288     GenTree::s_gtNodeSizes[GT_RET_EXPR]   = TREE_NODE_SZ_LARGE;
289     GenTree::s_gtNodeSizes[GT_OBJ]        = TREE_NODE_SZ_LARGE;
290     GenTree::s_gtNodeSizes[GT_FIELD]      = TREE_NODE_SZ_LARGE;
291     GenTree::s_gtNodeSizes[GT_STMT]       = TREE_NODE_SZ_LARGE;
292     GenTree::s_gtNodeSizes[GT_CMPXCHG]    = TREE_NODE_SZ_LARGE;
293     GenTree::s_gtNodeSizes[GT_QMARK]      = TREE_NODE_SZ_LARGE;
294     GenTree::s_gtNodeSizes[GT_LEA]        = TREE_NODE_SZ_LARGE;
295     GenTree::s_gtNodeSizes[GT_COPYOBJ]    = TREE_NODE_SZ_LARGE;
296     GenTree::s_gtNodeSizes[GT_INTRINSIC]  = TREE_NODE_SZ_LARGE;
297     GenTree::s_gtNodeSizes[GT_ALLOCOBJ]   = TREE_NODE_SZ_LARGE;
298 #if USE_HELPERS_FOR_INT_DIV
299     GenTree::s_gtNodeSizes[GT_DIV]  = TREE_NODE_SZ_LARGE;
300     GenTree::s_gtNodeSizes[GT_UDIV] = TREE_NODE_SZ_LARGE;
301     GenTree::s_gtNodeSizes[GT_MOD]  = TREE_NODE_SZ_LARGE;
302     GenTree::s_gtNodeSizes[GT_UMOD] = TREE_NODE_SZ_LARGE;
303 #endif
304 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
305     GenTree::s_gtNodeSizes[GT_PUTARG_STK] = TREE_NODE_SZ_LARGE;
306 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
307 #if defined(FEATURE_HFA) || defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
308     // In the importer, for HFA and register-returned structs, we rewrite GT_ASG to GT_COPYOBJ/GT_COPYBLK.
309     // Make sure the sizes agree.
310     assert(GenTree::s_gtNodeSizes[GT_COPYOBJ] <= GenTree::s_gtNodeSizes[GT_ASG]);
311     assert(GenTree::s_gtNodeSizes[GT_COPYBLK] <= GenTree::s_gtNodeSizes[GT_ASG]);
312 #endif // defined(FEATURE_HFA) || defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
313
314     assert(GenTree::s_gtNodeSizes[GT_RETURN] == GenTree::s_gtNodeSizes[GT_ASG]);
315
316     // This list of assertions should come to contain all GenTree subtypes that are declared
317     // "small".
318     assert(sizeof(GenTreeLclFld) <= GenTree::s_gtNodeSizes[GT_LCL_FLD]);
319     assert(sizeof(GenTreeLclVar) <= GenTree::s_gtNodeSizes[GT_LCL_VAR]);
320
321     static_assert_no_msg(sizeof(GenTree) <= TREE_NODE_SZ_SMALL);
322     static_assert_no_msg(sizeof(GenTreeUnOp) <= TREE_NODE_SZ_SMALL);
323     static_assert_no_msg(sizeof(GenTreeOp) <= TREE_NODE_SZ_SMALL);
324     static_assert_no_msg(sizeof(GenTreeVal) <= TREE_NODE_SZ_SMALL);
325     static_assert_no_msg(sizeof(GenTreeIntConCommon) <= TREE_NODE_SZ_SMALL);
326     static_assert_no_msg(sizeof(GenTreePhysReg) <= TREE_NODE_SZ_SMALL);
327 #ifndef LEGACY_BACKEND
328     static_assert_no_msg(sizeof(GenTreeJumpTable) <= TREE_NODE_SZ_SMALL);
329 #endif // !LEGACY_BACKEND
330     static_assert_no_msg(sizeof(GenTreeIntCon) <= TREE_NODE_SZ_SMALL);
331     static_assert_no_msg(sizeof(GenTreeLngCon) <= TREE_NODE_SZ_SMALL);
332     static_assert_no_msg(sizeof(GenTreeDblCon) <= TREE_NODE_SZ_SMALL);
333     static_assert_no_msg(sizeof(GenTreeStrCon) <= TREE_NODE_SZ_SMALL);
334     static_assert_no_msg(sizeof(GenTreeLclVarCommon) <= TREE_NODE_SZ_SMALL);
335     static_assert_no_msg(sizeof(GenTreeLclVar) <= TREE_NODE_SZ_SMALL);
336     static_assert_no_msg(sizeof(GenTreeLclFld) <= TREE_NODE_SZ_SMALL);
337     static_assert_no_msg(sizeof(GenTreeRegVar) <= TREE_NODE_SZ_SMALL);
338     static_assert_no_msg(sizeof(GenTreeCast) <= TREE_NODE_SZ_LARGE);  // *** large node
339     static_assert_no_msg(sizeof(GenTreeBox) <= TREE_NODE_SZ_LARGE);   // *** large node
340     static_assert_no_msg(sizeof(GenTreeField) <= TREE_NODE_SZ_LARGE); // *** large node
341     static_assert_no_msg(sizeof(GenTreeArgList) <= TREE_NODE_SZ_SMALL);
342     static_assert_no_msg(sizeof(GenTreeColon) <= TREE_NODE_SZ_SMALL);
343     static_assert_no_msg(sizeof(GenTreeCall) <= TREE_NODE_SZ_LARGE);      // *** large node
344     static_assert_no_msg(sizeof(GenTreeCmpXchg) <= TREE_NODE_SZ_LARGE);   // *** large node
345     static_assert_no_msg(sizeof(GenTreeFptrVal) <= TREE_NODE_SZ_LARGE);   // *** large node
346     static_assert_no_msg(sizeof(GenTreeQmark) <= TREE_NODE_SZ_LARGE);     // *** large node
347     static_assert_no_msg(sizeof(GenTreeIntrinsic) <= TREE_NODE_SZ_LARGE); // *** large node
348     static_assert_no_msg(sizeof(GenTreeIndex) <= TREE_NODE_SZ_LARGE);     // *** large node
349     static_assert_no_msg(sizeof(GenTreeArrLen) <= TREE_NODE_SZ_LARGE);    // *** large node
350     static_assert_no_msg(sizeof(GenTreeBoundsChk) <= TREE_NODE_SZ_LARGE); // *** large node
351     static_assert_no_msg(sizeof(GenTreeArrElem) <= TREE_NODE_SZ_LARGE);   // *** large node
352     static_assert_no_msg(sizeof(GenTreeArrIndex) <= TREE_NODE_SZ_LARGE);  // *** large node
353     static_assert_no_msg(sizeof(GenTreeArrOffs) <= TREE_NODE_SZ_LARGE);   // *** large node
354     static_assert_no_msg(sizeof(GenTreeIndir) <= TREE_NODE_SZ_SMALL);
355     static_assert_no_msg(sizeof(GenTreeStoreInd) <= TREE_NODE_SZ_SMALL);
356     static_assert_no_msg(sizeof(GenTreeBlkOp) <= TREE_NODE_SZ_SMALL);
357     static_assert_no_msg(sizeof(GenTreeCpBlk) <= TREE_NODE_SZ_SMALL);
358     static_assert_no_msg(sizeof(GenTreeInitBlk) <= TREE_NODE_SZ_SMALL);
359     static_assert_no_msg(sizeof(GenTreeCpObj) <= TREE_NODE_SZ_LARGE);   // *** large node
360     static_assert_no_msg(sizeof(GenTreeRetExpr) <= TREE_NODE_SZ_LARGE); // *** large node
361     static_assert_no_msg(sizeof(GenTreeStmt) <= TREE_NODE_SZ_LARGE);    // *** large node
362     static_assert_no_msg(sizeof(GenTreeObj) <= TREE_NODE_SZ_LARGE);     // *** large node
363     static_assert_no_msg(sizeof(GenTreeClsVar) <= TREE_NODE_SZ_SMALL);
364     static_assert_no_msg(sizeof(GenTreeArgPlace) <= TREE_NODE_SZ_SMALL);
365     static_assert_no_msg(sizeof(GenTreeLabel) <= TREE_NODE_SZ_SMALL);
366     static_assert_no_msg(sizeof(GenTreePhiArg) <= TREE_NODE_SZ_SMALL);
367     static_assert_no_msg(sizeof(GenTreeAllocObj) <= TREE_NODE_SZ_LARGE); // *** large node
368 #ifndef FEATURE_UNIX_AMD64_STRUCT_PASSING
369     static_assert_no_msg(sizeof(GenTreePutArgStk) <= TREE_NODE_SZ_SMALL);
370 #else  // FEATURE_UNIX_AMD64_STRUCT_PASSING
371     static_assert_no_msg(sizeof(GenTreePutArgStk) <= TREE_NODE_SZ_LARGE);
372 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
373
374 #ifdef FEATURE_SIMD
375     static_assert_no_msg(sizeof(GenTreeSIMD) <= TREE_NODE_SZ_SMALL);
376 #endif // FEATURE_SIMD
377 }
378
379 size_t GenTree::GetNodeSize() const
380 {
381     return GenTree::s_gtNodeSizes[gtOper];
382 }
383
384 #ifdef DEBUG
385 bool GenTree::IsNodeProperlySized() const
386 {
387     size_t size;
388
389     if (gtDebugFlags & GTF_DEBUG_NODE_SMALL)
390     {
391         size = TREE_NODE_SZ_SMALL;
392     }
393     else
394     {
395         assert(gtDebugFlags & GTF_DEBUG_NODE_LARGE);
396         size = TREE_NODE_SZ_LARGE;
397     }
398
399     return GenTree::s_gtNodeSizes[gtOper] <= size;
400 }
401 #endif
402
403 #else // SMALL_TREE_NODES
404
405 #ifdef DEBUG
406 bool GenTree::IsNodeProperlySized() const
407 {
408     return true;
409 }
410 #endif
411
412 #endif // SMALL_TREE_NODES
413
414 /*****************************************************************************/
415
416 // Make sure these get instantiated, because they're not in a header file
417 // (emulating the C++ 'export' keyword here).
418 // VC appears to be somewhat unpredictable about whether they end up in the .obj file without this.
419 template Compiler::fgWalkResult Compiler::fgWalkTreePostRec<true>(GenTreePtr* pTree, fgWalkData* fgWalkData);
420 template Compiler::fgWalkResult Compiler::fgWalkTreePostRec<false>(GenTreePtr* pTree, fgWalkData* fgWalkData);
421 template Compiler::fgWalkResult Compiler::fgWalkTreePreRec<true>(GenTreePtr* pTree, fgWalkData* fgWalkData);
422 template Compiler::fgWalkResult Compiler::fgWalkTreePreRec<false>(GenTreePtr* pTree, fgWalkData* fgWalkData);
423 template Compiler::fgWalkResult Compiler::fgWalkTreeRec<true, true>(GenTreePtr* pTree, fgWalkData* fgWalkData);
424 template Compiler::fgWalkResult Compiler::fgWalkTreeRec<false, false>(GenTreePtr* pTree, fgWalkData* fgWalkData);
425 template Compiler::fgWalkResult Compiler::fgWalkTreeRec<true, false>(GenTreePtr* pTree, fgWalkData* fgWalkData);
426 template Compiler::fgWalkResult Compiler::fgWalkTreeRec<false, true>(GenTreePtr* pTree, fgWalkData* fgWalkData);
427
428 //******************************************************************************
429 // fgWalkTreePreRec - Helper function for fgWalkTreePre.
430 //                    Walk the tree in pre-order, executing the callback on every node.
431 //                    Template parameter 'computeStack' specifies whether to maintain
432 //                    a stack of ancestor nodes which can be viewed in the callback.
433 //
434 template <bool computeStack>
435 // static
436 Compiler::fgWalkResult Compiler::fgWalkTreePreRec(GenTreePtr* pTree, fgWalkData* fgWalkData)
437 {
438     fgWalkResult result        = WALK_CONTINUE;
439     GenTreePtr   currentParent = fgWalkData->parent;
440
441     genTreeOps oper;
442     unsigned   kind;
443
444     do
445     {
446         GenTreePtr tree = *pTree;
447         assert(tree);
448         assert(tree->gtOper != GT_STMT);
449         GenTreeArgList* args; // For call node arg lists.
450
451         if (computeStack)
452         {
453             fgWalkData->parentStack->Push(tree);
454         }
455
456         /* Visit this node */
457
458         // If we are not in the mode where we only do the callback for local var nodes,
459         // visit the node unconditionally. Otherwise we will visit it under leaf handling.
460         if (!fgWalkData->wtprLclsOnly)
461         {
462             assert(tree == *pTree);
463             result = fgWalkData->wtprVisitorFn(pTree, fgWalkData);
464             if (result != WALK_CONTINUE)
465             {
466                 break;
467             }
468         }
469
470         /* Figure out what kind of a node we have */
471
472         oper = tree->OperGet();
473         kind = tree->OperKind();
474
475         /* Is this a constant or leaf node? */
476
477         if (kind & (GTK_CONST | GTK_LEAF))
478         {
479             if (fgWalkData->wtprLclsOnly && (oper == GT_LCL_VAR || oper == GT_LCL_FLD))
480             {
481                 result = fgWalkData->wtprVisitorFn(pTree, fgWalkData);
482             }
483             break;
484         }
485         else if (fgWalkData->wtprLclsOnly && GenTree::OperIsLocalStore(oper))
486         {
487             result = fgWalkData->wtprVisitorFn(pTree, fgWalkData);
488             if (result != WALK_CONTINUE)
489             {
490                 break;
491             }
492         }
493
494         fgWalkData->parent = tree;
495
496         /* Is it a 'simple' unary/binary operator? */
497
498         if (kind & GTK_SMPOP)
499         {
500             if (tree->gtGetOp2())
501             {
502                 if (tree->gtOp.gtOp1 != nullptr)
503                 {
504                     result = fgWalkTreePreRec<computeStack>(&tree->gtOp.gtOp1, fgWalkData);
505                     if (result == WALK_ABORT)
506                     {
507                         return result;
508                     }
509                 }
510                 else
511                 {
512                     assert(tree->NullOp1Legal());
513                 }
514
515                 pTree = &tree->gtOp.gtOp2;
516                 continue;
517             }
518             else
519             {
520                 pTree = &tree->gtOp.gtOp1;
521                 if (*pTree)
522                 {
523                     continue;
524                 }
525
526                 break;
527             }
528         }
529
530         /* See what kind of a special operator we have here */
531
532         switch (oper)
533         {
534             case GT_FIELD:
535                 pTree = &tree->gtField.gtFldObj;
536                 break;
537
538             case GT_CALL:
539
540                 assert(tree->gtFlags & GTF_CALL);
541
542                 /* Is this a call to unmanaged code ? */
543                 if (fgWalkData->wtprLclsOnly && (tree->gtFlags & GTF_CALL_UNMANAGED))
544                 {
545                     result = fgWalkData->wtprVisitorFn(pTree, fgWalkData);
546                     if (result == WALK_ABORT)
547                     {
548                         return result;
549                     }
550                 }
551
552                 if (tree->gtCall.gtCallObjp)
553                 {
554                     result = fgWalkTreePreRec<computeStack>(&tree->gtCall.gtCallObjp, fgWalkData);
555                     if (result == WALK_ABORT)
556                     {
557                         return result;
558                     }
559                 }
560
561                 for (args = tree->gtCall.gtCallArgs; args; args = args->Rest())
562                 {
563                     result = fgWalkTreePreRec<computeStack>(args->pCurrent(), fgWalkData);
564                     if (result == WALK_ABORT)
565                     {
566                         return result;
567                     }
568                 }
569
570                 for (args = tree->gtCall.gtCallLateArgs; args; args = args->Rest())
571                 {
572                     result = fgWalkTreePreRec<computeStack>(args->pCurrent(), fgWalkData);
573                     if (result == WALK_ABORT)
574                     {
575                         return result;
576                     }
577                 }
578
579                 if (tree->gtCall.gtControlExpr)
580                 {
581                     result = fgWalkTreePreRec<computeStack>(&tree->gtCall.gtControlExpr, fgWalkData);
582                     if (result == WALK_ABORT)
583                     {
584                         return result;
585                     }
586                 }
587
588                 if (tree->gtCall.gtCallType == CT_INDIRECT)
589                 {
590                     if (tree->gtCall.gtCallCookie)
591                     {
592                         result = fgWalkTreePreRec<computeStack>(&tree->gtCall.gtCallCookie, fgWalkData);
593                         if (result == WALK_ABORT)
594                         {
595                             return result;
596                         }
597                     }
598                     pTree = &tree->gtCall.gtCallAddr;
599                 }
600                 else
601                 {
602                     pTree = nullptr;
603                 }
604
605                 break;
606
607             case GT_ARR_ELEM:
608
609                 result = fgWalkTreePreRec<computeStack>(&tree->gtArrElem.gtArrObj, fgWalkData);
610                 if (result == WALK_ABORT)
611                 {
612                     return result;
613                 }
614
615                 unsigned dim;
616                 for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
617                 {
618                     result = fgWalkTreePreRec<computeStack>(&tree->gtArrElem.gtArrInds[dim], fgWalkData);
619                     if (result == WALK_ABORT)
620                     {
621                         return result;
622                     }
623                 }
624                 pTree = nullptr;
625                 break;
626
627             case GT_ARR_OFFSET:
628                 result = fgWalkTreePreRec<computeStack>(&tree->gtArrOffs.gtOffset, fgWalkData);
629                 if (result == WALK_ABORT)
630                 {
631                     return result;
632                 }
633                 result = fgWalkTreePreRec<computeStack>(&tree->gtArrOffs.gtIndex, fgWalkData);
634                 if (result == WALK_ABORT)
635                 {
636                     return result;
637                 }
638                 result = fgWalkTreePreRec<computeStack>(&tree->gtArrOffs.gtArrObj, fgWalkData);
639                 if (result == WALK_ABORT)
640                 {
641                     return result;
642                 }
643                 pTree = nullptr;
644                 break;
645
646             case GT_CMPXCHG:
647                 result = fgWalkTreePreRec<computeStack>(&tree->gtCmpXchg.gtOpLocation, fgWalkData);
648                 if (result == WALK_ABORT)
649                 {
650                     return result;
651                 }
652                 result = fgWalkTreePreRec<computeStack>(&tree->gtCmpXchg.gtOpValue, fgWalkData);
653                 if (result == WALK_ABORT)
654                 {
655                     return result;
656                 }
657                 result = fgWalkTreePreRec<computeStack>(&tree->gtCmpXchg.gtOpComparand, fgWalkData);
658                 if (result == WALK_ABORT)
659                 {
660                     return result;
661                 }
662                 pTree = nullptr;
663                 break;
664
665             case GT_ARR_BOUNDS_CHECK:
666 #ifdef FEATURE_SIMD
667             case GT_SIMD_CHK:
668 #endif // FEATURE_SIMD
669                 result = fgWalkTreePreRec<computeStack>(&tree->gtBoundsChk.gtArrLen, fgWalkData);
670                 if (result == WALK_ABORT)
671                 {
672                     return result;
673                 }
674                 result = fgWalkTreePreRec<computeStack>(&tree->gtBoundsChk.gtIndex, fgWalkData);
675                 if (result == WALK_ABORT)
676                 {
677                     return result;
678                 }
679                 pTree = nullptr;
680                 break;
681
682             default:
683 #ifdef DEBUG
684                 fgWalkData->compiler->gtDispTree(tree);
685 #endif
686                 assert(!"unexpected operator");
687         }
688     } while (pTree != nullptr && *pTree != nullptr);
689
690     if (computeStack)
691     {
692         fgWalkData->parentStack->Pop();
693     }
694
695     if (result != WALK_ABORT)
696     {
697         //
698         // Restore fgWalkData->parent
699         //
700         fgWalkData->parent = currentParent;
701     }
702     return result;
703 }
704
705 /*****************************************************************************
706  *
707  *  Walk all basic blocks and call the given function pointer for all tree
708  *  nodes contained therein.
709  */
710
711 void Compiler::fgWalkAllTreesPre(fgWalkPreFn* visitor, void* pCallBackData)
712 {
713     BasicBlock* block;
714
715     for (block = fgFirstBB; block; block = block->bbNext)
716     {
717         GenTreePtr tree;
718
719         for (tree = block->bbTreeList; tree; tree = tree->gtNext)
720         {
721             assert(tree->gtOper == GT_STMT);
722
723             fgWalkTreePre(&tree->gtStmt.gtStmtExpr, visitor, pCallBackData);
724         }
725     }
726 }
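// A minimal sketch of a pre-order visitor that could be passed to fgWalkAllTreesPre or
// fgWalkTreePre above. 'CountLocals' and its use of pCallbackData are hypothetical; the
// callback shape (GenTreePtr*, fgWalkData*) matches the wtprVisitorFn calls made in
// fgWalkTreePreRec.
//
//     Compiler::fgWalkResult CountLocals(GenTreePtr* pTree, Compiler::fgWalkData* data)
//     {
//         if ((*pTree)->OperGet() == GT_LCL_VAR)
//         {
//             (*reinterpret_cast<unsigned*>(data->pCallbackData))++;
//         }
//         return Compiler::WALK_CONTINUE;
//     }
//
//     unsigned lclCount = 0;
//     compiler->fgWalkAllTreesPre(&CountLocals, &lclCount); // visit every stmt expr in every block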
727
728 //******************************************************************************
729 // fgWalkTreePostRec - Helper function for fgWalkTreePost.
730 //                     Walk the tree in post-order, executing the callback on every node.
731 //                     The template parameter 'computeStack' specifies whether to maintain
732 //                     a stack of ancestor nodes which can be viewed in the callback.
733 //
734 template <bool computeStack>
735 // static
736 Compiler::fgWalkResult Compiler::fgWalkTreePostRec(GenTreePtr* pTree, fgWalkData* fgWalkData)
737 {
738     fgWalkResult result;
739     GenTreePtr   currentParent = fgWalkData->parent;
740
741     genTreeOps oper;
742     unsigned   kind;
743
744     GenTree* tree = *pTree;
745     assert(tree);
746     assert(tree->gtOper != GT_STMT);
747     GenTreeArgList* args;
748
749     /* Figure out what kind of a node we have */
750
751     oper = tree->OperGet();
752     kind = tree->OperKind();
753
754     if (computeStack)
755     {
756         fgWalkData->parentStack->Push(tree);
757     }
758
759     /* Is this a constant or leaf node? */
760
761     if (kind & (GTK_CONST | GTK_LEAF))
762     {
763         goto DONE;
764     }
765
766     /* Is it a 'simple' unary/binary operator? */
767
768     fgWalkData->parent = tree;
769
770     /* See what kind of a special operator we have here */
771
772     switch (oper)
773     {
774         case GT_FIELD:
775             if (tree->gtField.gtFldObj)
776             {
777                 result = fgWalkTreePostRec<computeStack>(&tree->gtField.gtFldObj, fgWalkData);
778                 if (result == WALK_ABORT)
779                 {
780                     return result;
781                 }
782             }
783
784             break;
785
786         case GT_CALL:
787
788             assert(tree->gtFlags & GTF_CALL);
789
790             if (tree->gtCall.gtCallObjp)
791             {
792                 result = fgWalkTreePostRec<computeStack>(&tree->gtCall.gtCallObjp, fgWalkData);
793                 if (result == WALK_ABORT)
794                 {
795                     return result;
796                 }
797             }
798
799             for (args = tree->gtCall.gtCallArgs; args; args = args->Rest())
800             {
801                 result = fgWalkTreePostRec<computeStack>(args->pCurrent(), fgWalkData);
802                 if (result == WALK_ABORT)
803                 {
804                     return result;
805                 }
806             }
807
808             for (args = tree->gtCall.gtCallLateArgs; args; args = args->Rest())
809             {
810                 result = fgWalkTreePostRec<computeStack>(args->pCurrent(), fgWalkData);
811                 if (result == WALK_ABORT)
812                 {
813                     return result;
814                 }
815             }
816             if (tree->gtCall.gtCallType == CT_INDIRECT)
817             {
818                 if (tree->gtCall.gtCallCookie)
819                 {
820                     result = fgWalkTreePostRec<computeStack>(&tree->gtCall.gtCallCookie, fgWalkData);
821                     if (result == WALK_ABORT)
822                     {
823                         return result;
824                     }
825                 }
826                 result = fgWalkTreePostRec<computeStack>(&tree->gtCall.gtCallAddr, fgWalkData);
827                 if (result == WALK_ABORT)
828                 {
829                     return result;
830                 }
831             }
832
833             if (tree->gtCall.gtControlExpr != nullptr)
834             {
835                 result = fgWalkTreePostRec<computeStack>(&tree->gtCall.gtControlExpr, fgWalkData);
836                 if (result == WALK_ABORT)
837                 {
838                     return result;
839                 }
840             }
841             break;
842
843         case GT_ARR_ELEM:
844
845             result = fgWalkTreePostRec<computeStack>(&tree->gtArrElem.gtArrObj, fgWalkData);
846             if (result == WALK_ABORT)
847             {
848                 return result;
849             }
850
851             unsigned dim;
852             for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
853             {
854                 result = fgWalkTreePostRec<computeStack>(&tree->gtArrElem.gtArrInds[dim], fgWalkData);
855                 if (result == WALK_ABORT)
856                 {
857                     return result;
858                 }
859             }
860             break;
861
862         case GT_ARR_OFFSET:
863             result = fgWalkTreePostRec<computeStack>(&tree->gtArrOffs.gtOffset, fgWalkData);
864             if (result == WALK_ABORT)
865             {
866                 return result;
867             }
868             result = fgWalkTreePostRec<computeStack>(&tree->gtArrOffs.gtIndex, fgWalkData);
869             if (result == WALK_ABORT)
870             {
871                 return result;
872             }
873             result = fgWalkTreePostRec<computeStack>(&tree->gtArrOffs.gtArrObj, fgWalkData);
874             if (result == WALK_ABORT)
875             {
876                 return result;
877             }
878             break;
879
880         case GT_CMPXCHG:
881             result = fgWalkTreePostRec<computeStack>(&tree->gtCmpXchg.gtOpComparand, fgWalkData);
882             if (result == WALK_ABORT)
883             {
884                 return result;
885             }
886             result = fgWalkTreePostRec<computeStack>(&tree->gtCmpXchg.gtOpValue, fgWalkData);
887             if (result == WALK_ABORT)
888             {
889                 return result;
890             }
891             result = fgWalkTreePostRec<computeStack>(&tree->gtCmpXchg.gtOpLocation, fgWalkData);
892             if (result == WALK_ABORT)
893             {
894                 return result;
895             }
896             break;
897
898         case GT_ARR_BOUNDS_CHECK:
899 #ifdef FEATURE_SIMD
900         case GT_SIMD_CHK:
901 #endif // FEATURE_SIMD
902             result = fgWalkTreePostRec<computeStack>(&tree->gtBoundsChk.gtArrLen, fgWalkData);
903             if (result == WALK_ABORT)
904             {
905                 return result;
906             }
907             result = fgWalkTreePostRec<computeStack>(&tree->gtBoundsChk.gtIndex, fgWalkData);
908             if (result == WALK_ABORT)
909             {
910                 return result;
911             }
912             break;
913
914         case GT_PHI:
915         {
916             GenTreeUnOp* phi = tree->AsUnOp();
917             if (phi->gtOp1 != nullptr)
918             {
919                 for (GenTreeArgList* args = phi->gtOp1->AsArgList(); args != nullptr; args = args->Rest())
920                 {
921                     result = fgWalkTreePostRec<computeStack>(&args->gtOp1, fgWalkData);
922                     if (result == WALK_ABORT)
923                     {
924                         return result;
925                     }
926                 }
927             }
928         }
929         break;
930
931         case GT_INITBLK:
932         case GT_COPYBLK:
933         case GT_COPYOBJ:
934         {
935             GenTreeBlkOp* blkOp = tree->AsBlkOp();
936             result              = fgWalkTreePostRec<computeStack>(&blkOp->gtOp1->AsArgList()->gtOp1, fgWalkData);
937             if (result == WALK_ABORT)
938             {
939                 return result;
940             }
941
942             result = fgWalkTreePostRec<computeStack>(&blkOp->gtOp1->AsArgList()->gtOp2, fgWalkData);
943             if (result == WALK_ABORT)
944             {
945                 return result;
946             }
947
948             result = fgWalkTreePostRec<computeStack>(&blkOp->gtOp2, fgWalkData);
949             if (result == WALK_ABORT)
950             {
951                 return result;
952             }
953         }
954         break;
955
956         case GT_LIST:
957         {
958             GenTreeArgList* list = tree->AsArgList();
959             if (list->IsAggregate())
960             {
961                 for (; list != nullptr; list = list->Rest())
962                 {
963                     result = fgWalkTreePostRec<computeStack>(&list->gtOp1, fgWalkData);
964                     if (result == WALK_ABORT)
965                     {
966                         return result;
967                     }
968                 }
969                 break;
970             }
971
972             // GT_LIST nodes that do not represent aggregate arguments intentionally fall through to the
973             // default node processing below.
974             __fallthrough;
975         }
976
977         default:
978             if (kind & GTK_SMPOP)
979             {
980                 GenTree** op1Slot = &tree->gtOp.gtOp1;
981
982                 GenTree** op2Slot;
983                 if (tree->OperIsBinary())
984                 {
985                     if ((tree->gtFlags & GTF_REVERSE_OPS) == 0)
986                     {
987                         op2Slot = &tree->gtOp.gtOp2;
988                     }
989                     else
990                     {
991                         op2Slot = op1Slot;
992                         op1Slot = &tree->gtOp.gtOp2;
993                     }
994                 }
995                 else
996                 {
997                     op2Slot = nullptr;
998                 }
999
1000                 if (*op1Slot != nullptr)
1001                 {
1002                     result = fgWalkTreePostRec<computeStack>(op1Slot, fgWalkData);
1003                     if (result == WALK_ABORT)
1004                     {
1005                         return result;
1006                     }
1007                 }
1008
1009                 if (op2Slot != nullptr && *op2Slot != nullptr)
1010                 {
1011                     result = fgWalkTreePostRec<computeStack>(op2Slot, fgWalkData);
1012                     if (result == WALK_ABORT)
1013                     {
1014                         return result;
1015                     }
1016                 }
1017             }
1018 #ifdef DEBUG
1019             else
1020             {
1021                 fgWalkData->compiler->gtDispTree(tree);
1022                 assert(!"unexpected operator");
1023             }
1024 #endif
1025             break;
1026     }
1027
1028 DONE:
1029
1030     fgWalkData->parent = currentParent;
1031
1032     /* Finally, visit the current node */
1033     result = fgWalkData->wtpoVisitorFn(pTree, fgWalkData);
1034
1035     if (computeStack)
1036     {
1037         fgWalkData->parentStack->Pop();
1038     }
1039
1040     return result;
1041 }
1042
1043 // ****************************************************************************
1044 // walk tree doing callbacks in both pre- and post- order (both optional)
1045
1046 template <bool doPreOrder, bool doPostOrder>
1047 // static
1048 Compiler::fgWalkResult Compiler::fgWalkTreeRec(GenTreePtr* pTree, fgWalkData* fgWalkData)
1049 {
1050     fgWalkResult result = WALK_CONTINUE;
1051
1052     genTreeOps oper;
1053     unsigned   kind;
1054
1055     GenTree* tree = *pTree;
1056     assert(tree);
1057     assert(tree->gtOper != GT_STMT);
1058     GenTreeArgList* args;
1059
1060     /* Figure out what kind of a node we have */
1061
1062     oper = tree->OperGet();
1063     kind = tree->OperKind();
1064
1065     fgWalkData->parentStack->Push(tree);
1066
1067     if (doPreOrder)
1068     {
1069         result = fgWalkData->wtprVisitorFn(pTree, fgWalkData);
1070         if (result == WALK_ABORT)
1071         {
1072             return result;
1073         }
1074         else
1075         {
1076             tree = *pTree;
1077             oper = tree->OperGet();
1078             kind = tree->OperKind();
1079         }
1080     }
1081
1082     // If we're skipping subtrees, we're done.
1083     if (result == WALK_SKIP_SUBTREES)
1084     {
1085         goto DONE;
1086     }
1087
1088     /* Is this a constant or leaf node? */
1089
1090     if ((kind & (GTK_CONST | GTK_LEAF)) != 0)
1091     {
1092         goto DONE;
1093     }
1094
1095     /* Is it a 'simple' unary/binary operator? */
1096
1097     if (kind & GTK_SMPOP)
1098     {
1099         if (tree->gtOp.gtOp1)
1100         {
1101             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtOp.gtOp1, fgWalkData);
1102             if (result == WALK_ABORT)
1103             {
1104                 return result;
1105             }
1106         }
1107
1108         if (tree->gtGetOp2())
1109         {
1110             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtOp.gtOp2, fgWalkData);
1111             if (result == WALK_ABORT)
1112             {
1113                 return result;
1114             }
1115         }
1116
1117         goto DONE;
1118     }
1119
1120     /* See what kind of a special operator we have here */
1121
1122     switch (oper)
1123     {
1124         case GT_FIELD:
1125             if (tree->gtField.gtFldObj)
1126             {
1127                 result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtField.gtFldObj, fgWalkData);
1128                 if (result == WALK_ABORT)
1129                 {
1130                     return result;
1131                 }
1132             }
1133
1134             break;
1135
1136         case GT_CALL:
1137
1138             assert(tree->gtFlags & GTF_CALL);
1139
1140             if (tree->gtCall.gtCallObjp)
1141             {
1142                 result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtCall.gtCallObjp, fgWalkData);
1143                 if (result == WALK_ABORT)
1144                 {
1145                     return result;
1146                 }
1147             }
1148
1149             for (args = tree->gtCall.gtCallArgs; args; args = args->Rest())
1150             {
1151                 result = fgWalkTreeRec<doPreOrder, doPostOrder>(args->pCurrent(), fgWalkData);
1152                 if (result == WALK_ABORT)
1153                 {
1154                     return result;
1155                 }
1156             }
1157
1158             for (args = tree->gtCall.gtCallLateArgs; args; args = args->Rest())
1159             {
1160                 result = fgWalkTreeRec<doPreOrder, doPostOrder>(args->pCurrent(), fgWalkData);
1161                 if (result == WALK_ABORT)
1162                 {
1163                     return result;
1164                 }
1165             }
1166             if (tree->gtCall.gtCallType == CT_INDIRECT)
1167             {
1168                 if (tree->gtCall.gtCallCookie)
1169                 {
1170                     result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtCall.gtCallCookie, fgWalkData);
1171                     if (result == WALK_ABORT)
1172                     {
1173                         return result;
1174                     }
1175                 }
1176                 result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtCall.gtCallAddr, fgWalkData);
1177                 if (result == WALK_ABORT)
1178                 {
1179                     return result;
1180                 }
1181             }
1182
1183             if (tree->gtCall.gtControlExpr)
1184             {
1185                 result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtCall.gtControlExpr, fgWalkData);
1186                 if (result == WALK_ABORT)
1187                 {
1188                     return result;
1189                 }
1190             }
1191
1192             break;
1193
1194         case GT_ARR_ELEM:
1195
1196             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtArrElem.gtArrObj, fgWalkData);
1197             if (result == WALK_ABORT)
1198             {
1199                 return result;
1200             }
1201
1202             unsigned dim;
1203             for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
1204             {
1205                 result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtArrElem.gtArrInds[dim], fgWalkData);
1206                 if (result == WALK_ABORT)
1207                 {
1208                     return result;
1209                 }
1210             }
1211             break;
1212
1213         case GT_ARR_OFFSET:
1214             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtArrOffs.gtOffset, fgWalkData);
1215             if (result == WALK_ABORT)
1216             {
1217                 return result;
1218             }
1219             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtArrOffs.gtIndex, fgWalkData);
1220             if (result == WALK_ABORT)
1221             {
1222                 return result;
1223             }
1224             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtArrOffs.gtArrObj, fgWalkData);
1225             if (result == WALK_ABORT)
1226             {
1227                 return result;
1228             }
1229             break;
1230
1231         case GT_CMPXCHG:
1232             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtCmpXchg.gtOpComparand, fgWalkData);
1233             if (result == WALK_ABORT)
1234             {
1235                 return result;
1236             }
1237             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtCmpXchg.gtOpValue, fgWalkData);
1238             if (result == WALK_ABORT)
1239             {
1240                 return result;
1241             }
1242             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtCmpXchg.gtOpLocation, fgWalkData);
1243             if (result == WALK_ABORT)
1244             {
1245                 return result;
1246             }
1247             break;
1248
1249         case GT_ARR_BOUNDS_CHECK:
1250 #ifdef FEATURE_SIMD
1251         case GT_SIMD_CHK:
1252 #endif // FEATURE_SIMD
1253             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtBoundsChk.gtArrLen, fgWalkData);
1254             if (result == WALK_ABORT)
1255             {
1256                 return result;
1257             }
1258             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtBoundsChk.gtIndex, fgWalkData);
1259             if (result == WALK_ABORT)
1260             {
1261                 return result;
1262             }
1263             break;
1264
1265         default:
1266 #ifdef DEBUG
1267             fgWalkData->compiler->gtDispTree(tree);
1268 #endif
1269             assert(!"unexpected operator");
1270     }
1271
1272 DONE:
1273
1274     /* Finally, visit the current node */
1275     if (doPostOrder)
1276     {
1277         result = fgWalkData->wtpoVisitorFn(pTree, fgWalkData);
1278     }
1279
1280     fgWalkData->parentStack->Pop();
1281
1282     return result;
1283 }
1284
1285 /*****************************************************************************
1286  *
1287  *  Call the given function pointer for all nodes in the tree. The 'visitor'
1288  *  fn should return one of the following values:
1289  *
1290  *  WALK_ABORT          stop walking and return immediately
1291  *  WALK_CONTINUE       continue walking
1292  *  WALK_SKIP_SUBTREES  don't walk any subtrees of the node just visited
1293  */
1294
1295 Compiler::fgWalkResult Compiler::fgWalkTree(GenTreePtr*  pTree,
1296                                             fgWalkPreFn* preVisitor,
1297                                             fgWalkPreFn* postVisitor,
1298                                             void*        callBackData)
1299
1300 {
1301     fgWalkData walkData;
1302
1303     walkData.compiler      = this;
1304     walkData.wtprVisitorFn = preVisitor;
1305     walkData.wtpoVisitorFn = postVisitor;
1306     walkData.pCallbackData = callBackData;
1307     walkData.parent        = nullptr;
1308     walkData.wtprLclsOnly  = false;
1309 #ifdef DEBUG
1310     walkData.printModified = false;
1311 #endif
1312     ArrayStack<GenTree*> parentStack(this);
1313     walkData.parentStack = &parentStack;
1314
1315     fgWalkResult result;
1316
1317     assert(preVisitor || postVisitor);
1318
1319     if (preVisitor && postVisitor)
1320     {
1321         result = fgWalkTreeRec<true, true>(pTree, &walkData);
1322     }
1323     else if (preVisitor)
1324     {
1325         result = fgWalkTreeRec<true, false>(pTree, &walkData);
1326     }
1327     else
1328     {
1329         result = fgWalkTreeRec<false, true>(pTree, &walkData);
1330     }
1331
1332 #ifdef DEBUG
1333     if (verbose && walkData.printModified)
1334     {
1335         gtDispTree(*pTree);
1336     }
1337 #endif
1338
1339     return result;
1340 }
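// A minimal sketch of driving fgWalkTree with both a pre- and a post-visitor, matching
// the signature above. 'PreVisit', 'PostVisit' and 'stmt' are hypothetical; both
// callbacks have the same (GenTreePtr*, fgWalkData*) shape as the visitors used by the
// recursive helpers.
//
//     Compiler::fgWalkResult res =
//         compiler->fgWalkTree(&stmt->gtStmt.gtStmtExpr, &PreVisit, &PostVisit, /* callBackData */ nullptr);
//     if (res == Compiler::WALK_ABORT)
//     {
//         // a visitor requested that the walk stop early
//     }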
1341
1342 // ------------------------------------------------------------------------------------------
1343 // gtClearReg: Sets the register to the "no register assignment" value, depending upon
1344 // the type of the node, and whether it fits any of the special cases for register pairs
1345 // or multi-reg call nodes.
1346 //
1347 // Arguments:
1348 //     compiler  -  compiler instance
1349 //
1350 // Return Value:
1351 //     None
1352 void GenTree::gtClearReg(Compiler* compiler)
1353 {
1354 #if CPU_LONG_USES_REGPAIR
1355     if (isRegPairType(TypeGet()) ||
1356         // (IsLocal() && isRegPairType(compiler->lvaTable[gtLclVarCommon.gtLclNum].TypeGet())) ||
1357         (OperGet() == GT_MUL && (gtFlags & GTF_MUL_64RSLT)))
1358     {
1359         gtRegPair = REG_PAIR_NONE;
1360     }
1361     else
1362 #endif // CPU_LONG_USES_REGPAIR
1363     {
1364         gtRegNum = REG_NA;
1365     }
1366
1367     // Also clear multi-reg state if this is a call node
1368     if (IsCall())
1369     {
1370         this->AsCall()->ClearOtherRegs();
1371     }
1372     else if (IsCopyOrReload())
1373     {
1374         this->AsCopyOrReload()->ClearOtherRegs();
1375     }
1376 }
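// A minimal sketch of the expected call order, per the assumption documented on
// gtHasReg below: clear the register state first, then gtHasReg reports false until a
// register is actually assigned. 'node' is hypothetical.
//
//     node->gtClearReg(compiler); // reset to the "no register assigned" state
//     assert(!node->gtHasReg());  // no register (and no multi-reg registers) assigned yet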
1377
1378 //-----------------------------------------------------------
1379 // CopyReg: Copy the _gtRegNum/_gtRegPair/gtRegTag fields.
1380 //
1381 // Arguments:
1382 //     from   -  GenTree node from which to copy
1383 //
1384 // Return Value:
1385 //     None
1386 void GenTree::CopyReg(GenTreePtr from)
1387 {
1388     // To do the copy, use _gtRegPair, which must be bigger than _gtRegNum. Note that the values
1389     // might be undefined (so gtRegTag == GT_REGTAG_NONE).
1390     _gtRegPair = from->_gtRegPair;
1391     C_ASSERT(sizeof(_gtRegPair) >= sizeof(_gtRegNum));
1392     INDEBUG(gtRegTag = from->gtRegTag;)
1393
1394     // Also copy multi-reg state if this is a call node
1395     if (IsCall())
1396     {
1397         assert(from->IsCall());
1398         this->AsCall()->CopyOtherRegs(from->AsCall());
1399     }
1400     else if (IsCopyOrReload())
1401     {
1402         this->AsCopyOrReload()->CopyOtherRegs(from->AsCopyOrReload());
1403     }
1404 }
1405
1406 //------------------------------------------------------------------
1407 // gtHasReg: Whether the node has been assigned a register by LSRA
1408 //
1409 // Arguments:
1410 //    None
1411 //
1412 // Return Value:
1413 //    Returns true if the node was assigned a register.
1414 //
1415 //    In case of multi-reg call nodes, it is considered
1416 //    A multi-reg call node is considered to have a reg
1417 //    if registers are allocated for all of its
1418 //    return values.
1419 //
1420 //    A GT_COPY or GT_RELOAD of a multi-reg call is
1421 //    considered to have a reg if it has a reg assigned
1422 //    to any of its positions.
1423 // Assumption:
1424 //    In order for this to work properly, gtClearReg must be called
1425 //    prior to setting the register value.
1426 //
1427 bool GenTree::gtHasReg() const
1428 {
1429     bool hasReg;
1430
1431 #if CPU_LONG_USES_REGPAIR
1432     if (isRegPairType(TypeGet()))
1433     {
1434         assert(_gtRegNum != REG_NA);
1435         INDEBUG(assert(gtRegTag == GT_REGTAG_REGPAIR));
1436         hasReg = (gtRegPair != REG_PAIR_NONE);
1437     }
1438     else
1439 #endif
1440     {
1441         assert(_gtRegNum != REG_PAIR_NONE);
1442         INDEBUG(assert(gtRegTag == GT_REGTAG_REG));
1443
1444         if (IsMultiRegCall())
1445         {
1446             // Has to cast away const-ness because GetReturnTypeDesc() is a non-const method
1447             GenTree*     tree     = const_cast<GenTree*>(this);
1448             GenTreeCall* call     = tree->AsCall();
1449             unsigned     regCount = call->GetReturnTypeDesc()->GetReturnRegCount();
1450             hasReg                = false;
1451
1452             // A multi-reg call node is said to have regs if it has
1453             // a reg assigned to each of its result registers.
1454             for (unsigned i = 0; i < regCount; ++i)
1455             {
1456                 hasReg = (call->GetRegNumByIdx(i) != REG_NA);
1457                 if (!hasReg)
1458                 {
1459                     break;
1460                 }
1461             }
1462         }
1463         else if (IsCopyOrReloadOfMultiRegCall())
1464         {
1465             GenTree*             tree         = const_cast<GenTree*>(this);
1466             GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
1467             GenTreeCall*         call         = copyOrReload->gtGetOp1()->AsCall();
1468             unsigned             regCount     = call->GetReturnTypeDesc()->GetReturnRegCount();
1469             hasReg                            = false;
1470
1471             // A multi-reg copy or reload node is said to have regs
1472             // if it has valid regs in any of its positions.
1473             for (unsigned i = 0; i < regCount; ++i)
1474             {
1475                 hasReg = (copyOrReload->GetRegNumByIdx(i) != REG_NA);
1476                 if (hasReg)
1477                 {
1478                     break;
1479                 }
1480             }
1481         }
1482         else
1483         {
1484             hasReg = (gtRegNum != REG_NA);
1485         }
1486     }
1487
1488     return hasReg;
1489 }
1490
1491 //---------------------------------------------------------------
1492 // gtGetRegMask: Get the reg mask of the node.
1493 //
1494 // Arguments:
1495 //    None
1496 //
1497 // Return Value:
1498 //    Reg Mask of GenTree node.
1499 //
1500 regMaskTP GenTree::gtGetRegMask() const
1501 {
1502     regMaskTP resultMask;
1503
1504 #if CPU_LONG_USES_REGPAIR
1505     if (isRegPairType(TypeGet()))
1506     {
1507         resultMask = genRegPairMask(gtRegPair);
1508     }
1509     else
1510 #endif
1511     {
1512         if (IsMultiRegCall())
1513         {
1514             // temporarily cast away const-ness as AsCall() method is not declared const
1515             resultMask    = genRegMask(gtRegNum);
1516             GenTree* temp = const_cast<GenTree*>(this);
1517             resultMask |= temp->AsCall()->GetOtherRegMask();
1518         }
1519         else if (IsCopyOrReloadOfMultiRegCall())
1520         {
1521             // A multi-reg copy or reload will have valid regs only for those
1522             // positions that need to be copied or reloaded. Hence we need
1523             // to consider only those registers when computing the reg mask.
1524
1525             GenTree*             tree         = const_cast<GenTree*>(this);
1526             GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
1527             GenTreeCall*         call         = copyOrReload->gtGetOp1()->AsCall();
1528             unsigned             regCount     = call->GetReturnTypeDesc()->GetReturnRegCount();
1529
1530             resultMask = RBM_NONE;
1531             for (unsigned i = 0; i < regCount; ++i)
1532             {
1533                 regNumber reg = copyOrReload->GetRegNumByIdx(i);
1534                 if (reg != REG_NA)
1535                 {
1536                     resultMask |= genRegMask(reg);
1537                 }
1538             }
1539         }
1540         else
1541         {
1542             resultMask = genRegMask(gtRegNum);
1543         }
1544     }
1545
1546     return resultMask;
1547 }
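// A minimal sketch combining gtHasReg and gtGetRegMask, assuming 'node' is a hypothetical
// node that LSRA may have assigned registers to; for multi-reg calls the mask also
// includes the gtOtherRegs (see GetOtherRegMask below).
//
//     if (node->gtHasReg())
//     {
//         regMaskTP usedRegs = node->gtGetRegMask(); // mask of all registers the node occupies
//         // ... e.g. record these registers as busy at this point ...
//     }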
1548
1549 //---------------------------------------------------------------
1550 // GetOtherRegMask: Get the reg mask of gtOtherRegs of call node
1551 //
1552 // Arguments:
1553 //    None
1554 //
1555 // Return Value:
1556 //    Reg mask of gtOtherRegs of call node.
1557 //
1558 regMaskTP GenTreeCall::GetOtherRegMask() const
1559 {
1560     regMaskTP resultMask = RBM_NONE;
1561
1562 #if FEATURE_MULTIREG_RET
1563     for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
1564     {
1565         if (gtOtherRegs[i] != REG_NA)
1566         {
1567             resultMask |= genRegMask(gtOtherRegs[i]);
1568             continue;
1569         }
1570         break;
1571     }
1572 #endif
1573
1574     return resultMask;
1575 }
1576
1577 #ifndef LEGACY_BACKEND
1578
1579 //-------------------------------------------------------------------------
1580 // HasNonStandardAddedArgs: Return true if the method has non-standard args added to the call
1581 // argument list during argument morphing (fgMorphArgs), e.g., passed in R10 or R11 on AMD64.
1582 // See also GetNonStandardAddedArgCount().
1583 //
1584 // Arguments:
1585 //     compiler - the compiler instance
1586 //
1587 // Return Value:
1588 //      true if there are any such args, false otherwise.
1589 //
1590 bool GenTreeCall::HasNonStandardAddedArgs(Compiler* compiler) const
1591 {
1592     return GetNonStandardAddedArgCount(compiler) != 0;
1593 }
1594
1595 //-------------------------------------------------------------------------
1596 // GetNonStandardAddedArgCount: Get the count of non-standard arguments that have been added
1597 // during call argument morphing (fgMorphArgs). Do not count non-standard args that are already
1598 // counted in the argument list prior to morphing.
1599 //
1600 // This function is used to help map the caller and callee arguments during tail call setup.
1601 //
1602 // Arguments:
1603 //     compiler - the compiler instance
1604 //
1605 // Return Value:
1606 //      The count of args, as described.
1607 //
1608 // Notes:
1609 //      It would be more general to have fgMorphArgs set a bit on the call node when such
1610 //      args are added to a call, and a bit on each such arg, and then have this code loop
1611 //      over the call args when the special call bit is set, counting the args with the special
1612 //      arg bit. This seems pretty heavyweight, though. Instead, this logic needs to be kept
1613 //      in sync with fgMorphArgs.
1614 //
1615 int GenTreeCall::GetNonStandardAddedArgCount(Compiler* compiler) const
1616 {
1617     if (IsUnmanaged() && !compiler->opts.ShouldUsePInvokeHelpers())
1618     {
1619         // R11 = PInvoke cookie param
1620         return 1;
1621     }
1622     else if (gtCallType == CT_INDIRECT)
1623     {
1624         if (IsVirtualStub())
1625         {
1626             // R11 = Virtual stub param
1627             return 1;
1628         }
1629         else if (gtCallCookie != nullptr)
1630         {
1631             // R10 = PInvoke target param
1632             // R11 = PInvoke cookie param
1633             return 2;
1634         }
1635     }
1636     return 0;
1637 }
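// Worked example of the cases above: an unmanaged call compiled without the P/Invoke
// helpers reports 1 (the cookie in R11); an indirect call through a virtual stub
// reports 1 (the stub param in R11); an indirect call with a call cookie reports 2
// (R10 and R11); every other call reports 0.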
1638
1639 #endif // !LEGACY_BACKEND
1640
1641 //-------------------------------------------------------------------------
1642 // TreatAsHasRetBufArg:
1643 //
1644 // Arguments:
1645 //     compiler - the compiler instance so that we can call eeGetHelperNum
1646 //
1647 // Return Value:
1648 //     Returns true if we treat the call as if it has a retBuf argument.
1649 //     The call may actually have a retBuf argument,
1650 //     or it could be a JIT helper that we are still transforming during
1651 //     the importer phase.
1652 //
1653 // Notes:
1654 //     On ARM64, marking the call with the GTF_CALL_M_RETBUFFARG flag
1655 //     will make HasRetBufArg() return true, but will also force the
1656 //     use of register x8 to pass the RetBuf argument.
1657 //
1658 //     The two JIT helpers that we handle here by returning true
1659 //     aren't actually defined to return a struct, so they don't expect
1660 //     their RetBuf to be passed in x8; instead they expect it in x0.
1661 //
1662 bool GenTreeCall::TreatAsHasRetBufArg(Compiler* compiler) const
1663 {
1664     if (HasRetBufArg())
1665     {
1666         return true;
1667     }
1668     else
1669     {
1670         // If we see a Jit helper call that returns a TYP_STRUCT we will
1671         // transform it as if it has a Return Buffer Argument
1672         //
1673         if (IsHelperCall() && (gtReturnType == TYP_STRUCT))
1674         {
1675             // There are two possible helper calls that use this path:
1676             //  CORINFO_HELP_GETFIELDSTRUCT and CORINFO_HELP_UNBOX_NULLABLE
1677             //
1678             CorInfoHelpFunc helpFunc = compiler->eeGetHelperNum(gtCallMethHnd);
1679
1680             if (helpFunc == CORINFO_HELP_GETFIELDSTRUCT)
1681             {
1682                 return true;
1683             }
1684             else if (helpFunc == CORINFO_HELP_UNBOX_NULLABLE)
1685             {
1686                 return true;
1687             }
1688             else
1689             {
1690                 assert(!"Unexpected JIT helper in TreatAsHasRetBufArg");
1691             }
1692         }
1693     }
1694     return false;
1695 }
1696
1697 //-------------------------------------------------------------------------
1698 // IsHelperCall: Determine if this GT_CALL node is a specific helper call.
1699 //
1700 // Arguments:
1701 //     compiler - the compiler instance so that we can call eeFindHelper
1702 //
1703 // Return Value:
1704 //     Returns true if this GT_CALL node is a call to the specified helper.
1705 //
1706 bool GenTreeCall::IsHelperCall(Compiler* compiler, unsigned helper) const
1707 {
1708     return IsHelperCall(compiler->eeFindHelper(helper));
1709 }
1710
1711 /*****************************************************************************
1712  *
1713  *  Returns non-zero if the two trees are identical.
1714  */
1715
1716 bool GenTree::Compare(GenTreePtr op1, GenTreePtr op2, bool swapOK)
1717 {
1718     genTreeOps oper;
1719     unsigned   kind;
1720
1721 //  printf("tree1:\n"); gtDispTree(op1);
1722 //  printf("tree2:\n"); gtDispTree(op2);
1723
1724 AGAIN:
1725
1726     if (op1 == nullptr)
1727     {
1728         return (op2 == nullptr);
1729     }
1730     if (op2 == nullptr)
1731     {
1732         return false;
1733     }
1734     if (op1 == op2)
1735     {
1736         return true;
1737     }
1738
1739     assert(op1->gtOper != GT_STMT);
1740     assert(op2->gtOper != GT_STMT);
1741
1742     oper = op1->OperGet();
1743
1744     /* The operators must be equal */
1745
1746     if (oper != op2->gtOper)
1747     {
1748         return false;
1749     }
1750
1751     /* The types must be equal */
1752
1753     if (op1->gtType != op2->gtType)
1754     {
1755         return false;
1756     }
1757
1758     /* Overflow must be equal */
1759     if (op1->gtOverflowEx() != op2->gtOverflowEx())
1760     {
1761         return false;
1762     }
1763
1764     /* Sensible flags must be equal */
1765     if ((op1->gtFlags & (GTF_UNSIGNED)) != (op2->gtFlags & (GTF_UNSIGNED)))
1766     {
1767         return false;
1768     }
1769
1770     /* Figure out what kind of nodes we're comparing */
1771
1772     kind = op1->OperKind();
1773
1774     /* Is this a constant node? */
1775
1776     if (kind & GTK_CONST)
1777     {
1778         switch (oper)
1779         {
1780             case GT_CNS_INT:
1781                 if (op1->gtIntCon.gtIconVal == op2->gtIntCon.gtIconVal)
1782                 {
1783                     return true;
1784                 }
1785                 break;
1786 #if 0
1787             // TODO-CQ: Enable this in the future
1788         case GT_CNS_LNG:
1789             if  (op1->gtLngCon.gtLconVal == op2->gtLngCon.gtLconVal)
1790                 return true;
1791             break;
1792
1793         case GT_CNS_DBL:
1794             if  (op1->gtDblCon.gtDconVal == op2->gtDblCon.gtDconVal)
1795                 return true;
1796             break;
1797 #endif
1798             default:
1799                 break;
1800         }
1801
1802         return false;
1803     }
1804
1805     /* Is this a leaf node? */
1806
1807     if (kind & GTK_LEAF)
1808     {
1809         switch (oper)
1810         {
1811             case GT_LCL_VAR:
1812                 if (op1->gtLclVarCommon.gtLclNum != op2->gtLclVarCommon.gtLclNum)
1813                 {
1814                     break;
1815                 }
1816
1817                 return true;
1818
1819             case GT_LCL_FLD:
1820                 if (op1->gtLclFld.gtLclNum != op2->gtLclFld.gtLclNum ||
1821                     op1->gtLclFld.gtLclOffs != op2->gtLclFld.gtLclOffs)
1822                 {
1823                     break;
1824                 }
1825
1826                 return true;
1827
1828             case GT_CLS_VAR:
1829                 if (op1->gtClsVar.gtClsVarHnd != op2->gtClsVar.gtClsVarHnd)
1830                 {
1831                     break;
1832                 }
1833
1834                 return true;
1835
1836             case GT_LABEL:
1837                 return true;
1838
1839             case GT_ARGPLACE:
1840                 if ((op1->gtType == TYP_STRUCT) &&
1841                     (op1->gtArgPlace.gtArgPlaceClsHnd != op2->gtArgPlace.gtArgPlaceClsHnd))
1842                 {
1843                     break;
1844                 }
1845                 return true;
1846
1847             default:
1848                 break;
1849         }
1850
1851         return false;
1852     }
1853
1854     /* Is it a 'simple' unary/binary operator? */
1855
1856     if (kind & GTK_UNOP)
1857     {
1858         if (IsExOp(kind))
1859         {
1860             // ExOp operators extend unary operators with extra, non-GenTreePtr members.  In many cases,
1861             // these should be included in the comparison.
1862             switch (oper)
1863             {
1864                 case GT_ARR_LENGTH:
1865                     if (op1->gtArrLen.ArrLenOffset() != op2->gtArrLen.ArrLenOffset())
1866                     {
1867                         return false;
1868                     }
1869                     break;
1870                 case GT_CAST:
1871                     if (op1->gtCast.gtCastType != op2->gtCast.gtCastType)
1872                     {
1873                         return false;
1874                     }
1875                     break;
1876                 case GT_OBJ:
1877                     if (op1->AsObj()->gtClass != op2->AsObj()->gtClass)
1878                     {
1879                         return false;
1880                     }
1881                     break;
1882
1883                 // For the ones below no extra argument matters for comparison.
1884                 case GT_BOX:
1885                     break;
1886
1887                 default:
1888                     assert(!"unexpected unary ExOp operator");
1889             }
1890         }
1891         return Compare(op1->gtOp.gtOp1, op2->gtOp.gtOp1);
1892     }
1893
1894     if (kind & GTK_BINOP)
1895     {
1896         if (IsExOp(kind))
1897         {
1898             // ExOp operators extend binary operators with extra, non-GenTreePtr members.  In many cases,
1899             // these should be included in the comparison.
1900             switch (oper)
1901             {
1902                 case GT_INTRINSIC:
1903                     if (op1->gtIntrinsic.gtIntrinsicId != op2->gtIntrinsic.gtIntrinsicId)
1904                     {
1905                         return false;
1906                     }
1907                     break;
1908                 case GT_LEA:
1909                     if (op1->gtAddrMode.gtScale != op2->gtAddrMode.gtScale)
1910                     {
1911                         return false;
1912                     }
1913                     if (op1->gtAddrMode.gtOffset != op2->gtAddrMode.gtOffset)
1914                     {
1915                         return false;
1916                     }
1917                     break;
1918                 case GT_INDEX:
1919                     if (op1->gtIndex.gtIndElemSize != op2->gtIndex.gtIndElemSize)
1920                     {
1921                         return false;
1922                     }
1923                     break;
1924
1925                 // For the ones below no extra argument matters for comparison.
1926                 case GT_QMARK:
1927                     break;
1928
1929                 default:
1930                     assert(!"unexpected binary ExOp operator");
1931             }
1932         }
1933
1934         if (op1->gtOp.gtOp2)
1935         {
1936             if (!Compare(op1->gtOp.gtOp1, op2->gtOp.gtOp1, swapOK))
1937             {
1938                 if (swapOK && OperIsCommutative(oper) &&
1939                     ((op1->gtOp.gtOp1->gtFlags | op1->gtOp.gtOp2->gtFlags | op2->gtOp.gtOp1->gtFlags |
1940                       op2->gtOp.gtOp2->gtFlags) &
1941                      GTF_ALL_EFFECT) == 0)
1942                 {
1943                     if (Compare(op1->gtOp.gtOp1, op2->gtOp.gtOp2, swapOK))
1944                     {
1945                         op1 = op1->gtOp.gtOp2;
1946                         op2 = op2->gtOp.gtOp1;
1947                         goto AGAIN;
1948                     }
1949                 }
1950
1951                 return false;
1952             }
1953
1954             op1 = op1->gtOp.gtOp2;
1955             op2 = op2->gtOp.gtOp2;
1956
1957             goto AGAIN;
1958         }
1959         else
1960         {
1961
1962             op1 = op1->gtOp.gtOp1;
1963             op2 = op2->gtOp.gtOp1;
1964
1965             if (!op1)
1966             {
1967                 return (op2 == nullptr);
1968             }
1969             if (!op2)
1970             {
1971                 return false;
1972             }
1973
1974             goto AGAIN;
1975         }
1976     }
1977
1978     /* See what kind of a special operator we have here */
1979
1980     switch (oper)
1981     {
1982         case GT_FIELD:
1983             if (op1->gtField.gtFldHnd != op2->gtField.gtFldHnd)
1984             {
1985                 break;
1986             }
1987
1988             op1 = op1->gtField.gtFldObj;
1989             op2 = op2->gtField.gtFldObj;
1990
1991             if (op1 || op2)
1992             {
1993                 if (op1 && op2)
1994                 {
1995                     goto AGAIN;
1996                 }
1997             }
1998
1999             return true;
2000
2001         case GT_CALL:
2002
2003             if (op1->gtCall.gtCallType != op2->gtCall.gtCallType)
2004             {
2005                 return false;
2006             }
2007
2008             if (op1->gtCall.gtCallType != CT_INDIRECT)
2009             {
2010                 if (op1->gtCall.gtCallMethHnd != op2->gtCall.gtCallMethHnd)
2011                 {
2012                     return false;
2013                 }
2014
2015 #ifdef FEATURE_READYTORUN_COMPILER
2016                 if (op1->gtCall.gtEntryPoint.addr != op2->gtCall.gtEntryPoint.addr)
2017                     return false;
2018 #endif
2019             }
2020             else
2021             {
2022                 if (!Compare(op1->gtCall.gtCallAddr, op2->gtCall.gtCallAddr))
2023                 {
2024                     return false;
2025                 }
2026             }
2027
2028             if (Compare(op1->gtCall.gtCallLateArgs, op2->gtCall.gtCallLateArgs) &&
2029                 Compare(op1->gtCall.gtCallArgs, op2->gtCall.gtCallArgs) &&
2030                 Compare(op1->gtCall.gtControlExpr, op2->gtCall.gtControlExpr) &&
2031                 Compare(op1->gtCall.gtCallObjp, op2->gtCall.gtCallObjp))
2032             {
2033                 return true;
2034             }
2035             break;
2036
2037         case GT_ARR_ELEM:
2038
2039             if (op1->gtArrElem.gtArrRank != op2->gtArrElem.gtArrRank)
2040             {
2041                 return false;
2042             }
2043
2044             // NOTE: gtArrElemSize may need to be handled
2045
2046             unsigned dim;
2047             for (dim = 0; dim < op1->gtArrElem.gtArrRank; dim++)
2048             {
2049                 if (!Compare(op1->gtArrElem.gtArrInds[dim], op2->gtArrElem.gtArrInds[dim]))
2050                 {
2051                     return false;
2052                 }
2053             }
2054
2055             op1 = op1->gtArrElem.gtArrObj;
2056             op2 = op2->gtArrElem.gtArrObj;
2057             goto AGAIN;
2058
2059         case GT_ARR_OFFSET:
2060             if (op1->gtArrOffs.gtCurrDim != op2->gtArrOffs.gtCurrDim ||
2061                 op1->gtArrOffs.gtArrRank != op2->gtArrOffs.gtArrRank)
2062             {
2063                 return false;
2064             }
2065             return (Compare(op1->gtArrOffs.gtOffset, op2->gtArrOffs.gtOffset) &&
2066                     Compare(op1->gtArrOffs.gtIndex, op2->gtArrOffs.gtIndex) &&
2067                     Compare(op1->gtArrOffs.gtArrObj, op2->gtArrOffs.gtArrObj));
2068
2069         case GT_CMPXCHG:
2070             return Compare(op1->gtCmpXchg.gtOpLocation, op2->gtCmpXchg.gtOpLocation) &&
2071                    Compare(op1->gtCmpXchg.gtOpValue, op2->gtCmpXchg.gtOpValue) &&
2072                    Compare(op1->gtCmpXchg.gtOpComparand, op2->gtCmpXchg.gtOpComparand);
2073
2074         case GT_ARR_BOUNDS_CHECK:
2075 #ifdef FEATURE_SIMD
2076         case GT_SIMD_CHK:
2077 #endif // FEATURE_SIMD
2078             return Compare(op1->gtBoundsChk.gtArrLen, op2->gtBoundsChk.gtArrLen) &&
2079                    Compare(op1->gtBoundsChk.gtIndex, op2->gtBoundsChk.gtIndex) &&
2080                    (op1->gtBoundsChk.gtThrowKind == op2->gtBoundsChk.gtThrowKind);
2081
2082         default:
2083             assert(!"unexpected operator");
2084     }
2085
2086     return false;
2087 }
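// Note on 'swapOK' (see the GTK_BINOP case above): when it is true, Compare will also
// match commutative operands in swapped order -- e.g. "a + b" against "b + a" -- but
// only if none of the four operands carries any GTF_ALL_EFFECT side effects; when it
// is false, operand order must match exactly.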
2088
2089 /*****************************************************************************
2090  *
2091  *  Returns non-zero if the given tree contains a use of a local #lclNum.
2092  */
2093
2094 bool Compiler::gtHasRef(GenTreePtr tree, ssize_t lclNum, bool defOnly)
2095 {
2096     genTreeOps oper;
2097     unsigned   kind;
2098
2099 AGAIN:
2100
2101     assert(tree);
2102
2103     oper = tree->OperGet();
2104     kind = tree->OperKind();
2105
2106     assert(oper != GT_STMT);
2107
2108     /* Is this a constant node? */
2109
2110     if (kind & GTK_CONST)
2111     {
2112         return false;
2113     }
2114
2115     /* Is this a leaf node? */
2116
2117     if (kind & GTK_LEAF)
2118     {
2119         if (oper == GT_LCL_VAR)
2120         {
2121             if (tree->gtLclVarCommon.gtLclNum == (unsigned)lclNum)
2122             {
2123                 if (!defOnly)
2124                 {
2125                     return true;
2126                 }
2127             }
2128         }
2129         else if (oper == GT_RET_EXPR)
2130         {
2131             return gtHasRef(tree->gtRetExpr.gtInlineCandidate, lclNum, defOnly);
2132         }
2133
2134         return false;
2135     }
2136
2137     /* Is it a 'simple' unary/binary operator? */
2138
2139     if (kind & GTK_SMPOP)
2140     {
2141         if (tree->gtGetOp2())
2142         {
2143             if (gtHasRef(tree->gtOp.gtOp1, lclNum, defOnly))
2144             {
2145                 return true;
2146             }
2147
2148             tree = tree->gtOp.gtOp2;
2149             goto AGAIN;
2150         }
2151         else
2152         {
2153             tree = tree->gtOp.gtOp1;
2154
2155             if (!tree)
2156             {
2157                 return false;
2158             }
2159
2160             if (kind & GTK_ASGOP)
2161             {
2162                 // 'tree' is the gtOp1 of an assignment node. So we can handle
2163                 // the case where defOnly is either true or false.
2164
2165                 if (tree->gtOper == GT_LCL_VAR && tree->gtLclVarCommon.gtLclNum == (unsigned)lclNum)
2166                 {
2167                     return true;
2168                 }
2169                 else if (tree->gtOper == GT_FIELD && lclNum == (ssize_t)tree->gtField.gtFldHnd)
2170                 {
2171                     return true;
2172                 }
2173             }
2174
2175             goto AGAIN;
2176         }
2177     }
2178
2179     /* See what kind of a special operator we have here */
2180
2181     switch (oper)
2182     {
2183         case GT_FIELD:
2184             if (lclNum == (ssize_t)tree->gtField.gtFldHnd)
2185             {
2186                 if (!defOnly)
2187                 {
2188                     return true;
2189                 }
2190             }
2191
2192             tree = tree->gtField.gtFldObj;
2193             if (tree)
2194             {
2195                 goto AGAIN;
2196             }
2197             break;
2198
2199         case GT_CALL:
2200
2201             if (tree->gtCall.gtCallObjp)
2202             {
2203                 if (gtHasRef(tree->gtCall.gtCallObjp, lclNum, defOnly))
2204                 {
2205                     return true;
2206                 }
2207             }
2208
2209             if (tree->gtCall.gtCallArgs)
2210             {
2211                 if (gtHasRef(tree->gtCall.gtCallArgs, lclNum, defOnly))
2212                 {
2213                     return true;
2214                 }
2215             }
2216
2217             if (tree->gtCall.gtCallLateArgs)
2218             {
2219                 if (gtHasRef(tree->gtCall.gtCallLateArgs, lclNum, defOnly))
2220                 {
2221                     return true;
2222                 }
2223             }
2224
2225             if (tree->gtCall.gtControlExpr)
2226             {
2227                 if (gtHasRef(tree->gtCall.gtControlExpr, lclNum, defOnly))
2228                 {
2229                     return true;
2230                 }
2231             }
2232
2233             if (tree->gtCall.gtCallType == CT_INDIRECT)
2234             {
2235                 // pinvoke-calli cookie is a constant, or constant indirection
2236                 assert(tree->gtCall.gtCallCookie == nullptr || tree->gtCall.gtCallCookie->gtOper == GT_CNS_INT ||
2237                        tree->gtCall.gtCallCookie->gtOper == GT_IND);
2238
2239                 tree = tree->gtCall.gtCallAddr;
2240             }
2241             else
2242             {
2243                 tree = nullptr;
2244             }
2245
2246             if (tree)
2247             {
2248                 goto AGAIN;
2249             }
2250
2251             break;
2252
2253         case GT_ARR_ELEM:
2254             if (gtHasRef(tree->gtArrElem.gtArrObj, lclNum, defOnly))
2255             {
2256                 return true;
2257             }
2258
2259             unsigned dim;
2260             for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
2261             {
2262                 if (gtHasRef(tree->gtArrElem.gtArrInds[dim], lclNum, defOnly))
2263                 {
2264                     return true;
2265                 }
2266             }
2267
2268             break;
2269
2270         case GT_ARR_OFFSET:
2271             if (gtHasRef(tree->gtArrOffs.gtOffset, lclNum, defOnly) ||
2272                 gtHasRef(tree->gtArrOffs.gtIndex, lclNum, defOnly) ||
2273                 gtHasRef(tree->gtArrOffs.gtArrObj, lclNum, defOnly))
2274             {
2275                 return true;
2276             }
2277             break;
2278
2279         case GT_CMPXCHG:
2280             if (gtHasRef(tree->gtCmpXchg.gtOpLocation, lclNum, defOnly))
2281             {
2282                 return true;
2283             }
2284             if (gtHasRef(tree->gtCmpXchg.gtOpValue, lclNum, defOnly))
2285             {
2286                 return true;
2287             }
2288             if (gtHasRef(tree->gtCmpXchg.gtOpComparand, lclNum, defOnly))
2289             {
2290                 return true;
2291             }
2292             break;
2293
2294         case GT_ARR_BOUNDS_CHECK:
2295 #ifdef FEATURE_SIMD
2296         case GT_SIMD_CHK:
2297 #endif // FEATURE_SIMD
2298             if (gtHasRef(tree->gtBoundsChk.gtArrLen, lclNum, defOnly))
2299             {
2300                 return true;
2301             }
2302             if (gtHasRef(tree->gtBoundsChk.gtIndex, lclNum, defOnly))
2303             {
2304                 return true;
2305             }
2306             break;
2307
2308         default:
2309 #ifdef DEBUG
2310             gtDispTree(tree);
2311 #endif
2312             assert(!"unexpected operator");
2313     }
2314
2315     return false;
2316 }
2317
2318 struct AddrTakenDsc
2319 {
2320     Compiler* comp;
2321     bool      hasAddrTakenLcl;
2322 };
2323
2324 /* static */
2325 Compiler::fgWalkResult Compiler::gtHasLocalsWithAddrOpCB(GenTreePtr* pTree, fgWalkData* data)
2326 {
2327     GenTreePtr tree = *pTree;
2328     Compiler*  comp = data->compiler;
2329
2330     if (tree->gtOper == GT_LCL_VAR)
2331     {
2332         unsigned   lclNum = tree->gtLclVarCommon.gtLclNum;
2333         LclVarDsc* varDsc = &comp->lvaTable[lclNum];
2334
2335         if (varDsc->lvHasLdAddrOp || varDsc->lvAddrExposed)
2336         {
2337             ((AddrTakenDsc*)data->pCallbackData)->hasAddrTakenLcl = true;
2338             return WALK_ABORT;
2339         }
2340     }
2341
2342     return WALK_CONTINUE;
2343 }
2344
2345 /*****************************************************************************
2346  *
2347  *  Return true if this tree contains locals with lvHasLdAddrOp or lvAddrExposed
2348  *  flag(s) set.
2349  */
2350
2351 bool Compiler::gtHasLocalsWithAddrOp(GenTreePtr tree)
2352 {
2353     AddrTakenDsc desc;
2354
2355     desc.comp            = this;
2356     desc.hasAddrTakenLcl = false;
2357
2358     fgWalkTreePre(&tree, gtHasLocalsWithAddrOpCB, &desc);
2359
2360     return desc.hasAddrTakenLcl;
2361 }
2362
2363 /*****************************************************************************
2364  *
2365  *  Helper used to compute hash values for trees.
2366  */
2367
2368 inline unsigned genTreeHashAdd(unsigned old, unsigned add)
2369 {
2370     return (old + old / 2) ^ add;
2371 }
2372
2373 inline unsigned genTreeHashAdd(unsigned old, void* add)
2374 {
2375     return genTreeHashAdd(old, (unsigned)(size_t)add);
2376 }
2377
2378 inline unsigned genTreeHashAdd(unsigned old, unsigned add1, unsigned add2)
2379 {
2380     return (old + old / 2) ^ add1 ^ add2;
2381 }
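// A property of the combiners above worth noting: the two-argument form is order
// sensitive when chained, while the three-argument form XORs both values into the
// same mixed state, so genTreeHashAdd(h, a, b) == genTreeHashAdd(h, b, a).
// gtHashValue below relies on this to give commutative operators the same hash
// regardless of operand order.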
2382
2383 /*****************************************************************************
2384  *
2385  *  Given an arbitrary expression tree, compute a hash value for it.
2386  */
2387
2388 unsigned Compiler::gtHashValue(GenTree* tree)
2389 {
2390     genTreeOps oper;
2391     unsigned   kind;
2392
2393     unsigned hash = 0;
2394
2395     GenTreePtr temp;
2396
2397 AGAIN:
2398     assert(tree);
2399     assert(tree->gtOper != GT_STMT);
2400
2401     /* Figure out what kind of a node we have */
2402
2403     oper = tree->OperGet();
2404     kind = tree->OperKind();
2405
2406     /* Include the operator value in the hash */
2407
2408     hash = genTreeHashAdd(hash, oper);
2409
2410     /* Is this a constant or leaf node? */
2411
2412     if (kind & (GTK_CONST | GTK_LEAF))
2413     {
2414         size_t add;
2415
2416         switch (oper)
2417         {
2418             case GT_LCL_VAR:
2419                 add = tree->gtLclVar.gtLclNum;
2420                 break;
2421             case GT_LCL_FLD:
2422                 hash = genTreeHashAdd(hash, tree->gtLclFld.gtLclNum);
2423                 add  = tree->gtLclFld.gtLclOffs;
2424                 break;
2425
2426             case GT_CNS_INT:
2427                 add = (int)tree->gtIntCon.gtIconVal;
2428                 break;
2429             case GT_CNS_LNG:
2430                 add = (int)tree->gtLngCon.gtLconVal;
2431                 break;
2432             case GT_CNS_DBL:
2433                 add = (int)tree->gtDblCon.gtDconVal;
2434                 break;
2435             case GT_CNS_STR:
2436                 add = (int)tree->gtStrCon.gtSconCPX;
2437                 break;
2438
2439             case GT_JMP:
2440                 add = tree->gtVal.gtVal1;
2441                 break;
2442
2443             default:
2444                 add = 0;
2445                 break;
2446         }
2447
2448         // narrowing cast, but for hashing.
2449         hash = genTreeHashAdd(hash, (unsigned)add);
2450         goto DONE;
2451     }
2452
2453     /* Is it a 'simple' unary/binary operator? */
2454
2455     GenTreePtr op1;
2456
2457     if (kind & GTK_UNOP)
2458     {
2459         op1 = tree->gtOp.gtOp1;
2460         /* Special case: no sub-operand at all */
2461
2462         if (GenTree::IsExOp(kind))
2463         {
2464             // ExOp operators extend operators with extra, non-GenTreePtr members.  In many cases,
2465             // these should be included in the hash code.
2466             switch (oper)
2467             {
2468                 case GT_ARR_LENGTH:
2469                     hash += tree->gtArrLen.ArrLenOffset();
2470                     break;
2471                 case GT_CAST:
2472                     hash ^= tree->gtCast.gtCastType;
2473                     break;
2474                 case GT_OBJ:
2475                     hash ^= static_cast<unsigned>(reinterpret_cast<uintptr_t>(tree->gtObj.gtClass));
2476                     break;
2477                 case GT_INDEX:
2478                     hash += tree->gtIndex.gtIndElemSize;
2479                     break;
2480                 case GT_ALLOCOBJ:
2481                     hash = genTreeHashAdd(hash, static_cast<unsigned>(
2482                                                     reinterpret_cast<uintptr_t>(tree->gtAllocObj.gtAllocObjClsHnd)));
2483                     hash = genTreeHashAdd(hash, tree->gtAllocObj.gtNewHelper);
2484                     break;
2485
2486                 // For the ones below no extra argument matters for comparison.
2487                 case GT_BOX:
2488                     break;
2489
2490                 default:
2491                     assert(!"unexpected unary ExOp operator");
2492             }
2493         }
2494
2495         if (!op1)
2496         {
2497             goto DONE;
2498         }
2499
2500         tree = op1;
2501         goto AGAIN;
2502     }
2503
2504     if (kind & GTK_BINOP)
2505     {
2506         if (GenTree::IsExOp(kind))
2507         {
2508             // ExOp operators extend operators with extra, non-GenTreePtr members.  In many cases,
2509             // these should be included in the hash code.
2510             switch (oper)
2511             {
2512                 case GT_INTRINSIC:
2513                     hash += tree->gtIntrinsic.gtIntrinsicId;
2514                     break;
2515                 case GT_LEA:
2516                     hash += (tree->gtAddrMode.gtOffset << 3) + tree->gtAddrMode.gtScale;
2517                     break;
2518
2519                 // For the ones below no extra argument matters for comparison.
2520                 case GT_ARR_INDEX:
2521                 case GT_QMARK:
2522                 case GT_INDEX:
2523                     break;
2524
2525 #ifdef FEATURE_SIMD
2526                 case GT_SIMD:
2527                     hash += tree->gtSIMD.gtSIMDIntrinsicID;
2528                     hash += tree->gtSIMD.gtSIMDBaseType;
2529                     break;
2530 #endif // FEATURE_SIMD
2531
2532                 default:
2533                     assert(!"unexpected binary ExOp operator");
2534             }
2535         }
2536
2537         op1            = tree->gtOp.gtOp1;
2538         GenTreePtr op2 = tree->gtOp.gtOp2;
2539
2540         /* Is there a second sub-operand? */
2541
2542         if (!op2)
2543         {
2544             /* Special case: no sub-operands at all */
2545
2546             if (!op1)
2547             {
2548                 goto DONE;
2549             }
2550
2551             /* This is a unary operator */
2552
2553             tree = op1;
2554             goto AGAIN;
2555         }
2556
2557         /* This is a binary operator */
2558
2559         unsigned hsh1 = gtHashValue(op1);
2560
2561         /* Special case: addition of two values */
2562
2563         if (GenTree::OperIsCommutative(oper))
2564         {
2565             unsigned hsh2 = gtHashValue(op2);
2566
2567             /* Produce a hash that allows swapping the operands */
2568
2569             hash = genTreeHashAdd(hash, hsh1, hsh2);
2570             goto DONE;
2571         }
2572
2573         /* Add op1's hash to the running value and continue with op2 */
2574
2575         hash = genTreeHashAdd(hash, hsh1);
2576
2577         tree = op2;
2578         goto AGAIN;
2579     }
2580
2581     /* See what kind of a special operator we have here */
2582     switch (tree->gtOper)
2583     {
2584         case GT_FIELD:
2585             if (tree->gtField.gtFldObj)
2586             {
2587                 temp = tree->gtField.gtFldObj;
2588                 assert(temp);
2589                 hash = genTreeHashAdd(hash, gtHashValue(temp));
2590             }
2591             break;
2592
2593         case GT_STMT:
2594             temp = tree->gtStmt.gtStmtExpr;
2595             assert(temp);
2596             hash = genTreeHashAdd(hash, gtHashValue(temp));
2597             break;
2598
2599         case GT_ARR_ELEM:
2600
2601             hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrElem.gtArrObj));
2602
2603             unsigned dim;
2604             for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
2605             {
2606                 hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrElem.gtArrInds[dim]));
2607             }
2608
2609             break;
2610
2611         case GT_ARR_OFFSET:
2612             hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrOffs.gtOffset));
2613             hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrOffs.gtIndex));
2614             hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrOffs.gtArrObj));
2615             break;
2616
2617         case GT_CALL:
2618
2619             if (tree->gtCall.gtCallObjp && tree->gtCall.gtCallObjp->gtOper != GT_NOP)
2620             {
2621                 temp = tree->gtCall.gtCallObjp;
2622                 assert(temp);
2623                 hash = genTreeHashAdd(hash, gtHashValue(temp));
2624             }
2625
2626             if (tree->gtCall.gtCallArgs)
2627             {
2628                 temp = tree->gtCall.gtCallArgs;
2629                 assert(temp);
2630                 hash = genTreeHashAdd(hash, gtHashValue(temp));
2631             }
2632
2633             if (tree->gtCall.gtCallType == CT_INDIRECT)
2634             {
2635                 temp = tree->gtCall.gtCallAddr;
2636                 assert(temp);
2637                 hash = genTreeHashAdd(hash, gtHashValue(temp));
2638             }
2639             else
2640             {
2641                 hash = genTreeHashAdd(hash, tree->gtCall.gtCallMethHnd);
2642             }
2643
2644             if (tree->gtCall.gtCallLateArgs)
2645             {
2646                 temp = tree->gtCall.gtCallLateArgs;
2647                 assert(temp);
2648                 hash = genTreeHashAdd(hash, gtHashValue(temp));
2649             }
2650             break;
2651
2652         case GT_CMPXCHG:
2653             hash = genTreeHashAdd(hash, gtHashValue(tree->gtCmpXchg.gtOpLocation));
2654             hash = genTreeHashAdd(hash, gtHashValue(tree->gtCmpXchg.gtOpValue));
2655             hash = genTreeHashAdd(hash, gtHashValue(tree->gtCmpXchg.gtOpComparand));
2656             break;
2657
2658         case GT_ARR_BOUNDS_CHECK:
2659 #ifdef FEATURE_SIMD
2660         case GT_SIMD_CHK:
2661 #endif // FEATURE_SIMD
2662             hash = genTreeHashAdd(hash, gtHashValue(tree->gtBoundsChk.gtArrLen));
2663             hash = genTreeHashAdd(hash, gtHashValue(tree->gtBoundsChk.gtIndex));
2664             hash = genTreeHashAdd(hash, tree->gtBoundsChk.gtThrowKind);
2665             break;
2666
2667         default:
2668 #ifdef DEBUG
2669             gtDispTree(tree);
2670 #endif
2671             assert(!"unexpected operator");
2672             break;
2673     }
2674
2675 DONE:
2676
2677     return hash;
2678 }
2679
2680 /*****************************************************************************
2681  *
2682  *  Given an arbitrary expression tree, attempts to find the set of all local variables
2683  *  referenced by the tree, and return them as "*result".
2684  *  If "findPtr" is null, this is a tracked variable set;
2685  *  if it is non-null, this is an "all var set."
2686  *  The "*result" value is valid only if the call returns "true."  It may return "false"
2687  *  for several reasons:
2688  *     If "findPtr" is NULL, and the expression contains an untracked variable.
2689  *     If "findPtr" is non-NULL, and the expression contains a variable that can't be represented
2690  *        in an "all var set."
2691  *     If the expression accesses address-exposed variables.
2692  *
2693  *  If there are any indirections or global refs in the expression, the
2694  *  "*refsPtr" argument will be assigned the appropriate bit set based on
2695  *  the 'varRefKinds' type.
2696  *  It won't be assigned anything when there are no indirections or global
2697  *  references, though, so this value should be initialized before the call.
2698  *  If we encounter an expression that is equal to *findPtr we set *findPtr
2699  *  to NULL.
2700  */
2701 bool Compiler::lvaLclVarRefs(GenTreePtr tree, GenTreePtr* findPtr, varRefKinds* refsPtr, void* result)
2702 {
2703     genTreeOps   oper;
2704     unsigned     kind;
2705     varRefKinds  refs = VR_NONE;
2706     ALLVARSET_TP ALLVARSET_INIT_NOCOPY(allVars, AllVarSetOps::UninitVal());
2707     VARSET_TP    VARSET_INIT_NOCOPY(trkdVars, VarSetOps::UninitVal());
2708     if (findPtr)
2709     {
2710         AllVarSetOps::AssignNoCopy(this, allVars, AllVarSetOps::MakeEmpty(this));
2711     }
2712     else
2713     {
2714         VarSetOps::AssignNoCopy(this, trkdVars, VarSetOps::MakeEmpty(this));
2715     }
2716
2717 AGAIN:
2718
2719     assert(tree);
2720     assert(tree->gtOper != GT_STMT);
2721
2722     /* Remember whether we've come across the expression we're looking for */
2723
2724     if (findPtr && *findPtr == tree)
2725     {
2726         *findPtr = nullptr;
2727     }
2728
2729     /* Figure out what kind of a node we have */
2730
2731     oper = tree->OperGet();
2732     kind = tree->OperKind();
2733
2734     /* Is this a constant or leaf node? */
2735
2736     if (kind & (GTK_CONST | GTK_LEAF))
2737     {
2738         if (oper == GT_LCL_VAR)
2739         {
2740             unsigned lclNum = tree->gtLclVarCommon.gtLclNum;
2741
2742             /* Should we use the variable table? */
2743
2744             if (findPtr)
2745             {
2746                 if (lclNum >= lclMAX_ALLSET_TRACKED)
2747                 {
2748                     return false;
2749                 }
2750
2751                 AllVarSetOps::AddElemD(this, allVars, lclNum);
2752             }
2753             else
2754             {
2755                 assert(lclNum < lvaCount);
2756                 LclVarDsc* varDsc = lvaTable + lclNum;
2757
2758                 if (varDsc->lvTracked == false)
2759                 {
2760                     return false;
2761                 }
2762
2763                 // Don't deal with expressions with address-exposed variables.
2764                 if (varDsc->lvAddrExposed)
2765                 {
2766                     return false;
2767                 }
2768
2769                 VarSetOps::AddElemD(this, trkdVars, varDsc->lvVarIndex);
2770             }
2771         }
2772         else if (oper == GT_LCL_FLD)
2773         {
2774             /* We can't track every field of every var. Moreover, indirections
2775                may access different parts of the var as different (but
2776                overlapping) fields. So just treat them as indirect accesses */
2777
2778             if (varTypeIsGC(tree->TypeGet()))
2779             {
2780                 refs = VR_IND_REF;
2781             }
2782             else
2783             {
2784                 refs = VR_IND_SCL;
2785             }
2786         }
2787         else if (oper == GT_CLS_VAR)
2788         {
2789             refs = VR_GLB_VAR;
2790         }
2791
2792         if (refs != VR_NONE)
2793         {
2794             /* Write it back to callers parameter using an 'or' */
2795             *refsPtr = varRefKinds((*refsPtr) | refs);
2796         }
2797         lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
2798         return true;
2799     }
2800
2801     /* Is it a 'simple' unary/binary operator? */
2802
2803     if (kind & GTK_SMPOP)
2804     {
2805         if (oper == GT_IND)
2806         {
2807             assert(tree->gtOp.gtOp2 == nullptr);
2808
2809             /* Set the proper indirection bit */
2810
2811             if ((tree->gtFlags & GTF_IND_INVARIANT) == 0)
2812             {
2813                 if (varTypeIsGC(tree->TypeGet()))
2814                 {
2815                     refs = VR_IND_REF;
2816                 }
2817                 else
2818                 {
2819                     refs = VR_IND_SCL;
2820                 }
2821
2822                 // If the flag GTF_IND_TGTANYWHERE is set this indirection
2823                 // could also point at a global variable
2824
2825                 if (tree->gtFlags & GTF_IND_TGTANYWHERE)
2826                 {
2827                     refs = varRefKinds(((int)refs) | ((int)VR_GLB_VAR));
2828                 }
2829             }
2830
2831             /* Write it back to callers parameter using an 'or' */
2832             *refsPtr = varRefKinds((*refsPtr) | refs);
2833
2834             // For IL volatile memory accesses we mark the GT_IND node
2835             // with a GTF_DONT_CSE flag.
2836             //
2837             // This flag is also set for the left hand side of an assignment.
2838             //
2839             // If this flag is set then we return false
2840             //
2841             if (tree->gtFlags & GTF_DONT_CSE)
2842             {
2843                 return false;
2844             }
2845         }
2846
2847         if (tree->gtGetOp2())
2848         {
2849             /* It's a binary operator */
2850             if (!lvaLclVarRefsAccum(tree->gtOp.gtOp1, findPtr, refsPtr, &allVars, &trkdVars))
2851             {
2852                 return false;
2853             }
2854             // Otherwise...
2855             tree = tree->gtOp.gtOp2;
2856             assert(tree);
2857             goto AGAIN;
2858         }
2859         else
2860         {
2861             /* It's a unary (or nilary) operator */
2862
2863             tree = tree->gtOp.gtOp1;
2864             if (tree)
2865             {
2866                 goto AGAIN;
2867             }
2868
2869             lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
2870             return true;
2871         }
2872     }
2873
2874     switch (oper)
2875     {
2876         case GT_ARR_ELEM:
2877             if (!lvaLclVarRefsAccum(tree->gtArrElem.gtArrObj, findPtr, refsPtr, &allVars, &trkdVars))
2878             {
2879                 return false;
2880             }
2881
2882             unsigned dim;
2883             for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
2884             {
2885                 VARSET_TP VARSET_INIT_NOCOPY(tmpVs, VarSetOps::UninitVal());
2886                 if (!lvaLclVarRefsAccum(tree->gtArrElem.gtArrInds[dim], findPtr, refsPtr, &allVars, &trkdVars))
2887                 {
2888                     return false;
2889                 }
2890             }
2891             lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
2892             return true;
2893
2894         case GT_ARR_OFFSET:
2895             if (!lvaLclVarRefsAccum(tree->gtArrOffs.gtOffset, findPtr, refsPtr, &allVars, &trkdVars))
2896             {
2897                 return false;
2898             }
2899             // Otherwise...
2900             if (!lvaLclVarRefsAccum(tree->gtArrOffs.gtIndex, findPtr, refsPtr, &allVars, &trkdVars))
2901             {
2902                 return false;
2903             }
2904             // Otherwise...
2905             if (!lvaLclVarRefsAccum(tree->gtArrOffs.gtArrObj, findPtr, refsPtr, &allVars, &trkdVars))
2906             {
2907                 return false;
2908             }
2909             // Otherwise...
2910             lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
2911             return true;
2912
2913         case GT_ARR_BOUNDS_CHECK:
2914 #ifdef FEATURE_SIMD
2915         case GT_SIMD_CHK:
2916 #endif // FEATURE_SIMD
2917         {
2918             if (!lvaLclVarRefsAccum(tree->gtBoundsChk.gtArrLen, findPtr, refsPtr, &allVars, &trkdVars))
2919             {
2920                 return false;
2921             }
2922             // Otherwise...
2923             if (!lvaLclVarRefsAccum(tree->gtBoundsChk.gtIndex, findPtr, refsPtr, &allVars, &trkdVars))
2924             {
2925                 return false;
2926             }
2927             // Otherwise...
2928             lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
2929             return true;
2930         }
2931
2932         case GT_CALL:
2933             /* Allow calls to the Shared Static helper */
2934             if (IsSharedStaticHelper(tree))
2935             {
2936                 *refsPtr = varRefKinds((*refsPtr) | VR_INVARIANT);
2937                 lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
2938                 return true;
2939             }
2940             break;
2941         default:
2942             break;
2943
2944     } // end switch (oper)
2945
2946     return false;
2947 }
2948
2949 bool Compiler::lvaLclVarRefsAccum(
2950     GenTreePtr tree, GenTreePtr* findPtr, varRefKinds* refsPtr, ALLVARSET_TP* allVars, VARSET_TP* trkdVars)
2951 {
2952     if (findPtr)
2953     {
2954         ALLVARSET_TP ALLVARSET_INIT_NOCOPY(tmpVs, AllVarSetOps::UninitVal());
2955         if (!lvaLclVarRefs(tree, findPtr, refsPtr, &tmpVs))
2956         {
2957             return false;
2958         }
2959         // Otherwise...
2960         AllVarSetOps::UnionD(this, *allVars, tmpVs);
2961     }
2962     else
2963     {
2964         VARSET_TP VARSET_INIT_NOCOPY(tmpVs, VarSetOps::UninitVal());
2965         if (!lvaLclVarRefs(tree, findPtr, refsPtr, &tmpVs))
2966         {
2967             return false;
2968         }
2969         // Otherwise...
2970         VarSetOps::UnionD(this, *trkdVars, tmpVs);
2971     }
2972     return true;
2973 }
2974
2975 void Compiler::lvaLclVarRefsAccumIntoRes(GenTreePtr*         findPtr,
2976                                          void*               result,
2977                                          ALLVARSET_VALARG_TP allVars,
2978                                          VARSET_VALARG_TP    trkdVars)
2979 {
2980     if (findPtr)
2981     {
2982         ALLVARSET_TP* avsPtr = (ALLVARSET_TP*)result;
2983         AllVarSetOps::AssignNoCopy(this, (*avsPtr), allVars);
2984     }
2985     else
2986     {
2987         VARSET_TP* vsPtr = (VARSET_TP*)result;
2988         VarSetOps::AssignNoCopy(this, (*vsPtr), trkdVars);
2989     }
2990 }
2991
2992 /*****************************************************************************
2993  *
2994  *  Return a relational operator that is the reverse of the given one.
2995  */
2996
2997 /* static */
2998 genTreeOps GenTree::ReverseRelop(genTreeOps relop)
2999 {
3000     static const genTreeOps reverseOps[] = {
3001         GT_NE, // GT_EQ
3002         GT_EQ, // GT_NE
3003         GT_GE, // GT_LT
3004         GT_GT, // GT_LE
3005         GT_LT, // GT_GE
3006         GT_LE, // GT_GT
3007     };
3008
3009     assert(reverseOps[GT_EQ - GT_EQ] == GT_NE);
3010     assert(reverseOps[GT_NE - GT_EQ] == GT_EQ);
3011
3012     assert(reverseOps[GT_LT - GT_EQ] == GT_GE);
3013     assert(reverseOps[GT_LE - GT_EQ] == GT_GT);
3014     assert(reverseOps[GT_GE - GT_EQ] == GT_LT);
3015     assert(reverseOps[GT_GT - GT_EQ] == GT_LE);
3016
3017     assert(OperIsCompare(relop));
3018     assert(relop >= GT_EQ && (unsigned)(relop - GT_EQ) < sizeof(reverseOps));
3019
3020     return reverseOps[relop - GT_EQ];
3021 }
3022
3023 /*****************************************************************************
3024  *
3025  *  Return a relational operator that will work for swapped operands.
3026  */
3027
3028 /* static */
3029 genTreeOps GenTree::SwapRelop(genTreeOps relop)
3030 {
3031     static const genTreeOps swapOps[] = {
3032         GT_EQ, // GT_EQ
3033         GT_NE, // GT_NE
3034         GT_GT, // GT_LT
3035         GT_GE, // GT_LE
3036         GT_LE, // GT_GE
3037         GT_LT, // GT_GT
3038     };
3039
3040     assert(swapOps[GT_EQ - GT_EQ] == GT_EQ);
3041     assert(swapOps[GT_NE - GT_EQ] == GT_NE);
3042
3043     assert(swapOps[GT_LT - GT_EQ] == GT_GT);
3044     assert(swapOps[GT_LE - GT_EQ] == GT_GE);
3045     assert(swapOps[GT_GE - GT_EQ] == GT_LE);
3046     assert(swapOps[GT_GT - GT_EQ] == GT_LT);
3047
3048     assert(OperIsCompare(relop));
3049     assert(relop >= GT_EQ && (unsigned)(relop - GT_EQ) < sizeof(swapOps));
3050
3051     return swapOps[relop - GT_EQ];
3052 }
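// Worked example for the two tables above, using "a < b":
//    ReverseRelop(GT_LT) == GT_GE   // logical negation:  !(a < b)  <==>  a >= b
//    SwapRelop(GT_LT)    == GT_GT   // operand swap:       a < b    <==>  b > a
// (For floating-point compares, gtReverseCond below also flips GTF_RELOP_NAN_UN so
// that NaN/unordered semantics stay correct.)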
3053
3054 /*****************************************************************************
3055  *
3056  *  Reverse the meaning of the given test condition.
3057  */
3058
3059 GenTreePtr Compiler::gtReverseCond(GenTree* tree)
3060 {
3061     if (tree->OperIsCompare())
3062     {
3063         tree->SetOper(GenTree::ReverseRelop(tree->OperGet()));
3064
3065         // Flip the GTF_RELOP_NAN_UN bit
3066         //     a ord b   === (a != NaN && b != NaN)
3067         //     a unord b === (a == NaN || b == NaN)
3068         // => !(a ord b) === (a unord b)
3069         if (varTypeIsFloating(tree->gtOp.gtOp1->TypeGet()))
3070         {
3071             tree->gtFlags ^= GTF_RELOP_NAN_UN;
3072         }
3073     }
3074     else
3075     {
3076         tree = gtNewOperNode(GT_NOT, TYP_INT, tree);
3077     }
3078
3079     return tree;
3080 }
3081
3082 /*****************************************************************************/
3083
3084 #ifdef DEBUG
3085
3086 bool GenTree::gtIsValid64RsltMul()
3087 {
3088     if ((gtOper != GT_MUL) || !(gtFlags & GTF_MUL_64RSLT))
3089     {
3090         return false;
3091     }
3092
3093     GenTreePtr op1 = gtOp.gtOp1;
3094     GenTreePtr op2 = gtOp.gtOp2;
3095
3096     if (TypeGet() != TYP_LONG || op1->TypeGet() != TYP_LONG || op2->TypeGet() != TYP_LONG)
3097     {
3098         return false;
3099     }
3100
3101     if (gtOverflow())
3102     {
3103         return false;
3104     }
3105
3106     // op1 has to be conv.i8(i4Expr)
3107     if ((op1->gtOper != GT_CAST) || (genActualType(op1->CastFromType()) != TYP_INT))
3108     {
3109         return false;
3110     }
3111
3112     // op2 has to be conv.i8(i4Expr)
3113     if ((op2->gtOper != GT_CAST) || (genActualType(op2->CastFromType()) != TYP_INT))
3114     {
3115         return false;
3116     }
3117
3118     // The signedness of both casts must be the same
3119     if (((op1->gtFlags & GTF_UNSIGNED) != 0) != ((op2->gtFlags & GTF_UNSIGNED) != 0))
3120     {
3121         return false;
3122     }
3123
3124     // Do unsigned mul iff both the casts are unsigned
3125     if (((op1->gtFlags & GTF_UNSIGNED) != 0) != ((gtFlags & GTF_UNSIGNED) != 0))
3126     {
3127         return false;
3128     }
3129
3130     return true;
3131 }
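// The tree shape accepted by the checks above is, schematically:
//
//    GT_MUL (TYP_LONG, GTF_MUL_64RSLT set, no overflow check)
//       +-- GT_CAST long <- int (i4Expr)
//       +-- GT_CAST long <- int (i4Expr)
//
// where both casts have the same signedness and the multiply is marked unsigned
// iff the casts are unsigned.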
3132
3133 #endif // DEBUG
3134
3135 //------------------------------------------------------------------------------
3136 // gtSetListOrder: Figure out the evaluation order for a list of values.
3137 //
3138 // Arguments:
3139 //    list           - List to figure out the evaluation order for
3140 //    isListCallArgs - True iff the list is a list of call arguments
3141 //    callArgsInRegs - True iff the list is a list of call arguments and they are passed in registers
3142 //
3143 // Return Value:
3144 //    The evaluation order "level" computed for the list; this is the same kind
3145 //    of level value that gtSetEvalOrder computes for a tree.
3146
3147 unsigned Compiler::gtSetListOrder(GenTree* list, bool isListCallArgs, bool callArgsInRegs)
3148 {
3149     assert((list != nullptr) && list->IsList());
3150     assert(!callArgsInRegs || isListCallArgs);
3151
3152     ArrayStack<GenTree*> listNodes(this);
3153
3154     do
3155     {
3156         listNodes.Push(list);
3157         list = list->gtOp.gtOp2;
3158     } while ((list != nullptr) && (list->IsList()));
3159
3160     unsigned  nxtlvl = (list == nullptr) ? 0 : gtSetEvalOrder(list);
3161     while (listNodes.Height() > 0)
3162     {
3163 #if FEATURE_STACK_FP_X87
3164         /* Save the current FP stack level since an argument list
3165         * will implicitly pop the FP stack when pushing the argument */
3166         unsigned        FPlvlSave = codeGen->genGetFPstkLevel();
3167 #endif // FEATURE_STACK_FP_X87
3168
3169         list = listNodes.Pop();
3170         assert(list && list->IsList());
3171         GenTreePtr      next = list->gtOp.gtOp2;
3172
3173         unsigned        level = 0;
3174         unsigned        ftreg = 0;
3175
3176         // TODO: Do we have to compute costs differently for argument lists and
3177         // all other lists?
3178         // https://github.com/dotnet/coreclr/issues/7095
3179         unsigned        costSz = (isListCallArgs || (next == nullptr)) ? 0 : 1;
3180         unsigned        costEx = (isListCallArgs || (next == nullptr)) ? 0 : 1;
3181
3182         if (next != nullptr)
3183         {
3184             ftreg |= next->gtRsvdRegs;
3185             if (isListCallArgs)
3186             {
3187                 if (level < nxtlvl)
3188                 {
3189                     level = nxtlvl;
3190                 }
3191             }
3192             costEx += next->gtCostEx;
3193             costSz += next->gtCostSz;
3194         }
3195
3196         GenTreePtr      op1 = list->gtOp.gtOp1;
3197         unsigned        lvl = gtSetEvalOrder(op1);
3198
3199 #if FEATURE_STACK_FP_X87
3200         // restore the FP level
3201         codeGen->genResetFPstkLevel(FPlvlSave);
3202 #endif // FEATURE_STACK_FP_X87
3203
3204         list->gtRsvdRegs = (regMaskSmall)(ftreg | op1->gtRsvdRegs);
3205
3206         // Swap the level counts
3207         if (list->gtFlags & GTF_REVERSE_OPS)
3208         {
3209             unsigned tmpl;
3210
3211             tmpl = lvl;
3212             lvl = nxtlvl;
3213             nxtlvl = tmpl;
3214         }
3215
3216         // TODO: Do we have to compute levels differently for argument lists and
3217         // all other lists?
3218         // https://github.com/dotnet/coreclr/issues/7095
3219         if (isListCallArgs)
3220         {
3221             if (level < lvl)
3222             {
3223                 level = lvl;
3224             }
3225         }
3226         else
3227         {
3228             if (lvl < 1)
3229             {
3230                 level = nxtlvl;
3231             }
3232             else if (lvl == nxtlvl)
3233             {
3234                 level = lvl + 1;
3235             }
3236             else
3237             {
3238                 level = lvl;
3239             }
3240         }
3241
3242         if (op1->gtCostEx != 0)
3243         {
3244             costEx += op1->gtCostEx;
3245             costEx += (callArgsInRegs || !isListCallArgs) ? 0 : IND_COST_EX;
3246         }
3247
3248         if (op1->gtCostSz != 0)
3249         {
3250             costSz += op1->gtCostSz;
3251 #ifdef _TARGET_XARCH_
3252             if (callArgsInRegs)                // push is smaller than mov to reg
3253 #endif
3254             {
3255                 costSz += 1;
3256             }
3257         }
3258
3259         list->SetCosts(costEx, costSz);
3260
3261         nxtlvl = level;
3262     }
3263
3264     return nxtlvl;
3265 }
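// A minimal sketch of the iteration pattern used above (illustrative only; 'stack',
// 'node', and 'process' are placeholders for the ArrayStack, the GT_LIST cursor, and
// the per-node costing done in the loop): instead of recursing once per GT_LIST link,
// the links are first pushed onto the stack and then handled from the tail of the
// list back to its head, so the depth of the native stack does not grow with the
// length of the list:
//
//    while ((node != nullptr) && node->IsList())
//    {
//        stack.Push(node);
//        node = node->gtOp.gtOp2;
//    }
//    while (stack.Height() > 0)
//    {
//        process(stack.Pop());
//    }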
3266
3267 /*****************************************************************************
3268  *
3269  *  This routine is a helper routine for gtSetEvalOrder() and is used to
3270  *  mark the interior address computation nodes with the GTF_ADDRMODE_NO_CSE flag
3271  *  which prevents them from being considered for CSE's.
3272  *
3273  *  Furthermore this routine is a factoring of the logic used to walk down
3274  *  the child nodes of a GT_IND tree, similar to optParseArrayRef().
3275  *
3276  *  Previously we had this logic repeated three times inside of gtSetEvalOrder().
3277  *  Here we combine those three repeats into this routine and use the
3278  *  bool constOnly to modify the behavior of this routine for the first call.
3279  *
3280  *  The object here is to mark all of the interior GT_ADD's and GT_NOP's
3281  *  with the GTF_ADDRMODE_NO_CSE flag and to set op1 and op2 to the terminal nodes
3282  *  which are later matched against 'adr' and 'idx'.
3283  *
3284  *  *pbHasRangeCheckBelow is set to false if we traverse a range check GT_NOP
3285  *  node in our walk. It remains unchanged otherwise.
3286  *
3287  *  TODO-Cleanup: It is essentially impossible to determine what this routine is
3288  *  supposed to do, or to write a reasonable specification comment for it that
3289  *  describes its intended behavior. There are obviously some very specific tree
3290  *  patterns that it expects to see, but those are not documented.
3291  *  It writes back to its op1WB and op2WB arguments and traverses down both the
3292  *  op1 and op2 trees, yet op2 is only related to op1 in the (!constOnly) case
3293  *  (which really seems like a bug); this makes the routine very confusing.
3294  */
3295
3296 void Compiler::gtWalkOp(GenTree** op1WB, GenTree** op2WB, GenTree* adr, bool constOnly)
3297 {
3298     GenTreePtr op1 = *op1WB;
3299     GenTreePtr op2 = *op2WB;
3300     GenTreePtr op1EffectiveVal;
3301
3302     if (op1->gtOper == GT_COMMA)
3303     {
3304         op1EffectiveVal = op1->gtEffectiveVal();
3305         if ((op1EffectiveVal->gtOper == GT_ADD) && (!op1EffectiveVal->gtOverflow()) &&
3306             (!constOnly || (op1EffectiveVal->gtOp.gtOp2->IsCnsIntOrI())))
3307         {
3308             op1 = op1EffectiveVal;
3309         }
3310     }
3311
3312     // Now we look for op1's with non-overflow GT_ADDs [of constants]
3313     while ((op1->gtOper == GT_ADD) && (!op1->gtOverflow()) && (!constOnly || (op1->gtOp.gtOp2->IsCnsIntOrI())))
3314     {
3315         // mark it with GTF_ADDRMODE_NO_CSE
3316         op1->gtFlags |= GTF_ADDRMODE_NO_CSE;
3317
3318         if (!constOnly)
3319         { // TODO-Cleanup: It seems bizarre that this is !constOnly
3320             op2 = op1->gtOp.gtOp2;
3321         }
3322         op1 = op1->gtOp.gtOp1;
3323
3324         // If op1 is a GT_NOP then swap op1 and op2.
3325         // (Why? Also, presumably op2 is not a GT_NOP in this case?)
3326         if (op1->gtOper == GT_NOP)
3327         {
3328             GenTreePtr tmp;
3329
3330             tmp = op1;
3331             op1 = op2;
3332             op2 = tmp;
3333         }
3334
3335         if (op1->gtOper == GT_COMMA)
3336         {
3337             op1EffectiveVal = op1->gtEffectiveVal();
3338             if ((op1EffectiveVal->gtOper == GT_ADD) && (!op1EffectiveVal->gtOverflow()) &&
3339                 (!constOnly || (op1EffectiveVal->gtOp.gtOp2->IsCnsIntOrI())))
3340             {
3341                 op1 = op1EffectiveVal;
3342             }
3343         }
3344
3345         if (!constOnly && ((op2 == adr) || (!op2->IsCnsIntOrI())))
3346         {
3347             break;
3348         }
3349     }
3350
3351     *op1WB = op1;
3352     *op2WB = op2;
3353 }
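
/*****************************************************************************
 * Illustrative example (editor's sketch, not part of the original comments):
 * for an address tree of the shape
 *
 *     ADD(ADD(lclVar V00, LSH(lclVar V01, cns 2)), cns 16)
 *
 * a call such as gtWalkOp(&addr, &op2, base, false) marks both GT_ADD nodes with
 * GTF_ADDRMODE_NO_CSE, walks down the op1 side, and leaves the base local
 * (lclVar V00) in *op1WB and the scaled-index subtree (the GT_LSH) in *op2WB;
 * the walk stops there because the new op2 is not a constant. The caller then
 * matches these against the 'base' and 'idx' values returned by genCreateAddrMode().
 */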
3354
3355 #ifdef DEBUG
3356 /*****************************************************************************
3357  * This is a workaround that helps implement an assert in gtSetEvalOrder(): the values
3358  * gtWalkOp() leaves in op1 and op2 should correspond with the values of adr, idx, mul, and cns
3359  * that are returned by genCreateAddrMode(). It's essentially impossible to determine
3360  * what gtWalkOp() *should* return for all possible trees. This simply loosens one assert
3361  * to handle the following case:
3362
3363          indir     int
3364                     const(h)  int    4 field
3365                  +         byref
3366                     lclVar    byref  V00 this               <-- op2
3367               comma     byref                           <-- adr (base)
3368                  indir     byte
3369                     lclVar    byref  V00 this
3370            +         byref
3371                  const     int    2                     <-- mul == 4
3372               <<        int                                 <-- op1
3373                  lclVar    int    V01 arg1              <-- idx
3374
3375  * Here, we are planning to generate the address mode [edx+4*eax], where eax = idx and edx = the GT_COMMA expression.
3376  * To check adr equivalence with op2, we need to walk down the GT_ADD tree just like gtWalkOp() does.
3377  */
3378 GenTreePtr Compiler::gtWalkOpEffectiveVal(GenTreePtr op)
3379 {
3380     for (;;)
3381     {
3382         if (op->gtOper == GT_COMMA)
3383         {
3384             GenTreePtr opEffectiveVal = op->gtEffectiveVal();
3385             if ((opEffectiveVal->gtOper == GT_ADD) && (!opEffectiveVal->gtOverflow()) &&
3386                 (opEffectiveVal->gtOp.gtOp2->IsCnsIntOrI()))
3387             {
3388                 op = opEffectiveVal;
3389             }
3390         }
3391
3392         if ((op->gtOper != GT_ADD) || op->gtOverflow() || !op->gtOp.gtOp2->IsCnsIntOrI())
3393         {
3394             break;
3395         }
3396
3397         op = op->gtOp.gtOp1;
3398     }
3399
3400     return op;
3401 }
3402 #endif // DEBUG
3403
3404 /*****************************************************************************
3405  *
3406  *  Given a tree, set the gtCostEx and gtCostSz fields which
3407  *  are used to measure the relative costs of the codegen of the tree
3408  *
3409  */
3410
3411 void Compiler::gtPrepareCost(GenTree* tree)
3412 {
3413 #if FEATURE_STACK_FP_X87
3414     codeGen->genResetFPstkLevel();
3415 #endif // FEATURE_STACK_FP_X87
3416     gtSetEvalOrder(tree);
3417 }
3418
3419 bool Compiler::gtIsLikelyRegVar(GenTree* tree)
3420 {
3421     if (tree->gtOper != GT_LCL_VAR)
3422     {
3423         return false;
3424     }
3425
3426     assert(tree->gtLclVar.gtLclNum < lvaTableCnt);
3427     LclVarDsc* varDsc = lvaTable + tree->gtLclVar.gtLclNum;
3428
3429     if (varDsc->lvDoNotEnregister)
3430     {
3431         return false;
3432     }
3433
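    // Heuristic (editor's note): the weighted ref count must be at least three
    // BB_UNITY_WEIGHT units -- roughly three uses in normally-weighted blocks --
    // before we consider the local likely to end up enregistered.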
3434     if (varDsc->lvRefCntWtd < (BB_UNITY_WEIGHT * 3))
3435     {
3436         return false;
3437     }
3438
3439 #ifdef _TARGET_X86_
3440     if (varTypeIsFloating(tree->TypeGet()))
3441         return false;
3442     if (varTypeIsLong(tree->TypeGet()))
3443         return false;
3444 #endif
3445
3446     return true;
3447 }
3448
3449 //------------------------------------------------------------------------
3450 // gtCanSwapOrder: Returns true iff the secondNode can be swapped with firstNode.
3451 //
3452 // Arguments:
3453 //    firstNode  - An operand of a tree that can have GTF_REVERSE_OPS set.
3454 //    secondNode - The other operand of the tree.
3455 //
3456 // Return Value:
3457 //    Returns a boolean indicating whether it is safe to reverse the execution
3458 //    order of the two trees, considering any exception, global effects, or
3459 //    ordering constraints.
3460 //
3461 bool Compiler::gtCanSwapOrder(GenTree* firstNode, GenTree* secondNode)
3462 {
3463     // The relative order of global / side effects can't be swapped.
3464
3465     bool canSwap = true;
3466
3467     if (optValnumCSE_phase)
3468     {
3469         canSwap = optCSE_canSwap(firstNode, secondNode);
3470     }
3471
3472     // We cannot swap in the presence of special side effects such as GT_CATCH_ARG.
3473
3474     if (canSwap && (firstNode->gtFlags & GTF_ORDER_SIDEEFF))
3475     {
3476         canSwap = false;
3477     }
3478
3479     // When strict side effect order is disabled we allow GTF_REVERSE_OPS to be set
3480     // when one or both sides contain a GTF_CALL or GTF_EXCEPT.
3481     // Currently only the C and C++ languages allow non-strict side effect order.
3482
3483     unsigned strictEffects = GTF_GLOB_EFFECT;
3484
3485     if (canSwap && (firstNode->gtFlags & strictEffects))
3486     {
3487         // op1 has side effects that can't be reordered.
3488         // Check for some special cases where we still may be able to swap.
3489
3490         if (secondNode->gtFlags & strictEffects)
3491         {
3492             // op2 also has non-reorderable side effects - can't swap.
3493             canSwap = false;
3494         }
3495         else
3496         {
3497             // No side effects in op2 - we can swap iff op1 has no way of modifying op2
3498             // (e.g. through byref assignments or calls), or op2 is a constant.
3499
3500             if (firstNode->gtFlags & strictEffects & GTF_PERSISTENT_SIDE_EFFECTS)
3501             {
3502                 // We have to be conservative - can swap iff op2 is constant.
3503                 if (!secondNode->OperIsConst())
3504                 {
3505                     canSwap = false;
3506                 }
3507             }
3508         }
3509     }
3510     return canSwap;
3511 }
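
// Illustrative walkthrough (editor's sketch, assuming the usual flag groupings in which
// GTF_CALL belongs to both GTF_GLOB_EFFECT and GTF_PERSISTENT_SIDE_EFFECTS, and that
// firstNode does not carry GTF_ORDER_SIDEEFF):
//   - firstNode is a helper call and secondNode is a CNS_INT: the call has persistent
//     side effects, but a constant cannot observe them, so gtCanSwapOrder returns true.
//   - firstNode is the same call and secondNode is a LCL_VAR: the local could be
//     modified by the call (e.g. through a byref), so gtCanSwapOrder returns false.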
3512
3513 /*****************************************************************************
3514  *
3515  *  Given a tree, figure out the order in which its sub-operands should be
3516  *  evaluated. If the second operand of a binary operator is more expensive
3517  *  than the first operand, then try to swap the operand trees. Updates the
3518  *  GTF_REVERSE_OPS bit if necessary in this case.
3519  *
3520  *  Returns the Sethi 'complexity' estimate for this tree (the higher
3521  *  the number, the greater the tree's resource requirements).
3522  *
3523  *  This function sets:
3524  *      1. gtCostEx to the execution complexity estimate
3525  *      2. gtCostSz to the code size estimate
3526  *      3. gtRsvdRegs to the set of fixed registers trashed by the tree
3527  *      4. gtFPlvl to the "floating point depth" value for the node, i.e. the max. number
3528  *         of operands the tree will push on the x87 (coprocessor) stack. Also sets
3529  *         genFPstkLevel, tmpDoubleSpillMax, and possibly gtFPstLvlRedo.
3530  *      5. Sometimes sets GTF_ADDRMODE_NO_CSE on nodes in the tree.
3531  *      6. DEBUG-only: clears GTF_DEBUG_NODE_MORPHED.
3532  */
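
/*
 *  Illustrative example (editor's sketch): for a tree ADD(ADD(a, b), c), where a, b
 *  and c are likely-enregistered locals (each at level 1), the inner ADD gets a
 *  Sethi level of 2 (its children have equal levels) and the outer ADD stays at
 *  level 2 (the max of 2 and 1), reflecting that two registers suffice to evaluate
 *  the whole expression.
 */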
3533
3534 #ifdef _PREFAST_
3535 #pragma warning(push)
3536 #pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
3537 #endif
3538 unsigned Compiler::gtSetEvalOrder(GenTree* tree)
3539 {
3540     assert(tree);
3541     assert(tree->gtOper != GT_STMT);
3542
3543 #ifdef DEBUG
3544     /* Clear the GTF_DEBUG_NODE_MORPHED flag as well */
3545     tree->gtDebugFlags &= ~GTF_DEBUG_NODE_MORPHED;
3546 #endif
3547
3548     /* Is this a FP value? */
3549
3550     bool     isflt = varTypeIsFloating(tree->TypeGet());
3551     unsigned FPlvlSave;
3552
3553     /* Figure out what kind of a node we have */
3554
3555     genTreeOps oper = tree->OperGet();
3556     unsigned   kind = tree->OperKind();
3557
3558     /* Assume no fixed registers will be trashed */
3559
3560     regMaskTP ftreg = RBM_NONE; // Set of registers that will be used by the subtree
3561     unsigned  level;
3562     int       costEx;
3563     int       costSz;
3564
3565     bool bRngChk;
3566
3567 #ifdef DEBUG
3568     costEx = -1;
3569     costSz = -1;
3570 #endif
3571
3572     /* Is this a constant or a leaf node? */
3573
3574     if (kind & (GTK_LEAF | GTK_CONST))
3575     {
3576         switch (oper)
3577         {
3578             bool iconNeedsReloc;
3579
3580 #ifdef _TARGET_ARM_
3581             case GT_CNS_LNG:
3582                 costSz = 9;
3583                 costEx = 4;
3584                 goto COMMON_CNS;
3585
3586             case GT_CNS_STR:
3587                 // Uses movw/movt
3588                 costSz = 7;
3589                 costEx = 3;
3590                 goto COMMON_CNS;
3591
3592             case GT_CNS_INT:
3593
3594                 // If the constant is a handle then it will need to have a relocation
3595                 //  applied to it.
3596                 // Any constant that requires a reloc must use the movw/movt sequence
3597                 //
3598                 iconNeedsReloc = opts.compReloc && tree->IsIconHandle() && !tree->IsIconHandle(GTF_ICON_FIELD_HDL);
3599
3600                 if (iconNeedsReloc || !codeGen->validImmForInstr(INS_mov, tree->gtIntCon.gtIconVal))
3601                 {
3602                     // Uses movw/movt
3603                     costSz = 7;
3604                     costEx = 3;
3605                 }
3606                 else if (((unsigned)tree->gtIntCon.gtIconVal) <= 0x00ff)
3607                 {
3608                     // mov  Rd, <const8>
3609                     costSz = 1;
3610                     costEx = 1;
3611                 }
3612                 else
3613                 {
3614                     // Uses movw/mvn
3615                     costSz = 3;
3616                     costEx = 1;
3617                 }
3618                 goto COMMON_CNS;
3619
3620 #elif defined _TARGET_XARCH_
3621
3622             case GT_CNS_LNG:
3623                 costSz = 10;
3624                 costEx = 3;
3625                 goto COMMON_CNS;
3626
3627             case GT_CNS_STR:
3628                 costSz = 4;
3629                 costEx = 1;
3630                 goto COMMON_CNS;
3631
3632             case GT_CNS_INT:
3633
3634                 // If the constant is a handle then it will need to have a relocation
3635                 //  applied to it.
3636                 // Any constant that requires a reloc must use a full-width immediate encoding
3637                 //
3638                 iconNeedsReloc = opts.compReloc && tree->IsIconHandle() && !tree->IsIconHandle(GTF_ICON_FIELD_HDL);
3639
3640                 if (!iconNeedsReloc && (((signed char)tree->gtIntCon.gtIconVal) == tree->gtIntCon.gtIconVal))
3641                 {
3642                     costSz = 1;
3643                     costEx = 1;
3644                 }
3645 #if defined(_TARGET_AMD64_)
3646                 else if (iconNeedsReloc || ((tree->gtIntCon.gtIconVal & 0xFFFFFFFF00000000LL) != 0))
3647                 {
3648                     costSz = 10;
3649                     costEx = 3;
3650                 }
3651 #endif // _TARGET_AMD64_
3652                 else
3653                 {
3654                     costSz = 4;
3655                     costEx = 1;
3656                 }
3657                 goto COMMON_CNS;
3658
3659 #elif defined(_TARGET_ARM64_)
3660             case GT_CNS_LNG:
3661             case GT_CNS_STR:
3662             case GT_CNS_INT:
3663                 // TODO-ARM64-NYI: Need cost estimates.
3664                 costSz = 1;
3665                 costEx = 1;
3666                 goto COMMON_CNS;
3667
3668 #else
3669             case GT_CNS_LNG:
3670             case GT_CNS_STR:
3671             case GT_CNS_INT:
3672 #error "Unknown _TARGET_"
3673 #endif
3674
3675             COMMON_CNS:
3676                 /*
3677                     Note that some code below depends on constants always getting
3678                     moved to be the second operand of a binary operator. This is
3679                     easily accomplished by giving constants a level of 0, which
3680                     we do on the next line. If you ever decide to change this, be
3681                     aware that unless you make other arrangements for integer
3682                     constants to be moved, stuff will break.
3683                  */
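                // Illustrative note (editor's sketch): in a tree such as ADD(lclVar, CNS_INT 5),
                // the constant's level of 0 means it can never look more complex than the other
                // operand, so the operand-swapping logic below will never move it into the op1
                // position.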
3684
3685                 level = 0;
3686                 break;
3687
3688             case GT_CNS_DBL:
3689                 level = 0;
3690                 /* We use fldz and fld1 to load 0.0 and 1.0, but all other  */
3691                 /* floating point constants are loaded using an indirection */
3692                 if ((*((__int64*)&(tree->gtDblCon.gtDconVal)) == 0) ||
3693                     (*((__int64*)&(tree->gtDblCon.gtDconVal)) == I64(0x3ff0000000000000)))
3694                 {
3695                     costEx = 1;
3696                     costSz = 1;
3697                 }
3698                 else
3699                 {
3700                     costEx = IND_COST_EX;
3701                     costSz = 4;
3702                 }
3703                 break;
3704
3705             case GT_LCL_VAR:
3706                 level = 1;
3707                 if (gtIsLikelyRegVar(tree))
3708                 {
3709                     costEx = 1;
3710                     costSz = 1;
3711                     /* Sign-extend and zero-extend are more expensive to load */
3712                     if (lvaTable[tree->gtLclVar.gtLclNum].lvNormalizeOnLoad())
3713                     {
3714                         costEx += 1;
3715                         costSz += 1;
3716                     }
3717                 }
3718                 else
3719                 {
3720                     costEx = IND_COST_EX;
3721                     costSz = 2;
3722                     /* Sign-extend and zero-extend are more expensive to load */
3723                     if (varTypeIsSmall(tree->TypeGet()))
3724                     {
3725                         costEx += 1;
3726                         costSz += 1;
3727                     }
3728                 }
3729 #if defined(_TARGET_AMD64_)
3730                 // increase costSz for floating point locals
3731                 if (isflt)
3732                 {
3733                     costSz += 1;
3734                     if (!gtIsLikelyRegVar(tree))
3735                     {
3736                         costSz += 1;
3737                     }
3738                 }
3739 #endif
3740 #if CPU_LONG_USES_REGPAIR
3741                 if (varTypeIsLong(tree->TypeGet()))
3742                 {
3743                     costEx *= 2; // Longs are twice as expensive
3744                     costSz *= 2;
3745                 }
3746 #endif
3747                 break;
3748
3749             case GT_CLS_VAR:
3750 #ifdef _TARGET_ARM_
3751                 // We generate movw/movt/ldr
3752                 level  = 1;
3753                 costEx = 3 + IND_COST_EX; // 6
3754                 costSz = 4 + 4 + 2;       // 10
3755                 break;
3756 #endif
3757             case GT_LCL_FLD:
3758                 level  = 1;
3759                 costEx = IND_COST_EX;
3760                 costSz = 4;
3761                 if (varTypeIsSmall(tree->TypeGet()))
3762                 {
3763                     costEx += 1;
3764                     costSz += 1;
3765                 }
3766                 break;
3767
3768             case GT_PHI_ARG:
3769             case GT_ARGPLACE:
3770                 level  = 0;
3771                 costEx = 0;
3772                 costSz = 0;
3773                 break;
3774
3775             default:
3776                 level  = 1;
3777                 costEx = 1;
3778                 costSz = 1;
3779                 break;
3780         }
3781 #if FEATURE_STACK_FP_X87
3782         if (isflt && (oper != GT_PHI_ARG))
3783         {
3784             codeGen->genIncrementFPstkLevel();
3785         }
3786 #endif // FEATURE_STACK_FP_X87
3787         goto DONE;
3788     }
3789
3790     /* Is it a 'simple' unary/binary operator? */
3791
3792     if (kind & GTK_SMPOP)
3793     {
3794         int      lvlb; // preference for op2
3795         unsigned lvl2; // scratch variable
3796
3797         GenTreePtr op1 = tree->gtOp.gtOp1;
3798         GenTreePtr op2 = tree->gtGetOp2();
3799
3800         costEx = 0;
3801         costSz = 0;
3802
3803         if (tree->OperIsAddrMode())
3804         {
3805             if (op1 == nullptr)
3806             {
3807                 op1 = op2;
3808                 op2 = nullptr;
3809             }
3810         }
3811
3812         /* Check for a nilary operator */
3813
3814         if (op1 == nullptr)
3815         {
3816             assert(op2 == nullptr);
3817
3818             level = 0;
3819
3820             goto DONE;
3821         }
3822
3823         /* Is this a unary operator? */
3824
3825         if (op2 == nullptr)
3826         {
3827             /* Process the operand of the operator */
3828
3829             /* Most Unary ops have costEx of 1 */
3830             costEx = 1;
3831             costSz = 1;
3832
3833             level = gtSetEvalOrder(op1);
3834             ftreg |= op1->gtRsvdRegs;
3835
3836             /* Special handling for some operators */
3837
3838             switch (oper)
3839             {
3840                 case GT_JTRUE:
3841                     costEx = 2;
3842                     costSz = 2;
3843                     break;
3844
3845                 case GT_SWITCH:
3846                     costEx = 10;
3847                     costSz = 5;
3848                     break;
3849
3850                 case GT_CAST:
3851 #if defined(_TARGET_ARM_)
3852                     costEx = 1;
3853                     costSz = 1;
3854                     if (isflt || varTypeIsFloating(op1->TypeGet()))
3855                     {
3856                         costEx = 3;
3857                         costSz = 4;
3858                     }
3859 #elif defined(_TARGET_ARM64_)
3860                     costEx = 1;
3861                     costSz = 2;
3862                     if (isflt || varTypeIsFloating(op1->TypeGet()))
3863                     {
3864                         costEx = 2;
3865                         costSz = 4;
3866                     }
3867 #elif defined(_TARGET_XARCH_)
3868                     costEx = 1;
3869                     costSz = 2;
3870
3871                     if (isflt || varTypeIsFloating(op1->TypeGet()))
3872                     {
3873                         /* cast involving floats always go through memory */
3874                         costEx = IND_COST_EX * 2;
3875                         costSz = 6;
3876
3877 #if FEATURE_STACK_FP_X87
3878                         if (isflt != varTypeIsFloating(op1->TypeGet()))
3879                         {
3880                             isflt ? codeGen->genIncrementFPstkLevel()  // Cast from int to float
3881                                   : codeGen->genDecrementFPstkLevel(); // Cast from float to int
3882                         }
3883 #endif // FEATURE_STACK_FP_X87
3884                     }
3885 #else
3886 #error "Unknown _TARGET_"
3887 #endif
3888
3889 #if CPU_LONG_USES_REGPAIR
3890                     if (varTypeIsLong(tree->TypeGet()))
3891                     {
3892                         if (varTypeIsUnsigned(tree->TypeGet()))
3893                         {
3894                             /* Cast to unsigned long */
3895                             costEx += 1;
3896                             costSz += 2;
3897                         }
3898                         else
3899                         {
3900                             /* Cast to signed long is slightly more costly */
3901                             costEx += 2;
3902                             costSz += 3;
3903                         }
3904                     }
3905 #endif // CPU_LONG_USES_REGPAIR
3906
3907                     /* Overflow casts are a lot more expensive */
3908                     if (tree->gtOverflow())
3909                     {
3910                         costEx += 6;
3911                         costSz += 6;
3912                     }
3913
3914                     break;
3915
3916                 case GT_LIST:
3917                 case GT_NOP:
3918                     costEx = 0;
3919                     costSz = 0;
3920                     break;
3921
3922                 case GT_INTRINSIC:
3923                     // GT_INTRINSIC intrinsics Sin, Cos, Sqrt, Abs ... have higher costs.
3924                     // TODO: tune these costs target specific as some of these are
3925                     // target intrinsics and would cost less to generate code.
3926                     switch (tree->gtIntrinsic.gtIntrinsicId)
3927                     {
3928                         default:
3929                             assert(!"missing case for gtIntrinsicId");
3930                             costEx = 12;
3931                             costSz = 12;
3932                             break;
3933
3934                         case CORINFO_INTRINSIC_Sin:
3935                         case CORINFO_INTRINSIC_Cos:
3936                         case CORINFO_INTRINSIC_Sqrt:
3937                         case CORINFO_INTRINSIC_Cosh:
3938                         case CORINFO_INTRINSIC_Sinh:
3939                         case CORINFO_INTRINSIC_Tan:
3940                         case CORINFO_INTRINSIC_Tanh:
3941                         case CORINFO_INTRINSIC_Asin:
3942                         case CORINFO_INTRINSIC_Acos:
3943                         case CORINFO_INTRINSIC_Atan:
3944                         case CORINFO_INTRINSIC_Atan2:
3945                         case CORINFO_INTRINSIC_Log10:
3946                         case CORINFO_INTRINSIC_Pow:
3947                         case CORINFO_INTRINSIC_Exp:
3948                         case CORINFO_INTRINSIC_Ceiling:
3949                         case CORINFO_INTRINSIC_Floor:
3950                         case CORINFO_INTRINSIC_Object_GetType:
3951                             // We give intrinsics a large fixed execution cost because we'd like to CSE
3952                             // them, even if they are implemented by calls. This is different from modeling
3953                             // user calls, since we never CSE user calls.
3954                             costEx = 36;
3955                             costSz = 4;
3956                             break;
3957
3958                         case CORINFO_INTRINSIC_Abs:
3959                             costEx = 5;
3960                             costSz = 15;
3961                             break;
3962
3963                         case CORINFO_INTRINSIC_Round:
3964                             costEx = 3;
3965                             costSz = 4;
3966 #if FEATURE_STACK_FP_X87
3967                             if (tree->TypeGet() == TYP_INT)
3968                             {
3969                                 // This is a special case to handle the following
3970                                 // optimization: conv.i4(round.d(d)) -> round.i(d)
3971                                 codeGen->genDecrementFPstkLevel();
3972                             }
3973 #endif // FEATURE_STACK_FP_X87
3974                             break;
3975                     }
3976                     level++;
3977                     break;
3978
3979                 case GT_NOT:
3980                 case GT_NEG:
3981                     // We need to ensure that -x is evaluated before x or else
3982                     // we get burned while adjusting genFPstkLevel in x*-x where
3983                     // the rhs x is the last use of the enregistered x.
3984                     //
3985                     // Even in the integer case we want to prefer to
3986                     // evaluate the side without the GT_NEG node, all other things
3987                     // being equal. Also, a GT_NOT requires a scratch register.
3988
3989                     level++;
3990                     break;
3991
3992                 case GT_ADDR:
3993
3994 #if FEATURE_STACK_FP_X87
3995                     /* If the operand was floating point, pop the value from the stack */
3996
3997                     if (varTypeIsFloating(op1->TypeGet()))
3998                     {
3999                         codeGen->genDecrementFPstkLevel();
4000                     }
4001 #endif // FEATURE_STACK_FP_X87
4002                     costEx = 0;
4003                     costSz = 1;
4004
4005                     // If we have a GT_ADDR of a GT_IND we can just copy the costs from indOp1
4006                     if (op1->OperGet() == GT_IND)
4007                     {
4008                         GenTreePtr indOp1 = op1->gtOp.gtOp1;
4009                         costEx            = indOp1->gtCostEx;
4010                         costSz            = indOp1->gtCostSz;
4011                     }
4012                     break;
4013
4014                 case GT_ARR_LENGTH:
4015                     level++;
4016
4017                     /* Array Len should cost the same as an indirection, which has a costEx of IND_COST_EX */
4018                     costEx = IND_COST_EX - 1;
4019                     costSz = 2;
4020                     break;
4021
4022                 case GT_MKREFANY:
4023                 case GT_OBJ:
4024                     // We estimate the cost of a GT_OBJ or GT_MKREFANY to be two loads (GT_INDs)
4025                     costEx = 2 * IND_COST_EX;
4026                     costSz = 2 * 2;
4027                     break;
4028
4029                 case GT_BOX:
4030                     // We estimate the cost of a GT_BOX to be two stores (GT_INDs)
4031                     costEx = 2 * IND_COST_EX;
4032                     costSz = 2 * 2;
4033                     break;
4034
4035                 case GT_IND:
4036
4037                     /* An indirection should always have a non-zero level.
4038                      * Only constant leaf nodes have level 0.
4039                      */
4040
4041                     if (level == 0)
4042                     {
4043                         level = 1;
4044                     }
4045
4046                     /* Indirections have a costEx of IND_COST_EX */
4047                     costEx = IND_COST_EX;
4048                     costSz = 2;
4049
4050                     /* If we have to sign-extend or zero-extend, bump the cost */
4051                     if (varTypeIsSmall(tree->TypeGet()))
4052                     {
4053                         costEx += 1;
4054                         costSz += 1;
4055                     }
4056
4057                     if (isflt)
4058                     {
4059 #if FEATURE_STACK_FP_X87
4060                         /* Indirect loads of FP values push a new value on the FP stack */
4061                         codeGen->genIncrementFPstkLevel();
4062 #endif // FEATURE_STACK_FP_X87
4063                         if (tree->TypeGet() == TYP_DOUBLE)
4064                         {
4065                             costEx += 1;
4066                         }
4067 #ifdef _TARGET_ARM_
4068                         costSz += 2;
4069 #endif // _TARGET_ARM_
4070                     }
4071
4072                     /* Can we form an addressing mode with this indirection? */
4073
4074                     if (op1->gtOper == GT_ADD)
4075                     {
4076                         bool rev;
4077 #if SCALED_ADDR_MODES
4078                         unsigned mul;
4079 #endif
4080                         unsigned   cns;
4081                         GenTreePtr base;
4082                         GenTreePtr idx;
4083
4084                         /* See if we can form a complex addressing mode? */
4085
4086                         GenTreePtr addr = op1;
4087                         if (codeGen->genCreateAddrMode(addr,     // address
4088                                                        0,        // mode
4089                                                        false,    // fold
4090                                                        RBM_NONE, // reg mask
4091                                                        &rev,     // reverse ops
4092                                                        &base,    // base addr
4093                                                        &idx,     // index val
4094 #if SCALED_ADDR_MODES
4095                                                        &mul, // scaling
4096 #endif
4097                                                        &cns,  // displacement
4098                                                        true)) // don't generate code
4099                         {
4100                             // We can form a complex addressing mode, so mark each of the interior
4101                             // nodes with GTF_ADDRMODE_NO_CSE and calculate a more accurate cost.
4102
4103                             addr->gtFlags |= GTF_ADDRMODE_NO_CSE;
4104 #ifdef _TARGET_XARCH_
4105                             // addrmodeCount is the count of items that we used to form
4106                             // an addressing mode.  The maximum value is 4 when we have
4107                             // all of these:   { base, idx, cns, mul }
4108                             //
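                            // For example (editor's note): an x64 address such as
                            // [rbx + 8*rsi + 0x20] uses base, idx, mul and cns and gives
                            // addrmodeCount == 4, while a simple [rbx + 0x20] gives 2.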
4109                             unsigned addrmodeCount = 0;
4110                             if (base)
4111                             {
4112                                 costEx += base->gtCostEx;
4113                                 costSz += base->gtCostSz;
4114                                 addrmodeCount++;
4115                             }
4116
4117                             if (idx)
4118                             {
4119                                 costEx += idx->gtCostEx;
4120                                 costSz += idx->gtCostSz;
4121                                 addrmodeCount++;
4122                             }
4123
4124                             if (cns)
4125                             {
4126                                 if (((signed char)cns) == ((int)cns))
4127                                 {
4128                                     costSz += 1;
4129                                 }
4130                                 else
4131                                 {
4132                                     costSz += 4;
4133                                 }
4134                                 addrmodeCount++;
4135                             }
4136                             if (mul)
4137                             {
4138                                 addrmodeCount++;
4139                             }
4140                             // When we form a complex addressing mode we can reduce the costs
4141                             // associated with the interior GT_ADD and GT_LSH nodes:
4142                             //
4143                             //                      GT_ADD      -- reduce this interior GT_ADD by (-3,-3)
4144                             //                      /   \       --
4145                             //                  GT_ADD  'cns'   -- reduce this interior GT_ADD by (-2,-2)
4146                             //                  /   \           --
4147                             //               'base'  GT_LSH     -- reduce this interior GT_LSH by (-1,-1)
4148                             //                      /   \       --
4149                             //                   'idx'  'mul'
4150                             //
4151                             if (addrmodeCount > 1)
4152                             {
4153                                 // The number of interior GT_ADD and GT_LSH nodes will always be one less than addrmodeCount
4154                                 //
4155                                 addrmodeCount--;
4156
4157                                 GenTreePtr tmp = addr;
4158                                 while (addrmodeCount > 0)
4159                                 {
4160                                     // decrement the gtCosts for the interior GT_ADD or GT_LSH node by the remaining
4161                                     // addrmodeCount
4162                                     tmp->SetCosts(tmp->gtCostEx - addrmodeCount, tmp->gtCostSz - addrmodeCount);
4163
4164                                     addrmodeCount--;
4165                                     if (addrmodeCount > 0)
4166                                     {
4167                                         GenTreePtr tmpOp1 = tmp->gtOp.gtOp1;
4168                                         GenTreePtr tmpOp2 = tmp->gtGetOp2();
4169                                         assert(tmpOp2 != nullptr);
4170
4171                                         if ((tmpOp1 != base) && (tmpOp1->OperGet() == GT_ADD))
4172                                         {
4173                                             tmp = tmpOp1;
4174                                         }
4175                                         else if (tmpOp2->OperGet() == GT_LSH)
4176                                         {
4177                                             tmp = tmpOp2;
4178                                         }
4179                                         else if (tmpOp1->OperGet() == GT_LSH)
4180                                         {
4181                                             tmp = tmpOp1;
4182                                         }
4183                                         else if (tmpOp2->OperGet() == GT_ADD)
4184                                         {
4185                                             tmp = tmpOp2;
4186                                         }
4187                                         else
4188                                         {
4189                                             // We can very rarely encounter a tree that has a GT_COMMA node
4190                                             // that is difficult to walk, so we just early out without decrementing.
4191                                             addrmodeCount = 0;
4192                                         }
4193                                     }
4194                                 }
4195                             }
4196 #elif defined _TARGET_ARM_
4197                             if (base)
4198                             {
4199                                 costEx += base->gtCostEx;
4200                                 costSz += base->gtCostSz;
4201                                 if ((base->gtOper == GT_LCL_VAR) && ((idx == NULL) || (cns == 0)))
4202                                 {
4203                                     costSz -= 1;
4204                                 }
4205                             }
4206
4207                             if (idx)
4208                             {
4209                                 costEx += idx->gtCostEx;
4210                                 costSz += idx->gtCostSz;
4211                                 if (mul > 0)
4212                                 {
4213                                     costSz += 2;
4214                                 }
4215                             }
4216
4217                             if (cns)
4218                             {
4219                                 if (cns >= 128) // small offsets fit into a 16-bit instruction
4220                                 {
4221                                     if (cns < 4096) // medium offsets require a 32-bit instruction
4222                                     {
4223                                         if (!isflt)
4224                                             costSz += 2;
4225                                     }
4226                                     else
4227                                     {
4228                                         costEx += 2; // Very large offsets require movw/movt instructions
4229                                         costSz += 8;
4230                                     }
4231                                 }
4232                             }
4233 #elif defined _TARGET_ARM64_
4234                             if (base)
4235                             {
4236                                 costEx += base->gtCostEx;
4237                                 costSz += base->gtCostSz;
4238                             }
4239
4240                             if (idx)
4241                             {
4242                                 costEx += idx->gtCostEx;
4243                                 costSz += idx->gtCostSz;
4244                             }
4245
4246                             if (cns != 0)
4247                             {
4248                                 if (cns >= (4096 * genTypeSize(tree->TypeGet())))
4249                                 {
4250                                     costEx += 1;
4251                                     costSz += 4;
4252                                 }
4253                             }
4254 #else
4255 #error "Unknown _TARGET_"
4256 #endif
4257
4258                             assert(addr->gtOper == GT_ADD);
4259                             assert(!addr->gtOverflow());
4260                             assert(op2 == nullptr);
4261                             assert(mul != 1);
4262
4263                             // If we have an addressing mode, we have one of:
4264                             //   [base             + cns]
4265                             //   [       idx * mul      ]  // mul >= 2, else we would use base instead of idx
4266                             //   [       idx * mul + cns]  // mul >= 2, else we would use base instead of idx
4267                             //   [base + idx * mul      ]  // mul can be 0, 2, 4, or 8
4268                             //   [base + idx * mul + cns]  // mul can be 0, 2, 4, or 8
4269                             // Note that mul == 0 is semantically equivalent to mul == 1.
4270                             // Note that cns can be zero.
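                            // For example (editor's note, using x64 registers for illustration):
                            // base = rbx, idx = rsi, mul = 4, cns = 16 corresponds to the
                            // address form [rbx + 4*rsi + 16].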
4271                             CLANG_FORMAT_COMMENT_ANCHOR;
4272
4273 #if SCALED_ADDR_MODES
4274                             assert((base != nullptr) || (idx != nullptr && mul >= 2));
4275 #else
4276                             assert(base != NULL);
4277 #endif
4278
4279                             INDEBUG(GenTreePtr op1Save = addr);
4280
4281                             /* Walk addr looking for non-overflow GT_ADDs */
4282                             gtWalkOp(&addr, &op2, base, false);
4283
4284                             // addr and op2 are now children of the root GT_ADD of the addressing mode
4285                             assert(addr != op1Save);
4286                             assert(op2 != nullptr);
4287
4288                             /* Walk addr looking for non-overflow GT_ADDs of constants */
4289                             gtWalkOp(&addr, &op2, nullptr, true);
4290
4291                             // TODO-Cleanup: It seems very strange that we might walk down op2 now,
4292                             //               even though the prior call to gtWalkOp() may have
4293                             //               altered op2.
4294
4295                             /* Walk op2 looking for non-overflow GT_ADDs of constants */
4296                             gtWalkOp(&op2, &addr, nullptr, true);
4297
4298                             // OK we are done walking the tree
4299                             // Now assert that addr and op2 correspond with base and idx
4300                             // in one of the several acceptable ways.
4301
4302                             // Note that sometimes addr/op2 is equal to idx/base
4303                             // and other times addr/op2 is a GT_COMMA node with
4304                             // an effective value that is idx/base
4305
4306                             if (mul > 1)
4307                             {
4308                                 if ((addr != base) && (addr->gtOper == GT_LSH))
4309                                 {
4310                                     addr->gtFlags |= GTF_ADDRMODE_NO_CSE;
4311                                     if (addr->gtOp.gtOp1->gtOper == GT_MUL)
4312                                     {
4313                                         addr->gtOp.gtOp1->gtFlags |= GTF_ADDRMODE_NO_CSE;
4314                                     }
4315                                     assert((base == nullptr) || (op2 == base) ||
4316                                            (op2->gtEffectiveVal() == base->gtEffectiveVal()) ||
4317                                            (gtWalkOpEffectiveVal(op2) == gtWalkOpEffectiveVal(base)));
4318                                 }
4319                                 else
4320                                 {
4321                                     assert(op2);
4322                                     assert(op2->gtOper == GT_LSH || op2->gtOper == GT_MUL);
4323                                     op2->gtFlags |= GTF_ADDRMODE_NO_CSE;
4324                                     // We may have eliminated multiple shifts and multiplies in the addressing mode,
4325                                     // so navigate down through them to get to "idx".
4326                                     GenTreePtr op2op1 = op2->gtOp.gtOp1;
4327                                     while ((op2op1->gtOper == GT_LSH || op2op1->gtOper == GT_MUL) && op2op1 != idx)
4328                                     {
4329                                         op2op1->gtFlags |= GTF_ADDRMODE_NO_CSE;
4330                                         op2op1 = op2op1->gtOp.gtOp1;
4331                                     }
4332                                     assert(addr->gtEffectiveVal() == base);
4333                                     assert(op2op1 == idx);
4334                                 }
4335                             }
4336                             else
4337                             {
4338                                 assert(mul == 0);
4339
4340                                 if ((addr == idx) || (addr->gtEffectiveVal() == idx))
4341                                 {
4342                                     if (idx != nullptr)
4343                                     {
4344                                         if ((addr->gtOper == GT_MUL) || (addr->gtOper == GT_LSH))
4345                                         {
4346                                             if ((addr->gtOp.gtOp1->gtOper == GT_NOP) ||
4347                                                 (addr->gtOp.gtOp1->gtOper == GT_MUL &&
4348                                                  addr->gtOp.gtOp1->gtOp.gtOp1->gtOper == GT_NOP))
4349                                             {
4350                                                 addr->gtFlags |= GTF_ADDRMODE_NO_CSE;
4351                                                 if (addr->gtOp.gtOp1->gtOper == GT_MUL)
4352                                                 {
4353                                                     addr->gtOp.gtOp1->gtFlags |= GTF_ADDRMODE_NO_CSE;
4354                                                 }
4355                                             }
4356                                         }
4357                                     }
4358                                     assert((op2 == base) || (op2->gtEffectiveVal() == base));
4359                                 }
4360                                 else if ((addr == base) || (addr->gtEffectiveVal() == base))
4361                                 {
4362                                     if (idx != nullptr)
4363                                     {
4364                                         assert(op2);
4365                                         if ((op2->gtOper == GT_MUL) || (op2->gtOper == GT_LSH))
4366                                         {
4367                                             if ((op2->gtOp.gtOp1->gtOper == GT_NOP) ||
4368                                                 (op2->gtOp.gtOp1->gtOper == GT_MUL &&
4369                                                  op2->gtOp.gtOp1->gtOp.gtOp1->gtOper == GT_NOP))
4370                                             {
4371                                                 // assert(bRngChk);
4372                                                 op2->gtFlags |= GTF_ADDRMODE_NO_CSE;
4373                                                 if (op2->gtOp.gtOp1->gtOper == GT_MUL)
4374                                                 {
4375                                                     op2->gtOp.gtOp1->gtFlags |= GTF_ADDRMODE_NO_CSE;
4376                                                 }
4377                                             }
4378                                         }
4379                                         assert((op2 == idx) || (op2->gtEffectiveVal() == idx));
4380                                     }
4381                                 }
4382                                 else
4383                                 {
4384                                     // addr isn't base or idx. Is this possible? Or should there be an assert?
4385                                 }
4386                             }
4387                             goto DONE;
4388
4389                         } // end  if  (genCreateAddrMode(...))
4390
4391                     } // end if  (op1->gtOper == GT_ADD)
4392                     else if (gtIsLikelyRegVar(op1))
4393                     {
4394                         /* Indirection of an enregistered LCL_VAR, don't increase costEx/costSz */
4395                         goto DONE;
4396                     }
4397 #ifdef _TARGET_XARCH_
4398                     else if (op1->IsCnsIntOrI())
4399                     {
4400                         // Indirection of a CNS_INT, subtract 1 from costEx
4401                         // makes costEx 3 for x86 and 4 for amd64
4402                         //
4403                         costEx += (op1->gtCostEx - 1);
4404                         costSz += op1->gtCostSz;
4405                         goto DONE;
4406                     }
4407 #endif
4408                     break;
4409
4410                 default:
4411                     break;
4412             }
4413             costEx += op1->gtCostEx;
4414             costSz += op1->gtCostSz;
4415             goto DONE;
4416         }
4417
4418         /* Binary operator - check for certain special cases */
4419
4420         lvlb = 0;
4421
4422         /* Default Binary ops have a cost of 1,1 */
4423         costEx = 1;
4424         costSz = 1;
4425
4426 #ifdef _TARGET_ARM_
4427         if (isflt)
4428         {
4429             costSz += 2;
4430         }
4431 #endif
4432 #ifndef _TARGET_64BIT_
4433         if (varTypeIsLong(op1->TypeGet()))
4434         {
4435             /* Operations on longs are more expensive */
4436             costEx += 3;
4437             costSz += 3;
4438         }
4439 #endif
4440         switch (oper)
4441         {
4442             case GT_MOD:
4443             case GT_UMOD:
4444
4445                 /* Modulo by a power of 2 is easy */
4446
4447                 if (op2->IsCnsIntOrI())
4448                 {
4449                     size_t ival = op2->gtIntConCommon.IconValue();
4450
4451                     if (ival > 0 && ival == genFindLowestBit(ival))
4452                     {
4453                         break;
4454                     }
4455                 }
4456
4457                 __fallthrough;
4458
4459             case GT_DIV:
4460             case GT_UDIV:
4461
4462                 if (isflt)
4463                 {
4464                     /* fp division is very expensive to execute */
4465                     costEx = 36; // TYP_DOUBLE
4466                     costSz += 3;
4467                 }
4468                 else
4469                 {
4470                     /* integer division is also very expensive */
4471                     costEx = 20;
4472                     costSz += 2;
4473
4474                     // Encourage the first operand to be evaluated (into EAX/EDX) first
4475                     lvlb -= 3;
4476
4477 #ifdef _TARGET_XARCH_
4478                     // the idiv and div instructions require EAX/EDX
4479                     ftreg |= RBM_EAX | RBM_EDX;
4480 #endif
4481                 }
4482                 break;
4483
4484             case GT_MUL:
4485
4486                 if (isflt)
4487                 {
4488                     /* FP multiplication instructions are more expensive */
4489                     costEx += 4;
4490                     costSz += 3;
4491                 }
4492                 else
4493                 {
4494                     /* Integer multiplication instructions are more expensive */
4495                     costEx += 3;
4496                     costSz += 2;
4497
4498                     if (tree->gtOverflow())
4499                     {
4500                         /* Overflow checks are more expensive */
4501                         costEx += 3;
4502                         costSz += 3;
4503                     }
4504
4505 #ifdef _TARGET_X86_
4506                     if ((tree->gtType == TYP_LONG) || tree->gtOverflow())
4507                     {
4508                         /* We use imulEAX for TYP_LONG and overflow multiplications */
4509                         // Encourage the first operand to be evaluated (into EAX/EDX) first
4510                         lvlb -= 4;
4511
4512                         // the imulEAX instruction on x86 requires EDX:EAX
4513                         ftreg |= (RBM_EAX | RBM_EDX);
4514
4515                         /* The 64-bit imul instruction costs more */
4516                         costEx += 4;
4517                     }
4518 #endif //  _TARGET_X86_
4519                 }
4520                 break;
4521
4522             case GT_ADD:
4523             case GT_SUB:
4524             case GT_ASG_ADD:
4525             case GT_ASG_SUB:
4526
4527                 if (isflt)
4528                 {
4529                     /* FP instructions are a bit more expensive */
4530                     costEx += 4;
4531                     costSz += 3;
4532                     break;
4533                 }
4534
4535                 /* Overflow checks are more expensive */
4536                 if (tree->gtOverflow())
4537                 {
4538                     costEx += 3;
4539                     costSz += 3;
4540                 }
4541                 break;
4542
4543             case GT_COMMA:
4544
4545                 /* Comma tosses the result of the left operand */
4546                 gtSetEvalOrderAndRestoreFPstkLevel(op1);
4547                 level = gtSetEvalOrder(op2);
4548
4549                 ftreg |= op1->gtRsvdRegs | op2->gtRsvdRegs;
4550
4551                 /* GT_COMMA cost is the sum of op1 and op2 costs */
4552                 costEx = (op1->gtCostEx + op2->gtCostEx);
4553                 costSz = (op1->gtCostSz + op2->gtCostSz);
4554
4555                 goto DONE;
4556
4557             case GT_COLON:
4558
4559                 level = gtSetEvalOrderAndRestoreFPstkLevel(op1);
4560                 lvl2  = gtSetEvalOrder(op2);
4561
4562                 if (level < lvl2)
4563                 {
4564                     level = lvl2;
4565                 }
4566                 else if (level == lvl2)
4567                 {
4568                     level += 1;
4569                 }
4570
4571                 ftreg |= op1->gtRsvdRegs | op2->gtRsvdRegs;
4572                 costEx = op1->gtCostEx + op2->gtCostEx;
4573                 costSz = op1->gtCostSz + op2->gtCostSz;
4574
4575                 goto DONE;
4576
4577             case GT_LIST:
4578
4579                 {
4580                     const bool isListCallArgs = false;
4581                     const bool callArgsInRegs = false;
4582                     return gtSetListOrder(tree, isListCallArgs, callArgsInRegs);
4583                 }
4584
4585             default:
4586                 break;
4587         }
4588
4589         /* Assignments need a bit of special handling */
4590
4591         if (kind & GTK_ASGOP)
4592         {
4593             /* Process the target */
4594
4595             level = gtSetEvalOrder(op1);
4596
4597 #if FEATURE_STACK_FP_X87
4598
4599             /* If assigning an FP value, the target won't get pushed */
4600
4601             if (isflt && !tree->IsPhiDefn())
4602             {
4603                 op1->gtFPlvl--;
4604                 codeGen->genDecrementFPstkLevel();
4605             }
4606
4607 #endif // FEATURE_STACK_FP_X87
4608
4609             if (gtIsLikelyRegVar(op1))
4610             {
4611                 assert(lvlb == 0);
4612                 lvl2 = gtSetEvalOrder(op2);
4613                 if (oper != GT_ASG)
4614                 {
4615                     ftreg |= op2->gtRsvdRegs;
4616                 }
4617
4618                 /* Assignment to an enregistered LCL_VAR */
4619                 costEx = op2->gtCostEx;
4620                 costSz = max(3, op2->gtCostSz); // 3 is an estimate for a reg-reg assignment
4621                 goto DONE_OP1_AFTER_COST;
4622             }
4623             else if (oper != GT_ASG)
4624             {
4625                 // Assign-Op instructions read and write op1
4626                 //
4627                 costEx += op1->gtCostEx;
4628 #ifdef _TARGET_ARM_
4629                 costSz += op1->gtCostSz;
4630 #endif
4631             }
4632
4633             goto DONE_OP1;
4634         }
4635
4636         /* Process the sub-operands */
4637
4638         level = gtSetEvalOrder(op1);
4639         if (lvlb < 0)
4640         {
4641             level -= lvlb; // lvlb is negative, so this increases level
4642             lvlb = 0;
4643         }
4644
4645     DONE_OP1:
4646         assert(lvlb >= 0);
4647         lvl2 = gtSetEvalOrder(op2) + lvlb;
4648         ftreg |= op1->gtRsvdRegs;
4649         if (oper != GT_ASG)
4650         {
4651             ftreg |= op2->gtRsvdRegs;
4652         }
4653
4654         costEx += (op1->gtCostEx + op2->gtCostEx);
4655         costSz += (op1->gtCostSz + op2->gtCostSz);
4656
4657     DONE_OP1_AFTER_COST:
4658 #if FEATURE_STACK_FP_X87
4659         /*
4660             Binary FP operators pop 2 operands and produce 1 result;
4661             FP comparisons pop 2 operands and produce no result;
4662             assignments consume 1 value and don't produce anything.
4663          */
4664
4665         if (isflt && !tree->IsPhiDefn())
4666         {
4667             assert(oper != GT_COMMA);
4668             codeGen->genDecrementFPstkLevel();
4669         }
4670 #endif // FEATURE_STACK_FP_X87
4671
4672         bool bReverseInAssignment = false;
4673         if (kind & GTK_ASGOP)
4674         {
4675             GenTreePtr op1Val = op1;
4676
4677             if (tree->gtOper == GT_ASG)
4678             {
4679                 // Skip over the GT_IND/GT_ADDR tree (if one exists)
4680                 //
4681                 if ((op1->gtOper == GT_IND) && (op1->gtOp.gtOp1->gtOper == GT_ADDR))
4682                 {
4683                     op1Val = op1->gtOp.gtOp1->gtOp.gtOp1;
4684                 }
4685             }
4686
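            // Decide whether op2 (the value) can safely be evaluated before op1
            // (the target); if so, GTF_REVERSE_OPS is set on the assignment below.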
4687             switch (op1Val->gtOper)
4688             {
4689                 case GT_IND:
4690
4691                     // If we have any side effects on the GT_IND child node
4692                     // we have to evaluate op1 first
4693                     if (op1Val->gtOp.gtOp1->gtFlags & GTF_ALL_EFFECT)
4694                     {
4695                         break;
4696                     }
4697
4698                     // In case op2 assigns to a local var that is used in op1Val, we have to evaluate op1Val first.
4699                     if (op2->gtFlags & GTF_ASG)
4700                     {
4701                         break;
4702                     }
4703
4704                     // If op2 is simple then evaluate op1 first
4705
4706                     if (op2->OperKind() & GTK_LEAF)
4707                     {
4708                         break;
4709                     }
4710
4711                 // fall through and set GTF_REVERSE_OPS
4712
4713                 case GT_LCL_VAR:
4714                 case GT_LCL_FLD:
4715
4716                     // We evaluate op2 before op1
4717                     bReverseInAssignment = true;
4718                     tree->gtFlags |= GTF_REVERSE_OPS;
4719                     break;
4720
4721                 default:
4722                     break;
4723             }
4724         }
4725         else if (kind & GTK_RELOP)
4726         {
4727             /* Float compares remove both operands from the FP stack */
4728             /* Also FP comparison uses EAX for flags */
4729
4730             if (varTypeIsFloating(op1->TypeGet()))
4731             {
4732 #if FEATURE_STACK_FP_X87
4733                 codeGen->genDecrementFPstkLevel(2);
4734 #endif // FEATURE_STACK_FP_X87
4735 #ifdef _TARGET_XARCH_
4736                 ftreg |= RBM_EAX;
4737 #endif
4738                 level++;
4739                 lvl2++;
4740             }
4741 #if CPU_LONG_USES_REGPAIR
4742             if (varTypeIsLong(op1->TypeGet()))
4743             {
4744                 costEx *= 2; // Longs are twice as expensive
4745                 costSz *= 2;
4746             }
4747 #endif
4748             if ((tree->gtFlags & GTF_RELOP_JMP_USED) == 0)
4749             {
4750                 /* Using a setcc instruction is more expensive */
4751                 costEx += 3;
4752             }
4753         }
4754
4755         /* Check for other interesting cases */
4756
4757         switch (oper)
4758         {
4759             case GT_LSH:
4760             case GT_RSH:
4761             case GT_RSZ:
4762             case GT_ROL:
4763             case GT_ROR:
4764             case GT_ASG_LSH:
4765             case GT_ASG_RSH:
4766             case GT_ASG_RSZ:
4767
4768                 /* Variable sized shifts are more expensive and use REG_SHIFT */
4769
4770                 if (!op2->IsCnsIntOrI())
4771                 {
4772                     costEx += 3;
4773                     if (REG_SHIFT != REG_NA)
4774                     {
4775                         ftreg |= RBM_SHIFT;
4776                     }
4777
4778 #ifndef _TARGET_64BIT_
4779                     // Variable sized LONG shifts require the use of a helper call
4780                     //
4781                     if (tree->gtType == TYP_LONG)
4782                     {
4783                         level += 5;
4784                         lvl2 += 5;
4785                         costEx += 3 * IND_COST_EX;
4786                         costSz += 4;
4787                         ftreg |= RBM_CALLEE_TRASH;
4788                     }
4789 #endif // !_TARGET_64BIT_
4790                 }
4791                 break;
4792
4793             case GT_INTRINSIC:
4794
4795                 switch (tree->gtIntrinsic.gtIntrinsicId)
4796                 {
4797                     case CORINFO_INTRINSIC_Atan2:
4798                     case CORINFO_INTRINSIC_Pow:
4799                         // These math intrinsics are actually implemented by user calls.
4800                         // Increase the Sethi 'complexity' by two to reflect the argument
4801                         // register requirement.
4802                         level += 2;
4803                         break;
4804                     default:
4805                         assert(!"Unknown binary GT_INTRINSIC operator");
4806                         break;
4807                 }
4808
4809                 break;
4810
4811             default:
4812                 break;
4813         }
4814
4815         /* We need to evaluate constants later, as many places in codegen
4816            can't handle op1 being a constant. This is normally enforced
4817            naturally, since constants have the lowest level (0). However,
4818            sometimes we end up with a tree like "cns1 < nop(cns2)". In
4819            such cases, both sides have a level of 0, so encourage constants
4820            to be evaluated last in such cases */
4821
4822         if ((level == 0) && (level == lvl2) && (op1->OperKind() & GTK_CONST) &&
4823             (tree->OperIsCommutative() || tree->OperIsCompare()))
4824         {
4825             lvl2++;
4826         }
4827
4828         /* We try to swap operands if the second one is more expensive */
4829         bool       tryToSwap;
4830         GenTreePtr opA, opB;
4831
4832         if (tree->gtFlags & GTF_REVERSE_OPS)
4833         {
4834             opA = op2;
4835             opB = op1;
4836         }
4837         else
4838         {
4839             opA = op1;
4840             opB = op2;
4841         }
4842
4843         if (fgOrder == FGOrderLinear)
4844         {
4845             // Don't swap anything if we're in linear order; we're really just interested in the costs.
4846             tryToSwap = false;
4847         }
4848         else if (bReverseInAssignment)
4849         {
4850             // Assignments are special: we want GTF_REVERSE_OPS on the assignment
4851             // itself, and if reversing was possible it was already set above.
4852             tryToSwap = false;
4853         }
4854         else
4855         {
4856             if (tree->gtFlags & GTF_REVERSE_OPS)
4857             {
4858                 tryToSwap = (level > lvl2);
4859             }
4860             else
4861             {
4862                 tryToSwap = (level < lvl2);
4863             }
4864
4865             // Try to force extra swapping when in the stress mode:
4866             if (compStressCompile(STRESS_REVERSE_FLAG, 60) && ((tree->gtFlags & GTF_REVERSE_OPS) == 0) &&
4867                 ((op2->OperKind() & GTK_CONST) == 0))
4868             {
4869                 tryToSwap = true;
4870             }
4871         }
4872
4873         if (tryToSwap)
4874         {
4875             bool canSwap = gtCanSwapOrder(opA, opB);
4876
4877             if (canSwap)
4878             {
4879                 /* Can we swap the order by commuting the operands? */
4880
4881                 switch (oper)
4882                 {
4883                     case GT_EQ:
4884                     case GT_NE:
4885                     case GT_LT:
4886                     case GT_LE:
4887                     case GT_GE:
4888                     case GT_GT:
4889                         if (GenTree::SwapRelop(oper) != oper)
4890                         {
4891                             // SetOper will obliterate the VN for the underlying expression.
4892                             // If we're in VN CSE phase, we don't want to lose that information,
4893                             // so save the value numbers and put them back after the SetOper.
4894                             ValueNumPair vnp = tree->gtVNPair;
4895                             tree->SetOper(GenTree::SwapRelop(oper));
4896                             if (optValnumCSE_phase)
4897                             {
4898                                 tree->gtVNPair = vnp;
4899                             }
4900                         }
4901
4902                         __fallthrough;
4903
4904                     case GT_ADD:
4905                     case GT_MUL:
4906
4907                     case GT_OR:
4908                     case GT_XOR:
4909                     case GT_AND:
4910
4911                         /* Swap the operands */
4912
4913                         tree->gtOp.gtOp1 = op2;
4914                         tree->gtOp.gtOp2 = op1;
4915
4916 #if FEATURE_STACK_FP_X87
4917                         /* We may have to recompute FP levels */
4918                         if (op1->gtFPlvl || op2->gtFPlvl)
4919                             gtFPstLvlRedo = true;
4920 #endif // FEATURE_STACK_FP_X87
4921                         break;
4922
4923                     case GT_QMARK:
4924                     case GT_COLON:
4925                     case GT_MKREFANY:
4926                         break;
4927
4928                     case GT_LIST:
4929                         break;
4930
4931                     case GT_SUB:
4932 #ifdef LEGACY_BACKEND
4933                         // For LSRA we require that LclVars be "evaluated" just prior to their use,
4934                         // so that if they must be reloaded, it is done at the right place.
4935                         // This means that we allow reverse evaluation for all BINOPs.
4936                         // (Note that this doesn't affect the order of the operands in the instruction).
4937                         if (!isflt)
4938                             break;
4939 #endif // LEGACY_BACKEND
4940
4941                         __fallthrough;
4942
4943                     default:
4944
4945                         /* Mark the operand's evaluation order to be swapped */
4946                         if (tree->gtFlags & GTF_REVERSE_OPS)
4947                         {
4948                             tree->gtFlags &= ~GTF_REVERSE_OPS;
4949                         }
4950                         else
4951                         {
4952                             tree->gtFlags |= GTF_REVERSE_OPS;
4953                         }
4954
4955 #if FEATURE_STACK_FP_X87
4956                         /* We may have to recompute FP levels */
4957                         if (op1->gtFPlvl || op2->gtFPlvl)
4958                             gtFPstLvlRedo = true;
4959 #endif // FEATURE_STACK_FP_X87
4960
4961                         break;
4962                 }
4963             }
4964         }
4965
4966         /* Swap the level counts */
4967         if (tree->gtFlags & GTF_REVERSE_OPS)
4968         {
4969             unsigned tmpl;
4970
4971             tmpl  = level;
4972             level = lvl2;
4973             lvl2  = tmpl;
4974         }
4975
4976         /* Compute the sethi number for this binary operator */
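        //
        // Classic Sethi-Ullman rule: when both operands need the same number of
        // registers, one extra register is required to hold the first result
        // while the second operand is evaluated; a first-operand level of zero
        // (a trivial operand) simply inherits the second operand's level.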
4977
4978         if (level < 1)
4979         {
4980             level = lvl2;
4981         }
4982         else if (level == lvl2)
4983         {
4984             level += 1;
4985         }
4986
4987         goto DONE;
4988     }
4989
4990     /* See what kind of a special operator we have here */
4991
4992     switch (oper)
4993     {
4994         unsigned lvl2; // Scratch variable
4995
4996         case GT_CALL:
4997
4998             assert(tree->gtFlags & GTF_CALL);
4999
5000             level  = 0;
5001             costEx = 5;
5002             costSz = 2;
5003
5004             /* Evaluate the 'this' argument, if present */
5005
5006             if (tree->gtCall.gtCallObjp)
5007             {
5008                 GenTreePtr thisVal = tree->gtCall.gtCallObjp;
5009
5010                 lvl2 = gtSetEvalOrder(thisVal);
5011                 if (level < lvl2)
5012                 {
5013                     level = lvl2;
5014                 }
5015                 costEx += thisVal->gtCostEx;
5016                 costSz += thisVal->gtCostSz + 1;
5017                 ftreg |= thisVal->gtRsvdRegs;
5018             }
5019
5020             /* Evaluate the arguments, right to left */
5021
5022             if (tree->gtCall.gtCallArgs)
5023             {
5024 #if FEATURE_STACK_FP_X87
5025                 FPlvlSave = codeGen->genGetFPstkLevel();
5026 #endif // FEATURE_STACK_FP_X87
5027                 const bool isListCallArgs = true;
5028                 const bool callArgsInRegs = false;
5029                 lvl2 = gtSetListOrder(tree->gtCall.gtCallArgs, isListCallArgs, callArgsInRegs);
5030                 if (level < lvl2)
5031                 {
5032                     level = lvl2;
5033                 }
5034                 costEx += tree->gtCall.gtCallArgs->gtCostEx;
5035                 costSz += tree->gtCall.gtCallArgs->gtCostSz;
5036                 ftreg |= tree->gtCall.gtCallArgs->gtRsvdRegs;
5037 #if FEATURE_STACK_FP_X87
5038                 codeGen->genResetFPstkLevel(FPlvlSave);
5039 #endif // FEATURE_STACK_FP_X87
5040             }
5041
5042             /* Evaluate the temp register arguments list
5043              * This is a "hidden" list and its only purpose is to
5044              * extend the life of temps until we make the call */
5045
5046             if (tree->gtCall.gtCallLateArgs)
5047             {
5048 #if FEATURE_STACK_FP_X87
5049                 FPlvlSave = codeGen->genGetFPstkLevel();
5050 #endif // FEATURE_STACK_FP_X87
5051                 const bool isListCallArgs = true;
5052                 const bool callArgsInRegs = true;
5053                 lvl2 = gtSetListOrder(tree->gtCall.gtCallLateArgs, isListCallArgs, callArgsInRegs);
5054                 if (level < lvl2)
5055                 {
5056                     level = lvl2;
5057                 }
5058                 costEx += tree->gtCall.gtCallLateArgs->gtCostEx;
5059                 costSz += tree->gtCall.gtCallLateArgs->gtCostSz;
5060                 ftreg |= tree->gtCall.gtCallLateArgs->gtRsvdRegs;
5061 #if FEATURE_STACK_FP_X87
5062                 codeGen->genResetFPstkLevel(FPlvlSave);
5063 #endif // FEATURE_STACK_FP_X87
5064             }
5065
5066             if (tree->gtCall.gtCallType == CT_INDIRECT)
5067             {
5068                 // pinvoke-calli cookie is a constant, or constant indirection
5069                 assert(tree->gtCall.gtCallCookie == nullptr || tree->gtCall.gtCallCookie->gtOper == GT_CNS_INT ||
5070                        tree->gtCall.gtCallCookie->gtOper == GT_IND);
5071
5072                 GenTreePtr indirect = tree->gtCall.gtCallAddr;
5073
5074                 lvl2 = gtSetEvalOrder(indirect);
5075                 if (level < lvl2)
5076                 {
5077                     level = lvl2;
5078                 }
5079                 costEx += indirect->gtCostEx + IND_COST_EX;
5080                 costSz += indirect->gtCostSz;
5081                 ftreg |= indirect->gtRsvdRegs;
5082             }
5083             else
5084             {
5085 #ifdef _TARGET_ARM_
5086                 if ((tree->gtFlags & GTF_CALL_VIRT_KIND_MASK) == GTF_CALL_VIRT_STUB)
5087                 {
5088                     // We generate movw/movt/ldr
5089                     costEx += (1 + IND_COST_EX);
5090                     costSz += 8;
5091                     if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT)
5092                     {
5093                         // Must use R12 for the ldr target -- REG_JUMP_THUNK_PARAM
5094                         costSz += 2;
5095                     }
5096                 }
5097                 else if ((opts.eeFlags & CORJIT_FLG_PREJIT) == 0)
5098                 {
5099                     costEx += 2;
5100                     costSz += 6;
5101                 }
5102                 costSz += 2;
5103 #endif
5104 #ifdef _TARGET_XARCH_
5105                 costSz += 3;
5106 #endif
5107             }
5108
5109             level += 1;
5110
5111             unsigned callKind;
5112             callKind = (tree->gtFlags & GTF_CALL_VIRT_KIND_MASK);
5113
5114             /* Virtual calls are a bit more expensive */
5115             if (callKind != GTF_CALL_NONVIRT)
5116             {
5117                 costEx += 2 * IND_COST_EX;
5118                 costSz += 2;
5119             }
5120
5121             /* Virtual stub calls also must reserve the VIRTUAL_STUB_PARAM reg */
5122             if (callKind == GTF_CALL_VIRT_STUB)
5123             {
5124                 ftreg |= RBM_VIRTUAL_STUB_PARAM;
5125             }
5126
5127 #ifdef FEATURE_READYTORUN_COMPILER
5128 #ifdef _TARGET_ARM64_
5129             if (tree->gtCall.IsR2RRelativeIndir())
5130             {
5131                 ftreg |= RBM_R2R_INDIRECT_PARAM;
5132             }
5133 #endif
5134 #endif
5135
5136 #if GTF_CALL_REG_SAVE
5137             // Normally function calls don't preserve caller save registers
5138             //   and thus are much more expensive.
5139             // However, a few function calls do preserve these registers
5140             //   such as the GC WriteBarrier helper calls.
5141
5142             if (!(tree->gtFlags & GTF_CALL_REG_SAVE))
5143 #endif
5144             {
5145                 level += 5;
5146                 costEx += 3 * IND_COST_EX;
5147                 ftreg |= RBM_CALLEE_TRASH;
5148             }
5149
5150 #if FEATURE_STACK_FP_X87
5151             if (isflt)
5152                 codeGen->genIncrementFPstkLevel();
5153 #endif // FEATURE_STACK_FP_X87
5154
5155             break;
5156
5157         case GT_ARR_ELEM:
5158
5159             level  = gtSetEvalOrder(tree->gtArrElem.gtArrObj);
5160             costEx = tree->gtArrElem.gtArrObj->gtCostEx;
5161             costSz = tree->gtArrElem.gtArrObj->gtCostSz;
5162
5163             unsigned dim;
5164             for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
5165             {
5166                 lvl2 = gtSetEvalOrder(tree->gtArrElem.gtArrInds[dim]);
5167                 if (level < lvl2)
5168                 {
5169                     level = lvl2;
5170                 }
5171                 costEx += tree->gtArrElem.gtArrInds[dim]->gtCostEx;
5172                 costSz += tree->gtArrElem.gtArrInds[dim]->gtCostSz;
5173             }
5174
5175 #if FEATURE_STACK_FP_X87
5176             if (isflt)
5177                 codeGen->genIncrementFPstkLevel();
5178 #endif // FEATURE_STACK_FP_X87
5179             level += tree->gtArrElem.gtArrRank;
5180             costEx += 2 + (tree->gtArrElem.gtArrRank * (IND_COST_EX + 1));
5181             costSz += 2 + (tree->gtArrElem.gtArrRank * 2);
5182             break;
5183
5184         case GT_ARR_OFFSET:
5185             level  = gtSetEvalOrder(tree->gtArrOffs.gtOffset);
5186             costEx = tree->gtArrOffs.gtOffset->gtCostEx;
5187             costSz = tree->gtArrOffs.gtOffset->gtCostSz;
5188             lvl2   = gtSetEvalOrder(tree->gtArrOffs.gtIndex);
5189             level  = max(level, lvl2);
5190             costEx += tree->gtArrOffs.gtIndex->gtCostEx;
5191             costSz += tree->gtArrOffs.gtIndex->gtCostSz;
5192             lvl2  = gtSetEvalOrder(tree->gtArrOffs.gtArrObj);
5193             level = max(level, lvl2);
5194             costEx += tree->gtArrOffs.gtArrObj->gtCostEx;
5195             costSz += tree->gtArrOffs.gtArrObj->gtCostSz;
5196             break;
5197
5198         case GT_CMPXCHG:
5199
5200             level  = gtSetEvalOrder(tree->gtCmpXchg.gtOpLocation);
5201             costSz = tree->gtCmpXchg.gtOpLocation->gtCostSz;
5202
5203             lvl2 = gtSetEvalOrder(tree->gtCmpXchg.gtOpValue);
5204             if (level < lvl2)
5205             {
5206                 level = lvl2;
5207             }
5208             costSz += tree->gtCmpXchg.gtOpValue->gtCostSz;
5209
5210             lvl2 = gtSetEvalOrder(tree->gtCmpXchg.gtOpComparand);
5211             if (level < lvl2)
5212             {
5213                 level = lvl2;
5214             }
5215             costSz += tree->gtCmpXchg.gtOpComparand->gtCostSz;
5216
5217             costEx = MAX_COST; // Seriously, what could be more expensive than lock cmpxchg?
5218             costSz += 5;       // size of lock cmpxchg [reg+C], reg
5219 #ifdef _TARGET_XARCH_
5220             ftreg |= RBM_EAX; // cmpxchg must be evaluated into eax.
5221 #endif
5222             break;
5223
5224         case GT_ARR_BOUNDS_CHECK:
5225 #ifdef FEATURE_SIMD
5226         case GT_SIMD_CHK:
5227 #endif                  // FEATURE_SIMD
5228             costEx = 4; // cmp reg,reg and jae throw (not taken)
5229             costSz = 7; // jump to cold section
5230
5231             level = gtSetEvalOrder(tree->gtBoundsChk.gtArrLen);
5232             costEx += tree->gtBoundsChk.gtArrLen->gtCostEx;
5233             costSz += tree->gtBoundsChk.gtArrLen->gtCostSz;
5234
5235             lvl2 = gtSetEvalOrder(tree->gtBoundsChk.gtIndex);
5236             if (level < lvl2)
5237             {
5238                 level = lvl2;
5239             }
5240             costEx += tree->gtBoundsChk.gtIndex->gtCostEx;
5241             costSz += tree->gtBoundsChk.gtIndex->gtCostSz;
5242
5243             break;
5244
5245         default:
5246 #ifdef DEBUG
5247             if (verbose)
5248             {
5249                 printf("unexpected operator in this tree:\n");
5250                 gtDispTree(tree);
5251             }
5252 #endif
5253             NO_WAY("unexpected operator");
5254     }
5255
5256 DONE:
5257
5258 #if FEATURE_STACK_FP_X87
5259     // printf("[FPlvl=%2u] ", genGetFPstkLevel()); gtDispTree(tree, 0, true);
5260     noway_assert((unsigned char)codeGen->genFPstkLevel == codeGen->genFPstkLevel);
5261     tree->gtFPlvl = (unsigned char)codeGen->genFPstkLevel;
5262
5263     if (codeGen->genFPstkLevel > tmpDoubleSpillMax)
5264         tmpDoubleSpillMax = codeGen->genFPstkLevel;
5265 #endif // FEATURE_STACK_FP_X87
5266
5267     tree->gtRsvdRegs = (regMaskSmall)ftreg;
5268
5269     // Some path through this function must have set the costs.
5270     assert(costEx != -1);
5271     assert(costSz != -1);
5272
5273     tree->SetCosts(costEx, costSz);
5274
5275     return level;
5276 }
5277 #ifdef _PREFAST_
5278 #pragma warning(pop)
5279 #endif
5280
5281 #if FEATURE_STACK_FP_X87
5282
5283 /*****************************************************************************/
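// gtComputeFPlvls: Walk 'tree' in evaluation order, maintaining the running x87
// FP-stack depth in codeGen->genFPstkLevel and recording the depth at each node
// in its gtFPlvl field.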
5284 void Compiler::gtComputeFPlvls(GenTreePtr tree)
5285 {
5286     genTreeOps oper;
5287     unsigned   kind;
5288     bool       isflt;
5289     unsigned   savFPstkLevel;
5290
5291     noway_assert(tree);
5292     noway_assert(tree->gtOper != GT_STMT);
5293
5294     /* Figure out what kind of a node we have */
5295
5296     oper  = tree->OperGet();
5297     kind  = tree->OperKind();
5298     isflt = varTypeIsFloating(tree->TypeGet()) ? 1 : 0;
5299
5300     /* Is this a constant or leaf node? */
5301
5302     if (kind & (GTK_CONST | GTK_LEAF))
5303     {
5304         codeGen->genFPstkLevel += isflt;
5305         goto DONE;
5306     }
5307
5308     /* Is it a 'simple' unary/binary operator? */
5309
5310     if (kind & GTK_SMPOP)
5311     {
5312         GenTreePtr op1 = tree->gtOp.gtOp1;
5313         GenTreePtr op2 = tree->gtGetOp2();
5314
5315         /* Check for some special cases */
5316
5317         switch (oper)
5318         {
5319             case GT_IND:
5320
5321                 gtComputeFPlvls(op1);
5322
5323                 /* Indirect loads of FP values push a new value on the FP stack */
5324
5325                 codeGen->genFPstkLevel += isflt;
5326                 goto DONE;
5327
5328             case GT_CAST:
5329
5330                 gtComputeFPlvls(op1);
5331
5332                 /* Casts between non-FP and FP push on / pop from the FP stack */
5333
5334                 if (varTypeIsFloating(op1->TypeGet()))
5335                 {
5336                     if (isflt == false)
5337                         codeGen->genFPstkLevel--;
5338                 }
5339                 else
5340                 {
5341                     if (isflt != false)
5342                         codeGen->genFPstkLevel++;
5343                 }
5344
5345                 goto DONE;
5346
5347             case GT_LIST:  /* GT_LIST presumably part of an argument list */
5348             case GT_COMMA: /* Comma tosses the result of the left operand */
5349
5350                 savFPstkLevel = codeGen->genFPstkLevel;
5351                 gtComputeFPlvls(op1);
5352                 codeGen->genFPstkLevel = savFPstkLevel;
5353
5354                 if (op2)
5355                     gtComputeFPlvls(op2);
5356
5357                 goto DONE;
5358
5359             default:
5360                 break;
5361         }
5362
5363         if (!op1)
5364         {
5365             if (!op2)
5366                 goto DONE;
5367
5368             gtComputeFPlvls(op2);
5369             goto DONE;
5370         }
5371
5372         if (!op2)
5373         {
5374             gtComputeFPlvls(op1);
5375             if (oper == GT_ADDR)
5376             {
5377                 /* If the operand was floating point pop the value from the stack */
5378                 if (varTypeIsFloating(op1->TypeGet()))
5379                 {
5380                     noway_assert(codeGen->genFPstkLevel);
5381                     codeGen->genFPstkLevel--;
5382                 }
5383             }
5384
5385             // This is a special case to handle the following
5386             // optimization: conv.i4(round.d(d)) -> round.i(d)
5387
5388             if (oper == GT_INTRINSIC && tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Round &&
5389                 tree->TypeGet() == TYP_INT)
5390             {
5391                 codeGen->genFPstkLevel--;
5392             }
5393
5394             goto DONE;
5395         }
5396
5397         /* FP assignments need a bit of special handling */
5398
5399         if (isflt && (kind & GTK_ASGOP))
5400         {
5401             /* The target of the assignment won't get pushed */
5402
5403             if (tree->gtFlags & GTF_REVERSE_OPS)
5404             {
5405                 gtComputeFPlvls(op2);
5406                 gtComputeFPlvls(op1);
5407                 op1->gtFPlvl--;
5408                 codeGen->genFPstkLevel--;
5409             }
5410             else
5411             {
5412                 gtComputeFPlvls(op1);
5413                 op1->gtFPlvl--;
5414                 codeGen->genFPstkLevel--;
5415                 gtComputeFPlvls(op2);
5416             }
5417
5418             codeGen->genFPstkLevel--;
5419             goto DONE;
5420         }
5421
5422         /* Here we have a binary operator; visit operands in proper order */
5423
5424         if (tree->gtFlags & GTF_REVERSE_OPS)
5425         {
5426             gtComputeFPlvls(op2);
5427             gtComputeFPlvls(op1);
5428         }
5429         else
5430         {
5431             gtComputeFPlvls(op1);
5432             gtComputeFPlvls(op2);
5433         }
5434
5435         /*
5436             Binary FP operators pop 2 operands and produce 1 result;
5437             assignments consume 1 value and don't produce anything.
5438          */
5439
5440         if (isflt)
5441             codeGen->genFPstkLevel--;
5442
5443         /* Float compares remove both operands from the FP stack */
5444
5445         if (kind & GTK_RELOP)
5446         {
5447             if (varTypeIsFloating(op1->TypeGet()))
5448                 codeGen->genFPstkLevel -= 2;
5449         }
5450
5451         goto DONE;
5452     }
5453
5454     /* See what kind of a special operator we have here */
5455
5456     switch (oper)
5457     {
5458         case GT_FIELD:
5459             gtComputeFPlvls(tree->gtField.gtFldObj);
5460             codeGen->genFPstkLevel += isflt;
5461             break;
5462
5463         case GT_CALL:
5464
5465             if (tree->gtCall.gtCallObjp)
5466                 gtComputeFPlvls(tree->gtCall.gtCallObjp);
5467
5468             if (tree->gtCall.gtCallArgs)
5469             {
5470                 savFPstkLevel = codeGen->genFPstkLevel;
5471                 gtComputeFPlvls(tree->gtCall.gtCallArgs);
5472                 codeGen->genFPstkLevel = savFPstkLevel;
5473             }
5474
5475             if (tree->gtCall.gtCallLateArgs)
5476             {
5477                 savFPstkLevel = codeGen->genFPstkLevel;
5478                 gtComputeFPlvls(tree->gtCall.gtCallLateArgs);
5479                 codeGen->genFPstkLevel = savFPstkLevel;
5480             }
5481
5482             codeGen->genFPstkLevel += isflt;
5483             break;
5484
5485         case GT_ARR_ELEM:
5486
5487             gtComputeFPlvls(tree->gtArrElem.gtArrObj);
5488
5489             unsigned dim;
5490             for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
5491                 gtComputeFPlvls(tree->gtArrElem.gtArrInds[dim]);
5492
5493             /* Loads of FP values push a new value on the FP stack */
5494             codeGen->genFPstkLevel += isflt;
5495             break;
5496
5497         case GT_CMPXCHG:
5498             // Evaluate the trees left to right
5499             gtComputeFPlvls(tree->gtCmpXchg.gtOpLocation);
5500             gtComputeFPlvls(tree->gtCmpXchg.gtOpValue);
5501             gtComputeFPlvls(tree->gtCmpXchg.gtOpComparand);
5502             noway_assert(!isflt);
5503             break;
5504
5505         case GT_ARR_BOUNDS_CHECK:
5506             gtComputeFPlvls(tree->gtBoundsChk.gtArrLen);
5507             gtComputeFPlvls(tree->gtBoundsChk.gtIndex);
5508             noway_assert(!isflt);
5509             break;
5510
5511 #ifdef DEBUG
5512         default:
5513             noway_assert(!"Unhandled special operator in gtComputeFPlvls()");
5514             break;
5515 #endif
5516     }
5517
5518 DONE:
5519
5520     noway_assert((unsigned char)codeGen->genFPstkLevel == codeGen->genFPstkLevel);
5521
5522     tree->gtFPlvl = (unsigned char)codeGen->genFPstkLevel;
5523 }
5524
5525 #endif // FEATURE_STACK_FP_X87
5526
5527 /*****************************************************************************
5528  *
5529  *  If the given tree is an integer constant that can be used
5530  *  in a scaled index address mode as a multiplier (e.g. "[4*index]"), then return
5531  *  the scale factor: 2, 4, or 8. Otherwise, return 0. Note that we never return 1,
5532  *  to match the behavior of GetScaleIndexShf().
5533  */
5534
5535 unsigned GenTree::GetScaleIndexMul()
5536 {
5537     if (IsCnsIntOrI() && jitIsScaleIndexMul(gtIntConCommon.IconValue()) && gtIntConCommon.IconValue() != 1)
5538     {
5539         return (unsigned)gtIntConCommon.IconValue();
5540     }
5541
5542     return 0;
5543 }
5544
5545 /*****************************************************************************
5546  *
5547  *  If the given tree is the right-hand side of a left shift (that is,
5548  *  'y' in the tree 'x' << 'y'), and it is an integer constant that can be used
5549  *  in a scaled index address mode as a multiplier (e.g. "[4*index]"), then return
5550  *  the scale factor: 2, 4, or 8. Otherwise, return 0.
5551  */
5552
5553 unsigned GenTree::GetScaleIndexShf()
5554 {
5555     if (IsCnsIntOrI() && jitIsScaleIndexShift(gtIntConCommon.IconValue()))
5556     {
5557         return (unsigned)(1 << gtIntConCommon.IconValue());
5558     }
5559
5560     return 0;
5561 }
5562
5563 /*****************************************************************************
5564  *
5565  *  If the given tree is a scaled index (i.e. "op * 4" or "op << 2"), returns
5566  *  the multiplier: 2, 4, or 8; otherwise returns 0. Note that "1" is never
5567  *  returned.
5568  */
5569
5570 unsigned GenTree::GetScaledIndex()
5571 {
5572     // With (!opts.OptEnabled(CLFLG_CONSTANTFOLD)) we can have
5573     //   CNS_INT * CNS_INT
5574     //
5575     if (gtOp.gtOp1->IsCnsIntOrI())
5576     {
5577         return 0;
5578     }
5579
5580     switch (gtOper)
5581     {
5582         case GT_MUL:
5583             return gtOp.gtOp2->GetScaleIndexMul();
5584
5585         case GT_LSH:
5586             return gtOp.gtOp2->GetScaleIndexShf();
5587
5588         default:
5589             assert(!"GenTree::GetScaledIndex() called with illegal gtOper");
5590             break;
5591     }
5592
5593     return 0;
5594 }
5595
5596 /*****************************************************************************
5597  *
5598  *  Returns true if this node is a GT_ADD, at least one of whose operands is an integer (<= 32 bit)
5599  *  constant.  If it returns true, it sets "*offset" to (one of the) constant value(s), and
5600  *  "*addr" to the other operand.
5601  */
5602
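// For example, given the tree "lclVar + 16" this returns true, setting "*offset"
// to 16 and "*addr" to the lclVar node.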
5603 bool GenTree::IsAddWithI32Const(GenTreePtr* addr, int* offset)
5604 {
5605     if (OperGet() == GT_ADD)
5606     {
5607         if (gtOp.gtOp1->IsIntCnsFitsInI32())
5608         {
5609             *offset = (int)gtOp.gtOp1->gtIntCon.gtIconVal;
5610             *addr   = gtOp.gtOp2;
5611             return true;
5612         }
5613         else if (gtOp.gtOp2->IsIntCnsFitsInI32())
5614         {
5615             *offset = (int)gtOp.gtOp2->gtIntCon.gtIconVal;
5616             *addr   = gtOp.gtOp1;
5617             return true;
5618         }
5619     }
5620     // Otherwise...
5621     return false;
5622 }
5623
5624 //------------------------------------------------------------------------
5625 // gtGetChildPointer: If 'parent' is the parent of this node, return the pointer
5626 //    to the child node so that it can be modified; otherwise, return nullptr.
5627 //
5628 // Arguments:
5629 //    parent - The possible parent of this node
5630 //
5631 // Return Value:
5632 //    If "child" is a child of "parent", returns a pointer to the child node in the parent
5633 //    (i.e. a pointer to a GenTree pointer).
5634 //    Otherwise, returns nullptr.
5635 //
5636 // Assumptions:
5637 //    'parent' must be non-null
5638 //
5639 // Notes:
5640 //    When FEATURE_MULTIREG_ARGS is defined we can get here with GT_LDOBJ tree.
5641 //    This happens when we have a struct that is passed in multiple registers.
5642 //
5643 //    Also note that when FEATURE_UNIX_AMD64_STRUCT_PASSING is defined the GT_LDOBJ
5644 //    later gets converted to a GT_LIST with two GT_LCL_FLDs in Lower/LowerXArch.
5645 //
5646
5647 GenTreePtr* GenTree::gtGetChildPointer(GenTreePtr parent)
5648
5649 {
5650     switch (parent->OperGet())
5651     {
5652         default:
5653             if (!parent->OperIsSimple())
5654             {
5655                 return nullptr;
5656             }
5657             if (this == parent->gtOp.gtOp1)
5658             {
5659                 return &(parent->gtOp.gtOp1);
5660             }
5661             if (this == parent->gtOp.gtOp2)
5662             {
5663                 return &(parent->gtOp.gtOp2);
5664             }
5665             break;
5666
5667 #if !FEATURE_MULTIREG_ARGS
5668         // Note that when FEATURE_MULTIREG_ARGS==1
5669         //  a GT_OBJ node is handled above by the default case
5670         case GT_OBJ:
5671             // Any GT_OBJ with a field must be lowered before this point.
5672             noway_assert(!"GT_OBJ encountered in GenTree::gtGetChildPointer");
5673             break;
5674 #endif // !FEATURE_MULTIREG_ARGS
5675
5676         case GT_CMPXCHG:
5677             if (this == parent->gtCmpXchg.gtOpLocation)
5678             {
5679                 return &(parent->gtCmpXchg.gtOpLocation);
5680             }
5681             if (this == parent->gtCmpXchg.gtOpValue)
5682             {
5683                 return &(parent->gtCmpXchg.gtOpValue);
5684             }
5685             if (this == parent->gtCmpXchg.gtOpComparand)
5686             {
5687                 return &(parent->gtCmpXchg.gtOpComparand);
5688             }
5689             break;
5690
5691         case GT_ARR_BOUNDS_CHECK:
5692 #ifdef FEATURE_SIMD
5693         case GT_SIMD_CHK:
5694 #endif // FEATURE_SIMD
5695             if (this == parent->gtBoundsChk.gtArrLen)
5696             {
5697                 return &(parent->gtBoundsChk.gtArrLen);
5698             }
5699             if (this == parent->gtBoundsChk.gtIndex)
5700             {
5701                 return &(parent->gtBoundsChk.gtIndex);
5702             }
5703             if (this == parent->gtBoundsChk.gtIndRngFailBB)
5704             {
5705                 return &(parent->gtBoundsChk.gtIndRngFailBB);
5706             }
5707             break;
5708
5709         case GT_ARR_ELEM:
5710             if (this == parent->gtArrElem.gtArrObj)
5711             {
5712                 return &(parent->gtArrElem.gtArrObj);
5713             }
5714             for (int i = 0; i < GT_ARR_MAX_RANK; i++)
5715             {
5716                 if (this == parent->gtArrElem.gtArrInds[i])
5717                 {
5718                     return &(parent->gtArrElem.gtArrInds[i]);
5719                 }
5720             }
5721             break;
5722
5723         case GT_ARR_OFFSET:
5724             if (this == parent->gtArrOffs.gtOffset)
5725             {
5726                 return &(parent->gtArrOffs.gtOffset);
5727             }
5728             if (this == parent->gtArrOffs.gtIndex)
5729             {
5730                 return &(parent->gtArrOffs.gtIndex);
5731             }
5732             if (this == parent->gtArrOffs.gtArrObj)
5733             {
5734                 return &(parent->gtArrOffs.gtArrObj);
5735             }
5736             break;
5737
5738         case GT_FIELD:
5739             if (this == parent->AsField()->gtFldObj)
5740             {
5741                 return &(parent->AsField()->gtFldObj);
5742             }
5743             break;
5744
5745         case GT_RET_EXPR:
5746             if (this == parent->gtRetExpr.gtInlineCandidate)
5747             {
5748                 return &(parent->gtRetExpr.gtInlineCandidate);
5749             }
5750             break;
5751
5752         case GT_CALL:
5753         {
5754             GenTreeCall* call = parent->AsCall();
5755
5756             if (this == call->gtCallObjp)
5757             {
5758                 return &(call->gtCallObjp);
5759             }
5760             if (this == call->gtCallArgs)
5761             {
5762                 return reinterpret_cast<GenTreePtr*>(&(call->gtCallArgs));
5763             }
5764             if (this == call->gtCallLateArgs)
5765             {
5766                 return reinterpret_cast<GenTreePtr*>(&(call->gtCallLateArgs));
5767             }
5768             if (this == call->gtControlExpr)
5769             {
5770                 return &(call->gtControlExpr);
5771             }
5772             if (call->gtCallType == CT_INDIRECT)
5773             {
5774                 if (this == call->gtCallCookie)
5775                 {
5776                     return &(call->gtCallCookie);
5777                 }
5778                 if (this == call->gtCallAddr)
5779                 {
5780                     return &(call->gtCallAddr);
5781                 }
5782             }
5783         }
5784         break;
5785
5786         case GT_STMT:
5787             noway_assert(!"Illegal node for gtGetChildPointer()");
5788             unreached();
5789     }
5790
5791     return nullptr;
5792 }
5793
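//------------------------------------------------------------------------
// TryGetUse: Search this node's operand ("use") edges for one that refers to
//    'def'. If found, return true and set '*use' to that edge so the caller
//    can modify the operand; otherwise return false.
//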
5794 bool GenTree::TryGetUse(GenTree* def, GenTree*** use)
5795 {
5796     for (GenTree** useEdge : UseEdges())
5797     {
5798         if (*useEdge == def)
5799         {
5800             *use = useEdge;
5801             return true;
5802         }
5803     }
5804
5805     return false;
5806 }
5807
5808 //------------------------------------------------------------------------
5809 // gtGetParent: Get the parent of this node, and optionally capture the
5810 //    pointer to the child so that it can be modified.
5811 //
5812 // Arguments:
5813
5814 //    parentChildPointer - A pointer to a GenTreePtr* (yes, that's three
5815 //                         levels, i.e. GenTree ***), which if non-null,
5816 //                         will be set to point to the field in the parent
5817 //                         that points to this node.
5818 //
5819 //    Return value       - The parent of this node.
5820 //
5821 //    Notes:
5822 //
5823 //    This requires that the execution order must be defined (i.e. gtSetEvalOrder() has been called).
5824 //    To enable the child to be replaced, it accepts an argument, parentChildPointer that, if non-null,
5825 //    will be set to point to the child pointer in the parent that points to this node.
5826
5827 GenTreePtr GenTree::gtGetParent(GenTreePtr** parentChildPtrPtr)
5828 {
5829     // Find the parent node; it must be after this node in the execution order.
5830     GenTreePtr* parentChildPtr = nullptr;
5831     GenTreePtr  parent;
5832     for (parent = gtNext; parent != nullptr; parent = parent->gtNext)
5833     {
5834         parentChildPtr = gtGetChildPointer(parent);
5835         if (parentChildPtr != nullptr)
5836         {
5837             break;
5838         }
5839     }
5840     if (parentChildPtrPtr != nullptr)
5841     {
5842         *parentChildPtrPtr = parentChildPtr;
5843     }
5844     return parent;
5845 }
5846
5847 /*****************************************************************************
5848  *
5849  *  Returns true if the given operator may cause an exception.
5850  */
5851
5852 bool GenTree::OperMayThrow()
5853 {
5854     GenTreePtr op;
5855
5856     switch (gtOper)
5857     {
5858         case GT_MOD:
5859         case GT_DIV:
5860         case GT_UMOD:
5861         case GT_UDIV:
5862
5863             /* Division with a non-zero, non-minus-one constant does not throw an exception */
5864
5865             op = gtOp.gtOp2;
5866
5867             if (varTypeIsFloating(op->TypeGet()))
5868             {
5869                 return false; // Floating point division does not throw.
5870             }
5871
5872             // For integers only division by 0 or by -1 can throw
5873             if (op->IsIntegralConst() && !op->IsIntegralConst(0) && !op->IsIntegralConst(-1))
5874             {
5875                 return false;
5876             }
5877             return true;
5878
5879         case GT_IND:
5880             op = gtOp.gtOp1;
5881
5882             /* Indirections of handles are known to be safe */
5883             if (op->gtOper == GT_CNS_INT)
5884             {
5885                 if (op->IsIconHandle())
5886                 {
5887                     /* No exception is thrown on this indirection */
5888                     return false;
5889                 }
5890             }
5891             if (this->gtFlags & GTF_IND_NONFAULTING)
5892             {
5893                 return false;
5894             }
5895             // Non-Null AssertionProp will remove the GTF_EXCEPT flag and mark the GT_IND with GTF_ORDER_SIDEEFF flag
5896             if ((this->gtFlags & GTF_ALL_EFFECT) == GTF_ORDER_SIDEEFF)
5897             {
5898                 return false;
5899             }
5900
5901             return true;
5902
5903         case GT_INTRINSIC:
5904             // If this is an intrinsic that represents the object.GetType(), it can throw a NullReferenceException.
5905             // Report it as may throw.
5906             // Note: Some of the rest of the existing intrinsics could potentially throw an exception (for example
5907             //       the array and string element access ones). They are handled differently than the GetType intrinsic
5908             //       and are not marked with GTF_EXCEPT. If these are revisited at some point to be marked as
5909             //       GTF_EXCEPT,
5910             //       the code below might need to be specialized to handle them properly.
5911             if ((this->gtFlags & GTF_EXCEPT) != 0)
5912             {
5913                 return true;
5914             }
5915
5916             break;
5917
5918         case GT_OBJ:
5919             return !Compiler::fgIsIndirOfAddrOfLocal(this);
5920
5921         case GT_ARR_BOUNDS_CHECK:
5922         case GT_ARR_ELEM:
5923         case GT_ARR_INDEX:
5924         case GT_CATCH_ARG:
5925         case GT_ARR_LENGTH:
5926         case GT_LCLHEAP:
5927         case GT_CKFINITE:
5928         case GT_NULLCHECK:
5929 #ifdef FEATURE_SIMD
5930         case GT_SIMD_CHK:
5931 #endif // FEATURE_SIMD
5932             return true;
5933         default:
5934             break;
5935     }
5936
5937     /* Overflow arithmetic operations also throw exceptions */
5938
5939     if (gtOverflowEx())
5940     {
5941         return true;
5942     }
5943
5944     return false;
5945 }
5946
5947 #if DEBUGGABLE_GENTREE
5948 // static
5949 GenTree::VtablePtr GenTree::s_vtablesForOpers[] = {nullptr};
5950 GenTree::VtablePtr GenTree::s_vtableForOp       = nullptr;
5951
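// GetVtableForOper: Return the vtable pointer used by debuggable GenTree nodes
// with operator 'oper'. The pointer is obtained by constructing a temporary node
// of the corresponding GenTree subtype, and is cached in s_vtablesForOpers (with
// s_vtableForOp shared by the plain unary/binary operators).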
5952 GenTree::VtablePtr GenTree::GetVtableForOper(genTreeOps oper)
5953 {
5954     noway_assert(oper < GT_COUNT);
5955
5956     if (s_vtablesForOpers[oper] != nullptr)
5957     {
5958         return s_vtablesForOpers[oper];
5959     }
5960     // Otherwise...
5961     VtablePtr res = nullptr;
5962     switch (oper)
5963     {
5964 #define GTSTRUCT_0(nm, tag) /*handle explicitly*/
5965 #define GTSTRUCT_1(nm, tag)                                                                                            \
5966     case tag:                                                                                                          \
5967     {                                                                                                                  \
5968         GenTree##nm gt;                                                                                                \
5969         res = *reinterpret_cast<VtablePtr*>(&gt);                                                                      \
5970     }                                                                                                                  \
5971     break;
5972 #define GTSTRUCT_2(nm, tag, tag2)             /*handle explicitly*/
5973 #define GTSTRUCT_3(nm, tag, tag2, tag3)       /*handle explicitly*/
5974 #define GTSTRUCT_4(nm, tag, tag2, tag3, tag4) /*handle explicitly*/
5975 #define GTSTRUCT_N(nm, ...)                   /*handle explicitly*/
5976 #include "gtstructs.h"
5977
5978 #if !FEATURE_EH_FUNCLETS
5979         // If FEATURE_EH_FUNCLETS is set, then GT_JMP becomes the only member of Val, and will be handled above.
5980         case GT_END_LFIN:
5981         case GT_JMP:
5982         {
5983             GenTreeVal gt(GT_JMP, TYP_INT, 0);
5984             res = *reinterpret_cast<VtablePtr*>(&gt);
5985             break;
5986         }
5987 #endif
5988         default:
5989         {
5990             // Should be unary or binary op.
5991             if (s_vtableForOp == nullptr)
5992             {
5993                 unsigned opKind = OperKind(oper);
5994                 assert(!IsExOp(opKind));
5995                 assert(OperIsSimple(oper) || OperIsLeaf(oper));
5996                 // Need to provide non-null operands.
5997                 Compiler*     comp = (Compiler*)_alloca(sizeof(Compiler));
5998                 GenTreeIntCon dummyOp(TYP_INT, 0);
5999                 GenTreeOp     gt(oper, TYP_INT, &dummyOp, ((opKind & GTK_UNOP) ? nullptr : &dummyOp));
6000                 s_vtableForOp = *reinterpret_cast<VtablePtr*>(&gt);
6001             }
6002             res = s_vtableForOp;
6003             break;
6004         }
6005     }
6006     s_vtablesForOpers[oper] = res;
6007     return res;
6008 }
6009
6010 void GenTree::SetVtableForOper(genTreeOps oper)
6011 {
6012     *reinterpret_cast<VtablePtr*>(this) = GetVtableForOper(oper);
6013 }
6014 #endif // DEBUGGABLE_GENTREE
6015
6016 GenTreePtr Compiler::gtNewOperNode(genTreeOps oper, var_types type, GenTreePtr op1, GenTreePtr op2)
6017 {
6018     assert(op1 != nullptr);
6019     assert(op2 != nullptr);
6020
6021     // We should not be allocating nodes that extend GenTreeOp with this;
6022     // should call the appropriate constructor for the extended type.
6023     assert(!GenTree::IsExOp(GenTree::OperKind(oper)));
6024
6025     GenTreePtr node = new (this, oper) GenTreeOp(oper, type, op1, op2);
6026
6027     return node;
6028 }
6029
6030 GenTreePtr Compiler::gtNewQmarkNode(var_types type, GenTreePtr cond, GenTreePtr colon)
6031 {
6032     compQmarkUsed   = true;
6033     GenTree* result = new (this, GT_QMARK) GenTreeQmark(type, cond, colon, this);
6034 #ifdef DEBUG
6035     if (compQmarkRationalized)
6036     {
6037         fgCheckQmarkAllowedForm(result);
6038     }
6039 #endif
6040     return result;
6041 }
6042
6043 GenTreeQmark::GenTreeQmark(var_types type, GenTreePtr cond, GenTreePtr colonOp, Compiler* comp)
6044     : GenTreeOp(GT_QMARK, type, cond, colonOp)
6045     , gtThenLiveSet(VarSetOps::UninitVal())
6046     , gtElseLiveSet(VarSetOps::UninitVal())
6047 {
6048     // These must follow a specific form.
6049     assert(cond != nullptr && cond->TypeGet() == TYP_INT);
6050     assert(colonOp != nullptr && colonOp->OperGet() == GT_COLON);
6051
6052     comp->impInlineRoot()->compQMarks->Push(this);
6053 }
6054
6055 GenTreeIntCon* Compiler::gtNewIconNode(ssize_t value, var_types type)
6056 {
6057     return new (this, GT_CNS_INT) GenTreeIntCon(type, value);
6058 }
6059
6060 // Return a new node representing the value in a physical register.
6061 GenTree* Compiler::gtNewPhysRegNode(regNumber reg, var_types type)
6062 {
6063     assert(genIsValidIntReg(reg) || (reg == REG_SPBASE));
6064     GenTree* result = new (this, GT_PHYSREG) GenTreePhysReg(reg, type);
6065     return result;
6066 }
6067
6068 // Return a new node representing a store of a value to a physical register
6069 // modifies: child's gtRegNum
6070 GenTree* Compiler::gtNewPhysRegNode(regNumber reg, GenTree* src)
6071 {
6072     assert(genIsValidIntReg(reg));
6073     GenTree* result  = new (this, GT_PHYSREGDST) GenTreeOp(GT_PHYSREGDST, TYP_I_IMPL, src, nullptr);
6074     result->gtRegNum = reg;
6075     src->gtRegNum    = reg;
6076     return result;
6077 }
6078
6079 #ifndef LEGACY_BACKEND
6080 GenTreePtr Compiler::gtNewJmpTableNode()
6081 {
6082     GenTreePtr node                   = new (this, GT_JMPTABLE) GenTreeJumpTable(TYP_INT);
6083     node->gtJumpTable.gtJumpTableAddr = 0;
6084     return node;
6085 }
6086 #endif // !LEGACY_BACKEND
6087
6088 /*****************************************************************************
6089  *
6090  *  Converts an annotated token into icon flags (so that we will later be
6091  *  able to tell the type of the handle that will be embedded in the icon
6092  *  node).
6093  */
6094
6095 unsigned Compiler::gtTokenToIconFlags(unsigned token)
6096 {
6097     unsigned flags = 0;
6098
6099     switch (TypeFromToken(token))
6100     {
6101         case mdtTypeRef:
6102         case mdtTypeDef:
6103         case mdtTypeSpec:
6104             flags = GTF_ICON_CLASS_HDL;
6105             break;
6106
6107         case mdtMethodDef:
6108             flags = GTF_ICON_METHOD_HDL;
6109             break;
6110
6111         case mdtFieldDef:
6112             flags = GTF_ICON_FIELD_HDL;
6113             break;
6114
6115         default:
6116             flags = GTF_ICON_TOKEN_HDL;
6117             break;
6118     }
6119
6120     return flags;
6121 }
6122
6123 /*****************************************************************************
6124  *
6125  *  Allocates an integer constant entry that represents a HANDLE to something.
6126  *  It may not be allowed to embed HANDLEs directly into the JITed code (e.g.,
6127  *  as arguments to JIT helpers). Get a corresponding value that can be embedded.
6128  *  If the handle needs to be accessed via an indirection, pValue points to it.
6129  */
6130
6131 GenTreePtr Compiler::gtNewIconEmbHndNode(
6132     void* value, void* pValue, unsigned flags, unsigned handle1, void* handle2, void* compileTimeHandle)
6133 {
6134     GenTreePtr node;
6135
6136     assert((!value) != (!pValue));
6137
6138     if (value)
6139     {
6140         node = gtNewIconHandleNode((size_t)value, flags, /*fieldSeq*/ FieldSeqStore::NotAField(), handle1, handle2);
6141         node->gtIntCon.gtCompileTimeHandle = (size_t)compileTimeHandle;
6142     }
6143     else
6144     {
6145         node = gtNewIconHandleNode((size_t)pValue, flags, /*fieldSeq*/ FieldSeqStore::NotAField(), handle1, handle2);
6146         node->gtIntCon.gtCompileTimeHandle = (size_t)compileTimeHandle;
6147         node                               = gtNewOperNode(GT_IND, TYP_I_IMPL, node);
6148     }
6149
6150     return node;
6151 }
6152
6153 /*****************************************************************************/
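// gtNewStringLiteralNode: Build the tree that yields a string literal for the
// given access type: a direct handle (IAT_VALUE), a single indirection
// (IAT_PVALUE), or a double indirection (IAT_PPVALUE).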
6154 GenTreePtr Compiler::gtNewStringLiteralNode(InfoAccessType iat, void* pValue)
6155 {
6156     GenTreePtr tree = nullptr;
6157
6158     switch (iat)
6159     {
6160         case IAT_VALUE: // The info value is directly available
6161             tree         = gtNewIconEmbHndNode(pValue, nullptr, GTF_ICON_STR_HDL);
6162             tree->gtType = TYP_REF;
6163             tree         = gtNewOperNode(GT_NOP, TYP_REF, tree); // prevents constant folding
6164             break;
6165
6166         case IAT_PVALUE: // The value needs to be accessed via an       indirection
6167             tree = gtNewIconHandleNode((size_t)pValue, GTF_ICON_STR_HDL);
6168             // An indirection of a string handle can't cause an exception so don't set GTF_EXCEPT
6169             tree = gtNewOperNode(GT_IND, TYP_REF, tree);
6170             tree->gtFlags |= GTF_GLOB_REF;
6171             break;
6172
6173         case IAT_PPVALUE: // The value needs to be accessed via a double indirection
6174             tree = gtNewIconHandleNode((size_t)pValue, GTF_ICON_PSTR_HDL);
6175             tree = gtNewOperNode(GT_IND, TYP_I_IMPL, tree);
6176             tree->gtFlags |= GTF_IND_INVARIANT;
6177             // An indirection of a string handle can't cause an exception so don't set GTF_EXCEPT
6178             tree = gtNewOperNode(GT_IND, TYP_REF, tree);
6179             tree->gtFlags |= GTF_GLOB_REF;
6180             break;
6181
6182         default:
6183             assert(!"Unexpected InfoAccessType");
6184     }
6185
6186     return tree;
6187 }
6188
6189 /*****************************************************************************/
6190
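// gtNewLconNode: On 64-bit targets a 'long' constant is represented by a GT_CNS_INT
// node of type TYP_LONG; 32-bit targets use a separate GT_CNS_LNG node.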
6191 GenTreePtr Compiler::gtNewLconNode(__int64 value)
6192 {
6193 #ifdef _TARGET_64BIT_
6194     GenTreePtr node = new (this, GT_CNS_INT) GenTreeIntCon(TYP_LONG, value);
6195 #else
6196     GenTreePtr node = new (this, GT_CNS_LNG) GenTreeLngCon(value);
6197 #endif
6198
6199     return node;
6200 }
6201
6202 GenTreePtr Compiler::gtNewDconNode(double value)
6203 {
6204     GenTreePtr node = new (this, GT_CNS_DBL) GenTreeDblCon(value);
6205
6206     return node;
6207 }
6208
6209 GenTreePtr Compiler::gtNewSconNode(int CPX, CORINFO_MODULE_HANDLE scpHandle)
6210 {
6211
6212 #if SMALL_TREE_NODES
6213
6214     /* 'GT_CNS_STR' nodes later get transformed into 'GT_CALL' */
6215
6216     assert(GenTree::s_gtNodeSizes[GT_CALL] > GenTree::s_gtNodeSizes[GT_CNS_STR]);
6217
6218     GenTreePtr node = new (this, GT_CALL) GenTreeStrCon(CPX, scpHandle DEBUGARG(/*largeNode*/ true));
6219 #else
6220     GenTreePtr node = new (this, GT_CNS_STR) GenTreeStrCon(CPX, scpHandle DEBUGARG(/*largeNode*/ true));
6221 #endif
6222
6223     return node;
6224 }
6225
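// gtNewZeroConNode: Create a constant node holding the zero value of 'type'; the
// TYP_REF/TYP_BYREF and TYP_FLOAT cases build an int/double constant respectively
// and then retype it.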
6226 GenTreePtr Compiler::gtNewZeroConNode(var_types type)
6227 {
6228     GenTreePtr zero;
6229     switch (type)
6230     {
6231         case TYP_INT:
6232             zero = gtNewIconNode(0);
6233             break;
6234
6235         case TYP_BYREF:
6236             __fallthrough;
6237
6238         case TYP_REF:
6239             zero         = gtNewIconNode(0);
6240             zero->gtType = type;
6241             break;
6242
6243         case TYP_LONG:
6244             zero = gtNewLconNode(0);
6245             break;
6246
6247         case TYP_FLOAT:
6248             zero         = gtNewDconNode(0.0);
6249             zero->gtType = type;
6250             break;
6251
6252         case TYP_DOUBLE:
6253             zero = gtNewDconNode(0.0);
6254             break;
6255
6256         default:
6257             assert(!"Bad type");
6258             zero = nullptr;
6259             break;
6260     }
6261     return zero;
6262 }
6263
6264 GenTreePtr Compiler::gtNewOneConNode(var_types type)
6265 {
6266     switch (type)
6267     {
6268         case TYP_INT:
6269         case TYP_UINT:
6270             return gtNewIconNode(1);
6271
6272         case TYP_LONG:
6273         case TYP_ULONG:
6274             return gtNewLconNode(1);
6275
6276         case TYP_FLOAT:
6277         {
6278             GenTreePtr one = gtNewDconNode(1.0);
6279             one->gtType    = type;
6280             return one;
6281         }
6282
6283         case TYP_DOUBLE:
6284             return gtNewDconNode(1.0);
6285
6286         default:
6287             assert(!"Bad type");
6288             return nullptr;
6289     }
6290 }
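
// Illustrative sketch (hypothetical 'comp'): typed zero/one constants are requested by
// var_types; as the switches above show, TYP_FLOAT and TYP_REF/TYP_BYREF reuse the
// double/int constant nodes with an adjusted gtType.
//
//     GenTreePtr nullRef = comp->gtNewZeroConNode(TYP_REF);   // null reference constant
//     GenTreePtr zeroF   = comp->gtNewZeroConNode(TYP_FLOAT); // GT_CNS_DBL with gtType TYP_FLOAT
//     GenTreePtr oneL    = comp->gtNewOneConNode(TYP_LONG);   // long constant 1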
6291
6292 GenTreeCall* Compiler::gtNewIndCallNode(GenTreePtr addr, var_types type, GenTreeArgList* args, IL_OFFSETX ilOffset)
6293 {
6294     return gtNewCallNode(CT_INDIRECT, (CORINFO_METHOD_HANDLE)addr, type, args, ilOffset);
6295 }
6296
6297 GenTreeCall* Compiler::gtNewCallNode(
6298     gtCallTypes callType, CORINFO_METHOD_HANDLE callHnd, var_types type, GenTreeArgList* args, IL_OFFSETX ilOffset)
6299 {
6300     GenTreeCall* node = new (this, GT_CALL) GenTreeCall(genActualType(type));
6301
6302     node->gtFlags |= (GTF_CALL | GTF_GLOB_REF);
6303     if (args)
6304     {
6305         node->gtFlags |= (args->gtFlags & GTF_ALL_EFFECT);
6306     }
6307     node->gtCallType      = callType;
6308     node->gtCallMethHnd   = callHnd;
6309     node->gtCallArgs      = args;
6310     node->gtCallObjp      = nullptr;
6311     node->fgArgInfo       = nullptr;
6312     node->callSig         = nullptr;
6313     node->gtRetClsHnd     = nullptr;
6314     node->gtControlExpr   = nullptr;
6315     node->gtCallMoreFlags = 0;
6316
6317     if (callType == CT_INDIRECT)
6318     {
6319         node->gtCallCookie = nullptr;
6320     }
6321     else
6322     {
6323         node->gtInlineCandidateInfo = nullptr;
6324     }
6325     node->gtCallLateArgs = nullptr;
6326     node->gtReturnType   = type;
6327
6328 #ifdef LEGACY_BACKEND
6329     node->gtCallRegUsedMask = RBM_NONE;
6330 #endif // LEGACY_BACKEND
6331
6332 #ifdef FEATURE_READYTORUN_COMPILER
6333     node->gtCall.gtEntryPoint.addr = nullptr;
6334 #endif
6335
6336 #if defined(DEBUG) || defined(INLINE_DATA)
6337     // These get updated after call node is built.
6338     node->gtCall.gtInlineObservation = InlineObservation::CALLEE_UNUSED_INITIAL;
6339     node->gtCall.gtRawILOffset       = BAD_IL_OFFSET;
6340 #endif
6341
6342 #ifdef DEBUGGING_SUPPORT
6343     // Spec: Managed Retval sequence points need to be generated while generating debug info for debuggable code.
6344     //
6345     // Implementation note: if we are not generating MRV info, genCallSite2ILOffsetMap will be NULL and
6346     // codegen will pass BAD_IL_OFFSET as the IL offset of a call node to the emitter, which will cause the
6347     // emitter not to emit an IP mapping entry.
6348     if (opts.compDbgCode && opts.compDbgInfo)
6349     {
6350         // Managed Retval - IL offset of the call.  This offset is used to emit a
6351         // CALL_INSTRUCTION type sequence point while emitting corresponding native call.
6352         //
6353         // TODO-Cleanup:
6354         // a) (Opt) We need not store this offset if the method doesn't return a
6355         // value.  Rather it can be made BAD_IL_OFFSET to prevent a sequence
6356         // point being emitted.
6357         //
6358         // b) (Opt) Add new sequence points only if requested by debugger through
6359         // a new boundary type - ICorDebugInfo::BoundaryTypes
6360         if (genCallSite2ILOffsetMap == nullptr)
6361         {
6362             genCallSite2ILOffsetMap = new (getAllocator()) CallSiteILOffsetTable(getAllocator());
6363         }
6364
6365         // Make sure that there are no duplicate entries for a given call node
6366         IL_OFFSETX value;
6367         assert(!genCallSite2ILOffsetMap->Lookup(node, &value));
6368         genCallSite2ILOffsetMap->Set(node, ilOffset);
6369     }
6370 #endif
6371
6372     // Initialize gtOtherRegs
6373     node->ClearOtherRegs();
6374
6375     // Initialize spill flags of gtOtherRegs
6376     node->ClearOtherRegFlags();
6377
6378     return node;
6379 }
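
// Illustrative sketch (hypothetical 'comp', 'methHnd', 'arg0', 'arg1'): a direct user call
// returning an int with two arguments could be built as shown below; gtNewIndCallNode takes
// the call-address tree in place of a method handle and forwards it as CT_INDIRECT.
//
//     GenTreeArgList* args = comp->gtNewArgList(arg0, arg1);
//     GenTreeCall*    call = comp->gtNewCallNode(CT_USER_FUNC, methHnd, TYP_INT, args, BAD_IL_OFFSET);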
6380
6381 GenTreePtr Compiler::gtNewLclvNode(unsigned lnum, var_types type, IL_OFFSETX ILoffs)
6382 {
6383     // We need to ensure that all struct values are normalized.
6384     // It might be nice to assert this in general, but we have assignments of int to long.
6385     if (varTypeIsStruct(type))
6386     {
6387         assert(type == lvaTable[lnum].lvType);
6388     }
6389     GenTreePtr node = new (this, GT_LCL_VAR) GenTreeLclVar(type, lnum, ILoffs);
6390
6391     /* Cannot have this assert because the inliner uses this function
6392      * to add temporaries */
6393
6394     // assert(lnum < lvaCount);
6395
6396     return node;
6397 }
6398
6399 GenTreePtr Compiler::gtNewLclLNode(unsigned lnum, var_types type, IL_OFFSETX ILoffs)
6400 {
6401     // We need to ensure that all struct values are normalized.
6402     // It might be nice to assert this in general, but we have assignments of int to long.
6403     if (varTypeIsStruct(type))
6404     {
6405         assert(type == lvaTable[lnum].lvType);
6406     }
6407 #if SMALL_TREE_NODES
6408     /* This local variable node may later get transformed into a large node */
6409
6410     // assert(GenTree::s_gtNodeSizes[GT_CALL] > GenTree::s_gtNodeSizes[GT_LCL_VAR]);
6411
6412     GenTreePtr node = new (this, GT_CALL) GenTreeLclVar(type, lnum, ILoffs DEBUGARG(/*largeNode*/ true));
6413 #else
6414     GenTreePtr node = new (this, GT_LCL_VAR) GenTreeLclVar(type, lnum, ILoffs DEBUGARG(/*largeNode*/ true));
6415 #endif
6416
6417     return node;
6418 }
6419
6420 GenTreeLclFld* Compiler::gtNewLclFldNode(unsigned lnum, var_types type, unsigned offset)
6421 {
6422     GenTreeLclFld* node = new (this, GT_LCL_FLD) GenTreeLclFld(type, lnum, offset);
6423
6424     /* Cannot have this assert because the inliner uses this function
6425      * to add temporaries */
6426
6427     // assert(lnum < lvaCount);
6428
6429     node->gtFieldSeq = FieldSeqStore::NotAField();
6430     return node;
6431 }
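
// Illustrative sketch (hypothetical 'comp' and local number 'lclNum'): a use of an int
// local, and a GT_LCL_FLD that reinterprets two bytes at offset 4 within the same local.
//
//     GenTreePtr     use = comp->gtNewLclvNode(lclNum, TYP_INT);
//     GenTreeLclFld* fld = comp->gtNewLclFldNode(lclNum, TYP_SHORT, 4);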
6432
6433 GenTreePtr Compiler::gtNewInlineCandidateReturnExpr(GenTreePtr inlineCandidate, var_types type)
6434 {
6435     assert(GenTree::s_gtNodeSizes[GT_RET_EXPR] == TREE_NODE_SZ_LARGE);
6436
6437     GenTreePtr node = new (this, GT_RET_EXPR) GenTreeRetExpr(type);
6438
6439     node->gtRetExpr.gtInlineCandidate = inlineCandidate;
6440
6441     if (varTypeIsStruct(inlineCandidate))
6442     {
6443         node->gtRetExpr.gtRetClsHnd = gtGetStructHandle(inlineCandidate);
6444     }
6445
6446     // A GT_RET_EXPR node might eventually be bashed back to a GT_CALL (for example, when inlining is aborted).
6447     // Therefore it should carry the GTF_CALL flag so that all the rules about spilling can apply to it as well.
6448     // For example, impImportLeave and CEE_POP need to spill a GT_RET_EXPR before emptying the evaluation stack.
6449     node->gtFlags |= GTF_CALL;
6450
6451     return node;
6452 }
6453
6454 GenTreeArgList* Compiler::gtNewListNode(GenTreePtr op1, GenTreeArgList* op2)
6455 {
6456     assert((op1 != nullptr) && (op1->OperGet() != GT_LIST));
6457
6458     return new (this, GT_LIST) GenTreeArgList(op1, op2);
6459 }
6460
6461 /*****************************************************************************
6462  *
6463  *  Create a list out of one value.
6464  */
6465
6466 GenTreeArgList* Compiler::gtNewArgList(GenTreePtr arg)
6467 {
6468     return new (this, GT_LIST) GenTreeArgList(arg);
6469 }
6470
6471 /*****************************************************************************
6472  *
6473  *  Create a list out of the two values.
6474  */
6475
6476 GenTreeArgList* Compiler::gtNewArgList(GenTreePtr arg1, GenTreePtr arg2)
6477 {
6478     return new (this, GT_LIST) GenTreeArgList(arg1, gtNewArgList(arg2));
6479 }
6480
6481 //------------------------------------------------------------------------
6482 // Compiler::gtNewAggregate:
6483 //    Creates a new aggregate argument node. These nodes are used to
6484 //    represent arguments that are composed of multiple values (e.g.
6485 //    the lclVars that represent the fields of a promoted struct).
6486 //
6487 //    Note that aggregate arguments are currently represented by GT_LIST
6488 //    nodes that are marked with the GTF_LIST_AGGREGATE flag. This
6489 //    representation may be changed in the future to instead use its own
6490 //    node type (e.g. GT_AGGREGATE).
6491 //
6492 // Arguments:
6493 //    firstElement - The first element in the aggregate's list of values.
6494 //
6495 // Returns:
6496 //    The newly-created aggregate node.
6497 GenTreeArgList* Compiler::gtNewAggregate(GenTree* firstElement)
6498 {
6499     GenTreeArgList* agg = gtNewArgList(firstElement);
6500     agg->gtFlags |= GTF_LIST_AGGREGATE;
6501     return agg;
6502 }
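
// Illustrative sketch (hypothetical 'comp' and promoted-field local tree 'fldLcl0'): an
// aggregate argument is an ordinary GT_LIST whose head is tagged with GTF_LIST_AGGREGATE.
//
//     GenTreeArgList* agg = comp->gtNewAggregate(fldLcl0);
//     assert((agg->gtFlags & GTF_LIST_AGGREGATE) != 0);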
6503
6504 /*****************************************************************************
6505  *
6506  *  Create a list out of the three values.
6507  */
6508
6509 GenTreeArgList* Compiler::gtNewArgList(GenTreePtr arg1, GenTreePtr arg2, GenTreePtr arg3)
6510 {
6511     return new (this, GT_LIST) GenTreeArgList(arg1, gtNewArgList(arg2, arg3));
6512 }
6513
6514 /*****************************************************************************
6515  *
6516  *  Given a GT_CALL node, access the fgArgInfo and find the entry
6517  *  that has the matching argNum and return the fgArgTableEntryPtr
6518  */
6519
6520 fgArgTabEntryPtr Compiler::gtArgEntryByArgNum(GenTreePtr call, unsigned argNum)
6521 {
6522     noway_assert(call->IsCall());
6523     fgArgInfoPtr argInfo = call->gtCall.fgArgInfo;
6524     noway_assert(argInfo != nullptr);
6525
6526     unsigned          argCount       = argInfo->ArgCount();
6527     fgArgTabEntryPtr* argTable       = argInfo->ArgTable();
6528     fgArgTabEntryPtr  curArgTabEntry = nullptr;
6529
6530     for (unsigned i = 0; i < argCount; i++)
6531     {
6532         curArgTabEntry = argTable[i];
6533         if (curArgTabEntry->argNum == argNum)
6534         {
6535             return curArgTabEntry;
6536         }
6537     }
6538     noway_assert(!"gtArgEntryByArgNum: argNum not found");
6539     return nullptr;
6540 }
6541
6542 /*****************************************************************************
6543  *
6544  *  Given a GT_CALL node, access the fgArgInfo and find the entry
6545  *  that has the matching node and return the fgArgTableEntryPtr
6546  */
6547
6548 fgArgTabEntryPtr Compiler::gtArgEntryByNode(GenTreePtr call, GenTreePtr node)
6549 {
6550     noway_assert(call->IsCall());
6551     fgArgInfoPtr argInfo = call->gtCall.fgArgInfo;
6552     noway_assert(argInfo != nullptr);
6553
6554     unsigned          argCount       = argInfo->ArgCount();
6555     fgArgTabEntryPtr* argTable       = argInfo->ArgTable();
6556     fgArgTabEntryPtr  curArgTabEntry = nullptr;
6557
6558     for (unsigned i = 0; i < argCount; i++)
6559     {
6560         curArgTabEntry = argTable[i];
6561
6562         if (curArgTabEntry->node == node)
6563         {
6564             return curArgTabEntry;
6565         }
6566 #ifdef PROTO_JIT
6567         else if (node->OperGet() == GT_RELOAD && node->gtOp.gtOp1 == curArgTabEntry->node)
6568         {
6569             return curArgTabEntry;
6570         }
6571 #endif // PROTO_JIT
6572         else if (curArgTabEntry->parent != nullptr)
6573         {
6574             assert(curArgTabEntry->parent->IsList());
6575             if (curArgTabEntry->parent->Current() == node)
6576             {
6577                 return curArgTabEntry;
6578             }
6579         }
6580         else // (curArgTabEntry->parent == NULL)
6581         {
6582             if (call->gtCall.gtCallObjp == node)
6583             {
6584                 return curArgTabEntry;
6585             }
6586         }
6587     }
6588     noway_assert(!"gtArgEntryByNode: node not found");
6589     return nullptr;
6590 }
6591
6592 /*****************************************************************************
6593  *
6594  *  Find and return the entry with the given "lateArgInx".  Requires that one is found
6595  *  (asserts this).
6596  */
6597 fgArgTabEntryPtr Compiler::gtArgEntryByLateArgIndex(GenTreePtr call, unsigned lateArgInx)
6598 {
6599     noway_assert(call->IsCall());
6600     fgArgInfoPtr argInfo = call->gtCall.fgArgInfo;
6601     noway_assert(argInfo != nullptr);
6602
6603     unsigned          argCount       = argInfo->ArgCount();
6604     fgArgTabEntryPtr* argTable       = argInfo->ArgTable();
6605     fgArgTabEntryPtr  curArgTabEntry = nullptr;
6606
6607     for (unsigned i = 0; i < argCount; i++)
6608     {
6609         curArgTabEntry = argTable[i];
6610         if (curArgTabEntry->lateArgInx == lateArgInx)
6611         {
6612             return curArgTabEntry;
6613         }
6614     }
6615     noway_assert(!"gtArgEntryByNode: node not found");
6616     return nullptr;
6617 }
6618
6619 /*****************************************************************************
6620  *
6621  *  Given an fgArgTabEntryPtr, return true if it is the 'this' pointer argument.
6622  */
6623 bool Compiler::gtArgIsThisPtr(fgArgTabEntryPtr argEntry)
6624 {
6625     return (argEntry->parent == nullptr);
6626 }
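
// Illustrative sketch (hypothetical 'comp' and GT_CALL tree 'call' whose fgArgInfo has
// already been created): the lookups above are commonly combined when inspecting an argument.
//
//     fgArgTabEntryPtr entry = comp->gtArgEntryByArgNum(call, 0);
//     if (comp->gtArgIsThisPtr(entry))
//     {
//         // argument 0 is the implicit 'this' pointer
//     }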
6627
6628 /*****************************************************************************
6629  *
6630  *  Create a node that will assign 'src' to 'dst'.
6631  */
6632
6633 GenTreePtr Compiler::gtNewAssignNode(GenTreePtr dst, GenTreePtr src)
6634 {
6635     /* Mark the target as being assigned */
6636
6637     if ((dst->gtOper == GT_LCL_VAR) || (dst->OperGet() == GT_LCL_FLD))
6638     {
6639         dst->gtFlags |= GTF_VAR_DEF;
6640         if (dst->IsPartialLclFld(this))
6641         {
6642             // We treat these partial writes as combined uses and defs.
6643             dst->gtFlags |= GTF_VAR_USEASG;
6644         }
6645     }
6646     dst->gtFlags |= GTF_DONT_CSE;
6647
6648     /* Create the assignment node */
6649
6650     GenTreePtr asg = gtNewOperNode(GT_ASG, dst->TypeGet(), dst, src);
6651
6652     /* Mark the expression as containing an assignment */
6653
6654     asg->gtFlags |= GTF_ASG;
6655
6656     return asg;
6657 }
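
// Illustrative sketch (hypothetical 'comp' and int local 'lclNum'): storing the constant 1
// into the local builds a GT_ASG whose destination is marked as a definition.
//
//     GenTreePtr dst = comp->gtNewLclvNode(lclNum, TYP_INT);
//     GenTreePtr asg = comp->gtNewAssignNode(dst, comp->gtNewIconNode(1));
//     // 'dst' now carries GTF_VAR_DEF and GTF_DONT_CSE; 'asg' carries GTF_ASG.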
6658
6659 // Creates a new Obj node.
6660 GenTreeObj* Compiler::gtNewObjNode(CORINFO_CLASS_HANDLE structHnd, GenTree* addr)
6661 {
6662     var_types nodeType = impNormStructType(structHnd);
6663     assert(varTypeIsStruct(nodeType));
6664     GenTreeObj* objNode = new (this, GT_OBJ) GenTreeObj(nodeType, addr, structHnd);
6665     // An Obj is not a global reference if it is known to be a local struct.
6666     GenTreeLclVarCommon* lclNode = addr->IsLocalAddrExpr();
6667     if ((lclNode != nullptr) && !lvaIsImplicitByRefLocal(lclNode->gtLclNum))
6668     {
6669         objNode->gtFlags &= ~GTF_GLOB_REF;
6670     }
6671     return objNode;
6672 }
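
// Illustrative sketch (hypothetical 'comp', class handle 'structHnd', and struct-typed
// local tree 'lclNode'): an OBJ over the address of a local struct has GTF_GLOB_REF
// cleared by the code above, unless the local is an implicit-byref parameter.
//
//     GenTreePtr  addr = comp->gtNewOperNode(GT_ADDR, TYP_BYREF, lclNode);
//     GenTreeObj* obj  = comp->gtNewObjNode(structHnd, addr);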
6673
6674 // Creates a new CpObj node.
6675 // Parameters (exactly the same as MSIL CpObj):
6676 //
6677 //  dst        - The target to copy the struct to
6678 //  src        - The source to copy the struct from
6679 //  structHnd  - A class token that represents the type of object being copied. May be null
6680 //               if FEATURE_SIMD is enabled and the source has a SIMD type.
6681 //  isVolatile - Is this marked as volatile memory?
6682 GenTreeBlkOp* Compiler::gtNewCpObjNode(GenTreePtr dst, GenTreePtr src, CORINFO_CLASS_HANDLE structHnd, bool isVolatile)
6683 {
6684     size_t    size       = 0;
6685     unsigned  slots      = 0;
6686     unsigned  gcPtrCount = 0;
6687     BYTE*     gcPtrs     = nullptr;
6688     var_types type       = TYP_STRUCT;
6689
6690     GenTreePtr hndOrSize = nullptr;
6691
6692     GenTreeBlkOp* result = nullptr;
6693
6694     bool useCopyObj = false;
6695
6696     // Intermediate SIMD operations may use SIMD types that are not used by the input IL.
6697     // In this case, the provided type handle will be null and the size of the copy will
6698     // be derived from the node's varType.
6699     if (structHnd == nullptr)
6700     {
6701 #if FEATURE_SIMD
6702         assert(src->OperGet() == GT_ADDR);
6703
6704         GenTree* srcValue = src->gtGetOp1();
6705
6706         type = srcValue->TypeGet();
6707         assert(varTypeIsSIMD(type));
6708
6709         size = genTypeSize(type);
6710 #else
6711         assert(!"structHnd should not be null if FEATURE_SIMD is not enabled!");
6712 #endif
6713     }
6714     else
6715     {
6716         // Get the size of the type
6717         size = info.compCompHnd->getClassSize(structHnd);
6718
6719         if (size >= TARGET_POINTER_SIZE)
6720         {
6721             slots  = (unsigned)(roundUp(size, TARGET_POINTER_SIZE) / TARGET_POINTER_SIZE);
6722             gcPtrs = new (this, CMK_ASTNode) BYTE[slots];
6723
6724             type = impNormStructType(structHnd, gcPtrs, &gcPtrCount);
6725             if (varTypeIsEnregisterableStruct(type))
6726             {
6727                 if (dst->OperGet() == GT_ADDR)
6728                 {
6729                     GenTree* actualDst = dst->gtGetOp1();
6730                     assert((actualDst->TypeGet() == type) || !varTypeIsEnregisterableStruct(actualDst));
6731                     actualDst->gtType = type;
6732                 }
6733                 if (src->OperGet() == GT_ADDR)
6734                 {
6735                     GenTree* actualSrc = src->gtGetOp1();
6736                     assert((actualSrc->TypeGet() == type) || !varTypeIsEnregisterableStruct(actualSrc));
6737                     actualSrc->gtType = type;
6738                 }
6739             }
6740
6741             useCopyObj = gcPtrCount > 0;
6742         }
6743     }
6744
6745     // If the class being copied contains any GC pointers, we store a class handle
6746     // in the icon node; otherwise we store the size in bytes to copy.
6747     //
6748     genTreeOps op;
6749     if (useCopyObj)
6750     {
6751         // This will be treated as a cpobj since we need to note GC info.
6752         // Store the class handle and mark the node
6753         op        = GT_COPYOBJ;
6754         hndOrSize = gtNewIconHandleNode((size_t)structHnd, GTF_ICON_CLASS_HDL);
6755         result    = new (this, GT_COPYOBJ) GenTreeCpObj(gcPtrCount, slots, gcPtrs);
6756     }
6757     else
6758     {
6759         assert(gcPtrCount == 0);
6760
6761         // Doesn't need GC info. Treat operation as a cpblk
6762         op                      = GT_COPYBLK;
6763         hndOrSize               = gtNewIconNode(size);
6764         result                  = new (this, GT_COPYBLK) GenTreeCpBlk();
6765         result->gtBlkOpGcUnsafe = false;
6766     }
6767
6768     gtBlockOpInit(result, op, dst, src, hndOrSize, isVolatile);
6769     return result;
6770 }
6771
6772 //------------------------------------------------------------------------
6773 // FixupInitBlkValue: Fixup the init value for an initBlk operation
6774 //
6775 // Arguments:
6776 //    asgType - The type of assignment that the initBlk is being transformed into
6777 //
6778 // Return Value:
6779 //    Modifies the constant value on this node to be the appropriate "fill"
6780 //    value for the initblk.
6781 //
6782 // Notes:
6783 //    The initBlk MSIL instruction takes a byte value, which must be
6784 //    extended to the size of the assignment when an initBlk is transformed
6785 //    to an assignment of a primitive type.
6786 //    This performs the appropriate extension.
6787
6788 void GenTreeIntCon::FixupInitBlkValue(var_types asgType)
6789 {
6790     assert(varTypeIsIntegralOrI(asgType));
6791     unsigned size = genTypeSize(asgType);
6792     if (size > 1)
6793     {
6794         size_t cns = gtIconVal;
6795         cns        = cns & 0xFF;
6796         cns |= cns << 8;
6797         if (size >= 4)
6798         {
6799             cns |= cns << 16;
6800 #ifdef _TARGET_64BIT_
6801             if (size == 8)
6802             {
6803                 cns |= cns << 32;
6804             }
6805 #endif // _TARGET_64BIT_
6806
6807             // Make the type used in the GT_IND node match for evaluation types.
6808             gtType = asgType;
6809
6810             // If we are using a GT_INITBLK on a GC type, the value being assigned has to be zero (null).
6811             assert(!varTypeIsGC(asgType) || (cns == 0));
6812         }
6813
6814         gtIconVal = cns;
6815     }
6816 }
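
// Worked example (illustrative only, hypothetical 'comp'): for a fill byte of 0x3F being
// turned into an assignment of a TYP_INT primitive, the low byte is replicated across the
// assignment width: 0x3F -> 0x3F3F -> 0x3F3F3F3F (and to 0x3F3F3F3F3F3F3F3F for an 8-byte
// target on 64-bit). GC-typed targets are only legal with a zero fill value.
//
//     GenTreeIntCon* fill = comp->gtNewIconNode(0x3F)->AsIntCon();
//     fill->FixupInitBlkValue(TYP_INT); // fill->gtIconVal is now 0x3F3F3F3F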
6817
6818 // Initializes a BlkOp GenTree
6819 // Preconditions:
6820 //     - Result is a GenTreeBlkOp that is newly constructed by gtNewCpObjNode or gtNewBlkOpNode
6821 //
6822 // Parameters:
6823 //     - result is a GenTreeBlkOp node that is the node to be initialized.
6824 //     - oper must be GT_INITBLK, GT_COPYBLK or GT_COPYOBJ
6825 //     - dst is the target (destination) we want to either initialize or copy to
6826 //     - src is the init value for InitBlk or the source struct for CpBlk/CpObj
6827 //     - size is either the size of the buffer to copy/initialize or a class token
6828 //       in the case of CpObj.
6829 //     - volatil flag specifies if this node is a volatile memory operation.
6830 //
6831 // This procedure centralizes all the logic to both enforce proper structure and
6832 // to properly construct any InitBlk/CpBlk node.
6833 void Compiler::gtBlockOpInit(
6834     GenTreePtr result, genTreeOps oper, GenTreePtr dst, GenTreePtr srcOrFillVal, GenTreePtr hndOrSize, bool volatil)
6835 {
6836     assert(GenTree::OperIsBlkOp(oper));
6837
6838     assert(result->gtType == TYP_VOID);
6839     result->gtOper = oper;
6840
6841 #ifdef DEBUG
6842     // If this is a CpObj node, the caller must have already set
6843     // the node's additional members (gtGcPtrs, gtGcPtrCount, gtSlots).
6844     if (hndOrSize->OperGet() == GT_CNS_INT && hndOrSize->IsIconHandle(GTF_ICON_CLASS_HDL))
6845     {
6846         GenTreeCpObj* cpObjNode = result->AsCpObj();
6847
6848         assert(cpObjNode->gtGcPtrs != nullptr);
6849         assert(!IsUninitialized(cpObjNode->gtGcPtrs));
6850         assert(!IsUninitialized(cpObjNode->gtGcPtrCount) && cpObjNode->gtGcPtrCount > 0);
6851         assert(!IsUninitialized(cpObjNode->gtSlots) && cpObjNode->gtSlots > 0);
6852
6853         for (unsigned i = 0; i < cpObjNode->gtGcPtrCount; ++i)
6854         {
6855             CorInfoGCType t = (CorInfoGCType)cpObjNode->gtGcPtrs[i];
6856             switch (t)
6857             {
6858                 case TYPE_GC_NONE:
6859                 case TYPE_GC_REF:
6860                 case TYPE_GC_BYREF:
6861                 case TYPE_GC_OTHER:
6862                     break;
6863                 default:
6864                     unreached();
6865             }
6866         }
6867     }
6868 #endif // DEBUG
6869
6870     /* In the case of CpBlk, we want to avoid generating
6871     * nodes where the source and destination are the same,
6872     * for two reasons: first, such a copy is useless; second,
6873     * it introduces liveness issues, and copying memory
6874     * between overlapping locations is undefined behavior
6875     * both per the ECMA standard and per the semantics
6876     * of memcpy.
6877     *
6878     * NOTE: We only detect the case of the address of a local
6879     * versus the local itself; any other complex expressions
6880     * won't be caught.
6881     *
6882     * TODO-Cleanup: though having this logic is goodness (i.e. avoids self-assignment
6883     * of struct vars very early), it was added because fgInterBlockLocalVarLiveness()
6884     * isn't handling self-assignment of struct variables correctly.  This issue may not
6885     * surface if struct promotion is ON (which is the case on x86/arm).  But still the
6886     * fundamental issue exists that needs to be addressed.
6887     */
6888     GenTreePtr currSrc = srcOrFillVal;
6889     GenTreePtr currDst = dst;
6890     if (currSrc->OperGet() == GT_ADDR && currDst->OperGet() == GT_ADDR)
6891     {
6892         currSrc = currSrc->gtOp.gtOp1;
6893         currDst = currDst->gtOp.gtOp1;
6894     }
6895
6896     if (currSrc->OperGet() == GT_LCL_VAR && currDst->OperGet() == GT_LCL_VAR &&
6897         currSrc->gtLclVarCommon.gtLclNum == currDst->gtLclVarCommon.gtLclNum)
6898     {
6899         // Make this a NOP
6900         result->gtBashToNOP();
6901         return;
6902     }
6903
6904     /* Note that this use of a GT_LIST is different from all others  */
6905     /* in that the GT_LIST is used as a tuple [dest,src] rather than  */
6906     /* being a NULL-terminated list of GT_LIST nodes                  */
6907     result->gtOp.gtOp1 = gtNewOperNode(GT_LIST, TYP_VOID,  /*        GT_[oper]          */
6908                                        dst, srcOrFillVal); /*        /      \           */
6909     result->gtOp.gtOp2 = hndOrSize;                        /*   GT_LIST      \          */
6910                                                            /*    /    \  [hndOrSize]    */
6911                                                            /* [dst] [srcOrFillVal]      */
6912
6913     // Propagate all effect flags from children
6914     result->gtFlags |= result->gtOp.gtOp1->gtFlags & GTF_ALL_EFFECT;
6915     result->gtFlags |= result->gtOp.gtOp2->gtFlags & GTF_ALL_EFFECT;
6916
6917     result->gtFlags |= (GTF_GLOB_REF | GTF_ASG);
6918
6919     // REVERSE_OPS is necessary because the use must occur before the def
6920     result->gtOp.gtOp1->gtFlags |= GTF_REVERSE_OPS;
6921
6922     if (result->gtOper == GT_INITBLK)
6923     {
6924         result->gtFlags |= (dst->gtFlags & GTF_EXCEPT) | (hndOrSize->gtFlags & GTF_EXCEPT);
6925     }
6926     else
6927     {
6928         result->gtFlags |=
6929             (dst->gtFlags & GTF_EXCEPT) | (srcOrFillVal->gtFlags & GTF_EXCEPT) | (hndOrSize->gtFlags & GTF_EXCEPT);
6930
6931         // If the class being copied contains any GC pointers, we store a class handle
6932         // and we must set the flag GTF_BLK_HASGCPTR, so that the register predictor
6933         // knows that this GT_COPYBLK will use calls to the ByRef Assign helper
6934         //
6935         if ((hndOrSize->OperGet() == GT_CNS_INT) && hndOrSize->IsIconHandle(GTF_ICON_CLASS_HDL))
6936         {
6937             hndOrSize->gtFlags |= GTF_DONT_CSE; // We can't CSE the class handle
6938             result->gtFlags |= GTF_BLK_HASGCPTR;
6939         }
6940     }
6941
6942     if (volatil)
6943     {
6944         result->gtFlags |= GTF_BLK_VOLATILE;
6945     }
6946
6947 #ifdef FEATURE_SIMD
6948     if (oper == GT_COPYBLK && srcOrFillVal->OperGet() == GT_ADDR && dst->OperGet() == GT_ADDR)
6949     {
6950         // If the source is a GT_SIMD node of SIMD type, then the dst lclvar struct
6951         // should be labeled as a SIMD-intrinsic-related struct.
6952         // This is done so that the morpher can transform any field accesses into
6953         // intrinsics, thus avoiding conflicting access methods (fields vs. whole-register).
6954
6955         GenTreePtr srcChild = srcOrFillVal->gtGetOp1();
6956         GenTreePtr dstChild = dst->gtGetOp1();
6957
6958         if (dstChild->OperIsLocal() && varTypeIsStruct(dstChild) && srcChild->OperGet() == GT_SIMD &&
6959             varTypeIsSIMD(srcChild))
6960         {
6961             unsigned   lclNum                = dst->gtGetOp1()->AsLclVarCommon()->GetLclNum();
6962             LclVarDsc* lclVarDsc             = &lvaTable[lclNum];
6963             lclVarDsc->lvUsedInSIMDIntrinsic = true;
6964         }
6965     }
6966 #endif // FEATURE_SIMD
6967 }
6968
6969 //------------------------------------------------------------------------
6970 // gtNewBlkOpNode: Creates an InitBlk or CpBlk node.
6971 //
6972 // Arguments:
6973 //    oper          - GT_COPYBLK, GT_INITBLK or GT_COPYOBJ
6974 //    dst           - Destination or target to copy to / initialize the buffer.
6975 //    srcOrFillVall - Either the source to copy from or the byte value to fill the buffer.
6976 //    sizeOrClsTok  - The size of the buffer or a class token (in the case of CpObj).
6977 //    isVolatile    - Whether this is a volatile memory operation or not.
6978 //
6979 // Return Value:
6980 //    Returns the newly constructed and initialized block operation.
6981
6982 GenTreeBlkOp* Compiler::gtNewBlkOpNode(
6983     genTreeOps oper, GenTreePtr dst, GenTreePtr srcOrFillVal, GenTreePtr sizeOrClsTok, bool isVolatile)
6984 {
6985     GenTreeBlkOp* result = new (this, oper) GenTreeBlkOp(oper);
6986     gtBlockOpInit(result, oper, dst, srcOrFillVal, sizeOrClsTok, isVolatile);
6987     return result;
6988 }
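
// Illustrative sketch (hypothetical 'comp' with address trees 'dstAddr' and 'srcAddr'): a
// 16-byte copy of GC-free data can be built directly as a GT_COPYBLK; when a class handle
// is available, gtNewCpObjNode above is preferred because it switches to GT_COPYOBJ
// automatically when the layout contains GC pointers.
//
//     GenTreePtr    size = comp->gtNewIconNode(16);
//     GenTreeBlkOp* blk  = comp->gtNewBlkOpNode(GT_COPYBLK, dstAddr, srcAddr, size, /* isVolatile */ false);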
6989
6990 /*****************************************************************************
6991  *
6992  *  Clones the given tree value and returns a copy of the given tree.
6993  *  If 'complexOK' is false, the cloning is only done provided the tree
6994  *     is not too complex (whatever that may mean);
6995  *  If 'complexOK' is true, we try slightly harder to clone the tree.
6996  *  In either case, NULL is returned if the tree cannot be cloned
6997  *
6998  *  Note that there is the function gtCloneExpr() which does a more
6999  *  complete job if you can't handle this function failing.
7000  */
7001
7002 GenTreePtr Compiler::gtClone(GenTree* tree, bool complexOK)
7003 {
7004     GenTreePtr copy;
7005
7006     switch (tree->gtOper)
7007     {
7008         case GT_CNS_INT:
7009
7010 #if defined(LATE_DISASM)
7011             if (tree->IsIconHandle())
7012             {
7013                 copy = gtNewIconHandleNode(tree->gtIntCon.gtIconVal, tree->gtFlags, tree->gtIntCon.gtFieldSeq,
7014                                            tree->gtIntCon.gtIconHdl.gtIconHdl1, tree->gtIntCon.gtIconHdl.gtIconHdl2);
7015                 copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
7016                 copy->gtType                       = tree->gtType;
7017             }
7018             else
7019 #endif
7020             {
7021                 copy = new (this, GT_CNS_INT)
7022                     GenTreeIntCon(tree->gtType, tree->gtIntCon.gtIconVal, tree->gtIntCon.gtFieldSeq);
7023                 copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
7024             }
7025             break;
7026
7027         case GT_LCL_VAR:
7028             // Remember that the LclVar node has been cloned. The flag will be set
7029             // on 'copy' as well.
7030             tree->gtFlags |= GTF_VAR_CLONED;
7031             copy = gtNewLclvNode(tree->gtLclVarCommon.gtLclNum, tree->gtType, tree->gtLclVar.gtLclILoffs);
7032             break;
7033
7034         case GT_LCL_FLD:
7035         case GT_LCL_FLD_ADDR:
7036             // Remember that the LclVar node has been cloned. The flag will be set
7037             // on 'copy' as well.
7038             tree->gtFlags |= GTF_VAR_CLONED;
7039             copy = new (this, tree->gtOper)
7040                 GenTreeLclFld(tree->gtOper, tree->TypeGet(), tree->gtLclFld.gtLclNum, tree->gtLclFld.gtLclOffs);
7041             copy->gtLclFld.gtFieldSeq = tree->gtLclFld.gtFieldSeq;
7042             break;
7043
7044         case GT_CLS_VAR:
7045             copy = new (this, GT_CLS_VAR)
7046                 GenTreeClsVar(tree->gtType, tree->gtClsVar.gtClsVarHnd, tree->gtClsVar.gtFieldSeq);
7047             break;
7048
7049         case GT_REG_VAR:
7050             assert(!"clone regvar");
7051
7052         default:
7053             if (!complexOK)
7054             {
7055                 return nullptr;
7056             }
7057
7058             if (tree->gtOper == GT_FIELD)
7059             {
7060                 GenTreePtr objp;
7061
7062                 // copied from line 9850
7063
7064                 objp = nullptr;
7065                 if (tree->gtField.gtFldObj)
7066                 {
7067                     objp = gtClone(tree->gtField.gtFldObj, false);
7068                     if (!objp)
7069                     {
7070                         return objp;
7071                     }
7072                 }
7073
7074                 copy = gtNewFieldRef(tree->TypeGet(), tree->gtField.gtFldHnd, objp, tree->gtField.gtFldOffset);
7075                 copy->gtField.gtFldMayOverlap = tree->gtField.gtFldMayOverlap;
7076             }
7077             else if (tree->gtOper == GT_ADD)
7078             {
7079                 GenTreePtr op1 = tree->gtOp.gtOp1;
7080                 GenTreePtr op2 = tree->gtOp.gtOp2;
7081
7082                 if (op1->OperIsLeaf() && op2->OperIsLeaf())
7083                 {
7084                     op1 = gtClone(op1);
7085                     if (op1 == nullptr)
7086                     {
7087                         return nullptr;
7088                     }
7089                     op2 = gtClone(op2);
7090                     if (op2 == nullptr)
7091                     {
7092                         return nullptr;
7093                     }
7094
7095                     copy = gtNewOperNode(GT_ADD, tree->TypeGet(), op1, op2);
7096                 }
7097                 else
7098                 {
7099                     return nullptr;
7100                 }
7101             }
7102             else if (tree->gtOper == GT_ADDR)
7103             {
7104                 GenTreePtr op1 = gtClone(tree->gtOp.gtOp1);
7105                 if (op1 == nullptr)
7106                 {
7107                     return nullptr;
7108                 }
7109                 copy = gtNewOperNode(GT_ADDR, tree->TypeGet(), op1);
7110             }
7111             else
7112             {
7113                 return nullptr;
7114             }
7115
7116             break;
7117     }
7118
7119     copy->gtFlags |= tree->gtFlags & ~GTF_NODE_MASK;
7120 #if defined(DEBUG)
7121     copy->gtDebugFlags |= tree->gtDebugFlags & ~GTF_DEBUG_NODE_MASK;
7122 #endif // defined(DEBUG)
7123
7124     return copy;
7125 }
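
// Illustrative sketch (hypothetical 'comp' and node 'leaf'): gtClone is the cheap, fallible
// cloning helper; callers must handle a nullptr result and fall back to gtCloneExpr when a
// full clone is required.
//
//     GenTreePtr copyOfLeaf = comp->gtClone(leaf);
//     if (copyOfLeaf == nullptr)
//     {
//         copyOfLeaf = comp->gtCloneExpr(leaf, 0); // never fails, but may be more expensive
//     }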
7126
7127 /*****************************************************************************
7128  *
7129  *  Clones the given tree value and returns a copy of the given tree. Any
7130  *  references to local variable varNum will be replaced with the integer
7131  *  constant varVal.
7132  */
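
// Illustrative sketch (hypothetical 'comp', tree 'expr', local number 'lclNum'):
//
//     GenTreePtr copy1 = comp->gtCloneExpr(expr, 0);             // plain clone
//     GenTreePtr copy2 = comp->gtCloneExpr(expr, 0, lclNum, 0);  // clone, replacing uses of
//                                                                // local V(lclNum) with 0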
7133
7134 GenTreePtr Compiler::gtCloneExpr(GenTree* tree,
7135                                  unsigned addFlags,
7136                                  unsigned varNum, // = (unsigned)-1
7137                                  int      varVal)
7138 {
7139     if (tree == nullptr)
7140     {
7141         return nullptr;
7142     }
7143
7144     /* Figure out what kind of a node we have */
7145
7146     genTreeOps oper = tree->OperGet();
7147     unsigned   kind = tree->OperKind();
7148     GenTree*   copy;
7149
7150     /* Is this a constant or leaf node? */
7151
7152     if (kind & (GTK_CONST | GTK_LEAF))
7153     {
7154         switch (oper)
7155         {
7156             case GT_CNS_INT:
7157
7158 #if defined(LATE_DISASM)
7159                 if (tree->IsIconHandle())
7160                 {
7161                     copy = gtNewIconHandleNode(tree->gtIntCon.gtIconVal, tree->gtFlags, tree->gtIntCon.gtFieldSeq,
7162                                                tree->gtIntCon.gtIconFld.gtIconCPX, tree->gtIntCon.gtIconFld.gtIconCls);
7163                     copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
7164                     copy->gtType                       = tree->gtType;
7165                 }
7166                 else
7167 #endif
7168                 {
7169                     copy                               = gtNewIconNode(tree->gtIntCon.gtIconVal, tree->gtType);
7170                     copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
7171                     copy->gtIntCon.gtFieldSeq          = tree->gtIntCon.gtFieldSeq;
7172                 }
7173                 goto DONE;
7174
7175             case GT_CNS_LNG:
7176                 copy = gtNewLconNode(tree->gtLngCon.gtLconVal);
7177                 goto DONE;
7178
7179             case GT_CNS_DBL:
7180                 copy         = gtNewDconNode(tree->gtDblCon.gtDconVal);
7181                 copy->gtType = tree->gtType; // keep the same type
7182                 goto DONE;
7183
7184             case GT_CNS_STR:
7185                 copy = gtNewSconNode(tree->gtStrCon.gtSconCPX, tree->gtStrCon.gtScpHnd);
7186                 goto DONE;
7187
7188             case GT_LCL_VAR:
7189
7190                 if (tree->gtLclVarCommon.gtLclNum == varNum)
7191                 {
7192                     copy = gtNewIconNode(varVal, tree->gtType);
7193                 }
7194                 else
7195                 {
7196                     // Remember that the LclVar node has been cloned. The flag will
7197                     // be set on 'copy' as well.
7198                     tree->gtFlags |= GTF_VAR_CLONED;
7199                     copy = gtNewLclvNode(tree->gtLclVar.gtLclNum, tree->gtType, tree->gtLclVar.gtLclILoffs);
7200                     copy->AsLclVarCommon()->SetSsaNum(tree->AsLclVarCommon()->GetSsaNum());
7201                 }
7202                 copy->gtFlags = tree->gtFlags;
7203                 goto DONE;
7204
7205             case GT_LCL_FLD:
7206                 if (tree->gtLclFld.gtLclNum == varNum)
7207                 {
7208                     IMPL_LIMITATION("replacing GT_LCL_FLD with a constant");
7209                 }
7210                 else
7211                 {
7212                     // Remember that the LclVar node has been cloned. The flag will
7213                     // be set on 'copy' as well.
7214                     tree->gtFlags |= GTF_VAR_CLONED;
7215                     copy = new (this, GT_LCL_FLD)
7216                         GenTreeLclFld(tree->TypeGet(), tree->gtLclFld.gtLclNum, tree->gtLclFld.gtLclOffs);
7217                     copy->gtLclFld.gtFieldSeq = tree->gtLclFld.gtFieldSeq;
7218                     copy->gtFlags             = tree->gtFlags;
7219                 }
7220                 goto DONE;
7221
7222             case GT_CLS_VAR:
7223                 copy = new (this, GT_CLS_VAR)
7224                     GenTreeClsVar(tree->TypeGet(), tree->gtClsVar.gtClsVarHnd, tree->gtClsVar.gtFieldSeq);
7225                 goto DONE;
7226
7227             case GT_RET_EXPR:
7228                 copy = gtNewInlineCandidateReturnExpr(tree->gtRetExpr.gtInlineCandidate, tree->gtType);
7229                 goto DONE;
7230
7231             case GT_MEMORYBARRIER:
7232                 copy = new (this, GT_MEMORYBARRIER) GenTree(GT_MEMORYBARRIER, TYP_VOID);
7233                 goto DONE;
7234
7235             case GT_ARGPLACE:
7236                 copy = gtNewArgPlaceHolderNode(tree->gtType, tree->gtArgPlace.gtArgPlaceClsHnd);
7237                 goto DONE;
7238
7239             case GT_REG_VAR:
7240                 NO_WAY("Cloning of GT_REG_VAR node not supported");
7241                 goto DONE;
7242
7243             case GT_FTN_ADDR:
7244                 copy = new (this, oper) GenTreeFptrVal(tree->gtType, tree->gtFptrVal.gtFptrMethod);
7245
7246 #ifdef FEATURE_READYTORUN_COMPILER
7247                 copy->gtFptrVal.gtEntryPoint         = tree->gtFptrVal.gtEntryPoint;
7248                 copy->gtFptrVal.gtLdftnResolvedToken = tree->gtFptrVal.gtLdftnResolvedToken;
7249 #endif
7250                 goto DONE;
7251
7252             case GT_CATCH_ARG:
7253             case GT_NO_OP:
7254                 copy = new (this, oper) GenTree(oper, tree->gtType);
7255                 goto DONE;
7256
7257 #if !FEATURE_EH_FUNCLETS
7258             case GT_END_LFIN:
7259 #endif // !FEATURE_EH_FUNCLETS
7260             case GT_JMP:
7261                 copy = new (this, oper) GenTreeVal(oper, tree->gtType, tree->gtVal.gtVal1);
7262                 goto DONE;
7263
7264             case GT_LABEL:
7265                 copy = new (this, oper) GenTreeLabel(tree->gtLabel.gtLabBB);
7266                 goto DONE;
7267
7268             default:
7269                 NO_WAY("Cloning of node not supported");
7270                 goto DONE;
7271         }
7272     }
7273
7274     /* Is it a 'simple' unary/binary operator? */
7275
7276     if (kind & GTK_SMPOP)
7277     {
7278         /* If necessary, make sure we allocate a "fat" tree node */
7279         CLANG_FORMAT_COMMENT_ANCHOR;
7280
7281 #if SMALL_TREE_NODES
7282         switch (oper)
7283         {
7284             /* These nodes sometimes get bashed to "fat" ones */
7285
7286             case GT_MUL:
7287             case GT_DIV:
7288             case GT_MOD:
7289
7290             case GT_UDIV:
7291             case GT_UMOD:
7292
7293                 //  In the implementation of gtNewLargeOperNode you have
7294                 //  to give an oper that will create a small node,
7295                 //  otherwise it asserts.
7296                 //
7297                 if (GenTree::s_gtNodeSizes[oper] == TREE_NODE_SZ_SMALL)
7298                 {
7299                     copy = gtNewLargeOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1,
7300                                               tree->OperIsBinary() ? tree->gtOp.gtOp2 : nullptr);
7301                 }
7302                 else // Always a large tree
7303                 {
7304                     if (tree->OperIsBinary())
7305                     {
7306                         copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2);
7307                     }
7308                     else
7309                     {
7310                         copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1);
7311                     }
7312                 }
7313                 break;
7314
7315             case GT_CAST:
7316                 copy = new (this, LargeOpOpcode()) GenTreeCast(tree->TypeGet(), tree->gtCast.CastOp(),
7317                                                                tree->gtCast.gtCastType DEBUGARG(/*largeNode*/ TRUE));
7318                 break;
7319
7320             // The nodes below this are not bashed, so they can be allocated at their individual sizes.
7321
7322             case GT_LIST:
7323                 // This is ridiculous, but would go away if we made a stronger distinction between argument lists, whose
7324                 // second argument *must* be an arglist*, and the uses of LIST in copyblk and initblk.
7325                 if (tree->gtOp.gtOp2 != nullptr && tree->gtOp.gtOp2->OperGet() == GT_LIST)
7326                 {
7327                     copy = new (this, GT_LIST) GenTreeArgList(tree->gtOp.gtOp1, tree->gtOp.gtOp2->AsArgList());
7328                 }
7329                 else
7330                 {
7331                     copy = new (this, GT_LIST) GenTreeOp(GT_LIST, TYP_VOID, tree->gtOp.gtOp1, tree->gtOp.gtOp2);
7332                 }
7333                 break;
7334
7335             case GT_INDEX:
7336             {
7337                 GenTreeIndex* asInd = tree->AsIndex();
7338                 copy                = new (this, GT_INDEX)
7339                     GenTreeIndex(asInd->TypeGet(), asInd->Arr(), asInd->Index(), asInd->gtIndElemSize);
7340                 copy->AsIndex()->gtStructElemClass = asInd->gtStructElemClass;
7341             }
7342             break;
7343
7344             case GT_ALLOCOBJ:
7345             {
7346                 GenTreeAllocObj* asAllocObj = tree->AsAllocObj();
7347                 copy = new (this, GT_ALLOCOBJ) GenTreeAllocObj(tree->TypeGet(), asAllocObj->gtNewHelper,
7348                                                                asAllocObj->gtAllocObjClsHnd, asAllocObj->gtOp1);
7349             }
7350             break;
7351
7352             case GT_ARR_LENGTH:
7353                 copy = new (this, GT_ARR_LENGTH)
7354                     GenTreeArrLen(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtArrLen.ArrLenOffset());
7355                 break;
7356
7357             case GT_ARR_INDEX:
7358                 copy = new (this, GT_ARR_INDEX)
7359                     GenTreeArrIndex(tree->TypeGet(), gtCloneExpr(tree->gtArrIndex.ArrObj(), addFlags, varNum, varVal),
7360                                     gtCloneExpr(tree->gtArrIndex.IndexExpr(), addFlags, varNum, varVal),
7361                                     tree->gtArrIndex.gtCurrDim, tree->gtArrIndex.gtArrRank,
7362                                     tree->gtArrIndex.gtArrElemType);
7363                 break;
7364
7365             case GT_QMARK:
7366                 copy = new (this, GT_QMARK) GenTreeQmark(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2, this);
7367                 VarSetOps::AssignAllowUninitRhs(this, copy->gtQmark.gtThenLiveSet, tree->gtQmark.gtThenLiveSet);
7368                 VarSetOps::AssignAllowUninitRhs(this, copy->gtQmark.gtElseLiveSet, tree->gtQmark.gtElseLiveSet);
7369                 break;
7370
7371             case GT_OBJ:
7372                 copy = new (this, GT_OBJ) GenTreeObj(tree->TypeGet(), tree->gtOp.gtOp1, tree->AsObj()->gtClass);
7373                 break;
7374
7375             case GT_BOX:
7376                 copy = new (this, GT_BOX)
7377                     GenTreeBox(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtBox.gtAsgStmtWhenInlinedBoxValue);
7378                 break;
7379
7380             case GT_INTRINSIC:
7381                 copy = new (this, GT_INTRINSIC)
7382                     GenTreeIntrinsic(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2,
7383                                      tree->gtIntrinsic.gtIntrinsicId, tree->gtIntrinsic.gtMethodHandle);
7384 #ifdef FEATURE_READYTORUN_COMPILER
7385                 copy->gtIntrinsic.gtEntryPoint = tree->gtIntrinsic.gtEntryPoint;
7386 #endif
7387                 break;
7388
7389             case GT_COPYOBJ:
7390             {
7391                 GenTreeCpObj* cpObjOp = tree->AsCpObj();
7392                 assert(cpObjOp->gtGcPtrCount > 0);
7393                 copy = gtCloneCpObjNode(cpObjOp);
7394             }
7395             break;
7396
7397             case GT_INITBLK:
7398             {
7399                 GenTreeInitBlk* initBlkOp = tree->AsInitBlk();
7400                 copy = gtNewBlkOpNode(oper, initBlkOp->Dest(), initBlkOp->InitVal(), initBlkOp->Size(),
7401                                       initBlkOp->IsVolatile());
7402             }
7403             break;
7404
7405             case GT_COPYBLK:
7406             {
7407                 GenTreeCpBlk* cpBlkOp = tree->AsCpBlk();
7408                 copy = gtNewBlkOpNode(oper, cpBlkOp->Dest(), cpBlkOp->Source(), cpBlkOp->Size(), cpBlkOp->IsVolatile());
7409                 copy->AsCpBlk()->gtBlkOpGcUnsafe = cpBlkOp->gtBlkOpGcUnsafe;
7410             }
7411             break;
7412
7413             case GT_LEA:
7414             {
7415                 GenTreeAddrMode* addrModeOp = tree->AsAddrMode();
7416                 copy =
7417                     new (this, GT_LEA) GenTreeAddrMode(addrModeOp->TypeGet(), addrModeOp->Base(), addrModeOp->Index(),
7418                                                        addrModeOp->gtScale, addrModeOp->gtOffset);
7419             }
7420             break;
7421
7422             case GT_COPY:
7423             case GT_RELOAD:
7424             {
7425                 copy = new (this, oper) GenTreeCopyOrReload(oper, tree->TypeGet(), tree->gtGetOp1());
7426             }
7427             break;
7428
7429 #ifdef FEATURE_SIMD
7430             case GT_SIMD:
7431             {
7432                 GenTreeSIMD* simdOp = tree->AsSIMD();
7433                 copy                = gtNewSIMDNode(simdOp->TypeGet(), simdOp->gtGetOp1(), simdOp->gtGetOp2(),
7434                                      simdOp->gtSIMDIntrinsicID, simdOp->gtSIMDBaseType, simdOp->gtSIMDSize);
7435             }
7436             break;
7437 #endif
7438
7439             default:
7440                 assert(!GenTree::IsExOp(tree->OperKind()) && tree->OperIsSimple());
7441                 // We're in the SimpleOp case, so it's always unary or binary.
7442                 if (GenTree::OperIsUnary(tree->OperGet()))
7443                 {
7444                     copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, /*doSimplifications*/ false);
7445                 }
7446                 else
7447                 {
7448                     assert(GenTree::OperIsBinary(tree->OperGet()));
7449                     copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2);
7450                 }
7451                 break;
7452         }
7453 #else
7454         // We're in the SimpleOp case, so it's always unary or binary.
7455         copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2);
7456 #endif
7457
7458         // Some flags are conceptually part of the gtOper, and should be copied immediately.
7459         if (tree->gtOverflowEx())
7460         {
7461             copy->gtFlags |= GTF_OVERFLOW;
7462         }
7463         if (copy->OperGet() == GT_CAST)
7464         {
7465             copy->gtFlags |= (tree->gtFlags & GTF_UNSIGNED);
7466         }
7467
7468         if (tree->gtOp.gtOp1)
7469         {
7470             copy->gtOp.gtOp1 = gtCloneExpr(tree->gtOp.gtOp1, addFlags, varNum, varVal);
7471         }
7472
7473         if (tree->gtGetOp2())
7474         {
7475             copy->gtOp.gtOp2 = gtCloneExpr(tree->gtOp.gtOp2, addFlags, varNum, varVal);
7476         }
7477
7478         /* Flags */
7479         addFlags |= tree->gtFlags;
7480
7481         // Copy any node annotations, if necessary.
7482         switch (tree->gtOper)
7483         {
7484             case GT_ASG:
7485             {
7486                 IndirectAssignmentAnnotation* pIndirAnnot = nullptr;
7487                 if (m_indirAssignMap != nullptr && GetIndirAssignMap()->Lookup(tree, &pIndirAnnot))
7488                 {
7489                     IndirectAssignmentAnnotation* pNewIndirAnnot = new (this, CMK_Unknown)
7490                         IndirectAssignmentAnnotation(pIndirAnnot->m_lclNum, pIndirAnnot->m_fieldSeq,
7491                                                      pIndirAnnot->m_isEntire);
7492                     GetIndirAssignMap()->Set(copy, pNewIndirAnnot);
7493                 }
7494             }
7495             break;
7496
7497             case GT_STOREIND:
7498             case GT_IND:
7499                 if (tree->gtFlags & GTF_IND_ARR_INDEX)
7500                 {
7501                     ArrayInfo arrInfo;
7502                     bool      b = GetArrayInfoMap()->Lookup(tree, &arrInfo);
7503                     assert(b);
7504                     GetArrayInfoMap()->Set(copy, arrInfo);
7505                 }
7506                 break;
7507
7508             default:
7509                 break;
7510         }
7511
7512 #ifdef DEBUG
7513         /* GTF_NODE_MASK should not be propagated from 'tree' to 'copy' */
7514         addFlags &= ~GTF_NODE_MASK;
7515 #endif
7516
7517         // Effects flags propagate upwards.
7518         if (copy->gtOp.gtOp1 != nullptr)
7519         {
7520             copy->gtFlags |= (copy->gtOp.gtOp1->gtFlags & GTF_ALL_EFFECT);
7521         }
7522         if (copy->gtGetOp2() != nullptr)
7523         {
7524             copy->gtFlags |= (copy->gtGetOp2()->gtFlags & GTF_ALL_EFFECT);
7525         }
7526
7527         // The early morph for TailCall creates a GT_NOP with GTF_REG_VAL flag set
7528         // Thus we have to copy the gtRegNum/gtRegPair value if we clone it here.
7529         //
7530         if (addFlags & GTF_REG_VAL)
7531         {
7532             copy->CopyReg(tree);
7533         }
7534
7535         // We can call gtCloneExpr() before we have called fgMorph, e.g. when we expand a GT_INDEX node in
7536         // fgMorphArrayIndex(). The method gtFoldExpr() expects to be run after fgMorph, since it will set the
7537         // GTF_DEBUG_NODE_MORPHED flag on nodes that it adds/modifies; if we then call fgMorph we will assert.
7538         // We really only need to fold when this method is used to replace references to
7539         // a local variable with an integer constant.
7540         //
7541         if (varNum != (unsigned)-1)
7542         {
7543             /* Try to do some folding */
7544             copy = gtFoldExpr(copy);
7545         }
7546
7547         goto DONE;
7548     }
7549
7550     /* See what kind of a special operator we have here */
7551
7552     switch (oper)
7553     {
7554         case GT_STMT:
7555             copy = gtCloneExpr(tree->gtStmt.gtStmtExpr, addFlags, varNum, varVal);
7556             copy = gtNewStmt(copy, tree->gtStmt.gtStmtILoffsx);
7557             goto DONE;
7558
7559         case GT_CALL:
7560
7561             copy = new (this, GT_CALL) GenTreeCall(tree->TypeGet());
7562
7563             copy->gtCall.gtCallObjp =
7564                 tree->gtCall.gtCallObjp ? gtCloneExpr(tree->gtCall.gtCallObjp, addFlags, varNum, varVal) : nullptr;
7565             copy->gtCall.gtCallArgs = tree->gtCall.gtCallArgs
7566                                           ? gtCloneExpr(tree->gtCall.gtCallArgs, addFlags, varNum, varVal)->AsArgList()
7567                                           : nullptr;
7568             copy->gtCall.gtCallMoreFlags = tree->gtCall.gtCallMoreFlags;
7569             copy->gtCall.gtCallLateArgs =
7570                 tree->gtCall.gtCallLateArgs
7571                     ? gtCloneExpr(tree->gtCall.gtCallLateArgs, addFlags, varNum, varVal)->AsArgList()
7572                     : nullptr;
7573
7574 #if !FEATURE_FIXED_OUT_ARGS
7575             copy->gtCall.regArgList      = tree->gtCall.regArgList;
7576             copy->gtCall.regArgListCount = tree->gtCall.regArgListCount;
7577 #endif
7578
7579             // The call sig comes from the EE and doesn't change throughout the compilation process, meaning
7580             // we only really need one physical copy of it. Therefore a shallow pointer copy will suffice.
7581             // (Note that this still holds even if the tree we are cloning was created by an inlinee compiler,
7582             // because the inlinee still uses the inliner's memory allocator anyway.)
7583             copy->gtCall.callSig = tree->gtCall.callSig;
7584
7585             copy->gtCall.gtCallType    = tree->gtCall.gtCallType;
7586             copy->gtCall.gtReturnType  = tree->gtCall.gtReturnType;
7587             copy->gtCall.gtControlExpr = tree->gtCall.gtControlExpr;
7588
7589             /* Copy the union */
7590             if (tree->gtCall.gtCallType == CT_INDIRECT)
7591             {
7592                 copy->gtCall.gtCallCookie = tree->gtCall.gtCallCookie
7593                                                 ? gtCloneExpr(tree->gtCall.gtCallCookie, addFlags, varNum, varVal)
7594                                                 : nullptr;
7595                 copy->gtCall.gtCallAddr =
7596                     tree->gtCall.gtCallAddr ? gtCloneExpr(tree->gtCall.gtCallAddr, addFlags, varNum, varVal) : nullptr;
7597             }
7598             else if (tree->gtFlags & GTF_CALL_VIRT_STUB)
7599             {
7600                 copy->gtCall.gtCallMethHnd      = tree->gtCall.gtCallMethHnd;
7601                 copy->gtCall.gtStubCallStubAddr = tree->gtCall.gtStubCallStubAddr;
7602             }
7603             else
7604             {
7605                 copy->gtCall.gtCallMethHnd         = tree->gtCall.gtCallMethHnd;
7606                 copy->gtCall.gtInlineCandidateInfo = tree->gtCall.gtInlineCandidateInfo;
7607             }
7608
7609             if (tree->gtCall.fgArgInfo)
7610             {
7611                 // Create and initialize the fgArgInfo for our copy of the call tree
7612                 copy->gtCall.fgArgInfo = new (this, CMK_Unknown) fgArgInfo(copy, tree);
7613             }
7614             else
7615             {
7616                 copy->gtCall.fgArgInfo = nullptr;
7617             }
7618             copy->gtCall.gtRetClsHnd = tree->gtCall.gtRetClsHnd;
7619
7620 #if FEATURE_MULTIREG_RET
7621             copy->gtCall.gtReturnTypeDesc = tree->gtCall.gtReturnTypeDesc;
7622 #endif
7623
7624 #ifdef LEGACY_BACKEND
7625             copy->gtCall.gtCallRegUsedMask = tree->gtCall.gtCallRegUsedMask;
7626 #endif // LEGACY_BACKEND
7627
7628 #ifdef FEATURE_READYTORUN_COMPILER
7629             copy->gtCall.setEntryPoint(tree->gtCall.gtEntryPoint);
7630 #endif
7631
7632 #ifdef DEBUG
7633             copy->gtCall.gtInlineObservation = tree->gtCall.gtInlineObservation;
7634 #endif
7635
7636             copy->AsCall()->CopyOtherRegFlags(tree->AsCall());
7637             break;
7638
7639         case GT_FIELD:
7640
7641             copy = gtNewFieldRef(tree->TypeGet(), tree->gtField.gtFldHnd, nullptr, tree->gtField.gtFldOffset);
7642
7643             copy->gtField.gtFldObj =
7644                 tree->gtField.gtFldObj ? gtCloneExpr(tree->gtField.gtFldObj, addFlags, varNum, varVal) : nullptr;
7645             copy->gtField.gtFldMayOverlap = tree->gtField.gtFldMayOverlap;
7646 #ifdef FEATURE_READYTORUN_COMPILER
7647             copy->gtField.gtFieldLookup = tree->gtField.gtFieldLookup;
7648 #endif
7649
7650             break;
7651
7652         case GT_ARR_ELEM:
7653         {
7654             GenTreePtr inds[GT_ARR_MAX_RANK];
7655             for (unsigned dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
7656             {
7657                 inds[dim] = gtCloneExpr(tree->gtArrElem.gtArrInds[dim], addFlags, varNum, varVal);
7658             }
7659             copy = new (this, GT_ARR_ELEM)
7660                 GenTreeArrElem(tree->TypeGet(), gtCloneExpr(tree->gtArrElem.gtArrObj, addFlags, varNum, varVal),
7661                                tree->gtArrElem.gtArrRank, tree->gtArrElem.gtArrElemSize, tree->gtArrElem.gtArrElemType,
7662                                &inds[0]);
7663         }
7664         break;
7665
7666         case GT_ARR_OFFSET:
7667         {
7668             copy = new (this, GT_ARR_OFFSET)
7669                 GenTreeArrOffs(tree->TypeGet(), gtCloneExpr(tree->gtArrOffs.gtOffset, addFlags, varNum, varVal),
7670                                gtCloneExpr(tree->gtArrOffs.gtIndex, addFlags, varNum, varVal),
7671                                gtCloneExpr(tree->gtArrOffs.gtArrObj, addFlags, varNum, varVal),
7672                                tree->gtArrOffs.gtCurrDim, tree->gtArrOffs.gtArrRank, tree->gtArrOffs.gtArrElemType);
7673         }
7674         break;
7675
7676         case GT_CMPXCHG:
7677             copy = new (this, GT_CMPXCHG)
7678                 GenTreeCmpXchg(tree->TypeGet(), gtCloneExpr(tree->gtCmpXchg.gtOpLocation, addFlags, varNum, varVal),
7679                                gtCloneExpr(tree->gtCmpXchg.gtOpValue, addFlags, varNum, varVal),
7680                                gtCloneExpr(tree->gtCmpXchg.gtOpComparand, addFlags, varNum, varVal));
7681             break;
7682
7683         case GT_ARR_BOUNDS_CHECK:
7684 #ifdef FEATURE_SIMD
7685         case GT_SIMD_CHK:
7686 #endif // FEATURE_SIMD
7687             copy = new (this, oper) GenTreeBoundsChk(oper, tree->TypeGet(),
7688                                                      gtCloneExpr(tree->gtBoundsChk.gtArrLen, addFlags, varNum, varVal),
7689                                                      gtCloneExpr(tree->gtBoundsChk.gtIndex, addFlags, varNum, varVal),
7690                                                      tree->gtBoundsChk.gtThrowKind);
7691             break;
7692
7693         default:
7694 #ifdef DEBUG
7695             gtDispTree(tree);
7696 #endif
7697             NO_WAY("unexpected operator");
7698     }
7699
7700 DONE:
7701
7702     // If it has a zero-offset field seq, copy annotation.
7703     if (tree->TypeGet() == TYP_BYREF)
7704     {
7705         FieldSeqNode* fldSeq = nullptr;
7706         if (GetZeroOffsetFieldMap()->Lookup(tree, &fldSeq))
7707         {
7708             GetZeroOffsetFieldMap()->Set(copy, fldSeq);
7709         }
7710     }
7711
7712     copy->gtVNPair = tree->gtVNPair; // A cloned tree gets the original's Value number pair
7713
7714     /* We assume the FP stack level will be identical */
7715
7716     copy->gtCopyFPlvl(tree);
7717
7718     /* Compute the flags for the copied node. Note that we can do this only
7719        if we didn't call gtFoldExpr(copy) */
7720
7721     if (copy->gtOper == oper)
7722     {
7723         addFlags |= tree->gtFlags;
7724
7725 #ifdef DEBUG
7726         /* GTF_NODE_MASK should not be propagated from 'tree' to 'copy' */
7727         addFlags &= ~GTF_NODE_MASK;
7728 #endif
7729         // Some other flags depend on the context of the expression, and should not be preserved.
7730         // For example, GTF_RELOP_QMARK:
7731         if (copy->OperKind() & GTK_RELOP)
7732         {
7733             addFlags &= ~GTF_RELOP_QMARK;
7734         }
7735         // On the other hand, if we're creating such a context, restore this flag.
7736         if (copy->OperGet() == GT_QMARK)
7737         {
7738             copy->gtOp.gtOp1->gtFlags |= GTF_RELOP_QMARK;
7739         }
7740
7741         copy->gtFlags |= addFlags;
7742     }
7743
7744     /* GTF_COLON_COND should be propagated from 'tree' to 'copy' */
7745     copy->gtFlags |= (tree->gtFlags & GTF_COLON_COND);
7746
7747 #if defined(DEBUG)
7748     // Non-node debug flags should be propagated from 'tree' to 'copy'
7749     copy->gtDebugFlags |= (tree->gtDebugFlags & ~GTF_DEBUG_NODE_MASK);
7750 #endif
7751
7752     /* Make sure to copy back fields that may have been initialized */
7753
7754     copy->CopyRawCosts(tree);
7755     copy->gtRsvdRegs = tree->gtRsvdRegs;
7756     copy->CopyReg(tree);
7757     return copy;
7758 }
7759
7760 //------------------------------------------------------------------------
7761 // gtReplaceTree: Replace a tree with a new tree.
7762 //
7763 // Arguments:
7764 //    stmt            - The top-level root stmt of the tree being replaced.
7765 //                      Must not be null.
7766 //    tree            - The tree being replaced. Must not be null.
7767 //    replacementTree - The replacement tree. Must not be null.
7768 //
7769 // Return Value:
7770 //    The tree node that replaces the old tree.
7771 //
7772 // Assumptions:
7773 //    The sequencing of the stmt has been done.
7774 //
7775 // Notes:
7776 //    The caller must ensure that the original statement has been sequenced,
7777 //    but this method will sequence 'replacementTree', and insert it into the
7778 //    proper place in the statement sequence.
7779
7780 GenTreePtr Compiler::gtReplaceTree(GenTreePtr stmt, GenTreePtr tree, GenTreePtr replacementTree)
7781 {
7782     assert(fgStmtListThreaded);
7783     assert(tree != nullptr);
7784     assert(stmt != nullptr);
7785     assert(replacementTree != nullptr);
7786
7787     GenTreePtr* treePtr    = nullptr;
7788     GenTreePtr  treeParent = tree->gtGetParent(&treePtr);
7789
7790     assert(treeParent != nullptr || tree == stmt->gtStmt.gtStmtExpr);
7791
7792     if (treePtr == nullptr)
7793     {
7794         // Replace the stmt expr and rebuild the linear order for "stmt".
7795         assert(treeParent == nullptr);
7796         assert(fgOrder != FGOrderLinear);
7797         stmt->gtStmt.gtStmtExpr = replacementTree;
7798         fgSetStmtSeq(stmt);
7799     }
7800     else
7801     {
7802         assert(treeParent != nullptr);
7803
7804         GenTreePtr treeFirstNode = fgGetFirstNode(tree);
7805         GenTreePtr treeLastNode  = tree;
7806         GenTreePtr treePrevNode  = treeFirstNode->gtPrev;
7807         GenTreePtr treeNextNode  = treeLastNode->gtNext;
7808
7809         *treePtr = replacementTree;
7810
7811         // Build the linear order for "replacementTree".
7812         fgSetTreeSeq(replacementTree, treePrevNode);
7813
7814         // Restore linear-order Prev and Next for "replacementTree".
7815         if (treePrevNode != nullptr)
7816         {
7817             treeFirstNode         = fgGetFirstNode(replacementTree);
7818             treeFirstNode->gtPrev = treePrevNode;
7819             treePrevNode->gtNext  = treeFirstNode;
7820         }
7821         else
7822         {
7823             // Update the linear order start of "stmt" if treeFirstNode
7824             // appears to have replaced the original first node.
7825             assert(treeFirstNode == stmt->gtStmt.gtStmtList);
7826             stmt->gtStmt.gtStmtList = fgGetFirstNode(replacementTree);
7827         }
7828
7829         if (treeNextNode != nullptr)
7830         {
7831             treeLastNode         = replacementTree;
7832             treeLastNode->gtNext = treeNextNode;
7833             treeNextNode->gtPrev = treeLastNode;
7834         }
7835
7836         bool       needFixupCallArg = false;
7837         GenTreePtr node             = treeParent;
7838
7839         // If we have replaced an arg, then update pointers in argtable.
7840         do
7841         {
7842             // Look for the first enclosing callsite
7843             switch (node->OperGet())
7844             {
7845                 case GT_LIST:
7846                 case GT_ARGPLACE:
7847                     // "tree" is likely an argument of a call.
7848                     needFixupCallArg = true;
7849                     break;
7850
7851                 case GT_CALL:
7852                     if (needFixupCallArg)
7853                     {
7854                         // We have replaced an arg, so update pointers in argtable.
7855                         fgFixupArgTabEntryPtr(node, tree, replacementTree);
7856                         needFixupCallArg = false;
7857                     }
7858                     break;
7859
7860                 default:
7861                     // "tree" is unlikely to be an argument of a call.
7862                     needFixupCallArg = false;
7863                     break;
7864             }
7865
7866             if (needFixupCallArg)
7867             {
7868                 // Keep tracking to update the first enclosing call.
7869                 node = node->gtGetParent(nullptr);
7870             }
7871             else
7872             {
7873                 // Stop tracking.
7874                 node = nullptr;
7875             }
7876         } while (node != nullptr);
7877
7878         // Propagate side-effect flags of "replacementTree" to its parents if needed.
7879         gtUpdateSideEffects(treeParent, tree->gtFlags, replacementTree->gtFlags);
7880     }
7881
7882     return replacementTree;
7883 }
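
// Illustrative usage sketch (not part of the JIT itself): 'someNewTree' below is a
// hypothetical, already-constructed replacement node allocated from this compiler.
//
//     GenTreePtr result = gtReplaceTree(stmt, tree, someNewTree);
//     assert(result == someNewTree); // the replacement node is returned
//
// When the replaced node has a parent, gtReplaceTree also calls gtUpdateSideEffects
// (below) so that ancestor side-effect flags stay in sync.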
7884
7885 //------------------------------------------------------------------------
7886 // gtUpdateSideEffects: Update the side effects for ancestors.
7887 //
7888 // Arguments:
7889 //    treeParent      - The immediate parent node.
7890 //    oldGtFlags      - The stale gtFlags.
7891 //    newGtFlags      - The new gtFlags.
7892 //
7893 //
7894 // Assumptions:
7895 //    Linear order of the stmt has been established.
7896 //
7897 // Notes:
7898 //    The routine is used for updating the stale side effect flags for ancestor
7899 //    nodes starting from treeParent up to the top-level stmt expr.
7900
7901 void Compiler::gtUpdateSideEffects(GenTreePtr treeParent, unsigned oldGtFlags, unsigned newGtFlags)
7902 {
7903     assert(fgStmtListThreaded);
7904
7905     oldGtFlags = oldGtFlags & GTF_ALL_EFFECT;
7906     newGtFlags = newGtFlags & GTF_ALL_EFFECT;
7907
7908     if (oldGtFlags != newGtFlags)
7909     {
7910         while (treeParent)
7911         {
7912             treeParent->gtFlags &= ~oldGtFlags;
7913             treeParent->gtFlags |= newGtFlags;
7914             treeParent = treeParent->gtGetParent(nullptr);
7915         }
7916     }
7917 }
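
// Illustrative sketch: only the GTF_ALL_EFFECT bits participate. With the
// hypothetical values oldGtFlags = GTF_EXCEPT and newGtFlags = GTF_ASG, every
// ancestor from 'treeParent' up to the statement root has GTF_EXCEPT cleared
// and GTF_ASG set:
//
//     gtUpdateSideEffects(treeParent, GTF_EXCEPT, GTF_ASG);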
7918
7919 /*****************************************************************************
7920  *
7921  *  Compares two trees and returns true when both trees are the same.
7922  *  Instead of fully comparing the two trees this method can just return false.
7923  *  Thus callers should not assume that the trees are different when false is returned.
7924  *  Only when true is returned can the caller perform code optimizations.
7925  *  The current implementation only compares a limited set of LEAF/CONST nodes
7926  *  and returns false for all other trees.
7927  */
7928 bool Compiler::gtCompareTree(GenTree* op1, GenTree* op2)
7929 {
7930     /* Make sure that both trees are of the same GT node kind */
7931     if (op1->OperGet() != op2->OperGet())
7932     {
7933         return false;
7934     }
7935
7936     /* Make sure that both trees are returning the same type */
7937     if (op1->gtType != op2->gtType)
7938     {
7939         return false;
7940     }
7941
7942     /* Figure out what kind of a node we have */
7943
7944     genTreeOps oper = op1->OperGet();
7945     unsigned   kind = op1->OperKind();
7946
7947     /* Is this a constant or leaf node? */
7948
7949     if (kind & (GTK_CONST | GTK_LEAF))
7950     {
7951         switch (oper)
7952         {
7953             case GT_CNS_INT:
7954                 if ((op1->gtIntCon.gtIconVal == op2->gtIntCon.gtIconVal) && GenTree::SameIconHandleFlag(op1, op2))
7955                 {
7956                     return true;
7957                 }
7958                 break;
7959
7960             case GT_CNS_LNG:
7961                 if (op1->gtLngCon.gtLconVal == op2->gtLngCon.gtLconVal)
7962                 {
7963                     return true;
7964                 }
7965                 break;
7966
7967             case GT_CNS_STR:
7968                 if (op1->gtStrCon.gtSconCPX == op2->gtStrCon.gtSconCPX)
7969                 {
7970                     return true;
7971                 }
7972                 break;
7973
7974             case GT_LCL_VAR:
7975                 if (op1->gtLclVarCommon.gtLclNum == op2->gtLclVarCommon.gtLclNum)
7976                 {
7977                     return true;
7978                 }
7979                 break;
7980
7981             case GT_CLS_VAR:
7982                 if (op1->gtClsVar.gtClsVarHnd == op2->gtClsVar.gtClsVarHnd)
7983                 {
7984                     return true;
7985                 }
7986                 break;
7987
7988             default:
7989                 // we return false for these unhandled 'oper' kinds
7990                 break;
7991         }
7992     }
7993     return false;
7994 }
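
// Illustrative usage sketch: a 'false' result only means "not proven equal", so
// callers should treat it as "don't optimize" rather than "different":
//
//     if (gtCompareTree(op1, op2))
//     {
//         // op1 and op2 are known to be the same leaf/constant; it is safe to
//         // base an optimization on that fact.
//     }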
7995
7996 GenTreePtr Compiler::gtGetThisArg(GenTreePtr call)
7997 {
7998     assert(call->gtOper == GT_CALL);
7999
8000     if (call->gtCall.gtCallObjp != nullptr)
8001     {
8002         if (call->gtCall.gtCallObjp->gtOper != GT_NOP && call->gtCall.gtCallObjp->gtOper != GT_ASG)
8003         {
8004             if (!(call->gtCall.gtCallObjp->gtFlags & GTF_LATE_ARG))
8005             {
8006                 return call->gtCall.gtCallObjp;
8007             }
8008         }
8009
8010         if (call->gtCall.gtCallLateArgs)
8011         {
8012             regNumber        thisReg         = REG_ARG_0;
8013             unsigned         argNum          = 0;
8014             fgArgTabEntryPtr thisArgTabEntry = gtArgEntryByArgNum(call, argNum);
8015             GenTreePtr       result          = thisArgTabEntry->node;
8016
8017 #if !FEATURE_FIXED_OUT_ARGS
8018             GenTreePtr lateArgs = call->gtCall.gtCallLateArgs;
8019             regList    list     = call->gtCall.regArgList;
8020             int        index    = 0;
8021             while (lateArgs != nullptr)
8022             {
8023                 assert(lateArgs->gtOper == GT_LIST);
8024                 assert(index < call->gtCall.regArgListCount);
8025                 regNumber curArgReg = list[index];
8026                 if (curArgReg == thisReg)
8027                 {
8028                     if (optAssertionPropagatedCurrentStmt)
8029                         result = lateArgs->gtOp.gtOp1;
8030
8031                     assert(result == lateArgs->gtOp.gtOp1);
8032                 }
8033
8034                 lateArgs = lateArgs->gtOp.gtOp2;
8035                 index++;
8036             }
8037 #endif
8038             return result;
8039         }
8040     }
8041     return nullptr;
8042 }
8043
8044 bool GenTree::gtSetFlags() const
8045 {
8046     //
8047     // When FEATURE_SET_FLAGS (_TARGET_ARM_) is active, the method returns true
8048     //    when gtFlags has the GTF_SET_FLAGS flag set;
8049     // otherwise the architecture has instructions that typically set
8050     //    the flags and this method will return true.
8051     //
8052     //    Exceptions: GT_IND (load/store) is not allowed to set the flags
8053     //                and on XARCH the GT_MUL/GT_DIV and all overflow instructions
8054     //                do not set the condition flags
8055     //
8056     // Precondition we have a GTK_SMPOP
8057     //
8058     assert(OperIsSimple());
8059
8060     if (!varTypeIsIntegralOrI(TypeGet()))
8061     {
8062         return false;
8063     }
8064
8065 #if FEATURE_SET_FLAGS
8066
8067     if ((gtFlags & GTF_SET_FLAGS) && gtOper != GT_IND)
8068     {
8069         // GTF_SET_FLAGS is not valid on GT_IND and is overlaid with GTF_NONFAULTING_IND
8070         return true;
8071     }
8072     else
8073     {
8074         return false;
8075     }
8076
8077 #else // !FEATURE_SET_FLAGS
8078
8079 #ifdef _TARGET_XARCH_
8080     // Return true if/when the codegen for this node will set the flags
8081     //
8082     //
8083     if ((gtOper == GT_IND) || (gtOper == GT_MUL) || (gtOper == GT_DIV))
8084     {
8085         return false;
8086     }
8087     else if (gtOverflowEx())
8088     {
8089         return false;
8090     }
8091     else
8092     {
8093         return true;
8094     }
8095 #else
8096     // Otherwise for other architectures we should return false
8097     return false;
8098 #endif
8099
8100 #endif // !FEATURE_SET_FLAGS
8101 }
8102
8103 bool GenTree::gtRequestSetFlags()
8104 {
8105     bool result = false;
8106
8107 #if FEATURE_SET_FLAGS
8108     // This method is a Nop unless FEATURE_SET_FLAGS is defined
8109
8110     // In order to set GTF_SET_FLAGS
8111     //              we must have a GTK_SMPOP
8112     //          and we have an integer or machine size type (not floating point or TYP_LONG on 32-bit)
8113     //
8114     if (!OperIsSimple())
8115         return false;
8116
8117     if (!varTypeIsIntegralOrI(TypeGet()))
8118         return false;
8119
8120     switch (gtOper)
8121     {
8122         case GT_IND:
8123         case GT_ARR_LENGTH:
8124             // These will turn into simple load from memory instructions
8125             // and we can't force the setting of the flags on load from memory
8126             break;
8127
8128         case GT_MUL:
8129         case GT_DIV:
8130             // These instructions don't set the flags (on x86/x64)
8131             //
8132             break;
8133
8134         default:
8135             // Otherwise we can set the flags for this gtOper
8136             // and codegen must set the condition flags.
8137             //
8138             gtFlags |= GTF_SET_FLAGS;
8139             result = true;
8140             break;
8141     }
8142 #endif // FEATURE_SET_FLAGS
8143
8144     // Codegen for this tree must set the condition flags if
8145     // this method returns true.
8146     //
8147     return result;
8148 }
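
// Illustrative usage sketch (meaningful on FEATURE_SET_FLAGS targets only): a
// caller that would like the condition flags produced as a by-product of 'op'
// asks for them and only relies on them if the request succeeded:
//
//     if (op->gtRequestSetFlags())
//     {
//         // codegen for 'op' must set the condition flags, so a separate
//         // compare against zero can be omitted.
//     }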
8149
8150 /*****************************************************************************/
8151 void GenTree::CopyTo(class Compiler* comp, const GenTree& gt)
8152 {
8153     gtOper         = gt.gtOper;
8154     gtType         = gt.gtType;
8155     gtAssertionNum = gt.gtAssertionNum;
8156
8157     gtRegNum = gt.gtRegNum; // one union member.
8158     CopyCosts(&gt);
8159
8160     gtFlags  = gt.gtFlags;
8161     gtVNPair = gt.gtVNPair;
8162
8163     gtRsvdRegs = gt.gtRsvdRegs;
8164
8165 #ifdef LEGACY_BACKEND
8166     gtUsedRegs = gt.gtUsedRegs;
8167 #endif // LEGACY_BACKEND
8168
8169 #if FEATURE_STACK_FP_X87
8170     gtFPlvl = gt.gtFPlvl;
8171 #endif // FEATURE_STACK_FP_X87
8172
8173     gtNext = gt.gtNext;
8174     gtPrev = gt.gtPrev;
8175 #ifdef DEBUG
8176     gtTreeID = gt.gtTreeID;
8177     gtSeqNum = gt.gtSeqNum;
8178 #endif
8179     // Largest node subtype:
8180     void* remDst = reinterpret_cast<char*>(this) + sizeof(GenTree);
8181     void* remSrc = reinterpret_cast<char*>(const_cast<GenTree*>(&gt)) + sizeof(GenTree);
8182     memcpy(remDst, remSrc, TREE_NODE_SZ_LARGE - sizeof(GenTree));
8183 }
8184
8185 void GenTree::CopyToSmall(const GenTree& gt)
8186 {
8187     // Small node size is defined by GenTreeOp.
8188     void* remDst = reinterpret_cast<char*>(this) + sizeof(GenTree);
8189     void* remSrc = reinterpret_cast<char*>(const_cast<GenTree*>(&gt)) + sizeof(GenTree);
8190     memcpy(remDst, remSrc, TREE_NODE_SZ_SMALL - sizeof(GenTree));
8191 }
8192
8193 unsigned GenTree::NumChildren()
8194 {
8195     if (OperIsConst() || OperIsLeaf())
8196     {
8197         return 0;
8198     }
8199     else if (OperIsUnary())
8200     {
8201         if (OperGet() == GT_NOP || OperGet() == GT_RETURN || OperGet() == GT_RETFILT)
8202         {
8203             if (gtOp.gtOp1 == nullptr)
8204             {
8205                 return 0;
8206             }
8207             else
8208             {
8209                 return 1;
8210             }
8211         }
8212         else
8213         {
8214             return 1;
8215         }
8216     }
8217     else if (OperIsBinary())
8218     {
8219         // All binary operators except LEA have at least one arg; the second arg may sometimes be null, however.
8220         if (OperGet() == GT_LEA)
8221         {
8222             unsigned childCount = 0;
8223             if (gtOp.gtOp1 != nullptr)
8224             {
8225                 childCount++;
8226             }
8227             if (gtOp.gtOp2 != nullptr)
8228             {
8229                 childCount++;
8230             }
8231             return childCount;
8232         }
8233         assert(gtOp.gtOp1 != nullptr);
8234         if (gtOp.gtOp2 == nullptr)
8235         {
8236             return 1;
8237         }
8238         else
8239         {
8240             return 2;
8241         }
8242     }
8243     else
8244     {
8245         // Special
8246         switch (OperGet())
8247         {
8248             case GT_CMPXCHG:
8249                 return 3;
8250
8251             case GT_ARR_BOUNDS_CHECK:
8252 #ifdef FEATURE_SIMD
8253             case GT_SIMD_CHK:
8254 #endif // FEATURE_SIMD
8255                 return 2;
8256
8257             case GT_FIELD:
8258             case GT_STMT:
8259                 return 1;
8260
8261             case GT_ARR_ELEM:
8262                 return 1 + AsArrElem()->gtArrRank;
8263
8264             case GT_ARR_OFFSET:
8265                 return 3;
8266
8267             case GT_CALL:
8268             {
8269                 GenTreeCall* call = AsCall();
8270                 unsigned     res  = 0; // arg list(s) (including late args).
8271                 if (call->gtCallObjp != nullptr)
8272                 {
8273                     res++; // Add objp?
8274                 }
8275                 if (call->gtCallArgs != nullptr)
8276                 {
8277                     res++; // Add args?
8278                 }
8279                 if (call->gtCallLateArgs != nullptr)
8280                 {
8281                     res++; // Add late args?
8282                 }
8283                 if (call->gtControlExpr != nullptr)
8284                 {
8285                     res++;
8286                 }
8287
8288                 if (call->gtCallType == CT_INDIRECT)
8289                 {
8290                     if (call->gtCallCookie != nullptr)
8291                     {
8292                         res++;
8293                     }
8294                     if (call->gtCallAddr != nullptr)
8295                     {
8296                         res++;
8297                     }
8298                 }
8299                 return res;
8300             }
8301             case GT_NONE:
8302                 return 0;
8303             default:
8304                 unreached();
8305         }
8306     }
8307 }
8308
8309 GenTreePtr GenTree::GetChild(unsigned childNum)
8310 {
8311     assert(childNum < NumChildren()); // Precondition.
8312     assert(NumChildren() <= MAX_CHILDREN);
8313     assert(!(OperIsConst() || OperIsLeaf()));
8314     if (OperIsUnary())
8315     {
8316         return AsUnOp()->gtOp1;
8317     }
8318     else if (OperIsBinary())
8319     {
8320         if (OperIsAddrMode())
8321         {
8322             // If this is the first (0th) child, only return op1 if it is non-null
8323             // Otherwise, we return gtOp2.
8324             if (childNum == 0 && AsOp()->gtOp1 != nullptr)
8325             {
8326                 return AsOp()->gtOp1;
8327             }
8328             return AsOp()->gtOp2;
8329         }
8330         // TODO-Cleanup: Consider handling ReverseOps here, and then we wouldn't have to handle it in
8331         // fgGetFirstNode().  However, it seems that it causes loop hoisting behavior to change.
8332         if (childNum == 0)
8333         {
8334             return AsOp()->gtOp1;
8335         }
8336         else
8337         {
8338             return AsOp()->gtOp2;
8339         }
8340     }
8341     else
8342     {
8343         // Special
8344         switch (OperGet())
8345         {
8346             case GT_CMPXCHG:
8347                 switch (childNum)
8348                 {
8349                     case 0:
8350                         return AsCmpXchg()->gtOpLocation;
8351                     case 1:
8352                         return AsCmpXchg()->gtOpValue;
8353                     case 2:
8354                         return AsCmpXchg()->gtOpComparand;
8355                     default:
8356                         unreached();
8357                 }
8358             case GT_ARR_BOUNDS_CHECK:
8359 #ifdef FEATURE_SIMD
8360             case GT_SIMD_CHK:
8361 #endif // FEATURE_SIMD
8362                 switch (childNum)
8363                 {
8364                     case 0:
8365                         return AsBoundsChk()->gtArrLen;
8366                     case 1:
8367                         return AsBoundsChk()->gtIndex;
8368                     default:
8369                         unreached();
8370                 }
8371
8372             case GT_FIELD:
8373                 return AsField()->gtFldObj;
8374
8375             case GT_STMT:
8376                 return AsStmt()->gtStmtExpr;
8377
8378             case GT_ARR_ELEM:
8379                 if (childNum == 0)
8380                 {
8381                     return AsArrElem()->gtArrObj;
8382                 }
8383                 else
8384                 {
8385                     return AsArrElem()->gtArrInds[childNum - 1];
8386                 }
8387
8388             case GT_ARR_OFFSET:
8389                 switch (childNum)
8390                 {
8391                     case 0:
8392                         return AsArrOffs()->gtOffset;
8393                     case 1:
8394                         return AsArrOffs()->gtIndex;
8395                     case 2:
8396                         return AsArrOffs()->gtArrObj;
8397                     default:
8398                         unreached();
8399                 }
8400
8401             case GT_CALL:
8402             {
8403                 // The if chain below assumes that all possible children are non-null.
8404                 // If some are null, we "virtually skip" them: childNum is advanced
8405                 // past each missing child so the numbering below stays consistent.
8406                 GenTreeCall* call = AsCall();
8407
8408                 if (call->gtCallObjp == nullptr)
8409                 {
8410                     childNum++;
8411                 }
8412                 if (childNum >= 1 && call->gtCallArgs == nullptr)
8413                 {
8414                     childNum++;
8415                 }
8416                 if (childNum >= 2 && call->gtCallLateArgs == nullptr)
8417                 {
8418                     childNum++;
8419                 }
8420                 if (childNum >= 3 && call->gtControlExpr == nullptr)
8421                 {
8422                     childNum++;
8423                 }
8424                 if (call->gtCallType == CT_INDIRECT)
8425                 {
8426                     if (childNum >= 4 && call->gtCallCookie == nullptr)
8427                     {
8428                         childNum++;
8429                     }
8430                 }
8431
8432                 if (childNum == 0)
8433                 {
8434                     return call->gtCallObjp;
8435                 }
8436                 else if (childNum == 1)
8437                 {
8438                     return call->gtCallArgs;
8439                 }
8440                 else if (childNum == 2)
8441                 {
8442                     return call->gtCallLateArgs;
8443                 }
8444                 else if (childNum == 3)
8445                 {
8446                     return call->gtControlExpr;
8447                 }
8448                 else
8449                 {
8450                     assert(call->gtCallType == CT_INDIRECT);
8451                     if (childNum == 4)
8452                     {
8453                         return call->gtCallCookie;
8454                     }
8455                     else
8456                     {
8457                         assert(childNum == 5);
8458                         return call->gtCallAddr;
8459                     }
8460                 }
8461             }
8462             case GT_NONE:
8463                 unreached();
8464             default:
8465                 unreached();
8466         }
8467     }
8468 }
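
// Illustrative sketch: NumChildren/GetChild support a simple parent-to-child walk
// over any node (GenTreeUseEdgeIterator below offers an alternative operand walk):
//
//     for (unsigned i = 0; i < tree->NumChildren(); i++)
//     {
//         GenTreePtr child = tree->GetChild(i);
//         // ... examine 'child' ...
//     }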
8469
8470 GenTreeUseEdgeIterator::GenTreeUseEdgeIterator()
8471     : m_node(nullptr)
8472     , m_edge(nullptr)
8473     , m_argList(nullptr)
8474     , m_state(-1)
8475 {
8476 }
8477
8478 GenTreeUseEdgeIterator::GenTreeUseEdgeIterator(GenTree* node)
8479     : m_node(node)
8480     , m_edge(nullptr)
8481     , m_argList(nullptr)
8482     , m_state(0)
8483 {
8484     assert(m_node != nullptr);
8485
8486     // Advance to the first operand.
8487     ++(*this);
8488 }
8489
8490 //------------------------------------------------------------------------
8491 // GenTreeUseEdgeIterator::GetNextUseEdge:
8492 //    Gets the next operand of a node with a fixed number of operands.
8493 //    This covers all nodes besides GT_CALL, GT_PHI, GT_SIMD, and aggregate
8494 //    GT_LIST nodes. For the node types handled by this method, the `m_state`
8495 //    field indicates the index of the next operand to produce.
8496 //
8497 // Returns:
8498 //    The node's next operand or nullptr if all operands have been
8499 //    produced.
8500 //
8501 GenTree** GenTreeUseEdgeIterator::GetNextUseEdge() const
8502 {
8503     switch (m_node->OperGet())
8504     {
8505         case GT_CMPXCHG:
8506             switch (m_state)
8507             {
8508                 case 0:
8509                     return &m_node->AsCmpXchg()->gtOpLocation;
8510                 case 1:
8511                     return &m_node->AsCmpXchg()->gtOpValue;
8512                 case 2:
8513                     return &m_node->AsCmpXchg()->gtOpComparand;
8514                 default:
8515                     return nullptr;
8516             }
8517         case GT_ARR_BOUNDS_CHECK:
8518 #ifdef FEATURE_SIMD
8519         case GT_SIMD_CHK:
8520 #endif // FEATURE_SIMD
8521             switch (m_state)
8522             {
8523                 case 0:
8524                     return &m_node->AsBoundsChk()->gtArrLen;
8525                 case 1:
8526                     return &m_node->AsBoundsChk()->gtIndex;
8527                 default:
8528                     return nullptr;
8529             }
8530
8531         case GT_FIELD:
8532             if (m_state == 0)
8533             {
8534                 return &m_node->AsField()->gtFldObj;
8535             }
8536             return nullptr;
8537
8538         case GT_STMT:
8539             if (m_state == 0)
8540             {
8541                 return &m_node->AsStmt()->gtStmtExpr;
8542             }
8543             return nullptr;
8544
8545         case GT_ARR_ELEM:
8546             if (m_state == 0)
8547             {
8548                 return &m_node->AsArrElem()->gtArrObj;
8549             }
8550             else if (m_state <= m_node->AsArrElem()->gtArrRank)
8551             {
8552                 return &m_node->AsArrElem()->gtArrInds[m_state - 1];
8553             }
8554             return nullptr;
8555
8556         case GT_ARR_OFFSET:
8557             switch (m_state)
8558             {
8559                 case 0:
8560                     return &m_node->AsArrOffs()->gtOffset;
8561                 case 1:
8562                     return &m_node->AsArrOffs()->gtIndex;
8563                 case 2:
8564                     return &m_node->AsArrOffs()->gtArrObj;
8565                 default:
8566                     return nullptr;
8567             }
8568
8569         // Call, phi, and SIMD nodes are handled by MoveToNext{Call,Phi,SIMD}UseEdge, respectively.
8570         case GT_CALL:
8571         case GT_PHI:
8572 #ifdef FEATURE_SIMD
8573         case GT_SIMD:
8574 #endif
8575             break;
8576
8577         case GT_INITBLK:
8578         case GT_COPYBLK:
8579         case GT_COPYOBJ:
8580         {
8581             GenTreeBlkOp* blkOp = m_node->AsBlkOp();
8582
8583             bool blkOpReversed  = (blkOp->gtFlags & GTF_REVERSE_OPS) != 0;
8584             bool srcDstReversed = (blkOp->gtOp1->gtFlags & GTF_REVERSE_OPS) != 0;
8585
8586             if (!blkOpReversed)
8587             {
8588                 switch (m_state)
8589                 {
8590                     case 0:
8591                         return !srcDstReversed ? &blkOp->gtOp1->AsArgList()->gtOp1 : &blkOp->gtOp1->AsArgList()->gtOp2;
8592                     case 1:
8593                         return !srcDstReversed ? &blkOp->gtOp1->AsArgList()->gtOp2 : &blkOp->gtOp1->AsArgList()->gtOp1;
8594                     case 2:
8595                         return &blkOp->gtOp2;
8596                     default:
8597                         return nullptr;
8598                 }
8599             }
8600             else
8601             {
8602                 switch (m_state)
8603                 {
8604                     case 0:
8605                         return &blkOp->gtOp2;
8606                     case 1:
8607                         return !srcDstReversed ? &blkOp->gtOp1->AsArgList()->gtOp1 : &blkOp->gtOp1->AsArgList()->gtOp2;
8608                     case 2:
8609                         return !srcDstReversed ? &blkOp->gtOp1->AsArgList()->gtOp2 : &blkOp->gtOp1->AsArgList()->gtOp1;
8610                     default:
8611                         return nullptr;
8612                 }
8613             }
8614         }
8615         break;
8616
8617         case GT_LEA:
8618         {
8619             GenTreeAddrMode* lea = m_node->AsAddrMode();
8620
8621             bool hasOp1 = lea->gtOp1 != nullptr;
8622             if (!hasOp1)
8623             {
8624                 return m_state == 0 ? &lea->gtOp2 : nullptr;
8625             }
8626
8627             bool operandsReversed = (lea->gtFlags & GTF_REVERSE_OPS) != 0;
8628             switch (m_state)
8629             {
8630                 case 0:
8631                     return !operandsReversed ? &lea->gtOp1 : &lea->gtOp2;
8632                 case 1:
8633                     return !operandsReversed ? &lea->gtOp2 : &lea->gtOp1;
8634                 default:
8635                     return nullptr;
8636             }
8637         }
8638         break;
8639
8640         case GT_LIST:
8641             if (m_node->AsArgList()->IsAggregate())
8642             {
8643                 // List nodes that represent aggregates are handled by MoveNextAggregateUseEdge.
8644                 break;
8645             }
8646             __fallthrough;
8647
8648         default:
8649             if (m_node->OperIsConst() || m_node->OperIsLeaf())
8650             {
8651                 return nullptr;
8652             }
8653             else if (m_node->OperIsUnary())
8654             {
8655                 return m_state == 0 ? &m_node->AsUnOp()->gtOp1 : nullptr;
8656             }
8657             else if (m_node->OperIsBinary())
8658             {
8659                 bool operandsReversed = (m_node->gtFlags & GTF_REVERSE_OPS) != 0;
8660                 switch (m_state)
8661                 {
8662                     case 0:
8663                         return !operandsReversed ? &m_node->AsOp()->gtOp1 : &m_node->AsOp()->gtOp2;
8664                     case 1:
8665                         return !operandsReversed ? &m_node->AsOp()->gtOp2 : &m_node->AsOp()->gtOp1;
8666                     default:
8667                         return nullptr;
8668                 }
8669             }
8670     }
8671
8672     unreached();
8673 }
8674
8675 //------------------------------------------------------------------------
8676 // GenTreeUseEdgeIterator::MoveToNextCallUseEdge:
8677 //    Moves to the next operand of a call node. Unlike the simple nodes
8678 //    handled by `GetNextUseEdge`, call nodes have a variable number of
8679 //    operands stored in cons lists. This method expands the cons lists
8680 //    into the operands stored within.
8681 //
8682 void GenTreeUseEdgeIterator::MoveToNextCallUseEdge()
8683 {
8684     enum
8685     {
8686         CALL_INSTANCE = 0,
8687         CALL_ARGS = 1,
8688         CALL_LATE_ARGS = 2,
8689         CALL_CONTROL_EXPR = 3,
8690         CALL_COOKIE = 4,
8691         CALL_ADDRESS = 5,
8692         CALL_TERMINAL = 6,
8693     };
8694
8695     GenTreeCall* call = m_node->AsCall();
8696
8697     for (;;)
8698     {
8699         switch (m_state)
8700         {
8701             case CALL_INSTANCE:
8702                 m_state   = CALL_ARGS;
8703                 m_argList = call->gtCallArgs;
8704
8705                 if (call->gtCallObjp != nullptr)
8706                 {
8707                     m_edge = &call->gtCallObjp;
8708                     return;
8709                 }
8710                 break;
8711
8712             case CALL_ARGS:
8713             case CALL_LATE_ARGS:
8714                 if (m_argList == nullptr)
8715                 {
8716                     m_state++;
8717
8718                     if (m_state == CALL_LATE_ARGS)
8719                     {
8720                         m_argList = call->gtCallLateArgs;
8721                     }
8722                 }
8723                 else
8724                 {
8725                     GenTreeArgList* argNode = m_argList->AsArgList();
8726                     m_edge                  = &argNode->gtOp1;
8727                     m_argList               = argNode->Rest();
8728                     return;
8729                 }
8730                 break;
8731
8732             case CALL_CONTROL_EXPR:
8733                 m_state = call->gtCallType == CT_INDIRECT ? CALL_COOKIE : CALL_TERMINAL;
8734
8735                 if (call->gtControlExpr != nullptr)
8736                 {
8737                     m_edge = &call->gtControlExpr;
8738                     return;
8739                 }
8740                 break;
8741
8742             case CALL_COOKIE:
8743                 assert(call->gtCallType == CT_INDIRECT);
8744
8745                 m_state = CALL_ADDRESS;
8746
8747                 if (call->gtCallCookie != nullptr)
8748                 {
8749                     m_edge = &call->gtCallCookie;
8750                     return;
8751                 }
8752                 break;
8753
8754             case CALL_ADDRESS:
8755                 assert(call->gtCallType == CT_INDIRECT);
8756
8757                 m_state = CALL_TERMINAL;
8758                 if (call->gtCallAddr != nullptr)
8759                 {
8760                     m_edge = &call->gtCallAddr;
8761                     return;
8762                 }
8763                 break;
8764
8765             default:
8766                 m_node    = nullptr;
8767                 m_edge    = nullptr;
8768                 m_argList = nullptr;
8769                 m_state   = -1;
8770                 return;
8771         }
8772     }
8773 }
8774
8775 //------------------------------------------------------------------------
8776 // GenTreeUseEdgeIterator::MoveToNextPhiUseEdge:
8777 //    Moves to the next operand of a phi node. Unlike the simple nodes
8778 //    handled by `GetNextUseEdge`, phi nodes have a variable number of
8779 //    operands stored in a cons list. This method expands the cons list
8780 //    into the operands stored within.
8781 //
8782 void GenTreeUseEdgeIterator::MoveToNextPhiUseEdge()
8783 {
8784     GenTreeUnOp* phi = m_node->AsUnOp();
8785
8786     for (;;)
8787     {
8788         switch (m_state)
8789         {
8790             case 0:
8791                 m_state   = 1;
8792                 m_argList = phi->gtOp1;
8793                 break;
8794
8795             case 1:
8796                 if (m_argList == nullptr)
8797                 {
8798                     m_state = 2;
8799                 }
8800                 else
8801                 {
8802                     GenTreeArgList* argNode = m_argList->AsArgList();
8803                     m_edge                  = &argNode->gtOp1;
8804                     m_argList               = argNode->Rest();
8805                     return;
8806                 }
8807                 break;
8808
8809             default:
8810                 m_node    = nullptr;
8811                 m_edge    = nullptr;
8812                 m_argList = nullptr;
8813                 m_state   = -1;
8814                 return;
8815         }
8816     }
8817 }
8818
8819 #ifdef FEATURE_SIMD
8820 //------------------------------------------------------------------------
8821 // GenTreeUseEdgeIterator::MoveToNextSIMDUseEdge:
8822 //    Moves to the next operand of a SIMD node. Most SIMD nodes have a
8823 //    fixed number of operands and are handled accordingly.
8824 //    `SIMDIntrinsicInitN` nodes, however, have a variable number of
8825 //    operands stored in a cons list. This method expands the cons list
8826 //    into the operands stored within.
8827 //
8828 void GenTreeUseEdgeIterator::MoveToNextSIMDUseEdge()
8829 {
8830     GenTreeSIMD* simd = m_node->AsSIMD();
8831
8832     if (simd->gtSIMDIntrinsicID != SIMDIntrinsicInitN)
8833     {
8834         bool operandsReversed = (simd->gtFlags & GTF_REVERSE_OPS) != 0;
8835         switch (m_state)
8836         {
8837             case 0:
8838                 m_edge = !operandsReversed ? &simd->gtOp1 : &simd->gtOp2;
8839                 break;
8840             case 1:
8841                 m_edge = !operandsReversed ? &simd->gtOp2 : &simd->gtOp1;
8842                 break;
8843             default:
8844                 m_edge = nullptr;
8845                 break;
8846         }
8847
8848         if (m_edge != nullptr && *m_edge != nullptr)
8849         {
8850             m_state++;
8851         }
8852         else
8853         {
8854             m_node  = nullptr;
8855             m_state = -1;
8856         }
8857
8858         return;
8859     }
8860
8861     for (;;)
8862     {
8863         switch (m_state)
8864         {
8865             case 0:
8866                 m_state   = 1;
8867                 m_argList = simd->gtOp1;
8868                 break;
8869
8870             case 1:
8871                 if (m_argList == nullptr)
8872                 {
8873                     m_state = 2;
8874                 }
8875                 else
8876                 {
8877                     GenTreeArgList* argNode = m_argList->AsArgList();
8878                     m_edge                  = &argNode->gtOp1;
8879                     m_argList               = argNode->Rest();
8880                     return;
8881                 }
8882                 break;
8883
8884             default:
8885                 m_node    = nullptr;
8886                 m_edge    = nullptr;
8887                 m_argList = nullptr;
8888                 m_state   = -1;
8889                 return;
8890         }
8891     }
8892 }
8893 #endif // FEATURE_SIMD
8894
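//------------------------------------------------------------------------
// GenTreeUseEdgeIterator::MoveToNextAggregateUseEdge:
//    Moves to the next operand of an aggregate GT_LIST node. As with phi
//    nodes, the operands are stored in a cons list, which this method
//    expands into the operands stored within.
//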
8895 void GenTreeUseEdgeIterator::MoveToNextAggregateUseEdge()
8896 {
8897     assert(m_node->OperGet() == GT_LIST);
8898     assert(m_node->AsArgList()->IsAggregate());
8899
8900     for (;;)
8901     {
8902         switch (m_state)
8903         {
8904             case 0:
8905                 m_state   = 1;
8906                 m_argList = m_node;
8907                 break;
8908
8909             case 1:
8910                 if (m_argList == nullptr)
8911                 {
8912                     m_state = 2;
8913                 }
8914                 else
8915                 {
8916                     GenTreeArgList* aggNode = m_argList->AsArgList();
8917                     m_edge                  = &aggNode->gtOp1;
8918                     m_argList               = aggNode->Rest();
8919                     return;
8920                 }
8921                 break;
8922
8923             default:
8924                 m_node    = nullptr;
8925                 m_edge    = nullptr;
8926                 m_argList = nullptr;
8927                 m_state   = -1;
8928                 return;
8929         }
8930     }
8931 }
8932
8933 //------------------------------------------------------------------------
8934 // GenTreeUseEdgeIterator::operator++:
8935 //    Advances the iterator to the next operand.
8936 //
8937 GenTreeUseEdgeIterator& GenTreeUseEdgeIterator::operator++()
8938 {
8939     if (m_state == -1)
8940     {
8941         // If we've reached the terminal state, do nothing.
8942         assert(m_node == nullptr);
8943         assert(m_edge == nullptr);
8944         assert(m_argList == nullptr);
8945     }
8946     else
8947     {
8948         // Otherwise, move to the next operand in the node.
8949         genTreeOps op = m_node->OperGet();
8950         if (op == GT_CALL)
8951         {
8952             MoveToNextCallUseEdge();
8953         }
8954         else if (op == GT_PHI)
8955         {
8956             MoveToNextPhiUseEdge();
8957         }
8958 #ifdef FEATURE_SIMD
8959         else if (op == GT_SIMD)
8960         {
8961             MoveToNextSIMDUseEdge();
8962         }
8963 #endif
8964         else if ((op == GT_LIST) && (m_node->AsArgList()->IsAggregate()))
8965         {
8966             MoveToNextAggregateUseEdge();
8967         }
8968         else
8969         {
8970             m_edge = GetNextUseEdge();
8971             if (m_edge != nullptr && *m_edge != nullptr)
8972             {
8973                 m_state++;
8974             }
8975             else
8976             {
8977                 m_edge  = nullptr;
8978                 m_node  = nullptr;
8979                 m_state = -1;
8980             }
8981         }
8982     }
8983
8984     return *this;
8985 }
8986
8987 GenTreeUseEdgeIterator GenTree::UseEdgesBegin()
8988 {
8989     return GenTreeUseEdgeIterator(this);
8990 }
8991
8992 GenTreeUseEdgeIterator GenTree::UseEdgesEnd()
8993 {
8994     return GenTreeUseEdgeIterator();
8995 }
8996
8997 IteratorPair<GenTreeUseEdgeIterator> GenTree::UseEdges()
8998 {
8999     return MakeIteratorPair(UseEdgesBegin(), UseEdgesEnd());
9000 }
9001
9002 GenTreeOperandIterator GenTree::OperandsBegin()
9003 {
9004     return GenTreeOperandIterator(this);
9005 }
9006
9007 GenTreeOperandIterator GenTree::OperandsEnd()
9008 {
9009     return GenTreeOperandIterator();
9010 }
9011
9012 IteratorPair<GenTreeOperandIterator> GenTree::Operands()
9013 {
9014     return MakeIteratorPair(OperandsBegin(), OperandsEnd());
9015 }
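
// Illustrative usage sketch: UseEdges() visits GenTree** edges (so an operand can
// be replaced through the edge it occupies), while Operands() visits the operand
// nodes themselves:
//
//     for (GenTree* operand : someNode->Operands())
//     {
//         // ... examine 'operand' ...
//     }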
9016
9017 bool GenTree::Precedes(GenTree* other)
9018 {
9019     assert(other != nullptr);
9020
9021     for (GenTree* node = gtNext; node != nullptr; node = node->gtNext)
9022     {
9023         if (node == other)
9024         {
9025             return true;
9026         }
9027     }
9028
9029     return false;
9030 }
9031
9032 #ifdef DEBUG
9033
9034 /* static */ int GenTree::gtDispFlags(unsigned flags, unsigned debugFlags)
9035 {
9036     printf("%c", (flags & GTF_ASG) ? 'A' : '-');
9037     printf("%c", (flags & GTF_CALL) ? 'C' : '-');
9038     printf("%c", (flags & GTF_EXCEPT) ? 'X' : '-');
9039     printf("%c", (flags & GTF_GLOB_REF) ? 'G' : '-');
9040     printf("%c", (debugFlags & GTF_DEBUG_NODE_MORPHED) ? '+' : // First print '+' if GTF_DEBUG_NODE_MORPHED is set
9041                      (flags & GTF_ORDER_SIDEEFF) ? 'O' : '-'); // otherwise print 'O' or '-'
9042     printf("%c", (flags & GTF_COLON_COND) ? '?' : '-');
9043     printf("%c", (flags & GTF_DONT_CSE) ? 'N' :           // N is for No cse
9044                      (flags & GTF_MAKE_CSE) ? 'H' : '-'); // H is for Hoist this expr
9045     printf("%c", (flags & GTF_REVERSE_OPS) ? 'R' : '-');
9046     printf("%c", (flags & GTF_UNSIGNED) ? 'U' : (flags & GTF_BOOLEAN) ? 'B' : '-');
9047 #if FEATURE_SET_FLAGS
9048     printf("%c", (flags & GTF_SET_FLAGS) ? 'S' : '-');
9049 #endif
9050     printf("%c", (flags & GTF_LATE_ARG) ? 'L' : '-');
9051     printf("%c", (flags & GTF_SPILLED) ? 'z' : (flags & GTF_SPILL) ? 'Z' : '-');
9052     return 12; // displayed 12 flag characters
9053 }
9054
9055 /*****************************************************************************/
9056
9057 void Compiler::gtDispNodeName(GenTree* tree)
9058 {
9059     /* print the node name */
9060
9061     const char* name;
9062
9063     assert(tree);
9064     if (tree->gtOper < GT_COUNT)
9065     {
9066         name = GenTree::NodeName(tree->OperGet());
9067     }
9068     else
9069     {
9070         name = "<ERROR>";
9071     }
9072     char  buf[32];
9073     char* bufp = &buf[0];
9074
9075     if ((tree->gtOper == GT_CNS_INT) && tree->IsIconHandle())
9076     {
9077         sprintf_s(bufp, sizeof(buf), " %s(h)%c", name, 0);
9078     }
9079     else if (tree->gtOper == GT_PUTARG_STK)
9080     {
9081         sprintf_s(bufp, sizeof(buf), " %s [+0x%02x]%c", name, tree->AsPutArgStk()->getArgOffset(), 0);
9082     }
9083     else if (tree->gtOper == GT_CALL)
9084     {
9085         const char* callType = "call";
9086         const char* gtfType  = "";
9087         const char* ctType   = "";
9088         char        gtfTypeBuf[100];
9089
9090         if (tree->gtCall.gtCallType == CT_USER_FUNC)
9091         {
9092             if ((tree->gtFlags & GTF_CALL_VIRT_KIND_MASK) != GTF_CALL_NONVIRT)
9093             {
9094                 callType = "callv";
9095             }
9096         }
9097         else if (tree->gtCall.gtCallType == CT_HELPER)
9098         {
9099             ctType = " help";
9100         }
9101         else if (tree->gtCall.gtCallType == CT_INDIRECT)
9102         {
9103             ctType = " ind";
9104         }
9105         else
9106         {
9107             assert(!"Unknown gtCallType");
9108         }
9109
9110         if (tree->gtFlags & GTF_CALL_NULLCHECK)
9111         {
9112             gtfType = " nullcheck";
9113         }
9114         if (tree->gtFlags & GTF_CALL_VIRT_VTABLE)
9115         {
9116             gtfType = " ind";
9117         }
9118         else if (tree->gtFlags & GTF_CALL_VIRT_STUB)
9119         {
9120             gtfType = " stub";
9121         }
9122 #ifdef FEATURE_READYTORUN_COMPILER
9123         else if (tree->gtCall.IsR2RRelativeIndir())
9124         {
9125             gtfType = " r2r_ind";
9126         }
9127 #endif // FEATURE_READYTORUN_COMPILER
9128         else if (tree->gtFlags & GTF_CALL_UNMANAGED)
9129         {
9130             char* gtfTypeBufWalk = gtfTypeBuf;
9131             gtfTypeBufWalk += SimpleSprintf_s(gtfTypeBufWalk, gtfTypeBuf, sizeof(gtfTypeBuf), " unman");
9132             if (tree->gtFlags & GTF_CALL_POP_ARGS)
9133             {
9134                 gtfTypeBufWalk += SimpleSprintf_s(gtfTypeBufWalk, gtfTypeBuf, sizeof(gtfTypeBuf), " popargs");
9135             }
9136             if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_UNMGD_THISCALL)
9137             {
9138                 gtfTypeBufWalk += SimpleSprintf_s(gtfTypeBufWalk, gtfTypeBuf, sizeof(gtfTypeBuf), " thiscall");
9139             }
9140             gtfType = gtfTypeBuf;
9141         }
9142
9143         sprintf_s(bufp, sizeof(buf), " %s%s%s%c", callType, ctType, gtfType, 0);
9144     }
9145     else if (tree->gtOper == GT_ARR_ELEM)
9146     {
9147         bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), " %s[", name);
9148         for (unsigned rank = tree->gtArrElem.gtArrRank - 1; rank; rank--)
9149         {
9150             bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), ",");
9151         }
9152         SimpleSprintf_s(bufp, buf, sizeof(buf), "]");
9153     }
9154     else if (tree->gtOper == GT_ARR_OFFSET || tree->gtOper == GT_ARR_INDEX)
9155     {
9156         bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), " %s[", name);
9157         unsigned char currDim;
9158         unsigned char rank;
9159         if (tree->gtOper == GT_ARR_OFFSET)
9160         {
9161             currDim = tree->gtArrOffs.gtCurrDim;
9162             rank    = tree->gtArrOffs.gtArrRank;
9163         }
9164         else
9165         {
9166             currDim = tree->gtArrIndex.gtCurrDim;
9167             rank    = tree->gtArrIndex.gtArrRank;
9168         }
9169
9170         for (unsigned char dim = 0; dim < rank; dim++)
9171         {
9172             // Use a de facto standard i,j,k for the dimensions.
9173             // Note that we only support up to rank 3 arrays with these nodes, so we won't run out of characters.
9174             char dimChar = '*';
9175             if (dim == currDim)
9176             {
9177                 dimChar = 'i' + dim;
9178             }
9179             else if (dim > currDim)
9180             {
9181                 dimChar = ' ';
9182             }
9183
9184             bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "%c", dimChar);
9185             if (dim != rank - 1)
9186             {
9187                 bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), ",");
9188             }
9189         }
9190         SimpleSprintf_s(bufp, buf, sizeof(buf), "]");
9191     }
9192     else if (tree->gtOper == GT_LEA)
9193     {
9194         GenTreeAddrMode* lea = tree->AsAddrMode();
9195         bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), " %s(", name);
9196         if (lea->Base() != nullptr)
9197         {
9198             bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "b+");
9199         }
9200         if (lea->Index() != nullptr)
9201         {
9202             bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "(i*%d)+", lea->gtScale);
9203         }
9204         bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "%d)", lea->gtOffset);
9205     }
9206     else if (tree->gtOper == GT_ARR_BOUNDS_CHECK)
9207     {
9208         switch (tree->gtBoundsChk.gtThrowKind)
9209         {
9210             case SCK_RNGCHK_FAIL:
9211                 sprintf_s(bufp, sizeof(buf), " %s_Rng", name);
9212                 break;
9213             case SCK_ARG_EXCPN:
9214                 sprintf_s(bufp, sizeof(buf), " %s_Arg", name);
9215                 break;
9216             case SCK_ARG_RNG_EXCPN:
9217                 sprintf_s(bufp, sizeof(buf), " %s_ArgRng", name);
9218                 break;
9219             default:
9220                 unreached();
9221         }
9222     }
9223     else if (tree->gtOverflowEx())
9224     {
9225         sprintf_s(bufp, sizeof(buf), " %s_ovfl%c", name, 0);
9226     }
9227     else
9228     {
9229         sprintf_s(bufp, sizeof(buf), " %s%c", name, 0);
9230     }
9231
9232     if (strlen(buf) < 10)
9233     {
9234         printf(" %-10s", buf);
9235     }
9236     else
9237     {
9238         printf(" %s", buf);
9239     }
9240 }
9241
9242 void Compiler::gtDispVN(GenTree* tree)
9243 {
9244     if (tree->gtVNPair.GetLiberal() != ValueNumStore::NoVN)
9245     {
9246         assert(tree->gtVNPair.GetConservative() != ValueNumStore::NoVN);
9247         printf(" ");
9248         vnpPrint(tree->gtVNPair, 0);
9249     }
9250 }
9251
9252 //------------------------------------------------------------------------
9253 // gtDispNode: Print a tree to jitstdout.
9254 //
9255 // Arguments:
9256 //    tree - the tree to be printed
9257 //    indentStack - the specification for the current level of indentation & arcs
9258 //    msg         - a contextual message (i.e. from the parent) to print
9259 //
9260 // Return Value:
9261 //    None.
9262 //
9263 // Notes:
9264 //    'indentStack' may be null, in which case no indentation or arcs are printed
9265 //    'msg' may be null
9266
9267 void Compiler::gtDispNode(GenTreePtr tree, IndentStack* indentStack, __in __in_z __in_opt const char* msg, bool isLIR)
9268 {
9269     bool printPointer = true; // always true..
9270     bool printFlags   = true; // always true..
9271     bool printCost    = true; // always true..
9272
9273     int msgLength = 25;
9274
9275     GenTree* prev;
9276
9277     if (tree->gtSeqNum)
9278     {
9279         printf("N%03u ", tree->gtSeqNum);
9280         if (tree->gtCostsInitialized)
9281         {
9282             printf("(%3u,%3u) ", tree->gtCostEx, tree->gtCostSz);
9283         }
9284         else
9285         {
9286             printf("(???"
9287                    ",???"
9288                    ") "); // This probably indicates a bug: the node has a sequence number, but not costs.
9289         }
9290     }
9291     else
9292     {
9293         if (tree->gtOper == GT_STMT)
9294         {
9295             prev = tree->gtStmt.gtStmtExpr;
9296         }
9297         else
9298         {
9299             prev = tree;
9300         }
9301
9302         bool     hasSeqNum = true;
9303         unsigned dotNum    = 0;
9304         do
9305         {
9306             dotNum++;
9307             prev = prev->gtPrev;
9308
9309             if ((prev == nullptr) || (prev == tree))
9310             {
9311                 hasSeqNum = false;
9312                 break;
9313             }
9314
9315             assert(prev);
9316         } while (prev->gtSeqNum == 0);
9317
9318         // If we have an indent stack, don't add additional characters,
9319         // as it will mess up the alignment.
9320         bool displayDotNum = tree->gtOper != GT_STMT && hasSeqNum && (indentStack == nullptr);
9321         if (displayDotNum)
9322         {
9323             printf("N%03u.%02u ", prev->gtSeqNum, dotNum);
9324         }
9325         else
9326         {
9327             printf("     ");
9328         }
9329
9330         if (tree->gtCostsInitialized)
9331         {
9332             printf("(%3u,%3u) ", tree->gtCostEx, tree->gtCostSz);
9333         }
9334         else
9335         {
9336             if (displayDotNum)
9337             {
9338                 // Do better alignment in this case
9339                 printf("       ");
9340             }
9341             else
9342             {
9343                 printf("          ");
9344             }
9345         }
9346     }
9347
9348     if (optValnumCSE_phase)
9349     {
9350         if (IS_CSE_INDEX(tree->gtCSEnum))
9351         {
9352             printf("CSE #%02d (%s)", GET_CSE_INDEX(tree->gtCSEnum), (IS_CSE_USE(tree->gtCSEnum) ? "use" : "def"));
9353         }
9354         else
9355         {
9356             printf("             ");
9357         }
9358     }
9359
9360     /* Print the node ID */
9361     printTreeID(tree);
9362     printf(" ");
9363
9364     if (tree->gtOper >= GT_COUNT)
9365     {
9366         printf(" **** ILLEGAL NODE ****");
9367         return;
9368     }
9369
9370     if (printFlags)
9371     {
9372         /* First print the flags associated with the node */
9373         switch (tree->gtOper)
9374         {
9375             case GT_LEA:
9376             case GT_IND:
9377                 // We prefer printing R, V or U
9378                 if ((tree->gtFlags & (GTF_IND_REFARR_LAYOUT | GTF_IND_VOLATILE | GTF_IND_UNALIGNED)) == 0)
9379                 {
9380                     if (tree->gtFlags & GTF_IND_TGTANYWHERE)
9381                     {
9382                         printf("*");
9383                         --msgLength;
9384                         break;
9385                     }
9386                     if (tree->gtFlags & GTF_IND_INVARIANT)
9387                     {
9388                         printf("#");
9389                         --msgLength;
9390                         break;
9391                     }
9392                     if (tree->gtFlags & GTF_IND_ARR_INDEX)
9393                     {
9394                         printf("a");
9395                         --msgLength;
9396                         break;
9397                     }
9398                 }
9399                 __fallthrough;
9400
9401             case GT_INDEX:
9402
9403                 if ((tree->gtFlags & (GTF_IND_VOLATILE | GTF_IND_UNALIGNED)) == 0) // We prefer printing V or U over R
9404                 {
9405                     if (tree->gtFlags & GTF_IND_REFARR_LAYOUT)
9406                     {
9407                         printf("R");
9408                         --msgLength;
9409                         break;
9410                     } // R means RefArray
9411                 }
9412                 __fallthrough;
9413
9414             case GT_FIELD:
9415             case GT_CLS_VAR:
9416                 if (tree->gtFlags & GTF_IND_VOLATILE)
9417                 {
9418                     printf("V");
9419                     --msgLength;
9420                     break;
9421                 }
9422                 if (tree->gtFlags & GTF_IND_UNALIGNED)
9423                 {
9424                     printf("U");
9425                     --msgLength;
9426                     break;
9427                 }
9428                 goto DASH;
9429
9430             case GT_INITBLK:
9431             case GT_COPYBLK:
9432             case GT_COPYOBJ:
9433                 if (tree->AsBlkOp()->IsVolatile())
9434                 {
9435                     printf("V");
9436                     --msgLength;
9437                     break;
9438                 }
9439                 if (tree->gtFlags & GTF_BLK_UNALIGNED)
9440                 {
9441                     printf("U");
9442                     --msgLength;
9443                     break;
9444                 }
9445                 goto DASH;
9446
9447             case GT_CALL:
9448                 if (tree->gtFlags & GTF_CALL_INLINE_CANDIDATE)
9449                 {
9450                     printf("I");
9451                     --msgLength;
9452                     break;
9453                 }
9454                 if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_RETBUFFARG)
9455                 {
9456                     printf("S");
9457                     --msgLength;
9458                     break;
9459                 }
9460                 if (tree->gtFlags & GTF_CALL_HOISTABLE)
9461                 {
9462                     printf("H");
9463                     --msgLength;
9464                     break;
9465                 }
9466
9467                 goto DASH;
9468
9469             case GT_MUL:
9470                 if (tree->gtFlags & GTF_MUL_64RSLT)
9471                 {
9472                     printf("L");
9473                     --msgLength;
9474                     break;
9475                 }
9476                 goto DASH;
9477
9478             case GT_ADDR:
9479                 if (tree->gtFlags & GTF_ADDR_ONSTACK)
9480                 {
9481                     printf("L");
9482                     --msgLength;
9483                     break;
9484                 } // L means LclVar
9485                 goto DASH;
9486
9487             case GT_LCL_FLD:
9488             case GT_LCL_VAR:
9489             case GT_LCL_VAR_ADDR:
9490             case GT_LCL_FLD_ADDR:
9491             case GT_STORE_LCL_FLD:
9492             case GT_STORE_LCL_VAR:
9493             case GT_REG_VAR:
9494                 if (tree->gtFlags & GTF_VAR_USEASG)
9495                 {
9496                     printf("U");
9497                     --msgLength;
9498                     break;
9499                 }
9500                 if (tree->gtFlags & GTF_VAR_USEDEF)
9501                 {
9502                     printf("B");
9503                     --msgLength;
9504                     break;
9505                 }
9506                 if (tree->gtFlags & GTF_VAR_DEF)
9507                 {
9508                     printf("D");
9509                     --msgLength;
9510                     break;
9511                 }
9512                 if (tree->gtFlags & GTF_VAR_CAST)
9513                 {
9514                     printf("C");
9515                     --msgLength;
9516                     break;
9517                 }
9518                 if (tree->gtFlags & GTF_VAR_ARR_INDEX)
9519                 {
9520                     printf("i");
9521                     --msgLength;
9522                     break;
9523                 }
9524                 goto DASH;
9525
9526             case GT_EQ:
9527             case GT_NE:
9528             case GT_LT:
9529             case GT_LE:
9530             case GT_GE:
9531             case GT_GT:
9532                 if (tree->gtFlags & GTF_RELOP_NAN_UN)
9533                 {
9534                     printf("N");
9535                     --msgLength;
9536                     break;
9537                 }
9538                 if (tree->gtFlags & GTF_RELOP_JMP_USED)
9539                 {
9540                     printf("J");
9541                     --msgLength;
9542                     break;
9543                 }
9544                 if (tree->gtFlags & GTF_RELOP_QMARK)
9545                 {
9546                     printf("Q");
9547                     --msgLength;
9548                     break;
9549                 }
9550                 if (tree->gtFlags & GTF_RELOP_SMALL)
9551                 {
9552                     printf("S");
9553                     --msgLength;
9554                     break;
9555                 }
9556                 goto DASH;
9557
9558             default:
9559             DASH:
9560                 printf("-");
9561                 --msgLength;
9562                 break;
9563         }
9564
9565         /* Then print the general purpose flags */
9566         unsigned flags = tree->gtFlags;
9567
9568         if (tree->OperIsBinary())
9569         {
9570             genTreeOps oper = tree->OperGet();
9571
9572             // Check for GTF_ADDRMODE_NO_CSE flag on add/mul/shl Binary Operators
9573             if ((oper == GT_ADD) || (oper == GT_MUL) || (oper == GT_LSH))
9574             {
9575                 if ((tree->gtFlags & GTF_ADDRMODE_NO_CSE) != 0)
9576                 {
9577                     flags |= GTF_DONT_CSE; // Force the GTF_ADDRMODE_NO_CSE flag to print out like GTF_DONT_CSE
9578                 }
9579             }
9580         }
9581         else // !tree->OperIsBinary()
9582         {
9583             // the GTF_REVERSE_OPS flag only applies to binary operations
9584             flags &= ~GTF_REVERSE_OPS; // this bit is reused for GTF_VAR_ARR_INDEX above
9585         }
9586
9587         msgLength -= GenTree::gtDispFlags(flags, tree->gtDebugFlags);
9588 /*
9589     printf("%c", (flags & GTF_ASG           ) ? 'A' : '-');
9590     printf("%c", (flags & GTF_CALL          ) ? 'C' : '-');
9591     printf("%c", (flags & GTF_EXCEPT        ) ? 'X' : '-');
9592     printf("%c", (flags & GTF_GLOB_REF      ) ? 'G' : '-');
9593     printf("%c", (flags & GTF_ORDER_SIDEEFF ) ? 'O' : '-');
9594     printf("%c", (flags & GTF_COLON_COND    ) ? '?' : '-');
9595     printf("%c", (flags & GTF_DONT_CSE      ) ? 'N' :        // N is for No cse
9596                  (flags & GTF_MAKE_CSE      ) ? 'H' : '-');  // H is for Hoist this expr
9597     printf("%c", (flags & GTF_REVERSE_OPS   ) ? 'R' : '-');
9598     printf("%c", (flags & GTF_UNSIGNED      ) ? 'U' :
9599                  (flags & GTF_BOOLEAN       ) ? 'B' : '-');
9600     printf("%c", (flags & GTF_SET_FLAGS     ) ? 'S' : '-');
9601     printf("%c", (flags & GTF_SPILLED       ) ? 'z' : '-');
9602     printf("%c", (flags & GTF_SPILL         ) ? 'Z' : '-');
9603 */
9604
9605 #if FEATURE_STACK_FP_X87
9606         BYTE fpLvl = (BYTE)tree->gtFPlvl;
9607         if (IsUninitialized(fpLvl) || fpLvl == 0x00)
9608         {
9609             printf("-");
9610         }
9611         else
9612         {
9613             printf("%1u", tree->gtFPlvl);
9614         }
9615 #endif // FEATURE_STACK_FP_X87
9616     }
9617
9618     // If we're printing a node for LIR, we use the space normally associated with the message
9619     // to display the node's temp name (if any)
9620     const bool hasOperands = tree->OperandsBegin() != tree->OperandsEnd();
9621     if (isLIR)
9622     {
9623         assert(msg == nullptr);
9624
9625         // If the tree does not have any operands, we do not display the indent stack. This gives us
9626         // two additional characters for alignment.
9627         if (!hasOperands)
9628         {
9629             msgLength += 1;
9630         }
9631
9632         if (tree->IsValue())
9633         {
9634             const size_t bufLength = msgLength - 1;
9635             msg                    = reinterpret_cast<char*>(alloca(bufLength * sizeof(char)));
9636             sprintf_s(const_cast<char*>(msg), bufLength, "t%d = %s", tree->gtTreeID, hasOperands ? "" : " ");
9637         }
9638     }
9639
9640     /* print the msg associated with the node */
9641
9642     if (msg == nullptr)
9643     {
9644         msg = "";
9645     }
9646     if (msgLength < 0)
9647     {
9648         msgLength = 0;
9649     }
9650
9651     printf(isLIR ? " %+*s" : " %-*s", msgLength, msg);
9652
9653     /* Indent the node accordingly */
9654     if (!isLIR || hasOperands)
9655     {
9656         printIndent(indentStack);
9657     }
9658
9659     gtDispNodeName(tree);
9660
9661     assert(tree == nullptr || tree->gtOper < GT_COUNT);
9662
9663     if (tree)
9664     {
9665         /* print the type of the node */
9666         if (tree->gtOper != GT_CAST)
9667         {
9668             printf(" %-6s", varTypeName(tree->TypeGet()));
9669             if (tree->gtOper == GT_LCL_VAR || tree->gtOper == GT_STORE_LCL_VAR)
9670             {
9671                 LclVarDsc* varDsc = &lvaTable[tree->gtLclVarCommon.gtLclNum];
9672                 if (varDsc->lvAddrExposed)
9673                 {
9674                     printf("(AX)"); // Variable has address exposed.
9675                 }
9676
9677                 if (varDsc->lvUnusedStruct)
9678                 {
9679                     assert(varDsc->lvPromoted);
9680                     printf("(U)"); // Unused struct
9681                 }
9682                 else if (varDsc->lvPromoted)
9683                 {
9684                     assert(varTypeIsPromotable(varDsc));
9685                     printf("(P)"); // Promoted struct
9686                 }
9687             }
9688
9689             if (tree->gtOper == GT_STMT)
9690             {
9691                 if (opts.compDbgInfo)
9692                 {
9693                     IL_OFFSET endIL = tree->gtStmt.gtStmtLastILoffs;
9694
9695                     printf("(IL ");
9696                     if (tree->gtStmt.gtStmtILoffsx == BAD_IL_OFFSET)
9697                     {
9698                         printf("  ???");
9699                     }
9700                     else
9701                     {
9702                         printf("0x%03X", jitGetILoffs(tree->gtStmt.gtStmtILoffsx));
9703                     }
9704                     printf("...");
9705                     if (endIL == BAD_IL_OFFSET)
9706                     {
9707                         printf("  ???");
9708                     }
9709                     else
9710                     {
9711                         printf("0x%03X", endIL);
9712                     }
9713                     printf(")");
9714                 }
9715             }
9716
9717             if (tree->IsArgPlaceHolderNode() && (tree->gtArgPlace.gtArgPlaceClsHnd != nullptr))
9718             {
9719                 printf(" => [clsHnd=%08X]", dspPtr(tree->gtArgPlace.gtArgPlaceClsHnd));
9720             }
9721         }
9722
9723         // for tracking down problems in reguse prediction or liveness tracking
9724
9725         if (verbose && 0)
9726         {
9727             printf(" RR=");
9728             dspRegMask(tree->gtRsvdRegs);
9729 #ifdef LEGACY_BACKEND
9730             printf(",UR=");
9731             dspRegMask(tree->gtUsedRegs);
9732 #endif // LEGACY_BACKEND
9733             printf("\n");
9734         }
9735     }
9736 }
9737
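//------------------------------------------------------------------------
// gtDispRegVal: Print the register(s) assigned to 'tree', if any: the single
//    register (or register pair), the extra registers of a multi-reg call or
//    of a copy/reload of one, and an " RV" marker when GTF_REG_VAL is set.
//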
9738 void Compiler::gtDispRegVal(GenTree* tree)
9739 {
9740     switch (tree->GetRegTag())
9741     {
9742         // Don't display NOREG; the absence of this tag will imply this state
9743         // case GenTree::GT_REGTAG_NONE:       printf(" NOREG");   break;
9744
9745         case GenTree::GT_REGTAG_REG:
9746             printf(" REG %s", compRegVarName(tree->gtRegNum));
9747             break;
9748
9749 #if CPU_LONG_USES_REGPAIR
9750         case GenTree::GT_REGTAG_REGPAIR:
9751             printf(" PAIR %s", compRegPairName(tree->gtRegPair));
9752             break;
9753 #endif
9754
9755         default:
9756             break;
9757     }
9758
9759     if (tree->IsMultiRegCall())
9760     {
9761         // 0th reg is gtRegNum, which is already printed above.
9762         // Print the remaining regs of a multi-reg call node.
9763         GenTreeCall* call     = tree->AsCall();
9764         unsigned     regCount = call->GetReturnTypeDesc()->GetReturnRegCount();
9765         for (unsigned i = 1; i < regCount; ++i)
9766         {
9767             printf(",%s", compRegVarName(call->GetRegNumByIdx(i)));
9768         }
9769     }
9770     else if (tree->IsCopyOrReloadOfMultiRegCall())
9771     {
9772         GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
9773         GenTreeCall*         call         = tree->gtGetOp1()->AsCall();
9774         unsigned             regCount     = call->GetReturnTypeDesc()->GetReturnRegCount();
9775         for (unsigned i = 1; i < regCount; ++i)
9776         {
9777             printf(",%s", compRegVarName(copyOrReload->GetRegNumByIdx(i)));
9778         }
9779     }
9780
9781     if (tree->gtFlags & GTF_REG_VAL)
9782     {
9783         printf(" RV");
9784     }
9785 }
9786
9787 // We usually don't expect to print anything longer than this string.
9788 #define LONGEST_COMMON_LCL_VAR_DISPLAY "V99 PInvokeFrame"
9789 #define LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH (sizeof(LONGEST_COMMON_LCL_VAR_DISPLAY))
9790 #define BUF_SIZE (LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH * 2)
9791
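//------------------------------------------------------------------------
// gtGetLclVarNameInfo: Map a JIT local number back to IL-level naming info.
//
// Notes:
//    On return, either *ilNameOut holds a well-known name (e.g. "this",
//    "RetBuf", "OutArgs"), or *ilKindOut/*ilNumOut describe a numbered kind
//    such as "arg", "loc", "tmp", "cse" or "rat".
//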
9792 void Compiler::gtGetLclVarNameInfo(unsigned lclNum, const char** ilKindOut, const char** ilNameOut, unsigned* ilNumOut)
9793 {
9794     const char* ilKind = nullptr;
9795     const char* ilName = nullptr;
9796
9797     unsigned ilNum = compMap2ILvarNum(lclNum);
9798
9799     if (ilNum == (unsigned)ICorDebugInfo::RETBUF_ILNUM)
9800     {
9801         ilName = "RetBuf";
9802     }
9803     else if (ilNum == (unsigned)ICorDebugInfo::VARARGS_HND_ILNUM)
9804     {
9805         ilName = "VarArgHandle";
9806     }
9807     else if (ilNum == (unsigned)ICorDebugInfo::TYPECTXT_ILNUM)
9808     {
9809         ilName = "TypeCtx";
9810     }
9811     else if (ilNum == (unsigned)ICorDebugInfo::UNKNOWN_ILNUM)
9812     {
9813 #if FEATURE_ANYCSE
9814         if (lclNumIsTrueCSE(lclNum))
9815         {
9816             ilKind = "cse";
9817             ilNum  = lclNum - optCSEstart;
9818         }
9819         else if (lclNum >= optCSEstart)
9820         {
9821             // Currently any new LclVars introduced after the CSE phase
9822             // are believed to be created by the "rationalizer"; hence the "rat" prefix.
9823             ilKind = "rat";
9824             ilNum  = lclNum - (optCSEstart + optCSEcount);
9825         }
9826         else
9827 #endif // FEATURE_ANYCSE
9828         {
9829             if (lclNum == info.compLvFrameListRoot)
9830             {
9831                 ilName = "FramesRoot";
9832             }
9833             else if (lclNum == lvaInlinedPInvokeFrameVar)
9834             {
9835                 ilName = "PInvokeFrame";
9836             }
9837             else if (lclNum == lvaGSSecurityCookie)
9838             {
9839                 ilName = "GsCookie";
9840             }
9841 #if FEATURE_FIXED_OUT_ARGS
9842             else if (lclNum == lvaPInvokeFrameRegSaveVar)
9843             {
9844                 ilName = "PInvokeFrameRegSave";
9845             }
9846             else if (lclNum == lvaOutgoingArgSpaceVar)
9847             {
9848                 ilName = "OutArgs";
9849             }
9850 #endif // FEATURE_FIXED_OUT_ARGS
9851 #ifdef _TARGET_ARM_
9852             else if (lclNum == lvaPromotedStructAssemblyScratchVar)
9853             {
9854                 ilName = "PromotedStructScratch";
9855             }
9856 #endif // _TARGET_ARM_
9857 #if !FEATURE_EH_FUNCLETS
9858             else if (lclNum == lvaShadowSPslotsVar)
9859             {
9860                 ilName = "EHSlots";
9861             }
9862 #endif // !FEATURE_EH_FUNCLETS
9863             else if (lclNum == lvaLocAllocSPvar)
9864             {
9865                 ilName = "LocAllocSP";
9866             }
9867 #if FEATURE_EH_FUNCLETS
9868             else if (lclNum == lvaPSPSym)
9869             {
9870                 ilName = "PSPSym";
9871             }
9872 #endif // FEATURE_EH_FUNCLETS
9873             else
9874             {
9875                 ilKind = "tmp";
9876                 if (compIsForInlining())
9877                 {
9878                     ilNum = lclNum - impInlineInfo->InlinerCompiler->info.compLocalsCount;
9879                 }
9880                 else
9881                 {
9882                     ilNum = lclNum - info.compLocalsCount;
9883                 }
9884             }
9885         }
9886     }
9887     else if (lclNum < (compIsForInlining() ? impInlineInfo->InlinerCompiler->info.compArgsCount : info.compArgsCount))
9888     {
9889         if (ilNum == 0 && !info.compIsStatic)
9890         {
9891             ilName = "this";
9892         }
9893         else
9894         {
9895             ilKind = "arg";
9896         }
9897     }
9898     else
9899     {
9900         if (!lvaTable[lclNum].lvIsStructField)
9901         {
9902             ilKind = "loc";
9903         }
9904         if (compIsForInlining())
9905         {
9906             ilNum -= impInlineInfo->InlinerCompiler->info.compILargsCount;
9907         }
9908         else
9909         {
9910             ilNum -= info.compILargsCount;
9911         }
9912     }
9913
9914     *ilKindOut = ilKind;
9915     *ilNameOut = ilName;
9916     *ilNumOut  = ilNum;
9917 }
9918
9919 /*****************************************************************************/
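// gtGetLclVarName: Print "V<num>" plus any IL name or kind info for the local
// (e.g. "V02 loc1" or "V01 this") into 'buf'; returns the number of characters
// written, or a negative value on failure.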
9920 int Compiler::gtGetLclVarName(unsigned lclNum, char* buf, unsigned buf_remaining)
9921 {
9922     char*    bufp_next    = buf;
9923     unsigned charsPrinted = 0;
9924     int      sprintf_result;
9925
9926     sprintf_result = sprintf_s(bufp_next, buf_remaining, "V%02u", lclNum);
9927
9928     if (sprintf_result < 0)
9929     {
9930         return sprintf_result;
9931     }
9932
9933     charsPrinted += sprintf_result;
9934     bufp_next += sprintf_result;
9935     buf_remaining -= sprintf_result;
9936
9937     const char* ilKind = nullptr;
9938     const char* ilName = nullptr;
9939     unsigned    ilNum  = 0;
9940
9941     Compiler::gtGetLclVarNameInfo(lclNum, &ilKind, &ilName, &ilNum);
9942
9943     if (ilName != nullptr)
9944     {
9945         sprintf_result = sprintf_s(bufp_next, buf_remaining, " %s", ilName);
9946         if (sprintf_result < 0)
9947         {
9948             return sprintf_result;
9949         }
9950         charsPrinted += sprintf_result;
9951         bufp_next += sprintf_result;
9952         buf_remaining -= sprintf_result;
9953     }
9954     else if (ilKind != nullptr)
9955     {
9956         sprintf_result = sprintf_s(bufp_next, buf_remaining, " %s%d", ilKind, ilNum);
9957         if (sprintf_result < 0)
9958         {
9959             return sprintf_result;
9960         }
9961         charsPrinted += sprintf_result;
9962         bufp_next += sprintf_result;
9963         buf_remaining -= sprintf_result;
9964     }
9965
9966     assert(charsPrinted > 0);
9967     assert(buf_remaining > 0);
9968
9969     return (int)charsPrinted;
9970 }
9971
9972 /*****************************************************************************
9973  * Get the local var name, and create a copy of the string that can be used in debug output.
9974  */
9975 char* Compiler::gtGetLclVarName(unsigned lclNum)
9976 {
9977     char buf[BUF_SIZE];
9978     int  charsPrinted = gtGetLclVarName(lclNum, buf, sizeof(buf) / sizeof(buf[0]));
9979     if (charsPrinted < 0)
9980     {
9981         return nullptr;
9982     }
9983
9984     char* retBuf = new (this, CMK_DebugOnly) char[charsPrinted + 1];
9985     strcpy_s(retBuf, charsPrinted + 1, buf);
9986     return retBuf;
9987 }
9988
9989 /*****************************************************************************/
9990 void Compiler::gtDispLclVar(unsigned lclNum, bool padForBiggestDisp)
9991 {
9992     char buf[BUF_SIZE];
9993     int  charsPrinted = gtGetLclVarName(lclNum, buf, sizeof(buf) / sizeof(buf[0]));
9994
9995     if (charsPrinted < 0)
9996     {
9997         return;
9998     }
9999
10000     printf("%s", buf);
10001
10002     if (padForBiggestDisp && (charsPrinted < LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH))
10003     {
10004         printf("%*c", LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH - charsPrinted, ' ');
10005     }
10006 }
10007
10008 /*****************************************************************************/
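// gtDispConst: Print the value of a constant node along with any qualifiers:
// e.g. " null" for a zero TYP_REF, a decimal or hex integer value, a handle
// kind such as " class" or " method", and any field sequence annotation.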
10009 void Compiler::gtDispConst(GenTree* tree)
10010 {
10011     assert(tree->OperKind() & GTK_CONST);
10012
10013     switch (tree->gtOper)
10014     {
10015         case GT_CNS_INT:
10016             if (tree->IsIconHandle(GTF_ICON_STR_HDL))
10017             {
10018                 printf(" 0x%X \"%S\"", dspPtr(tree->gtIntCon.gtIconVal), eeGetCPString(tree->gtIntCon.gtIconVal));
10019             }
10020             else
10021             {
10022                 ssize_t dspIconVal = tree->IsIconHandle() ? dspPtr(tree->gtIntCon.gtIconVal) : tree->gtIntCon.gtIconVal;
10023
10024                 if (tree->TypeGet() == TYP_REF)
10025                 {
10026                     assert(tree->gtIntCon.gtIconVal == 0);
10027                     printf(" null");
10028                 }
10029                 else if ((tree->gtIntCon.gtIconVal > -1000) && (tree->gtIntCon.gtIconVal < 1000))
10030                 {
10031                     printf(" %ld", dspIconVal);
10032 #ifdef _TARGET_64BIT_
10033                 }
10034                 else if ((tree->gtIntCon.gtIconVal & 0xFFFFFFFF00000000LL) != 0)
10035                 {
10036                     printf(" 0x%llx", dspIconVal);
10037 #endif
10038                 }
10039                 else
10040                 {
10041                     printf(" 0x%X", dspIconVal);
10042                 }
10043
10044                 if (tree->IsIconHandle())
10045                 {
10046                     switch (tree->GetIconHandleFlag())
10047                     {
10048                         case GTF_ICON_SCOPE_HDL:
10049                             printf(" scope");
10050                             break;
10051                         case GTF_ICON_CLASS_HDL:
10052                             printf(" class");
10053                             break;
10054                         case GTF_ICON_METHOD_HDL:
10055                             printf(" method");
10056                             break;
10057                         case GTF_ICON_FIELD_HDL:
10058                             printf(" field");
10059                             break;
10060                         case GTF_ICON_STATIC_HDL:
10061                             printf(" static");
10062                             break;
10063                         case GTF_ICON_STR_HDL:
10064                             unreached(); // This case is handled above
10065                             break;
10066                         case GTF_ICON_PSTR_HDL:
10067                             printf(" pstr");
10068                             break;
10069                         case GTF_ICON_PTR_HDL:
10070                             printf(" ptr");
10071                             break;
10072                         case GTF_ICON_VARG_HDL:
10073                             printf(" vararg");
10074                             break;
10075                         case GTF_ICON_PINVKI_HDL:
10076                             printf(" pinvoke");
10077                             break;
10078                         case GTF_ICON_TOKEN_HDL:
10079                             printf(" token");
10080                             break;
10081                         case GTF_ICON_TLS_HDL:
10082                             printf(" tls");
10083                             break;
10084                         case GTF_ICON_FTN_ADDR:
10085                             printf(" ftn");
10086                             break;
10087                         case GTF_ICON_CIDMID_HDL:
10088                             printf(" cid");
10089                             break;
10090                         case GTF_ICON_BBC_PTR:
10091                             printf(" bbc");
10092                             break;
10093                         default:
10094                             printf(" UNKNOWN");
10095                             break;
10096                     }
10097                 }
10098
10099                 if ((tree->gtFlags & GTF_ICON_FIELD_OFF) != 0)
10100                 {
10101                     printf(" field offset");
10102                 }
10103
10104                 if (tree->IsReuseRegVal())
10105                 {
10106                     printf(" reuse reg val");
10107                 }
10108             }
10109
10110             gtDispFieldSeq(tree->gtIntCon.gtFieldSeq);
10111
10112             break;
10113
10114         case GT_CNS_LNG:
10115             printf(" 0x%016I64x", tree->gtLngCon.gtLconVal);
10116             break;
10117
10118         case GT_CNS_DBL:
10119             if (*((__int64*)&tree->gtDblCon.gtDconVal) == (__int64)I64(0x8000000000000000))
10120             {
10121                 printf(" -0.00000");
10122             }
10123             else
10124             {
10125                 printf(" %#.17g", tree->gtDblCon.gtDconVal);
10126             }
10127             break;
10128         case GT_CNS_STR:
10129             printf("<string constant>");
10130             break;
10131         default:
10132             assert(!"unexpected constant node");
10133     }
10134
10135     gtDispRegVal(tree);
10136 }
10137
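// gtDispFieldSeq: Print a field sequence annotation, if present, in the form
// " Fseq[<field>, <field>, ...]", where a field may also be a pseudo-field
// such as "#FirstElem" or "#ConstantIndex".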
10138 void Compiler::gtDispFieldSeq(FieldSeqNode* pfsn)
10139 {
10140     if (pfsn == FieldSeqStore::NotAField() || (pfsn == nullptr))
10141     {
10142         return;
10143     }
10144
10145     // Otherwise...
10146     printf(" Fseq[");
10147     while (pfsn != nullptr)
10148     {
10149         assert(pfsn != FieldSeqStore::NotAField()); // Can't exist in a field sequence list except alone
10150         CORINFO_FIELD_HANDLE fldHnd = pfsn->m_fieldHnd;
10151         // First check the "pseudo" field handles...
10152         if (fldHnd == FieldSeqStore::FirstElemPseudoField)
10153         {
10154             printf("#FirstElem");
10155         }
10156         else if (fldHnd == FieldSeqStore::ConstantIndexPseudoField)
10157         {
10158             printf("#ConstantIndex");
10159         }
10160         else
10161         {
10162             printf("%s", eeGetFieldName(fldHnd));
10163         }
10164         pfsn = pfsn->m_next;
10165         if (pfsn != nullptr)
10166         {
10167             printf(", ");
10168         }
10169     }
10170     printf("]");
10171 }
10172
10173 //------------------------------------------------------------------------
10174 // gtDispLeaf: Print a single leaf node to jitstdout.
10175 //
10176 // Arguments:
10177 //    tree - the tree to be printed
10178 //    indentStack - the specification for the current level of indentation & arcs
10179 //
10180 // Return Value:
10181 //    None.
10182 //
10183 // Notes:
10184 //    'indentStack' may be null, in which case no indentation or arcs are printed
10185
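// For example (illustrative), a tracked local var leaf prints its name
// (e.g. "V03 loc2") followed by SSA use/def info such as "d:2" and, once
// liveness is known, " (last use)"; a GT_CLS_VAR leaf prints its handle and
// field sequence instead.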
10186 void Compiler::gtDispLeaf(GenTree* tree, IndentStack* indentStack)
10187 {
10188     if (tree->OperKind() & GTK_CONST)
10189     {
10190         gtDispConst(tree);
10191         return;
10192     }
10193
10194     bool isLclFld = false;
10195
10196     switch (tree->gtOper)
10197     {
10198         unsigned   varNum;
10199         LclVarDsc* varDsc;
10200
10201         case GT_LCL_FLD:
10202         case GT_LCL_FLD_ADDR:
10203         case GT_STORE_LCL_FLD:
10204             isLclFld = true;
10205             __fallthrough;
10206
10207         case GT_PHI_ARG:
10208         case GT_LCL_VAR:
10209         case GT_LCL_VAR_ADDR:
10210         case GT_STORE_LCL_VAR:
10211             printf(" ");
10212             varNum = tree->gtLclVarCommon.gtLclNum;
10213             varDsc = &lvaTable[varNum];
10214             gtDispLclVar(varNum);
10215             if (tree->gtLclVarCommon.HasSsaName())
10216             {
10217                 if (tree->gtFlags & GTF_VAR_USEASG)
10218                 {
10219                     assert(tree->gtFlags & GTF_VAR_DEF);
10220                     printf("ud:%d->%d", tree->gtLclVarCommon.gtSsaNum, GetSsaNumForLocalVarDef(tree));
10221                 }
10222                 else
10223                 {
10224                     printf("%s:%d", (tree->gtFlags & GTF_VAR_DEF) ? "d" : "u", tree->gtLclVarCommon.gtSsaNum);
10225                 }
10226             }
10227
10228             if (isLclFld)
10229             {
10230                 printf("[+%u]", tree->gtLclFld.gtLclOffs);
10231                 gtDispFieldSeq(tree->gtLclFld.gtFieldSeq);
10232             }
10233
10234             if (varDsc->lvRegister)
10235             {
10236                 printf(" ");
10237                 varDsc->PrintVarReg();
10238             }
10239 #ifndef LEGACY_BACKEND
10240             else if (tree->InReg())
10241             {
10242 #if CPU_LONG_USES_REGPAIR
10243                 if (isRegPairType(tree->TypeGet()))
10244                     printf(" %s", compRegPairName(tree->gtRegPair));
10245                 else
10246 #endif
10247                     printf(" %s", compRegVarName(tree->gtRegNum));
10248             }
10249 #endif // !LEGACY_BACKEND
10250
10251             if (varDsc->lvPromoted)
10252             {
10253                 assert(varTypeIsPromotable(varDsc) || varDsc->lvUnusedStruct);
10254
10255                 CORINFO_CLASS_HANDLE typeHnd = varDsc->lvVerTypeInfo.GetClassHandle();
10256                 CORINFO_FIELD_HANDLE fldHnd;
10257
10258                 for (unsigned i = varDsc->lvFieldLclStart; i < varDsc->lvFieldLclStart + varDsc->lvFieldCnt; ++i)
10259                 {
10260                     LclVarDsc*  fieldVarDsc = &lvaTable[i];
10261                     const char* fieldName;
10262 #if !defined(_TARGET_64BIT_)
10263                     if (varTypeIsLong(varDsc))
10264                     {
10265                         fieldName = (i == 0) ? "lo" : "hi";
10266                     }
10267                     else
10268 #endif // !defined(_TARGET_64BIT_)
10269                     {
10270                         fldHnd    = info.compCompHnd->getFieldInClass(typeHnd, fieldVarDsc->lvFldOrdinal);
10271                         fieldName = eeGetFieldName(fldHnd);
10272                     }
10273
10274                     printf("\n");
10275                     printf("                                                  ");
10276                     printIndent(indentStack);
10277                     printf("    %-6s V%02u.%s (offs=0x%02x) -> ", varTypeName(fieldVarDsc->TypeGet()),
10278                            tree->gtLclVarCommon.gtLclNum, fieldName, fieldVarDsc->lvFldOffset);
10279                     gtDispLclVar(i);
10280
10281                     if (fieldVarDsc->lvRegister)
10282                     {
10283                         printf(" ");
10284                         fieldVarDsc->PrintVarReg();
10285                     }
10286
10287                     if (fieldVarDsc->lvTracked && fgLocalVarLivenessDone && // Includes local variable liveness
10288                         ((tree->gtFlags & GTF_VAR_DEATH) != 0))
10289                     {
10290                         printf(" (last use)");
10291                     }
10292                 }
10293             }
10294             else // a normal not-promoted lclvar
10295             {
10296                 if (varDsc->lvTracked && fgLocalVarLivenessDone && ((tree->gtFlags & GTF_VAR_DEATH) != 0))
10297                 {
10298                     printf(" (last use)");
10299                 }
10300             }
10301             break;
10302
10303         case GT_REG_VAR:
10304             printf(" ");
10305             gtDispLclVar(tree->gtRegVar.gtLclNum);
10306             if (isFloatRegType(tree->gtType))
10307             {
10308                 assert(tree->gtRegVar.gtRegNum == tree->gtRegNum);
10309                 printf(" FPV%u", tree->gtRegNum);
10310             }
10311             else
10312             {
10313                 printf(" %s", compRegVarName(tree->gtRegVar.gtRegNum));
10314             }
10315
10316             varNum = tree->gtRegVar.gtLclNum;
10317             varDsc = &lvaTable[varNum];
10318
10319             if (varDsc->lvTracked && fgLocalVarLivenessDone && ((tree->gtFlags & GTF_VAR_DEATH) != 0))
10320             {
10321                 printf(" (last use)");
10322             }
10323
10324             break;
10325
10326         case GT_JMP:
10327         {
10328             const char* methodName;
10329             const char* className;
10330
10331             methodName = eeGetMethodName((CORINFO_METHOD_HANDLE)tree->gtVal.gtVal1, &className);
10332             printf(" %s.%s\n", className, methodName);
10333         }
10334         break;
10335
10336         case GT_CLS_VAR:
10337             printf(" Hnd=%#x", dspPtr(tree->gtClsVar.gtClsVarHnd));
10338             gtDispFieldSeq(tree->gtClsVar.gtFieldSeq);
10339             break;
10340
10341         case GT_CLS_VAR_ADDR:
10342             printf(" Hnd=%#x", dspPtr(tree->gtClsVar.gtClsVarHnd));
10343             break;
10344
10345         case GT_LABEL:
10346             if (tree->gtLabel.gtLabBB)
10347             {
10348                 printf(" dst=BB%02u", tree->gtLabel.gtLabBB->bbNum);
10349             }
10350             else
10351             {
10352                 printf(" dst=<null>");
10353             }
10354
10355             break;
10356
10357         case GT_FTN_ADDR:
10358         {
10359             const char* methodName;
10360             const char* className;
10361
10362             methodName = eeGetMethodName((CORINFO_METHOD_HANDLE)tree->gtFptrVal.gtFptrMethod, &className);
10363             printf(" %s.%s\n", className, methodName);
10364         }
10365         break;
10366
10367 #if !FEATURE_EH_FUNCLETS
10368         case GT_END_LFIN:
10369             printf(" endNstLvl=%d", tree->gtVal.gtVal1);
10370             break;
10371 #endif // !FEATURE_EH_FUNCLETS
10372
10373         // Vanilla leaves. No qualifying information available, so do nothing.
10374
10375         case GT_NO_OP:
10376         case GT_START_NONGC:
10377         case GT_PROF_HOOK:
10378         case GT_CATCH_ARG:
10379         case GT_MEMORYBARRIER:
10380         case GT_ARGPLACE:
10381         case GT_PINVOKE_PROLOG:
10382 #ifndef LEGACY_BACKEND
10383         case GT_JMPTABLE:
10384 #endif // !LEGACY_BACKEND
10385             break;
10386
10387         case GT_RET_EXPR:
10388             printf("(inl return from call ");
10389             printTreeID(tree->gtRetExpr.gtInlineCandidate);
10390             printf(")");
10391             break;
10392
10393         case GT_PHYSREG:
10394             printf(" %s", getRegName(tree->gtPhysReg.gtSrcReg, varTypeIsFloating(tree)));
10395             break;
10396
10397         case GT_IL_OFFSET:
10398             printf(" IL offset: ");
10399             if (tree->gtStmt.gtStmtILoffsx == BAD_IL_OFFSET)
10400             {
10401                 printf("???");
10402             }
10403             else
10404             {
10405                 printf("%d", jitGetILoffs(tree->gtStmt.gtStmtILoffsx));
10406             }
10407             break;
10408
10409         default:
10410             assert(!"don't know how to display tree leaf node");
10411     }
10412
10413     gtDispRegVal(tree);
10414 }
10415
10416 //------------------------------------------------------------------------
10417 // gtDispChild: Print a child node to jitstdout.
10418 //
10419 // Arguments:
10420 //    tree - the tree to be printed
10421 //    indentStack - the specification for the current level of indentation & arcs
10422 //    arcType     - the type of arc to use for this child
10423 //    msg         - a contextual message (i.e. from the parent) to print
10424 //    topOnly     - when true, print only this node and not its children
10425 //
10426 // Return Value:
10427 //    None.
10428 //
10429 // Notes:
10430 //    'indentStack' may be null, in which case no indentation or arcs are printed
10431 //    'msg' has a default value of null
10432 //    'topOnly' is an optional argument that defaults to false
10433
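// Example (taken from gtDispTree below): a simple binary operator dumps its
// two children with matching top/bottom arcs:
//
//     gtDispChild(tree->gtOp.gtOp2, indentStack, IIArcTop, childMsg, topOnly);
//     ...
//     gtDispChild(tree->gtOp.gtOp1, indentStack, IIArcBottom, childMsg, topOnly);
//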
10434 void Compiler::gtDispChild(GenTreePtr           child,
10435                            IndentStack*         indentStack,
10436                            IndentInfo           arcType,
10437                            __in_opt const char* msg,     /* = nullptr  */
10438                            bool                 topOnly) /* = false */
10439 {
10441     indentStack->Push(arcType);
10442     gtDispTree(child, indentStack, msg, topOnly);
10443     indentStack->Pop();
10444 }
10445
10446 #ifdef FEATURE_SIMD
10447 // Intrinsic Id to name map
10448 extern const char* const simdIntrinsicNames[] = {
10449 #define SIMD_INTRINSIC(mname, inst, id, name, r, ac, arg1, arg2, arg3, t1, t2, t3, t4, t5, t6, t7, t8, t9, t10) name,
10450 #include "simdintrinsiclist.h"
10451 };
10452 #endif // FEATURE_SIMD
10453
10454 /*****************************************************************************/
10455
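//------------------------------------------------------------------------
// gtDispTree: Print a tree and (unless 'topOnly' is set) its children to
//    jitstdout.
//
// Notes:
//    Leaves, GT_PHI argument lists, simple unary/binary operators, and the
//    "special" operators (GT_FIELD, GT_CALL, GT_STMT, array and bounds-check
//    nodes, etc.) each get their own formatting below.
//    'indentStack' may be null, in which case a new one is allocated.
//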
10456 void Compiler::gtDispTree(GenTreePtr   tree,
10457                           IndentStack* indentStack,                 /* = nullptr */
10458                           __in __in_z __in_opt const char* msg,     /* = nullptr  */
10459                           bool                             topOnly, /* = false */
10460                           bool                             isLIR)   /* = false */
10461 {
10462     if (tree == nullptr)
10463     {
10464         printf(" [%08X] <NULL>\n", tree);
10465         printf(""); // null string means flush
10466         return;
10467     }
10468
10469     if (indentStack == nullptr)
10470     {
10471         indentStack = new (this, CMK_DebugOnly) IndentStack(this);
10472     }
10473
10474     if (IsUninitialized(tree))
10475     {
10476         /* Value used to initialize nodes */
10477         printf("Uninitialized tree node!");
10478         return;
10479     }
10480
10481     if (tree->gtOper >= GT_COUNT)
10482     {
10483         gtDispNode(tree, indentStack, msg, isLIR);
10484         printf("Bogus operator!");
10485         return;
10486     }
10487
10488     /* Is tree a leaf node? */
10489
10490     if (tree->OperIsLeaf() || tree->OperIsLocalStore()) // local stores used to be leaves
10491     {
10492         gtDispNode(tree, indentStack, msg, isLIR);
10493         gtDispLeaf(tree, indentStack);
10494         gtDispVN(tree);
10495         printf("\n");
10496         if (tree->OperIsLocalStore() && !topOnly)
10497         {
10498             gtDispChild(tree->gtOp.gtOp1, indentStack, IINone);
10499         }
10500         return;
10501     }
10502
10503     // Determine what kind of arc to propagate.
10504     IndentInfo myArc    = IINone;
10505     IndentInfo lowerArc = IINone;
10506     if (indentStack->Depth() > 0)
10507     {
10508         myArc = indentStack->Pop();
10509         switch (myArc)
10510         {
10511             case IIArcBottom:
10512                 indentStack->Push(IIArc);
10513                 lowerArc = IINone;
10514                 break;
10515             case IIArc:
10516                 indentStack->Push(IIArc);
10517                 lowerArc = IIArc;
10518                 break;
10519             case IIArcTop:
10520                 indentStack->Push(IINone);
10521                 lowerArc = IIArc;
10522                 break;
10523             case IIEmbedded:
10524                 indentStack->Push(IIEmbedded);
10525                 lowerArc = IIEmbedded;
10526                 break;
10527             default:
10528                 // Should never get here; just use IINone.
10529                 break;
10530         }
10531     }
10532
10533     // Special case formatting for PHI nodes -- arg lists like calls.
10534
10535     if (tree->OperGet() == GT_PHI)
10536     {
10537         gtDispNode(tree, indentStack, msg, isLIR);
10538         gtDispVN(tree);
10539         printf("\n");
10540
10541         if (!topOnly)
10542         {
10543             if (tree->gtOp.gtOp1 != nullptr)
10544             {
10545                 IndentInfo arcType = IIArcTop;
10546                 for (GenTreeArgList* args = tree->gtOp.gtOp1->AsArgList(); args != nullptr; args = args->Rest())
10547                 {
10548                     if (args->Rest() == nullptr)
10549                     {
10550                         arcType = IIArcBottom;
10551                     }
10552                     gtDispChild(args->Current(), indentStack, arcType);
10553                     arcType = IIArc;
10554                 }
10555             }
10556         }
10557         return;
10558     }
10559
10560     /* Is it a 'simple' unary/binary operator? */
10561
10562     const char* childMsg = nullptr;
10563
10564     if (tree->OperIsSimple())
10565     {
10566         if (!topOnly)
10567         {
10568             if (tree->gtGetOp2())
10569             {
10570                 // Label the childMsg of the GT_COLON operator;
10571                 // op2 is the 'then' part
10572
10573                 if (tree->gtOper == GT_COLON)
10574                 {
10575                     childMsg = "then";
10576                 }
10577                 gtDispChild(tree->gtOp.gtOp2, indentStack, IIArcTop, childMsg, topOnly);
10578             }
10579         }
10580
10581         // Now, get the right type of arc for this node
10582         if (myArc != IINone)
10583         {
10584             indentStack->Pop();
10585             indentStack->Push(myArc);
10586         }
10587
10588         gtDispNode(tree, indentStack, msg, isLIR);
10589
10590         // Propagate lowerArc to the lower children.
10591         if (indentStack->Depth() > 0)
10592         {
10593             (void)indentStack->Pop();
10594             indentStack->Push(lowerArc);
10595         }
10596
10597         if (tree->gtOper == GT_CAST)
10598         {
10599             /* Format a message that explains the effect of this GT_CAST */
10600
10601             var_types fromType  = genActualType(tree->gtCast.CastOp()->TypeGet());
10602             var_types toType    = tree->CastToType();
10603             var_types finalType = tree->TypeGet();
10604
10605             /* if GTF_UNSIGNED is set then force fromType to an unsigned type */
10606             if (tree->gtFlags & GTF_UNSIGNED)
10607             {
10608                 fromType = genUnsignedType(fromType);
10609             }
10610
10611             if (finalType != toType)
10612             {
10613                 printf(" %s <-", varTypeName(finalType));
10614             }
10615
10616             printf(" %s <- %s", varTypeName(toType), varTypeName(fromType));
10617         }
10618
10619         if (tree->gtOper == GT_OBJ && (tree->gtFlags & GTF_VAR_DEATH))
10620         {
10621             printf(" (last use)");
10622         }
10623
10624         IndirectAssignmentAnnotation* pIndirAnnote;
10625         if (tree->gtOper == GT_ASG && GetIndirAssignMap()->Lookup(tree, &pIndirAnnote))
10626         {
10627             printf("  indir assign of V%02d:", pIndirAnnote->m_lclNum);
10628             if (pIndirAnnote->m_isEntire)
10629             {
10630                 printf("d:%d", pIndirAnnote->m_defSsaNum);
10631             }
10632             else
10633             {
10634                 printf("ud:%d->%d", pIndirAnnote->m_useSsaNum, pIndirAnnote->m_defSsaNum);
10635             }
10636         }
10637
10638         if (tree->gtOper == GT_INTRINSIC)
10639         {
10640             switch (tree->gtIntrinsic.gtIntrinsicId)
10641             {
10642                 case CORINFO_INTRINSIC_Sin:
10643                     printf(" sin");
10644                     break;
10645                 case CORINFO_INTRINSIC_Cos:
10646                     printf(" cos");
10647                     break;
10648                 case CORINFO_INTRINSIC_Sqrt:
10649                     printf(" sqrt");
10650                     break;
10651                 case CORINFO_INTRINSIC_Abs:
10652                     printf(" abs");
10653                     break;
10654                 case CORINFO_INTRINSIC_Round:
10655                     printf(" round");
10656                     break;
10657                 case CORINFO_INTRINSIC_Cosh:
10658                     printf(" cosh");
10659                     break;
10660                 case CORINFO_INTRINSIC_Sinh:
10661                     printf(" sinh");
10662                     break;
10663                 case CORINFO_INTRINSIC_Tan:
10664                     printf(" tan");
10665                     break;
10666                 case CORINFO_INTRINSIC_Tanh:
10667                     printf(" tanh");
10668                     break;
10669                 case CORINFO_INTRINSIC_Asin:
10670                     printf(" asin");
10671                     break;
10672                 case CORINFO_INTRINSIC_Acos:
10673                     printf(" acos");
10674                     break;
10675                 case CORINFO_INTRINSIC_Atan:
10676                     printf(" atan");
10677                     break;
10678                 case CORINFO_INTRINSIC_Atan2:
10679                     printf(" atan2");
10680                     break;
10681                 case CORINFO_INTRINSIC_Log10:
10682                     printf(" log10");
10683                     break;
10684                 case CORINFO_INTRINSIC_Pow:
10685                     printf(" pow");
10686                     break;
10687                 case CORINFO_INTRINSIC_Exp:
10688                     printf(" exp");
10689                     break;
10690                 case CORINFO_INTRINSIC_Ceiling:
10691                     printf(" ceiling");
10692                     break;
10693                 case CORINFO_INTRINSIC_Floor:
10694                     printf(" floor");
10695                     break;
10696                 case CORINFO_INTRINSIC_Object_GetType:
10697                     printf(" objGetType");
10698                     break;
10699
10700                 default:
10701                     unreached();
10702             }
10703         }
10704
10705 #ifdef FEATURE_SIMD
10706         if (tree->gtOper == GT_SIMD)
10707         {
10708             printf(" %s %s", varTypeName(tree->gtSIMD.gtSIMDBaseType),
10709                    simdIntrinsicNames[tree->gtSIMD.gtSIMDIntrinsicID]);
10710         }
10711 #endif // FEATURE_SIMD
10712
10713         gtDispRegVal(tree);
10714         gtDispVN(tree);
10715         printf("\n");
10716
10717         if (!topOnly && tree->gtOp.gtOp1)
10718         {
10719
10720             // Label the child of the GT_COLON operator
10721             // op1 is the else part
10722
10723             if (tree->gtOper == GT_COLON)
10724             {
10725                 childMsg = "else";
10726             }
10727             else if (tree->gtOper == GT_QMARK)
10728             {
10729                 childMsg = "   if";
10730             }
10731             gtDispChild(tree->gtOp.gtOp1, indentStack, IIArcBottom, childMsg, topOnly);
10732         }
10733
10734         return;
10735     }
10736
10737     // Now, get the right type of arc for this node
10738     if (myArc != IINone)
10739     {
10740         indentStack->Pop();
10741         indentStack->Push(myArc);
10742     }
10743     gtDispNode(tree, indentStack, msg, isLIR);
10744
10745     // Propagate lowerArc to the lower children.
10746     if (indentStack->Depth() > 0)
10747     {
10748         (void)indentStack->Pop();
10749         indentStack->Push(lowerArc);
10750     }
10751
10752     // See what kind of a special operator we have here, and handle its special children.
10753
10754     switch (tree->gtOper)
10755     {
10756         case GT_FIELD:
10757             printf(" %s", eeGetFieldName(tree->gtField.gtFldHnd));
10758
10759             if (tree->gtField.gtFldObj && !topOnly)
10760             {
10761                 gtDispVN(tree);
10762                 printf("\n");
10763                 gtDispChild(tree->gtField.gtFldObj, indentStack, IIArcBottom);
10764             }
10765             else
10766             {
10767                 gtDispRegVal(tree);
10768                 gtDispVN(tree);
10769                 printf("\n");
10770             }
10771             break;
10772
10773         case GT_CALL:
10774         {
10775             assert(tree->gtFlags & GTF_CALL);
10776             unsigned numChildren = tree->NumChildren();
10777             GenTree* lastChild   = nullptr;
10778             if (numChildren != 0)
10779             {
10780                 lastChild = tree->GetChild(numChildren - 1);
10781             }
10782
10783             if (tree->gtCall.gtCallType != CT_INDIRECT)
10784             {
10785                 const char* methodName;
10786                 const char* className;
10787
10788                 methodName = eeGetMethodName(tree->gtCall.gtCallMethHnd, &className);
10789
10790                 printf(" %s.%s", className, methodName);
10791             }
10792
10793             if ((tree->gtFlags & GTF_CALL_UNMANAGED) && (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_FRAME_VAR_DEATH))
10794             {
10795                 printf(" (FramesRoot last use)");
10796             }
10797
10798             if (((tree->gtFlags & GTF_CALL_INLINE_CANDIDATE) != 0) && (tree->gtCall.gtInlineCandidateInfo != nullptr) &&
10799                 (tree->gtCall.gtInlineCandidateInfo->exactContextHnd != nullptr))
10800             {
10801                 printf(" (exactContextHnd=0x%p)", dspPtr(tree->gtCall.gtInlineCandidateInfo->exactContextHnd));
10802             }
10803
10804             gtDispVN(tree);
10805             if (tree->IsMultiRegCall())
10806             {
10807                 gtDispRegVal(tree);
10808             }
10809             printf("\n");
10810
10811             if (!topOnly)
10812             {
10813                 char  buf[64];
10814                 char* bufp;
10815
10816                 bufp = &buf[0];
10817
10818                 if ((tree->gtCall.gtCallObjp != nullptr) && (tree->gtCall.gtCallObjp->gtOper != GT_NOP) &&
10819                     (!tree->gtCall.gtCallObjp->IsArgPlaceHolderNode()))
10820                 {
10821                     if (tree->gtCall.gtCallObjp->gtOper == GT_ASG)
10822                     {
10823                         sprintf_s(bufp, sizeof(buf), "this SETUP%c", 0);
10824                     }
10825                     else
10826                     {
10827                         sprintf_s(bufp, sizeof(buf), "this in %s%c", compRegVarName(REG_ARG_0), 0);
10828                     }
10829                     gtDispChild(tree->gtCall.gtCallObjp, indentStack,
10830                                 (tree->gtCall.gtCallObjp == lastChild) ? IIArcBottom : IIArc, bufp, topOnly);
10831                 }
10832
10833                 if (tree->gtCall.gtCallArgs)
10834                 {
10835                     gtDispArgList(tree, indentStack);
10836                 }
10837
10838                 if (tree->gtCall.gtCallType == CT_INDIRECT)
10839                 {
10840                     gtDispChild(tree->gtCall.gtCallAddr, indentStack,
10841                                 (tree->gtCall.gtCallAddr == lastChild) ? IIArcBottom : IIArc, "calli tgt", topOnly);
10842                 }
10843
10844                 if (tree->gtCall.gtControlExpr != nullptr)
10845                 {
10846                     gtDispChild(tree->gtCall.gtControlExpr, indentStack,
10847                                 (tree->gtCall.gtControlExpr == lastChild) ? IIArcBottom : IIArc, "control expr",
10848                                 topOnly);
10849                 }
10850
10851 #if !FEATURE_FIXED_OUT_ARGS
10852                 regList list = tree->gtCall.regArgList;
10853 #endif
10854                 /* process the late argument list */
10855                 int lateArgIndex = 0;
10856                 for (GenTreeArgList* lateArgs = tree->gtCall.gtCallLateArgs; lateArgs;
10857                      (lateArgIndex++, lateArgs = lateArgs->Rest()))
10858                 {
10859                     GenTreePtr argx;
10860
10861                     argx = lateArgs->Current();
10862
10863                     IndentInfo arcType = (lateArgs->Rest() == nullptr) ? IIArcBottom : IIArc;
10864                     gtGetLateArgMsg(tree, argx, lateArgIndex, -1, bufp, sizeof(buf));
10865                     gtDispChild(argx, indentStack, arcType, bufp, topOnly);
10866                 }
10867             }
10868         }
10869         break;
10870
10871         case GT_STMT:
10872             printf("\n");
10873
10874             if (!topOnly)
10875             {
10876                 gtDispChild(tree->gtStmt.gtStmtExpr, indentStack, IIArcBottom);
10877             }
10878             break;
10879
10880         case GT_ARR_ELEM:
10881             gtDispVN(tree);
10882             printf("\n");
10883
10884             if (!topOnly)
10885             {
10886                 gtDispChild(tree->gtArrElem.gtArrObj, indentStack, IIArc, nullptr, topOnly);
10887
10888                 unsigned dim;
10889                 for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
10890                 {
10891                     IndentInfo arcType = ((dim + 1) == tree->gtArrElem.gtArrRank) ? IIArcBottom : IIArc;
10892                     gtDispChild(tree->gtArrElem.gtArrInds[dim], indentStack, arcType, nullptr, topOnly);
10893                 }
10894             }
10895             break;
10896
10897         case GT_ARR_OFFSET:
10898             gtDispVN(tree);
10899             printf("\n");
10900             if (!topOnly)
10901             {
10902                 gtDispChild(tree->gtArrOffs.gtOffset, indentStack, IIArc, nullptr, topOnly);
10903                 gtDispChild(tree->gtArrOffs.gtIndex, indentStack, IIArc, nullptr, topOnly);
10904                 gtDispChild(tree->gtArrOffs.gtArrObj, indentStack, IIArcBottom, nullptr, topOnly);
10905             }
10906             break;
10907
10908         case GT_CMPXCHG:
10909             gtDispVN(tree);
10910             printf("\n");
10911             if (!topOnly)
10912             {
10913                 gtDispChild(tree->gtCmpXchg.gtOpLocation, indentStack, IIArc, nullptr, topOnly);
10914                 gtDispChild(tree->gtCmpXchg.gtOpValue, indentStack, IIArc, nullptr, topOnly);
10915                 gtDispChild(tree->gtCmpXchg.gtOpComparand, indentStack, IIArcBottom, nullptr, topOnly);
10916             }
10917             break;
10918
10919         case GT_ARR_BOUNDS_CHECK:
10920 #ifdef FEATURE_SIMD
10921         case GT_SIMD_CHK:
10922 #endif // FEATURE_SIMD
10923             gtDispVN(tree);
10924             printf("\n");
10925             if (!topOnly)
10926             {
10927                 gtDispChild(tree->gtBoundsChk.gtArrLen, indentStack, IIArc, nullptr, topOnly);
10928                 gtDispChild(tree->gtBoundsChk.gtIndex, indentStack, IIArcBottom, nullptr, topOnly);
10929             }
10930             break;
10931
10932         default:
10933             printf("<DON'T KNOW HOW TO DISPLAY THIS NODE> :");
10934             printf(""); // null string means flush
10935             break;
10936     }
10937 }
10938
10939 //------------------------------------------------------------------------
10940 // gtGetArgMsg: Construct a message about the given argument
10941 //
10942 // Arguments:
10943 //    call      - The call for which 'arg' is an argument
10944 //    arg       - The argument for which a message should be constructed
10945 //    argNum    - The ordinal number of the arg in the argument list
10946 //    listCount - When printing in LIR form this is the count for a multireg GT_LIST
10947 //                or -1 if we are not printing in LIR form
10948 //    bufp      - A pointer to the buffer into which the message is written
10949 //    bufLength - The length of the buffer pointed to by bufp
10950 //
10951 // Return Value:
10952 //    No return value, but bufp is written.
10953 //
10954 // Assumptions:
10955 //    'call' must be a call node
10956 //    'arg' must be an argument to 'call' (else gtArgEntryByNode will assert)
10957
10958 void Compiler::gtGetArgMsg(
10959     GenTreePtr call, GenTreePtr arg, unsigned argNum, int listCount, char* bufp, unsigned bufLength)
10960 {
10961     if (call->gtCall.gtCallLateArgs != nullptr)
10962     {
10963         fgArgTabEntryPtr curArgTabEntry = gtArgEntryByArgNum(call, argNum);
10964         assert(curArgTabEntry);
10965
10966         if (arg->gtFlags & GTF_LATE_ARG)
10967         {
10968             sprintf_s(bufp, bufLength, "arg%d SETUP%c", argNum, 0);
10969         }
10970         else
10971         {
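            // With a fixed outgoing-arg area the message reports the outgoing stack offset of the
            // slot being written ((slotNum + listCount) * TARGET_POINTER_SIZE for the elements of a
            // multireg GT_LIST, slotNum * TARGET_POINTER_SIZE otherwise); without one the arg is
            // simply reported as "on STK".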
10972 #if FEATURE_FIXED_OUT_ARGS
10973             if (listCount == -1)
10974             {
10975                 sprintf_s(bufp, bufLength, "arg%d out+%02x%c", argNum, curArgTabEntry->slotNum * TARGET_POINTER_SIZE,
10976                           0);
10977             }
10978             else // listCount is 0,1,2 or 3
10979             {
10980                 assert(listCount <= MAX_ARG_REG_COUNT);
10981                 sprintf_s(bufp, bufLength, "arg%d out+%02x%c", argNum,
10982                           (curArgTabEntry->slotNum + listCount) * TARGET_POINTER_SIZE, 0);
10983             }
10984 #else
10985             sprintf_s(bufp, bufLength, "arg%d on STK%c", argNum, 0);
10986 #endif
10987         }
10988     }
10989     else
10990     {
10991         sprintf_s(bufp, bufLength, "arg%d%c", argNum, 0);
10992     }
10993 }
10994
10995 //------------------------------------------------------------------------
10996 // gtGetLateArgMsg: Construct a message about the given argument
10997 //
10998 // Arguments:
10999 //    call         - The call for which 'arg' is an argument
11000 //    argx         - The argument for which a message should be constructed
11001 //    lateArgIndex - The ordinal number of the arg in the late argument list

11002 //    listCount    - When printing in LIR form this is the count for a multireg GT_LIST
11003 //                   or -1 if we are not printing in LIR form
11004 //    bufp         - A pointer to the buffer into which the message is written
11005 //    bufLength    - The length of the buffer pointed to by bufp
11006 //
11007 // Return Value:
11008 //    No return value, but bufp is written.
11009 //
11010 // Assumptions:
11011 //    'call' must be a call node
11012 //    'arg' must be an argument to 'call' (else gtArgEntryByNode will assert)
11013
11014 void Compiler::gtGetLateArgMsg(
11015     GenTreePtr call, GenTreePtr argx, int lateArgIndex, int listCount, char* bufp, unsigned bufLength)
11016 {
11017     assert(!argx->IsArgPlaceHolderNode()); // No placeholder nodes are in gtCallLateArgs
11018
11019     fgArgTabEntryPtr curArgTabEntry = gtArgEntryByLateArgIndex(call, lateArgIndex);
11020     assert(curArgTabEntry);
11021     regNumber argReg = curArgTabEntry->regNum;
11022
11023 #if !FEATURE_FIXED_OUT_ARGS
11024     assert(lateArgIndex < call->gtCall.regArgListCount);
11025     assert(argReg == call->gtCall.regArgList[lateArgIndex]);
11026 #else
11027     if (argReg == REG_STK)
11028     {
11029         sprintf_s(bufp, bufLength, "arg%d in out+%02x%c", curArgTabEntry->argNum,
11030                   curArgTabEntry->slotNum * TARGET_POINTER_SIZE, 0);
11031     }
11032     else
11033 #endif
11034     {
11035         if (gtArgIsThisPtr(curArgTabEntry))
11036         {
11037             sprintf_s(bufp, bufLength, "this in %s%c", compRegVarName(argReg), 0);
11038         }
11039         else
11040         {
11041 #if FEATURE_MULTIREG_ARGS
11042             if (curArgTabEntry->numRegs >= 2)
11043             {
11044                 regNumber otherRegNum;
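                // Determine the last register used by this multi-reg argument: with Unix AMD64
                // struct passing the second register is recorded explicitly in the arg table entry,
                // while on other targets a multi-reg arg occupies consecutive registers, so it is
                // derived from the first register and the register count.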
11045 #if defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
11046                 assert(curArgTabEntry->numRegs == 2);
11047                 otherRegNum = curArgTabEntry->otherRegNum;
11048 #else
11049                 otherRegNum = (regNumber)(((unsigned)curArgTabEntry->regNum) + curArgTabEntry->numRegs - 1);
11050 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
11051
11052                 if (listCount == -1)
11053                 {
11054                     char separator = (curArgTabEntry->numRegs == 2) ? ',' : '-';
11055
11056                     sprintf_s(bufp, bufLength, "arg%d %s%c%s%c", curArgTabEntry->argNum, compRegVarName(argReg),
11057                               separator, compRegVarName(otherRegNum), 0);
11058                 }
11059                 else // listCount is 0,1,2 or 3
11060                 {
11061                     assert(listCount <= MAX_ARG_REG_COUNT);
11062                     regNumber curReg = (listCount == 1) ? otherRegNum : (regNumber)((unsigned)(argReg) + listCount);
11063                     sprintf_s(bufp, bufLength, "arg%d m%d %s%c", curArgTabEntry->argNum, listCount,
11064                               compRegVarName(curReg), 0);
11065                 }
11066             }
11067             else
11068 #endif
11069             {
11070                 sprintf_s(bufp, bufLength, "arg%d in %s%c", curArgTabEntry->argNum, compRegVarName(argReg), 0);
11071             }
11072         }
11073     }
11074 }
11075
11076 //------------------------------------------------------------------------
11077 // gtDispArgList: Dump the tree for a call arg list
11078 //
11079 // Arguments:
11080 //    tree         - The call whose arguments are to be dumped
11081 //    indentStack  - the specification for the current level of indentation & arcs
11082 //
11083 // Return Value:
11084 //    None.
11085 //
11086 // Assumptions:
11087 //    'tree' must be a call node
11088
11089 void Compiler::gtDispArgList(GenTreePtr tree, IndentStack* indentStack)
11090 {
11091     GenTree*  args      = tree->gtCall.gtCallArgs;
11092     unsigned  argnum    = 0;
11093     const int BufLength = 256;
11094     char      buf[BufLength];
11095     char*     bufp        = &buf[0];
11096     unsigned  numChildren = tree->NumChildren();
11097     assert(numChildren != 0);
11098     bool argListIsLastChild = (args == tree->GetChild(numChildren - 1));
11099
11100     IndentInfo arcType = IIArc;
11101     if (tree->gtCall.gtCallObjp != nullptr)
11102     {
11103         argnum++;
11104     }
11105
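    // Walk the GT_LIST chain of early args. Nothing nodes and arg placeholder nodes are skipped;
    // the bottom arc is used only for the final list element, and only when the arg list is the
    // call's last child.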
11106     while (args != nullptr)
11107     {
11108         assert(args->gtOper == GT_LIST);
11109         GenTree* arg = args->gtOp.gtOp1;
11110         if (!arg->IsNothingNode() && !arg->IsArgPlaceHolderNode())
11111         {
11112             gtGetArgMsg(tree, arg, argnum, -1, bufp, BufLength);
11113             if (argListIsLastChild && (args->gtOp.gtOp2 == nullptr))
11114             {
11115                 arcType = IIArcBottom;
11116             }
11117             gtDispChild(arg, indentStack, arcType, bufp, false);
11118         }
11119         args = args->gtOp.gtOp2;
11120         argnum++;
11121     }
11122 }
11123
11124 //------------------------------------------------------------------------
11125 // gtDispTreeList: Dump a list of trees linked via their gtNext pointers
11126 //
11127 // Arguments:
11128 //    tree         - The first tree in the gtNext-linked list to be dumped
11129 //    indentStack  - the specification for the current level of indentation & arcs
11130 //
11131 // Return Value:
11132 //    None.
11133 //
11134 // Assumptions:
11135 //    'tree' heads a list of trees linked via their gtNext pointers (e.g. a statement list)
11136
11137 void Compiler::gtDispTreeList(GenTreePtr tree, IndentStack* indentStack /* = nullptr */)
11138 {
11139     for (/*--*/; tree != nullptr; tree = tree->gtNext)
11140     {
11141         gtDispTree(tree, indentStack);
11142         printf("\n");
11143     }
11144 }
11145
11146 //------------------------------------------------------------------------
11147 // Compiler::gtDispRange: dumps a range of LIR.
11148 //
11149 // Arguments:
11150 //    range - the range of LIR to display.
11151 //
11152 void Compiler::gtDispRange(LIR::ReadOnlyRange const& range)
11153 {
11154     for (GenTree* node : range)
11155     {
11156         gtDispLIRNode(node);
11157     }
11158 }
11159
11160 //------------------------------------------------------------------------
11161 // Compiler::gtDispTreeRange: dumps the LIR range that contains all of the
11162 //                            nodes in the dataflow tree rooted at a given
11163 //                            node.
11164 //
11165 // Arguments:
11166 //    containingRange - the LIR range that contains the root node.
11167 //    tree - the root of the dataflow tree.
11168 //
11169 void Compiler::gtDispTreeRange(LIR::Range& containingRange, GenTree* tree)
11170 {
11171     bool unused;
11172     gtDispRange(containingRange.GetTreeRange(tree, &unused));
11173 }
11174
11175 //------------------------------------------------------------------------
11176 // Compiler::gtDispLIRNode: dumps a single LIR node.
11177 //
11178 // Arguments:
11179 //    node - the LIR node to dump.
11180 //
11181 void Compiler::gtDispLIRNode(GenTree* node)
11182 {
11183     auto displayOperand = [](GenTree* operand, const char* message, IndentInfo operandArc, IndentStack& indentStack)
11184     {
11185         assert(operand != nullptr);
11186         assert(message != nullptr);
11187
11188         // 49 spaces for alignment
11189         printf("%-49s", "");
11190
11191         indentStack.Push(operandArc);
11192         indentStack.print();
11193         indentStack.Pop();
11194         operandArc = IIArc;
11195
11196         printf("  t%-5d %-6s %s\n", operand->gtTreeID, varTypeName(operand->TypeGet()), message);
11197
11198     };
11199
11200     IndentStack indentStack(this);
11201
11202     const int bufLength = 256;
11203     char      buf[bufLength];
11204
11205     const bool nodeIsCall = node->IsCall();
11206
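    // Count the call's "early" arguments that will actually appear as operands (real values,
    // not placeholders); operands beyond this count are labeled as late args below.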
11207     int numCallEarlyArgs = 0;
11208     if (nodeIsCall)
11209     {
11210         GenTreeCall* call = node->AsCall();
11211         for (GenTreeArgList* args = call->gtCallArgs; args != nullptr; args = args->Rest())
11212         {
11213             if (!args->Current()->IsArgPlaceHolderNode() && args->Current()->IsValue())
11214             {
11215                 numCallEarlyArgs++;
11216             }
11217         }
11218     }
11219
11220     // Visit operands
11221     IndentInfo operandArc         = IIArcTop;
11222     int        callArgNumber      = 0;
11223     for (GenTree* operand : node->Operands())
11224     {
11225         if (operand->IsArgPlaceHolderNode() || !operand->IsValue())
11226         {
11227             // Either of these situations may happen with calls.
11228             continue;
11229         }
11230
11231         if (nodeIsCall)
11232         {
11233             GenTreeCall* call = node->AsCall();
11234             if (operand == call->gtCallObjp)
11235             {
11236                 sprintf_s(buf, sizeof(buf), "this in %s", compRegVarName(REG_ARG_0));
11237                 displayOperand(operand, buf, operandArc, indentStack);
11238             }
11239             else if (operand == call->gtCallAddr)
11240             {
11241                 displayOperand(operand, "calli tgt", operandArc, indentStack);
11242             }
11243             else if (operand == call->gtControlExpr)
11244             {
11245                 displayOperand(operand, "control expr", operandArc, indentStack);
11246             }
11247             else if (operand == call->gtCallCookie)
11248             {
11249                 displayOperand(operand, "cookie", operandArc, indentStack);
11250             }
11251             else
11252             {
11253                 int callLateArgNumber = callArgNumber - numCallEarlyArgs;
11254                 if (operand->OperGet() == GT_LIST)
11255                 {
11256                     int listIndex = 0;
11257                     for (GenTreeArgList* element = operand->AsArgList(); element != nullptr; element = element->Rest())
11258                     {
11259                         operand = element->Current();
11260                         if (callLateArgNumber < 0)
11261                         {
11262                             gtGetArgMsg(call, operand, callArgNumber, listIndex, buf, sizeof(buf));
11263                         }
11264                         else
11265                         {
11266                             gtGetLateArgMsg(call, operand, callLateArgNumber, listIndex, buf, sizeof(buf));
11267                         }
11268
11269                         displayOperand(operand, buf, operandArc, indentStack);
11270                         operandArc = IIArc;
11271                     }
11272                 }
11273                 else
11274                 {
11275                     if (callLateArgNumber < 0)
11276                     {
11277                         gtGetArgMsg(call, operand, callArgNumber, -1, buf, sizeof(buf));
11278                     }
11279                     else
11280                     {
11281                         gtGetLateArgMsg(call, operand, callLateArgNumber, -1, buf, sizeof(buf));
11282                     }
11283
11284                     displayOperand(operand, buf, operandArc, indentStack);
11285                 }
11286
11287                 callArgNumber++;
11288             }
11289         }
11290         else
11291         {
11292             displayOperand(operand, "", operandArc, indentStack);
11293         }
11294
11295         operandArc = IIArc;
11296     }
11297
11298     // Visit the operator
11299     const bool topOnly = true;
11300     const bool isLIR   = true;
11301     gtDispTree(node, &indentStack, nullptr, topOnly, isLIR);
11302
11303     printf("\n");
11304 }
11305
11306 /*****************************************************************************/
11307 #endif // DEBUG
11308
11309 /*****************************************************************************
11310  *
11311  *  Check if the given node can be folded,
11312  *  and call the methods to perform the folding
11313  */
11314
11315 GenTreePtr Compiler::gtFoldExpr(GenTreePtr tree)
11316 {
11317     unsigned kind = tree->OperKind();
11318
11319     /* We must have a simple operation to fold */
11320
11321     // If we're in the CSE phase, it's not safe to perform tree
11322     // folding, since doing so could potentially change the set of
11323     // trees considered as CSE candidates.
11324     if (optValnumCSE_phase)
11325     {
11326         return tree;
11327     }
11328
11329     if (!(kind & GTK_SMPOP))
11330     {
11331         return tree;
11332     }
11333
11334     GenTreePtr op1 = tree->gtOp.gtOp1;
11335
11336     /* Filter out non-foldable trees that can have constant children */
11337
11338     assert(kind & (GTK_UNOP | GTK_BINOP));
11339     switch (tree->gtOper)
11340     {
11341         case GT_RETFILT:
11342         case GT_RETURN:
11343         case GT_IND:
11344             return tree;
11345         default:
11346             break;
11347     }
11348
11349     /* try to fold the current node */
11350
11351     if ((kind & GTK_UNOP) && op1)
11352     {
11353         if (op1->OperKind() & GTK_CONST)
11354         {
11355             return gtFoldExprConst(tree);
11356         }
11357     }
11358     else if ((kind & GTK_BINOP) && op1 && tree->gtOp.gtOp2 &&
11359              // Don't take out conditionals for debugging
11360              !((opts.compDbgCode || opts.MinOpts()) && tree->OperIsCompare()))
11361     {
11362         GenTreePtr op2 = tree->gtOp.gtOp2;
11363
11364         // The atomic operations are exempted here because they are never computable statically;
11365         // one of their arguments is an address.
11366         if (((op1->OperKind() & op2->OperKind()) & GTK_CONST) && !tree->OperIsAtomicOp())
11367         {
11368             /* both nodes are constants - fold the expression */
11369             return gtFoldExprConst(tree);
11370         }
11371         else if ((op1->OperKind() | op2->OperKind()) & GTK_CONST)
11372         {
11373             /* at least one is a constant - see if we have a
11374              * special operator that can use only one constant
11375              * to fold - e.g. booleans */
11376
11377             return gtFoldExprSpecial(tree);
11378         }
11379         else if (tree->OperIsCompare())
11380         {
11381             /* comparisons of two local variables can sometimes be folded */
11382
11383             return gtFoldExprCompare(tree);
11384         }
11385         else if (op2->OperGet() == GT_COLON)
11386         {
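            // A GT_QMARK whose GT_COLON has identical 'then' and 'else' trees can be folded to
            // that tree: the condition's value no longer matters, but any side effects of the
            // condition (op1) must be preserved, which is done below via a GT_COMMA.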
11387             assert(tree->OperGet() == GT_QMARK);
11388
11389             GenTreePtr colon_op1 = op2->gtOp.gtOp1;
11390             GenTreePtr colon_op2 = op2->gtOp.gtOp2;
11391
11392             if (gtCompareTree(colon_op1, colon_op2))
11393             {
11394                 // Both sides of the GT_COLON are the same tree
11395
11396                 GenTreePtr sideEffList = nullptr;
11397                 gtExtractSideEffList(op1, &sideEffList);
11398
11399                 fgUpdateRefCntForExtract(op1, sideEffList);   // Decrement refcounts for op1, Keeping any side-effects
11400                 fgUpdateRefCntForExtract(colon_op1, nullptr); // Decrement refcounts for colon_op1
11401
11402                 // Clear colon flags only if the qmark itself is not conditionally executed
11403                 if ((tree->gtFlags & GTF_COLON_COND) == 0)
11404                 {
11405                     fgWalkTreePre(&colon_op2, gtClearColonCond);
11406                 }
11407
11408                 if (sideEffList == nullptr)
11409                 {
11410                     // No side-effects, just return colon_op2
11411                     return colon_op2;
11412                 }
11413                 else
11414                 {
11415 #ifdef DEBUG
11416                     if (verbose)
11417                     {
11418                         printf("\nIdentical GT_COLON trees with side effects! Extracting side effects...\n");
11419                         gtDispTree(sideEffList);
11420                         printf("\n");
11421                     }
11422 #endif
11423                     // Change the GT_COLON into a GT_COMMA node with the side-effects
11424                     op2->ChangeOper(GT_COMMA);
11425                     op2->gtFlags |= (sideEffList->gtFlags & GTF_ALL_EFFECT);
11426                     op2->gtOp.gtOp1 = sideEffList;
11427                     return op2;
11428                 }
11429             }
11430         }
11431     }
11432
11433     /* Return the original node (folded/bashed or not) */
11434
11435     return tree;
11436 }
11437
11438 /*****************************************************************************
11439  *
11440  *  Some comparisons can be folded:
11441  *
11442  *    locA        == locA
11443  *    classVarA   == classVarA
11444  *    locA + locB == locB + locA
11445  *
11446  */
11447
11448 GenTreePtr Compiler::gtFoldExprCompare(GenTreePtr tree)
11449 {
11450     GenTreePtr op1 = tree->gtOp.gtOp1;
11451     GenTreePtr op2 = tree->gtOp.gtOp2;
11452
11453     assert(tree->OperIsCompare());
11454
11455     /* Filter out cases that cannot be folded here */
11456
11457     /* Do not fold floats or doubles (e.g. NaN != NaN) */
11458
11459     if (varTypeIsFloating(op1->TypeGet()))
11460     {
11461         return tree;
11462     }
11463
11464     /* Currently we can only fold when the two subtrees exactly match */
11465
11466     if ((tree->gtFlags & GTF_SIDE_EFFECT) || GenTree::Compare(op1, op2, true) == false)
11467     {
11468         return tree; /* return unfolded tree */
11469     }
11470
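    /* The operands are identical and side-effect free, so the result is known statically:
       x == x, x <= x and x >= x are true, while x != x, x < x and x > x are false */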
11471     GenTreePtr cons;
11472
11473     switch (tree->gtOper)
11474     {
11475         case GT_EQ:
11476         case GT_LE:
11477         case GT_GE:
11478             cons = gtNewIconNode(true); /* Folds to GT_CNS_INT(true) */
11479             break;
11480
11481         case GT_NE:
11482         case GT_LT:
11483         case GT_GT:
11484             cons = gtNewIconNode(false); /* Folds to GT_CNS_INT(false) */
11485             break;
11486
11487         default:
11488             assert(!"Unexpected relOp");
11489             return tree;
11490     }
11491
11492     /* The node has been folded into 'cons' */
11493
11494     if (fgGlobalMorph)
11495     {
11496         if (!fgIsInlining())
11497         {
11498             fgMorphTreeDone(cons);
11499         }
11500     }
11501     else
11502     {
11503         cons->gtNext = tree->gtNext;
11504         cons->gtPrev = tree->gtPrev;
11505     }
11506     if (lvaLocalVarRefCounted)
11507     {
11508         lvaRecursiveDecRefCounts(tree);
11509     }
11510     return cons;
11511 }
11512
11513 /*****************************************************************************
11514  *
11515  *  Some binary operators can be folded even if they have only one
11516  *  operand constant - e.g. boolean operators, add with 0
11517  *  multiply with 1, etc
11518  */
11519
11520 GenTreePtr Compiler::gtFoldExprSpecial(GenTreePtr tree)
11521 {
11522     GenTreePtr op1  = tree->gtOp.gtOp1;
11523     GenTreePtr op2  = tree->gtOp.gtOp2;
11524     genTreeOps oper = tree->OperGet();
11525
11526     GenTreePtr op, cons;
11527     ssize_t    val;
11528
11529     assert(tree->OperKind() & GTK_BINOP);
11530
11531     /* Filter out operators that cannot be folded here */
11532     if (oper == GT_CAST)
11533     {
11534         return tree;
11535     }
11536
11537     /* We only consider TYP_INT for folding
11538      * Do not fold pointer arithmetic (e.g. addressing modes!) */
11539
11540     if (oper != GT_QMARK && !varTypeIsIntOrI(tree->gtType))
11541     {
11542         return tree;
11543     }
11544
11545     /* Find out which is the constant node */
11546
11547     if (op1->IsCnsIntOrI())
11548     {
11549         op   = op2;
11550         cons = op1;
11551     }
11552     else if (op2->IsCnsIntOrI())
11553     {
11554         op   = op1;
11555         cons = op2;
11556     }
11557     else
11558     {
11559         return tree;
11560     }
11561
11562     /* Get the constant value */
11563
11564     val = cons->gtIntConCommon.IconValue();
11565
11566     /* Here 'op' is the non-constant operand, 'cons' is the constant operand,
11567        and 'val' is the constant's value */
11568
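    // Typical identities handled below (where an operand is dropped it must be side-effect free):
    //   x + 0 => x,  x * 1 => x,  x * 0 => 0,  x / 1 => x,  x - 0 => x,
    //   x & 0 => 0,  x | 0 => x,  x << 0 => x, and a GT_QMARK with a constant condition.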
11569     switch (oper)
11570     {
11571
11572         case GT_EQ:
11573         case GT_NE:
11574             // Optimize boxed value classes; a null comparison here always folds to a constant.  This IL is
11575             // generated when a generic value is tested against null:
11576             //     <T> ... foo(T x) { ... if ((object)x == null) ...
11577             if (val == 0 && op->IsBoxedValue())
11578             {
11579                 // Change the assignment node so we don't generate any code for it.
11580
11581                 GenTreePtr asgStmt = op->gtBox.gtAsgStmtWhenInlinedBoxValue;
11582                 assert(asgStmt->gtOper == GT_STMT);
11583                 GenTreePtr asg = asgStmt->gtStmt.gtStmtExpr;
11584                 assert(asg->gtOper == GT_ASG);
11585 #ifdef DEBUG
11586                 if (verbose)
11587                 {
11588                     printf("Bashing ");
11589                     printTreeID(asg);
11590                     printf(" to NOP as part of dead box operation\n");
11591                     gtDispTree(tree);
11592                 }
11593 #endif
11594                 asg->gtBashToNOP();
11595
11596                 op = gtNewIconNode(oper == GT_NE);
11597                 if (fgGlobalMorph)
11598                 {
11599                     if (!fgIsInlining())
11600                     {
11601                         fgMorphTreeDone(op);
11602                     }
11603                 }
11604                 else
11605                 {
11606                     op->gtNext = tree->gtNext;
11607                     op->gtPrev = tree->gtPrev;
11608                 }
11609                 fgSetStmtSeq(asgStmt);
11610                 return op;
11611             }
11612             break;
11613
11614         case GT_ADD:
11615         case GT_ASG_ADD:
11616             if (val == 0)
11617             {
11618                 goto DONE_FOLD;
11619             }
11620             break;
11621
11622         case GT_MUL:
11623         case GT_ASG_MUL:
11624             if (val == 1)
11625             {
11626                 goto DONE_FOLD;
11627             }
11628             else if (val == 0)
11629             {
11630                 /* Multiply by zero - return the 'zero' node, but not if side effects */
11631                 if (!(op->gtFlags & GTF_SIDE_EFFECT))
11632                 {
11633                     if (lvaLocalVarRefCounted)
11634                     {
11635                         lvaRecursiveDecRefCounts(op);
11636                     }
11637                     op = cons;
11638                     goto DONE_FOLD;
11639                 }
11640             }
11641             break;
11642
11643         case GT_DIV:
11644         case GT_UDIV:
11645         case GT_ASG_DIV:
11646             if ((op2 == cons) && (val == 1) && !(op1->OperKind() & GTK_CONST))
11647             {
11648                 goto DONE_FOLD;
11649             }
11650             break;
11651
11652         case GT_SUB:
11653         case GT_ASG_SUB:
11654             if ((op2 == cons) && (val == 0) && !(op1->OperKind() & GTK_CONST))
11655             {
11656                 goto DONE_FOLD;
11657             }
11658             break;
11659
11660         case GT_AND:
11661             if (val == 0)
11662             {
11663                 /* AND with zero - return the 'zero' node, but not if side effects */
11664
11665                 if (!(op->gtFlags & GTF_SIDE_EFFECT))
11666                 {
11667                     if (lvaLocalVarRefCounted)
11668                     {
11669                         lvaRecursiveDecRefCounts(op);
11670                     }
11671                     op = cons;
11672                     goto DONE_FOLD;
11673                 }
11674             }
11675             else
11676             {
11677                 /* The GTF_BOOLEAN flag is set for nodes that are part
11678                  * of a boolean expression, thus all their children
11679                  * are known to evaluate to only 0 or 1 */
11680
11681                 if (tree->gtFlags & GTF_BOOLEAN)
11682                 {
11683
11684                     /* The constant value must be 1
11685                      * AND with 1 stays the same */
11686                     assert(val == 1);
11687                     goto DONE_FOLD;
11688                 }
11689             }
11690             break;
11691
11692         case GT_OR:
11693             if (val == 0)
11694             {
11695                 goto DONE_FOLD;
11696             }
11697             else if (tree->gtFlags & GTF_BOOLEAN)
11698             {
11699                 /* The constant value must be 1 - OR with 1 is 1 */
11700
11701                 assert(val == 1);
11702
11703                 /* OR with one - return the 'one' node, but not if side effects */
11704
11705                 if (!(op->gtFlags & GTF_SIDE_EFFECT))
11706                 {
11707                     if (lvaLocalVarRefCounted)
11708                     {
11709                         lvaRecursiveDecRefCounts(op);
11710                     }
11711                     op = cons;
11712                     goto DONE_FOLD;
11713                 }
11714             }
11715             break;
11716
11717         case GT_LSH:
11718         case GT_RSH:
11719         case GT_RSZ:
11720         case GT_ROL:
11721         case GT_ROR:
11722         case GT_ASG_LSH:
11723         case GT_ASG_RSH:
11724         case GT_ASG_RSZ:
11725             if (val == 0)
11726             {
11727                 if (op2 == cons)
11728                 {
11729                     goto DONE_FOLD;
11730                 }
11731                 else if (!(op->gtFlags & GTF_SIDE_EFFECT))
11732                 {
11733                     if (lvaLocalVarRefCounted)
11734                     {
11735                         lvaRecursiveDecRefCounts(op);
11736                     }
11737                     op = cons;
11738                     goto DONE_FOLD;
11739                 }
11740             }
11741             break;
11742
11743         case GT_QMARK:
11744         {
11745             assert(op1 == cons && op2 == op && op2->gtOper == GT_COLON);
11746             assert(op2->gtOp.gtOp1 && op2->gtOp.gtOp2);
11747
11748             assert(val == 0 || val == 1);
11749
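            // The condition is a constant, so select the taken arm of the GT_COLON and
            // decrement the local variable ref counts of the arm being discarded.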
11750             GenTree* opToDelete;
11751             if (val)
11752             {
11753                 op         = op2->AsColon()->ThenNode();
11754                 opToDelete = op2->AsColon()->ElseNode();
11755             }
11756             else
11757             {
11758                 op         = op2->AsColon()->ElseNode();
11759                 opToDelete = op2->AsColon()->ThenNode();
11760             }
11761             if (lvaLocalVarRefCounted)
11762             {
11763                 lvaRecursiveDecRefCounts(opToDelete);
11764             }
11765
11766             // Clear colon flags only if the qmark itself is not conditionally executed
11767             if ((tree->gtFlags & GTF_COLON_COND) == 0)
11768             {
11769                 fgWalkTreePre(&op, gtClearColonCond);
11770             }
11771         }
11772
11773             goto DONE_FOLD;
11774
11775         default:
11776             break;
11777     }
11778
11779     /* The node is not foldable */
11780
11781     return tree;
11782
11783 DONE_FOLD:
11784
11785     /* The node has been folded into 'op' */
11786
11787     // If there was an assignment update, we just morphed it into
11788     // a use; update the flags appropriately
11789     if (op->gtOper == GT_LCL_VAR)
11790     {
11791         assert((tree->OperKind() & GTK_ASGOP) || (op->gtFlags & (GTF_VAR_USEASG | GTF_VAR_USEDEF | GTF_VAR_DEF)) == 0);
11792
11793         op->gtFlags &= ~(GTF_VAR_USEASG | GTF_VAR_USEDEF | GTF_VAR_DEF);
11794     }
11795
11796     op->gtNext = tree->gtNext;
11797     op->gtPrev = tree->gtPrev;
11798
11799     return op;
11800 }
11801
11802 /*****************************************************************************
11803  *
11804  *  Fold the given constant tree.
11805  */
11806
11807 #ifdef _PREFAST_
11808 #pragma warning(push)
11809 #pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
11810 #endif
11811 GenTreePtr Compiler::gtFoldExprConst(GenTreePtr tree)
11812 {
11813     unsigned kind = tree->OperKind();
11814
11815     SSIZE_T       i1, i2, itemp;
11816     INT64         lval1, lval2, ltemp;
11817     float         f1, f2;
11818     double        d1, d2;
11819     var_types     switchType;
11820     FieldSeqNode* fieldSeq = FieldSeqStore::NotAField(); // default unless we override it when folding
11821
11822     assert(kind & (GTK_UNOP | GTK_BINOP));
11823
11824     GenTreePtr op1 = tree->gtOp.gtOp1;
11825     GenTreePtr op2 = tree->gtGetOp2();
11826
11827     if (!opts.OptEnabled(CLFLG_CONSTANTFOLD))
11828     {
11829         return tree;
11830     }
11831
11832     if (tree->OperGet() == GT_NOP)
11833     {
11834         return tree;
11835     }
11836
11837 #ifdef FEATURE_SIMD
11838     if (tree->OperGet() == GT_SIMD)
11839     {
11840         return tree;
11841     }
11842 #endif // FEATURE_SIMD
11843
11844     if (tree->gtOper == GT_ALLOCOBJ)
11845     {
11846         return tree;
11847     }
11848
11849     if (kind & GTK_UNOP)
11850     {
11851         assert(op1->OperKind() & GTK_CONST);
11852
11853         switch (op1->gtType)
11854         {
11855             case TYP_INT:
11856
11857                 /* Fold constant INT unary operator */
11858                 assert(op1->gtIntCon.ImmedValCanBeFolded(this, tree->OperGet()));
11859                 i1 = (int)op1->gtIntCon.gtIconVal;
11860
11861                 // If we fold a unary oper, then the folded constant
11862                 // is considered a ConstantIndexField if op1 was one
11863                 //
11864
11865                 if ((op1->gtIntCon.gtFieldSeq != nullptr) && op1->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
11866                 {
11867                     fieldSeq = op1->gtIntCon.gtFieldSeq;
11868                 }
11869
11870                 switch (tree->gtOper)
11871                 {
11872                     case GT_NOT:
11873                         i1 = ~i1;
11874                         break;
11875
11876                     case GT_NEG:
11877                     case GT_CHS:
11878                         i1 = -i1;
11879                         break;
11880
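                    // For casts the narrowed value is computed here at compile time; when the
                    // cast is checked (gtOverflow()), a value that does not round-trip jumps to
                    // the INT_OVF/LNG_OVF paths instead of producing a constant.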
11881                     case GT_CAST:
11882                         // assert (genActualType(tree->CastToType()) == tree->gtType);
11883                         switch (tree->CastToType())
11884                         {
11885                             case TYP_BYTE:
11886                                 itemp = INT32(INT8(i1));
11887                                 goto CHK_OVF;
11888
11889                             case TYP_SHORT:
11890                                 itemp = INT32(INT16(i1));
11891                             CHK_OVF:
11892                                 if (tree->gtOverflow() && ((itemp != i1) || ((tree->gtFlags & GTF_UNSIGNED) && i1 < 0)))
11893                                 {
11894                                     goto INT_OVF;
11895                                 }
11896                                 i1 = itemp;
11897                                 goto CNS_INT;
11898
11899                             case TYP_CHAR:
11900                                 itemp = INT32(UINT16(i1));
11901                                 if (tree->gtOverflow())
11902                                 {
11903                                     if (itemp != i1)
11904                                     {
11905                                         goto INT_OVF;
11906                                     }
11907                                 }
11908                                 i1 = itemp;
11909                                 goto CNS_INT;
11910
11911                             case TYP_BOOL:
11912                             case TYP_UBYTE:
11913                                 itemp = INT32(UINT8(i1));
11914                                 if (tree->gtOverflow())
11915                                 {
11916                                     if (itemp != i1)
11917                                     {
11918                                         goto INT_OVF;
11919                                     }
11920                                 }
11921                                 i1 = itemp;
11922                                 goto CNS_INT;
11923
11924                             case TYP_UINT:
11925                                 if (!(tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && i1 < 0)
11926                                 {
11927                                     goto INT_OVF;
11928                                 }
11929                                 goto CNS_INT;
11930
11931                             case TYP_INT:
11932                                 if ((tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && i1 < 0)
11933                                 {
11934                                     goto INT_OVF;
11935                                 }
11936                                 goto CNS_INT;
11937
11938                             case TYP_ULONG:
11939                                 if (!(tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && i1 < 0)
11940                                 {
11941                                     op1->ChangeOperConst(GT_CNS_NATIVELONG); // need type of oper to be same as tree
11942                                     op1->gtType = TYP_LONG;
11943                                     // We don't care about the value as we are throwing an exception
11944                                     goto LNG_OVF;
11945                                 }
11946                                 lval1 = UINT64(UINT32(i1));
11947                                 goto CNS_LONG;
11948
11949                             case TYP_LONG:
11950                                 if (tree->gtFlags & GTF_UNSIGNED)
11951                                 {
11952                                     lval1 = INT64(UINT32(i1));
11953                                 }
11954                                 else
11955                                 {
11956                                     lval1 = INT64(INT32(i1));
11957                                 }
11958                                 goto CNS_LONG;
11959
11960                             case TYP_FLOAT:
11961                                 if (tree->gtFlags & GTF_UNSIGNED)
11962                                 {
11963                                     f1 = forceCastToFloat(UINT32(i1));
11964                                 }
11965                                 else
11966                                 {
11967                                     f1 = forceCastToFloat(INT32(i1));
11968                                 }
11969                                 d1 = f1;
11970                                 goto CNS_DOUBLE;
11971
11972                             case TYP_DOUBLE:
11973                                 if (tree->gtFlags & GTF_UNSIGNED)
11974                                 {
11975                                     d1 = (double)UINT32(i1);
11976                                 }
11977                                 else
11978                                 {
11979                                     d1 = (double)INT32(i1);
11980                                 }
11981                                 goto CNS_DOUBLE;
11982
11983                             default:
11984                                 assert(!"BAD_TYP");
11985                                 break;
11986                         }
11987                         return tree;
11988
11989                     default:
11990                         return tree;
11991                 }
11992
11993                 goto CNS_INT;
11994
11995             case TYP_LONG:
11996
11997                 /* Fold constant LONG unary operator */
11998
11999                 assert(op1->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()));
12000                 lval1 = op1->gtIntConCommon.LngValue();
12001
12002                 switch (tree->gtOper)
12003                 {
12004                     case GT_NOT:
12005                         lval1 = ~lval1;
12006                         break;
12007
12008                     case GT_NEG:
12009                     case GT_CHS:
12010                         lval1 = -lval1;
12011                         break;
12012
12013                     case GT_CAST:
12014                         assert(genActualType(tree->CastToType()) == tree->gtType);
12015                         switch (tree->CastToType())
12016                         {
12017                             case TYP_BYTE:
12018                                 i1 = INT32(INT8(lval1));
12019                                 goto CHECK_INT_OVERFLOW;
12020
12021                             case TYP_SHORT:
12022                                 i1 = INT32(INT16(lval1));
12023                                 goto CHECK_INT_OVERFLOW;
12024
12025                             case TYP_CHAR:
12026                                 i1 = INT32(UINT16(lval1));
12027                                 goto CHECK_UINT_OVERFLOW;
12028
12029                             case TYP_UBYTE:
12030                                 i1 = INT32(UINT8(lval1));
12031                                 goto CHECK_UINT_OVERFLOW;
12032
12033                             case TYP_INT:
12034                                 i1 = INT32(lval1);
12035
12036                             CHECK_INT_OVERFLOW:
12037                                 if (tree->gtOverflow())
12038                                 {
12039                                     if (i1 != lval1)
12040                                     {
12041                                         goto INT_OVF;
12042                                     }
12043                                     if ((tree->gtFlags & GTF_UNSIGNED) && i1 < 0)
12044                                     {
12045                                         goto INT_OVF;
12046                                     }
12047                                 }
12048                                 goto CNS_INT;
12049
12050                             case TYP_UINT:
12051                                 i1 = UINT32(lval1);
12052
12053                             CHECK_UINT_OVERFLOW:
12054                                 if (tree->gtOverflow() && UINT32(i1) != lval1)
12055                                 {
12056                                     goto INT_OVF;
12057                                 }
12058                                 goto CNS_INT;
12059
12060                             case TYP_ULONG:
12061                                 if (!(tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && lval1 < 0)
12062                                 {
12063                                     goto LNG_OVF;
12064                                 }
12065                                 goto CNS_LONG;
12066
12067                             case TYP_LONG:
12068                                 if ((tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && lval1 < 0)
12069                                 {
12070                                     goto LNG_OVF;
12071                                 }
12072                                 goto CNS_LONG;
12073
12074                             case TYP_FLOAT:
12075                             case TYP_DOUBLE:
12076                                 if ((tree->gtFlags & GTF_UNSIGNED) && lval1 < 0)
12077                                 {
12078                                     d1 = FloatingPointUtils::convertUInt64ToDouble((unsigned __int64)lval1);
12079                                 }
12080                                 else
12081                                 {
12082                                     d1 = (double)lval1;
12083                                 }
12084
12085                                 if (tree->CastToType() == TYP_FLOAT)
12086                                 {
12087                                     f1 = forceCastToFloat(d1); // truncate precision
12088                                     d1 = f1;
12089                                 }
12090                                 goto CNS_DOUBLE;
12091                             default:
12092                                 assert(!"BAD_TYP");
12093                                 break;
12094                         }
12095                         return tree;
12096
12097                     default:
12098                         return tree;
12099                 }
12100
12101                 goto CNS_LONG;
12102
12103             case TYP_FLOAT:
12104             case TYP_DOUBLE:
12105                 assert(op1->gtOper == GT_CNS_DBL);
12106
12107                 /* Fold constant DOUBLE unary operator */
12108
12109                 d1 = op1->gtDblCon.gtDconVal;
12110
12111                 switch (tree->gtOper)
12112                 {
12113                     case GT_NEG:
12114                     case GT_CHS:
12115                         d1 = -d1;
12116                         break;
12117
12118                     case GT_CAST:
12119
12120                         if (tree->gtOverflowEx())
12121                         {
12122                             return tree;
12123                         }
12124
12125                         assert(genActualType(tree->CastToType()) == tree->gtType);
12126
12127                         if ((op1->gtType == TYP_FLOAT && !_finite(forceCastToFloat(d1))) ||
12128                             (op1->gtType == TYP_DOUBLE && !_finite(d1)))
12129                         {
12130                             // The floating point constant is not finite.  The ECMA spec says, in
12131                             // III 3.27, that "...if overflow occurs converting a floating point type
12132                             // to an integer, ..., the value returned is unspecified."  However, it would
12133                             // at least be desirable to have the same value returned for casting an overflowing
12134                             // constant to an int as would obtained by passing that constant as a parameter
12135                             // then casting that parameter to an int type.  We will assume that the C compiler's
12136                             // cast logic will yield the desired result (and trust testing to tell otherwise).
12137                             // Cross-compilation is an issue here; if that becomes an important scenario, we should
12138                             // capture the target-specific values of overflow casts to the various integral types as
12139                             // constants in a target-specific function.
12140                             CLANG_FORMAT_COMMENT_ANCHOR;
12141
12142 #ifdef _TARGET_XARCH_
12143                             // Don't fold conversions of +inf/-inf to integral value as the value returned by JIT helper
12144                             // doesn't match with the C compiler's cast result.
12145                             return tree;
12146 #else  //!_TARGET_XARCH_
12147
12148                             switch (tree->CastToType())
12149                             {
12150                                 case TYP_BYTE:
12151                                     i1 = ssize_t(INT8(d1));
12152                                     goto CNS_INT;
12153                                 case TYP_UBYTE:
12154                                     i1 = ssize_t(UINT8(d1));
12155                                     goto CNS_INT;
12156                                 case TYP_SHORT:
12157                                     i1 = ssize_t(INT16(d1));
12158                                     goto CNS_INT;
12159                                 case TYP_CHAR:
12160                                     i1 = ssize_t(UINT16(d1));
12161                                     goto CNS_INT;
12162                                 case TYP_INT:
12163                                     i1 = ssize_t(INT32(d1));
12164                                     goto CNS_INT;
12165                                 case TYP_UINT:
12166                                     i1 = ssize_t(UINT32(d1));
12167                                     goto CNS_INT;
12168                                 case TYP_LONG:
12169                                     lval1 = INT64(d1);
12170                                     goto CNS_LONG;
12171                                 case TYP_ULONG:
12172                                     lval1 = UINT64(d1);
12173                                     goto CNS_LONG;
12174                                 case TYP_FLOAT:
12175                                 case TYP_DOUBLE:
12176                                     if (op1->gtType == TYP_FLOAT)
12177                                         d1 = forceCastToFloat(d1); // it's only !_finite() after this conversion
12178                                     goto CNS_DOUBLE;
12179                                 default:
12180                                     unreached();
12181                             }
12182 #endif //!_TARGET_XARCH_
12183                         }
12184
12185                         switch (tree->CastToType())
12186                         {
12187                             case TYP_BYTE:
12188                                 i1 = INT32(INT8(d1));
12189                                 goto CNS_INT;
12190
12191                             case TYP_SHORT:
12192                                 i1 = INT32(INT16(d1));
12193                                 goto CNS_INT;
12194
12195                             case TYP_CHAR:
12196                                 i1 = INT32(UINT16(d1));
12197                                 goto CNS_INT;
12198
12199                             case TYP_UBYTE:
12200                                 i1 = INT32(UINT8(d1));
12201                                 goto CNS_INT;
12202
12203                             case TYP_INT:
12204                                 i1 = INT32(d1);
12205                                 goto CNS_INT;
12206
12207                             case TYP_UINT:
12208                                 i1 = forceCastToUInt32(d1);
12209                                 goto CNS_INT;
12210
12211                             case TYP_LONG:
12212                                 lval1 = INT64(d1);
12213                                 goto CNS_LONG;
12214
12215                             case TYP_ULONG:
12216                                 lval1 = FloatingPointUtils::convertDoubleToUInt64(d1);
12217                                 goto CNS_LONG;
12218
12219                             case TYP_FLOAT:
12220                                 d1 = forceCastToFloat(d1);
12221                                 goto CNS_DOUBLE;
12222
12223                             case TYP_DOUBLE:
12224                                 if (op1->gtType == TYP_FLOAT)
12225                                 {
12226                                     d1 = forceCastToFloat(d1); // truncate precision
12227                                 }
12228                                 goto CNS_DOUBLE; // redundant cast
12229
12230                             default:
12231                                 assert(!"BAD_TYP");
12232                                 break;
12233                         }
12234                         return tree;
12235
12236                     default:
12237                         return tree;
12238                 }
12239                 goto CNS_DOUBLE;
12240
12241             default:
12242                 /* not a foldable typ - e.g. RET const */
12243                 return tree;
12244         }
12245     }
12246
12247     /* We have a binary operator */
12248
12249     assert(kind & GTK_BINOP);
12250     assert(op2);
12251     assert(op1->OperKind() & GTK_CONST);
12252     assert(op2->OperKind() & GTK_CONST);
12253
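    // A GT_COMMA of two constants has no side effects, so it folds to its second operand;
    // a GT_LIST of constants is left unfolded.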
12254     if (tree->gtOper == GT_COMMA)
12255     {
12256         return op2;
12257     }
12258
12259     if (tree->gtOper == GT_LIST)
12260     {
12261         return tree;
12262     }
12263
12264     switchType = op1->gtType;
12265
12266     // Normally we will just switch on op1 types, but for the case where
12267     //  only op2 is a GC type and op1 is not a GC type, we use the op2 type.
12268     //  This makes us handle the fold as a GC-type fold.
12269     //
12270     if (varTypeIsGC(op2->gtType) && !varTypeIsGC(op1->gtType))
12271     {
12272         switchType = op2->gtType;
12273     }
12274
12275     switch (switchType)
12276     {
12277
12278         /*-------------------------------------------------------------------------
12279          * Fold constant REF or BYREF binary operator
12280          * These can only be comparisons, or additions involving a null pointer
12281          */
12282
12283         case TYP_REF:
12284
12285             /* String nodes are an RVA at this point */
12286
12287             if (op1->gtOper == GT_CNS_STR || op2->gtOper == GT_CNS_STR)
12288             {
12289                 return tree;
12290             }
12291
12292             __fallthrough;
12293
12294         case TYP_BYREF:
12295
12296             i1 = op1->gtIntConCommon.IconValue();
12297             i2 = op2->gtIntConCommon.IconValue();
12298
12299             switch (tree->gtOper)
12300             {
12301                 case GT_EQ:
12302                     i1 = (i1 == i2);
12303                     goto FOLD_COND;
12304
12305                 case GT_NE:
12306                     i1 = (i1 != i2);
12307                     goto FOLD_COND;
12308
12309                 case GT_ADD:
12310                     noway_assert(tree->gtType != TYP_REF);
12311                     // We only fold a GT_ADD that involves a null reference.
12312                     if (((op1->TypeGet() == TYP_REF) && (i1 == 0)) || ((op2->TypeGet() == TYP_REF) && (i2 == 0)))
12313                     {
12314 #ifdef DEBUG
12315                         if (verbose)
12316                         {
12317                             printf("\nFolding operator with constant nodes into a constant:\n");
12318                             gtDispTree(tree);
12319                         }
12320 #endif
12321                         // Fold into GT_IND of null byref
12322                         tree->ChangeOperConst(GT_CNS_INT);
12323                         tree->gtType              = TYP_BYREF;
12324                         tree->gtIntCon.gtIconVal  = 0;
12325                         tree->gtIntCon.gtFieldSeq = FieldSeqStore::NotAField();
12326                         if (vnStore != nullptr)
12327                         {
12328                             fgValueNumberTreeConst(tree);
12329                         }
12330 #ifdef DEBUG
12331                         if (verbose)
12332                         {
12333                             printf("\nFolded to null byref:\n");
12334                             gtDispTree(tree);
12335                         }
12336 #endif
12337                         goto DONE;
12338                     }
12339
12340                 default:
12341                     break;
12342             }
12343
12344             return tree;
12345
12346         /*-------------------------------------------------------------------------
12347          * Fold constant INT binary operator
12348          */
12349
12350         case TYP_INT:
12351
12352             if (tree->OperIsCompare() && (tree->gtType == TYP_BYTE))
12353             {
12354                 tree->gtType = TYP_INT;
12355             }
12356
12357             assert(tree->gtType == TYP_INT || varTypeIsGC(tree->TypeGet()) || tree->gtOper == GT_MKREFANY);
12358
12359             // No GC pointer types should be folded here...
12360             //
12361             assert(!varTypeIsGC(op1->gtType) && !varTypeIsGC(op2->gtType));
12362
12363             assert(op1->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()));
12364             assert(op2->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()));
12365
12366             i1 = op1->gtIntConCommon.IconValue();
12367             i2 = op2->gtIntConCommon.IconValue();
12368
12369             switch (tree->gtOper)
12370             {
12371                 case GT_EQ:
12372                     i1 = (INT32(i1) == INT32(i2));
12373                     break;
12374                 case GT_NE:
12375                     i1 = (INT32(i1) != INT32(i2));
12376                     break;
12377
12378                 case GT_LT:
12379                     if (tree->gtFlags & GTF_UNSIGNED)
12380                     {
12381                         i1 = (UINT32(i1) < UINT32(i2));
12382                     }
12383                     else
12384                     {
12385                         i1 = (INT32(i1) < INT32(i2));
12386                     }
12387                     break;
12388
12389                 case GT_LE:
12390                     if (tree->gtFlags & GTF_UNSIGNED)
12391                     {
12392                         i1 = (UINT32(i1) <= UINT32(i2));
12393                     }
12394                     else
12395                     {
12396                         i1 = (INT32(i1) <= INT32(i2));
12397                     }
12398                     break;
12399
12400                 case GT_GE:
12401                     if (tree->gtFlags & GTF_UNSIGNED)
12402                     {
12403                         i1 = (UINT32(i1) >= UINT32(i2));
12404                     }
12405                     else
12406                     {
12407                         i1 = (INT32(i1) >= INT32(i2));
12408                     }
12409                     break;
12410
12411                 case GT_GT:
12412                     if (tree->gtFlags & GTF_UNSIGNED)
12413                     {
12414                         i1 = (UINT32(i1) > UINT32(i2));
12415                     }
12416                     else
12417                     {
12418                         i1 = (INT32(i1) > INT32(i2));
12419                     }
12420                     break;
12421
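                // Checked (overflow) ADD, SUB and MUL are folded by redoing the operation in 64-bit
                // precision and comparing it against the 32-bit result: a mismatch means the operation
                // overflows, and we jump to INT_OVF to turn the tree into a comma-throw instead.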
12422                 case GT_ADD:
12423                     itemp = i1 + i2;
12424                     if (tree->gtOverflow())
12425                     {
12426                         if (tree->gtFlags & GTF_UNSIGNED)
12427                         {
12428                             if (INT64(UINT32(itemp)) != INT64(UINT32(i1)) + INT64(UINT32(i2)))
12429                             {
12430                                 goto INT_OVF;
12431                             }
12432                         }
12433                         else
12434                         {
12435                             if (INT64(INT32(itemp)) != INT64(INT32(i1)) + INT64(INT32(i2)))
12436                             {
12437                                 goto INT_OVF;
12438                             }
12439                         }
12440                     }
12441                     i1       = itemp;
12442                     fieldSeq = GetFieldSeqStore()->Append(op1->gtIntCon.gtFieldSeq, op2->gtIntCon.gtFieldSeq);
12443                     break;
12444                 case GT_SUB:
12445                     itemp = i1 - i2;
12446                     if (tree->gtOverflow())
12447                     {
12448                         if (tree->gtFlags & GTF_UNSIGNED)
12449                         {
12450                             if (INT64(UINT32(itemp)) != ((INT64)((UINT32)i1) - (INT64)((UINT32)i2)))
12451                             {
12452                                 goto INT_OVF;
12453                             }
12454                         }
12455                         else
12456                         {
12457                             if (INT64(INT32(itemp)) != INT64(INT32(i1)) - INT64(INT32(i2)))
12458                             {
12459                                 goto INT_OVF;
12460                             }
12461                         }
12462                     }
12463                     i1 = itemp;
12464                     break;
12465                 case GT_MUL:
12466                     itemp = i1 * i2;
12467                     if (tree->gtOverflow())
12468                     {
12469                         if (tree->gtFlags & GTF_UNSIGNED)
12470                         {
12471                             if (INT64(UINT32(itemp)) != ((INT64)((UINT32)i1) * (INT64)((UINT32)i2)))
12472                             {
12473                                 goto INT_OVF;
12474                             }
12475                         }
12476                         else
12477                         {
12478                             if (INT64(INT32(itemp)) != INT64(INT32(i1)) * INT64(INT32(i2)))
12479                             {
12480                                 goto INT_OVF;
12481                             }
12482                         }
12483                     }
12484                     // For the very particular case of the "constant array index" pseudo-field, we
12485                     // assume that multiplication is by the field width, and preserves that field.
12486                     // This could obviously be made more robust by a more complicated set of annotations...
12487                     if ((op1->gtIntCon.gtFieldSeq != nullptr) && op1->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
12488                     {
12489                         assert(op2->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField());
12490                         fieldSeq = op1->gtIntCon.gtFieldSeq;
12491                     }
12492                     else if ((op2->gtIntCon.gtFieldSeq != nullptr) &&
12493                              op2->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
12494                     {
12495                         assert(op1->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField());
12496                         fieldSeq = op2->gtIntCon.gtFieldSeq;
12497                     }
12498                     i1 = itemp;
12499                     break;
12500
12501                 case GT_OR:
12502                     i1 |= i2;
12503                     break;
12504                 case GT_XOR:
12505                     i1 ^= i2;
12506                     break;
12507                 case GT_AND:
12508                     i1 &= i2;
12509                     break;
12510
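                // Shift and rotate counts are masked to the low five bits (0x1f), so the fold only ever
                // shifts by 0..31 bits; the rotates are built from a pair of shifts OR'd together, e.g.
                // ROL(x, n) == (x << n) | ((unsigned)x >> (32 - n)), with both counts masked.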
12511                 case GT_LSH:
12512                     i1 <<= (i2 & 0x1f);
12513                     break;
12514                 case GT_RSH:
12515                     i1 >>= (i2 & 0x1f);
12516                     break;
12517                 case GT_RSZ:
12518                     /* logical shift -> make it unsigned to not propagate the sign bit */
12519                     i1 = UINT32(i1) >> (i2 & 0x1f);
12520                     break;
12521                 case GT_ROL:
12522                     i1 = (i1 << (i2 & 0x1f)) | (UINT32(i1) >> ((32 - i2) & 0x1f));
12523                     break;
12524                 case GT_ROR:
12525                     i1 = (i1 << ((32 - i2) & 0x1f)) | (UINT32(i1) >> (i2 & 0x1f));
12526                     break;
12527
12528                 /* DIV and MOD can generate an INT 0 (the divide-error interrupt) - on division by 0,
12529                  * or on overflow when dividing MIN_INT by -1; leave such trees to throw at run time */
12530
12531                 case GT_DIV:
12532                 case GT_MOD:
12533                 case GT_UDIV:
12534                 case GT_UMOD:
12535                     if (INT32(i2) == 0)
12536                     {
12537                         // Division by zero:
12538                         // We have to evaluate this expression and throw an exception
12539                         return tree;
12540                     }
12541                     else if ((INT32(i2) == -1) && (UINT32(i1) == 0x80000000))
12542                     {
12543                         // Overflow Division:
12544                         // We have to evaluate this expression and throw an exception
12545                         return tree;
12546                     }
12547
12548                     if (tree->gtOper == GT_DIV)
12549                     {
12550                         i1 = INT32(i1) / INT32(i2);
12551                     }
12552                     else if (tree->gtOper == GT_MOD)
12553                     {
12554                         i1 = INT32(i1) % INT32(i2);
12555                     }
12556                     else if (tree->gtOper == GT_UDIV)
12557                     {
12558                         i1 = UINT32(i1) / UINT32(i2);
12559                     }
12560                     else
12561                     {
12562                         assert(tree->gtOper == GT_UMOD);
12563                         i1 = UINT32(i1) % UINT32(i2);
12564                     }
12565                     break;
12566
12567                 default:
12568                     return tree;
12569             }
12570
12571         /* We get here after folding to an integer constant;
12572          * change the node to the new type/value and make sure the node sizes are OK */
12573         CNS_INT:
12574         FOLD_COND:
12575
12576 #ifdef DEBUG
12577             if (verbose)
12578             {
12579                 printf("\nFolding operator with constant nodes into a constant:\n");
12580                 gtDispTree(tree);
12581             }
12582 #endif
12583
12584 #ifdef _TARGET_64BIT_
12585             // On 64-bit targets, re-extend the 32-bit result: zero-extend when the operation is unsigned, sign-extend otherwise.
12586             if (tree->gtFlags & GTF_UNSIGNED)
12587             {
12588                 i1 = UINT32(i1);
12589             }
12590             else
12591             {
12592                 i1 = INT32(i1);
12593             }
12594 #endif // _TARGET_64BIT_
12595
12596             /* Also all conditional folding jumps here since the node hanging from
12597              * GT_JTRUE has to be a GT_CNS_INT - value 0 or 1 */
12598
12599             tree->ChangeOperConst(GT_CNS_INT);
12600             tree->gtType              = TYP_INT;
12601             tree->gtIntCon.gtIconVal  = i1;
12602             tree->gtIntCon.gtFieldSeq = fieldSeq;
12603             if (vnStore != nullptr)
12604             {
12605                 fgValueNumberTreeConst(tree);
12606             }
12607 #ifdef DEBUG
12608             if (verbose)
12609             {
12610                 printf("Bashed to int constant:\n");
12611                 gtDispTree(tree);
12612             }
12613 #endif
12614             goto DONE;
12615
12616         /* This operation is going to cause an overflow exception. Morph into
12617            an overflow helper. Put a dummy constant value for code generation.
12618
12619            We could remove all subsequent trees in the current basic block,
12620            unless this node is a child of GT_COLON
12621
12622            NOTE: Since the folded value is not constant we should not change the
12623                  "tree" node - otherwise we confuse the logic that checks if the folding
12624                  was successful - instead use one of the operands, e.g. op1
12625          */
12626
12627         LNG_OVF:
12628             // Don't fold overflow operations unless we are in the global morph phase.
12629             // The reason for this is that this optimization is replacing a gentree node
12630             // with another new gentree node. Say a GT_CALL(arglist) has one 'arg'
12631             // involving overflow arithmetic.  During assertion prop, it is possible
12632             // that the 'arg' could be constant folded and the result could lead to an
12633             // overflow.  In such a case 'arg' will get replaced with GT_COMMA node
12634             // but fgMorphArgs() - see the logic around "if(lateArgsComputed)" - doesn't
12635             // update args table. For this reason this optimization is enabled only
12636             // for global morphing phase.
12637             //
12638             // X86/Arm32 legacy codegen note: This is not an issue on x86 because it doesn't use
12639             // an arg table for calls.  In addition, x86/arm32 legacy codegen doesn't
12640             // expect long constants to show up as an operand of overflow cast operation.
12641             //
12642             // TODO-CQ: Once fgMorphArgs() is fixed this restriction could be removed.
12643             CLANG_FORMAT_COMMENT_ANCHOR;
12644
12645 #ifndef LEGACY_BACKEND
12646             if (!fgGlobalMorph)
12647             {
12648                 assert(tree->gtOverflow());
12649                 return tree;
12650             }
12651 #endif // !LEGACY_BACKEND
12652
12653             op1 = gtNewLconNode(0);
12654             if (vnStore != nullptr)
12655             {
12656                 op1->gtVNPair.SetBoth(vnStore->VNZeroForType(TYP_LONG));
12657             }
12658             goto OVF;
12659
12660         INT_OVF:
12661 #ifndef LEGACY_BACKEND
12662             // Don't fold overflow operations unless we are in the global morph phase;
12663             // see the comment at LNG_OVF above for the full explanation.
12677
12678             if (!fgGlobalMorph)
12679             {
12680                 assert(tree->gtOverflow());
12681                 return tree;
12682             }
12683 #endif // !LEGACY_BACKEND
12684
12685             op1 = gtNewIconNode(0);
12686             if (vnStore != nullptr)
12687             {
12688                 op1->gtVNPair.SetBoth(vnStore->VNZeroForType(TYP_INT));
12689             }
12690             goto OVF;
12691
12692         OVF:
12693 #ifdef DEBUG
12694             if (verbose)
12695             {
12696                 printf("\nFolding binary operator with constant nodes into a comma throw:\n");
12697                 gtDispTree(tree);
12698             }
12699 #endif
12700             /* We will change the operation into a GT_COMMA and attach the overflow helper call as gtOp.gtOp1.
12701              * The dummy constant zero becomes op2. */
12702
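            // The resulting shape is GT_COMMA(helper call raising CORINFO_HELP_OVERFLOW, constant zero),
            // typed like the original tree so any parent still sees a value of the expected type.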
12703             assert(tree->gtOverflow());
12704             assert(tree->gtOper == GT_ADD || tree->gtOper == GT_SUB || tree->gtOper == GT_CAST ||
12705                    tree->gtOper == GT_MUL);
12706             assert(op1);
12707
12708             op2 = op1;
12709             op1 = gtNewHelperCallNode(CORINFO_HELP_OVERFLOW, TYP_VOID, GTF_EXCEPT,
12710                                       gtNewArgList(gtNewIconNode(compCurBB->bbTryIndex)));
12711
12712             if (vnStore != nullptr)
12713             {
12714                 op1->gtVNPair =
12715                     vnStore->VNPWithExc(ValueNumPair(ValueNumStore::VNForVoid(), ValueNumStore::VNForVoid()),
12716                                         vnStore->VNPExcSetSingleton(vnStore->VNPairForFunc(TYP_REF, VNF_OverflowExc)));
12717             }
12718
12719             tree = gtNewOperNode(GT_COMMA, tree->gtType, op1, op2);
12720
12721             return tree;
12722
12723         /*-------------------------------------------------------------------------
12724          * Fold constant LONG binary operator
12725          */
12726
12727         case TYP_LONG:
12728
12729             // No GC pointer types should be folded here...
12730             //
12731             assert(!varTypeIsGC(op1->gtType) && !varTypeIsGC(op2->gtType));
12732
12733             // op1 is known to be a TYP_LONG, op2 is normally a TYP_LONG, unless we have a shift operator in which case
12734             // it is a TYP_INT
12735             //
12736             assert((op2->gtType == TYP_LONG) || (op2->gtType == TYP_INT));
12737
12738             assert(op1->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()));
12739             assert(op2->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()));
12740
12741             lval1 = op1->gtIntConCommon.LngValue();
12742
12743             // For the shift operators we can have a op2 that is a TYP_INT and thus will be GT_CNS_INT
12744             if (op2->OperGet() == GT_CNS_INT)
12745             {
12746                 lval2 = op2->gtIntConCommon.IconValue();
12747             }
12748             else
12749             {
12750                 lval2 = op2->gtIntConCommon.LngValue();
12751             }
12752
12753             switch (tree->gtOper)
12754             {
12755                 case GT_EQ:
12756                     i1 = (lval1 == lval2);
12757                     goto FOLD_COND;
12758                 case GT_NE:
12759                     i1 = (lval1 != lval2);
12760                     goto FOLD_COND;
12761
12762                 case GT_LT:
12763                     if (tree->gtFlags & GTF_UNSIGNED)
12764                     {
12765                         i1 = (UINT64(lval1) < UINT64(lval2));
12766                     }
12767                     else
12768                     {
12769                         i1 = (lval1 < lval2);
12770                     }
12771                     goto FOLD_COND;
12772
12773                 case GT_LE:
12774                     if (tree->gtFlags & GTF_UNSIGNED)
12775                     {
12776                         i1 = (UINT64(lval1) <= UINT64(lval2));
12777                     }
12778                     else
12779                     {
12780                         i1 = (lval1 <= lval2);
12781                     }
12782                     goto FOLD_COND;
12783
12784                 case GT_GE:
12785                     if (tree->gtFlags & GTF_UNSIGNED)
12786                     {
12787                         i1 = (UINT64(lval1) >= UINT64(lval2));
12788                     }
12789                     else
12790                     {
12791                         i1 = (lval1 >= lval2);
12792                     }
12793                     goto FOLD_COND;
12794
12795                 case GT_GT:
12796                     if (tree->gtFlags & GTF_UNSIGNED)
12797                     {
12798                         i1 = (UINT64(lval1) > UINT64(lval2));
12799                     }
12800                     else
12801                     {
12802                         i1 = (lval1 > lval2);
12803                     }
12804                     goto FOLD_COND;
12805
12806                 case GT_ADD:
12807                     ltemp = lval1 + lval2;
12808
12809                 LNG_ADD_CHKOVF:
12810                     /* For the SIGNED case - If there is one positive and one negative operand, there can be no overflow
12811                     /* For the SIGNED case - If there is one positive and one negative operand, there can be no overflow.
12812                      * If both are positive, the result has to be positive, and similarly for negatives.
12813                      *
12814                      * For the UNSIGNED case - If either UINT64 operand is bigger than the result then OVF */
12815                     if (tree->gtOverflow())
12816                     {
12817                         if (tree->gtFlags & GTF_UNSIGNED)
12818                         {
12819                             if ((UINT64(lval1) > UINT64(ltemp)) || (UINT64(lval2) > UINT64(ltemp)))
12820                             {
12821                                 goto LNG_OVF;
12822                             }
12823                         }
12824                         else if (((lval1 < 0) == (lval2 < 0)) && ((lval1 < 0) != (ltemp < 0)))
12825                         {
12826                             goto LNG_OVF;
12827                         }
12828                     }
12829                     lval1 = ltemp;
12830                     break;
12831
12832                 case GT_SUB:
12833                     ltemp = lval1 - lval2;
12834                     if (tree->gtOverflow())
12835                     {
12836                         if (tree->gtFlags & GTF_UNSIGNED)
12837                         {
12838                             if (UINT64(lval2) > UINT64(lval1))
12839                             {
12840                                 goto LNG_OVF;
12841                             }
12842                         }
12843                         else
12844                         {
12845                             /* If both operands are +ve or both are -ve, there can be no
12846                                overflow. Else use the logic for : lval1 + (-lval2) */
12847
12848                             if ((lval1 < 0) != (lval2 < 0))
12849                             {
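                                // lval2 == INT64_MIN cannot be negated; in this branch the operands have
                                // different signs, so lval1 >= 0 and lval1 - INT64_MIN always overflows.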
12850                                 if (lval2 == INT64_MIN)
12851                                 {
12852                                     goto LNG_OVF;
12853                                 }
12854                                 lval2 = -lval2;
12855                                 goto LNG_ADD_CHKOVF;
12856                             }
12857                         }
12858                     }
12859                     lval1 = ltemp;
12860                     break;
12861
12862                 case GT_MUL:
12863                     ltemp = lval1 * lval2;
12864
12865                     if (tree->gtOverflow() && lval2 != 0)
12866                     {
12867
12868                         if (tree->gtFlags & GTF_UNSIGNED)
12869                         {
12870                             UINT64 ultemp = ltemp;
12871                             UINT64 ulval1 = lval1;
12872                             UINT64 ulval2 = lval2;
12873                             if ((ultemp / ulval2) != ulval1)
12874                             {
12875                                 goto LNG_OVF;
12876                             }
12877                         }
12878                         else
12879                         {
12880                             // This does a multiply and then reverses it.  This test works great except for
12881                             // MIN_LONG * -1.  In that case we mess up the sign on ltemp, so double check the sign.
12882                             // If either operand is 0, then there is no overflow.
12883                             if (lval1 != 0) // lval2 checked above.
12884                             {
12885                                 if (((lval1 < 0) == (lval2 < 0)) && (ltemp < 0))
12886                                 {
12887                                     goto LNG_OVF;
12888                                 }
12889                                 if (((lval1 < 0) != (lval2 < 0)) && (ltemp > 0))
12890                                 {
12891                                     goto LNG_OVF;
12892                                 }
12893
12894                                 // TODO-Amd64-Unix: Remove the code that disables optimizations for this method when
12895                                 // the clang optimizer is fixed and/or the method implementation is refactored into
12896                                 // simpler code.
12897                                 // There is a bug in the clang-3.5 optimizer. The issue is that in a release build the
12898                                 // optimizer mistypes the operands of (ltemp / lval2) as int for a corner case of
12899                                 // MIN_LONG, i.e. it emits a 32 bit div operation instead of a 64 bit one. For the case
12900                                 // of lval1 and lval2 both equal to MIN_LONG (0x8000000000000000) this results in
12901                                 // raising a SIGFPE.
12902                                 // Optimizations are disabled for now. See compiler.h.
12903                                 if ((ltemp / lval2) != lval1)
12904                                 {
12905                                     goto LNG_OVF;
12906                                 }
12907                             }
12908                         }
12909                     }
12910
12911                     lval1 = ltemp;
12912                     break;
12913
12914                 case GT_OR:
12915                     lval1 |= lval2;
12916                     break;
12917                 case GT_XOR:
12918                     lval1 ^= lval2;
12919                     break;
12920                 case GT_AND:
12921                     lval1 &= lval2;
12922                     break;
12923
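                // 64-bit shifts and rotates mask the count to the low six bits (0x3f); as in the
                // 32-bit case, the rotates are composed from a pair of shifts OR'd together.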
12924                 case GT_LSH:
12925                     lval1 <<= (lval2 & 0x3f);
12926                     break;
12927                 case GT_RSH:
12928                     lval1 >>= (lval2 & 0x3f);
12929                     break;
12930                 case GT_RSZ:
12931                     /* logical shift -> make it unsigned to not propagate the sign bit */
12932                     lval1 = UINT64(lval1) >> (lval2 & 0x3f);
12933                     break;
12934                 case GT_ROL:
12935                     lval1 = (lval1 << (lval2 & 0x3f)) | (UINT64(lval1) >> ((64 - lval2) & 0x3f));
12936                     break;
12937                 case GT_ROR:
12938                     lval1 = (lval1 << ((64 - lval2) & 0x3f)) | (UINT64(lval1) >> (lval2 & 0x3f));
12939                     break;
12940
12941                 // Both DIV and IDIV on x86 raise an exception for min_int (and min_long) / -1.  So we preserve
12942                 // that behavior here.
12943                 case GT_DIV:
12944                     if (!lval2)
12945                     {
12946                         return tree;
12947                     }
12948
12949                     if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
12950                     {
12951                         return tree;
12952                     }
12953                     lval1 /= lval2;
12954                     break;
12955
12956                 case GT_MOD:
12957                     if (!lval2)
12958                     {
12959                         return tree;
12960                     }
12961                     if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
12962                     {
12963                         return tree;
12964                     }
12965                     lval1 %= lval2;
12966                     break;
12967
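                // Note: the MinLong / -1 bail-out below is also applied to the unsigned cases,
                // presumably just to be conservative, since unsigned division cannot overflow.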
12968                 case GT_UDIV:
12969                     if (!lval2)
12970                     {
12971                         return tree;
12972                     }
12973                     if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
12974                     {
12975                         return tree;
12976                     }
12977                     lval1 = UINT64(lval1) / UINT64(lval2);
12978                     break;
12979
12980                 case GT_UMOD:
12981                     if (!lval2)
12982                     {
12983                         return tree;
12984                     }
12985                     if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
12986                     {
12987                         return tree;
12988                     }
12989                     lval1 = UINT64(lval1) % UINT64(lval2);
12990                     break;
12991                 default:
12992                     return tree;
12993             }
12994
12995         CNS_LONG:
12996
12997 #ifdef DEBUG
12998             if (verbose)
12999             {
13000                 printf("\nFolding long operator with constant nodes into a constant:\n");
13001                 gtDispTree(tree);
13002             }
13003 #endif
13004             assert((GenTree::s_gtNodeSizes[GT_CNS_NATIVELONG] == TREE_NODE_SZ_SMALL) ||
13005                    (tree->gtDebugFlags & GTF_DEBUG_NODE_LARGE));
13006
13007             tree->ChangeOperConst(GT_CNS_NATIVELONG);
13008             tree->gtIntConCommon.SetLngValue(lval1);
13009             if (vnStore != nullptr)
13010             {
13011                 fgValueNumberTreeConst(tree);
13012             }
13013
13014 #ifdef DEBUG
13015             if (verbose)
13016             {
13017                 printf("Bashed to long constant:\n");
13018                 gtDispTree(tree);
13019             }
13020 #endif
13021             goto DONE;
13022
13023         /*-------------------------------------------------------------------------
13024          * Fold constant FLOAT or DOUBLE binary operator
13025          */
13026
13027         case TYP_FLOAT:
13028         case TYP_DOUBLE:
13029
13030             if (tree->gtOverflowEx())
13031             {
13032                 return tree;
13033             }
13034
13035             assert(op1->gtOper == GT_CNS_DBL);
13036             d1 = op1->gtDblCon.gtDconVal;
13037
13038             assert(varTypeIsFloating(op2->gtType));
13039             assert(op2->gtOper == GT_CNS_DBL);
13040             d2 = op2->gtDblCon.gtDconVal;
13041
13042             /* Special case - check if we have NaN operands.
13043              * For ordered comparisons (GTF_RELOP_NAN_UN not set) the result is always false - return 0.
13044              * For unordered comparisons (i.e. the GTF_RELOP_NAN_UN flag is set)
13045              * the result is always true - return 1. */
13046
13047             if (_isnan(d1) || _isnan(d2))
13048             {
13049 #ifdef DEBUG
13050                 if (verbose)
13051                 {
13052                     printf("Double operand(s) is NaN\n");
13053                 }
13054 #endif
13055                 if (tree->OperKind() & GTK_RELOP)
13056                 {
13057                     if (tree->gtFlags & GTF_RELOP_NAN_UN)
13058                     {
13059                         /* Unordered comparison with NaN always succeeds */
13060                         i1 = 1;
13061                         goto FOLD_COND;
13062                     }
13063                     else
13064                     {
13065                         /* Normal comparison with NaN always fails */
13066                         i1 = 0;
13067                         goto FOLD_COND;
13068                     }
13069                 }
13070             }
13071
13072             switch (tree->gtOper)
13073             {
13074                 case GT_EQ:
13075                     i1 = (d1 == d2);
13076                     goto FOLD_COND;
13077                 case GT_NE:
13078                     i1 = (d1 != d2);
13079                     goto FOLD_COND;
13080
13081                 case GT_LT:
13082                     i1 = (d1 < d2);
13083                     goto FOLD_COND;
13084                 case GT_LE:
13085                     i1 = (d1 <= d2);
13086                     goto FOLD_COND;
13087                 case GT_GE:
13088                     i1 = (d1 >= d2);
13089                     goto FOLD_COND;
13090                 case GT_GT:
13091                     i1 = (d1 > d2);
13092                     goto FOLD_COND;
13093
13094 #if FEATURE_STACK_FP_X87
13095                 case GT_ADD:
13096                     d1 += d2;
13097                     break;
13098                 case GT_SUB:
13099                     d1 -= d2;
13100                     break;
13101                 case GT_MUL:
13102                     d1 *= d2;
13103                     break;
13104                 case GT_DIV:
13105                     if (!d2)
13106                         return tree;
13107                     d1 /= d2;
13108                     break;
13109 #else  //! FEATURE_STACK_FP_X87
13110                 // non-x86 arch: floating point arithmetic should be done in the declared
13111                 // precision while doing constant folding. For this reason, even though TYP_FLOAT
13112                 // constants are stored as double constants, they should be converted back to float
13113                 // when performing float arithmetic.  Here is an example case where performing
13114                 // the arithmetic in double precision would lead to incorrect
13115                 // results.
13116                 //
13117                 // Example:
13118                 // float a = float.MaxValue;
13119                 // float b = a*a;   This will produce +inf in single precision and 1.1579207543382391e+077 in double
13120                 //                  precision.
13121                 // float c = b/b;   This will produce NaN in single precision and 1 in double precision.
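                // To that end, forceCastToFloat() below narrows each double constant to float precision
                // so that the ADD/SUB/MUL/DIV is effectively evaluated in single precision.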
13122                 case GT_ADD:
13123                     if (op1->TypeGet() == TYP_FLOAT)
13124                     {
13125                         f1 = forceCastToFloat(d1);
13126                         f2 = forceCastToFloat(d2);
13127                         d1 = f1 + f2;
13128                     }
13129                     else
13130                     {
13131                         d1 += d2;
13132                     }
13133                     break;
13134
13135                 case GT_SUB:
13136                     if (op1->TypeGet() == TYP_FLOAT)
13137                     {
13138                         f1 = forceCastToFloat(d1);
13139                         f2 = forceCastToFloat(d2);
13140                         d1 = f1 - f2;
13141                     }
13142                     else
13143                     {
13144                         d1 -= d2;
13145                     }
13146                     break;
13147
13148                 case GT_MUL:
13149                     if (op1->TypeGet() == TYP_FLOAT)
13150                     {
13151                         f1 = forceCastToFloat(d1);
13152                         f2 = forceCastToFloat(d2);
13153                         d1 = f1 * f2;
13154                     }
13155                     else
13156                     {
13157                         d1 *= d2;
13158                     }
13159                     break;
13160
13161                 case GT_DIV:
13162                     if (!d2)
13163                     {
13164                         return tree;
13165                     }
13166                     if (op1->TypeGet() == TYP_FLOAT)
13167                     {
13168                         f1 = forceCastToFloat(d1);
13169                         f2 = forceCastToFloat(d2);
13170                         d1 = f1 / f2;
13171                     }
13172                     else
13173                     {
13174                         d1 /= d2;
13175                     }
13176                     break;
13177 #endif //! FEATURE_STACK_FP_X87
13178
13179                 default:
13180                     return tree;
13181             }
13182
13183         CNS_DOUBLE:
13184
13185 #ifdef DEBUG
13186             if (verbose)
13187             {
13188                 printf("\nFolding fp operator with constant nodes into a fp constant:\n");
13189                 gtDispTree(tree);
13190             }
13191 #endif
13192
13193             assert((GenTree::s_gtNodeSizes[GT_CNS_DBL] == TREE_NODE_SZ_SMALL) ||
13194                    (tree->gtDebugFlags & GTF_DEBUG_NODE_LARGE));
13195
13196             tree->ChangeOperConst(GT_CNS_DBL);
13197             tree->gtDblCon.gtDconVal = d1;
13198             if (vnStore != nullptr)
13199             {
13200                 fgValueNumberTreeConst(tree);
13201             }
13202 #ifdef DEBUG
13203             if (verbose)
13204             {
13205                 printf("Bashed to fp constant:\n");
13206                 gtDispTree(tree);
13207             }
13208 #endif
13209             goto DONE;
13210
13211         default:
13212             /* not a foldable typ */
13213             return tree;
13214     }
13215
13216 //-------------------------------------------------------------------------
13217
13218 DONE:
13219
13220     /* Make sure no side effect flags are set on this constant node */
13221
13222     tree->gtFlags &= ~GTF_ALL_EFFECT;
13223
13224     return tree;
13225 }
13226 #ifdef _PREFAST_
13227 #pragma warning(pop)
13228 #endif
13229
13230 /*****************************************************************************
13231  *
13232  *  Create an assignment of the given value to a temp.
13233  */
13234
13235 GenTreePtr Compiler::gtNewTempAssign(unsigned tmp, GenTreePtr val)
13236 {
13237     LclVarDsc* varDsc = lvaTable + tmp;
13238
13239     if (varDsc->TypeGet() == TYP_I_IMPL && val->TypeGet() == TYP_BYREF)
13240     {
13241         impBashVarAddrsToI(val);
13242     }
13243
13244     var_types valTyp = val->TypeGet();
13245     if (val->OperGet() == GT_LCL_VAR && lvaTable[val->gtLclVar.gtLclNum].lvNormalizeOnLoad())
13246     {
13247         valTyp = lvaGetRealType(val->gtLclVar.gtLclNum);
13248         val    = gtNewLclvNode(val->gtLclVar.gtLclNum, valTyp, val->gtLclVar.gtLclILoffs);
13249     }
13250     var_types dstTyp = varDsc->TypeGet();
13251
13252     /* If the variable's lvType is not yet set then set it here */
13253     if (dstTyp == TYP_UNDEF)
13254     {
13255         varDsc->lvType = dstTyp = genActualType(valTyp);
13256         if (varTypeIsGC(dstTyp))
13257         {
13258             varDsc->lvStructGcCount = 1;
13259         }
13260 #if FEATURE_SIMD
13261         else if (varTypeIsSIMD(dstTyp))
13262         {
13263             varDsc->lvSIMDType = 1;
13264         }
13265 #endif
13266     }
13267
13268 #ifdef DEBUG
13269     /* Make sure the actual types match               */
13270     if (genActualType(valTyp) != genActualType(dstTyp))
13271     {
13272         // Plus some other exceptions that are apparently legal:
13273         // 1) TYP_REF or BYREF = TYP_I_IMPL
13274         bool ok = false;
13275         if (varTypeIsGC(dstTyp) && (valTyp == TYP_I_IMPL))
13276         {
13277             ok = true;
13278         }
13279         // 2) TYP_DOUBLE = TYP_FLOAT or TYP_FLOAT = TYP_DOUBLE
13280         else if (varTypeIsFloating(dstTyp) && varTypeIsFloating(valTyp))
13281         {
13282             ok = true;
13283         }
13284
13285         if (!ok)
13286         {
13287             gtDispTree(val);
13288             assert(!"Incompatible types for gtNewTempAssign");
13289         }
13290     }
13291 #endif
13292
13293     // Floating Point assignments can be created during inlining
13294     // see "Zero init inlinee locals:" in fgInlinePrependStatements
13295     // thus we may need to set compFloatingPointUsed to true here.
13296     //
13297     if (varTypeIsFloating(dstTyp) && (compFloatingPointUsed == false))
13298     {
13299         compFloatingPointUsed = true;
13300     }
13301
13302     /* Create the assignment node */
13303
13304     GenTreePtr asg;
13305     GenTreePtr dest = gtNewLclvNode(tmp, dstTyp);
13306     dest->gtFlags |= GTF_VAR_DEF;
13307
13308     // With first-class structs, we should be propagating the class handle on all non-primitive
13309     // struct types. We don't have a convenient way to do that for all SIMD temps, since some
13310     // internal trees use SIMD types that are not used by the input IL. In this case, we allow
13311     // a null type handle and derive the necessary information about the type from its varType.
13312     CORINFO_CLASS_HANDLE structHnd = gtGetStructHandleIfPresent(val);
13313     if (varTypeIsStruct(valTyp) && ((structHnd != NO_CLASS_HANDLE) || (varTypeIsSIMD(valTyp))))
13314     {
13315         // The GT_OBJ may be a child of a GT_COMMA.
13316         GenTreePtr valx = val->gtEffectiveVal(/*commaOnly*/ true);
13317
13318         if (valx->gtOper == GT_OBJ)
13319         {
13320             assert(structHnd != nullptr);
13321             lvaSetStruct(tmp, structHnd, false);
13322         }
13323         dest->gtFlags |= GTF_DONT_CSE;
13324         valx->gtFlags |= GTF_DONT_CSE;
13325         asg = impAssignStruct(dest, val, structHnd, (unsigned)CHECK_SPILL_NONE);
13326     }
13327     else
13328     {
13329         asg = gtNewAssignNode(dest, val);
13330     }
13331
13332 #ifndef LEGACY_BACKEND
13333     if (compRationalIRForm)
13334     {
13335         Rationalizer::RewriteAssignmentIntoStoreLcl(asg->AsOp());
13336     }
13337 #endif // !LEGACY_BACKEND
13338
13339     return asg;
13340 }
13341
13342 /*****************************************************************************
13343  *
13344  *  Create a helper call to access a COM field (iff 'assg' is non-zero this is
13345  *  an assignment and 'assg' is the new value).
13346  */
13347
13348 GenTreePtr Compiler::gtNewRefCOMfield(GenTreePtr              objPtr,
13349                                       CORINFO_RESOLVED_TOKEN* pResolvedToken,
13350                                       CORINFO_ACCESS_FLAGS    access,
13351                                       CORINFO_FIELD_INFO*     pFieldInfo,
13352                                       var_types               lclTyp,
13353                                       CORINFO_CLASS_HANDLE    structType,
13354                                       GenTreePtr              assg)
13355 {
13356     assert(pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER ||
13357            pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_ADDR_HELPER ||
13358            pFieldInfo->fieldAccessor == CORINFO_FIELD_STATIC_ADDR_HELPER);
13359
13360     /* If we can't access it directly, we need to call a helper function */
13361     GenTreeArgList* args       = nullptr;
13362     var_types       helperType = TYP_BYREF;
13363
13364     if (pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER)
13365     {
13366         if (access & CORINFO_ACCESS_SET)
13367         {
13368             assert(assg != nullptr);
13369             // helper needs pointer to struct, not struct itself
13370             if (pFieldInfo->helper == CORINFO_HELP_SETFIELDSTRUCT)
13371             {
13372                 assert(structType != nullptr);
13373                 assg = impGetStructAddr(assg, structType, (unsigned)CHECK_SPILL_ALL, true);
13374             }
13375             else if (lclTyp == TYP_DOUBLE && assg->TypeGet() == TYP_FLOAT)
13376             {
13377                 assg = gtNewCastNode(TYP_DOUBLE, assg, TYP_DOUBLE);
13378             }
13379             else if (lclTyp == TYP_FLOAT && assg->TypeGet() == TYP_DOUBLE)
13380             {
13381                 assg = gtNewCastNode(TYP_FLOAT, assg, TYP_FLOAT);
13382             }
13383
13384             args       = gtNewArgList(assg);
13385             helperType = TYP_VOID;
13386         }
13387         else if (access & CORINFO_ACCESS_GET)
13388         {
13389             helperType = lclTyp;
13390
13391             // The calling convention for the helper does not take into
13392             // account optimization of primitive structs.
13393             if ((pFieldInfo->helper == CORINFO_HELP_GETFIELDSTRUCT) && !varTypeIsStruct(lclTyp))
13394             {
13395                 helperType = TYP_STRUCT;
13396             }
13397         }
13398     }
13399
13400     if (pFieldInfo->helper == CORINFO_HELP_GETFIELDSTRUCT || pFieldInfo->helper == CORINFO_HELP_SETFIELDSTRUCT)
13401     {
13402         assert(pFieldInfo->structType != nullptr);
13403         args = gtNewListNode(gtNewIconEmbClsHndNode(pFieldInfo->structType), args);
13404     }
13405
13406     GenTreePtr fieldHnd = impTokenToHandle(pResolvedToken);
13407     if (fieldHnd == nullptr)
13408     { // compDonotInline()
13409         return nullptr;
13410     }
13411
13412     args = gtNewListNode(fieldHnd, args);
13413
13414     // If it's a static field, we shouldn't have an object node
13415     // If it's an instance field, we have an object node
13416     assert((pFieldInfo->fieldAccessor != CORINFO_FIELD_STATIC_ADDR_HELPER) ^ (objPtr == nullptr));
13417
13418     if (objPtr != nullptr)
13419     {
13420         args = gtNewListNode(objPtr, args);
13421     }
13422
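    // Each gtNewListNode call above prepends to the front, so at this point the argument list
    // is (objPtr?, fieldHnd, structType?, value?), with the optional entries present as built above.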
13423     GenTreePtr tree = gtNewHelperCallNode(pFieldInfo->helper, genActualType(helperType), 0, args);
13424
13425     if (pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER)
13426     {
13427         if (access & CORINFO_ACCESS_GET)
13428         {
13429             if (pFieldInfo->helper == CORINFO_HELP_GETFIELDSTRUCT)
13430             {
13431                 if (!varTypeIsStruct(lclTyp))
13432                 {
13433                     // get the result as primitive type
13434                     tree = impGetStructAddr(tree, structType, (unsigned)CHECK_SPILL_ALL, true);
13435                     tree = gtNewOperNode(GT_IND, lclTyp, tree);
13436                 }
13437             }
13438             else if (varTypeIsIntegral(lclTyp) && genTypeSize(lclTyp) < genTypeSize(TYP_INT))
13439             {
13440                 // The helper does not extend the small return types.
13441                 tree = gtNewCastNode(genActualType(lclTyp), tree, lclTyp);
13442             }
13443         }
13444     }
13445     else
13446     {
13447         // OK, now do the indirection
13448         if (access & CORINFO_ACCESS_GET)
13449         {
13450             if (varTypeIsStruct(lclTyp))
13451             {
13452                 tree = gtNewObjNode(structType, tree);
13453             }
13454             else
13455             {
13456                 tree = gtNewOperNode(GT_IND, lclTyp, tree);
13457             }
13458             tree->gtFlags |= (GTF_EXCEPT | GTF_GLOB_REF);
13459         }
13460         else if (access & CORINFO_ACCESS_SET)
13461         {
13462             if (varTypeIsStruct(lclTyp))
13463             {
13464                 tree = impAssignStructPtr(tree, assg, structType, (unsigned)CHECK_SPILL_ALL);
13465             }
13466             else
13467             {
13468                 tree = gtNewOperNode(GT_IND, lclTyp, tree);
13469                 tree->gtFlags |= (GTF_EXCEPT | GTF_GLOB_REF | GTF_IND_TGTANYWHERE);
13470                 tree = gtNewAssignNode(tree, assg);
13471             }
13472         }
13473     }
13474
13475     return (tree);
13476 }
13477
13478 /*****************************************************************************
13479  *
13480  *  Return true if the given node (excluding its child trees) contains side effects.
13481  *  Note that it does not recurse; children need to be handled separately.
13482  *  It may return false even if the node has GTF_SIDE_EFFECT (because of its children).
13483  *
13484  *  Similar to OperMayThrow(), but it handles GT_CALLs specially and also considers
13485  *  assignments.
13486  */
13487
13488 bool Compiler::gtNodeHasSideEffects(GenTreePtr tree, unsigned flags)
13489 {
13490     if (flags & GTF_ASG)
13491     {
13492         if ((tree->OperKind() & GTK_ASGOP) ||
13493             (tree->gtOper == GT_INITBLK || tree->gtOper == GT_COPYBLK || tree->gtOper == GT_COPYOBJ))
13494         {
13495             return true;
13496         }
13497     }
13498
13499     // Are there only GTF_CALL side effects remaining? (and no other side effect kinds)
13500     if (flags & GTF_CALL)
13501     {
13502         if (tree->OperGet() == GT_CALL)
13503         {
13504             // Generally all GT_CALL nodes are considered to have side-effects.
13505             // But we may have a helper call that doesn't have any important side effects.
13506             //
13507             if (tree->gtCall.gtCallType == CT_HELPER)
13508             {
13509                 // But if this tree is a helper call we may not care about the side-effects
13510                 //
13511                 CorInfoHelpFunc helper = eeGetHelperNum(tree->AsCall()->gtCallMethHnd);
13512
13513                 // We definitely care about the side effects if MutatesHeap is true
13514                 //
13515                 if (s_helperCallProperties.MutatesHeap(helper))
13516                 {
13517                     return true;
13518                 }
13519
13520                 // with GTF_PERSISTENT_SIDE_EFFECTS_IN_CSE we will CSE helper calls that can run cctors.
13521                 //
13522                 if ((flags != GTF_PERSISTENT_SIDE_EFFECTS_IN_CSE) && (s_helperCallProperties.MayRunCctor(helper)))
13523                 {
13524                     return true;
13525                 }
13526
13527                 // If we also care about exceptions then check if the helper can throw
13528                 //
13529                 if (((flags & GTF_EXCEPT) != 0) && !s_helperCallProperties.NoThrow(helper))
13530                 {
13531                     return true;
13532                 }
13533
13534                 // If this is a Pure helper call or an allocator (that will not need to run a finalizer)
13535                 // then we don't need to preserve the side effects (of this call -- we may care about those of the
13536                 // arguments).
13537                 if (s_helperCallProperties.IsPure(helper) ||
13538                     (s_helperCallProperties.IsAllocator(helper) && !s_helperCallProperties.MayFinalize(helper)))
13539                 {
13540                     GenTreeCall* call = tree->AsCall();
13541                     for (GenTreeArgList* args = call->gtCallArgs; args != nullptr; args = args->Rest())
13542                     {
13543                         if (gtTreeHasSideEffects(args->Current(), flags))
13544                         {
13545                             return true;
13546                         }
13547                     }
13548                     // I'm a little worried that args that assign to temps that are late args will look like
13549                     // side effects...but better to be conservative for now.
13550                     for (GenTreeArgList* args = call->gtCallLateArgs; args != nullptr; args = args->Rest())
13551                     {
13552                         if (gtTreeHasSideEffects(args->Current(), flags))
13553                         {
13554                             return true;
13555                         }
13556                     }
13557                     // Otherwise:
13558                     return false;
13559                 }
13560             }
13561
13562             // Otherwise the GT_CALL is considered to have side-effects.
13563             return true;
13564         }
13565     }
13566
13567     if (flags & GTF_EXCEPT)
13568     {
13569         if (tree->OperMayThrow())
13570         {
13571             return true;
13572         }
13573     }
13574
13575     // Expressions declared as CSE by (e.g.) hoisting code are considered to have relevant side
13576     // effects (if we care about GTF_MAKE_CSE).
13577     if ((flags & GTF_MAKE_CSE) && (tree->gtFlags & GTF_MAKE_CSE))
13578     {
13579         return true;
13580     }
13581
13582     return false;
13583 }
13584
13585 /*****************************************************************************
13586  * Returns true if the expr tree has any side effects.
13587  */
13588
13589 bool Compiler::gtTreeHasSideEffects(GenTreePtr tree, unsigned flags /* = GTF_SIDE_EFFECT*/)
13590 {
13591     // These are the side effect flags that we care about for this tree
13592     unsigned sideEffectFlags = tree->gtFlags & flags;
13593
13594     // Does this tree have any Side-effect flags set that we care about?
13595     if (sideEffectFlags == 0)
13596     {
13597         // no it doesn't..
13598         return false;
13599     }
13600
13601     if (sideEffectFlags == GTF_CALL)
13602     {
13603         if (tree->OperGet() == GT_CALL)
13604         {
13605             // Generally all trees that contain GT_CALL nodes are considered to have side-effects.
13606             //
13607             if (tree->gtCall.gtCallType == CT_HELPER)
13608             {
13609                 // If this node is a helper call we may not care about the side-effects.
13610                 // Note that gtNodeHasSideEffects checks the side effects of the helper itself
13611                 // as well as the side effects of its arguments.
13612                 return gtNodeHasSideEffects(tree, flags);
13613             }
13614         }
13615         else if (tree->OperGet() == GT_INTRINSIC)
13616         {
13617             if (gtNodeHasSideEffects(tree, flags))
13618             {
13619                 return true;
13620             }
13621
13622             if (gtNodeHasSideEffects(tree->gtOp.gtOp1, flags))
13623             {
13624                 return true;
13625             }
13626
13627             if ((tree->gtOp.gtOp2 != nullptr) && gtNodeHasSideEffects(tree->gtOp.gtOp2, flags))
13628             {
13629                 return true;
13630             }
13631
13632             return false;
13633         }
13634     }
13635
13636     return true;
13637 }
13638
13639 GenTreePtr Compiler::gtBuildCommaList(GenTreePtr list, GenTreePtr expr)
13640 {
13641     // 'list' starts off as null,
13642     //        and when it is null we haven't started the list yet.
13643     //
13644     if (list != nullptr)
13645     {
13646         // Create a GT_COMMA that appends 'expr' in front of the remaining set of expressions in (*list)
13647         GenTreePtr result = gtNewOperNode(GT_COMMA, TYP_VOID, expr, list);
13648
13649         // Set the flags in the comma node
13650         result->gtFlags |= (list->gtFlags & GTF_ALL_EFFECT);
13651         result->gtFlags |= (expr->gtFlags & GTF_ALL_EFFECT);
13652
13653         // 'list' and 'expr' should have valuenumbers defined for both or for neither one
13654         noway_assert(list->gtVNPair.BothDefined() == expr->gtVNPair.BothDefined());
13655
13656         // Set the ValueNumber 'gtVNPair' for the new GT_COMMA node
13657         //
13658         if (expr->gtVNPair.BothDefined())
13659         {
13660             // The result of a GT_COMMA node is op2, the normal value number is op2vnp
13661             // But we also need to include the union of side effects from op1 and op2.
13662             // we compute this value into exceptions_vnp.
13663             ValueNumPair op1vnp;
13664             ValueNumPair op1Xvnp = ValueNumStore::VNPForEmptyExcSet();
13665             ValueNumPair op2vnp;
13666             ValueNumPair op2Xvnp = ValueNumStore::VNPForEmptyExcSet();
13667
13668             vnStore->VNPUnpackExc(expr->gtVNPair, &op1vnp, &op1Xvnp);
13669             vnStore->VNPUnpackExc(list->gtVNPair, &op2vnp, &op2Xvnp);
13670
13671             ValueNumPair exceptions_vnp = ValueNumStore::VNPForEmptyExcSet();
13672
13673             exceptions_vnp = vnStore->VNPExcSetUnion(exceptions_vnp, op1Xvnp);
13674             exceptions_vnp = vnStore->VNPExcSetUnion(exceptions_vnp, op2Xvnp);
13675
13676             result->gtVNPair = vnStore->VNPWithExc(op2vnp, exceptions_vnp);
13677         }
13678
13679         return result;
13680     }
13681     else
13682     {
13683         // The 'expr' will start the list of expressions
13684         return expr;
13685     }
13686 }
13687
13688 /*****************************************************************************
13689  *
13690  *  Extracts side effects from the given expression
13691  *  and appends them to a given list (actually a GT_COMMA list)
13692  *  If 'ignoreRoot' is specified, the method doesn't treat the top-level
13693  *  tree node as having a side effect.
13694  */
13695
13696 void Compiler::gtExtractSideEffList(GenTreePtr  expr,
13697                                     GenTreePtr* pList,
13698                                     unsigned    flags /* = GTF_SIDE_EFFECT*/,
13699                                     bool        ignoreRoot /* = false */)
13700 {
13701     assert(expr);
13702     assert(expr->gtOper != GT_STMT);
13703
13704     /* If no side effect in the expression return */
13705
13706     if (!gtTreeHasSideEffects(expr, flags))
13707     {
13708         return;
13709     }
13710
13711     genTreeOps oper = expr->OperGet();
13712     unsigned   kind = expr->OperKind();
13713
13714     // Look for any side effects that we care about
13715     //
13716     if (!ignoreRoot && gtNodeHasSideEffects(expr, flags))
13717     {
13718         // Add the side effect to the list and return
13719         //
13720         *pList = gtBuildCommaList(*pList, expr);
13721         return;
13722     }
13723
13724     if (kind & GTK_LEAF)
13725     {
13726         return;
13727     }
13728
13729     if (oper == GT_LOCKADD || oper == GT_XADD || oper == GT_XCHG || oper == GT_CMPXCHG)
13730     {
13731         // XADD both adds to the memory location and also fetches the old value.  If we only need the side
13732         // effect of this instruction, change it into a GT_LOCKADD node (the add only)
13733         if (oper == GT_XADD)
13734         {
13735             expr->gtOper = GT_LOCKADD;
13736             expr->gtType = TYP_VOID;
13737         }
13738
13739         // These operations have side effects that must be kept
13740         *pList = gtBuildCommaList(*pList, expr);
13741         return;
13742     }
13743
13744     if (kind & GTK_SMPOP)
13745     {
13746         GenTreePtr op1 = expr->gtOp.gtOp1;
13747         GenTreePtr op2 = expr->gtGetOp2();
13748
13749         if (flags & GTF_EXCEPT)
13750         {
13751             // Special case - GT_ADDR of GT_IND nodes of TYP_STRUCT
13752             // have to be kept together
13753
13754             if (oper == GT_ADDR && op1->OperIsIndir() && op1->gtType == TYP_STRUCT)
13755             {
13756                 *pList = gtBuildCommaList(*pList, expr);
13757
13758 #ifdef DEBUG
13759                 if (verbose)
13760                 {
13761                     printf("Keep the GT_ADDR and GT_IND together:\n");
13762                 }
13763 #endif
13764                 return;
13765             }
13766         }
13767
13768         /* Continue searching for side effects in the subtrees of the expression
13769          * NOTE: Be careful to preserve the right ordering,
13770          * as side effects are prepended to the list */
13775
13776         if (expr->gtFlags & GTF_REVERSE_OPS)
13777         {
13778             assert(oper != GT_COMMA);
13779             if (op1)
13780             {
13781                 gtExtractSideEffList(op1, pList, flags);
13782             }
13783             if (op2)
13784             {
13785                 gtExtractSideEffList(op2, pList, flags);
13786             }
13787         }
13788         else
13789         {
13790             if (op2)
13791             {
13792                 gtExtractSideEffList(op2, pList, flags);
13793             }
13794             if (op1)
13795             {
13796                 gtExtractSideEffList(op1, pList, flags);
13797             }
13798         }
13799     }
13800
13801     if (expr->OperGet() == GT_CALL)
13802     {
13803         // Generally all GT_CALL nodes are considered to have side-effects.
13804         // So if we get here it must be a Helper call that we decided does
13805         // not have side effects that we needed to keep
13806         //
13807         assert(expr->gtCall.gtCallType == CT_HELPER);
13808
13809         // We can remove this Helper call, but there still could be
13810         // side-effects in the arguments that we may need to keep
13811         //
13812         GenTreePtr args;
13813         for (args = expr->gtCall.gtCallArgs; args; args = args->gtOp.gtOp2)
13814         {
13815             assert(args->IsList());
13816             gtExtractSideEffList(args->Current(), pList, flags);
13817         }
13818         for (args = expr->gtCall.gtCallLateArgs; args; args = args->gtOp.gtOp2)
13819         {
13820             assert(args->IsList());
13821             gtExtractSideEffList(args->Current(), pList, flags);
13822         }
13823     }
13824
13825     if (expr->OperGet() == GT_ARR_BOUNDS_CHECK
13826 #ifdef FEATURE_SIMD
13827         || expr->OperGet() == GT_SIMD_CHK
13828 #endif // FEATURE_SIMD
13829         )
13830     {
13831         gtExtractSideEffList(expr->AsBoundsChk()->gtArrLen, pList, flags);
13832         gtExtractSideEffList(expr->AsBoundsChk()->gtIndex, pList, flags);
13833     }
13834 }
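
// Example (a sketch): suppose 'expr' is ADD(IND(addr), CNS_INT 1) and its value is no longer
// needed. Extracting with the default GTF_SIDE_EFFECT flags:
//
//     GenTreePtr sideEffList = nullptr;
//     gtExtractSideEffList(expr, &sideEffList);
//
// The pure ADD and the constant are dropped, while the IND(addr) subtree is kept (assuming the
// indirection is not known to be non-faulting), so 'sideEffList' ends up being just IND(addr).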
13835
13836 /*****************************************************************************
13837  *
13838  *  For debugging only - displays a tree node list and makes sure all the
13839  *  links are correctly set.
13840  */
13841
13842 #ifdef DEBUG
13843
13844 void dispNodeList(GenTreePtr list, bool verbose)
13845 {
13846     GenTreePtr last = nullptr;
13847     GenTreePtr next;
13848
13849     if (!list)
13850     {
13851         return;
13852     }
13853
13854     for (;;)
13855     {
13856         next = list->gtNext;
13857
13858         if (verbose)
13859         {
13860             printf("%08X -> %08X -> %08X\n", last, list, next);
13861         }
13862
13863         assert(!last || last->gtNext == list);
13864
13865         assert(next == nullptr || next->gtPrev == list);
13866
13867         if (!next)
13868         {
13869             break;
13870         }
13871
13872         last = list;
13873         list = next;
13874     }
13875     printf(""); // null string means flush
13876 }
13877
13878 /*****************************************************************************
13879  * Callback to assert that the nodes of a qmark-colon subtree are marked
13880  */
13881
13882 /* static */
13883 Compiler::fgWalkResult Compiler::gtAssertColonCond(GenTreePtr* pTree, fgWalkData* data)
13884 {
13885     assert(data->pCallbackData == nullptr);
13886
13887     assert((*pTree)->gtFlags & GTF_COLON_COND);
13888
13889     return WALK_CONTINUE;
13890 }
13891 #endif // DEBUG
13892
13893 /*****************************************************************************
13894  * Callback to mark the nodes of a qmark-colon subtree that are conditionally
13895  * executed.
13896  */
13897
13898 /* static */
13899 Compiler::fgWalkResult Compiler::gtMarkColonCond(GenTreePtr* pTree, fgWalkData* data)
13900 {
13901     assert(data->pCallbackData == nullptr);
13902
13903     (*pTree)->gtFlags |= GTF_COLON_COND;
13904
13905     return WALK_CONTINUE;
13906 }
13907
13908 /*****************************************************************************
13909  * Callback to clear the conditionally-executed flags of nodes that will no longer
13910  * be conditionally executed. Note that when we find another colon we must stop,
13911  * as the nodes below this one WILL be conditionally executed. This callback
13912  * is called when folding a qmark condition (i.e., the condition is constant).
13913  */
13914
13915 /* static */
13916 Compiler::fgWalkResult Compiler::gtClearColonCond(GenTreePtr* pTree, fgWalkData* data)
13917 {
13918     GenTreePtr tree = *pTree;
13919
13920     assert(data->pCallbackData == nullptr);
13921
13922     if (tree->OperGet() == GT_COLON)
13923     {
13924         // Nodes below this will be conditionally executed.
13925         return WALK_SKIP_SUBTREES;
13926     }
13927
13928     tree->gtFlags &= ~GTF_COLON_COND;
13929     return WALK_CONTINUE;
13930 }
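
// Usage sketch for the callbacks above ('colonSubtree' and 'foldedSubtree' are placeholder
// trees); they are intended to be driven by the pre-order tree walker:
//
//     fgWalkTreePre(&colonSubtree, gtMarkColonCond);   // mark the nodes of a qmark-colon arm
//     fgWalkTreePre(&foldedSubtree, gtClearColonCond); // un-mark nodes after the qmark folds
//
// In DEBUG builds, gtAssertColonCond can be walked the same way to verify that every node in a
// colon arm still carries GTF_COLON_COND.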
13931
13932 struct FindLinkData
13933 {
13934     GenTreePtr  nodeToFind;
13935     GenTreePtr* result;
13936 };
13937
13938 /*****************************************************************************
13939  *
13940  *  Callback used by the tree walker to implement fgFindLink()
13941  */
13942 static Compiler::fgWalkResult gtFindLinkCB(GenTreePtr* pTree, Compiler::fgWalkData* cbData)
13943 {
13944     FindLinkData* data = (FindLinkData*)cbData->pCallbackData;
13945     if (*pTree == data->nodeToFind)
13946     {
13947         data->result = pTree;
13948         return Compiler::WALK_ABORT;
13949     }
13950
13951     return Compiler::WALK_CONTINUE;
13952 }
13953
13954 GenTreePtr* Compiler::gtFindLink(GenTreePtr stmt, GenTreePtr node)
13955 {
13956     assert(stmt->gtOper == GT_STMT);
13957
13958     FindLinkData data = {node, nullptr};
13959
13960     fgWalkResult result = fgWalkTreePre(&stmt->gtStmt.gtStmtExpr, gtFindLinkCB, &data);
13961
13962     if (result == WALK_ABORT)
13963     {
13964         assert(data.nodeToFind == *data.result);
13965         return data.result;
13966     }
13967     else
13968     {
13969         return nullptr;
13970     }
13971 }
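
// Usage sketch ('oldNode' and 'newNode' are placeholder trees): gtFindLink returns the address
// of the pointer (in the parent node or the statement) that refers to 'oldNode', so the caller
// can splice in a replacement:
//
//     GenTreePtr* link = gtFindLink(stmt, oldNode);
//     if (link != nullptr)
//     {
//         *link = newNode; // rewrite the parent's child pointer
//     }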
13972
13973 /*****************************************************************************
13974  *
13975  *  Callback that checks if a tree node has oper type GT_CATCH_ARG
13976  */
13977
13978 static Compiler::fgWalkResult gtFindCatchArg(GenTreePtr* pTree, Compiler::fgWalkData* /* data */)
13979 {
13980     return ((*pTree)->OperGet() == GT_CATCH_ARG) ? Compiler::WALK_ABORT : Compiler::WALK_CONTINUE;
13981 }
13982
13983 /*****************************************************************************/
13984 bool Compiler::gtHasCatchArg(GenTreePtr tree)
13985 {
13986     if (((tree->gtFlags & GTF_ORDER_SIDEEFF) != 0) && (fgWalkTreePre(&tree, gtFindCatchArg) == WALK_ABORT))
13987     {
13988         return true;
13989     }
13990     return false;
13991 }
13992
13993 //------------------------------------------------------------------------
13994 // gtHasCallOnStack:
13995 //
13996 // Arguments:
13997 //    parentStack: a context (stack of parent nodes)
13998 //
13999 // Return Value:
14000 //     Returns true if any of the parent nodes is a GT_CALL
14001 //
14002 // Assumptions:
14003 //    We have a stack of parent nodes. This generally requires that
14004 //    we are performing a recursive tree walk using struct fgWalkData
14005 //
14006 //------------------------------------------------------------------------
14007 /* static */ bool Compiler::gtHasCallOnStack(GenTreeStack* parentStack)
14008 {
14009     for (int i = 0; i < parentStack->Height(); i++)
14010     {
14011         GenTree* node = parentStack->Index(i);
14012         if (node->OperGet() == GT_CALL)
14013         {
14014             return true;
14015         }
14016     }
14017     return false;
14018 }
14019
14020 //------------------------------------------------------------------------
14021 // gtCheckQuirkAddrExposedLclVar:
14022 //
14023 // Arguments:
14024 //    tree: an address taken GenTree node that is a GT_LCL_VAR
14025 //    parentStack: a context (stack of parent nodes)
14026 //    The 'parentStack' is used to ensure that we are in an argument context.
14027 //
14028 // Return Value:
14029 //    None
14030 //
14031 // Notes:
14032 //    When the allocation size of this LclVar is 32 bits we will quirk the size to 64 bits,
14033 //    because some PInvoke signatures incorrectly specify a ByRef to an INT32
14034 //    when they actually write a SIZE_T or INT64. There are cases where overwriting
14035 //    these extra 4 bytes corrupts some data (such as a saved register) and leads to an A/V,
14036 //    whereas the previous JIT64 codegen did not lead to an A/V.
14037 //
14038 // Assumptions:
14039 //    'tree' is known to be address-taken and we have a stack
14040 //    of parent nodes. Both of these generally require that
14041 //    we are performing a recursive tree walk using struct fgWalkData.
14042 //------------------------------------------------------------------------
14043 void Compiler::gtCheckQuirkAddrExposedLclVar(GenTreePtr tree, GenTreeStack* parentStack)
14044 {
14045 #ifdef _TARGET_64BIT_
14046     // We only need to Quirk for _TARGET_64BIT_
14047
14048     // Do we have a parent node that is a Call?
14049     if (!Compiler::gtHasCallOnStack(parentStack))
14050     {
14051         // No, so we don't apply the Quirk
14052         return;
14053     }
14054     noway_assert(tree->gtOper == GT_LCL_VAR);
14055     unsigned   lclNum  = tree->gtLclVarCommon.gtLclNum;
14056     LclVarDsc* varDsc  = &lvaTable[lclNum];
14057     var_types  vartype = varDsc->TypeGet();
14058
14059     if (varDsc->lvIsParam)
14060     {
14061         // We can't Quirk the size of an incoming parameter
14062         return;
14063     }
14064
14065     // We may need to Quirk the storage size for this LCL_VAR
14066     if (genActualType(vartype) == TYP_INT)
14067     {
14068         varDsc->lvQuirkToLong = true;
14069 #ifdef DEBUG
14070         if (verbose)
14071         {
14072             printf("\nAdding a Quirk for the storage size of LclVar V%02d:", lclNum);
14073             printf(" (%s ==> %s)\n", varTypeName(vartype), varTypeName(TYP_LONG));
14074         }
14075 #endif // DEBUG
14076     }
14077 #endif
14078 }
14079
14080 // Checks to see if we're allowed to optimize Type::op_Equality or Type::op_Inequality on this operand.
14081 // We're allowed to convert to GT_EQ/GT_NE if one of the operands is:
14082 //  1) The result of Object::GetType
14083 //  2) The result of typeof(...)
14084 //  3) A local variable of type RuntimeType.
14085 bool Compiler::gtCanOptimizeTypeEquality(GenTreePtr tree)
14086 {
14087     if (tree->gtOper == GT_CALL)
14088     {
14089         if (tree->gtCall.gtCallType == CT_HELPER)
14090         {
14091             if (gtIsTypeHandleToRuntimeTypeHelper(tree))
14092             {
14093                 return true;
14094             }
14095         }
14096         else if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_SPECIAL_INTRINSIC)
14097         {
14098             if (info.compCompHnd->getIntrinsicID(tree->gtCall.gtCallMethHnd) == CORINFO_INTRINSIC_Object_GetType)
14099             {
14100                 return true;
14101             }
14102         }
14103     }
14104     else if ((tree->gtOper == GT_INTRINSIC) && (tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Object_GetType))
14105     {
14106         return true;
14107     }
14108     else if (tree->gtOper == GT_LCL_VAR)
14109     {
14110         LclVarDsc* lcl = &(lvaTable[tree->gtLclVarCommon.gtLclNum]);
14111         if (lcl->TypeGet() == TYP_REF)
14112         {
14113             if (lcl->lvVerTypeInfo.GetClassHandle() == info.compCompHnd->getBuiltinClass(CLASSID_RUNTIME_TYPE))
14114             {
14115                 return true;
14116             }
14117         }
14118     }
14119     return false;
14120 }
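
// Example of the pattern this enables (a sketch): a comparison such as
// "obj.GetType() == typeof(string)" reaches the JIT roughly as
//
//     GT_CALL(Object::GetType)  ==  GT_CALL(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE)
//
// Both operands satisfy the checks above, so the comparison of RuntimeType objects may be
// rewritten as a cheaper GT_EQ/GT_NE over the underlying type handles.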
14121
14122 bool Compiler::gtIsTypeHandleToRuntimeTypeHelper(GenTreePtr tree)
14123 {
14124     return tree->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE) ||
14125            tree->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE_MAYBENULL);
14126 }
14127
14128 bool Compiler::gtIsActiveCSE_Candidate(GenTreePtr tree)
14129 {
14130     return (optValnumCSE_phase && IS_CSE_INDEX(tree->gtCSEnum));
14131 }
14132
14133 /*****************************************************************************/
14134
14135 struct ComplexityStruct
14136 {
14137     unsigned m_numNodes;
14138     unsigned m_nodeLimit;
14139     ComplexityStruct(unsigned nodeLimit) : m_numNodes(0), m_nodeLimit(nodeLimit)
14140     {
14141     }
14142 };
14143
14144 static Compiler::fgWalkResult ComplexityExceedsWalker(GenTreePtr* pTree, Compiler::fgWalkData* data)
14145 {
14146     ComplexityStruct* pComplexity = (ComplexityStruct*)data->pCallbackData;
14147     if (++pComplexity->m_numNodes > pComplexity->m_nodeLimit)
14148     {
14149         return Compiler::WALK_ABORT;
14150     }
14151     else
14152     {
14153         return Compiler::WALK_CONTINUE;
14154     }
14155 }
14156
14157 bool Compiler::gtComplexityExceeds(GenTreePtr* tree, unsigned limit)
14158 {
14159     ComplexityStruct complexity(limit);
14160     if (fgWalkTreePre(tree, &ComplexityExceedsWalker, &complexity) == WALK_ABORT)
14161     {
14162         return true;
14163     }
14164     else
14165     {
14166         return false;
14167     }
14168 }
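
// Usage sketch (the limit of 100 is just an example value): callers use this to bail out of
// transformations that would be too expensive on very large trees:
//
//     if (gtComplexityExceeds(&stmt->gtStmt.gtStmtExpr, 100))
//     {
//         return; // more than 100 nodes - skip the optimization
//     }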
14169
14170 /*
14171 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
14172 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
14173 XX                                                                           XX
14174 XX                          BasicBlock                                       XX
14175 XX                                                                           XX
14176 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
14177 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
14178 */
14179
14180 #if MEASURE_BLOCK_SIZE
14181 /* static  */
14182 size_t BasicBlock::s_Size;
14183 /* static */
14184 size_t BasicBlock::s_Count;
14185 #endif // MEASURE_BLOCK_SIZE
14186
14187 #ifdef DEBUG
14188 // The max # of tree nodes in any BB
14189 /* static */
14190 unsigned BasicBlock::s_nMaxTrees;
14191 #endif // DEBUG
14192
14193 /*****************************************************************************
14194  *
14195  *  Allocate a basic block but don't append it to the current BB list.
14196  */
14197
14198 BasicBlock* Compiler::bbNewBasicBlock(BBjumpKinds jumpKind)
14199 {
14200     BasicBlock* block;
14201
14202     /* Allocate the block descriptor and zero it out */
14203     assert(fgSafeBasicBlockCreation);
14204
14205     block = new (this, CMK_BasicBlock) BasicBlock;
14206
14207 #if MEASURE_BLOCK_SIZE
14208     BasicBlock::s_Count += 1;
14209     BasicBlock::s_Size += sizeof(*block);
14210 #endif
14211
14212 #ifdef DEBUG
14213     // fgLookupBB() is invalid until fgInitBBLookup() is called again.
14214     fgBBs = (BasicBlock**)0xCDCD;
14215 #endif
14216
14217     // TODO-Throughput: The following memset is pretty expensive - do something else?
14218     // Note that some fields have to be initialized to 0 (like bbFPStateX87)
14219     memset(block, 0, sizeof(*block));
14220
14221     // scopeInfo needs to be able to differentiate between blocks which
14222     // correspond to some instrs (and so may have some LocalVarInfo
14223     // boundaries) and blocks that have been inserted by the JIT.
14224     block->bbCodeOffs    = BAD_IL_OFFSET;
14225     block->bbCodeOffsEnd = BAD_IL_OFFSET;
14226
14227     /* Give the block a number, set the ancestor count and weight */
14228
14229     ++fgBBcount;
14230
14231     if (compIsForInlining())
14232     {
14233         block->bbNum = ++impInlineInfo->InlinerCompiler->fgBBNumMax;
14234     }
14235     else
14236     {
14237         block->bbNum = ++fgBBNumMax;
14238     }
14239
14240 #ifndef LEGACY_BACKEND
14241     if (compRationalIRForm)
14242     {
14243         block->bbFlags |= BBF_IS_LIR;
14244     }
14245 #endif // !LEGACY_BACKEND
14246
14247     block->bbRefs   = 1;
14248     block->bbWeight = BB_UNITY_WEIGHT;
14249
14250     block->bbStkTempsIn  = NO_BASE_TMP;
14251     block->bbStkTempsOut = NO_BASE_TMP;
14252
14253     block->bbEntryState = nullptr;
14254
14255     /* Record the jump kind in the block */
14256
14257     block->bbJumpKind = jumpKind;
14258
14259     if (jumpKind == BBJ_THROW)
14260     {
14261         block->bbSetRunRarely();
14262     }
14263
14264 #ifdef DEBUG
14265     if (verbose)
14266     {
14267         printf("New Basic Block BB%02u [%p] created.\n", block->bbNum, dspPtr(block));
14268     }
14269 #endif
14270
14271     // We will give all the blocks var sets after the number of tracked variables
14272     // is determined and frozen.  After that, if we dynamically create a basic block,
14273     // we will initialize its var sets.
14274     if (fgBBVarSetsInited)
14275     {
14276         VarSetOps::AssignNoCopy(this, block->bbVarUse, VarSetOps::MakeEmpty(this));
14277         VarSetOps::AssignNoCopy(this, block->bbVarDef, VarSetOps::MakeEmpty(this));
14278         VarSetOps::AssignNoCopy(this, block->bbVarTmp, VarSetOps::MakeEmpty(this));
14279         VarSetOps::AssignNoCopy(this, block->bbLiveIn, VarSetOps::MakeEmpty(this));
14280         VarSetOps::AssignNoCopy(this, block->bbLiveOut, VarSetOps::MakeEmpty(this));
14281         VarSetOps::AssignNoCopy(this, block->bbScope, VarSetOps::MakeEmpty(this));
14282     }
14283     else
14284     {
14285         VarSetOps::AssignNoCopy(this, block->bbVarUse, VarSetOps::UninitVal());
14286         VarSetOps::AssignNoCopy(this, block->bbVarDef, VarSetOps::UninitVal());
14287         VarSetOps::AssignNoCopy(this, block->bbVarTmp, VarSetOps::UninitVal());
14288         VarSetOps::AssignNoCopy(this, block->bbLiveIn, VarSetOps::UninitVal());
14289         VarSetOps::AssignNoCopy(this, block->bbLiveOut, VarSetOps::UninitVal());
14290         VarSetOps::AssignNoCopy(this, block->bbScope, VarSetOps::UninitVal());
14291     }
14292
14293     block->bbHeapUse     = false;
14294     block->bbHeapDef     = false;
14295     block->bbHeapLiveIn  = false;
14296     block->bbHeapLiveOut = false;
14297
14298     block->bbHeapSsaPhiFunc = nullptr;
14299     block->bbHeapSsaNumIn   = 0;
14300     block->bbHeapSsaNumOut  = 0;
14301
14302     // Make sure we reserve a NOT_IN_LOOP value that isn't a legal table index.
14303     static_assert_no_msg(MAX_LOOP_NUM < BasicBlock::NOT_IN_LOOP);
14304
14305     block->bbNatLoopNum = BasicBlock::NOT_IN_LOOP;
14306
14307     return block;
14308 }
14309
14310 //------------------------------------------------------------------------------
14311 // containsStatement - return true if the block contains the given statement
14312 //------------------------------------------------------------------------------
14313
14314 bool BasicBlock::containsStatement(GenTree* statement)
14315 {
14316     assert(statement->gtOper == GT_STMT);
14317
14318     GenTree* curr = bbTreeList;
14319     do
14320     {
14321         if (curr == statement)
14322         {
14323             break;
14324         }
14325         curr = curr->gtNext;
14326     } while (curr);
14327     return curr != nullptr;
14328 }
14329
14330 GenTreeStmt* BasicBlock::FirstNonPhiDef()
14331 {
14332     GenTreePtr stmt = bbTreeList;
14333     if (stmt == nullptr)
14334     {
14335         return nullptr;
14336     }
14337     GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
14338     while ((tree->OperGet() == GT_ASG && tree->gtOp.gtOp2->OperGet() == GT_PHI) ||
14339            (tree->OperGet() == GT_STORE_LCL_VAR && tree->gtOp.gtOp1->OperGet() == GT_PHI))
14340     {
14341         stmt = stmt->gtNext;
14342         if (stmt == nullptr)
14343         {
14344             return nullptr;
14345         }
14346         tree = stmt->gtStmt.gtStmtExpr;
14347     }
14348     return stmt->AsStmt();
14349 }
14350
14351 GenTreePtr BasicBlock::FirstNonPhiDefOrCatchArgAsg()
14352 {
14353     GenTreePtr stmt = FirstNonPhiDef();
14354     if (stmt == nullptr)
14355     {
14356         return nullptr;
14357     }
14358     GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
14359     if ((tree->OperGet() == GT_ASG && tree->gtOp.gtOp2->OperGet() == GT_CATCH_ARG) ||
14360         (tree->OperGet() == GT_STORE_LCL_VAR && tree->gtOp.gtOp1->OperGet() == GT_CATCH_ARG))
14361     {
14362         stmt = stmt->gtNext;
14363     }
14364     return stmt;
14365 }
14366
14367 /*****************************************************************************
14368  *
14369  *  Mark a block as rarely run. We also don't want to have a loop in a
14370  *  rarely-run block, so we set its weight to zero.
14371  */
14372
14373 void BasicBlock::bbSetRunRarely()
14374 {
14375     setBBWeight(BB_ZERO_WEIGHT);
14376     if (bbWeight == BB_ZERO_WEIGHT)
14377     {
14378         bbFlags |= BBF_RUN_RARELY; // This block is never/rarely run
14379     }
14380 }
14381
14382 /*****************************************************************************
14383  *
14384  *  Can a BasicBlock be inserted after this one without altering the flowgraph?
14385  */
14386
14387 bool BasicBlock::bbFallsThrough()
14388 {
14389     switch (bbJumpKind)
14390     {
14391
14392         case BBJ_THROW:
14393         case BBJ_EHFINALLYRET:
14394         case BBJ_EHFILTERRET:
14395         case BBJ_EHCATCHRET:
14396         case BBJ_RETURN:
14397         case BBJ_ALWAYS:
14398         case BBJ_LEAVE:
14399         case BBJ_SWITCH:
14400             return false;
14401
14402         case BBJ_NONE:
14403         case BBJ_COND:
14404             return true;
14405
14406         case BBJ_CALLFINALLY:
14407             return ((bbFlags & BBF_RETLESS_CALL) == 0);
14408
14409         default:
14410             assert(!"Unknown bbJumpKind in bbFallsThrough()");
14411             return true;
14412     }
14413 }
14414
14415 unsigned BasicBlock::NumSucc(Compiler* comp)
14416 {
14417     // As described in the spec comment of NumSucc at its declaration, whether "comp" is null determines
14418     // whether NumSucc and GetSucc yield successors of finally blocks.
14419
14420     switch (bbJumpKind)
14421     {
14422
14423         case BBJ_THROW:
14424         case BBJ_RETURN:
14425             return 0;
14426
14427         case BBJ_EHFILTERRET:
14428             if (comp == nullptr)
14429             {
14430                 return 0;
14431             }
14432             else
14433             {
14434                 return 1;
14435             }
14436
14437         case BBJ_EHFINALLYRET:
14438         {
14439             if (comp == nullptr)
14440             {
14441                 return 0;
14442             }
14443             else
14444             {
14445                 // The first block of the handler is labelled with the catch type.
14446                 BasicBlock* hndBeg = comp->fgFirstBlockOfHandler(this);
14447                 if (hndBeg->bbCatchTyp == BBCT_FINALLY)
14448                 {
14449                     return comp->fgNSuccsOfFinallyRet(this);
14450                 }
14451                 else
14452                 {
14453                     assert(hndBeg->bbCatchTyp == BBCT_FAULT); // We can only BBJ_EHFINALLYRET from FINALLY and FAULT.
14454                     // A FAULT block has no successors.
14455                     return 0;
14456                 }
14457             }
14458         }
14459         case BBJ_CALLFINALLY:
14460         case BBJ_ALWAYS:
14461         case BBJ_EHCATCHRET:
14462         case BBJ_LEAVE:
14463         case BBJ_NONE:
14464             return 1;
14465         case BBJ_COND:
14466             if (bbJumpDest == bbNext)
14467             {
14468                 return 1;
14469             }
14470             else
14471             {
14472                 return 2;
14473             }
14474         case BBJ_SWITCH:
14475             if (comp == nullptr)
14476             {
14477                 return bbJumpSwt->bbsCount;
14478             }
14479             else
14480             {
14481                 Compiler::SwitchUniqueSuccSet sd = comp->GetDescriptorForSwitch(this);
14482                 return sd.numDistinctSuccs;
14483             }
14484
14485         default:
14486             unreached();
14487     }
14488 }
14489
14490 BasicBlock* BasicBlock::GetSucc(unsigned i, Compiler* comp)
14491 {
14492     // As described in the spec comment of GetSucc at its declaration, whether "comp" is null determines
14493     // whether NumSucc and GetSucc yield successors of finally blocks.
14494
14495     assert(i < NumSucc(comp)); // Index bounds check.
14496     // printf("bbjk=%d\n", bbJumpKind);
14497     switch (bbJumpKind)
14498     {
14499
14500         case BBJ_THROW:
14501         case BBJ_RETURN:
14502             unreached(); // Should have been covered by assert above.
14503
14504         case BBJ_EHFILTERRET:
14505         {
14506             assert(comp != nullptr); // Or else we're not looking for successors.
14507             BasicBlock* result = comp->fgFirstBlockOfHandler(this);
14508             noway_assert(result == bbJumpDest);
14509             // Handler is the (sole) normal successor of the filter.
14510             return result;
14511         }
14512
14513         case BBJ_EHFINALLYRET:
14514             return comp->fgSuccOfFinallyRet(this, i);
14515
14516         case BBJ_CALLFINALLY:
14517         case BBJ_ALWAYS:
14518         case BBJ_EHCATCHRET:
14519         case BBJ_LEAVE:
14520             return bbJumpDest;
14521
14522         case BBJ_NONE:
14523             return bbNext;
14524         case BBJ_COND:
14525             if (i == 0)
14526             {
14527                 return bbNext;
14528             }
14529             else
14530             {
14531                 assert(i == 1);
14532                 return bbJumpDest;
14533             };
14534         case BBJ_SWITCH:
14535             if (comp == nullptr)
14536             {
14537                 assert(i < bbJumpSwt->bbsCount); // Range check.
14538                 return bbJumpSwt->bbsDstTab[i];
14539             }
14540             else
14541             {
14542                 // Remove duplicates.
14543                 Compiler::SwitchUniqueSuccSet sd = comp->GetDescriptorForSwitch(this);
14544                 assert(i < sd.numDistinctSuccs); // Range check.
14545                 return sd.nonDuplicates[i];
14546             }
14547
14548         default:
14549             unreached();
14550     }
14551 }
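
// Usage sketch ('block' and 'comp' are placeholders): the usual successor-iteration idiom,
// passing the Compiler* so that finally and switch successors are resolved as described above:
//
//     for (unsigned i = 0; i < block->NumSucc(comp); i++)
//     {
//         BasicBlock* succ = block->GetSucc(i, comp);
//         // ... process succ ...
//     }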
14552
14553 // -------------------------------------------------------------------------
14554 // IsRegOptional: Returns true if this gentree node is marked by lowering to
14555 // indicate that codegen can still generate code even if it wasn't allocated
14556 // a register.
14557 bool GenTree::IsRegOptional() const
14558 {
14559 #ifdef LEGACY_BACKEND
14560     return false;
14561 #else
14562     return gtLsraInfo.regOptional;
14563 #endif
14564 }
14565
14566 bool GenTree::IsPhiNode()
14567 {
14568     return (OperGet() == GT_PHI_ARG) || (OperGet() == GT_PHI) || IsPhiDefn();
14569 }
14570
14571 bool GenTree::IsPhiDefn()
14572 {
14573     bool res = ((OperGet() == GT_ASG) && (gtOp.gtOp2 != nullptr) && (gtOp.gtOp2->OperGet() == GT_PHI)) ||
14574                ((OperGet() == GT_STORE_LCL_VAR) && (gtOp.gtOp1 != nullptr) && (gtOp.gtOp1->OperGet() == GT_PHI));
14575     assert(!res || OperGet() == GT_STORE_LCL_VAR || gtOp.gtOp1->OperGet() == GT_LCL_VAR);
14576     return res;
14577 }
14578
14579 bool GenTree::IsPhiDefnStmt()
14580 {
14581     if (OperGet() != GT_STMT)
14582     {
14583         return false;
14584     }
14585     GenTreePtr asg = gtStmt.gtStmtExpr;
14586     return asg->IsPhiDefn();
14587 }
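
// Tree shapes recognized as phi definitions (illustrative sketches; V01 is a placeholder local):
//
//     GT_ASG(GT_LCL_VAR V01, GT_PHI(...))        // HIR form
//     GT_STORE_LCL_VAR<V01>(GT_PHI(...))         // LIR form
//
// IsPhiDefnStmt additionally requires the definition to be wrapped in a GT_STMT node.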
14588
14589 // IsPartialLclFld: Check for a GT_LCL_FLD whose type is a different size than the lclVar.
14590 //
14591 // Arguments:
14592 //    comp      - the Compiler object.
14593 //
14594 // Return Value:
14595 //    Returns "true" iff 'this' is a GT_LCL_FLD or GT_STORE_LCL_FLD on which the type
14596 //    is not the same size as the type of the GT_LCL_VAR
14597
14598 bool GenTree::IsPartialLclFld(Compiler* comp)
14599 {
14600     return ((gtOper == GT_LCL_FLD) &&
14601             (comp->lvaTable[this->gtLclVarCommon.gtLclNum].lvExactSize != genTypeSize(gtType)));
14602 }
14603
14604 bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire)
14605 {
14606     if (OperIsAssignment())
14607     {
14608         if (gtOp.gtOp1->IsLocal())
14609         {
14610             GenTreeLclVarCommon* lclVarTree = gtOp.gtOp1->AsLclVarCommon();
14611             *pLclVarTree                    = lclVarTree;
14612             if (pIsEntire != nullptr)
14613             {
14614                 if (lclVarTree->IsPartialLclFld(comp))
14615                 {
14616                     *pIsEntire = false;
14617                 }
14618                 else
14619                 {
14620                     *pIsEntire = true;
14621                 }
14622             }
14623             return true;
14624         }
14625         else if (gtOp.gtOp1->OperGet() == GT_IND)
14626         {
14627             GenTreePtr indArg = gtOp.gtOp1->gtOp.gtOp1;
14628             return indArg->DefinesLocalAddr(comp, genTypeSize(gtOp.gtOp1->TypeGet()), pLclVarTree, pIsEntire);
14629         }
14630     }
14631     else if (OperIsBlkOp())
14632     {
14633         GenTreePtr destAddr = gtOp.gtOp1->gtOp.gtOp1;
14634         unsigned   width    = 0;
14635         // Do we care about whether this assigns the entire variable?
14636         if (pIsEntire != nullptr)
14637         {
14638             GenTreePtr blockWidth = gtOp.gtOp2;
14639             if (blockWidth->IsCnsIntOrI())
14640             {
14641                 if (blockWidth->IsIconHandle())
14642                 {
14643                     // If it's a handle, it must be a class handle.  We only create such block operations
14644                     // for initialization of struct types, so the type of the argument(s) will match this
14645                     // type, by construction, and be "entire".
14646                     assert(blockWidth->IsIconHandle(GTF_ICON_CLASS_HDL));
14647                     width = comp->info.compCompHnd->getClassSize(
14648                         CORINFO_CLASS_HANDLE(blockWidth->gtIntConCommon.IconValue()));
14649                 }
14650                 else
14651                 {
14652                     ssize_t swidth = blockWidth->AsIntConCommon()->IconValue();
14653                     assert(swidth >= 0);
14654                     // cpblk of size zero exists in the wild (in yacc-generated code in SQL) and is valid IL.
14655                     if (swidth == 0)
14656                     {
14657                         return false;
14658                     }
14659                     width = unsigned(swidth);
14660                 }
14661             }
14662         }
14663         return destAddr->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
14664     }
14665     // Otherwise...
14666     return false;
14667 }
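
// Example (a sketch): for GT_ASG(GT_LCL_FLD<V03, TYP_INT>, ...) where V03 is an 8-byte struct,
// DefinesLocal returns true, sets *pLclVarTree to the GT_LCL_FLD node, and reports
// *pIsEntire == false, because only 4 of V03's 8 bytes are written.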
14668
14669 // Returns true if this GenTree defines a result which is based on the address of a local.
14670 bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire)
14671 {
14672     if (OperGet() == GT_ADDR || OperGet() == GT_LCL_VAR_ADDR)
14673     {
14674         GenTreePtr addrArg = this;
14675         if (OperGet() == GT_ADDR)
14676         {
14677             addrArg = gtOp.gtOp1;
14678         }
14679
14680         if (addrArg->IsLocal() || addrArg->OperIsLocalAddr())
14681         {
14682             GenTreeLclVarCommon* addrArgLcl = addrArg->AsLclVarCommon();
14683             *pLclVarTree                    = addrArgLcl;
14684             if (pIsEntire != nullptr)
14685             {
14686                 unsigned lclOffset = 0;
14687                 if (addrArg->OperIsLocalField())
14688                 {
14689                     lclOffset = addrArg->gtLclFld.gtLclOffs;
14690                 }
14691
14692                 if (lclOffset != 0)
14693                 {
14694                     // We aren't updating the bytes at [0..lclOffset-1] so *pIsEntire should be set to false
14695                     *pIsEntire = false;
14696                 }
14697                 else
14698                 {
14699                     unsigned lclNum   = addrArgLcl->GetLclNum();
14700                     unsigned varWidth = comp->lvaLclExactSize(lclNum);
14701                     if (comp->lvaTable[lclNum].lvNormalizeOnStore())
14702                     {
14703                         // It's normalize on store, so use the full storage width -- writing to low bytes won't
14704                         // necessarily yield a normalized value.
14705                         varWidth = genTypeStSz(var_types(comp->lvaTable[lclNum].lvType)) * sizeof(int);
14706                     }
14707                     *pIsEntire = (varWidth == width);
14708                 }
14709             }
14710             return true;
14711         }
14712         else if (addrArg->OperGet() == GT_IND)
14713         {
14714             // A GT_ADDR of a GT_IND can both be optimized away, recurse using the child of the GT_IND
14715             return addrArg->gtOp.gtOp1->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
14716         }
14717     }
14718     else if (OperGet() == GT_ADD)
14719     {
14720         if (gtOp.gtOp1->IsCnsIntOrI())
14721         {
14722             // If we are just adding a zero then we allow an IsEntire match against width;
14723             // otherwise we change width to zero to disallow an IsEntire match.
14724             return gtOp.gtOp2->DefinesLocalAddr(comp, gtOp.gtOp1->IsIntegralConst(0) ? width : 0, pLclVarTree,
14725                                                 pIsEntire);
14726         }
14727         else if (gtOp.gtOp2->IsCnsIntOrI())
14728         {
14729             // If we are just adding a zero then we allow an IsEntire match against width;
14730             // otherwise we change width to zero to disallow an IsEntire match.
14731             return gtOp.gtOp1->DefinesLocalAddr(comp, gtOp.gtOp2->IsIntegralConst(0) ? width : 0, pLclVarTree,
14732                                                 pIsEntire);
14733         }
14734     }
14735     // Post rationalization we could have GT_IND(GT_LEA(..)) trees.
14736     else if (OperGet() == GT_LEA)
14737     {
14738         // This method gets invoked during liveness computation and therefore it is critical
14739         // that we don't miss a 'use' of any local.  The logic below assumes that, in the case of
14740         // LEA(base, index, offset), only the base can be a GT_LCL_VAR_ADDR
14741         // and the index cannot.
14742         CLANG_FORMAT_COMMENT_ANCHOR;
14743
14744 #ifdef DEBUG
14745         GenTreePtr index = gtOp.gtOp2;
14746         if (index != nullptr)
14747         {
14748             assert(!index->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire));
14749         }
14750 #endif // DEBUG
14751
14752         // base
14753         GenTreePtr base = gtOp.gtOp1;
14754         if (base != nullptr)
14755         {
14756             // Lea could have an Indir as its base.
14757             if (base->OperGet() == GT_IND)
14758             {
14759                 base = base->gtOp.gtOp1->gtEffectiveVal(/*commas only*/ true);
14760             }
14761             return base->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
14762         }
14763     }
14764     // Otherwise...
14765     return false;
14766 }
14767
14768 //------------------------------------------------------------------------
14769 // IsLocalExpr: Determine if this is a LclVarCommon node and return some
14770 //              additional info about it in the two out parameters.
14771 //
14772 // Arguments:
14773 //    comp        - The Compiler instance
14774 //    pLclVarTree - An "out" argument that returns the local tree as a
14775 //                  LclVarCommon, if it is indeed local.
14776 //    pFldSeq     - An "out" argument that returns the value numbering field
14777 //                  sequence for the node, if any.
14778 //
14779 // Return Value:
14780 //    Returns true, and sets the out arguments accordingly, if this is
14781 //    a LclVarCommon node.
14782
14783 bool GenTree::IsLocalExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq)
14784 {
14785     if (IsLocal()) // Note that this covers "GT_LCL_FLD."
14786     {
14787         *pLclVarTree = AsLclVarCommon();
14788         if (OperGet() == GT_LCL_FLD)
14789         {
14790             // Otherwise, prepend this field to whatever we've already accumulated outside in.
14791             *pFldSeq = comp->GetFieldSeqStore()->Append(AsLclFld()->gtFieldSeq, *pFldSeq);
14792         }
14793         return true;
14794     }
14795     else
14796     {
14797         return false;
14798     }
14799 }
14800
14801 // If this tree evaluates some sum of a local address and some constants,
14802 // return the node for the local being addressed
14803
14804 GenTreeLclVarCommon* GenTree::IsLocalAddrExpr()
14805 {
14806     if (OperGet() == GT_ADDR)
14807     {
14808         return gtOp.gtOp1->IsLocal() ? gtOp.gtOp1->AsLclVarCommon() : nullptr;
14809     }
14810     else if (OperIsLocalAddr())
14811     {
14812         return this->AsLclVarCommon();
14813     }
14814     else if (OperGet() == GT_ADD)
14815     {
14816         if (gtOp.gtOp1->OperGet() == GT_CNS_INT)
14817         {
14818             return gtOp.gtOp2->IsLocalAddrExpr();
14819         }
14820         else if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
14821         {
14822             return gtOp.gtOp1->IsLocalAddrExpr();
14823         }
14824     }
14825     // Otherwise...
14826     return nullptr;
14827 }
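
// Example (a sketch): for a tree of the form ADD(ADDR(LCL_VAR V02), CNS_INT 8) this walks
// past the constant and returns the LCL_VAR node for V02; for anything that is not a local
// address plus constants it returns nullptr.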
14828
14829 bool GenTree::IsLocalAddrExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq)
14830 {
14831     if (OperGet() == GT_ADDR)
14832     {
14833         assert(!comp->compRationalIRForm);
14834         GenTreePtr addrArg = gtOp.gtOp1;
14835         if (addrArg->IsLocal()) // Note that this covers "GT_LCL_FLD."
14836         {
14837             *pLclVarTree = addrArg->AsLclVarCommon();
14838             if (addrArg->OperGet() == GT_LCL_FLD)
14839             {
14840                 // Otherwise, prepend this field to whatever we've already accumulated outside in.
14841                 *pFldSeq = comp->GetFieldSeqStore()->Append(addrArg->AsLclFld()->gtFieldSeq, *pFldSeq);
14842             }
14843             return true;
14844         }
14845         else
14846         {
14847             return false;
14848         }
14849     }
14850     else if (OperIsLocalAddr())
14851     {
14852         *pLclVarTree = this->AsLclVarCommon();
14853         if (this->OperGet() == GT_LCL_FLD_ADDR)
14854         {
14855             *pFldSeq = comp->GetFieldSeqStore()->Append(this->AsLclFld()->gtFieldSeq, *pFldSeq);
14856         }
14857         return true;
14858     }
14859     else if (OperGet() == GT_ADD)
14860     {
14861         if (gtOp.gtOp1->OperGet() == GT_CNS_INT)
14862         {
14863             if (gtOp.gtOp1->AsIntCon()->gtFieldSeq == nullptr)
14864             {
14865                 return false;
14866             }
14867             // Otherwise, prepend this field to whatever we've already accumulated outside in.
14868             *pFldSeq = comp->GetFieldSeqStore()->Append(gtOp.gtOp1->AsIntCon()->gtFieldSeq, *pFldSeq);
14869             return gtOp.gtOp2->IsLocalAddrExpr(comp, pLclVarTree, pFldSeq);
14870         }
14871         else if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
14872         {
14873             if (gtOp.gtOp2->AsIntCon()->gtFieldSeq == nullptr)
14874             {
14875                 return false;
14876             }
14877             // Otherwise, prepend this field to whatever we've already accumulated outside in.
14878             *pFldSeq = comp->GetFieldSeqStore()->Append(gtOp.gtOp2->AsIntCon()->gtFieldSeq, *pFldSeq);
14879             return gtOp.gtOp1->IsLocalAddrExpr(comp, pLclVarTree, pFldSeq);
14880         }
14881     }
14882     // Otherwise...
14883     return false;
14884 }
14885
14886 //------------------------------------------------------------------------
14887 // IsLclVarUpdateTree: Determine whether this is an assignment tree of the
14888 //                     form Vn = Vn 'oper' 'otherTree' where Vn is a lclVar
14889 //
14890 // Arguments:
14891 //    pOtherTree - An "out" argument in which 'otherTree' will be returned.
14892 //    pOper      - An "out" argument in which 'oper' will be returned.
14893 //
14894 // Return Value:
14895 //    If the tree is of the above form, the lclNum of the variable being
14896 //    updated is returned, and 'pOtherTree' and 'pOper' are set.
14897 //    Otherwise, returns BAD_VAR_NUM.
14898 //
14899 // Notes:
14900 //    'otherTree' can have any shape.
14901 //     We avoid worrying about whether the op is commutative by only considering the
14902 //     first operand of the rhs. It is expected that most trees of this form will
14903 //     already have the lclVar on the lhs.
14904 //     TODO-CQ: Evaluate whether there are missed opportunities due to this, or
14905 //     whether gtSetEvalOrder will already have put the lclVar on the lhs in
14906 //     the cases of interest.
14907
14908 unsigned GenTree::IsLclVarUpdateTree(GenTree** pOtherTree, genTreeOps* pOper)
14909 {
14910     unsigned lclNum = BAD_VAR_NUM;
14911     if (OperIsAssignment())
14912     {
14913         GenTree* lhs = gtOp.gtOp1;
14914         if (lhs->OperGet() == GT_LCL_VAR)
14915         {
14916             unsigned lhsLclNum = lhs->AsLclVarCommon()->gtLclNum;
14917             if (gtOper == GT_ASG)
14918             {
14919                 GenTree* rhs = gtOp.gtOp2;
14920                 if (rhs->OperIsBinary() && (rhs->gtOp.gtOp1->gtOper == GT_LCL_VAR) &&
14921                     (rhs->gtOp.gtOp1->AsLclVarCommon()->gtLclNum == lhsLclNum))
14922                 {
14923                     lclNum      = lhsLclNum;
14924                     *pOtherTree = rhs->gtOp.gtOp2;
14925                     *pOper      = rhs->gtOper;
14926                 }
14927             }
14928             else
14929             {
14930                 lclNum      = lhsLclNum;
14931                 *pOper      = GenTree::OpAsgToOper(gtOper);
14932                 *pOtherTree = gtOp.gtOp2;
14933             }
14934         }
14935     }
14936     return lclNum;
14937 }
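
// Example (a sketch): for ASG(LCL_VAR V05, ADD(LCL_VAR V05, CNS_INT 1)) this returns lclNum 5,
// sets *pOtherTree to the CNS_INT 1 node and *pOper to GT_ADD; a tree that is not of the
// "V05 = V05 oper otherTree" shape returns BAD_VAR_NUM instead.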
14938
14939 // return true if this tree node is a subcomponent of parent for codegen purposes
14940 // (essentially, will be rolled into the same instruction)
14941 // Note that this method relies upon the value of gtRegNum field to determine
14942 // if the treenode is contained or not.  Therefore you cannot call this method
14943 // until after the LSRA phase has allocated physical registers to the treenodes.
14944 bool GenTree::isContained() const
14945 {
14946     if (isContainedSpillTemp())
14947     {
14948         return true;
14949     }
14950
14951     if (gtHasReg())
14952     {
14953         return false;
14954     }
14955
14956     // these actually produce a register (the flags reg, we just don't model it)
14957     // and are a separate instruction from the branch that consumes the result
14958     if (OperKind() & GTK_RELOP)
14959     {
14960         return false;
14961     }
14962
14963     // TODO-Cleanup : this is not clean, would be nice to have some way of marking this.
14964     switch (OperGet())
14965     {
14966         case GT_STOREIND:
14967         case GT_JTRUE:
14968         case GT_RETURN:
14969         case GT_RETFILT:
14970         case GT_STORE_LCL_FLD:
14971         case GT_STORE_LCL_VAR:
14972         case GT_ARR_BOUNDS_CHECK:
14973         case GT_LOCKADD:
14974         case GT_NOP:
14975         case GT_NO_OP:
14976         case GT_START_NONGC:
14977         case GT_PROF_HOOK:
14978         case GT_RETURNTRAP:
14979         case GT_COMMA:
14980         case GT_PINVOKE_PROLOG:
14981         case GT_PHYSREGDST:
14982         case GT_PUTARG_STK:
14983         case GT_MEMORYBARRIER:
14984         case GT_COPYBLK:
14985         case GT_INITBLK:
14986         case GT_COPYOBJ:
14987         case GT_SWITCH:
14988         case GT_JMPTABLE:
14989         case GT_SWITCH_TABLE:
14990         case GT_SWAP:
14991         case GT_LCLHEAP:
14992         case GT_CKFINITE:
14993         case GT_JMP:
14994         case GT_IL_OFFSET:
14995 #ifdef FEATURE_SIMD
14996         case GT_SIMD_CHK:
14997 #endif // FEATURE_SIMD
14998
14999 #if !FEATURE_EH_FUNCLETS
15000         case GT_END_LFIN:
15001 #endif
15002             return false;
15003
15004 #if !defined(LEGACY_BACKEND) && !defined(_TARGET_64BIT_)
15005         case GT_LONG:
15006             // GT_LONG nodes are normally contained. The only exception is when the result
15007             // of a TYP_LONG operation is not used and this can only happen if the GT_LONG
15008             // is the last node in the statement (in linear order).
15009             return gtNext != nullptr;
15010 #endif
15011
15012         case GT_CALL:
15013             // Note: if you hit this assert you are probably calling isContained()
15014             // before the LSRA phase has allocated physical register to the tree nodes
15015             //
15016             assert(gtType == TYP_VOID);
15017             return false;
15018
15019         default:
15020             // if it's contained it better have a parent
15021             assert(gtNext || OperIsLocal());
15022             return true;
15023     }
15024 }
15025
15026 // return true if node is contained and an indir
15027 bool GenTree::isContainedIndir() const
15028 {
15029     return isContained() && isIndir();
15030 }
15031
15032 bool GenTree::isIndirAddrMode()
15033 {
15034     return isIndir() && AsIndir()->Addr()->OperIsAddrMode() && AsIndir()->Addr()->isContained();
15035 }
15036
15037 bool GenTree::isIndir() const
15038 {
15039     return OperGet() == GT_IND || OperGet() == GT_STOREIND;
15040 }
15041
15042 bool GenTreeIndir::HasBase()
15043 {
15044     return Base() != nullptr;
15045 }
15046
15047 bool GenTreeIndir::HasIndex()
15048 {
15049     return Index() != nullptr;
15050 }
15051
15052 GenTreePtr GenTreeIndir::Base()
15053 {
15054     GenTreePtr addr = Addr();
15055
15056     if (isIndirAddrMode())
15057     {
15058         GenTree* result = addr->AsAddrMode()->Base();
15059         if (result != nullptr)
15060         {
15061             result = result->gtEffectiveVal();
15062         }
15063         return result;
15064     }
15065     else
15066     {
15067         return addr; // TODO: why do we return 'addr' here, but we return 'nullptr' in the equivalent Index() case?
15068     }
15069 }
15070
15071 GenTree* GenTreeIndir::Index()
15072 {
15073     if (isIndirAddrMode())
15074     {
15075         GenTree* result = Addr()->AsAddrMode()->Index();
15076         if (result != nullptr)
15077         {
15078             result = result->gtEffectiveVal();
15079         }
15080         return result;
15081     }
15082     else
15083     {
15084         return nullptr;
15085     }
15086 }
15087
15088 unsigned GenTreeIndir::Scale()
15089 {
15090     if (HasIndex())
15091     {
15092         return Addr()->AsAddrMode()->gtScale;
15093     }
15094     else
15095     {
15096         return 1;
15097     }
15098 }
15099
15100 size_t GenTreeIndir::Offset()
15101 {
15102     if (isIndirAddrMode())
15103     {
15104         return Addr()->AsAddrMode()->gtOffset;
15105     }
15106     else if (Addr()->gtOper == GT_CLS_VAR_ADDR)
15107     {
15108         return (size_t)Addr()->gtClsVar.gtClsVarHnd;
15109     }
15110     else if (Addr()->IsCnsIntOrI() && Addr()->isContained())
15111     {
15112         return Addr()->AsIntConCommon()->IconValue();
15113     }
15114     else
15115     {
15116         return 0;
15117     }
15118 }
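
// Example (a sketch): for an indirection whose contained address is an address mode of the
// form LEA(base, index*4, 16), Base() returns the (effective) base tree, Index() returns the
// index tree, Scale() == 4 and Offset() == 16. For a plain GT_IND(addr) with no address mode,
// Base() returns 'addr', Index() returns nullptr, Scale() == 1 and Offset() == 0.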
15119
15120 //------------------------------------------------------------------------
15121 // GenTreeIntConCommon::ImmedValNeedsReloc: does this immediate value need recording a relocation with the VM?
15122 //
15123 // Arguments:
15124 //    comp - Compiler instance
15125 //
15126 // Return Value:
15127 //    True if this immediate value needs recording a relocation with the VM; false otherwise.
15128
15129 bool GenTreeIntConCommon::ImmedValNeedsReloc(Compiler* comp)
15130 {
15131 #ifdef RELOC_SUPPORT
15132     return comp->opts.compReloc && (gtOper == GT_CNS_INT) && IsIconHandle();
15133 #else
15134     return false;
15135 #endif
15136 }
15137
15138 //------------------------------------------------------------------------
15139 // ImmedValCanBeFolded: can this immediate value be folded for op?
15140 //
15141 // Arguments:
15142 //    comp - Compiler instance
15143 //    op - Tree operator
15144 //
15145 // Return Value:
15146 //    True if this immediate value can be folded for op; false otherwise.
15147
15148 bool GenTreeIntConCommon::ImmedValCanBeFolded(Compiler* comp, genTreeOps op)
15149 {
15150     // In general, immediate values that need relocations can't be folded.
15151     // There are cases where we do want to allow folding of handle comparisons
15152     // (e.g., typeof(T) == typeof(int)).
15153     return !ImmedValNeedsReloc(comp) || (op == GT_EQ) || (op == GT_NE);
15154 }
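
// Example (a sketch): a handle comparison such as "typeof(T) == typeof(int)" can reduce to
// GT_EQ(GT_CNS_INT handle1, GT_CNS_INT handle2); even when those handles need relocations the
// equality itself can still be folded at jit time, which is why GT_EQ and GT_NE are exempted
// above.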
15155
15156 #ifdef _TARGET_AMD64_
15157 // Returns true if this absolute address fits within the base of an addr mode.
15158 // On Amd64 this effectively means, whether an absolute indirect address can
15159 // On Amd64 this effectively means whether an absolute indirect address can
15160 bool GenTreeIntConCommon::FitsInAddrBase(Compiler* comp)
15161 {
15162 #ifndef LEGACY_BACKEND
15163 #ifdef DEBUG
15164     // Early out if PC-rel encoding of absolute addr is disabled.
15165     if (!comp->opts.compEnablePCRelAddr)
15166     {
15167         return false;
15168     }
15169 #endif
15170 #endif //! LEGACY_BACKEND
15171
15172     if (comp->opts.compReloc)
15173     {
15174         // During Ngen JIT is always asked to generate relocatable code.
15175         // Hence JIT will try to encode only icon handles as pc-relative offsets.
15176         return IsIconHandle() && (IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue()));
15177     }
15178     else
15179     {
15180         // During Jitting, we are allowed to generate non-relocatable code.
15181         // On Amd64 we can encode an absolute indirect addr as an offset relative to zero or RIP.
15182         // An absolute indir addr that can fit within 32 bits can be encoded as an offset relative
15183         // to zero. All other absolute indir addr could be attempted to be encoded as RIP relative
15184         // based on reloc hint provided by VM.  RIP relative encoding is preferred over relative
15185         // to zero, because the former is one byte smaller than the latter.  For this reason
15186         // we check for reloc hint first and then whether addr fits in 32-bits next.
15187         //
15188         // VM starts off with an initial state to allow both data and code address to be encoded as
15189         // pc-relative offsets.  Hence JIT will attempt to encode all absolute addresses as pc-relative
15190         // offsets.  It is possible that, while jitting a method, an address cannot be encoded as a
15191         // pc-relative offset.  In that case VM will note the overflow and will trigger re-jitting
15192         // of the method with reloc hints turned off for all future methods. Second time around
15193         // jitting will succeed since JIT will not attempt to encode data addresses as pc-relative
15194         // offsets.  Note that JIT will always attempt to relocate code addresses (e.g., call addr).
15195         // After an overflow, VM will assume any relocation recorded is for a code address and will
15196         // emit jump thunk if it cannot be encoded as pc-relative offset.
15197         return (IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue())) || FitsInI32();
15198     }
15199 }
15200
15201 // Returns true if this icon value, when encoded as an address, needs recording a relocation with the VM
15202 bool GenTreeIntConCommon::AddrNeedsReloc(Compiler* comp)
15203 {
15204     if (comp->opts.compReloc)
15205     {
15206         // During Ngen JIT is always asked to generate relocatable code.
15207         // Hence JIT will try to encode only icon handles as pc-relative offsets.
15208         return IsIconHandle() && (IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue()));
15209     }
15210     else
15211     {
15212         return IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void*)IconValue());
15213     }
15214 }
15215
15216 #elif defined(_TARGET_X86_)
15217 // Returns true if this absolute address fits within the base of an addr mode.
15218 // On x86 all addresses are 4-bytes and can be directly encoded in an addr mode.
15219 bool GenTreeIntConCommon::FitsInAddrBase(Compiler* comp)
15220 {
15221 #ifndef LEGACY_BACKEND
15222 #ifdef DEBUG
15223     // Early out if PC-rel encoding of absolute addr is disabled.
15224     if (!comp->opts.compEnablePCRelAddr)
15225     {
15226         return false;
15227     }
15228 #endif
15229 #endif //! LEGACY_BACKEND
15230
15231     // TODO-x86 - TLS field handles are excluded for now as they are accessed relative to FS segment.
15232 // Handling of TLS field handles is a NYI and this needs to be revisited after implementing it.
15233     return IsCnsIntOrI() && !IsIconHandle(GTF_ICON_TLS_HDL);
15234 }
15235
15236 // Returns true if this icon value, when encoded as an address, needs recording a relocation with the VM
15237 bool GenTreeIntConCommon::AddrNeedsReloc(Compiler* comp)
15238 {
15239     // If generating relocatable code, icons should be reported for recording relocations.
15240     return comp->opts.compReloc && IsIconHandle();
15241 }
15242 #endif //_TARGET_X86_
15243
15244 bool GenTree::IsFieldAddr(Compiler* comp, GenTreePtr* pObj, GenTreePtr* pStatic, FieldSeqNode** pFldSeq)
15245 {
15246     FieldSeqNode* newFldSeq    = nullptr;
15247     GenTreePtr    baseAddr     = nullptr;
15248     bool          mustBeStatic = false;
15249
15250     FieldSeqNode* statStructFldSeq = nullptr;
15251     if (TypeGet() == TYP_REF)
15252     {
15253         // Recognize struct static field patterns...
15254         if (OperGet() == GT_IND)
15255         {
15256             GenTreePtr     addr = gtOp.gtOp1;
15257             GenTreeIntCon* icon = nullptr;
15258             if (addr->OperGet() == GT_CNS_INT)
15259             {
15260                 icon = addr->AsIntCon();
15261             }
15262             else if (addr->OperGet() == GT_ADD)
15263             {
15264                 // op1 should never be a field sequence (or any other kind of handle)
15265                 assert((addr->gtOp.gtOp1->gtOper != GT_CNS_INT) || !addr->gtOp.gtOp1->IsIconHandle());
15266                 if (addr->gtOp.gtOp2->OperGet() == GT_CNS_INT)
15267                 {
15268                     icon = addr->gtOp.gtOp2->AsIntCon();
15269                 }
15270             }
15271             if (icon != nullptr && !icon->IsIconHandle(GTF_ICON_STR_HDL) // String handles are a source of TYP_REFs.
15272                 && icon->gtFieldSeq != nullptr &&
15273                 icon->gtFieldSeq->m_next == nullptr // A static field should be a singleton
15274                 // TODO-Review: A pseudoField here indicates an issue - this requires investigation
15275                 // See test case src\ddsuites\src\clr\x86\CoreMangLib\Dev\Globalization\CalendarRegressions.exe
15276                 && !(FieldSeqStore::IsPseudoField(icon->gtFieldSeq->m_fieldHnd)) &&
15277                 icon->gtFieldSeq != FieldSeqStore::NotAField()) // Ignore non-fields.
15278             {
15279                 statStructFldSeq = icon->gtFieldSeq;
15280             }
15281             else
15282             {
15283                 addr = addr->gtEffectiveVal();
15284
15285                 // Perhaps it's a direct indirection of a helper call or a cse with a zero offset annotation.
15286                 if ((addr->OperGet() == GT_CALL) || (addr->OperGet() == GT_LCL_VAR))
15287                 {
15288                     FieldSeqNode* zeroFieldSeq = nullptr;
15289                     if (comp->GetZeroOffsetFieldMap()->Lookup(addr, &zeroFieldSeq))
15290                     {
15291                         if (zeroFieldSeq->m_next == nullptr)
15292                         {
15293                             statStructFldSeq = zeroFieldSeq;
15294                         }
15295                     }
15296                 }
15297             }
15298         }
15299         else if (OperGet() == GT_CLS_VAR)
15300         {
15301             GenTreeClsVar* clsVar = AsClsVar();
15302             if (clsVar->gtFieldSeq != nullptr && clsVar->gtFieldSeq->m_next == nullptr)
15303             {
15304                 statStructFldSeq = clsVar->gtFieldSeq;
15305             }
15306         }
15307         else if (OperIsLocal())
15308         {
15309             // If we have a GT_LCL_VAR, it can be the result of a CSE substitution.
15310             // If it is, then the CSE assignment will have a ValueNum that
15311             // describes the RHS of the CSE assignment.
15312             //
15313             // The CSE could be a pointer to a boxed struct
15314             //
15315             GenTreeLclVarCommon* lclVar = AsLclVarCommon();
15316             ValueNum             vn     = gtVNPair.GetLiberal();
15317             if (vn != ValueNumStore::NoVN)
15318             {
15319                 // Is the ValueNum a MapSelect involving a SharedStatic helper?
15320                 VNFuncApp funcApp1;
15321                 if (comp->vnStore->GetVNFunc(vn, &funcApp1) && (funcApp1.m_func == VNF_MapSelect) &&
15322                     (comp->vnStore->IsSharedStatic(funcApp1.m_args[1])))
15323                 {
15324                     ValueNum mapVN = funcApp1.m_args[0];
15325                     // Is this new 'mapVN' ValueNum, a MapSelect involving a handle?
15326                     VNFuncApp funcApp2;
15327                     if (comp->vnStore->GetVNFunc(mapVN, &funcApp2) && (funcApp2.m_func == VNF_MapSelect) &&
15328                         (comp->vnStore->IsVNHandle(funcApp2.m_args[1])))
15329                     {
15330                         ValueNum fldHndVN = funcApp2.m_args[1];
15331                         // Is this new 'fldHndVN' VNhandle a FieldHandle?
15332                         unsigned flags = comp->vnStore->GetHandleFlags(fldHndVN);
15333                         if (flags == GTF_ICON_FIELD_HDL)
15334                         {
15335                             CORINFO_FIELD_HANDLE fieldHnd =
15336                                 CORINFO_FIELD_HANDLE(comp->vnStore->ConstantValue<ssize_t>(fldHndVN));
15337
15338                             // Record this field sequence in 'statStructFldSeq' as it is likely to be a Boxed Struct
15339                             // field access.
15340                             statStructFldSeq = comp->GetFieldSeqStore()->CreateSingleton(fieldHnd);
15341                         }
15342                     }
15343                 }
15344             }
15345         }
15346
15347         if (statStructFldSeq != nullptr)
15348         {
15349             assert(statStructFldSeq->m_next == nullptr);
15350             // Is this a pointer to a boxed struct?
15351             if (comp->gtIsStaticFieldPtrToBoxedStruct(TYP_REF, statStructFldSeq->m_fieldHnd))
15352             {
15353                 *pFldSeq = comp->GetFieldSeqStore()->Append(statStructFldSeq, *pFldSeq);
15354                 *pObj    = nullptr;
15355                 *pStatic = this;
15356                 return true;
15357             }
15358         }
15359
15360         // Otherwise...
15361         *pObj    = this;
15362         *pStatic = nullptr;
15363         return true;
15364     }
15365     else if (OperGet() == GT_ADD)
15366     {
15367         // op1 should never be a field sequence (or any other kind of handle)
15368         assert((gtOp.gtOp1->gtOper != GT_CNS_INT) || !gtOp.gtOp1->IsIconHandle());
15369         if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
15370         {
15371             newFldSeq = gtOp.gtOp2->AsIntCon()->gtFieldSeq;
15372             baseAddr  = gtOp.gtOp1;
15373         }
15374     }
15375     else
15376     {
15377         // Check if "this" has a zero-offset annotation.
15378         if (!comp->GetZeroOffsetFieldMap()->Lookup(this, &newFldSeq))
15379         {
15380             // If not, this is not a field address.
15381             return false;
15382         }
15383         else
15384         {
15385             baseAddr     = this;
15386             mustBeStatic = true;
15387         }
15388     }
15389
15390     // If we don't have a field sequence, this is not a field address.
15391     if (newFldSeq == nullptr || newFldSeq == FieldSeqStore::NotAField())
15392     {
15393         return false;
15394     }
15395
15396     // Prepend this field to whatever we've already accumulated (outside-in).
15397     *pFldSeq = comp->GetFieldSeqStore()->Append(newFldSeq, *pFldSeq);
15398
15399     // Is it a static or instance field?
15400     if (!FieldSeqStore::IsPseudoField(newFldSeq->m_fieldHnd) &&
15401         comp->info.compCompHnd->isFieldStatic(newFldSeq->m_fieldHnd))
15402     {
15403         // It is a static field.  We're done.
15404         *pObj    = nullptr;
15405         *pStatic = baseAddr;
15406         return true;
15407     }
15408     else if ((baseAddr != nullptr) && !mustBeStatic)
15409     {
15410         // It's an instance field...but it must be for a struct field, since we've not yet encountered
15411         // a "TYP_REF" address.  Analyze the reset of the address.
15412         return baseAddr->gtEffectiveVal()->IsFieldAddr(comp, pObj, pStatic, pFldSeq);
15413     }
15414
15415     // Otherwise...
15416     return false;
15417 }
15418
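//------------------------------------------------------------------------
// Compiler::gtIsStaticFieldPtrToBoxedStruct: Check whether a static field with
//    the given handle, accessed via a node of type "fieldNodeType", is a pointer
//    to a boxed struct (i.e. the field's declared type is a value type even
//    though the access produces a TYP_REF).
//
// Arguments:
//    fieldNodeType - The type of the node that accesses the field.
//    fldHnd        - The field handle.
//
// Return Value:
//    True if the access is a TYP_REF pointer to a boxed struct.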
15419 bool Compiler::gtIsStaticFieldPtrToBoxedStruct(var_types fieldNodeType, CORINFO_FIELD_HANDLE fldHnd)
15420 {
15421     if (fieldNodeType != TYP_REF)
15422     {
15423         return false;
15424     }
15425     CORINFO_CLASS_HANDLE fldCls = nullptr;
15426     noway_assert(fldHnd != nullptr);
15427     CorInfoType cit      = info.compCompHnd->getFieldType(fldHnd, &fldCls);
15428     var_types   fieldTyp = JITtype2varType(cit);
15429     return fieldTyp != TYP_REF;
15430 }
15431
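//------------------------------------------------------------------------
// Compiler::gtGetStructHandleIfPresent: Get the struct class handle for a
//    struct-typed tree.
//
// Arguments:
//    tree - The tree whose struct handle is requested.
//
// Return Value:
//    The struct class handle if it can be determined from the tree's operator,
//    or NO_CLASS_HANDLE otherwise.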
15432 CORINFO_CLASS_HANDLE Compiler::gtGetStructHandleIfPresent(GenTree* tree)
15433 {
15434     CORINFO_CLASS_HANDLE structHnd = NO_CLASS_HANDLE;
15435     tree                           = tree->gtEffectiveVal();
15436     if (varTypeIsStruct(tree->gtType))
15437     {
15438         switch (tree->gtOper)
15439         {
15440             default:
15441                 break;
15442             case GT_MKREFANY:
15443                 structHnd = impGetRefAnyClass();
15444                 break;
15445             case GT_OBJ:
15446                 structHnd = tree->gtObj.gtClass;
15447                 break;
15448             case GT_CALL:
15449                 structHnd = tree->gtCall.gtRetClsHnd;
15450                 break;
15451             case GT_RET_EXPR:
15452                 structHnd = tree->gtRetExpr.gtRetClsHnd;
15453                 break;
15454             case GT_ARGPLACE:
15455                 structHnd = tree->gtArgPlace.gtArgPlaceClsHnd;
15456                 break;
15457             case GT_INDEX:
15458                 structHnd = tree->gtIndex.gtStructElemClass;
15459                 break;
15460             case GT_FIELD:
15461                 info.compCompHnd->getFieldType(tree->gtField.gtFldHnd, &structHnd);
15462                 break;
15463             case GT_ASG:
15464                 structHnd = gtGetStructHandle(tree->gtGetOp1());
15465                 break;
15466             case GT_LCL_VAR:
15467             case GT_LCL_FLD:
15468                 structHnd = lvaTable[tree->AsLclVarCommon()->gtLclNum].lvVerTypeInfo.GetClassHandle();
15469                 break;
15470             case GT_RETURN:
15471                 structHnd = gtGetStructHandleIfPresent(tree->gtOp.gtOp1);
15472                 break;
15473             case GT_IND:
15474 #ifdef FEATURE_SIMD
15475                 if (varTypeIsSIMD(tree))
15476                 {
15477                     structHnd = gtGetStructHandleForSIMD(tree->gtType, TYP_FLOAT);
15478                 }
15479                 else
15480 #endif
15481                     if (tree->gtFlags & GTF_IND_ARR_INDEX)
15482                 {
15483                     ArrayInfo arrInfo;
15484                     bool      b = GetArrayInfoMap()->Lookup(tree, &arrInfo);
15485                     assert(b);
15486                     structHnd = EncodeElemType(arrInfo.m_elemType, arrInfo.m_elemStructType);
15487                 }
15488                 break;
15489 #ifdef FEATURE_SIMD
15490             case GT_SIMD:
15491                 structHnd = gtGetStructHandleForSIMD(tree->gtType, tree->AsSIMD()->gtSIMDBaseType);
15492 #endif // FEATURE_SIMD
15493                 break;
15494         }
15495     }
15496     return structHnd;
15497 }
15498
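//------------------------------------------------------------------------
// Compiler::gtGetStructHandle: Get the struct class handle for a struct-typed
//    tree, asserting that one is found.
//
// Arguments:
//    tree - The tree whose struct handle is requested.
//
// Return Value:
//    The struct class handle; asserts that it is not NO_CLASS_HANDLE.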
15499 CORINFO_CLASS_HANDLE Compiler::gtGetStructHandle(GenTree* tree)
15500 {
15501     CORINFO_CLASS_HANDLE structHnd = gtGetStructHandleIfPresent(tree);
15502     assert(structHnd != NO_CLASS_HANDLE);
15503     return structHnd;
15504 }
15505
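//------------------------------------------------------------------------
// GenTree::ParseArrayAddress: Decompose this tree, which is expected to compute
//    an array element address, into its constituent parts.
//
// Arguments:
//    comp      - The compiler instance.
//    arrayInfo - Element type/size/offset information for the array access.
//    pArr      - [out] The array object reference, or nullptr if none was found.
//    pInxVN    - [out] The value number of the element index.
//    pFldSeq   - [out] The field sequence for any struct fields within the element.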
15506 void GenTree::ParseArrayAddress(
15507     Compiler* comp, ArrayInfo* arrayInfo, GenTreePtr* pArr, ValueNum* pInxVN, FieldSeqNode** pFldSeq)
15508 {
15509     *pArr                = nullptr;
15510     ValueNum      inxVN  = ValueNumStore::NoVN;
15511     ssize_t       offset = 0;
15512     FieldSeqNode* fldSeq = nullptr;
15513
15514     ParseArrayAddressWork(comp, 1, pArr, &inxVN, &offset, &fldSeq);
15515
15516     // If we didn't find an array reference (perhaps it is the constant null?), we give up.
15517     if (*pArr == nullptr)
15518     {
15519         return;
15520     }
15521
15522     // OK, now we have to figure out if any part of the "offset" is a constant contribution to the index.
15523     // First, sum the offsets of any fields in fldSeq.
15524     unsigned      fieldOffsets = 0;
15525     FieldSeqNode* fldSeqIter   = fldSeq;
15526     // Also, find the first non-pseudo field...
15527     assert(*pFldSeq == nullptr);
15528     while (fldSeqIter != nullptr)
15529     {
15530         if (fldSeqIter == FieldSeqStore::NotAField())
15531         {
15532             // TODO-Review: A NotAField here indicates a failure to properly maintain the field sequence
15533             // See test case self_host_tests_x86\jit\regression\CLR-x86-JIT\v1-m12-beta2\ b70992\ b70992.exe
15534             // Safest thing to do here is to drop back to MinOpts
15535             noway_assert(!"fldSeqIter is NotAField() in ParseArrayAddress");
15536         }
15537
15538         if (!FieldSeqStore::IsPseudoField(fldSeqIter->m_fieldHnd))
15539         {
15540             if (*pFldSeq == nullptr)
15541             {
15542                 *pFldSeq = fldSeqIter;
15543             }
15544             CORINFO_CLASS_HANDLE fldCls = nullptr;
15545             noway_assert(fldSeqIter->m_fieldHnd != nullptr);
15546             CorInfoType cit = comp->info.compCompHnd->getFieldType(fldSeqIter->m_fieldHnd, &fldCls);
15547             fieldOffsets += comp->compGetTypeSize(cit, fldCls);
15548         }
15549         fldSeqIter = fldSeqIter->m_next;
15550     }
15551
15552     // Is there some portion of the "offset" beyond the first-elem offset and the struct field suffix we just computed?
15553     if (!FitsIn<ssize_t>(fieldOffsets + arrayInfo->m_elemOffset) || !FitsIn<ssize_t>(arrayInfo->m_elemSize))
15554     {
15555         // This seems unlikely, but no harm in being safe...
15556         *pInxVN = comp->GetValueNumStore()->VNForExpr(nullptr, TYP_INT);
15557         return;
15558     }
15559     // Otherwise...
15560     ssize_t offsetAccountedFor = static_cast<ssize_t>(fieldOffsets + arrayInfo->m_elemOffset);
15561     ssize_t elemSize           = static_cast<ssize_t>(arrayInfo->m_elemSize);
15562
15563     ssize_t constIndOffset = offset - offsetAccountedFor;
15564     // This should be divisible by the element size...
15565     assert((constIndOffset % elemSize) == 0);
15566     ssize_t constInd = constIndOffset / elemSize;
15567
15568     ValueNumStore* vnStore = comp->GetValueNumStore();
15569
15570     if (inxVN == ValueNumStore::NoVN)
15571     {
15572         // Must be a constant index.
15573         *pInxVN = vnStore->VNForPtrSizeIntCon(constInd);
15574     }
15575     else
15576     {
15577         //
15578         // Perform ((inxVN / elemSizeVN) + vnForConstInd)
15579         //
15580
15581         // The value associated with the index value number (inxVN) is the offset into the array,
15582         // which has been scaled by element size. We need to recover the array index from that offset
15583         if (vnStore->IsVNConstant(inxVN))
15584         {
15585             ssize_t index = vnStore->CoercedConstantValue<ssize_t>(inxVN);
15586             noway_assert(elemSize > 0 && ((index % elemSize) == 0));
15587             *pInxVN = vnStore->VNForPtrSizeIntCon((index / elemSize) + constInd);
15588         }
15589         else
15590         {
15591             bool canFoldDiv = false;
15592
15593             // If the index VN is a MUL by elemSize, see if we can eliminate it instead of adding
15594             // the division by elemSize.
15595             VNFuncApp funcApp;
15596             if (vnStore->GetVNFunc(inxVN, &funcApp) && funcApp.m_func == (VNFunc)GT_MUL)
15597             {
15598                 ValueNum vnForElemSize = vnStore->VNForLongCon(elemSize);
15599
15600                 // One of the multiply operands is elemSize, so the resulting
15601                 // index VN should simply be the other operand.
15602                 if (funcApp.m_args[1] == vnForElemSize)
15603                 {
15604                     *pInxVN    = funcApp.m_args[0];
15605                     canFoldDiv = true;
15606                 }
15607                 else if (funcApp.m_args[0] == vnForElemSize)
15608                 {
15609                     *pInxVN    = funcApp.m_args[1];
15610                     canFoldDiv = true;
15611                 }
15612             }
15613
15614             // Perform ((inxVN / elemSizeVN) + vnForConstInd)
15615             if (!canFoldDiv)
15616             {
15617                 ValueNum vnForElemSize = vnStore->VNForPtrSizeIntCon(elemSize);
15618                 ValueNum vnForScaledInx =
15619                     vnStore->VNForFunc(TYP_I_IMPL, GetVNFuncForOper(GT_DIV, false), inxVN, vnForElemSize);
15620                 *pInxVN = vnForScaledInx;
15621             }
15622
15623             if (constInd != 0)
15624             {
15625                 ValueNum vnForConstInd = comp->GetValueNumStore()->VNForPtrSizeIntCon(constInd);
15626                 *pInxVN                = comp->GetValueNumStore()->VNForFunc(TYP_I_IMPL,
15627                                                               GetVNFuncForOper(GT_ADD, (gtFlags & GTF_UNSIGNED) != 0),
15628                                                               *pInxVN, vnForConstInd);
15629             }
15630         }
15631     }
15632 }
15633
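//------------------------------------------------------------------------
// GenTree::ParseArrayAddressWork: Recursive worker for ParseArrayAddress. Walks
//    the address expression, accumulating the array object, the constant offset,
//    the non-constant index value number, and the field sequence.
//
// Arguments:
//    comp     - The compiler instance.
//    inputMul - The multiplier that applies to this subtree's contribution.
//    pArr     - [out] The array object reference (the TYP_REF operand).
//    pInxVN   - [in/out] The accumulated non-constant part of the index.
//    pOffset  - [in/out] The accumulated constant offset.
//    pFldSeq  - [in/out] The accumulated field sequence.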
15634 void GenTree::ParseArrayAddressWork(
15635     Compiler* comp, ssize_t inputMul, GenTreePtr* pArr, ValueNum* pInxVN, ssize_t* pOffset, FieldSeqNode** pFldSeq)
15636 {
15637     if (TypeGet() == TYP_REF)
15638     {
15639         // This must be the array pointer.
15640         *pArr = this;
15641         assert(inputMul == 1); // Can't multiply the array pointer by anything.
15642     }
15643     else
15644     {
15645         switch (OperGet())
15646         {
15647             case GT_CNS_INT:
15648                 *pFldSeq = comp->GetFieldSeqStore()->Append(*pFldSeq, gtIntCon.gtFieldSeq);
15649                 *pOffset += (inputMul * gtIntCon.gtIconVal);
15650                 return;
15651
15652             case GT_ADD:
15653             case GT_SUB:
15654                 gtOp.gtOp1->ParseArrayAddressWork(comp, inputMul, pArr, pInxVN, pOffset, pFldSeq);
15655                 if (OperGet() == GT_SUB)
15656                 {
15657                     inputMul = -inputMul;
15658                 }
15659                 gtOp.gtOp2->ParseArrayAddressWork(comp, inputMul, pArr, pInxVN, pOffset, pFldSeq);
15660                 return;
15661
15662             case GT_MUL:
15663             {
15664                 // If one op is a constant, continue parsing down.
15665                 ssize_t    subMul   = 0;
15666                 GenTreePtr nonConst = nullptr;
15667                 if (gtOp.gtOp1->IsCnsIntOrI())
15668                 {
15669                     // If the other arg is an int constant, and is a "not-a-field", choose
15670                     // that as the multiplier, thus preserving constant index offsets...
15671                     if (gtOp.gtOp2->OperGet() == GT_CNS_INT &&
15672                         gtOp.gtOp2->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField())
15673                     {
15674                         subMul   = gtOp.gtOp2->gtIntConCommon.IconValue();
15675                         nonConst = gtOp.gtOp1;
15676                     }
15677                     else
15678                     {
15679                         subMul   = gtOp.gtOp1->gtIntConCommon.IconValue();
15680                         nonConst = gtOp.gtOp2;
15681                     }
15682                 }
15683                 else if (gtOp.gtOp2->IsCnsIntOrI())
15684                 {
15685                     subMul   = gtOp.gtOp2->gtIntConCommon.IconValue();
15686                     nonConst = gtOp.gtOp1;
15687                 }
15688                 if (nonConst != nullptr)
15689                 {
15690                     nonConst->ParseArrayAddressWork(comp, inputMul * subMul, pArr, pInxVN, pOffset, pFldSeq);
15691                     return;
15692                 }
15693                 // Otherwise, exit the switch, treat as a contribution to the index.
15694             }
15695             break;
15696
15697             case GT_LSH:
15698                 // If one op is a constant, continue parsing down.
15699                 if (gtOp.gtOp2->IsCnsIntOrI())
15700                 {
15701                     ssize_t subMul = 1 << gtOp.gtOp2->gtIntConCommon.IconValue();
15702                     gtOp.gtOp1->ParseArrayAddressWork(comp, inputMul * subMul, pArr, pInxVN, pOffset, pFldSeq);
15703                     return;
15704                 }
15705                 // Otherwise, exit the switch, treat as a contribution to the index.
15706                 break;
15707
15708             default:
15709                 break;
15710         }
15711         // If we didn't return above, this must be a contribution to the non-constant part of the index VN.
15712         ValueNum vn = comp->GetValueNumStore()->VNNormVal(gtVNPair.GetLiberal()); // We don't care about exceptions for
15713                                                                                   // this purpose.
15714         if (inputMul != 1)
15715         {
15716             ValueNum mulVN = comp->GetValueNumStore()->VNForLongCon(inputMul);
15717             vn             = comp->GetValueNumStore()->VNForFunc(TypeGet(), GetVNFuncForOper(GT_MUL, false), mulVN, vn);
15718         }
15719         if (*pInxVN == ValueNumStore::NoVN)
15720         {
15721             *pInxVN = vn;
15722         }
15723         else
15724         {
15725             *pInxVN = comp->GetValueNumStore()->VNForFunc(TypeGet(), GetVNFuncForOper(GT_ADD, false), *pInxVN, vn);
15726         }
15727     }
15728 }
15729
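//------------------------------------------------------------------------
// GenTree::ParseArrayElemForm: Check whether this indirection is an array
//    element access, either directly (GTF_IND_ARR_INDEX is set) or via its
//    address expression.
//
// Arguments:
//    comp      - The compiler instance.
//    arrayInfo - [out] The array access information, if found.
//    pFldSeq   - [in/out] The field sequence for any struct fields involved.
//
// Return Value:
//    True if this tree is recognized as an array element access.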
15730 bool GenTree::ParseArrayElemForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq)
15731 {
15732     if (OperIsIndir())
15733     {
15734         if (gtFlags & GTF_IND_ARR_INDEX)
15735         {
15736             bool b = comp->GetArrayInfoMap()->Lookup(this, arrayInfo);
15737             assert(b);
15738             return true;
15739         }
15740
15741         // Otherwise...
15742         GenTreePtr addr = AsIndir()->Addr();
15743         return addr->ParseArrayElemAddrForm(comp, arrayInfo, pFldSeq);
15744     }
15745     else
15746     {
15747         return false;
15748     }
15749 }
15750
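//------------------------------------------------------------------------
// GenTree::ParseArrayElemAddrForm: Check whether this tree computes the address
//    of an array element, peeling off constant offsets and zero-offset field
//    annotations along the way.
//
// Arguments:
//    comp      - The compiler instance.
//    arrayInfo - [out] The array access information, if found.
//    pFldSeq   - [in/out] The field sequence for any struct fields involved.
//
// Return Value:
//    True if this tree is recognized as an array element address.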
15751 bool GenTree::ParseArrayElemAddrForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq)
15752 {
15753     switch (OperGet())
15754     {
15755         case GT_ADD:
15756         {
15757             GenTreePtr arrAddr = nullptr;
15758             GenTreePtr offset  = nullptr;
15759             if (gtOp.gtOp1->TypeGet() == TYP_BYREF)
15760             {
15761                 arrAddr = gtOp.gtOp1;
15762                 offset  = gtOp.gtOp2;
15763             }
15764             else if (gtOp.gtOp2->TypeGet() == TYP_BYREF)
15765             {
15766                 arrAddr = gtOp.gtOp2;
15767                 offset  = gtOp.gtOp1;
15768             }
15769             else
15770             {
15771                 return false;
15772             }
15773             if (!offset->ParseOffsetForm(comp, pFldSeq))
15774             {
15775                 return false;
15776             }
15777             return arrAddr->ParseArrayElemAddrForm(comp, arrayInfo, pFldSeq);
15778         }
15779
15780         case GT_ADDR:
15781         {
15782             GenTreePtr addrArg = gtOp.gtOp1;
15783             if (addrArg->OperGet() != GT_IND)
15784             {
15785                 return false;
15786             }
15787             else
15788             {
15789                 // The "Addr" node might be annotated with a zero-offset field sequence.
15790                 FieldSeqNode* zeroOffsetFldSeq = nullptr;
15791                 if (comp->GetZeroOffsetFieldMap()->Lookup(this, &zeroOffsetFldSeq))
15792                 {
15793                     *pFldSeq = comp->GetFieldSeqStore()->Append(*pFldSeq, zeroOffsetFldSeq);
15794                 }
15795                 return addrArg->ParseArrayElemForm(comp, arrayInfo, pFldSeq);
15796             }
15797         }
15798
15799         default:
15800             return false;
15801     }
15802 }
15803
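//------------------------------------------------------------------------
// GenTree::ParseOffsetForm: Check whether this tree is a constant offset
//    expression (a GT_CNS_INT, or a GT_ADD of such expressions), appending any
//    field sequences found to "*pFldSeq".
//
// Arguments:
//    comp    - The compiler instance.
//    pFldSeq - [in/out] The accumulated field sequence.
//
// Return Value:
//    True if the tree is a recognized constant offset form.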
15804 bool GenTree::ParseOffsetForm(Compiler* comp, FieldSeqNode** pFldSeq)
15805 {
15806     switch (OperGet())
15807     {
15808         case GT_CNS_INT:
15809         {
15810             GenTreeIntCon* icon = AsIntCon();
15811             *pFldSeq            = comp->GetFieldSeqStore()->Append(*pFldSeq, icon->gtFieldSeq);
15812             return true;
15813         }
15814
15815         case GT_ADD:
15816             if (!gtOp.gtOp1->ParseOffsetForm(comp, pFldSeq))
15817             {
15818                 return false;
15819             }
15820             return gtOp.gtOp2->ParseOffsetForm(comp, pFldSeq);
15821
15822         default:
15823             return false;
15824     }
15825 }
15826
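//------------------------------------------------------------------------
// GenTree::LabelIndex: Label the trees that contribute to an array index
//    expression, marking constant contributions with the ConstantIndex
//    pseudo-field and setting the "array index" flags on locals and array
//    lengths.
//
// Arguments:
//    comp    - The compiler instance.
//    isConst - True if the contribution of this subtree is known to be constant.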
15827 void GenTree::LabelIndex(Compiler* comp, bool isConst)
15828 {
15829     switch (OperGet())
15830     {
15831         case GT_CNS_INT:
15832             // If we got here, this is a contribution to the constant part of the index.
15833             if (isConst)
15834             {
15835                 gtIntCon.gtFieldSeq =
15836                     comp->GetFieldSeqStore()->CreateSingleton(FieldSeqStore::ConstantIndexPseudoField);
15837             }
15838             return;
15839
15840         case GT_LCL_VAR:
15841             gtFlags |= GTF_VAR_ARR_INDEX;
15842             return;
15843
15844         case GT_ADD:
15845         case GT_SUB:
15846             gtOp.gtOp1->LabelIndex(comp, isConst);
15847             gtOp.gtOp2->LabelIndex(comp, isConst);
15848             break;
15849
15850         case GT_CAST:
15851             gtOp.gtOp1->LabelIndex(comp, isConst);
15852             break;
15853
15854         case GT_ARR_LENGTH:
15855             gtFlags |= GTF_ARRLEN_ARR_IDX;
15856             return;
15857
15858         default:
15859             // For all other operators, peel off one constant; and then label the other if it's also a constant.
15860             if (OperIsArithmetic() || OperIsCompare())
15861             {
15862                 if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
15863                 {
15864                     gtOp.gtOp1->LabelIndex(comp, isConst);
15865                     break;
15866                 }
15867                 else if (gtOp.gtOp1->OperGet() == GT_CNS_INT)
15868                 {
15869                     gtOp.gtOp2->LabelIndex(comp, isConst);
15870                     break;
15871                 }
15872                 // Otherwise continue downward on both, labeling vars.
15873                 gtOp.gtOp1->LabelIndex(comp, false);
15874                 gtOp.gtOp2->LabelIndex(comp, false);
15875             }
15876             break;
15877     }
15878 }
15879
15880 // Note that the value of the below field doesn't matter; it exists only to provide a distinguished address.
15881 //
15882 // static
15883 FieldSeqNode FieldSeqStore::s_notAField(nullptr, nullptr);
15884
15885 // FieldSeqStore methods.
15886 FieldSeqStore::FieldSeqStore(IAllocator* alloc) : m_alloc(alloc), m_canonMap(new (alloc) FieldSeqNodeCanonMap(alloc))
15887 {
15888 }
15889
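//------------------------------------------------------------------------
// FieldSeqStore::CreateSingleton: Get the canonical single-element field
//    sequence for the given field handle, allocating it if it does not already
//    exist.
//
// Arguments:
//    fieldHnd - The field handle.
//
// Return Value:
//    The canonical FieldSeqNode for the field.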
15890 FieldSeqNode* FieldSeqStore::CreateSingleton(CORINFO_FIELD_HANDLE fieldHnd)
15891 {
15892     FieldSeqNode  fsn(fieldHnd, nullptr);
15893     FieldSeqNode* res = nullptr;
15894     if (m_canonMap->Lookup(fsn, &res))
15895     {
15896         return res;
15897     }
15898     else
15899     {
15900         res  = reinterpret_cast<FieldSeqNode*>(m_alloc->Alloc(sizeof(FieldSeqNode)));
15901         *res = fsn;
15902         m_canonMap->Set(fsn, res);
15903         return res;
15904     }
15905 }
15906
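//------------------------------------------------------------------------
// FieldSeqStore::Append: Concatenate two field sequences, returning the
//    canonical result. A nullptr argument acts as the identity; NotAField()
//    absorbs everything.
//
// Arguments:
//    a - The first field sequence.
//    b - The second field sequence.
//
// Return Value:
//    The canonical field sequence representing "a" followed by "b".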
15907 FieldSeqNode* FieldSeqStore::Append(FieldSeqNode* a, FieldSeqNode* b)
15908 {
15909     if (a == nullptr)
15910     {
15911         return b;
15912     }
15913     else if (a == NotAField())
15914     {
15915         return NotAField();
15916     }
15917     else if (b == nullptr)
15918     {
15919         return a;
15920     }
15921     else if (b == NotAField())
15922     {
15923         return NotAField();
15924         // Extremely special case for ConstantIndex pseudo-fields -- appending two consecutive such
15925         // fields collapses them into one.
15926     }
15927     else if (a->m_next == nullptr && a->m_fieldHnd == ConstantIndexPseudoField &&
15928              b->m_fieldHnd == ConstantIndexPseudoField)
15929     {
15930         return b;
15931     }
15932     else
15933     {
15934         FieldSeqNode* tmp = Append(a->m_next, b);
15935         FieldSeqNode  fsn(a->m_fieldHnd, tmp);
15936         FieldSeqNode* res = nullptr;
15937         if (m_canonMap->Lookup(fsn, &res))
15938         {
15939             return res;
15940         }
15941         else
15942         {
15943             res  = reinterpret_cast<FieldSeqNode*>(m_alloc->Alloc(sizeof(FieldSeqNode)));
15944             *res = fsn;
15945             m_canonMap->Set(fsn, res);
15946             return res;
15947         }
15948     }
15949 }
15950
15951 // Static vars.
15952 int FieldSeqStore::FirstElemPseudoFieldStruct;
15953 int FieldSeqStore::ConstantIndexPseudoFieldStruct;
15954
15955 CORINFO_FIELD_HANDLE FieldSeqStore::FirstElemPseudoField =
15956     (CORINFO_FIELD_HANDLE)&FieldSeqStore::FirstElemPseudoFieldStruct;
15957 CORINFO_FIELD_HANDLE FieldSeqStore::ConstantIndexPseudoField =
15958     (CORINFO_FIELD_HANDLE)&FieldSeqStore::ConstantIndexPseudoFieldStruct;
15959
15960 bool FieldSeqNode::IsFirstElemFieldSeq()
15961 {
15962     // this must be non-null per ISO C++
15963     return m_fieldHnd == FieldSeqStore::FirstElemPseudoField;
15964 }
15965
15966 bool FieldSeqNode::IsConstantIndexFieldSeq()
15967 {
15968     // this must be non-null per ISO C++
15969     return m_fieldHnd == FieldSeqStore::ConstantIndexPseudoField;
15970 }
15971
15972 bool FieldSeqNode::IsPseudoField()
15973 {
15974     if (this == nullptr)
15975     {
15976         return false;
15977     }
15978     return m_fieldHnd == FieldSeqStore::FirstElemPseudoField || m_fieldHnd == FieldSeqStore::ConstantIndexPseudoField;
15979 }
15980
15981 #ifdef FEATURE_SIMD
15982 GenTreeSIMD* Compiler::gtNewSIMDNode(
15983     var_types type, GenTreePtr op1, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
15984 {
15985     // TODO-CQ: An operand may be a GT_OBJ(GT_ADDR(GT_LCL_VAR))), in which case it should be
15986     // marked lvUsedInSIMDIntrinsic.
15987     assert(op1 != nullptr);
15988     if (op1->OperGet() == GT_LCL_VAR)
15989     {
15990         unsigned   lclNum                = op1->AsLclVarCommon()->GetLclNum();
15991         LclVarDsc* lclVarDsc             = &lvaTable[lclNum];
15992         lclVarDsc->lvUsedInSIMDIntrinsic = true;
15993     }
15994
15995     return new (this, GT_SIMD) GenTreeSIMD(type, op1, simdIntrinsicID, baseType, size);
15996 }
15997
15998 GenTreeSIMD* Compiler::gtNewSIMDNode(
15999     var_types type, GenTreePtr op1, GenTreePtr op2, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
16000 {
16001     // TODO-CQ: An operand may be a GT_OBJ(GT_ADDR(GT_LCL_VAR))), in which case it should be
16002     // marked lvUsedInSIMDIntrinsic.
16003     assert(op1 != nullptr);
16004     if (op1->OperIsLocal())
16005     {
16006         unsigned   lclNum                = op1->AsLclVarCommon()->GetLclNum();
16007         LclVarDsc* lclVarDsc             = &lvaTable[lclNum];
16008         lclVarDsc->lvUsedInSIMDIntrinsic = true;
16009     }
16010
16011     if (op2 != nullptr && op2->OperIsLocal())
16012     {
16013         unsigned   lclNum                = op2->AsLclVarCommon()->GetLclNum();
16014         LclVarDsc* lclVarDsc             = &lvaTable[lclNum];
16015         lclVarDsc->lvUsedInSIMDIntrinsic = true;
16016     }
16017
16018     return new (this, GT_SIMD) GenTreeSIMD(type, op1, op2, simdIntrinsicID, baseType, size);
16019 }
16020
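//------------------------------------------------------------------------
// GenTree::isCommutativeSIMDIntrinsic: Check whether this GT_SIMD node wraps a
//    commutative SIMD intrinsic.
//
// Return Value:
//    True if the intrinsic is commutative (add, mul, min, max, the bitwise
//    operations, and the equality comparisons).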
16021 bool GenTree::isCommutativeSIMDIntrinsic()
16022 {
16023     assert(gtOper == GT_SIMD);
16024     switch (AsSIMD()->gtSIMDIntrinsicID)
16025     {
16026         case SIMDIntrinsicAdd:
16027         case SIMDIntrinsicBitwiseAnd:
16028         case SIMDIntrinsicBitwiseOr:
16029         case SIMDIntrinsicBitwiseXor:
16030         case SIMDIntrinsicEqual:
16031         case SIMDIntrinsicMax:
16032         case SIMDIntrinsicMin:
16033         case SIMDIntrinsicMul:
16034         case SIMDIntrinsicOpEquality:
16035         case SIMDIntrinsicOpInEquality:
16036             return true;
16037         default:
16038             return false;
16039     }
16040 }
16041 #endif // FEATURE_SIMD
16042
16043 //---------------------------------------------------------------------------------------
16044 // GenTreeArgList::Prepend:
16045 //    Prepends an element to a GT_LIST.
16046 // 
16047 // Arguments:
16048 //    compiler - The compiler context.
16049 //    element  - The element to prepend.
16050 //
16051 // Returns:
16052 //    The new head of the list.
16053 GenTreeArgList* GenTreeArgList::Prepend(Compiler* compiler, GenTree* element)
16054 {
16055     GenTreeArgList* head = compiler->gtNewListNode(element, this);
16056     head->gtFlags |= (gtFlags & GTF_LIST_AGGREGATE);
16057     gtFlags &= ~GTF_LIST_AGGREGATE;
16058     return head;
16059 }
16060
16061 //---------------------------------------------------------------------------------------
16062 // InitializeStructReturnType:
16063 //    Initialize the Return Type Descriptor for a method that returns a struct type
16064 //
16065 // Arguments
16066 //    comp        -  Compiler Instance
16067 //    retClsHnd   -  VM handle to the struct type returned by the method
16068 //
16069 // Return Value
16070 //    None
16071 //
16072 void ReturnTypeDesc::InitializeStructReturnType(Compiler* comp, CORINFO_CLASS_HANDLE retClsHnd)
16073 {
16074     assert(!m_inited);
16075
16076 #if FEATURE_MULTIREG_RET
16077
16078     assert(retClsHnd != NO_CLASS_HANDLE);
16079     unsigned structSize = comp->info.compCompHnd->getClassSize(retClsHnd);
16080
16081     Compiler::structPassingKind howToReturnStruct;
16082     var_types                   returnType = comp->getReturnTypeForStruct(retClsHnd, &howToReturnStruct, structSize);
16083
16084     switch (howToReturnStruct)
16085     {
16086         case Compiler::SPK_PrimitiveType:
16087         {
16088             assert(returnType != TYP_UNKNOWN);
16089             assert(returnType != TYP_STRUCT);
16090             m_regType[0] = returnType;
16091             break;
16092         }
16093
16094         case Compiler::SPK_ByValueAsHfa:
16095         {
16096             assert(returnType == TYP_STRUCT);
16097             var_types hfaType = comp->GetHfaType(retClsHnd);
16098
16099             // We should have an hfa struct type
16100             assert(varTypeIsFloating(hfaType));
16101
16102             // Note that the retail build issues a warning about a potential division by zero without this Max function
16103             unsigned elemSize = Max((unsigned)1, EA_SIZE_IN_BYTES(emitActualTypeSize(hfaType)));
16104
16105             // The size of this struct should be evenly divisible by elemSize
16106             assert((structSize % elemSize) == 0);
16107
16108             unsigned hfaCount = (structSize / elemSize);
16109             for (unsigned i = 0; i < hfaCount; ++i)
16110             {
16111                 m_regType[i] = hfaType;
16112             }
16113
16114             if (comp->compFloatingPointUsed == false)
16115             {
16116                 comp->compFloatingPointUsed = true;
16117             }
16118             break;
16119         }
16120
16121         case Compiler::SPK_ByValue:
16122         {
16123             assert(returnType == TYP_STRUCT);
16124
16125 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
16126
16127             SYSTEMV_AMD64_CORINFO_STRUCT_REG_PASSING_DESCRIPTOR structDesc;
16128             comp->eeGetSystemVAmd64PassStructInRegisterDescriptor(retClsHnd, &structDesc);
16129
16130             assert(structDesc.passedInRegisters);
16131             for (int i = 0; i < structDesc.eightByteCount; i++)
16132             {
16133                 assert(i < MAX_RET_REG_COUNT);
16134                 m_regType[i] = comp->GetEightByteType(structDesc, i);
16135             }
16136
16137 #elif defined(_TARGET_ARM64_)
16138
16139             // a non-HFA struct returned using two registers
16140             //
16141             assert((structSize > TARGET_POINTER_SIZE) && (structSize <= (2 * TARGET_POINTER_SIZE)));
16142
16143             BYTE gcPtrs[2] = {TYPE_GC_NONE, TYPE_GC_NONE};
16144             comp->info.compCompHnd->getClassGClayout(retClsHnd, &gcPtrs[0]);
16145             for (unsigned i = 0; i < 2; ++i)
16146             {
16147                 m_regType[i] = comp->getJitGCType(gcPtrs[i]);
16148             }
16149
16150 #else //  _TARGET_XXX_
16151
16152             // This target needs support here!
16153             //
16154             NYI("Unsupported TARGET returning a TYP_STRUCT in InitializeStructReturnType");
16155
16156 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
16157
16158             break; // for case SPK_ByValue
16159         }
16160
16161         case Compiler::SPK_ByReference:
16162
16163             // We are returning using the return buffer argument
16164             // There are no return registers
16165             break;
16166
16167         default:
16168
16169             unreached(); // By the contract of getReturnTypeForStruct we should never get here.
16170
16171     } // end of switch (howToReturnStruct)
16172
16173 #endif //  FEATURE_MULTIREG_RET
16174
16175 #ifdef DEBUG
16176     m_inited = true;
16177 #endif
16178 }
16179
16180 //---------------------------------------------------------------------------------------
16181 // InitializeLongReturnType:
16182 //    Initialize the Return Type Descriptor for a method that returns a TYP_LONG
16183 //
16184 // Arguments
16185 //    comp        -  Compiler Instance
16186 //
16187 // Return Value
16188 //    None
16189 //
16190 void ReturnTypeDesc::InitializeLongReturnType(Compiler* comp)
16191 {
16192 #if defined(_TARGET_X86_)
16193
16194     // Sets up a ReturnTypeDesc for returning a long using two registers
16195     //
16196     assert(MAX_RET_REG_COUNT >= 2);
16197     m_regType[0] = TYP_INT;
16198     m_regType[1] = TYP_INT;
16199
16200 #else // not _TARGET_X86_
16201
16202     m_regType[0] = TYP_LONG;
16203
16204 #endif // _TARGET_X86_
16205
16206 #ifdef DEBUG
16207     m_inited = true;
16208 #endif
16209 }
16210
16211 //-------------------------------------------------------------------
16212 // GetABIReturnReg:  Return ith return register as per target ABI
16213 //
16214 // Arguments:
16215 //     idx   -   Index of the return register.
16216 //               The first return register has an index of 0 and so on.
16217 //
16218 // Return Value:
16219 //     Returns ith return register as per target ABI.
16220 //
16221 // Notes:
16222 //     Right now this is implemented only for x64 Unix
16223 //     and yet to be implemented for other multi-reg return
16224 //     targets (Arm64/Arm32/x86).
16225 //
16226 // TODO-ARM:   Implement this routine to support HFA returns.
16227 // TODO-X86:   Implement this routine to support long returns.
16228 regNumber ReturnTypeDesc::GetABIReturnReg(unsigned idx)
16229 {
16230     unsigned count = GetReturnRegCount();
16231     assert(idx < count);
16232
16233     regNumber resultReg = REG_NA;
16234
16235 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
16236     var_types regType0 = GetReturnRegType(0);
16237
16238     if (idx == 0)
16239     {
16240         if (varTypeIsIntegralOrI(regType0))
16241         {
16242             resultReg = REG_INTRET;
16243         }
16244         else
16245         {
16246             noway_assert(varTypeIsFloating(regType0));
16247             resultReg = REG_FLOATRET;
16248         }
16249     }
16250     else if (idx == 1)
16251     {
16252         var_types regType1 = GetReturnRegType(1);
16253
16254         if (varTypeIsIntegralOrI(regType1))
16255         {
16256             if (varTypeIsIntegralOrI(regType0))
16257             {
16258                 resultReg = REG_INTRET_1;
16259             }
16260             else
16261             {
16262                 resultReg = REG_INTRET;
16263             }
16264         }
16265         else
16266         {
16267             noway_assert(varTypeIsFloating(regType1));
16268
16269             if (varTypeIsFloating(regType0))
16270             {
16271                 resultReg = REG_FLOATRET_1;
16272             }
16273             else
16274             {
16275                 resultReg = REG_FLOATRET;
16276             }
16277         }
16278     }
16279
16280 #elif defined(_TARGET_X86_)
16281
16282     if (idx == 0)
16283     {
16284         resultReg = REG_LNGRET_LO;
16285     }
16286     else if (idx == 1)
16287     {
16288         resultReg = REG_LNGRET_HI;
16289     }
16290
16291 #elif defined(_TARGET_ARM64_)
16292
16293     var_types regType = GetReturnRegType(idx);
16294     if (varTypeIsIntegralOrI(regType))
16295     {
16296         noway_assert(idx < 2);                              // Up to 2 return registers for 16-byte structs
16297         resultReg = (idx == 0) ? REG_INTRET : REG_INTRET_1; // X0 or X1
16298     }
16299     else
16300     {
16301         noway_assert(idx < 4);                                   // Up to 4 return registers for HFA's
16302         resultReg = (regNumber)((unsigned)(REG_FLOATRET) + idx); // V0, V1, V2 or V3
16303     }
16304
16305 #endif // TARGET_XXX
16306
16307     assert(resultReg != REG_NA);
16308     return resultReg;
16309 }
16310
16311 //--------------------------------------------------------------------------------
16312 // GetABIReturnRegs: get the mask of return registers as per target arch ABI.
16313 //
16314 // Arguments:
16315 //    None
16316 //
16317 // Return Value:
16318 //    reg mask of return registers in which the return type is returned.
16319 //
16320 // Note:
16321 //    For now this is implemented only for x64 Unix and yet to be implemented
16322 //    for other multi-reg return targets (Arm64/Arm32/x86).
16323 //
16324 //    This routine can be used when the caller is not particular about the order
16325 //    of return registers and wants to know the set of return registers.
16326 //
16327 // TODO-ARM:   Implement this routine to support HFA returns.
16328 // TODO-ARM64: Implement this routine to support HFA returns.
16329 // TODO-X86:   Implement this routine to support long returns.
16330 //
16331 // static
16332 regMaskTP ReturnTypeDesc::GetABIReturnRegs()
16333 {
16334     regMaskTP resultMask = RBM_NONE;
16335
16336     unsigned count = GetReturnRegCount();
16337     for (unsigned i = 0; i < count; ++i)
16338     {
16339         resultMask |= genRegMask(GetABIReturnReg(i));
16340     }
16341
16342     return resultMask;
16343 }