Merge pull request #6297 from CarolEidt/MorphGenTreeRefactors
[platform/upstream/coreclr.git] / src / jit / gentree.cpp
1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
4
5 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
7 XX                                                                           XX
8 XX                               GenTree                                     XX
9 XX                                                                           XX
10 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
11 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
12 */
13
14 #include "jitpch.h"
15 #include "simd.h"
16
17 #ifdef _MSC_VER
18 #pragma hdrstop
19 #endif
20
21 /*****************************************************************************/
22
23 const
24 unsigned short    GenTree::gtOperKindTable[] =
25 {
26     #define GTNODE(en,sn,cm,ok) ok + GTK_COMMUTE*cm,
27     #include "gtlist.h"
28 };
29
30 /*****************************************************************************/
31 // static
32 genTreeOps        GenTree::OpAsgToOper(genTreeOps op)
33 {
34     // Precondition.
35     assert(OperIsAssignment(op) && op != GT_ASG);
36     switch (op)
37     {
38     case GT_ASG_ADD: return GT_ADD;
39     case GT_ASG_SUB: return GT_SUB;
40     case GT_ASG_MUL: return GT_MUL;
41     case GT_ASG_DIV: return GT_DIV;
42     case GT_ASG_MOD: return GT_MOD;
43
44     case GT_ASG_UDIV: return GT_UDIV;
45     case GT_ASG_UMOD: return GT_UMOD;
46
47     case GT_ASG_OR: return GT_OR;
48     case GT_ASG_XOR: return GT_XOR;
49     case GT_ASG_AND: return GT_AND;
50     case GT_ASG_LSH: return GT_LSH;
51     case GT_ASG_RSH: return GT_RSH;
52     case GT_ASG_RSZ: return GT_RSZ;
53
54     case GT_CHS: return GT_NEG;
55
56     default:
57         unreached(); // Precondition implies we don't get here.
58     }
59 }
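// Usage sketch (illustrative, not part of the original source): when an op-assignment
// node such as GT_ASG_ADD (the tree form of "x += y") is expanded into an assignment
// of a plain binary operation, OpAsgToOper supplies the underlying operator:
//
//     genTreeOps plainOper = GenTree::OpAsgToOper(GT_ASG_ADD);   // yields GT_ADD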
60
61 /*****************************************************************************
62  *
63  *  The types of different GenTree nodes
64  */
65
66 #ifdef DEBUG
67
68 #define INDENT_SIZE         3
69
70 //--------------------------------------------
71 // 
72 // IndentStack: This struct is used, along with its related enums and strings,
73 //    to control both the indentation and the printing of arcs.
74 //
75 // Notes:
76 //    The mode of printing is set in the Constructor, using its 'compiler' argument.
77 //    Currently it only prints arcs when fgOrder == fgOrderLinear.
78 //    The type of arc to print is specified by the IndentInfo enum, and is controlled
79 //    by the caller of the Push() method.
80
81 enum IndentChars {ICVertical, ICBottom, ICTop, ICMiddle, ICDash, ICEmbedded, ICTerminal, ICError, IndentCharCount };
82 // Sets of strings for different dumping options            vert             bot             top             mid             dash       embedded    terminal    error
83 static const char*  emptyIndents[IndentCharCount]   = {     " ",             " ",            " ",            " ",            " ",           "{",      "",        "?"  };
84 static const char*  asciiIndents[IndentCharCount]   = {     "|",            "\\",            "/",            "+",            "-",           "{",      "*",       "?"  };
85 static const char*  unicodeIndents[IndentCharCount] = { "\xe2\x94\x82", "\xe2\x94\x94", "\xe2\x94\x8c", "\xe2\x94\x9c", "\xe2\x94\x80",     "{", "\xe2\x96\x8c", "?"  };
86 typedef ArrayStack<Compiler::IndentInfo> IndentInfoStack;
87 struct IndentStack
88 {
89     IndentInfoStack     stack;
90     const char**        indents;
91
92     // Constructor for IndentStack.  Uses 'compiler' to determine the mode of printing.
93     IndentStack(Compiler* compiler) :
94         stack(compiler)
95     {
96         if (compiler->asciiTrees)
97         {
98             indents = asciiIndents;
99         }
100         else
101         {
102             indents = unicodeIndents;
103         }
104     }
105
106     // Return the depth of the current indentation.
107     unsigned Depth()
108     {
109         return stack.Height();
110     }
111
112     // Push a new indentation onto the stack, of the given type.
113     void Push(Compiler::IndentInfo info)
114     {
115         stack.Push(info);
116     }
117
118     // Pop the most recent indentation type off the stack.
119     Compiler::IndentInfo Pop()
120     {
121         return stack.Pop();
122     }
123
124     // Print the current indentation and arcs.
125     void print()
126     {
127         unsigned indentCount = Depth();
128         for (unsigned i = 0; i < indentCount; i++)
129         {
130             unsigned index = indentCount-1-i;
131             switch (stack.Index(index))
132             {
133             case Compiler::IndentInfo::IINone:
134                 printf("   ");
135                 break;
136             case Compiler::IndentInfo::IIEmbedded:
137                 printf("%s  ", indents[ICEmbedded]);
138                 break;
139             case Compiler::IndentInfo::IIArc:
140                 if (index == 0)
141                 {
142                     printf("%s%s%s", indents[ICMiddle], indents[ICDash], indents[ICDash]);
143                 }
144                 else
145                 {
146                     printf("%s  ", indents[ICVertical]);
147                 }
148                 break;
149             case Compiler::IndentInfo::IIArcBottom:
150                 printf("%s%s%s", indents[ICBottom], indents[ICDash], indents[ICDash]);
151                 break;
152             case Compiler::IndentInfo::IIArcTop:
153                 printf("%s%s%s", indents[ICTop], indents[ICDash], indents[ICDash]);
154                 break;
155             case Compiler::IndentInfo::IIError:
156                 printf("%s%s%s", indents[ICError], indents[ICDash], indents[ICDash]);
157                 break;
158             default:
159                 unreached();
160             }
161         }
162         printf("%s", indents[ICTerminal]);
163     }
164 };
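// Usage sketch (illustrative only, not part of the original source): dump code is
// expected to push an arc type before recursing into a child and pop it afterwards:
//
//     IndentStack indentStack(compiler);               // picks ascii or unicode arcs
//     indentStack.Push(Compiler::IndentInfo::IIArc);
//     indentStack.print();                             // prints the accumulated indentation and arcs
//     indentStack.Pop();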
165
166 //------------------------------------------------------------------------
167 // printIndent: This is a static method which simply invokes the 'print'
168 //    method on its 'indentStack' argument.
169 //
170 // Arguments:
171 //    indentStack - specifies the information for the indentation & arcs to be printed
172 //
173 // Notes:
174 //    This method exists to localize the checking for the case where indentStack is null.
175
176 static void printIndent(IndentStack* indentStack)
177 {
178     if (indentStack == nullptr)
179         return;
180     indentStack->print();
181 }
182
183 static const   char *      nodeNames[] =
184 {
185     #define GTNODE(en,sn,cm,ok) sn,
186     #include "gtlist.h"
187 };
188
189 const   char    *   GenTree::NodeName(genTreeOps op)
190 {
191     assert((unsigned)op < sizeof(nodeNames)/sizeof(nodeNames[0]));
192
193     return  nodeNames[op];
194 }
195
196 static const   char *      opNames[] =
197 {
198     #define GTNODE(en,sn,cm,ok) #en,
199     #include "gtlist.h"
200 };
201
202 const   char    *   GenTree::OpName(genTreeOps op)
203 {
204     assert((unsigned)op < sizeof(opNames)/sizeof(opNames[0]));
205
206     return  opNames[op];
207 }
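// Illustrative example (assuming the usual gtlist.h entries; not part of the original source):
//
//     const char* opName   = GenTree::OpName(GT_ADD);    // "GT_ADD" (stringized enum identifier)
//     const char* nodeName = GenTree::NodeName(GT_ADD);  // short display name defined in gtlist.h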
208
209 #endif
210
211 /*****************************************************************************
212  *
213  *  When 'SMALL_TREE_NODES' is enabled, we allocate tree nodes in 2 different
214  *  sizes: 'GTF_DEBUG_NODE_SMALL' for most nodes and 'GTF_DEBUG_NODE_LARGE' for
215  *  the few nodes (such as calls and statement list nodes) that have more fields
216  *  and take up a lot more space.
217  */
218
219 #if SMALL_TREE_NODES
220
221 /* GT_COUNT'th oper is overloaded as 'undefined oper', so allocate storage for GT_COUNT'th oper also */
222 /* static */
223 unsigned char       GenTree::s_gtNodeSizes[GT_COUNT+1];
224
225 /* static */
226 void                GenTree::InitNodeSize()
227 {
228     /* 'GT_LCL_VAR' often gets changed to 'GT_REG_VAR' */
229
230     assert(GenTree::s_gtNodeSizes[GT_LCL_VAR] >= GenTree::s_gtNodeSizes[GT_REG_VAR]);
231
232     /* Set all sizes to 'small' first */
233
234     for (unsigned op = 0; op <= GT_COUNT; op++)
235     {
236         GenTree::s_gtNodeSizes[op] = TREE_NODE_SZ_SMALL;
237     }
238
239     // Now set all of the appropriate entries to 'large'
240
241 // On ARM32, ARM64, and System V, for struct returns there is code that
242 // turns the GT_ASG tree into a CopyObj call.
243 // CopyObj is a large node and GT_ASG is small, which triggers an exception.
244 #if defined(FEATURE_HFA) || defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
245     GenTree::s_gtNodeSizes[GT_ASG             ] = TREE_NODE_SZ_LARGE;
246     GenTree::s_gtNodeSizes[GT_RETURN          ] = TREE_NODE_SZ_LARGE;
247 #endif // defined(FEATURE_HFA) || defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
248
249     GenTree::s_gtNodeSizes[GT_CALL            ] = TREE_NODE_SZ_LARGE;
250     GenTree::s_gtNodeSizes[GT_CAST            ] = TREE_NODE_SZ_LARGE;
251     GenTree::s_gtNodeSizes[GT_FTN_ADDR        ] = TREE_NODE_SZ_LARGE;
252     GenTree::s_gtNodeSizes[GT_BOX             ] = TREE_NODE_SZ_LARGE;
253     GenTree::s_gtNodeSizes[GT_INDEX           ] = TREE_NODE_SZ_LARGE;
254     GenTree::s_gtNodeSizes[GT_ARR_BOUNDS_CHECK] = TREE_NODE_SZ_LARGE;
255 #ifdef FEATURE_SIMD
256     GenTree::s_gtNodeSizes[GT_SIMD_CHK        ] = TREE_NODE_SZ_LARGE;
257 #endif // FEATURE_SIMD
258     GenTree::s_gtNodeSizes[GT_ARR_ELEM        ] = TREE_NODE_SZ_LARGE;
259     GenTree::s_gtNodeSizes[GT_ARR_INDEX       ] = TREE_NODE_SZ_LARGE;
260     GenTree::s_gtNodeSizes[GT_ARR_OFFSET      ] = TREE_NODE_SZ_LARGE;
261     GenTree::s_gtNodeSizes[GT_RET_EXPR        ] = TREE_NODE_SZ_LARGE;
262     GenTree::s_gtNodeSizes[GT_OBJ             ] = TREE_NODE_SZ_LARGE;
263     GenTree::s_gtNodeSizes[GT_FIELD           ] = TREE_NODE_SZ_LARGE;
264     GenTree::s_gtNodeSizes[GT_STMT            ] = TREE_NODE_SZ_LARGE;
265     GenTree::s_gtNodeSizes[GT_CMPXCHG         ] = TREE_NODE_SZ_LARGE;
266     GenTree::s_gtNodeSizes[GT_QMARK           ] = TREE_NODE_SZ_LARGE;
267     GenTree::s_gtNodeSizes[GT_LEA             ] = TREE_NODE_SZ_LARGE;
268     GenTree::s_gtNodeSizes[GT_COPYOBJ         ] = TREE_NODE_SZ_LARGE;
269     GenTree::s_gtNodeSizes[GT_INTRINSIC       ] = TREE_NODE_SZ_LARGE;
270 #if USE_HELPERS_FOR_INT_DIV
271     GenTree::s_gtNodeSizes[GT_DIV             ] = TREE_NODE_SZ_LARGE;
272     GenTree::s_gtNodeSizes[GT_UDIV            ] = TREE_NODE_SZ_LARGE;
273     GenTree::s_gtNodeSizes[GT_MOD             ] = TREE_NODE_SZ_LARGE;
274     GenTree::s_gtNodeSizes[GT_UMOD            ] = TREE_NODE_SZ_LARGE;
275 #endif
276 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
277     GenTree::s_gtNodeSizes[GT_PUTARG_STK      ] = TREE_NODE_SZ_LARGE;
278 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
279 #if defined(FEATURE_HFA) || defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
280 // In the importer, for HFA and register-returned structs we rewrite GT_ASG to GT_COPYOBJ/GT_COPYBLK.
281     // Make sure the sizes agree.
282     assert(GenTree::s_gtNodeSizes[GT_COPYOBJ] <= GenTree::s_gtNodeSizes[GT_ASG]);
283     assert(GenTree::s_gtNodeSizes[GT_COPYBLK] <= GenTree::s_gtNodeSizes[GT_ASG]);
284 #endif // defined(FEATURE_HFA) || defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
285
286     assert(GenTree::s_gtNodeSizes[GT_RETURN] == GenTree::s_gtNodeSizes[GT_ASG]);
287
288     // This list of assertions should come to contain all GenTree subtypes that are declared
289     // "small".
290     assert(sizeof(GenTreeLclFld) <= GenTree::s_gtNodeSizes[GT_LCL_FLD]);
291     assert(sizeof(GenTreeLclVar) <= GenTree::s_gtNodeSizes[GT_LCL_VAR]);
292
293     static_assert_no_msg(sizeof(GenTree)              <= TREE_NODE_SZ_SMALL);
294     static_assert_no_msg(sizeof(GenTreeUnOp)          <= TREE_NODE_SZ_SMALL);
295     static_assert_no_msg(sizeof(GenTreeOp)            <= TREE_NODE_SZ_SMALL);
296     static_assert_no_msg(sizeof(GenTreeVal)           <= TREE_NODE_SZ_SMALL);
297     static_assert_no_msg(sizeof(GenTreeIntConCommon)  <= TREE_NODE_SZ_SMALL);
298     static_assert_no_msg(sizeof(GenTreePhysReg)       <= TREE_NODE_SZ_SMALL);
299 #ifndef LEGACY_BACKEND
300     static_assert_no_msg(sizeof(GenTreeJumpTable)     <= TREE_NODE_SZ_SMALL);
301 #endif // !LEGACY_BACKEND
302     static_assert_no_msg(sizeof(GenTreeIntCon)        <= TREE_NODE_SZ_SMALL);
303     static_assert_no_msg(sizeof(GenTreeLngCon)        <= TREE_NODE_SZ_SMALL);
304     static_assert_no_msg(sizeof(GenTreeDblCon)        <= TREE_NODE_SZ_SMALL);
305     static_assert_no_msg(sizeof(GenTreeStrCon)        <= TREE_NODE_SZ_SMALL);
306     static_assert_no_msg(sizeof(GenTreeLclVarCommon)  <= TREE_NODE_SZ_SMALL);
307     static_assert_no_msg(sizeof(GenTreeLclVar)        <= TREE_NODE_SZ_SMALL);
308     static_assert_no_msg(sizeof(GenTreeLclFld)        <= TREE_NODE_SZ_SMALL);
309     static_assert_no_msg(sizeof(GenTreeRegVar)        <= TREE_NODE_SZ_SMALL);
310     static_assert_no_msg(sizeof(GenTreeCast)          <= TREE_NODE_SZ_LARGE); // *** large node
311     static_assert_no_msg(sizeof(GenTreeBox)           <= TREE_NODE_SZ_LARGE); // *** large node
312     static_assert_no_msg(sizeof(GenTreeField)         <= TREE_NODE_SZ_LARGE); // *** large node
313     static_assert_no_msg(sizeof(GenTreeArgList)       <= TREE_NODE_SZ_SMALL);
314     static_assert_no_msg(sizeof(GenTreeColon)         <= TREE_NODE_SZ_SMALL);
315     static_assert_no_msg(sizeof(GenTreeCall)          <= TREE_NODE_SZ_LARGE); // *** large node
316     static_assert_no_msg(sizeof(GenTreeCmpXchg)       <= TREE_NODE_SZ_LARGE); // *** large node
317     static_assert_no_msg(sizeof(GenTreeFptrVal)       <= TREE_NODE_SZ_LARGE); // *** large node
318     static_assert_no_msg(sizeof(GenTreeQmark)         <= TREE_NODE_SZ_LARGE); // *** large node
319     static_assert_no_msg(sizeof(GenTreeIntrinsic)     <= TREE_NODE_SZ_LARGE); // *** large node
320     static_assert_no_msg(sizeof(GenTreeIndex)         <= TREE_NODE_SZ_LARGE); // *** large node
321     static_assert_no_msg(sizeof(GenTreeArrLen)        <= TREE_NODE_SZ_LARGE); // *** large node
322     static_assert_no_msg(sizeof(GenTreeBoundsChk)     <= TREE_NODE_SZ_LARGE); // *** large node
323     static_assert_no_msg(sizeof(GenTreeArrElem)       <= TREE_NODE_SZ_LARGE); // *** large node
324     static_assert_no_msg(sizeof(GenTreeArrIndex)      <= TREE_NODE_SZ_LARGE); // *** large node
325     static_assert_no_msg(sizeof(GenTreeArrOffs)       <= TREE_NODE_SZ_LARGE); // *** large node
326     static_assert_no_msg(sizeof(GenTreeIndir)         <= TREE_NODE_SZ_SMALL);
327     static_assert_no_msg(sizeof(GenTreeStoreInd)      <= TREE_NODE_SZ_SMALL);
328     static_assert_no_msg(sizeof(GenTreeBlkOp)         <= TREE_NODE_SZ_SMALL);
329     static_assert_no_msg(sizeof(GenTreeCpBlk)         <= TREE_NODE_SZ_SMALL);
330     static_assert_no_msg(sizeof(GenTreeInitBlk)       <= TREE_NODE_SZ_SMALL);
331     static_assert_no_msg(sizeof(GenTreeCpObj)         <= TREE_NODE_SZ_LARGE); // *** large node
332     static_assert_no_msg(sizeof(GenTreeRetExpr)       <= TREE_NODE_SZ_LARGE); // *** large node
333     static_assert_no_msg(sizeof(GenTreeStmt)          <= TREE_NODE_SZ_LARGE); // *** large node
334     static_assert_no_msg(sizeof(GenTreeObj)           <= TREE_NODE_SZ_LARGE); // *** large node
335     static_assert_no_msg(sizeof(GenTreeClsVar)        <= TREE_NODE_SZ_SMALL);
336     static_assert_no_msg(sizeof(GenTreeArgPlace)      <= TREE_NODE_SZ_SMALL);
337     static_assert_no_msg(sizeof(GenTreeLabel)         <= TREE_NODE_SZ_SMALL);
338     static_assert_no_msg(sizeof(GenTreePhiArg)        <= TREE_NODE_SZ_SMALL);
339 #ifndef FEATURE_UNIX_AMD64_STRUCT_PASSING
340     static_assert_no_msg(sizeof(GenTreePutArgStk)     <= TREE_NODE_SZ_SMALL);
341 #else // FEATURE_UNIX_AMD64_STRUCT_PASSING
342     static_assert_no_msg(sizeof(GenTreePutArgStk)     <= TREE_NODE_SZ_LARGE);
343 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
344
345 #ifdef FEATURE_SIMD
346     static_assert_no_msg(sizeof(GenTreeSIMD)          <= TREE_NODE_SZ_SMALL);
347 #endif // FEATURE_SIMD
348 }
349
350 size_t              GenTree::GetNodeSize() const
351 {
352     return GenTree::s_gtNodeSizes[gtOper];
353 }
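// Illustrative example (hypothetical nodes 'call' and 'lclVar'; not part of the original
// source): per the table initialized in InitNodeSize above, a GT_CALL node is allocated
// large while a GT_LCL_VAR stays small:
//
//     assert(call->GetNodeSize()   == TREE_NODE_SZ_LARGE);
//     assert(lclVar->GetNodeSize() == TREE_NODE_SZ_SMALL);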
354
355 #ifdef DEBUG
356 bool                GenTree::IsNodeProperlySized() const
357 {
358     size_t size;
359
360     if (gtDebugFlags & GTF_DEBUG_NODE_SMALL) 
361     {
362         size = TREE_NODE_SZ_SMALL;
363     }
364     else  
365     {
366         assert(gtDebugFlags & GTF_DEBUG_NODE_LARGE);
367         size = TREE_NODE_SZ_LARGE;
368     }
369
370     return GenTree::s_gtNodeSizes[gtOper] <= size;
371 }
372 #endif
373
374 #else // SMALL_TREE_NODES
375
376 #ifdef DEBUG
377 bool                GenTree::IsNodeProperlySized() const
378 {
379     return  true;
380 }
381 #endif
382
383 #endif // SMALL_TREE_NODES
384
385 /*****************************************************************************/
386
387 // Make sure these get instantiated, because they're not in a header file
388 // (emulating the C++ 'export' keyword here).
389 // VC appears to be somewhat unpredictable about whether they end up in the .obj file without this.
390 template Compiler::fgWalkResult Compiler::fgWalkTreePostRec<true>   (GenTreePtr *pTree, fgWalkData *fgWalkData);
391 template Compiler::fgWalkResult Compiler::fgWalkTreePostRec<false>  (GenTreePtr *pTree, fgWalkData *fgWalkData);
392 template Compiler::fgWalkResult Compiler::fgWalkTreePreRec<true>    (GenTreePtr *pTree, fgWalkData *fgWalkData);
393 template Compiler::fgWalkResult Compiler::fgWalkTreePreRec<false>   (GenTreePtr *pTree, fgWalkData *fgWalkData);
394 template Compiler::fgWalkResult Compiler::fgWalkTreeRec<true,true>  (GenTreePtr *pTree, fgWalkData *fgWalkData);
395 template Compiler::fgWalkResult Compiler::fgWalkTreeRec<false,false>(GenTreePtr *pTree, fgWalkData *fgWalkData);
396 template Compiler::fgWalkResult Compiler::fgWalkTreeRec<true,false> (GenTreePtr *pTree, fgWalkData *fgWalkData);
397 template Compiler::fgWalkResult Compiler::fgWalkTreeRec<false,true> (GenTreePtr *pTree, fgWalkData *fgWalkData);
398
399 //******************************************************************************
400 // fgWalkTreePreRec - Helper function for fgWalkTreePre.
401 //                    Walk the tree in pre-order, executing the callback on every node.
402 //                    Template parameter 'computeStack' specifies whether to maintain
403 //                    a stack of ancestor nodes which can be viewed in the callback.
404 //
405 template<bool computeStack>
406 // static
407 Compiler::fgWalkResult      Compiler::fgWalkTreePreRec(GenTreePtr *pTree, fgWalkData *fgWalkData)
408 {
409     fgWalkResult    result        = WALK_CONTINUE;
410     GenTreePtr      currentParent = fgWalkData->parent;
411
412     genTreeOps      oper;
413     unsigned        kind;
414
415     do 
416     {
417         GenTreePtr tree = *pTree;
418         assert(tree);
419         assert(tree->gtOper != GT_STMT);
420         GenTreeArgList* args;  // For call node arg lists.
421
422         if (computeStack)
423             fgWalkData->parentStack->Push(tree);
424
425         /* Visit this node */
426
427         // if we are not in the mode where we only do the callback for local var nodes,
428         // visit the node unconditionally.  Otherwise we will visit it under leaf handling.
429         if  (!fgWalkData->wtprLclsOnly)
430         {
431             assert(tree == *pTree);
432             result = fgWalkData->wtprVisitorFn(pTree, fgWalkData);
433             if  (result != WALK_CONTINUE)
434                 break;
435         }
436
437         /* Figure out what kind of a node we have */
438
439         oper = tree->OperGet();
440         kind = tree->OperKind();
441
442         /* Is this a constant or leaf node? */
443
444         if  (kind & (GTK_CONST|GTK_LEAF))
445         {
446             if  (fgWalkData->wtprLclsOnly && (oper == GT_LCL_VAR || oper == GT_LCL_FLD))
447                 result = fgWalkData->wtprVisitorFn(pTree, fgWalkData);
448             break;
449         }
450         else if (fgWalkData->wtprLclsOnly && GenTree::OperIsLocalStore(oper))
451         {
452             result = fgWalkData->wtprVisitorFn(pTree, fgWalkData);
453             if  (result != WALK_CONTINUE)
454                 break;
455         }
456
457         fgWalkData->parent = tree;
458
459         /* Is it a 'simple' unary/binary operator? */
460
461         if  (kind & GTK_SMPOP)
462         {
463             if  (tree->gtGetOp2())
464             {
465                 if (tree->gtOp.gtOp1 != NULL)
466                 {
467                     result = fgWalkTreePreRec<computeStack>(&tree->gtOp.gtOp1, fgWalkData);
468                     if  (result == WALK_ABORT)
469                         return result;
470                 }
471                 else
472                 {
473                     assert(tree->NullOp1Legal());
474                 }
475
476                 pTree = &tree->gtOp.gtOp2;
477                 continue;
478             }
479             else
480             {
481                 pTree = &tree->gtOp.gtOp1;
482                 if  (*pTree){
483                     continue;
484                 }
485
486                 break;
487             }
488         }
489
490         /* See what kind of a special operator we have here */
491
492         switch  (oper)
493         {
494         case GT_FIELD:
495             pTree = &tree->gtField.gtFldObj;
496             break;
497
498         case GT_CALL:
499
500             assert(tree->gtFlags & GTF_CALL);
501
502             /* Is this a call to unmanaged code ? */
503             if  (fgWalkData->wtprLclsOnly && (tree->gtFlags & GTF_CALL_UNMANAGED))
504             {
505                 result = fgWalkData->wtprVisitorFn(pTree, fgWalkData);
506                 if  (result == WALK_ABORT)
507                     return result;
508             }
509
510             if  (tree->gtCall.gtCallObjp)
511             {
512                 result = fgWalkTreePreRec<computeStack>(&tree->gtCall.gtCallObjp, fgWalkData);
513                 if  (result == WALK_ABORT)
514                     return result;
515             }
516
517             for (args = tree->gtCall.gtCallArgs; args; args = args->Rest())
518             {
519                 result = fgWalkTreePreRec<computeStack>(args->pCurrent(), fgWalkData);
520                 if  (result == WALK_ABORT)
521                     return result;
522             }
523
524             for (args = tree->gtCall.gtCallLateArgs; args; args = args->Rest())
525             {
526                 result = fgWalkTreePreRec<computeStack>(args->pCurrent(), fgWalkData);
527                 if  (result == WALK_ABORT)
528                     return result;
529             }
530
531             if (tree->gtCall.gtControlExpr)
532             {
533                 result = fgWalkTreePreRec<computeStack>(&tree->gtCall.gtControlExpr, fgWalkData);
534                 if  (result == WALK_ABORT)
535                     return result;
536             }
537
538             if (tree->gtCall.gtCallType == CT_INDIRECT)
539             {
540                 if  (tree->gtCall.gtCallCookie)
541                 {
542                     result = fgWalkTreePreRec<computeStack>(&tree->gtCall.gtCallCookie, fgWalkData);
543                     if  (result == WALK_ABORT)
544                         return result;
545                 }
546                 pTree = &tree->gtCall.gtCallAddr;
547             }
548             else
549                 pTree = NULL;
550
551             break;
552
553         case GT_ARR_ELEM:
554
555             result = fgWalkTreePreRec<computeStack>(&tree->gtArrElem.gtArrObj, fgWalkData);
556             if  (result == WALK_ABORT)
557                 return result;
558
559             unsigned dim;
560             for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
561             {
562                 result = fgWalkTreePreRec<computeStack>(&tree->gtArrElem.gtArrInds[dim], fgWalkData);
563                 if  (result == WALK_ABORT)
564                     return result;
565             }
566             pTree = NULL;
567             break;
568
569         case GT_ARR_OFFSET:
570             result = fgWalkTreePreRec<computeStack>(&tree->gtArrOffs.gtOffset, fgWalkData);
571             if  (result == WALK_ABORT)
572                 return result;
573             result = fgWalkTreePreRec<computeStack>(&tree->gtArrOffs.gtIndex, fgWalkData);
574             if  (result == WALK_ABORT)
575                 return result;
576             result = fgWalkTreePreRec<computeStack>(&tree->gtArrOffs.gtArrObj, fgWalkData);
577             if  (result == WALK_ABORT)
578                 return result;
579             pTree = nullptr;
580             break;
581
582         case GT_CMPXCHG:
583             result = fgWalkTreePreRec<computeStack>(&tree->gtCmpXchg.gtOpLocation, fgWalkData);
584             if (result == WALK_ABORT)
585                 return result;
586             result = fgWalkTreePreRec<computeStack>(&tree->gtCmpXchg.gtOpValue, fgWalkData);
587             if (result == WALK_ABORT)
588                 return result;
589             result = fgWalkTreePreRec<computeStack>(&tree->gtCmpXchg.gtOpComparand, fgWalkData);
590             if (result == WALK_ABORT)
591                 return result;
592             pTree = NULL;
593             break;
594
595         case GT_ARR_BOUNDS_CHECK:
596 #ifdef FEATURE_SIMD
597         case GT_SIMD_CHK:
598 #endif // FEATURE_SIMD
599             result = fgWalkTreePreRec<computeStack>(&tree->gtBoundsChk.gtArrLen, fgWalkData);
600             if (result == WALK_ABORT)
601                 return result;
602             result = fgWalkTreePreRec<computeStack>(&tree->gtBoundsChk.gtIndex, fgWalkData);
603             if (result == WALK_ABORT)
604                 return result;
605             pTree = NULL;
606             break;
607
608         default:
609 #ifdef  DEBUG
610             fgWalkData->compiler->gtDispTree(tree);
611 #endif
612             assert(!"unexpected operator");
613         }
614     }
615     while (pTree != NULL && *pTree != NULL);
616
617     if (computeStack)
618         fgWalkData->parentStack->Pop();
619
620     if (result != WALK_ABORT)
621     {
622         //
623         // Restore fgWalkData->parent
624         // 
625         fgWalkData->parent = currentParent;
626     }
627     return result;
628 }
629
630 /*****************************************************************************
631  *
632  *  Walk all basic blocks and call the given function pointer for all tree
633  *  nodes contained therein.
634  */
635
636 void                    Compiler::fgWalkAllTreesPre(fgWalkPreFn * visitor,
637                                                     void * pCallBackData)
638 {
639     BasicBlock *    block;
640
641     for (block = fgFirstBB; block; block = block->bbNext)
642     {
643         GenTreePtr      tree;
644
645         for (tree = block->bbTreeList; tree; tree = tree->gtNext)
646         {
647             assert(tree->gtOper == GT_STMT);
648
649             fgWalkTreePre(&tree->gtStmt.gtStmtExpr, visitor, pCallBackData);
650         }
651     }
652 }
653
654
655 //******************************************************************************
656 // fgWalkTreePostRec - Helper function for fgWalkTreePost.
657 //                     Walk the tree in post-order, executing the callback on every node.
658 //                     Template parameter 'computeStack' specifies whether to maintain
659 //                     a stack of ancestor nodes which can be viewed in the callback.
660 //
661 template<bool computeStack> 
662 // static
663 Compiler::fgWalkResult Compiler::fgWalkTreePostRec(GenTreePtr *pTree, fgWalkData *fgWalkData)
664 {
665     fgWalkResult    result;
666     GenTreePtr      currentParent = fgWalkData->parent;
667
668     genTreeOps      oper;
669     unsigned        kind;
670
671     GenTree *tree = *pTree;
672     assert(tree);
673     assert(tree->gtOper != GT_STMT);
674     GenTreeArgList* args;
675
676     /* Figure out what kind of a node we have */
677
678     oper = tree->OperGet();
679     kind = tree->OperKind();
680
681     if (computeStack)
682         fgWalkData->parentStack->Push(tree);
683
684     /* Is this a constant or leaf node? */
685
686     if  (kind & (GTK_CONST|GTK_LEAF))
687         goto DONE;
688
689     /* Is it a 'simple' unary/binary operator? */
690
691     fgWalkData->parent = tree;
692
693     if (kind & GTK_SMPOP)
694     {
695         GenTree** op1Slot = &tree->gtOp.gtOp1;
696
697         GenTree** op2Slot;
698         if (tree->OperIsBinary())
699         {
700             if ((tree->gtFlags & GTF_REVERSE_OPS) == 0)
701             {
702                 op2Slot = &tree->gtOp.gtOp2;
703             }
704             else
705             {
706                 op2Slot = op1Slot;
707                 op1Slot = &tree->gtOp.gtOp2;
708             }
709         }
710         else
711         {
712             op2Slot = nullptr;
713         }
714
715         if (*op1Slot != nullptr)
716         {
717             result = fgWalkTreePostRec<computeStack>(op1Slot, fgWalkData);
718             if  (result == WALK_ABORT)
719                 return result;
720         }
721
722         if (op2Slot != nullptr && *op2Slot != nullptr)
723         {
724             result = fgWalkTreePostRec<computeStack>(op2Slot, fgWalkData);
725             if  (result == WALK_ABORT)
726                 return result;
727         }
728
729         goto DONE;
730     }
731
732     /* See what kind of a special operator we have here */
733
734     switch  (oper)
735     {
736     case GT_FIELD:
737         if  (tree->gtField.gtFldObj)
738         {
739             result = fgWalkTreePostRec<computeStack>(&tree->gtField.gtFldObj, fgWalkData);
740             if  (result == WALK_ABORT)
741                 return result;
742         }
743
744         break;
745
746     case GT_CALL:
747
748         assert(tree->gtFlags & GTF_CALL);
749
750         if  (tree->gtCall.gtCallObjp)
751         {
752             result = fgWalkTreePostRec<computeStack>(&tree->gtCall.gtCallObjp, fgWalkData);
753             if  (result == WALK_ABORT)
754                 return result;
755         }
756
757         for (args = tree->gtCall.gtCallArgs; args; args = args->Rest())
758         {
759             result = fgWalkTreePostRec<computeStack>(args->pCurrent(), fgWalkData);
760             if  (result == WALK_ABORT)
761                 return result;
762         }
763
764         for (args = tree->gtCall.gtCallLateArgs; args; args = args->Rest())
765         {
766             result = fgWalkTreePostRec<computeStack>(args->pCurrent(), fgWalkData);
767             if  (result == WALK_ABORT)
768                 return result;
769         }
770         if  (tree->gtCall.gtCallType == CT_INDIRECT)
771         {
772             if  (tree->gtCall.gtCallCookie)
773             {
774                 result = fgWalkTreePostRec<computeStack>(&tree->gtCall.gtCallCookie, fgWalkData);
775                 if  (result == WALK_ABORT)
776                     return result;
777             }
778             result = fgWalkTreePostRec<computeStack>(&tree->gtCall.gtCallAddr, fgWalkData);
779             if  (result == WALK_ABORT)
780                 return result;
781         }
782
783         if (tree->gtCall.gtControlExpr != nullptr)
784         {
785             result = fgWalkTreePostRec<computeStack>(&tree->gtCall.gtControlExpr, fgWalkData);
786             if  (result == WALK_ABORT)
787                 return result;
788         }
789         break;
790
791     case GT_ARR_ELEM:
792
793         result = fgWalkTreePostRec<computeStack>(&tree->gtArrElem.gtArrObj, fgWalkData);
794         if  (result == WALK_ABORT)
795             return result;
796
797         unsigned dim;
798         for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
799         {
800             result = fgWalkTreePostRec<computeStack>(&tree->gtArrElem.gtArrInds[dim], fgWalkData);
801             if  (result == WALK_ABORT)
802                 return result;
803         }
804         break;
805
806     case GT_ARR_OFFSET:
807         result = fgWalkTreePostRec<computeStack>(&tree->gtArrOffs.gtOffset, fgWalkData);
808         if  (result == WALK_ABORT)
809             return result;
810         result = fgWalkTreePostRec<computeStack>(&tree->gtArrOffs.gtIndex, fgWalkData);
811         if  (result == WALK_ABORT)
812             return result;
813         result = fgWalkTreePostRec<computeStack>(&tree->gtArrOffs.gtArrObj, fgWalkData);
814         if  (result == WALK_ABORT)
815             return result;
816         break;
817
818     case GT_CMPXCHG:
819         result = fgWalkTreePostRec<computeStack>(&tree->gtCmpXchg.gtOpComparand, fgWalkData);
820         if (result == WALK_ABORT)
821             return result;
822         result = fgWalkTreePostRec<computeStack>(&tree->gtCmpXchg.gtOpValue, fgWalkData);
823         if (result == WALK_ABORT)
824             return result;
825         result = fgWalkTreePostRec<computeStack>(&tree->gtCmpXchg.gtOpLocation, fgWalkData);
826         if (result == WALK_ABORT)
827             return result;
828         break;
829
830     case GT_ARR_BOUNDS_CHECK:
831 #ifdef FEATURE_SIMD
832     case GT_SIMD_CHK:
833 #endif // FEATURE_SIMD
834         result = fgWalkTreePostRec<computeStack>(&tree->gtBoundsChk.gtArrLen, fgWalkData);
835         if (result == WALK_ABORT)
836             return result;
837         result = fgWalkTreePostRec<computeStack>(&tree->gtBoundsChk.gtIndex, fgWalkData);
838         if (result == WALK_ABORT)
839             return result;
840         break;
841
842     default:
843 #ifdef  DEBUG
844         fgWalkData->compiler->gtDispTree(tree);
845 #endif
846         assert(!"unexpected operator");
847     }
848
849 DONE:
850
851     fgWalkData->parent = currentParent;
852
853     /* Finally, visit the current node */
854     result = fgWalkData->wtpoVisitorFn(pTree, fgWalkData);
855
856     if (computeStack)
857         fgWalkData->parentStack->Pop();
858
859     return result;
860 }
861
862 // ****************************************************************************
863 // Walk the tree, doing callbacks in both pre- and post-order (both optional).
864
865 template<bool doPreOrder, bool doPostOrder>
866 // static
867 Compiler::fgWalkResult      
868 Compiler::fgWalkTreeRec(GenTreePtr *pTree, fgWalkData *fgWalkData)
869 {
870     fgWalkResult    result = WALK_CONTINUE;
871
872     genTreeOps      oper;
873     unsigned        kind;
874
875     GenTree *tree = *pTree;
876     assert(tree);
877     assert(tree->gtOper != GT_STMT);
878     GenTreeArgList* args;
879
880     /* Figure out what kind of a node we have */
881
882     oper = tree->OperGet();
883     kind = tree->OperKind();
884
885     fgWalkData->parentStack->Push(tree);
886
887     if (doPreOrder)
888     {
889         result = fgWalkData->wtprVisitorFn(pTree, fgWalkData);
890         if  (result == WALK_ABORT)
891             return result;
892         else
893         {
894             tree = *pTree;
895             oper = tree->OperGet();
896             kind = tree->OperKind();
897         }
898     }
899
900     // If we're skipping subtrees, we're done.
901     if (result == WALK_SKIP_SUBTREES)
902         goto DONE;
903
904     /* Is this a constant or leaf node? */
905
906     if ((kind & (GTK_CONST|GTK_LEAF)) != 0)
907         goto DONE;
908
909     /* Is it a 'simple' unary/binary operator? */
910
911     if  (kind & GTK_SMPOP)
912     {
913         if  (tree->gtOp.gtOp1)
914         {
915             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtOp.gtOp1, fgWalkData);
916             if  (result == WALK_ABORT)
917                 return result;
918         }
919
920         if  (tree->gtGetOp2())
921         {
922             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtOp.gtOp2, fgWalkData);
923             if  (result == WALK_ABORT)
924                 return result;
925         }
926
927         goto DONE;
928     }
929
930     /* See what kind of a special operator we have here */
931
932     switch  (oper)
933     {
934     case GT_FIELD:
935         if  (tree->gtField.gtFldObj)
936         {
937             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtField.gtFldObj, fgWalkData);
938             if  (result == WALK_ABORT)
939                 return result;
940         }
941
942         break;
943
944     case GT_CALL:
945
946         assert(tree->gtFlags & GTF_CALL);
947
948         if  (tree->gtCall.gtCallObjp)
949         {
950             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtCall.gtCallObjp, fgWalkData);
951             if  (result == WALK_ABORT)
952                 return result;
953         }
954
955         for (args = tree->gtCall.gtCallArgs; args; args = args->Rest())
956         {
957             result = fgWalkTreeRec<doPreOrder, doPostOrder>(args->pCurrent(), fgWalkData);
958             if  (result == WALK_ABORT)
959                 return result;
960         }
961
962         for (args = tree->gtCall.gtCallLateArgs; args; args = args->Rest())
963         {
964             result = fgWalkTreeRec<doPreOrder, doPostOrder>(args->pCurrent(), fgWalkData);
965             if  (result == WALK_ABORT)
966                 return result;
967         }
968         if  (tree->gtCall.gtCallType == CT_INDIRECT)
969         {
970             if  (tree->gtCall.gtCallCookie)
971             {
972                 result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtCall.gtCallCookie, fgWalkData);
973                 if  (result == WALK_ABORT)
974                     return result;
975             }
976             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtCall.gtCallAddr, fgWalkData);
977             if  (result == WALK_ABORT)
978                 return result;
979         }
980
981         if (tree->gtCall.gtControlExpr)
982         {
983             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtCall.gtControlExpr, fgWalkData);
984             if  (result == WALK_ABORT)
985                 return result;
986         }
987
988         break;
989
990     case GT_ARR_ELEM:
991
992         result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtArrElem.gtArrObj, fgWalkData);
993         if  (result == WALK_ABORT)
994             return result;
995
996         unsigned dim;
997         for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
998         {
999             result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtArrElem.gtArrInds[dim], fgWalkData);
1000             if  (result == WALK_ABORT)
1001                 return result;
1002         }
1003         break;
1004
1005     case GT_ARR_OFFSET:
1006         result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtArrOffs.gtOffset, fgWalkData);
1007         if  (result == WALK_ABORT)
1008             return result;
1009         result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtArrOffs.gtIndex, fgWalkData);
1010         if  (result == WALK_ABORT)
1011             return result;
1012         result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtArrOffs.gtArrObj, fgWalkData);
1013         if  (result == WALK_ABORT)
1014             return result;
1015         break;
1016
1017     case GT_CMPXCHG:
1018         result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtCmpXchg.gtOpComparand, fgWalkData);
1019         if (result == WALK_ABORT)
1020             return result;
1021         result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtCmpXchg.gtOpValue, fgWalkData);
1022         if (result == WALK_ABORT)
1023             return result;
1024         result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtCmpXchg.gtOpLocation, fgWalkData);
1025         if (result == WALK_ABORT)
1026             return result;
1027         break;
1028
1029     case GT_ARR_BOUNDS_CHECK:
1030 #ifdef FEATURE_SIMD
1031     case GT_SIMD_CHK:
1032 #endif // FEATURE_SIMD
1033         result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtBoundsChk.gtArrLen, fgWalkData);
1034         if (result == WALK_ABORT)
1035             return result;
1036         result = fgWalkTreeRec<doPreOrder, doPostOrder>(&tree->gtBoundsChk.gtIndex, fgWalkData);
1037         if (result == WALK_ABORT)
1038             return result;
1039         break;
1040
1041     default:
1042 #ifdef  DEBUG
1043         fgWalkData->compiler->gtDispTree(tree);
1044 #endif
1045         assert(!"unexpected operator");
1046     }
1047
1048 DONE:
1049
1050     /* Finally, visit the current node */
1051     if (doPostOrder)
1052     {
1053         result = fgWalkData->wtpoVisitorFn(pTree, fgWalkData);
1054     }
1055
1056     fgWalkData->parentStack->Pop();
1057
1058     return result;
1059 }
1060
1061 /*****************************************************************************
1062  *
1063  *  Call the given function pointer for all nodes in the tree. The 'visitor'
1064  *  fn should return one of the following values:
1065  *
1066  *  WALK_ABORT          stop walking and return immediately
1067  *  WALK_CONTINUE       continue walking
1068  *  WALK_SKIP_SUBTREES  don't walk any subtrees of the node just visited
1069  */
1070
1071 Compiler::fgWalkResult  Compiler::fgWalkTree(GenTreePtr  * pTree,
1072                                              fgWalkPreFn * preVisitor,
1073                                              fgWalkPreFn * postVisitor,
1074                                              void *        callBackData)
1075
1076 {
1077     fgWalkData walkData;
1078
1079     walkData.compiler          = this;
1080     walkData.wtprVisitorFn     = preVisitor;
1081     walkData.wtpoVisitorFn     = postVisitor;
1082     walkData.pCallbackData     = callBackData;
1083     walkData.parent            = NULL;
1084     walkData.wtprLclsOnly      = false;
1085 #ifdef DEBUG
1086     walkData.printModified     = false;
1087 #endif
1088     ArrayStack<GenTree *> parentStack(this);
1089     walkData.parentStack = &parentStack;
1090
1091     fgWalkResult result;
1092
1093     assert (preVisitor || postVisitor);
1094
1095     if (preVisitor && postVisitor)
1096         result = fgWalkTreeRec<true,true>(pTree, &walkData);
1097     else if (preVisitor)
1098         result = fgWalkTreeRec<true,false>(pTree, &walkData);
1099     else
1100         result = fgWalkTreeRec<false,true>(pTree, &walkData);
1101         
1102
1103 #ifdef DEBUG
1104     if (verbose && walkData.printModified)
1105     {
1106         gtDispTree(*pTree);
1107     }
1108 #endif
1109
1110     return result;
1111 }
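// Usage sketch (illustrative only, not part of the original source; 'stmt' is a
// hypothetical GT_STMT node): a pre-order visitor, typically a static member of
// Compiler, that counts GT_LCL_VAR nodes via pCallbackData.
//
//     static Compiler::fgWalkResult CountLclVarsCB(GenTreePtr* pTree, Compiler::fgWalkData* data)
//     {
//         if ((*pTree)->OperGet() == GT_LCL_VAR)
//         {
//             (*static_cast<unsigned*>(data->pCallbackData))++;
//         }
//         return Compiler::WALK_CONTINUE;
//     }
//
//     unsigned lclVarCount = 0;
//     fgWalkTree(&stmt->gtStmt.gtStmtExpr, CountLclVarsCB, nullptr, &lclVarCount);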
1112
1113 // ------------------------------------------------------------------------------------------
1114 // gtClearReg: Sets the register to the "no register assignment" value, depending upon
1115 // the type of the node, and whether it fits any of the special cases for register pairs
1116 // or multi-reg call nodes.
1117 //
1118 // Arguments:
1119 //     compiler  -  compiler instance
1120 //
1121 // Return Value:
1122 //     None
1123 void
1124 GenTree::gtClearReg(Compiler* compiler)
1125 {
1126 #if CPU_LONG_USES_REGPAIR
1127     if (isRegPairType(TypeGet()) ||
1128         // (IsLocal() && isRegPairType(compiler->lvaTable[gtLclVarCommon.gtLclNum].TypeGet())) ||
1129         (OperGet() == GT_MUL && (gtFlags & GTF_MUL_64RSLT)))
1130     {
1131         gtRegPair = REG_PAIR_NONE;
1132     }
1133     else
1134 #endif // CPU_LONG_USES_REGPAIR
1135     {
1136         gtRegNum = REG_NA;
1137     }
1138
1139     // Also clear multi-reg state if this is a call node
1140     if (IsCall())
1141     {
1142         this->AsCall()->ClearOtherRegs();
1143     }
1144     else if (IsCopyOrReload())
1145     {
1146         this->AsCopyOrReload()->ClearOtherRegs();
1147     }
1148 }
1149
1150 //-----------------------------------------------------------
1151 // CopyReg: Copy the _gtRegNum/_gtRegPair/gtRegTag fields.
1152 //
1153 // Arguments:
1154 //     from   -  GenTree node from which to copy
1155 //
1156 // Return Value:
1157 //     None
1158 void 
1159 GenTree::CopyReg(GenTreePtr from)
1160 {
1161     // To do the copy, use _gtRegPair, which must be bigger than _gtRegNum. Note that the values
1162     // might be undefined (so gtRegTag == GT_REGTAG_NONE).
1163     _gtRegPair = from->_gtRegPair;
1164     C_ASSERT(sizeof(_gtRegPair) >= sizeof(_gtRegNum));
1165     INDEBUG(gtRegTag = from->gtRegTag;)
1166
1167     // Also copy multi-reg state if this is a call node
1168     if (IsCall())
1169     {
1170         assert(from->IsCall());
1171         this->AsCall()->CopyOtherRegs(from->AsCall());
1172     }
1173     else if (IsCopyOrReload())
1174     {
1175         this->AsCopyOrReload()->CopyOtherRegs(from->AsCopyOrReload());
1176     }
1177 }
1178
1179 //------------------------------------------------------------------
1180 // gtHasReg: Whether the node has been assigned a register by LSRA
1181 //
1182 // Arguments:
1183 //    None
1184 //
1185 // Return Value:
1186 //    Returns true if the node was assigned a register.
1187 //
1188 //    A multi-reg call node is considered to have a reg only if
1189 //    registers have been allocated for all of its
1190 //    return values.
1191 //
1192 //    A GT_COPY or GT_RELOAD of a multi-reg call is considered
1193 //    to have a reg if a register is assigned to
1194 //    any of its positions.
1195 //
1196 // Assumption:
1197 //    In order for this to work properly, gtClearReg must be called
1198 //    prior to setting the register value.
1199 //
1200 bool GenTree::gtHasReg() const
1201 {
1202     bool hasReg;
1203
1204 #if CPU_LONG_USES_REGPAIR
1205     if (isRegPairType(TypeGet()))
1206     {
1207         assert(_gtRegNum != REG_NA);
1208         INDEBUG(assert(gtRegTag == GT_REGTAG_REGPAIR));
1209         hasReg = (gtRegPair != REG_PAIR_NONE);
1210     }
1211     else
1212 #endif
1213     {
1214         assert(_gtRegNum != REG_PAIR_NONE);
1215         INDEBUG(assert(gtRegTag == GT_REGTAG_REG));
1216
1217         if (IsMultiRegCall())
1218         {
1219             // Has to cast away const-ness because GetReturnTypeDesc() is a non-const method
1220             GenTree* tree = const_cast<GenTree*>(this);
1221             GenTreeCall* call = tree->AsCall();
1222             unsigned regCount = call->GetReturnTypeDesc()->GetReturnRegCount();
1223             hasReg = false;
1224
1225             // A Multi-reg call node is said to have regs, if it has
1226             // reg assigned to each of its result registers.
1227             for (unsigned i = 0; i < regCount; ++i)
1228             {
1229                 hasReg = (call->GetRegNumByIdx(i) != REG_NA);
1230                 if (!hasReg)
1231                 {
1232                     break;
1233                 }
1234             }
1235         }
1236         else if (IsCopyOrReloadOfMultiRegCall())
1237         {
1238             GenTree* tree = const_cast<GenTree*>(this);
1239             GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
1240             GenTreeCall* call = copyOrReload->gtGetOp1()->AsCall();
1241             unsigned regCount = call->GetReturnTypeDesc()->GetReturnRegCount();
1242             hasReg = false;
1243
1244             // A Multi-reg copy or reload node is said to have regs,
1245             // if it has valid regs in any of the positions.
1246             for (unsigned i = 0; i < regCount; ++i)
1247             {
1248                 hasReg = (copyOrReload->GetRegNumByIdx(i) != REG_NA);
1249                 if (hasReg)
1250                 {
1251                     break;
1252                 }
1253             }
1254         }
1255         else
1256         {
1257             hasReg = (gtRegNum != REG_NA);
1258         }
1259     }
1260
1261     return hasReg;
1262 }
1263
1264 //---------------------------------------------------------------
1265 // gtGetRegMask: Get the reg mask of the node.
1266 //
1267 // Arguments:
1268 //    None
1269 //
1270 // Return Value:
1271 //    Reg Mask of GenTree node.
1272 //
1273 regMaskTP 
1274 GenTree::gtGetRegMask() const
1275 {
1276     regMaskTP resultMask;
1277
1278 #if CPU_LONG_USES_REGPAIR
1279     if (isRegPairType(TypeGet()))
1280     {
1281         resultMask = genRegPairMask(gtRegPair);
1282     }
1283     else
1284 #endif
1285     {
1286         if (IsMultiRegCall())
1287         {
1288             // temporarily cast away const-ness as AsCall() method is not declared const
1289             resultMask = genRegMask(gtRegNum);
1290             GenTree* temp = const_cast<GenTree*>(this);
1291             resultMask |= temp->AsCall()->GetOtherRegMask();
1292         }
1293         else if (IsCopyOrReloadOfMultiRegCall())
1294         {
1295             // A multi-reg copy or reload will have valid regs for only those
1296             // positions that need to be copied or reloaded.  Hence we need
1297             // to consider only those registers when computing the reg mask.
1298
1299             GenTree* tree = const_cast<GenTree*>(this);
1300             GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
1301             GenTreeCall* call = copyOrReload->gtGetOp1()->AsCall();
1302             unsigned regCount = call->GetReturnTypeDesc()->GetReturnRegCount();
1303
1304             resultMask = RBM_NONE;
1305             for (unsigned i = 0; i < regCount; ++i)
1306             {
1307                 regNumber reg = copyOrReload->GetRegNumByIdx(i);
1308                 if (reg != REG_NA)
1309                 {
1310                     resultMask |= genRegMask(reg);
1311                 }
1312             }
1313         }
1314         else
1315         {
1316             resultMask = genRegMask(gtRegNum);
1317         }
1318     }
1319
1320     return resultMask;
1321 }
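// Illustrative note (not from the original source): for a multi-reg call whose two result
// registers happen to be REG_RAX and REG_RDX, the computed mask would be
// genRegMask(REG_RAX) | genRegMask(REG_RDX); for an ordinary single-reg node it is simply
// genRegMask(gtRegNum).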
1322
1323 //---------------------------------------------------------------
1324 // GetOtherRegMask: Get the reg mask of gtOtherRegs of call node
1325 //
1326 // Arguments:
1327 //    None
1328 //
1329 // Return Value:
1330 //    Reg mask of gtOtherRegs of call node.
1331 //
1332 regMaskTP  
1333 GenTreeCall::GetOtherRegMask() const
1334 {
1335     regMaskTP resultMask = RBM_NONE;
1336
1337 #if FEATURE_MULTIREG_RET
1338     for (unsigned i = 0; i < MAX_RET_REG_COUNT - 1; ++i)
1339     {
1340         if (gtOtherRegs[i] != REG_NA)
1341         {
1342             resultMask |= genRegMask(gtOtherRegs[i]);
1343             continue;
1344         }
1345         break;
1346     }
1347 #endif
1348
1349     return resultMask;
1350 }
1351
1352 #ifndef LEGACY_BACKEND
1353
1354 //-------------------------------------------------------------------------
1355 // HasNonStandardAddedArgs: Return true if the method has non-standard args added to the call
1356 // argument list during argument morphing (fgMorphArgs), e.g., passed in R10 or R11 on AMD64.
1357 // See also GetNonStandardAddedArgCount().
1358 //
1359 // Arguments:
1360 //     compiler - the compiler instance
1361 //
1362 // Return Value:
1363 //      true if there are any such args, false otherwise.
1364 //
1365 bool GenTreeCall::HasNonStandardAddedArgs(Compiler* compiler) const
1366 {
1367     return GetNonStandardAddedArgCount(compiler) != 0;
1368 }
1369
1370
1371 //-------------------------------------------------------------------------
1372 // GetNonStandardAddedArgCount: Get the count of non-standard arguments that have been added
1373 // during call argument morphing (fgMorphArgs). Do not count non-standard args that are already
1374 // counted in the argument list prior to morphing.
1375 //
1376 // This function is used to help map the caller and callee arguments during tail call setup.
1377 //
1378 // Arguments:
1379 //     compiler - the compiler instance
1380 //
1381 // Return Value:
1382 //      The count of args, as described.
1383 //
1384 // Notes:
1385 //      It would be more general to have fgMorphArgs set a bit on the call node when such
1386 //      args are added to a call, and a bit on each such arg, and then have this code loop
1387 //      over the call args when the special call bit is set, counting the args with the special
1388 //      arg bit. This seems pretty heavyweight, though. Instead, this logic needs to be kept
1389 //      in sync with fgMorphArgs.
1390 //
1391 int GenTreeCall::GetNonStandardAddedArgCount(Compiler* compiler) const
1392 {
1393     if (IsUnmanaged() && !compiler->opts.ShouldUsePInvokeHelpers())
1394     {
1395         // R11 = PInvoke cookie param
1396         return 1;
1397     }
1398     else if (gtCallType == CT_INDIRECT)
1399     {
1400         if (IsVirtualStub())
1401         {
1402             // R11 = Virtual stub param
1403             return 1;
1404         }
1405         else if (gtCallCookie != nullptr)
1406         {
1407             // R10 = PInvoke target param
1408             // R11 = PInvoke cookie param
1409             return 2;
1410         }
1411     } 
1412     return 0;
1413 }
1414
1415 #endif // !LEGACY_BACKEND
1416
1417 //-------------------------------------------------------------------------
1418 // TreatAsHasRetBufArg:
1419 //
1420 // Arguments:
1421 //     compiler, the compiler instance so that we can call eeGetHelperNum
1422 //
1423 // Return Value:
1424 //     Returns true if we treat the call as if it has a retBuf argument
1425 //     This method may actually have a retBuf argument 
1426 //     or it could be a JIT helper that we are still transforming during 
1427 //     the importer phase.
1428 //
1429 // Notes:
1430 //     On ARM64 marking the method with the GTF_CALL_M_RETBUFFARG flag
1431 //     will make HasRetBufArg() return true, but will also force the 
1432 //     use of register x8 to pass the RetBuf argument.
1433 //
1434 //     The two JIT helpers that we handle here by returning true
1435 //     aren't actually defined to return a struct, so they don't expect
1436 //     their RetBuf to be passed in x8; instead they expect it in x0.
1437 //
1438 bool GenTreeCall::TreatAsHasRetBufArg(Compiler* compiler) const
1439 {
1440     if (HasRetBufArg())
1441     {
1442         return true;
1443     }
1444     else
1445     {
1446         // If we see a Jit helper call that returns a TYP_STRUCT we will
1447         // transform it as if it has a Return Buffer Argument
1448         //
1449         if (IsHelperCall() && (gtReturnType == TYP_STRUCT))
1450         {
1451             // There are two possible helper calls that use this path:
1452             //  CORINFO_HELP_GETFIELDSTRUCT and CORINFO_HELP_UNBOX_NULLABLE
1453             //
1454             CorInfoHelpFunc helpFunc = compiler->eeGetHelperNum(gtCallMethHnd);
1455
1456             if (helpFunc == CORINFO_HELP_GETFIELDSTRUCT)
1457             {
1458                 return true;
1459             }
1460             else if (helpFunc == CORINFO_HELP_UNBOX_NULLABLE)
1461             {
1462                 return true;
1463             }
1464             else
1465             {
1466                 assert(!"Unexpected JIT helper in TreatAsHasRetBufArg");
1467             }
1468         }
1469     }
1470     return false;
1471 }
1472  
1473
1474 //-------------------------------------------------------------------------
1475 // IsHelperCall: Determine if this GT_CALL node is a specific helper call.
1476 //
1477 // Arguments:
1478 //     compiler - the compiler instance so that we can call eeFindHelper
1479 //
1480 // Return Value:
1481 //     Returns true if this GT_CALL node is a call to the specified helper.
1482 //
1483 bool GenTreeCall::IsHelperCall(Compiler* compiler, unsigned helper) const
1484 {
1485     return IsHelperCall(compiler->eeFindHelper(helper));
1486 }
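// Usage sketch (illustrative, not part of the original source; 'call' is a hypothetical
// GT_CALL node): this overload lets callers test for a specific helper directly, e.g.
//
//     if (call->IsHelperCall(compiler, CORINFO_HELP_UNBOX_NULLABLE))
//     {
//         // handle the nullable-unbox helper specially
//     }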
1487
1488
1489 /*****************************************************************************
1490  *
1491  *  Returns true if the two trees are identical.
1492  */
1493
1494 bool                GenTree::Compare(GenTreePtr op1, GenTreePtr op2, bool swapOK)
1495 {
1496     genTreeOps      oper;
1497     unsigned        kind;
1498
1499 //  printf("tree1:\n"); gtDispTree(op1);
1500 //  printf("tree2:\n"); gtDispTree(op2);
1501
1502 AGAIN:
1503
1504     if  (op1 == NULL) return (op2 == NULL);
1505     if  (op2 == NULL) return false;
1506     if  (op1 == op2)  return true;
1507
1508     assert(op1->gtOper != GT_STMT);
1509     assert(op2->gtOper != GT_STMT);
1510
1511     oper = op1->OperGet();
1512
1513     /* The operators must be equal */
1514
1515     if  (oper != op2->gtOper)
1516         return false;
1517
1518     /* The types must be equal */
1519
1520     if  (op1->gtType != op2->gtType)
1521         return false;
1522
1523     /* Overflow must be equal */
1524     if  (op1->gtOverflowEx() != op2->gtOverflowEx())
1525     {
1526         return false;
1527     }
1528         
1529
1530     /* Sensible flags must be equal */
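         // (GTF_UNSIGNED changes a node's semantics - e.g. an unsigned vs. signed cast
         //  or compare - so two otherwise-identical trees that differ in it don't match.)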
1531     if ( (op1->gtFlags & (GTF_UNSIGNED )) !=
1532          (op2->gtFlags & (GTF_UNSIGNED )) )
1533     {
1534         return false;
1535     }
1536
1537
1538     /* Figure out what kind of nodes we're comparing */
1539
1540     kind = op1->OperKind();
1541
1542     /* Is this a constant node? */
1543
1544     if  (kind & GTK_CONST)
1545     {
1546         switch (oper)
1547         {
1548         case GT_CNS_INT:
1549             if  (op1->gtIntCon.gtIconVal == op2->gtIntCon.gtIconVal)
1550                 return true;
1551             break;
1552 #if 0
1553             // TODO-CQ: Enable this in the future
1554         case GT_CNS_LNG:
1555             if  (op1->gtLngCon.gtLconVal == op2->gtLngCon.gtLconVal)
1556                 return true;
1557             break;
1558
1559         case GT_CNS_DBL:
1560             if  (op1->gtDblCon.gtDconVal == op2->gtDblCon.gtDconVal)
1561                 return true;
1562             break;
1563 #endif
1564         default:
1565             break;
1566         }
1567
1568         return  false;
1569     }
1570
1571     /* Is this a leaf node? */
1572
1573     if  (kind & GTK_LEAF)
1574     {
1575         switch (oper)
1576         {
1577         case GT_LCL_VAR:
1578             if  (op1->gtLclVarCommon.gtLclNum   != op2->gtLclVarCommon.gtLclNum)
1579                 break;
1580
1581             return true;
1582
1583         case GT_LCL_FLD:
1584             if  (op1->gtLclFld.gtLclNum  != op2->gtLclFld.gtLclNum ||
1585                  op1->gtLclFld.gtLclOffs != op2->gtLclFld.gtLclOffs)
1586                 break;
1587
1588             return true;
1589
1590         case GT_CLS_VAR:
1591             if  (op1->gtClsVar.gtClsVarHnd != op2->gtClsVar.gtClsVarHnd)
1592                 break;
1593
1594             return true;
1595
1596         case GT_LABEL:
1597             return true;
1598
1599         case GT_ARGPLACE:
1600             if ((op1->gtType == TYP_STRUCT) && 
1601                 (op1->gtArgPlace.gtArgPlaceClsHnd != op2->gtArgPlace.gtArgPlaceClsHnd))
1602             {
1603                 break;
1604             }
1605             return true;
1606
1607         default:
1608             break;
1609         }
1610
1611         return false;
1612     }
1613
1614     /* Is it a 'simple' unary/binary operator? */
1615
1616     if  (kind & GTK_UNOP)
1617     {
1618         if (IsExOp(kind))
1619         {
1620             // ExOp operators extend unary operators with extra, non-GenTreePtr members.  In many cases,
1621             // these should be included in the comparison.
1622             switch (oper)
1623             {
1624             case GT_ARR_LENGTH:
1625                 if (op1->gtArrLen.ArrLenOffset() != op2->gtArrLen.ArrLenOffset()) return false;
1626                 break;
1627             case GT_CAST:
1628                 if (op1->gtCast.gtCastType != op2->gtCast.gtCastType) return false;
1629                 break;
1630             case GT_OBJ:
1631                 if (op1->AsObj()->gtClass != op2->AsObj()->gtClass) return false;
1632                 break;
1633
1634                 // For the ones below no extra argument matters for comparison.
1635             case GT_BOX:
1636                 break;
1637
1638             default:
1639                 assert(!"unexpected unary ExOp operator");
1640             }
1641         }
1642         return Compare(op1->gtOp.gtOp1, op2->gtOp.gtOp1);
1643     }
1644
1645     if  (kind & GTK_BINOP)
1646     {
1647         if (IsExOp(kind))
1648         {
1649             // ExOp operators extend binary operators with extra, non-GenTreePtr members.  In many cases,
1650             // these should be included in the comparison.
1651             switch (oper)
1652             {
1653             case GT_INTRINSIC:
1654                 if (op1->gtIntrinsic.gtIntrinsicId != op2->gtIntrinsic.gtIntrinsicId) return false;
1655                 break;
1656             case GT_LEA:
1657                 if (op1->gtAddrMode.gtScale != op2->gtAddrMode.gtScale) return false;
1658                 if (op1->gtAddrMode.gtOffset != op2->gtAddrMode.gtOffset) return false;
1659                 break;
1660             case GT_INDEX:
1661                 if (op1->gtIndex.gtIndElemSize != op2->gtIndex.gtIndElemSize) return false;
1662                 break;
1663
1664                 // For the ones below no extra argument matters for comparison.
1665             case GT_QMARK:
1666                 break;
1667
1668             default:
1669                 assert(!"unexpected binary ExOp operator");
1670             }
1671         }
1672
1673         if  (op1->gtOp.gtOp2)
1674         {
1675             if  (!Compare(op1->gtOp.gtOp1, op2->gtOp.gtOp1, swapOK))
1676             {
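                     // The straight comparison of the first operands failed. For a
                     // commutative operator whose operands are all free of side effects,
                     // retry by matching op1's first operand against op2's second; on
                     // success, fall through to compare the remaining operands in
                     // swapped order.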
1677                 if  (swapOK && OperIsCommutative(oper) &&
1678                     ((op1->gtOp.gtOp1->gtFlags | op1->gtOp.gtOp2->gtFlags | op2->gtOp.gtOp1->gtFlags | op2->gtOp.gtOp2->gtFlags) & GTF_ALL_EFFECT) == 0)
1679                 {
1680                     if  (Compare(op1->gtOp.gtOp1, op2->gtOp.gtOp2, swapOK))
1681                     {
1682                         op1 = op1->gtOp.gtOp2;
1683                         op2 = op2->gtOp.gtOp1;
1684                         goto AGAIN;
1685                     }
1686                 }
1687
1688                 return false;
1689             }
1690
1691             op1 = op1->gtOp.gtOp2;
1692             op2 = op2->gtOp.gtOp2;
1693
1694             goto AGAIN;
1695         }
1696         else
1697         {
1698
1699             op1 = op1->gtOp.gtOp1;
1700             op2 = op2->gtOp.gtOp1;
1701
1702             if  (!op1) return  (op2 == 0);
1703             if  (!op2) return  false;
1704
1705             goto AGAIN;
1706         }
1707     }
1708
1709     /* See what kind of a special operator we have here */
1710
1711     switch  (oper)
1712     {
1713     case GT_FIELD:
1714         if  (op1->gtField.gtFldHnd != op2->gtField.gtFldHnd)
1715             break;
1716
1717         op1 = op1->gtField.gtFldObj;
1718         op2 = op2->gtField.gtFldObj;
1719
1720         if  (op1 || op2)
1721         {
1722             if  (op1 && op2)
1723                 goto AGAIN;
1724         }
1725
1726         return true;
1727
1728     case GT_CALL:
1729
1730         if (op1->gtCall.gtCallType != op2->gtCall.gtCallType)
1731             return false;
1732
1733         if (op1->gtCall.gtCallType != CT_INDIRECT)
1734         {
1735             if (op1->gtCall.gtCallMethHnd != op2->gtCall.gtCallMethHnd)
1736                 return false;
1737
1738 #ifdef FEATURE_READYTORUN_COMPILER
1739             if (op1->gtCall.gtEntryPoint.addr != op2->gtCall.gtEntryPoint.addr)
1740                 return false;
1741 #endif
1742         }
1743         else
1744         {
1745              if (!Compare(op1->gtCall.gtCallAddr, op2->gtCall.gtCallAddr))
1746                 return false;
1747         }
1748
1749         if (Compare(op1->gtCall.gtCallLateArgs, op2->gtCall.gtCallLateArgs)  &&
1750             Compare(op1->gtCall.gtCallArgs,    op2->gtCall.gtCallArgs)     &&
1751             Compare(op1->gtCall.gtControlExpr,     op2->gtCall.gtControlExpr)     &&
1752             Compare(op1->gtCall.gtCallObjp,    op2->gtCall.gtCallObjp))
1753             return true;  
1754         break;
1755
1756     case GT_ARR_ELEM:
1757
1758         if (op1->gtArrElem.gtArrRank != op2->gtArrElem.gtArrRank)
1759             return false;
1760
1761         // NOTE: gtArrElemSize may need to be handled
1762
1763         unsigned dim;
1764         for (dim = 0; dim < op1->gtArrElem.gtArrRank; dim++)
1765         {
1766             if (!Compare(op1->gtArrElem.gtArrInds[dim], op2->gtArrElem.gtArrInds[dim]))
1767                 return false;
1768         }
1769
1770         op1 = op1->gtArrElem.gtArrObj;
1771         op2 = op2->gtArrElem.gtArrObj;
1772         goto AGAIN;
1773
1774     case GT_ARR_OFFSET:
1775         if (op1->gtArrOffs.gtCurrDim != op2->gtArrOffs.gtCurrDim ||
1776             op1->gtArrOffs.gtArrRank != op2->gtArrOffs.gtArrRank)
1777         {
1778             return false;
1779         }
1780         return (Compare(op1->gtArrOffs.gtOffset, op2->gtArrOffs.gtOffset) &&
1781                 Compare(op1->gtArrOffs.gtIndex, op2->gtArrOffs.gtIndex) &&
1782                 Compare(op1->gtArrOffs.gtArrObj, op2->gtArrOffs.gtArrObj));
1783
1784     case GT_CMPXCHG:
1785         return Compare(op1->gtCmpXchg.gtOpLocation, op2->gtCmpXchg.gtOpLocation)
1786             && Compare(op1->gtCmpXchg.gtOpValue, op2->gtCmpXchg.gtOpValue)
1787             && Compare(op1->gtCmpXchg.gtOpComparand, op2->gtCmpXchg.gtOpComparand);
1788
1789     case GT_ARR_BOUNDS_CHECK:
1790 #ifdef FEATURE_SIMD
1791     case GT_SIMD_CHK:
1792 #endif // FEATURE_SIMD
1793         return Compare(op1->gtBoundsChk.gtArrLen, op2->gtBoundsChk.gtArrLen)
1794             && Compare(op1->gtBoundsChk.gtIndex, op2->gtBoundsChk.gtIndex)
1795             && (op1->gtBoundsChk.gtThrowKind == op2->gtBoundsChk.gtThrowKind);
1796
1797     default:
1798         assert(!"unexpected operator");
1799     }
1800
1801     return false;
1802 }
1803
1804 /*****************************************************************************
1805  *
1806  *  Returns true if the given tree contains a use of local #lclNum; if 'defOnly'
      *  is true, only definitions (assignments to the local) are counted.
1807  */
1808
1809 bool                Compiler::gtHasRef(GenTreePtr tree, ssize_t lclNum, bool defOnly)
1810 {
1811     genTreeOps      oper;
1812     unsigned        kind;
1813
1814 AGAIN:
1815
1816     assert(tree);
1817
1818     oper = tree->OperGet();
1819     kind = tree->OperKind();
1820
1821     assert(oper != GT_STMT);
1822
1823     /* Is this a constant node? */
1824
1825     if  (kind & GTK_CONST)
1826         return  false;
1827
1828     /* Is this a leaf node? */
1829
1830     if  (kind & GTK_LEAF)
1831     {
1832         if  (oper == GT_LCL_VAR)
1833         {
1834             if  (tree->gtLclVarCommon.gtLclNum == (unsigned)lclNum)
1835             {
1836                 if  (!defOnly)
1837                     return true;
1838             }
1839         }
1840         else if (oper == GT_RET_EXPR)
1841         {
1842             return gtHasRef(tree->gtRetExpr.gtInlineCandidate, lclNum, defOnly);
1843         } 
1844
1845         return false;
1846     }
1847
1848     /* Is it a 'simple' unary/binary operator? */
1849
1850     if  (kind & GTK_SMPOP)
1851     {
1852         if  (tree->gtGetOp2())
1853         {
1854             if  (gtHasRef(tree->gtOp.gtOp1, lclNum, defOnly))
1855                 return true;
1856
1857             tree = tree->gtOp.gtOp2;
1858             goto AGAIN;
1859         }
1860         else
1861         {
1862             tree = tree->gtOp.gtOp1;
1863
1864             if  (!tree)
1865                 return  false;
1866
1867             if  (kind & GTK_ASGOP)
1868             {
1869                 // 'tree' is the gtOp1 of an assignment node. So we can handle
1870                 // the case where defOnly is either true or false.
1871
1872                 if  (tree->gtOper == GT_LCL_VAR &&
1873                      tree->gtLclVarCommon.gtLclNum == (unsigned)lclNum)
1874                 {
1875                     return true;
1876                 }
1877                 else if (tree->gtOper == GT_FIELD &&
1878                          lclNum == (ssize_t)tree->gtField.gtFldHnd)
1879                 {
1880                      return true;
1881                 }
1882             }
1883
1884             goto AGAIN;
1885         }
1886     }
1887
1888     /* See what kind of a special operator we have here */
1889
1890     switch  (oper)
1891     {
1892     case GT_FIELD:
1893         if  (lclNum == (ssize_t)tree->gtField.gtFldHnd)
1894         {
1895             if  (!defOnly)
1896                 return true;
1897         }
1898
1899         tree = tree->gtField.gtFldObj;
1900         if  (tree)
1901             goto AGAIN;
1902         break;
1903
1904     case GT_CALL:
1905
1906         if  (tree->gtCall.gtCallObjp)
1907             if  (gtHasRef(tree->gtCall.gtCallObjp, lclNum, defOnly))
1908                 return true;
1909
1910         if  (tree->gtCall.gtCallArgs)
1911             if  (gtHasRef(tree->gtCall.gtCallArgs, lclNum, defOnly))
1912                 return true;
1913
1914         if  (tree->gtCall.gtCallLateArgs)
1915             if  (gtHasRef(tree->gtCall.gtCallLateArgs, lclNum, defOnly))
1916                 return true;
1917
1918         if  (tree->gtCall.gtControlExpr)
1919             if  (gtHasRef(tree->gtCall.gtControlExpr, lclNum, defOnly))
1920                 return true;
1921
1922         if  (tree->gtCall.gtCallType == CT_INDIRECT)
1923         {
1924             // pinvoke-calli cookie is a constant, or constant indirection
1925             assert(tree->gtCall.gtCallCookie == NULL ||
1926                    tree->gtCall.gtCallCookie->gtOper == GT_CNS_INT ||
1927                    tree->gtCall.gtCallCookie->gtOper == GT_IND);
1928             
1929             tree = tree->gtCall.gtCallAddr;
1930         }
1931         else
1932             tree = NULL;
1933
1934         if  (tree)
1935             goto AGAIN;
1936
1937         break;
1938
1939     case GT_ARR_ELEM:
1940         if (gtHasRef(tree->gtArrElem.gtArrObj, lclNum, defOnly))
1941             return true;
1942
1943         unsigned dim;
1944         for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
1945         {
1946             if (gtHasRef(tree->gtArrElem.gtArrInds[dim], lclNum, defOnly))
1947                 return true;
1948         }
1949
1950         break;
1951
1952     case GT_ARR_OFFSET:
1953         if (gtHasRef(tree->gtArrOffs.gtOffset, lclNum, defOnly) ||
1954             gtHasRef(tree->gtArrOffs.gtIndex, lclNum, defOnly) ||
1955             gtHasRef(tree->gtArrOffs.gtArrObj, lclNum, defOnly))
1956         {
1957             return true;
1958         }
1959         break;
1960
1961     case GT_CMPXCHG:
1962         if (gtHasRef(tree->gtCmpXchg.gtOpLocation, lclNum, defOnly))
1963             return true;
1964         if (gtHasRef(tree->gtCmpXchg.gtOpValue, lclNum, defOnly))
1965             return true;
1966         if (gtHasRef(tree->gtCmpXchg.gtOpComparand, lclNum, defOnly))
1967             return true;
1968         break;
1969
1970     case GT_ARR_BOUNDS_CHECK:
1971 #ifdef FEATURE_SIMD
1972     case GT_SIMD_CHK:
1973 #endif // FEATURE_SIMD
1974         if (gtHasRef(tree->gtBoundsChk.gtArrLen, lclNum, defOnly))
1975             return true;
1976         if (gtHasRef(tree->gtBoundsChk.gtIndex, lclNum, defOnly))
1977             return true;
1978         break;
1979
1980     default:
1981 #ifdef  DEBUG
1982         gtDispTree(tree);
1983 #endif
1984         assert(!"unexpected operator");
1985     }
1986
1987     return false;
1988 }
1989
1990 struct AddrTakenDsc
1991 {
1992     Compiler * comp;
1993     bool       hasAddrTakenLcl;
1994 };
1995
1996 /* static */
1997 Compiler::fgWalkResult      Compiler::gtHasLocalsWithAddrOpCB(GenTreePtr *pTree,
1998                                                               fgWalkData *data)
1999 {   
2000     GenTreePtr tree = *pTree;
2001     Compiler * comp = data->compiler;
2002
2003     if (tree->gtOper == GT_LCL_VAR)
2004     {
2005         unsigned    lclNum = tree->gtLclVarCommon.gtLclNum;
2006         LclVarDsc * varDsc = &comp->lvaTable[lclNum];
2007
2008         if (varDsc->lvHasLdAddrOp || varDsc->lvAddrExposed)
2009         {
2010             ((AddrTakenDsc *)data->pCallbackData)->hasAddrTakenLcl = true;
2011             return WALK_ABORT;            
2012         }
2013     }
2014     
2015     return WALK_CONTINUE;
2016 }
2017
2018 /*****************************************************************************
2019  *
2020  *  Return true if this tree contains locals with lvHasLdAddrOp or lvAddrExposed
2021  *  flag(s) set.
2022  */
2023
2024 bool                Compiler::gtHasLocalsWithAddrOp(GenTreePtr tree)
2025 {
2026     AddrTakenDsc   desc;
2027     
2028     desc.comp            = this;
2029     desc.hasAddrTakenLcl = false;
2030     
2031     fgWalkTreePre(&tree, 
2032                   gtHasLocalsWithAddrOpCB,
2033                   &desc);  
2034
2035     return desc.hasAddrTakenLcl;
2036 }                    
2037
2038 /*****************************************************************************
2039  *
2040  *  Helper used to compute hash values for trees.
2041  */
2042
2043 inline
2044 unsigned            genTreeHashAdd(unsigned old, unsigned add)
2045 {
2046     return  (old + old/2) ^ add;
2047 }
2048
2049 inline
2050 unsigned            genTreeHashAdd(unsigned old, void * add)
2051 {
2052     return genTreeHashAdd(old, (unsigned) (size_t)add);
2053 }
2054
2055 inline
2056 unsigned            genTreeHashAdd(unsigned old, unsigned add1,
2057                                                  unsigned add2)
2058 {
2059     return  (old + old/2) ^ add1 ^ add2;
2060 }
2061
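
     // For example, folding a value v into a running hash h yields ((h + h/2) ^ v).
     // The three-argument overload XORs both values against the same folded 'old', so
     // its result is symmetric in add1/add2; gtHashValue relies on this to hash the
     // operands of commutative operators in an order-insensitive way.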
2062 /*****************************************************************************
2063  *
2064  *  Given an arbitrary expression tree, compute a hash value for it.
2065  */
2066
2067 unsigned            Compiler::gtHashValue(GenTree * tree)
2068 {
2069     genTreeOps      oper;
2070     unsigned        kind;
2071
2072     unsigned        hash = 0;
2073
2074     GenTreePtr      temp;
2075
2076 AGAIN:
2077     assert(tree);
2078     assert(tree->gtOper != GT_STMT);
2079
2080     /* Figure out what kind of a node we have */
2081
2082     oper = tree->OperGet();
2083     kind = tree->OperKind();
2084
2085     /* Include the operator value in the hash */
2086
2087     hash = genTreeHashAdd(hash, oper);
2088
2089     /* Is this a constant or leaf node? */
2090
2091     if  (kind & (GTK_CONST|GTK_LEAF))
2092     {
2093         size_t        add;
2094
2095         switch (oper)
2096         {
2097         case GT_LCL_VAR: add = tree->gtLclVar.gtLclNum;       break;
2098         case GT_LCL_FLD: hash = genTreeHashAdd(hash, tree->gtLclFld.gtLclNum);
2099                          add = tree->gtLclFld.gtLclOffs;      break;
2100
2101         case GT_CNS_INT: add = (int)tree->gtIntCon.gtIconVal; break;
2102         case GT_CNS_LNG: add = (int)tree->gtLngCon.gtLconVal; break;
2103         case GT_CNS_DBL: add = (int)tree->gtDblCon.gtDconVal; break;
2104         case GT_CNS_STR: add = (int)tree->gtStrCon.gtSconCPX; break;
2105
2106         case GT_JMP:     add = tree->gtVal.gtVal1;            break;
2107
2108         default:         add = 0;                             break;
2109         }
2110
2111         // Narrowing cast, but this is fine for hashing.
2112         hash = genTreeHashAdd(hash, (unsigned)add);
2113         goto DONE;
2114     }
2115
2116     /* Is it a 'simple' unary/binary operator? */
2117
2118     GenTreePtr op1; 
2119
2120     if (kind & GTK_UNOP)
2121     {
2122         op1 = tree->gtOp.gtOp1;
2123         /* Special case: no sub-operand at all */
2124
2125         if (GenTree::IsExOp(kind))
2126         {
2127             // ExOp operators extend operators with extra, non-GenTreePtr members.  In many cases,
2128             // these should be included in the hash code.
2129             switch (oper)
2130             {
2131             case GT_ARR_LENGTH:
2132                 hash += tree->gtArrLen.ArrLenOffset();
2133                 break;
2134             case GT_CAST:
2135                 hash ^= tree->gtCast.gtCastType;
2136                 break;
2137             case GT_OBJ:
2138                 hash ^= static_cast<unsigned>(reinterpret_cast<uintptr_t>(tree->gtObj.gtClass));
2139                 break;
2140             case GT_INDEX:
2141                 hash += tree->gtIndex.gtIndElemSize;
2142                 break;
2143
2144         
2145                 // For the ones below no extra argument matters for the hash.
2146             case GT_BOX:
2147                 break;
2148
2149             default:
2150                 assert(!"unexpected unary ExOp operator");
2151             }
2152         }
2153
2154         if  (!op1)
2155             goto DONE;
2156
2157         tree = op1;
2158         goto AGAIN;
2159     }
2160
2161     if  (kind & GTK_BINOP)
2162     {
2163         if (GenTree::IsExOp(kind))
2164         {
2165             // ExOp operators extend operators with extra, non-GenTreePtr members.  In many cases,
2166             // these should be included in the hash code.
2167             switch (oper)
2168             {
2169             case GT_INTRINSIC:
2170                 hash += tree->gtIntrinsic.gtIntrinsicId;
2171                 break;
2172             case GT_LEA:
2173                 hash += (tree->gtAddrMode.gtOffset << 3) + tree->gtAddrMode.gtScale;
2174                 break;
2175
2176                 // For the ones below no extra argument matters for the hash.
2177             case GT_ARR_INDEX:
2178             case GT_QMARK:
2179             case GT_INDEX:
2180                 break;
2181
2182 #ifdef FEATURE_SIMD
2183             case GT_SIMD:
2184                 hash += tree->gtSIMD.gtSIMDIntrinsicID;
2185                 hash += tree->gtSIMD.gtSIMDBaseType;
2186                 break;
2187 #endif // FEATURE_SIMD
2188
2189             default:
2190                 assert(!"unexpected binary ExOp operator");
2191             }
2192         }
2193
2194         op1 = tree->gtOp.gtOp1;
2195         GenTreePtr op2  = tree->gtOp.gtOp2;
2196
2197         /* Is there a second sub-operand? */
2198
2199         if  (!op2)
2200         {
2201             /* Special case: no sub-operands at all */
2202
2203             if  (!op1)
2204                 goto DONE;
2205
2206             /* This is a unary operator */
2207
2208             tree = op1;
2209             goto AGAIN;
2210         }
2211
2212         /* This is a binary operator */
2213
2214         unsigned        hsh1 = gtHashValue(op1);
2215
2216         /* Special case: addition of two values */
2217
2218         if  (GenTree::OperIsCommutative(oper))
2219         {
2220             unsigned    hsh2 = gtHashValue(op2);
2221
2222             /* Produce a hash that allows swapping the operands */
2223
2224             hash = genTreeHashAdd(hash, hsh1, hsh2);
2225             goto DONE;
2226         }
2227
2228         /* Add op1's hash to the running value and continue with op2 */
2229
2230         hash = genTreeHashAdd(hash, hsh1);
2231
2232         tree = op2;
2233         goto AGAIN;
2234     }
2235
2236     /* See what kind of a special operator we have here */
2237     switch  (tree->gtOper)
2238     {
2239     case GT_FIELD:
2240         if (tree->gtField.gtFldObj)
2241         {
2242             temp = tree->gtField.gtFldObj; assert(temp);
2243             hash = genTreeHashAdd(hash, gtHashValue(temp));
2244         }
2245         break;
2246
2247     case GT_STMT:
2248         temp = tree->gtStmt.gtStmtExpr; assert(temp);
2249         hash = genTreeHashAdd(hash, gtHashValue(temp));
2250         break;
2251
2252     case GT_ARR_ELEM:
2253
2254         hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrElem.gtArrObj));
2255
2256         unsigned dim;
2257         for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
2258             hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrElem.gtArrInds[dim]));
2259
2260         break;
2261
2262     case GT_ARR_OFFSET:
2263         hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrOffs.gtOffset));
2264         hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrOffs.gtIndex));
2265         hash = genTreeHashAdd(hash, gtHashValue(tree->gtArrOffs.gtArrObj));
2266         break;
2267
2268     case GT_CALL:
2269
2270         if  (tree->gtCall.gtCallObjp && tree->gtCall.gtCallObjp->gtOper != GT_NOP)
2271         {
2272             temp = tree->gtCall.gtCallObjp; assert(temp);
2273             hash = genTreeHashAdd(hash, gtHashValue(temp));
2274         }
2275
2276         if (tree->gtCall.gtCallArgs)
2277         {
2278             temp = tree->gtCall.gtCallArgs; assert(temp);
2279             hash = genTreeHashAdd(hash, gtHashValue(temp));
2280         }
2281
2282         if  (tree->gtCall.gtCallType == CT_INDIRECT) 
2283         {
2284             temp = tree->gtCall.gtCallAddr; assert(temp);
2285             hash = genTreeHashAdd(hash, gtHashValue(temp));
2286         }
2287         else
2288         {
2289             hash = genTreeHashAdd(hash, tree->gtCall.gtCallMethHnd);
2290         }
2291
2292         if (tree->gtCall.gtCallLateArgs) 
2293         {
2294             temp = tree->gtCall.gtCallLateArgs; assert(temp);
2295             hash = genTreeHashAdd(hash, gtHashValue(temp));
2296         }
2297         break;
2298
2299     case GT_CMPXCHG:
2300         hash = genTreeHashAdd(hash, gtHashValue(tree->gtCmpXchg.gtOpLocation));
2301         hash = genTreeHashAdd(hash, gtHashValue(tree->gtCmpXchg.gtOpValue));
2302         hash = genTreeHashAdd(hash, gtHashValue(tree->gtCmpXchg.gtOpComparand));
2303         break;
2304
2305     case GT_ARR_BOUNDS_CHECK:
2306 #ifdef FEATURE_SIMD
2307     case GT_SIMD_CHK:
2308 #endif // FEATURE_SIMD
2309         hash = genTreeHashAdd(hash, gtHashValue(tree->gtBoundsChk.gtArrLen));
2310         hash = genTreeHashAdd(hash, gtHashValue(tree->gtBoundsChk.gtIndex));
2311         hash = genTreeHashAdd(hash, tree->gtBoundsChk.gtThrowKind);
2312         break;
2313
2314     default:
2315 #ifdef  DEBUG
2316         gtDispTree(tree);
2317 #endif
2318         assert(!"unexpected operator");
2319         break;
2320     }
2321
2322 DONE:
2323
2324     return hash;
2325 }
2326
2327 /*****************************************************************************
2328  *
2329  *  Given an arbitrary expression tree, attempts to find the set of all local variables
2330  *  referenced by the tree, and returns them as "*result".
2331  *  If "findPtr" is null, this is a tracked variable set;
2332  *  if it is non-null, this is an "all var set."
2333  *  The "*result" value is valid only if the call returns "true."  It may return "false"
2334  *  for several reasons:
2335  *     If "findPtr" is NULL, and the expression contains an untracked variable.
2336  *     If "findPtr" is non-NULL, and the expression contains a variable that can't be represented
2337  *        in an "all var set."
2338  *     If the expression accesses address-exposed variables.
2339  *
2340  *  If there are any indirections or global refs in the expression, the
2341  *  "*refsPtr" argument
2342  *  will be assigned the appropriate bit set based on the 'varRefKinds' type.
2343  *  It won't be assigned anything when there are no indirections or global
2344  *  references, though, so this value should be initialized before the call.
2345  *  If we encounter an expression that is equal to *findPtr we set *findPtr
2346  *  to NULL.
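      *
      *  A minimal illustrative tracked-variable query (a sketch, not taken from an
      *  actual caller) looks like:
      *
      *      varRefKinds refs = VR_NONE;
      *      VARSET_TP   VARSET_INIT_NOCOPY(vars, VarSetOps::UninitVal());
      *      if (lvaLclVarRefs(expr, NULL, &refs, &vars)) { ... use vars and refs ... }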
2347  */
2348 bool                Compiler::lvaLclVarRefs(GenTreePtr   tree,
2349                                             GenTreePtr * findPtr,
2350                                             varRefKinds* refsPtr,
2351                                             void*        result)
2352 {
2353     genTreeOps      oper;
2354     unsigned        kind;
2355     varRefKinds     refs = VR_NONE;
2356     ALLVARSET_TP    ALLVARSET_INIT_NOCOPY(allVars, AllVarSetOps::UninitVal());
2357     VARSET_TP       VARSET_INIT_NOCOPY(trkdVars, VarSetOps::UninitVal());
2358     if (findPtr)
2359         AllVarSetOps::AssignNoCopy(this, allVars, AllVarSetOps::MakeEmpty(this));
2360     else
2361         VarSetOps::AssignNoCopy(this, trkdVars, VarSetOps::MakeEmpty(this));
2362
2363 AGAIN:
2364
2365     assert(tree);
2366     assert(tree->gtOper != GT_STMT);
2367
2368     /* Remember whether we've come across the expression we're looking for */
2369
2370     if  (findPtr && *findPtr == tree) *findPtr = NULL;
2371
2372     /* Figure out what kind of a node we have */
2373
2374     oper = tree->OperGet();
2375     kind = tree->OperKind();
2376
2377     /* Is this a constant or leaf node? */
2378
2379     if  (kind & (GTK_CONST|GTK_LEAF))
2380     {
2381         if  (oper == GT_LCL_VAR)
2382         {
2383             unsigned    lclNum = tree->gtLclVarCommon.gtLclNum;
2384
2385             /* Should we use the variable table? */
2386
2387             if  (findPtr)
2388             {
2389                 if (lclNum >= lclMAX_ALLSET_TRACKED)
2390                     return false;
2391
2392                 AllVarSetOps::AddElemD(this, allVars, lclNum);
2393             }
2394             else
2395             {
2396                 assert(lclNum < lvaCount);
2397                 LclVarDsc * varDsc = lvaTable + lclNum;
2398
2399                 if (varDsc->lvTracked == false)
2400                     return  false;
2401
2402                 // Don't deal with expressions with address-exposed variables.
2403                 if (varDsc->lvAddrExposed)
2404                     return  false;
2405
2406                 VarSetOps::AddElemD(this, trkdVars, varDsc->lvVarIndex);
2407             }
2408         }
2409         else if (oper == GT_LCL_FLD)
2410         {
2411             /* We can't track every field of every var. Moreover, indirections
2412                may access different parts of the var as different (but
2413                overlapping) fields. So just treat them as indirect accesses */
2414
2415             if (varTypeIsGC(tree->TypeGet()))
2416                 refs = VR_IND_REF;
2417             else
2418                 refs = VR_IND_SCL;
2419         }
2420         else if (oper == GT_CLS_VAR)
2421         {
2422             refs = VR_GLB_VAR;
2423         }
2424
2425         if (refs != VR_NONE)
2426         {
2427             /* Write it back to the caller's parameter using an 'or' */
2428             *refsPtr = varRefKinds((*refsPtr) | refs);
2429         }
2430         lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
2431         return true;
2432     }
2433
2434     /* Is it a 'simple' unary/binary operator? */
2435
2436     if  (kind & GTK_SMPOP)
2437     {
2438         if  (oper == GT_IND)
2439         {
2440             assert(tree->gtOp.gtOp2 == NULL);
2441
2442             /* Set the proper indirection bit */
2443
2444             if ((tree->gtFlags & GTF_IND_INVARIANT) == 0)
2445             {
2446                 if (varTypeIsGC(tree->TypeGet()))
2447                     refs = VR_IND_REF;
2448                 else
2449                     refs = VR_IND_SCL;
2450
2451                 // If the flag GTF_IND_TGTANYWHERE is set this indirection
2452                 // could also point at a global variable
2453
2454                 if (tree->gtFlags & GTF_IND_TGTANYWHERE)
2455                 {
2456                     refs = varRefKinds( ((int) refs) | ((int) VR_GLB_VAR) );
2457                 }
2458             }
2459
2460             /* Write it back to the caller's parameter using an 'or' */
2461             *refsPtr = varRefKinds((*refsPtr) | refs);
2462
2463             // For IL volatile memory accesses we mark the GT_IND node
2464             // with a GTF_DONT_CSE flag.
2465             //
2466             // This flag is also set for the left hand side of an assignment.
2467             //
2468             // If this flag is set then we return false
2469             //
2470             if (tree->gtFlags & GTF_DONT_CSE)
2471             {
2472                 return  false;
2473             }
2474         }
2475
2476         if  (tree->gtGetOp2())
2477         {
2478             /* It's a binary operator */
2479             if (!lvaLclVarRefsAccum(tree->gtOp.gtOp1, findPtr, refsPtr, &allVars, &trkdVars)) return false;
2480             // Otherwise...
2481             tree = tree->gtOp.gtOp2; assert(tree);
2482             goto AGAIN;
2483         }
2484         else
2485         {
2486             /* It's a unary (or nilary) operator */
2487
2488             tree = tree->gtOp.gtOp1;
2489             if  (tree)
2490                 goto AGAIN;
2491
2492             lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
2493             return true;
2494         }
2495     }
2496
2497     switch (oper)
2498     {
2499     case GT_ARR_ELEM:
2500         if (!lvaLclVarRefsAccum(tree->gtArrElem.gtArrObj, findPtr, refsPtr, &allVars, &trkdVars)) return false;
2501
2502         unsigned dim;
2503         for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
2504         {
2505             VARSET_TP VARSET_INIT_NOCOPY(tmpVs, VarSetOps::UninitVal());
2506             if (!lvaLclVarRefsAccum(tree->gtArrElem.gtArrInds[dim], findPtr, refsPtr, &allVars, &trkdVars)) return false;
2507         }
2508         lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
2509         return true;
2510
2511     case GT_ARR_OFFSET:
2512         if (!lvaLclVarRefsAccum(tree->gtArrOffs.gtOffset, findPtr, refsPtr, &allVars, &trkdVars))
2513             return false;
2514         // Otherwise...
2515         if (!lvaLclVarRefsAccum(tree->gtArrOffs.gtIndex, findPtr, refsPtr, &allVars, &trkdVars))
2516             return false;
2517         // Otherwise...
2518         if (!lvaLclVarRefsAccum(tree->gtArrOffs.gtArrObj, findPtr, refsPtr, &allVars, &trkdVars))
2519             return false;
2520         // Otherwise...
2521         lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
2522         return true;
2523
2524     case GT_ARR_BOUNDS_CHECK:
2525 #ifdef FEATURE_SIMD
2526     case GT_SIMD_CHK:
2527 #endif // FEATURE_SIMD
2528         {
2529             if (!lvaLclVarRefsAccum(tree->gtBoundsChk.gtArrLen, findPtr, refsPtr, &allVars, &trkdVars)) return false;
2530             // Otherwise...
2531             if (!lvaLclVarRefsAccum(tree->gtBoundsChk.gtIndex, findPtr, refsPtr, &allVars, &trkdVars)) return false;
2532             // Otherwise...
2533             lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
2534             return true;
2535         }
2536
2537     case GT_CALL:
2538         /* Allow calls to the Shared Static helper */
2539         if (IsSharedStaticHelper(tree))
2540         {
2541             *refsPtr = varRefKinds((*refsPtr) | VR_INVARIANT);
2542             lvaLclVarRefsAccumIntoRes(findPtr, result, allVars, trkdVars);
2543             return true;
2544         }
2545         break;
2546     default:
2547         break;
2548
2549     } // end switch (oper)
2550
2551     return false;
2552 }
2553
2554 bool                Compiler::lvaLclVarRefsAccum(GenTreePtr     tree,
2555                                                  GenTreePtr  *  findPtr,
2556                                                  varRefKinds *  refsPtr,
2557                                                  ALLVARSET_TP*  allVars,
2558                                                  VARSET_TP*     trkdVars)
2559 {
2560     if (findPtr)
2561     {
2562         ALLVARSET_TP ALLVARSET_INIT_NOCOPY(tmpVs, AllVarSetOps::UninitVal());
2563         if (!lvaLclVarRefs(tree, findPtr, refsPtr, &tmpVs)) return false;
2564         // Otherwise...
2565         AllVarSetOps::UnionD(this, *allVars, tmpVs);
2566     }
2567     else
2568     {
2569         VARSET_TP VARSET_INIT_NOCOPY(tmpVs, VarSetOps::UninitVal());
2570         if (!lvaLclVarRefs(tree, findPtr, refsPtr, &tmpVs)) return false;
2571         // Otherwise...
2572         VarSetOps::UnionD(this, *trkdVars, tmpVs);
2573     }
2574     return true;
2575 }
2576
2577 void                Compiler::lvaLclVarRefsAccumIntoRes(GenTreePtr  *  findPtr,
2578                                                         void*          result,
2579                                                         ALLVARSET_VALARG_TP allVars,
2580                                                         VARSET_VALARG_TP    trkdVars)
2581 {
2582     if (findPtr)
2583     {
2584         ALLVARSET_TP* avsPtr = (ALLVARSET_TP*)result;
2585         AllVarSetOps::AssignNoCopy(this, (*avsPtr), allVars);
2586     }
2587     else
2588     {
2589         VARSET_TP* vsPtr = (VARSET_TP*)result;
2590         VarSetOps::AssignNoCopy(this, (*vsPtr), trkdVars);
2591     }
2592 }
2593
2594 /*****************************************************************************
2595  *
2596  *  Return a relational operator that is the reverse of the given one.
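      *  e.g. the reverse (logical negation) of GT_LT is GT_GE: !(a < b) is (a >= b).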
2597  */
2598
2599 /* static */
2600 genTreeOps          GenTree::ReverseRelop(genTreeOps relop)
2601 {
2602     static const
2603     genTreeOps reverseOps[] =
2604     {
2605         GT_NE,          // GT_EQ
2606         GT_EQ,          // GT_NE
2607         GT_GE,          // GT_LT
2608         GT_GT,          // GT_LE
2609         GT_LT,          // GT_GE
2610         GT_LE,          // GT_GT
2611     };
2612
2613     assert(reverseOps[GT_EQ - GT_EQ] == GT_NE);
2614     assert(reverseOps[GT_NE - GT_EQ] == GT_EQ);
2615
2616     assert(reverseOps[GT_LT - GT_EQ] == GT_GE);
2617     assert(reverseOps[GT_LE - GT_EQ] == GT_GT);
2618     assert(reverseOps[GT_GE - GT_EQ] == GT_LT);
2619     assert(reverseOps[GT_GT - GT_EQ] == GT_LE);
2620
2621     assert(OperIsCompare(relop));
2622     assert(relop >= GT_EQ && (unsigned)(relop - GT_EQ) < sizeof(reverseOps));
2623
2624     return reverseOps[relop - GT_EQ];
2625 }
2626
2627 /*****************************************************************************
2628  *
2629  *  Return a relational operator that will work for swapped operands.
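      *  e.g. (a < b) is equivalent to (b > a), so the swapped form of GT_LT is GT_GT;
      *  contrast with ReverseRelop, which negates the relation instead.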
2630  */
2631
2632 /* static */
2633 genTreeOps          GenTree::SwapRelop(genTreeOps relop)
2634 {
2635     static const
2636     genTreeOps swapOps[] =
2637     {
2638         GT_EQ,          // GT_EQ
2639         GT_NE,          // GT_NE
2640         GT_GT,          // GT_LT
2641         GT_GE,          // GT_LE
2642         GT_LE,          // GT_GE
2643         GT_LT,          // GT_GT
2644     };
2645
2646     assert(swapOps[GT_EQ - GT_EQ] == GT_EQ);
2647     assert(swapOps[GT_NE - GT_EQ] == GT_NE);
2648
2649     assert(swapOps[GT_LT - GT_EQ] == GT_GT);
2650     assert(swapOps[GT_LE - GT_EQ] == GT_GE);
2651     assert(swapOps[GT_GE - GT_EQ] == GT_LE);
2652     assert(swapOps[GT_GT - GT_EQ] == GT_LT);
2653
2654     assert(OperIsCompare(relop));
2655     assert(relop >= GT_EQ && (unsigned)(relop - GT_EQ) < sizeof(swapOps));
2656
2657     return swapOps[relop - GT_EQ];
2658 }
2659
2660 /*****************************************************************************
2661  *
2662  *  Reverse the meaning of the given test condition.
2663  */
2664
2665 GenTreePtr          Compiler::gtReverseCond(GenTree * tree)
2666 {
2667     if  (tree->OperIsCompare())
2668     {
2669         tree->SetOper(GenTree::ReverseRelop(tree->OperGet()));
2670
2671         // Flip the GTF_RELOP_NAN_UN bit
2672         //     a ord b   === (a != NaN && b != NaN)
2673         //     a unord b === (a == NaN || b == NaN)
2674         // => !(a ord b) === (a unord b)
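             // e.g. reversing an ordered (a < b) yields (a >= b) with GTF_RELOP_NAN_UN
             //      set, i.e. 'a >= b, or either operand is NaN'.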
2675         if (varTypeIsFloating(tree->gtOp.gtOp1->TypeGet()))
2676             tree->gtFlags ^= GTF_RELOP_NAN_UN;
2677     }
2678     else
2679     {
2680         tree = gtNewOperNode(GT_NOT, TYP_INT, tree);
2681     }
2682
2683     return tree;
2684 }
2685
2686
2687 /*****************************************************************************/
2688
2689 #ifdef DEBUG
2690
2691
2692 bool                GenTree::gtIsValid64RsltMul()
2693 {
2694     if ((gtOper != GT_MUL) || !(gtFlags & GTF_MUL_64RSLT))
2695         return false;
2696
2697     GenTreePtr  op1 = gtOp.gtOp1;
2698     GenTreePtr  op2 = gtOp.gtOp2;
2699
2700     if (TypeGet() != TYP_LONG ||
2701         op1->TypeGet() != TYP_LONG ||
2702         op2->TypeGet() != TYP_LONG)
2703         return false;
2704
2705     if (gtOverflow())
2706         return false;
2707
2708     // op1 has to be conv.i8(i4Expr)
2709     if ((op1->gtOper != GT_CAST) ||
2710         (genActualType(op1->CastFromType()) != TYP_INT))
2711         return false;
2712
2713     // op2 has to be conv.i8(i4Expr)
2714     if ((op2->gtOper != GT_CAST) ||
2715         (genActualType(op2->CastFromType()) != TYP_INT))
2716         return false;
2717
2718     // The signedness of both casts must be the same
2719     if (((op1->gtFlags & GTF_UNSIGNED) != 0) !=
2720         ((op2->gtFlags & GTF_UNSIGNED) != 0))
2721         return false;
2722
2723     // Do unsigned mul iff both the casts are unsigned
2724     if (((op1->gtFlags & GTF_UNSIGNED) != 0) != ((gtFlags & GTF_UNSIGNED) != 0))
2725         return false;
2726
2727     return true;
2728 }
2729
2730 #endif // DEBUG
2731
2732 /*****************************************************************************
2733  *
2734  *  Figure out the evaluation order for a list of values.
2735  */
2736
2737 unsigned            Compiler::gtSetListOrder(GenTree *list, bool regs)
2738 {
2739     assert(list && list->IsList());
2740
2741     unsigned        level  = 0;
2742     unsigned        ftreg  = 0;
2743     unsigned        costSz = 0; 
2744     unsigned        costEx = 0;
2745
2746 #if FEATURE_STACK_FP_X87
2747     /* Save the current FP stack level since an argument list
2748      * will implicitly pop the FP stack when pushing the argument */
2749     unsigned        FPlvlSave = codeGen->genGetFPstkLevel();
2750 #endif // FEATURE_STACK_FP_X87
2751
2752     GenTreePtr      next = list->gtOp.gtOp2;
2753
2754     if  (next)
2755     {
2756         unsigned  nxtlvl = gtSetListOrder(next, regs);
2757
2758         ftreg |= next->gtRsvdRegs;
2759
2760         if  (level < nxtlvl)
2761              level = nxtlvl;
2762         costEx += next->gtCostEx;
2763         costSz += next->gtCostSz;
2764     }
2765
2766     GenTreePtr      op1  = list->gtOp.gtOp1;
2767     unsigned        lvl  = gtSetEvalOrder(op1);
2768
2769 #if FEATURE_STACK_FP_X87
2770     /* restore the FP level */
2771     codeGen->genResetFPstkLevel(FPlvlSave);
2772 #endif // FEATURE_STACK_FP_X87
2773
2774     list->gtRsvdRegs = (regMaskSmall)(ftreg | op1->gtRsvdRegs);
2775
2776     if  (level < lvl)
2777          level = lvl;
2778
2779     if (op1->gtCostEx != 0)
2780     {
2781         costEx += op1->gtCostEx;
2782         costEx += regs ? 0 : IND_COST_EX;
2783     }
2784
2785     if (op1->gtCostSz != 0)
2786     {
2787         costSz += op1->gtCostSz;
2788 #ifdef _TARGET_XARCH_
2789         if (regs)                // push is smaller than mov to reg
2790 #endif
2791         {
2792             costSz += 1;
2793         }
2794     }
2795
2796     list->SetCosts(costEx, costSz);
2797
2798     return level;
2799 }
2800
2801 /*****************************************************************************
2802  *
2803  *  This routine is a helper routine for gtSetEvalOrder() and is used to
2804  *  mark the interior address computation nodes with the GTF_ADDRMODE_NO_CSE flag
2805  *  which prevents them from being considered for CSE's.
2806  *
2807  *  Furthermore this routine is a factoring of the logic used to walk down 
2808  *  the child nodes of a GT_IND tree, similar to optParseArrayRef().
2809  *
2810  *  Previously we had this logic repeated three times inside of gtSetEvalOrder().
2811  *  Here we combine those three repeats into this routine and use the 
2812  *  bool constOnly to modify the behavior of this routine for the first call.
2813  *
2814  *  The object here is to mark all of the interior GT_ADD's and GT_NOP's
2815  *  with the GTF_ADDRMODE_NO_CSE flag and to set op1 and op2 to the terminal nodes
2816  *  which are later matched against 'adr' and 'idx'.
2817  *
2820  *
2821  *  TODO-Cleanup: It is essentially impossible to determine
2822  *  what it is supposed to do, or to write a reasonable specification comment
2823  *  for it that describes what it is supposed to do. There are obviously some
2824  *  very specific tree patterns that it expects to see, but those are not documented.
2825  *  The fact that it writes back to its op1WB and op2WB arguments, and traverses
2826  *  down both op1 and op2 trees, but op2 is only related to op1 in the (!constOnly)
2827  *  case (which really seems like a bug) is very confusing.
2828  */
2829
2830 void Compiler::gtWalkOp(GenTree * *  op1WB, 
2831                         GenTree * *  op2WB, 
2832                         GenTree *    adr,
2833                         bool         constOnly)
2834 {
2835     GenTreePtr op1 = *op1WB;
2836     GenTreePtr op2 = *op2WB;
2837     GenTreePtr op1EffectiveVal;
2838
2839     if (op1->gtOper == GT_COMMA)
2840     {
2841         op1EffectiveVal = op1->gtEffectiveVal();
2842         if ((op1EffectiveVal->gtOper == GT_ADD) &&
2843             (!op1EffectiveVal->gtOverflow())    && 
2844             (!constOnly || (op1EffectiveVal->gtOp.gtOp2->IsCnsIntOrI())))
2845         {
2846             op1 = op1EffectiveVal;
2847         }
2848     }
2849
2850     // Now we look for op1's with non-overflow GT_ADDs [of constants]
2851     while ((op1->gtOper == GT_ADD)  && 
2852            (!op1->gtOverflow())     && 
2853            (!constOnly || (op1->gtOp.gtOp2->IsCnsIntOrI())))
2854     {
2855         // mark it with GTF_ADDRMODE_NO_CSE
2856         op1->gtFlags |= GTF_ADDRMODE_NO_CSE;
2857
2858         if (!constOnly) // TODO-Cleanup: It seems bizarre that this is !constOnly
2859             op2 = op1->gtOp.gtOp2;
2860         op1 = op1->gtOp.gtOp1;
2861         
2862         // If op1 is a GT_NOP then swap op1 and op2.
2863         // (Why? Also, presumably op2 is not a GT_NOP in this case?)
2864         if (op1->gtOper == GT_NOP)
2865         {
2866             GenTreePtr tmp;
2867
2868             tmp = op1;
2869             op1 = op2;
2870             op2 = tmp;
2871         }
2872
2873         if (op1->gtOper == GT_COMMA)
2874         {
2875             op1EffectiveVal = op1->gtEffectiveVal();
2876             if ((op1EffectiveVal->gtOper == GT_ADD) &&
2877                 (!op1EffectiveVal->gtOverflow())    && 
2878                 (!constOnly || (op1EffectiveVal->gtOp.gtOp2->IsCnsIntOrI())))
2879             {
2880                 op1 = op1EffectiveVal;
2881             }
2882         }
2883
2884         if (!constOnly && ((op2 == adr) || (!op2->IsCnsIntOrI())))
2885             break;
2886     }
2887
2888     *op1WB = op1;
2889     *op2WB = op2;
2890 }
2891
2892 #ifdef DEBUG
2893 /*****************************************************************************
2894  * This is a workaround. It is to help implement an assert in gtSetEvalOrder() that the values
2895  * gtWalkOp() leaves in op1 and op2 correspond with the values of adr, idx, mul, and cns
2896  * that are returned by genCreateAddrMode(). It's essentially impossible to determine
2897  * what gtWalkOp() *should* return for all possible trees. This simply loosens one assert
2898  * to handle the following case:
2899
2900          indir     int
2901                     const(h)  int    4 field
2902                  +         byref
2903                     lclVar    byref  V00 this               <-- op2
2904               comma     byref                           <-- adr (base)
2905                  indir     byte
2906                     lclVar    byref  V00 this
2907            +         byref
2908                  const     int    2                     <-- mul == 4
2909               <<        int                                 <-- op1
2910                  lclVar    int    V01 arg1              <-- idx
2911
2912  * Here, we are planning to generate the address mode [edx+4*eax], where eax = idx and edx = the GT_COMMA expression.
2913  * To check adr equivalence with op2, we need to walk down the GT_ADD tree just like gtWalkOp() does.
2914  */
2915 GenTreePtr Compiler::gtWalkOpEffectiveVal(GenTreePtr op)
2916 {
2917     for (;;)
2918     {
2919         if (op->gtOper == GT_COMMA)
2920         {
2921             GenTreePtr opEffectiveVal = op->gtEffectiveVal();
2922             if ((opEffectiveVal->gtOper == GT_ADD) &&
2923                 (!opEffectiveVal->gtOverflow())    && 
2924                 (opEffectiveVal->gtOp.gtOp2->IsCnsIntOrI()))
2925             {
2926                 op = opEffectiveVal;
2927             }
2928         }
2929
2930         if ((op->gtOper != GT_ADD)  ||
2931             op->gtOverflow()        ||
2932             !op->gtOp.gtOp2->IsCnsIntOrI())
2933            break;
2934
2935         op = op->gtOp.gtOp1;
2936     }
2937
2938     return op;
2939 }
2940 #endif // DEBUG
2941
2942 /*****************************************************************************
2943  *
2944  *  Given a tree, set the gtCostEx and gtCostSz fields which
2945  *  are used to measure the relative costs of the codegen of the tree   
2946  *
2947  */
2948
2949 void                Compiler::gtPrepareCost(GenTree * tree)
2950 {
2951 #if FEATURE_STACK_FP_X87
2952     codeGen->genResetFPstkLevel();
2953 #endif // FEATURE_STACK_FP_X87
2954     gtSetEvalOrder(tree);
2955 }
2956
2957 bool                Compiler::gtIsLikelyRegVar(GenTree * tree)
2958 {
2959     if (tree->gtOper != GT_LCL_VAR)
2960         return false;
2961
2962     assert(tree->gtLclVar.gtLclNum < lvaTableCnt);
2963     LclVarDsc * varDsc = lvaTable + tree->gtLclVar.gtLclNum;
2964
2965     if (varDsc->lvDoNotEnregister)
2966         return false;
2967
2968     if (varDsc->lvRefCntWtd < (BB_UNITY_WEIGHT * 3))
2969         return false;
2970
2971 #ifdef _TARGET_X86_
2972     if (varTypeIsFloating(tree->TypeGet()))
2973         return false;
2974     if (varTypeIsLong(tree->TypeGet()))
2975         return false;
2976 #endif
2977
2978     return true;
2979 }
2980
2981 //------------------------------------------------------------------------
2982 // gtCanSwapOrder: Returns true iff the secondNode can be swapped with firstNode.
2983 //
2984 // Arguments:
2985 //    firstNode  - An operand of a tree that can have GTF_REVERSE_OPS set.
2986 //    secondNode - The other operand of the tree.
2987 //
2988 // Return Value:
2989 //    Returns a boolean indicating whether it is safe to reverse the execution
2990 //    order of the two trees, considering any exception, global effects, or
2991 //    ordering constraints.
2992 //
2993 bool
2994 Compiler::gtCanSwapOrder(GenTree* firstNode, GenTree*  secondNode)
2995 {
2996     // The relative order of global / side effects can't be swapped.
2997
2998     bool    canSwap = true;
2999
3000     if (optValnumCSE_phase)
3001     {
3002         canSwap = optCSE_canSwap(firstNode, secondNode);
3003     }
3004             
3005     // We cannot swap in the presence of special side effects such as GT_CATCH_ARG.
3006
3007     if (canSwap &&
3008         (firstNode->gtFlags & GTF_ORDER_SIDEEFF))
3009     {
3010         canSwap = false;
3011     }
3012
3013     // When strict side effect order is disabled we allow GTF_REVERSE_OPS to be set
3014     // when one or both sides contains a GTF_CALL or GTF_EXCEPT.
3015     // Currently only the C and C++ languages allow a non-strict side effect order.
3016
3017     unsigned strictEffects = GTF_GLOB_EFFECT;
3018
3019     if (canSwap &&
3020         (firstNode->gtFlags & strictEffects))
3021     {
3022         // op1 has side effects that can't be reordered.
3023         // Check for some special cases where we still may be able to swap.
3024
3025         if (secondNode->gtFlags & strictEffects)
3026         {
3027             // op2 also has non-reorderable side effects - can't swap.
3028             canSwap = false;
3029         }
3030         else
3031         {
3032             // No side effects in op2 - we can swap iff op1 has no way of modifying op2
3033             // (e.g. through byref assignments or calls), or op2 is a constant.
3034
3035             if (firstNode->gtFlags & strictEffects & GTF_PERSISTENT_SIDE_EFFECTS)
3036             {
3037                 // We have to be conservative - can swap iff op2 is constant.
3038                 if (!secondNode->OperIsConst())
3039                     canSwap = false;
3040             }
3041         }
3042     }
3043     return canSwap;
3044 }
3045
3046 /*****************************************************************************
3047  *
3048  *  Given a tree, figure out the order in which its sub-operands should be
3049  *  evaluated. If the second operand of a binary operator is more expensive
3050  *  than the first operand, then try to swap the operand trees. Updates the
3051  *  GTF_REVERSE_OPS bit if necessary in this case.
3052  *
3053  *  Returns the Sethi 'complexity' estimate for this tree (the higher
3054  *  the number, the higher is the tree's resources requirement).
3055  *
3056  *  This function sets:
3057  *      1. gtCostEx to the execution complexity estimate
3058  *      2. gtCostSz to the code size estimate
3059  *      3. gtRsvdRegs to the set of fixed registers trashed by the tree
3060  *      4. gtFPlvl to the "floating point depth" value for node, i.e. the max. number
3061  *         of operands the tree will push on the x87 (coprocessor) stack. Also sets
3062  *         genFPstkLevel, tmpDoubleSpillMax, and possibly gtFPstLvlRedo.
3063  *      5. Sometimes sets GTF_ADDRMODE_NO_CSE on nodes in the tree.
3064  *      6. DEBUG-only: clears GTF_DEBUG_NODE_MORPHED.
3065  */
3066
3067 #ifdef _PREFAST_
3068 #pragma warning(push)
3069 #pragma warning(disable:21000) // Suppress PREFast warning about overly large function
3070 #endif
3071 unsigned            Compiler::gtSetEvalOrder(GenTree * tree)
3072 {
3073     assert(tree);
3074     assert(tree->gtOper != GT_STMT);
3075
3076 #ifdef DEBUG
3077     /* Clear the GTF_DEBUG_NODE_MORPHED flag as well */
3078     tree->gtDebugFlags &= ~GTF_DEBUG_NODE_MORPHED;
3079 #endif
3080
3081     /* Is this a FP value? */
3082
3083     bool            isflt = varTypeIsFloating(tree->TypeGet());
3084     unsigned        FPlvlSave;
3085
3086     /* Figure out what kind of a node we have */
3087
3088     genTreeOps      oper = tree->OperGet();
3089     unsigned        kind = tree->OperKind();
3090
3091     /* Assume no fixed registers will be trashed */
3092
3093     regMaskTP       ftreg = RBM_NONE;  // Set of registers that will be used by the subtree
3094     unsigned        level;
3095     int             costEx;
3096     int             costSz;
3097
3098     bool            bRngChk;
3099
3100 #ifdef DEBUG
3101     costEx = -1;
3102     costSz = -1;
3103 #endif
3104
3105     /* Is this a constant or a leaf node? */
3106
3107     if (kind & (GTK_LEAF|GTK_CONST))
3108     {
3109         switch (oper)
3110         {
3111             bool     iconNeedsReloc;
3112
3113 #ifdef _TARGET_ARM_
3114         case GT_CNS_LNG:
3115             costSz = 9;
3116             costEx = 4;
3117             goto COMMON_CNS;
3118
3119         case GT_CNS_STR:
3120             // Uses movw/movt
3121             costSz = 7;
3122             costEx = 3;
3123             goto COMMON_CNS;
3124
3125         case GT_CNS_INT:
3126
3127             // If the constant is a handle then it will need to have a relocation
3128             //  applied to it. 
3129             // Any constant that requires a reloc must use the movw/movt sequence
3130             //
3131             iconNeedsReloc = opts.compReloc && tree->IsIconHandle() && !tree->IsIconHandle(GTF_ICON_FIELD_HDL);
3132
3133             if (iconNeedsReloc || !codeGen->validImmForInstr(INS_mov, tree->gtIntCon.gtIconVal))
3134             {
3135                 // Uses movw/movt
3136                 costSz = 7;
3137                 costEx = 3;
3138             }
3139             else if (((unsigned) tree->gtIntCon.gtIconVal) <= 0x00ff)
3140             {
3141                 // mov  Rd, <const8>
3142                 costSz = 1;
3143                 costEx = 1;
3144             }
3145             else
3146             {
3147                 // Uses movw/mvn
3148                 costSz = 3;
3149                 costEx = 1;
3150             }
3151             goto COMMON_CNS;
3152
3153 #elif defined _TARGET_XARCH_
3154
3155         case GT_CNS_LNG:
3156             costSz = 10;
3157             costEx = 3;
3158             goto COMMON_CNS;
3159
3160         case GT_CNS_STR:
3161             costSz = 4;
3162             costEx = 1;
3163             goto COMMON_CNS;
3164
3165         case GT_CNS_INT:
3166
3167             // If the constant is a handle then it will need to have a relocation
3168             //  applied to it. 
3169             // Any constant that requires a reloc cannot use the small (sign-extended byte) encoding below
3170             //
3171             iconNeedsReloc = opts.compReloc && tree->IsIconHandle() && !tree->IsIconHandle(GTF_ICON_FIELD_HDL);
3172
3173             if (!iconNeedsReloc && (((signed char) tree->gtIntCon.gtIconVal) == tree->gtIntCon.gtIconVal))
3174             {
3175                 costSz = 1;
3176                 costEx = 1;
3177             }
3178 #if defined(_TARGET_AMD64_)
3179             else if (iconNeedsReloc || ((tree->gtIntCon.gtIconVal & 0xFFFFFFFF00000000LL) != 0))
3180             {
3181                 costSz = 10;
3182                 costEx = 3;
3183             }
3184 #endif // _TARGET_AMD64_
3185             else
3186             {
3187                 costSz = 4;
3188                 costEx = 1;
3189             }
3190             goto COMMON_CNS;
3191
3192 #elif defined(_TARGET_ARM64_)
3193         case GT_CNS_LNG:
3194         case GT_CNS_STR:
3195         case GT_CNS_INT:
3196             // TODO-ARM64-NYI: Need cost estimates.
3197             costSz = 1;
3198             costEx = 1;
3199             goto COMMON_CNS;
3200
3201 #else
3202         case GT_CNS_LNG:
3203         case GT_CNS_STR:
3204         case GT_CNS_INT:
3205 #error "Unknown _TARGET_"
3206 #endif
3207
3208 COMMON_CNS:
3209         /*
3210             Note that some code below depends on constants always getting
3211             moved to be the second operand of a binary operator. This is
3212             easily accomplished by giving constants a level of 0, which
3213             we do on the next line. If you ever decide to change this, be
3214             aware that unless you make other arrangements for integer 
3215             constants to be moved, stuff will break.
3216          */
3217
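                 // For example, in 'x + 1' the constant keeps level 0, so it ends up as the
                 // second operand, where codegen can fold it as an immediate.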
3218             level  = 0;
3219             break;
3220
3221         case GT_CNS_DBL:
3222             level = 0;
3223             /* We use fldz and fld1 to load 0.0 and 1.0, but all other  */
3224             /* floating point constants are loaded using an indirection */
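                 /* (0x0000000000000000 and 0x3ff0000000000000 are the IEEE-754 bit patterns of +0.0 and 1.0) */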
3225             if  ((*((__int64 *)&(tree->gtDblCon.gtDconVal)) == 0) ||
3226                  (*((__int64 *)&(tree->gtDblCon.gtDconVal)) == I64(0x3ff0000000000000)))
3227             {
3228                 costEx = 1;
3229                 costSz = 1;
3230             }
3231             else
3232             {
3233                 costEx = IND_COST_EX;
3234                 costSz = 4;
3235             }
3236             break;
3237             
3238         case GT_LCL_VAR:
3239             level = 1;
3240             if (gtIsLikelyRegVar(tree))
3241             {
3242                 costEx = 1;
3243                 costSz = 1;
3244                 /* Sign-extend and zero-extend are more expensive to load */
3245                 if (lvaTable[tree->gtLclVar.gtLclNum].lvNormalizeOnLoad())
3246                 {
3247                     costEx += 1;
3248                     costSz += 1;
3249                 }
3250             }
3251             else
3252             {
3253                 costEx = IND_COST_EX;
3254                 costSz = 2;
3255                 /* Sign-extend and zero-extend are more expensive to load */
3256                 if (varTypeIsSmall(tree->TypeGet()))
3257                 {
3258                     costEx += 1;
3259                     costSz += 1;
3260                 }
3261             }
3262 #if defined(_TARGET_AMD64_)
3263             // increase costSz for floating point locals
3264             if (isflt)
3265             {
3266                 costSz += 1;
3267                 if (!gtIsLikelyRegVar(tree))
3268                 {
3269                     costSz += 1;
3270                 }
3271             }
3272 #endif
3273 #if CPU_LONG_USES_REGPAIR
3274             if (varTypeIsLong(tree->TypeGet()))
3275             {
3276                 costEx *= 2;     // Longs are twice as expensive
3277                 costSz *= 2;
3278             }
3279 #endif
3280             break;
3281
3282         case GT_CLS_VAR:
3283 #ifdef _TARGET_ARM_
3284             // We generate movw/movt/ldr
3285             level  = 1;
3286             costEx = 3 + IND_COST_EX;   // 6
3287             costSz = 4 + 4 + 2;         // 10
3288             break;
3289 #endif
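             // On non-ARM targets GT_CLS_VAR falls through and is costed like GT_LCL_FLD below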
3290         case GT_LCL_FLD:
3291             level = 1;
3292             costEx = IND_COST_EX;
3293             costSz = 4;
3294             if (varTypeIsSmall(tree->TypeGet()))
3295             {
3296                 costEx += 1;
3297                 costSz += 1;
3298             }
3299             break;
3300
3301         case GT_PHI_ARG:
3302         case GT_ARGPLACE:
3303             level  = 0;
3304             costEx = 0;
3305             costSz = 0;
3306             break;
3307
3308         default:
3309             level  = 1;
3310             costEx = 1;
3311             costSz = 1;
3312             break;
3313         }
3314 #if FEATURE_STACK_FP_X87
3315         if (isflt && (oper != GT_PHI_ARG))
3316         {
3317             codeGen->genIncrementFPstkLevel();
3318         }
3319 #endif // FEATURE_STACK_FP_X87
3320         goto DONE;
3321     }
3322
3323     /* Is it a 'simple' unary/binary operator? */
3324
3325     if  (kind & GTK_SMPOP)
3326     {
3327         int             lvlb; // preference for op2
3328         unsigned        lvl2; // scratch variable
3329
3330         GenTreePtr      op1 = tree->gtOp.gtOp1;
3331         GenTreePtr      op2 = tree->gtGetOp2();
3332
3333         costEx = 0;
3334         costSz = 0;
3335
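             // An address mode node may have only an index (no base); normalize so the lone
             // operand is in op1 and is handled by the unary path below.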
3336         if (tree->OperIsAddrMode())
3337         {
3338             if (op1 == nullptr)
3339             {
3340                 op1 = op2;
3341                 op2 = nullptr;
3342             }
3343         }
3344
3345         /* Check for a nullary operator (no operands) */
3346
3347         if (op1 == nullptr)
3348         {
3349             assert(op2 == nullptr);
3350
3351             level    = 0;
3352
3353             goto DONE;
3354         }
3355
3356         /* Is this a unary operator? */
3357
3358         if  (op2 == nullptr)
3359         {
3360             /* Process the operand of the operator */
3361
3362             /* Most Unary ops have costEx of 1 */
3363             costEx = 1;
3364             costSz = 1;
3365
3366             level  = gtSetEvalOrder(op1);
3367             ftreg |= op1->gtRsvdRegs;
3368
3369             /* Special handling for some operators */
3370
3371             switch (oper)
3372             {
3373             case GT_JTRUE:
3374                 costEx = 2;
3375                 costSz = 2;
3376                 break;
3377
3378             case GT_SWITCH:
3379                 costEx = 10;
3380                 costSz =  5;
3381                 break;
3382
3383             case GT_CAST:
3384 #if defined(_TARGET_ARM_)
3385                 costEx = 1;
3386                 costSz = 1;
3387                 if  (isflt || varTypeIsFloating(op1->TypeGet()))
3388                 {
3389                     costEx = 3;
3390                     costSz = 4;
3391                 }
3392 #elif defined(_TARGET_ARM64_)
3393                 costEx = 1;
3394                 costSz = 2;
3395                 if  (isflt || varTypeIsFloating(op1->TypeGet()))
3396                 {
3397                     costEx = 2;
3398                     costSz = 4;
3399                 }
3400 #elif defined(_TARGET_XARCH_)
3401                 costEx = 1;
3402                 costSz = 2;
3403
3404                 if  (isflt || varTypeIsFloating(op1->TypeGet()))
3405                 {
3406                     /* casts involving floats always go through memory */
3407                     costEx = IND_COST_EX * 2;
3408                     costSz = 6;
3409
3410 #if FEATURE_STACK_FP_X87
3411                     if  (isflt != varTypeIsFloating(op1->TypeGet()))
3412                     {
3413                         isflt ? codeGen->genIncrementFPstkLevel()    // Cast from int to float
3414                               : codeGen->genDecrementFPstkLevel();   // Cast from float to int
3415                     }
3416 #endif // FEATURE_STACK_FP_X87
3417                 }
3418 #else
3419 #error "Unknown _TARGET_"
3420 #endif
3421
3422 #if CPU_LONG_USES_REGPAIR
3423                 if (varTypeIsLong(tree->TypeGet()))
3424                 {
3425                     if (varTypeIsUnsigned(tree->TypeGet()))
3426                     {
3427                         /* Cast to unsigned long */
3428                         costEx += 1;
3429                         costSz += 2;
3430                     }
3431                     else
3432                     {
3433                         /* Cast to signed long is slightly more costly */
3434                         costEx += 2;
3435                         costSz += 3;
3436                     }
3437                 }
3438 #endif // CPU_LONG_USES_REGPAIR
3439
3440                 /* Overflow casts are a lot more expensive */
3441                 if (tree->gtOverflow())
3442                 {
3443                     costEx += 6;
3444                     costSz += 6;
3445                 }
3446
3447                 break;
3448
3449
3450             case GT_LIST:
3451             case GT_NOP:
3452                 costEx = 0;
3453                 costSz = 0;
3454                 break;
3455
3456             case GT_INTRINSIC:
3457                 // GT_INTRINSIC intrinsics Sin, Cos, Sqrt, Abs ... have higher costs.
3458                 // TODO: tune these costs per target, as some of these are
3459                 // target intrinsics and would cost less to generate code for.
3460                 switch (tree->gtIntrinsic.gtIntrinsicId)
3461                 {
3462                 default:
3463                     assert(!"missing case for gtIntrinsicId");
3464                     costEx = 12;
3465                     costSz = 12;
3466                     break;
3467
3468                 case CORINFO_INTRINSIC_Sin:
3469                 case CORINFO_INTRINSIC_Cos:
3470                 case CORINFO_INTRINSIC_Sqrt:
3471                 case CORINFO_INTRINSIC_Cosh:
3472                 case CORINFO_INTRINSIC_Sinh:
3473                 case CORINFO_INTRINSIC_Tan:
3474                 case CORINFO_INTRINSIC_Tanh:
3475                 case CORINFO_INTRINSIC_Asin:
3476                 case CORINFO_INTRINSIC_Acos:
3477                 case CORINFO_INTRINSIC_Atan:
3478                 case CORINFO_INTRINSIC_Atan2:
3479                 case CORINFO_INTRINSIC_Log10:
3480                 case CORINFO_INTRINSIC_Pow:
3481                 case CORINFO_INTRINSIC_Exp:
3482                 case CORINFO_INTRINSIC_Ceiling:
3483                 case CORINFO_INTRINSIC_Floor:
3484                 case CORINFO_INTRINSIC_Object_GetType:
3485                     // We give intrinsics a large fixed execution cost because we'd like to CSE
3486                     // them, even if they are implemented by calls. This is different from modeling
3487                     // user calls, since we never CSE user calls.
3488                     costEx = 36;
3489                     costSz = 4;
3490                     break;
3491
3492                 case CORINFO_INTRINSIC_Abs:
3493                     costEx = 5;
3494                     costSz = 15;
3495                     break;
3496
3497                 case CORINFO_INTRINSIC_Round:
3498                     costEx = 3;
3499                     costSz = 4;
3500 #if FEATURE_STACK_FP_X87
3501                     if (tree->TypeGet() == TYP_INT)
3502                     {
3503                         // This is a special case to handle the following
3504                         // optimization: conv.i4(round.d(d)) -> round.i(d)
3505                         codeGen->genDecrementFPstkLevel();
3506                     }
3507 #endif // FEATURE_STACK_FP_X87
3508                     break;
3509                 }
3510                 level++;
3511                 break;
3512
3513             case GT_NOT:
3514             case GT_NEG:
3515                 // We need to ensure that -x is evaluated before x or else
3516                 // we get burned while adjusting genFPstkLevel in x*-x where
3517                 // the rhs x is the last use of the enregistered x.
3518                 //
3519                 // Even in the integer case we want to prefer to
3520                 // evaluate the side without the GT_NEG node, all other things
3521                 // being equal.  Also a GT_NOT requires a scratch register
3522
3523                 level++;
3524                 break;
3525
3526             case GT_ADDR:
3527
3528                 /* If the operand was floating point, pop the value from the stack */
3529
3530 #if FEATURE_STACK_FP_X87
3531                 if (varTypeIsFloating(op1->TypeGet()))
3532                 {
3533                     codeGen->genDecrementFPstkLevel();
3534                 }
3535 #endif // FEATURE_STACK_FP_X87
3536                 costEx = 0;
3537                 costSz = 1;
3538
3539                 // If we have a GT_ADDR of a GT_IND we can just copy the costs from indOp1
3540                 if (op1->OperGet() == GT_IND)
3541                 {
3542                     GenTreePtr indOp1 = op1->gtOp.gtOp1;
3543                     costEx = indOp1->gtCostEx;
3544                     costSz = indOp1->gtCostSz;
3545                 }
3546                 break;
3547
3548             case GT_ARR_LENGTH:
3549                 level++;
3550
3551                 /* Array length should cost the same as an indirection, which has a costEx of IND_COST_EX */
3552                 costEx = IND_COST_EX - 1;
3553                 costSz = 2;
3554                 break;
3555
3556             case GT_MKREFANY:
3557             case GT_OBJ:
3558                 // We estimate the cost of a GT_OBJ or GT_MKREFANY to be two loads (GT_INDs)
3559                 costEx = 2*IND_COST_EX;
3560                 costSz = 2*2;
3561                 break;
3562
3563             case GT_BOX:
3564                 // We estimate the cost of a GT_BOX to be two stores (GT_INDs)
3565                 costEx = 2*IND_COST_EX;
3566                 costSz = 2*2;
3567                 break;
3568
3569             case GT_IND:
3570
3571                 /* An indirection should always have a non-zero level.
3572                  * Only constant leaf nodes have level 0.
3573                  */
3574
3575                 if (level == 0)
3576                     level = 1;
3577
3578                 /* Indirections have a costEx of IND_COST_EX */
3579                 costEx = IND_COST_EX;
3580                 costSz = 2;
3581
3582                 /* If we have to sign-extend or zero-extend, bump the cost */
3583                 if (varTypeIsSmall(tree->TypeGet()))
3584                 {
3585                     costEx += 1;
3586                     costSz += 1;
3587                 }
3588
3589                 if (isflt)
3590                 {
3591 #if FEATURE_STACK_FP_X87
3592                     /* Indirect loads of FP values push a new value on the FP stack */
3593                     codeGen->genIncrementFPstkLevel();
3594 #endif // FEATURE_STACK_FP_X87
3595                     if (tree->TypeGet() == TYP_DOUBLE)
3596                         costEx += 1;
3597 #ifdef _TARGET_ARM_
3598                     costSz += 2;
3599 #endif // _TARGET_ARM_
3600                 }
3601
3602                 /* Can we form an addressing mode with this indirection? */
3603
3604                 if  (op1->gtOper == GT_ADD)
3605                 {
3606                     bool            rev;
3607 #if SCALED_ADDR_MODES
3608                     unsigned        mul;
3609 #endif
3610                     unsigned        cns;
3611                     GenTreePtr      base;
3612                     GenTreePtr      idx;
3613
3614                     /* See if we can form a complex addressing mode? */
3615
3616                     GenTreePtr      addr = op1;
3617                     if  (codeGen->genCreateAddrMode(addr,             // address
3618                                            0,               // mode
3619                                            false,           // fold
3620                                            RBM_NONE,        // reg mask
3621                                            &rev,            // reverse ops
3622                                            &base,           // base addr
3623                                            &idx,            // index val
3624 #if SCALED_ADDR_MODES
3625                                            &mul,            // scaling
3626 #endif
3627                                            &cns,            // displacement
3628                                            true))           // don't generate code
3629                     {
3630                         // We can form a complex addressing mode, so mark each of the interior
3631                         // nodes with GTF_ADDRMODE_NO_CSE and calculate a more accurate cost.
3632
3633                         addr->gtFlags |= GTF_ADDRMODE_NO_CSE;
3634 #ifdef _TARGET_XARCH_
3635                         // addrmodeCount is the count of items that we used to form 
3636                         // an addressing mode.  The maximum value is 4 when we have
3637                         // all of these:   { base, idx, cns, mul }
3638                         //
3639                         unsigned addrmodeCount = 0;
3640                         if (base)
3641                         {
3642                             costEx += base->gtCostEx;
3643                             costSz += base->gtCostSz;
3644                             addrmodeCount++;
3645                         }
3646
3647                         if (idx)
3648                         {
3649                             costEx += idx->gtCostEx;
3650                             costSz += idx->gtCostSz;
3651                             addrmodeCount++;
3652                         }
3653
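                             // A displacement that fits in a sign-extended byte encodes in 1 byte;
                             // otherwise it takes 4 bytes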
3654                         if (cns)
3655                         {
3656                             if (((signed char)cns) == ((int)cns))
3657                                 costSz += 1;
3658                             else
3659                                 costSz += 4;
3660                             addrmodeCount++;
3661                         }
3662                         if (mul)
3663                         {
3664                             addrmodeCount++;
3665                         }
3666                         // When we form a complex addressing mode we can reduce the costs
3667                         // associated with the interior GT_ADD and GT_LSH nodes:
3668                         //
3669                         //                      GT_ADD      -- reduce this interior GT_ADD by (-3,-3)
3670                         //                      /   \       --
3671                         //                  GT_ADD  'cns'   -- reduce this interior GT_ADD by (-2,-2)
3672                         //                  /   \           --
3673                         //               'base'  GT_LSH     -- reduce this interior GT_LSH by (-1,-1)
3674                         //                      /   \       --
3675                         //                   'idx'  'mul'
3676                         //
3677                         if (addrmodeCount > 1)
3678                         {
3679                             // The number of interior GT_ADD and GT_LSH nodes will always be one less than addrmodeCount
3680                             //
3681                             addrmodeCount--;
3682
3683                             GenTreePtr tmp = addr;
3684                             while (addrmodeCount > 0)
3685                             {
3686                                 // decrement the gtCosts for the interior GT_ADD or GT_LSH node by the remaining addrmodeCount
3687                                 tmp->SetCosts(tmp->gtCostEx - addrmodeCount, tmp->gtCostSz - addrmodeCount);
3688
3689                                 addrmodeCount--;
3690                                 if (addrmodeCount > 0)
3691                                 {
3692                                     GenTreePtr tmpOp1 = tmp->gtOp.gtOp1;
3693                                     GenTreePtr tmpOp2 = tmp->gtGetOp2();
3694                                     assert(tmpOp2 != nullptr);
3695
3696                                     if ((tmpOp1 != base) && (tmpOp1->OperGet() == GT_ADD))
3697                                     {
3698                                         tmp = tmpOp1;
3699                                     }
3700                                     else if (tmpOp2->OperGet() == GT_LSH)
3701                                     {
3702                                         tmp = tmpOp2;
3703                                     }
3704                                     else if (tmpOp1->OperGet() == GT_LSH)
3705                                     {
3706                                         tmp = tmpOp1;
3707                                     }
3708                                     else if (tmpOp2->OperGet() == GT_ADD)
3709                                     {
3710                                         tmp = tmpOp2;
3711                                     }
3712                                     else
3713                                     {
3714                                         // We can very rarely encounter a tree that has a GT_COMMA node 
3715                                         // that is difficult to walk, so we just early out without decrementing.
3716                                         addrmodeCount = 0;
3717                                     }
3718                                 }
3719                             }
3720                         }
3721 #elif defined _TARGET_ARM_
3722                         if (base)
3723                         {
3724                             costEx += base->gtCostEx;
3725                             costSz += base->gtCostSz;
3726                             if ((base->gtOper == GT_LCL_VAR) &&
3727                                 ((idx==NULL) || (cns==0)))
3728                             {
3729                                 costSz -= 1;
3730                             }
3731                         }
3732
3733                         if (idx)
3734                         {
3735                             costEx += idx->gtCostEx;
3736                             costSz += idx->gtCostSz;
3737                             if (mul > 0)
3738                             {
3739                                 costSz += 2;
3740                             }
3741                         }
3742
3743                         if (cns)
3744                         {
3745                             if (cns >= 128)         // small offsets fit into a 16-bit instruction
3746                             {
3747                                 if (cns < 4096)     // medium offsets require a 32-bit instruction
3748                                 {
3749                                     if (!isflt)
3750                                         costSz += 2;
3751                                 }
3752                                 else 
3753                                 {
3754                                     costEx += 2;     // Very large offsets require movw/movt instructions
3755                                     costSz += 8;
3756                                 }
3757                             }
3758                         }
3759 #elif defined _TARGET_ARM64_
3760                         if (base)
3761                         {
3762                             costEx += base->gtCostEx;
3763                             costSz += base->gtCostSz;
3764                         }
3765
3766                         if (idx)
3767                         {
3768                             costEx += idx->gtCostEx;
3769                             costSz += idx->gtCostSz;
3770                         }
3771
3772                         if (cns != 0)
3773                         {
3774                             if (cns >= (4096 * genTypeSize(tree->TypeGet())))
3775                             {
3776                                 costEx += 1;
3777                                 costSz += 4;
3778                             }
3779                         } 
3780 #else
3781 #error "Unknown _TARGET_"
3782 #endif
3783
3784                         assert(addr->gtOper == GT_ADD);
3785                         assert(!addr->gtOverflow());
3786                         assert(op2 == NULL);
3787                         assert(mul != 1);
3788
3789                         // If we have an addressing mode, we have one of:
3790                         //   [base             + cns]
3791                         //   [       idx * mul      ]  // mul >= 2, else we would use base instead of idx
3792                         //   [       idx * mul + cns]  // mul >= 2, else we would use base instead of idx
3793                         //   [base + idx * mul      ]  // mul can be 0, 2, 4, or 8
3794                         //   [base + idx * mul + cns]  // mul can be 0, 2, 4, or 8
3795                         // Note that mul == 0 is semantically equivalent to mul == 1.
3796                         // Note that cns can be zero.
3797 #if SCALED_ADDR_MODES
3798                         assert((base != NULL) || (idx != NULL && mul >= 2));
3799 #else
3800                         assert(base != NULL);
3801 #endif
3802
3803                         INDEBUG(GenTreePtr op1Save = addr);
3804
3805                         /* Walk addr looking for non-overflow GT_ADDs */
3806                         gtWalkOp(&addr, &op2, base, false);
3807
3808                         // addr and op2 are now children of the root GT_ADD of the addressing mode
3809                         assert(addr != op1Save);
3810                         assert(op2 != NULL);
3811
3812                         /* Walk addr looking for non-overflow GT_ADDs of constants */
3813                         gtWalkOp(&addr, &op2, NULL, true);
3814
3815                         // TODO-Cleanup: It seems very strange that we might walk down op2 now, even though the prior
3816                         //           call to gtWalkOp() may have altered op2.
3817
3818                         /* Walk op2 looking for non-overflow GT_ADDs of constants */
3819                         gtWalkOp(&op2, &addr, NULL, true);
3820
3821                         // OK we are done walking the tree
3822                         // Now assert that addr and op2 correspond with base and idx
3823                         // in one of the several acceptable ways.
3824
3825                         // Note that sometimes addr/op2 is equal to idx/base
3826                         // and other times addr/op2 is a GT_COMMA node with
3827                         // an effective value that is idx/base
3828
3829                         if (mul > 1)
3830                         {
3831                             if ((addr != base) && (addr->gtOper == GT_LSH))
3832                             {
3833                                 addr->gtFlags |= GTF_ADDRMODE_NO_CSE;
3834                                 if (addr->gtOp.gtOp1->gtOper == GT_MUL)
3835                                 {
3836                                     addr->gtOp.gtOp1->gtFlags |= GTF_ADDRMODE_NO_CSE;
3837                                 }
3838                                 assert((base == NULL) || (op2 == base) || (op2->gtEffectiveVal() == base->gtEffectiveVal()) ||
3839                                        (gtWalkOpEffectiveVal(op2) == gtWalkOpEffectiveVal(base)));
3840                             }
3841                             else
3842                             {
3843                                 assert(op2);
3844                                 assert(op2->gtOper == GT_LSH || op2->gtOper == GT_MUL);
3845                                 op2->gtFlags |= GTF_ADDRMODE_NO_CSE;
3846                                 // We may have eliminated multiple shifts and multiplies in the addressing mode,
3847                                 // so navigate down through them to get to "idx".
3848                                 GenTreePtr op2op1 = op2->gtOp.gtOp1;
3849                                 while ((op2op1->gtOper == GT_LSH || op2op1->gtOper == GT_MUL) && op2op1 != idx)
3850                                 {
3851                                     op2op1->gtFlags |= GTF_ADDRMODE_NO_CSE;
3852                                     op2op1 = op2op1->gtOp.gtOp1;
3853                                 }
3854                                 assert(addr->gtEffectiveVal() == base);
3855                                 assert(op2op1 == idx);
3856                             }
3857                         }
3858                         else
3859                         {
3860                             assert(mul == 0);
3861
3862                             if ((addr == idx) || (addr->gtEffectiveVal() == idx))
3863                             {
3864                                 if (idx != NULL)
3865                                 {
3866                                     if ((addr->gtOper == GT_MUL) || (addr->gtOper == GT_LSH))
3867                                     {
3868                                         if ((addr->gtOp.gtOp1->gtOper == GT_NOP) || 
3869                                             (addr->gtOp.gtOp1->gtOper == GT_MUL && addr->gtOp.gtOp1->gtOp.gtOp1->gtOper == GT_NOP))
3870                                         {
3871                                             addr->gtFlags |= GTF_ADDRMODE_NO_CSE;
3872                                             if (addr->gtOp.gtOp1->gtOper == GT_MUL)
3873                                                 addr->gtOp.gtOp1->gtFlags |= GTF_ADDRMODE_NO_CSE;
3874                                         }
3875                                     }
3876                                 }
3877                                 assert((op2 == base) || (op2->gtEffectiveVal() == base));
3878                             }
3879                             else if ((addr == base) || (addr->gtEffectiveVal() == base))
3880                             {
3881                                 if (idx != NULL)
3882                                 {
3883                                     assert(op2);
3884                                     if ((op2->gtOper == GT_MUL) || (op2->gtOper == GT_LSH))
3885                                     {
3886                                         if ((op2->gtOp.gtOp1->gtOper == GT_NOP) ||
3887                                             (op2->gtOp.gtOp1->gtOper == GT_MUL && op2->gtOp.gtOp1->gtOp.gtOp1->gtOper == GT_NOP))
3888                                         {
3889                                             // assert(bRngChk);
3890                                             op2->gtFlags |= GTF_ADDRMODE_NO_CSE;
3891                                             if (op2->gtOp.gtOp1->gtOper == GT_MUL)
3892                                                 op2->gtOp.gtOp1->gtFlags |= GTF_ADDRMODE_NO_CSE;
3893                                         }
3894                                     }
3895                                     assert((op2 == idx) || (op2->gtEffectiveVal() == idx));
3896                                 }
3897                             }
3898                             else
3899                             {
3900                                 // addr isn't base or idx. Is this possible? Or should there be an assert?
3901                             }
3902                         }
3903                         goto DONE;
3904
3905                     }   // end  if  (genCreateAddrMode(...))
3906
3907                 }   // end if  (op1->gtOper == GT_ADD)
3908                 else if (gtIsLikelyRegVar(op1))
3909                 {
3910                     /* Indirection of an enregistered LCL_VAR, don't increase costEx/costSz */
3911                     goto DONE;
3912                 }
3913 #ifdef _TARGET_XARCH_
3914                 else if (op1->IsCnsIntOrI())
3915                 {
3916                     // Indirection of a CNS_INT: subtract 1 from costEx,
3917                     // which makes costEx 3 for x86 and 4 for amd64
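                         // (the constant address can be folded into the memory operand, so it
                         // needn't be loaded separately)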
3918                     //
3919                     costEx  += (op1->gtCostEx - 1);
3920                     costSz  += op1->gtCostSz;
3921                     goto DONE;
3922                 }
3923 #endif
3924                 break;
3925
3926             default:
3927                 break;
3928             }
3929             costEx  += op1->gtCostEx;
3930             costSz  += op1->gtCostSz;
3931             goto DONE;
3932         }
3933
3934         /* Binary operator - check for certain special cases */
3935
3936         lvlb = 0;
3937
3938         /* Default Binary ops have a cost of 1,1 */
3939         costEx = 1;
3940         costSz = 1;
3941
3942 #ifdef _TARGET_ARM_
3943         if (isflt)
3944         {
3945             costSz += 2;
3946         }
3947 #endif
3948 #ifndef _TARGET_64BIT_
3949         if (varTypeIsLong(op1->TypeGet()))
3950         {
3951             /* Operations on longs are more expensive */
3952             costEx += 3;
3953             costSz += 3;
3954         }       
3955 #endif
3956         switch (oper)
3957         {
3958         case GT_MOD:
3959         case GT_UMOD:
3960
3961             /* Modulo by a power of 2 is easy */
3962
3963             if  (op2->IsCnsIntOrI())
3964             {
3965                 size_t    ival = op2->gtIntConCommon.IconValue();
3966
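                     // A positive value equal to its lowest set bit is a power of 2; modulo by a
                     // power of 2 lowers to a cheap mask, so keep the default (cheap) cost for it.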
3967                 if  (ival > 0 && ival == genFindLowestBit(ival))
3968                     break;
3969             }
3970
3971             __fallthrough;
3972
3973         case GT_DIV:
3974         case GT_UDIV:
3975
3976             if  (isflt)
3977             {
3978                 /* fp division is very expensive to execute */
3979                 costEx = 36;  // TYP_DOUBLE
3980                 costSz += 3;
3981             }
3982             else
3983             {
3984                 /* integer division is also very expensive */
3985                 costEx = 20;
3986                 costSz += 2;
3987
3988                 // Encourage the first operand to be evaluated (into EAX/EDX) first
3989                 lvlb -= 3;
3990
3991 #ifdef _TARGET_XARCH_
3992                 // the idiv and div instructions require EDX:EAX
3993                 ftreg |= RBM_EAX|RBM_EDX;
3994 #endif
3995             }
3996             break;
3997
3998         case GT_MUL:
3999
4000             if  (isflt)
4001             {
4002                 /* FP multiplication instructions are more expensive */
4003                 costEx += 4;
4004                 costSz += 3;
4005             }
4006             else
4007             {
4008                 /* Integer multiplication instructions are more expensive */
4009                 costEx += 3;
4010                 costSz += 2;
4011
4012                 if (tree->gtOverflow())
4013                 {
4014                     /* Overflow checks are more expensive */
4015                     costEx += 3;
4016                     costSz += 3;
4017                 }
4018
4019 #ifdef _TARGET_X86_
4020                 if  ((tree->gtType == TYP_LONG) || tree->gtOverflow())
4021                 {
4022                     /* We use imulEAX for TYP_LONG and overflow multiplications */
4023                     // Encourage the first operand to be evaluated (into EAX/EDX) first
4024                     lvlb -= 4;
4025
4026                     // the imulEAX instruction on x86 requires EDX:EAX
4027                     ftreg |= (RBM_EAX|RBM_EDX);
4028
4029                     /* The 64-bit imul instruction costs more */
4030                     costEx += 4;
4031                 }
4032 #endif //  _TARGET_X86_
4033             }
4034             break;
4035
4036         case GT_ADD:
4037         case GT_SUB:
4038         case GT_ASG_ADD:
4039         case GT_ASG_SUB:
4040
4041             if  (isflt)
4042             {
4043                 /* FP instructions are a bit more expensive */
4044                 costEx += 4;
4045                 costSz += 3;
4046                 break;
4047             }
4048
4049             /* Overflow checks are more expensive */
4050             if (tree->gtOverflow())
4051             {
4052                 costEx += 3;
4053                 costSz += 3;
4054             }
4055             break;
4056
4057
4058         case GT_COMMA:
4059
4060             /* Comma tosses the result of the left operand */
4061             gtSetEvalOrderAndRestoreFPstkLevel(op1);
4062             level = gtSetEvalOrder(op2);
4063
4064             ftreg |= op1->gtRsvdRegs|op2->gtRsvdRegs;
4065
4066             /* GT_COMMA cost is the sum of op1 and op2 costs */
4067             costEx = (op1->gtCostEx + op2->gtCostEx);
4068             costSz = (op1->gtCostSz + op2->gtCostSz);
4069
4070             goto DONE;
4071
4072         case GT_COLON:
4073
4074             level = gtSetEvalOrderAndRestoreFPstkLevel(op1);
4075             lvl2  = gtSetEvalOrder(op2);
4076
4077             if  (level < lvl2)
4078                  level = lvl2;
4079             else if  (level == lvl2)
4080                  level += 1;
4081
4082             ftreg |= op1->gtRsvdRegs|op2->gtRsvdRegs;
4083             costEx = op1->gtCostEx + op2->gtCostEx;
4084             costSz = op1->gtCostSz + op2->gtCostSz;
4085
4086             goto DONE;
4087
4088         default:
4089             break;
4090         }
4091
4092         /* Assignments need a bit of special handling */
4093
4094         if  (kind & GTK_ASGOP)
4095         {
4096             /* Process the target */
4097
4098             level = gtSetEvalOrder(op1);
4099
4100 #if FEATURE_STACK_FP_X87
4101
4102             /* If assigning an FP value, the target won't get pushed */
4103
4104             if  (isflt && !tree->IsPhiDefn())
4105             {
4106                 op1->gtFPlvl--;
4107                 codeGen->genDecrementFPstkLevel();
4108             }
4109
4110 #endif // FEATURE_STACK_FP_X87
4111
4112             if (gtIsLikelyRegVar(op1))
4113             {
4114                 assert(lvlb == 0);
4115                 lvl2 = gtSetEvalOrder(op2);
4116                 if (oper != GT_ASG)
4117                     ftreg |= op2->gtRsvdRegs;
4118
4119                 /* Assignment to an enregistered LCL_VAR */
4120                 costEx = op2->gtCostEx;
4121                 costSz = max(3, op2->gtCostSz);  // 3 is an estimate for a reg-reg assignment 
4122                 goto DONE_OP1_AFTER_COST;
4123             }
4124             else if (oper != GT_ASG) 
4125             {
4126                 // Assign-Op instructions read and write op1
4127                 //
4128                 costEx += op1->gtCostEx;
4129 #ifdef _TARGET_ARM_
4130                 costSz += op1->gtCostSz;
4131 #endif
4132             }
4133
4134             goto DONE_OP1;
4135         }
4136
4137         /* Process the sub-operands */
4138
4139         level  = gtSetEvalOrder(op1);
4140         if (lvlb < 0)
4141         {
4142             level -= lvlb;      // lvlb is negative, so this increases level
4143             lvlb   = 0;
4144         }
4145
4146     DONE_OP1:
4147         assert(lvlb >= 0);
4148         lvl2    = gtSetEvalOrder(op2) + lvlb;
4149         ftreg  |= op1->gtRsvdRegs;
4150         if (oper != GT_ASG)
4151             ftreg |= op2->gtRsvdRegs;
4152
4153         costEx += (op1->gtCostEx + op2->gtCostEx);
4154         costSz += (op1->gtCostSz + op2->gtCostSz);
4155
4156     DONE_OP1_AFTER_COST:
4157         /*
4158             Binary FP operators pop 2 operands and produce 1 result;
4159             FP comparisons pop 2 operands and produce 0 results;
4160             assignments consume 1 value and don't produce anything.
4161          */
4162
4163 #if FEATURE_STACK_FP_X87
4164         if  (isflt && !tree->IsPhiDefn())
4165         {
4166             assert(oper != GT_COMMA);
4167             codeGen->genDecrementFPstkLevel();
4168         }
4169 #endif // FEATURE_STACK_FP_X87
4170
4171         bool bReverseInAssignment = false;
4172         if  (kind & GTK_ASGOP)
4173         {
4174             GenTreePtr op1Val = op1;
4175
4176             if (tree->gtOper == GT_ASG)
4177             {
4178                 // Skip over the GT_IND/GT_ADDR tree (if one exists)
4179                 //
4180                 if ((op1->gtOper == GT_IND) && (op1->gtOp.gtOp1->gtOper == GT_ADDR))
4181                 {
4182                     op1Val = op1->gtOp.gtOp1->gtOp.gtOp1;
4183                 }
4184             }
4185
4186             switch (op1Val->gtOper)
4187             {
4188             case GT_IND:
4189
4190                 // If we have any side effects on the GT_IND child node
4191                 // we have to evaluate op1 first
4192                 if  (op1Val->gtOp.gtOp1->gtFlags & GTF_ALL_EFFECT)
4193                     break;
4194
4195                 // In case op2 assigns to a local var that is used in op1Val, we have to evaluate op1Val first.
4196                 if  (op2->gtFlags & GTF_ASG)
4197                     break;
4198
4199                 // If op2 is simple then evaluate op1 first
4200
4201                 if (op2->OperKind() & GTK_LEAF)
4202                     break;
4203
4204                 // fall through and set GTF_REVERSE_OPS
4205
4206             case GT_LCL_VAR:
4207             case GT_LCL_FLD:
4208
4209                 // We evaluate op2 before op1
4210                 bReverseInAssignment = true;
4211                 tree->gtFlags |= GTF_REVERSE_OPS;
4212                 break;
4213
4214             default:
4215                 break;
4216             }
4217         }
4218         else if (kind & GTK_RELOP)
4219         {
4220             /* Float compares remove both operands from the FP stack */
4221             /* Also FP comparison uses EAX for flags */
4222
4223             if  (varTypeIsFloating(op1->TypeGet()))
4224             {
4225 #if FEATURE_STACK_FP_X87
4226                 codeGen->genDecrementFPstkLevel(2);
4227 #endif // FEATURE_STACK_FP_X87
4228 #ifdef _TARGET_XARCH_
4229                 ftreg  |= RBM_EAX;
4230 #endif
4231                 level++; lvl2++;
4232             }
4233 #if CPU_LONG_USES_REGPAIR
4234             if (varTypeIsLong(op1->TypeGet()))
4235             {
4236                 costEx *= 2;     // Longs are twice as expensive
4237                 costSz *= 2;
4238             } 
4239 #endif
4240             if ((tree->gtFlags & GTF_RELOP_JMP_USED) == 0)
4241             {
4242                 /* Using a setcc instruction is more expensive */
4243                 costEx += 3;
4244             }
4245         }
4246
4247         /* Check for other interesting cases */
4248
4249         switch (oper)
4250         {
4251         case GT_LSH:
4252         case GT_RSH:
4253         case GT_RSZ:
4254         case GT_ROL:
4255         case GT_ROR:
4256         case GT_ASG_LSH:
4257         case GT_ASG_RSH:
4258         case GT_ASG_RSZ:
4259
4260             /* Variable sized shifts are more expensive and use REG_SHIFT */
4261
4262             if  (!op2->IsCnsIntOrI())
4263             {
4264                 costEx += 3;
4265                 if (REG_SHIFT != REG_NA)
4266                 {
4267                     ftreg |= RBM_SHIFT;
4268                 }
4269
4270 #ifndef _TARGET_64BIT_
4271                 // Variable sized LONG shifts require the use of a helper call
4272                 // 
4273                 if  (tree->gtType == TYP_LONG)
4274                 {
4275                     level  += 5;
4276                     lvl2   += 5;
4277                     costEx += 3 * IND_COST_EX;
4278                     costSz += 4;
4279                     ftreg  |= RBM_CALLEE_TRASH;
4280                 }
4281 #endif // !_TARGET_64BIT_
4282
4283             }
4284             break;
4285
4286         case GT_INTRINSIC:
4287
4288             switch (tree->gtIntrinsic.gtIntrinsicId)
4289             {
4290             case CORINFO_INTRINSIC_Atan2:
4291             case CORINFO_INTRINSIC_Pow:
4292                 // These math intrinsics are actually implemented by user calls.
4293                 // Increase the Sethi 'complexity' by two to reflect the argument 
4294                 // register requirement.
4295                 level += 2;
4296                 break;
4297             default:
4298                 assert(!"Unknown binary GT_INTRINSIC operator");
4299                 break;
4300             }
4301
4302             break;
4303
4304         default:
4305             break;
4306         }
4307
4308         /* We need to evaluate constants later as many places in codegen
4309            can't handle op1 being a constant. This is normally naturally
4310            enforced as constants have the lowest level of 0. However,
4311            sometimes we end up with a tree like "cns1 < nop(cns2)". In
4312            such cases, both sides have a level of 0. So encourage constants
4313            to be evaluated last in such cases */
4314
4315         if ((level == 0) && (level == lvl2) &&
4316             (op1->OperKind() & GTK_CONST)   &&
4317             (tree->OperIsCommutative() || tree->OperIsCompare()))
4318         {
4319             lvl2++;
4320         }
4321
4322         /* We try to swap operands if the second one is more expensive */
4323         bool tryToSwap;
4324         GenTreePtr opA,opB;
4325
4326         if (tree->gtFlags & GTF_REVERSE_OPS)
4327         {
4328             opA = op2;
4329             opB = op1;
4330         }
4331         else
4332         {
4333             opA = op1;
4334             opB = op2;
4335         }
4336         
4337         if (fgOrder == FGOrderLinear)
4338         {
4339             // Don't swap anything if we're in linear order; we're really just interested in the costs.
4340             tryToSwap = false;
4341         }
4342         else if (bReverseInAssignment)
4343         {
4344             // Assignments are special; we want the GTF_REVERSE_OPS flag,
4345             // so if it was possible to set it, that was done above.
4346             tryToSwap = false;
4347         }
4348         else
4349         {
4350             if (tree->gtFlags & GTF_REVERSE_OPS)
4351             {
4352                 tryToSwap = (level > lvl2);
4353             }
4354             else
4355             {
4356                 tryToSwap = (level < lvl2);
4357             }
4358             
4359             // Try to force extra swapping when in the stress mode:
4360             if (compStressCompile(STRESS_REVERSE_FLAG, 60) &&             
4361                 ((tree->gtFlags & GTF_REVERSE_OPS) == 0)   &&           
4362                 ((op2->OperKind() & GTK_CONST) == 0)           )          
4363             {
4364                 tryToSwap = true;
4365             }
4366
4367         }
4368
4369         if (tryToSwap)
4370         {
4371             bool canSwap = gtCanSwapOrder(opA, opB);
4372
4373             if  (canSwap)
4374             {
4375                 /* Can we swap the order by commuting the operands? */
4376
4377                 switch (oper)
4378                 {
4379                 case GT_EQ:
4380                 case GT_NE:
4381                 case GT_LT:
4382                 case GT_LE:
4383                 case GT_GE:
4384                 case GT_GT:
4385                     if (GenTree::SwapRelop(oper) != oper)
4386                     {
4387                         // SetOper will obliterate the VN for the underlying expression.
4388                         // If we're in VN CSE phase, we don't want to lose that information, 
4389                         // so save the value numbers and put them back after the SetOper.
4390                         ValueNumPair vnp = tree->gtVNPair;
4391                         tree->SetOper(GenTree::SwapRelop(oper));
4392                         if (optValnumCSE_phase)
4393                         {
4394                             tree->gtVNPair = vnp;
4395                         }
4396                     }
4397
4398                     __fallthrough;
4399
4400                 case GT_ADD:
4401                 case GT_MUL:
4402
4403                 case GT_OR:
4404                 case GT_XOR:
4405                 case GT_AND:
4406
4407                     /* Swap the operands */
4408
4409                     tree->gtOp.gtOp1 = op2;
4410                     tree->gtOp.gtOp2 = op1;
4411
4412 #if FEATURE_STACK_FP_X87
4413                     /* We may have to recompute FP levels */
4414                     if  (op1->gtFPlvl || op2->gtFPlvl)
4415                         gtFPstLvlRedo = true;
4416 #endif // FEATURE_STACK_FP_X87
4417                     break;
4418
4419                 case GT_QMARK:
4420                 case GT_COLON:
4421                 case GT_MKREFANY:
4422                     break;
4423  
4424                 case GT_LIST:
4425                     break;
4426
4427                 case GT_SUB:
4428 #ifdef LEGACY_BACKEND
4429                     // For LSRA we require that LclVars be "evaluated" just prior to their use,
4430                     // so that if they must be reloaded, it is done at the right place.
4431                     // This means that we allow reverse evaluation for all BINOPs.
4432                     // (Note that this doesn't affect the order of the operands in the instruction).
4433                     if  (!isflt)
4434                         break;
4435 #endif // LEGACY_BACKEND
4436
4437                     __fallthrough;
4438
4439                 default:
4440
4441                     /* Mark the operands' evaluation order to be swapped */
4442                     if (tree->gtFlags & GTF_REVERSE_OPS)
4443                     {
4444                         tree->gtFlags &= ~GTF_REVERSE_OPS;
4445                     }
4446                     else
4447                     {
4448                         tree->gtFlags |= GTF_REVERSE_OPS;
4449                     }
4450
4451 #if FEATURE_STACK_FP_X87
4452                     /* We may have to recompute FP levels */
4453                     if  (op1->gtFPlvl || op2->gtFPlvl)
4454                         gtFPstLvlRedo = true;
4455 #endif // FEATURE_STACK_FP_X87
4456
4457                     break;
4458                 }
4459             }
4460         }
4461
4462         /* Swap the level counts */
4463         if (tree->gtFlags & GTF_REVERSE_OPS)
4464         {
4465             unsigned tmpl;
4466
4467             tmpl = level;
4468                    level = lvl2;
4469                            lvl2 = tmpl;
4470         }
4471
4472         /* Compute the Sethi number for this binary operator */
4473
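             // Sethi-Ullman style numbering: a level-0 operand (leaf/constant) contributes nothing,
             // and when both operands need the same level the combined tree needs one more.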
4474         if  (level < 1)
4475         {
4476             level  = lvl2;
4477         }
4478         else if  (level == lvl2)
4479         {
4480             level += 1;
4481         }
4482
4483         goto DONE;
4484     }
4485
4486     /* See what kind of a special operator we have here */
4487
4488     switch  (oper)
4489     {
4490         unsigned lvl2; // Scratch variable
4491
4492     case GT_CALL:
4493
4494         assert(tree->gtFlags & GTF_CALL);
4495
4496         level  = 0;
4497         costEx = 5;
4498         costSz = 2;
4499
4500         /* Evaluate the 'this' argument, if present */
4501
4502         if  (tree->gtCall.gtCallObjp)
4503         {
4504             GenTreePtr     thisVal = tree->gtCall.gtCallObjp;
4505
4506             lvl2   = gtSetEvalOrder(thisVal);
4507             if  (level < lvl2)   level = lvl2;
4508             costEx += thisVal->gtCostEx;
4509             costSz += thisVal->gtCostSz + 1;
4510             ftreg  |= thisVal->gtRsvdRegs;
4511         }
4512
4513         /* Evaluate the arguments, right to left */
4514
4515         if  (tree->gtCall.gtCallArgs)
4516         {
4517 #if FEATURE_STACK_FP_X87
4518             FPlvlSave = codeGen->genGetFPstkLevel();
4519 #endif // FEATURE_STACK_FP_X87
4520             lvl2  = gtSetListOrder(tree->gtCall.gtCallArgs, false);
4521             if  (level < lvl2)   level = lvl2;
4522             costEx += tree->gtCall.gtCallArgs->gtCostEx;
4523             costSz += tree->gtCall.gtCallArgs->gtCostSz;
4524             ftreg  |= tree->gtCall.gtCallArgs->gtRsvdRegs;
4525 #if FEATURE_STACK_FP_X87
4526             codeGen->genResetFPstkLevel(FPlvlSave);
4527 #endif // FEATURE_STACK_FP_X87
4528         }
4529
4530         /* Evaluate the temp register arguments list
4531          * This is a "hidden" list and its only purpose is to
4532          * extend the life of temps until we make the call */
4533
4534         if  (tree->gtCall.gtCallLateArgs)
4535         {
4536 #if FEATURE_STACK_FP_X87
4537             FPlvlSave = codeGen->genGetFPstkLevel();
4538 #endif // FEATURE_STACK_FP_X87
4539             lvl2  = gtSetListOrder(tree->gtCall.gtCallLateArgs, true);
4540             if  (level < lvl2)   level = lvl2;
4541             costEx += tree->gtCall.gtCallLateArgs->gtCostEx;
4542             costSz += tree->gtCall.gtCallLateArgs->gtCostSz;
4543             ftreg  |= tree->gtCall.gtCallLateArgs->gtRsvdRegs;
4544 #if FEATURE_STACK_FP_X87
4545             codeGen->genResetFPstkLevel(FPlvlSave);
4546 #endif // FEATURE_STACK_FP_X87
4547         }
4548
4549         if  (tree->gtCall.gtCallType == CT_INDIRECT)
4550         {
4551             // pinvoke-calli cookie is a constant, or constant indirection
4552             assert(tree->gtCall.gtCallCookie == NULL ||
4553                    tree->gtCall.gtCallCookie->gtOper == GT_CNS_INT ||
4554                    tree->gtCall.gtCallCookie->gtOper == GT_IND);
4555             
4556             GenTreePtr     indirect = tree->gtCall.gtCallAddr;
4557
4558             lvl2 = gtSetEvalOrder(indirect);
4559             if  (level < lvl2)   level = lvl2;
4560             costEx += indirect->gtCostEx + IND_COST_EX;
4561             costSz += indirect->gtCostSz;
4562             ftreg  |= indirect->gtRsvdRegs;
4563         }
4564         else
4565         {
4566 #ifdef _TARGET_ARM_
4567             if ((tree->gtFlags & GTF_CALL_VIRT_KIND_MASK) == GTF_CALL_VIRT_STUB)
4568             {
4569                 // We generate movw/movt/ldr
4570                 costEx += (1 + IND_COST_EX);
4571                 costSz += 8;
4572                 if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT)
4573                 {
4574                     // Must use R12 for the ldr target -- REG_JUMP_THUNK_PARAM
4575                     costSz += 2;
4576                 }
4577             }
4578             else if ((opts.eeFlags & CORJIT_FLG_PREJIT) == 0)
4579             {
4580                 costEx += 2;
4581                 costSz += 6;
4582             }
4583             costSz += 2;
4584 #endif
4585 #ifdef _TARGET_XARCH_
4586             costSz += 3;
4587 #endif
4588         }
4589
4590         level += 1;
4591
4592         unsigned callKind; callKind = (tree->gtFlags & GTF_CALL_VIRT_KIND_MASK); 
4593
4594         /* Virtual calls are a bit more expensive */
4595         if (callKind != GTF_CALL_NONVIRT)
4596         {
4597             costEx += 2 * IND_COST_EX;
4598             costSz += 2;
4599         }
4600
4601         /* Virtual stub calls also must reserve the VIRTUAL_STUB_PARAM reg */
4602         if (callKind == GTF_CALL_VIRT_STUB)
4603         {
4604             ftreg  |= RBM_VIRTUAL_STUB_PARAM;
4605         }
4606
4607 #ifdef FEATURE_READYTORUN_COMPILER
4608 #ifdef _TARGET_ARM64_
4609         if (tree->gtCall.IsR2RRelativeIndir())
4610         {
4611             ftreg |= RBM_R2R_INDIRECT_PARAM;
4612         }
4613 #endif
4614 #endif
4615
4616         // Normally function calls don't preserve caller save registers 
4617         //   and thus are much more expensive.
4618         // However a few function calls do preserve these registers
4619         //   such as the GC WriteBarrier helper calls.
4620
4621 #if GTF_CALL_REG_SAVE
4622         if  (!(tree->gtFlags & GTF_CALL_REG_SAVE))
4623 #endif
4624         {
4625             level  += 5;
4626             costEx += 3 * IND_COST_EX;
4627             ftreg  |= RBM_CALLEE_TRASH;
4628         }
4629
4630 #if FEATURE_STACK_FP_X87
4631         if (isflt) codeGen->genIncrementFPstkLevel();
4632 #endif // FEATURE_STACK_FP_X87
4633
4634         break;
4635
4636     case GT_ARR_ELEM:
4637
4638         level  = gtSetEvalOrder(tree->gtArrElem.gtArrObj);
4639         costEx = tree->gtArrElem.gtArrObj->gtCostEx;
4640         costSz = tree->gtArrElem.gtArrObj->gtCostSz;
4641
4642         unsigned dim;
4643         for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
4644         {
4645             lvl2 = gtSetEvalOrder(tree->gtArrElem.gtArrInds[dim]);
4646             if (level < lvl2)  level = lvl2;
4647             costEx += tree->gtArrElem.gtArrInds[dim]->gtCostEx;
4648             costSz += tree->gtArrElem.gtArrInds[dim]->gtCostSz;
4649         }
4650
4651 #if FEATURE_STACK_FP_X87
4652         if (isflt) codeGen->genIncrementFPstkLevel();
4653 #endif // FEATURE_STACK_FP_X87
4654         level  += tree->gtArrElem.gtArrRank;
4655         costEx += 2 + (tree->gtArrElem.gtArrRank * (IND_COST_EX+1));
4656         costSz += 2 + (tree->gtArrElem.gtArrRank * 2);
4657         break;
4658
4659     case GT_ARR_OFFSET:
4660         level  = gtSetEvalOrder(tree->gtArrOffs.gtOffset);
4661         costEx = tree->gtArrOffs.gtOffset->gtCostEx;
4662         costSz = tree->gtArrOffs.gtOffset->gtCostSz;
4663         lvl2  = gtSetEvalOrder(tree->gtArrOffs.gtIndex);
4664         level = max(level, lvl2);
4665         costEx += tree->gtArrOffs.gtIndex->gtCostEx;
4666         costSz += tree->gtArrOffs.gtIndex->gtCostSz;
4667         lvl2  = gtSetEvalOrder(tree->gtArrOffs.gtArrObj);
4668         level = max(level, lvl2);
4669         costEx += tree->gtArrOffs.gtArrObj->gtCostEx;
4670         costSz += tree->gtArrOffs.gtArrObj->gtCostSz;
4671         break;
4672
4673     case GT_CMPXCHG:
4674
4675         level = gtSetEvalOrder(tree->gtCmpXchg.gtOpLocation);
4676         costSz = tree->gtCmpXchg.gtOpLocation->gtCostSz;
4677
4678         lvl2 = gtSetEvalOrder(tree->gtCmpXchg.gtOpValue);
4679         if (level < lvl2) level = lvl2;
4680         costSz += tree->gtCmpXchg.gtOpValue->gtCostSz;
4681
4682         lvl2 = gtSetEvalOrder(tree->gtCmpXchg.gtOpComparand);
4683         if (level < lvl2) level = lvl2;
4684         costSz += tree->gtCmpXchg.gtOpComparand->gtCostSz;
4685
4686         costEx = MAX_COST; //Seriously, what could be more expensive than lock cmpxchg?
4687         costSz += 5; //size of lock cmpxchg [reg+C], reg
4688 #ifdef _TARGET_XARCH_
4689         ftreg |= RBM_EAX; //cmpxchg must be evaluated into eax.
4690 #endif
4691         break;
4692
4693     case GT_ARR_BOUNDS_CHECK:
4694 #ifdef FEATURE_SIMD
4695     case GT_SIMD_CHK:
4696 #endif // FEATURE_SIMD
4697         costEx = 4;    // cmp reg,reg and jae throw (not taken)
4698         costSz = 7;    // jump to cold section
4699
4700         level = gtSetEvalOrder(tree->gtBoundsChk.gtArrLen);
4701         costEx += tree->gtBoundsChk.gtArrLen->gtCostEx;
4702         costSz += tree->gtBoundsChk.gtArrLen->gtCostSz;
4703
4704         lvl2 = gtSetEvalOrder(tree->gtBoundsChk.gtIndex);
4705         if (level < lvl2) level = lvl2;
4706         costEx += tree->gtBoundsChk.gtIndex->gtCostEx;
4707         costSz += tree->gtBoundsChk.gtIndex->gtCostSz;
4708
4709         break;
4710
4711     default:
4712 #ifdef  DEBUG
4713         if (verbose)
4714         {
4715             printf("unexpected operator in this tree:\n");
4716             gtDispTree(tree);
4717         }
4718 #endif
4719         NO_WAY("unexpected operator");
4720     }
4721
4722 DONE:
4723
4724 #if FEATURE_STACK_FP_X87
4725 //  printf("[FPlvl=%2u] ", genGetFPstkLevel()); gtDispTree(tree, 0, true);
4726     noway_assert((unsigned char)codeGen->genFPstkLevel == codeGen->genFPstkLevel);
4727     tree->gtFPlvl = (unsigned char)codeGen->genFPstkLevel;
4728
4729     if (codeGen->genFPstkLevel > tmpDoubleSpillMax)
4730         tmpDoubleSpillMax = codeGen->genFPstkLevel;
4731 #endif // FEATURE_STACK_FP_X87
4732
4733     tree->gtRsvdRegs = (regMaskSmall)ftreg;
4734
4735     // Some path through this function must have set the costs.
4736     assert(costEx != -1);
4737     assert(costSz != -1);
4738
4739     tree->SetCosts(costEx, costSz);
4740
4741     return level;
4742 }
4743 #ifdef _PREFAST_
4744 #pragma warning(pop)
4745 #endif
4746
4747 #if FEATURE_STACK_FP_X87
4748
4749 /*****************************************************************************/
4750 void                Compiler::gtComputeFPlvls(GenTreePtr tree)
4751 {
4752     genTreeOps      oper;
4753     unsigned        kind;
4754     bool            isflt;
4755     unsigned        savFPstkLevel;
4756
4757     noway_assert(tree);
4758     noway_assert(tree->gtOper != GT_STMT);
4759
4760     /* Figure out what kind of a node we have */
4761
4762     oper  = tree->OperGet();
4763     kind  = tree->OperKind();
4764     isflt = varTypeIsFloating(tree->TypeGet());
4765
4766     /* Is this a constant or leaf node? */
4767
4768     if  (kind & (GTK_CONST|GTK_LEAF))
4769     {
4770         codeGen->genFPstkLevel += isflt;
4771         goto DONE;
4772     }
4773
4774     /* Is it a 'simple' unary/binary operator? */
4775
4776     if  (kind & GTK_SMPOP)
4777     {
4778         GenTreePtr      op1 = tree->gtOp.gtOp1;
4779         GenTreePtr      op2 = tree->gtGetOp2();
4780
4781         /* Check for some special cases */
4782
4783         switch (oper)
4784         {
4785         case GT_IND:
4786
4787             gtComputeFPlvls(op1);
4788
4789             /* Indirect loads of FP values push a new value on the FP stack */
4790
4791             codeGen->genFPstkLevel += isflt;
4792             goto DONE;
4793
4794         case GT_CAST:
4795
4796             gtComputeFPlvls(op1);
4797
4798             /* Casts between non-FP and FP push on / pop from the FP stack */
4799
4800             if  (varTypeIsFloating(op1->TypeGet()))
4801             {
4802                 if  (isflt == false)
4803                     codeGen->genFPstkLevel--;
4804             }
4805             else
4806             {
4807                 if  (isflt != false)
4808                     codeGen->genFPstkLevel++;
4809             }
4810
4811             goto DONE;
4812
4813         case GT_LIST:   /* GT_LIST presumably part of an argument list */
4814         case GT_COMMA:  /* Comma tosses the result of the left operand */
4815
4816             savFPstkLevel = codeGen->genFPstkLevel;
4817             gtComputeFPlvls(op1);
4818             codeGen->genFPstkLevel = savFPstkLevel;
4819
4820             if  (op2)
4821                 gtComputeFPlvls(op2);
4822
4823             goto DONE;
4824
4825         default:
4826             break;
4827         }
4828
4829         if  (!op1)
4830         {
4831             if  (!op2)
4832                 goto DONE;
4833
4834             gtComputeFPlvls(op2);
4835             goto DONE;
4836         }
4837
4838         if  (!op2)
4839         {
4840             gtComputeFPlvls(op1);
4841             if (oper == GT_ADDR)
4842             {
4843                 /* If the operand was floating point pop the value from the stack */
4844                 if (varTypeIsFloating(op1->TypeGet()))
4845                 {
4846                     noway_assert(codeGen->genFPstkLevel);
4847                     codeGen->genFPstkLevel--;
4848                 }
4849             }
4850
4851             // This is a special case to handle the following
4852             // optimization: conv.i4(round.d(d)) -> round.i(d)
4853
4854             if (oper== GT_INTRINSIC && tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Round &&
4855                 tree->TypeGet()==TYP_INT)
4856             {
4857                 codeGen->genFPstkLevel--;
4858             }
4859
4860             goto DONE;
4861         }
4862
4863         /* FP assignments need a bit special handling */
4864
4865         if  (isflt && (kind & GTK_ASGOP))
4866         {
4867             /* The target of the assignment won't get pushed */
4868
4869             if  (tree->gtFlags & GTF_REVERSE_OPS)
4870             {
4871                 gtComputeFPlvls(op2);
4872                 gtComputeFPlvls(op1);
4873                 op1->gtFPlvl--;
4874                 codeGen->genFPstkLevel--;
4875             }
4876             else
4877             {
4878                 gtComputeFPlvls(op1);
4879                 op1->gtFPlvl--;
4880                 codeGen->genFPstkLevel--;
4881                 gtComputeFPlvls(op2);
4882             }
4883
4884             codeGen->genFPstkLevel--;
4885             goto DONE;
4886         }
4887
4888         /* Here we have a binary operator; visit operands in proper order */
4889
4890         if  (tree->gtFlags & GTF_REVERSE_OPS)
4891         {
4892             gtComputeFPlvls(op2);
4893             gtComputeFPlvls(op1);
4894         }
4895         else
4896         {
4897             gtComputeFPlvls(op1);
4898             gtComputeFPlvls(op2);
4899         }
4900
4901         /*
4902             Binary FP operators pop 2 operands and produce 1 result;
4903             assignments consume 1 value and don't produce any.
4904          */
4905
4906         if  (isflt)
4907             codeGen->genFPstkLevel--;
4908
4909         /* Float compares remove both operands from the FP stack */
4910
4911         if  (kind & GTK_RELOP)
4912         {
4913             if  (varTypeIsFloating(op1->TypeGet()))
4914                 codeGen->genFPstkLevel -= 2;
4915         }
4916
4917         goto DONE;
4918     }
4919
4920     /* See what kind of a special operator we have here */
4921
4922     switch  (oper)
4923     {
4924     case GT_FIELD:
4925         gtComputeFPlvls(tree->gtField.gtFldObj);
4926         codeGen->genFPstkLevel += isflt;
4927         break;
4928
4929     case GT_CALL:
4930
4931         if  (tree->gtCall.gtCallObjp)
4932             gtComputeFPlvls(tree->gtCall.gtCallObjp);
4933
4934         if  (tree->gtCall.gtCallArgs)
4935         {
4936             savFPstkLevel = codeGen->genFPstkLevel;
4937             gtComputeFPlvls(tree->gtCall.gtCallArgs);
4938             codeGen->genFPstkLevel = savFPstkLevel;
4939         }
4940
4941         if  (tree->gtCall.gtCallLateArgs)
4942         {
4943             savFPstkLevel = codeGen->genFPstkLevel;
4944             gtComputeFPlvls(tree->gtCall.gtCallLateArgs);
4945             codeGen->genFPstkLevel = savFPstkLevel;
4946         }
4947
4948         codeGen->genFPstkLevel += isflt;
4949         break;
4950
4951     case GT_ARR_ELEM:
4952
4953         gtComputeFPlvls(tree->gtArrElem.gtArrObj);
4954
4955         unsigned dim;
4956         for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
4957             gtComputeFPlvls(tree->gtArrElem.gtArrInds[dim]);
4958
4959         /* Loads of FP values push a new value on the FP stack */
4960         codeGen->genFPstkLevel += isflt;
4961         break;
4962
4963     case GT_CMPXCHG:
4964         //Evaluate the trees left to right
4965         gtComputeFPlvls(tree->gtCmpXchg.gtOpLocation);
4966         gtComputeFPlvls(tree->gtCmpXchg.gtOpValue);
4967         gtComputeFPlvls(tree->gtCmpXchg.gtOpComparand);
4968         noway_assert(!isflt);
4969         break;
4970
4971     case GT_ARR_BOUNDS_CHECK:
4972         gtComputeFPlvls(tree->gtBoundsChk.gtArrLen);
4973         gtComputeFPlvls(tree->gtBoundsChk.gtIndex);
4974         noway_assert(!isflt);
4975         break;
4976
4977 #ifdef DEBUG
4978     default:
4979         noway_assert(!"Unhandled special operator in gtComputeFPlvls()");
4980         break;
4981 #endif
4982     }
4983
4984 DONE:
4985
4986     noway_assert((unsigned char)codeGen->genFPstkLevel == codeGen->genFPstkLevel);
4987
4988     tree->gtFPlvl = (unsigned char)codeGen->genFPstkLevel;
4989 }
4990
4991 #endif // FEATURE_STACK_FP_X87
4992
4993
4994 /*****************************************************************************
4995  *
4996  *  If the given tree is an integer constant that can be used
4997  *  in a scaled index address mode as a multiplier (e.g. "[4*index]"), then return
4998  *  the scale factor: 2, 4, or 8. Otherwise, return 0. Note that we never return 1,
4999  *  to match the behavior of GetScaleIndexShf().
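 *  For example, a GT_CNS_INT node holding 4 yields 4 (usable as "[4*index]"),
 *  while a node holding 1, or any value that is not a valid scale, yields 0.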
5000  */
5001
5002 unsigned            GenTree::GetScaleIndexMul()
5003 {
5004     if  (IsCnsIntOrI() && jitIsScaleIndexMul(gtIntConCommon.IconValue()) && gtIntConCommon.IconValue()!=1)
5005         return (unsigned)gtIntConCommon.IconValue();
5006
5007     return 0;
5008 }
5009
5010 /*****************************************************************************
5011  *
5012  *  If the given tree is the right-hand side of a left shift (that is,
5013  *  'y' in the tree 'x' << 'y'), and it is an integer constant that can be used
5014  *  in a scaled index address mode as a multiplier (e.g. "[4*index]"), then return
5015  *  the scale factor: 2, 4, or 8. Otherwise, return 0.
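 *  For example, a constant shift amount of 2 yields a scale factor of 4 (i.e. "[4*index]").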
5016  */
5017
5018 unsigned            GenTree::GetScaleIndexShf()
5019 {
5020     if  (IsCnsIntOrI() && jitIsScaleIndexShift(gtIntConCommon.IconValue()))
5021         return (unsigned)(1 << gtIntConCommon.IconValue());
5022
5023     return 0;
5024 }
5025
5026 /*****************************************************************************
5027  *
5028  *  If the given tree is a scaled index (i.e. "op * 4" or "op << 2"), returns
5029  *  the multiplier: 2, 4, or 8; otherwise returns 0. Note that "1" is never
5030  *  returned.
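 *  For example, "op << 3" yields 8, assuming 'op' is not itself an integer constant.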
5031  */
5032
5033 unsigned            GenTree::GetScaledIndex()
5034 {
5035     // With !opts.OptEnabled(CLFLG_CONSTANTFOLD) we can have
5036     //   CNS_INT * CNS_INT
5037     //
5038     if (gtOp.gtOp1->IsCnsIntOrI())
5039         return 0;
5040
5041     switch (gtOper)
5042     {
5043     case GT_MUL:
5044         return gtOp.gtOp2->GetScaleIndexMul();
5045
5046     case GT_LSH:
5047         return gtOp.gtOp2->GetScaleIndexShf();
5048
5049     default:
5050         assert(!"GenTree::GetScaledIndex() called with illegal gtOper");
5051         break;
5052     }
5053
5054     return 0;
5055 }
5056
5057 /*****************************************************************************
5058  *
5059  *  Returns true if this is a GT_ADD node, at least one of whose arguments is an integer (<= 32 bit)
5060  *  constant.  If it returns true, it sets "*offset" to (one of) the constant value(s), and
5061  *  "*addr" to the other argument.
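 *  For example, for the tree "GT_ADD(x, 16)" it sets "*offset" to 16 and "*addr" to "x".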
5062  */
5063
5064 bool                GenTree::IsAddWithI32Const(GenTreePtr* addr, int* offset)
5065 {
5066     if (OperGet() == GT_ADD)
5067     {
5068         if (gtOp.gtOp1->IsIntCnsFitsInI32())
5069         {
5070             *offset = (int)gtOp.gtOp1->gtIntCon.gtIconVal;
5071             *addr = gtOp.gtOp2;
5072             return true;
5073         }
5074         else if (gtOp.gtOp2->IsIntCnsFitsInI32())
5075         {
5076             *offset = (int)gtOp.gtOp2->gtIntCon.gtIconVal;
5077             *addr = gtOp.gtOp1;
5078             return true;
5079         }
5080     }
5081     // Otherwise...
5082     return false;
5083 }
5084
5085 //------------------------------------------------------------------------
5086 // InsertAfterSelf: Insert 'node' after this node in execution order.
5087 // If 'stmt' is not nullptr, it is the parent statement of 'this', and we can insert at the
5088 // end of the statement's node list. If 'stmt' is nullptr, we cannot insert at the end of the list.
5089 //
5090 // Arguments:
5091 //    'node' - The node to insert. We only insert a node, not a whole tree.
5092 //    'stmt' - Optional. If set, the parent statement of 'this'.
5093 //
5094 // Return Value:
5095 //    None.
5096 //
5097 // Assumptions:
5098 //    'node' is a single node to insert, not a tree to insert.
5099 //
5100 // Notes:
5101 //    Use Compiler::fgInsertTreeInListAfter() to insert a whole tree.
5102
5103 void            GenTree::InsertAfterSelf(GenTree* node, GenTreeStmt* stmt /* = nullptr */)
5104 {
5105     // statements have crazy requirements
5106     assert(this->gtOper != GT_STMT);
5107
5108     node->gtNext = this->gtNext;
5109     node->gtPrev = this;
5110
5111     // Insertion at the beginning or end of a block is a special case
5112     // and requires more context.
5113     if (this->gtNext == nullptr)
5114     {
5115         assert(stmt != nullptr);
5116         assert(stmt->gtOper == GT_STMT);
5117         assert(stmt->gtStmtExpr == this);
5118         stmt->gtStmtExpr = node;
5119     }
5120     else
5121     {
5122         this->gtNext->gtPrev = node;
5123     }
5124
5125     this->gtNext = node;
5126 }
5127
5128 //------------------------------------------------------------------------
5129 // gtGetChildPointer: If 'parent' is the parent of this node, return the pointer
5130 //    to the child node so that it can be modified; otherwise, return nullptr.
5131 //
5132 // Arguments:
5133 //    parent - The possible parent of this node
5134 //
5135 // Return Value:
5136 //    If "child" is a child of "parent", returns a pointer to the child node in the parent
5137 //    (i.e. a pointer to a GenTree pointer).
5138 //    Otherwise, returns nullptr.
5139 //
5140 // Assumptions:
5141 //    'parent' must be non-null
5142 //
5143 // Notes:
5144 //    When FEATURE_MULTIREG_ARGS is defined we can get here with a GT_LDOBJ tree.
5145 //    This happens when we have a struct that is passed in multiple registers.
5146 //
5147 //    Also note that when FEATURE_UNIX_AMD64_STRUCT_PASSING is defined the GT_LDOBJ 
5148 //    later gets converted to a GT_LIST with two GT_LCL_FLDs in Lower/LowerXArch.
5149 //
5150
5151 GenTreePtr*         GenTree::gtGetChildPointer(GenTreePtr parent)
5152
5153 {
5154     switch (parent->OperGet())
5155     {
5156     default:
5157         if (!parent->OperIsSimple())                       return nullptr;
5158         if (this == parent->gtOp.gtOp1)                    return &(parent->gtOp.gtOp1);
5159         if (this == parent->gtOp.gtOp2)                    return &(parent->gtOp.gtOp2);
5160         break;
5161
5162 #if !FEATURE_MULTIREG_ARGS
5163         // Note that when FEATURE_MULTIREG_ARGS==1 
5164         //  a GT_OBJ node is handled above by the default case
5165     case GT_OBJ:
5166         // Any GT_OBJ with a field must be lowered before this point.
5167         noway_assert(!"GT_OBJ encountered in GenTree::gtGetChildPointer");
5168         break;
5169 #endif // !FEATURE_MULTIREG_ARGS
5170
5171     case GT_CMPXCHG:
5172         if (this == parent->gtCmpXchg.gtOpLocation)        return &(parent->gtCmpXchg.gtOpLocation);
5173         if (this == parent->gtCmpXchg.gtOpValue)           return &(parent->gtCmpXchg.gtOpValue);
5174         if (this == parent->gtCmpXchg.gtOpComparand)       return &(parent->gtCmpXchg.gtOpComparand);
5175         break;
5176
5177     case GT_ARR_BOUNDS_CHECK:
5178 #ifdef FEATURE_SIMD
5179     case GT_SIMD_CHK:
5180 #endif // FEATURE_SIMD
5181         if (this == parent->gtBoundsChk.gtArrLen)          return &(parent->gtBoundsChk.gtArrLen);
5182         if (this == parent->gtBoundsChk.gtIndex)           return &(parent->gtBoundsChk.gtIndex);
5183         if (this == parent->gtBoundsChk.gtIndRngFailBB)    return &(parent->gtBoundsChk.gtIndRngFailBB);
5184         break;
5185
5186     case GT_ARR_ELEM:
5187         if (this == parent->gtArrElem.gtArrObj)            return &(parent->gtArrElem.gtArrObj);
5188         for (int i = 0; i < GT_ARR_MAX_RANK; i++)
5189             if (this == parent->gtArrElem.gtArrInds[i])    return &(parent->gtArrElem.gtArrInds[i]);
5190         break;
5191
5192     case GT_ARR_OFFSET:
5193         if (this == parent->gtArrOffs.gtOffset)            return &(parent->gtArrOffs.gtOffset);
5194         if (this == parent->gtArrOffs.gtIndex)             return &(parent->gtArrOffs.gtIndex);
5195         if (this == parent->gtArrOffs.gtArrObj)            return &(parent->gtArrOffs.gtArrObj);
5196         break;
5197
5198     case GT_FIELD:
5199         if (this == parent->AsField()->gtFldObj)           return &(parent->AsField()->gtFldObj);
5200         break;
5201
5202     case GT_RET_EXPR:
5203         if (this == parent->gtRetExpr.gtInlineCandidate)   return &(parent->gtRetExpr.gtInlineCandidate);
5204         break;
5205
5206     case GT_CALL:
5207         {
5208             GenTreeCall* call = parent->AsCall();
5209
5210             if (this == call->gtCallObjp)                  return &(call->gtCallObjp);
5211             if (this == call->gtCallArgs)                  return reinterpret_cast<GenTreePtr*>(&(call->gtCallArgs));
5212             if (this == call->gtCallLateArgs)              return reinterpret_cast<GenTreePtr*>(&(call->gtCallLateArgs));
5213             if (this == call->gtControlExpr)               return &(call->gtControlExpr);
5214             if (call->gtCallType == CT_INDIRECT)
5215             {
5216                 if (this == call->gtCallCookie)            return &(call->gtCallCookie);
5217                 if (this == call->gtCallAddr)              return &(call->gtCallAddr);
5218             }
5219         }
5220         break;
5221
5222     case GT_STMT:
5223         noway_assert(!"Illegal node for gtGetChildPointer()");
5224         unreached();
5225     }
5226
5227     return nullptr;
5228 }
5229
5230 //------------------------------------------------------------------------
5231 // gtGetParent: Get the parent of this node, and optionally capture the
5232 //    pointer to the child so that it can be modified.
5233 //
5234 // Arguments:
5235 //
5236 //    parentChildPtrPtr  - A pointer to a GenTreePtr* (yes, that's three
5237 //                         levels, i.e. GenTree ***), which if non-null,
5238 //                         will be set to point to the field in the parent
5239 //                         that points to this node.
5240 //
5241 //    Return value       - The parent of this node.
5242 //
5243 //    Notes:
5244 //
5245 //    This requires that the execution order must be defined (i.e. gtSetEvalOrder() has been called).
5246 //    To enable the child to be replaced, it accepts an argument, parentChildPtrPtr, which, if non-null,
5247 //    will be set to point to the child pointer in the parent that points to this node.
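//    Note that this walks forward from this node in execution order (via gtNext) until it finds a
//    node whose child pointer refers back to this one, so it can be expensive for large trees.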
5248
5249 GenTreePtr          GenTree::gtGetParent(GenTreePtr** parentChildPtrPtr)
5250 {
5251     // Find the parent node; it must be after this node in the execution order.
5252     GenTreePtr * parentChildPtr = nullptr;
5253     GenTreePtr parent;
5254     for (parent = gtNext; parent != nullptr; parent = parent->gtNext)
5255     {
5256         parentChildPtr = gtGetChildPointer(parent);
5257         if (parentChildPtr != nullptr) break;
5258     }
5259     if (parentChildPtrPtr != nullptr) *parentChildPtrPtr = parentChildPtr;
5260     return parent;
5261 }
5262
5263 /*****************************************************************************
5264  *
5265  *  Returns true if the given operator may cause an exception.
5266  */
5267
5268 bool                GenTree::OperMayThrow()
5269 {
5270     GenTreePtr  op;
5271
5272     switch (gtOper)
5273     {
5274     case GT_MOD:
5275     case GT_DIV:
5276     case GT_UMOD:
5277     case GT_UDIV:
5278
5279         /* Division with a non-zero, non-minus-one constant does not throw an exception */
5280
5281         op = gtOp.gtOp2;
5282
5283         if  (varTypeIsFloating(op->TypeGet()))
5284             return false;  // Floating point division does not throw.
5285
5286         // For integers only division by 0 or by -1 can throw
5287         if (op->IsIntegralConst() && !op->IsIntegralConst(0) && !op->IsIntegralConst(-1))
5288             return false;
5289         return true;
5290
5291     case GT_IND:
5292         op = gtOp.gtOp1;
5293
5294         /* Indirections of handles are known to be safe */
5295         if (op->gtOper == GT_CNS_INT) 
5296         {
5297             if (op->IsIconHandle())
5298             {
5299                 /* No exception is thrown on this indirection */
5300                 return false;
5301             }
5302         }
5303         if (this->gtFlags & GTF_IND_NONFAULTING)
5304         {
5305             return false;
5306         }
5307         // Non-Null AssertionProp will remove the GTF_EXCEPT flag and mark the GT_IND with GTF_ORDER_SIDEEFF flag
5308         if ((this->gtFlags & GTF_ALL_EFFECT) == GTF_ORDER_SIDEEFF)
5309         {
5310             return false;
5311         }
5312
5313         return true;
5314
5315     case GT_INTRINSIC:
5316         // If this is an intrinsic that represents object.GetType(), it can throw a NullReferenceException.
5317         // Report it as one that may throw.
5318         // Note: Some of the rest of the existing intrinsics could potentially throw an exception (for example
5319         //       the array and string element access ones). They are handled differently than the GetType intrinsic
5320         //       and are not marked with GTF_EXCEPT. If these are revisited at some point to be marked as GTF_EXCEPT, 
5321         //       the code below might need to be specialized to handle them properly.
5322         if ((this->gtFlags & GTF_EXCEPT) != 0)
5323         {
5324             return true;
5325         }
5326
5327         break;
5328
5329     case GT_ARR_BOUNDS_CHECK:
5330     case GT_ARR_ELEM:
5331     case GT_ARR_INDEX:
5332     case GT_CATCH_ARG:
5333     case GT_ARR_LENGTH:
5334     case GT_OBJ:
5335     case GT_LCLHEAP:
5336     case GT_CKFINITE:
5337     case GT_NULLCHECK:
5338 #ifdef FEATURE_SIMD
5339     case GT_SIMD_CHK:
5340 #endif // FEATURE_SIMD
5341         return  true;
5342     default:
5343         break;
5344     }
5345
5346     /* Overflow arithmetic operations also throw exceptions */
5347
5348     if (gtOverflowEx())
5349         return true;
5350
5351     return  false;
5352 }
5353
5354 #if DEBUGGABLE_GENTREE
5355 // static
5356 GenTree::VtablePtr GenTree::s_vtablesForOpers[] = { NULL };
5357 GenTree::VtablePtr GenTree::s_vtableForOp = NULL;
5358
5359 GenTree::VtablePtr GenTree::GetVtableForOper(genTreeOps oper)
5360 {
5361     noway_assert (oper < GT_COUNT);
5362
5363     if (s_vtablesForOpers[oper] != NULL) return s_vtablesForOpers[oper];
5364     // Otherwise...
5365     VtablePtr res = NULL;
5366     switch (oper)
5367     {
5368 #define GTSTRUCT_0(nm, tag)             /*handle explicitly*/
5369 #define GTSTRUCT_1(nm, tag)             case tag: { GenTree##nm gt; res = *reinterpret_cast<VtablePtr*>(&gt); } break;
5370 #define GTSTRUCT_2(nm, tag, tag2)       /*handle explicitly*/
5371 #define GTSTRUCT_3(nm, tag, tag2, tag3) /*handle explicitly*/
5372 #define GTSTRUCT_4(nm, tag, tag2, tag3, tag4) /*handle explicitly*/
5373 #define GTSTRUCT_N(nm, ...) /*handle explicitly*/
5374 #include "gtstructs.h"
5375         // If FEATURE_EH_FUNCLETS is set, then GT_JMP becomes the only member of Val, and will be handled above.
5376 #if !FEATURE_EH_FUNCLETS
5377     case GT_END_LFIN: 
5378     case GT_JMP:
5379         { GenTreeVal gt(GT_JMP, TYP_INT, 0); res = *reinterpret_cast<VtablePtr*>(&gt); break; }
5380 #endif
5381     default:
5382         {
5383             // Should be unary or binary op.
5384             if (s_vtableForOp == NULL)
5385             {
5386                 unsigned opKind = OperKind(oper);
5387                 assert(!IsExOp(opKind));
5388                 assert(OperIsSimple(oper) || OperIsLeaf(oper));
5389                 // Need to provide non-null operands.
5390                 Compiler* comp = (Compiler*)_alloca(sizeof(Compiler));
5391                 GenTreeIntCon dummyOp(TYP_INT, 0);
5392                 GenTreeOp gt(oper, TYP_INT, &dummyOp, ((opKind & GTK_UNOP) ? NULL : &dummyOp));
5393                 s_vtableForOp = *reinterpret_cast<VtablePtr*>(&gt);
5394             }
5395             res = s_vtableForOp;
5396             break;
5397         }
5398     }
5399     s_vtablesForOpers[oper] = res;
5400     return res;
5401 }
5402
5403 void GenTree::SetVtableForOper(genTreeOps oper)
5404 {
5405     *reinterpret_cast<VtablePtr*>(this) = GetVtableForOper(oper);
5406 }
5407 #endif // DEBUGGABLE_GENTREE
5408
5409 GenTreePtr          Compiler::gtNewOperNode(genTreeOps oper,
5410                                             var_types  type, GenTreePtr op1,
5411                                                              GenTreePtr op2)
5412 {
5413     assert(op1 != NULL);
5414     assert(op2 != NULL);
5415
5416     // We should not be allocating nodes that extend GenTreeOp with this;
5417     // should call the appropriate constructor for the extended type.
5418     assert(!GenTree::IsExOp(GenTree::OperKind(oper)));
5419
5420     GenTreePtr node = new(this, oper) GenTreeOp(oper, type, op1, op2);
5421
5422     return node;
5423 }
5424
5425 GenTreePtr          Compiler::gtNewQmarkNode(var_types  type, GenTreePtr cond, GenTreePtr colon)
5426 {
5427     compQmarkUsed = true;
5428     GenTree* result = new(this, GT_QMARK) GenTreeQmark(type, cond, colon, this);
5429 #ifdef DEBUG
5430     if (compQmarkRationalized)
5431     {
5432         fgCheckQmarkAllowedForm(result);
5433     }
5434 #endif
5435     return result;
5436 }
5437
5438 GenTreeQmark::GenTreeQmark(var_types type, GenTreePtr cond, GenTreePtr colonOp, Compiler* comp) :  
5439     GenTreeOp(GT_QMARK, type, cond, colonOp),
5440     gtThenLiveSet(VarSetOps::UninitVal()),
5441     gtElseLiveSet(VarSetOps::UninitVal())
5442     {
5443         // These must follow a specific form.
5444         assert(cond != NULL && cond->TypeGet() == TYP_INT);
5445         assert(colonOp != NULL && colonOp->OperGet() == GT_COLON);
5446
5447         comp->impInlineRoot()->compQMarks->Push(this);
5448     }
5449
5450
5451 GenTreeIntCon*          Compiler::gtNewIconNode(ssize_t value, var_types type)
5452 {
5453     return new(this, GT_CNS_INT) GenTreeIntCon(type, value);
5454 }
5455
5456 // return a new node representing the value in a physical register
5457 GenTree* Compiler::gtNewPhysRegNode(regNumber reg, var_types type)
5458 {
5459     assert(genIsValidIntReg(reg) || (reg == REG_SPBASE));
5460     GenTree *result = new(this, GT_PHYSREG) GenTreePhysReg(reg, type);
5461     return result;
5462 }
5463
5464 // Return a new node representing a store of a value to a physical register 
5465 // modifies: child's gtRegNum
5466 GenTree* Compiler::gtNewPhysRegNode(regNumber reg, GenTree* src)
5467 {
5468     assert(genIsValidIntReg(reg));
5469     GenTree *result = new(this, GT_PHYSREGDST) GenTreeOp(GT_PHYSREGDST, TYP_I_IMPL, src, nullptr);
5470     result->gtRegNum = reg;
5471     src->gtRegNum = reg;
5472     return result;
5473 }
5474
5475 #ifndef LEGACY_BACKEND
5476 GenTreePtr Compiler::gtNewJmpTableNode()
5477 {
5478     GenTreePtr node = new(this, GT_JMPTABLE) GenTreeJumpTable(TYP_INT);
5479     node->gtJumpTable.gtJumpTableAddr = 0;
5480     return node;
5481 }
5482 #endif // !LEGACY_BACKEND
5483
5484 /*****************************************************************************
5485  *
5486  *  Converts an annotated token into icon flags (so that we will later be
5487  *  able to tell the type of the handle that will be embedded in the icon
5488  *  node).
5489  */
5490
5491 unsigned            Compiler::gtTokenToIconFlags(unsigned token)
5492 {
5493     unsigned flags = 0;
5494
5495     switch (TypeFromToken(token))
5496     {
5497     case mdtTypeRef:
5498     case mdtTypeDef:
5499     case mdtTypeSpec:
5500         flags = GTF_ICON_CLASS_HDL;
5501         break;
5502
5503     case mdtMethodDef:
5504         flags = GTF_ICON_METHOD_HDL;
5505         break;
5506
5507     case mdtFieldDef:
5508         flags = GTF_ICON_FIELD_HDL;
5509         break;
5510
5511     default:
5512         flags = GTF_ICON_TOKEN_HDL;
5513         break;
5514     }
5515
5516     return flags;
5517 }
5518
5519 /*****************************************************************************
5520  *
5521  *  Allocates an integer constant entry that represents a HANDLE to something.
5522  *  It may not be allowed to embed HANDLEs directly into the JITed code (e.g.,
5523  *  as arguments to JIT helpers). Get a corresponding value that can be embedded.
5524  *  If the handle needs to be accessed via an indirection, pValue points to it.
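 *  Exactly one of 'value' and 'pValue' must be non-null.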
5525  */
5526
5527 GenTreePtr          Compiler::gtNewIconEmbHndNode(void *       value,
5528                                                   void *       pValue,
5529                                                   unsigned     flags,
5530                                                   unsigned     handle1,
5531                                                   void *       handle2,
5532                                                   void *       compileTimeHandle)
5533 {
5534     GenTreePtr      node;
5535
5536     assert((!value) != (!pValue));
5537
5538     if (value)
5539     {
5540         node = gtNewIconHandleNode((size_t)value, flags, /*fieldSeq*/FieldSeqStore::NotAField(), handle1, handle2);
5541         node->gtIntCon.gtCompileTimeHandle = (size_t) compileTimeHandle;
5542     }
5543     else
5544     {
5545         node = gtNewIconHandleNode((size_t)pValue, flags, /*fieldSeq*/FieldSeqStore::NotAField(), handle1, handle2);
5546         node->gtIntCon.gtCompileTimeHandle = (size_t) compileTimeHandle;
5547         node = gtNewOperNode(GT_IND, TYP_I_IMPL, node);
5548     }
5549
5550
5551     return node;
5552 }
5553
5554 /*****************************************************************************/
5555 GenTreePtr          Compiler::gtNewStringLiteralNode(InfoAccessType iat, void * pValue)
5556 {
5557     GenTreePtr tree = NULL;
5558
5559     switch (iat)
5560     {
5561     case IAT_VALUE:      // The info value is directly available
5562         tree = gtNewIconEmbHndNode(pValue, NULL, GTF_ICON_STR_HDL);        
5563         tree->gtType = TYP_REF;
5564         tree = gtNewOperNode(GT_NOP, TYP_REF, tree); // prevents constant folding
5565         break;
5566
5567     case IAT_PVALUE:     // The value needs to be accessed via an       indirection
5568         tree = gtNewIconHandleNode((size_t)pValue, GTF_ICON_STR_HDL);
5569         // An indirection of a string handle can't cause an exception so don't set GTF_EXCEPT
5570         tree = gtNewOperNode(GT_IND, TYP_REF, tree);
5571         tree->gtFlags |= GTF_GLOB_REF;
5572         break;
5573
5574     case IAT_PPVALUE:     // The value needs to be accessed via a double indirection
5575         tree = gtNewIconHandleNode((size_t)pValue, GTF_ICON_PSTR_HDL);
5576         tree = gtNewOperNode(GT_IND, TYP_I_IMPL, tree);
5577         tree->gtFlags |= GTF_IND_INVARIANT;
5578         // An indirection of a string handle can't cause an exception so don't set GTF_EXCEPT
5579         tree = gtNewOperNode(GT_IND, TYP_REF, tree);
5580         tree->gtFlags |= GTF_GLOB_REF;
5581         break;
5582
5583     default:
5584         assert(!"Unexpected InfoAccessType");
5585     }
5586
5587     return tree;
5588 }
5589
5590 /*****************************************************************************/
5591
5592 GenTreePtr          Compiler::gtNewLconNode(__int64 value)
5593 {
5594 #ifdef _TARGET_64BIT_
5595     GenTreePtr      node = new(this, GT_CNS_INT) GenTreeIntCon(TYP_LONG, value);
5596 #else
5597     GenTreePtr      node = new(this, GT_CNS_LNG) GenTreeLngCon(value);
5598 #endif
5599
5600     return node;
5601 }
5602
5603
5604 GenTreePtr          Compiler::gtNewDconNode(double value)
5605 {
5606     GenTreePtr      node = new(this, GT_CNS_DBL) GenTreeDblCon(value);
5607
5608     return node;
5609 }
5610
5611
5612 GenTreePtr          Compiler::gtNewSconNode(int CPX, CORINFO_MODULE_HANDLE scpHandle)
5613 {
5614
5615 #if SMALL_TREE_NODES
5616
5617     /* 'GT_CNS_STR' nodes later get transformed into 'GT_CALL' */
5618
5619     assert(GenTree::s_gtNodeSizes[GT_CALL] > GenTree::s_gtNodeSizes[GT_CNS_STR]);
5620
5621     GenTreePtr      node = new(this, GT_CALL) GenTreeStrCon(CPX, scpHandle 
5622                                                             DEBUGARG(/*largeNode*/true));
5623 #else
5624     GenTreePtr      node = new(this, GT_CNS_STR) GenTreeStrCon(CPX, scpHandle
5625                                                                DEBUGARG(/*largeNode*/true));
5626 #endif
5627
5628     return node;
5629 }
5630
5631
5632 GenTreePtr          Compiler::gtNewZeroConNode(var_types type)
5633 {
5634     GenTreePtr      zero;
5635     switch (type)
5636     {
5637     case TYP_INT:
5638         zero =  gtNewIconNode(0);
5639         break;
5640
5641     case TYP_BYREF:
5642         __fallthrough;
5643
5644     case TYP_REF:
5645         zero = gtNewIconNode(0);
5646         zero->gtType = type;
5647         break;
5648
5649     case TYP_LONG:
5650         zero = gtNewLconNode(0);
5651         break;
5652
5653     case TYP_FLOAT:
5654         zero = gtNewDconNode(0.0);
5655         zero->gtType = type;
5656         break;
5657
5658     case TYP_DOUBLE:
5659         zero = gtNewDconNode(0.0);
5660         break;
5661
5662     default:
5663         assert(!"Bad type");
5664         zero = nullptr;
5665         break;
5666     }
5667     return zero;
5668 }
5669
5670 GenTreePtr          Compiler::gtNewOneConNode(var_types type)
5671 {
5672     switch (type)
5673     {
5674     case TYP_INT:
5675     case TYP_UINT:
5676         return gtNewIconNode(1);
5677
5678     case TYP_LONG:
5679     case TYP_ULONG:
5680         return gtNewLconNode(1);
5681         
5682     case TYP_FLOAT:
5683         {
5684             GenTreePtr one = gtNewDconNode(1.0);
5685             one->gtType = type;
5686             return one;
5687         }
5688
5689     case TYP_DOUBLE:    
5690         return gtNewDconNode(1.0);
5691
5692     default:            
5693         assert(!"Bad type");
5694         return nullptr;
5695     }
5696 }
5697
5698 GenTreeCall*          Compiler::gtNewIndCallNode(GenTreePtr addr,
5699                                                  var_types  type,
5700                                                  GenTreeArgList* args,
5701                                                  IL_OFFSETX ilOffset)
5702 {
5703     return gtNewCallNode(CT_INDIRECT,(CORINFO_METHOD_HANDLE)addr, type, args, ilOffset);
5704 }
5705
5706 GenTreeCall*          Compiler::gtNewCallNode(gtCallTypes     callType,
5707                                               CORINFO_METHOD_HANDLE callHnd,
5708                                               var_types       type,
5709                                               GenTreeArgList* args,
5710                                               IL_OFFSETX    ilOffset)
5711 {
5712     GenTreeCall* node = new(this, GT_CALL) GenTreeCall(genActualType(type));
5713
5714     node->gtFlags        |= (GTF_CALL|GTF_GLOB_REF);
5715     if (args)
5716         node->gtFlags    |= (args->gtFlags & GTF_ALL_EFFECT);
5717     node->gtCallType      = callType;
5718     node->gtCallMethHnd   = callHnd;
5719     node->gtCallArgs      = args;
5720     node->gtCallObjp      = nullptr;
5721     node->fgArgInfo       = nullptr;
5722     node->callSig         = nullptr;
5723     node->gtRetClsHnd     = nullptr;
5724     node->gtControlExpr   = nullptr;
5725     node->gtCallMoreFlags = 0;
5726
5727     if (callType == CT_INDIRECT)
5728     {
5729         node->gtCallCookie    = NULL;
5730     }
5731     else
5732     {
5733         node->gtInlineCandidateInfo = NULL;
5734     }
5735     node->gtCallLateArgs     = nullptr;
5736     node->gtReturnType      = type;
5737
5738 #ifdef LEGACY_BACKEND
5739     node->gtCallRegUsedMask = RBM_NONE;
5740 #endif // LEGACY_BACKEND
5741
5742 #ifdef FEATURE_READYTORUN_COMPILER
5743     node->gtCall.gtEntryPoint.addr = nullptr;
5744 #endif
5745
5746 #if defined(DEBUG) || defined(INLINE_DATA)
5747     // These get updated after call node is built.
5748     node->gtCall.gtInlineObservation = InlineObservation::CALLEE_UNUSED_INITIAL;
5749     node->gtCall.gtRawILOffset = BAD_IL_OFFSET;
5750 #endif
5751
5752 #ifdef DEBUGGING_SUPPORT
5753     // Spec: Managed Retval sequence points need to be generated while generating debug info for debuggable code.
5754     //
5755     // Implementation note: if not generating MRV info genCallSite2ILOffsetMap will be NULL and 
5756     // codegen will pass BAD_IL_OFFSET as IL offset of a call node to emitter, which will cause emitter
5757     // not to emit IP mapping entry.
5758     if (opts.compDbgCode && opts.compDbgInfo)
5759     {
5760         // Managed Retval - IL offset of the call.  This offset is used to emit a
5761         // CALL_INSTRUCTION type sequence point while emitting corresponding native call.
5762         //
5763         // TODO-Cleanup: 
5764         // a) (Opt) We need not store this offset if the method doesn't return a 
5765         // value.  Rather it can be made BAD_IL_OFFSET to prevent a sequence
5766         // point being emitted.
5767         //
5768         // b) (Opt) Add new sequence points only if requested by debugger through
5769         // a new boundary type - ICorDebugInfo::BoundaryTypes
5770         if (genCallSite2ILOffsetMap == NULL)
5771         {
5772             genCallSite2ILOffsetMap = new (getAllocator()) CallSiteILOffsetTable(getAllocator());
5773         }
5774
5775         // Make sure that there are no duplicate entries for a given call node
5776         IL_OFFSETX value;
5777         assert(!genCallSite2ILOffsetMap->Lookup(node, &value));
5778         genCallSite2ILOffsetMap->Set(node, ilOffset);
5779     }
5780 #endif
5781
5782     // Initialize gtOtherRegs 
5783     node->ClearOtherRegs();
5784
5785     // Initialize spill flags of gtOtherRegs
5786     node->ClearOtherRegFlags();
5787
5788     return node;
5789 }
5790
5791 GenTreePtr          Compiler::gtNewLclvNode(unsigned   lnum,
5792                                             var_types  type,
5793                                             IL_OFFSETX ILoffs)
5794 {
5795     // We need to ensure that all struct values are normalized.
5796     // It might be nice to assert this in general, but we have assignments of int to long.
5797     if (varTypeIsStruct(type))
5798     {
5799         assert(type == lvaTable[lnum].lvType);
5800     }
5801     GenTreePtr      node = new(this, GT_LCL_VAR) GenTreeLclVar(type, lnum, ILoffs
5802                                                               );
5803
5804     /* Cannot have this assert because the inliner uses this function
5805      * to add temporaries */
5806
5807     //assert(lnum < lvaCount);
5808
5809     return node;
5810 }
5811
5812 GenTreePtr          Compiler::gtNewLclLNode(unsigned   lnum,
5813                                             var_types  type,
5814                                             IL_OFFSETX ILoffs)
5815 {
5816     // We need to ensure that all struct values are normalized.
5817     // It might be nice to assert this in general, but we have assignments of int to long.
5818     if (varTypeIsStruct(type))
5819     {
5820         assert(type == lvaTable[lnum].lvType);
5821     }
5822 #if SMALL_TREE_NODES
5823     /* This local variable node may later get transformed into a large node */
5824
5825 //    assert(GenTree::s_gtNodeSizes[GT_CALL] > GenTree::s_gtNodeSizes[GT_LCL_VAR]);
5826
5827     GenTreePtr node = new(this, GT_CALL) GenTreeLclVar(type, lnum, ILoffs
5828                                                        DEBUGARG(/*largeNode*/true));
5829 #else
5830     GenTreePtr node = new(this, GT_LCL_VAR) GenTreeLclVar(type, lnum, ILoffs
5831                                                           DEBUGARG(/*largeNode*/true));
5832 #endif
5833
5834     return node;
5835 }
5836
5837 GenTreeLclFld* Compiler::gtNewLclFldNode(unsigned  lnum,
5838                                          var_types type,
5839                                          unsigned  offset)
5840 {
5841     GenTreeLclFld*      node = new(this, GT_LCL_FLD) GenTreeLclFld(type, lnum, offset);
5842
5843     /* Cannot have this assert because the inliner uses this function
5844      * to add temporaries */
5845
5846     //assert(lnum < lvaCount);
5847
5848     node->gtFieldSeq = FieldSeqStore::NotAField();
5849     return node;
5850 }
5851
5852
5853
5854 GenTreePtr          Compiler::gtNewInlineCandidateReturnExpr(GenTreePtr   inlineCandidate,
5855                                                              var_types    type)
5856 {
5857     assert(GenTree::s_gtNodeSizes[GT_RET_EXPR] == TREE_NODE_SZ_LARGE);
5858
5859     GenTreePtr node = new(this, GT_RET_EXPR) GenTreeRetExpr(type);
5860     
5861     node->gtRetExpr.gtInlineCandidate = inlineCandidate;
5862
5863     if (varTypeIsStruct(inlineCandidate))
5864     {
5865         node->gtRetExpr.gtRetClsHnd = gtGetStructHandle(inlineCandidate);
5866     }
5867
5868     // A GT_RET_EXPR node might eventually be bashed back to GT_CALL (for example, when inlining is aborted).
5869     // Therefore it should carry the GTF_CALL flag so that all the rules about spilling can apply to it as well.
5870     // For example, impImportLeave or CEE_POP need to spill GT_RET_EXPR before emptying the evaluation stack.
5871     node->gtFlags |= GTF_CALL;
5872     
5873     return node;
5874 }
5875
5876 GenTreeArgList* Compiler::gtNewListNode(GenTreePtr op1, GenTreeArgList* op2)
5877 {
5878     assert((op1 != NULL) && (op1->OperGet() != GT_LIST));
5879
5880     return new (this, GT_LIST) GenTreeArgList(op1, op2);
5881 }
5882
5883
5884 /*****************************************************************************
5885  *
5886  *  Create a list out of one value.
5887  */
5888
5889 GenTreeArgList* Compiler::gtNewArgList(GenTreePtr arg)
5890 {
5891     return new (this, GT_LIST) GenTreeArgList(arg);
5892 }
5893
5894 /*****************************************************************************
5895  *
5896  *  Create a list out of the two values.
5897  */
5898
5899 GenTreeArgList* Compiler::gtNewArgList(GenTreePtr arg1, GenTreePtr arg2)
5900 {
5901     return new (this, GT_LIST) GenTreeArgList(arg1, gtNewArgList(arg2));
5902 }
5903
5904 /*****************************************************************************
5905  *
5906  *  Given a GT_CALL node, access the fgArgInfo and find the entry
5907  *  that has the matching argNum and return the fgArgTableEntryPtr
5908  */
5909
5910 fgArgTabEntryPtr Compiler::gtArgEntryByArgNum(GenTreePtr call, unsigned argNum)
5911 {
5912     noway_assert(call->IsCall());
5913     fgArgInfoPtr argInfo = call->gtCall.fgArgInfo;
5914     noway_assert(argInfo != NULL);
5915
5916     unsigned            argCount = argInfo->ArgCount();
5917     fgArgTabEntryPtr *  argTable = argInfo->ArgTable();
5918     fgArgTabEntryPtr    curArgTabEntry = NULL; 
5919
5920     for (unsigned i=0; i < argCount; i++)
5921     {
5922         curArgTabEntry = argTable[i];
5923         if (curArgTabEntry->argNum == argNum)
5924             return curArgTabEntry;
5925     }
5926     noway_assert(!"gtArgEntryByArgNum: argNum not found");
5927     return NULL;
5928 }
5929
5930 /*****************************************************************************
5931  *
5932  *  Given a GT_CALL node, access the fgArgInfo and find the entry
5933  *  that has the matching node and return the fgArgTableEntryPtr
5934  */
5935
5936 fgArgTabEntryPtr Compiler::gtArgEntryByNode(GenTreePtr call, GenTreePtr node)
5937 {
5938     noway_assert(call->IsCall());
5939     fgArgInfoPtr argInfo = call->gtCall.fgArgInfo;
5940     noway_assert(argInfo != NULL);
5941
5942     unsigned            argCount = argInfo->ArgCount();
5943     fgArgTabEntryPtr *  argTable = argInfo->ArgTable();
5944     fgArgTabEntryPtr    curArgTabEntry = NULL; 
5945
5946     for (unsigned i=0; i < argCount; i++)
5947     {
5948         curArgTabEntry = argTable[i];
5949         
5950         if (curArgTabEntry->node == node)
5951         {
5952             return curArgTabEntry;
5953         }
5954 #ifdef PROTO_JIT
5955         else if (node->OperGet() == GT_RELOAD && node->gtOp.gtOp1 == curArgTabEntry->node)
5956         {
5957             return curArgTabEntry;
5958         }
5959 #endif // PROTO_JIT
5960         else if (curArgTabEntry->parent != NULL)
5961         {
5962             assert(curArgTabEntry->parent->IsList());
5963             if (curArgTabEntry->parent->Current() == node)
5964                 return curArgTabEntry;
5965         }
5966         else // (curArgTabEntry->parent == NULL)
5967         {
5968             if (call->gtCall.gtCallObjp == node)
5969                 return curArgTabEntry;
5970         }
5971     }
5972     noway_assert(!"gtArgEntryByNode: node not found");
5973     return NULL;
5974 }
5975
5976 /*****************************************************************************
5977  *
5978  *  Find and return the entry with the given "lateArgInx".  Requires that one is found
5979  *  (asserts this).
5980  */
5981 fgArgTabEntryPtr Compiler::gtArgEntryByLateArgIndex(GenTreePtr call, unsigned lateArgInx)
5982 {
5983     noway_assert(call->IsCall());
5984     fgArgInfoPtr argInfo = call->gtCall.fgArgInfo;
5985     noway_assert(argInfo != NULL);
5986
5987     unsigned            argCount = argInfo->ArgCount();
5988     fgArgTabEntryPtr *  argTable = argInfo->ArgTable();
5989     fgArgTabEntryPtr    curArgTabEntry = NULL; 
5990
5991     for (unsigned i=0; i < argCount; i++)
5992     {
5993         curArgTabEntry = argTable[i];
5994         if (curArgTabEntry->lateArgInx == lateArgInx)
5995             return curArgTabEntry;
5996     }
5997     noway_assert(!"gtArgEntryByLateArgIndex: lateArgInx not found");
5998     return NULL;
5999 }
6000
6001
6002 /*****************************************************************************
6003  *
6004  *  Given an fgArgTabEntryPtr, return true if it is the 'this' pointer argument.
6005  */
6006 bool  Compiler::gtArgIsThisPtr(fgArgTabEntryPtr argEntry)
6007 {
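    // The 'this' pointer (gtCallObjp) is kept outside the argument lists, so its
    // fgArgTabEntry has no parent list node.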
6008     return (argEntry->parent == NULL);
6009 }
6010
6011 /*****************************************************************************
6012  *
6013  *  Create a node that will assign 'src' to 'dst'.
6014  */
6015
6016 GenTreePtr          Compiler::gtNewAssignNode(GenTreePtr dst, GenTreePtr src DEBUGARG(bool isPhiDefn))
6017 {
6018     var_types type = dst->TypeGet();
6019
6020     // ARM has HFA struct return values; HFA return values are received in registers from a GT_CALL
6021     // using struct assignment.
6022 #ifdef FEATURE_HFA
6023     assert(isPhiDefn || type != TYP_STRUCT || IsHfa(dst) || IsHfa(src));
6024 #elif defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
6025     // You need to use GT_COPYBLK for assigning structs
6026     // See impAssignStruct()
6027     assert(isPhiDefn || type != TYP_STRUCT || IsRegisterPassable(dst) || IsRegisterPassable(src));
6028 #else // !FEATURE_UNIX_AMD64_STRUCT_PASSING
6029     assert(isPhiDefn || type != TYP_STRUCT);
6030 #endif
6031
6032     /* Mark the target as being assigned */
6033
6034     if ((dst->gtOper == GT_LCL_VAR) || (dst->OperGet() == GT_LCL_FLD))
6035     {
6036         dst->gtFlags |= GTF_VAR_DEF;
6037         if (dst->IsPartialLclFld(this))
6038         {
6039             // We treat these partial writes as combined uses and defs.
6040             dst->gtFlags |= GTF_VAR_USEASG;
6041         }
6042     }
6043     dst->gtFlags |= GTF_DONT_CSE;
6044
6045     /* Create the assignment node */
6046
6047     GenTreePtr asg = gtNewOperNode(GT_ASG, type, dst, src);
6048
6049     /* Mark the expression as containing an assignment */
6050
6051     asg->gtFlags |= GTF_ASG;
6052
6053     return asg;
6054 }
6055
6056 // Creates a new Obj node.
6057 GenTreeObj* Compiler::gtNewObjNode(CORINFO_CLASS_HANDLE structHnd, GenTree* addr)
6058 {
6059     var_types nodeType   = impNormStructType(structHnd);
6060     assert(varTypeIsStruct(nodeType));
6061     return new (this, GT_OBJ) GenTreeObj(nodeType, addr, structHnd);
6062 }
6063
6064 // Creates a new CpObj node.
6065 // Parameters (exactly the same as MSIL CpObj):
6066 //
6067 //  dst        - The target to copy the struct to
6068 //  src        - The source to copy the struct from
6069 //  structHnd  - A class token that represents the type of object being copied. May be null
6070 //               if FEATURE_SIMD is enabled and the source has a SIMD type.
6071 //  isVolatile - Is this marked as volatile memory?
6072 GenTreeBlkOp* Compiler::gtNewCpObjNode(GenTreePtr dst,
6073                                        GenTreePtr src,
6074                                        CORINFO_CLASS_HANDLE structHnd,
6075                                        bool isVolatile)
6076 {
6077     size_t      size = 0;
6078     unsigned    slots = 0;
6079     unsigned    gcPtrCount = 0;
6080     BYTE *      gcPtrs = nullptr;
6081     var_types   type = TYP_STRUCT;
6082
6083     GenTreePtr  hndOrSize = nullptr;
6084
6085     GenTreeBlkOp* result = nullptr;
6086     
6087     bool useCopyObj = false;
6088
6089     // Intermediate SIMD operations may use SIMD types that are not used by the input IL.
6090     // In this case, the provided type handle will be null and the size of the copy will
6091     // be derived from the node's varType.
6092     if (structHnd == nullptr)
6093     {
6094 #if FEATURE_SIMD
6095         assert(src->OperGet() == GT_ADDR);
6096
6097         GenTree* srcValue = src->gtGetOp1();
6098
6099         type = srcValue->TypeGet();
6100         assert(varTypeIsSIMD(type));
6101
6102         size = genTypeSize(type);
6103 #else
6104         assert(!"structHnd should not be null if FEATURE_SIMD is not enabled!");
6105 #endif
6106     }
6107     else
6108     {
6109         // Get the size of the type
6110         size = info.compCompHnd->getClassSize(structHnd);
6111
6112         if (size >= TARGET_POINTER_SIZE)
6113         {
6114             slots      = (unsigned)(roundUp(size, TARGET_POINTER_SIZE) / TARGET_POINTER_SIZE);
6115             gcPtrs     = new (this, CMK_ASTNode) BYTE[slots];
6116
6117             type = impNormStructType(structHnd, gcPtrs, &gcPtrCount);
6118             if (varTypeIsEnregisterableStruct(type))
6119             {
6120                 if (dst->OperGet() == GT_ADDR)
6121                 {
6122                     GenTree* actualDst = dst->gtGetOp1();
6123                     assert((actualDst->TypeGet() == type) || !varTypeIsEnregisterableStruct(actualDst));
6124                     actualDst->gtType = type;
6125                 }
6126                 if (src->OperGet() == GT_ADDR)
6127                 {
6128                     GenTree* actualSrc = src->gtGetOp1();
6129                     assert((actualSrc->TypeGet() == type) || !varTypeIsEnregisterableStruct(actualSrc));
6130                     actualSrc->gtType = type;
6131                 }
6132             }
6133
6134             useCopyObj = gcPtrCount > 0;
6135         }
6136     }
6137
6138     // If the class being copied contains any GC pointers, we store a class handle
6139     // in the icon; otherwise we store the size in bytes to copy.
6140     //
6141     genTreeOps op;
6142     if (useCopyObj)
6143     {
6144         // This will be treated as a cpobj since we need to note GC info.
6145         // Store the class handle and mark the node.
6146         op = GT_COPYOBJ;
6147         hndOrSize = gtNewIconHandleNode((size_t)structHnd, GTF_ICON_CLASS_HDL);
6148         result = new (this, GT_COPYOBJ) GenTreeCpObj(gcPtrCount, slots, gcPtrs);
6149     }
6150     else
6151     {
6152         assert(gcPtrCount == 0);
6153
6154         // Doesn't need GC info. Treat operation as a cpblk
6155         op = GT_COPYBLK;
6156         hndOrSize = gtNewIconNode(size);
6157         result = new (this, GT_COPYBLK) GenTreeCpBlk();
6158         result->gtBlkOpGcUnsafe = false;
6159     }
6160
6161     gtBlockOpInit(result, op, dst, src, hndOrSize, isVolatile);
6162     return result;
6163 }
6164
6165 //------------------------------------------------------------------------
6166 // FixupInitBlkValue: Fixup the init value for an initBlk operation
6167 //
6168 // Arguments:
6169 //    asgType - The type of assignment that the initBlk is being transformed into
6170 //
6171 // Return Value:
6172 //    Modifies the constant value on this node to be the appropriate "fill"
6173 //    value for the initblk.
6174 //
6175 // Notes:
6176 //    The initBlk MSIL instruction takes a byte value, which must be
6177 //    extended to the size of the assignment when an initBlk is transformed
6178 //    to an assignment of a primitive type.
6179 //    This performs the appropriate extension.
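//    For example, a fill byte of 0xAB becomes 0xABAB for a 2-byte type and
//    0xABABABAB for a 4-byte type.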
6180
6181 void
6182 GenTreeIntCon::FixupInitBlkValue(var_types asgType)
6183 {
6184     assert(varTypeIsIntegralOrI(asgType));
6185     unsigned size = genTypeSize(asgType);
6186     if (size > 1)
6187     {
6188         size_t cns = gtIconVal;
6189         cns  = cns & 0xFF;
6190         cns |= cns << 8;
6191         if (size >= 4)
6192         {
6193             cns |= cns << 16;
6194 #ifdef _TARGET_64BIT_
6195             if (size == 8)
6196             {
6197                 cns |= cns << 32;
6198             }
6199 #endif // _TARGET_64BIT_
6200
6201             // Make the type used in the GT_IND node match for evaluation types.
6202             gtType = asgType;
6203
6204             // If we are using a GT_INITBLK on a GC type, the value being assigned has to be zero (null).
6205             assert(!varTypeIsGC(asgType) || (cns == 0));
6206         }
6207
6208         gtIconVal = cns;
6209     }
6210 }
6211
6212 // Initializes a BlkOp GenTree
6213 // Preconditions:
6214 //     - Result is a GenTreeBlkOp that is newly constructed by gtNewCpObjNode or gtNewBlkOpNode
6215 //
6216 // Parameters:
6217 //     - result is a GenTreeBlkOp node that is the node to be initialized.
6218 //     - oper must be either GT_INITBLK or GT_COPYBLK
6219 //     - dst is the target (destination) we want to either initialize or copy to
6220 //     - src is the init value for InitBlk or the source struct for CpBlk/CpObj
6221 //     - size is either the size of the buffer to copy/initialize or a class token
6222 //       in the case of CpObj.
6223 //     - volatil flag specifies if this node is a volatile memory operation.
6224 //
6225 // This procedure centralizes all the logic to both enforce proper structure and
6226 // to properly construct any InitBlk/CpBlk/CpObj node.
6227 void Compiler::gtBlockOpInit(GenTreePtr result,
6228                              genTreeOps oper,
6229                              GenTreePtr dst,
6230                              GenTreePtr srcOrFillVal,
6231                              GenTreePtr hndOrSize,
6232                              bool volatil)
6233 {
6234     assert(GenTree::OperIsBlkOp(oper));
6235
6236     assert(result->gtType == TYP_VOID);
6237     result->gtOper = oper;
6238
6239 #ifdef DEBUG
6240     // If this is a CpObj node, the caller must have already set
6241     // the node's additional members (gtGcPtrs, gtGcPtrCount, gtSlots).
6242     if(hndOrSize->OperGet() == GT_CNS_INT && hndOrSize->IsIconHandle(GTF_ICON_CLASS_HDL))
6243     {
6244         GenTreeCpObj* cpObjNode = result->AsCpObj();
6245
6246         assert(cpObjNode->gtGcPtrs != nullptr);
6247         assert(!IsUninitialized(cpObjNode->gtGcPtrs));
6248         assert(!IsUninitialized(cpObjNode->gtGcPtrCount) && cpObjNode->gtGcPtrCount > 0);
6249         assert(!IsUninitialized(cpObjNode->gtSlots) && cpObjNode->gtSlots > 0);
6250
6251         for (unsigned i = 0; i < cpObjNode->gtGcPtrCount; ++i)
6252         {
6253             CorInfoGCType t = (CorInfoGCType)cpObjNode->gtGcPtrs[i];
6254             switch(t)
6255             {
6256             case TYPE_GC_NONE:
6257             case TYPE_GC_REF:
6258             case TYPE_GC_BYREF:
6259             case TYPE_GC_OTHER:
6260                 break;
6261             default:
6262                 unreached();
6263             }
6264         }
6265     }
6266 #endif // DEBUG
6267
6268     /* In the case of CpBlk, we want to avoid generating
6269     * nodes where the source and destination are the same,
6270     * for two reasons: first, such a copy is useless;
6271     * second, it introduces issues in liveness, and
6272     * copying memory between overlapping locations is
6273     * undefined behavior, both per the ECMA standard and
6274     * per the memcpy semantics.
6275     *
6276     * NOTE: We'll only detect the case of the address of a local
6277     * and the local itself; any other, more complex expressions won't be
6278     * caught.
6279     *
6280     * TODO-Cleanup: though having this logic is goodness (i.e. it avoids self-assignment
6281     * of struct vars very early), it was added because fgInterBlockLocalVarLiveness()
6282     * isn't handling self-assignment of struct variables correctly.  This issue may not
6283     * surface if struct promotion is ON (which is the case on x86/arm).  But the
6284     * fundamental issue still exists and needs to be addressed.
6285     */
6286     GenTreePtr currSrc = srcOrFillVal;
6287     GenTreePtr currDst = dst;
6288     if (currSrc->OperGet() == GT_ADDR && currDst->OperGet() == GT_ADDR)
6289     {
6290         currSrc = currSrc->gtOp.gtOp1;
6291         currDst = currDst->gtOp.gtOp1;
6292     }
6293
6294     if (currSrc->OperGet() == GT_LCL_VAR && 
6295         currDst->OperGet() == GT_LCL_VAR &&
6296         currSrc->gtLclVarCommon.gtLclNum == currDst->gtLclVarCommon.gtLclNum)
6297     {
6298         // Make this a NOP
6299         result->gtBashToNOP();
6300         return;
6301     }
6302
6303
6304     /* Note that this use of a GT_LIST is different from all others   */
6305     /* in that the GT_LIST is used as a tuple [dest,src] rather       */
6306     /* than being a NULL-terminated list of GT_LIST nodes             */
6307     result->gtOp.gtOp1 = gtNewOperNode(GT_LIST, TYP_VOID,  /*        GT_[oper]          */
6308                                        dst, srcOrFillVal); /*        /      \           */
6309     result->gtOp.gtOp2 = hndOrSize;                        /*   GT_LIST      \          */
6310                                                            /*    /    \  [hndOrSize]    */
6311                                                            /* [dst] [srcOrFillVal]      */
6312
6313     // Propagate all effect flags from children
6314     result->gtFlags |= result->gtOp.gtOp1->gtFlags & GTF_ALL_EFFECT;
6315     result->gtFlags |= result->gtOp.gtOp2->gtFlags & GTF_ALL_EFFECT;
6316
6317     result->gtFlags |= (GTF_GLOB_REF | GTF_ASG);
6318
6319     // REVERSE_OPS is necessary because the use must occur before the def
6320     result->gtOp.gtOp1->gtFlags |= GTF_REVERSE_OPS;
6321
6322     if (result->gtOper == GT_INITBLK)
6323     {       
6324         result->gtFlags |= (dst->gtFlags  & GTF_EXCEPT) |
6325                            (hndOrSize->gtFlags & GTF_EXCEPT);
6326     }
6327     else 
6328     {
6329         result->gtFlags |= (dst->gtFlags  &  GTF_EXCEPT) |
6330                           (srcOrFillVal->gtFlags  &  GTF_EXCEPT) |
6331                           (hndOrSize->gtFlags &  GTF_EXCEPT);
6332
6333         // If the class being copied contains any GC pointers, we store a class handle
6334         // and we must set the flag GTF_BLK_HASGCPTR, so that the register predictor
6335         // knows that this GT_COPYBLK will use calls to the ByRef Assign helper
6336         //
6337         if ((hndOrSize->OperGet() == GT_CNS_INT) && hndOrSize->IsIconHandle(GTF_ICON_CLASS_HDL))
6338         {
6339             hndOrSize->gtFlags   |= GTF_DONT_CSE;     // We can't CSE the class handle
6340             result->gtFlags |= GTF_BLK_HASGCPTR;
6341         }
6342     }
6343
6344     if (volatil)
6345     {
6346         result->gtFlags |= GTF_BLK_VOLATILE;
6347     }
6348
6349 #ifdef FEATURE_SIMD
6350     if (oper == GT_COPYBLK                 && 
6351         srcOrFillVal->OperGet() == GT_ADDR && 
6352         dst->OperGet() == GT_ADDR)
6353     { 
6354         // If the source is a GT_SIMD node of SIMD type, then the dst lclvar struct
6355         // should be labeled as a SIMD-intrinsic-related struct.
6356         // This is done so that the morpher can transform any field accesses into
6357         // intrinsics, thus avoiding conflicting access methods (fields vs. whole-register).
6358         
6359         GenTreePtr srcChild = srcOrFillVal->gtGetOp1();
6360         GenTreePtr dstChild = dst->gtGetOp1();
6361
6362         if (dstChild->OperIsLocal()         &&
6363             varTypeIsStruct(dstChild)       &&
6364             srcChild->OperGet() == GT_SIMD  && 
6365             varTypeIsSIMD(srcChild))
6366         {
6367             unsigned lclNum = dst->gtGetOp1()->AsLclVarCommon()->GetLclNum();
6368             LclVarDsc* lclVarDsc = &lvaTable[lclNum];
6369             lclVarDsc->lvUsedInSIMDIntrinsic = true;
6370         }
6371         
6372     }
6373 #endif //FEATURE_SIMD
6374 }
6375
6376 //------------------------------------------------------------------------
6377 // gtNewBlkOpNode: Creates an InitBlk, CpBlk or CpObj node.
6378 //
6379 // Arguments:
6380 //    oper          - GT_COPYBLK, GT_INITBLK or GT_COPYOBJ
6381 //    dst           - Destination or target to copy to / initialize the buffer.
6382 //    srcOrFillVal  - Either the source to copy from or the byte value to fill the buffer.
6383 //    sizeOrClsTok  - The size of the buffer or a class token (in the case of CpObj).
6384 //    isVolatile    - Whether this is a volatile memory operation or not.
6385 //
6386 // Return Value:
6387 //    Returns the newly constructed and initialized block operation.
6388
6389 GenTreeBlkOp*
6390 Compiler::gtNewBlkOpNode(genTreeOps oper,
6391                          GenTreePtr dst, 
6392                          GenTreePtr srcOrFillVal,
6393                          GenTreePtr sizeOrClsTok,
6394                          bool isVolatile)
6395 {
6396     GenTreeBlkOp* result = new (this, oper) GenTreeBlkOp(oper);
6397     gtBlockOpInit(result, oper, dst, srcOrFillVal, sizeOrClsTok, isVolatile);
6398     return result;
6399 }
6400
6401 /*****************************************************************************
6402  *
6403  *  Clones the given tree and returns a copy of it.
6404  *  If 'complexOK' is false, the cloning is only done provided the tree
6405  *     is not too complex (whatever that may mean);
6406  *  if 'complexOK' is true, we try slightly harder to clone the tree.
6407  *  In either case, NULL is returned if the tree cannot be cloned.
6408  *
6409  *  Note that gtCloneExpr() does a more complete job if you can't
6410  *  handle this function failing.
6411  */
6412
6413 GenTreePtr          Compiler::gtClone(GenTree * tree, bool complexOK)
6414 {
6415     GenTreePtr  copy;
6416
6417     switch (tree->gtOper)
6418     {
6419     case GT_CNS_INT:
6420
6421 #if defined (LATE_DISASM)
6422         if (tree->IsIconHandle())
6423         {
6424             copy = gtNewIconHandleNode(tree->gtIntCon.gtIconVal,
6425                                        tree->gtFlags,
6426                                        tree->gtIntCon.gtFieldSeq,
6427                                        tree->gtIntCon.gtIconHdl.gtIconHdl1,
6428                                        tree->gtIntCon.gtIconHdl.gtIconHdl2);
6429             copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
6430             copy->gtType = tree->gtType;
6431         }
6432         else
6433 #endif
6434         {
6435             copy = new(this, GT_CNS_INT) GenTreeIntCon(tree->gtType, tree->gtIntCon.gtIconVal, tree->gtIntCon.gtFieldSeq 
6436                                                       );
6437             copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
6438         }
6439         break;
6440
6441     case GT_LCL_VAR:
6442         // Remember that the LclVar node has been cloned. The flag will be set
6443         // on 'copy' as well.
6444         tree->gtFlags |= GTF_VAR_CLONED;
6445         copy = gtNewLclvNode(tree->gtLclVarCommon.gtLclNum, tree->gtType,
6446                              tree->gtLclVar.gtLclILoffs);
6447         break;
6448
6449     case GT_LCL_FLD:
6450     case GT_LCL_FLD_ADDR:
6451         // Remember that the LclVar node has been cloned. The flag will be set
6452         // on 'copy' as well.
6453         tree->gtFlags |= GTF_VAR_CLONED;
6454         copy = new (this, tree->gtOper)
6455             GenTreeLclFld(tree->gtOper, tree->TypeGet(), tree->gtLclFld.gtLclNum, tree->gtLclFld.gtLclOffs);
6456         copy->gtLclFld.gtFieldSeq = tree->gtLclFld.gtFieldSeq;
6457         break;
6458
6459     case GT_CLS_VAR:
6460         copy = new(this, GT_CLS_VAR) GenTreeClsVar(tree->gtType, tree->gtClsVar.gtClsVarHnd, tree->gtClsVar.gtFieldSeq);
6461         break;
6462
6463     case GT_REG_VAR:
6464         assert(!"clone regvar");
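        // Fall through to the default handling below.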
6465
6466     default:
6467         if  (!complexOK)
6468             return NULL;
6469
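        // With 'complexOK', we only handle a few simple shapes below: a field
        // reference, an ADD of two leaves, or an ADDR of a clonable operand.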
6470         if  (tree->gtOper == GT_FIELD)
6471         {
6472             GenTreePtr  objp;
6473
6474             // copied from line 9850
6475
6476             objp = 0;
6477             if  (tree->gtField.gtFldObj)
6478             {
6479                 objp = gtClone(tree->gtField.gtFldObj, false);
6480                 if  (!objp)
6481                     return  objp;
6482             }
6483
6484             copy = gtNewFieldRef(tree->TypeGet(),
6485                                  tree->gtField.gtFldHnd,
6486                                  objp,
6487                                  tree->gtField.gtFldOffset);
6488             copy->gtField.gtFldMayOverlap = tree->gtField.gtFldMayOverlap;
6489         }
6490         else if  (tree->gtOper == GT_ADD)
6491         {
6492             GenTreePtr  op1 = tree->gtOp.gtOp1;
6493             GenTreePtr  op2 = tree->gtOp.gtOp2;
6494
6495             if  (op1->OperIsLeaf() &&
6496                  op2->OperIsLeaf())
6497             {
6498                 op1 = gtClone(op1);
6499                 if (op1 == 0)
6500                     return 0;
6501                 op2 = gtClone(op2);
6502                 if (op2 == 0)
6503                     return 0;
6504
6505                 copy =  gtNewOperNode(GT_ADD, tree->TypeGet(), op1, op2);
6506             }
6507             else
6508             {
6509                 return NULL;
6510             }
6511         }
6512         else if (tree->gtOper == GT_ADDR)
6513         {
6514             GenTreePtr  op1 = gtClone(tree->gtOp.gtOp1);
6515             if (op1 == 0)
6516                 return NULL;
6517             copy = gtNewOperNode(GT_ADDR, tree->TypeGet(), op1);
6518         }
6519         else
6520         {
6521             return NULL;
6522         }
6523
6524         break;
6525     }
6526
6527     copy->gtFlags |= tree->gtFlags & ~GTF_NODE_MASK;
6528 #if defined(DEBUG)
6529     copy->gtDebugFlags |= tree->gtDebugFlags & ~GTF_DEBUG_NODE_MASK;
6530 #endif // defined(DEBUG)
6531
6532     return copy;
6533 }
6534
6535 /*****************************************************************************
6536  *
6537  *  Clones the given tree value and returns a copy of the given tree. Any
6538  *  references to local variable varNum will be replaced with the integer
6539  *  constant varVal.
6540  */
6541
6542 GenTreePtr          Compiler::gtCloneExpr(GenTree * tree,
6543                                           unsigned  addFlags,
6544                                           unsigned  varNum,   // = (unsigned)-1
6545                                           int       varVal)
6546 {
6547     if (tree == NULL)
6548         return NULL;
6549
6550     /* Figure out what kind of a node we have */
6551
6552     genTreeOps      oper = tree->OperGet();
6553     unsigned        kind = tree->OperKind();
6554     GenTree *       copy;
6555
6556     /* Is this a constant or leaf node? */
6557
6558     if  (kind & (GTK_CONST|GTK_LEAF))
6559     {
6560         switch (oper)
6561         {
6562         case GT_CNS_INT:
6563
6564 #if defined (LATE_DISASM)
6565             if  (tree->IsIconHandle())
6566             {
6567                 copy = gtNewIconHandleNode(tree->gtIntCon.gtIconVal,
6568                                            tree->gtFlags,
6569                                            tree->gtIntCon.gtFieldSeq,
6570                                            tree->gtIntCon.gtIconFld.gtIconCPX,
6571                                            tree->gtIntCon.gtIconFld.gtIconCls);
6572                 copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
6573                 copy->gtType = tree->gtType;
6574             }
6575             else
6576 #endif
6577             {
6578                 copy = gtNewIconNode      (tree->gtIntCon.gtIconVal,
6579                                            tree->gtType);
6580                 copy->gtIntCon.gtCompileTimeHandle = tree->gtIntCon.gtCompileTimeHandle;
6581                 copy->gtIntCon.gtFieldSeq = tree->gtIntCon.gtFieldSeq;
6582             }
6583             goto DONE;
6584
6585         case GT_CNS_LNG:
6586             copy = gtNewLconNode(tree->gtLngCon.gtLconVal);
6587             goto DONE;
6588
6589         case GT_CNS_DBL:
6590             copy = gtNewDconNode(tree->gtDblCon.gtDconVal);
6591             copy->gtType = tree->gtType;    // keep the same type  
6592             goto DONE;
6593
6594         case GT_CNS_STR:
6595             copy = gtNewSconNode(tree->gtStrCon.gtSconCPX, tree->gtStrCon.gtScpHnd);
6596             goto DONE;
6597
6598         case GT_LCL_VAR:
6599
6600             if  (tree->gtLclVarCommon.gtLclNum == varNum)
6601             {
6602                 copy = gtNewIconNode(varVal, tree->gtType);
6603             }
6604             else
6605             {
6606                 // Remember that the LclVar node has been cloned. The flag will
6607                 // be set on 'copy' as well.
6608                 tree->gtFlags |= GTF_VAR_CLONED;
6609                 copy = gtNewLclvNode(tree->gtLclVar.gtLclNum, tree->gtType,
6610                                      tree->gtLclVar.gtLclILoffs);
6611                 copy->AsLclVarCommon()->SetSsaNum(tree->AsLclVarCommon()->GetSsaNum());
6612             }
6613             copy->gtFlags = tree->gtFlags;
6614             goto DONE;
6615
6616         case GT_LCL_FLD:
6617             if  (tree->gtLclFld.gtLclNum == varNum)
6618             {
6619                 IMPL_LIMITATION("replacing GT_LCL_FLD with a constant");
6620             }
6621             else
6622             {
6623                 // Remember that the LclVar node has been cloned. The flag will
6624                 // be set on 'copy' as well.
6625                 tree->gtFlags |= GTF_VAR_CLONED;
6626                 copy = new(this, GT_LCL_FLD) GenTreeLclFld(tree->TypeGet(), tree->gtLclFld.gtLclNum, tree->gtLclFld.gtLclOffs
6627                                                           );
6628                 copy->gtLclFld.gtFieldSeq = tree->gtLclFld.gtFieldSeq;
6629                 copy->gtFlags = tree->gtFlags;
6630             }
6631             goto DONE;
6632
6633         case GT_CLS_VAR:
6634             copy = new(this, GT_CLS_VAR) GenTreeClsVar(tree->TypeGet(), tree->gtClsVar.gtClsVarHnd, tree->gtClsVar.gtFieldSeq);
6635             goto DONE;
6636
6637         case  GT_RET_EXPR:            
6638             copy = gtNewInlineCandidateReturnExpr(tree->gtRetExpr.gtInlineCandidate, tree->gtType);
6639             goto DONE;
6640
6641         case GT_MEMORYBARRIER:
6642             copy = new (this, GT_MEMORYBARRIER) GenTree(GT_MEMORYBARRIER, TYP_VOID);
6643             goto DONE;
6644
6645         case GT_ARGPLACE:
6646             copy = gtNewArgPlaceHolderNode(tree->gtType, tree->gtArgPlace.gtArgPlaceClsHnd);
6647             goto DONE;
6648
6649         case GT_REG_VAR:
6650             NO_WAY("Cloning of GT_REG_VAR node not supported");
6651             goto DONE;
6652
6653         case GT_FTN_ADDR:
6654             copy = new (this, oper) GenTreeFptrVal(tree->gtType, tree->gtFptrVal.gtFptrMethod);
6655
6656 #ifdef FEATURE_READYTORUN_COMPILER
6657             copy->gtFptrVal.gtEntryPoint = tree->gtFptrVal.gtEntryPoint;
6658             copy->gtFptrVal.gtLdftnResolvedToken = tree->gtFptrVal.gtLdftnResolvedToken;
6659 #endif
6660             goto DONE;
6661
6662         case GT_CATCH_ARG:
6663         case GT_NO_OP:
6664             copy = new (this, oper) GenTree(oper, tree->gtType);
6665             goto DONE;
6666
6667 #if !FEATURE_EH_FUNCLETS
6668         case GT_END_LFIN:
6669 #endif // !FEATURE_EH_FUNCLETS
6670         case GT_JMP:
6671             copy = new (this, oper) GenTreeVal(oper, tree->gtType, tree->gtVal.gtVal1);
6672             goto DONE;
6673
6674         case GT_LABEL:
6675             copy = new (this, oper) GenTreeLabel(tree->gtLabel.gtLabBB);
6676             goto DONE;
6677
6678         default:
6679             NO_WAY("Cloning of node not supported");
6680             goto DONE;
6681
6682         }
6683     }
6684
6685     /* Is it a 'simple' unary/binary operator? */
6686
6687     if  (kind & GTK_SMPOP)
6688     {
6689         /* If necessary, make sure we allocate a "fat" tree node */
6690
6691 #if SMALL_TREE_NODES
6692         switch (oper)
6693         {
6694             /* These nodes sometimes get bashed to "fat" ones */
6695
6696         case GT_MUL:
6697         case GT_DIV:
6698         case GT_MOD:
6699         
6700         case GT_UDIV:
6701         case GT_UMOD:
6702
6703             //  In the implementation of gtNewLargeOperNode you have
6704             //  to give an oper that will create a small node,
6705             //  otherwise it asserts.
6706             //  
6707             if  (GenTree::s_gtNodeSizes[oper] == TREE_NODE_SZ_SMALL)
6708             {
6709                 copy = gtNewLargeOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, 
6710                                           tree->OperIsBinary() ? tree->gtOp.gtOp2 : NULL);
6711             }
6712             else  // Always a large tree
6713             {
6714                 if (tree->OperIsBinary())
6715                 {
6716                     copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2);
6717                 }
6718                 else
6719                 {
6720                     copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1);
6721                 }
6722             }
6723             break;
6724
6725         case GT_CAST:
6726             copy = new (this, LargeOpOpcode()) GenTreeCast(tree->TypeGet(), tree->gtCast.CastOp(), tree->gtCast.gtCastType
6727                                                            DEBUGARG(/*largeNode*/TRUE));
6728             break;
6729
6730             // The nodes below this are not bashed, so they can be allocated at their individual sizes.
6731
6732         case GT_LIST:
6733             // This is ridiculous, but would go away if we made a stronger distinction between argument lists, whose
6734             // second argument *must* be an arglist*, and the uses of LIST in copyblk and initblk.
6735             if (tree->gtOp.gtOp2 != NULL && tree->gtOp.gtOp2->OperGet() == GT_LIST)
6736             {
6737                 copy = new (this, GT_LIST) GenTreeArgList(tree->gtOp.gtOp1, tree->gtOp.gtOp2->AsArgList());
6738             }
6739             else
6740             {
6741                 copy = new (this, GT_LIST) GenTreeOp(GT_LIST, TYP_VOID, tree->gtOp.gtOp1, tree->gtOp.gtOp2);
6742             }
6743             break;
6744
6745         case GT_INDEX:
6746             {
6747                 GenTreeIndex* asInd = tree->AsIndex();
6748                 copy = new (this, GT_INDEX) GenTreeIndex(asInd->TypeGet(), asInd->Arr(), asInd->Index(), asInd->gtIndElemSize);
6749                 copy->AsIndex()->gtStructElemClass = asInd->gtStructElemClass;
6750             }
6751             break;
6752
6753         case GT_ARR_LENGTH:
6754             copy = new (this, GT_ARR_LENGTH) GenTreeArrLen(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtArrLen.ArrLenOffset());
6755             break;
6756
6757         case GT_ARR_INDEX:
6758             copy = new (this, GT_ARR_INDEX) 
6759                 GenTreeArrIndex(tree->TypeGet(),
6760                                 gtCloneExpr(tree->gtArrIndex.ArrObj(), addFlags, varNum, varVal),
6761                                 gtCloneExpr(tree->gtArrIndex.IndexExpr(), addFlags, varNum, varVal),
6762                                 tree->gtArrIndex.gtCurrDim,
6763                                 tree->gtArrIndex.gtArrRank,
6764                                 tree->gtArrIndex.gtArrElemType);
6765             break;
6766     
6767         case GT_QMARK:
6768             copy = new (this, GT_QMARK) GenTreeQmark(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2, this);
6769             VarSetOps::AssignAllowUninitRhs(this, copy->gtQmark.gtThenLiveSet, tree->gtQmark.gtThenLiveSet);
6770             VarSetOps::AssignAllowUninitRhs(this, copy->gtQmark.gtElseLiveSet, tree->gtQmark.gtElseLiveSet);
6771             break;
6772
6773         case GT_OBJ:
6774             copy = new (this, GT_OBJ) GenTreeObj(tree->TypeGet(), tree->gtOp.gtOp1, tree->AsObj()->gtClass);
6775             break;
6776
6777         case GT_BOX:
6778             copy = new (this, GT_BOX) GenTreeBox(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtBox.gtAsgStmtWhenInlinedBoxValue);
6779             break;
6780
6781         case GT_INTRINSIC:
6782             copy = new (this, GT_INTRINSIC) GenTreeIntrinsic(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2, tree->gtIntrinsic.gtIntrinsicId,
6783                                                    tree->gtIntrinsic.gtMethodHandle);
6784 #ifdef FEATURE_READYTORUN_COMPILER
6785             copy->gtIntrinsic.gtEntryPoint = tree->gtIntrinsic.gtEntryPoint;
6786 #endif
6787             break;
6788
6789         case GT_COPYOBJ:
6790             {
6791                 GenTreeCpObj* cpObjOp = tree->AsCpObj();
6792                 assert(cpObjOp->gtGcPtrCount > 0);
6793                 copy = gtCloneCpObjNode(cpObjOp);
6794             }
6795             break;
6796
6797         case GT_INITBLK:
6798             {
6799                 GenTreeInitBlk* initBlkOp = tree->AsInitBlk();
6800                 copy = gtNewBlkOpNode(oper,
6801                                         initBlkOp->Dest(),
6802                                         initBlkOp->InitVal(),
6803                                         initBlkOp->Size(),
6804                                         initBlkOp->IsVolatile());
6805             }
6806             break;
6807
6808         case GT_COPYBLK:
6809             {
6810                 GenTreeCpBlk* cpBlkOp = tree->AsCpBlk();
6811                 copy = gtNewBlkOpNode(oper,
6812                                       cpBlkOp->Dest(),
6813                                       cpBlkOp->Source(),
6814                                       cpBlkOp->Size(),
6815                                       cpBlkOp->IsVolatile());
6816                 copy->AsCpBlk()->gtBlkOpGcUnsafe = cpBlkOp->gtBlkOpGcUnsafe;
6817             }
6818             break;
6819
6820         case GT_LEA:
6821             {
6822                 GenTreeAddrMode* addrModeOp = tree->AsAddrMode();
6823                 copy = new(this, GT_LEA) GenTreeAddrMode(addrModeOp->TypeGet(),
6824                                                          addrModeOp->Base(),
6825                                                          addrModeOp->Index(),
6826                                                          addrModeOp->gtScale,
6827                                                          addrModeOp->gtOffset);
6828             }
6829             break;
6830
6831         case GT_COPY:
6832         case GT_RELOAD:
6833             {
6834                 copy = new(this, oper) GenTreeCopyOrReload(oper, tree->TypeGet(), tree->gtGetOp1());
6835             }
6836             break;
6837
6838 #ifdef FEATURE_SIMD
6839         case GT_SIMD:
6840             {
6841                 GenTreeSIMD *simdOp = tree->AsSIMD();
6842                 copy = gtNewSIMDNode(simdOp->TypeGet(), simdOp->gtGetOp1(), simdOp->gtGetOp2(),
6843                                      simdOp->gtSIMDIntrinsicID, simdOp->gtSIMDBaseType, simdOp->gtSIMDSize);
6844             }
6845             break;
6846 #endif
6847
6848         default:
6849             assert(!GenTree::IsExOp(tree->OperKind()) && tree->OperIsSimple());
6850             // We're in the SimpleOp case, so it's always unary or binary.
6851             if (GenTree::OperIsUnary(tree->OperGet()))
6852             {
6853                 copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, /*doSimplifications*/false);
6854             }
6855             else
6856             {
6857                 assert(GenTree::OperIsBinary(tree->OperGet()));
6858                 copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2);
6859             }
6860             break;
6861         }
6862 #else
6863         // We're in the SimpleOp case, so it's always unary or binary.
6864         copy = gtNewOperNode(oper, tree->TypeGet(), tree->gtOp.gtOp1, tree->gtOp.gtOp2);
6865 #endif
6866
6867         // Some flags are conceptually part of the gtOper, and should be copied immediately.
6868         if (tree->gtOverflowEx())
6869         {
6870             copy->gtFlags |= GTF_OVERFLOW;
6871         }
6872         if (copy->OperGet() == GT_CAST)
6873         {
6874             copy->gtFlags |= (tree->gtFlags & GTF_UNSIGNED);
6875         }
6876
6877         if  (tree->gtOp.gtOp1)
6878         {
6879             copy->gtOp.gtOp1 = gtCloneExpr(tree->gtOp.gtOp1, addFlags, varNum, varVal);
6880         }
6881
6882         if  (tree->gtGetOp2())
6883         {
6884             copy->gtOp.gtOp2 = gtCloneExpr(tree->gtOp.gtOp2, addFlags, varNum, varVal);
6885         }
6886
6887         
6888         /* Flags */
6889         addFlags |= tree->gtFlags;
6890
6891         // Copy any node annotations, if necessary.
6892         switch (tree->gtOper)
6893         {
6894         case GT_ASG:
6895             {
6896                 IndirectAssignmentAnnotation* pIndirAnnot = nullptr;
6897                 if (m_indirAssignMap != NULL && GetIndirAssignMap()->Lookup(tree, &pIndirAnnot))
6898                 {
6899                     IndirectAssignmentAnnotation* pNewIndirAnnot = 
6900                         new (this, CMK_Unknown) IndirectAssignmentAnnotation(pIndirAnnot->m_lclNum,
6901                                                                                            pIndirAnnot->m_fieldSeq, 
6902                                                                                            pIndirAnnot->m_isEntire);
6903                     GetIndirAssignMap()->Set(copy, pNewIndirAnnot);
6904                 }
6905             }
6906             break;
6907
6908         case GT_STOREIND:
6909         case GT_IND:
6910             if (tree->gtFlags & GTF_IND_ARR_INDEX)
6911             {
6912                 ArrayInfo arrInfo;
6913                 bool b = GetArrayInfoMap()->Lookup(tree, &arrInfo);
6914                 assert(b);
6915                 GetArrayInfoMap()->Set(copy, arrInfo);
6916             }
6917             break;
6918
6919         default:
6920             break;
6921         }
6922
6923 #ifdef  DEBUG
6924         /* GTF_NODE_MASK should not be propagated from 'tree' to 'copy' */
6925         addFlags &= ~GTF_NODE_MASK;
6926 #endif
6927
6928         // Effects flags propagate upwards.
6929         if (copy->gtOp.gtOp1 != nullptr)
6930             copy->gtFlags |= (copy->gtOp.gtOp1->gtFlags & GTF_ALL_EFFECT);
6931         if (copy->gtGetOp2() != nullptr)
6932             copy->gtFlags |= (copy->gtGetOp2()->gtFlags & GTF_ALL_EFFECT);
6933
6934         // The early morph for TailCall creates a GT_NOP with GTF_REG_VAL flag set
6935         // Thus we have to copy the gtRegNum/gtRegPair value if we clone it here.
6936         //
6937         if (addFlags & GTF_REG_VAL)
6938         {
6939             copy->CopyReg(tree);
6940         }
6941
6942         // We can call gtCloneExpr() before we have called fgMorph, e.g. when we expand a GT_INDEX node in fgMorphArrayIndex().
6943         // The method gtFoldExpr() expects to be run after fgMorph, so it will set the GTF_DEBUG_NODE_MORPHED
6944         // flag on nodes that it adds/modifies; then, when we call fgMorph, we will assert.
6945         // We really only need to fold when this method is used to replace references to
6946         // a local variable with an integer constant.
6947         //
6948         if (varNum != (unsigned) -1)
6949         {
6950             /* Try to do some folding */
6951             copy = gtFoldExpr(copy);
6952         }
6953
6954         goto DONE;
6955     }
6956
6957     /* See what kind of a special operator we have here */
6958
6959     switch  (oper)
6960     {
6961     case GT_STMT:
6962         copy = gtCloneExpr(tree->gtStmt.gtStmtExpr, addFlags, varNum, varVal);
6963         copy = gtNewStmt(copy, tree->gtStmt.gtStmtILoffsx);
6964         goto DONE;
6965
6966     case GT_CALL:
6967
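        // Allocate a fresh call node and copy each field; the 'this' pointer and
        // the argument lists are cloned recursively below.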
6968         copy = new(this, GT_CALL) GenTreeCall(tree->TypeGet());
6969
6970         copy->gtCall.gtCallObjp     = tree->gtCall.gtCallObjp    ? gtCloneExpr(tree->gtCall.gtCallObjp,    addFlags, varNum, varVal) : NULL;
6971         copy->gtCall.gtCallArgs     = tree->gtCall.gtCallArgs    ? gtCloneExpr(tree->gtCall.gtCallArgs,    addFlags, varNum, varVal)->AsArgList() : NULL;
6972         copy->gtCall.gtCallMoreFlags= tree->gtCall.gtCallMoreFlags;
6973         copy->gtCall.gtCallLateArgs  = tree->gtCall.gtCallLateArgs ? gtCloneExpr(tree->gtCall.gtCallLateArgs, addFlags, varNum, varVal)->AsArgList() : NULL;
6974
6975 #if !FEATURE_FIXED_OUT_ARGS
6976         copy->gtCall.regArgList     = tree->gtCall.regArgList;
6977         copy->gtCall.regArgListCount= tree->gtCall.regArgListCount;
6978 #endif
6979
6980         // The call sig comes from the EE and doesn't change throughout the compilation process, meaning
6981         // we only really need one physical copy of it. Therefore a shallow pointer copy will suffice.
6982         // (Note that this still holds even if the tree we are cloning was created by an inlinee compiler,
6983         // because the inlinee still uses the inliner's memory allocator anyway.)
6984         copy->gtCall.callSig        = tree->gtCall.callSig;
6985
6986         copy->gtCall.gtCallType     = tree->gtCall.gtCallType;
6987         copy->gtCall.gtReturnType   = tree->gtCall.gtReturnType;
6988         copy->gtCall.gtControlExpr      = tree->gtCall.gtControlExpr;
6989
6990         /* Copy the union */
6991         if (tree->gtCall.gtCallType == CT_INDIRECT)
6992         {
6993             copy->gtCall.gtCallCookie   = tree->gtCall.gtCallCookie  ? gtCloneExpr(tree->gtCall.gtCallCookie,  addFlags, varNum, varVal) : NULL;
6994             copy->gtCall.gtCallAddr = tree->gtCall.gtCallAddr    ? gtCloneExpr(tree->gtCall.gtCallAddr,    addFlags, varNum, varVal) : NULL;
6995         }
6996         else if (tree->gtFlags & GTF_CALL_VIRT_STUB)
6997         {
6998             copy->gtCall.gtCallMethHnd = tree->gtCall.gtCallMethHnd;
6999             copy->gtCall.gtStubCallStubAddr = tree->gtCall.gtStubCallStubAddr;
7000         }
7001         else
7002         {
7003             copy->gtCall.gtCallMethHnd = tree->gtCall.gtCallMethHnd;
7004             copy->gtCall.gtInlineCandidateInfo = tree->gtCall.gtInlineCandidateInfo;
7005         }
7006
7007         if (tree->gtCall.fgArgInfo)
7008         {
7009             // Create and initialize the fgArgInfo for our copy of the call tree
7010             copy->gtCall.fgArgInfo = new (this, CMK_Unknown) fgArgInfo(copy, tree);
7011         }
7012         else
7013         {
7014             copy->gtCall.fgArgInfo = NULL;
7015         }
7016         copy->gtCall.gtRetClsHnd = tree->gtCall.gtRetClsHnd;
7017
7018 #if FEATURE_MULTIREG_RET
7019         copy->gtCall.gtReturnTypeDesc = tree->gtCall.gtReturnTypeDesc;
7020 #endif
7021
7022 #ifdef LEGACY_BACKEND
7023         copy->gtCall.gtCallRegUsedMask = tree->gtCall.gtCallRegUsedMask;
7024 #endif // LEGACY_BACKEND
7025
7026 #ifdef FEATURE_READYTORUN_COMPILER
7027         copy->gtCall.setEntryPoint(tree->gtCall.gtEntryPoint);
7028 #endif
7029
7030 #ifdef DEBUG
7031         copy->gtCall.gtInlineObservation = tree->gtCall.gtInlineObservation;
7032 #endif
7033
7034         copy->AsCall()->CopyOtherRegFlags(tree->AsCall());
7035         break;
7036
7037     case GT_FIELD:
7038
7039         copy = gtNewFieldRef(tree->TypeGet(),
7040                              tree->gtField.gtFldHnd,
7041                              NULL,
7042                              tree->gtField.gtFldOffset);
7043
7044         copy->gtField.gtFldObj  = tree->gtField.gtFldObj  ? gtCloneExpr(tree->gtField.gtFldObj , addFlags, varNum, varVal) : 0;
7045         copy->gtField.gtFldMayOverlap = tree->gtField.gtFldMayOverlap;
7046 #ifdef FEATURE_READYTORUN_COMPILER
7047         copy->gtField.gtFieldLookup = tree->gtField.gtFieldLookup;
7048 #endif
7049
7050         break;
7051
7052     case GT_ARR_ELEM:
7053         {
7054             GenTreePtr inds[GT_ARR_MAX_RANK];
7055             for (unsigned dim = 0; dim < tree->gtArrElem.gtArrRank; dim++) 
7056                 inds[dim] = gtCloneExpr(tree->gtArrElem.gtArrInds[dim], addFlags, varNum, varVal);
7057             copy = new (this, GT_ARR_ELEM) 
7058                 GenTreeArrElem(tree->TypeGet(), 
7059                                gtCloneExpr(tree->gtArrElem.gtArrObj, addFlags, varNum, varVal),
7060                                tree->gtArrElem.gtArrRank,
7061                                tree->gtArrElem.gtArrElemSize,
7062                                tree->gtArrElem.gtArrElemType,
7063                                &inds[0]
7064                               );
7065         }
7066         break;
7067
7068     case GT_ARR_OFFSET:
7069         {
7070             copy = new (this, GT_ARR_OFFSET) 
7071                 GenTreeArrOffs(tree->TypeGet(),
7072                                gtCloneExpr(tree->gtArrOffs.gtOffset, addFlags, varNum, varVal),
7073                                gtCloneExpr(tree->gtArrOffs.gtIndex, addFlags, varNum, varVal),
7074                                gtCloneExpr(tree->gtArrOffs.gtArrObj, addFlags, varNum, varVal),
7075                                tree->gtArrOffs.gtCurrDim,
7076                                tree->gtArrOffs.gtArrRank,
7077                                tree->gtArrOffs.gtArrElemType);
7078         }
7079         break;
7080
7081     case GT_CMPXCHG:
7082         copy = new (this, GT_CMPXCHG) 
7083             GenTreeCmpXchg(tree->TypeGet(),
7084                            gtCloneExpr(tree->gtCmpXchg.gtOpLocation, addFlags, varNum, varVal),
7085                            gtCloneExpr(tree->gtCmpXchg.gtOpValue, addFlags, varNum, varVal),
7086                            gtCloneExpr(tree->gtCmpXchg.gtOpComparand, addFlags, varNum, varVal));
7087         break;
7088
7089     case GT_ARR_BOUNDS_CHECK:
7090 #ifdef FEATURE_SIMD
7091     case GT_SIMD_CHK:
7092 #endif // FEATURE_SIMD
7093         copy = new (this, oper) 
7094             GenTreeBoundsChk(oper,
7095                              tree->TypeGet(),
7096                              gtCloneExpr(tree->gtBoundsChk.gtArrLen, addFlags, varNum, varVal),
7097                              gtCloneExpr(tree->gtBoundsChk.gtIndex, addFlags, varNum, varVal),
7098                              tree->gtBoundsChk.gtThrowKind);
7099         break;
7100
7101
7102     default:
7103 #ifdef  DEBUG
7104         gtDispTree(tree);
7105 #endif
7106         NO_WAY("unexpected operator");
7107     }
7108
7109 DONE:
7110
7111     // If it has a zero-offset field seq, copy annotation.
7112     if (tree->TypeGet() == TYP_BYREF)
7113     {
7114         FieldSeqNode* fldSeq = nullptr;
7115         if (GetZeroOffsetFieldMap()->Lookup(tree, &fldSeq))
7116             GetZeroOffsetFieldMap()->Set(copy, fldSeq);
7117     }
7118
7119     copy->gtVNPair = tree->gtVNPair;  // A cloned tree gets the original's Value number pair
7120
7121     /* We assume the FP stack level will be identical */
7122
7123     copy->gtCopyFPlvl(tree);
7124
7125     /* Compute the flags for the copied node. Note that we can do this only
7126        if we didn't gtFoldExpr(copy) */
7127
7128     if (copy->gtOper == oper)
7129     {
7130         addFlags |= tree->gtFlags;
7131
7132 #ifdef  DEBUG
7133         /* GTF_NODE_MASK should not be propagated from 'tree' to 'copy' */
7134         addFlags &= ~GTF_NODE_MASK;
7135 #endif
7136         // Some other flags depend on the context of the expression, and should not be preserved.
7137         // For example, GTF_RELOP_QMARK:
7138         if (copy->OperKind() & GTK_RELOP)
7139             addFlags &= ~GTF_RELOP_QMARK;
7140         // On the other hand, if we're creating such a context, restore this flag.
7141         if (copy->OperGet() == GT_QMARK)
7142         {
7143             copy->gtOp.gtOp1->gtFlags |= GTF_RELOP_QMARK;
7144         }
7145
7146         copy->gtFlags |= addFlags;
7147     }
7148
7149     /* GTF_COLON_COND should be propagated from 'tree' to 'copy' */
7150     copy->gtFlags |= (tree->gtFlags & GTF_COLON_COND);
7151
7152 #if defined(DEBUG)
7153     // Non-node debug flags should be propagated from 'tree' to 'copy'
7154     copy->gtDebugFlags |= (tree->gtDebugFlags & ~GTF_DEBUG_NODE_MASK);
7155 #endif
7156
7157     /* Make sure to copy back fields that may have been initialized */
7158
7159     copy->CopyRawCosts(tree);
7160     copy->gtRsvdRegs = tree->gtRsvdRegs;
7161     copy->CopyReg(tree);
7162     return  copy;
7163 }
7164
7165 //------------------------------------------------------------------------
7166 // gtReplaceTree: Replace a tree with a new tree.
7167 //
7168 // Arguments:
7169 //    stmt            - The top-level root stmt of the tree being replaced.
7170 //                      Must not be null.
7171 //    tree            - The tree being replaced. Must not be null.
7172 //    replacementTree - The replacement tree. Must not be null.
7173 //
7174 // Return Value:
7175 //    Returns the tree node that actually replaces the old tree.
7176 //
7177 // Assumptions:
7178 //    The sequencing of the stmt has been done.
7179 //
7180 // Notes:
7181 //    The caller must ensure that the original statement has been sequenced, 
7182 //    but this method will sequence 'replacementTree', and insert it into the 
7183 //    proper place in the statement sequence.
7184
7185 GenTreePtr    Compiler::gtReplaceTree(GenTreePtr     stmt,
7186                                       GenTreePtr     tree,
7187                                       GenTreePtr     replacementTree)
7188 {
7189     assert(fgStmtListThreaded);
7190     assert(tree != nullptr);
7191     assert(stmt != nullptr);
7192     assert(replacementTree != nullptr);
7193
7194     GenTreePtr* treePtr = nullptr;
7195     GenTreePtr  treeParent = tree->gtGetParent(&treePtr);
7196
7197     assert(treeParent != nullptr || tree == stmt->gtStmt.gtStmtExpr);
7198
7199     if (treePtr == nullptr)
7200     {
7201         // Replace the stmt expr and rebuild the linear order for "stmt".
7202         assert(treeParent == nullptr);
7203         assert(fgOrder != FGOrderLinear);
7204         stmt->gtStmt.gtStmtExpr = replacementTree;
7205         fgSetStmtSeq(stmt);
7206     }
7207     else
7208     {
7209         assert(treeParent != nullptr);
7210
7211         GenTreePtr treeFirstNode = fgGetFirstNode(tree);
7212         GenTreePtr treeLastNode = tree;
7213         GenTreePtr treePrevNode = treeFirstNode->gtPrev;
7214         GenTreePtr treeNextNode = treeLastNode->gtNext;
7215
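        // Hook 'replacementTree' into the parent's operand slot that pointed at 'tree'.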
7216         *treePtr = replacementTree;
7217  
7218         // Build the linear order for "replacementTree".
7219         fgSetTreeSeq(replacementTree, treePrevNode);
7220
7221         // Restore linear-order Prev and Next for "replacementTree".
7222         if (treePrevNode != nullptr)
7223         {
7224             treeFirstNode = fgGetFirstNode(replacementTree);
7225             treeFirstNode->gtPrev = treePrevNode;
7226             treePrevNode->gtNext = treeFirstNode;
7227         }
7228         else
7229         {
7230             // Update the linear-order start of "stmt" if treeFirstNode
7231             // appears to have replaced the original first node.
7232             assert(treeFirstNode == stmt->gtStmt.gtStmtList);
7233             stmt->gtStmt.gtStmtList = fgGetFirstNode(replacementTree);
7234         }
7235
7236         if (treeNextNode != nullptr)
7237         {
7238             treeLastNode = replacementTree;
7239             treeLastNode->gtNext = treeNextNode;
7240             treeNextNode->gtPrev = treeLastNode;
7241         }
7242
7243         bool needFixupCallArg = false;
7244         GenTreePtr node = treeParent;
7245
7246         // If we have replaced an arg, then update pointers in argtable.
7247         do
7248         {
7249             // Look for the first enclosing callsite
7250             switch (node->OperGet())
7251             {
7252                 case GT_LIST:
7253                 case GT_ARGPLACE:
7254                     // "tree" is likely an argument of a call.
7255                     needFixupCallArg = true;
7256                     break;
7257
7258                 case GT_CALL:
7259                     if (needFixupCallArg)
7260                     {
7261                         // We have replaced an arg, so update pointers in argtable.
7262                         fgFixupArgTabEntryPtr(node, tree, replacementTree);
7263                         needFixupCallArg = false;
7264                     }
7265                     break;
7266
7267                 default:
7268                     // "tree" is unlikely to be an argument of a call.
7269                     needFixupCallArg = false;
7270                     break;
7271             }
7272
7273             if (needFixupCallArg)
7274             {
7275                 // Keep tracking to update the first enclosing call.
7276                 node = node->gtGetParent(nullptr);
7277             }
7278             else
7279             {
7280                 // Stop tracking.
7281                 node = nullptr;
7282             }
7283         } while (node != nullptr);
7284
7285         // Propagate side-effect flags of "replacementTree" to its parents if needed.
7286         gtUpdateSideEffects(treeParent, tree->gtFlags, replacementTree->gtFlags);
7287     }
7288
7289     return  replacementTree;
7290 }
7291
7292 //------------------------------------------------------------------------
7293 // gtUpdateSideEffects: Update the side effects for ancestors.
7294 //
7295 // Arguments:
7296 //    treeParent      - The immediate parent node.
7297 //    oldGtFlags      - The stale gtFlags.
7298 //    newGtFlags      - The new gtFlags.
7299 //
7300 //
7301 // Assumptions:
7302 //    Linear order of the stmt has been established.
7303 //
7304 // Notes:
7305 //    The routine is used for updating the stale side effect flags for ancestor 
7306 //    nodes starting from treeParent up to the top-level stmt expr.
7307
7308 void    Compiler::gtUpdateSideEffects(GenTreePtr     treeParent,
7309                                       unsigned       oldGtFlags,
7310                                       unsigned       newGtFlags)
7311 {
7312     assert(fgStmtListThreaded);
7313
7314     oldGtFlags = oldGtFlags & GTF_ALL_EFFECT;
7315     newGtFlags = newGtFlags & GTF_ALL_EFFECT;
7316
7317     if (oldGtFlags != newGtFlags)
7318     {
7319         while (treeParent)
7320         {
7321             treeParent->gtFlags &= ~oldGtFlags;
7322             treeParent->gtFlags |= newGtFlags;
7323             treeParent = treeParent->gtGetParent(nullptr);
7324         } 
7325     }
7326 }
7327
7328 /*****************************************************************************
7329  *
7330  *  Compares two trees and returns true when both trees are the same.
7331  *  Instead of fully comparing the two trees this method can just return false.
7332  *  Thus callers should not assume that the trees are different when false is returned.
7333  *  Only when true is returned can the caller perform code optimizations.
7334  *  The current implementation only compares a limited set of LEAF/CONST nodes
7335  *  and returns false for all other trees.
7336  */
7337 bool       Compiler::gtCompareTree(GenTree *      op1,
7338                                    GenTree *      op2)
7339 {
7340     /* Make sure that both trees are of the same GT node kind */
7341     if (op1->OperGet() != op2->OperGet())
7342         return false;
7343
7344     /* Make sure that both trees are returning the same type */
7345     if (op1->gtType != op2->gtType)
7346         return false;
7347
7348     /* Figure out what kind of a node we have */
7349
7350     genTreeOps  oper = op1->OperGet();
7351     unsigned    kind = op1->OperKind();
7352
7353     /* Is this a constant or leaf node? */
7354
7355     if  (kind & (GTK_CONST|GTK_LEAF))
7356     {
7357         switch (oper)
7358         {
7359         case GT_CNS_INT:
7360             if ((op1->gtIntCon.gtIconVal == op2->gtIntCon.gtIconVal) && 
7361                 GenTree::SameIconHandleFlag(op1, op2))
7362             {
7363                 return true;
7364             }
7365             break;
7366
7367         case GT_CNS_LNG:
7368             if (op1->gtLngCon.gtLconVal == op2->gtLngCon.gtLconVal)
7369             {
7370                 return true;
7371             }
7372             break;
7373
7374         case GT_CNS_STR:
7375             if (op1->gtStrCon.gtSconCPX == op2->gtStrCon.gtSconCPX)
7376             {
7377                 return true;
7378             }
7379             break;
7380
7381         case GT_LCL_VAR:
7382             if (op1->gtLclVarCommon.gtLclNum == op2->gtLclVarCommon.gtLclNum)  
7383             {
7384                 return true;
7385             }
7386             break;
7387
7388         case GT_CLS_VAR:
7389             if (op1->gtClsVar.gtClsVarHnd == op2->gtClsVar.gtClsVarHnd)
7390             {
7391                 return true;
7392             }
7393             break;
7394
7395         default:
7396             // we return false for these unhandled 'oper' kinds
7397             break;
7398         }
7399     }
7400     return false;
7401 }
7402
7403
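// Returns the 'this' argument for the given GT_CALL node: either gtCallObjp directly,
// or, if that argument has been moved to the late-arg list, the corresponding late arg.
// Returns NULL when no 'this' argument is found.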
7404 GenTreePtr Compiler::gtGetThisArg(GenTreePtr call)
7405 {
7406     assert(call->gtOper == GT_CALL);
7407
7408     if  (call->gtCall.gtCallObjp != NULL)
7409     {
7410         if (call->gtCall.gtCallObjp->gtOper != GT_NOP && call->gtCall.gtCallObjp->gtOper != GT_ASG)
7411         {
7412             if (!(call->gtCall.gtCallObjp->gtFlags & GTF_LATE_ARG))
7413             {
7414                 return call->gtCall.gtCallObjp;
7415             }
7416         }
7417
7418         if (call->gtCall.gtCallLateArgs)
7419         {
7420             regNumber  thisReg = REG_ARG_0;
7421             unsigned   argNum  = 0;
7422             fgArgTabEntryPtr thisArgTabEntry = gtArgEntryByArgNum(call, argNum);
7423             GenTreePtr result = thisArgTabEntry->node;
7424
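            // On targets without a fixed outgoing argument area, cross-check 'result'
            // against the late arg assigned to the 'this' register, picking up the
            // updated node when assertion propagation has rewritten the current stmt.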
7425 #if !FEATURE_FIXED_OUT_ARGS
7426             GenTreePtr lateArgs = call->gtCall.gtCallLateArgs;
7427             regList list = call->gtCall.regArgList;
7428             int index = 0;
7429             while (lateArgs != NULL)
7430             {
7431                 assert(lateArgs->gtOper == GT_LIST);
7432                 assert(index < call->gtCall.regArgListCount);
7433                 regNumber curArgReg = list[index];
7434                 if (curArgReg == thisReg)
7435                 {
7436                     if (optAssertionPropagatedCurrentStmt)
7437                         result = lateArgs->gtOp.gtOp1;
7438
7439                     assert(result == lateArgs->gtOp.gtOp1);
7440                 }
7441
7442                 lateArgs = lateArgs->gtOp.gtOp2;
7443                 index++;
7444             }
7445 #endif
7446             return result;
7447         }
7448     }
7449     return NULL;
7450 }
7451
7452 bool                GenTree::gtSetFlags() const
7453 {
7454     //
7455     // When FEATURE_SET_FLAGS (_TARGET_ARM_) is active, this method returns true
7456     //    when gtFlags has the GTF_SET_FLAGS flag set.
7457     // Otherwise, the architecture has instructions that typically set
7458     //    the flags, and this method returns true when codegen for this node will do so.
7459     //
7460     //    Exceptions: GT_IND (load/store) is not allowed to set the flags,
7461     //                and on XARCH the GT_MUL/GT_DIV and all overflow instructions
7462     //                do not set the condition flags.
7463     //
7464     // Precondition: we have a GTK_SMPOP.
7465     // 
7466     assert(OperIsSimple());
7467
7468     if (!varTypeIsIntegralOrI(TypeGet()))
7469         return false;
7470
7471 #if FEATURE_SET_FLAGS
7472
7473     if ((gtFlags & GTF_SET_FLAGS) && gtOper != GT_IND)
7474     {
7475         // GTF_SET_FLAGS is not valid on GT_IND and is overlaid with GTF_NONFAULTING_IND
7476         return true;
7477     }
7478     else
7479     {
7480         return false;
7481     }
7482
7483 #else  // !FEATURE_SET_FLAGS
7484
7485 #ifdef _TARGET_XARCH_
7486     // Return true if/when the codegen for this node will set the flags
7487     // 
7488     //
7489     if ((gtOper == GT_IND) || (gtOper == GT_MUL) || (gtOper == GT_DIV))
7490         return false;
7491     else if (gtOverflowEx())
7492         return false;
7493     else
7494         return true;
7495 #else
7496     // Otherwise for other architectures we should return false
7497     return false;
7498 #endif
7499
7500 #endif // !FEATURE_SET_FLAGS
7501 }
7502
7503 bool                GenTree::gtRequestSetFlags()
7504 {
7505     bool result = false;
7506
7507     // This method is a Nop unless FEATURE_SET_FLAGS is defined
7508
7509 #if FEATURE_SET_FLAGS
7510     // In order to set GTF_SET_FLAGS 
7511     //              we must have a GTK_SMPOP
7512     //          and an integer or machine-size type (not floating point or TYP_LONG on 32-bit)
7513     // 
7514     if (!OperIsSimple())
7515         return false;
7516
7517     if (!varTypeIsIntegralOrI(TypeGet()))
7518         return false;
7519
7520     switch (gtOper) {
7521     case GT_IND:
7522     case GT_ARR_LENGTH:
7523         // These will turn into simple load from memory instructions
7524         // and we can't force the setting of the flags on load from memory
7525         break;
7526         
7527     case GT_MUL:
7528     case GT_DIV:
7529         // These instructions don't set the flags (on x86/x64)
7530         //
7531         break;
7532         
7533     default:
7534         // Otherwise we can set the flags for this gtOper
7535         // and codegen must set the condition flags.
7536         //
7537         gtFlags |= GTF_SET_FLAGS;
7538         result = true;
7539         break;
7540     }
7541 #endif // FEATURE_SET_FLAGS
7542
7543     // Codegen for this tree must set the condition flags if 
7544     // this method returns true.
7545     //
7546     return result;
7547 }
7548
7549 /*****************************************************************************/
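// Copies every field of 'gt' into this node, including the trailing,
// oper-specific data (up to TREE_NODE_SZ_LARGE bytes in total).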
7550 void GenTree::CopyTo(class Compiler* comp, const GenTree& gt)
7551 {
7552     gtOper   = gt.gtOper;
7553     gtType   = gt.gtType;
7554     gtAssertionNum = gt.gtAssertionNum;
7555
7556     gtRegNum = gt.gtRegNum; // one union member.
7557     CopyCosts(&gt);
7558
7559     gtFlags  = gt.gtFlags;
7560     gtVNPair = gt.gtVNPair;
7561
7562     gtRsvdRegs = gt.gtRsvdRegs;
7563
7564 #ifdef LEGACY_BACKEND
7565     gtUsedRegs = gt.gtUsedRegs;
7566 #endif // LEGACY_BACKEND
7567
7568 #if FEATURE_STACK_FP_X87
7569     gtFPlvl = gt.gtFPlvl;
7570 #endif // FEATURE_STACK_FP_X87
7571
7572     gtNext = gt.gtNext;
7573     gtPrev = gt.gtPrev;
7574 #ifdef DEBUG
7575     gtTreeID = gt.gtTreeID;
7576     gtSeqNum = gt.gtSeqNum;
7577 #endif
7578     // Largest node subtype:
7579     void* remDst = reinterpret_cast<char*>(this) + sizeof(GenTree);
7580     void* remSrc = reinterpret_cast<char*>(const_cast<GenTree*>(&gt)) + sizeof(GenTree);
7581     memcpy(remDst, remSrc, TREE_NODE_SZ_LARGE - sizeof(GenTree));
7582 }
7583
7584 void GenTree::CopyToSmall(const GenTree& gt)
7585 {
7586     // Small node size is defined by GenTreeOp.
7587     void* remDst = reinterpret_cast<char*>(this) + sizeof(GenTree);
7588     void* remSrc = reinterpret_cast<char*>(const_cast<GenTree*>(&gt)) + sizeof(GenTree);
7589     memcpy(remDst, remSrc, TREE_NODE_SZ_SMALL - sizeof(GenTree));
7590 }
7591
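// Returns the number of operand (child) nodes of this node, based on its operator.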
7592 unsigned GenTree::NumChildren()
7593 {
7594     if (OperIsConst() || OperIsLeaf())
7595     {
7596         return 0;
7597     }
7598     else if (OperIsUnary())
7599     {
7600         if (OperGet() == GT_NOP || OperGet() == GT_RETURN || OperGet() == GT_RETFILT)
7601         {
7602             if (gtOp.gtOp1 == nullptr)
7603                 return 0;
7604             else
7605                 return 1;
7606         }
7607         else
7608         {
7609             return 1;
7610         }
7611     }
7612     else if (OperIsBinary())
7613     {
7614         // All binary operators except LEA have at least one arg; the second arg may sometimes be null, however.
7615         if (OperGet() == GT_LEA)
7616         {
7617             unsigned childCount = 0;
7618             if (gtOp.gtOp1 != nullptr)
7619             {
7620                 childCount++;
7621             }
7622             if (gtOp.gtOp2 != nullptr)
7623             {
7624                 childCount++;
7625             }
7626             return childCount;
7627         }
7628         assert(gtOp.gtOp1 != nullptr);
7629         if (gtOp.gtOp2 == nullptr)
7630         {
7631             return 1;
7632         }
7633         else
7634         {
7635             return 2;
7636         }
7637     }
7638     else
7639     {
7640         // Special
7641         switch (OperGet())
7642         {
7643         case GT_CMPXCHG:
7644             return 3;
7645
7646         case GT_ARR_BOUNDS_CHECK:
7647 #ifdef FEATURE_SIMD
7648     case GT_SIMD_CHK:
7649 #endif // FEATURE_SIMD
7650         return 2;
7651
7652         case GT_FIELD:
7653         case GT_STMT:
7654             return 1;
7655
7656         case GT_ARR_ELEM:
7657             return 1 + AsArrElem()->gtArrRank;
7658
7659         case GT_ARR_OFFSET:
7660             return 3;
7661
7662         case GT_CALL:
7663             {
7664                 GenTreeCall* call = AsCall();
7665                 unsigned res = 0;  // arg list(s) (including late args).
7666                 if (call->gtCallObjp != nullptr) res++;  // Add objp?
7667                 if (call->gtCallArgs != nullptr) res++;  // Add args?
7668                 if (call->gtCallLateArgs != nullptr) res++;  // Add late args?
7669                 if (call->gtControlExpr != nullptr) res++;
7670
7671                 if (call->gtCallType == CT_INDIRECT)
7672                 {
7673                     if (call->gtCallCookie != nullptr) res++;
7674                     if (call->gtCallAddr != nullptr) res++;
7675                 }
7676                 return res;
7677             }
7678         case GT_NONE:
7679             return 0;
7680         default:
7681             unreached();
7682         }
7683     }
7684 }
7685
7686 GenTreePtr GenTree::GetChild(unsigned childNum)
7687 {
7688     assert(childNum < NumChildren()); // Precondition.
7689     assert(NumChildren() <= MAX_CHILDREN);
7690     assert(!(OperIsConst() || OperIsLeaf()));
7691     if (OperIsUnary())
7692     {
7693         return AsUnOp()->gtOp1;
7694     }
7695     else if (OperIsBinary())
7696     {
7697         if (OperIsAddrMode())
7698         {
7699             // If this is the first (0th) child, only return op1 if it is non-null
7700             // Otherwise, we return gtOp2.
7701             if (childNum == 0 && AsOp()->gtOp1 != nullptr)
7702                 return AsOp()->gtOp1;
7703             return AsOp()->gtOp2;
7704         }
7705         // TODO-Cleanup: Consider handling ReverseOps here, and then we wouldn't have to handle it in
7706         // fgGetFirstNode().  However, it seems that it causes loop hoisting behavior to change.
7707         if (childNum == 0)
7708         {
7709             return AsOp()->gtOp1;
7710         }
7711         else
7712         {
7713             return AsOp()->gtOp2;
7714         }
7715     }
7716     else
7717     {
7718         // Special
7719         switch (OperGet())
7720         {
7721         case GT_CMPXCHG:
7722             switch (childNum)
7723             {
7724             case 0:
7725                 return AsCmpXchg()->gtOpLocation;
7726             case 1:
7727                 return AsCmpXchg()->gtOpValue;
7728             case 2:
7729                 return AsCmpXchg()->gtOpComparand;
7730             default:
7731                 unreached();
7732             }
7733         case GT_ARR_BOUNDS_CHECK:
7734 #ifdef FEATURE_SIMD
7735         case GT_SIMD_CHK:
7736 #endif // FEATURE_SIMD
7737             switch (childNum)
7738             {
7739             case 0:
7740                 return AsBoundsChk()->gtArrLen;
7741             case 1:
7742                 return AsBoundsChk()->gtIndex;
7743             default:
7744                 unreached();
7745             }
7746
7747         case GT_FIELD:
7748             return AsField()->gtFldObj;
7749
7750         case GT_STMT:
7751             return AsStmt()->gtStmtExpr;
7752
7753         case GT_ARR_ELEM:
7754             if (childNum == 0)
7755             {
7756                 return AsArrElem()->gtArrObj;
7757             }
7758             else
7759             {
7760                 return AsArrElem()->gtArrInds[childNum-1];
7761             }
7762
7763         case GT_ARR_OFFSET:
7764             switch (childNum)
7765             {
7766             case 0:
7767                 return AsArrOffs()->gtOffset;
7768             case 1:
7769                 return AsArrOffs()->gtIndex;
7770             case 2:
7771                 return AsArrOffs()->gtArrObj;
7772             default:
7773                 unreached();
7774             }
7775
7776         case GT_CALL:
7777             {
7778                 // The if chain below assumes that all possible children are non-null.
7779                 // If some are null, we "virtually skip" them by bumping childNum
7780                 // past each null child so that it indexes the next non-null one.
7781                 GenTreeCall* call = AsCall();
7782
7783                 if (call->gtCallObjp                      == nullptr)
7784                     childNum++;
7785                 if (childNum >= 1 && call->gtCallArgs     == nullptr)
7786                     childNum++;
7787                 if (childNum >= 2 && call->gtCallLateArgs == nullptr)
7788                     childNum++;
7789                 if (childNum >= 3 && call->gtControlExpr  == nullptr)
7790                     childNum++;
7791                 if (call->gtCallType == CT_INDIRECT)
7792                 {
7793                     if (childNum >= 4 && call->gtCallCookie == nullptr) childNum++;
7794                 }
7795
7796                 if (childNum == 0)
7797                 {
7798                     return call->gtCallObjp;
7799                 }
7800                 else if (childNum == 1)
7801                 {
7802                     return call->gtCallArgs;
7803                 }
7804                 else if (childNum == 2)
7805                 {
7806                     return call->gtCallLateArgs;
7807                 }
7808                 else if (childNum == 3)
7809                 {
7810                     return call->gtControlExpr;
7811                 }
7812                 else
7813                 {
7814                     assert(call->gtCallType == CT_INDIRECT);
7815                     if (childNum == 4)
7816                     {
7817                         return call->gtCallCookie;
7818                     }
7819                     else
7820                     {
7821                         assert (childNum == 5);
7822                         return call->gtCallAddr;
7823                     }
7824                 }
7825             }
7826         case GT_NONE:
7827             unreached();
7828         default:
7829             unreached();
7830         }
7831     }
7832 }
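// A minimal sketch (illustrative only) of walking a node's children using the
// two methods above; 'visit' is a hypothetical callback:
//
//     unsigned count = tree->NumChildren();
//     for (unsigned i = 0; i < count; i++)
//     {
//         GenTreePtr child = tree->GetChild(i);
//         visit(child);
//     }
//
// GetChild() requires childNum < NumChildren(), so null children (e.g. a call's
// missing late-arg list) are never handed back; they are "virtually skipped".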
7833
7834 #ifdef DEBUG
7835
7836 /* static */ int GenTree::gtDispFlags(unsigned flags, unsigned debugFlags)
7837 {
7838     printf("%c", (flags & GTF_ASG           ) ? 'A' : '-');
7839     printf("%c", (flags & GTF_CALL          ) ? 'C' : '-');
7840     printf("%c", (flags & GTF_EXCEPT        ) ? 'X' : '-');
7841     printf("%c", (flags & GTF_GLOB_REF      ) ? 'G' : '-');
7842     printf("%c", (debugFlags & GTF_DEBUG_NODE_MORPHED) ? '+' :     // First print '+' if GTF_DEBUG_NODE_MORPHED is set
7843                  (flags & GTF_ORDER_SIDEEFF ) ? 'O' : '-');   // otherwise print 'O' or '-'
7844     printf("%c", (flags & GTF_COLON_COND    ) ? '?' : '-');
7845     printf("%c", (flags & GTF_DONT_CSE      ) ? 'N' :         // N is for No cse
7846                  (flags & GTF_MAKE_CSE      ) ? 'H' : '-');   // H is for Hoist this expr
7847     printf("%c", (flags & GTF_REVERSE_OPS   ) ? 'R' : '-');
7848     printf("%c", (flags & GTF_UNSIGNED      ) ? 'U' :
7849                  (flags & GTF_BOOLEAN       ) ? 'B' : '-');
7850 #if FEATURE_SET_FLAGS
7851     printf("%c", (flags & GTF_SET_FLAGS     ) ? 'S' : '-');
7852 #endif
7853     printf("%c", (flags & GTF_LATE_ARG      ) ? 'L' : '-');
7854     printf("%c", (flags & GTF_SPILLED       ) ? 'z' : 
7855                  (flags & GTF_SPILL         ) ? 'Z' : '-');
7856     return 12; // displayed 12 flag characters
7857 }
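// For reference, the flag characters above are printed in this order (a dash
// means the corresponding flag is not set):
//
//   A  C  X  G  +/O  ?  N/H  R  U/B  [S]  L  z/Z
//
// so, for example, with FEATURE_SET_FLAGS defined, a node with only GTF_ASG and
// GTF_EXCEPT set dumps as "A-X---------".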
7858
7859 /*****************************************************************************/
7860
7861 void 
7862 Compiler::gtDispNodeName(GenTree *tree)
7863 {
7864     /* print the node name */
7865
7866     const char * name;
7867
7868     assert(tree);
7869     if (tree->gtOper < GT_COUNT)
7870         name = GenTree::NodeName(tree->OperGet());
7871     else
7872         name = "<ERROR>";
7873
7874     char    buf[32];
7875     char *  bufp     = &buf[0];
7876
7877     if ((tree->gtOper == GT_CNS_INT) && tree->IsIconHandle())
7878     {
7879         sprintf_s(bufp, sizeof(buf), " %s(h)%c", name, 0);
7880     }
7881     else if (tree->gtOper == GT_PUTARG_STK)
7882     {
7883         sprintf_s(bufp, sizeof(buf), " %s [+0x%02x]%c", name, tree->AsPutArgStk()->getArgOffset(), 0);
7884     }
7885     else if (tree->gtOper == GT_CALL)
7886     {
7887         const char *  callType = "call";
7888         const char *  gtfType  = "";
7889         const char *  ctType   = "";
7890         char    gtfTypeBuf[100];
7891
7892         if (tree->gtCall.gtCallType == CT_USER_FUNC)
7893         {
7894             if ((tree->gtFlags & GTF_CALL_VIRT_KIND_MASK) != GTF_CALL_NONVIRT)
7895               callType = "callv";
7896         }
7897         else if (tree->gtCall.gtCallType == CT_HELPER)
7898             ctType  = " help";
7899         else if (tree->gtCall.gtCallType == CT_INDIRECT)
7900             ctType  = " ind";
7901         else
7902             assert(!"Unknown gtCallType");
7903
7904         if (tree->gtFlags & GTF_CALL_NULLCHECK)
7905             gtfType = " nullcheck";
7906
7907         if (tree->gtFlags & GTF_CALL_VIRT_VTABLE)
7908             gtfType = " ind";
7909         else if (tree->gtFlags & GTF_CALL_VIRT_STUB)
7910             gtfType = " stub";
7911 #ifdef FEATURE_READYTORUN_COMPILER
7912         else if (tree->gtCall.IsR2RRelativeIndir())
7913             gtfType = " r2r_ind";
7914 #endif // FEATURE_READYTORUN_COMPILER
7915         else if (tree->gtFlags & GTF_CALL_UNMANAGED)
7916         {
7917             char * gtfTypeBufWalk = gtfTypeBuf;
7918             gtfTypeBufWalk += SimpleSprintf_s(gtfTypeBufWalk, gtfTypeBuf,
7919                                               sizeof(gtfTypeBuf), " unman");
7920             if (tree->gtFlags & GTF_CALL_POP_ARGS)
7921                 gtfTypeBufWalk += SimpleSprintf_s(gtfTypeBufWalk, gtfTypeBuf,
7922                                                   sizeof(gtfTypeBuf), " popargs");
7923             if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_UNMGD_THISCALL)
7924             {
7925                 gtfTypeBufWalk += SimpleSprintf_s(gtfTypeBufWalk, gtfTypeBuf,
7926                                                   sizeof(gtfTypeBuf),
7927                                                   " thiscall");
7928             }
7929             gtfType = gtfTypeBuf;
7930         }        
7931
7932         sprintf_s(bufp, sizeof(buf), " %s%s%s%c", callType, ctType, gtfType, 0);
7933     }
7934     else if (tree->gtOper == GT_ARR_ELEM)
7935     {
7936         bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), " %s[", name);
7937         for (unsigned rank = tree->gtArrElem.gtArrRank-1; rank; rank--)
7938             bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), ",");
7939         SimpleSprintf_s(bufp, buf, sizeof(buf), "]");
7940     }
7941     else if (tree->gtOper == GT_ARR_OFFSET || tree->gtOper == GT_ARR_INDEX)
7942     {
7943         bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), " %s[", name);
7944         unsigned char currDim;
7945         unsigned char rank;
7946         if (tree->gtOper == GT_ARR_OFFSET)
7947         {
7948             currDim = tree->gtArrOffs.gtCurrDim;
7949             rank = tree->gtArrOffs.gtArrRank;
7950         }
7951         else
7952         {
7953             currDim = tree->gtArrIndex.gtCurrDim;
7954             rank = tree->gtArrIndex.gtArrRank;
7955         }
7956
7957         for (unsigned char dim = 0; dim < rank; dim++)
7958         {
7959             // Use a de facto standard i,j,k for the dimensions.
7960             // Note that we only support up to rank 3 arrays with these nodes, so we won't run out of characters.
7961             char dimChar = '*';
7962             if (dim == currDim)
7963             {
7964                 dimChar = 'i' + dim;
7965             }
7966             else if (dim > currDim)
7967             {
7968                 dimChar = ' ';
7969             }
7970
7971             bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "%c", dimChar);
7972             if (dim != rank-1)
7973             {
7974                 bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), ",");
7975             }
7976         }
7977         SimpleSprintf_s(bufp, buf, sizeof(buf), "]");
7978     }
7979     else if (tree->gtOper == GT_LEA)
7980     {
7981         GenTreeAddrMode * lea = tree->AsAddrMode();
7982         bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), " %s(", name);
7983         if (lea->Base() != NULL) bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "b+");
7984         if (lea->Index() != NULL) bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "(i*%d)+", lea->gtScale);
7985         bufp += SimpleSprintf_s(bufp, buf, sizeof(buf), "%d)", lea->gtOffset);
7986     }
7987     else if (tree->gtOper == GT_ARR_BOUNDS_CHECK)
7988     {
7989         switch(tree->gtBoundsChk.gtThrowKind)
7990         {
7991         case SCK_RNGCHK_FAIL:   sprintf_s(bufp, sizeof(buf), " %s_Rng", name);    break;
7992         case SCK_ARG_EXCPN:     sprintf_s(bufp, sizeof(buf), " %s_Arg", name);    break;
7993         case SCK_ARG_RNG_EXCPN: sprintf_s(bufp, sizeof(buf), " %s_ArgRng", name); break;
7994         default:                unreached();
7995         }
7996     }
7997     else if (tree->gtOverflowEx())
7998     {
7999         sprintf_s(bufp, sizeof(buf), " %s_ovfl%c", name, 0);
8000     }
8001     else
8002     {
8003         sprintf_s(bufp, sizeof(buf), " %s%c", name, 0);
8004     }
8005
8006     if (strlen(buf) < 10)
8007         printf(" %-10s", buf);
8008     else
8009         printf(" %s", buf);
8010 }
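// Illustrative samples of what the buffer above ends up holding (the exact node
// names come from GenTree::NodeName, so these are only representative):
//
//   " ADD"                 -- an ordinary node
//   " ADD_ovfl"            -- the same node with overflow checking
//   " CNS_INT(h)"          -- an integer constant that is a handle
//   " call help"           -- a helper call; an indirect call shows " call ind"
//   " LEA(b+(i*4)+16)"     -- an address mode with base, index (scale 4) and offset 16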
8011
8012 void                Compiler::gtDispVN(GenTree* tree)
8013 {
8014     if (tree->gtVNPair.GetLiberal() != ValueNumStore::NoVN)
8015     {
8016         assert(tree->gtVNPair.GetConservative() != ValueNumStore::NoVN);
8017         printf(" ");
8018         vnpPrint(tree->gtVNPair, 0);
8019     }
8020 }
8021
8022 //------------------------------------------------------------------------
8023 // gtDispNode: Print a tree to jitstdout.
8024 //
8025 // Arguments:
8026 //    tree - the tree to be printed
8027 //    indentStack - the specification for the current level of indentation & arcs
8028 //    msg         - a contextual message (i.e. from the parent) to print
8029 //
8030 // Return Value:
8031 //    None.
8032 //
8033 // Notes:
8034 //    'indentStack' may be null, in which case no indentation or arcs are printed
8035 //    'msg' may be null
8036
8037 void                Compiler::gtDispNode(GenTreePtr     tree,
8038                                          IndentStack*   indentStack,
8039                                          __in __in_z __in_opt const char * msg)
8040 {
8041     bool           printPointer = true;  // always true..
8042     bool           printFlags   = true;  // always true..
8043     bool           printCost    = true;  // always true..
8044
8045     int msgLength = 25;
8046
8047     GenTree *  prev;
8048
8049     if  (tree->gtSeqNum)
8050     {
8051         printf("N%03u ", tree->gtSeqNum);
8052         if (tree->gtCostsInitialized)
8053         {
8054             printf("(%3u,%3u) ", tree->gtCostEx, tree->gtCostSz);
8055         }
8056         else
8057         {
8058             printf("(???" ",???" ") "); // This probably indicates a bug: the node has a sequence number, but not costs.
8059         }
8060     }
8061     else
8062     {
8063         if (tree->gtOper == GT_STMT)
8064         {
8065             prev = tree->gtStmt.gtStmtExpr;
8066         }
8067         else
8068         {
8069             prev = tree;
8070         }
8071
8072         bool hasSeqNum = true;
8073         unsigned dotNum = 0;
8074         do
8075         {
8076             dotNum++;
8077             prev = prev->gtPrev;
8078
8079             if ((prev == NULL) || (prev == tree))
8080             {
8081                 hasSeqNum = false;
8082                 break;
8083             }
8084
8085             assert(prev);
8086         } while (prev->gtSeqNum == 0);
8087
8088         // If we have an indent stack, don't add additional characters,
8089         // as it will mess up the alignment.
8090         if (tree->gtOper != GT_STMT && hasSeqNum && (indentStack == nullptr))
8091             printf("N%03u.%02u ", prev->gtSeqNum, dotNum);
8092         else
8093             printf("     ");
8094
8095         if (tree->gtCostsInitialized)
8096         {
8097             printf("(%3u,%3u) ", tree->gtCostEx, tree->gtCostSz);
8098         }
8099         else
8100         {
8101             if (tree->gtOper != GT_STMT && hasSeqNum)
8102             {
8103                 // Do better alignment in this case
8104                 printf("       ");
8105             }
8106             else
8107             {
8108                 printf("          ");
8109             }
8110         }
8111     }
8112
8113     if (optValnumCSE_phase)
8114     {
8115         if (IS_CSE_INDEX(tree->gtCSEnum))
8116         {
8117             printf("CSE #%02d (%s)", GET_CSE_INDEX(tree->gtCSEnum),
8118                                    (IS_CSE_USE(tree->gtCSEnum) ? "use" : "def"));
8119         }
8120         else
8121         {
8122             printf("             ");
8123         }
8124     }
8125
8126     /* Print the node ID */
8127     printTreeID(tree);
8128     printf(" ");
8129
8130     if (tree->gtOper >= GT_COUNT)
8131     {
8132         printf(" **** ILLEGAL NODE ****");
8133         return;
8134     }
8135
8136     if  (printFlags)
8137     {
8138         /* First print the flags associated with the node */
8139         switch (tree->gtOper)
8140         {
8141         case GT_LEA:
8142         case GT_IND: 
8143             // We prefer printing R, V or U over the '*', '#' and 'a' flags below
8144             if ((tree->gtFlags & (GTF_IND_REFARR_LAYOUT | GTF_IND_VOLATILE | GTF_IND_UNALIGNED)) == 0)
8145             {
8146                 if (tree->gtFlags & GTF_IND_TGTANYWHERE)
8147                                                          { printf("*"); --msgLength; break; }
8148                 if (tree->gtFlags & GTF_IND_INVARIANT)
8149                                                          { printf("#"); --msgLength; break; }
8150                 if (tree->gtFlags & GTF_IND_ARR_INDEX)
8151                                                          { printf("a"); --msgLength; break; }
8152             }
8153             __fallthrough;
8154
8155         case GT_INDEX:
8156
8157             if ((tree->gtFlags & (GTF_IND_VOLATILE | GTF_IND_UNALIGNED)) == 0)  // We prefer printing V or U over R
8158             {
8159                 if (tree->gtFlags & GTF_IND_REFARR_LAYOUT)
8160                                                          { printf("R"); --msgLength; break; }   // R means RefArray
8161             }
8162             __fallthrough;
8163
8164         case GT_FIELD:
8165         case GT_CLS_VAR:
8166             if (tree->gtFlags & GTF_IND_VOLATILE)
8167                                                          { printf("V"); --msgLength; break; }
8168             if (tree->gtFlags & GTF_IND_UNALIGNED)
8169                                                          { printf("U"); --msgLength; break; }
8170             goto DASH;
8171
8172         case GT_INITBLK:
8173         case GT_COPYBLK:
8174         case GT_COPYOBJ:
8175             if (tree->AsBlkOp()->IsVolatile())
8176                                                          { printf("V"); --msgLength; break; }
8177             if (tree->gtFlags & GTF_BLK_UNALIGNED)
8178                                                          { printf("U"); --msgLength; break; }
8179             goto DASH;
8180
8181         case GT_CALL:
8182             if (tree->gtFlags & GTF_CALL_INLINE_CANDIDATE)
8183                                                          { printf("I"); --msgLength; break; }
8184             if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_RETBUFFARG)
8185                                                          { printf("S"); --msgLength; break; }
8186             if (tree->gtFlags & GTF_CALL_HOISTABLE)
8187                                                          { printf("H"); --msgLength; break; }
8188             
8189             goto DASH;
8190
8191         case GT_MUL:
8192             if (tree->gtFlags & GTF_MUL_64RSLT)
8193                                                          { printf("L"); --msgLength; break; }
8194             goto DASH;
8195
8196         case GT_ADDR:
8197             if (tree->gtFlags & GTF_ADDR_ONSTACK)
8198                                                          { printf("L"); --msgLength; break; }   // L means LclVar
8199             goto DASH;
8200
8201         case GT_LCL_FLD:
8202         case GT_LCL_VAR:
8203         case GT_LCL_VAR_ADDR:
8204         case GT_LCL_FLD_ADDR:
8205         case GT_STORE_LCL_FLD:
8206         case GT_STORE_LCL_VAR:
8207         case GT_REG_VAR:
8208             if (tree->gtFlags & GTF_VAR_USEASG)
8209                                                          { printf("U"); --msgLength; break; }
8210             if (tree->gtFlags & GTF_VAR_USEDEF)
8211                                                          { printf("B"); --msgLength; break; }
8212             if (tree->gtFlags & GTF_VAR_DEF)
8213                                                          { printf("D"); --msgLength; break; }
8214             if (tree->gtFlags & GTF_VAR_CAST)
8215                                                          { printf("C"); --msgLength; break; }
8216             if (tree->gtFlags & GTF_VAR_ARR_INDEX)
8217                                                          { printf("i"); --msgLength; break; }
8218             goto DASH;
8219
8220         case GT_EQ:
8221         case GT_NE:
8222         case GT_LT:
8223         case GT_LE:
8224         case GT_GE:
8225         case GT_GT:
8226             if (tree->gtFlags & GTF_RELOP_NAN_UN)
8227                                                          { printf("N"); --msgLength; break; }
8228             if (tree->gtFlags & GTF_RELOP_JMP_USED)
8229                                                          { printf("J"); --msgLength; break; }
8230             if (tree->gtFlags & GTF_RELOP_QMARK)
8231                                                          { printf("Q"); --msgLength; break; }
8232             if (tree->gtFlags & GTF_RELOP_SMALL)
8233                                                          { printf("S"); --msgLength; break; }
8234             goto DASH;
8235
8236         default:
8237 DASH:
8238             printf("-");
8239             --msgLength;
8240             break;
8241         }
8242
8243         /* Then print the general purpose flags */
8244         unsigned flags = tree->gtFlags;
8245
8246         if (tree->OperIsBinary())
8247         {
8248             genTreeOps oper = tree->OperGet();
8249
8250             // Check for GTF_ADDRMODE_NO_CSE flag on add/mul/shl Binary Operators
8251             if ((oper == GT_ADD) || (oper == GT_MUL) || (oper == GT_LSH))
8252             {
8253                 if ((tree->gtFlags & GTF_ADDRMODE_NO_CSE) != 0)
8254                 {
8255                     flags |= GTF_DONT_CSE;   // Force the GTF_ADDRMODE_NO_CSE flag to print out like GTF_DONT_CSE
8256                 }
8257             }
8258         }
8259         else  // !tree->OperIsBinary()
8260         {
8261             // the GTF_REVERSE_OPS flag only applies to binary operations
8262             flags &= ~GTF_REVERSE_OPS;   // we use this value for GTF_VAR_ARR_INDEX above
8263         }
8264
8265         msgLength -= GenTree::gtDispFlags(flags, tree->gtDebugFlags);
8266         /*
8267             printf("%c", (flags & GTF_ASG           ) ? 'A' : '-');
8268             printf("%c", (flags & GTF_CALL          ) ? 'C' : '-');
8269             printf("%c", (flags & GTF_EXCEPT        ) ? 'X' : '-');
8270             printf("%c", (flags & GTF_GLOB_REF      ) ? 'G' : '-');
8271             printf("%c", (flags & GTF_ORDER_SIDEEFF ) ? 'O' : '-');
8272             printf("%c", (flags & GTF_COLON_COND    ) ? '?' : '-');
8273             printf("%c", (flags & GTF_DONT_CSE      ) ? 'N' :        // N is for No cse
8274                          (flags & GTF_MAKE_CSE      ) ? 'H' : '-');  // H is for Hoist this expr
8275             printf("%c", (flags & GTF_REVERSE_OPS   ) ? 'R' : '-');
8276             printf("%c", (flags & GTF_UNSIGNED      ) ? 'U' :
8277                          (flags & GTF_BOOLEAN       ) ? 'B' : '-');
8278             printf("%c", (flags & GTF_SET_FLAGS     ) ? 'S' : '-');
8279             printf("%c", (flags & GTF_SPILLED       ) ? 'z' : '-');
8280             printf("%c", (flags & GTF_SPILL         ) ? 'Z' : '-');
8281         */
8282
8283 #if FEATURE_STACK_FP_X87
8284         BYTE fpLvl = (BYTE)tree->gtFPlvl;
8285         if (IsUninitialized(fpLvl) || fpLvl == 0x00)
8286         {
8287             printf("-");
8288         }
8289         else
8290         {
8291             printf("%1u", tree->gtFPlvl);
8292         }
8293 #endif // FEATURE_STACK_FP_X87
8294     }
8295
8296     /* print the msg associated with the node */
8297
8298     if (msg == NULL)
8299         msg = "";
8300     if (msgLength < 0)
8301         msgLength = 0;
8302
8303     printf(" %-*s", msgLength, msg);
8304
8305     /* Indent the node accordingly */
8306     printIndent(indentStack);
8307
8308     gtDispNodeName(tree);
8309
8310     assert(tree == 0 || tree->gtOper < GT_COUNT);
8311
8312     if  (tree)
8313     {
8314         /* print the type of the node */
8315         if (tree->gtOper != GT_CAST)
8316         {
8317             printf(" %-6s", varTypeName(tree->TypeGet()));
8318             if (tree->gtOper == GT_LCL_VAR || tree->gtOper == GT_STORE_LCL_VAR)
8319             {
8320                 LclVarDsc * varDsc = &lvaTable[tree->gtLclVarCommon.gtLclNum];
8321                 if (varDsc->lvAddrExposed)
8322                 {
8323                     printf("(AX)");  // Variable has address exposed.
8324                 }
8325
8326                 if (varDsc->lvUnusedStruct)
8327                 {
8328                     assert(varDsc->lvPromoted);
8329                     printf("(U)"); // Unused struct
8330                 }
8331                 else if (varDsc->lvPromoted)
8332                 {
8333                     assert(varTypeIsPromotable(varDsc));
8334                     printf("(P)"); // Promoted struct
8335                 }
8336             }
8337
8338             if (tree->gtOper == GT_STMT)
8339             {
8340                 if (tree->gtFlags & GTF_STMT_TOP_LEVEL)
8341                     printf("(top level) ");
8342                 else
8343                     printf("(embedded) ");
8344
8345                 if (opts.compDbgInfo)
8346                 {
8347                     IL_OFFSET endIL = tree->gtStmt.gtStmtLastILoffs;
8348
8349                     printf("(IL ");
8350                     if (tree->gtStmt.gtStmtILoffsx == BAD_IL_OFFSET)
8351                         printf("  ???");
8352                     else
8353                         printf("0x%03X", jitGetILoffs(tree->gtStmt.gtStmtILoffsx));
8354                     printf("...");
8355                     if (endIL == BAD_IL_OFFSET)
8356                         printf("  ???");
8357                     else
8358                         printf("0x%03X", endIL);
8359                     printf(")");
8360                 }
8361             }
8362
8363             if (tree->IsArgPlaceHolderNode() && (tree->gtArgPlace.gtArgPlaceClsHnd != NULL))
8364             {
8365                 printf(" => [clsHnd=%08X]", dspPtr(tree->gtArgPlace.gtArgPlaceClsHnd));
8366             }
8367         }
8368
8369         // for tracking down problems in reguse prediction or liveness tracking
8370
8371         if (verbose&&0)
8372         {
8373             printf(" RR="); dspRegMask(tree->gtRsvdRegs);
8374 #ifdef LEGACY_BACKEND
8375             printf(",UR="); dspRegMask(tree->gtUsedRegs);
8376 #endif // LEGACY_BACKEND
8377             printf("\n");
8378         }
8379     }
8380 }
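// Putting the pieces above together, a dumped node header line looks roughly
// like the following (the values, and the exact tree-ID formatting produced by
// printTreeID, are purely illustrative):
//
//   N047 (  3,  2) [000421] ------------              *  ADD       int
//
// i.e. sequence number, (execution cost, size cost), tree ID, the flag
// characters from gtDispFlags, the optional caller-supplied message, the
// indentation arcs, the node name from gtDispNodeName, and the node's type.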
8381
8382 void                Compiler::gtDispRegVal(GenTree *  tree)
8383 {
8384     switch (tree->GetRegTag())
8385     {
8386     // Don't display NOREG; the absence of this tag will imply this state
8387     //case GenTree::GT_REGTAG_NONE:       printf(" NOREG");   break;
8388
8389     case GenTree::GT_REGTAG_REG:
8390         printf(" REG %s", compRegVarName(tree->gtRegNum));
8391         break;
8392
8393 #if CPU_LONG_USES_REGPAIR
8394     case GenTree::GT_REGTAG_REGPAIR:
8395         printf(" PAIR %s", compRegPairName(tree->gtRegPair));
8396         break;
8397 #endif
8398
8399     default: 
8400         break;
8401     }
8402
8403     if (tree->IsMultiRegCall())
8404     {
8405         // 0th reg is gtRegNum, which is already printed above.
8406         // Print the remaining regs of a multi-reg call node.
8407         GenTreeCall* call = tree->AsCall();
8408         unsigned regCount = call->GetReturnTypeDesc()->GetReturnRegCount();
8409         for (unsigned i = 1; i < regCount; ++i)
8410         {
8411             printf(",%s", compRegVarName(call->GetRegNumByIdx(i)));
8412         }        
8413     }
8414     else if (tree->IsCopyOrReloadOfMultiRegCall())
8415     {
8416         GenTreeCopyOrReload* copyOrReload = tree->AsCopyOrReload();
8417         GenTreeCall* call = tree->gtGetOp1()->AsCall();
8418         unsigned regCount = call->GetReturnTypeDesc()->GetReturnRegCount();
8419         for (unsigned i = 1; i < regCount; ++i)
8420         {
8421             printf(",%s", compRegVarName(copyOrReload->GetRegNumByIdx(i)));
8422         }
8423     }
8424
8425     if  (tree->gtFlags & GTF_REG_VAL)
8426     {
8427         printf(" RV");
8428     }
8429 }
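// Illustrative output (register names are target-specific; these assume x64):
//
//   " REG rax"          -- a node whose value has been assigned register RAX
//   " REG rax,rdx"      -- a multi-reg call returning in RAX and RDX
//   " REG rax RV"       -- additionally flagged GTF_REG_VAL (the value currently
//                          lives in the register)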
8430
8431 // We don't usually expect to print anything longer than the following string:
8432 #define LONGEST_COMMON_LCL_VAR_DISPLAY         "V99 PInvokeFrame"
8433 #define LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH  (sizeof(LONGEST_COMMON_LCL_VAR_DISPLAY))
8434 #define BUF_SIZE                               (LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH*2)
8435
8436 void                Compiler::gtGetLclVarNameInfo(unsigned lclNum, const char** ilKindOut, const char** ilNameOut, unsigned * ilNumOut)
8437 {
8438     const char* ilKind = nullptr;
8439     const char* ilName = nullptr;
8440
8441     unsigned     ilNum  = compMap2ILvarNum(lclNum);
8442
8443     if (ilNum == (unsigned)ICorDebugInfo::RETBUF_ILNUM)
8444     {
8445         ilName = "RetBuf";
8446     }
8447     else if (ilNum == (unsigned)ICorDebugInfo::VARARGS_HND_ILNUM)
8448     {
8449         ilName = "VarArgHandle";
8450     }
8451     else if (ilNum == (unsigned)ICorDebugInfo::TYPECTXT_ILNUM)
8452     {
8453         ilName = "TypeCtx";
8454     }
8455     else if (ilNum == (unsigned)ICorDebugInfo::UNKNOWN_ILNUM)
8456     {
8457 #if FEATURE_ANYCSE
8458         if (lclNumIsTrueCSE(lclNum))
8459         {
8460             ilKind = "cse";
8461             ilNum  = lclNum - optCSEstart;
8462         }
8463         else if (lclNum >= optCSEstart)
8464         {
8465             // Currently, any new LclVars introduced after the CSE phase are assumed
8466             // to have been created by the "rationalizer"; hence the "rat" prefix.
8467             ilKind = "rat";
8468             ilNum  = lclNum - (optCSEstart+optCSEcount);
8469         }
8470         else
8471 #endif // FEATURE_ANYCSE
8472         {         
8473             if (lclNum == info.compLvFrameListRoot)
8474                 ilName = "FramesRoot";
8475             else if (lclNum == lvaInlinedPInvokeFrameVar)
8476                 ilName = "PInvokeFrame";
8477             else if (lclNum == lvaGSSecurityCookie)
8478                 ilName = "GsCookie";
8479 #if FEATURE_FIXED_OUT_ARGS
8480             else if (lclNum == lvaPInvokeFrameRegSaveVar)
8481                 ilName = "PInvokeFrameRegSave";
8482             else if (lclNum == lvaOutgoingArgSpaceVar)
8483                 ilName = "OutArgs";
8484 #endif // FEATURE_FIXED_OUT_ARGS
8485 #ifdef _TARGET_ARM_
8486             else if (lclNum == lvaPromotedStructAssemblyScratchVar)
8487                 ilName = "PromotedStructScratch";
8488 #endif // _TARGET_ARM_
8489 #if !FEATURE_EH_FUNCLETS
8490             else if (lclNum == lvaShadowSPslotsVar)
8491                 ilName = "EHSlots";
8492 #endif // !FEATURE_EH_FUNCLETS
8493             else if (lclNum == lvaLocAllocSPvar)
8494                 ilName = "LocAllocSP";
8495 #if FEATURE_EH_FUNCLETS
8496             else if (lclNum == lvaPSPSym)
8497                 ilName = "PSPSym";
8498 #endif // FEATURE_EH_FUNCLETS
8499             else 
8500             {
8501                 ilKind = "tmp";                
8502                 if (compIsForInlining())
8503                 {
8504                     ilNum  = lclNum - impInlineInfo->InlinerCompiler->info.compLocalsCount;
8505                 }
8506                 else
8507                 {
8508                     ilNum  = lclNum - info.compLocalsCount;
8509                 }
8510             }
8511         }
8512     }
8513     else if (lclNum < (compIsForInlining()
8514                        ? impInlineInfo->InlinerCompiler->info.compArgsCount
8515                        : info.compArgsCount))
8516     {
8517         if (ilNum == 0 && !info.compIsStatic)
8518             ilName = "this";
8519         else
8520             ilKind = "arg";
8521     }
8522     else
8523     {
8524         if (!lvaTable[lclNum].lvIsStructField)            
8525            ilKind = "loc";        
8526         
8527         if (compIsForInlining())
8528         {
8529             ilNum -= impInlineInfo->InlinerCompiler->info.compILargsCount;
8530         }
8531         else
8532         {
8533             ilNum -= info.compILargsCount;
8534         }
8535     }
8536
8537     *ilKindOut = ilKind;
8538     *ilNameOut = ilName;
8539     *ilNumOut = ilNum;
8540 }
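// Illustrative results of the classification above (which lclNum values map to
// each case depends on the method being compiled):
//
//   the 'this' pointer of an instance method -> ilName = "this"
//   an IL argument                           -> ilKind = "arg", ilNum = IL arg number
//   an IL local                              -> ilKind = "loc", ilNum = IL local number
//   a CSE temp introduced by the optimizer   -> ilKind = "cse"
//   a temp introduced after the CSE phase    -> ilKind = "rat"
//   any other compiler temp                  -> ilKind = "tmp"
//   the inlined PInvoke frame variable       -> ilName = "PInvokeFrame"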
8541     
8542 /*****************************************************************************/
8543 int                 Compiler::gtGetLclVarName(unsigned lclNum, char* buf, unsigned buf_remaining) 
8544 {
8545     char*    bufp_next     = buf;
8546     unsigned charsPrinted  = 0;
8547     int      sprintf_result;
8548
8549     sprintf_result = sprintf_s(bufp_next, buf_remaining, "V%02u", lclNum);
8550
8551     if (sprintf_result < 0)
8552         return sprintf_result;
8553
8554     charsPrinted  += sprintf_result;
8555     bufp_next     += sprintf_result;
8556     buf_remaining -= sprintf_result;
8557
8558     const char* ilKind = nullptr;
8559     const char* ilName = nullptr;
8560     unsigned    ilNum = 0;
8561
8562     Compiler::gtGetLclVarNameInfo(lclNum, &ilKind, &ilName, &ilNum);
8563
8564     if (ilName != nullptr)
8565     {
8566         sprintf_result = sprintf_s(bufp_next, buf_remaining, " %s", ilName);
8567         if (sprintf_result < 0) return sprintf_result;
8568         charsPrinted  += sprintf_result;
8569         bufp_next     += sprintf_result;
8570         buf_remaining -= sprintf_result;
8571     }
8572     else if (ilKind != nullptr)
8573     {
8574         sprintf_result = sprintf_s(bufp_next, buf_remaining, " %s%d", ilKind, ilNum);
8575         if (sprintf_result < 0) return sprintf_result;
8576         charsPrinted  += sprintf_result;
8577         bufp_next     += sprintf_result;
8578         buf_remaining -= sprintf_result;
8579     }
8580
8581     assert(charsPrinted  > 0);
8582     assert(buf_remaining > 0);
8583     
8584     return (int)charsPrinted;
8585 }
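// A minimal usage sketch (illustrative only), mirroring what gtDispLclVar does
// further below:
//
//     char buf[BUF_SIZE];
//     int  len = gtGetLclVarName(lclNum, buf, sizeof(buf)/sizeof(buf[0]));
//     if (len >= 0)
//     {
//         printf("%s", buf);   // e.g. "V03 arg2" or "V12 tmp4"
//     }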
8586
8587 /*****************************************************************************
8588  * Get the local var name, and create a copy of the string that can be used in debug output.
8589  */
8590 char*               Compiler::gtGetLclVarName(unsigned lclNum) 
8591 {
8592     char     buf[BUF_SIZE];
8593     int      charsPrinted = gtGetLclVarName(lclNum, buf, sizeof(buf)/sizeof(buf[0])); 
8594     if (charsPrinted < 0)
8595         return nullptr;
8596
8597     char* retBuf = new (this, CMK_DebugOnly) char[charsPrinted + 1];
8598     strcpy_s(retBuf, charsPrinted + 1, buf);
8599     return retBuf;
8600 }
8601
8602 /*****************************************************************************/
8603 void                Compiler::gtDispLclVar(unsigned lclNum, bool padForBiggestDisp) 
8604 {
8605     char     buf[BUF_SIZE];
8606     int      charsPrinted = gtGetLclVarName(lclNum, buf, sizeof(buf)/sizeof(buf[0])); 
8607
8608     if (charsPrinted < 0)
8609         return;
8610     
8611     printf("%s", buf);
8612
8613     if (padForBiggestDisp && (charsPrinted < LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH))
8614         printf("%*c", LONGEST_COMMON_LCL_VAR_DISPLAY_LENGTH - charsPrinted, ' ');
8615 }
8616
8617 /*****************************************************************************/
8618 void
8619 Compiler::gtDispConst(GenTree *tree)
8620 {
8621     assert(tree->OperKind() & GTK_CONST);
8622
8623     switch  (tree->gtOper)
8624     {
8625     case GT_CNS_INT:
8626         if (tree->IsIconHandle(GTF_ICON_STR_HDL))
8627         {
8628             printf(" 0x%X \"%S\"", dspPtr(tree->gtIntCon.gtIconVal), eeGetCPString(tree->gtIntCon.gtIconVal));
8629         }
8630         else
8631         {
8632             ssize_t         dspIconVal = tree->IsIconHandle() ? dspPtr(tree->gtIntCon.gtIconVal) : tree->gtIntCon.gtIconVal;
8633
8634             if (tree->TypeGet() == TYP_REF)
8635             {
8636                 assert(tree->gtIntCon.gtIconVal == 0);
8637                 printf(" null");
8638             }
8639             else if ((tree->gtIntCon.gtIconVal > -1000) && (tree->gtIntCon.gtIconVal < 1000))
8640                 printf(" %ld",  dspIconVal);
8641 #ifdef _TARGET_64BIT_
8642             else if ((tree->gtIntCon.gtIconVal & 0xFFFFFFFF00000000LL) != 0)
8643                 printf(" 0x%llx",  dspIconVal);
8644 #endif
8645             else
8646                 printf(" 0x%X", dspIconVal);
8647
8648             if (tree->IsIconHandle())
8649             {
8650                 switch (tree->GetIconHandleFlag())
8651                 {
8652                 case GTF_ICON_SCOPE_HDL:
8653                     printf(" scope");
8654                     break;
8655                 case GTF_ICON_CLASS_HDL:
8656                     printf(" class");
8657                     break;
8658                 case GTF_ICON_METHOD_HDL:
8659                     printf(" method");
8660                     break;
8661                 case GTF_ICON_FIELD_HDL:
8662                     printf(" field");
8663                     break;
8664                 case GTF_ICON_STATIC_HDL:
8665                     printf(" static");
8666                     break;
8667                 case GTF_ICON_STR_HDL:
8668                     unreached();            // This case is handled above
8669                     break;
8670                 case GTF_ICON_PSTR_HDL:
8671                     printf(" pstr");
8672                     break;
8673                 case GTF_ICON_PTR_HDL:
8674                     printf(" ptr");
8675                     break;
8676                 case GTF_ICON_VARG_HDL:
8677                     printf(" vararg");
8678                     break;
8679                 case GTF_ICON_PINVKI_HDL:
8680                     printf(" pinvoke");
8681                     break;
8682                 case GTF_ICON_TOKEN_HDL:
8683                     printf(" token");
8684                     break;
8685                 case GTF_ICON_TLS_HDL:
8686                     printf(" tls");
8687                     break;
8688                 case GTF_ICON_FTN_ADDR:
8689                     printf(" ftn");
8690                     break;
8691                 case GTF_ICON_CIDMID_HDL:
8692                     printf(" cid");
8693                     break;
8694                 case GTF_ICON_BBC_PTR:
8695                     printf(" bbc");
8696                     break;
8697                 default:
8698                     printf(" UNKNOWN");
8699                     break;
8700                 }
8701             }
8702
8703             if ((tree->gtFlags & GTF_ICON_FIELD_OFF) != 0)
8704                 printf(" field offset");
8705
8706             if ((tree->IsReuseRegVal()) != 0)
8707                 printf(" reuse reg val");
8708         }
8709
8710         gtDispFieldSeq(tree->gtIntCon.gtFieldSeq);
8711        
8712         break;
8713
8714     case GT_CNS_LNG: 
8715         printf(" 0x%016I64x", tree->gtLngCon.gtLconVal); 
8716         break;
8717
8718     case GT_CNS_DBL:
8719         if (*((__int64 *)&tree->gtDblCon.gtDconVal) == (__int64)I64(0x8000000000000000))
8720             printf(" -0.00000");
8721         else
8722             printf(" %#.17g", tree->gtDblCon.gtDconVal); 
8723         break;
8724     case GT_CNS_STR:
8725         printf("<string constant>");
8726         break;
8727     default: assert(!"unexpected constant node");
8728     }
8729
8730     gtDispRegVal(tree);
8731 }
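// Illustrative constant dumps produced above (the values shown are, of course,
// hypothetical):
//
//   " 42"                          -- a small GT_CNS_INT value
//   " null"                        -- a zero GT_CNS_INT of type TYP_REF
//   " 0x2B70 class"                -- a handle constant, followed by its handle kind
//   " 0x00000000000000ff"          -- a GT_CNS_LNG value
//   " -0.00000"                    -- a GT_CNS_DBL holding negative zero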
8732
8733 void Compiler::gtDispFieldSeq(FieldSeqNode* pfsn)
8734 {
8735     if (pfsn == FieldSeqStore::NotAField() || (pfsn == nullptr))
8736     {
8737         return;
8738     }
8739
8740     // Otherwise...
8741     printf(" Fseq[");
8742     while (pfsn != NULL)
8743     {
8744         assert(pfsn != FieldSeqStore::NotAField()); // Can't exist in a field sequence list except alone
8745         CORINFO_FIELD_HANDLE fldHnd = pfsn->m_fieldHnd;
8746         // First check the "pseudo" field handles...
8747         if (fldHnd == FieldSeqStore::FirstElemPseudoField)
8748         {
8749             printf("#FirstElem");
8750         }
8751         else if (fldHnd == FieldSeqStore::ConstantIndexPseudoField)
8752         {
8753             printf("#ConstantIndex");
8754         }
8755         else
8756         {
8757             printf("%s", eeGetFieldName(fldHnd));
8758         }
8759         pfsn = pfsn->m_next;
8760         if (pfsn != NULL) printf(", ");
8761     }
8762     printf("]");
8763 }
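// Illustrative output (real field names come from eeGetFieldName, so the name
// below is hypothetical):
//
//   " Fseq[#FirstElem, m_value]"   -- a first-array-element pseudo field followed
//                                     by a real field handle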
8764
8765 //------------------------------------------------------------------------
8766 // gtDispLeaf: Print a single leaf node to jitstdout.
8767 //
8768 // Arguments:
8769 //    tree - the tree to be printed
8770 //    indentStack - the specification for the current level of indentation & arcs
8771 //
8772 // Return Value:
8773 //    None.
8774 //
8775 // Notes:
8776 //    'indentStack' may be null, in which case no indentation or arcs are printed
8777
8778 void
8779 Compiler::gtDispLeaf(GenTree *tree, IndentStack* indentStack)
8780 {
8781     if (tree->OperKind() & GTK_CONST)
8782     {
8783         gtDispConst(tree);
8784         return;
8785     }
8786
8787     bool isLclFld = false;
8788
8789     switch  (tree->gtOper)
8790     {
8791         unsigned        varNum;
8792         LclVarDsc *     varDsc;
8793
8794     case GT_LCL_FLD:
8795     case GT_LCL_FLD_ADDR:
8796     case GT_STORE_LCL_FLD:
8797         isLclFld = true;
8798         __fallthrough;
8799
8800     case GT_PHI_ARG:
8801     case GT_LCL_VAR:
8802     case GT_LCL_VAR_ADDR:
8803     case GT_STORE_LCL_VAR:
8804         printf(" ");
8805         varNum = tree->gtLclVarCommon.gtLclNum;
8806         varDsc = &lvaTable[varNum];
8807         gtDispLclVar(varNum);
8808         if (tree->gtLclVarCommon.HasSsaName())
8809         {
8810             if (tree->gtFlags & GTF_VAR_USEASG)
8811             {
8812                 assert(tree->gtFlags & GTF_VAR_DEF);
8813                 printf("ud:%d->%d", tree->gtLclVarCommon.gtSsaNum, GetSsaNumForLocalVarDef(tree));
8814             }
8815             else
8816             {
8817                 printf("%s:%d", (tree->gtFlags & GTF_VAR_DEF) ? "d" : "u", tree->gtLclVarCommon.gtSsaNum);
8818             }
8819         }
8820
8821         if (isLclFld)
8822         {
8823             printf("[+%u]", tree->gtLclFld.gtLclOffs);
8824             gtDispFieldSeq(tree->gtLclFld.gtFieldSeq);
8825         }
8826
8827         if (varDsc->lvRegister)
8828         {
8829             printf(" ");
8830             varDsc->PrintVarReg();
8831         }
8832 #ifndef LEGACY_BACKEND
8833         else if (tree->InReg())
8834         {
8835 #if CPU_LONG_USES_REGPAIR
8836             if (isRegPairType(tree->TypeGet()))
8837                 printf(" %s", compRegPairName(tree->gtRegPair));
8838             else
8839 #endif
8840                 printf(" %s", compRegVarName(tree->gtRegNum));
8841         }
8842 #endif // !LEGACY_BACKEND
8843         
8844         if (varDsc->lvPromoted)
8845         {
8846             assert(varTypeIsPromotable(varDsc) || varDsc->lvUnusedStruct);
8847             
8848             CORINFO_CLASS_HANDLE  typeHnd = varDsc->lvVerTypeInfo.GetClassHandle();     
8849             CORINFO_FIELD_HANDLE  fldHnd;
8850
8851             for (unsigned i = varDsc->lvFieldLclStart;
8852                  i < varDsc->lvFieldLclStart + varDsc->lvFieldCnt;
8853                  ++i)        
8854             { 
8855                 LclVarDsc *  fieldVarDsc = &lvaTable[i];
8856                 const char* fieldName;
8857 #if !defined(_TARGET_64BIT_)
8858                 if (varTypeIsLong(varDsc))
8859                 {
8860                     fieldName = (i == 0) ? "lo" : "hi";
8861                 }
8862                 else
8863 #endif // !defined(_TARGET_64BIT_)
8864                 {
8865                     fldHnd = info.compCompHnd->getFieldInClass(typeHnd, fieldVarDsc->lvFldOrdinal);
8866                     fieldName = eeGetFieldName(fldHnd);
8867                 }
8868                 
8869                 printf("\n");                    
8870                 printf("                                                  ");
8871                 printIndent(indentStack);
8872                 printf("    %-6s V%02u.%s (offs=0x%02x) -> ",
8873                        varTypeName(fieldVarDsc->TypeGet()),
8874                        tree->gtLclVarCommon.gtLclNum,
8875                        fieldName,
8876                        fieldVarDsc->lvFldOffset
8877                       );
8878                 gtDispLclVar(i);
8879
8880                 if (fieldVarDsc->lvRegister)
8881                 {
8882                     printf(" ");
8883                     fieldVarDsc->PrintVarReg();
8884                 }
8885
8886                 if (fieldVarDsc->lvTracked &&
8887                     fgLocalVarLivenessDone &&  // Includes local variable liveness
8888                     ((tree->gtFlags & GTF_VAR_DEATH) != 0))
8889                 {
8890                     printf(" (last use)");
8891                 }
8892             }
8893         }
8894         else // a normal not-promoted lclvar
8895         {
8896             if (varDsc->lvTracked &&
8897                 fgLocalVarLivenessDone &&
8898                 ((tree->gtFlags & GTF_VAR_DEATH) != 0))
8899             {
8900                 printf(" (last use)");
8901             }
8902         }
8903         break;
8904
8905     case GT_REG_VAR:
8906         printf(" ");
8907         gtDispLclVar(tree->gtRegVar.gtLclNum);
8908         if  (isFloatRegType(tree->gtType))
8909         {
8910             assert(tree->gtRegVar.gtRegNum == tree->gtRegNum);
8911             printf(" FPV%u", tree->gtRegNum);
8912         }
8913         else
8914         {
8915             printf(" %s", compRegVarName(tree->gtRegVar.gtRegNum));
8916         }
8917
8918         varNum = tree->gtRegVar.gtLclNum;
8919         varDsc = &lvaTable[varNum];
8920
8921         if (varDsc->lvTracked &&
8922             fgLocalVarLivenessDone &&
8923             ((tree->gtFlags & GTF_VAR_DEATH) != 0))
8924         {
8925             printf(" (last use)");
8926         }
8927
8928         break;
8929
8930     case GT_JMP:
8931     {
8932         const char *    methodName;
8933         const char *     className;
8934
8935         methodName = eeGetMethodName((CORINFO_METHOD_HANDLE)tree->gtVal.gtVal1, &className);
8936         printf(" %s.%s\n", className, methodName);
8937     }
8938     break;
8939
8940     case GT_CLS_VAR:
8941         printf(" Hnd=%#x"     , dspPtr(tree->gtClsVar.gtClsVarHnd));
8942         gtDispFieldSeq(tree->gtClsVar.gtFieldSeq);
8943         break;
8944
8945     case GT_CLS_VAR_ADDR:
8946         printf(" Hnd=%#x"     , dspPtr(tree->gtClsVar.gtClsVarHnd));
8947         break;
8948
8949     case GT_LABEL:
8950         if (tree->gtLabel.gtLabBB)
8951             printf(" dst=BB%02u"  , tree->gtLabel.gtLabBB->bbNum);
8952         else
8953             printf(" dst=<null>");
8954             
8955         break;
8956
8957     case GT_FTN_ADDR:
8958     {
8959         const char *    methodName;
8960         const char *     className;
8961
8962         methodName = eeGetMethodName((CORINFO_METHOD_HANDLE)tree->gtFptrVal.gtFptrMethod, &className);
8963         printf(" %s.%s\n", className, methodName);
8964     }
8965     break;
8966
8967 #if !FEATURE_EH_FUNCLETS
8968     case GT_END_LFIN:
8969         printf(" endNstLvl=%d", tree->gtVal.gtVal1);
8970         break;
8971 #endif // !FEATURE_EH_FUNCLETS
8972
8973         // Vanilla leaves: no qualifying information available, so print nothing extra.
8974
8975     case GT_NO_OP:
8976     case GT_START_NONGC:
8977     case GT_PROF_HOOK:
8978     case GT_CATCH_ARG:
8979     case GT_MEMORYBARRIER:
8980     case GT_ARGPLACE:
8981     case GT_PINVOKE_PROLOG:
8982 #ifndef LEGACY_BACKEND
8983     case GT_JMPTABLE:    
8984 #endif // !LEGACY_BACKEND
8985         break;
8986
8987     case GT_RET_EXPR:            
8988         printf("(inl return from call ");
8989         printTreeID(tree->gtRetExpr.gtInlineCandidate);
8990         printf(")");
8991         break;         
8992
8993     case GT_PHYSREG:
8994         printf(" %s", getRegName(tree->gtPhysReg.gtSrcReg, varTypeIsFloating(tree)));
8995         break;
8996
8997     default:
8998         assert(!"don't know how to display tree leaf node");
8999     }
9000
9001     gtDispRegVal(tree);
9002 }
9003
9004 //------------------------------------------------------------------------
9005 // gtDispChild: Print a child node to jitstdout.
9006 //
9007 // Arguments:
9008 //    tree - the tree to be printed
9009 //    indentStack - the specification for the current level of indentation & arcs
9010 //    arcType     - the type of arc to use for this child
9011 //    msg         - a contextual message (i.e. from the parent) to print
9012 //    topOnly     - a boolean indicating whether to print the children, or just the top node
9013 //
9014 // Return Value:
9015 //    None.
9016 //
9017 // Notes:
9018 //    'indentStack' may be null, in which case no indentation or arcs are printed
9019 //    'msg' has a default value of null
9020 //    'topOnly' is an optional argument that defaults to false
9021
9022 void                Compiler::gtDispChild(GenTreePtr            child,
9023                                           IndentStack*          indentStack,
9024                                           IndentInfo            arcType,
9025                                           __in_opt const char*  msg,     /* = nullptr  */
9026                                           bool                  topOnly) /* = false */
9027 {
9028     IndentInfo      info;
9029     indentStack->Push(arcType);
9030     gtDispTree(child, indentStack, msg, topOnly);
9031     indentStack->Pop();
9032 }
9033
9034 #ifdef FEATURE_SIMD
9035 // Intrinsic Id to name map
9036 extern
9037 const char * const  simdIntrinsicNames[] =
9038 {
9039     #define SIMD_INTRINSIC(mname, inst, id, name, r, ac, arg1, arg2, arg3, t1, t2, t3, t4, t5, t6, t7, t8, t9, t10)   name,
9040     #include "simdintrinsiclist.h"
9041 };
9042 #endif //FEATURE_SIMD
9043
9044
9045 /*****************************************************************************/
9046
9047 void                Compiler::gtDispTree(GenTreePtr             tree,
9048                                          IndentStack*           indentStack,    /* = nullptr */
9049                                          __in __in_z __in_opt const char * msg, /* = nullptr  */
9050                                          bool                   topOnly)        /* = false */
9051 {
9052     if  (tree == NULL)
9053     {
9054         printf(" [%08X] <NULL>\n", tree);
9055     printf("");         // empty string means flush
9056         return;
9057     }
9058
9059     if (fgOrder == FGOrderLinear && !topOnly)
9060     {
9061         if (tree->gtOper == GT_STMT)
9062         {
9063             (void) gtDispLinearStmt(tree->AsStmt());
9064         }
9065         else
9066         {
9067             gtDispLinearTree(nullptr, fgGetFirstNode(tree), tree, new (this, CMK_DebugOnly) IndentStack(this));
9068         }
9069         return;
9070     }
9071
9072     if (indentStack == nullptr)
9073     {
9074         indentStack = new (this, CMK_DebugOnly) IndentStack(this);
9075     }
9076
9077     if (IsUninitialized(tree))
9078     {
9079         /* Value used to initialize nodes */
9080         printf("Uninitialized tree node!");
9081         return;
9082     }
9083
9084     if  (tree->gtOper >= GT_COUNT)
9085     {
9086         gtDispNode(tree, indentStack, msg); 
9087         printf("Bogus operator!");
9088         return;
9089     }
9090
9091     /* Is tree a leaf node? */
9092
9093     if  (tree->OperIsLeaf()
9094          || tree->OperIsLocalStore()) // local stores used to be leaves
9095     {
9096         gtDispNode(tree, indentStack, msg);
9097         gtDispLeaf(tree, indentStack);
9098         gtDispVN(tree);
9099         printf("\n");
9100         if (tree->OperIsLocalStore() && !topOnly)
9101         {
9102             gtDispChild(tree->gtOp.gtOp1, indentStack, IINone);
9103         }
9104         return;
9105     }
9106
9107     // Determine what kind of arc to propagate.
9108     IndentInfo myArc = IINone;
9109     IndentInfo lowerArc = IINone;
9110     if (indentStack->Depth() > 0)
9111     {
9112         myArc = indentStack->Pop();
9113         switch(myArc)
9114         {
9115         case IIArcBottom:
9116             indentStack->Push(IIArc);
9117             lowerArc = IINone;
9118             break;
9119         case IIArc:
9120             indentStack->Push(IIArc);
9121             lowerArc = IIArc;
9122             break;
9123         case IIArcTop:
9124             indentStack->Push(IINone);
9125             lowerArc = IIArc;
9126             break;
9127         case IIEmbedded:
9128             indentStack->Push(IIEmbedded);
9129             lowerArc = IIEmbedded;
9130             break;
9131         default:
9132             // Should never get here; just use IINone.
9133             break;
9134         }
9135     }
9136
9137     // Special case formatting for PHI nodes -- arg lists like calls.
9138
9139     if (tree->OperGet() == GT_PHI)
9140     {
9141         gtDispNode(tree, indentStack, msg);
9142         gtDispVN(tree);
9143         printf("\n");
9144
9145         if (tree->gtOp.gtOp1 != NULL)
9146         {
9147             IndentInfo arcType = IIArcTop;
9148             for (GenTreeArgList* args = tree->gtOp.gtOp1->AsArgList(); args != NULL; args = args->Rest())
9149             {
9150                 if (args->Rest() == nullptr)
9151                 {
9152                    arcType = IIArcBottom;
9153                 }
9154                 gtDispChild(args->Current(), indentStack, arcType);
9155                 arcType = IIArc;
9156             }
9157         }
9158         return;
9159     }
9160
9161     /* Is it a 'simple' unary/binary operator? */
9162
9163     const char * childMsg = NULL;
9164
9165     if  (tree->OperIsSimple())
9166     {
9167         if (!topOnly)
9168         {
9169             if  (tree->gtGetOp2())
9170             {
9171                 // Label the childMsgs of the GT_COLON operator
9172                 // op2 is the then part
9173
9174                 if (tree->gtOper == GT_COLON)
9175                     childMsg = "then";
9176
9177                 gtDispChild(tree->gtOp.gtOp2, indentStack, IIArcTop, childMsg, topOnly);
9178             }
9179         }
9180
9181         // Now, get the right type of arc for this node
9182         if (myArc != IINone)
9183         {
9184             indentStack->Pop();
9185             indentStack->Push(myArc);
9186         }
9187         gtDispNode(tree, indentStack, msg);
9188
9189         // Propagate lowerArc to the lower children.
9190         if (indentStack->Depth() > 0)
9191         {
9192             (void) indentStack->Pop();
9193             indentStack->Push(lowerArc);
9194         }
9195     
9196         if (tree->gtOper == GT_CAST)
9197         {
9198             /* Format a message that explains the effect of this GT_CAST */
9199
9200             var_types fromType  = genActualType(tree->gtCast.CastOp()->TypeGet());
9201             var_types toType    = tree->CastToType();
9202             var_types finalType = tree->TypeGet();
9203
9204             /* if GTF_UNSIGNED is set then force fromType to an unsigned type */
9205             if (tree->gtFlags & GTF_UNSIGNED)
9206                 fromType = genUnsignedType(fromType);
9207
9208             if (finalType != toType)
9209                 printf(" %s <-", varTypeName(finalType));
9210
9211             printf(" %s <- %s", varTypeName(toType), varTypeName(fromType));
9212         }
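        // For example, a GT_CAST that narrows a TYP_LONG value to TYP_SHORT has a node type of
        // TYP_INT (the actual type of a short), so the block above would print something like
        // " int <- short <- long".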
9213
9214         if (tree->gtOper == GT_OBJ && (tree->gtFlags & GTF_VAR_DEATH))
9215         {
9216             printf(" (last use)");
9217         }
9218
9219         IndirectAssignmentAnnotation* pIndirAnnote;
9220         if (tree->gtOper == GT_ASG && GetIndirAssignMap()->Lookup(tree, &pIndirAnnote))
9221         {
9222             printf("  indir assign of V%02d:", pIndirAnnote->m_lclNum);
9223             if (pIndirAnnote->m_isEntire)
9224             {
9225                 printf("d:%d", pIndirAnnote->m_defSsaNum);
9226             }
9227             else
9228             {
9229                 printf("ud:%d->%d", pIndirAnnote->m_useSsaNum, pIndirAnnote->m_defSsaNum);
9230             }
9231         }
9232
9233         if (tree->gtOper == GT_INTRINSIC)
9234         {
9235             switch (tree->gtIntrinsic.gtIntrinsicId)
9236             {
9237             case CORINFO_INTRINSIC_Sin:     printf(" sin");     break;
9238             case CORINFO_INTRINSIC_Cos:     printf(" cos");     break;
9239             case CORINFO_INTRINSIC_Sqrt:    printf(" sqrt");    break;
9240             case CORINFO_INTRINSIC_Abs:     printf(" abs");     break;
9241             case CORINFO_INTRINSIC_Round:   printf(" round");   break;
9242             case CORINFO_INTRINSIC_Cosh:    printf(" cosh");    break;
9243             case CORINFO_INTRINSIC_Sinh:    printf(" sinh");    break;
9244             case CORINFO_INTRINSIC_Tan:     printf(" tan");     break;
9245             case CORINFO_INTRINSIC_Tanh:    printf(" tanh");    break;
9246             case CORINFO_INTRINSIC_Asin:    printf(" asin");    break;
9247             case CORINFO_INTRINSIC_Acos:    printf(" acos");    break;
9248             case CORINFO_INTRINSIC_Atan:    printf(" atan");    break;
9249             case CORINFO_INTRINSIC_Atan2:   printf(" atan2");   break;
9250             case CORINFO_INTRINSIC_Log10:   printf(" log10");   break;
9251             case CORINFO_INTRINSIC_Pow:     printf(" pow");     break;
9252             case CORINFO_INTRINSIC_Exp:     printf(" exp");     break;
9253             case CORINFO_INTRINSIC_Ceiling: printf(" ceiling"); break;
9254             case CORINFO_INTRINSIC_Floor:   printf(" floor");   break;
9255             case CORINFO_INTRINSIC_Object_GetType: printf(" objGetType");   break;
9256
9257             default:
9258                 unreached();
9259             }
9260         }
9261
9262 #ifdef FEATURE_SIMD
9263         if (tree->gtOper == GT_SIMD)
9264         {          
9265             printf(" %s %s", varTypeName(tree->gtSIMD.gtSIMDBaseType), simdIntrinsicNames[tree->gtSIMD.gtSIMDIntrinsicID]);
9266         }
9267 #endif // FEATURE_SIMD
9268
9269         gtDispRegVal(tree);
9270         gtDispVN(tree);
9271         printf("\n");
9272
9273         if  (!topOnly && tree->gtOp.gtOp1)
9274         {
9275
9276             // Label the child of the GT_COLON operator
9277             // op1 is the else part
9278
9279             if (tree->gtOper == GT_COLON)
9280                 childMsg = "else";
9281             else if (tree->gtOper == GT_QMARK)
9282                 childMsg = "   if"; 
9283
9284             gtDispChild(tree->gtOp.gtOp1, indentStack, IIArcBottom, childMsg, topOnly);
9285         }
9286
9287         return;
9288     }
9289
9290
9291     // Now, get the right type of arc for this node
9292     if (myArc != IINone)
9293     {
9294         indentStack->Pop();
9295         indentStack->Push(myArc);
9296     }
9297     gtDispNode(tree, indentStack, msg);
9298
9299     // Propagate lowerArc to the lower children.
9300     if (indentStack->Depth() > 0)
9301     {
9302         (void) indentStack->Pop();
9303         indentStack->Push(lowerArc);
9304     }
9305
9306     // See what kind of a special operator we have here, and handle its special children.
9307
9308     switch  (tree->gtOper)
9309     {
9310     case GT_FIELD:
9311         printf(" %s", eeGetFieldName(tree->gtField.gtFldHnd));
9312
9313         if  (tree->gtField.gtFldObj && !topOnly)
9314         {
9315             gtDispVN(tree);
9316             printf("\n");
9317             gtDispChild(tree->gtField.gtFldObj, indentStack, IIArcBottom);
9318         }
9319         else
9320         {
9321             gtDispRegVal(tree);
9322             gtDispVN(tree);
9323             printf("\n");
9324         }
9325         break;
9326
9327     case GT_CALL:
9328         {
9329             assert(tree->gtFlags & GTF_CALL);
9330             unsigned numChildren = tree->NumChildren();
9331             GenTree* lastChild = nullptr;
9332             if (numChildren != 0)
9333             {
9334                 lastChild = tree->GetChild(numChildren - 1);
9335             }
9336
9337             if (tree->gtCall.gtCallType != CT_INDIRECT)
9338             {
9339                 const char *    methodName;
9340                 const char *     className;
9341
9342                 methodName = eeGetMethodName(tree->gtCall.gtCallMethHnd, &className);
9343
9344                 printf(" %s.%s", className, methodName);
9345             }
9346
9347             if ((tree->gtFlags & GTF_CALL_UNMANAGED) && (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_FRAME_VAR_DEATH))
9348             {
9349                 printf(" (FramesRoot last use)");
9350             }
9351
9352             if (((tree->gtFlags & GTF_CALL_INLINE_CANDIDATE) != 0) &&
9353                 (tree->gtCall.gtInlineCandidateInfo != NULL) &&
9354                 (tree->gtCall.gtInlineCandidateInfo->exactContextHnd != NULL))
9355             {
9356                 printf(" (exactContextHnd=0x%p)", dspPtr(tree->gtCall.gtInlineCandidateInfo->exactContextHnd));
9357             }
9358
9359             gtDispVN(tree);
9360             if (tree->IsMultiRegCall())
9361             {
9362                 gtDispRegVal(tree);
9363             }
9364             printf("\n");
9365
9366             if (!topOnly)
9367             {
9368                 char   buf[64];
9369                 char * bufp;
9370
9371                 bufp = &buf[0];
9372
9373                 if  ((tree->gtCall.gtCallObjp         != NULL) &&
9374                      (tree->gtCall.gtCallObjp->gtOper != GT_NOP) &&
9375                      (!tree->gtCall.gtCallObjp->IsArgPlaceHolderNode()))
9376                 {
9377                     if (tree->gtCall.gtCallObjp->gtOper == GT_ASG)
9378                         sprintf_s(bufp, sizeof(buf), "this SETUP%c", 0);
9379                     else
9380                         sprintf_s(bufp, sizeof(buf), "this in %s%c", compRegVarName(REG_ARG_0), 0);
9381                     gtDispChild(tree->gtCall.gtCallObjp, indentStack, (tree->gtCall.gtCallObjp == lastChild) ? IIArcBottom : IIArc, bufp, topOnly);
9382                 }
9383
9384                 if (tree->gtCall.gtCallArgs)
9385                     gtDispArgList(tree, indentStack);
9386
9387                 if  (tree->gtCall.gtCallType == CT_INDIRECT)
9388                     gtDispChild(tree->gtCall.gtCallAddr, indentStack, (tree->gtCall.gtCallAddr == lastChild) ? IIArcBottom : IIArc, "calli tgt", topOnly);
9389
9390                 if  (tree->gtCall.gtControlExpr != nullptr)
9391                     gtDispChild(tree->gtCall.gtControlExpr, indentStack, (tree->gtCall.gtControlExpr == lastChild) ? IIArcBottom : IIArc, "control expr", topOnly);
9392
9393     #if !FEATURE_FIXED_OUT_ARGS
9394                 regList list = tree->gtCall.regArgList;
9395     #endif
9396                 /* process the late argument list */
9397                 int lateArgIndex=0;
9398                 for (GenTreeArgList* lateArgs = tree->gtCall.gtCallLateArgs; lateArgs; (lateArgIndex++, lateArgs = lateArgs->Rest()))
9399                 {
9400                     GenTreePtr argx;
9401
9402                     argx    = lateArgs->Current();
9403
9404                     IndentInfo arcType = (lateArgs->Rest() == nullptr) ? IIArcBottom : IIArc;
9405                     gtGetLateArgMsg(tree, argx, lateArgIndex, -1, bufp, sizeof(buf));
9406                     gtDispChild(argx, indentStack, arcType, bufp, topOnly);
9407                 }
9408             }
9409         }
9410         break;
9411
9412     case GT_STMT:
9413         printf("\n");
9414
9415         if  (!topOnly)
9416             gtDispChild(tree->gtStmt.gtStmtExpr, indentStack, IIArcBottom);
9417         break;
9418
9419     case GT_ARR_ELEM:
9420         gtDispVN(tree);
9421         printf("\n");
9422
9423         if  (!topOnly)
9424         {
9425             gtDispChild(tree->gtArrElem.gtArrObj, indentStack, IIArc, nullptr, topOnly);
9426
9427             unsigned dim;
9428             for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
9429             {
9430                 IndentInfo arcType = ((dim + 1) == tree->gtArrElem.gtArrRank) ? IIArcBottom : IIArc;
9431                 gtDispChild(tree->gtArrElem.gtArrInds[dim], indentStack, arcType, nullptr, topOnly);
9432             }
9433         }
9434         break;
9435
9436     case GT_ARR_OFFSET:
9437         gtDispVN(tree);
9438         printf("\n");
9439         if  (!topOnly)
9440         {
9441             gtDispChild(tree->gtArrOffs.gtOffset, indentStack, IIArc, nullptr, topOnly);
9442             gtDispChild(tree->gtArrOffs.gtIndex,  indentStack, IIArc, nullptr, topOnly);
9443             gtDispChild(tree->gtArrOffs.gtArrObj, indentStack, IIArcBottom, nullptr, topOnly);
9444         }
9445         break;
9446
9447     case GT_CMPXCHG:
9448         gtDispVN(tree);
9449         printf("\n");
9450         if  (!topOnly)
9451         {
9452             gtDispChild(tree->gtCmpXchg.gtOpLocation,  indentStack, IIArc, nullptr, topOnly);
9453             gtDispChild(tree->gtCmpXchg.gtOpValue,     indentStack, IIArc, nullptr, topOnly);
9454             gtDispChild(tree->gtCmpXchg.gtOpComparand, indentStack, IIArcBottom, nullptr, topOnly);
9455         }
9456         break;
9457
9458     case GT_ARR_BOUNDS_CHECK:
9459 #ifdef FEATURE_SIMD
9460     case GT_SIMD_CHK:
9461 #endif // FEATURE_SIMD
9462         gtDispVN(tree);
9463         printf("\n");
9464         if  (!topOnly)
9465         {
9466             gtDispChild(tree->gtBoundsChk.gtArrLen, indentStack, IIArc, nullptr, topOnly);
9467             gtDispChild(tree->gtBoundsChk.gtIndex,  indentStack, IIArcBottom, nullptr, topOnly);
9468         }
9469         break;
9470
9471     default:
9472         printf("<DON'T KNOW HOW TO DISPLAY THIS NODE> :");
9473         printf("");         // empty string means flush
9474         break;
9475     }
9476 }
9477
9478 //------------------------------------------------------------------------
9479 // gtGetArgMsg: Construct a message about the given argument
9480 //
9481 // Arguments:
9482 //    call      - The call for which 'arg' is an argument
9483 //    arg       - The argument for which a message should be constructed
9484 //    argNum    - The ordinal number of the arg in the argument list
9485 //    listCount - When printing in Linear form this is the count for a multireg GT_LIST 
9486 //                or -1 if we are not printing in Linear form
9487 //    bufp      - A pointer to the buffer into which the message is written
9488 //    bufLength - The length of the buffer pointed to by bufp
9489 //
9490 // Return Value:
9491 //    No return value, but bufp is written.
9492 //
9493 // Assumptions:
9494 //    'call' must be a call node
9495 //    'arg' must be an argument to 'call' (else gtArgEntryByNode will assert)
9496
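// For example (based on the format strings below, not on a particular dump), the messages
// look like "arg1 SETUP" for an argument that has a late setup copy, "arg2 out+08" or
// "arg2 on STK" for a stack-passed argument (depending on FEATURE_FIXED_OUT_ARGS), and
// plain "arg0" when the call has no late args.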
9497 void                Compiler::gtGetArgMsg(GenTreePtr        call,
9498                                           GenTreePtr        arg,
9499                                           unsigned          argNum,
9500                                           int               listCount,
9501                                           char*             bufp,
9502                                           unsigned          bufLength)
9503 {
9504     if (call->gtCall.gtCallLateArgs != NULL)
9505     {
9506         fgArgTabEntryPtr curArgTabEntry = gtArgEntryByArgNum(call, argNum);
9507         assert(curArgTabEntry);
9508
9509         if (arg->gtFlags & GTF_LATE_ARG)
9510         {
9511             sprintf_s(bufp, bufLength, "arg%d SETUP%c", argNum, 0);
9512         }
9513         else 
9514         {
9515 #if FEATURE_FIXED_OUT_ARGS
9516             if (listCount == -1)
9517             {
9518                 sprintf_s(bufp, bufLength, "arg%d out+%02x%c", argNum, curArgTabEntry->slotNum * TARGET_POINTER_SIZE, 0);
9519             }
9520             else // listCount is 0,1,2 or 3
9521             {
9522                 assert(listCount <= MAX_ARG_REG_COUNT);
9523                 sprintf_s(bufp, bufLength, "arg%d out+%02x%c", argNum, (curArgTabEntry->slotNum + listCount) * TARGET_POINTER_SIZE, 0);
9524             }
9525 #else
9526             sprintf_s(bufp, bufLength, "arg%d on STK%c", argNum, 0);
9527 #endif
9528         }
9529     }
9530     else
9531     {
9532         sprintf_s(bufp, bufLength, "arg%d%c", argNum, 0);
9533     }
9534 }
9535
9536 //------------------------------------------------------------------------
9537 // gtGetLateArgMsg: Construct a message about the given argument
9538 //
9539 // Arguments:
9540 //    call         - The call for which 'arg' is an argument
9541 //    argx         - The argument for which a message should be constructed
9542 //    lateArgIndex - The ordinal number of the arg in the late arg list
9543 //    listCount    - When printing in Linear form this is the count for a multireg GT_LIST 
9544 //                   or -1 if we are not printing in Linear form
9545 //    bufp         - A pointer to the buffer into which the message is written
9546 //    bufLength    - The length of the buffer pointed to by bufp
9547 //
9548 // Return Value:
9549 //    No return value, but bufp is written.
9550 //
9551 // Assumptions:
9552 //    'call' must be a call node
9553 //    'argx' must be a late argument to 'call' (else gtArgEntryByLateArgIndex will assert)
9554
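// For example (based on the format strings below; register names vary by target), the
// messages look like "this in rcx", "arg1 in rdx", "arg2 rdx,r8" for a struct passed in
// two registers, or "arg3 in out+10" for a late arg that lives in the outgoing arg area.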
9555 void                Compiler::gtGetLateArgMsg(GenTreePtr        call,
9556                                               GenTreePtr        argx,
9557                                               int               lateArgIndex,
9558                                               int               listCount,
9559                                               char*             bufp,
9560                                               unsigned          bufLength)
9561 {
9562     assert(!argx->IsArgPlaceHolderNode());  // No placeholder nodes are in gtCallLateArgs.
9563
9564     fgArgTabEntryPtr curArgTabEntry = gtArgEntryByLateArgIndex(call, lateArgIndex);
9565     assert(curArgTabEntry);
9566     regNumber argReg = curArgTabEntry->regNum;
9567
9568 #if !FEATURE_FIXED_OUT_ARGS
9569     assert(lateArgIndex < call->gtCall.regArgListCount);
9570     assert(argReg == call->gtCall.regArgList[lateArgIndex]);
9571 #else
9572     if (argReg == REG_STK)
9573     {
9574         sprintf_s(bufp, bufLength, "arg%d in out+%02x%c", curArgTabEntry->argNum, curArgTabEntry->slotNum * TARGET_POINTER_SIZE, 0);
9575     }
9576     else
9577 #endif
9578     {
9579         if (gtArgIsThisPtr(curArgTabEntry))
9580         {
9581             sprintf_s(bufp, bufLength, "this in %s%c", compRegVarName(argReg), 0);
9582         }
9583         else
9584         {
9585 #if FEATURE_MULTIREG_ARGS
9586             if (curArgTabEntry->numRegs >= 2)
9587             {
9588                 regNumber otherRegNum;
9589 #if defined(FEATURE_UNIX_AMD64_STRUCT_PASSING)
9590                 assert(curArgTabEntry->numRegs == 2);
9591                 otherRegNum = curArgTabEntry->otherRegNum;
9592 #else 
9593                 otherRegNum = (regNumber)(((unsigned)curArgTabEntry->regNum) + curArgTabEntry->numRegs - 1);
9594 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
9595
9596                 if (listCount == -1)
9597                 {
9598                     char separator = (curArgTabEntry->numRegs == 2) ? ',' : '-';
9599
9600                     sprintf_s(bufp, bufLength, "arg%d %s%c%s%c", curArgTabEntry->argNum,
9601                         compRegVarName(argReg), separator, compRegVarName(otherRegNum), 0);
9602                 }
9603                 else // listCount is 0,1,2 or 3
9604                 {
9605                     assert(listCount <= MAX_ARG_REG_COUNT);
9606                     regNumber curReg = (listCount == 1) ? otherRegNum : (regNumber)((unsigned)(argReg)+listCount);
9607                     sprintf_s(bufp, bufLength, "arg%d m%d %s%c", curArgTabEntry->argNum, listCount, compRegVarName(curReg), 0);
9608                 }
9609             }
9610             else
9611 #endif
9612             {
9613                 sprintf_s(bufp, bufLength, "arg%d in %s%c", curArgTabEntry->argNum, compRegVarName(argReg), 0);
9614             }
9615         }
9616     }
9617 }
9618
9619 //------------------------------------------------------------------------
9620 // gtDispArgList: Dump the tree for a call arg list
9621 //
9622 // Arguments:
9623 //    tree         - The call whose arguments are to be dumped
9624 //    indentStack  - the specification for the current level of indentation & arcs
9625 //
9626 // Return Value:
9627 //    None.
9628 //
9629 // Assumptions:
9630 //    'tree' must be a call node
9631
9632 void                Compiler::gtDispArgList(GenTreePtr      tree,
9633                                             IndentStack*    indentStack)
9634 {
9635     GenTree *  args     = tree->gtCall.gtCallArgs;
9636     unsigned   argnum   = 0;
9637     const int  BufLength = 256;
9638     char       buf[BufLength];
9639     char *     bufp     = &buf[0];
9640     unsigned numChildren = tree->NumChildren();
9641     assert(numChildren != 0);
9642     bool argListIsLastChild = (args == tree->GetChild(numChildren - 1));
9643
9644     IndentInfo arcType = IIArc;
9645     if (tree->gtCall.gtCallObjp != NULL)
9646         argnum++;
9647
9648     while (args != 0)
9649     {
9650         assert(args->gtOper == GT_LIST);
9651         GenTree* arg = args->gtOp.gtOp1;
9652         if (!arg->IsNothingNode() && !arg->IsArgPlaceHolderNode())
9653         {
9654             gtGetArgMsg(tree, arg, argnum, -1, bufp, BufLength);
9655             if (argListIsLastChild && (args->gtOp.gtOp2 == nullptr))
9656             {
9657                 arcType = IIArcBottom;
9658             }
9659             gtDispChild(arg, indentStack, arcType, bufp, false);
9660         }
9661         args = args->gtOp.gtOp2;
9662         argnum++;
9663     }
9664 }
9665
9666 //------------------------------------------------------------------------
9667 // gtDispTreeList: Dump a list of trees linked via gtNext
9668 //
9669 // Arguments:
9670 //    tree         - The first tree in the gtNext-linked list to be dumped
9671 //    indentStack  - the specification for the current level of indentation & arcs
9672 //
9673 // Return Value:
9674 //    None.
9675 //
9676 // Assumptions:
9677 //    'tree' is the head of a gtNext-linked list of trees (e.g. the statements of a block)
9678
9679 void                Compiler::gtDispTreeList(GenTreePtr     tree,
9680                                              IndentStack*   indentStack /* = nullptr */)
9681 {
9682     for (/*--*/; tree != nullptr; tree = tree->gtNext)
9683     {
9684         gtDispTree(tree, indentStack);
9685         printf("\n");
9686     }
9687 }
9688
9689 //------------------------------------------------------------------------
9690 // nextPrintable: Retrieves the next gtNode that can be dumped in linear order
9691 //
9692 // Arguments:
9693 //    next         - The candidate next node in the linear order
9694 //    tree         - The node at which to stop skipping (the current tree being dumped)
9695 //
9696 // Return Value:
9697 //    The next node to be printed in linear order.
9698 //
9699 GenTree* nextPrintable(GenTree* next, GenTree* tree)
9700 {
9701     assert(next != nullptr);
9702     assert(tree != nullptr);
9703
9704     // Skip any nodes that are in the linear order, but that we don't actually visit
9705     while (next != tree && (next->IsList() || next->IsArgPlaceHolderNode()))
9706     {
9707         next = next->gtNext;
9708     }
9709     return next;
9710 }
9711
9712 //------------------------------------------------------------------------
9713 // gtDispLinearTree: Dump a tree in linear order
9714 //
9715 // Arguments:
9716 //    curStmt        - The current statement being dumped
9717 //    nextLinearNode - The next node to be printed
9718 //    tree           - The current tree being traversed
9719 //    indentStack    - the specification for the current level of indentation & arcs
9720 //    msg            - a contextual message (i.e. from the parent) to print
9721 //
9722 // Return Value:
9723 //    None.
9724 //
9725 // Assumptions:
9726 //    'tree' must be a node within the current statement (on the initial call, the statement's root expression)
9727 //
9728 // Notes:
9729 //     'nextLinearNode' tracks the node we should be printing next.
9730 //     In general, we should encounter it as we traverse the tree.  If not, we
9731 //     have an embedded statement, so that statement is then printed within
9732 //     the dump for this statement.
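//     When such an embedded statement is found, it is dumped recursively via
//     gtDispLinearStmt with an IIEmbedded arc pushed on the indent stack; if the linear
//     order cannot be reconciled, the remaining nodes are dumped with an IIError arc
//     instead of asserting.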
9733
9734 GenTreePtr          Compiler::gtDispLinearTree(GenTreeStmt* curStmt,
9735                                                GenTreePtr   nextLinearNode,
9736                                                GenTreePtr   tree,
9737                                                IndentStack* indentStack,
9738                                                __in __in_z __in_opt const char * msg /* = nullptr  */)
9739 {
9740     const int  BufLength = 256;
9741     char       buf[BufLength];
9742     char *     bufp     = &buf[0];
9743
9744     // Determine what kind of arc to propagate
9745     IndentInfo myArc = IINone;
9746     if (indentStack->Depth() > 0)
9747     {
9748         myArc = indentStack->Pop();
9749         if (myArc == IIArcBottom || myArc == IIArc)
9750         {
9751             indentStack->Push(IIArc);
9752         }
9753         else
9754         {
9755             assert(myArc == IIArcTop);
9756             indentStack->Push(IINone);
9757         }
9758     }
9759
9760     // Visit children
9761     unsigned childCount = tree->NumChildren();
9762     GenTreePtr deferChild = nullptr;
9763     for (unsigned i = 0;
9764          i < childCount;
9765          i++)
9766     {
9767         unsigned childIndex = i;
9768         if (tree->OperIsBinary() && tree->IsReverseOp())
9769         {
9770             childIndex = (i == 0) ? 1 : 0;
9771         }
9772
9773         GenTreePtr child = tree->GetChild(childIndex);
9774         IndentInfo indentInfo = (i == 0) ? IIArcTop : IIArc;
9775
9776         if (tree->OperGet() == GT_COLON && i == 1)
9777         {
9778             deferChild = child;
9779             continue;
9780         }
9781
9782         unsigned listElemNum = 0;
9783         const char* childMsg = nullptr;
9784         if (tree->IsCall())
9785         {
9786             if (child == tree->gtCall.gtCallObjp)
9787             {
9788                 if (child->gtOper == GT_ASG)
9789                 {
9790                     sprintf_s(bufp, sizeof(buf), "this SETUP%c", 0);
9791                 }
9792                 else
9793                 {
9794                     sprintf_s(bufp, sizeof(buf), "this in %s%c", compRegVarName(REG_ARG_0), 0);
9795                 }
9796                 childMsg = bufp;
9797             }
9798             else if (child == tree->gtCall.gtCallAddr)
9799             {
9800                 childMsg = "calli tgt";
9801             }
9802             else if (child == tree->gtCall.gtControlExpr)
9803             {
9804                 childMsg = "control expr";
9805             }
9806             else if (child == tree->gtCall.gtCallCookie)
9807             {
9808                 childMsg = "cookie";
9809             }
9810             else if (child == tree->gtCall.gtCallArgs)
9811             {
9812                 // List is handled below, but adjust listElemNum to account for "this" if necessary
9813                 if (tree->gtCall.gtCallObjp != nullptr)
9814                     listElemNum = 1;
9815             }
9816             else
9817             {
9818                 // Late args list is handled below
9819                 assert(child == tree->gtCall.gtCallLateArgs);
9820             }
9821         }
9822
9823         if (child->OperGet() == GT_LIST)
9824         {
9825             // For each list element
9826             GenTreePtr nextList = nullptr;
9827             if (child->gtOp.gtOp2 != nullptr
9828                 && child->gtOp.gtOp2->gtOper != GT_LIST)
9829             {
9830                 // special case for child of initblk and cpblk
9831                 // op1 is dst, op2 is src, and op2 must show up first
9832                 assert(tree->OperIsBlkOp());
9833                 sprintf_s(bufp, sizeof(buf), "Source");
9834                 indentStack->Push(indentInfo);
9835                 nextLinearNode = gtDispLinearTree(curStmt, nextLinearNode, child->gtOp.gtOp2, indentStack, bufp);
9836                 indentStack->Pop();
9837
9838                 indentInfo = IIArc;
9839                 sprintf_s(bufp, sizeof(buf), "Destination");
9840                 indentStack->Push(indentInfo);
9841                 nextLinearNode = gtDispLinearTree(curStmt, nextLinearNode, child->gtOp.gtOp1, indentStack, bufp);
9842                 indentStack->Pop();
9843             }
9844             else
9845             {
9846                 // normal null-terminated list case
9847
9848                 for (GenTreePtr list = child; list != nullptr; list = nextList)
9849                 {
9850                     GenTreePtr listElem;
9851                     if (list->gtOper == GT_LIST)
9852                     {
9853                         nextList = list->gtGetOp2();
9854                         listElem = list->gtGetOp1();
9855                     }
9856                     else
9857                     {
9858                         // GT_LIST nodes (under initBlk, others?) can have a non-null op2 that's not a GT_LIST
9859                         nextList = nullptr;
9860                         listElem = list;
9861                     }
9862
9863                     // get child msg
9864                     if (tree->IsCall())
9865                     {
9866                         // If this is a call and the arg (listElem) is a GT_LIST (a Unix LCL_FLD used to pass a var in multiple registers),
9867                         // print the nodes of the nested list and continue to the next argument.
9868                         if (listElem->gtOper == GT_LIST)
9869                         {
9870                             int listCount = 0;
9871                             GenTreePtr nextListNested = nullptr;
9872                             for (GenTreePtr listNested = listElem; listNested != nullptr; listNested = nextListNested)
9873                             {
9874                                 GenTreePtr listElemNested;
9875                                 if (listNested->gtOper == GT_LIST)
9876                                 {
9877                                     nextListNested = listNested->MoveNext();
9878                                     listElemNested = listNested->Current();
9879                                 }
9880                                 else
9881                                 {
9882                                     // GT_LIST nodes (under initBlk, others?) can have a non-null op2 that's not a GT_LIST
9883                                     nextListNested = nullptr;
9884                                     listElemNested = listNested;
9885                                 }
9886
9887                                 indentStack->Push(indentInfo);
9888                                 if (child == tree->gtCall.gtCallArgs)
9889                                 {
9890                                     gtGetArgMsg(tree, listNested, listElemNum, listCount, bufp, BufLength);
9891                                 }
9892                                 else
9893                                 {
9894                                     assert(child == tree->gtCall.gtCallLateArgs);
9895                                     gtGetLateArgMsg(tree, listNested, listElemNum, listCount, bufp, BufLength);
9896                                 }
9897                                 listCount++;
9898                                 nextLinearNode = gtDispLinearTree(curStmt, nextLinearNode, listElemNested, indentStack, bufp);
9899                                 indentStack->Pop();
9900                             }
9901
9902                             // Skip the GT_LIST nodes, as we do not print them, and the next node to print will occur
9903                             // after the list.
9904                             while (nextLinearNode->OperGet() == GT_LIST)
9905                             {
9906                                 nextLinearNode = nextLinearNode->gtNext;
9907                             }
9908
9909                             listElemNum++;
9910                             continue;
9911                         }
9912
9913                         if (child == tree->gtCall.gtCallArgs)
9914                         {
9915                             gtGetArgMsg(tree, listElem, listElemNum, -1, bufp, BufLength);
9916                         }
9917                         else
9918                         {
9919                             assert(child == tree->gtCall.gtCallLateArgs);
9920                             gtGetLateArgMsg(tree, listElem, listElemNum, -1, bufp, BufLength);
9921                         }
9922                     }
9923                     else
9924                     {
9925                         sprintf_s(bufp, sizeof(buf), "List Item %d", listElemNum);
9926                     }
9927
9928                     indentStack->Push(indentInfo);
9929                     nextLinearNode = gtDispLinearTree(curStmt, nextLinearNode, listElem, indentStack, bufp);
9930                     indentStack->Pop();
9931                     indentInfo = IIArc;
9932                     listElemNum++;
9933                 }
9934             }
9935                 
9936             // Skip the GT_LIST nodes, as we do not print them, and the next node to print will occur
9937             // after the list.
9938             while (nextLinearNode->OperGet() == GT_LIST)
9939             {
9940                 nextLinearNode = nextLinearNode->gtNext;
9941             }
9942         }
9943         else
9944         {
9945             indentStack->Push(indentInfo);
9946             nextLinearNode = gtDispLinearTree(curStmt, nextLinearNode, child, indentStack, childMsg);
9947             indentStack->Pop();
9948         }
9949     }
9950     // This sometimes gets called before nodes have been properly sequenced.
9951     // TODO-Cleanup: Determine whether this needs to be hardened in some way.
9952     if (nextLinearNode == nullptr)
9953     {
9954         printf("BROKEN LINEAR ORDER\n");
9955         nextLinearNode = tree;
9956     }
9957
9958     // If we don't have a 'curStmt', we're only printing the local tree, so skip
9959     // any embedded statements
9960     if (curStmt != nullptr)
9961     {
9962         while (nextLinearNode != tree)
9963         {
9964             // Get the next statement, which had better be embedded
9965             GenTreePtr nextStmt = curStmt->gtNext;
9966             while (nextStmt != nullptr &&
9967                    nextStmt->gtStmt.gtStmtIsEmbedded() &&
9968                    nextStmt->gtStmt.gtStmtList != nextLinearNode)
9969             {
9970                 nextStmt = nextStmt->gtNext;
9971             }
9972
9973             if(nextStmt != nullptr && nextStmt->gtStmt.gtStmtList == nextLinearNode)
9974             {
9975                 indentStack->Push(IIEmbedded);
9976                 nextLinearNode = gtDispLinearStmt(nextStmt->AsStmt(), indentStack);
9977                 indentStack->Pop();
9978             }
9979             else if (nextLinearNode != nullptr)
9980             {
9981                 // If we have an inconsistency, attempt to print the rest of the broken tree, but don't assert,
9982                 // since we don't really want to have different asserts when dumping.
9983                 // The method should fail later with an assert in fgDebugCheckNodeLinks() the next time it's called.
9984                 // Print the next node in linear order, and eventually we will reach the end of the statement,
9985                 // or sync up to 'tree'
9986                 IndentInfo saveInfo = indentStack->Pop();
9987                 indentStack->Push(IIError);
9988                 gtDispTree(nextLinearNode, indentStack, msg, true /*topOnly*/);
9989                 nextLinearNode = nextLinearNode->gtNext;
9990                 indentStack->Pop();
9991                 indentStack->Push(saveInfo);
9992             }
9993             else
9994             {
9995                 break;
9996             }
9997         }
9998     }
9999
10000     // Now, get the right type of arc for this node
10001     if (myArc != IINone)
10002     {
10003         indentStack->Pop();
10004         indentStack->Push(myArc);
10005     }
10006     gtDispTree(tree, indentStack, msg, true /*topOnly*/);
10007     nextLinearNode = tree->gtNext;
10008
10009     if (deferChild != nullptr)
10010     {
10011         indentStack->Push(IIArcBottom);
10012         nextLinearNode = gtDispLinearTree(curStmt, nextLinearNode, deferChild, indentStack);
10013         indentStack->Pop();
10014     }
10015     
10016     return nextLinearNode;
10017 }
10018
10019 //------------------------------------------------------------------------
10020 // gtDispLinearStmt: Dump a statement in linear order
10021 //
10022 // Arguments:
10023 //    stmt           - The current statement being dumped
10024 //    indentStack    - the specification for the current level of indentation & arcs
10025 //
10026 // Return Value:
10027 //    A pointer to the tree that is next in the linear traversal.
10028 //    This will generally be null, except when this statement is embedded.
10029 //
10030 // Assumptions:
10031 //    'stmt' must be a GT_STMT node
10032
10033 GenTreePtr            Compiler::gtDispLinearStmt(GenTreeStmt* stmt, IndentStack *indentStack /* = nullptr */)
10034 {
10035     if (indentStack == nullptr)
10036     {
10037         indentStack = new (this, CMK_DebugOnly) IndentStack(this);
10038     }
10039     gtDispTree(stmt, indentStack, nullptr, true /*topOnly*/);
10040     indentStack->Push(IIArcBottom);
10041     GenTreePtr nextLinearNode = gtDispLinearTree(stmt, stmt->gtStmtList, stmt->gtStmtExpr, indentStack);
10042     indentStack->Pop();
10043     return nextLinearNode;
10044 }
10045
10046 /*****************************************************************************/
10047 #endif // DEBUG
10048
10049 /*****************************************************************************
10050  *
10051  *  Check if the given node can be folded,
10052  *  and call the methods to perform the folding
10053  */
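// For example: a tree such as GT_ADD(GT_CNS_INT 2, GT_CNS_INT 3) is routed to
// gtFoldExprConst and returned as a single GT_CNS_INT 5 node; a tree with exactly one
// constant operand may be handled by gtFoldExprSpecial (e.g. 'x * 1' becomes 'x'); and a
// comparison of two identical side-effect-free subtrees is handled by gtFoldExprCompare.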
10054
10055 GenTreePtr             Compiler::gtFoldExpr(GenTreePtr tree)
10056 {
10057     unsigned        kind = tree->OperKind();
10058
10059     /* We must have a simple operation to fold */
10060     
10061     // If we're in CSE, it's not safe to perform tree 
10062     // If we're in the CSE phase, it's not safe to perform tree
10063     // folding, since doing so could change the trees that have
10064     // been considered as CSE candidates.
10065     if (optValnumCSE_phase)
10066
10067     if (!(kind & GTK_SMPOP))
10068         return tree;
10069
10070     GenTreePtr  op1 = tree->gtOp.gtOp1;
10071
10072     /* Filter out non-foldable trees that can have constant children */
10073
10074     assert (kind & (GTK_UNOP | GTK_BINOP));
10075     switch (tree->gtOper)
10076     {
10077     case GT_RETFILT:
10078     case GT_RETURN:
10079     case GT_IND:
10080         return tree;
10081     default:
10082         break;
10083     }
10084
10085     /* try to fold the current node */
10086
10087     if  ((kind & GTK_UNOP) && op1)
10088     {
10089         if  (op1->OperKind() & GTK_CONST)
10090             return gtFoldExprConst(tree);
10091     }
10092     else if ((kind & GTK_BINOP) && op1 && tree->gtOp.gtOp2 &&
10093              // Don't take out conditionals for debugging
10094              !((opts.compDbgCode || opts.MinOpts()) && 
10095                tree->OperIsCompare())) 
10096     {
10097         GenTreePtr  op2  = tree->gtOp.gtOp2;
10098
10099         // The atomic operations are exempted here because they are never computable statically;
10100         // one of their arguments is an address.
10101         if  (((op1->OperKind() & op2->OperKind()) & GTK_CONST) && !tree->OperIsAtomicOp())
10102         {
10103             /* both nodes are constants - fold the expression */
10104             return gtFoldExprConst(tree);
10105         }
10106         else if ((op1->OperKind() | op2->OperKind()) & GTK_CONST)
10107         {
10108             /* at least one is a constant - see if we have a
10109              * special operator that can use only one constant
10110              * to fold - e.g. booleans */
10111
10112             return gtFoldExprSpecial(tree);
10113         }
10114         else if (tree->OperIsCompare())
10115         {
10116             /* comparisons of two local variables can sometimes be folded */
10117
10118             return gtFoldExprCompare(tree);
10119         }
10120         else if (op2->OperGet() == GT_COLON)
10121         {
10122             assert(tree->OperGet() == GT_QMARK);
10123
10124             GenTreePtr  colon_op1 = op2->gtOp.gtOp1;
10125             GenTreePtr  colon_op2 = op2->gtOp.gtOp2;
10126
10127             if (gtCompareTree(colon_op1, colon_op2))
10128             {
10129                 // Both sides of the GT_COLON are the same tree
10130
10131                 GenTreePtr sideEffList = NULL;
10132                 gtExtractSideEffList(op1, &sideEffList);
10133
10134                 fgUpdateRefCntForExtract(op1, sideEffList);   // Decrement refcounts for op1, keeping any side effects
10135                 fgUpdateRefCntForExtract(colon_op1, NULL);    // Decrement refcounts for colon_op1
10136
10137                 // Clear colon flags only if the qmark itself is not conditionally executed
10138                 if ( (tree->gtFlags & GTF_COLON_COND)==0 )
10139                 {
10140                     fgWalkTreePre(&colon_op2, gtClearColonCond);
10141                 }
10142
10143                 if (sideEffList == NULL)
10144                 {
10145                     // No side-effects, just return colon_op2
10146                     return colon_op2;
10147                 }
10148                 else
10149                 {
10150 #ifdef  DEBUG
10151                     if  (verbose)
10152                     {
10153                         printf("\nIdentical GT_COLON trees with side effects! Extracting side effects...\n");
10154                         gtDispTree(sideEffList); printf("\n");
10155                     }
10156 #endif
10157                     // Change the GT_COLON into a GT_COMMA node with the side-effects
10158                     op2->ChangeOper(GT_COMMA);
10159                     op2->gtFlags |= (sideEffList->gtFlags & GTF_ALL_EFFECT);
10160                     op2->gtOp.gtOp1 = sideEffList;
10161                     return op2;
10162                 }
10163
10164             }
10165         }
10166     }
10167
10168     /* Return the original node (folded/bashed or not) */
10169
10170     return tree;
10171 }
10172
10173 /*****************************************************************************
10174  *
10175  *  Some comparisons can be folded:
10176  *
10177  *    locA        == locA
10178  *    classVarA   == classVarA
10179  *    locA + locB == locB + locA
10180  *
10181  */
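// For example, a GT_EQ whose operands are the same side-effect-free local folds to
// GT_CNS_INT(1), and the corresponding GT_NE folds to GT_CNS_INT(0); floating-point
// compares are never folded here because NaN != NaN.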
10182
10183 GenTreePtr          Compiler::gtFoldExprCompare(GenTreePtr tree)
10184 {
10185     GenTreePtr      op1 = tree->gtOp.gtOp1;
10186     GenTreePtr      op2 = tree->gtOp.gtOp2;
10187
10188     assert(tree->OperIsCompare());
10189
10190     /* Filter out cases that cannot be folded here */
10191
10192     /* Do not fold floats or doubles (e.g. NaN != NaN) */
10193
10194     if  (varTypeIsFloating(op1->TypeGet()))
10195         return tree;
10196
10197     /* Currently we can only fold when the two subtrees exactly match */
10198
10199     if ((tree->gtFlags & GTF_SIDE_EFFECT) || GenTree::Compare(op1, op2, true) == false)
10200         return tree;                   /* return unfolded tree */
10201
10202     GenTreePtr cons;
10203
10204     switch (tree->gtOper)
10205     {
10206       case GT_EQ:
10207       case GT_LE:
10208       case GT_GE:
10209           cons = gtNewIconNode(true);   /* Folds to GT_CNS_INT(true) */
10210           break;
10211
10212       case GT_NE:
10213       case GT_LT:
10214       case GT_GT:
10215           cons = gtNewIconNode(false);  /* Folds to GT_CNS_INT(false) */
10216           break;
10217
10218       default:
10219           assert(!"Unexpected relOp");
10220           return tree;
10221     }
10222
10223     /* The node has been folded into 'cons' */
10224
10225     if (fgGlobalMorph)
10226     {
10227         if (!fgIsInlining())
10228             fgMorphTreeDone(cons);
10229     }
10230     else
10231     {
10232         cons->gtNext = tree->gtNext;
10233         cons->gtPrev = tree->gtPrev;
10234     }
10235     if (lvaLocalVarRefCounted)
10236     {
10237         lvaRecursiveDecRefCounts(tree);
10238     }
10239     return cons;
10240 }
10241
10242
10243 /*****************************************************************************
10244  *
10245  *  Some binary operators can be folded even if only one of their
10246  *  operands is constant - e.g. boolean operators, add with 0,
10247  *  multiply with 1, etc.
10248  */
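// For example (where 'x' is the non-constant operand): 'x + 0', 'x * 1', 'x | 0' and
// 'x >> 0' fold to 'x'; 'x * 0' and 'x & 0' fold to 0 when 'x' has no side effects; and
// a GT_QMARK with a constant condition folds to its 'then' or 'else' branch.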
10249
10250 GenTreePtr              Compiler::gtFoldExprSpecial(GenTreePtr tree)
10251 {
10252     GenTreePtr      op1     = tree->gtOp.gtOp1;
10253     GenTreePtr      op2     = tree->gtOp.gtOp2;
10254     genTreeOps      oper    = tree->OperGet();
10255
10256     GenTreePtr      op, cons;
10257     ssize_t         val;
10258
10259     assert(tree->OperKind() & GTK_BINOP);
10260
10261     /* Filter out operators that cannot be folded here */
10262     if  (oper == GT_CAST)
10263          return tree;
10264
10265     /* We only consider TYP_INT for folding
10266      * Do not fold pointer arithmetic (e.g. addressing modes!) */
10267
10268     if (oper != GT_QMARK && !varTypeIsIntOrI(tree->gtType))
10269         return tree;
10270
10271     /* Find out which is the constant node */
10272
10273     if (op1->IsCnsIntOrI())
10274     {
10275         op    = op2;
10276         cons  = op1;
10277     }
10278     else if (op2->IsCnsIntOrI())
10279     {
10280         op    = op1;
10281         cons  = op2;
10282     }
10283     else
10284         return tree;
10285
10286     /* Get the constant value */
10287
10288     val = cons->gtIntConCommon.IconValue();
10289
10290     /* Here 'op' is the non-constant operand, 'cons' is the constant operand,
10291        and 'val' is the constant's value */
10292
10293     switch  (oper)
10294     {
10295
10296     case GT_EQ:
10297     case GT_NE:
10298         // Optimize boxed value classes; these are always false.  This IL is
10299         // generated when a generic value is tested against null:
10300         //     <T> ... foo(T x) { ... if ((object)x == null) ...
10301         if (val == 0 && op->IsBoxedValue()) 
10302         {
10303             // Change the assignment node so we don't generate any code for it.
10304
10305             GenTreePtr asgStmt = op->gtBox.gtAsgStmtWhenInlinedBoxValue;                       
10306             assert(asgStmt->gtOper == GT_STMT);  
10307             GenTreePtr asg = asgStmt->gtStmt.gtStmtExpr;
10308             assert(asg->gtOper == GT_ASG);  
10309 #ifdef DEBUG
10310             if (verbose)
10311             {
10312                 printf("Bashing ");
10313                 printTreeID(asg);
10314                 printf(" to NOP as part of dead box operation\n");
10315                 gtDispTree(tree);
10316             }
10317 #endif
10318             asg->gtBashToNOP();
10319            
10320             op = gtNewIconNode(oper == GT_NE);
10321             if (fgGlobalMorph)
10322             {
10323                 if (!fgIsInlining())
10324                     fgMorphTreeDone(op);
10325             }
10326             else
10327             {
10328                 op->gtNext = tree->gtNext;
10329                 op->gtPrev = tree->gtPrev;
10330             }
10331             fgSetStmtSeq(asgStmt);
10332             return op;
10333         }
10334         break;
10335
10336     case GT_ADD:
10337     case GT_ASG_ADD:
10338         if  (val == 0) goto DONE_FOLD;
10339         break;
10340
10341     case GT_MUL:
10342     case GT_ASG_MUL:
10343         if  (val == 1)
10344             goto DONE_FOLD;
10345         else if (val == 0)
10346         {
10347             /* Multiply by zero - return the 'zero' node, but not if side effects */
10348             if (!(op->gtFlags & GTF_SIDE_EFFECT))
10349             {
10350                 if (lvaLocalVarRefCounted)
10351                 {
10352                     lvaRecursiveDecRefCounts(op);
10353                 }
10354                 op = cons;
10355                 goto DONE_FOLD;
10356             }
10357         }
10358         break;
10359
10360     case GT_DIV:
10361     case GT_UDIV:
10362     case GT_ASG_DIV:
10363         if ((op2 == cons) && (val == 1) && !(op1->OperKind() & GTK_CONST))
10364         {
10365             goto DONE_FOLD;
10366         }
10367         break;
10368
10369     case GT_SUB:
10370     case GT_ASG_SUB:
10371         if ((op2 == cons) && (val == 0) && !(op1->OperKind() & GTK_CONST))
10372         {
10373             goto DONE_FOLD;
10374         }
10375         break;
10376
10377     case GT_AND:
10378         if  (val == 0)
10379         {
10380             /* AND with zero - return the 'zero' node, but not if side effects */
10381
10382             if (!(op->gtFlags & GTF_SIDE_EFFECT))
10383             {
10384                 if (lvaLocalVarRefCounted)
10385                 {
10386                     lvaRecursiveDecRefCounts(op);
10387                 }
10388                 op = cons;
10389                 goto DONE_FOLD;
10390             }
10391         }
10392         else
10393         {
10394             /* The GTF_BOOLEAN flag is set for nodes that are part
10395              * of a boolean expression, thus all their children
10396              * are known to evaluate to only 0 or 1 */
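            // For example, if GTF_BOOLEAN is set on '(a > b) & 1', the non-constant operand
            // can only produce 0 or 1, so AND with 1 is a no-op and the relop itself is
            // returned via DONE_FOLD below.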
10397
10398             if (tree->gtFlags & GTF_BOOLEAN)
10399             {
10400
10401                 /* The constant value must be 1
10402                  * AND with 1 stays the same */
10403                 assert(val == 1);
10404                 goto DONE_FOLD;
10405             }
10406         }
10407         break;
10408
10409     case GT_OR:
10410         if  (val == 0)
10411             goto DONE_FOLD;
10412         else if (tree->gtFlags & GTF_BOOLEAN)
10413         {
10414             /* The constant value must be 1 - OR with 1 is 1 */
10415
10416             assert(val == 1);
10417
10418             /* OR with one - return the 'one' node, but not if side effects */
10419
10420             if (!(op->gtFlags & GTF_SIDE_EFFECT))
10421             {
10422                 if (lvaLocalVarRefCounted)
10423                 {
10424                     lvaRecursiveDecRefCounts(op);
10425                 }
10426                 op = cons;
10427                 goto DONE_FOLD;
10428             }
10429         }
10430         break;
10431
10432     case GT_LSH:
10433     case GT_RSH:
10434     case GT_RSZ:
10435     case GT_ROL:
10436     case GT_ROR:
10437     case GT_ASG_LSH:
10438     case GT_ASG_RSH:
10439     case GT_ASG_RSZ:
10440         if (val == 0)
10441         {
10442             if (op2 == cons)
10443                 goto DONE_FOLD;
10444             else if (!(op->gtFlags & GTF_SIDE_EFFECT))
10445             {
10446                 if (lvaLocalVarRefCounted)
10447                 {
10448                     lvaRecursiveDecRefCounts(op);
10449                 }
10450                 op = cons;
10451                 goto DONE_FOLD;
10452             }
10453         }
10454         break;
10455
10456     case GT_QMARK:
10457         {
10458             assert(op1 == cons && op2 == op && op2->gtOper == GT_COLON);
10459             assert(op2->gtOp.gtOp1 && op2->gtOp.gtOp2);
10460
10461             assert(val == 0 || val == 1);
10462
10463             GenTree* opToDelete;
10464             if (val)
10465             {
10466                 op = op2->AsColon()->ThenNode();
10467                 opToDelete = op2->AsColon()->ElseNode();
10468             }
10469             else
10470             {
10471                 op = op2->AsColon()->ElseNode();
10472                 opToDelete = op2->AsColon()->ThenNode();
10473             }
10474             if (lvaLocalVarRefCounted)
10475             {
10476                 lvaRecursiveDecRefCounts(opToDelete);
10477             }
10478         
10479             // Clear colon flags only if the qmark itself is not conditionally executed
10480             if ( (tree->gtFlags & GTF_COLON_COND)==0 )
10481             {
10482                 fgWalkTreePre(&op, gtClearColonCond);
10483             }
10484         }
10485
10486         goto DONE_FOLD;
10487
10488     default:
10489         break;
10490     }
10491
10492     /* The node is not foldable */
10493
10494     return tree;
10495
10496 DONE_FOLD:
10497
10498     /* The node has been folded into 'op' */
10499     
10500     // If there was an assignment update, we just morphed it into
10501     // a use; update the flags appropriately
10502     if (op->gtOper == GT_LCL_VAR)
10503     {
10504         assert ((tree->OperKind() & GTK_ASGOP) ||
10505                 (op->gtFlags & (GTF_VAR_USEASG | GTF_VAR_USEDEF | GTF_VAR_DEF)) == 0);
10506
10507         op->gtFlags &= ~(GTF_VAR_USEASG | GTF_VAR_USEDEF | GTF_VAR_DEF);
10508     }
10509
10510     op->gtNext = tree->gtNext;
10511     op->gtPrev = tree->gtPrev;
10512
10513     return op;
10514 }
10515
10516 /*****************************************************************************
10517  *
10518  *  Fold the given constant tree.
10519  */
10520
10521 #ifdef _PREFAST_
10522 #pragma warning(push)
10523 #pragma warning(disable:21000) // Suppress PREFast warning about overly large function
10524 #endif
10525 GenTreePtr                  Compiler::gtFoldExprConst(GenTreePtr tree)
10526 {
10527     unsigned        kind = tree->OperKind();
10528
10529     SSIZE_T         i1, i2, itemp;
10530     INT64           lval1, lval2, ltemp;
10531     float           f1, f2;
10532     double          d1, d2;
10533     var_types       switchType;
10534     FieldSeqNode*   fieldSeq = FieldSeqStore::NotAField();  // default unless we override it when folding
10535
10536     assert (kind & (GTK_UNOP | GTK_BINOP));
10537
10538     GenTreePtr      op1 = tree->gtOp.gtOp1;
10539     GenTreePtr      op2 = tree->gtGetOp2();
10540
10541     if (!opts.OptEnabled(CLFLG_CONSTANTFOLD))
10542     {
10543         return tree;
10544     }
10545
10546     if (tree->OperGet() == GT_NOP)
10547     {
10548         return tree;
10549     }
10550
10551 #ifdef FEATURE_SIMD
10552     if (tree->OperGet() == GT_SIMD)
10553     {
10554         return tree;
10555     }
10556 #endif // FEATURE_SIMD
10557
10558     if      (kind & GTK_UNOP)
10559     {
10560         assert(op1->OperKind() & GTK_CONST);
10561
10562         switch (op1->gtType)
10563         {
10564         case TYP_INT:
10565
10566             /* Fold constant INT unary operator */
10567             assert(op1->gtIntCon.ImmedValCanBeFolded(this, tree->OperGet()));
10568             i1 = (int) op1->gtIntCon.gtIconVal;
10569
10570             // If we fold a unary oper, then the folded constant 
10571             // is considered a ConstantIndexField if op1 was one
10572             //
10573
10574             if ((op1->gtIntCon.gtFieldSeq != nullptr) &&
10575                  op1->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
10576             {
10577                 fieldSeq = op1->gtIntCon.gtFieldSeq;
10578             }
10579
10580             switch (tree->gtOper)
10581             {
10582             case GT_NOT: i1 = ~i1; break;
10583
10584             case GT_NEG:
10585             case GT_CHS: i1 = -i1; break;
10586
10587             case GT_CAST:
10588                 // assert (genActualType(tree->CastToType()) == tree->gtType);
10589                 switch (tree->CastToType())
10590                 {
10591                 case TYP_BYTE:
10592                     itemp = INT32(INT8(i1));
10593                     goto CHK_OVF;
10594
10595                 case TYP_SHORT:
10596                     itemp = INT32(INT16(i1));
10597 CHK_OVF:
10598                     if (tree->gtOverflow() &&
10599                         ((itemp != i1) ||
10600                          ((tree->gtFlags & GTF_UNSIGNED) && i1 < 0)))
10601                     {
10602                          goto INT_OVF;
10603                     }
10604                     i1 = itemp; goto CNS_INT;
10605
10606                 case TYP_CHAR:
10607                     itemp = INT32(UINT16(i1));
10608                     if (tree->gtOverflow())
10609                         if (itemp != i1) goto INT_OVF;
10610                     i1 = itemp;
10611                     goto CNS_INT;
10612
10613                 case TYP_BOOL:
10614                 case TYP_UBYTE:
10615                     itemp = INT32(UINT8(i1));
10616                     if (tree->gtOverflow()) if (itemp != i1) goto INT_OVF;
10617                     i1 = itemp; goto CNS_INT;
10618
10619                 case TYP_UINT:
10620                     if (!(tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && i1 < 0)
10621                         goto INT_OVF;
10622                     goto CNS_INT;
10623
10624                 case TYP_INT:
10625                     if ((tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && i1 < 0)
10626                         goto INT_OVF;
10627                     goto CNS_INT;
10628
10629                 case TYP_ULONG:
10630                     if (!(tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && i1 < 0)
10631                     {
10632                         op1->ChangeOperConst(GT_CNS_NATIVELONG); // need type of oper to be same as tree
10633                         op1->gtType = TYP_LONG;
10634                         // We don't care about the value as we are throwing an exception
10635                         goto LNG_OVF;
10636                     }
10637                     lval1 = UINT64(UINT32(i1));
10638                     goto CNS_LONG;
10639
10640                 case TYP_LONG:
10641                     if (tree->gtFlags & GTF_UNSIGNED)
10642                     {
10643                         lval1 = INT64(UINT32(i1));                    
10644                     }
10645                     else
10646                     {
10647                         lval1 = INT64(INT32(i1));
10648                     }
10649                     goto CNS_LONG;
10650
10651                 case TYP_FLOAT:
10652                     if (tree->gtFlags & GTF_UNSIGNED)
10653                         f1 = forceCastToFloat(UINT32(i1));
10654                     else
10655                         f1 = forceCastToFloat(INT32(i1));
10656                     d1 = f1;
10657                     goto CNS_DOUBLE;
10658                 
10659                 case TYP_DOUBLE:
10660                     if (tree->gtFlags & GTF_UNSIGNED)
10661                         d1 = (double) UINT32(i1);
10662                     else
10663                         d1 = (double) INT32(i1);
10664                     goto CNS_DOUBLE;
10665
10666                 default:
10667                     assert(!"BAD_TYP");
10668                     break;
10669                 }
10670                 return tree;
10671
10672             default:
10673                 return tree;
10674             }
10675
10676             goto CNS_INT;
10677
10678         case TYP_LONG:
10679
10680             /* Fold constant LONG unary operator */
10681
10682             assert(op1->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()));
10683             lval1 = op1->gtIntConCommon.LngValue();
10684
10685             switch (tree->gtOper)
10686             {
10687             case GT_NOT: lval1 = ~lval1; break;
10688
10689             case GT_NEG:
10690             case GT_CHS: lval1 = -lval1; break;
10691
10692             case GT_CAST:
10693                 assert (genActualType(tree->CastToType()) == tree->gtType);
10694                 switch (tree->CastToType())
10695                 {
10696                 case TYP_BYTE:
10697                     i1 = INT32(INT8(lval1));
10698                     goto CHECK_INT_OVERFLOW;
10699
10700                 case TYP_SHORT:
10701                     i1 = INT32(INT16(lval1));
10702                     goto CHECK_INT_OVERFLOW;
10703
10704                 case TYP_CHAR:
10705                     i1 = INT32(UINT16(lval1));
10706                     goto CHECK_UINT_OVERFLOW;
10707
10708                 case TYP_UBYTE:
10709                     i1 = INT32(UINT8(lval1));
10710                     goto CHECK_UINT_OVERFLOW;
10711
10712                 case TYP_INT:
10713                     i1 = INT32(lval1);
10714
10715     CHECK_INT_OVERFLOW:
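                    // At this point i1 holds the truncated value.  If an overflow check was
                    // requested, verify that the truncation was lossless and, when the source
                    // is treated as unsigned, that the truncated value did not come out negative.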
10716                     if (tree->gtOverflow())
10717                     {
10718                         if (i1 != lval1)
10719                             goto INT_OVF;
10720                         if ((tree->gtFlags & GTF_UNSIGNED) && i1 < 0)
10721                             goto INT_OVF;
10722                     }
10723                     goto CNS_INT;
10724
10725                 case TYP_UINT:
10726                     i1 = UINT32(lval1);
10727
10728     CHECK_UINT_OVERFLOW:
10729                     if (tree->gtOverflow() && UINT32(i1) != lval1)
10730                         goto INT_OVF;
10731                     goto CNS_INT;
10732
10733                 case TYP_ULONG:
10734                     if (!(tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && lval1 < 0)
10735                         goto LNG_OVF;
10736                     goto CNS_LONG;
10737
10738                 case TYP_LONG:
10739                     if ( (tree->gtFlags & GTF_UNSIGNED) && tree->gtOverflow() && lval1 < 0)
10740                         goto LNG_OVF;
10741                     goto CNS_LONG;
10742
10743                 case TYP_FLOAT:
10744                 case TYP_DOUBLE:
10745                     if ((tree->gtFlags & GTF_UNSIGNED) && lval1 < 0)
10746                     {
10747                         d1 = FloatingPointUtils::convertUInt64ToDouble((unsigned __int64)lval1);
10748                     }
10749                     else
10750                     {
10751                         d1 = (double)lval1;
10752                     }
10753
10754                     if (tree->CastToType() == TYP_FLOAT)
10755                     {
10756                         f1 = forceCastToFloat(d1);    // truncate precision
10757                         d1 = f1;
10758                     }
10759                     goto CNS_DOUBLE;
10760                 default:
10761                     assert(!"BAD_TYP");
10762                     break;
10763                 }
10764                 return tree;
10765
10766             default:
10767                 return tree;
10768             }
10769
10770             goto CNS_LONG;
10771
10772         case TYP_FLOAT:
10773         case TYP_DOUBLE:
10774             assert(op1->gtOper == GT_CNS_DBL);
10775
10776             /* Fold constant DOUBLE unary operator */
10777             
10778             d1 = op1->gtDblCon.gtDconVal;
10779             
10780             switch (tree->gtOper)
10781             {
10782             case GT_NEG:
10783             case GT_CHS:
10784                 d1 = -d1;
10785                 break;
10786
10787             case GT_CAST:
10788
10789                 if (tree->gtOverflowEx())
10790                     return tree;
10791
10792                 assert (genActualType(tree->CastToType()) == tree->gtType);
10793
10794                 if ((op1->gtType == TYP_FLOAT  && !_finite(forceCastToFloat(d1))) ||
10795                     (op1->gtType == TYP_DOUBLE && !_finite(d1)))
10796                 {
10797                     // The floating point constant is not finite.  The ECMA spec says, in
10798                     // III 3.27, that "...if overflow occurs converting a floating point type
10799                     // to an integer, ..., the value returned is unspecified."  However, it would
10800                     // at least be desirable to have the same value returned for casting an overflowing
10801                     // constant to an int as would obtained by passing that constant as a parameter
10802                     // constant to an int as would be obtained by passing that constant as a parameter
10803                     // cast logic will yield the desired result (and trust testing to tell otherwise).
10804                     // Cross-compilation is an issue here; if that becomes an important scenario, we should
10805                     // capture the target-specific values of overflow casts to the various integral types as
10806                     // constants in a target-specific function.
10807
10808 #ifdef _TARGET_XARCH_
10809                     // Don't fold conversions of +inf/-inf to integral value as the value returned by JIT helper
10810                     // doesn't match with the C compiler's cast result.
10811                     return tree;
10812 #else //!_TARGET_XARCH_
10813
10814                     switch (tree->CastToType())
10815                     {  
10816                     case TYP_BYTE:
10817                         i1 = ssize_t(INT8(d1)); goto CNS_INT;
10818                     case TYP_UBYTE:
10819                         i1 = ssize_t(UINT8(d1)); goto CNS_INT;
10820                     case TYP_SHORT:
10821                         i1 = ssize_t(INT16(d1)); goto CNS_INT;
10822                     case TYP_CHAR:
10823                         i1 = ssize_t(UINT16(d1)); goto CNS_INT;
10824                     case TYP_INT:
10825                         i1 = ssize_t(INT32(d1)); goto CNS_INT;
10826                     case TYP_UINT:
10827                         i1 = ssize_t(UINT32(d1)); goto CNS_INT;
10828                     case TYP_LONG: 
10829                         lval1 = INT64(d1); goto CNS_LONG;
10830                     case TYP_ULONG:
10831                         lval1 = UINT64(d1); goto CNS_LONG;
10832                     case TYP_FLOAT:
10833                     case TYP_DOUBLE:
10834                         if (op1->gtType == TYP_FLOAT)
10835                             d1 = forceCastToFloat(d1);  // it's only !_finite() after this conversion
10836                         goto CNS_DOUBLE;
10837                     default:
10838                         unreached();
10839                     }
10840 #endif //!_TARGET_XARCH_
10841                 }
10842                
10843                 switch (tree->CastToType())
10844                 {
10845                 case TYP_BYTE:
10846                     i1 = INT32(INT8(d1));   goto CNS_INT;
10847
10848                 case TYP_SHORT:
10849                     i1 = INT32(INT16(d1));  goto CNS_INT;
10850
10851                 case TYP_CHAR:
10852                     i1 = INT32(UINT16(d1)); goto CNS_INT;
10853
10854                 case TYP_UBYTE:
10855                     i1 = INT32(UINT8(d1));  goto CNS_INT;
10856
10857                 case TYP_INT:
10858                     i1 = INT32(d1);         goto CNS_INT;
10859
10860                 case TYP_UINT:
10861                     i1 = forceCastToUInt32(d1); goto CNS_INT;
10862
10863                 case TYP_LONG:
10864                     lval1 = INT64(d1);      goto CNS_LONG;
10865
10866                 case TYP_ULONG:
10867                     lval1 = FloatingPointUtils::convertDoubleToUInt64(d1);
10868                     goto CNS_LONG;
10869
10870                 case TYP_FLOAT:
10871                     d1 = forceCastToFloat(d1);  
10872                     goto CNS_DOUBLE;
10873
10874                 case TYP_DOUBLE:
10875                     if (op1->gtType == TYP_FLOAT)
10876                         d1 = forceCastToFloat(d1); // truncate precision
10877                     goto CNS_DOUBLE; // redundant cast
10878
10879                 default:
10880                     assert(!"BAD_TYP");
10881                     break;
10882                 }
10883                 return tree;
10884
10885             default:
10886                 return tree;
10887             }
10888             goto CNS_DOUBLE;
10889
10890         default:
10891             /* not a foldable typ - e.g. RET const */
10892             return tree;
10893         }
10894     }
10895
10896     /* We have a binary operator */
10897
10898     assert(kind & GTK_BINOP);
10899     assert(op2);
10900     assert(op1->OperKind() & GTK_CONST);
10901     assert(op2->OperKind() & GTK_CONST);
10902
10903     if (tree->gtOper == GT_COMMA)
10904         return op2;
10905
10906     if (tree->gtOper == GT_LIST)
10907         return tree;
10908
10909     switchType = op1->gtType;
10910
10911     // Normally we will just switch on op1 types, but for the case where
10912     //  only op2 is a GC type and op1 is not a GC type, we use the op2 type.
10913     //  That way this case is handled as GC-type folding.
10914     //
10915     if (varTypeIsGC(op2->gtType) && !varTypeIsGC(op1->gtType))
10916     {
10917         switchType = op2->gtType;
10918     }
10919
10920     switch (switchType)
10921     {
10922
10923     /*-------------------------------------------------------------------------
10924      * Fold constant REF or BYREF binary operator
10925      * These can only be comparisons or null pointers
10926      */
10927
10928     case TYP_REF:
10929
10930         /* String nodes are an RVA at this point */
10931
10932         if (op1->gtOper == GT_CNS_STR || op2->gtOper == GT_CNS_STR)
10933             return tree;
10934
10935         __fallthrough;
10936
10937     case TYP_BYREF:
10938
10939         i1 = op1->gtIntConCommon.IconValue();
10940         i2 = op2->gtIntConCommon.IconValue();
10941
10942         switch (tree->gtOper)
10943         {
10944         case GT_EQ: 
10945             i1 = (i1 == i2); 
10946             goto FOLD_COND;
10947
10948         case GT_NE: 
10949             i1 = (i1 != i2); 
10950             goto FOLD_COND;
10951       
10952         case GT_ADD:
10953             noway_assert(tree->gtType != TYP_REF);
10954             // We only fold a GT_ADD that involves a null reference.
10955             if (((op1->TypeGet() == TYP_REF) && (i1 == 0)) ||
10956                 ((op2->TypeGet() == TYP_REF) && (i2 == 0)))
10957             {
10958 #ifdef  DEBUG
10959                 if  (verbose)
10960                 {
10961                     printf("\nFolding operator with constant nodes into a constant:\n");
10962                     gtDispTree(tree);
10963                 }
10964 #endif
10965                 // Fold into GT_IND of null byref
10966                 tree->ChangeOperConst(GT_CNS_INT);
10967                 tree->gtType              = TYP_BYREF;
10968                 tree->gtIntCon.gtIconVal  = 0;
10969                 tree->gtIntCon.gtFieldSeq = FieldSeqStore::NotAField();
10970                 if (vnStore != nullptr)
10971                 {
10972                     fgValueNumberTreeConst(tree);
10973                 }
10974 #ifdef  DEBUG
10975                 if  (verbose)
10976                 {
10977                     printf("\nFolded to null byref:\n");
10978                     gtDispTree(tree);
10979                 }
10980 #endif
10981                 goto DONE;
10982             }          
10983
10984         default:
10985             break;
10986         }
10987         
10988         return tree;
10989
10990     /*-------------------------------------------------------------------------
10991      * Fold constant INT binary operator
10992      */
10993
10994     case TYP_INT:
10995
10996         if (tree->OperIsCompare() && (tree->gtType == TYP_BYTE))
10997             tree->gtType = TYP_INT;
10998
10999         assert (tree->gtType == TYP_INT || 
11000                 varTypeIsGC(tree->TypeGet()) ||
11001                 tree->gtOper == GT_MKREFANY);
11002
11003         // No GC pointer types should be folded here...
11004         //
11005         assert(!varTypeIsGC(op1->gtType) && !varTypeIsGC(op2->gtType));
11006
11007         assert(op1->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()));
11008         assert(op2->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()));
11009
11010         i1 = op1->gtIntConCommon.IconValue();
11011         i2 = op2->gtIntConCommon.IconValue();
11012
11013         switch (tree->gtOper)
11014         {
11015         case GT_EQ : i1 = (INT32(i1) == INT32(i2)); break;
11016         case GT_NE : i1 = (INT32(i1) != INT32(i2)); break;
11017
11018         case GT_LT :
11019             if (tree->gtFlags & GTF_UNSIGNED)
11020                 i1 = (UINT32(i1) <  UINT32(i2));
11021             else
11022                 i1 = (INT32(i1) < INT32(i2));
11023             break;
11024
11025         case GT_LE :
11026             if (tree->gtFlags & GTF_UNSIGNED)
11027                 i1 = (UINT32(i1) <= UINT32(i2));
11028             else
11029                 i1 = (INT32(i1) <= INT32(i2));
11030             break;
11031
11032         case GT_GE :
11033             if (tree->gtFlags & GTF_UNSIGNED)
11034                 i1 = (UINT32(i1) >= UINT32(i2));
11035             else
11036                 i1 = (INT32(i1) >= INT32(i2));
11037             break;
11038
11039         case GT_GT :
11040             if (tree->gtFlags & GTF_UNSIGNED)
11041                 i1 = (UINT32(i1) >  UINT32(i2));
11042             else
11043                 i1 = (INT32(i1) >  INT32(i2));
11044             break;
11045
11046         case GT_ADD:
11047             itemp = i1 + i2;
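            // Overflow check: redo the addition with both operands widened to 64 bits;
            // if widening the 32-bit result does not give the same 64-bit sum, the
            // 32-bit add wrapped around.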
11048             if (tree->gtOverflow())
11049             {
11050                 if (tree->gtFlags & GTF_UNSIGNED)
11051                 {
11052                     if (INT64(UINT32(itemp)) != INT64(UINT32(i1)) + INT64(UINT32(i2)))
11053                         goto INT_OVF;
11054                 }
11055                 else
11056                 {
11057                     if (INT64(INT32(itemp))  != INT64(INT32(i1))+INT64(INT32(i2)))
11058                         goto INT_OVF;
11059                 }
11060             }
11061             i1 = itemp; 
11062             fieldSeq = GetFieldSeqStore()->Append(op1->gtIntCon.gtFieldSeq,
11063                                                   op2->gtIntCon.gtFieldSeq);
11064             break;
11065         case GT_SUB:
11066             itemp = i1 - i2;
11067             if (tree->gtOverflow())
11068             {
11069                 if (tree->gtFlags & GTF_UNSIGNED)
11070                 {
11071                     if (INT64(UINT32(itemp)) != ((INT64)((UINT32)i1) - (INT64)((UINT32)i2)))
11072                         goto INT_OVF;
11073                 }
11074                 else
11075                 {
11076                     if (INT64(INT32(itemp))  != INT64(INT32(i1)) - INT64(INT32(i2)))
11077                         goto INT_OVF;
11078                 }
11079             }
11080             i1 = itemp; break;
11081         case GT_MUL:
11082             itemp = i1 * i2;
11083             if (tree->gtOverflow())
11084             {
11085                 if (tree->gtFlags & GTF_UNSIGNED)
11086                 {
11087                     if (INT64(UINT32(itemp)) != ((INT64)((UINT32)i1) * (INT64)((UINT32)i2)))
11088                         goto INT_OVF;
11089                 }
11090                 else
11091                 {
11092                     if (INT64(INT32(itemp))  != INT64(INT32(i1)) * INT64(INT32(i2)))
11093                         goto INT_OVF;
11094                 }
11095             }
11096             // For the very particular case of the "constant array index" pseudo-field, we
11097             // assume that multiplication is by the field width, and preserves that field.
11098             // This could obviously be made more robust by a more complicated set of annotations...
11099             if ((op1->gtIntCon.gtFieldSeq != nullptr) &&
11100                  op1->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
11101             {
11102                 assert(op2->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField());
11103                 fieldSeq = op1->gtIntCon.gtFieldSeq;
11104             } 
11105             else if ((op2->gtIntCon.gtFieldSeq != nullptr) &&
11106                       op2->gtIntCon.gtFieldSeq->IsConstantIndexFieldSeq())
11107             {
11108                 assert(op1->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField());
11109                 fieldSeq = op2->gtIntCon.gtFieldSeq;
11110             }
11111             i1 = itemp;
11112             break;
11113     
11114         case GT_OR : i1 |= i2; break;
11115         case GT_XOR: i1 ^= i2; break;
11116         case GT_AND: i1 &= i2; break;
11117
11118         case GT_LSH: i1 <<= (i2 & 0x1f); break;
11119         case GT_RSH: i1 >>= (i2 & 0x1f); break;
11120         case GT_RSZ:
11121                 /* logical shift -> make it unsigned to not propagate the sign bit */
11122                 i1 = UINT32(i1) >> (i2 & 0x1f);
11123             break;
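        // Rotates are folded as two shifts OR'd together, with the rotate count masked
        // to the 0..31 range; e.g. for i1 = 0x80000001 and i2 = 1, GT_ROL produces
        // 0x00000003 as the 32-bit result.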
11124         case GT_ROL: i1 = (i1 << (i2 & 0x1f)) | (UINT32(i1) >> ((32 - i2) & 0x1f));
11125             break;
11126         case GT_ROR: i1 = (i1 << ((32 - i2) & 0x1f)) | (UINT32(i1) >> (i2 & 0x1f));
11127             break;
11128
11129         /* DIV and MOD can raise an exception - on division by 0, or on overflow
11130          * when dividing INT32_MIN by -1 - so in those cases we don't fold */
11131
11132         case GT_DIV:
11133         case GT_MOD:
11134         case GT_UDIV:
11135         case GT_UMOD:
11136             if (INT32(i2) == 0)
11137             {
11138                 // Division by zero: 
11139                 // We have to evaluate this expression and throw an exception
11140                 return tree;
11141             }
11142             else if ((INT32(i2) == -1) &&
11143                      (UINT32(i1) == 0x80000000))
11144             {
11145                 // Overflow Division: 
11146                 // We have to evaluate this expression and throw an exception
11147                 return tree;
11148             }
11149
11150             if (tree->gtOper == GT_DIV)
11151             {
11152                 i1 = INT32(i1) / INT32(i2);
11153             }
11154             else if (tree->gtOper == GT_MOD)
11155             {
11156                 i1 = INT32(i1) % INT32(i2);
11157             }
11158             else if (tree->gtOper == GT_UDIV)
11159             {
11160                 i1 = UINT32(i1) / UINT32(i2);
11161             }
11162             else 
11163             {
11164                 assert(tree->gtOper == GT_UMOD);
11165                 i1 = UINT32(i1) % UINT32(i2);
11166             }
11167             break;
11168
11169         default:
11170             return tree;
11171         }
11172
11173         /* We get here after folding to a GT_CNS_INT type:
11174          * change the node to the new type / value and make sure the node sizes are OK */
11175 CNS_INT:
11176 FOLD_COND:
11177
11178 #ifdef  DEBUG
11179         if  (verbose)
11180         {
11181             printf("\nFolding operator with constant nodes into a constant:\n");
11182             gtDispTree(tree);
11183         }
11184 #endif
11185
11186 #ifdef _TARGET_64BIT_
11187         // Re-sign-extend or truncate the folded value as appropriate for the 64-bit target.
11188         if (tree->gtFlags & GTF_UNSIGNED)
11189             i1 = UINT32(i1);
11190         else
11191             i1 = INT32(i1);
11192 #endif // _TARGET_64BIT_
11193
11194         /* All conditional folding also jumps here, since the node hanging from
11195          * GT_JTRUE has to be a GT_CNS_INT with value 0 or 1 */
11196
11197         tree->ChangeOperConst(GT_CNS_INT);
11198         tree->gtType              = TYP_INT;
11199         tree->gtIntCon.gtIconVal  = i1;
11200         tree->gtIntCon.gtFieldSeq = fieldSeq;
11201         if (vnStore != nullptr)
11202         {
11203             fgValueNumberTreeConst(tree);
11204         }
11205 #ifdef  DEBUG
11206         if  (verbose)
11207         {
11208             printf("Bashed to int constant:\n");
11209             gtDispTree(tree);
11210         }
11211 #endif
11212         goto DONE;
11213
11214         /* This operation is going to cause an overflow exception. Morph into
11215            an overflow helper. Put a dummy constant value for code generation.
11216
11217            We could remove all subsequent trees in the current basic block,
11218            unless this node is a child of GT_COLON
11219
11220            NOTE: Since the folded value is not constant we should not change the
11221                  "tree" node - otherwise we confuse the logic that checks if the folding
11222                  was successful - instead use one of the operands, e.g. op1
11223          */
11224
11225 LNG_OVF:
11226         // Don't fold overflow operations if not global morph phase.
11227         // The reason for this is that this optimization is replacing a gentree node
11228         // with another new gentree node. Say a GT_CALL(arglist) has one 'arg'
11229         // involving overflow arithmetic.  During assertion prop, it is possible 
11230         // that the 'arg' could be constant folded and the result could lead to an
11231         // overflow.  In such a case 'arg' will get replaced with GT_COMMA node
11232         // but fgMorphArgs() - see the logic around "if(lateArgsComputed)" - doesn't
11233         // update args table. For this reason this optimization is enabled only
11234         // for global morphing phase.
11235         //
11236         // X86/Arm32 legacy codegen note: This is not an issue on x86 for the reason that
11237         // it doesn't use arg table for calls.  In addition x86/arm32 legacy codegen doesn't
11238         // expect long constants to show up as an operand of overflow cast operation.
11239         //
11240         // TODO-CQ: Once fgMorphArgs() is fixed this restriction could be removed.
11241 #ifndef LEGACY_BACKEND
11242         if (!fgGlobalMorph)
11243         {
11244             assert(tree->gtOverflow());
11245             return tree;
11246         }
11247 #endif // !LEGACY_BACKEND
11248
11249         op1 = gtNewLconNode(0);
11250         if (vnStore != nullptr)
11251         {
11252             op1->gtVNPair.SetBoth(vnStore->VNZeroForType(TYP_LONG));
11253         }
11254         goto OVF;
11255
11256 INT_OVF:
11257         // Don't fold overflow operations if not global morph phase.
11258         // The reason for this is that this optimization is replacing a gentree node
11259         // with another new gentree node. Say a GT_CALL(arglist) has one 'arg'
11260         // involving overflow arithmetic.  During assertion prop, it is possible 
11261         // that the 'arg' could be constant folded and the result could lead to an
11262         // overflow.  In such a case 'arg' will get replaced with GT_COMMA node
11263         // but fgMorphArgs() - see the logic around "if(lateArgsComputed)" - doesn't
11264         // update args table. For this reason this optimization is enabled only
11265         // for global morphing phase.
11266         //
11267         // X86/Arm32 legacy codegen note: This is not an issue on x86 for the reason that
11268         // it doesn't use arg table for calls.  In addition x86/arm32 legacy codegen doesn't
11269         // expect long constants to show up as an operand of overflow cast operation.
11270         //
11271         // TODO-CQ: Once fgMorphArgs() is fixed this restriction could be removed.
11272 #ifndef LEGACY_BACKEND
11273         if (!fgGlobalMorph)
11274         {
11275             assert(tree->gtOverflow());
11276             return tree;
11277         }
11278 #endif // !LEGACY_BACKEND
11279
11280         op1 = gtNewIconNode(0);
11281         if (vnStore != nullptr)
11282         {
11283             op1->gtVNPair.SetBoth(vnStore->VNZeroForType(TYP_INT));
11284         }
11285         goto OVF;
11286
11287 OVF:
11288 #ifdef  DEBUG
11289         if  (verbose)
11290         {
11291             printf("\nFolding binary operator with constant nodes into a comma throw:\n");
11292             gtDispTree(tree);
11293         }
11294 #endif
11295         /* We will change the cast to a GT_COMMA and attach the exception helper as gtOp.gtOp1.
11296          * The constant expression zero becomes op2. */
11297
11298         assert(tree->gtOverflow());
11299         assert(tree->gtOper == GT_ADD  || tree->gtOper == GT_SUB ||
11300                tree->gtOper == GT_CAST || tree->gtOper == GT_MUL);
11301         assert(op1);
11302
11303         op2 = op1;
11304         op1 = gtNewHelperCallNode(CORINFO_HELP_OVERFLOW, 
11305                                   TYP_VOID, 
11306                                   GTF_EXCEPT,
11307                                   gtNewArgList(gtNewIconNode(compCurBB->bbTryIndex)));
11308
11309         if (vnStore != nullptr)
11310         {
11311             op1->gtVNPair = vnStore->VNPWithExc(ValueNumPair(ValueNumStore::VNForVoid(), ValueNumStore::VNForVoid()), vnStore->VNPExcSetSingleton(vnStore->VNPairForFunc(TYP_REF, VNF_OverflowExc)));
11312         }
11313
11314         tree = gtNewOperNode(GT_COMMA, tree->gtType, op1, op2);
11315
11316         return tree;
11317
11318     /*-------------------------------------------------------------------------
11319      * Fold constant LONG binary operator
11320      */
11321
11322     case TYP_LONG:
11323
11324         // No GC pointer types should be folded here...
11325         //
11326         assert(!varTypeIsGC(op1->gtType) && !varTypeIsGC(op2->gtType));
11327
11328         // op1 is known to be a TYP_LONG, op2 is normally a TYP_LONG, unless we have a shift operator in which case it is a TYP_INT
11329         //
11330         assert((op2->gtType == TYP_LONG) || (op2->gtType == TYP_INT));
11331
11332         assert(op1->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()));
11333         assert(op2->gtIntConCommon.ImmedValCanBeFolded(this, tree->OperGet()));
11334
11335         lval1 = op1->gtIntConCommon.LngValue();
11336         
11337         // For the shift operators we can have a op2 that is a TYP_INT and thus will be GT_CNS_INT 
11338         if (op2->OperGet() == GT_CNS_INT)
11339             lval2 = op2->gtIntConCommon.IconValue();
11340         else
11341             lval2 = op2->gtIntConCommon.LngValue();
11342
11343         switch (tree->gtOper)
11344         {
11345         case GT_EQ : i1 = (lval1 == lval2); goto FOLD_COND;
11346         case GT_NE : i1 = (lval1 != lval2); goto FOLD_COND;
11347
11348         case GT_LT :
11349             if (tree->gtFlags & GTF_UNSIGNED)
11350                 i1 = (UINT64(lval1) <  UINT64(lval2));
11351             else
11352                 i1 = (lval1 <  lval2);
11353             goto FOLD_COND;
11354
11355         case GT_LE :
11356             if (tree->gtFlags & GTF_UNSIGNED)
11357                 i1 = (UINT64(lval1) <= UINT64(lval2));
11358             else
11359                 i1 = (lval1 <=  lval2);
11360             goto FOLD_COND;
11361
11362         case GT_GE :
11363             if (tree->gtFlags & GTF_UNSIGNED)
11364                 i1 = (UINT64(lval1) >= UINT64(lval2));
11365             else
11366                 i1 = (lval1 >=  lval2);
11367             goto FOLD_COND;
11368
11369         case GT_GT :
11370             if (tree->gtFlags & GTF_UNSIGNED)
11371                 i1 = (UINT64(lval1) >  UINT64(lval2));
11372             else
11373                 i1 = (lval1  >  lval2);
11374             goto FOLD_COND;
11375
11376         case GT_ADD:
11377             ltemp = lval1 + lval2;
11378
11379 LNG_ADD_CHKOVF:
11380             /* For the SIGNED case - If there is one positive and one negative operand, there can be no overflow.
11381              * If both are positive, the result has to be positive, and similarly for negatives.
11382              *
11383              * For the UNSIGNED case - If either UINT64 operand is bigger than the result then OVF */
11384
11385             if (tree->gtOverflow())
11386             {
11387                 if (tree->gtFlags & GTF_UNSIGNED)
11388                 {
11389                     if ( (UINT64(lval1) > UINT64(ltemp)) ||
11390                          (UINT64(lval2) > UINT64(ltemp))  )
11391                         goto LNG_OVF;
11392                 }
11393                 else
11394                     if ( ((lval1<0) == (lval2<0)) && ((lval1<0) != (ltemp<0)) )
11395                         goto LNG_OVF;
11396             }
11397             lval1 = ltemp; break;
11398
11399         case GT_SUB:
11400             ltemp = lval1 - lval2;
11401             if (tree->gtOverflow())
11402             {
11403                 if (tree->gtFlags & GTF_UNSIGNED)
11404                 {
11405                     if (UINT64(lval2) > UINT64(lval1))
11406                         goto LNG_OVF;
11407                 }
11408                 else
11409                 {
11410                     /* If both operands are +ve or both are -ve, there can be no
11411                        overflow. Else use the logic for : lval1 + (-lval2) */
11412
11413                     if ((lval1<0) != (lval2<0))
11414                     {
11415                         if (lval2 == INT64_MIN) goto LNG_OVF;
11416                         lval2 = -lval2; goto LNG_ADD_CHKOVF;
11417                     }
11418                 }
11419             }
11420             lval1 = ltemp; break;
11421
11422         case GT_MUL:
11423             ltemp = lval1 * lval2;
11424
11425             if (tree->gtOverflow() && lval2 != 0)
11426             {
11427
11428                 if (tree->gtFlags & GTF_UNSIGNED)
11429                 {
11430                     UINT64 ultemp = ltemp;
11431                     UINT64 ulval1 = lval1;
11432                     UINT64 ulval2 = lval2;
11433                     if ((ultemp/ulval2) != ulval1) goto LNG_OVF;
11434                 }
11435                 else
11436                 {
11437                     // This does a multiply and then reverses it.  This test works great except for INT64_MIN *
11438                     // -1.  In that case we mess up the sign on ltemp.  Make sure to double-check the sign.
11439                     // If either operand is 0, then there is no overflow.
11440                     if (lval1 != 0) //lval2 checked above.
11441                     {
11442                         if (((lval1<0) == (lval2<0)) && (ltemp<0))
11443                         {
11444                             goto LNG_OVF;
11445                         }
11446                         if (((lval1<0) != (lval2<0)) && (ltemp>0))
11447                         {
11448                             goto LNG_OVF;
11449                         }
11450
11451                         // TODO-Amd64-Unix: Remove the code that disables optimizations for this method when the clang
11452                         // optimizer is fixed and/or the method implementation is refactored into simpler code.
11453                         // There is a bug in the clang-3.5 optimizer: in release builds it mistypes the args of the
11454                         // (ltemp / lval2) division (or just wrongly decides to use a 32-bit operation for the corner case
11455                         // of MIN_LONG), doing a 32-bit div operation instead of a 64-bit one.
11456                         // For the case of lval1 and lval2 both equal to MIN_LONG (0x8000000000000000) this raises a SIGFPE.
11457                         // Optimizations are disabled for now. See compiler.h.
11458                         if ((ltemp/lval2) != lval1) goto LNG_OVF;
11459                     }
11460                 }
11461             }
11462
11463             lval1 = ltemp; break;
11464
11465         case GT_OR : lval1 |= lval2; break;
11466         case GT_XOR: lval1 ^= lval2; break;
11467         case GT_AND: lval1 &= lval2; break;
11468
11469         case GT_LSH: lval1 <<= (lval2 & 0x3f); break;
11470         case GT_RSH: lval1 >>= (lval2 & 0x3f); break;
11471         case GT_RSZ:
11472                 /* logical shift -> make it unsigned to not propagate the sign bit */
11473                 lval1 = UINT64(lval1) >> (lval2 & 0x3f);
11474             break;
11475         case GT_ROL: lval1 = (lval1 << (lval2 & 0x3f)) | (UINT64(lval1) >> ((64 - lval2) & 0x3f));
11476             break;
11477         case GT_ROR: lval1 = (lval1 << ((64 - lval2) & 0x3f)) | (UINT64(lval1) >> (lval2 & 0x3f));
11478             break;
11479
11480             //Both DIV and IDIV on x86 raise an exception for min_int (and min_long) / -1.  So we preserve
11481             //that behavior here.
11482         case GT_DIV:
11483             if (!lval2) return tree;
11484
11485             if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
11486             {
11487                 return tree;
11488             }
11489             lval1 /= lval2; break;
11490
11491         case GT_MOD:
11492             if (!lval2) return tree;
11493             if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1))
11494             {
11495                 return tree;
11496             }
11497             lval1 %= lval2; break;
11498
11499         case GT_UDIV:
11500             if (!lval2) return tree;
11501             if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1)) return tree;
11502             lval1 = UINT64(lval1) / UINT64(lval2); break;
11503
11504         case GT_UMOD:
11505             if (!lval2) return tree;
11506             if (UINT64(lval1) == UI64(0x8000000000000000) && lval2 == INT64(-1)) return tree;
11507             lval1 = UINT64(lval1) % UINT64(lval2); break;
11508         default:
11509             return tree;
11510         }
11511
11512 CNS_LONG:
11513
11514 #ifdef  DEBUG
11515         if  (verbose)
11516         {
11517             printf("\nFolding long operator with constant nodes into a constant:\n");
11518             gtDispTree(tree);
11519         }
11520 #endif
11521         assert ((GenTree::s_gtNodeSizes[GT_CNS_NATIVELONG] == TREE_NODE_SZ_SMALL) ||
11522                 (tree->gtDebugFlags & GTF_DEBUG_NODE_LARGE) );
11523
11524         tree->ChangeOperConst(GT_CNS_NATIVELONG);
11525         tree->gtIntConCommon.SetLngValue(lval1);
11526         if (vnStore != nullptr)
11527         {
11528             fgValueNumberTreeConst(tree);
11529         }
11530
11531 #ifdef  DEBUG
11532         if  (verbose)
11533         {
11534             printf("Bashed to long constant:\n");
11535             gtDispTree(tree);
11536         }
11537 #endif
11538         goto DONE;
11539
11540     /*-------------------------------------------------------------------------
11541      * Fold constant FLOAT or DOUBLE binary operator
11542      */
11543
11544     case TYP_FLOAT:
11545     case TYP_DOUBLE:
11546
11547         if (tree->gtOverflowEx())
11548             return tree;
11549
11550         assert(op1->gtOper == GT_CNS_DBL);
11551         d1 = op1->gtDblCon.gtDconVal;
11552
11553         assert(varTypeIsFloating(op2->gtType));
11554         assert(op2->gtOper == GT_CNS_DBL);
11555         d2 = op2->gtDblCon.gtDconVal;
11556
11557         /* Special case - check if we have NaN operands.
11558          * For comparisons, if it is not an unordered operation, always return 0.
11559          * For unordered operations (i.e. the GTF_RELOP_NAN_UN flag is set)
11560          * the result is always true - return 1. */
11561
11562         if (_isnan(d1) || _isnan(d2))
11563         {
11564 #ifdef  DEBUG
11565             if  (verbose)
11566                 printf("Double operator(s) is NaN\n");
11567 #endif
11568             if (tree->OperKind() & GTK_RELOP)
11569             {
11570                 if (tree->gtFlags & GTF_RELOP_NAN_UN)
11571                 {
11572                     /* Unordered comparison with NaN always succeeds */
11573                     i1 = 1; goto FOLD_COND;
11574                 }
11575                 else
11576                 {
11577                     /* Normal comparison with NaN always fails */
11578                     i1 = 0; goto FOLD_COND;
11579                 }
11580             }
11581         }
11582
11583         switch (tree->gtOper)
11584         {
11585         case GT_EQ : i1 = (d1 == d2); goto FOLD_COND;
11586         case GT_NE : i1 = (d1 != d2); goto FOLD_COND;
11587
11588         case GT_LT : i1 = (d1 <  d2); goto FOLD_COND;
11589         case GT_LE : i1 = (d1 <= d2); goto FOLD_COND;
11590         case GT_GE : i1 = (d1 >= d2); goto FOLD_COND;
11591         case GT_GT : i1 = (d1 >  d2); goto FOLD_COND;
11592
11593 #if FEATURE_STACK_FP_X87
11594         case GT_ADD: d1 += d2; break;
11595         case GT_SUB: d1 -= d2; break;
11596         case GT_MUL: d1 *= d2; break;
11597         case GT_DIV: if (!d2) return tree;
11598                      d1 /= d2; break;
11599 #else //!FEATURE_STACK_FP_X87
11600         // non-x86 arch: floating point arithmetic should be done in declared
11601         // precision while doing constant folding. For this reason, even though TYP_FLOAT
11602         // constants are stored as double constants, double constants should be converted
11603         // to float when performing float arithmetic.  Here is an example case where
11604         // performing the arithmetic in double precision would lead to incorrect
11605         // results.
11606         //
11607         // Example:
11608         // float a = float.MaxValue;
11609         // float b = a*a;   This will produce +inf in single precision and 1.1579207543382391e+077 in double precision.
11610         // float c = b/b;   This will produce NaN in single precision and 1 in double precision.
11611         case GT_ADD:
11612             if (op1->TypeGet() == TYP_FLOAT)
11613             {
11614                 f1 = forceCastToFloat(d1);
11615                 f2 = forceCastToFloat(d2);
11616                 d1 = f1+f2;
11617             }
11618             else
11619             {
11620                 d1 += d2; 
11621             }
11622             break;
11623
11624         case GT_SUB: 
11625             if (op1->TypeGet() == TYP_FLOAT)
11626             {
11627                 f1 = forceCastToFloat(d1);
11628                 f2 = forceCastToFloat(d2);
11629                 d1 = f1 - f2;
11630             }
11631             else
11632             {
11633                 d1 -= d2; 
11634             }
11635             break;
11636
11637         case GT_MUL: 
11638             if (op1->TypeGet() == TYP_FLOAT)
11639             {
11640                 f1 = forceCastToFloat(d1);
11641                 f2 = forceCastToFloat(d2);
11642                 d1 = f1 * f2;
11643             }
11644             else
11645             {
11646                 d1 *= d2; 
11647             }
11648             break;
11649
11650         case GT_DIV: 
11651             if (!d2) return tree;
11652             if (op1->TypeGet() == TYP_FLOAT)
11653             {
11654                 f1 = forceCastToFloat(d1);
11655                 f2 = forceCastToFloat(d2);
11656                 d1 = f1/f2;
11657             }
11658             else
11659             {
11660                 d1 /= d2; 
11661             }
11662             break;
11663 #endif //!FEATURE_STACK_FP_X87
11664
11665         default:
11666             return tree;
11667         }
11668
11669 CNS_DOUBLE:
11670
11671 #ifdef  DEBUG
11672         if  (verbose)
11673         {
11674             printf("\nFolding fp operator with constant nodes into a fp constant:\n");
11675             gtDispTree(tree);
11676         }
11677 #endif
11678
11679         assert ((GenTree::s_gtNodeSizes[GT_CNS_DBL] == TREE_NODE_SZ_SMALL) ||
11680                 (tree->gtDebugFlags & GTF_DEBUG_NODE_LARGE)                            );
11681
11682         tree->ChangeOperConst(GT_CNS_DBL);
11683         tree->gtDblCon.gtDconVal = d1;
11684         if (vnStore != nullptr)
11685         {
11686             fgValueNumberTreeConst(tree);
11687         }
11688 #ifdef  DEBUG
11689         if  (verbose)
11690         {
11691             printf("Bashed to fp constant:\n");
11692             gtDispTree(tree);
11693         }
11694 #endif
11695         goto DONE;
11696
11697     default:
11698         /* not a foldable typ */
11699         return tree;
11700     }
11701
11702     //-------------------------------------------------------------------------
11703
11704 DONE:
11705
11706     /* Make sure no side effect flags are set on this constant node */
11707
11708     tree->gtFlags &= ~GTF_ALL_EFFECT;
11709
11710     return tree;
11711 }
11712 #ifdef _PREFAST_
11713 #pragma warning(pop)
11714 #endif
11715
11716 /*****************************************************************************
11717  *
11718  *  Create an assignment of the given value to a temp.
11719  */
11720
11721 GenTreePtr          Compiler::gtNewTempAssign(unsigned tmp, GenTreePtr val)
11722 {
11723     LclVarDsc  *    varDsc = lvaTable + tmp;
11724
11725     if (varDsc->TypeGet() == TYP_I_IMPL && val->TypeGet() == TYP_BYREF)
11726         impBashVarAddrsToI(val);
11727
11728     var_types   valTyp =    val->TypeGet();
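    // If 'val' is a normalize-on-load local, re-create the GT_LCL_VAR with the local's
    // real (small) type so the assignment is typed with the actual width of the value.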
11729     if (val->OperGet() == GT_LCL_VAR
11730         && lvaTable[val->gtLclVar.gtLclNum].lvNormalizeOnLoad())
11731     {
11732         valTyp = lvaGetRealType(val->gtLclVar.gtLclNum);
11733         val = gtNewLclvNode(val->gtLclVar.gtLclNum, valTyp, val->gtLclVar.gtLclILoffs);
11734     }
11735     var_types   dstTyp = varDsc->TypeGet();
11736     
11737     /* If the variable's lvType is not yet set then set it here */
11738     if (dstTyp == TYP_UNDEF) 
11739     {
11740         varDsc->lvType = dstTyp = genActualType(valTyp);
11741         if (varTypeIsGC(dstTyp))
11742         {
11743             varDsc->lvStructGcCount = 1;
11744         }
11745 #if FEATURE_SIMD
11746         else if (varTypeIsSIMD(dstTyp))
11747         {
11748             varDsc->lvSIMDType = 1;
11749         }
11750 #endif
11751     }
11752
11753 #ifdef  DEBUG
11754     /* Make sure the actual types match               */
11755     if (genActualType(valTyp) != genActualType(dstTyp))
11756     {
11757         // Plus some other exceptions that are apparently legal:
11758         // 1) TYP_REF or BYREF = TYP_I_IMPL
11759         bool ok = false;
11760         if (varTypeIsGC(dstTyp) && (valTyp == TYP_I_IMPL))
11761         {
11762             ok = true;
11763         }
11764         // 2) TYP_DOUBLE = TYP_FLOAT or TYP_FLOAT = TYP_DOUBLE
11765         else if (varTypeIsFloating(dstTyp) && varTypeIsFloating(valTyp))
11766         {
11767             ok = true;
11768         }
11769
11770         if (!ok)
11771         {
11772             gtDispTree(val);
11773             assert(!"Incompatible types for gtNewTempAssign");
11774         }
11775     }
11776 #endif
11777
11778     // Floating point assignments can be created during inlining
11779     // (see "Zero init inlinee locals:" in fgInlinePrependStatements),
11780     // so we may need to set compFloatingPointUsed to true here.
11781     //
11782     if (varTypeIsFloating(dstTyp) && (compFloatingPointUsed == false))
11783     {
11784         compFloatingPointUsed = true;
11785     }
11786
11787     /* Create the assignment node */
11788     
11789     GenTreePtr asg;
11790     GenTreePtr dest = gtNewLclvNode(tmp, dstTyp);
11791     dest->gtFlags |= GTF_VAR_DEF;
11792     
11793     // With first-class structs, we should be propagating the class handle on all non-primitive
11794     // struct types. We don't have a convenient way to do that for all SIMD temps, since some
11795     // internal trees use SIMD types that are not used by the input IL. In this case, we allow
11796     // a null type handle and derive the necessary information about the type from its varType.
11797     CORINFO_CLASS_HANDLE structHnd = gtGetStructHandleIfPresent(val);
11798     if (varTypeIsStruct(valTyp) && ((structHnd != NO_CLASS_HANDLE) || (varTypeIsSIMD(valTyp))))
11799     {
11800         // The GT_OBJ may be a child of a GT_COMMA.
11801         GenTreePtr valx = val->gtEffectiveVal(/*commaOnly*/true);
11802
11803         if (valx->gtOper == GT_OBJ)
11804         {
11805             assert(structHnd != nullptr);
11806             lvaSetStruct(tmp, structHnd, false);
11807         }
11808         dest->gtFlags |= GTF_DONT_CSE;
11809         valx->gtFlags |= GTF_DONT_CSE;
11810         asg = impAssignStruct(dest,
11811                               val, 
11812                               structHnd, 
11813                               (unsigned)CHECK_SPILL_NONE);
11814     }
11815     else
11816     {
11817         asg = gtNewAssignNode(dest, val);
11818     }
11819
11820 #ifndef LEGACY_BACKEND
11821     if (fgOrder == FGOrderLinear)
11822     {
11823         Rationalizer::MorphAsgIntoStoreLcl(nullptr, asg);
11824     }
11825 #endif // !LEGACY_BACKEND
11826
11827     return asg;
11828 }
11829
11830 /*****************************************************************************
11831  *
11832  *  Create a helper call to access a COM field (iff 'assg' is non-zero this is
11833  *  an assignment and 'assg' is the new value).
11834  */
11835
11836 GenTreePtr          Compiler::gtNewRefCOMfield(GenTreePtr   objPtr,
11837                                                CORINFO_RESOLVED_TOKEN * pResolvedToken,
11838                                                CORINFO_ACCESS_FLAGS access,
11839                                                CORINFO_FIELD_INFO * pFieldInfo,
11840                                                var_types    lclTyp,
11841                                                CORINFO_CLASS_HANDLE structType,
11842                                                GenTreePtr   assg)
11843 {
11844     assert(pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER ||
11845            pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_ADDR_HELPER ||
11846            pFieldInfo->fieldAccessor == CORINFO_FIELD_STATIC_ADDR_HELPER);
11847
11848     /* If we can't access it directly, we need to call a helper function */
11849     GenTreeArgList* args = NULL;
11850     var_types helperType = TYP_BYREF;
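    // The field-address helpers return a byref; for the value GET/SET helpers the
    // return type is adjusted below.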
11851
11852     if (pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER)
11853     {
11854         if  (access & CORINFO_ACCESS_SET)
11855         {
11856             assert(assg != 0);
11857             // helper needs pointer to struct, not struct itself
11858             if (pFieldInfo->helper == CORINFO_HELP_SETFIELDSTRUCT)
11859             {
11860                 assert(structType != 0);
11861                 assg = impGetStructAddr(assg, structType, (unsigned)CHECK_SPILL_ALL, true);
11862             }
11863             else if (lclTyp == TYP_DOUBLE && assg->TypeGet() == TYP_FLOAT)
11864                 assg = gtNewCastNode(TYP_DOUBLE, assg, TYP_DOUBLE);
11865             else if (lclTyp == TYP_FLOAT && assg->TypeGet() == TYP_DOUBLE)
11866                 assg = gtNewCastNode(TYP_FLOAT, assg, TYP_FLOAT);
11867
11868             args = gtNewArgList(assg);
11869             helperType = TYP_VOID;
11870         }
11871         else if (access & CORINFO_ACCESS_GET)
11872         {
11873             helperType = lclTyp;
11874
11875             // The calling convention for the helper does not take into 
11876             // account optimization of primitive structs.
11877             if ((pFieldInfo->helper == CORINFO_HELP_GETFIELDSTRUCT) && !varTypeIsStruct(lclTyp))
11878             {
11879                 helperType = TYP_STRUCT;
11880             }
11881         }
11882     }
11883
11884     if (pFieldInfo->helper == CORINFO_HELP_GETFIELDSTRUCT || pFieldInfo->helper == CORINFO_HELP_SETFIELDSTRUCT)
11885     {
11886         assert(pFieldInfo->structType != NULL);
11887         args = gtNewListNode(gtNewIconEmbClsHndNode(pFieldInfo->structType), args);
11888     }
11889
11890     GenTreePtr fieldHnd = impTokenToHandle(pResolvedToken);
11891     if (fieldHnd == NULL) // compDonotInline()
11892         return NULL;
11893
11894     args = gtNewListNode(fieldHnd, args);
11895
11896     // If it's a static field, we shouldn't have an object node
11897     // If it's an instance field, we have an object node
11898     assert( (pFieldInfo->fieldAccessor != CORINFO_FIELD_STATIC_ADDR_HELPER) ^ (objPtr == 0) );
11899
11900     if (objPtr != NULL)
11901         args = gtNewListNode(objPtr, args);
11902
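    // Note that gtNewListNode prepends, so the final argument order is: objPtr (if present),
    // the field handle, the struct type handle (when a struct helper is used), and then the
    // new value when this is a SET access.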
11903     GenTreePtr tree = gtNewHelperCallNode(pFieldInfo->helper, genActualType(helperType), 0, args);
11904
11905     if (pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER)
11906     {
11907         if (access & CORINFO_ACCESS_GET)
11908         {
11909             if (pFieldInfo->helper == CORINFO_HELP_GETFIELDSTRUCT)
11910             {
11911                 if (!varTypeIsStruct(lclTyp))
11912                 {
11913                     // get the result as primitive type
11914                     tree = impGetStructAddr(tree, structType, (unsigned)CHECK_SPILL_ALL, true);
11915                     tree = gtNewOperNode(GT_IND, lclTyp, tree);
11916                 }
11917             }
11918             else if (varTypeIsIntegral(lclTyp) && genTypeSize(lclTyp) < genTypeSize(TYP_INT))
11919             {
11920                 // The helper does not extend the small return types.
11921                 tree = gtNewCastNode(genActualType(lclTyp), tree, lclTyp);
11922             }
11923         }
11924     }
11925     else
11926     {
11927         // OK, now do the indirection
11928         if (access & CORINFO_ACCESS_GET)
11929         {
11930             if (varTypeIsStruct(lclTyp))
11931             {
11932                 tree = gtNewObjNode(structType, tree);
11933             }
11934             else
11935             {
11936                 tree = gtNewOperNode(GT_IND, lclTyp, tree);
11937             }
11938             tree->gtFlags |= (GTF_EXCEPT | GTF_GLOB_REF);
11939         }
11940         else if (access & CORINFO_ACCESS_SET)
11941         {
11942             if (varTypeIsStruct(lclTyp))
11943                 tree = impAssignStructPtr(tree, assg, structType, (unsigned)CHECK_SPILL_ALL);
11944             else
11945             {
11946                 tree = gtNewOperNode(GT_IND, lclTyp, tree);
11947                 tree->gtFlags |= (GTF_EXCEPT | GTF_GLOB_REF | GTF_IND_TGTANYWHERE);
11948                 tree = gtNewAssignNode(tree, assg);
11949             }
11950         }
11951     }
11952
11953     return(tree);
11954 }
11955
11956 /*****************************************************************************
11957  *
11958  *  Return true if the given node (excluding its child trees) contains side effects.
11959  *  Note that it does not recurse, and children need to be handled separately.
11960  *  It may return false even if the node has GTF_SIDE_EFFECT (because of its children).
11961  *
11962  *  Similar to OperMayThrow(), but handles GT_CALLs specially and also considers
11963  *  assignments.
11964  */
11965
11966 bool                Compiler::gtNodeHasSideEffects(GenTreePtr tree, unsigned flags)
11967 {
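    // 'flags' selects which kinds of side effects the caller cares about
    // (GTF_ASG, GTF_CALL, GTF_EXCEPT, GTF_MAKE_CSE, ...).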
11968     if (flags & GTF_ASG)
11969     {
11970         if  ((tree->OperKind() & GTK_ASGOP) || 
11971              (tree->gtOper == GT_INITBLK ||
11972               tree->gtOper == GT_COPYBLK ||
11973               tree->gtOper == GT_COPYOBJ))
11974         {
11975             return  true;
11976         }
11977     }
11978
11979     // Are there only GTF_CALL side effects remaining? (and no other side effect kinds)
11980     if (flags & GTF_CALL) 
11981     {
11982         if (tree->OperGet() == GT_CALL)
11983         {
11984             // Generally all GT_CALL nodes are considered to have side-effects.
11985             // But we may have a helper call that doesn't have any important side effects.
11986             //
11987             if (tree->gtCall.gtCallType == CT_HELPER)
11988             {
11989                 // But if this tree is a helper call we may not care about the side-effects
11990                 //
11991                 CorInfoHelpFunc helper = eeGetHelperNum(tree->AsCall()->gtCallMethHnd);
11992
11993                 // We definitely care about the side effects if MutatesHeap is true
11994                 //
11995                 if (s_helperCallProperties.MutatesHeap(helper))
11996                     return true;
11997
11998                 // with GTF_PERSISTENT_SIDE_EFFECTS_IN_CSE we will CSE helper calls that can run cctors.
11999                 //
12000                 if ((flags != GTF_PERSISTENT_SIDE_EFFECTS_IN_CSE) && (s_helperCallProperties.MayRunCctor(helper)))
12001                     return true;
12002
12003                 // If we also care about exceptions then check if the helper can throw
12004                 //
12005                 if (((flags & GTF_EXCEPT) != 0) && !s_helperCallProperties.NoThrow(helper))
12006                     return true;
12007
12008                 // If this is a Pure helper call or an allocator (that will not need to run a finalizer)
12009                 // then we don't need to preserve the side effects (of this call -- we may care about those of the arguments).
12010                 if (   s_helperCallProperties.IsPure(helper)
12011                     || (s_helperCallProperties.IsAllocator(helper) && !s_helperCallProperties.MayFinalize(helper)))
12012                 {
12013                     GenTreeCall* call = tree->AsCall();
12014                     for (GenTreeArgList* args = call->gtCallArgs; args != nullptr; args = args->Rest())
12015                     {
12016                         if (gtTreeHasSideEffects(args->Current(), flags)) return true;
12017                     }
12018                     // I'm a little worried that args that assign to temps that are late args will look like
12019                     // side effects...but better to be conservative for now.
12020                     for (GenTreeArgList* args = call->gtCallLateArgs; args != nullptr; args = args->Rest())
12021                     {
12022                         if (gtTreeHasSideEffects(args->Current(), flags)) return true;
12023                     }
12024                     // Otherwise:
12025                     return false;
12026                 }
12027             }
12028
12029             // Otherwise the GT_CALL is considered to have side-effects.
12030             return true;
12031         }
12032     }
12033
12034     if (flags & GTF_EXCEPT)
12035     {
12036         if (tree->OperMayThrow())
12037             return true;
12038     }
12039
12040     // Expressions declared as CSE by (e.g.) hoisting code are considered to have relevant side
12041     // effects (if we care about GTF_MAKE_CSE).
12042     if ((flags & GTF_MAKE_CSE) && (tree->gtFlags & GTF_MAKE_CSE))
12043         return true;
12044
12045     return false;
12046 }
12047
12048 /*****************************************************************************
12049  * Returns true if the expr tree has any side effects.
12050  */
12051
12052 bool                Compiler::gtTreeHasSideEffects(GenTreePtr tree,
12053                                                    unsigned   flags /* = GTF_SIDE_EFFECT*/)
12054 {
12055     // These are the side effect flags that we care about for this tree
12056     unsigned sideEffectFlags = tree->gtFlags & flags;
12057
12058     // Does this tree have any Side-effect flags set that we care about?
12059     if (sideEffectFlags == 0)
12060     {
12061         // no it doesn't..
12062         return false;
12063     }
12064
12065     if (sideEffectFlags == GTF_CALL)
12066     {
12067         if (tree->OperGet() == GT_CALL)
12068         {
12069             // Generally all trees that contain GT_CALL nodes are considered to have side-effects.
12070             //
12071             if (tree->gtCall.gtCallType == CT_HELPER)
12072             {
12073                 // If this node is a helper call we may not care about the side-effects.
12074                 // Note that gtNodeHasSideEffects checks the side effects of the helper itself
12075                 // as well as the side effects of its arguments.
12076                 return gtNodeHasSideEffects(tree, flags);
12077             }
12078         }
12079         else if (tree->OperGet() == GT_INTRINSIC)
12080         {           
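            // An intrinsic node can have GTF_CALL set without necessarily having call
            // side effects; check the intrinsic node itself and each of its operands.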
12081             if (gtNodeHasSideEffects(tree, flags))
12082                 return true;
12083
12084             if (gtNodeHasSideEffects(tree->gtOp.gtOp1, flags))
12085                 return true;
12086
12087             if ((tree->gtOp.gtOp2 != nullptr) && gtNodeHasSideEffects(tree->gtOp.gtOp2, flags))
12088                 return true;
12089
12090             return false;
12091         }
12092     }
12093
12094     return true;
12095 }
12096
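// gtBuildCommaList: prepend 'expr' onto the (possibly empty) comma list 'list' of side effects.
//
// Illustrative sketch (not code from this file; 'comp' is a hypothetical Compiler*): calling it
// repeatedly with side effects e1, e2, e3 builds the chain as
//
//     GenTreePtr list = nullptr;
//     list = comp->gtBuildCommaList(list, e1);    // e1
//     list = comp->gtBuildCommaList(list, e2);    // COMMA(e2, e1)
//     list = comp->gtBuildCommaList(list, e3);    // COMMA(e3, COMMA(e2, e1))
//
// i.e. each new side effect is prepended, which is why gtExtractSideEffList (below) visits
// subtrees in reverse order so that the final list preserves the original evaluation order.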
12097 GenTreePtr Compiler::gtBuildCommaList(GenTreePtr list, GenTreePtr expr)
12098 {
12099     // 'list' starts off as null, 
12100     //        and when it is null we haven't started the list yet.
12101     //
12102     if (list != nullptr)
12103     {
12104         // Create a GT_COMMA that prepends 'expr' in front of the remaining set of expressions in 'list'
12105         GenTreePtr result = gtNewOperNode(GT_COMMA, TYP_VOID, expr, list);
12106
12107         // Set the flags in the comma node 
12108         result->gtFlags |= (list->gtFlags & GTF_ALL_EFFECT);
12109         result->gtFlags |= (expr->gtFlags & GTF_ALL_EFFECT);
12110
12111         // 'list' and 'expr' should have value numbers defined for both or for neither one
12112         noway_assert(list->gtVNPair.BothDefined() == expr->gtVNPair.BothDefined());
12113             
12114         // Set the ValueNumber 'gtVNPair' for the new GT_COMMA node
12115         //
12116         if (expr->gtVNPair.BothDefined())
12117         {
12118             // The result of a GT_COMMA node is op2, so its normal value number is op2vnp.
12119             // But we also need to include the union of the exception sets of op1 and op2;
12120             // we compute this value into exceptions_vnp.
12121             ValueNumPair  op1vnp;
12122             ValueNumPair  op1Xvnp = ValueNumStore::VNPForEmptyExcSet();
12123             ValueNumPair  op2vnp;
12124             ValueNumPair  op2Xvnp = ValueNumStore::VNPForEmptyExcSet();
12125             
12126             vnStore->VNPUnpackExc(expr->gtVNPair, &op1vnp, &op1Xvnp);
12127             vnStore->VNPUnpackExc(list->gtVNPair, &op2vnp, &op2Xvnp);
12128
12129             ValueNumPair  exceptions_vnp = ValueNumStore::VNPForEmptyExcSet();
12130
12131             exceptions_vnp = vnStore->VNPExcSetUnion(exceptions_vnp, op1Xvnp);
12132             exceptions_vnp = vnStore->VNPExcSetUnion(exceptions_vnp, op2Xvnp);
12133
12134             result->gtVNPair = vnStore->VNPWithExc(op2vnp, exceptions_vnp);   
12135         }
12136
12137         return result;
12138     }
12139     else
12140     {
12141         // The 'expr' will start the list of expressions
12142         return expr;
12143     }
12144
12145 }
12146
12147 /*****************************************************************************
12148  *
12149  *  Extracts side effects from the given expression
12150  *  and appends them to a given list (actually a GT_COMMA list)
12151  *  If 'ignoreRoot' is specified, the method doesn't treat the top-level
12152  *  tree node as having side effects.
12153  */
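// Illustrative usage sketch (hypothetical caller, names not from this file): when a tree's value
// is no longer needed but its side effects must be preserved, a caller would typically do
//
//     GenTreePtr sideEffList = nullptr;
//     gtExtractSideEffList(unusedTree, &sideEffList);
//     if (sideEffList != nullptr)
//     {
//         // keep 'sideEffList' (a GT_COMMA chain) in place of 'unusedTree'
//     }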
12154
12155 void                Compiler::gtExtractSideEffList(GenTreePtr expr, GenTreePtr * pList,
12156                                                    unsigned   flags /* = GTF_SIDE_EFFECT*/,
12157                                                    bool ignoreRoot /* = false */)
12158 {
12159     assert(expr); assert(expr->gtOper != GT_STMT);
12160
12161     /* If no side effect in the expression return */
12162
12163     if (!gtTreeHasSideEffects(expr, flags))
12164         return;
12165
12166     genTreeOps  oper = expr->OperGet();
12167     unsigned    kind = expr->OperKind();
12168
12169     // Look for any side effects that we care about 
12170     //
12171     if (!ignoreRoot && gtNodeHasSideEffects(expr, flags))
12172     {
12173         // Add the side effect to the list and return
12174         //
12175         *pList = gtBuildCommaList(*pList, expr);
12176         return;
12177     }
12178
12179     if (kind & GTK_LEAF)
12180         return;
12181
12182     if (oper == GT_LOCKADD || oper == GT_XADD || oper == GT_XCHG || oper == GT_CMPXCHG)
12183     {
12184         // XADD both adds to the memory location and also fetches the old value.  If we only need the side
12185         // effect of this instruction, change it into a GT_LOCKADD node (the add only).
12186         if (oper == GT_XADD)
12187         {
12188             expr->gtOper = GT_LOCKADD;
12189             expr->gtType = TYP_VOID;
12190         }
12191
12192         // These atomic operations must always be kept as side effects.
12193         *pList = gtBuildCommaList(*pList, expr);
12194         return;
12195     }
12196
12197     if (kind & GTK_SMPOP)
12198     {
12199         GenTreePtr      op1 = expr->gtOp.gtOp1; 
12200         GenTreePtr      op2 = expr->gtGetOp2(); 
12201
12202         if (flags & GTF_EXCEPT)
12203         {
12204             // Special case - GT_ADDR of GT_IND nodes of TYP_STRUCT
12205             // have to be kept together
12206
12207             if (oper == GT_ADDR && op1->OperIsIndir() && op1->gtType == TYP_STRUCT)
12208             {
12209                 *pList = gtBuildCommaList(*pList, expr);
12210
12211 #ifdef  DEBUG
12212                 if  (verbose)
12213                     printf("Keep the GT_ADDR and GT_IND together:\n");
12214 #endif
12215                 return;
12216             }
12217         }
12218
12219         /* Continue searching for side effects in the subtrees of the expression
12220          * NOTE: Be careful to preserve the right ordering - side effects are prepended
12221          * to the list */
12226
12227         if (expr->gtFlags & GTF_REVERSE_OPS)
12228         {
12229             assert(oper != GT_COMMA);
12230             if (op1) gtExtractSideEffList(op1, pList, flags);
12231             if (op2) gtExtractSideEffList(op2, pList, flags);
12232         }
12233         else
12234         {
12235             if (op2) gtExtractSideEffList(op2, pList, flags);
12236             if (op1) gtExtractSideEffList(op1, pList, flags);
12237         }
12238     }
12239
12240     if (expr->OperGet() == GT_CALL)
12241     {
12242         // Generally all GT_CALL nodes are considered to have side-effects.
12243         // So if we get here it must be a Helper call that we decided does
12244         // not have side effects that we needed to keep
12245         //
12246         assert(expr->gtCall.gtCallType == CT_HELPER);
12247
12248         // We can remove this Helper call, but there still could be 
12249         // side-effects in the arguments that we may need to keep 
12250         //  
12251         GenTreePtr args;
12252         for (args = expr->gtCall.gtCallArgs; args; args = args->gtOp.gtOp2)
12253         {
12254             assert(args->IsList());
12255             gtExtractSideEffList(args->Current(), pList, flags);
12256         }
12257         for (args = expr->gtCall.gtCallLateArgs; args; args = args->gtOp.gtOp2)
12258         {
12259             assert(args->IsList());
12260             gtExtractSideEffList(args->Current(), pList, flags);
12261         }
12262     }
12263
12264     if (expr->OperGet() == GT_ARR_BOUNDS_CHECK
12265 #ifdef FEATURE_SIMD
12266         || expr->OperGet() == GT_SIMD_CHK
12267 #endif // FEATURE_SIMD
12268         )
12269     {
12270         gtExtractSideEffList(expr->AsBoundsChk()->gtArrLen, pList, flags);
12271         gtExtractSideEffList(expr->AsBoundsChk()->gtIndex, pList, flags);
12272     }
12273 }
12274
12275
12276 /*****************************************************************************
12277  *
12278  *  For debugging only - displays a tree node list and makes sure all the
12279  *  links are correctly set.
12280  */
12281
12282 #ifdef  DEBUG
12283
12284 void                dispNodeList(GenTreePtr list, bool verbose)
12285 {
12286     GenTreePtr      last = 0;
12287     GenTreePtr      next;
12288
12289     if  (!list)
12290         return;
12291
12292     for (;;)
12293     {
12294         next = list->gtNext;
12295
12296         if  (verbose)
12297             printf("%08X -> %08X -> %08X\n", last, list, next);
12298
12299         assert(!last || last->gtNext == list);
12300
12301         assert(next == 0 || next->gtPrev == list);
12302
12303         if  (!next)
12304             break;
12305
12306         last = list;
12307         list = next;
12308     }
12309     printf("");         // empty string means flush
12310 }
12311
12312 /*****************************************************************************
12313  * Callback to assert that the nodes of a qmark-colon subtree are marked
12314  */
12315
12316 /* static */
12317 Compiler::fgWalkResult      Compiler::gtAssertColonCond(GenTreePtr *pTree,
12318                                                         fgWalkData *data)
12319 {
12320     assert(data->pCallbackData == NULL);
12321
12322     assert((*pTree)->gtFlags & GTF_COLON_COND);
12323
12324     return WALK_CONTINUE;
12325 }
12326 #endif // DEBUG
12327
12328 /*****************************************************************************
12329  * Callback to mark the nodes of a qmark-colon subtree that are conditionally 
12330  * executed.
12331  */
12332
12333 /* static */
12334 Compiler::fgWalkResult      Compiler::gtMarkColonCond(GenTreePtr *pTree,
12335                                                       fgWalkData *data)
12336 {
12337     assert(data->pCallbackData == NULL);
12338
12339     (*pTree)->gtFlags |= GTF_COLON_COND;
12340
12341     return WALK_CONTINUE;
12342 }
12343
12344 /*****************************************************************************
12345  * Callback to clear the conditionally executed flags of nodes that will no
12346  * longer be conditionally executed. Note that when we find another colon we must
12347  * stop, as the nodes below this one WILL be conditionally executed. This callback
12348  * is called when folding a qmark condition (i.e. the condition is constant).
12349  */
12350
12351 /* static */
12352 Compiler::fgWalkResult      Compiler::gtClearColonCond(GenTreePtr *pTree,
12353                                                        fgWalkData *data)
12354 {
12355     GenTreePtr tree = *pTree;
12356
12357     assert(data->pCallbackData == NULL);
12358
12359     if (tree->OperGet()==GT_COLON)
12360     {
12361         // Nodes below this will be conditionally executed.
12362         return WALK_SKIP_SUBTREES;
12363     }
12364
12365     tree->gtFlags &= ~GTF_COLON_COND;
12366     return WALK_CONTINUE;
12367 }
12368
12369
12370 struct FindLinkData {
12371     GenTreePtr   nodeToFind;
12372     GenTreePtr * result;
12373 };
12374
12375 /*****************************************************************************
12376  *
12377  *  Callback used by the tree walker to implement fgFindLink()
12378  */
12379 static Compiler::fgWalkResult  gtFindLinkCB(GenTreePtr *               pTree, 
12380                                             Compiler::fgWalkData *  cbData)
12381 {
12382     FindLinkData * data = (FindLinkData*) cbData->pCallbackData;
12383     if (*pTree == data->nodeToFind)
12384     {
12385         data->result = pTree;
12386         return Compiler::WALK_ABORT;
12387     }
12388
12389     return Compiler::WALK_CONTINUE;
12390 }
12391
12392 GenTreePtr * Compiler::gtFindLink(GenTreePtr stmt, GenTreePtr node)
12393 {
12394     assert(stmt->gtOper == GT_STMT);
12395
12396     FindLinkData data = {node, NULL};
12397
12398     fgWalkResult result = fgWalkTreePre(&stmt->gtStmt.gtStmtExpr, gtFindLinkCB, &data);
12399
12400     if (result == WALK_ABORT)
12401     {
12402         assert(data.nodeToFind == *data.result);
12403         return data.result;
12404     }
12405     else
12406         return NULL;
12407 }
12408
12409 /*****************************************************************************
12410  *
12411  *  Callback that checks if a tree node has oper type GT_CATCH_ARG
12412  */
12413
12414 static Compiler::fgWalkResult  gtFindCatchArg(GenTreePtr *               pTree,
12415                                               Compiler::fgWalkData *  /* data */)
12416 {
12417     return ((*pTree)->OperGet() == GT_CATCH_ARG) ? Compiler::WALK_ABORT
12418                                                  : Compiler::WALK_CONTINUE;
12419 }
12420
12421 /*****************************************************************************/
12422 bool            Compiler::gtHasCatchArg(GenTreePtr tree)
12423 {
12424     if  (((tree->gtFlags & GTF_ORDER_SIDEEFF) != 0) &&
12425          (fgWalkTreePre(&tree, gtFindCatchArg) == WALK_ABORT))
12426     {
12427         return true;
12428     }
12429     return false;
12430 }
12431
12432
12433 //------------------------------------------------------------------------
12434 // gtHasCallOnStack:
12435 //
12436 // Arguments:
12437 //    parentStack: a context (stack of parent nodes)
12438 //
12439 // Return Value:
12440 //     returns true if any of the parent nodes are a GT_CALL
12441 //
12442 // Assumptions:
12443 //    We have a stack of parent nodes. This generally requires that
12444 //    we are performing a recursive tree walk using struct fgWalkData
12445 //
12446 //------------------------------------------------------------------------
12447 /* static */ bool Compiler::gtHasCallOnStack(GenTreeStack *parentStack)
12448 {
12449     for (int i = 0;
12450          i < parentStack->Height();
12451          i++)
12452     {
12453         GenTree *node = parentStack->Index(i);
12454         if (node->OperGet() == GT_CALL)
12455         {
12456             return true;
12457         }
12458     }
12459     return false;
12460 }
12461
12462 //------------------------------------------------------------------------
12463 // gtCheckQuirkAddrExposedLclVar:
12464 //
12465 // Arguments:
12466 //    tree: an address taken GenTree node that is a GT_LCL_VAR
12467 //    parentStack: a context (stack of parent nodes)
12468 //    The 'parentStack' is used to ensure that we are in an argument context.
12469 //
12470 // Return Value:
12471 //    None
12472 //
12473 // Notes:
12474 //    When the allocation size of this LclVar is 32 bits we will quirk the size to 64 bits
12475 //    because some PInvoke signatures incorrectly specify a ByRef to an INT32
12476 //    when they actually write a SIZE_T or INT64. There are cases where overwriting
12477 //    these extra 4 bytes corrupts some data (such as a saved register), leading to an A/V,
12478 //    whereas previously the JIT64 codegen did not lead to an A/V.
12479 //     
12480 // Assumptions:
12481 //    'tree' is known to be address taken and we have a stack
12482 //    of parent nodes. Both of these generally require that
12483 //    we are performing a recursive tree walk using struct fgWalkData.
12484 //------------------------------------------------------------------------
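// Illustrative scenario (a sketch, not a real signature from any particular test): the quirk
// guards against interop mismatches such as
//
//     // managed declaration:   static extern void NativeFoo(ref int value);
//     // actual native callee:  void NativeFoo(size_t* value);   // writes 8 bytes on 64-bit
//
// where the callee writes 8 bytes through a byref whose managed type is only 4 bytes wide.
// Quirking the local to TYP_LONG gives the extra 4 bytes a harmless place to land.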
12485 void Compiler::gtCheckQuirkAddrExposedLclVar(GenTreePtr tree, GenTreeStack* parentStack)
12486 {
12487     // We only need to Quirk for _TARGET_64BIT_
12488 #ifdef _TARGET_64BIT_
12489
12490     // Do we have a parent node that is a Call?
12491     if (!Compiler::gtHasCallOnStack(parentStack))
12492     {
12493         // No, so we don't apply the Quirk
12494         return;
12495     }
12496     noway_assert(tree->gtOper == GT_LCL_VAR);
12497     unsigned    lclNum  = tree->gtLclVarCommon.gtLclNum;
12498     LclVarDsc * varDsc  = &lvaTable[lclNum];
12499     var_types   vartype = varDsc->TypeGet();
12500
12501     if (varDsc->lvIsParam)
12502     {
12503         // We can't Quirk the size of an incoming parameter
12504         return;
12505     }
12506
12507     // We may need to Quirk the storage size for this LCL_VAR
12508     if (genActualType(vartype) == TYP_INT)
12509     {
12510         varDsc->lvQuirkToLong = true;
12511 #ifdef DEBUG
12512         if (verbose)
12513         {
12514             printf("\nAdding a Quirk for the storage size of LclVar V%02d:", lclNum);
12515             printf(" (%s ==> %s)\n", varTypeName(vartype), varTypeName(TYP_LONG));
12516         }
12517 #endif // DEBUG
12518     }
12519 #endif
12520 }
12521
12522 // Checks to see if we're allowed to optimize Type::op_Equality or Type::op_Inequality on this operand.
12523 // We're allowed to convert to GT_EQ/GT_NE if one of the operands is:
12524 //  1) The result of Object::GetType
12525 //  2) The result of typeof(...)
12526 //  3) a local variable of type RuntimeType.
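// For example (an illustrative sketch): the IL produced for a C# test such as
//     if (o.GetType() == typeof(string)) { ... }
// calls Type::op_Equality with operands matching cases 1) and 2) above, so it can be converted
// into a GT_EQ/GT_NE comparison.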
12527 bool Compiler::gtCanOptimizeTypeEquality(GenTreePtr tree)
12528 {
12529     if (tree->gtOper == GT_CALL)
12530     {
12531         if (tree->gtCall.gtCallType == CT_HELPER)
12532         {
12533             if (gtIsTypeHandleToRuntimeTypeHelper(tree))
12534                 return true;
12535         }
12536         else if (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_SPECIAL_INTRINSIC)
12537         {
12538             if (info.compCompHnd->getIntrinsicID(tree->gtCall.gtCallMethHnd)
12539                 == CORINFO_INTRINSIC_Object_GetType)
12540             {
12541                 return true;
12542             }
12543         }
12544     }
12545     else if ((tree->gtOper == GT_INTRINSIC) && 
12546              (tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Object_GetType))
12547     {
12548         return true;
12549     }
12550     else if (tree->gtOper == GT_LCL_VAR)
12551     {
12552         LclVarDsc * lcl = &(lvaTable[tree->gtLclVarCommon.gtLclNum]);
12553         if (lcl->TypeGet() == TYP_REF)
12554         {
12555             if (lcl->lvVerTypeInfo.GetClassHandle()
12556                 == info.compCompHnd->getBuiltinClass(CLASSID_RUNTIME_TYPE))
12557                 return true;
12558         }
12559     }
12560     return false;
12561 }
12562
12563 bool Compiler::gtIsTypeHandleToRuntimeTypeHelper(GenTreePtr tree)
12564 {
12565     return tree->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE) ||
12566         tree->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE_MAYBENULL);
12567 }
12568
12569 bool Compiler::gtIsActiveCSE_Candidate(GenTreePtr tree)
12570 {
12571     return (optValnumCSE_phase && IS_CSE_INDEX(tree->gtCSEnum));
12572 }
12573
12574 /*****************************************************************************/
12575
12576 struct ComplexityStruct
12577 {
12578     unsigned m_numNodes; unsigned m_nodeLimit;
12579     ComplexityStruct(unsigned nodeLimit) : m_numNodes(0), m_nodeLimit(nodeLimit) {}
12580 };
12581
12582 static Compiler::fgWalkResult ComplexityExceedsWalker(GenTreePtr* pTree, Compiler::fgWalkData* data)
12583 {
12584     ComplexityStruct* pComplexity = (ComplexityStruct*)data->pCallbackData;
12585     if (++pComplexity->m_numNodes > pComplexity->m_nodeLimit) return Compiler::WALK_ABORT;
12586     else return Compiler::WALK_CONTINUE;
12587 }
12588
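// Illustrative usage sketch (the call site and the limit below are hypothetical): callers use
// this to bail out of transformations on very large trees, e.g.
//
//     if (gtComplexityExceeds(&expr, 25))
//     {
//         return;   // tree too big; skip the optimization
//     }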
12589 bool       Compiler::gtComplexityExceeds(GenTreePtr* tree, unsigned limit)
12590 {
12591     ComplexityStruct complexity(limit);
12592     if (fgWalkTreePre(tree, &ComplexityExceedsWalker, &complexity) == WALK_ABORT) return true;
12593     else return false;
12594 }
12595
12596 /*
12597 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
12598 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
12599 XX                                                                           XX
12600 XX                          BasicBlock                                       XX
12601 XX                                                                           XX
12602 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
12603 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
12604 */
12605
12606
12607 #if     MEASURE_BLOCK_SIZE
12608 /* static  */
12609 size_t              BasicBlock::s_Size;
12610 /* static */
12611 size_t              BasicBlock::s_Count;
12612 #endif // MEASURE_BLOCK_SIZE
12613
12614 #ifdef DEBUG
12615 // The max # of tree nodes in any BB
12616 /* static */
12617 unsigned            BasicBlock::s_nMaxTrees;
12618 #endif // DEBUG
12619
12620
12621 /*****************************************************************************
12622  *
12623  *  Allocate a basic block but don't append it to the current BB list.
12624  */
12625
12626 BasicBlock *        Compiler::bbNewBasicBlock(BBjumpKinds jumpKind)
12627 {
12628     BasicBlock *    block;
12629
12630     /* Allocate the block descriptor and zero it out */
12631     assert(fgSafeBasicBlockCreation);
12632
12633     block = new (this, CMK_BasicBlock) BasicBlock;
12634
12635 #if     MEASURE_BLOCK_SIZE
12636     BasicBlock::s_Count += 1;
12637     BasicBlock::s_Size  += sizeof(*block);
12638 #endif
12639
12640 #ifdef DEBUG
12641     // fgLookupBB() is invalid until fgInitBBLookup() is called again.
12642     fgBBs = (BasicBlock**)0xCDCD;
12643 #endif
12644
12645     // TODO-Throughput: The following memset is pretty expensive - do something else?
12646     // Note that some fields have to be initialized to 0 (like bbFPStateX87)
12647     memset(block, 0, sizeof(*block));
12648
12649     // scopeInfo needs to be able to differentiate between blocks which
12650     // correspond to some instrs (and so may have some LocalVarInfo
12651     // boundaries) and those that have been inserted by the JIT.
12652     block->bbCodeOffs    = BAD_IL_OFFSET;
12653     block->bbCodeOffsEnd = BAD_IL_OFFSET;
12654
12655     /* Give the block a number, set the ancestor count and weight */
12656
12657     ++fgBBcount;
12658   
12659     if (compIsForInlining())
12660     {
12661         block->bbNum = ++impInlineInfo->InlinerCompiler->fgBBNumMax;
12662     }
12663     else
12664     {        
12665         block->bbNum = ++fgBBNumMax;
12666     }
12667
12668     block->bbRefs     = 1;
12669     block->bbWeight   = BB_UNITY_WEIGHT;
12670
12671     block->bbStkTempsIn  = NO_BASE_TMP;
12672     block->bbStkTempsOut = NO_BASE_TMP;
12673
12674     block->bbEntryState = NULL;
12675
12676     /* Record the jump kind in the block */
12677
12678     block->bbJumpKind = jumpKind;
12679
12680     if (jumpKind == BBJ_THROW)
12681         block->bbSetRunRarely();
12682
12683 #ifdef DEBUG
12684     if  (verbose)
12685         printf("New Basic Block BB%02u [%p] created.\n", block->bbNum, dspPtr(block));
12686 #endif
12687
12688     // We will give all the blocks var sets after the number of tracked variables
12689     // is determined and frozen.  After that, if we dynamically create a basic block,
12690     // we will initialize its var sets.
12691     if (fgBBVarSetsInited)
12692     {
12693         VarSetOps::AssignNoCopy(this, block->bbVarUse, VarSetOps::MakeEmpty(this));
12694         VarSetOps::AssignNoCopy(this, block->bbVarDef, VarSetOps::MakeEmpty(this));
12695         VarSetOps::AssignNoCopy(this, block->bbVarTmp, VarSetOps::MakeEmpty(this));
12696         VarSetOps::AssignNoCopy(this, block->bbLiveIn, VarSetOps::MakeEmpty(this));
12697         VarSetOps::AssignNoCopy(this, block->bbLiveOut, VarSetOps::MakeEmpty(this));
12698         VarSetOps::AssignNoCopy(this, block->bbScope, VarSetOps::MakeEmpty(this));
12699     }
12700     else
12701     {
12702         VarSetOps::AssignNoCopy(this, block->bbVarUse, VarSetOps::UninitVal());
12703         VarSetOps::AssignNoCopy(this, block->bbVarDef, VarSetOps::UninitVal());
12704         VarSetOps::AssignNoCopy(this, block->bbVarTmp, VarSetOps::UninitVal());
12705         VarSetOps::AssignNoCopy(this, block->bbLiveIn, VarSetOps::UninitVal());
12706         VarSetOps::AssignNoCopy(this, block->bbLiveOut, VarSetOps::UninitVal());
12707         VarSetOps::AssignNoCopy(this, block->bbScope, VarSetOps::UninitVal());
12708     }
12709
12710     block->bbHeapUse =     false;
12711     block->bbHeapDef =     false;
12712     block->bbHeapLiveIn =  false;
12713     block->bbHeapLiveOut = false;
12714
12715     block->bbHeapSsaPhiFunc = NULL;
12716     block->bbHeapSsaNumIn   = 0;
12717     block->bbHeapSsaNumOut  = 0;
12718
12719     // Make sure we reserve a NOT_IN_LOOP value that isn't a legal table index.
12720     static_assert_no_msg(MAX_LOOP_NUM < BasicBlock::NOT_IN_LOOP);
12721
12722     block->bbNatLoopNum = BasicBlock::NOT_IN_LOOP;
12723
12724     return block;
12725 }
12726
12727
12728 //------------------------------------------------------------------------------
12729 // containsStatement - return true if the block contains the given statement
12730 //------------------------------------------------------------------------------
12731
12732 bool BasicBlock::containsStatement(GenTree *statement)
12733 {
12734     assert(statement->gtOper == GT_STMT);
12735
12736     GenTree *curr = bbTreeList;
12737     do 
12738     {
12739         if (curr == statement)
12740             break;
12741         curr = curr->gtNext; 
12742     }
12743     while (curr);
12744     return curr != NULL;
12745 }
12746
12747 GenTreeStmt* BasicBlock::FirstNonPhiDef()
12748 {
12749     GenTreePtr stmt = bbTreeList;
12750     if (stmt == nullptr) return nullptr;
12751     GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
12752     while ((tree->OperGet() == GT_ASG && tree->gtOp.gtOp2->OperGet() == GT_PHI)
12753            || (tree->OperGet() == GT_STORE_LCL_VAR && tree->gtOp.gtOp1->OperGet() == GT_PHI))
12754     {
12755         stmt = stmt->gtNext;
12756         if (stmt == nullptr) return nullptr;
12757         tree = stmt->gtStmt.gtStmtExpr;
12758     }
12759     return stmt->AsStmt();
12760 }
12761
12762 GenTreePtr BasicBlock::FirstNonPhiDefOrCatchArgAsg()
12763 {
12764     GenTreePtr stmt = FirstNonPhiDef();
12765     if (stmt == nullptr) return nullptr;
12766     GenTreePtr tree = stmt->gtStmt.gtStmtExpr;
12767     if ((tree->OperGet() == GT_ASG && tree->gtOp.gtOp2->OperGet() == GT_CATCH_ARG)
12768         || (tree->OperGet() == GT_STORE_LCL_VAR && tree->gtOp.gtOp1->OperGet() == GT_CATCH_ARG))
12769     {
12770         stmt = stmt->gtNext;
12771     }
12772     return stmt;
12773 }
12774
12775 /*****************************************************************************
12776  *
12777  *  Mark a block as rarely run. We also don't want to have a loop in a
12778  *  rarely run block, so we set its weight to zero.
12779  */
12780
12781 void                BasicBlock::bbSetRunRarely()
12782 {
12783     setBBWeight(BB_ZERO_WEIGHT);
12784     if (bbWeight == BB_ZERO_WEIGHT) 
12785     {
12786         bbFlags  |= BBF_RUN_RARELY;    // This block is never/rarely run
12787     }
12788 }
12789
12790 /*****************************************************************************
12791  *
12792  *  Can a BasicBlock be inserted after this without altering the flowgraph?
12793  */
12794
12795 bool                BasicBlock::bbFallsThrough()
12796 {
12797     switch (bbJumpKind)
12798     {
12799
12800     case BBJ_THROW:
12801     case BBJ_EHFINALLYRET:
12802     case BBJ_EHFILTERRET:
12803     case BBJ_EHCATCHRET:
12804     case BBJ_RETURN:
12805     case BBJ_ALWAYS:
12806     case BBJ_LEAVE:
12807     case BBJ_SWITCH:
12808         return false;
12809
12810     case BBJ_NONE:
12811     case BBJ_COND:
12812         return true;
12813
12814     case BBJ_CALLFINALLY:
12815         return ((bbFlags & BBF_RETLESS_CALL) == 0);
12816     
12817     default:
12818         assert(!"Unknown bbJumpKind in bbFallsThrough()");
12819         return true;
12820     }
12821 }
12822
12823 unsigned BasicBlock::NumSucc(Compiler * comp)
12824 {
12825     // As described in the spec comment of NumSucc at its declaration, whether "comp" is null determines
12826     // whether NumSucc and GetSucc yield successors of finally blocks.  
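    // A typical iteration over successors pairs NumSucc and GetSucc with the same 'comp'
    // argument (an illustrative sketch, not code from this file):
    //
    //     for (unsigned i = 0; i < block->NumSucc(comp); i++)
    //     {
    //         BasicBlock* succ = block->GetSucc(i, comp);
    //         ...
    //     }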
12827
12828     switch (bbJumpKind)
12829     {
12830
12831     case BBJ_THROW:
12832     case BBJ_RETURN:
12833         return 0;
12834
12835     case BBJ_EHFILTERRET:
12836         if (comp == NULL)
12837             return 0;
12838         else
12839             return 1;
12840
12841     case BBJ_EHFINALLYRET:
12842         {
12843             if (comp == NULL)
12844             { 
12845                 return 0;
12846             }
12847             else
12848             {
12849                 // The first block of the handler is labelled with the catch type.
12850                 BasicBlock* hndBeg = comp->fgFirstBlockOfHandler(this);
12851                 if (hndBeg->bbCatchTyp == BBCT_FINALLY)
12852                 {
12853                     return comp->fgNSuccsOfFinallyRet(this);
12854                 }
12855                 else
12856                 {
12857                     assert(hndBeg->bbCatchTyp == BBCT_FAULT);  // We can only BBJ_EHFINALLYRET from FINALLY and FAULT.
12858                     // A FAULT block has no successors.
12859                     return 0;
12860                 }
12861             }
12862         }
12863     case BBJ_CALLFINALLY:
12864     case BBJ_ALWAYS:
12865     case BBJ_EHCATCHRET:
12866     case BBJ_LEAVE:
12867     case BBJ_NONE:
12868         return 1;
12869     case BBJ_COND:
12870         if (bbJumpDest == bbNext)
12871         {
12872             return 1;
12873         }
12874         else
12875         {
12876             return 2;
12877         }
12878     case BBJ_SWITCH:
12879         if (comp == NULL)
12880         {
12881             return bbJumpSwt->bbsCount;
12882         } else
12883         {
12884             Compiler::SwitchUniqueSuccSet sd = comp->GetDescriptorForSwitch(this);
12885             return sd.numDistinctSuccs;
12886         }
12887
12888     default:
12889         unreached();
12890     }
12891 }
12892
12893
12894 BasicBlock* BasicBlock::GetSucc(unsigned i, Compiler * comp)
12895 {
12896     // As described in the spec comment of GetSucc at its declaration, whether "comp" is null determines
12897     // whether NumSucc and GetSucc yield successors of finally blocks.  
12898
12899     assert(i < NumSucc(comp));  // Index bounds check.
12900     //printf("bbjk=%d\n", bbJumpKind);
12901     switch (bbJumpKind)
12902     {
12903
12904     case BBJ_THROW:
12905     case BBJ_RETURN:
12906         unreached();  // Should have been covered by assert above.
12907
12908     case BBJ_EHFILTERRET:
12909     {
12910         assert(comp != NULL); // Or else we're not looking for successors.
12911         BasicBlock* result = comp->fgFirstBlockOfHandler(this);
12912         noway_assert(result == bbJumpDest);
12913         // Handler is the (sole) normal successor of the filter.
12914         return result;
12915     }
12916
12917     case BBJ_EHFINALLYRET:
12918         return comp->fgSuccOfFinallyRet(this, i);
12919
12920     case BBJ_CALLFINALLY:
12921     case BBJ_ALWAYS:
12922     case BBJ_EHCATCHRET:
12923     case BBJ_LEAVE:
12924         return bbJumpDest;
12925
12926     case BBJ_NONE:
12927         return bbNext;
12928     case BBJ_COND:
12929         if (i == 0)
12930         {
12931             return bbNext;
12932         }
12933         else
12934         {
12935             assert(i == 1);
12936             return bbJumpDest;
12937         };
12938     case BBJ_SWITCH:
12939         if (comp == NULL)
12940         {
12941             assert(i < bbJumpSwt->bbsCount);  // Range check.
12942             return bbJumpSwt->bbsDstTab[i];
12943         }
12944         else
12945         {
12946             // Remove duplicates.
12947             Compiler::SwitchUniqueSuccSet sd = comp->GetDescriptorForSwitch(this);
12948             assert(i < sd.numDistinctSuccs);  // Range check.
12949             return sd.nonDuplicates[i];
12950         }
12951
12952     default:
12953         unreached();
12954     }
12955 }
12956
12957 // -------------------------------------------------------------------------
12958 // IsRegOptional: Returns true if this gentree node is marked by lowering to
12959 // indicate that codegen can still generate code even if it wasn't allocated
12960 // a register.
12961 bool GenTree::IsRegOptional() const
12962 {
12963 #ifdef LEGACY_BACKEND
12964     return false;
12965 #else
12966     return gtLsraInfo.regOptional;
12967 #endif
12968 }
12969
12970 bool GenTree::IsPhiDefn()
12971 {
12972     bool res = 
12973         OperGet() == GT_ASG
12974         && gtOp.gtOp2 != NULL
12975         && gtOp.gtOp2->OperGet() == GT_PHI;
12976     assert(!res || gtOp.gtOp1->OperGet() == GT_LCL_VAR);
12977     return res;
12978 }
12979
12980 bool GenTree::IsPhiDefnStmt()
12981 {
12982     if (OperGet() != GT_STMT) return false;
12983     GenTreePtr asg = gtStmt.gtStmtExpr;
12984     return asg->IsPhiDefn();
12985 }
12986
12987 // IsPartialLclFld: Check for a GT_LCL_FLD whose type is a different size than the lclVar.
12988 //
12989 // Arguments:
12990 //    comp      - the Compiler object.
12991 //
12992 // Return Value:
12993 //    Returns "true" iff 'this' is a GT_LCL_FLD or GT_STORE_LCL_FLD on which the type
12994 //    is not the same size as the type of the GT_LCL_VAR
12995
12996 bool GenTree::IsPartialLclFld(Compiler* comp)
12997 {
12998     return ((gtOper == GT_LCL_FLD) &&
12999             (comp->lvaTable[this->gtLclVarCommon.gtLclNum].lvExactSize  != genTypeSize(gtType)));
13000 }
13001
13002 bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire)
13003 {
13004     if (OperIsAssignment())
13005     {
13006         if (gtOp.gtOp1->IsLocal())
13007         {
13008             GenTreeLclVarCommon* lclVarTree = gtOp.gtOp1->AsLclVarCommon();
13009             *pLclVarTree = lclVarTree;
13010             if (pIsEntire != nullptr)
13011             {
13012                 if (lclVarTree->IsPartialLclFld(comp))
13013                 {
13014                     *pIsEntire = false;
13015                 }
13016                 else
13017                 {
13018                     *pIsEntire = true;
13019                 }
13020             }
13021             return true;
13022         }
13023         else if (gtOp.gtOp1->OperGet() == GT_IND)
13024         {
13025             GenTreePtr indArg = gtOp.gtOp1->gtOp.gtOp1;
13026             return indArg->DefinesLocalAddr(comp, genTypeSize(gtOp.gtOp1->TypeGet()), pLclVarTree, pIsEntire);
13027         }
13028     }
13029     else if (OperIsBlkOp())
13030     {
13031         GenTreePtr destAddr = gtOp.gtOp1->gtOp.gtOp1;
13032         unsigned width = 0;
13033         // Do we care about whether this assigns the entire variable?
13034         if (pIsEntire != NULL)
13035         {
13036             GenTreePtr blockWidth = gtOp.gtOp2;
13037             if (blockWidth->IsCnsIntOrI())
13038             {
13039                 if (blockWidth->IsIconHandle())
13040                 {
13041                     // If it's a handle, it must be a class handle.  We only create such block operations
13042                     // for initialization of struct types, so the type of the argument(s) will match this
13043                     // type, by construction, and be "entire".
13044                     assert(blockWidth->IsIconHandle(GTF_ICON_CLASS_HDL));
13045                     width = comp->info.compCompHnd->getClassSize(CORINFO_CLASS_HANDLE(blockWidth->gtIntConCommon.IconValue()));
13046                 }
13047                 else
13048                 {
13049                     ssize_t swidth = blockWidth->AsIntConCommon()->IconValue();
13050                     assert(swidth >= 0);
13051                     // cpblk of size zero exists in the wild (in yacc-generated code in SQL) and is valid IL.
13052                     if (swidth == 0)
13053                         return false;
13054                     width = unsigned(swidth);
13055                 }
13056             }
13057         }
13058         return destAddr->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
13059     }
13060     // Otherwise...
13061     return false;
13062 }
13063
13064 // Returns true if this GenTree defines a result which is based on the address of a local.
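// An illustrative sketch of tree shapes this recognizes (shapes only, not verbatim dumps):
//
//     ADDR(LCL_VAR V02)                     // defines the address of V02 directly
//     ADD(ADDR(LCL_FLD V02), CNS_INT 0)     // adding zero still resolves to V02 with 'width' kept
//
// This lets DefinesLocal (above) see through address arithmetic when deciding whether an
// indirect store or block op defines a particular local.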
13065 bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire)
13066 {
13067     if (OperGet() == GT_ADDR || OperGet() == GT_LCL_VAR_ADDR)
13068     {
13069         GenTreePtr addrArg = this;
13070         if (OperGet() == GT_ADDR)
13071         {
13072             addrArg = gtOp.gtOp1;
13073         }
13074
13075         if (addrArg->IsLocal() || addrArg->OperIsLocalAddr())
13076         {
13077             GenTreeLclVarCommon* addrArgLcl = addrArg->AsLclVarCommon();
13078             *pLclVarTree = addrArgLcl;
13079             if (pIsEntire != NULL)
13080             {
13081                 unsigned lclNum = addrArgLcl->GetLclNum();
13082                 unsigned varWidth = comp->lvaLclExactSize(lclNum);
13083                 if (comp->lvaTable[lclNum].lvNormalizeOnStore())
13084                 {
13085                     // It's normalize on store, so use the full storage width -- writing to low bytes won't
13086                     // necessarily yield a normalized value.
13087                     varWidth = genTypeStSz(var_types(comp->lvaTable[lclNum].lvType)) * sizeof(int);
13088                 }
13089                 *pIsEntire = (varWidth == width);
13090             }
13091             return true;
13092         }
13093         else if (addrArg->OperGet() == GT_IND)
13094         {
13095             // A GT_ADDR of a GT_IND can both be optimized away, recurse using the child of the GT_IND
13096             return addrArg->gtOp.gtOp1->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
13097         }
13098     }
13099     else if (OperGet() == GT_ADD)
13100     {
13101         if (gtOp.gtOp1->IsCnsIntOrI())
13102         {
13103             // If we are just adding a zero then we allow an IsEntire match against width;
13104             //  otherwise we change width to zero to disallow an IsEntire match.
13105             return gtOp.gtOp2->DefinesLocalAddr(comp, gtOp.gtOp1->IsIntegralConst(0) ? width : 0, pLclVarTree, pIsEntire);
13106         }
13107         else if (gtOp.gtOp2->IsCnsIntOrI())
13108         {
13109             // If we are just adding a zero then we allow an IsEntire match against width;
13110             //  otherwise we change width to zero to disallow an IsEntire match.
13111             return gtOp.gtOp1->DefinesLocalAddr(comp,  gtOp.gtOp2->IsIntegralConst(0) ? width : 0, pLclVarTree, pIsEntire);
13112         }
13113     }
13114     // Post rationalization we could have GT_IND(GT_LEA(..)) trees.
13115     else if (OperGet() == GT_LEA)
13116     {
13117         // This method gets invoked during liveness computation and therefore it is critical
13118         // that we don't miss 'use' of any local.  The below logic is making the assumption
13119         // that in case of LEA(base, index, offset) - only base can be a GT_LCL_VAR_ADDR
13120         // and index is not.
13121 #ifdef DEBUG
13122         GenTreePtr index = gtOp.gtOp2;
13123         if (index != nullptr)
13124         {
13125             assert(!index->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire));
13126         }
13127 #endif //DEBUG
13128
13129         // base
13130         GenTreePtr base = gtOp.gtOp1;
13131         if (base != nullptr)
13132         {
13133             // Lea could have an Indir as its base.
13134             if (base->OperGet() == GT_IND)
13135             {
13136                 base = base->gtOp.gtOp1->gtEffectiveVal(/*commas only*/ true);
13137             }
13138             return base->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
13139         }
13140     }
13141     // Otherwise...
13142     return false;
13143 }
13144
13145 //------------------------------------------------------------------------
13146 // IsLocalExpr: Determine if this is a LclVarCommon node and return some
13147 //              additional info about it in the two out parameters.
13148 //
13149 // Arguments:
13150 //    comp        - The Compiler instance
13151 //    pLclVarTree - An "out" argument that returns the local tree as a
13152 //                  LclVarCommon, if it is indeed local.
13153 //    pFldSeq     - An "out" argument that returns the value numbering field
13154 //                  sequence for the node, if any.
13155 //
13156 // Return Value:
13157 //    Returns true, and sets the out arguments accordingly, if this is
13158 //    a LclVarCommon node.
13159
13160 bool GenTree::IsLocalExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq)
13161 {
13162     if (IsLocal())  // Note that this covers "GT_LCL_FLD." 
13163     {
13164         *pLclVarTree = AsLclVarCommon();
13165         if (OperGet() == GT_LCL_FLD)
13166         {
13167             // Otherwise, prepend this field to whatever we've already accumulated outside in.
13168             *pFldSeq = comp->GetFieldSeqStore()->Append(AsLclFld()->gtFieldSeq, *pFldSeq);
13169         }
13170         return true;
13171     }
13172     else
13173     {
13174         return false;
13175     }
13176 }
13177
13178 // If this tree evaluates some sum of a local address and some constants,
13179 // return the node for the local being addressed
13180
13181 GenTreeLclVarCommon* GenTree::IsLocalAddrExpr()
13182 {
13183     if (OperGet() == GT_ADDR)
13184     {
13185         return gtOp.gtOp1->IsLocal() ? gtOp.gtOp1->AsLclVarCommon() : nullptr;
13186     }
13187     else if (OperIsLocalAddr())
13188     {
13189         return this->AsLclVarCommon();
13190     }
13191     else if (OperGet() == GT_ADD)
13192     {
13193         if (gtOp.gtOp1->OperGet() == GT_CNS_INT)
13194         {
13195             return gtOp.gtOp2->IsLocalAddrExpr();
13196         }
13197         else if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
13198         {
13199             return gtOp.gtOp1->IsLocalAddrExpr();
13200         }
13201     }
13202     // Otherwise...
13203     return nullptr;
13204 }
13205
13206
13207 bool GenTree::IsLocalAddrExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq)
13208 {
13209     if (OperGet() == GT_ADDR)
13210     {
13211         assert(!comp->compRationalIRForm);
13212         GenTreePtr addrArg = gtOp.gtOp1;
13213         if (addrArg->IsLocal())  // Note that this covers "GT_LCL_FLD." 
13214         {
13215             *pLclVarTree = addrArg->AsLclVarCommon();
13216             if (addrArg->OperGet() == GT_LCL_FLD)
13217             {
13218                 // Otherwise, prepend this field to whatever we've already accumulated outside in.
13219                 *pFldSeq = comp->GetFieldSeqStore()->Append(addrArg->AsLclFld()->gtFieldSeq, *pFldSeq);
13220             }
13221             return true;
13222         }
13223         else
13224         {
13225             return false;
13226         }
13227     }
13228     else if (OperIsLocalAddr())
13229     {
13230         *pLclVarTree = this->AsLclVarCommon();
13231         if (this->OperGet() == GT_LCL_FLD_ADDR)
13232         {
13233             *pFldSeq = comp->GetFieldSeqStore()->Append(this->AsLclFld()->gtFieldSeq, *pFldSeq);
13234         }
13235         return true;
13236     }
13237     else if (OperGet() == GT_ADD)
13238     {
13239         if (gtOp.gtOp1->OperGet() == GT_CNS_INT)
13240         {
13241             if (gtOp.gtOp1->AsIntCon()->gtFieldSeq == NULL)
13242                 return false;
13243             // Otherwise, prepend this field to whatever we've already accumulated outside in.
13244             *pFldSeq = comp->GetFieldSeqStore()->Append(gtOp.gtOp1->AsIntCon()->gtFieldSeq, *pFldSeq);
13245             return gtOp.gtOp2->IsLocalAddrExpr(comp, pLclVarTree, pFldSeq);
13246         }
13247         else if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
13248         {
13249             if (gtOp.gtOp2->AsIntCon()->gtFieldSeq == NULL)
13250                 return false;
13251             // Otherwise, prepend this field to whatever we've already accumulated outside in.
13252             *pFldSeq = comp->GetFieldSeqStore()->Append(gtOp.gtOp2->AsIntCon()->gtFieldSeq, *pFldSeq);
13253             return gtOp.gtOp1->IsLocalAddrExpr(comp, pLclVarTree, pFldSeq);
13254         }
13255     }
13256     // Otherwise...
13257     return false;
13258 }
13259
13260 //------------------------------------------------------------------------
13261 // IsLclVarUpdateTree: Determine whether this is an assignment tree of the
13262 //                     form Vn = Vn 'oper' 'otherTree' where Vn is a lclVar
13263 //
13264 // Arguments:
13265 //    pOtherTree - An "out" argument in which 'otherTree' will be returned.
13266 //    pOper      - An "out" argument in which 'oper' will be returned.
13267 //
13268 // Return Value:
13269 //    If the tree is of the above form, the lclNum of the variable being
13270 //    updated is returned, and 'pOtherTree' and 'pOper' are set.
13271 //    Otherwise, returns BAD_VAR_NUM.
13272 //
13273 // Notes:
13274 //    'otherTree' can have any shape.
13275 //     We avoid worrying about whether the op is commutative by only considering the
13276 //     first operand of the rhs. It is expected that most trees of this form will
13277 //     already have the lclVar on the lhs.
13278 //     TODO-CQ: Evaluate whether there are missed opportunities due to this, or
13279 //     whether gtSetEvalOrder will already have put the lclVar on the lhs in
13280 //     the cases of interest.
13281
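// For example (an illustrative sketch): given a tree of the form
//
//     ASG(LCL_VAR V03, ADD(LCL_VAR V03, <otherTree>))
//
// this returns 3, sets *pOper to GT_ADD, and sets *pOtherTree to <otherTree>.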
13282 unsigned
13283 GenTree::IsLclVarUpdateTree(GenTree** pOtherTree, genTreeOps *pOper)
13284 {
13285     unsigned lclNum = BAD_VAR_NUM;
13286     if (OperIsAssignment())
13287     {
13288         GenTree* lhs = gtOp.gtOp1;
13289         if (lhs->OperGet() == GT_LCL_VAR)
13290         {
13291             unsigned lhsLclNum = lhs->AsLclVarCommon()->gtLclNum;
13292             if (gtOper == GT_ASG)
13293             {
13294                 GenTree* rhs = gtOp.gtOp2;
13295                 if (rhs->OperIsBinary() &&
13296                     (rhs->gtOp.gtOp1->gtOper == GT_LCL_VAR) &&
13297                     (rhs->gtOp.gtOp1->AsLclVarCommon()->gtLclNum == lhsLclNum))
13298                 {
13299                     lclNum = lhsLclNum;
13300                     *pOtherTree = rhs->gtOp.gtOp2;
13301                     *pOper = rhs->gtOper;
13302                 }
13303             }
13304             else
13305             {
13306                 lclNum = lhsLclNum;
13307                 *pOper = GenTree::OpAsgToOper(gtOper);
13308                 *pOtherTree = gtOp.gtOp2;
13309             }
13310         }
13311     }
13312     return lclNum;
13313 }
13314
13315 // Return true if this tree node is a subcomponent of its parent for codegen purposes
13316 // (essentially, it will be rolled into the same instruction).
13317 // Note that this method relies upon the value of the gtRegNum field to determine
13318 // whether the tree node is contained or not.  Therefore you cannot call this method
13319 // until after the LSRA phase has allocated physical registers to the tree nodes.
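// For instance (a hedged example, not a dump from this code base): in a read-modify-write tree
// like STOREIND(addr, ADD(IND(addr), CNS_INT 1)), lowering may mark the IND and the constant as
// contained so that codegen emits a single "add [mem], 1" style instruction; isContained() then
// returns true for those operand nodes.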
13320 bool GenTree::isContained() const
13321 {
13322     if (isContainedSpillTemp())
13323     {
13324         return true;
13325     }
13326
13327     if (gtHasReg())
13328     {
13329         return false;
13330     }
13331
13332     // these actually produce a register (the flags reg, we just don't model it)
13333     // and are a separate instruction from the branch that consumes the result
13334     if (OperKind() & GTK_RELOP)
13335     {
13336         return false;
13337     }
13338
13339     // TODO-Cleanup : this is not clean, would be nice to have some way of marking this.
13340     switch (OperGet())
13341     {
13342     case GT_STOREIND:
13343     case GT_JTRUE:
13344     case GT_RETURN:
13345     case GT_STORE_LCL_FLD:
13346     case GT_STORE_LCL_VAR:
13347     case GT_ARR_BOUNDS_CHECK:
13348     case GT_LOCKADD:
13349     case GT_NOP:
13350     case GT_NO_OP:
13351     case GT_START_NONGC:
13352     case GT_PROF_HOOK:
13353     case GT_RETURNTRAP:
13354     case GT_COMMA:
13355     case GT_PINVOKE_PROLOG:
13356     case GT_PHYSREGDST:
13357     case GT_PUTARG_STK:
13358     case GT_MEMORYBARRIER:
13359     case GT_COPYBLK:
13360     case GT_INITBLK:
13361     case GT_COPYOBJ:
13362     case GT_SWITCH:
13363     case GT_JMPTABLE:
13364     case GT_SWITCH_TABLE:
13365     case GT_SWAP:
13366     case GT_LCLHEAP:
13367     case GT_CKFINITE:
13368     case GT_JMP:
13369 #ifdef FEATURE_SIMD
13370     case GT_SIMD_CHK:
13371 #endif // FEATURE_SIMD
13372
13373 #if !FEATURE_EH_FUNCLETS
13374     case GT_END_LFIN:
13375 #endif
13376         return false;
13377
13378 #if !defined(LEGACY_BACKEND) && !defined(_TARGET_64BIT_)
13379     case GT_LONG:
13380         // GT_LONG nodes are normally contained. The only exception is when the result
13381         // of a TYP_LONG operation is not used and this can only happen if the GT_LONG
13382         // is the last node in the statement (in linear order).
13383         return gtNext != nullptr;
13384 #endif
13385
13386     case GT_CALL:
13387         // Note: if you hit this assert you are probably calling isContained()
13388         // before the LSRA phase has allocated physical registers to the tree nodes.
13389         // 
13390         assert(gtType == TYP_VOID);
13391         return false;
13392     case GT_RETFILT:
13393         if (gtType == TYP_VOID)
13394             return false; // endfinally case
13395
13396         __fallthrough;
13397
13398     default:
13399         // if it's contained it better have a parent
13400         assert(gtNext || OperIsLocal());
13401         return true;
13402     }
13403 }
13404
13405 // return true if node is contained and an indir
13406 bool GenTree::isContainedIndir() const
13407 {
13408     return isContained() && isIndir(); 
13409 }
13410
13411 bool GenTree::isIndirAddrMode()
13412 {
13413     return isIndir() && AsIndir()->Addr()->OperIsAddrMode() && AsIndir()->Addr()->isContained(); 
13414 }
13415
13416 bool GenTree::isIndir() const
13417 {
13418     return OperGet() == GT_IND || OperGet() == GT_STOREIND; 
13419 }
13420
13421 bool GenTreeIndir::HasBase()
13422 {
13423     return Base() != nullptr;
13424 }
13425
13426 bool GenTreeIndir::HasIndex()
13427 {
13428     return Index() != nullptr;
13429 }
13430
13431 GenTreePtr GenTreeIndir::Base()
13432 {
13433     GenTreePtr addr = Addr();
13434
13435     if (isIndirAddrMode())
13436     {
13437         GenTree* result = addr->AsAddrMode()->Base();
13438         if (result != nullptr)
13439         {
13440             result = result->gtEffectiveVal();
13441         }
13442         return result;
13443     }
13444     else
13445     {
13446         return addr;    // TODO: why do we return 'addr' here, but we return 'nullptr' in the equivalent Index() case?
13447     }
13448 }
13449
13450 GenTree* GenTreeIndir::Index()
13451 {
13452     if (isIndirAddrMode())
13453     {
13454         GenTree* result = Addr()->AsAddrMode()->Index();
13455         if (result != nullptr)
13456         {
13457             result = result->gtEffectiveVal();
13458         }
13459         return result;
13460     }
13461     else
13462     {
13463         return nullptr;
13464     }
13465 }
13466
13467 unsigned GenTreeIndir::Scale()
13468 {
13469     if (HasIndex())
13470         return Addr()->AsAddrMode()->gtScale;
13471     else
13472         return 1;
13473 }
13474
13475 size_t GenTreeIndir::Offset()
13476 {
13477     if (isIndirAddrMode())
13478         return Addr()->AsAddrMode()->gtOffset;
13479     else if (Addr()->gtOper == GT_CLS_VAR_ADDR)
13480         return (size_t) Addr()->gtClsVar.gtClsVarHnd;
13481     else if (Addr()->IsCnsIntOrI() && Addr()->isContained())
13482         return Addr()->AsIntConCommon()->IconValue();
13483     else
13484         return 0;
13485 }
13486
13487 //------------------------------------------------------------------------
13488 // GenTreeIntConCommon::ImmedValNeedsReloc: does this immediate value need recording a relocation with the VM?
13489 //
13490 // Arguments:
13491 //    comp - Compiler instance
13492 //
13493 // Return Value:
13494 //    True if this immediate value needs recording a relocation with the VM; false otherwise.
13495
13496 bool GenTreeIntConCommon::ImmedValNeedsReloc(Compiler* comp)
13497 {
13498 #ifdef RELOC_SUPPORT
13499     return comp->opts.compReloc && (gtOper == GT_CNS_INT) && IsIconHandle();
13500 #else
13501     return false;
13502 #endif
13503 }
13504
13505 //------------------------------------------------------------------------
13506 // ImmedValCanBeFolded: can this immediate value be folded for op?
13507 //
13508 // Arguments:
13509 //    comp - Compiler instance
13510 //    op - Tree operator
13511 //
13512 // Return Value:
13513 //    True if this immediate value can be folded for op; false otherwise.
13514
13515 bool GenTreeIntConCommon::ImmedValCanBeFolded(Compiler* comp, genTreeOps op)
13516 {
13517     // In general, immediate values that need relocations can't be folded.
13518     // There are cases where we do want to allow folding of handle comparisons
13519     // (e.g., typeof(T) == typeof(int)).
13520     return !ImmedValNeedsReloc(comp) || (op == GT_EQ) || (op == GT_NE);
13521 }
13522
13523 #ifdef _TARGET_AMD64_
13524 // Returns true if this absolute address fits within the base of an addr mode.
13525 // On Amd64 this effectively means, whether an absolute indirect address can
13526 // be encoded as 32-bit offset relative to IP or zero.
13527 bool GenTreeIntConCommon::FitsInAddrBase(Compiler* comp)
13528 {
13529 #ifndef LEGACY_BACKEND
13530 #ifdef DEBUG
13531     // Early out if PC-rel encoding of absolute addr is disabled.
13532     if (!comp->opts.compEnablePCRelAddr)
13533     {
13534         return false;
13535     }
13536 #endif
13537 #endif //!LEGACY_BACKEND
13538
13539     if (comp->opts.compReloc)
13540     {
13541         // During Ngen JIT is always asked to generate relocatable code.
13542         // Hence JIT will try to encode only icon handles as pc-relative offsets.
13543         return IsIconHandle() && (IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void *)IconValue()));
13544     }
13545     else
13546     {
13547         // During Jitting, we are allowed to generate non-relocatable code.
13548         // On Amd64 we can encode an absolute indirect addr as an offset relative to zero or RIP.
13549         // An absolute indir addr that can fit within 32-bits can be encoded as an offset relative
13550         // to zero. All other absolute indir addr could be attempted to be encoded as RIP relative
13551         // based on reloc hint provided by VM.  RIP relative encoding is preferred over relative
13552         // to zero, because the former is one byte smaller than the latter.  For this reason
13553         // we check for reloc hint first and then whether addr fits in 32-bits next.
13554         // 
13555         // VM starts off with an initial state to allow both data and code address to be encoded as
13556         // pc-relative offsets.  Hence JIT will attempt to encode all absolute addresses as pc-relative
13557         // offsets.  It is possible while jitting a method, an address could not be encoded as a
13558         // pc-relative offset.  In that case VM will note the overflow and will trigger re-jitting
13559         // of the method with reloc hints turned off for all future methods. Second time around
13560         // jitting will succeed since JIT will not attempt to encode data addresses as pc-relative
13561         // offsets.  Note that JIT will always attempt to relocate code addresses (e.g. call addr).
13562         // After an overflow, VM will assume any relocation recorded is for a code address and will
13563         // emit jump thunk if it cannot be encoded as pc-relative offset.
13564         return (IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void *)IconValue())) || FitsInI32();
13565     }    
13566 }
13567
13568 // Returns true if this icon value, encoded as an address, needs recording a relocation with the VM
13569 bool GenTreeIntConCommon::AddrNeedsReloc(Compiler* comp)
13570 {
13571     if (comp->opts.compReloc)
13572     {
13573         // During Ngen JIT is always asked to generate relocatable code.
13574         // Hence JIT will try to encode only icon handles as pc-relative offsets.
13575         return IsIconHandle() && (IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void *)IconValue()));
13576     }
13577     else
13578     {
13579         return IMAGE_REL_BASED_REL32 == comp->eeGetRelocTypeHint((void *)IconValue());
13580     }
13581 }
13582
13583 #elif defined(_TARGET_X86_)
13584 // Returns true if this absolute address fits within the base of an addr mode.
13585 // On x86 all addresses are 4-bytes and can be directly encoded in an addr mode.
13586 bool GenTreeIntConCommon::FitsInAddrBase(Compiler* comp)
13587 {
13588 #ifndef LEGACY_BACKEND
13589 #ifdef DEBUG
13590     // Early out if PC-rel encoding of absolute addr is disabled.
13591     if (!comp->opts.compEnablePCRelAddr)
13592     {
13593         return false;
13594     }
13595 #endif
13596 #endif //!LEGACY_BACKEND
13597
13598     //TODO-x86 - TLS field handles are excluded for now as they are accessed relative to the FS segment.
13599     //Handling of TLS field handles is NYI; this needs to be revisited once that support is implemented.
13600     return IsCnsIntOrI() && !IsIconHandle(GTF_ICON_TLS_HDL);
13601 }
13602
13603 // Returns true if this icon value, when used as an address, requires a relocation to be recorded with the VM
13604 bool GenTreeIntConCommon::AddrNeedsReloc(Compiler* comp)
13605 {
13606     //If generating relocatable code, icon handles should be reported so that relocations are recorded.
13607     return comp->opts.compReloc && IsIconHandle();
13608 }
13609 #endif //_TARGET_X86_
13610
13611 bool GenTree::IsFieldAddr(Compiler* comp, GenTreePtr* pObj, GenTreePtr* pStatic, FieldSeqNode** pFldSeq)
13612 {
13613     FieldSeqNode* newFldSeq    = nullptr;
13614     GenTreePtr    baseAddr     = nullptr;
13615     bool          mustBeStatic = false;
13616
13617     FieldSeqNode* statStructFldSeq = nullptr;
13618     if (TypeGet() == TYP_REF)
13619     {
13620         // Recognize struct static field patterns...
13621         if (OperGet() == GT_IND)
13622         {
13623             GenTreePtr addr = gtOp.gtOp1;
13624             GenTreeIntCon* icon = nullptr;
13625             if (addr->OperGet() == GT_CNS_INT)
13626             {
13627                 icon = addr->AsIntCon();
13628             }
13629             else if (addr->OperGet() == GT_ADD)
13630             {
13631                 // op1 should never be a field sequence (or any other kind of handle)
13632                 assert((addr->gtOp.gtOp1->gtOper != GT_CNS_INT) || !addr->gtOp.gtOp1->IsIconHandle());
13633                 if (addr->gtOp.gtOp2->OperGet() == GT_CNS_INT)
13634                 {
13635                     icon = addr->gtOp.gtOp2->AsIntCon();
13636                 }
13637             }
13638             if (   icon != nullptr 
13639                 && !icon->IsIconHandle(GTF_ICON_STR_HDL)           // String handles are a source of TYP_REFs.
13640                 && icon->gtFieldSeq != nullptr
13641                 && icon->gtFieldSeq->m_next == nullptr             // A static field should be a singleton
13642                 // TODO-Review: A pseudoField here indicates an issue - this requires investigation
13643                 // See test case src\ddsuites\src\clr\x86\CoreMangLib\Dev\Globalization\CalendarRegressions.exe
13644                 && !(FieldSeqStore::IsPseudoField(icon->gtFieldSeq->m_fieldHnd))
13645                 && icon->gtFieldSeq != FieldSeqStore::NotAField()) // Ignore non-fields.
13646             {
13647                 statStructFldSeq = icon->gtFieldSeq;
13648             }
13649             else
13650             {
13651                 addr = addr->gtEffectiveVal();
13652
13653                 // Perhaps it's a direct indirection of a helper call or a CSE with a zero-offset annotation.
13654                 if ((addr->OperGet() == GT_CALL) || (addr->OperGet() == GT_LCL_VAR))
13655                 {
13656                     FieldSeqNode* zeroFieldSeq = nullptr;
13657                     if (comp->GetZeroOffsetFieldMap()->Lookup(addr, &zeroFieldSeq))
13658                     {
13659                         if (zeroFieldSeq->m_next == nullptr)
13660                             statStructFldSeq = zeroFieldSeq;
13661                     }
13662                 }
13663             }
13664         }
13665         else if (OperGet() == GT_CLS_VAR)
13666         {
13667             GenTreeClsVar* clsVar = AsClsVar();
13668             if (clsVar->gtFieldSeq != nullptr && clsVar->gtFieldSeq->m_next == nullptr)
13669             {
13670                 statStructFldSeq = clsVar->gtFieldSeq;
13671             }
13672         }
13673         else if (OperIsLocal())
13674         {
13675             // If we have a GT_LCL_VAR, it can be the result of a CSE substitution.
13676             // If it is, then the CSE assignment will have a ValueNum that
13677             // describes the RHS of the CSE assignment.
13678             //
13679             // The CSE could be a pointer to a boxed struct
13680             //
13681             GenTreeLclVarCommon* lclVar = AsLclVarCommon();
13682             ValueNum vn = gtVNPair.GetLiberal();
13683             if (vn != ValueNumStore::NoVN)
13684             {
13685                 // Is the ValueNum a MapSelect involving a SharedStatic helper?
13686                 VNFuncApp funcApp1;
13687                 if (comp->vnStore->GetVNFunc(vn, &funcApp1) && 
13688                     (funcApp1.m_func == VNF_MapSelect)      &&
13689                     (comp->vnStore->IsSharedStatic(funcApp1.m_args[1])))
13690                 {
13691                     ValueNum mapVN = funcApp1.m_args[0]; 
13692                     // Is this new 'mapVN' ValueNum, a MapSelect involving a handle?
13693                     VNFuncApp funcApp2;
13694                     if (comp->vnStore->GetVNFunc(mapVN, &funcApp2) &&
13695                         (funcApp2.m_func == VNF_MapSelect)         &&
13696                         (comp->vnStore->IsVNHandle(funcApp2.m_args[1])))
13697                     {
13698                         ValueNum fldHndVN = funcApp2.m_args[1]; 
13699                         // Is this new 'fldHndVN' VNhandle a FieldHandle?
13700                         unsigned flags = comp->vnStore->GetHandleFlags(fldHndVN);
13701                         if (flags == GTF_ICON_FIELD_HDL)
13702                         {
13703                             CORINFO_FIELD_HANDLE fieldHnd = CORINFO_FIELD_HANDLE(comp->vnStore->ConstantValue<ssize_t>(fldHndVN));
13704
13705                             // Record this field sequence in 'statStructFldSeq' as it is likely to be a Boxed Struct field access.
13706                             statStructFldSeq = comp->GetFieldSeqStore()->CreateSingleton(fieldHnd);
13707                         }
13708                     }
13709                 }
13710             }
13711         }  
13712
13713         if (statStructFldSeq != nullptr)
13714         {
13715             assert(statStructFldSeq->m_next == nullptr);
13716             // Is this a pointer to a boxed struct?
13717             if (comp->gtIsStaticFieldPtrToBoxedStruct(TYP_REF, statStructFldSeq->m_fieldHnd))
13718             {
13719                 *pFldSeq = comp->GetFieldSeqStore()->Append(statStructFldSeq, *pFldSeq);
13720                 *pObj = nullptr;
13721                 *pStatic = this;
13722                 return true;
13723             }
13724         }
13725
13726         // Otherwise...
13727         *pObj = this;
13728         *pStatic = nullptr;
13729         return true;
13730     }
13731     else if (OperGet() == GT_ADD)
13732     {
13733         // op1 should never be a field sequence (or any other kind of handle)
13734         assert((gtOp.gtOp1->gtOper != GT_CNS_INT) || !gtOp.gtOp1->IsIconHandle());
13735         if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
13736         {
13737             newFldSeq = gtOp.gtOp2->AsIntCon()->gtFieldSeq;
13738             baseAddr = gtOp.gtOp1;           
13739         }
13740     }
13741     else 
13742     {
13743         // Check if "this" has a zero-offset annotation.
13744         if (!comp->GetZeroOffsetFieldMap()->Lookup(this, &newFldSeq))
13745         {
13746             // If not, this is not a field address.
13747             return false;
13748         }
13749         else
13750         {
13751             baseAddr = this;
13752             mustBeStatic = true;
13753         }
13754     }
13755
13756     // If we don't have a field seq, it's not a field address.
13757     if (newFldSeq == nullptr || newFldSeq == FieldSeqStore::NotAField())
13758         return false;
13759
13760     // Prepend this field to whatever we've already accumulated (outside-in).
13761     *pFldSeq = comp->GetFieldSeqStore()->Append(newFldSeq, *pFldSeq);
13762
13763     // Is it a static or instance field?
13764     if (!FieldSeqStore::IsPseudoField(newFldSeq->m_fieldHnd) &&
13765          comp->info.compCompHnd->isFieldStatic(newFldSeq->m_fieldHnd))
13766     {
13767         // It is a static field.  We're done.
13768         *pObj = nullptr;
13769         *pStatic = baseAddr;
13770         return true;
13771     }
13772     else if ((baseAddr != nullptr) && !mustBeStatic)
13773     {
13774         // It's an instance field...but it must be for a struct field, since we've not yet encountered
13775         // a "TYP_REF" address.  Analyze the rest of the address.
13776         return baseAddr->gtEffectiveVal()->IsFieldAddr(comp, pObj, pStatic, pFldSeq);
13777     }
13778
13779     // Otherwise...
13780     return false;
13781 }
13782
13783 bool Compiler::gtIsStaticFieldPtrToBoxedStruct(var_types fieldNodeType, CORINFO_FIELD_HANDLE fldHnd)
13784 {
13785     if (fieldNodeType != TYP_REF) return false;
13786     CORINFO_CLASS_HANDLE fldCls = nullptr;
13787     noway_assert(fldHnd != nullptr);
13788     CorInfoType cit = info.compCompHnd->getFieldType(fldHnd, &fldCls);
13789     var_types fieldTyp = JITtype2varType(cit);
13790     return fieldTyp != TYP_REF;
13791 }
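// Illustrative note (an assumption about how the VM lays out value-type statics, not stated in this
// file): for a hypothetical C# declaration "static MyStruct s_field;", the struct's static storage is
// kept inside a boxed instance on the GC heap, so the node addressing it is TYP_REF while the field's
// declared type is a value type.  That mismatch is what the check above (fieldTyp != TYP_REF) detects.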
13792
13793 CORINFO_CLASS_HANDLE Compiler::gtGetStructHandleIfPresent(GenTree* tree)
13794 {
13795     CORINFO_CLASS_HANDLE structHnd = NO_CLASS_HANDLE;
13796     tree = tree->gtEffectiveVal();
13797     if (varTypeIsStruct(tree->gtType))
13798     {
13799         switch(tree->gtOper)
13800         {
13801         default:
13802             break;
13803         case GT_MKREFANY:   structHnd = impGetRefAnyClass();                                     break;
13804         case GT_OBJ:        structHnd = tree->gtObj.gtClass;                                     break;
13805         case GT_CALL:       structHnd = tree->gtCall.gtRetClsHnd;                                break;
13806         case GT_RET_EXPR:   structHnd = tree->gtRetExpr.gtRetClsHnd;                             break;
13807         case GT_ARGPLACE:   structHnd = tree->gtArgPlace.gtArgPlaceClsHnd;                       break;
13808         case GT_INDEX:      structHnd = tree->gtIndex.gtStructElemClass;                         break;
13809         case GT_FIELD:      info.compCompHnd->getFieldType(tree->gtField.gtFldHnd, &structHnd);  break; 
13810         case GT_ASG:
13811             structHnd = gtGetStructHandle(tree->gtGetOp1());
13812             break;
13813         case GT_LCL_VAR:
13814         case GT_LCL_FLD:
13815             structHnd = lvaTable[tree->AsLclVarCommon()->gtLclNum].lvVerTypeInfo.GetClassHandle();
13816             break;
13817         case GT_RETURN:
13818             structHnd = gtGetStructHandleIfPresent(tree->gtOp.gtOp1);
13819             break;
13820         case GT_IND:
13821 #ifdef FEATURE_SIMD
13822             if (varTypeIsSIMD(tree))
13823             {
13824                 structHnd = gtGetStructHandleForSIMD(tree->gtType, TYP_FLOAT);
13825             }
13826             else
13827 #endif
13828             if (tree->gtFlags & GTF_IND_ARR_INDEX)
13829             {
13830                 ArrayInfo arrInfo;
13831                 bool b = GetArrayInfoMap()->Lookup(tree, &arrInfo);
13832                 assert(b);
13833                 structHnd = EncodeElemType(arrInfo.m_elemType, arrInfo.m_elemStructType); 
13834             }
13835             break;
13836 #ifdef FEATURE_SIMD
13837         case GT_SIMD:
13838             structHnd = gtGetStructHandleForSIMD(tree->gtType, tree->AsSIMD()->gtSIMDBaseType);
13839 #endif // FEATURE_SIMD
13840             break;
13841         }
13842     }
13843     return structHnd;
13844 }
13845
13846 CORINFO_CLASS_HANDLE Compiler::gtGetStructHandle(GenTree* tree)
13847 {
13848     CORINFO_CLASS_HANDLE structHnd = gtGetStructHandleIfPresent(tree);
13849     assert(structHnd != NO_CLASS_HANDLE);
13850     return structHnd;
13851 }
13852
13853 void GenTree::ParseArrayAddress(Compiler* comp, ArrayInfo* arrayInfo, GenTreePtr* pArr, ValueNum* pInxVN, FieldSeqNode** pFldSeq)
13854 {
13855     *pArr = nullptr;
13856     ValueNum inxVN = ValueNumStore::NoVN;
13857     ssize_t offset = 0;
13858     FieldSeqNode* fldSeq = nullptr;
13859
13860     ParseArrayAddressWork(comp, 1, pArr, &inxVN, &offset, &fldSeq);
13861
13862     // If we didn't find an array reference (perhaps it is the constant null?) we will give up.
13863     if (*pArr == nullptr) return;
13864
13865     // OK, now we have to figure out if any part of the "offset" is a constant contribution to the index.
13866     // First, sum the offsets of any fields in fldSeq.
13867     unsigned fieldOffsets = 0;
13868     FieldSeqNode* fldSeqIter = fldSeq;
13869     // Also, find the first non-pseudo field...
13870     assert(*pFldSeq == nullptr);
13871     while (fldSeqIter != nullptr)
13872     {
13873         if (fldSeqIter == FieldSeqStore::NotAField())
13874         {
13875             // TODO-Review: A NotAField here indicates a failure to properly maintain the field sequence
13876             // See test case self_host_tests_x86\jit\regression\CLR-x86-JIT\v1-m12-beta2\ b70992\ b70992.exe
13877             // Safest thing to do here is to drop back to MinOpts
13878             noway_assert(!"fldSeqIter is NotAField() in ParseArrayAddress");
13879         }
13880
13881         if (!FieldSeqStore::IsPseudoField(fldSeqIter->m_fieldHnd))
13882         {
13883             if (*pFldSeq == nullptr)
13884                 *pFldSeq = fldSeqIter;
13885             CORINFO_CLASS_HANDLE fldCls = nullptr;
13886             noway_assert(fldSeqIter->m_fieldHnd != nullptr);
13887             CorInfoType cit = comp->info.compCompHnd->getFieldType(fldSeqIter->m_fieldHnd, &fldCls);
13888             fieldOffsets += comp->compGetTypeSize(cit, fldCls);
13889         }
13890         fldSeqIter = fldSeqIter->m_next;
13891     }
13892
13893     // Is there some portion of the "offset" beyond the first-elem offset and the struct field suffix we just computed?
13894     if (   !FitsIn<ssize_t>(fieldOffsets + arrayInfo->m_elemOffset)
13895         || !FitsIn<ssize_t>(arrayInfo->m_elemSize))
13896     {
13897         // This seems unlikely, but no harm in being safe...
13898         *pInxVN = comp->GetValueNumStore()->VNForExpr(TYP_INT);
13899         return;
13900     }
13901     // Otherwise...
13902     ssize_t offsetAccountedFor = static_cast<ssize_t>(fieldOffsets + arrayInfo->m_elemOffset);
13903     ssize_t elemSize = static_cast<ssize_t>(arrayInfo->m_elemSize);
13904     
13905     ssize_t constIndOffset = offset - offsetAccountedFor;
13906     // This should be divisible by the element size...
13907     assert((constIndOffset % elemSize) == 0);
13908     ssize_t constInd = constIndOffset / elemSize;
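    // Worked example with illustrative numbers (assumes a 16-byte first-element offset for an int[]
    // on a 64-bit target): if offset == 28, fieldOffsets == 0, m_elemOffset == 16 and elemSize == 4,
    // then offsetAccountedFor == 16, constIndOffset == 12 and constInd == 3, i.e. the constant part
    // of the index is element 3.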
13909
13910     ValueNumStore* vnStore = comp->GetValueNumStore();
13911
13912     if (inxVN == ValueNumStore::NoVN)
13913     {
13914         // Must be a constant index.
13915         *pInxVN = vnStore->VNForPtrSizeIntCon(constInd);
13916     }
13917     else
13918     {
13919         //
13920         // Perform ((inxVN / elemSizeVN) + vnForConstInd)
13921         //
13922
13923         // The value associated with the index value number (inxVN) is the offset into the array,
13924         // which has been scaled by element size. We need to recover the array index from that offset
13925         if (vnStore->IsVNConstant(inxVN))
13926         {
13927             ssize_t index = vnStore->CoercedConstantValue<ssize_t>(inxVN);
13928             noway_assert(elemSize > 0 && ((index % elemSize) == 0));
13929             *pInxVN = vnStore->VNForPtrSizeIntCon((index / elemSize) + constInd);
13930         }
13931         else
13932         {
13933             bool canFoldDiv = false;
13934             
13935             // If the index VN is a MUL by elemSize, see if we can eliminate it instead of adding 
13936             // the division by elemSize.
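            // For example (illustrative): if inxVN is a GT_MUL VNFunc application of VN(i) and
            // VNForLongCon(elemSize), the recovered index VN is simply VN(i) and no divide-by-elemSize
            // node needs to be introduced.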
13937             VNFuncApp funcApp;
13938             if (vnStore->GetVNFunc(inxVN, &funcApp) && funcApp.m_func == (VNFunc) GT_MUL)
13939             {
13940                 ValueNum vnForElemSize = vnStore->VNForLongCon(elemSize);
13941
13942                 // One of the multiply operands is elemSize, so the resulting
13943                 // index VN should simply be the other operand.
13944                 if (funcApp.m_args[1] == vnForElemSize)
13945                 {
13946                     *pInxVN = funcApp.m_args[0];
13947                     canFoldDiv = true;
13948                 }
13949                 else if (funcApp.m_args[0] == vnForElemSize)
13950                 {
13951                     *pInxVN = funcApp.m_args[1];
13952                     canFoldDiv = true;
13953                 }
13954             }
13955
13956             // Perform ((inxVN / elemSizeVN) + vnForConstInd)
13957             if (!canFoldDiv)
13958             {
13959                 ValueNum vnForElemSize = vnStore->VNForPtrSizeIntCon(elemSize);
13960                 ValueNum vnForScaledInx = vnStore->VNForFunc(TYP_I_IMPL, GetVNFuncForOper(GT_DIV, false), inxVN, vnForElemSize);
13961                 *pInxVN = vnForScaledInx;
13962             }
13963
13964             if (constInd != 0)
13965             {
13966                 ValueNum vnForConstInd = comp->GetValueNumStore()->VNForPtrSizeIntCon(constInd);
13967                 *pInxVN = comp->GetValueNumStore()->VNForFunc(TYP_I_IMPL, GetVNFuncForOper(GT_ADD, (gtFlags & GTF_UNSIGNED) != 0), *pInxVN, vnForConstInd);
13968             }
13969         }
13970     }
13971 }
13972
13973 void GenTree::ParseArrayAddressWork(Compiler* comp, ssize_t inputMul, GenTreePtr* pArr, ValueNum* pInxVN, ssize_t* pOffset, FieldSeqNode** pFldSeq)
13974 {
13975     if (TypeGet() == TYP_REF)
13976     {
13977         // This must be the array pointer.
13978         *pArr = this;
13979         assert(inputMul == 1);  // Can't multiply the array pointer by anything.
13980     }
13981     else
13982     {
13983         switch (OperGet())
13984         {
13985         case GT_CNS_INT:
13986             *pFldSeq = comp->GetFieldSeqStore()->Append(*pFldSeq, gtIntCon.gtFieldSeq);
13987             *pOffset += (inputMul * gtIntCon.gtIconVal);
13988             return;
13989             
13990         case GT_ADD:
13991         case GT_SUB:
13992             gtOp.gtOp1->ParseArrayAddressWork(comp, inputMul, pArr, pInxVN, pOffset, pFldSeq);
13993             if (OperGet() == GT_SUB)
13994                 inputMul = -inputMul;
13995             gtOp.gtOp2->ParseArrayAddressWork(comp, inputMul, pArr, pInxVN, pOffset, pFldSeq);
13996             return;
13997
13998         case GT_MUL:
13999             {
14000                 // If one op is a constant, continue parsing down.
14001                 ssize_t subMul = 0;
14002                 GenTreePtr nonConst = nullptr;
14003                 if (gtOp.gtOp1->IsCnsIntOrI())
14004                 {
14005                     // If the other arg is an int constant, and is a "not-a-field", choose
14006                     // that as the multiplier, thus preserving constant index offsets...
14007                     if (gtOp.gtOp2->OperGet() == GT_CNS_INT && gtOp.gtOp2->gtIntCon.gtFieldSeq == FieldSeqStore::NotAField())
14008                     {
14009                         subMul = gtOp.gtOp2->gtIntConCommon.IconValue();
14010                         nonConst = gtOp.gtOp1;
14011                     }
14012                     else 
14013                     {
14014                         subMul = gtOp.gtOp1->gtIntConCommon.IconValue();
14015                         nonConst = gtOp.gtOp2;
14016                     }
14017                 }
14018                 else if (gtOp.gtOp2->IsCnsIntOrI())
14019                 {
14020                     subMul = gtOp.gtOp2->gtIntConCommon.IconValue();
14021                     nonConst = gtOp.gtOp1;
14022                 }
14023                 if (nonConst != NULL)
14024                 {
14025                     nonConst->ParseArrayAddressWork(comp, inputMul * subMul, pArr, pInxVN, pOffset, pFldSeq);
14026                     return;
14027                 }
14028                 // Otherwise, exit the switch, treat as a contribution to the index.
14029             }
14030             break;
14031
14032         case GT_LSH:
14033             // If one op is a constant, continue parsing down.
14034             if (gtOp.gtOp2->IsCnsIntOrI())
14035             {
14036                 ssize_t subMul = 1 << gtOp.gtOp2->gtIntConCommon.IconValue();
14037                 gtOp.gtOp1->ParseArrayAddressWork(comp, inputMul * subMul, pArr, pInxVN, pOffset, pFldSeq);
14038                 return;
14039             }
14040             // Otherwise, exit the switch, treat as a contribution to the index.
14041             break;
14042             
14043         default:
14044             break;
14045         }
14046         // If we didn't return above, this must be a contribution to the non-constant part of the index VN.
14047         ValueNum vn = comp->GetValueNumStore()->VNNormVal(gtVNPair.GetLiberal());  // We don't care about exceptions for this purpose.
14048         if (inputMul != 1)
14049         {
14050             ValueNum mulVN = comp->GetValueNumStore()->VNForLongCon(inputMul);
14051             vn = comp->GetValueNumStore()->VNForFunc(TypeGet(), GetVNFuncForOper(GT_MUL, false), mulVN, vn);
14052         }
14053         if (*pInxVN == ValueNumStore::NoVN)
14054         {
14055             *pInxVN = vn;
14056         }
14057         else
14058         {
14059             *pInxVN = comp->GetValueNumStore()->VNForFunc(TypeGet(), GetVNFuncForOper(GT_ADD, false), *pInxVN, vn);
14060         }
14061     }
14062 }
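// Sketch of how the recursion above decomposes a typical address (tree shape and names are
// illustrative, not taken from a real dump):
//     ADD(arrayRef, ADD(MUL(i, 8), 24))
// arrayRef is TYP_REF, so it becomes *pArr; the constant 24 accumulates into *pOffset; and MUL(i, 8)
// contributes VN(i) scaled by 8 to *pInxVN via the GT_MUL case, since 8 is a constant operand.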
14063
14064 bool GenTree::ParseArrayElemForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq)
14065 {
14066     if (OperIsIndir())
14067     {
14068         if (gtFlags & GTF_IND_ARR_INDEX)
14069         {
14070             bool b = comp->GetArrayInfoMap()->Lookup(this, arrayInfo);
14071             assert(b);
14072             return true;
14073         }
14074
14075         // Otherwise...
14076         GenTreePtr addr = AsIndir()->Addr();
14077         return addr->ParseArrayElemAddrForm(comp, arrayInfo, pFldSeq);
14078     }
14079     else
14080     {
14081         return false;
14082     }
14083 }
14084
14085 bool GenTree::ParseArrayElemAddrForm(Compiler* comp, ArrayInfo* arrayInfo, FieldSeqNode** pFldSeq)
14086 {
14087     switch (OperGet())
14088     {
14089     case GT_ADD:
14090         {
14091             GenTreePtr arrAddr = nullptr;
14092             GenTreePtr offset = nullptr;
14093             if (gtOp.gtOp1->TypeGet() == TYP_BYREF)
14094             {
14095                 arrAddr = gtOp.gtOp1;
14096                 offset = gtOp.gtOp2;
14097             }
14098             else if (gtOp.gtOp2->TypeGet() == TYP_BYREF)
14099             {
14100                 arrAddr = gtOp.gtOp2;
14101                 offset = gtOp.gtOp1;
14102             }
14103             else
14104             {
14105                 return false;
14106             }
14107             if (!offset->ParseOffsetForm(comp, pFldSeq)) return false;
14108             return arrAddr->ParseArrayElemAddrForm(comp, arrayInfo, pFldSeq);
14109         }
14110            
14111
14112     case GT_ADDR:
14113         {
14114             GenTreePtr addrArg = gtOp.gtOp1;
14115             if (addrArg->OperGet() != GT_IND)
14116             {
14117                 return false;
14118             }
14119             else
14120             {
14121                 // The "Addr" node might be annotated with a zero-offset field sequence.
14122                 FieldSeqNode* zeroOffsetFldSeq = nullptr;
14123                 if (comp->GetZeroOffsetFieldMap()->Lookup(this, &zeroOffsetFldSeq))
14124                 {
14125                     *pFldSeq = comp->GetFieldSeqStore()->Append(*pFldSeq, zeroOffsetFldSeq);
14126                 }
14127                 return addrArg->ParseArrayElemForm(comp, arrayInfo, pFldSeq);
14128             }
14129         }
14130
14131     default:
14132         return false;
14133     }
14134 }
14135
14136 bool GenTree::ParseOffsetForm(Compiler* comp, FieldSeqNode** pFldSeq)
14137 {
14138     switch (OperGet())
14139     {
14140     case GT_CNS_INT:
14141         {
14142             GenTreeIntCon* icon = AsIntCon();
14143             *pFldSeq = comp->GetFieldSeqStore()->Append(*pFldSeq, icon->gtFieldSeq);
14144             return true;
14145         }
14146
14147     case GT_ADD:
14148         if (!gtOp.gtOp1->ParseOffsetForm(comp, pFldSeq)) return false;
14149         return gtOp.gtOp2->ParseOffsetForm(comp, pFldSeq);
14150
14151     default:
14152         return false;
14153     }
14154 }
14155
14156 void GenTree::LabelIndex(Compiler* comp, bool isConst)
14157 {
14158     switch (OperGet())
14159     {
14160     case GT_CNS_INT:
14161         // If we got here, this is a contribution to the constant part of the index.
14162         if (isConst)
14163             gtIntCon.gtFieldSeq = comp->GetFieldSeqStore()->CreateSingleton(FieldSeqStore::ConstantIndexPseudoField);
14164         return;
14165
14166     case GT_LCL_VAR:
14167         gtFlags |= GTF_VAR_ARR_INDEX;
14168         return;
14169
14170     case GT_ADD:
14171     case GT_SUB:
14172         gtOp.gtOp1->LabelIndex(comp, isConst);
14173         gtOp.gtOp2->LabelIndex(comp, isConst);
14174         break;
14175
14176     case GT_CAST:
14177         gtOp.gtOp1->LabelIndex(comp, isConst);
14178         break;
14179
14180     default:
14181         // For all other arithmetic/compare operators, peel off a constant operand (if any) and continue labeling the other; otherwise label both operands as non-constant.
14182         if (OperIsArithmetic() || OperIsCompare())
14183         {
14184             if (gtOp.gtOp2->OperGet() == GT_CNS_INT)
14185             {
14186                 gtOp.gtOp1->LabelIndex(comp, isConst);
14187                 break;
14188             }
14189             else if (gtOp.gtOp1->OperGet() == GT_CNS_INT)
14190             {
14191                 gtOp.gtOp2->LabelIndex(comp, isConst);
14192                 break;
14193             }
14194             // Otherwise continue downward on both, labeling vars.
14195             gtOp.gtOp1->LabelIndex(comp, false);
14196             gtOp.gtOp2->LabelIndex(comp, false);
14197         }
14198         break;
14199     }
14200 }
14201
14202 // static 
14203 FieldSeqNode FieldSeqStore::s_notAField(NULL, NULL);  // Value doesn't matter; exists only to provide a distinguished address.
14204
14205 // FieldSeqStore methods.
14206 FieldSeqStore::FieldSeqStore(IAllocator* alloc) : m_alloc(alloc), m_canonMap(new (alloc) FieldSeqNodeCanonMap(alloc))
14207 {}
14208
14209 FieldSeqNode* FieldSeqStore::CreateSingleton(CORINFO_FIELD_HANDLE fieldHnd)
14210 {
14211     FieldSeqNode fsn(fieldHnd, NULL);
14212     FieldSeqNode* res = NULL;
14213     if (m_canonMap->Lookup(fsn, &res))
14214     {
14215         return res;
14216     }
14217     else
14218     {
14219         res = reinterpret_cast<FieldSeqNode*>(m_alloc->Alloc(sizeof(FieldSeqNode)));
14220         *res = fsn;
14221         m_canonMap->Set(fsn, res);
14222         return res;
14223     }
14224 }
14225
14226 FieldSeqNode* FieldSeqStore::Append(FieldSeqNode* a, FieldSeqNode* b)
14227 {
14228     if (a == NULL) 
14229         return b;
14230     else if (a == NotAField()) 
14231         return NotAField();
14232     else if (b == NULL) 
14233         return a;
14234     else if (b == NotAField()) 
14235         return NotAField();
14236     // Extremely special case for ConstantIndex pseudo-fields -- appending two consecutive such
14237     // fields collapses them into one.
14238     else if (   a->m_next == nullptr 
14239              && a->m_fieldHnd == ConstantIndexPseudoField
14240              && b->m_fieldHnd == ConstantIndexPseudoField)
14241     {
14242         return b;
14243     }
14244     else
14245     {
14246         FieldSeqNode* tmp = Append(a->m_next, b);
14247         FieldSeqNode fsn(a->m_fieldHnd, tmp);
14248         FieldSeqNode* res = NULL;
14249         if (m_canonMap->Lookup(fsn, &res))
14250         {
14251             return res;
14252         }
14253         else
14254         {
14255             res = reinterpret_cast<FieldSeqNode*>(m_alloc->Alloc(sizeof(FieldSeqNode)));
14256             *res = fsn;
14257             m_canonMap->Set(fsn, res);
14258             return res;
14259         }
14260     }
14261 }
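// Behavioral sketch, hand-worked from the cases above (field names illustrative):
//     Append([F1], [F2, F3])  -> the canonical [F1, F2, F3] node; equal sequences are pointer-equal
//     Append(x, NotAField())  -> NotAField()  (likewise when NotAField() is the first argument)
//     Append([CI], [CI, F])   -> [CI, F]      (two adjacent ConstantIndex pseudo-fields collapse)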
14262
14263 // Static vars.
14264 int FieldSeqStore::FirstElemPseudoFieldStruct;
14265 int FieldSeqStore::ConstantIndexPseudoFieldStruct;
14266
14267 CORINFO_FIELD_HANDLE FieldSeqStore::FirstElemPseudoField = (CORINFO_FIELD_HANDLE)&FieldSeqStore::FirstElemPseudoFieldStruct;
14268 CORINFO_FIELD_HANDLE FieldSeqStore::ConstantIndexPseudoField = (CORINFO_FIELD_HANDLE)&FieldSeqStore::ConstantIndexPseudoFieldStruct;
14269
14270 bool FieldSeqNode::IsFirstElemFieldSeq()
14271 {
14272     // this must be non-null per ISO C++
14273     return m_fieldHnd == FieldSeqStore::FirstElemPseudoField;
14274 }
14275
14276 bool FieldSeqNode::IsConstantIndexFieldSeq()
14277 {
14278     // this must be non-null per ISO C++
14279     return m_fieldHnd == FieldSeqStore::ConstantIndexPseudoField;
14280 }
14281
14282 bool FieldSeqNode::IsPseudoField()
14283 {
14284     if (this == nullptr)
14285         return false;
14286     return m_fieldHnd == FieldSeqStore::FirstElemPseudoField || m_fieldHnd == FieldSeqStore::ConstantIndexPseudoField;
14287 }
14288
14289 #ifdef FEATURE_SIMD
14290 GenTreeSIMD* Compiler::gtNewSIMDNode(var_types type, GenTreePtr op1, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
14291 {   
14292     assert(op1 != nullptr);
14293     if (op1->OperGet() == GT_LCL_VAR)
14294     {
14295         unsigned lclNum = op1->AsLclVarCommon()->GetLclNum();
14296         LclVarDsc* lclVarDsc = &lvaTable[lclNum];
14297         lclVarDsc->lvUsedInSIMDIntrinsic = true;
14298     }
14299     
14300     return new (this, GT_SIMD) GenTreeSIMD(type, op1, simdIntrinsicID, baseType, size);
14301 }
14302
14303 GenTreeSIMD* Compiler::gtNewSIMDNode(var_types type, GenTreePtr op1, GenTreePtr op2, SIMDIntrinsicID simdIntrinsicID, var_types baseType, unsigned size)
14304 {
14305     assert(op1 != nullptr);
14306     if (op1->OperIsLocal())
14307     {
14308         unsigned lclNum = op1->AsLclVarCommon()->GetLclNum();
14309         LclVarDsc* lclVarDsc = &lvaTable[lclNum];
14310         lclVarDsc->lvUsedInSIMDIntrinsic = true;
14311     }
14312
14313     if (op2 != nullptr && op2->OperIsLocal())
14314     {
14315         unsigned lclNum = op2->AsLclVarCommon()->GetLclNum();
14316         LclVarDsc* lclVarDsc = &lvaTable[lclNum];
14317         lclVarDsc->lvUsedInSIMDIntrinsic = true;
14318     }
14319     
14320     return new (this, GT_SIMD) GenTreeSIMD(type, op1, op2, simdIntrinsicID, baseType, size);
14321 }
14322
14323 bool GenTree::isCommutativeSIMDIntrinsic()
14324 {
14325     assert(gtOper == GT_SIMD);
14326     switch (AsSIMD()->gtSIMDIntrinsicID)
14327     {
14328     case SIMDIntrinsicAdd:
14329     case SIMDIntrinsicBitwiseAnd:
14330     case SIMDIntrinsicBitwiseOr:
14331     case SIMDIntrinsicBitwiseXor:
14332     case SIMDIntrinsicEqual:
14333     case SIMDIntrinsicMax:
14334     case SIMDIntrinsicMin:
14335     case SIMDIntrinsicMul:
14336     case SIMDIntrinsicOpEquality:
14337     case SIMDIntrinsicOpInEquality:
14338         return true;
14339     default:
14340         return false;
14341     }
14342 }
14343 #endif //FEATURE_SIMD
14344
14345 //-------------------------------------------------------------------------
14346 // InitializeReturnType: Initialize the Return Type Descriptor given a type handle.
14347 // 
14348 // Arguments
14349 //    comp        -  Compiler Instance
14350 //    retClsHnd   -  VM handle to the type returned
14351 //
14352 // Return Value
14353 //    None
14354 //
14355 void ReturnTypeDesc::InitializeReturnType(Compiler* comp, CORINFO_CLASS_HANDLE retClsHnd)
14356 {
14357     assert(!m_inited);
14358
14359 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
14360     assert(retClsHnd != NO_CLASS_HANDLE);
14361
14362     SYSTEMV_AMD64_CORINFO_STRUCT_REG_PASSING_DESCRIPTOR structDesc;
14363     comp->eeGetSystemVAmd64PassStructInRegisterDescriptor(retClsHnd, &structDesc);
14364
14365     if (structDesc.passedInRegisters)
14366     {
14367         for (int i=0; i<structDesc.eightByteCount; i++)
14368         {
14369             assert(i < MAX_RET_REG_COUNT);
14370             m_regType[i] = comp->GetEightByteType(structDesc, i);
14371         }
14372     }
14373
14374 #elif defined(_TARGET_X86_)
14375     // TODO-X86: Assumes we are only using ReturnTypeDesc for longs on x86.
14376     // Will need to be updated in the future to handle other return types
14377     assert(MAX_RET_REG_COUNT == 2);
14378     m_regType[0] = TYP_INT;
14379     m_regType[1] = TYP_INT;
14380 #endif // FEATURE_UNIX_AMD64_STRUCT_PASSING
14381
14382 #ifdef DEBUG
14383     m_inited = true;
14384 #endif
14385 }
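// Illustrative example (assumes the usual SysV AMD64 classification; the struct is hypothetical):
// for "struct { long l; double d; }" the VM descriptor reports two eightbytes, INTEGER then SSE, so
// m_regType[0] and m_regType[1] are set to the corresponding integral and floating types by
// GetEightByteType.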
14386
14387 //-------------------------------------------------------------------
14388 // GetABIReturnReg:  Return ith return register as per target ABI
14389 //
14390 // Arguments:
14391 //     idx   -   Index of the return register.
14392 //               The first return register has an index of 0 and so on.
14393 //
14394 // Return Value:
14395 //     Returns ith return register as per target ABI.
14396 //
14397 // Notes:
14398 //     Right now this is implemented only for x64 Unix; it has
14399 //     yet to be implemented for other multi-reg return
14400 //     targets (Arm64/Arm32/x86).
14401 //
14402 // TODO-ARM:   Implement this routine to support HFA returns.
14403 // TODO-ARM64: Implement this routine to support HFA returns.
14404 // TODO-X86:   Implement this routine to support long returns.
14405 regNumber ReturnTypeDesc::GetABIReturnReg(unsigned idx)
14406 {
14407     unsigned count = GetReturnRegCount();
14408     assert(idx < count);
14409
14410     regNumber resultReg = REG_NA;
14411
14412 #ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
14413     var_types regType0 = GetReturnRegType(0);
14414
14415     if (idx == 0)
14416     {
14417         if (varTypeIsIntegralOrI(regType0))
14418         {
14419             resultReg = REG_INTRET;
14420         }
14421         else {
14422             noway_assert(varTypeIsFloating(regType0));
14423             resultReg = REG_FLOATRET;
14424         }
14425     }
14426     else if (idx == 1)
14427     {
14428         var_types regType1 = GetReturnRegType(1);
14429
14430         if (varTypeIsIntegralOrI(regType1))
14431         {
14432             if (varTypeIsIntegralOrI(regType0))
14433             {
14434                 resultReg = REG_INTRET_1;
14435             }
14436             else
14437             {
14438                 resultReg = REG_INTRET;
14439             }
14440         }
14441         else 
14442         {
14443             noway_assert(varTypeIsFloating(regType1));
14444
14445             if (varTypeIsFloating(regType0))
14446             {
14447                 resultReg = REG_FLOATRET_1;
14448             }
14449             else
14450             {
14451                 resultReg = REG_FLOATRET;
14452             }
14453         }
14454     }
14455
14456 #elif defined(_TARGET_X86_)
14457     if (idx == 0)
14458     {
14459         resultReg = REG_LNGRET_LO;
14460     }
14461     else if (idx == 1)
14462     {
14463         resultReg = REG_LNGRET_HI;
14464     }
14465 #endif //FEATURE_UNIX_AMD64_STRUCT_PASSING
14466
14467     assert(resultReg != REG_NA);
14468     return resultReg;
14469 }
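// Illustrative mapping for the SysV AMD64 path above (register names per the usual ABI; the struct is
// hypothetical): a struct returned as an integer eightbyte followed by an SSE eightbyte uses
// REG_INTRET (RAX) for idx 0 and REG_FLOATRET (XMM0) for idx 1, since the eightbytes land in
// different register files.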
14470
14471 //--------------------------------------------------------------------------------
14472 // GetABIReturnRegs: get the mask of return registers as per target arch ABI.
14473 //
14474 // Arguments:
14475 //    None
14476 // 
14477 // Return Value:
14478 //    reg mask of return registers in which the return type is returned.
14479 //
14480 // Note:
14481 //    For now this is implemented only for x64 Unix; it has yet to be implemented
14482 //    for other multi-reg return targets (Arm64/Arm32/x86).
14483 //
14484 //    This routine can be used when the caller is not particular about the order
14485 //    of return registers and wants to know the set of return registers.
14486 //
14487 // TODO-ARM:   Implement this routine to support HFA returns.
14488 // TODO-ARM64: Implement this routine to support HFA returns.
14489 // TODO-X86:   Implement this routine to support long returns.
14490 //
14491 //static
14492 regMaskTP ReturnTypeDesc::GetABIReturnRegs()
14493 {
14494     regMaskTP resultMask = RBM_NONE;
14495
14496     unsigned count = GetReturnRegCount();
14497     for (unsigned i = 0; i < count; ++i)
14498     {
14499         resultMask |= genRegMask(GetABIReturnReg(i));
14500     }
14501
14502     return resultMask;
14503 }