1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
5 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
10 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
11 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
20 #include "ssaconfig.h"
// Map a genTreeOps operator (plus an "unsigned" flag) to the VNFunc used by the
// value-number store. GT_EQ/GT_NE take the signed path even when isUnsigned is
// true — presumably because equality does not depend on signedness (TODO confirm).
// NOTE(review): interior lines are elided in this extract; comments cover only visible code.
22 VNFunc GetVNFuncForOper(genTreeOps oper, bool isUnsigned)
24     if (!isUnsigned || (oper == GT_EQ) || (oper == GT_NE))
// ValueNumStore constructor: initializes all constant/function memoization maps
// to nullptr (lazily created), clears the per-type/per-attrib chunk table and the
// small-int constant cache, reserves chunk 0 for special TYP_REF constants
// (NULL, Exception, ZeroMap), and reads the map-select budget from JitConfig.
// NOTE(review): interior lines are elided in this extract; comments cover only visible code.
57 ValueNumStore::ValueNumStore(Compiler* comp, IAllocator* alloc)
66     , m_fixedPointMapSels(alloc, 8)
68     , m_intCnsMap(nullptr)
69     , m_longCnsMap(nullptr)
70     , m_handleMap(nullptr)
71     , m_floatCnsMap(nullptr)
72     , m_doubleCnsMap(nullptr)
73     , m_byrefCnsMap(nullptr)
74     , m_VNFunc0Map(nullptr)
75     , m_VNFunc1Map(nullptr)
76     , m_VNFunc2Map(nullptr)
77     , m_VNFunc3Map(nullptr)
78     , m_VNFunc4Map(nullptr)
79     , m_uPtrToLocNotAFieldCount(1)
81     // We have no current allocation chunks.
82     for (unsigned i = 0; i < TYP_COUNT; i++)
84         for (unsigned j = CEA_None; j <= CEA_Count + MAX_LOOP_NUM; j++)
86             m_curAllocChunk[i][j] = NoChunk;
// Invalidate the small-int constant cache; entries are filled on demand by VNForIntCon.
90     for (unsigned i = 0; i < SmallIntConstNum; i++)
92         m_VNsForSmallIntConsts[i] = NoVN;
94     // We will reserve chunk 0 to hold some special constants, like the constant NULL, the "exception" value, and the
96     Chunk* specialConstChunk = new (m_alloc) Chunk(m_alloc, &m_nextChunkBase, TYP_REF, CEA_Const, MAX_LOOP_NUM);
97     specialConstChunk->m_numUsed +=
98         SRC_NumSpecialRefConsts; // Implicitly allocate 0 ==> NULL, and 1 ==> Exception, 2 ==> ZeroMap.
99     ChunkNum cn = m_chunks.Push(specialConstChunk);
// Budget limiting the recursion depth/width of VNForMapSelectWork (see JitVNMapSelBudget).
102     m_mapSelectBudget = JitConfig.JitVNMapSelBudget();
// Evaluate a unary VNFunc on a constant operand of type T. Ops common to integral
// and floating-point types are handled here (elided); anything else is delegated
// to the integral-only evaluator EvalOpIntegral.
106 template <typename T>
107 T ValueNumStore::EvalOp(VNFunc vnf, T v0)
109     genTreeOps oper = genTreeOps(vnf);
111     // Here we handle those unary ops that are the same for integral and floating-point types.
117     // Must be int-specific
118     return EvalOpIntegral(vnf, v0);
// Evaluate a unary VNFunc that is only defined for integral types (body elided in this extract).
122 template <typename T>
123 T ValueNumStore::EvalOpIntegral(VNFunc vnf, T v0)
125     genTreeOps oper = genTreeOps(vnf);
127     // Here we handle unary ops that are the same for all integral types.
// Evaluate a binary VNFunc on constant operands of type T, recording any exception
// (divide-by-zero, signed-divide overflow) into *pExcSet as a singleton exception set.
// vnf < VNF_Boundary means vnf is really a genTreeOps operator; otherwise it is a
// VNF_ function, where the visible cases are the unsigned comparison/arithmetic forms
// evaluated by casting through the corresponding unsigned type UT.
// NOTE(review): interior lines (switch labels/returns) are elided in this extract.
138 template <typename T>
139 T ValueNumStore::EvalOp(VNFunc vnf, T v0, T v1, ValueNum* pExcSet)
141     if (vnf < VNF_Boundary)
143         genTreeOps oper = genTreeOps(vnf);
144         // Here we handle those that are the same for integral and floating-point types.
// Division by zero produces a DivideByZeroExc exception set entry.
156                 *pExcSet = VNExcSetSingleton(VNForFunc(TYP_REF, VNF_DivideByZeroExc));
// INT_MIN / -1 style overflow of signed integer division produces ArithmeticExc.
159                 if (IsOverflowIntDiv(v0, v1))
161                     *pExcSet = VNExcSetSingleton(VNForFunc(TYP_REF, VNF_ArithmeticExc));
170         // Must be int-specific
171         return EvalOpIntegral(vnf, v0, v1, pExcSet);
174     else // must be a VNF_ function
// Unsigned variants: evaluate by reinterpreting both operands as the unsigned type.
176         typedef typename jitstd::make_unsigned<T>::type UT;
180                 return T(UT(v0) > UT(v1));
182                 return T(UT(v0) >= UT(v1));
184                 return T(UT(v0) < UT(v1));
186                 return T(UT(v0) <= UT(v1));
188                 return T(UT(v0) + UT(v1));
190                 return T(UT(v0) - UT(v1));
192                 return T(UT(v0) * UT(v1));
196                     *pExcSet = VNExcSetSingleton(VNForFunc(TYP_REF, VNF_DivideByZeroExc));
201                     return T(UT(v0) / UT(v1));
204                 // Must be int-specific
205                 return EvalOpIntegral(vnf, v0, v1, pExcSet);
// Explicit specialization for double: floating-point ops have no unsigned variants,
// so only the common (genTreeOps) operators are handled (body elided in this extract).
210 // Specialize for double for floating operations, that doesn't involve unsigned.
212 double ValueNumStore::EvalOp<double>(VNFunc vnf, double v0, double v1, ValueNum* pExcSet)
214     genTreeOps oper = genTreeOps(vnf);
215     // Here we handle those that are the same for floating-point types.
// Evaluate a comparison VNFunc on constant operands, returning 0/1 as an int.
// vnf < VNF_Boundary covers the ordinary genTreeOps comparisons (elided);
// the VNF_ branch covers the unsigned comparison forms, evaluated by casting
// both operands to unsigned.
234 template <typename T>
235 int ValueNumStore::EvalComparison(VNFunc vnf, T v0, T v1)
237     if (vnf < VNF_Boundary)
239         genTreeOps oper = genTreeOps(vnf);
240         // Here we handle those that are the same for floating-point types.
259     else // must be a VNF_ function
264                 return unsigned(v0) > unsigned(v1);
266                 return unsigned(v0) >= unsigned(v1);
268                 return unsigned(v0) < unsigned(v1);
270                 return unsigned(v0) <= unsigned(v1);
// Evaluate an ORDERED floating-point comparison on constant operands.
// Callers must not route unordered (NaN-aware, GTF_RELOP_NAN_UN) comparisons here;
// the existing comment below records that as a known limitation.
278 template <typename T>
279 int ValueNumStore::EvalOrderedComparisonFloat(VNFunc vnf, T v0, T v1)
283     // All comparisons below are ordered comparisons.
285     // We should guard this function from unordered comparisons
286     // identified by the GTF_RELOP_NAN_UN flag. Either the flag
287     // should be bubbled (similar to GTF_UNSIGNED for ints)
288     // to this point or we should bail much earlier if any of
289     // the operands are NaN.
291     genTreeOps oper = genTreeOps(vnf);
292     // Here we handle those that are the same for floating-point types.
// double/float specializations of EvalComparison: both forward to the
// ordered floating-point comparison evaluator.
313 int ValueNumStore::EvalComparison<double>(VNFunc vnf, double v0, double v1)
315     return EvalOrderedComparisonFloat(vnf, v0, v1);
319 int ValueNumStore::EvalComparison<float>(VNFunc vnf, float v0, float v1)
321     return EvalOrderedComparisonFloat(vnf, v0, v1);
// Evaluate an integral-only binary VNFunc on constants: shifts, rotates,
// division/modulus (with divide-by-zero and overflow exception recording),
// and the unsigned divide/mod forms. Exceptions go into *pExcSet.
// NOTE(review): interior lines (switch labels, #ifdefs) are elided in this extract.
324 template <typename T>
325 T ValueNumStore::EvalOpIntegral(VNFunc vnf, T v0, T v1, ValueNum* pExcSet)
327     genTreeOps oper = genTreeOps(vnf);
// Logical (unsigned) right shifts for 64-bit and 32-bit operands.
355                 return UINT64(v0) >> v1;
359                 return UINT32(v0) >> v1;
// Rotate left: (x << n) | (x >>> (width - n)), 64-bit then 32-bit form.
364                 return (v0 << v1) | (UINT64(v0) >> (64 - v1));
368                 return (v0 << v1) | (UINT32(v0) >> (32 - v1));
// Rotate right: (x << (width - n)) | (x >>> n), 64-bit then 32-bit form.
374                 return (v0 << (64 - v1)) | (UINT64(v0) >> v1);
378                 return (v0 << (32 - v1)) | (UINT32(v0) >> v1);
// Signed div/mod: record divide-by-zero or arithmetic-overflow exceptions.
385                 *pExcSet = VNExcSetSingleton(VNForFunc(TYP_REF, VNF_DivideByZeroExc));
387             else if (IsOverflowIntDiv(v0, v1))
389                 *pExcSet = VNExcSetSingleton(VNForFunc(TYP_REF, VNF_ArithmeticExc));
392             else // We are not dividing by Zero, so we can calculate the exact result.
394                 // Perform the appropriate operation.
399                 else // Must be GT_MOD
// Unsigned div/mod: only divide-by-zero is possible; no signed overflow case.
409                 *pExcSet = VNExcSetSingleton(VNForFunc(TYP_REF, VNF_DivideByZeroExc));
412             else // We are not dividing by Zero, so we can calculate the exact result.
414                 typedef typename jitstd::make_unsigned<T>::type UT;
415                 // We need for force the source operands for the divide or mod operation
416                 // to be considered unsigned.
420                     // This is return unsigned(v0) / unsigned(v1) for both sizes of integers
421                     return T(UT(v0) / UT(v1));
423                 else // Must be GT_UMOD
425                     // This is return unsigned(v0) % unsigned(v1) for both sizes of integers
426                     return T(UT(v0) % UT(v1));
// Build a one-element exception set: ExcSetCons(x, EmptyExcSet). The pair (VNP)
// variant applies this independently to the liberal and conservative VNs.
434 ValueNum ValueNumStore::VNExcSetSingleton(ValueNum x)
436     ValueNum res = VNForFunc(TYP_REF, VNF_ExcSetCons, x, VNForEmptyExcSet());
438     if (m_pComp->verbose)
440         printf("    " STR_VN "%x = singleton exc set", res);
448 ValueNumPair ValueNumStore::VNPExcSetSingleton(ValueNumPair xp)
450     return ValueNumPair(VNExcSetSingleton(xp.GetLiberal()), VNExcSetSingleton(xp.GetConservative()));
// Union of two exception sets represented as sorted VNF_ExcSetCons lists.
// Empty-set cases short-circuit; otherwise both inputs are asserted to be
// ExcSetCons lists and merged in ascending order of their head elements,
// deduplicating equal heads (classic sorted-list merge).
453 ValueNum ValueNumStore::VNExcSetUnion(ValueNum xs0, ValueNum xs1 DEBUGARG(bool topLevel))
455     if (xs0 == VNForEmptyExcSet())
459     else if (xs1 == VNForEmptyExcSet())
466         bool b0 = GetVNFunc(xs0, &funcXs0);
467         assert(b0 && funcXs0.m_func == VNF_ExcSetCons); // Precondition: xs0 is an exception set.
469         bool b1 = GetVNFunc(xs1, &funcXs1);
470         assert(b1 && funcXs1.m_func == VNF_ExcSetCons); // Precondition: xs1 is an exception set.
// Smaller head from xs0: keep it, recurse on the tail of xs0 against all of xs1.
472         if (funcXs0.m_args[0] < funcXs1.m_args[0])
474             res = VNForFunc(TYP_REF, VNF_ExcSetCons, funcXs0.m_args[0],
475                             VNExcSetUnion(funcXs0.m_args[1], xs1 DEBUGARG(false)));
477         else if (funcXs0.m_args[0] == funcXs1.m_args[0])
479             // Equal elements; only add one to the result.
480             res = VNExcSetUnion(funcXs0.m_args[1], xs1);
484             assert(funcXs0.m_args[0] > funcXs1.m_args[0]);
485             res = VNForFunc(TYP_REF, VNF_ExcSetCons, funcXs1.m_args[0],
486                             VNExcSetUnion(xs0, funcXs1.m_args[1] DEBUGARG(false)));
// Pair variant: union liberal with liberal and conservative with conservative.
493 ValueNumPair ValueNumStore::VNPExcSetUnion(ValueNumPair xs0vnp, ValueNumPair xs1vnp)
495     return ValueNumPair(VNExcSetUnion(xs0vnp.GetLiberal(), xs1vnp.GetLiberal()),
496                         VNExcSetUnion(xs0vnp.GetConservative(), xs1vnp.GetConservative()));
// Split a possibly-exception-carrying VN: if vnWx is VNF_ValWithExc(norm, excSet),
// write the normal value to *pvn and the exception set to *pvnx (else branch elided).
// The pair variant unpacks liberal and conservative sides independently.
499 void ValueNumStore::VNUnpackExc(ValueNum vnWx, ValueNum* pvn, ValueNum* pvnx)
501     assert(vnWx != NoVN);
503     if (GetVNFunc(vnWx, &funcApp) && funcApp.m_func == VNF_ValWithExc)
505         *pvn = funcApp.m_args[0];
506         *pvnx = funcApp.m_args[1];
514 void ValueNumStore::VNPUnpackExc(ValueNumPair vnWx, ValueNumPair* pvn, ValueNumPair* pvnx)
516     VNUnpackExc(vnWx.GetLiberal(), pvn->GetLiberalAddr(), pvnx->GetLiberalAddr());
517     VNUnpackExc(vnWx.GetConservative(), pvn->GetConservativeAddr(), pvnx->GetConservativeAddr());
// VNNormVal: strip the exception part — for VNF_ValWithExc(v, exc) return v
// (fall-through for plain VNs elided). VNExcVal: the dual — return the exception
// set part, or the empty set for a plain VN. The VNP* variants apply these to
// the liberal/conservative sides of a pair.
520 ValueNum ValueNumStore::VNNormVal(ValueNum vn)
523     if (GetVNFunc(vn, &funcApp) && funcApp.m_func == VNF_ValWithExc)
525         return funcApp.m_args[0];
533 ValueNumPair ValueNumStore::VNPNormVal(ValueNumPair vnp)
535     return ValueNumPair(VNNormVal(vnp.GetLiberal()), VNNormVal(vnp.GetConservative()));
538 ValueNum ValueNumStore::VNExcVal(ValueNum vn)
541     if (GetVNFunc(vn, &funcApp) && funcApp.m_func == VNF_ValWithExc)
543         return funcApp.m_args[1];
547         return VNForEmptyExcSet();
551 ValueNumPair ValueNumStore::VNPExcVal(ValueNumPair vnp)
553     return ValueNumPair(VNExcVal(vnp.GetLiberal()), VNExcVal(vnp.GetConservative()));
// Attach an exception set to a value: empty excSet returns vn unchanged (elided);
// otherwise vn is first unpacked so an existing exception set is unioned with the
// new one, and the result is re-wrapped as VNF_ValWithExc. Pair variant below.
556 // If vn "excSet" is not "VNForEmptyExcSet()", return "VNF_ValWithExc(vn, excSet)". Otherwise,
558 ValueNum ValueNumStore::VNWithExc(ValueNum vn, ValueNum excSet)
560     if (excSet == VNForEmptyExcSet())
567         ValueNum vnX = VNForEmptyExcSet();
568         VNUnpackExc(vn, &vnNorm, &vnX);
569         return VNForFunc(TypeOfVN(vnNorm), VNF_ValWithExc, vnNorm, VNExcSetUnion(vnX, excSet));
573 ValueNumPair ValueNumStore::VNPWithExc(ValueNumPair vnp, ValueNumPair excSetVNP)
575     return ValueNumPair(VNWithExc(vnp.GetLiberal(), excSetVNP.GetLiberal()),
576                         VNWithExc(vnp.GetConservative(), excSetVNP.GetConservative()));
// Attribute queries: a VN is known-non-null / shared-static iff it is a VNFunc
// application whose function carries the corresponding VNFOA_* attribute bit.
579 bool ValueNumStore::IsKnownNonNull(ValueNum vn)
586     return GetVNFunc(vn, &funcAttr) && (s_vnfOpAttribs[funcAttr.m_func] & VNFOA_KnownNonNull) != 0;
589 bool ValueNumStore::IsSharedStatic(ValueNum vn)
596     return GetVNFunc(vn, &funcAttr) && (s_vnfOpAttribs[funcAttr.m_func] & VNFOA_SharedStatic) != 0;
// Chunk constructor: claims a contiguous range of ChunkSize value numbers starting
// at *pNextBaseVN (advanced at the end) and allocates the m_defs payload array whose
// element type depends on the (typ, attribs) pair — raw constants for CEA_Const,
// VNHandle for handles, and VNFunc/VNDefFuncNArg records for function applications.
// NOTE(review): switch labels are elided in this extract; comments track the visible arms.
599 ValueNumStore::Chunk::Chunk(
600     IAllocator* alloc, ValueNum* pNextBaseVN, var_types typ, ChunkExtraAttribs attribs, BasicBlock::loopNumber loopNum)
601     : m_defs(nullptr), m_numUsed(0), m_baseVN(*pNextBaseVN), m_typ(typ), m_attribs(attribs), m_loopNum(loopNum)
603     // Allocate "m_defs" here, according to the typ/attribs pair.
607             break; // Nothing to do.
// Constant chunks: payload is the raw constant representation for the type.
612                     m_defs = new (alloc) Alloc<TYP_INT>::Type[ChunkSize];
615                     m_defs = new (alloc) Alloc<TYP_FLOAT>::Type[ChunkSize];
618                     m_defs = new (alloc) Alloc<TYP_LONG>::Type[ChunkSize];
621                     m_defs = new (alloc) Alloc<TYP_DOUBLE>::Type[ChunkSize];
624                     m_defs = new (alloc) Alloc<TYP_BYREF>::Type[ChunkSize];
627                     // We allocate space for a single REF constant, NULL, so we can access these values uniformly.
628                     // Since this value is always the same, we represent it as a static.
629                     m_defs = &s_specialRefConsts[0];
630                     break; // Nothing to do.
632                     assert(false); // Should not reach here.
// Handle chunks and N-ary function-application chunks.
637             m_defs = new (alloc) VNHandle[ChunkSize];
641             m_defs = new (alloc) VNFunc[ChunkSize];
645             m_defs = new (alloc) VNDefFunc1Arg[ChunkSize];
648             m_defs = new (alloc) VNDefFunc2Arg[ChunkSize];
651             m_defs = new (alloc) VNDefFunc3Arg[ChunkSize];
654             m_defs = new (alloc) VNDefFunc4Arg[ChunkSize];
// Reserve this chunk's VN range for subsequent chunks.
659     *pNextBaseVN += ChunkSize;
// Return the current allocation chunk for (typ, attribs, loopNum), reusing the
// cached chunk in m_curAllocChunk if it still has room, else allocating a new one
// and recording it. The second index folds attribs and loop number into one range:
// plain attribs use [0..CEA_Count), loop-tracked VNs (attribs must be CEA_None)
// use CEA_Count + loopNum with NOT_IN_LOOP mapped to MAX_LOOP_NUM.
662 ValueNumStore::Chunk* ValueNumStore::GetAllocChunk(var_types typ,
663                                                    ChunkExtraAttribs attribs,
664                                                    BasicBlock::loopNumber loopNum)
668     if (loopNum == MAX_LOOP_NUM)
670         // Loop nest is unknown/irrelevant for this VN.
675         // Loop nest is interesting. Since we know this is only true for unique VNs, we know attribs will
676         // be CEA_None and can just index based on loop number.
677         noway_assert(attribs == CEA_None);
678         // Map NOT_IN_LOOP -> MAX_LOOP_NUM to make the index range contiguous [0..MAX_LOOP_NUM]
679         index = CEA_Count + (loopNum == BasicBlock::NOT_IN_LOOP ? MAX_LOOP_NUM : loopNum);
681     ChunkNum cn = m_curAllocChunk[typ][index];
// Reuse the cached chunk while it has unused slots.
684         res = m_chunks.Get(cn);
685         if (res->m_numUsed < ChunkSize)
690     // Otherwise, must allocate a new one.
691     res = new (m_alloc) Chunk(m_alloc, &m_nextChunkBase, typ, attribs, loopNum);
692     cn  = m_chunks.Push(res);
693     m_curAllocChunk[typ][index] = cn;
// VN for a 32-bit int constant. Small constants (within [SmallIntConstMin, ...])
// are cached in m_VNsForSmallIntConsts, filled lazily; others go straight to the
// general int-constant map via GetVNForIntCon.
697 ValueNum ValueNumStore::VNForIntCon(INT32 cnsVal)
699     if (IsSmallIntConst(cnsVal))
701         unsigned ind = cnsVal - SmallIntConstMin;
702         ValueNum vn  = m_VNsForSmallIntConsts[ind];
// Cache miss (NoVN): compute once and memoize.
707         vn                          = GetVNForIntCon(cnsVal);
708         m_VNsForSmallIntConsts[ind] = vn;
713         return GetVNForIntCon(cnsVal);
// VN for an INT64 constant: memoized in the long-constant map; on a miss, a slot
// is allocated from a TYP_LONG/CEA_Const chunk, the value stored in m_defs, and
// the new VN recorded in the map.
717 ValueNum ValueNumStore::VNForLongCon(INT64 cnsVal)
720     if (GetLongCnsMap()->Lookup(cnsVal, &res))
726         Chunk*   c                                             = GetAllocChunk(TYP_LONG, CEA_Const);
727         unsigned offsetWithinChunk                             = c->AllocVN();
728         res                                                    = c->m_baseVN + offsetWithinChunk;
729         reinterpret_cast<INT64*>(c->m_defs)[offsetWithinChunk] = cnsVal;
730         GetLongCnsMap()->Set(cnsVal, res);
// VNs for float and double constants: same memoize-or-allocate pattern as
// VNForLongCon, using the float/double constant maps and TYP_FLOAT/TYP_DOUBLE
// CEA_Const chunks respectively.
735 ValueNum ValueNumStore::VNForFloatCon(float cnsVal)
738     if (GetFloatCnsMap()->Lookup(cnsVal, &res))
744         Chunk*   c                                             = GetAllocChunk(TYP_FLOAT, CEA_Const);
745         unsigned offsetWithinChunk                             = c->AllocVN();
746         res                                                    = c->m_baseVN + offsetWithinChunk;
747         reinterpret_cast<float*>(c->m_defs)[offsetWithinChunk] = cnsVal;
748         GetFloatCnsMap()->Set(cnsVal, res);
753 ValueNum ValueNumStore::VNForDoubleCon(double cnsVal)
756     if (GetDoubleCnsMap()->Lookup(cnsVal, &res))
762         Chunk*   c                                              = GetAllocChunk(TYP_DOUBLE, CEA_Const);
763         unsigned offsetWithinChunk                              = c->AllocVN();
764         res                                                     = c->m_baseVN + offsetWithinChunk;
765         reinterpret_cast<double*>(c->m_defs)[offsetWithinChunk] = cnsVal;
766         GetDoubleCnsMap()->Set(cnsVal, res);
// VN for a byref constant (stored as INT64): memoized in the byref-constant map,
// allocating from a TYP_BYREF/CEA_Const chunk on a miss.
771 ValueNum ValueNumStore::VNForByrefCon(INT64 cnsVal)
774     if (GetByrefCnsMap()->Lookup(cnsVal, &res))
780         Chunk*   c                                             = GetAllocChunk(TYP_BYREF, CEA_Const);
781         unsigned offsetWithinChunk                             = c->AllocVN();
782         res                                                    = c->m_baseVN + offsetWithinChunk;
783         reinterpret_cast<INT64*>(c->m_defs)[offsetWithinChunk] = cnsVal;
784         GetByrefCnsMap()->Set(cnsVal, res);
// Encode a cast's target type (and optional unsigned-source flag) as an int
// constant VN: the type is shifted left by VCA_BitCount, leaving the low bits
// for flags, and VCA_UnsignedSrc (0x01) is or-ed in when srcIsUnsigned.
789 ValueNum ValueNumStore::VNForCastOper(var_types castToType, bool srcIsUnsigned /*=false*/)
791     assert(castToType != TYP_STRUCT);
792     INT32 cnsVal = INT32(castToType) << INT32(VCA_BitCount);
793     assert((cnsVal & INT32(VCA_ReservedBits)) == 0);
797         // We record the srcIsUnsigned by or-ing a 0x01
798         cnsVal |= INT32(VCA_UnsignedSrc);
800     ValueNum result = VNForIntCon(cnsVal);
803     if (m_pComp->verbose)
805         printf("    VNForCastOper(%s%s) is " STR_VN "%x\n", varTypeName(castToType),
806                srcIsUnsigned ? ", unsignedSrc" : "", result);
// VN for a handle constant: (cnsVal, handleFlags) is packed into a VNHandle key,
// memoized in the handle map, and on a miss a slot is allocated from a
// TYP_I_IMPL/CEA_Handle chunk. handleFlags must be within GTF_ICON_HDL_MASK.
813 ValueNum ValueNumStore::VNForHandle(ssize_t cnsVal, unsigned handleFlags)
815     assert((handleFlags & ~GTF_ICON_HDL_MASK) == 0);
819     VNHandle::Initialize(&handle, cnsVal, handleFlags);
820     if (GetHandleMap()->Lookup(handle, &res))
826         Chunk*   c                                                = GetAllocChunk(TYP_I_IMPL, CEA_Handle);
827         unsigned offsetWithinChunk                                = c->AllocVN();
828         res                                                       = c->m_baseVN + offsetWithinChunk;
829         reinterpret_cast<VNHandle*>(c->m_defs)[offsetWithinChunk] = handle;
830         GetHandleMap()->Set(handle, res);
// Dispatch on "typ" to the right zero-constant VN. With FEATURE_X87_DOUBLES,
// float zero is represented as a double; struct-like types fall back to the
// ZeroMap (note the self-referential "Recursion!" comment); unsupported types
// hit unreached(). (switch labels elided in this extract.)
835 // Returns the value number for zero of the given "typ".
836 // It has an unreached() for a "typ" that has no zero value, such as TYP_BYREF.
837 ValueNum ValueNumStore::VNZeroForType(var_types typ)
849             return VNForIntCon(0);
852             return VNForLongCon(0);
854 #if FEATURE_X87_DOUBLES
855             return VNForDoubleCon(0.0);
857             return VNForFloatCon(0.0f);
860             return VNForDoubleCon(0.0);
866             // TODO-CQ: Improve value numbering for SIMD types.
871 #endif // FEATURE_SIMD
872             return VNForZeroMap(); // Recursion!
874             // These should be unreached.
876             unreached(); // Should handle all types.
// Dispatch on "typ" to the right one-constant VN (int/long/float/double arms visible;
// per the header comment, types with no "one" value yield NoVN). The static
// s_specialRefConsts below backs chunk 0's special REF constants (NULL, Exception, ZeroMap).
880 // Returns the value number for one of the given "typ".
881 // It returns NoVN for a "typ" that has no one value, such as TYP_REF.
882 ValueNum ValueNumStore::VNOneForType(var_types typ)
894             return VNForIntCon(1);
897             return VNForLongCon(1);
899             return VNForFloatCon(1.0f);
901             return VNForDoubleCon(1.0);
908 class Object* ValueNumStore::s_specialRefConsts[] = {nullptr, nullptr, nullptr};
// VN for a nullary (symbolic-constant) function application: memoized in the
// Func0 map; on a miss a slot is allocated from a CEA_Func0 chunk and the VNFunc
// itself is stored as the definition.
910 // Nullary operators (i.e., symbolic constants).
911 ValueNum ValueNumStore::VNForFunc(var_types typ, VNFunc func)
913     assert(VNFuncArity(func) == 0);
917     if (GetVNFunc0Map()->Lookup(func, &res))
923         Chunk*   c                                              = GetAllocChunk(typ, CEA_Func0);
924         unsigned offsetWithinChunk                              = c->AllocVN();
925         res                                                     = c->m_baseVN + offsetWithinChunk;
926         reinterpret_cast<VNFunc*>(c->m_defs)[offsetWithinChunk] = func;
927         GetVNFunc0Map()->Set(func, res);
// VN for a unary function application. Constant-folds when possible; otherwise
// memoizes (func, arg0VN) in the Func1 map, allocating a VNDefFunc1Arg slot from
// a CEA_Func1 chunk on a miss. Precondition: arg0VN carries no exception part.
932 ValueNum ValueNumStore::VNForFunc(var_types typ, VNFunc func, ValueNum arg0VN)
934     assert(arg0VN == VNNormVal(arg0VN)); // Arguments don't carry exceptions.
937     VNDefFunc1Arg fstruct(func, arg0VN);
939     // Do constant-folding.
940     if (CanEvalForConstantArgs(func) && IsVNConstant(arg0VN))
942         return EvalFuncForConstantArgs(typ, func, arg0VN);
945     if (GetVNFunc1Map()->Lookup(fstruct, &res))
951         // Otherwise, create a new VN for this application.
952         Chunk*   c                                                     = GetAllocChunk(typ, CEA_Func1);
953         unsigned offsetWithinChunk                                     = c->AllocVN();
954         res                                                            = c->m_baseVN + offsetWithinChunk;
955         reinterpret_cast<VNDefFunc1Arg*>(c->m_defs)[offsetWithinChunk] = fstruct;
956         GetVNFunc1Map()->Set(fstruct, res);
// VN for a binary function application. Pipeline: (1) constant-fold when both args
// are constants and folding is safe (skipping handle casts to non-I_IMPL, mixed
// float/non-float operands, and NaN operands); (2) canonicalize commutative ops by
// ordering args by VN; (3) memo lookup in the Func2 map; (4) algebraic identities
// for genTreeOps (x+0, x*1, x*0, x|0, x&0, x==x, x!=x, null comparisons against
// known-non-null); (5) VNF_CastClass returns its object argument plus a possible
// InvalidCastExc; (6) otherwise allocate a fresh VNDefFunc2Arg and memoize.
// Preconditions: both args exception-free, arity 2, and not VNF_MapSelect (which
// has its own entry point, VNForMapSelect).
// NOTE(review): switch labels and some returns are elided in this extract.
961 ValueNum ValueNumStore::VNForFunc(var_types typ, VNFunc func, ValueNum arg0VN, ValueNum arg1VN)
963     assert(arg0VN != NoVN && arg1VN != NoVN);
964     assert(arg0VN == VNNormVal(arg0VN)); // Arguments carry no exceptions.
965     assert(arg1VN == VNNormVal(arg1VN)); // Arguments carry no exceptions.
966     assert(VNFuncArity(func) == 2);
967     assert(func != VNF_MapSelect); // Precondition: use the special function VNForMapSelect defined for that.
971     // Do constant-folding.
972     if (CanEvalForConstantArgs(func) && IsVNConstant(arg0VN) && IsVNConstant(arg1VN))
974         bool canFold = true; // Normally we will be able to fold this 'func'
976         // Special case for VNF_Cast of constant handles
977         // Don't allow eval/fold of a GT_CAST(non-I_IMPL, Handle)
979         if ((func == VNF_Cast) && (typ != TYP_I_IMPL) && IsVNHandle(arg0VN))
984         // It is possible for us to have mismatched types (see Bug 750863)
985         // We don't try to fold a binary operation when one of the constant operands
986         // is a floating-point constant and the other is not.
988         bool arg0IsFloating = varTypeIsFloating(TypeOfVN(arg0VN));
989         bool arg1IsFloating = varTypeIsFloating(TypeOfVN(arg1VN));
990         if (arg0IsFloating != arg1IsFloating)
995         // NaNs are unordered wrt to other floats. While an ordered
996         // comparison would return false, an unordered comparison
997         // will return true if any operands are a NaN. We only perform
998         // ordered NaN comparison in EvalComparison.
999         if ((arg0IsFloating && _isnan(GetConstantDouble(arg0VN))) ||
1000             (arg1IsFloating && _isnan(GetConstantDouble(arg1VN))))
1007             return EvalFuncForConstantArgs(typ, func, arg0VN, arg1VN);
1010     // We canonicalize commutative operations.
1011     // (Perhaps should eventually handle associative/commutative [AC] ops -- but that gets complicated...)
1012     if (VNFuncIsCommutative(func))
1014         // Order arg0 arg1 by numerical VN value.
1015         if (arg0VN > arg1VN)
1017             jitstd::swap(arg0VN, arg1VN);
1020     VNDefFunc2Arg fstruct(func, arg0VN, arg1VN);
1021     if (GetVNFunc2Map()->Lookup(fstruct, &res))
1027         // We have ways of evaluating some binary functions.
1028         if (func < VNF_Boundary)
1030             if (typ != TYP_BYREF) // We don't want/need to optimize a zero byref
1032                 genTreeOps oper = genTreeOps(func);
1033                 ValueNum   ZeroVN, OneVN; // We may need to create one of these in the switch below.
// Additive identity: skipped for floating point because 0.0 + (-0.0) != -0.0.
1037                         // This identity does not apply for floating point (when x == -0.0)
1038                         if (!varTypeIsFloating(typ))
1040                             // (x + 0) == (0 + x) => x
1041                             ZeroVN = VNZeroForType(typ);
1042                             if (arg0VN == ZeroVN)
1046                             else if (arg1VN == ZeroVN)
// Subtractive identity (x - 0 => x); only the RHS-zero form applies.
1055                             ZeroVN = VNZeroForType(typ);
1056                             if (arg1VN == ZeroVN)
1063                         // (x * 1) == (1 * x) => x
1064                         OneVN = VNOneForType(typ);
1067                         if (arg0VN == OneVN)
1071                         else if (arg1VN == OneVN)
1077                         if (!varTypeIsFloating(typ))
1079                             // (x * 0) == (0 * x) => 0 (unless x is NaN, which we must assume a fp value may be)
1080                             ZeroVN = VNZeroForType(typ);
1081                             if (arg0VN == ZeroVN)
1085                             else if (arg1VN == ZeroVN)
// Division identity (x / 1 => x).
1095                         OneVN = VNOneForType(typ);
1098                         if (arg1VN == OneVN)
1107                         // (x | 0) == (0 | x) => x
1108                         // (x ^ 0) == (0 ^ x) => x
1109                         ZeroVN = VNZeroForType(typ);
1110                         if (arg0VN == ZeroVN)
1114                         else if (arg1VN == ZeroVN)
1121                         // (x & 0) == (0 & x) => 0
1122                         ZeroVN = VNZeroForType(typ);
1123                         if (arg0VN == ZeroVN)
1127                         else if (arg1VN == ZeroVN)
// Shift by zero (presumably x << 0 / x >> 0 => x — arm labels elided; TODO confirm).
1142                         ZeroVN = VNZeroForType(typ);
1143                         if (arg1VN == ZeroVN)
1150                         // (x == x) => true (unless x is NaN)
1151                         if (!varTypeIsFloating(TypeOfVN(arg0VN)) && (arg0VN != NoVN) && (arg0VN == arg1VN))
1153                             return VNOneForType(typ);
1155                         if ((arg0VN == VNForNull() && IsKnownNonNull(arg1VN)) ||
1156                             (arg1VN == VNForNull() && IsKnownNonNull(arg0VN)))
1158                             return VNZeroForType(typ);
1162                         // (x != x) => false (unless x is NaN)
1163                         if (!varTypeIsFloating(TypeOfVN(arg0VN)) && (arg0VN != NoVN) && (arg0VN == arg1VN))
1165                             return VNZeroForType(typ);
1167                         if ((arg0VN == VNForNull() && IsKnownNonNull(arg1VN)) ||
1168                             (arg1VN == VNForNull() && IsKnownNonNull(arg0VN)))
1170                             return VNOneForType(typ);
1179         else // must be a VNF_ function
1181             if (func == VNF_CastClass)
1183                 // In terms of values, a castclass always returns its second argument, the object being cast.
1184                 // The IL operation may also throw an exception
1185                 return VNWithExc(arg1VN, VNExcSetSingleton(VNForFunc(TYP_REF, VNF_InvalidCastExc, arg1VN, arg0VN)));
1189         // Otherwise, assign a new VN for the function application.
1190         Chunk*   c                                                     = GetAllocChunk(typ, CEA_Func2);
1191         unsigned offsetWithinChunk                                     = c->AllocVN();
1192         res                                                            = c->m_baseVN + offsetWithinChunk;
1193         reinterpret_cast<VNDefFunc2Arg*>(c->m_defs)[offsetWithinChunk] = fstruct;
1194         GetVNFunc2Map()->Set(fstruct, res);
1199 //------------------------------------------------------------------------------
1200 // VNForMapStore : Evaluate VNF_MapStore with the given arguments.
1205 //    arg0VN  -  Map value number
1206 //    arg1VN  -  Index value number
1207 //    arg2VN  -  New value for map[index]
1210 //    Value number for the result of the evaluation.
// Thin wrapper over the 3-arg VNForFunc; the only extra work is the verbose dump.
1212 ValueNum ValueNumStore::VNForMapStore(var_types typ, ValueNum arg0VN, ValueNum arg1VN, ValueNum arg2VN)
1214     ValueNum result = VNForFunc(typ, VNF_MapStore, arg0VN, arg1VN, arg2VN);
1216     if (m_pComp->verbose)
1218         printf("    VNForMapStore(" STR_VN "%x, " STR_VN "%x, " STR_VN "%x):%s returns ", arg0VN, arg1VN, arg2VN,
1220         m_pComp->vnPrint(result, 1);
1227 //------------------------------------------------------------------------------
1228 // VNForMapSelect : Evaluate VNF_MapSelect with the given arguments.
1232 //    vnk     -  Value number kind
1234 //    arg0VN  -  Map value number
1235 //    arg1VN  -  Index value number
1238 //    Value number for the result of the evaluation.
1241 //    This requires a "ValueNumKind" because it will attempt, given "select(phi(m1, ..., mk), ind)", to evaluate
1242 //    "select(m1, ind)", ..., "select(mk, ind)" to see if they agree.  It needs to know which kind of value number
1243 //    (liberal/conservative) to read from the SSA def referenced in the phi argument.
// Entry point: seeds the recursion budget from m_mapSelectBudget and delegates to
// VNForMapSelectWork; the usedRecursiveVN result is discarded here.
1245 ValueNum ValueNumStore::VNForMapSelect(ValueNumKind vnk, var_types typ, ValueNum arg0VN, ValueNum arg1VN)
1247     unsigned budget          = m_mapSelectBudget;
1248     bool     usedRecursiveVN = false;
1249     ValueNum result          = VNForMapSelectWork(vnk, typ, arg0VN, arg1VN, &budget, &usedRecursiveVN);
1251     if (m_pComp->verbose)
1253         printf("    VNForMapSelect(" STR_VN "%x, " STR_VN "%x):%s returns ", arg0VN, arg1VN, varTypeName(typ));
1254         m_pComp->vnPrint(result, 1);
1261 //------------------------------------------------------------------------------
1262 // VNForMapSelectWork : A method that does the work for VNForMapSelect and may call itself recursively.
1266 //    vnk  -  Value number kind
1268 //    arg0VN  -  Zeroth argument
1269 //    arg1VN  -  First argument
1270 //    pBudget -  Remaining budget for the outer evaluation
1271 //    pUsedRecursiveVN - Out-parameter that is set to true iff RecursiveVN was returned from this method
1272 //                       or from a method called during one of recursive invocations.
1275 //    Value number for the result of the evaluation.
1278 //    This requires a "ValueNumKind" because it will attempt, given "select(phi(m1, ..., mk), ind)", to evaluate
1279 //    "select(m1, ind)", ..., "select(mk, ind)" to see if they agree.  It needs to know which kind of value number
1280 //    (liberal/conservative) to read from the SSA def referenced in the phi argument.
// Core select(map, index) evaluator. Axioms applied in order:
//   memo hit -> cached result; budget exhausted -> fresh opaque VNForExpr;
//   recursion detected -> RecursiveVN; select(ZeroMap, i) -> zero of typ;
//   select(store(m,i,v), i) -> v;  i # j (both const) -> select(m, j) via tail-call;
//   select(phi(...), i) -> the common select result across all phi args, if they agree.
// NOTE(review): some interior lines (labels, braces) are elided in this extract.
1282 ValueNum ValueNumStore::VNForMapSelectWork(
1283     ValueNumKind vnk, var_types typ, ValueNum arg0VN, ValueNum arg1VN, unsigned* pBudget, bool* pUsedRecursiveVN)
1286     // This label allows us to directly implement a tail call by setting up the arguments, and doing a goto to here.
1287     assert(arg0VN != NoVN && arg1VN != NoVN);
1288     assert(arg0VN == VNNormVal(arg0VN)); // Arguments carry no exceptions.
1289     assert(arg1VN == VNNormVal(arg1VN)); // Arguments carry no exceptions.
1291     *pUsedRecursiveVN = false;
1294     // Provide a mechanism for writing tests that ensure we don't call this ridiculously often.
1297     // This printing is sometimes useful in debugging.
1298     // if ((m_numMapSels % 1000) == 0) printf("%d VNF_MapSelect applications.\n", m_numMapSels);
1300     unsigned selLim = JitConfig.JitVNMapSelLimit();
1301     assert(selLim == 0 || m_numMapSels < selLim);
1305     VNDefFunc2Arg fstruct(VNF_MapSelect, arg0VN, arg1VN);
1306     if (GetVNFunc2Map()->Lookup(fstruct, &res))
1313         // Give up if we've run out of budget.
1314         if (--(*pBudget) == 0)
1316             // We have to use 'nullptr' for the basic block here, because subsequent expressions
1317             // in different blocks may find this result in the VNFunc2Map -- other expressions in
1318             // the IR may "evaluate" to this same VNForExpr, so it is not "unique" in the sense
1319             // that permits the BasicBlock attribution.
1320             res = VNForExpr(nullptr, typ);
1321             GetVNFunc2Map()->Set(fstruct, res);
1325         // If it's recursive, stop the recursion.
1326         if (SelectIsBeingEvaluatedRecursively(arg0VN, arg1VN))
1328             *pUsedRecursiveVN = true;
1332         if (arg0VN == VNForZeroMap())
1334             return VNZeroForType(typ);
1336         else if (IsVNFunc(arg0VN))
1339             GetVNFunc(arg0VN, &funcApp);
1340             if (funcApp.m_func == VNF_MapStore)
1342                 // select(store(m, i, v), i) == v
1343                 if (funcApp.m_args[1] == arg1VN)
1345 #if FEATURE_VN_TRACE_APPLY_SELECTORS
1346                     JITDUMP("      AX1: select([" STR_VN "%x]store(" STR_VN "%x, " STR_VN "%x, " STR_VN
1347                             "%x), " STR_VN "%x) ==> " STR_VN "%x.\n",
1348                             funcApp.m_args[0], arg0VN, funcApp.m_args[1], funcApp.m_args[2], arg1VN, funcApp.m_args[2]);
1350                     return funcApp.m_args[2];
1352                 // i # j ==> select(store(m, i, v), j) == select(m, j)
1353                 // Currently the only source of distinctions is when both indices are constants.
1354                 else if (IsVNConstant(arg1VN) && IsVNConstant(funcApp.m_args[1]))
1356                     assert(funcApp.m_args[1] != arg1VN); // we already checked this above.
1357 #if FEATURE_VN_TRACE_APPLY_SELECTORS
1358                     JITDUMP("      AX2: " STR_VN "%x != " STR_VN "%x ==> select([" STR_VN "%x]store(" STR_VN
1359                             "%x, " STR_VN "%x, " STR_VN "%x), " STR_VN "%x) ==> select(" STR_VN "%x, " STR_VN "%x).\n",
1360                             arg1VN, funcApp.m_args[1], arg0VN, funcApp.m_args[0], funcApp.m_args[1], funcApp.m_args[2],
1361                             arg1VN, funcApp.m_args[0], arg1VN);
1363                     // This is the equivalent of the recursive tail call:
1364                     // return VNForMapSelect(vnk, typ, funcApp.m_args[0], arg1VN);
1365                     // Make sure we capture any exceptions from the "i" and "v" of the store...
1366                     arg0VN = funcApp.m_args[0];
// Phi handling: resolve which SSA def (local or heap) backs each phi argument.
1370             else if (funcApp.m_func == VNF_PhiDef || funcApp.m_func == VNF_PhiHeapDef)
1372                 unsigned  lclNum = BAD_VAR_NUM;
1373                 bool      isHeap = false;
1374                 VNFuncApp phiFuncApp;
1375                 bool      defArgIsFunc = false;
1376                 if (funcApp.m_func == VNF_PhiDef)
1378                     lclNum       = unsigned(funcApp.m_args[0]);
1379                     defArgIsFunc = GetVNFunc(funcApp.m_args[2], &phiFuncApp);
1383                     assert(funcApp.m_func == VNF_PhiHeapDef);
1385                     defArgIsFunc = GetVNFunc(funcApp.m_args[1], &phiFuncApp);
1387                 if (defArgIsFunc && phiFuncApp.m_func == VNF_Phi)
1389                     // select(phi(m1, m2), x): if select(m1, x) == select(m2, x), return that, else new fresh.
1390                     // Get the first argument of the phi.
1392                     // We need to be careful about breaking infinite recursion.  Record the outer select.
1393                     m_fixedPointMapSels.Push(VNDefFunc2Arg(VNF_MapSelect, arg0VN, arg1VN));
1395                     assert(IsVNConstant(phiFuncApp.m_args[0]));
1396                     unsigned phiArgSsaNum = ConstantValue<unsigned>(phiFuncApp.m_args[0]);
1400                         phiArgVN = m_pComp->GetHeapPerSsaData(phiArgSsaNum)->m_vnPair.Get(vnk);
1404                         phiArgVN = m_pComp->lvaTable[lclNum].GetPerSsaData(phiArgSsaNum)->m_vnPair.Get(vnk);
1406                     if (phiArgVN != ValueNumStore::NoVN)
// Evaluate select on the first phi arg, then walk the rest of the (cons-list) phi args
// checking that every select produces the same result.
1408                         bool     allSame = true;
1409                         ValueNum argRest = phiFuncApp.m_args[1];
1410                         ValueNum sameSelResult =
1411                             VNForMapSelectWork(vnk, typ, phiArgVN, arg1VN, pBudget, pUsedRecursiveVN);
1412                         while (allSame && argRest != ValueNumStore::NoVN)
1414                             ValueNum  cur = argRest;
1415                             VNFuncApp phiArgFuncApp;
1416                             if (GetVNFunc(argRest, &phiArgFuncApp) && phiArgFuncApp.m_func == VNF_Phi)
1418                                 cur     = phiArgFuncApp.m_args[0];
1419                                 argRest = phiArgFuncApp.m_args[1];
1423                                 argRest = ValueNumStore::NoVN; // Cause the loop to terminate.
1425                             assert(IsVNConstant(cur));
1426                             phiArgSsaNum = ConstantValue<unsigned>(cur);
1429                                 phiArgVN = m_pComp->GetHeapPerSsaData(phiArgSsaNum)->m_vnPair.Get(vnk);
1433                                 phiArgVN = m_pComp->lvaTable[lclNum].GetPerSsaData(phiArgSsaNum)->m_vnPair.Get(vnk);
1435                             if (phiArgVN == ValueNumStore::NoVN)
1441                                 bool     usedRecursiveVN = false;
1442                                 ValueNum curResult =
1443                                     VNForMapSelectWork(vnk, typ, phiArgVN, arg1VN, pBudget, &usedRecursiveVN);
1444                                 *pUsedRecursiveVN |= usedRecursiveVN;
1445                                 if (sameSelResult == ValueNumStore::RecursiveVN)
1447                                     sameSelResult = curResult;
1449                                 if (curResult != ValueNumStore::RecursiveVN && curResult != sameSelResult)
1455                         if (allSame && sameSelResult != ValueNumStore::RecursiveVN)
1457                             // Make sure we're popping what we pushed.
1458                             assert(FixedPointMapSelsTopHasValue(arg0VN, arg1VN));
1459                             m_fixedPointMapSels.Pop();
1461                             // To avoid exponential searches, we make sure that this result is memo-ized.
1462                             // The result is always valid for memoization if we didn't rely on RecursiveVN to get it.
1463                             // If RecursiveVN was used, we are processing a loop and we can't memo-ize this
1464                             // intermediate
1465                             // result if, e.g., this block is in a multi-entry loop.
1466                             if (!*pUsedRecursiveVN)
1467                                 GetVNFunc2Map()->Set(fstruct, sameSelResult);
1470                             return sameSelResult;
1472                         // Otherwise, fall through to creating the select(phi(m1, m2), x) function application.
1474                     // Make sure we're popping what we pushed.
1475                     assert(FixedPointMapSelsTopHasValue(arg0VN, arg1VN));
1476                     m_fixedPointMapSels.Pop();
1481         // Otherwise, assign a new VN for the function application.
1482         Chunk*   c                                                     = GetAllocChunk(typ, CEA_Func2);
1483         unsigned offsetWithinChunk                                     = c->AllocVN();
1484         res                                                            = c->m_baseVN + offsetWithinChunk;
1485         reinterpret_cast<VNDefFunc2Arg*>(c->m_defs)[offsetWithinChunk] = fstruct;
1486         GetVNFunc2Map()->Set(fstruct, res);
// Constant-fold a unary func applied to a constant VN, dispatching on the VN's type.
// Handle-typed constants stay handles (same flags) after the op; the only REF
// constant is null, and the only foldable func on it is GT_ARR_LENGTH, which
// yields void plus a NullPtrExc exception set.
1491 ValueNum ValueNumStore::EvalFuncForConstantArgs(var_types typ, VNFunc func, ValueNum arg0VN)
1493     assert(CanEvalForConstantArgs(func));
1494     assert(IsVNConstant(arg0VN));
1495     switch (TypeOfVN(arg0VN))
1499             int resVal = EvalOp(func, ConstantValue<int>(arg0VN));
1500             // Unary op on a handle results in a handle.
1501             return IsVNHandle(arg0VN) ? VNForHandle(ssize_t(resVal), GetHandleFlags(arg0VN)) : VNForIntCon(resVal);
1505             INT64 resVal = EvalOp(func, ConstantValue<INT64>(arg0VN));
1506             // Unary op on a handle results in a handle.
1507             return IsVNHandle(arg0VN) ? VNForHandle(ssize_t(resVal), GetHandleFlags(arg0VN)) : VNForLongCon(resVal);
1510             return VNForFloatCon(EvalOp(func, ConstantValue<float>(arg0VN)));
1512             return VNForDoubleCon(EvalOp(func, ConstantValue<double>(arg0VN)));
1514             // If arg0 has a possible exception, it wouldn't have been constant.
1515             assert(!VNHasExc(arg0VN));
1517             assert(arg0VN == VNForNull());         // Only other REF constant.
1518             assert(func == VNFunc(GT_ARR_LENGTH)); // Only function we can apply to a REF constant!
1519             return VNWithExc(VNForVoid(), VNExcSetSingleton(VNForFunc(TYP_REF, VNF_NullPtrExc, VNForNull())));
// Returns true iff a MapSelect(map, ind) evaluation is already in progress,
// i.e. the (map, ind) pair appears anywhere on the m_fixedPointMapSels stack.
// Used to detect fixed-point recursion while reducing select-over-phi.
1525 bool ValueNumStore::SelectIsBeingEvaluatedRecursively(ValueNum map, ValueNum ind)
1527 for (unsigned i = 0; i < m_fixedPointMapSels.Size(); i++)
1529 VNDefFunc2Arg& elem = m_fixedPointMapSels.GetRef(i);
// Only MapSelect applications are ever pushed on this stack.
1530 assert(elem.m_func == VNF_MapSelect);
1531 if (elem.m_arg0 == map && elem.m_arg1 == ind)
// Debug helper: true iff the top of the fixed-point MapSelect stack is
// exactly MapSelect(map, index). Used to assert push/pop pairing.
1540 bool ValueNumStore::FixedPointMapSelsTopHasValue(ValueNum map, ValueNum index)
// An empty stack trivially cannot have the value on top.
1542 if (m_fixedPointMapSels.Size() == 0)
1546 VNDefFunc2Arg& top = m_fixedPointMapSels.TopRef();
1547 return top.m_func == VNF_MapSelect && top.m_arg0 == map && top.m_arg1 == index;
1551 // Given an integer constant value number return its value as an int.
// Handles TYP_INT directly; on 32-bit targets ref/byref constants (stored as
// size_t) are also representable as int and are narrowed here.
1553 int ValueNumStore::GetConstantInt32(ValueNum argVN)
1555 assert(IsVNConstant(argVN));
1556 var_types argVNtyp = TypeOfVN(argVN);
1563 result = ConstantValue<int>(argVN);
// On 32-bit targets a ref/byref constant fits in 32 bits.
1565 #ifndef _TARGET_64BIT_
1568 result = (int)ConstantValue<size_t>(argVN);
1577 // Given an integer constant value number return its value as an INT64.
// Widens TYP_INT, reads TYP_LONG directly, and converts ref/byref constants
// (represented internally as size_t) to a 64-bit integer.
1579 INT64 ValueNumStore::GetConstantInt64(ValueNum argVN)
1581 assert(IsVNConstant(argVN));
1582 var_types argVNtyp = TypeOfVN(argVN);
// TYP_INT: sign-extend to 64 bits.
1589 result = (INT64)ConstantValue<int>(argVN);
1592 result = ConstantValue<INT64>(argVN);
// TYP_REF / TYP_BYREF constants are stored as size_t.
1596 result = (INT64)ConstantValue<size_t>(argVN);
1604 // Given a float or a double constant value number return its value as a double.
// A float constant is widened to double; a double constant is returned as-is.
1606 double ValueNumStore::GetConstantDouble(ValueNum argVN)
1608 assert(IsVNConstant(argVN));
1609 var_types argVNtyp = TypeOfVN(argVN);
1616 result = (double)ConstantValue<float>(argVN);
1619 result = ConstantValue<double>(argVN);
1627 // Compute the proper value number when the VNFunc has all constant arguments
1628 // This essentially performs constant folding at value numbering time
// Dispatch order: VNF_Cast is handled first; a TYP_BYREF result is not folded;
// two floating-point operands defer to EvalFuncForConstantFPArgs. The remaining
// integral cases are split on whether the two operand types match, and handle
// comparisons (always TYP_INT results), handle-preserving arithmetic, and
// potential exception sets produced by EvalOp (e.g. division).
1630 ValueNum ValueNumStore::EvalFuncForConstantArgs(var_types typ, VNFunc func, ValueNum arg0VN, ValueNum arg1VN)
1632 assert(CanEvalForConstantArgs(func));
1633 assert(IsVNConstant(arg0VN) && IsVNConstant(arg1VN));
1634 assert(!VNHasExc(arg0VN) && !VNHasExc(arg1VN)); // Otherwise, would not be constant.
1636 // if our func is the VNF_Cast operation we handle it first
1637 if (func == VNF_Cast)
1639 return EvalCastForConstantArgs(typ, func, arg0VN, arg1VN);
1642 if (typ == TYP_BYREF)
1644 // We don't want to fold expressions that produce TYP_BYREF
1648 var_types arg0VNtyp = TypeOfVN(arg0VN);
1649 var_types arg1VNtyp = TypeOfVN(arg1VN);
1651 // When both arguments are floating point types
1652 // We defer to the EvalFuncForConstantFPArgs()
1653 if (varTypeIsFloating(arg0VNtyp) && varTypeIsFloating(arg1VNtyp))
1655 return EvalFuncForConstantFPArgs(typ, func, arg0VN, arg1VN);
1658 // after this we shouldn't have to deal with floating point types for arg0VN or arg1VN
1659 assert(!varTypeIsFloating(arg0VNtyp));
1660 assert(!varTypeIsFloating(arg1VNtyp));
1662 // Stack-normalize the result type.
1663 if (varTypeIsSmall(typ))
1668 ValueNum result; // left uninitialized, we are required to initialize it on all paths below.
// excSet accumulates any exception (e.g. DivideByZero) raised while folding.
1669 ValueNum excSet = VNForEmptyExcSet();
1671 // Are both args of the same type?
1672 if (arg0VNtyp == arg1VNtyp)
1674 if (arg0VNtyp == TYP_INT)
1676 int arg0Val = ConstantValue<int>(arg0VN);
1677 int arg1Val = ConstantValue<int>(arg1VN);
1679 assert(typ == TYP_INT);
1680 int resultVal = EvalOp(func, arg0Val, arg1Val, &excSet);
1681 // Bin op on a handle results in a handle.
1682 ValueNum handleVN = IsVNHandle(arg0VN) ? arg0VN : IsVNHandle(arg1VN) ? arg1VN : NoVN;
1683 ValueNum resultVN = (handleVN != NoVN)
1684 ? VNForHandle(ssize_t(resultVal), GetHandleFlags(handleVN)) // Use VN for Handle
1685 : VNForIntCon(resultVal);
1686 result = VNWithExc(resultVN, excSet);
1688 else if (arg0VNtyp == TYP_LONG)
1690 INT64 arg0Val = ConstantValue<INT64>(arg0VN);
1691 INT64 arg1Val = ConstantValue<INT64>(arg1VN);
// A comparison of two longs still yields a TYP_INT (0/1) result.
1693 if (VNFuncIsComparison(func))
1695 assert(typ == TYP_INT);
1696 result = VNForIntCon(EvalComparison(func, arg0Val, arg1Val));
1700 assert(typ == TYP_LONG);
1701 INT64 resultVal = EvalOp(func, arg0Val, arg1Val, &excSet);
1702 ValueNum handleVN = IsVNHandle(arg0VN) ? arg0VN : IsVNHandle(arg1VN) ? arg1VN : NoVN;
1703 ValueNum resultVN = (handleVN != NoVN)
1704 ? VNForHandle(ssize_t(resultVal), GetHandleFlags(handleVN)) // Use VN for Handle
1705 : VNForLongCon(resultVal);
1706 result = VNWithExc(resultVN, excSet);
1709 else // both args are TYP_REF or both args are TYP_BYREF
1711 INT64 arg0Val = ConstantValue<size_t>(arg0VN); // We represent ref/byref constants as size_t's.
1712 INT64 arg1Val = ConstantValue<size_t>(arg1VN); // Also we consider null to be zero.
1714 if (VNFuncIsComparison(func))
1716 assert(typ == TYP_INT);
1717 result = VNForIntCon(EvalComparison(func, arg0Val, arg1Val));
1719 else if (typ == TYP_INT) // We could see GT_OR of a constant ByRef and Null
1721 int resultVal = (int)EvalOp(func, arg0Val, arg1Val, &excSet);
1722 result = VNWithExc(VNForIntCon(resultVal), excSet);
1724 else // We could see GT_OR of a constant ByRef and Null
1726 assert((typ == TYP_BYREF) || (typ == TYP_LONG));
1727 INT64 resultVal = EvalOp(func, arg0Val, arg1Val, &excSet);
1728 result = VNWithExc(VNForByrefCon(resultVal), excSet);
1732 else // We have args of different types
1734 // We represent ref/byref constants as size_t's.
1735 // Also we consider null to be zero.
// Mixed operand types: normalize both to INT64 and fold in 64 bits.
1737 INT64 arg0Val = GetConstantInt64(arg0VN);
1738 INT64 arg1Val = GetConstantInt64(arg1VN);
1740 if (VNFuncIsComparison(func))
1742 assert(typ == TYP_INT);
1743 result = VNForIntCon(EvalComparison(func, arg0Val, arg1Val));
1745 else if (typ == TYP_INT) // We could see GT_OR of an int and constant ByRef or Null
1747 int resultVal = (int)EvalOp(func, arg0Val, arg1Val, &excSet);
1748 result = VNWithExc(VNForIntCon(resultVal), excSet);
1752 assert(typ != TYP_INT);
1753 ValueNum resultValx = VNForEmptyExcSet();
1754 INT64 resultVal = EvalOp(func, arg0Val, arg1Val, &resultValx);
1756 // check for the Exception case
// If folding raised an exception, the value part is "void" with the exc set attached.
1757 if (resultValx != VNForEmptyExcSet())
1759 result = VNWithExc(VNForVoid(), resultValx);
// Otherwise wrap the raw 64-bit result in a constant VN of the proper kind.
1766 result = VNForByrefCon(resultVal);
1769 result = VNForLongCon(resultVal);
1772 assert(resultVal == 0); // Only valid REF constant
1773 result = VNForNull();
1785 // Compute the proper value number when the VNFunc has all constant floating-point arguments
1786 // This essentially must perform constant folding at value numbering time
// Both operands are widened to double (GetConstantDouble) before folding.
// Comparisons yield a TYP_INT constant; arithmetic is computed in double and
// narrowed to float when the result type is TYP_FLOAT.
1788 ValueNum ValueNumStore::EvalFuncForConstantFPArgs(var_types typ, VNFunc func, ValueNum arg0VN, ValueNum arg1VN)
1790 assert(CanEvalForConstantArgs(func));
1791 assert(IsVNConstant(arg0VN) && IsVNConstant(arg1VN));
1793 // We expect both argument types to be floating point types
1794 var_types arg0VNtyp = TypeOfVN(arg0VN);
1795 var_types arg1VNtyp = TypeOfVN(arg1VN);
1797 assert(varTypeIsFloating(arg0VNtyp));
1798 assert(varTypeIsFloating(arg1VNtyp));
1800 double arg0Val = GetConstantDouble(arg0VN);
1801 double arg1Val = GetConstantDouble(arg1VN);
1803 ValueNum result; // left uninitialized, we are required to initialize it on all paths below.
1805 if (VNFuncIsComparison(func))
1807 assert(genActualType(typ) == TYP_INT);
1808 result = VNForIntCon(EvalComparison(func, arg0Val, arg1Val));
1812 assert(varTypeIsFloating(typ)); // We must be computing a floating point result
1814 // We always compute the result using a double
1815 ValueNum exception = VNForEmptyExcSet();
1816 double doubleResultVal = EvalOp(func, arg0Val, arg1Val, &exception);
1817 assert(exception == VNForEmptyExcSet()); // Floating point ops don't throw.
1819 if (typ == TYP_FLOAT)
// Narrow the double result to float precision for a TYP_FLOAT result.
1821 float floatResultVal = float(doubleResultVal);
1822 result = VNForFloatCon(floatResultVal);
1826 assert(typ == TYP_DOUBLE);
1827 result = VNForDoubleCon(doubleResultVal);
1834 // Compute the proper value number for a VNF_Cast with constant arguments
1835 // This essentially must perform constant folding at value numbering time
// arg0VN is the constant being cast; arg1VN is an int constant that packs the
// cast-to type plus an "unsigned source" flag (the vnForCastOper encoding:
// low bit VCA_UnsignedSrc, remaining bits the target var_types). The big
// switch dispatches on the source type, then (in elided inner switches) on
// the target type. NOTE(review): case labels/braces are elided in this excerpt.
1837 ValueNum ValueNumStore::EvalCastForConstantArgs(var_types typ, VNFunc func, ValueNum arg0VN, ValueNum arg1VN)
1839 assert(func == VNF_Cast);
1840 assert(IsVNConstant(arg0VN) && IsVNConstant(arg1VN));
1842 // Stack-normalize the result type.
1843 if (varTypeIsSmall(typ))
1848 var_types arg0VNtyp = TypeOfVN(arg0VN);
1849 var_types arg1VNtyp = TypeOfVN(arg1VN);
1851 // arg1VN is really the gtCastType that we are casting to
1852 assert(arg1VNtyp == TYP_INT);
1853 int arg1Val = ConstantValue<int>(arg1VN);
1854 assert(arg1Val >= 0);
1856 if (IsVNHandle(arg0VN))
1858 // We don't allow handles to be cast to random var_types.
1859 assert(typ == TYP_I_IMPL);
1862 // We previously encoded the castToType operation using vnForCastOper()
// Unpack the encoded cast descriptor: unsigned-source flag + target type.
1864 bool srcIsUnsigned = ((arg1Val & INT32(VCA_UnsignedSrc)) != 0);
1865 var_types castToType = var_types(arg1Val >> INT32(VCA_BitCount));
1867 var_types castFromType = arg0VNtyp;
1869 switch (castFromType) // GT_CAST source type
// --- Source is a 32-bit integer (on 32-bit targets also covers byref/ref) ---
1871 #ifndef _TARGET_64BIT_
1876 int arg0Val = GetConstantInt32(arg0VN);
// Narrowing casts to small int types: truncate and re-widen per target signedness.
1881 assert(typ == TYP_INT);
1882 return VNForIntCon(INT8(arg0Val));
1885 assert(typ == TYP_INT);
1886 return VNForIntCon(UINT8(arg0Val));
1888 assert(typ == TYP_INT);
1889 return VNForIntCon(INT16(arg0Val));
1892 assert(typ == TYP_INT);
1893 return VNForIntCon(UINT16(arg0Val));
1896 assert(typ == TYP_INT);
1900 assert(!IsVNHandle(arg0VN));
1901 #ifdef _TARGET_64BIT_
1902 if (typ == TYP_LONG)
// Widen to 64 bits: zero-extend when the source is unsigned, else sign-extend.
1906 return VNForLongCon(INT64(unsigned(arg0Val)));
1910 return VNForLongCon(INT64(arg0Val));
1915 assert(typ == TYP_BYREF);
1918 return VNForByrefCon(INT64(unsigned(arg0Val)));
1922 return VNForByrefCon(INT64(arg0Val));
1925 #else // TARGET_32BIT
1927 return VNForLongCon(INT64(unsigned(arg0Val)));
1929 return VNForLongCon(INT64(arg0Val));
// Int -> floating point: respect source signedness.
1932 assert(typ == TYP_FLOAT);
1935 return VNForFloatCon(float(unsigned(arg0Val)));
1939 return VNForFloatCon(float(arg0Val));
1942 assert(typ == TYP_DOUBLE);
1945 return VNForDoubleCon(double(unsigned(arg0Val)));
1949 return VNForDoubleCon(double(arg0Val));
// --- Source is a 64-bit integer (on 64-bit targets also covers byref/ref) ---
1957 #ifdef _TARGET_64BIT_
1961 INT64 arg0Val = GetConstantInt64(arg0VN);
1966 assert(typ == TYP_INT);
1967 return VNForIntCon(INT8(arg0Val));
1970 assert(typ == TYP_INT);
1971 return VNForIntCon(UINT8(arg0Val));
1973 assert(typ == TYP_INT);
1974 return VNForIntCon(INT16(arg0Val));
1977 assert(typ == TYP_INT);
1978 return VNForIntCon(UINT16(arg0Val));
1980 assert(typ == TYP_INT);
1981 return VNForIntCon(INT32(arg0Val));
1983 assert(typ == TYP_INT);
1984 return VNForIntCon(UINT32(arg0Val));
1987 assert(typ == TYP_LONG);
// Long -> floating point: unsigned sources use the dedicated UInt64 helpers
// (plain C++ conversion from UINT64 is implementation-sensitive).
1990 assert(typ == TYP_FLOAT);
1993 return VNForFloatCon(FloatingPointUtils::convertUInt64ToFloat(UINT64(arg0Val)));
1997 return VNForFloatCon(float(arg0Val));
2000 assert(typ == TYP_DOUBLE);
2003 return VNForDoubleCon(FloatingPointUtils::convertUInt64ToDouble(UINT64(arg0Val)));
2007 return VNForDoubleCon(double(arg0Val));
// --- Source is floating point: widen to double, then convert per target type ---
2016 double arg0Val = GetConstantDouble(arg0VN);
2021 assert(typ == TYP_INT);
2022 return VNForIntCon(INT8(arg0Val));
2025 assert(typ == TYP_INT);
2026 return VNForIntCon(UINT8(arg0Val));
2028 assert(typ == TYP_INT);
2029 return VNForIntCon(INT16(arg0Val));
2032 assert(typ == TYP_INT);
2033 return VNForIntCon(UINT16(arg0Val));
2035 assert(typ == TYP_INT);
2036 return VNForIntCon(INT32(arg0Val));
2038 assert(typ == TYP_INT);
2039 return VNForIntCon(UINT32(arg0Val));
2041 assert(typ == TYP_LONG);
2042 return VNForLongCon(INT64(arg0Val));
2044 assert(typ == TYP_LONG);
2045 return VNForLongCon(UINT64(arg0Val));
2047 assert(typ == TYP_FLOAT);
2048 return VNForFloatCon(float(arg0Val));
2050 assert(typ == TYP_DOUBLE);
2051 return VNForDoubleCon(arg0Val);
// Returns true iff the given VNFunc can be constant-folded when all its
// arguments are constants. genTreeOps-valued funcs are foldable by default,
// with a few explicit exclusions; of the VNF_-valued funcs only a small
// whitelist (e.g. VNF_Cast, VNF_ObjGetType) is foldable.
2061 bool ValueNumStore::CanEvalForConstantArgs(VNFunc vnf)
2063 if (vnf < VNF_Boundary)
2065 // We'll refine this as we get counterexamples. But to
2066 // a first approximation, VNFuncs that are genTreeOps should
2067 // be things we can evaluate.
2068 genTreeOps oper = genTreeOps(vnf);
2069 // Some exceptions...
2072 case GT_MKREFANY: // We can't evaluate these.
2079 // should be rare, not worth the complexity and risk of getting it wrong
2087 // some VNF_ that we can evaluate
2090 case VNF_Cast: // We can evaluate these.
2092 case VNF_ObjGetType:
// Returns the arity (number of arguments) of the given VNFunc, read from the
// packed attribute table s_vnfOpAttribs.
2100 unsigned ValueNumStore::VNFuncArity(VNFunc vnf)
2102 // Read the bit field out of the table...
2103 return (s_vnfOpAttribs[vnf] & VNFOA_ArityMask) >> VNFOA_ArityShift;
// IsOverflowIntDiv: true iff dividing v0 by v1 overflows, i.e. the single
// overflowing case MIN_VALUE / -1 (whose result is not representable).
// The generic template overload is the catch-all for other (e.g. unsigned)
// types, where division cannot overflow.
2107 bool ValueNumStore::IsOverflowIntDiv(int v0, int v1)
2109 return (v1 == -1) && (v0 == INT32_MIN);
2112 bool ValueNumStore::IsOverflowIntDiv(INT64 v0, INT64 v1)
2114 return (v1 == -1) && (v0 == INT64_MIN);
2116 template <typename T>
2117 bool ValueNumStore::IsOverflowIntDiv(T v0, T v1)
// IsIntZero: true iff the integral value is zero. Overloads cover the
// signed/unsigned 32- and 64-bit cases; the generic template overload is the
// catch-all for non-integral types.
2123 bool ValueNumStore::IsIntZero(int v)
2128 bool ValueNumStore::IsIntZero(unsigned v)
2133 bool ValueNumStore::IsIntZero(INT64 v)
2138 bool ValueNumStore::IsIntZero(UINT64 v)
2142 template <typename T>
2143 bool ValueNumStore::IsIntZero(T v)
// EvalOpIntegral specializations for floating-point types.
// The unary forms are unreachable (no integral-only unary op applies to
// float/double) and simply assert. The binary forms exist solely to fold
// GT_MOD on floating constants via fmodf/fmod; any other oper asserts.
2149 float ValueNumStore::EvalOpIntegral<float>(VNFunc vnf, float v0)
2151 assert(!"EvalOpIntegral<float>");
2156 double ValueNumStore::EvalOpIntegral<double>(VNFunc vnf, double v0)
2158 assert(!"EvalOpIntegral<double>");
2163 float ValueNumStore::EvalOpIntegral<float>(VNFunc vnf, float v0, float v1, ValueNum* pExcSet)
2165 genTreeOps oper = genTreeOps(vnf);
// Floating-point modulus: fold with the C runtime's fmodf.
2169 return fmodf(v0, v1);
2171 // For any other values of 'oper', we will assert and return 0.0f
2174 assert(!"EvalOpIntegral<float> with pExcSet");
2179 double ValueNumStore::EvalOpIntegral<double>(VNFunc vnf, double v0, double v1, ValueNum* pExcSet)
2181 genTreeOps oper = genTreeOps(vnf);
// Floating-point modulus: fold with the C runtime's fmod.
2185 return fmod(v0, v1);
2187 // For any other value of 'oper', we will assert and return 0.0
2190 assert(!"EvalOpIntegral<double> with pExcSet");
// VNForFunc (3-arg): returns the value number for func(arg0, arg1, arg2),
// hash-consing through the Func3 map so identical applications share a VN.
// Argument VNs must be exception-free ("normal" values) — except for
// VNF_PhiDef, whose first two arguments are plain local/ssa numbers, not VNs.
2194 ValueNum ValueNumStore::VNForFunc(var_types typ, VNFunc func, ValueNum arg0VN, ValueNum arg1VN, ValueNum arg2VN)
2196 assert(arg0VN != NoVN);
2197 assert(arg1VN != NoVN);
2198 assert(arg2VN != NoVN);
2199 assert(VNFuncArity(func) == 3);
2201 // Function arguments carry no exceptions.
2202 CLANG_FORMAT_COMMENT_ANCHOR;
2205 if (func != VNF_PhiDef)
2207 // For a phi definition first and second argument are "plain" local/ssa numbers.
2208 // (I don't know if having such non-VN arguments to a VN function is a good idea -- if we wanted to declare
2209 // ValueNum to be "short" it would be a problem, for example. But we'll leave it for now, with these explicit
2211 assert(arg0VN == VNNormVal(arg0VN));
2212 assert(arg1VN == VNNormVal(arg1VN));
2214 assert(arg2VN == VNNormVal(arg2VN));
2217 assert(VNFuncArity(func) == 3);
// Look up an existing VN for this exact application; allocate one if absent.
2220 VNDefFunc3Arg fstruct(func, arg0VN, arg1VN, arg2VN);
2221 if (GetVNFunc3Map()->Lookup(fstruct, &res))
2227 Chunk* c = GetAllocChunk(typ, CEA_Func3);
2228 unsigned offsetWithinChunk = c->AllocVN();
2229 res = c->m_baseVN + offsetWithinChunk;
// Record the definition in the chunk and memoize the mapping.
2230 reinterpret_cast<VNDefFunc3Arg*>(c->m_defs)[offsetWithinChunk] = fstruct;
2231 GetVNFunc3Map()->Set(fstruct, res);
// VNForFunc (4-arg): returns the value number for func(arg0..arg3),
// hash-consing through the Func4 map. All argument VNs must be
// exception-free "normal" values.
2236 ValueNum ValueNumStore::VNForFunc(
2237 var_types typ, VNFunc func, ValueNum arg0VN, ValueNum arg1VN, ValueNum arg2VN, ValueNum arg3VN)
2239 assert(arg0VN != NoVN && arg1VN != NoVN && arg2VN != NoVN && arg3VN != NoVN);
2240 // Function arguments carry no exceptions.
2241 assert(arg0VN == VNNormVal(arg0VN));
2242 assert(arg1VN == VNNormVal(arg1VN));
2243 assert(arg2VN == VNNormVal(arg2VN));
2244 assert(arg3VN == VNNormVal(arg3VN));
2245 assert(VNFuncArity(func) == 4);
// Look up an existing VN for this exact application; allocate one if absent.
2248 VNDefFunc4Arg fstruct(func, arg0VN, arg1VN, arg2VN, arg3VN);
2249 if (GetVNFunc4Map()->Lookup(fstruct, &res))
2255 Chunk* c = GetAllocChunk(typ, CEA_Func4);
2256 unsigned offsetWithinChunk = c->AllocVN();
2257 res = c->m_baseVN + offsetWithinChunk;
2258 reinterpret_cast<VNDefFunc4Arg*>(c->m_defs)[offsetWithinChunk] = fstruct;
2259 GetVNFunc4Map()->Set(fstruct, res);
2264 //------------------------------------------------------------------------
2265 // VNForExpr: Opaque value number that is equivalent to itself but unique
2266 // from all other value numbers.
2269 // block - BasicBlock where the expression that produces this value occurs.
2270 // May be nullptr to force conservative "could be anywhere" interpretation.
2271 // typ - Type of the expression in the IR
2274 // A new value number distinct from any previously generated, that compares as equal
2275 // to itself, but not any other value number, and is annotated with the given
2278 ValueNum ValueNumStore::VNForExpr(BasicBlock* block, var_types typ)
// The chunk is tagged with the block's innermost natural-loop number so later
// phases can reason about where the opaque value may vary; a null block maps
// to MAX_LOOP_NUM ("anywhere").
2280 BasicBlock::loopNumber loopNum;
2281 if (block == nullptr)
2283 loopNum = MAX_LOOP_NUM;
2287 loopNum = block->bbNatLoopNum;
2290 // We always allocate a new, unique VN in this call.
2291 // The 'typ' is used to partition the allocation of VNs into different chunks.
2292 Chunk* c = GetAllocChunk(typ, CEA_None, loopNum);
2293 unsigned offsetWithinChunk = c->AllocVN();
2294 ValueNum result = c->m_baseVN + offsetWithinChunk;
// VNApplySelectors: walk a field sequence, applying a MapSelect for each real
// field handle in turn, and return the VN of the selected element. Pseudo
// fields ("FirstElem"/"ConstantIndex") are skipped. If wbFinalStructSize is
// non-null it receives the size of the last (struct) field selected.
2298 ValueNum ValueNumStore::VNApplySelectors(ValueNumKind vnk,
2300 FieldSeqNode* fieldSeq,
2301 size_t* wbFinalStructSize)
// An empty field sequence selects nothing; (elided) return the map itself.
2303 if (fieldSeq == nullptr)
2309 assert(fieldSeq != FieldSeqStore::NotAField());
2311 // Skip any "FirstElem" pseudo-fields or any "ConstantIndex" pseudo-fields
2312 if (fieldSeq->IsPseudoField())
2314 return VNApplySelectors(vnk, map, fieldSeq->m_next, wbFinalStructSize);
2317 // Otherwise, is a real field handle.
2318 CORINFO_FIELD_HANDLE fldHnd = fieldSeq->m_fieldHnd;
2319 CORINFO_CLASS_HANDLE structHnd = NO_CLASS_HANDLE;
2320 ValueNum fldHndVN = VNForHandle(ssize_t(fldHnd), GTF_ICON_FIELD_HDL);
2321 noway_assert(fldHnd != nullptr);
// Ask the EE for the field's type (and struct class handle, if a struct).
2322 CorInfoType fieldCit = m_pComp->info.compCompHnd->getFieldType(fldHnd, &structHnd);
2323 var_types fieldType = JITtype2varType(fieldCit);
2325 size_t structSize = 0;
2326 if (varTypeIsStruct(fieldType))
2328 structSize = m_pComp->info.compCompHnd->getClassSize(structHnd);
2329 // We do not normalize the type field accesses during importation unless they
2330 // are used in a call, return or assignment.
2331 if ((fieldType == TYP_STRUCT) && (structSize <= m_pComp->largestEnregisterableStructSize()))
2333 fieldType = m_pComp->impNormStructType(structHnd);
2336 if (wbFinalStructSize != nullptr)
2338 *wbFinalStructSize = structSize;
// Debug-only trace of the selector being applied.
2342 if (m_pComp->verbose)
2344 printf(" VNApplySelectors:\n");
2345 const char* modName;
2346 const char* fldName = m_pComp->eeGetFieldName(fldHnd, &modName);
2347 printf(" VNForHandle(Fseq[%s]) is " STR_VN "%x, fieldType is %s", fldName, fldHndVN,
2348 varTypeName(fieldType));
2349 if (varTypeIsStruct(fieldType))
2351 printf(", size = %d", structSize);
// Recurse into the rest of the sequence, threading the selected map through.
2357 if (fieldSeq->m_next != nullptr)
2359 ValueNum newMap = VNForMapSelect(vnk, fieldType, map, fldHndVN);
2360 return VNApplySelectors(vnk, newMap, fieldSeq->m_next, wbFinalStructSize);
2362 else // end of fieldSeq
2364 return VNForMapSelect(vnk, fieldType, map, fldHndVN);
// VNApplySelectorsTypeCheck: reconcile the type of a selected element VN with
// the type of the indirection reading it. Compatible reads pass through (or
// get a VNF_Cast); incompatible reads (wider than the element, or struct
// reads) get a fresh opaque VN so no bogus equality is inferred.
2369 ValueNum ValueNumStore::VNApplySelectorsTypeCheck(ValueNum elem, var_types indType, size_t elemStructSize)
2371 var_types elemTyp = TypeOfVN(elem);
2373 // Check if the elemTyp is matching/compatible
2375 if (indType != elemTyp)
2377 bool isConstant = IsVNConstant(elem);
2378 if (isConstant && (elemTyp == genActualType(indType)))
2380 // (i.e. We recorded a constant of TYP_INT for a TYP_BYTE field)
2384 // We are trying to read from an 'elem' of type 'elemType' using 'indType' read
// Compare sizes to classify the mismatch; structs use the caller-supplied size.
2386 size_t elemTypSize = (elemTyp == TYP_STRUCT) ? elemStructSize : genTypeSize(elemTyp);
2387 size_t indTypeSize = genTypeSize(indType);
2389 if ((indType == TYP_REF) && (varTypeIsStruct(elemTyp)))
2391 // indType is TYP_REF and elemTyp is TYP_STRUCT
2393 // We have a pointer to a static that is a Boxed Struct
2397 else if (indTypeSize > elemTypSize)
2399 // Reading beyong the end of 'elem'
2401 // return a new unique value number
2402 elem = VNForExpr(nullptr, indType);
2403 JITDUMP(" *** Mismatched types in VNApplySelectorsTypeCheck (reading beyond the end)\n");
2405 else if (varTypeIsStruct(indType))
2407 // indType is TYP_STRUCT
2409 // return a new unique value number
2410 elem = VNForExpr(nullptr, indType);
2411 JITDUMP(" *** Mismatched types in VNApplySelectorsTypeCheck (indType is TYP_STRUCT)\n");
2415 // We are trying to read an 'elem' of type 'elemType' using 'indType' read
2417 // insert a cast of elem to 'indType'
2418 elem = VNForCast(elem, indType, elemTyp);
// VNApplySelectorsAssignTypeCoerce: the store-side counterpart of
// VNApplySelectorsTypeCheck. Coerces the VN of a value being stored to the
// indirection's type: compatible stores pass through or get a VNF_Cast;
// struct stores get a fresh opaque VN (annotated with the store's block).
2425 ValueNum ValueNumStore::VNApplySelectorsAssignTypeCoerce(ValueNum elem, var_types indType, BasicBlock* block)
2427 var_types elemTyp = TypeOfVN(elem);
2429 // Check if the elemTyp is matching/compatible
2431 if (indType != elemTyp)
2433 bool isConstant = IsVNConstant(elem);
2434 if (isConstant && (elemTyp == genActualType(indType)))
2436 // (i.e. We recorded a constant of TYP_INT for a TYP_BYTE field)
2440 // We are trying to write an 'elem' of type 'elemType' using 'indType' store
2442 if (varTypeIsStruct(indType))
2444 // return a new unique value number
2445 elem = VNForExpr(block, indType);
2446 JITDUMP(" *** Mismatched types in VNApplySelectorsAssignTypeCoerce (indType is TYP_STRUCT)\n");
2450 // We are trying to write an 'elem' of type 'elemType' using 'indType' store
2452 // insert a cast of elem to 'indType'
2453 elem = VNForCast(elem, indType, elemTyp);
2460 //------------------------------------------------------------------------
2461 // VNApplySelectorsAssign: Compute the value number corresponding to "map" but with
2462 // the element at "fieldSeq" updated to have type "elem"; this is the new heap
2463 // value for an assignment of value "elem" into the heap at location "fieldSeq"
2464 // that occurs in block "block" and has type "indType".
2467 // vnk - Identifies whether to recurse to Conservative or Liberal value numbers
2468 // when recursing through phis
2469 // map - Value number for the field map before the assignment
2470 // elem - Value number for the value being stored (to the given field)
2471 // indType - Type of the indirection storing the value to the field
2472 // block - Block where the assignment occurs
2475 // The value number corresopnding to the heap after the assignment.
2477 ValueNum ValueNumStore::VNApplySelectorsAssign(
2478 ValueNumKind vnk, ValueNum map, FieldSeqNode* fieldSeq, ValueNum elem, var_types indType, BasicBlock* block)
// Base case: no more selectors — coerce the stored value to the indirection type.
2480 if (fieldSeq == nullptr)
2482 return VNApplySelectorsAssignTypeCoerce(elem, indType, block);
2486 assert(fieldSeq != FieldSeqStore::NotAField());
2488 // Skip any "FirstElem" pseudo-fields or any "ConstantIndex" pseudo-fields
2489 // These will occur, at least, in struct static expressions, for method table offsets.
2490 if (fieldSeq->IsPseudoField())
2492 return VNApplySelectorsAssign(vnk, map, fieldSeq->m_next, elem, indType, block);
2495 // Otherwise, fldHnd is a real field handle.
2496 CORINFO_FIELD_HANDLE fldHnd = fieldSeq->m_fieldHnd;
2497 CORINFO_CLASS_HANDLE structType = nullptr;
2498 noway_assert(fldHnd != nullptr);
2499 CorInfoType fieldCit = m_pComp->info.compCompHnd->getFieldType(fldHnd, &structType);
2500 var_types fieldType = JITtype2varType(fieldCit);
2502 ValueNum fieldHndVN = VNForHandle(ssize_t(fldHnd), GTF_ICON_FIELD_HDL);
// Debug-only trace of the selector being stored through.
2505 if (m_pComp->verbose)
2507 printf(" fieldHnd " STR_VN "%x is ", fieldHndVN);
2508 vnDump(m_pComp, fieldHndVN);
2511 ValueNum seqNextVN = VNForFieldSeq(fieldSeq->m_next);
2512 ValueNum fieldSeqVN = VNForFunc(TYP_REF, VNF_FieldSeq, fieldHndVN, seqNextVN);
2514 printf(" fieldSeq " STR_VN "%x is ", fieldSeqVN);
2515 vnDump(m_pComp, fieldSeqVN);
// Recurse: select the sub-map for this field, update it, then re-store it
// into the enclosing map via MapStore.
2521 if (fieldSeq->m_next)
2523 ValueNum fseqMap = VNForMapSelect(vnk, fieldType, map, fieldHndVN);
2524 elemAfter = VNApplySelectorsAssign(vnk, fseqMap, fieldSeq->m_next, elem, indType, block);
2528 elemAfter = VNApplySelectorsAssignTypeCoerce(elem, indType, block);
2531 ValueNum newMap = VNForMapStore(fieldType, map, fieldHndVN, elemAfter);
// VNPairApplySelectors: apply a field sequence to both halves of a VN pair,
// running the liberal and conservative maps through VNApplySelectors and the
// read-side type check independently, and return the resulting pair.
2536 ValueNumPair ValueNumStore::VNPairApplySelectors(ValueNumPair map, FieldSeqNode* fieldSeq, var_types indType)
2538 size_t structSize = 0;
2539 ValueNum liberalVN = VNApplySelectors(VNK_Liberal, map.GetLiberal(), fieldSeq, &structSize);
2540 liberalVN = VNApplySelectorsTypeCheck(liberalVN, indType, structSize);
2543 ValueNum conservVN = VNApplySelectors(VNK_Conservative, map.GetConservative(), fieldSeq, &structSize);
2544 conservVN = VNApplySelectorsTypeCheck(conservVN, indType, structSize);
2546 return ValueNumPair(liberalVN, conservVN);
// VNForFieldSeq: encode a FieldSeqNode list as a value number. nullptr and
// the NotAField sentinel map to distinguished VNs; otherwise the list becomes
// a recursive VNF_FieldSeq(fieldHandleVN, restOfSeqVN) application.
2549 ValueNum ValueNumStore::VNForFieldSeq(FieldSeqNode* fieldSeq)
2551 if (fieldSeq == nullptr)
2555 else if (fieldSeq == FieldSeqStore::NotAField())
2557 return VNForNotAField();
// General case: cons-cell encoding — handle VN plus the VN of the tail.
2561 ssize_t fieldHndVal = ssize_t(fieldSeq->m_fieldHnd);
2562 ValueNum fieldHndVN = VNForHandle(fieldHndVal, GTF_ICON_FIELD_HDL);
2563 ValueNum seqNextVN = VNForFieldSeq(fieldSeq->m_next);
2564 ValueNum fieldSeqVN = VNForFunc(TYP_REF, VNF_FieldSeq, fieldHndVN, seqNextVN);
// Debug-only trace of the constructed field-sequence VN.
2567 if (m_pComp->verbose)
2569 printf(" fieldHnd " STR_VN "%x is ", fieldHndVN);
2570 vnDump(m_pComp, fieldHndVN);
2573 printf(" fieldSeq " STR_VN "%x is ", fieldSeqVN);
2574 vnDump(m_pComp, fieldSeqVN);
// FieldSeqVNToFieldSeq: inverse of VNForFieldSeq — decode a field-sequence VN
// back into a FieldSeqNode list. The null VN decodes to the empty sequence,
// the NotAField VN to the sentinel; otherwise the VN must be a VNF_FieldSeq
// application, decoded recursively and rebuilt via the FieldSeqStore.
2583 FieldSeqNode* ValueNumStore::FieldSeqVNToFieldSeq(ValueNum vn)
2585 if (vn == VNForNull())
2589 else if (vn == VNForNotAField())
2591 return FieldSeqStore::NotAField()
2595 assert(IsVNFunc(vn));
2597 GetVNFunc(vn, &funcApp);
2598 assert(funcApp.m_func == VNF_FieldSeq);
// args[0] is the field handle constant; args[1] the VN of the tail sequence.
2599 ssize_t fieldHndVal = ConstantValue<ssize_t>(funcApp.m_args[0]);
2600 FieldSeqNode* head =
2601 m_pComp->GetFieldSeqStore()->CreateSingleton(reinterpret_cast<CORINFO_FIELD_HANDLE>(fieldHndVal));
2602 FieldSeqNode* tail = FieldSeqVNToFieldSeq(funcApp.m_args[1]);
2603 return m_pComp->GetFieldSeqStore()->Append(head, tail);
// FieldSeqVNAppend: concatenate two field-sequence VNs. Null (empty) on the
// left yields the right sequence; NotAField on either side poisons the whole
// result; otherwise rebuild the left list's cons cells with the appended tail.
2607 ValueNum ValueNumStore::FieldSeqVNAppend(ValueNum fsVN1, ValueNum fsVN2)
2609 if (fsVN1 == VNForNull())
2613 else if (fsVN1 == VNForNotAField() || fsVN2 == VNForNotAField())
2615 return VNForNotAField();
2619 assert(IsVNFunc(fsVN1));
2621 GetVNFunc(fsVN1, &funcApp1);
2622 assert(funcApp1.m_func == VNF_FieldSeq);
// Recursively append to the tail, then re-cons the head field handle.
2623 ValueNum tailRes = FieldSeqVNAppend(funcApp1.m_args[1], fsVN2);
2624 ValueNum fieldSeqVN = VNForFunc(TYP_REF, VNF_FieldSeq, funcApp1.m_args[0], tailRes);
// Debug-only trace of the appended field-sequence VN.
2627 if (m_pComp->verbose)
2629 printf(" fieldSeq " STR_VN "%x is ", fieldSeqVN);
2630 vnDump(m_pComp, fieldSeqVN);
// VNForPtrToLoc: build a VNF_PtrToLoc(lclVar, fieldSeq, discriminator) VN.
// Ordinary pointers use discriminator 0; NotAField sequences get a fresh
// counter value so two distinct not-a-field pointers never compare equal.
2639 ValueNum ValueNumStore::VNForPtrToLoc(var_types typ, ValueNum lclVarVN, ValueNum fieldSeqVN)
2641 if (fieldSeqVN == VNForNotAField())
2643 // To distinguish two different not a fields, append a unique value.
2644 return VNForFunc(typ, VNF_PtrToLoc, lclVarVN, fieldSeqVN, VNForIntCon(++m_uPtrToLocNotAFieldCount));
2646 return VNForFunc(typ, VNF_PtrToLoc, lclVarVN, fieldSeqVN, VNForIntCon(0));
// ExtendPtrVN (tree overload): if opB is an integer constant carrying a valid
// field sequence, extend opA's pointer VN by that sequence; otherwise
// (elided) returns NoVN.
2649 ValueNum ValueNumStore::ExtendPtrVN(GenTreePtr opA, GenTreePtr opB)
2651 if (opB->OperGet() == GT_CNS_INT)
2653 FieldSeqNode* fldSeq = opB->gtIntCon.gtFieldSeq;
2654 if ((fldSeq != nullptr) && (fldSeq != FieldSeqStore::NotAField()))
2656 return ExtendPtrVN(opA, opB->gtIntCon.gtFieldSeq);
// ExtendPtrVN (field-seq overload): if opA's liberal VN is a recognized
// pointer function (PtrToLoc / PtrToStatic / PtrToArrElem), return a new VN
// of the same kind with fldSeq appended to its field-sequence argument,
// re-attaching opA's exception set. Returns NoVN if opA is not recognized.
2662 ValueNum ValueNumStore::ExtendPtrVN(GenTreePtr opA, FieldSeqNode* fldSeq)
2664 ValueNum res = NoVN;
2665 assert(fldSeq != FieldSeqStore::NotAField());
2667 ValueNum opAvnWx = opA->gtVNPair.GetLiberal();
2668 assert(VNIsValid(opAvnWx));
// Separate opA's VN into its normal value and its exception set.
2670 ValueNum opAvnx = VNForEmptyExcSet();
2671 VNUnpackExc(opAvnWx, &opAvn, &opAvnx);
2672 assert(VNIsValid(opAvn) && VNIsValid(opAvnx));
2675 if (!GetVNFunc(opAvn, &funcApp))
2680 if (funcApp.m_func == VNF_PtrToLoc)
2683 // For PtrToLoc, lib == cons.
2684 VNFuncApp consFuncApp;
2685 assert(GetVNFunc(VNNormVal(opA->GetVN(VNK_Conservative)), &consFuncApp) && consFuncApp.Equals(funcApp));
2687 ValueNum fldSeqVN = VNForFieldSeq(fldSeq);
2688 res = VNForPtrToLoc(TYP_BYREF, funcApp.m_args[0], FieldSeqVNAppend(funcApp.m_args[1], fldSeqVN));
2690 else if (funcApp.m_func == VNF_PtrToStatic)
2692 ValueNum fldSeqVN = VNForFieldSeq(fldSeq);
2693 res = VNForFunc(TYP_BYREF, VNF_PtrToStatic, FieldSeqVNAppend(funcApp.m_args[0], fldSeqVN));
2695 else if (funcApp.m_func == VNF_PtrToArrElem)
2697 ValueNum fldSeqVN = VNForFieldSeq(fldSeq);
// PtrToArrElem carries (elemType, arr, index, fieldSeq); append to the last arg.
2698 res = VNForFunc(TYP_BYREF, VNF_PtrToArrElem, funcApp.m_args[0], funcApp.m_args[1], funcApp.m_args[2],
2699 FieldSeqVNAppend(funcApp.m_args[3], fldSeqVN));
// Re-attach the exception set stripped from opA's original VN.
2703 res = VNWithExc(res, opAvnx);
// fgValueNumberArrIndexAssign: update the heap VN for a store to arr[inx]
// (possibly through a field sequence within the element). The heap is modeled
// as nested maps: heap -> (per-elem-type map) -> (per-array map) -> (per-index
// value). A NotAField sequence or a type mismatch invalidates the whole
// per-elem-type map with a fresh opaque VN instead of a precise MapStore.
2708 void Compiler::fgValueNumberArrIndexAssign(CORINFO_CLASS_HANDLE elemTypeEq,
2711 FieldSeqNode* fldSeq,
2715 bool invalidateArray = false;
2716 ValueNum elemTypeEqVN = vnStore->VNForHandle(ssize_t(elemTypeEq), GTF_ICON_CLASS_HDL);
2717 var_types arrElemType = DecodeElemType(elemTypeEq);
// Select down the nested heap maps: heap[elemType][arr][inx].
2718 ValueNum hAtArrType = vnStore->VNForMapSelect(VNK_Liberal, TYP_REF, fgCurHeapVN, elemTypeEqVN);
2719 ValueNum hAtArrTypeAtArr = vnStore->VNForMapSelect(VNK_Liberal, TYP_REF, hAtArrType, arrVN);
2720 ValueNum hAtArrTypeAtArrAtInx = vnStore->VNForMapSelect(VNK_Liberal, arrElemType, hAtArrTypeAtArr, inxVN);
2722 ValueNum newValAtInx = ValueNumStore::NoVN;
2723 ValueNum newValAtArr = ValueNumStore::NoVN;
2724 ValueNum newValAtArrType = ValueNumStore::NoVN;
2726 if (fldSeq == FieldSeqStore::NotAField())
2728 // This doesn't represent a proper array access
2729 JITDUMP(" *** NotAField sequence encountered in fgValueNumberArrIndexAssign\n");
2731 // Store a new unique value for newValAtArrType
2732 newValAtArrType = vnStore->VNForExpr(compCurBB, TYP_REF);
2733 invalidateArray = true;
2737 // Note that this does the right thing if "fldSeq" is null -- returns last "rhs" argument.
2738 // This is the value that should be stored at "arr[inx]".
2740 vnStore->VNApplySelectorsAssign(VNK_Liberal, hAtArrTypeAtArrAtInx, fldSeq, rhsVN, indType, compCurBB);
2742 var_types arrElemFldType = arrElemType; // Uses arrElemType unless we has a non-null fldSeq
2743 if (vnStore->IsVNFunc(newValAtInx))
2746 vnStore->GetVNFunc(newValAtInx, &funcApp);
2747 if (funcApp.m_func == VNF_MapStore)
// A MapStore result means a field was stored; use its type for the check below.
2749 arrElemFldType = vnStore->TypeOfVN(newValAtInx);
2753 if (indType != arrElemFldType)
2755 // Mismatched types: Store between different types (indType into array of arrElemFldType)
2758 JITDUMP(" *** Mismatched types in fgValueNumberArrIndexAssign\n");
2760 // Store a new unique value for newValAtArrType
2761 newValAtArrType = vnStore->VNForExpr(compCurBB, TYP_REF);
2762 invalidateArray = true;
// Precise update path: rebuild the nested maps with MapStores.
2766 if (!invalidateArray)
2768 newValAtArr = vnStore->VNForMapStore(indType, hAtArrTypeAtArr, inxVN, newValAtInx);
2769 newValAtArrType = vnStore->VNForMapStore(TYP_REF, hAtArrType, arrVN, newValAtArr);
// Debug-only trace of the map selections/stores performed above.
2775 printf(" hAtArrType " STR_VN "%x is MapSelect(curHeap(" STR_VN "%x), ", hAtArrType, fgCurHeapVN);
2777 if (arrElemType == TYP_STRUCT)
2779 printf("%s[]).\n", eeGetClassName(elemTypeEq));
2783 printf("%s[]).\n", varTypeName(arrElemType));
2785 printf(" hAtArrTypeAtArr " STR_VN "%x is MapSelect(hAtArrType(" STR_VN "%x), arr=" STR_VN "%x)\n",
2786 hAtArrTypeAtArr, hAtArrType, arrVN);
2787 printf(" hAtArrTypeAtArrAtInx " STR_VN "%x is MapSelect(hAtArrTypeAtArr(" STR_VN "%x), inx=" STR_VN "%x):%s\n",
2788 hAtArrTypeAtArrAtInx, hAtArrTypeAtArr, inxVN, varTypeName(arrElemType));
2790 if (!invalidateArray)
2792 printf(" newValAtInd " STR_VN "%x is ", newValAtInx);
2793 vnStore->vnDump(this, newValAtInx);
2796 printf(" newValAtArr " STR_VN "%x is ", newValAtArr);
2797 vnStore->vnDump(this, newValAtArr);
2801 printf(" newValAtArrType " STR_VN "%x is ", newValAtArrType);
2802 vnStore->vnDump(this, newValAtArrType);
2805 printf(" fgCurHeapVN assigned:\n");
2809 // bbHeapDef must be set to true for any block that Mutates the global Heap
2810 assert(compCurBB->bbHeapDef);
// Commit: the current heap VN becomes heap[elemType := newValAtArrType].
2812 fgCurHeapVN = vnStore->VNForMapStore(TYP_REF, fgCurHeapVN, elemTypeEqVN, newValAtArrType);
// fgValueNumberArrIndexVal (wrapper): unpack a VNF_PtrToArrElem application —
// args are (elemTypeEq handle, arrVN, inxVN, fieldSeqVN) — and forward to the
// full overload.
2815 ValueNum Compiler::fgValueNumberArrIndexVal(GenTreePtr tree, VNFuncApp* pFuncApp, ValueNum addrXvn)
2817 assert(vnStore->IsVNHandle(pFuncApp->m_args[0]));
2818 CORINFO_CLASS_HANDLE arrElemTypeEQ = CORINFO_CLASS_HANDLE(vnStore->ConstantValue<ssize_t>(pFuncApp->m_args[0]));
2819 ValueNum arrVN = pFuncApp->m_args[1];
2820 ValueNum inxVN = pFuncApp->m_args[2];
2821 FieldSeqNode* fldSeq = vnStore->FieldSeqVNToFieldSeq(pFuncApp->m_args[3]);
2822 return fgValueNumberArrIndexVal(tree, arrElemTypeEQ, arrVN, inxVN, addrXvn, fldSeq);
// Computes the value number for reading an array element, modeled as nested
// map selects on the current heap: heap[elemTypeEq][arr][inx], then optional
// struct-field selectors from "fldSeq". If "tree" is non-null its VN pair is
// also set (liberal = the computed VN; conservative = a fresh opaque VN).
2825 ValueNum Compiler::fgValueNumberArrIndexVal(GenTreePtr tree,
2826 CORINFO_CLASS_HANDLE elemTypeEq,
2830 FieldSeqNode* fldSeq)
2832 assert(tree == nullptr || tree->OperIsIndir());
2834 // The VN inputs are required to be non-exceptional values.
2835 assert(arrVN == vnStore->VNNormVal(arrVN));
2836 assert(inxVN == vnStore->VNNormVal(inxVN));
// The static result type: the array element type unless an actual indirection
// node narrows/widens it.
2838 var_types elemTyp = DecodeElemType(elemTypeEq);
2839 var_types indType = (tree == nullptr) ? elemTyp : tree->TypeGet();
2840 ValueNum selectedElem;
// A NotAField field sequence means the address arithmetic could not be tracked
// precisely, so the load gets a new, unique ("opaque") value number.
2842 if (fldSeq == FieldSeqStore::NotAField())
2844 // This doesn't represent a proper array access
2845 JITDUMP(" *** NotAField sequence encountered in fgValueNumberArrIndexVal\n");
2847 // a new unique value number
2848 selectedElem = vnStore->VNForExpr(compCurBB, elemTyp);
2853 printf(" IND of PtrToArrElem is unique VN " STR_VN "%x.\n", selectedElem);
2857 if (tree != nullptr)
2859 tree->gtVNPair.SetBoth(selectedElem);
// Proper array access: select through heap -> per-element-type map -> per-array
// map -> element at index, all on the liberal VN space.
2864 ValueNum elemTypeEqVN = vnStore->VNForHandle(ssize_t(elemTypeEq), GTF_ICON_CLASS_HDL);
2865 ValueNum hAtArrType = vnStore->VNForMapSelect(VNK_Liberal, TYP_REF, fgCurHeapVN, elemTypeEqVN);
2866 ValueNum hAtArrTypeAtArr = vnStore->VNForMapSelect(VNK_Liberal, TYP_REF, hAtArrType, arrVN);
2867 ValueNum wholeElem = vnStore->VNForMapSelect(VNK_Liberal, elemTyp, hAtArrTypeAtArr, inxVN);
2872 printf(" hAtArrType " STR_VN "%x is MapSelect(curHeap(" STR_VN "%x), ", hAtArrType, fgCurHeapVN);
2873 if (elemTyp == TYP_STRUCT)
2875 printf("%s[]).\n", eeGetClassName(elemTypeEq));
2879 printf("%s[]).\n", varTypeName(elemTyp));
2882 printf(" hAtArrTypeAtArr " STR_VN "%x is MapSelect(hAtArrType(" STR_VN "%x), arr=" STR_VN "%x).\n",
2883 hAtArrTypeAtArr, hAtArrType, arrVN);
2885 printf(" wholeElem " STR_VN "%x is MapSelect(hAtArrTypeAtArr(" STR_VN "%x), ind=" STR_VN "%x).\n",
2886 wholeElem, hAtArrTypeAtArr, inxVN);
// Apply any struct field selectors to narrow the whole element down to the
// selected field, then type-check the result against the indirection type.
2890 selectedElem = wholeElem;
2891 size_t elemStructSize = 0;
2894 selectedElem = vnStore->VNApplySelectors(VNK_Liberal, wholeElem, fldSeq, &elemStructSize);
2895 elemTyp = vnStore->TypeOfVN(selectedElem);
2897 selectedElem = vnStore->VNApplySelectorsTypeCheck(selectedElem, indType, elemStructSize);
2898 selectedElem = vnStore->VNWithExc(selectedElem, excVN);
2901 if (verbose && (selectedElem != wholeElem))
2903 printf(" selectedElem is " STR_VN "%x after applying selectors.\n", selectedElem);
2907 if (tree != nullptr)
2909 tree->gtVNPair.SetLiberal(selectedElem);
2910 // TODO-CQ: what to do here about exceptions? We don't have the array and ind conservative
2911 // values, so we don't have their exceptions. Maybe we should.
2912 tree->gtVNPair.SetConservative(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
2916 return selectedElem;
2919 var_types ValueNumStore::TypeOfVN(ValueNum vn)
2921 Chunk* c = m_chunks.GetNoExpand(GetChunkNum(vn));
2925 //------------------------------------------------------------------------
2926 // LoopOfVN: If the given value number is an opaque one associated with a particular
2927 // expression in the IR, give the loop number where the expression occurs; otherwise,
2928 // returns MAX_LOOP_NUM.
2931 // vn - Value number to query
2934 // The corresponding block's bbNatLoopNum, which may be BasicBlock::NOT_IN_LOOP.
2935 // Returns MAX_LOOP_NUM if this VN is not an opaque value number associated with
2936 // a particular expression/location in the IR.
2938 BasicBlock::loopNumber ValueNumStore::LoopOfVN(ValueNum vn)
2940 Chunk* c = m_chunks.GetNoExpand(GetChunkNum(vn));
2941 return c->m_loopNum;
2944 bool ValueNumStore::IsVNConstant(ValueNum vn)
2950 Chunk* c = m_chunks.GetNoExpand(GetChunkNum(vn));
2951 if (c->m_attribs == CEA_Const)
2953 return vn != VNForVoid(); // Void is not a "real" constant -- in the sense that it represents no value.
2957 return c->m_attribs == CEA_Handle;
2961 bool ValueNumStore::IsVNInt32Constant(ValueNum vn)
2963 if (!IsVNConstant(vn))
2968 return TypeOfVN(vn) == TYP_INT;
2971 unsigned ValueNumStore::GetHandleFlags(ValueNum vn)
2973 assert(IsVNHandle(vn));
2974 Chunk* c = m_chunks.GetNoExpand(GetChunkNum(vn));
2975 unsigned offset = ChunkOffset(vn);
2976 VNHandle* handle = &reinterpret_cast<VNHandle*>(c->m_defs)[offset];
2977 return handle->m_flags;
2980 bool ValueNumStore::IsVNHandle(ValueNum vn)
2987 Chunk* c = m_chunks.GetNoExpand(GetChunkNum(vn));
2988 return c->m_attribs == CEA_Handle;
2991 bool ValueNumStore::IsVNConstantBound(ValueNum vn)
2993 // Do we have "var < 100"?
3000 if (!GetVNFunc(vn, &funcAttr))
3004 if (funcAttr.m_func != (VNFunc)GT_LE && funcAttr.m_func != (VNFunc)GT_GE && funcAttr.m_func != (VNFunc)GT_LT &&
3005 funcAttr.m_func != (VNFunc)GT_GT)
3010 return IsVNInt32Constant(funcAttr.m_args[0]) != IsVNInt32Constant(funcAttr.m_args[1]);
3013 void ValueNumStore::GetConstantBoundInfo(ValueNum vn, ConstantBoundInfo* info)
3015 assert(IsVNConstantBound(vn));
3018 // Do we have var < 100?
3020 GetVNFunc(vn, &funcAttr);
3022 bool isOp1Const = IsVNInt32Constant(funcAttr.m_args[1]);
3026 info->cmpOper = funcAttr.m_func;
3027 info->cmpOpVN = funcAttr.m_args[0];
3028 info->constVal = GetConstantInt32(funcAttr.m_args[1]);
3032 info->cmpOper = GenTree::SwapRelop((genTreeOps)funcAttr.m_func);
3033 info->cmpOpVN = funcAttr.m_args[1];
3034 info->constVal = GetConstantInt32(funcAttr.m_args[0]);
3038 bool ValueNumStore::IsVNArrLenBound(ValueNum vn)
3040 // Do we have "var < a.len"?
3047 if (!GetVNFunc(vn, &funcAttr))
3051 if (funcAttr.m_func != (VNFunc)GT_LE && funcAttr.m_func != (VNFunc)GT_GE && funcAttr.m_func != (VNFunc)GT_LT &&
3052 funcAttr.m_func != (VNFunc)GT_GT)
3056 if (!IsVNArrLen(funcAttr.m_args[0]) && !IsVNArrLen(funcAttr.m_args[1]))
3064 void ValueNumStore::GetArrLenBoundInfo(ValueNum vn, ArrLenArithBoundInfo* info)
3066 assert(IsVNArrLenBound(vn));
3068 // Do we have var < a.len?
3070 GetVNFunc(vn, &funcAttr);
3072 bool isOp1ArrLen = IsVNArrLen(funcAttr.m_args[1]);
3075 info->cmpOper = funcAttr.m_func;
3076 info->cmpOp = funcAttr.m_args[0];
3077 info->vnArray = GetArrForLenVn(funcAttr.m_args[1]);
3081 info->cmpOper = GenTree::SwapRelop((genTreeOps)funcAttr.m_func);
3082 info->cmpOp = funcAttr.m_args[1];
3083 info->vnArray = GetArrForLenVn(funcAttr.m_args[0]);
3087 bool ValueNumStore::IsVNArrLenArith(ValueNum vn)
3089 // Do we have "a.len +or- var"
3097 return GetVNFunc(vn, &funcAttr) && // vn is a func.
3098 (funcAttr.m_func == (VNFunc)GT_ADD || funcAttr.m_func == (VNFunc)GT_SUB) && // the func is +/-
3099 (IsVNArrLen(funcAttr.m_args[0]) || IsVNArrLen(funcAttr.m_args[1])); // either op1 or op2 is a.len
3102 void ValueNumStore::GetArrLenArithInfo(ValueNum vn, ArrLenArithBoundInfo* info)
3104 // Do we have a.len +/- var?
3105 assert(IsVNArrLenArith(vn));
3106 VNFuncApp funcArith;
3107 GetVNFunc(vn, &funcArith);
3109 bool isOp1ArrLen = IsVNArrLen(funcArith.m_args[1]);
3112 info->arrOper = funcArith.m_func;
3113 info->arrOp = funcArith.m_args[0];
3114 info->vnArray = GetArrForLenVn(funcArith.m_args[1]);
3118 info->arrOper = funcArith.m_func;
3119 info->arrOp = funcArith.m_args[1];
3120 info->vnArray = GetArrForLenVn(funcArith.m_args[0]);
3124 bool ValueNumStore::IsVNArrLenArithBound(ValueNum vn)
3126 // Do we have: "var < a.len - var"
3133 if (!GetVNFunc(vn, &funcAttr))
3138 // Suitable comparator.
3139 if (funcAttr.m_func != (VNFunc)GT_LE && funcAttr.m_func != (VNFunc)GT_GE && funcAttr.m_func != (VNFunc)GT_LT &&
3140 funcAttr.m_func != (VNFunc)GT_GT)
3145 // Either the op0 or op1 is arr len arithmetic.
3146 if (!IsVNArrLenArith(funcAttr.m_args[0]) && !IsVNArrLenArith(funcAttr.m_args[1]))
3154 void ValueNumStore::GetArrLenArithBoundInfo(ValueNum vn, ArrLenArithBoundInfo* info)
3156 assert(IsVNArrLenArithBound(vn));
3159 GetVNFunc(vn, &funcAttr);
3161 // Check whether op0 or op1 ia arr len arithmetic.
3162 bool isOp1ArrLenArith = IsVNArrLenArith(funcAttr.m_args[1]);
3163 if (isOp1ArrLenArith)
3165 info->cmpOper = funcAttr.m_func;
3166 info->cmpOp = funcAttr.m_args[0];
3167 GetArrLenArithInfo(funcAttr.m_args[1], info);
3171 info->cmpOper = GenTree::SwapRelop((genTreeOps)funcAttr.m_func);
3172 info->cmpOp = funcAttr.m_args[1];
3173 GetArrLenArithInfo(funcAttr.m_args[0], info);
3177 ValueNum ValueNumStore::GetArrForLenVn(ValueNum vn)
3185 if (GetVNFunc(vn, &funcAttr) && funcAttr.m_func == (VNFunc)GT_ARR_LENGTH)
3187 return funcAttr.m_args[0];
3192 bool ValueNumStore::IsVNNewArr(ValueNum vn, VNFuncApp* funcApp)
3198 bool result = false;
3199 if (GetVNFunc(vn, funcApp))
3201 result = (funcApp->m_func == VNF_JitNewArr) || (funcApp->m_func == VNF_JitReadyToRunNewArr);
3206 int ValueNumStore::GetNewArrSize(ValueNum vn)
3209 if (IsVNNewArr(vn, &funcApp))
3211 ValueNum arg1VN = funcApp.m_args[1];
3212 if (IsVNConstant(arg1VN) && TypeOfVN(arg1VN) == TYP_INT)
3214 return ConstantValue<int>(arg1VN);
3220 bool ValueNumStore::IsVNArrLen(ValueNum vn)
3227 return (GetVNFunc(vn, &funcAttr) && funcAttr.m_func == (VNFunc)GT_ARR_LENGTH);
// Returns the value number of a one-argument math intrinsic applied to
// "arg0VN". Constant-folds only when the argument is a constant AND the
// intrinsic is implemented with target instructions (so folding matches the
// runtime result); otherwise builds a VNF_* function application.
3230 ValueNum ValueNumStore::EvalMathFuncUnary(var_types typ, CorInfoIntrinsics gtMathFN, ValueNum arg0VN)
3232 assert(arg0VN == VNNormVal(arg0VN));
3233 if (IsVNConstant(arg0VN) && Compiler::IsTargetIntrinsic(gtMathFN))
3235 // If the math intrinsic is not implemented by target-specific instructions, such as implemented
3236 // by user calls, then don't do constant folding on it. This minimizes precision loss.
3237 // I *may* need separate tracks for the double/float -- if the intrinsic funcs have overloads for these.
3238 double arg0Val = GetConstantDouble(arg0VN);
// Fold the target-implemented intrinsics with the C runtime equivalents.
3243 case CORINFO_INTRINSIC_Sin:
3246 case CORINFO_INTRINSIC_Cos:
3249 case CORINFO_INTRINSIC_Sqrt:
3250 res = sqrt(arg0Val);
3252 case CORINFO_INTRINSIC_Abs:
3253 res = fabs(arg0Val); // The result and params are doubles.
3255 case CORINFO_INTRINSIC_Round:
3256 res = FloatingPointUtils::round(arg0Val);
3259 unreached(); // the above are the only math intrinsics at the time of this writing.
// Wrap the folded double back up as a constant of the requested type.
3261 if (typ == TYP_DOUBLE)
3263 return VNForDoubleCon(res);
3265 else if (typ == TYP_FLOAT)
3267 return VNForFloatCon(float(res));
// TYP_INT results only arise from Round.
3271 assert(typ == TYP_INT);
3272 assert(gtMathFN == CORINFO_INTRINSIC_Round);
3274 return VNForIntCon(int(res));
// Non-foldable case: emit a symbolic VNF_* function application.
3279 assert(typ == TYP_DOUBLE || typ == TYP_FLOAT || (typ == TYP_INT && gtMathFN == CORINFO_INTRINSIC_Round));
3281 VNFunc vnf = VNF_Boundary;
// Map the intrinsic id to its VNFunc; Round is type-dependent.
3284 case CORINFO_INTRINSIC_Sin:
3287 case CORINFO_INTRINSIC_Cos:
3290 case CORINFO_INTRINSIC_Sqrt:
3293 case CORINFO_INTRINSIC_Abs:
3296 case CORINFO_INTRINSIC_Round:
3297 if (typ == TYP_DOUBLE)
3299 vnf = VNF_RoundDouble;
3301 else if (typ == TYP_FLOAT)
3303 vnf = VNF_RoundFloat;
3305 else if (typ == TYP_INT)
3311 noway_assert(!"Invalid INTRINSIC_Round");
3314 case CORINFO_INTRINSIC_Cosh:
3317 case CORINFO_INTRINSIC_Sinh:
3320 case CORINFO_INTRINSIC_Tan:
3323 case CORINFO_INTRINSIC_Tanh:
3326 case CORINFO_INTRINSIC_Asin:
3329 case CORINFO_INTRINSIC_Acos:
3332 case CORINFO_INTRINSIC_Atan:
3335 case CORINFO_INTRINSIC_Log10:
3338 case CORINFO_INTRINSIC_Exp:
3341 case CORINFO_INTRINSIC_Ceiling:
3344 case CORINFO_INTRINSIC_Floor:
3348 unreached(); // the above are the only math intrinsics at the time of this writing.
3351 return VNForFunc(typ, vnf, arg0VN);
3355 ValueNum ValueNumStore::EvalMathFuncBinary(var_types typ, CorInfoIntrinsics gtMathFN, ValueNum arg0VN, ValueNum arg1VN)
3357 assert(varTypeIsFloating(typ));
3358 assert(arg0VN == VNNormVal(arg0VN));
3359 assert(arg1VN == VNNormVal(arg1VN));
3361 VNFunc vnf = VNF_Boundary;
3363 // Currently, none of the binary math intrinsic are implemented by target-specific instructions.
3364 // To minimize precision loss, do not do constant folding on them.
3368 case CORINFO_INTRINSIC_Atan2:
3372 case CORINFO_INTRINSIC_Pow:
3377 unreached(); // the above are the only binary math intrinsics at the time of this writing.
3380 return VNForFunc(typ, vnf, arg0VN, arg1VN);
// Returns true iff "vn" was allocated from a function-application chunk,
// as determined by the chunk's attribute kind.
3383 bool ValueNumStore::IsVNFunc(ValueNum vn)
3389 Chunk* c = m_chunks.GetNoExpand(GetChunkNum(vn));
// Dispatch on the chunk kind; function-application kinds answer true.
3390 switch (c->m_attribs)
// If "vn" is a function application, fills in "funcApp" with its func,
// arity, and arguments and returns true; otherwise returns false. The
// argument layout depends on the chunk kind (VNDefFunc0Arg..VNDefFunc4Arg).
3403 bool ValueNumStore::GetVNFunc(ValueNum vn, VNFuncApp* funcApp)
3405 Chunk* c = m_chunks.GetNoExpand(GetChunkNum(vn));
3406 unsigned offset = ChunkOffset(vn);
3407 assert(offset < c->m_numUsed);
3408 switch (c->m_attribs)
// 4-argument function application.
3412 VNDefFunc4Arg* farg4 = &reinterpret_cast<VNDefFunc4Arg*>(c->m_defs)[offset];
3413 funcApp->m_func = farg4->m_func;
3414 funcApp->m_arity = 4;
3415 funcApp->m_args[0] = farg4->m_arg0;
3416 funcApp->m_args[1] = farg4->m_arg1;
3417 funcApp->m_args[2] = farg4->m_arg2;
3418 funcApp->m_args[3] = farg4->m_arg3;
// 3-argument function application.
3423 VNDefFunc3Arg* farg3 = &reinterpret_cast<VNDefFunc3Arg*>(c->m_defs)[offset];
3424 funcApp->m_func = farg3->m_func;
3425 funcApp->m_arity = 3;
3426 funcApp->m_args[0] = farg3->m_arg0;
3427 funcApp->m_args[1] = farg3->m_arg1;
3428 funcApp->m_args[2] = farg3->m_arg2;
// 2-argument function application.
3433 VNDefFunc2Arg* farg2 = &reinterpret_cast<VNDefFunc2Arg*>(c->m_defs)[offset];
3434 funcApp->m_func = farg2->m_func;
3435 funcApp->m_arity = 2;
3436 funcApp->m_args[0] = farg2->m_arg0;
3437 funcApp->m_args[1] = farg2->m_arg1;
// 1-argument function application.
3442 VNDefFunc1Arg* farg1 = &reinterpret_cast<VNDefFunc1Arg*>(c->m_defs)[offset];
3443 funcApp->m_func = farg1->m_func;
3444 funcApp->m_arity = 1;
3445 funcApp->m_args[0] = farg1->m_arg0;
// 0-argument function application.
3450 VNDefFunc0Arg* farg0 = &reinterpret_cast<VNDefFunc0Arg*>(c->m_defs)[offset];
3451 funcApp->m_func = farg0->m_func;
3452 funcApp->m_arity = 0;
3460 ValueNum ValueNumStore::VNForRefInAddr(ValueNum vn)
3462 var_types vnType = TypeOfVN(vn);
3463 if (vnType == TYP_REF)
3468 assert(vnType == TYP_BYREF);
3470 if (GetVNFunc(vn, &funcApp))
3472 assert(funcApp.m_arity == 2 && (funcApp.m_func == VNFunc(GT_ADD) || funcApp.m_func == VNFunc(GT_SUB)));
3473 var_types vnArg0Type = TypeOfVN(funcApp.m_args[0]);
3474 if (vnArg0Type == TYP_REF || vnArg0Type == TYP_BYREF)
3476 return VNForRefInAddr(funcApp.m_args[0]);
3480 assert(funcApp.m_func == VNFunc(GT_ADD) &&
3481 (TypeOfVN(funcApp.m_args[1]) == TYP_REF || TypeOfVN(funcApp.m_args[1]) == TYP_BYREF));
3482 return VNForRefInAddr(funcApp.m_args[1]);
3487 assert(IsVNConstant(vn));
3492 bool ValueNumStore::VNIsValid(ValueNum vn)
3494 ChunkNum cn = GetChunkNum(vn);
3495 if (cn >= m_chunks.Size())
3500 Chunk* c = m_chunks.GetNoExpand(cn);
3501 return ChunkOffset(vn) < c->m_numUsed;
// Debug dump of "vn" to stdout: constants (by type), handles, special refs
// (null/void/zero-map), bound comparisons, and function applications; falls
// back to printing just the VN for opaque values.
3506 void ValueNumStore::vnDump(Compiler* comp, ValueNum vn, bool isPtr)
3513 else if (IsVNHandle(vn))
3515 ssize_t val = ConstantValue<ssize_t>(vn);
3516 printf("Hnd const: 0x%p", dspPtr(val));
3518 else if (IsVNConstant(vn))
3520 var_types vnt = TypeOfVN(vn);
// int-sized constant.
3532 int val = ConstantValue<int>(vn);
3535 printf("PtrCns[%p]", dspPtr(val));
3540 if ((val > -1000) && (val < 1000))
// NOTE(review): "%ld" with an int argument -- width mismatch on LP64;
// "%d" would be the matching specifier. Confirm before changing output.
3542 printf(" %ld", val);
3546 printf(" 0x%X", val);
// long-sized constant.
3554 INT64 val = ConstantValue<INT64>(vn);
3557 printf("LngPtrCns: 0x%p", dspPtr(val));
3562 if ((val > -1000) && (val < 1000))
// NOTE(review): "%ld"/"%X" with an INT64 argument -- "%lld"/"%llX" would
// match; verify on all target platforms before changing output.
3564 printf(" %ld", val);
3566 else if ((val & 0xFFFFFFFF00000000LL) == 0)
3568 printf(" 0x%X", val);
3572 printf(" 0x%llx", val);
3578 printf("FltCns[%f]", ConstantValue<float>(vn));
3581 printf("DblCns[%f]", ConstantValue<double>(vn));
// TYP_REF constants: only the special reserved refs are expected.
3585 if (vn == VNForNull())
3589 else if (vn == VNForVoid())
3595 assert(vn == VNForZeroMap());
3608 #endif // FEATURE_SIMD
3609 printf("structVal");
3612 // These should be unreached.
// Bound comparisons get a symbolic rendering via their info structs.
3617 else if (IsVNArrLenBound(vn))
3619 ArrLenArithBoundInfo info;
3620 GetArrLenBoundInfo(vn, &info);
3623 else if (IsVNArrLenArithBound(vn))
3625 ArrLenArithBoundInfo info;
3626 GetArrLenArithBoundInfo(vn, &info);
// General function applications; FieldSeq/MapSelect/MapStore have dedicated dumpers.
3629 else if (IsVNFunc(vn))
3632 GetVNFunc(vn, &funcApp);
3633 // A few special cases...
3634 switch (funcApp.m_func)
3637 vnDumpFieldSeq(comp, &funcApp, true);
3640 vnDumpMapSelect(comp, &funcApp);
3643 vnDumpMapStore(comp, &funcApp);
3646 printf("%s(", VNFuncName(funcApp.m_func));
3647 for (unsigned i = 0; i < funcApp.m_arity; i++)
3654 printf(STR_VN "%x", funcApp.m_args[i]);
3656 #if FEATURE_VN_DUMP_FUNC_ARGS
3658 vnDump(comp, funcApp.m_args[i]);
3666 // Otherwise, just a VN with no structure; print just the VN.
// Debug dump of a VNF_FieldSeq application: prints the field (or pseudo-field)
// name, then recurses on the tail sequence if present. "isHead" marks the
// outermost call so the whole sequence gets delimited once.
3672 void ValueNumStore::vnDumpFieldSeq(Compiler* comp, VNFuncApp* fieldSeq, bool isHead)
3674 assert(fieldSeq->m_func == VNF_FieldSeq); // Precondition.
3675 // First arg is the field handle VN.
3676 assert(IsVNConstant(fieldSeq->m_args[0]) && TypeOfVN(fieldSeq->m_args[0]) == TYP_I_IMPL);
3677 ssize_t fieldHndVal = ConstantValue<ssize_t>(fieldSeq->m_args[0]);
// Second arg is the tail: VNForNull() terminates the sequence.
3678 bool hasTail = (fieldSeq->m_args[1] != VNForNull());
3680 if (isHead && hasTail)
// Pseudo-fields print symbolic names; real fields print via the EE.
3685 CORINFO_FIELD_HANDLE fldHnd = CORINFO_FIELD_HANDLE(fieldHndVal);
3686 if (fldHnd == FieldSeqStore::FirstElemPseudoField)
3688 printf("#FirstElem");
3690 else if (fldHnd == FieldSeqStore::ConstantIndexPseudoField)
3692 printf("#ConstantIndex");
3696 const char* modName;
3697 const char* fldName = m_pComp->eeGetFieldName(fldHnd, &modName);
3698 printf("%s", fldName);
// Recurse on the tail (itself a VNF_FieldSeq application).
3704 assert(IsVNFunc(fieldSeq->m_args[1]));
3706 GetVNFunc(fieldSeq->m_args[1], &tail);
3707 vnDumpFieldSeq(comp, &tail, false);
3710 if (isHead && hasTail)
// Debug dump of a VNF_MapSelect application: prints the map VN and the
// index VN it selects from that map.
3716 void ValueNumStore::vnDumpMapSelect(Compiler* comp, VNFuncApp* mapSelect)
3718 assert(mapSelect->m_func == VNF_MapSelect); // Precondition.
3720 ValueNum mapVN = mapSelect->m_args[0]; // First arg is the map id
3721 ValueNum indexVN = mapSelect->m_args[1]; // Second arg is the index
3723 comp->vnPrint(mapVN, 0);
3725 comp->vnPrint(indexVN, 0);
// Debug dump of a VNF_MapStore application: prints the map VN, the index VN
// being stored to, and the new value VN.
3729 void ValueNumStore::vnDumpMapStore(Compiler* comp, VNFuncApp* mapStore)
3731 assert(mapStore->m_func == VNF_MapStore); // Precondition.
3733 ValueNum mapVN = mapStore->m_args[0]; // First arg is the map id
3734 ValueNum indexVN = mapStore->m_args[1]; // Second arg is the index
3735 ValueNum newValVN = mapStore->m_args[2]; // Third arg is the new value
3737 comp->vnPrint(mapVN, 0);
3739 comp->vnPrint(indexVN, 0);
3741 comp->vnPrint(newValVN, 0);
3746 // Static fields, methods.
// Per-VNFunc attribute bits (arity, commutativity, etc.), filled in by
// InitValueNumStoreStatics and exposed via s_vnfOpAttribs below.
3747 static UINT8 vnfOpAttribs[VNF_COUNT];
// Gen-tree opers that must never be used directly as VNFuncs, with the
// reason noted per group.
3748 static genTreeOps genTreeOpsIllegalAsVNFunc[] = {GT_IND, // When we do heap memory.
3749 GT_NULLCHECK, GT_QMARK, GT_COLON, GT_LOCKADD, GT_XADD, GT_XCHG,
3750 GT_CMPXCHG, GT_LCLHEAP, GT_BOX,
3752 // These need special semantics:
3753 GT_COMMA, // == second argument (but with exception(s) from first).
3754 GT_ADDR, GT_ARR_BOUNDS_CHECK,
3755 GT_OBJ, // May reference heap memory.
3756 GT_BLK, // May reference heap memory.
3758 // These control-flow operations need no values.
3759 GT_JTRUE, GT_RETURN, GT_SWITCH, GT_RETFILT, GT_CKFINITE};
// Published pointer to the table above; set by InitValueNumStoreStatics.
3761 UINT8* ValueNumStore::s_vnfOpAttribs = nullptr;
// One-time initialization of the vnfOpAttribs table: records arity and
// commutativity for every gen-tree oper, then (via the valuenumfuncs.h
// x-macro) for every VNF_* function, and finally marks the opers that are
// illegal as VNFuncs.
3763 void ValueNumStore::InitValueNumStoreStatics()
3765 // Make sure we've gotten constants right...
3766 assert(unsigned(VNFOA_Arity) == (1 << VNFOA_ArityShift));
3767 assert(unsigned(VNFOA_AfterArity) == (unsigned(VNFOA_Arity) << VNFOA_ArityBits));
3769 s_vnfOpAttribs = &vnfOpAttribs[0];
// Pass 1: gen-tree opers (VNFunc values below VNF_Boundary).
3770 for (unsigned i = 0; i < GT_COUNT; i++)
3772 genTreeOps gtOper = static_cast<genTreeOps>(i);
3774 if (GenTree::OperIsUnary(gtOper))
3778 else if (GenTree::OperIsBinary(gtOper))
3782 // Since GT_ARR_BOUNDS_CHECK is not currently GTK_BINOP
3783 else if (gtOper == GT_ARR_BOUNDS_CHECK)
3787 vnfOpAttribs[i] |= (arity << VNFOA_ArityShift);
3789 if (GenTree::OperIsCommutative(gtOper))
3791 vnfOpAttribs[i] |= VNFOA_Commutative;
// Pass 2: VNF_* functions, driven by the valuenumfuncs.h x-macro.
3795 // I so wish this wasn't the best way to do this...
3797 int vnfNum = VNF_Boundary + 1; // The macro definition below will update this after using it.
3799 #define ValueNumFuncDef(vnf, arity, commute, knownNonNull, sharedStatic) \
3801 vnfOpAttribs[vnfNum] |= VNFOA_Commutative; \
3803 vnfOpAttribs[vnfNum] |= VNFOA_KnownNonNull; \
3805 vnfOpAttribs[vnfNum] |= VNFOA_SharedStatic; \
3806 vnfOpAttribs[vnfNum] |= (arity << VNFOA_ArityShift); \
3809 #include "valuenumfuncs.h"
3810 #undef ValueNumFuncDef
// Pass 3: mark opers that may never appear as VNFuncs.
3812 unsigned n = sizeof(genTreeOpsIllegalAsVNFunc) / sizeof(genTreeOps);
3813 for (unsigned i = 0; i < n; i++)
3815 vnfOpAttribs[genTreeOpsIllegalAsVNFunc[i]] |= VNFOA_IllegalGenTreeOp;
3820 // Define the name array.
// The valuenumfuncs.h x-macro stringizes each VNF_* name into this table,
// indexed by (vnf - (VNF_Boundary + 1)); see VNFuncName below.
3821 #define ValueNumFuncDef(vnf, arity, commute, knownNonNull, sharedStatic) #vnf,
3823 const char* ValueNumStore::VNFuncNameArr[] = {
3824 #include "valuenumfuncs.h"
3825 #undef ValueNumFuncDef
3829 const char* ValueNumStore::VNFuncName(VNFunc vnf)
3831 if (vnf < VNF_Boundary)
3833 return GenTree::NodeName(genTreeOps(vnf));
3837 return VNFuncNameArr[vnf - (VNF_Boundary + 1)];
// Printable names for the reserved value numbers, indexed from RecursiveVN
// (-2) upward; see reservedName()/isReservedVN() below.
3841 static const char* s_reservedNameArr[] = {
3842 "$VN.Recursive", // -2 RecursiveVN
3843 "$VN.No", // -1 NoVN
3844 "$VN.Null", // 0 VNForNull()
3845 "$VN.ZeroMap", // 1 VNForZeroMap()
3846 "$VN.NotAField", // 2 VNForNotAField()
3847 "$VN.ReadOnlyHeap", // 3 VNForROH()
3848 "$VN.Void", // 4 VNForVoid()
3849 "$VN.EmptyExcSet" // 5 VNForEmptyExcSet()
3852 // Returns the string name of "vn" when it is a reserved value number, nullptr otherwise
3854 const char* ValueNumStore::reservedName(ValueNum vn)
3856 int val = vn - ValueNumStore::RecursiveVN; // Add two, making 'RecursiveVN' equal to zero
3857 int max = ValueNumStore::SRC_NumSpecialRefConsts - ValueNumStore::RecursiveVN;
3859 if ((val >= 0) && (val < max))
3861 return s_reservedNameArr[val];
3868 // Returns true if "vn" is a reserved value number
3871 bool ValueNumStore::isReservedVN(ValueNum vn)
3873 int val = vn - ValueNumStore::RecursiveVN; // Adding two, making 'RecursiveVN' equal to zero
3874 int max = ValueNumStore::SRC_NumSpecialRefConsts - ValueNumStore::RecursiveVN;
3876 if ((val >= 0) && (val < max))
// Debug-only self-test of the value number store: constants of each type
// hash-cons to the same VN, distinct constants get distinct VNs, and
// VNForFunc both hash-conses and constant-folds (1 + 100 == 101).
3884 void ValueNumStore::RunTests(Compiler* comp)
3886 VNFunc VNF_Add = GenTreeOpToVNFunc(GT_ADD);
3888 ValueNumStore* vns = new (comp->getAllocatorDebugOnly()) ValueNumStore(comp, comp->getAllocatorDebugOnly());
3889 ValueNum vnNull = VNForNull();
3890 assert(vnNull == VNForNull());
// Int constants: identical values share a VN.
3892 ValueNum vnFor1 = vns->VNForIntCon(1);
3893 assert(vnFor1 == vns->VNForIntCon(1));
3894 assert(vns->TypeOfVN(vnFor1) == TYP_INT);
3895 assert(vns->IsVNConstant(vnFor1));
3896 assert(vns->ConstantValue<int>(vnFor1) == 1);
3898 ValueNum vnFor100 = vns->VNForIntCon(100);
3899 assert(vnFor100 == vns->VNForIntCon(100));
3900 assert(vnFor100 != vnFor1);
3901 assert(vns->TypeOfVN(vnFor100) == TYP_INT);
3902 assert(vns->IsVNConstant(vnFor100));
3903 assert(vns->ConstantValue<int>(vnFor100) == 100);
// Float and double constants are typed distinctly from ints and each other.
3905 ValueNum vnFor1F = vns->VNForFloatCon(1.0f);
3906 assert(vnFor1F == vns->VNForFloatCon(1.0f));
3907 assert(vnFor1F != vnFor1 && vnFor1F != vnFor100);
3908 assert(vns->TypeOfVN(vnFor1F) == TYP_FLOAT);
3909 assert(vns->IsVNConstant(vnFor1F));
3910 assert(vns->ConstantValue<float>(vnFor1F) == 1.0f);
3912 ValueNum vnFor1D = vns->VNForDoubleCon(1.0);
3913 assert(vnFor1D == vns->VNForDoubleCon(1.0));
3914 assert(vnFor1D != vnFor1F && vnFor1D != vnFor1 && vnFor1D != vnFor100);
3915 assert(vns->TypeOfVN(vnFor1D) == TYP_DOUBLE);
3916 assert(vns->IsVNConstant(vnFor1D));
3917 assert(vns->ConstantValue<double>(vnFor1D) == 1.0);
// Function applications: const + opaque is hash-consed but not folded.
3919 ValueNum vnRandom1 = vns->VNForExpr(nullptr, TYP_INT);
3920 ValueNum vnForFunc2a = vns->VNForFunc(TYP_INT, VNF_Add, vnFor1, vnRandom1);
3921 assert(vnForFunc2a == vns->VNForFunc(TYP_INT, VNF_Add, vnFor1, vnRandom1));
3922 assert(vnForFunc2a != vnFor1D && vnForFunc2a != vnFor1F && vnForFunc2a != vnFor1 && vnForFunc2a != vnRandom1);
3923 assert(vns->TypeOfVN(vnForFunc2a) == TYP_INT);
3924 assert(!vns->IsVNConstant(vnForFunc2a));
3925 assert(vns->IsVNFunc(vnForFunc2a));
3927 bool b = vns->GetVNFunc(vnForFunc2a, &fa2a);
3929 assert(fa2a.m_func == VNF_Add && fa2a.m_arity == 2 && fa2a.m_args[0] == vnFor1 && fa2a.m_args[1] == vnRandom1);
// const + const folds to a constant VN.
3931 ValueNum vnForFunc2b = vns->VNForFunc(TYP_INT, VNF_Add, vnFor1, vnFor100);
3932 assert(vnForFunc2b == vns->VNForFunc(TYP_INT, VNF_Add, vnFor1, vnFor100));
3933 assert(vnForFunc2b != vnFor1D && vnForFunc2b != vnFor1F && vnForFunc2b != vnFor1 && vnForFunc2b != vnFor100);
3934 assert(vns->TypeOfVN(vnForFunc2b) == TYP_INT);
3935 assert(vns->IsVNConstant(vnForFunc2b));
3936 assert(vns->ConstantValue<int>(vnForFunc2b) == 101);
3938 // printf("Did ValueNumStore::RunTests.\n");
3942 typedef ExpandArrayStack<BasicBlock*> BlockStack;
3944 // This represents the "to do" state of the value number computation.
3945 struct ValueNumberState
3947 // These two stacks collectively represent the set of blocks that are candidates for
3948 // processing, because at least one predecessor has been processed. Blocks on "m_toDoAllPredsDone"
3949 // have had *all* predecessors processed, and thus are candidates for some extra optimizations.
3950 // Blocks on "m_toDoNotAllPredsDone" have at least one predecessor that has not been processed.
// Blocks initially on "m_toDoNotAllPredsDone" may be moved to "m_toDoAllPredsDone"
// when their last unprocessed predecessor is processed, thus maintaining the invariants.
3953 BlockStack m_toDoAllPredsDone;
3954 BlockStack m_toDoNotAllPredsDone;
// Per-block visit state, indexed by bbNum.
3958 // TBD: This should really be a bitset...
3960 // first bit indicates completed,
3961 // second bit indicates that it's been pushed on all-done stack,
3962 // third bit indicates that it's been pushed on not-all-done stack.
3968 BVB_onAllDone = 0x2,
3969 BVB_onNotAllDone = 0x4,
3972 bool GetVisitBit(unsigned bbNum, BlockVisitBits bvb)
3974 return (m_visited[bbNum] & bvb) != 0;
3976 void SetVisitBit(unsigned bbNum, BlockVisitBits bvb)
3978 m_visited[bbNum] |= bvb;
// The () after new[] value-initializes the visit-bit array to all zeros.
3981 ValueNumberState(Compiler* comp)
3982 : m_toDoAllPredsDone(comp->getAllocator(), /*minSize*/ 4)
3983 , m_toDoNotAllPredsDone(comp->getAllocator(), /*minSize*/ 4)
3985 , m_visited(new (comp, CMK_ValueNumber) BYTE[comp->fgBBNumMax + 1]())
// Picks the next block to process when no block has all predecessors done,
// preferring an (outermost) loop entry whose non-loop predecessors are all
// complete, so that processing it breaks the cycle.
3989 BasicBlock* ChooseFromNotAllPredsDone()
3991 assert(m_toDoAllPredsDone.Size() == 0);
3992 // If we have no blocks with all preds done, then (ideally, if all cycles have been captured by loops)
3993 // we must have at least one block within a loop. We want to do the loops first. Doing a loop entry block
3994 // should break the cycle, making the rest of the body of the loop (unless there's a nested loop) doable by the
3995 // all-preds-done rule. If several loop entry blocks are available, at least one should have all non-loop preds
3996 // done -- we choose that.
3997 for (unsigned i = 0; i < m_toDoNotAllPredsDone.Size(); i++)
3999 BasicBlock* cand = m_toDoNotAllPredsDone.Get(i);
4001 // Skip any already-completed blocks (a block may have all its preds finished, get added to the
4002 // all-preds-done todo set, and get processed there). Do this by moving the last one down, to
4003 // keep the array compact.
4004 while (GetVisitBit(cand->bbNum, BVB_complete))
4006 if (i + 1 < m_toDoNotAllPredsDone.Size())
4008 cand = m_toDoNotAllPredsDone.Pop();
4009 m_toDoNotAllPredsDone.Set(i, cand);
4013 // "cand" is the last element; delete it.
4014 (void)m_toDoNotAllPredsDone.Pop();
4018 // We may have run out of non-complete candidates above. If so, we're done.
4019 if (i == m_toDoNotAllPredsDone.Size())
4024 // See if "cand" is a loop entry.
4026 if (m_comp->optBlockIsLoopEntry(cand, &lnum))
4028 // "lnum" is the innermost loop of which "cand" is the entry; find the outermost.
4029 unsigned lnumPar = m_comp->optLoopTable[lnum].lpParent;
4030 while (lnumPar != BasicBlock::NOT_IN_LOOP)
4032 if (m_comp->optLoopTable[lnumPar].lpEntry == cand)
4040 lnumPar = m_comp->optLoopTable[lnumPar].lpParent;
// Accept "cand" only if every predecessor outside the loop is complete.
4043 bool allNonLoopPredsDone = true;
4044 for (flowList* pred = m_comp->BlockPredsWithEH(cand); pred != nullptr; pred = pred->flNext)
4046 BasicBlock* predBlock = pred->flBlock;
4047 if (!m_comp->optLoopTable[lnum].lpContains(predBlock))
4049 if (!GetVisitBit(predBlock->bbNum, BVB_complete))
4051 allNonLoopPredsDone = false;
4055 if (allNonLoopPredsDone)
// If we didn't find a loop entry block with all non-loop preds done above,
// then return a random member (if there is one).
4064 if (m_toDoNotAllPredsDone.Size() == 0)
4070 return m_toDoNotAllPredsDone.Pop();
4074 // Debugging output that is too detailed for a normal JIT dump...
4075 #define DEBUG_VN_VISIT 0
4077 // Record that "blk" has been visited, and add any unvisited successors of "blk" to the appropriate todo set.
4078 void FinishVisit(BasicBlock* blk)
4080 #ifdef DEBUG_VN_VISIT
4081 JITDUMP("finish(BB%02u).\n", blk->bbNum);
4082 #endif // DEBUG_VN_VISIT
4084 SetVisitBit(blk->bbNum, BVB_complete);
4086 AllSuccessorIter succsEnd = blk->GetAllSuccs(m_comp).end();
4087 for (AllSuccessorIter succs = blk->GetAllSuccs(m_comp).begin(); succs != succsEnd; ++succs)
4089 BasicBlock* succ = (*succs);
4090 #ifdef DEBUG_VN_VISIT
4091 JITDUMP(" Succ(BB%02u).\n", succ->bbNum);
4092 #endif // DEBUG_VN_VISIT
4094 if (GetVisitBit(succ->bbNum, BVB_complete))
4098 #ifdef DEBUG_VN_VISIT
4099 JITDUMP(" Not yet completed.\n");
4100 #endif // DEBUG_VN_VISIT
// Classify the successor: all preds done vs. not, and push accordingly.
4102 bool allPredsVisited = true;
4103 for (flowList* pred = m_comp->BlockPredsWithEH(succ); pred != nullptr; pred = pred->flNext)
4105 BasicBlock* predBlock = pred->flBlock;
4106 if (!GetVisitBit(predBlock->bbNum, BVB_complete))
4108 allPredsVisited = false;
4113 if (allPredsVisited)
4115 #ifdef DEBUG_VN_VISIT
4116 JITDUMP(" All preds complete, adding to allDone.\n");
4117 #endif // DEBUG_VN_VISIT
4119 assert(!GetVisitBit(succ->bbNum, BVB_onAllDone)); // Only last completion of last succ should add to
4121 m_toDoAllPredsDone.Push(succ);
4122 SetVisitBit(succ->bbNum, BVB_onAllDone);
4126 #ifdef DEBUG_VN_VISIT
4127 JITDUMP(" Not all preds complete Adding to notallDone, if necessary...\n");
4128 #endif // DEBUG_VN_VISIT
4130 if (!GetVisitBit(succ->bbNum, BVB_onNotAllDone))
4132 #ifdef DEBUG_VN_VISIT
4133 JITDUMP(" Was necessary.\n");
4134 #endif // DEBUG_VN_VISIT
4135 m_toDoNotAllPredsDone.Push(succ);
4136 SetVisitBit(succ->bbNum, BVB_onNotAllDone);
// True while any block remains on either todo stack.
4144 return m_toDoAllPredsDone.Size() > 0 || m_toDoNotAllPredsDone.Size() > 0;
4148 void Compiler::fgValueNumber()
4151 // This could be a JITDUMP, but some people find it convenient to set a breakpoint on the printf.
4154 printf("\n*************** In fgValueNumber()\n");
4158 // If we skipped SSA, skip VN as well.
4159 if (fgSsaPassesCompleted == 0)
4164 // Allocate the value number store.
4165 assert(fgVNPassesCompleted > 0 || vnStore == nullptr);
4166 if (fgVNPassesCompleted == 0)
4168 CompAllocator* allocator = new (this, CMK_ValueNumber) CompAllocator(this, CMK_ValueNumber);
4169 vnStore = new (this, CMK_ValueNumber) ValueNumStore(this, allocator);
4174 // Make sure the heap SSA names have no value numbers.
4175 for (unsigned i = 0; i < lvHeapNumSsaNames; i++)
4177 lvHeapPerSsaData.GetRef(i).m_vnPair = noVnp;
4179 for (BasicBlock* blk = fgFirstBB; blk != nullptr; blk = blk->bbNext)
4181 // Now iterate over the block's statements, and their trees.
4182 for (GenTreePtr stmts = blk->FirstNonPhiDef(); stmts != nullptr; stmts = stmts->gtNext)
4184 assert(stmts->IsStatement());
4185 for (GenTreePtr tree = stmts->gtStmt.gtStmtList; tree; tree = tree->gtNext)
4187 tree->gtVNPair.SetBoth(ValueNumStore::NoVN);
4193 // Compute the side effects of loops.
4194 optComputeLoopSideEffects();
4196 // At the block level, we will use a modified worklist algorithm. We will have two
4197 // "todo" sets of unvisited blocks. Blocks (other than the entry block) are put in a
4198 // todo set only when some predecessor has been visited, so all blocks have at least one
4199 // predecessor visited. The distinction between the two sets is whether *all* predecessors have
4200 // already been visited. We visit such blocks preferentially if they exist, since phi definitions
4201 // in such blocks will have all arguments defined, enabling a simplification in the case that all
4202 // arguments to the phi have the same VN. If no such blocks exist, we pick a block with at least
4203 // one unvisited predecessor. In this case, we assign a new VN for phi definitions.
4205 // Start by giving incoming arguments value numbers.
4206 // Also give must-init vars a zero of their type.
4207 for (unsigned i = 0; i < lvaCount; i++)
4209 LclVarDsc* varDsc = &lvaTable[i];
4210 if (varDsc->lvIsParam)
4212 // We assume that code equivalent to this variable initialization loop
4213 // has been performed when doing SSA naming, so that all the variables we give
4214 // initial VNs to here have been given initial SSA definitions there.
4215 // SSA numbers always start from FIRST_SSA_NUM, and we give the value number to SSA name FIRST_SSA_NUM.
4216 // We use the VNF_InitVal(i) from here so we know that this value is loop-invariant
4218 ValueNum initVal = vnStore->VNForFunc(varDsc->TypeGet(), VNF_InitVal, vnStore->VNForIntCon(i));
4219 LclSsaVarDsc* ssaDef = varDsc->GetPerSsaData(SsaConfig::FIRST_SSA_NUM);
4220 ssaDef->m_vnPair.SetBoth(initVal);
4221 ssaDef->m_defLoc.m_blk = fgFirstBB;
4223 else if (info.compInitMem || varDsc->lvMustInit ||
4224 (varDsc->lvTracked && VarSetOps::IsMember(this, fgFirstBB->bbLiveIn, varDsc->lvVarIndex)))
// The last clause covers the use-before-def variables (the ones that are live-in to the first block),
4227 // these are variables that are read before being initialized (at least on some control flow paths)
4228 // if they are not must-init, then they get VNF_InitVal(i), as with the param case.)
4230 bool isZeroed = (info.compInitMem || varDsc->lvMustInit);
4231 ValueNum initVal = ValueNumStore::NoVN; // We must assign a new value to initVal
4232 var_types typ = varDsc->TypeGet();
4236 case TYP_LCLBLK: // The outgoing args area for arm and x64
4237 case TYP_BLK: // A blob of memory
4238 // TYP_BLK is used for the EHSlots LclVar on x86 (aka shadowSPslotsVar)
4239 // and for the lvaInlinedPInvokeFrameVar on x64, arm and x86
4240 // The stack associated with these LclVars are not zero initialized
4241 // thus we set 'initVN' to a new, unique VN.
4243 initVal = vnStore->VNForExpr(fgFirstBB);
4249 // LclVars of TYP_BYREF can be zero-inited.
4250 initVal = vnStore->VNForByrefCon(0);
4254 // Here we have uninitialized TYP_BYREF
4255 initVal = vnStore->VNForFunc(typ, VNF_InitVal, vnStore->VNForIntCon(i));
4262 // By default we will zero init these LclVars
4263 initVal = vnStore->VNZeroForType(typ);
4267 initVal = vnStore->VNForFunc(typ, VNF_InitVal, vnStore->VNForIntCon(i));
4272 bool isVarargParam = (i == lvaVarargsBaseOfStkArgs || i == lvaVarargsHandleArg);
4274 initVal = vnStore->VNForExpr(fgFirstBB); // a new, unique VN.
4276 assert(initVal != ValueNumStore::NoVN);
4278 LclSsaVarDsc* ssaDef = varDsc->GetPerSsaData(SsaConfig::FIRST_SSA_NUM);
4279 ssaDef->m_vnPair.SetBoth(initVal);
4280 ssaDef->m_defLoc.m_blk = fgFirstBB;
4283 // Give "Heap" an initial value number (about which we know nothing).
4284 ValueNum heapInitVal = vnStore->VNForFunc(TYP_REF, VNF_InitVal, vnStore->VNForIntCon(-1)); // Use -1 for the heap.
4285 GetHeapPerSsaData(SsaConfig::FIRST_SSA_NUM)->m_vnPair.SetBoth(heapInitVal);
4289 printf("Heap Initial Value in BB01 is: " STR_VN "%x\n", heapInitVal);
4293 ValueNumberState vs(this);
4295 // Push the first block. This has no preds.
4296 vs.m_toDoAllPredsDone.Push(fgFirstBB);
4298 while (vs.ToDoExists())
4300 while (vs.m_toDoAllPredsDone.Size() > 0)
4302 BasicBlock* toDo = vs.m_toDoAllPredsDone.Pop();
4303 fgValueNumberBlock(toDo, /*newVNsForPhis*/ false);
4304 // Record that we've visited "toDo", and add successors to the right sets.
4305 vs.FinishVisit(toDo);
4307 // OK, we've run out of blocks whose predecessors are done. Pick one whose predecessors are not all done,
4308 // process that. This may make more "all-done" blocks, so we'll go around the outer loop again --
4309 // note that this is an "if", not a "while" loop.
4310 if (vs.m_toDoNotAllPredsDone.Size() > 0)
4312 BasicBlock* toDo = vs.ChooseFromNotAllPredsDone();
4313 if (toDo == nullptr)
4315 continue; // We may have run out, because of completed blocks on the not-all-preds done list.
4318 fgValueNumberBlock(toDo, /*newVNsForPhis*/ true);
// Record that we've visited "toDo", and add successors to the right sets.
4320 vs.FinishVisit(toDo);
4328 fgVNPassesCompleted++;
//------------------------------------------------------------------------
// fgValueNumberBlock: Value number the phi definitions, the heap state, and
//    all statements of a single basic block.
//
// Arguments:
//    blk           - the block to value number
//    newVNsForPhis - if true, give phi definitions new VNs without relying on
//                    all phi args agreeing (used when not all predecessors
//                    have been visited yet)
void Compiler::fgValueNumberBlock(BasicBlock* blk, bool newVNsForPhis)
    compCurStmtNum = blk->bbStmtNum - 1; // Set compCurStmtNum

    unsigned outerLoopNum = BasicBlock::NOT_IN_LOOP;

    // First: visit phi's.  If "newVNForPhis", give them new VN's.  If not,
    // first check to see if all phi args have the same value.
    GenTreePtr firstNonPhi = blk->FirstNonPhiDef();
    for (GenTreePtr phiDefs = blk->bbTreeList; phiDefs != firstNonPhi; phiDefs = phiDefs->gtNext)
        // TODO-Cleanup: It has been proposed that we should have an IsPhiDef predicate.  We would use it
        // in Block::FirstNonPhiDef as well.
        GenTreePtr phiDef = phiDefs->gtStmt.gtStmtExpr;
        assert(phiDef->OperGet() == GT_ASG);
        GenTreeLclVarCommon* newSsaVar = phiDef->gtOp.gtOp1->AsLclVarCommon();

        ValueNumPair phiAppVNP;  // accumulates the VNF_Phi application over all args
        ValueNumPair sameVNPair; // the common VN pair, if all args agree

        GenTreePtr phiFunc = phiDef->gtOp.gtOp2;

        // At this point a GT_PHI node should never have a nullptr for gtOp1
        // and the gtOp1 should always be a GT_LIST node.
        GenTreePtr phiOp1 = phiFunc->gtOp.gtOp1;
        noway_assert(phiOp1 != nullptr);
        noway_assert(phiOp1->OperGet() == GT_LIST);

        GenTreeArgList* phiArgs = phiFunc->gtOp.gtOp1->AsArgList();

        // A GT_PHI node should have more than one argument.
        noway_assert(phiArgs->Rest() != nullptr);

        GenTreeLclVarCommon* phiArg = phiArgs->Current()->AsLclVarCommon();
        phiArgs = phiArgs->Rest();

        // Seed the phi application with the first arg's SSA number.
        phiAppVNP.SetBoth(vnStore->VNForIntCon(phiArg->gtSsaNum));
        bool allSameLib  = true;
        bool allSameCons = true;
        sameVNPair = lvaTable[phiArg->gtLclNum].GetPerSsaData(phiArg->gtSsaNum)->m_vnPair;
        if (!sameVNPair.BothDefined())
            allSameCons = false;
        while (phiArgs != nullptr)
            phiArg = phiArgs->Current()->AsLclVarCommon();
            // Set the VN of the phi arg.
            phiArg->gtVNPair = lvaTable[phiArg->gtLclNum].GetPerSsaData(phiArg->gtSsaNum)->m_vnPair;
            if (phiArg->gtVNPair.BothDefined())
                if (phiArg->gtVNPair.GetLiberal() != sameVNPair.GetLiberal())
                if (phiArg->gtVNPair.GetConservative() != sameVNPair.GetConservative())
                    allSameCons = false;
                allSameCons = false;
            // Fold this arg's SSA number into the growing VNF_Phi application.
            ValueNumPair phiArgSsaVNP;
            phiArgSsaVNP.SetBoth(vnStore->VNForIntCon(phiArg->gtSsaNum));
            phiAppVNP = vnStore->VNPairForFunc(newSsaVar->TypeGet(), VNF_Phi, phiArgSsaVNP, phiAppVNP);
            phiArgs = phiArgs->Rest();

        // Pick the common VN where all args agreed, the phi application otherwise
        // (selected independently for the liberal and conservative interpretations).
        ValueNumPair newVNPair;
            newVNPair.SetLiberal(sameVNPair.GetLiberal());
            newVNPair.SetLiberal(phiAppVNP.GetLiberal());
            newVNPair.SetConservative(sameVNPair.GetConservative());
            newVNPair.SetConservative(phiAppVNP.GetConservative());

        LclSsaVarDsc* newSsaVarDsc = lvaTable[newSsaVar->gtLclNum].GetPerSsaData(newSsaVar->GetSsaNum());
        // If all the args of the phi had the same value(s, liberal and conservative), then there wasn't really
        // a reason to have the phi -- just pass on that value.
        if (allSameLib && allSameCons)
            newSsaVarDsc->m_vnPair = newVNPair;
                printf("In SSA definition, incoming phi args all same, set VN of local %d/%d to ",
                       newSsaVar->GetLclNum(), newSsaVar->GetSsaNum());
                vnpPrint(newVNPair, 1);
            // They were not the same; we need to create a phi definition.
            ValueNumPair lclNumVNP;
            lclNumVNP.SetBoth(ValueNum(newSsaVar->GetLclNum()));
            ValueNumPair ssaNumVNP;
            ssaNumVNP.SetBoth(ValueNum(newSsaVar->GetSsaNum()));
            ValueNumPair vnPhiDef =
                vnStore->VNPairForFunc(newSsaVar->TypeGet(), VNF_PhiDef, lclNumVNP, ssaNumVNP, phiAppVNP);
            newSsaVarDsc->m_vnPair = vnPhiDef;
                printf("SSA definition: set VN of local %d/%d to ", newSsaVar->GetLclNum(), newSsaVar->GetSsaNum());
                vnpPrint(vnPhiDef, 1);

    // Now do the same for "Heap".
    // Is there a phi for this block?
    if (blk->bbHeapSsaPhiFunc == nullptr)
        // No heap phi: inherit the heap state recorded for this block's incoming heap SSA number.
        fgCurHeapVN = GetHeapPerSsaData(blk->bbHeapSsaNumIn)->m_vnPair.GetLiberal();
        assert(fgCurHeapVN != ValueNumStore::NoVN);
        if (optBlockIsLoopEntry(blk, &loopNum))
            // Loop entry: summarize the loop's heap side effects instead of building a phi.
            newHeapVN = fgHeapVNForLoopSideEffects(blk, loopNum);
        // Are all the VN's the same?
        BasicBlock::HeapPhiArg* phiArgs = blk->bbHeapSsaPhiFunc;
        assert(phiArgs != BasicBlock::EmptyHeapPhiDef);
        // There should be > 1 args to a phi.
        assert(phiArgs->m_nextArg != nullptr);
        ValueNum phiAppVN = vnStore->VNForIntCon(phiArgs->GetSsaNum());
        JITDUMP("  Building phi application: $%x = SSA# %d.\n", phiAppVN, phiArgs->GetSsaNum());
        bool allSame = true;
        ValueNum sameVN = GetHeapPerSsaData(phiArgs->GetSsaNum())->m_vnPair.GetLiberal();
        if (sameVN == ValueNumStore::NoVN)
        phiArgs = phiArgs->m_nextArg;
        while (phiArgs != nullptr)
            ValueNum phiArgVN = GetHeapPerSsaData(phiArgs->GetSsaNum())->m_vnPair.GetLiberal();
            if (phiArgVN == ValueNumStore::NoVN || phiArgVN != sameVN)
                ValueNum oldPhiAppVN = phiAppVN;
                // Fold this arg's heap SSA number into the growing VNF_Phi application.
                unsigned phiArgSSANum   = phiArgs->GetSsaNum();
                ValueNum phiArgSSANumVN = vnStore->VNForIntCon(phiArgSSANum);
                JITDUMP("  Building phi application: $%x = SSA# %d.\n", phiArgSSANumVN, phiArgSSANum);
                phiAppVN = vnStore->VNForFunc(TYP_REF, VNF_Phi, phiArgSSANumVN, phiAppVN);
                JITDUMP("  Building phi application: $%x = phi($%x, $%x).\n", phiAppVN, phiArgSSANumVN, oldPhiAppVN);
            phiArgs = phiArgs->m_nextArg;
            // Otherwise, wrap the phi application in a VNF_PhiHeapDef keyed by this block.
            vnStore->VNForFunc(TYP_REF, VNF_PhiHeapDef, vnStore->VNForHandle(ssize_t(blk), 0), phiAppVN);
        GetHeapPerSsaData(blk->bbHeapSsaNumIn)->m_vnPair.SetLiberal(newHeapVN);
        fgCurHeapVN = newHeapVN;
        printf("The SSA definition for heap (#%d) at start of BB%02u is ", blk->bbHeapSsaNumIn, blk->bbNum);
        vnPrint(fgCurHeapVN, 1);

    // Now iterate over the remaining statements, and their trees.
    for (GenTreePtr stmt = firstNonPhi; stmt != nullptr; stmt = stmt->gtNext)
        assert(stmt->IsStatement());
            printf("\n***** BB%02u, stmt %d (before)\n", blk->bbNum, compCurStmtNum);
            gtDispTree(stmt->gtStmt.gtStmtExpr);
        // Value number every node in the statement, in linear (execution) order.
        for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
            fgValueNumberTree(tree);
            printf("\n***** BB%02u, stmt %d (after)\n", blk->bbNum, compCurStmtNum);
            gtDispTree(stmt->gtStmt.gtStmtExpr);
            printf("---------\n");

    // If the block changes the heap SSA name, record the block's final heap state under
    // the outgoing heap SSA number.
    if (blk->bbHeapSsaNumOut != blk->bbHeapSsaNumIn)
        GetHeapPerSsaData(blk->bbHeapSsaNumOut)->m_vnPair.SetLiberal(fgCurHeapVN);

    compCurBB = nullptr;
//------------------------------------------------------------------------
// fgHeapVNForLoopSideEffects: Compute the heap VN at entry to a loop nest by
//    starting from the heap state of the single non-loop predecessor and
//    invalidating (mapping to fresh VNs) everything the loop may modify.
//
// Arguments:
//    entryBlock       - the loop entry block
//    innermostLoopNum - the innermost loop for which 'entryBlock' is the entry
//
// Return Value:
//    The heap VN to use at the start of 'entryBlock'.
ValueNum Compiler::fgHeapVNForLoopSideEffects(BasicBlock* entryBlock, unsigned innermostLoopNum)
    // "loopNum" is the innermost loop for which "blk" is the entry; find the outermost one.
    assert(innermostLoopNum != BasicBlock::NOT_IN_LOOP);
    unsigned loopsInNest = innermostLoopNum;
    unsigned loopNum = innermostLoopNum;
    // Walk up the parent-loop chain as long as 'entryBlock' remains the entry block.
    while (loopsInNest != BasicBlock::NOT_IN_LOOP)
        if (optLoopTable[loopsInNest].lpEntry != entryBlock)
        loopNum = loopsInNest;
        loopsInNest = optLoopTable[loopsInNest].lpParent;

        printf("Computing heap state for block BB%02u, entry block for loops %d to %d:\n", entryBlock->bbNum,
               innermostLoopNum, loopNum);

    // If this loop has heap havoc effects, just use a new, unique VN.
    if (optLoopTable[loopNum].lpLoopHasHeapHavoc)
        ValueNum res = vnStore->VNForExpr(entryBlock, TYP_REF);
            printf("  Loop %d has heap havoc effect; heap state is new fresh $%x.\n", loopNum, res);

    // Otherwise, find the predecessors of the entry block that are not in the loop.
    // If there is only one such, use its heap value as the "base."  If more than one,
    // use a new unique heap VN.
    BasicBlock* nonLoopPred = nullptr;
    bool multipleNonLoopPreds = false;
    for (flowList* pred = BlockPredsWithEH(entryBlock); pred != nullptr; pred = pred->flNext)
        BasicBlock* predBlock = pred->flBlock;
        if (!optLoopTable[loopNum].lpContains(predBlock))
            if (nonLoopPred == nullptr)
                nonLoopPred = predBlock;
                    printf("  Entry block has >1 non-loop preds: (at least) BB%02u and BB%02u.\n", nonLoopPred->bbNum,
                multipleNonLoopPreds = true;
    if (multipleNonLoopPreds)
        ValueNum res = vnStore->VNForExpr(entryBlock, TYP_REF);
            printf("  Therefore, heap state is new, fresh $%x.\n", res);
    // Otherwise, there is a single non-loop pred.
    assert(nonLoopPred != nullptr);
    // What is its heap post-state?
    ValueNum newHeapVN = GetHeapPerSsaData(nonLoopPred->bbHeapSsaNumOut)->m_vnPair.GetLiberal();
           ValueNumStore::NoVN); // We must have processed the single non-loop pred before reaching the loop entry.
        printf("  Init heap state is $%x, with new, fresh VN at:\n", newHeapVN);

    // Modify "base" by setting all the modified fields/field maps/array maps to unknown values.
    // First the fields/field maps.
    Compiler::LoopDsc::FieldHandleSet* fieldsMod = optLoopTable[loopNum].lpFieldsModified;
    if (fieldsMod != nullptr)
        for (Compiler::LoopDsc::FieldHandleSet::KeyIterator ki = fieldsMod->Begin(); !ki.Equal(fieldsMod->End()); ++ki)
            CORINFO_FIELD_HANDLE fldHnd = ki.Get();
            ValueNum fldHndVN = vnStore->VNForHandle(ssize_t(fldHnd), GTF_ICON_FIELD_HDL);
                const char* modName;
                const char* fldName = eeGetFieldName(fldHnd, &modName);
                printf("  VNForHandle(Fseq[%s]) is " STR_VN "%x\n", fldName, fldHndVN);
                printf("  fgCurHeapVN assigned:\n");
            // Map the modified field to a fresh, unknown value in the heap map.
            newHeapVN = vnStore->VNForMapStore(TYP_REF, newHeapVN, fldHndVN, vnStore->VNForExpr(entryBlock, TYP_REF));
    // Now do the array maps.
    Compiler::LoopDsc::ClassHandleSet* elemTypesMod = optLoopTable[loopNum].lpArrayElemTypesModified;
    if (elemTypesMod != nullptr)
        for (Compiler::LoopDsc::ClassHandleSet::KeyIterator ki = elemTypesMod->Begin(); !ki.Equal(elemTypesMod->End());
            CORINFO_CLASS_HANDLE elemClsHnd = ki.Get();
                var_types elemTyp = DecodeElemType(elemClsHnd);
                if (varTypeIsStruct(elemTyp))
                    printf("  Array map %s[]\n", eeGetClassName(elemClsHnd));
                    printf("  Array map %s[]\n", varTypeName(elemTyp));
                printf("  fgCurHeapVN assigned:\n");
            // Map the modified array-element type to a fresh, unknown value in the heap map.
            ValueNum elemTypeVN = vnStore->VNForHandle(ssize_t(elemClsHnd), GTF_ICON_CLASS_HDL);
            ValueNum uniqueVN = vnStore->VNForExpr(entryBlock, TYP_REF);
            newHeapVN = vnStore->VNForMapStore(TYP_REF, newHeapVN, elemTypeVN, uniqueVN);
        printf("  Final heap state is $%x.\n", newHeapVN);
//------------------------------------------------------------------------
// fgMutateHeap: Record an arbitrary, unknown mutation of the global heap.
//
// Arguments:
//    tree - the node whose side effect invalidates the current heap state
//    msg  - (DEBUG only) description of the mutation source, for dump output
//
// Notes:
//    Gives fgCurHeapVN a new, unique VN (we know nothing about the heap's
//    contents after the mutation), and records the heap SSA number for
//    'tree' if one is being tracked.
void Compiler::fgMutateHeap(GenTreePtr tree DEBUGARG(const char* msg))
    // bbHeapDef must be set to true for any block that Mutates the global Heap
    assert(compCurBB->bbHeapDef);

    // The heap's new state is unknown: a fresh, unique VN.
    fgCurHeapVN = vnStore->VNForExpr(compCurBB, TYP_REF);

    // If we're tracking the heap SSA # caused by this node, record it.
    fgValueNumberRecordHeapSsa(tree);

        printf("    fgCurHeapVN assigned by %s at ", msg);
        Compiler::printTreeID(tree);
        printf(" to new unique VN: " STR_VN "%x.\n", fgCurHeapVN);
//------------------------------------------------------------------------
// fgValueNumberRecordHeapSsa: If 'tree' has a heap SSA number recorded in the
//    heap SSA map, store the current heap VN (fgCurHeapVN) as the liberal VN
//    of that heap SSA name.
//
// Arguments:
//    tree - the node to look up in the heap SSA map
void Compiler::fgValueNumberRecordHeapSsa(GenTreePtr tree)
    if (GetHeapSsaMap()->Lookup(tree, &ssaNum))
        GetHeapPerSsaData(ssaNum)->m_vnPair.SetLiberal(fgCurHeapVN);
            Compiler::printTreeID(tree);
            printf(" sets heap SSA # %d to VN $%x: ", ssaNum, fgCurHeapVN);
            vnStore->vnDump(this, fgCurHeapVN);
//------------------------------------------------------------------------
// fgValueNumberTreeConst: The input 'tree' is a leaf node that is a constant.
//    Assign the proper value number to the tree.
//
// Arguments:
//    tree - a constant leaf node (GenTree::OperIsConst must hold)
void Compiler::fgValueNumberTreeConst(GenTreePtr tree)
    genTreeOps oper = tree->OperGet();
    var_types typ = tree->TypeGet();
    assert(GenTree::OperIsConst(oper));

    // Integral constants: handle constants get a handle VN; otherwise a
    // long/int constant VN depending on the type.
    if (tree->IsCnsIntOrI() && tree->IsIconHandle())
        tree->gtVNPair.SetBoth(
            vnStore->VNForHandle(ssize_t(tree->gtIntConCommon.IconValue()), tree->GetIconHandleFlag()));
    else if ((typ == TYP_LONG) || (typ == TYP_ULONG))
        tree->gtVNPair.SetBoth(vnStore->VNForLongCon(INT64(tree->gtIntConCommon.LngValue())));
        tree->gtVNPair.SetBoth(vnStore->VNForIntCon(int(tree->gtIntConCommon.IconValue())));

    // Floating-point constants (float vs. double).
    tree->gtVNPair.SetBoth(vnStore->VNForFloatCon((float)tree->gtDblCon.gtDconVal));
    tree->gtVNPair.SetBoth(vnStore->VNForDoubleCon(tree->gtDblCon.gtDconVal));

    // Ref-typed constants:
    // Null is the only constant. (Except maybe for String?)
    tree->gtVNPair.SetBoth(ValueNumStore::VNForNull());

    // Byref-typed constants: a zero byref is null; handle constants keep a
    // handle VN; any other value gets a byref-constant VN.
    if (tree->gtIntConCommon.IconValue() == 0)
        tree->gtVNPair.SetBoth(ValueNumStore::VNForNull());
        assert(tree->IsCnsIntOrI());
        if (tree->IsIconHandle())
            tree->gtVNPair.SetBoth(
                vnStore->VNForHandle(ssize_t(tree->gtIntConCommon.IconValue()), tree->GetIconHandleFlag()));
            tree->gtVNPair.SetBoth(vnStore->VNForByrefCon(tree->gtIntConCommon.IconValue()));
4838 //------------------------------------------------------------------------
4839 // fgValueNumberBlockAssignment: Perform value numbering for block assignments.
4842 // tree - the block assignment to be value numbered.
4843 // evalAsgLhsInd - true iff we should value number the LHS of the assignment.
4849 // 'tree' must be a block assignment (GT_INITBLK, GT_COPYBLK, GT_COPYOBJ).
void Compiler::fgValueNumberBlockAssignment(GenTreePtr tree, bool evalAsgLhsInd)
    GenTree* lhs = tree->gtGetOp1();
    GenTree* rhs = tree->gtGetOp2();

    // Sometimes we query the heap ssa map, and need a dummy location for the ignored result.
    unsigned heapSsaNum;

    // --- Init-block (GT_INITBLK-style) assignments. ---
    if (tree->OperIsInitBlkOp())
        GenTreeLclVarCommon* lclVarTree;

        if (tree->DefinesLocal(this, &lclVarTree, &isEntire))
            assert(lclVarTree->gtFlags & GTF_VAR_DEF);
            // Should not have been recorded as updating the heap.
            assert(!GetHeapSsaMap()->Lookup(tree, &heapSsaNum));

            unsigned lclNum = lclVarTree->GetLclNum();

            // Ignore vars that we excluded from SSA (for example, because they're address-exposed). They don't have
            // SSA names in which to store VN's on defs.  We'll yield unique VN's when we read from them.
            if (!fgExcludeFromSsa(lclNum))
                unsigned lclDefSsaNum = GetSsaNumForLocalVarDef(lclVarTree);

                ValueNum initBlkVN = ValueNumStore::NoVN;
                GenTreePtr initConst = rhs;
                if (isEntire && initConst->OperGet() == GT_CNS_INT)
                    unsigned initVal = 0xFF & (unsigned)initConst->AsIntConCommon()->IconValue();
                        // An entire init with a zero pattern yields the zero VN for the local's type.
                        initBlkVN = vnStore->VNZeroForType(lclVarTree->TypeGet());
                // Otherwise (non-zero or partial init), the local gets a new, unique VN.
                ValueNum lclVarVN = (initBlkVN != ValueNumStore::NoVN)
                                        : vnStore->VNForExpr(compCurBB, var_types(lvaTable[lclNum].lvType));

                lvaTable[lclNum].GetPerSsaData(lclDefSsaNum)->m_vnPair.SetBoth(lclVarVN);
                    printf("N%03u ", tree->gtSeqNum);
                    Compiler::printTreeID(tree);
                    gtDispNodeName(tree);
                    printf(" V%02u/%d => ", lclNum, lclDefSsaNum);
                    vnPrint(lclVarVN, 1);

            // Initblock's are of type void.  Give them the void "value" -- they may occur in argument lists, which we
            // want to be able to give VN's to.
            tree->gtVNPair.SetBoth(ValueNumStore::VNForVoid());
            // For now, arbitrary side effect on Heap.
            // TODO-CQ: Why not be complete, and get this case right?
            fgMutateHeap(tree DEBUGARG("INITBLK - non local"));
        // --- Copy-block (GT_COPYBLK/GT_COPYOBJ-style) assignments. ---
        assert(tree->OperIsCopyBlkOp());
        // TODO-Cleanup: We should factor things so that we uniformly rely on "PtrTo" VN's, and
        // the heap cases can be shared with assignments.
        GenTreeLclVarCommon* lclVarTree = nullptr;
        bool isEntire = false;
        // Note that we don't care about exceptions here, since we're only using the values
        // to perform an assignment (which happens after any exceptions are raised...)

        if (tree->DefinesLocal(this, &lclVarTree, &isEntire))
            // Should not have been recorded as updating the heap.
            assert(!GetHeapSsaMap()->Lookup(tree, &heapSsaNum));

            unsigned lhsLclNum = lclVarTree->GetLclNum();
            FieldSeqNode* lhsFldSeq = nullptr;
            // If it's excluded from SSA, don't need to do anything.
            if (!fgExcludeFromSsa(lhsLclNum))
                unsigned lclDefSsaNum = GetSsaNumForLocalVarDef(lclVarTree);

                if (lhs->IsLocalExpr(this, &lclVarTree, &lhsFldSeq) ||
                    (lhs->OperIsBlk() && (lhs->AsBlk()->gtBlkSize == lvaLclSize(lhsLclNum))))
                    noway_assert(lclVarTree->gtLclNum == lhsLclNum);
                    // Otherwise the LHS is an indirection; get its address operand.
                    if (lhs->OperIsBlk())
                        lhsAddr = lhs->AsBlk()->Addr();
                        assert(lhs->OperGet() == GT_IND);
                        lhsAddr = lhs->gtOp.gtOp1;

                    // For addr-of-local expressions, lib/cons shouldn't matter.
                    assert(lhsAddr->gtVNPair.BothEqual());
                    ValueNum lhsAddrVN = lhsAddr->GetVN(VNK_Liberal);

                    // Unpack the PtrToLoc value number of the address.
                    assert(vnStore->IsVNFunc(lhsAddrVN));
                    VNFuncApp lhsAddrFuncApp;
                    vnStore->GetVNFunc(lhsAddrVN, &lhsAddrFuncApp);
                    assert(lhsAddrFuncApp.m_func == VNF_PtrToLoc);
                    assert(vnStore->IsVNConstant(lhsAddrFuncApp.m_args[0]) &&
                           vnStore->ConstantValue<unsigned>(lhsAddrFuncApp.m_args[0]) == lhsLclNum);
                    lhsFldSeq = vnStore->FieldSeqVNToFieldSeq(lhsAddrFuncApp.m_args[1]);

                // Now we need to get the proper RHS.
                GenTreeLclVarCommon* rhsLclVarTree = nullptr;
                LclVarDsc* rhsVarDsc = nullptr;
                FieldSeqNode* rhsFldSeq = nullptr;
                ValueNumPair rhsVNPair;
                bool isNewUniq = false;
                if (!rhs->OperIsIndir())
                    if (rhs->IsLocalExpr(this, &rhsLclVarTree, &rhsFldSeq))
                        unsigned rhsLclNum = rhsLclVarTree->GetLclNum();
                        rhsVarDsc = &lvaTable[rhsLclNum];
                        // Non-SSA or unknown-field RHS locals get a new, unique VN.
                        if (fgExcludeFromSsa(rhsLclNum) || rhsFldSeq == FieldSeqStore::NotAField())
                            rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, rhsLclVarTree->TypeGet()));
                            // Otherwise read the RHS local's SSA VN and select the referenced field(s).
                            rhsVNPair = lvaTable[rhsLclVarTree->GetLclNum()]
                                            .GetPerSsaData(rhsLclVarTree->GetSsaNum())
                            var_types indType = rhsLclVarTree->TypeGet();
                            rhsVNPair = vnStore->VNPairApplySelectors(rhsVNPair, rhsFldSeq, indType);
                        rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, rhs->TypeGet()));
                    // The RHS is an indirection: examine its source address.
                    GenTreePtr srcAddr = rhs->AsIndir()->Addr();
                    VNFuncApp srcAddrFuncApp;
                    if (srcAddr->IsLocalAddrExpr(this, &rhsLclVarTree, &rhsFldSeq))
                        unsigned rhsLclNum = rhsLclVarTree->GetLclNum();
                        rhsVarDsc = &lvaTable[rhsLclNum];
                        if (fgExcludeFromSsa(rhsLclNum) || rhsFldSeq == FieldSeqStore::NotAField())
                            rhsVNPair = lvaTable[rhsLclVarTree->GetLclNum()]
                                            .GetPerSsaData(rhsLclVarTree->GetSsaNum())
                            var_types indType = rhsLclVarTree->TypeGet();
                            rhsVNPair = vnStore->VNPairApplySelectors(rhsVNPair, rhsFldSeq, indType);
                    else if (vnStore->GetVNFunc(vnStore->VNNormVal(srcAddr->gtVNPair.GetLiberal()), &srcAddrFuncApp))
                        if (srcAddrFuncApp.m_func == VNF_PtrToStatic)
                            var_types indType = lclVarTree->TypeGet();
                            ValueNum fieldSeqVN = srcAddrFuncApp.m_args[0];

                            FieldSeqNode* zeroOffsetFldSeq = nullptr;
                            if (GetZeroOffsetFieldMap()->Lookup(srcAddr, &zeroOffsetFldSeq))
                                // Append any zero-offset field sequence recorded for this address.
                                vnStore->FieldSeqVNAppend(fieldSeqVN, vnStore->VNForFieldSeq(zeroOffsetFldSeq));

                            FieldSeqNode* fldSeqForStaticVar = vnStore->FieldSeqVNToFieldSeq(fieldSeqVN);

                            if (fldSeqForStaticVar != FieldSeqStore::NotAField())
                                // We model statics as indices into the heap variable.
                                ValueNum selectedStaticVar;
                                size_t structSize = 0;
                                selectedStaticVar = vnStore->VNApplySelectors(VNK_Liberal, fgCurHeapVN,
                                                                              fldSeqForStaticVar, &structSize);
                                vnStore->VNApplySelectorsTypeCheck(selectedStaticVar, indType, structSize);

                                rhsVNPair.SetLiberal(selectedStaticVar);
                                // The conservative interpretation gets a new, unique VN.
                                rhsVNPair.SetConservative(vnStore->VNForExpr(compCurBB, indType));
                                JITDUMP("    *** Missing field sequence info for Src/RHS of COPYBLK\n");
                                rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, indType)); // a new unique value number
                        else if (srcAddrFuncApp.m_func == VNF_PtrToArrElem)
                            // Read the array element value under the liberal interpretation.
                            fgValueNumberArrIndexVal(nullptr, &srcAddrFuncApp, vnStore->VNForEmptyExcSet());
                            rhsVNPair.SetLiberal(elemLib);
                            rhsVNPair.SetConservative(vnStore->VNForExpr(compCurBB, lclVarTree->TypeGet()));

                // Combine the RHS value with the LHS field sequence to form the new VN of the LHS local.
                if (lhsFldSeq == FieldSeqStore::NotAField())
                    // We don't have proper field sequence information for the lhs
                    JITDUMP("    *** Missing field sequence info for Dst/LHS of COPYBLK\n");
                else if (lhsFldSeq != nullptr && isEntire)
                    // This can occur for structs with one field, itself of a struct type.
                    // We won't promote these.
                    // TODO-Cleanup: decide what exactly to do about this.
                    // Always treat them as maps, making them use/def, or reconstitute the
                else if (!isNewUniq)
                    // Partial def: store the RHS into the old LHS value via the field selectors.
                    ValueNumPair oldLhsVNPair = lvaTable[lhsLclNum].GetPerSsaData(lclVarTree->GetSsaNum())->m_vnPair;
                    rhsVNPair = vnStore->VNPairApplySelectorsAssign(oldLhsVNPair, lhsFldSeq, rhsVNPair,
                                                                    lclVarTree->TypeGet(), compCurBB);
                    rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lclVarTree->TypeGet()));

                // Record the (exception-set-normalized) value under the LHS local's new SSA name.
                lvaTable[lhsLclNum].GetPerSsaData(lclDefSsaNum)->m_vnPair = vnStore->VNPNormVal(rhsVNPair);
                    Compiler::printTreeID(tree);
                    printf(" assigned VN to local var V%02u/%d: ", lhsLclNum, lclDefSsaNum);
                        printf("new uniq ");
                    vnpPrint(rhsVNPair, 1);
            // For now, arbitrary side effect on Heap.
            // TODO-CQ: Why not be complete, and get this case right?
            fgMutateHeap(tree DEBUGARG("COPYBLK - non local"));
        // Copyblock's are of type void.  Give them the void "value" -- they may occur in argument lists, which we want
        // to be able to give VN's to.
        tree->gtVNPair.SetBoth(ValueNumStore::VNForVoid());
5137 void Compiler::fgValueNumberTree(GenTreePtr tree, bool evalAsgLhsInd)
5139 genTreeOps oper = tree->OperGet();
5142 // TODO-CQ: For now TYP_SIMD values are not handled by value numbering to be amenable for CSE'ing.
5143 if (oper == GT_SIMD)
5145 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, TYP_UNKNOWN));
5150 var_types typ = tree->TypeGet();
5151 if (GenTree::OperIsConst(oper))
5153 // If this is a struct assignment, with a constant rhs, it is an initBlk, and it is not
5154 // really useful to value number the constant.
5155 if (!varTypeIsStruct(tree))
5157 fgValueNumberTreeConst(tree);
5160 else if (GenTree::OperIsLeaf(oper))
5167 GenTreeLclVarCommon* lcl = tree->AsLclVarCommon();
5168 unsigned lclNum = lcl->gtLclNum;
5170 if ((lcl->gtFlags & GTF_VAR_DEF) == 0 ||
5171 (lcl->gtFlags & GTF_VAR_USEASG)) // If it is a "pure" def, will handled as part of the assignment.
5173 LclVarDsc* varDsc = &lvaTable[lcl->gtLclNum];
5174 if (varDsc->lvPromoted && varDsc->lvFieldCnt == 1)
5176 // If the promoted var has only one field var, treat like a use of the field var.
5177 lclNum = varDsc->lvFieldLclStart;
5180 // Initialize to the undefined value, so we know whether we hit any of the cases here.
5181 lcl->gtVNPair = ValueNumPair();
5183 if (lcl->gtSsaNum == SsaConfig::RESERVED_SSA_NUM)
5185 // Not an SSA variable. Assign each occurrence a new, unique, VN.
5186 lcl->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lcl->TypeGet()));
5190 var_types varType = varDsc->TypeGet();
5191 ValueNumPair wholeLclVarVNP = varDsc->GetPerSsaData(lcl->gtSsaNum)->m_vnPair;
5193 // Check for mismatched LclVar size
5195 unsigned typSize = genTypeSize(genActualType(typ));
5196 unsigned varSize = genTypeSize(genActualType(varType));
5198 if (typSize == varSize)
5200 lcl->gtVNPair = wholeLclVarVNP;
5202 else // mismatched LclVar definition and LclVar use size
5204 if (typSize < varSize)
5206 // the indirection is reading less that the whole LclVar
5207 // create a new VN that represent the partial value
5209 ValueNumPair partialLclVarVNP = vnStore->VNPairForCast(wholeLclVarVNP, typ, varType);
5210 lcl->gtVNPair = partialLclVarVNP;
5214 assert(typSize > varSize);
5215 // the indirection is reading beyond the end of the field
5217 lcl->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, typ)); // return a new unique value
5222 // Temporary, to make progress.
5223 // TODO-CQ: This should become an assert again...
5224 if (lcl->gtVNPair.GetLiberal() == ValueNumStore::NoVN)
5226 assert(lcl->gtVNPair.GetConservative() == ValueNumStore::NoVN);
5228 // We don't want to fabricate arbitrary value numbers to things we can't reason about.
5229 // So far, we know about two of these cases:
5230 // Case 1) We have a local var who has never been defined but it's seen as a use.
5231 // This is the case of storeIndir(addr(lclvar)) = expr. In this case since we only
5232 // take the address of the variable, this doesn't mean it's a use nor we have to
5233 // initialize it, so in this very rare case, we fabricate a value number.
5234 // Case 2) Local variables that represent structs which are assigned using CpBlk.
5235 GenTree* nextNode = lcl->gtNext;
5236 assert((nextNode->gtOper == GT_ADDR && nextNode->gtOp.gtOp1 == lcl) ||
5237 varTypeIsStruct(lcl->TypeGet()));
5238 lcl->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lcl->TypeGet()));
5240 assert(lcl->gtVNPair.BothDefined());
5243 // TODO-Review: For the short term, we have a workaround for copyblk/initblk. Those that use
5244 // addrSpillTemp will have a statement like "addrSpillTemp = addr(local)." If we previously decided
5245 // that this block operation defines the local, we will have labeled the "local" node as a DEF
// (or USEDEF). This flag propagates to the "local" on the RHS. So we'll assume that this is correct,
5247 // and treat it as a def (to a new, unique VN).
5248 else if ((lcl->gtFlags & GTF_VAR_DEF) != 0)
5250 LclVarDsc* varDsc = &lvaTable[lcl->gtLclNum];
5251 if (lcl->gtSsaNum != SsaConfig::RESERVED_SSA_NUM)
5254 .GetPerSsaData(lcl->gtSsaNum)
5255 ->m_vnPair.SetBoth(vnStore->VNForExpr(compCurBB, lcl->TypeGet()));
5257 lcl->gtVNPair = ValueNumPair(); // Avoid confusion -- we don't set the VN of a lcl being defined.
5263 // Use the value of the function pointer (actually, a method handle.)
5264 tree->gtVNPair.SetBoth(
5265 vnStore->VNForHandle(ssize_t(tree->gtFptrVal.gtFptrMethod), GTF_ICON_METHOD_HDL));
5268 // This group passes through a value from a child node.
5270 tree->SetVNsFromNode(tree->gtRetExpr.gtInlineCandidate);
5275 GenTreeLclFld* lclFld = tree->AsLclFld();
5276 assert(fgExcludeFromSsa(lclFld->GetLclNum()) || lclFld->gtFieldSeq != nullptr);
5277 // If this is a (full) def, then the variable will be labeled with the new SSA number,
5278 // which will not have a value. We skip; it will be handled by one of the assignment-like
5279 // forms (assignment, or initBlk or copyBlk).
5280 if (((lclFld->gtFlags & GTF_VAR_DEF) == 0) || (lclFld->gtFlags & GTF_VAR_USEASG))
5282 unsigned lclNum = lclFld->GetLclNum();
5283 unsigned ssaNum = lclFld->GetSsaNum();
5284 LclVarDsc* varDsc = &lvaTable[lclNum];
5286 if (ssaNum == SsaConfig::UNINIT_SSA_NUM)
5288 if (varDsc->GetPerSsaData(ssaNum)->m_vnPair.GetLiberal() == ValueNumStore::NoVN)
5290 ValueNum vnForLcl = vnStore->VNForExpr(compCurBB, lclFld->TypeGet());
5291 varDsc->GetPerSsaData(ssaNum)->m_vnPair = ValueNumPair(vnForLcl, vnForLcl);
5295 var_types indType = tree->TypeGet();
5296 if (lclFld->gtFieldSeq == FieldSeqStore::NotAField() || fgExcludeFromSsa(lclFld->GetLclNum()))
5298 // This doesn't represent a proper field access or it's a struct
5299 // with overlapping fields that is hard to reason about; return a new unique VN.
5300 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, indType));
5304 ValueNumPair lclVNPair = varDsc->GetPerSsaData(ssaNum)->m_vnPair;
5305 tree->gtVNPair = vnStore->VNPairApplySelectors(lclVNPair, lclFld->gtFieldSeq, indType);
5311 // The ones below here all get a new unique VN -- but for various reasons, explained after each.
5313 // We know nothing about the value of a caught expression.
5314 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
5318 // Skip GT_CLS_VAR nodes that are the LHS of an assignment. (We labeled these earlier.)
5319 // We will "evaluate" this as part of the assignment. (Unless we're explicitly told by
5320 // the caller to evaluate anyway -- perhaps the assignment is an "op=" assignment.)
5322 if (((tree->gtFlags & GTF_CLS_VAR_ASG_LHS) == 0) || evalAsgLhsInd)
5324 bool isVolatile = (tree->gtFlags & GTF_FLD_VOLATILE) != 0;
5328 // For Volatile indirection, first mutate the global heap
5329 fgMutateHeap(tree DEBUGARG("GTF_FLD_VOLATILE - read"));
5332 // We just mutate the heap if isVolatile is true, and then do the read as normal.
5336 // 2: volatile read s;
5339 // We should never assume that the values read by 1 and 2 are the same (because the heap was mutated
5340 // in between them)... but we *should* be able to prove that the values read in 2 and 3 are the
5344 ValueNumPair clsVarVNPair;
5346 // If the static field handle is for a struct type field, then the value of the static
5347 // is a "ref" to the boxed struct -- treat it as the address of the static (we assume that a
5348 // first element offset will be added to get to the actual struct...)
5349 GenTreeClsVar* clsVar = tree->AsClsVar();
5350 FieldSeqNode* fldSeq = clsVar->gtFieldSeq;
5351 assert(fldSeq != nullptr); // We need to have one.
5352 ValueNum selectedStaticVar = ValueNumStore::NoVN;
5353 if (gtIsStaticFieldPtrToBoxedStruct(clsVar->TypeGet(), fldSeq->m_fieldHnd))
5355 clsVarVNPair.SetBoth(
5356 vnStore->VNForFunc(TYP_BYREF, VNF_PtrToStatic, vnStore->VNForFieldSeq(fldSeq)));
5360 // This is a reference to heap memory.
5361 // We model statics as indices into the heap variable.
5363 FieldSeqNode* fldSeqForStaticVar =
5364 GetFieldSeqStore()->CreateSingleton(tree->gtClsVar.gtClsVarHnd);
5365 size_t structSize = 0;
5367 vnStore->VNApplySelectors(VNK_Liberal, fgCurHeapVN, fldSeqForStaticVar, &structSize);
5369 vnStore->VNApplySelectorsTypeCheck(selectedStaticVar, tree->TypeGet(), structSize);
5371 clsVarVNPair.SetLiberal(selectedStaticVar);
5372 // The conservative interpretation always gets a new, unique VN.
5373 clsVarVNPair.SetConservative(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
5376 // The ValueNum returned must represent the full-sized IL-Stack value
5377 // If we need to widen this value then we need to introduce a VNF_Cast here to represent
5378 // the widened value. This is necessary since the CSE package can replace all occurances
5379 // of a given ValueNum with a LclVar that is a full-sized IL-Stack value
5381 if (varTypeIsSmall(tree->TypeGet()))
5383 var_types castToType = tree->TypeGet();
5384 clsVarVNPair = vnStore->VNPairForCast(clsVarVNPair, castToType, castToType);
5386 tree->gtVNPair = clsVarVNPair;
5390 case GT_MEMORYBARRIER: // Leaf
5391 // For MEMORYBARRIER add an arbitrary side effect on Heap.
5392 fgMutateHeap(tree DEBUGARG("MEMORYBARRIER"));
5395 // These do not represent values.
5397 case GT_JMP: // Control flow
5398 case GT_LABEL: // Control flow
5399 #if !FEATURE_EH_FUNCLETS
5400 case GT_END_LFIN: // Control flow
5403 // This node is a standin for an argument whose value will be computed later. (Perhaps it's
5404 // a register argument, and we don't want to preclude use of the register in arg evaluation yet.)
5405 // We give this a "fake" value number now; if the call in which it occurs cares about the
5406 // value (e.g., it's a helper call whose result is a function of argument values) we'll reset
5407 // this later, when the later args have been assigned VNs.
5408 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
5412 // This one is special because we should never process it in this method: it should
5413 // always be taken care of, when needed, during pre-processing of a blocks phi definitions.
5421 else if (GenTree::OperIsSimple(oper))
5424 // Sometimes we query the heap ssa map, and need a dummy location for the ignored result.
5425 unsigned heapSsaNum;
5428 if (GenTree::OperIsAssignment(oper) && !varTypeIsStruct(tree))
5431 GenTreePtr lhs = tree->gtOp.gtOp1;
5432 GenTreePtr rhs = tree->gtOp.gtOp2;
5434 ValueNumPair rhsVNPair;
5437 rhsVNPair = rhs->gtVNPair;
5439 else // Must be an "op="
5441 // If the LHS is an IND, we didn't evaluate it when we visited it previously.
5442 // But we didn't know that the parent was an op=. We do now, so go back and evaluate it.
5443 // (We actually check if the effective val is the IND. We will have evaluated any non-last
5444 // args of an LHS comma already -- including their heap effects.)
5445 GenTreePtr lhsVal = lhs->gtEffectiveVal(/*commaOnly*/ true);
5446 if (lhsVal->OperIsIndir() || (lhsVal->OperGet() == GT_CLS_VAR))
5448 fgValueNumberTree(lhsVal, /*evalAsgLhsInd*/ true);
5450 // Now we can make this assertion:
5451 assert(lhsVal->gtVNPair.BothDefined());
5452 genTreeOps op = GenTree::OpAsgToOper(oper);
5453 if (GenTree::OperIsBinary(op))
5455 ValueNumPair lhsNormVNP;
5456 ValueNumPair lhsExcVNP;
5457 lhsExcVNP.SetBoth(ValueNumStore::VNForEmptyExcSet());
5458 vnStore->VNPUnpackExc(lhsVal->gtVNPair, &lhsNormVNP, &lhsExcVNP);
5459 assert(rhs->gtVNPair.BothDefined());
5460 ValueNumPair rhsNormVNP;
5461 ValueNumPair rhsExcVNP;
5462 rhsExcVNP.SetBoth(ValueNumStore::VNForEmptyExcSet());
5463 vnStore->VNPUnpackExc(rhs->gtVNPair, &rhsNormVNP, &rhsExcVNP);
5464 rhsVNPair = vnStore->VNPWithExc(vnStore->VNPairForFunc(tree->TypeGet(),
5465 GetVNFuncForOper(op, (tree->gtFlags &
5466 GTF_UNSIGNED) != 0),
5467 lhsNormVNP, rhsNormVNP),
5468 vnStore->VNPExcSetUnion(lhsExcVNP, rhsExcVNP));
5472 // As of now, GT_CHS ==> GT_NEG is the only pattern fitting this.
5473 assert(GenTree::OperIsUnary(op));
5474 ValueNumPair lhsNormVNP;
5475 ValueNumPair lhsExcVNP;
5476 lhsExcVNP.SetBoth(ValueNumStore::VNForEmptyExcSet());
5477 vnStore->VNPUnpackExc(lhsVal->gtVNPair, &lhsNormVNP, &lhsExcVNP);
5478 rhsVNPair = vnStore->VNPWithExc(vnStore->VNPairForFunc(tree->TypeGet(),
5479 GetVNFuncForOper(op, (tree->gtFlags &
5480 GTF_UNSIGNED) != 0),
5485 if (tree->TypeGet() != TYP_VOID)
5487 // Assignment operators, as expressions, return the value of the RHS.
5488 tree->gtVNPair = rhsVNPair;
5491 // Now that we've labeled the assignment as a whole, we don't care about exceptions.
5492 rhsVNPair = vnStore->VNPNormVal(rhsVNPair);
5494 // If the types of the rhs and lhs are different then we
5495 // may want to change the ValueNumber assigned to the lhs.
5497 if (rhs->TypeGet() != lhs->TypeGet())
5499 if (rhs->TypeGet() == TYP_REF)
5501 // If we have an unsafe IL assignment of a TYP_REF to a non-ref (typically a TYP_BYREF)
5502 // then don't propagate this ValueNumber to the lhs, instead create a new unique VN
5504 rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lhs->TypeGet()));
5508 // We have to handle the case where the LHS is a comma. In that case, we don't evaluate the comma,
5509 // so we give it VNForVoid, and we're really interested in the effective value.
5510 GenTreePtr lhsCommaIter = lhs;
5511 while (lhsCommaIter->OperGet() == GT_COMMA)
5513 lhsCommaIter->gtVNPair.SetBoth(vnStore->VNForVoid());
5514 lhsCommaIter = lhsCommaIter->gtOp.gtOp2;
5516 lhs = lhs->gtEffectiveVal();
5518 // Now, record the new VN for an assignment (performing the indicated "state update").
5519 // It's safe to use gtEffectiveVal here, because the non-last elements of a comma list on the
5520 // LHS will come before the assignment in evaluation order.
5521 switch (lhs->OperGet())
5526 GenTreeLclVarCommon* lcl = lhs->AsLclVarCommon();
5527 unsigned lclDefSsaNum = GetSsaNumForLocalVarDef(lcl);
5529 // Should not have been recorded as updating the heap.
5530 assert(!GetHeapSsaMap()->Lookup(tree, &heapSsaNum));
5532 if (lclDefSsaNum != SsaConfig::RESERVED_SSA_NUM)
5534 assert(rhsVNPair.GetLiberal() != ValueNumStore::NoVN);
5536 lhs->gtVNPair = rhsVNPair;
5537 lvaTable[lcl->gtLclNum].GetPerSsaData(lclDefSsaNum)->m_vnPair = rhsVNPair;
5542 printf("N%03u ", lhs->gtSeqNum);
5543 Compiler::printTreeID(lhs);
5545 gtDispNodeName(lhs);
5546 gtDispLeaf(lhs, nullptr);
5548 vnpPrint(lhs->gtVNPair, 1);
5559 Compiler::printTreeID(tree);
5560 printf(" assigns to local var V%02u; excluded from SSA, so value not tracked.\n",
5569 GenTreeLclFld* lclFld = lhs->AsLclFld();
5570 unsigned lclDefSsaNum = GetSsaNumForLocalVarDef(lclFld);
5572 // Should not have been recorded as updating the heap.
5573 assert(!GetHeapSsaMap()->Lookup(tree, &heapSsaNum));
5575 if (lclDefSsaNum != SsaConfig::RESERVED_SSA_NUM)
5577 ValueNumPair newLhsVNPair;
5578 // Is this a full definition?
5579 if ((lclFld->gtFlags & GTF_VAR_USEASG) == 0)
5581 assert(!lclFld->IsPartialLclFld(this));
5582 assert(rhsVNPair.GetLiberal() != ValueNumStore::NoVN);
5583 newLhsVNPair = rhsVNPair;
5587 // We should never have a null field sequence here.
5588 assert(lclFld->gtFieldSeq != nullptr);
5589 if (lclFld->gtFieldSeq == FieldSeqStore::NotAField())
5591 // We don't know what field this represents. Assign a new VN to the whole variable
5592 // (since we may be writing to an unknown portion of it.)
5593 newLhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lvaGetActualType(lclFld->gtLclNum)));
5597 // We do know the field sequence.
5598 // The "lclFld" node will be labeled with the SSA number of its "use" identity
5599 // (we looked in a side table above for its "def" identity). Look up that value.
5600 ValueNumPair oldLhsVNPair =
5601 lvaTable[lclFld->GetLclNum()].GetPerSsaData(lclFld->GetSsaNum())->m_vnPair;
5603 vnStore->VNPairApplySelectorsAssign(oldLhsVNPair, lclFld->gtFieldSeq,
5604 rhsVNPair, // Pre-value.
5605 lvaGetActualType(lclFld->gtLclNum), compCurBB);
5608 lvaTable[lclFld->GetLclNum()].GetPerSsaData(lclDefSsaNum)->m_vnPair = newLhsVNPair;
5609 lhs->gtVNPair = newLhsVNPair;
5613 if (lhs->gtVNPair.GetLiberal() != ValueNumStore::NoVN)
5615 printf("N%03u ", lhs->gtSeqNum);
5616 Compiler::printTreeID(lhs);
5618 gtDispNodeName(lhs);
5619 gtDispLeaf(lhs, nullptr);
5621 vnpPrint(lhs->gtVNPair, 1);
5631 assert(false); // Phi arg cannot be LHS.
5637 bool isVolatile = (lhs->gtFlags & GTF_IND_VOLATILE) != 0;
5641 // For Volatile store indirection, first mutate the global heap
5642 fgMutateHeap(lhs DEBUGARG("GTF_IND_VOLATILE - store"));
5643 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lhs->TypeGet()));
5646 GenTreePtr arg = lhs->gtOp.gtOp1;
5648 // Indicates whether the argument of the IND is the address of a local.
5649 bool wasLocal = false;
5651 lhs->gtVNPair = rhsVNPair;
5654 ValueNum argVN = arg->gtVNPair.GetLiberal();
5656 bool argIsVNFunc = vnStore->GetVNFunc(vnStore->VNNormVal(argVN), &funcApp);
5658 // Is this an assignment to a (field of, perhaps) a local?
5659 // If it is a PtrToLoc, lib and cons VNs will be the same.
5662 IndirectAssignmentAnnotation* pIndirAnnot =
5663 nullptr; // This will be used if "tree" is an "indirect assignment",
5665 if (funcApp.m_func == VNF_PtrToLoc)
5667 assert(arg->gtVNPair.BothEqual()); // If it's a PtrToLoc, lib/cons shouldn't differ.
5668 assert(vnStore->IsVNConstant(funcApp.m_args[0]));
5669 unsigned lclNum = vnStore->ConstantValue<unsigned>(funcApp.m_args[0]);
5673 if (!fgExcludeFromSsa(lclNum))
5675 FieldSeqNode* fieldSeq = vnStore->FieldSeqVNToFieldSeq(funcApp.m_args[1]);
5677 // Either "arg" is the address of (part of) a local itself, or the assignment is an
5678 // "indirect assignment", where an outer comma expression assigned the address of a
5679 // local to a temp, and that temp is our lhs, and we recorded this in a table when we
5680 // made the indirect assignment...or else we have a "rogue" PtrToLoc, one that should
5681 // have made the local in question address-exposed. Assert on that.
5682 GenTreeLclVarCommon* lclVarTree = nullptr;
5683 bool isEntire = false;
5684 unsigned lclDefSsaNum = SsaConfig::RESERVED_SSA_NUM;
5685 ValueNumPair newLhsVNPair;
5687 if (arg->DefinesLocalAddr(this, genTypeSize(lhs->TypeGet()), &lclVarTree, &isEntire))
5689 // The local #'s should agree.
5690 assert(lclNum == lclVarTree->GetLclNum());
5692 if (fieldSeq == FieldSeqStore::NotAField())
5694 // We don't know where we're storing, so give the local a new, unique VN.
5695 // Do this by considering it an "entire" assignment, with an unknown RHS.
5697 rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lclVarTree->TypeGet()));
5702 newLhsVNPair = rhsVNPair;
5703 lclDefSsaNum = lclVarTree->GetSsaNum();
5707 // Don't use the lclVarTree's VN: if it's a local field, it will
5708 // already be dereferenced by it's field sequence.
5709 ValueNumPair oldLhsVNPair = lvaTable[lclVarTree->GetLclNum()]
5710 .GetPerSsaData(lclVarTree->GetSsaNum())
5712 lclDefSsaNum = GetSsaNumForLocalVarDef(lclVarTree);
5714 vnStore->VNPairApplySelectorsAssign(oldLhsVNPair, fieldSeq, rhsVNPair,
5715 lhs->TypeGet(), compCurBB);
5717 lvaTable[lclNum].GetPerSsaData(lclDefSsaNum)->m_vnPair = newLhsVNPair;
5719 else if (m_indirAssignMap != nullptr && GetIndirAssignMap()->Lookup(tree, &pIndirAnnot))
5721 // The local #'s should agree.
5722 assert(lclNum == pIndirAnnot->m_lclNum);
5723 assert(pIndirAnnot->m_defSsaNum != SsaConfig::RESERVED_SSA_NUM);
5724 lclDefSsaNum = pIndirAnnot->m_defSsaNum;
5725 // Does this assignment write the entire width of the local?
5726 if (genTypeSize(lhs->TypeGet()) == genTypeSize(var_types(lvaTable[lclNum].lvType)))
5728 assert(pIndirAnnot->m_useSsaNum == SsaConfig::RESERVED_SSA_NUM);
5729 assert(pIndirAnnot->m_isEntire);
5730 newLhsVNPair = rhsVNPair;
5734 assert(pIndirAnnot->m_useSsaNum != SsaConfig::RESERVED_SSA_NUM);
5735 assert(!pIndirAnnot->m_isEntire);
5736 assert(pIndirAnnot->m_fieldSeq == fieldSeq);
5737 ValueNumPair oldLhsVNPair =
5738 lvaTable[lclNum].GetPerSsaData(pIndirAnnot->m_useSsaNum)->m_vnPair;
5740 vnStore->VNPairApplySelectorsAssign(oldLhsVNPair, fieldSeq, rhsVNPair,
5741 lhs->TypeGet(), compCurBB);
5743 lvaTable[lclNum].GetPerSsaData(lclDefSsaNum)->m_vnPair = newLhsVNPair;
5747 unreached(); // "Rogue" PtrToLoc, as discussed above.
5753 Compiler::printTreeID(tree);
5754 printf(" assigned VN to local var V%02u/%d: VN ", lclNum, lclDefSsaNum);
5755 vnpPrint(newLhsVNPair, 1);
5763 // Was the argument of the GT_IND the address of a local, handled above?
5766 GenTreePtr obj = nullptr;
5767 GenTreePtr staticOffset = nullptr;
5768 FieldSeqNode* fldSeq = nullptr;
5770 // Is the LHS an array index expression?
5771 if (argIsVNFunc && funcApp.m_func == VNF_PtrToArrElem)
5773 CORINFO_CLASS_HANDLE elemTypeEq =
5774 CORINFO_CLASS_HANDLE(vnStore->ConstantValue<ssize_t>(funcApp.m_args[0]));
5775 ValueNum arrVN = funcApp.m_args[1];
5776 ValueNum inxVN = funcApp.m_args[2];
5777 FieldSeqNode* fldSeq = vnStore->FieldSeqVNToFieldSeq(funcApp.m_args[3]);
5779 // Does the child of the GT_IND 'arg' have an associated zero-offset field sequence?
5780 FieldSeqNode* addrFieldSeq = nullptr;
5781 if (GetZeroOffsetFieldMap()->Lookup(arg, &addrFieldSeq))
5783 fldSeq = GetFieldSeqStore()->Append(addrFieldSeq, fldSeq);
5790 Compiler::printTreeID(tree);
5791 printf(" assigns to an array element:\n");
5795 fgValueNumberArrIndexAssign(elemTypeEq, arrVN, inxVN, fldSeq, rhsVNPair.GetLiberal(),
5797 fgValueNumberRecordHeapSsa(tree);
5799 // It may be that we haven't parsed it yet. Try.
5800 else if (lhs->gtFlags & GTF_IND_ARR_INDEX)
5803 bool b = GetArrayInfoMap()->Lookup(lhs, &arrInfo);
5805 ValueNum arrVN = ValueNumStore::NoVN;
5806 ValueNum inxVN = ValueNumStore::NoVN;
5807 FieldSeqNode* fldSeq = nullptr;
5810 GenTreePtr arr = nullptr;
5811 arg->ParseArrayAddress(this, &arrInfo, &arr, &inxVN, &fldSeq);
5814 fgMutateHeap(tree DEBUGARG("assignment to unparseable array expression"));
5817 // Otherwise, parsing succeeded.
5819 // Need to form H[arrType][arr][ind][fldSeq] = rhsVNPair.GetLiberal()
5821 // Get the element type equivalence class representative.
5822 CORINFO_CLASS_HANDLE elemTypeEq =
5823 EncodeElemType(arrInfo.m_elemType, arrInfo.m_elemStructType);
5824 arrVN = arr->gtVNPair.GetLiberal();
5826 FieldSeqNode* zeroOffsetFldSeq = nullptr;
5827 if (GetZeroOffsetFieldMap()->Lookup(arg, &zeroOffsetFldSeq))
5829 fldSeq = GetFieldSeqStore()->Append(fldSeq, zeroOffsetFldSeq);
5832 fgValueNumberArrIndexAssign(elemTypeEq, arrVN, inxVN, fldSeq, rhsVNPair.GetLiberal(),
5834 fgValueNumberRecordHeapSsa(tree);
5836 else if (arg->IsFieldAddr(this, &obj, &staticOffset, &fldSeq))
5838 if (fldSeq == FieldSeqStore::NotAField())
5840 fgMutateHeap(tree DEBUGARG("NotAField"));
5844 assert(fldSeq != nullptr);
5846 CORINFO_CLASS_HANDLE fldCls = info.compCompHnd->getFieldClass(fldSeq->m_fieldHnd);
5849 // Make sure that the class containing it is not a value class (as we are expecting
5850 // an instance field)
5851 assert((info.compCompHnd->getClassAttribs(fldCls) & CORINFO_FLG_VALUECLASS) == 0);
5852 assert(staticOffset == nullptr);
5855 // Get the first (instance or static) field from field seq. Heap[field] will yield the
5857 if (fldSeq->IsFirstElemFieldSeq())
5859 fldSeq = fldSeq->m_next;
5860 assert(fldSeq != nullptr);
5863 // Get a field sequence for just the first field in the sequence
5865 FieldSeqNode* firstFieldOnly = GetFieldSeqStore()->CreateSingleton(fldSeq->m_fieldHnd);
5867 // The final field in the sequence will need to match the 'indType'
5868 var_types indType = lhs->TypeGet();
5869 ValueNum fldMapVN = vnStore->VNApplySelectors(VNK_Liberal, fgCurHeapVN, firstFieldOnly);
5871 // The type of the field is "struct" if there are more fields in the sequence,
5872 // otherwise it is the type returned from VNApplySelectors above.
5873 var_types firstFieldType = vnStore->TypeOfVN(fldMapVN);
5876 rhsVNPair.GetLiberal(); // The value number from the rhs of the assignment
5877 ValueNum newFldMapVN = ValueNumStore::NoVN;
5879 // when (obj != nullptr) we have an instance field, otherwise a static field
5880 // when (staticOffset != nullptr) it represents a offset into a static or the call to
5881 // Shared Static Base
5882 if ((obj != nullptr) || (staticOffset != nullptr))
5884 ValueNum valAtAddr = fldMapVN;
5885 ValueNum normVal = ValueNumStore::NoVN;
5889 // construct the ValueNumber for 'fldMap at obj'
5890 normVal = vnStore->VNNormVal(obj->GetVN(VNK_Liberal));
5892 vnStore->VNForMapSelect(VNK_Liberal, firstFieldType, fldMapVN, normVal);
5894 else // (staticOffset != nullptr)
5896 // construct the ValueNumber for 'fldMap at staticOffset'
5897 normVal = vnStore->VNNormVal(staticOffset->GetVN(VNK_Liberal));
5899 vnStore->VNForMapSelect(VNK_Liberal, firstFieldType, fldMapVN, normVal);
5901 // Now get rid of any remaining struct field dereferences. (if they exist)
5905 vnStore->VNApplySelectorsAssign(VNK_Liberal, valAtAddr, fldSeq->m_next,
5906 storeVal, indType, compCurBB);
5909 // From which we can construct the new ValueNumber for 'fldMap at normVal'
5910 newFldMapVN = vnStore->VNForMapStore(vnStore->TypeOfVN(fldMapVN), fldMapVN, normVal,
5915 // plain static field
5917 // Now get rid of any remaining struct field dereferences. (if they exist)
5921 vnStore->VNApplySelectorsAssign(VNK_Liberal, fldMapVN, fldSeq->m_next,
5922 storeVal, indType, compCurBB);
5925 newFldMapVN = vnStore->VNApplySelectorsAssign(VNK_Liberal, fgCurHeapVN, fldSeq,
5926 storeVal, indType, compCurBB);
5929 // It is not strictly necessary to set the lhs value number,
5930 // but the dumps read better with it set to the 'storeVal' that we just computed
5931 lhs->gtVNPair.SetBoth(storeVal);
5936 printf(" fgCurHeapVN assigned:\n");
5939 // bbHeapDef must be set to true for any block that Mutates the global Heap
5940 assert(compCurBB->bbHeapDef);
5942 // Update the field map for firstField in Heap to this new value.
5943 fgCurHeapVN = vnStore->VNApplySelectorsAssign(VNK_Liberal, fgCurHeapVN, firstFieldOnly,
5944 newFldMapVN, indType, compCurBB);
5946 fgValueNumberRecordHeapSsa(tree);
5951 GenTreeLclVarCommon* dummyLclVarTree = nullptr;
5952 if (!tree->DefinesLocal(this, &dummyLclVarTree))
5954 // If it doesn't define a local, then it might update the heap.
5955 fgMutateHeap(tree DEBUGARG("assign-of-IND"));
5960 // We don't actually evaluate an IND on the LHS, so give it the Void value.
5961 tree->gtVNPair.SetBoth(vnStore->VNForVoid());
5967 bool isVolatile = (lhs->gtFlags & GTF_FLD_VOLATILE) != 0;
5971 // For Volatile store indirection, first mutate the global heap
5972 fgMutateHeap(lhs DEBUGARG("GTF_CLS_VAR - store")); // always change fgCurHeapVN
5975 // We model statics as indices into the heap variable.
5976 FieldSeqNode* fldSeqForStaticVar = GetFieldSeqStore()->CreateSingleton(lhs->gtClsVar.gtClsVarHnd);
5977 assert(fldSeqForStaticVar != FieldSeqStore::NotAField());
5979 ValueNum storeVal = rhsVNPair.GetLiberal(); // The value number from the rhs of the assignment
5980 storeVal = vnStore->VNApplySelectorsAssign(VNK_Liberal, fgCurHeapVN, fldSeqForStaticVar, storeVal,
5981 lhs->TypeGet(), compCurBB);
5983 // It is not strictly necessary to set the lhs value number,
5984 // but the dumps read better with it set to the 'storeVal' that we just computed
5985 lhs->gtVNPair.SetBoth(storeVal);
5989 printf(" fgCurHeapVN assigned:\n");
5992 // bbHeapDef must be set to true for any block that Mutates the global Heap
5993 assert(compCurBB->bbHeapDef);
5995 // Update the field map for the fgCurHeapVN
5996 fgCurHeapVN = storeVal;
5997 fgValueNumberRecordHeapSsa(tree);
6002 assert(!"Unknown node for lhs of assignment!");
6004 // For Unknown stores, mutate the global heap
6005 fgMutateHeap(lhs DEBUGARG("Unkwown Assignment - store")); // always change fgCurHeapVN
6009 // Other kinds of assignment: initblk and copyblk.
6010 else if (oper == GT_ASG && varTypeIsStruct(tree))
6012 fgValueNumberBlockAssignment(tree, evalAsgLhsInd);
6014 else if (oper == GT_ADDR)
6016 // We have special representations for byrefs to lvalues.
6017 GenTreePtr arg = tree->gtOp.gtOp1;
6018 if (arg->OperIsLocal())
6020 FieldSeqNode* fieldSeq = nullptr;
6021 ValueNum newVN = ValueNumStore::NoVN;
6022 if (fgExcludeFromSsa(arg->gtLclVarCommon.GetLclNum()))
6024 newVN = vnStore->VNForExpr(compCurBB, TYP_BYREF);
6026 else if (arg->OperGet() == GT_LCL_FLD)
6028 fieldSeq = arg->AsLclFld()->gtFieldSeq;
6029 if (fieldSeq == nullptr)
6031 // Local field with unknown field seq -- not a precise pointer.
6032 newVN = vnStore->VNForExpr(compCurBB, TYP_BYREF);
6035 if (newVN == ValueNumStore::NoVN)
6037 assert(arg->gtLclVarCommon.GetSsaNum() != ValueNumStore::NoVN);
6038 newVN = vnStore->VNForPtrToLoc(TYP_BYREF, vnStore->VNForIntCon(arg->gtLclVarCommon.GetLclNum()),
6039 vnStore->VNForFieldSeq(fieldSeq));
6041 tree->gtVNPair.SetBoth(newVN);
6043 else if ((arg->gtOper == GT_IND) || arg->OperIsBlk())
6045 // Usually the ADDR and IND just cancel out...
6046 // except when this GT_ADDR has a valid zero-offset field sequence
6048 FieldSeqNode* zeroOffsetFieldSeq = nullptr;
6049 if (GetZeroOffsetFieldMap()->Lookup(tree, &zeroOffsetFieldSeq) &&
6050 (zeroOffsetFieldSeq != FieldSeqStore::NotAField()))
6052 ValueNum addrExtended = vnStore->ExtendPtrVN(arg->gtOp.gtOp1, zeroOffsetFieldSeq);
6053 if (addrExtended != ValueNumStore::NoVN)
6055 tree->gtVNPair.SetBoth(addrExtended); // We don't care about lib/cons differences for addresses.
6059 // ExtendPtrVN returned a failure result
6060 // So give this address a new unique value
6061 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, TYP_BYREF));
6066 // They just cancel, so fetch the ValueNumber from the op1 of the GT_IND node.
6068 GenTree* addr = arg->AsIndir()->Addr();
6069 tree->gtVNPair = addr->gtVNPair;
6071 // For the CSE phase mark the address as GTF_DONT_CSE
6072 // because it will end up with the same value number as tree (the GT_ADDR).
6073 addr->gtFlags |= GTF_DONT_CSE;
6078 // May be more cases to do here! But we'll punt for now.
6079 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, TYP_BYREF));
6082 else if ((oper == GT_IND) || GenTree::OperIsBlk(oper))
6084 // So far, we handle cases in which the address is a ptr-to-local, or if it's
6085 // a pointer to an object field.
6086 GenTreePtr addr = tree->AsIndir()->Addr();
6087 GenTreeLclVarCommon* lclVarTree = nullptr;
6088 FieldSeqNode* fldSeq1 = nullptr;
6089 FieldSeqNode* fldSeq2 = nullptr;
6090 GenTreePtr obj = nullptr;
6091 GenTreePtr staticOffset = nullptr;
6092 bool isVolatile = (tree->gtFlags & GTF_IND_VOLATILE) != 0;
6094 // See if the addr has any exceptional part.
6095 ValueNumPair addrNvnp;
6096 ValueNumPair addrXvnp = ValueNumPair(ValueNumStore::VNForEmptyExcSet(), ValueNumStore::VNForEmptyExcSet());
6097 vnStore->VNPUnpackExc(addr->gtVNPair, &addrNvnp, &addrXvnp);
6099 // Is the dereference immutable? If so, model it as referencing the read-only heap.
6100 if (tree->gtFlags & GTF_IND_INVARIANT)
6102 assert(!isVolatile); // We don't expect both volatile and invariant
6104 ValueNumPair(vnStore->VNForMapSelect(VNK_Liberal, TYP_REF, ValueNumStore::VNForROH(),
6105 addrNvnp.GetLiberal()),
6106 vnStore->VNForMapSelect(VNK_Conservative, TYP_REF, ValueNumStore::VNForROH(),
6107 addrNvnp.GetConservative()));
6108 tree->gtVNPair = vnStore->VNPWithExc(tree->gtVNPair, addrXvnp);
6110 else if (isVolatile)
6112 // For Volatile indirection, mutate the global heap
6113 fgMutateHeap(tree DEBUGARG("GTF_IND_VOLATILE - read"));
6115 // The value read by the GT_IND can immediately change
6116 ValueNum newUniq = vnStore->VNForExpr(compCurBB, tree->TypeGet());
6117 tree->gtVNPair = vnStore->VNPWithExc(ValueNumPair(newUniq, newUniq), addrXvnp);
6119 // We always want to evaluate the LHS when the GT_IND node is marked with GTF_IND_ARR_INDEX
6120 // as this will relabel the GT_IND child correctly using the VNF_PtrToArrElem
6121 else if ((tree->gtFlags & GTF_IND_ARR_INDEX) != 0)
6124 bool b = GetArrayInfoMap()->Lookup(tree, &arrInfo);
6127 ValueNum inxVN = ValueNumStore::NoVN;
6128 FieldSeqNode* fldSeq = nullptr;
6130 // GenTreePtr addr = tree->gtOp.gtOp1;
6131 ValueNum addrVN = addrNvnp.GetLiberal();
6134 GenTreePtr arr = nullptr;
6135 addr->ParseArrayAddress(this, &arrInfo, &arr, &inxVN, &fldSeq);
6138 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6141 assert(fldSeq != FieldSeqStore::NotAField());
6144 // Need to form H[arrType][arr][ind][fldSeq]
6145 // Get the array element type equivalence class rep.
6146 CORINFO_CLASS_HANDLE elemTypeEq = EncodeElemType(arrInfo.m_elemType, arrInfo.m_elemStructType);
6147 ValueNum elemTypeEqVN = vnStore->VNForHandle(ssize_t(elemTypeEq), GTF_ICON_CLASS_HDL);
6149 // We take the "VNNormVal"s here, because if either has exceptional outcomes, they will be captured
6150 // as part of the value of the composite "addr" operation...
6151 ValueNum arrVN = vnStore->VNNormVal(arr->gtVNPair.GetLiberal());
6152 inxVN = vnStore->VNNormVal(inxVN);
6154 // Additionally, relabel the address with a PtrToArrElem value number.
6155 ValueNum fldSeqVN = vnStore->VNForFieldSeq(fldSeq);
6157 vnStore->VNForFunc(TYP_BYREF, VNF_PtrToArrElem, elemTypeEqVN, arrVN, inxVN, fldSeqVN);
6159 // The aggregate "addr" VN should have had all the exceptions bubble up...
6160 elemAddr = vnStore->VNWithExc(elemAddr, addrXvnp.GetLiberal());
6161 addr->gtVNPair.SetBoth(elemAddr);
6165 printf(" Relabeled IND_ARR_INDEX address node ");
6166 Compiler::printTreeID(addr);
6167 printf(" with l:" STR_VN "%x: ", elemAddr);
6168 vnStore->vnDump(this, elemAddr);
6170 if (vnStore->VNNormVal(elemAddr) != elemAddr)
6172 printf(" [" STR_VN "%x is: ", vnStore->VNNormVal(elemAddr));
6173 vnStore->vnDump(this, vnStore->VNNormVal(elemAddr));
6178 // We now need to retrieve the value number for the array element value
6179 // and give this value number to the GT_IND node 'tree'
6180 // We do this whenever we have an rvalue, or for the LHS when we have an "op=",
6181 // but we don't do it for a normal LHS assignment into an array element.
6183 if (evalAsgLhsInd || ((tree->gtFlags & GTF_IND_ASG_LHS) == 0))
6185 fgValueNumberArrIndexVal(tree, elemTypeEq, arrVN, inxVN, addrXvnp.GetLiberal(), fldSeq);
6189 // In general we skip GT_IND nodes on that are the LHS of an assignment. (We labeled these earlier.)
6190 // We will "evaluate" this as part of the assignment. (Unless we're explicitly told by
6191 // the caller to evaluate anyway -- perhaps the assignment is an "op=" assignment.)
6192 else if (((tree->gtFlags & GTF_IND_ASG_LHS) == 0) || evalAsgLhsInd)
6194 FieldSeqNode* localFldSeq = nullptr;
6197 // Is it a local or a heap address?
6198 if (addr->IsLocalAddrExpr(this, &lclVarTree, &localFldSeq) &&
6199 !fgExcludeFromSsa(lclVarTree->GetLclNum()))
6201 unsigned lclNum = lclVarTree->GetLclNum();
6202 unsigned ssaNum = lclVarTree->GetSsaNum();
6203 LclVarDsc* varDsc = &lvaTable[lclNum];
6205 if ((localFldSeq == FieldSeqStore::NotAField()) || (localFldSeq == nullptr))
6207 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6211 var_types indType = tree->TypeGet();
6212 ValueNumPair lclVNPair = varDsc->GetPerSsaData(ssaNum)->m_vnPair;
6213 tree->gtVNPair = vnStore->VNPairApplySelectors(lclVNPair, localFldSeq, indType);
6216 tree->gtVNPair = vnStore->VNPWithExc(tree->gtVNPair, addrXvnp);
6218 else if (vnStore->GetVNFunc(addrNvnp.GetLiberal(), &funcApp) && funcApp.m_func == VNF_PtrToStatic)
6220 var_types indType = tree->TypeGet();
6221 ValueNum fieldSeqVN = funcApp.m_args[0];
6223 FieldSeqNode* fldSeqForStaticVar = vnStore->FieldSeqVNToFieldSeq(fieldSeqVN);
6225 if (fldSeqForStaticVar != FieldSeqStore::NotAField())
6227 ValueNum selectedStaticVar;
6228 // We model statics as indices into the heap variable.
6229 size_t structSize = 0;
6231 vnStore->VNApplySelectors(VNK_Liberal, fgCurHeapVN, fldSeqForStaticVar, &structSize);
6232 selectedStaticVar = vnStore->VNApplySelectorsTypeCheck(selectedStaticVar, indType, structSize);
6234 tree->gtVNPair.SetLiberal(selectedStaticVar);
6235 tree->gtVNPair.SetConservative(vnStore->VNForExpr(compCurBB, indType));
6239 JITDUMP(" *** Missing field sequence info for VNF_PtrToStatic value GT_IND\n");
6240 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, indType)); // a new unique value number
6242 tree->gtVNPair = vnStore->VNPWithExc(tree->gtVNPair, addrXvnp);
6244 else if (!varTypeIsStruct(tree) && vnStore->GetVNFunc(addrNvnp.GetLiberal(), &funcApp) &&
6245 (funcApp.m_func == VNF_PtrToArrElem))
6247 // TODO-1stClassStructs: The above condition need not exclude struct types, but it is
6248 // excluded for now to minimize diffs.
6249 fgValueNumberArrIndexVal(tree, &funcApp, addrXvnp.GetLiberal());
6251 else if (!varTypeIsStruct(tree) && addr->IsFieldAddr(this, &obj, &staticOffset, &fldSeq2))
6253 // TODO-1stClassStructs: The above condition need not exclude struct types, but it is
6254 // excluded for now to minimize diffs.
6255 if (fldSeq2 == FieldSeqStore::NotAField())
6257 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6259 else if (fldSeq2 != nullptr)
6261 // Get the first (instance or static) field from field seq. Heap[field] will yield the "field
6263 CLANG_FORMAT_COMMENT_ANCHOR;
6266 CORINFO_CLASS_HANDLE fldCls = info.compCompHnd->getFieldClass(fldSeq2->m_fieldHnd);
6269 // Make sure that the class containing it is not a value class (as we are expecting an
6271 assert((info.compCompHnd->getClassAttribs(fldCls) & CORINFO_FLG_VALUECLASS) == 0);
6272 assert(staticOffset == nullptr);
6275 // Get a field sequence for just the first field in the sequence
6277 FieldSeqNode* firstFieldOnly = GetFieldSeqStore()->CreateSingleton(fldSeq2->m_fieldHnd);
6278 size_t structSize = 0;
6280 vnStore->VNApplySelectors(VNK_Liberal, fgCurHeapVN, firstFieldOnly, &structSize);
6282 // The final field in the sequence will need to match the 'indType'
6283 var_types indType = tree->TypeGet();
6285 // The type of the field is "struct" if there are more fields in the sequence,
6286 // otherwise it is the type returned from VNApplySelectors above.
6287 var_types firstFieldType = vnStore->TypeOfVN(fldMapVN);
6289 ValueNum valAtAddr = fldMapVN;
6292 // construct the ValueNumber for 'fldMap at obj'
6293 ValueNum objNormVal = vnStore->VNNormVal(obj->GetVN(VNK_Liberal));
6294 valAtAddr = vnStore->VNForMapSelect(VNK_Liberal, firstFieldType, fldMapVN, objNormVal);
6296 else if (staticOffset != nullptr)
6298 // construct the ValueNumber for 'fldMap at staticOffset'
6299 ValueNum offsetNormVal = vnStore->VNNormVal(staticOffset->GetVN(VNK_Liberal));
6300 valAtAddr = vnStore->VNForMapSelect(VNK_Liberal, firstFieldType, fldMapVN, offsetNormVal);
6303 // Now get rid of any remaining struct field dereferences.
6304 if (fldSeq2->m_next)
6306 valAtAddr = vnStore->VNApplySelectors(VNK_Liberal, valAtAddr, fldSeq2->m_next, &structSize);
6308 valAtAddr = vnStore->VNApplySelectorsTypeCheck(valAtAddr, indType, structSize);
6310 tree->gtVNPair.SetLiberal(valAtAddr);
6312 // The conservative value is a new, unique VN.
6313 tree->gtVNPair.SetConservative(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6314 tree->gtVNPair = vnStore->VNPWithExc(tree->gtVNPair, addrXvnp);
6318 // Occasionally we do an explicit null test on a REF, so we just dereference it with no
6319 // field sequence. The result is probably unused.
6320 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6321 tree->gtVNPair = vnStore->VNPWithExc(tree->gtVNPair, addrXvnp);
6324 else // We don't know where the address points.
6326 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6327 tree->gtVNPair = vnStore->VNPWithExc(tree->gtVNPair, addrXvnp);
6331 else if (tree->OperGet() == GT_CAST)
6333 fgValueNumberCastTree(tree);
6335 else if (tree->OperGet() == GT_INTRINSIC)
6337 fgValueNumberIntrinsic(tree);
6339 else if (ValueNumStore::VNFuncIsLegal(GetVNFuncForOper(oper, (tree->gtFlags & GTF_UNSIGNED) != 0)))
6341 if (GenTree::OperIsUnary(oper))
6343 if (tree->gtOp.gtOp1 != nullptr)
6345 if (tree->OperGet() == GT_NOP)
6347 // Pass through arg vn.
6348 tree->gtVNPair = tree->gtOp.gtOp1->gtVNPair;
6352 ValueNumPair op1VNP;
6353 ValueNumPair op1VNPx = ValueNumStore::VNPForEmptyExcSet();
6354 vnStore->VNPUnpackExc(tree->gtOp.gtOp1->gtVNPair, &op1VNP, &op1VNPx);
6356 vnStore->VNPWithExc(vnStore->VNPairForFunc(tree->TypeGet(),
6357 GetVNFuncForOper(oper, (tree->gtFlags &
6358 GTF_UNSIGNED) != 0),
6363 else // Is actually nullary.
6365 // Mostly we'll leave these without a value number, assuming we'll detect these as VN failures
6366 // if they actually need to have values. With the exception of NOPs, which can sometimes have
6368 if (tree->OperGet() == GT_NOP)
6370 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6376 assert(!GenTree::OperIsAssignment(oper)); // We handled assignments earlier.
6377 assert(GenTree::OperIsBinary(oper));
6378 // Standard binary operator.
6379 ValueNumPair op2VNPair;
6380 if (tree->gtOp.gtOp2 == nullptr)
6382 op2VNPair.SetBoth(ValueNumStore::VNForNull());
6386 op2VNPair = tree->gtOp.gtOp2->gtVNPair;
6388 // A few special case: if we add a field offset constant to a PtrToXXX, we get back a new PtrToXXX.
6389 ValueNum newVN = ValueNumStore::NoVN;
6391 ValueNumPair op1vnp;
6392 ValueNumPair op1Xvnp = ValueNumStore::VNPForEmptyExcSet();
6393 vnStore->VNPUnpackExc(tree->gtOp.gtOp1->gtVNPair, &op1vnp, &op1Xvnp);
6394 ValueNumPair op2vnp;
6395 ValueNumPair op2Xvnp = ValueNumStore::VNPForEmptyExcSet();
6396 vnStore->VNPUnpackExc(op2VNPair, &op2vnp, &op2Xvnp);
6397 ValueNumPair excSet = vnStore->VNPExcSetUnion(op1Xvnp, op2Xvnp);
6401 newVN = vnStore->ExtendPtrVN(tree->gtOp.gtOp1, tree->gtOp.gtOp2);
6402 if (newVN == ValueNumStore::NoVN)
6404 newVN = vnStore->ExtendPtrVN(tree->gtOp.gtOp2, tree->gtOp.gtOp1);
6407 if (newVN != ValueNumStore::NoVN)
6409 newVN = vnStore->VNWithExc(newVN, excSet.GetLiberal());
6410 // We don't care about differences between liberal and conservative for pointer values.
6411 tree->gtVNPair.SetBoth(newVN);
6416 ValueNumPair normalRes =
6417 vnStore->VNPairForFunc(tree->TypeGet(),
6418 GetVNFuncForOper(oper, (tree->gtFlags & GTF_UNSIGNED) != 0), op1vnp,
6420 // Overflow-checking operations add an overflow exception
6421 if (tree->gtOverflowEx())
6423 ValueNum overflowExcSet =
6424 vnStore->VNExcSetSingleton(vnStore->VNForFunc(TYP_REF, VNF_OverflowExc));
6425 excSet = vnStore->VNPExcSetUnion(excSet, ValueNumPair(overflowExcSet, overflowExcSet));
6427 tree->gtVNPair = vnStore->VNPWithExc(normalRes, excSet);
6431 else // ValueNumStore::VNFuncIsLegal returns false
6433 // Some of the genTreeOps that aren't legal VNFuncs so they get special handling.
6438 ValueNumPair op1vnp;
6439 ValueNumPair op1Xvnp = ValueNumStore::VNPForEmptyExcSet();
6440 vnStore->VNPUnpackExc(tree->gtOp.gtOp1->gtVNPair, &op1vnp, &op1Xvnp);
6441 ValueNumPair op2vnp;
6442 ValueNumPair op2Xvnp = ValueNumStore::VNPForEmptyExcSet();
6444 GenTree* op2 = tree->gtGetOp2();
6445 if (op2->OperIsIndir() && ((op2->gtFlags & GTF_IND_ASG_LHS) != 0))
6447 // If op2 represents the lhs of an assignment then we give a VNForVoid for the lhs
6448 op2vnp = ValueNumPair(ValueNumStore::VNForVoid(), ValueNumStore::VNForVoid());
6450 else if ((op2->OperGet() == GT_CLS_VAR) && (op2->gtFlags & GTF_CLS_VAR_ASG_LHS))
6452 // If op2 represents the lhs of an assignment then we give a VNForVoid for the lhs
6453 op2vnp = ValueNumPair(ValueNumStore::VNForVoid(), ValueNumStore::VNForVoid());
6457 vnStore->VNPUnpackExc(op2->gtVNPair, &op2vnp, &op2Xvnp);
6460 tree->gtVNPair = vnStore->VNPWithExc(op2vnp, vnStore->VNPExcSetUnion(op1Xvnp, op2Xvnp));
6465 // Explicit null check.
6467 vnStore->VNPWithExc(ValueNumPair(ValueNumStore::VNForVoid(), ValueNumStore::VNForVoid()),
6468 vnStore->VNPExcSetSingleton(
6469 vnStore->VNPairForFunc(TYP_REF, VNF_NullPtrExc,
6470 tree->gtOp.gtOp1->gtVNPair)));
6476 if (tree->gtFlags & GTF_IND_ARR_LEN)
6478 // It's an array length. The argument is the sum of an array ref with some integer values...
6479 ValueNum arrRefLib = vnStore->VNForRefInAddr(tree->gtOp.gtOp1->gtVNPair.GetLiberal());
6480 ValueNum arrRefCons = vnStore->VNForRefInAddr(tree->gtOp.gtOp1->gtVNPair.GetConservative());
6482 assert(vnStore->TypeOfVN(arrRefLib) == TYP_REF || vnStore->TypeOfVN(arrRefLib) == TYP_BYREF);
6483 if (vnStore->IsVNConstant(arrRefLib))
6485 // (or in weird cases, a REF or BYREF constant, in which case the result is an exception).
6486 tree->gtVNPair.SetLiberal(
6487 vnStore->VNWithExc(ValueNumStore::VNForVoid(),
6488 vnStore->VNExcSetSingleton(
6489 vnStore->VNForFunc(TYP_REF, VNF_NullPtrExc, arrRefLib))));
6493 tree->gtVNPair.SetLiberal(vnStore->VNForFunc(TYP_INT, VNFunc(GT_ARR_LENGTH), arrRefLib));
6495 assert(vnStore->TypeOfVN(arrRefCons) == TYP_REF || vnStore->TypeOfVN(arrRefCons) == TYP_BYREF);
6496 if (vnStore->IsVNConstant(arrRefCons))
6498 // (or in weird cases, a REF or BYREF constant, in which case the result is an exception).
6499 tree->gtVNPair.SetConservative(
6500 vnStore->VNWithExc(ValueNumStore::VNForVoid(),
6501 vnStore->VNExcSetSingleton(
6502 vnStore->VNForFunc(TYP_REF, VNF_NullPtrExc, arrRefCons))));
6506 tree->gtVNPair.SetConservative(
6507 vnStore->VNForFunc(TYP_INT, VNFunc(GT_ARR_LENGTH), arrRefCons));
6512 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6516 case GT_LOCKADD: // Binop
6517 case GT_XADD: // Binop
6518 case GT_XCHG: // Binop
6519 // For CMPXCHG and other intrinsics add an arbitrary side effect on Heap.
6520 fgMutateHeap(tree DEBUGARG("Interlocked intrinsic"));
6521 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6526 #ifndef LEGACY_BACKEND
6528 #endif // !LEGACY_BACKEND
6529 // These nodes never need to have a ValueNumber
6530 tree->gtVNPair.SetBoth(ValueNumStore::NoVN);
6534 // The default action is to give the node a new, unique VN.
6535 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6542 assert(GenTree::OperIsSpecial(oper));
6544 // TBD: We must handle these individually. For now:
6548 fgValueNumberCall(tree->AsCall());
6551 case GT_ARR_BOUNDS_CHECK:
6554 #endif // FEATURE_SIMD
6556 // A bounds check node has no value, but may throw exceptions.
6557 ValueNumPair excSet = vnStore->VNPExcSetSingleton(
6558 vnStore->VNPairForFunc(TYP_REF, VNF_IndexOutOfRangeExc,
6559 vnStore->VNPNormVal(tree->AsBoundsChk()->gtArrLen->gtVNPair),
6560 vnStore->VNPNormVal(tree->AsBoundsChk()->gtIndex->gtVNPair)));
6561 excSet = vnStore->VNPExcSetUnion(excSet, vnStore->VNPExcVal(tree->AsBoundsChk()->gtArrLen->gtVNPair));
6562 excSet = vnStore->VNPExcSetUnion(excSet, vnStore->VNPExcVal(tree->AsBoundsChk()->gtIndex->gtVNPair));
6564 tree->gtVNPair = vnStore->VNPWithExc(vnStore->VNPForVoid(), excSet);
6568 case GT_CMPXCHG: // Specialop
6569 // For CMPXCHG and other intrinsics add an arbitrary side effect on Heap.
6570 fgMutateHeap(tree DEBUGARG("Interlocked intrinsic"));
6571 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6575 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6581 if (tree->gtVNPair.GetLiberal() != ValueNumStore::NoVN)
6583 printf("N%03u ", tree->gtSeqNum);
6586 gtDispNodeName(tree);
6587 if (tree->OperIsLeaf() || tree->OperIsLocalStore()) // local stores used to be leaves
6589 gtDispLeaf(tree, nullptr);
6592 vnpPrint(tree->gtVNPair, 1);
// Assign a value-number pair to a GT_CAST node by delegating to
// ValueNumStore::VNPairForCast, passing along the cast's to/from types,
// the GTF_UNSIGNED flag, and whether the cast is overflow-checking.
6664 void Compiler::fgValueNumberCastTree(GenTreePtr tree)
6666 assert(tree->OperGet() == GT_CAST);
6668 ValueNumPair srcVNPair = tree->gtOp.gtOp1->gtVNPair;
6669 var_types castToType = tree->CastToType();
6670 var_types castFromType = tree->CastFromType();
// GTF_UNSIGNED on a cast says the *source* should be treated as unsigned.
6671 bool srcIsUnsigned = ((tree->gtFlags & GTF_UNSIGNED) != 0);
6672 bool hasOverflowCheck = tree->gtOverflowEx();
6674 assert(genActualType(castToType) == tree->TypeGet()); // Ensure that the resultType is correct
6676 tree->gtVNPair = vnStore->VNPairForCast(srcVNPair, castToType, castFromType, srcIsUnsigned, hasOverflowCheck);
6679 // Compute the normal ValueNumber for a cast operation with no exceptions
6680 ValueNum ValueNumStore::VNForCast(ValueNum srcVN,
6681 var_types castToType,
6682 var_types castFromType,
6683 bool srcIsUnsigned /* = false */)
6685 // The resulting type after performingthe cast is always widened to a supported IL stack size
6686 var_types resultType = genActualType(castToType);
6688 // When we're considering actual value returned by a non-checking cast whether or not the source is
6689 // unsigned does *not* matter for non-widening casts. That is, if we cast an int or a uint to short,
6690 // we just extract the first two bytes from the source bit pattern, not worrying about the interpretation.
6691 // The same is true in casting between signed/unsigned types of the same width. Only when we're doing
6692 // a widening cast do we care about whether the source was unsigned,so we know whether to sign or zero extend it.
6694 bool srcIsUnsignedNorm = srcIsUnsigned;
6695 if (genTypeSize(castToType) <= genTypeSize(castFromType))
6697 srcIsUnsignedNorm = false;
6700 ValueNum castTypeVN = VNForCastOper(castToType, srcIsUnsigned);
6701 ValueNum resultVN = VNForFunc(resultType, VNF_Cast, srcVN, castTypeVN);
6704 if (m_pComp->verbose)
6706 printf(" VNForCast(" STR_VN "%x, " STR_VN "%x) returns ", srcVN, castTypeVN);
6707 m_pComp->vnPrint(resultVN, 1);
6715 // Compute the ValueNumberPair for a cast operation
// (liberal and conservative VNs, including any overflow exception for checking casts).
6716 ValueNumPair ValueNumStore::VNPairForCast(ValueNumPair srcVNPair,
6717 var_types castToType,
6718 var_types castFromType,
6719 bool srcIsUnsigned, /* = false */
6720 bool hasOverflowCheck) /* = false */
6722 // The resulting type after performing the cast is always widened to a supported IL stack size
6723 var_types resultType = genActualType(castToType);
// Separate the source's normal value from its exception set; the exception set is
// re-attached to the result below.
6725 ValueNumPair castArgVNP;
6726 ValueNumPair castArgxVNP = ValueNumStore::VNPForEmptyExcSet();
6727 VNPUnpackExc(srcVNPair, &castArgVNP, &castArgxVNP);
6729 // When we're considering actual value returned by a non-checking cast (or a checking cast that succeeds),
6730 // whether or not the source is unsigned does *not* matter for non-widening casts.
6731 // That is, if we cast an int or a uint to short, we just extract the first two bytes from the source
6732 // bit pattern, not worrying about the interpretation. The same is true in casting between signed/unsigned
6733 // types of the same width. Only when we're doing a widening cast do we care about whether the source
6734 // was unsigned, so we know whether to sign or zero extend it.
6736 // Important: Casts to floating point cannot be optimized in this fashion. (bug 946768)
6738 bool srcIsUnsignedNorm = srcIsUnsigned;
6739 if (genTypeSize(castToType) <= genTypeSize(castFromType) && !varTypeIsFloating(castToType))
6741 srcIsUnsignedNorm = false;
// The cast-type VN encodes both the target type and the (normalized) unsignedness of the source.
6744 ValueNum castTypeVN = VNForCastOper(castToType, srcIsUnsignedNorm);
6745 ValueNumPair castTypeVNPair(castTypeVN, castTypeVN);
6746 ValueNumPair castNormRes = VNPairForFunc(resultType, VNF_Cast, castArgVNP, castTypeVNPair);
6748 ValueNumPair resultVNP = VNPWithExc(castNormRes, castArgxVNP);
6750 // If we have a check for overflow, add the exception information.
6751 if (hasOverflowCheck)
6753 // For overflow checking, we always need to know whether the source is unsigned.
// (So the overflow-exception VN is built with the un-normalized srcIsUnsigned.)
6754 castTypeVNPair.SetBoth(VNForCastOper(castToType, srcIsUnsigned))
6755 ValueNumPair excSet =
6756 VNPExcSetSingleton(VNPairForFunc(TYP_REF, VNF_ConvOverflowExc, castArgVNP, castTypeVNPair));
6757 excSet = VNPExcSetUnion(excSet, castArgxVNP);
6758 resultVNP = VNPWithExc(castNormRes, excSet);
// Value-number a helper call that maps to the VN function 'vnf'. Handles up to
// three arguments (the current maximum, asserted below), optionally prepends the
// ready-to-run entry-point address as arg0, optionally appends a unique VN (for
// allocators and other calls whose result must be distinct per call site), and
// accumulates each argument's exception set into 'vnpExc', which is attached to
// the call's final VN pair.
6764 void Compiler::fgValueNumberHelperCallFunc(GenTreeCall* call, VNFunc vnf, ValueNumPair vnpExc)
6766 unsigned nArgs = ValueNumStore::VNFuncArity(vnf);
6767 assert(vnf != VNF_Boundary);
6768 GenTreeArgList* args = call->gtCallArgs;
6769 bool generateUniqueVN = false;
6770 bool useEntryPointAddrAsArg0 = false;
// Allocators: each call returns a distinct object, so force a unique VN
// and (for non-throwing news) clear any inherited exception set.
6776 generateUniqueVN = true;
6777 vnpExc = ValueNumStore::VNPForEmptyExcSet();
6783 generateUniqueVN = true;
6784 ValueNumPair vnp1 = vnStore->VNPNormVal(args->Rest()->Current()->gtVNPair);
6786 // The New Array helper may throw an overflow exception
6787 vnpExc = vnStore->VNPExcSetSingleton(vnStore->VNPairForFunc(TYP_REF, VNF_NewArrOverflowExc, vnp1));
6791 case VNF_BoxNullable:
6793 // Generate unique VN so, VNForFunc generates a uniq value number for box nullable.
6794 // Alternatively instead of using vnpUniq below in VNPairForFunc(...),
6795 // we could use the value number of what the byref arg0 points to.
6797 // But retrieving the value number of what the byref arg0 points to is quite a bit more work
6798 // and doing so only very rarely allows for an additional optimization.
6799 generateUniqueVN = true;
6803 case VNF_JitReadyToRunNew:
6805 generateUniqueVN = true;
6806 vnpExc = ValueNumStore::VNPForEmptyExcSet();
6807 useEntryPointAddrAsArg0 = true;
6811 case VNF_JitReadyToRunNewArr:
6813 generateUniqueVN = true;
6814 ValueNumPair vnp1 = vnStore->VNPNormVal(args->Current()->gtVNPair);
6816 // The New Array helper may throw an overflow exception
6817 vnpExc = vnStore->VNPExcSetSingleton(vnStore->VNPairForFunc(TYP_REF, VNF_NewArrOverflowExc, vnp1));
6818 useEntryPointAddrAsArg0 = true;
6822 case VNF_ReadyToRunStaticBase:
6823 case VNF_ReadyToRunIsInstanceOf:
6824 case VNF_ReadyToRunCastClass:
// R2R helpers take their target via an embedded entry point rather than an IR argument.
6826 useEntryPointAddrAsArg0 = true;
// Default: anything reaching here without special handling must be a pure helper.
6832 assert(s_helperCallProperties.IsPure(eeGetHelperNum(call->gtCallMethHnd)));
6837 if (generateUniqueVN)
6842 ValueNumPair vnpUniq;
6843 if (generateUniqueVN)
6845 // Generate unique VN so, VNForFunc generates a unique value number.
6846 vnpUniq.SetBoth(vnStore->VNForExpr(compCurBB, call->TypeGet()));
// Zero-argument case.
6851 if (generateUniqueVN)
6853 call->gtVNPair = vnStore->VNPairForFunc(call->TypeGet(), vnf, vnpUniq);
6857 call->gtVNPair.SetBoth(vnStore->VNForFunc(call->TypeGet(), vnf));
6862 // Has at least one argument.
6864 ValueNumPair vnp0x = ValueNumStore::VNPForEmptyExcSet();
6865 #ifdef FEATURE_READYTORUN_COMPILER
6866 if (useEntryPointAddrAsArg0)
// Synthesize arg0 from the precompiled entry-point address (a compile-time constant).
6868 ValueNum callAddrVN = vnStore->VNForPtrSizeIntCon((ssize_t)call->gtCall.gtEntryPoint.addr);
6869 vnp0 = ValueNumPair(callAddrVN, callAddrVN);
6874 assert(!useEntryPointAddrAsArg0);
6875 ValueNumPair vnp0wx = args->Current()->gtVNPair;
6876 vnStore->VNPUnpackExc(vnp0wx, &vnp0, &vnp0x);
6878 // Also include in the argument exception sets
6879 vnpExc = vnStore->VNPExcSetUnion(vnpExc, vnp0x);
6881 args = args->Rest();
6885 if (generateUniqueVN)
6887 call->gtVNPair = vnStore->VNPairForFunc(call->TypeGet(), vnf, vnp0, vnpUniq);
6891 call->gtVNPair = vnStore->VNPairForFunc(call->TypeGet(), vnf, vnp0);
6896 // Has at least two arguments.
6897 ValueNumPair vnp1wx = args->Current()->gtVNPair;
6899 ValueNumPair vnp1x = ValueNumStore::VNPForEmptyExcSet();
6900 vnStore->VNPUnpackExc(vnp1wx, &vnp1, &vnp1x);
6901 vnpExc = vnStore->VNPExcSetUnion(vnpExc, vnp1x);
6903 args = args->Rest();
6906 if (generateUniqueVN)
6908 call->gtVNPair = vnStore->VNPairForFunc(call->TypeGet(), vnf, vnp0, vnp1, vnpUniq);
6912 call->gtVNPair = vnStore->VNPairForFunc(call->TypeGet(), vnf, vnp0, vnp1);
// Three-argument case (the current maximum supported arity).
6917 ValueNumPair vnp2wx = args->Current()->gtVNPair;
6919 ValueNumPair vnp2x = ValueNumStore::VNPForEmptyExcSet();
6920 vnStore->VNPUnpackExc(vnp2wx, &vnp2, &vnp2x);
6921 vnpExc = vnStore->VNPExcSetUnion(vnpExc, vnp2x);
6923 args = args->Rest();
6924 assert(nArgs == 3); // Our current maximum.
6925 assert(args == nullptr);
6926 if (generateUniqueVN)
6928 call->gtVNPair = vnStore->VNPairForFunc(call->TypeGet(), vnf, vnp0, vnp1, vnp2, vnpUniq);
6932 call->gtVNPair = vnStore->VNPairForFunc(call->TypeGet(), vnf, vnp0, vnp1, vnp2);
6936 // Add the accumulated exceptions.
6937 call->gtVNPair = vnStore->VNPWithExc(call->gtVNPair, vnpExc);
// Value-number a call node. First copies VNs from late args into their GT_ARGPLACE
// placeholders (and refreshes the arg-list node VNs if any placeholder changed);
// then helper calls are value-numbered precisely via fgValueNumberHelperCall, while
// all other calls get a fresh opaque VN and an arbitrary heap mutation.
6941 void Compiler::fgValueNumberCall(GenTreeCall* call)
6943 // First: do value numbering of any argument placeholder nodes in the argument list
6944 // (by transferring from the VN of the late arg that they are standing in for...)
6946 GenTreeArgList* args = call->gtCallArgs;
6947 bool updatedArgPlace = false;
6948 while (args != nullptr)
6950 GenTreePtr arg = args->Current();
6951 if (arg->OperGet() == GT_ARGPLACE)
6953 // Find the corresponding late arg.
6954 GenTreePtr lateArg = nullptr;
6955 for (unsigned j = 0; j < call->fgArgInfo->ArgCount(); j++)
// NOTE(review): 'i' is presumably the positional index of 'arg' in the early arg
// list, maintained on an elided line of this loop -- confirm against full source.
6957 if (call->fgArgInfo->ArgTable()[j]->argNum == i)
6959 lateArg = call->fgArgInfo->ArgTable()[j]->node;
6963 assert(lateArg != nullptr);
6964 assert(lateArg->gtVNPair.BothDefined());
6965 arg->gtVNPair = lateArg->gtVNPair;
6966 updatedArgPlace = true;
6970 printf("VN of ARGPLACE tree ");
6971 Compiler::printTreeID(arg);
6972 printf(" updated to ");
6973 vnpPrint(arg->gtVNPair, 1);
6979 args = args->Rest();
6981 if (updatedArgPlace)
6983 // Now we have to update the VN's of the argument list nodes, since that will be used in determining
6985 fgUpdateArgListVNs(call->gtCallArgs);
6988 if (call->gtCallType == CT_HELPER)
6990 bool modHeap = fgValueNumberHelperCall(call);
6994 // For now, arbitrary side effect on Heap.
6995 fgMutateHeap(call DEBUGARG("HELPER - modifies heap"));
// Non-helper calls: void calls get the void VN, others a fresh opaque VN.
7000 if (call->TypeGet() == TYP_VOID)
7002 call->gtVNPair.SetBoth(ValueNumStore::VNForVoid());
7006 call->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, call->TypeGet()));
7009 // For now, arbitrary side effect on Heap.
7010 fgMutateHeap(call DEBUGARG("CALL"));
// Recompute VNs for the GT_LIST nodes of an argument list, recursing to the tail
// first so each list node is re-numbered after the nodes it depends on.
7014 void Compiler::fgUpdateArgListVNs(GenTreeArgList* args)
7016 if (args == nullptr)
7021 fgUpdateArgListVNs(args->Rest());
7022 fgValueNumberTree(args);
// Map a pure or allocator JIT helper (CorInfoHelpFunc) to the VNFunc used to
// value-number calls to it. Arithmetic helpers reuse the genTreeOps-based VNFuncs
// (e.g. CORINFO_HELP_DIV -> VNFunc(GT_DIV)); the rest map to dedicated VNF_*
// symbols. Asserts that a mapping was found (vnf != VNF_Boundary).
7025 VNFunc Compiler::fgValueNumberHelperMethVNFunc(CorInfoHelpFunc helpFunc)
7027 assert(s_helperCallProperties.IsPure(helpFunc) || s_helperCallProperties.IsAllocator(helpFunc));
7029 VNFunc vnf = VNF_Boundary; // An illegal value...
7032 // These translate to other function symbols:
7033 case CORINFO_HELP_DIV:
7034 vnf = VNFunc(GT_DIV);
7036 case CORINFO_HELP_MOD:
7037 vnf = VNFunc(GT_MOD);
7039 case CORINFO_HELP_UDIV:
7040 vnf = VNFunc(GT_UDIV);
7042 case CORINFO_HELP_UMOD:
7043 vnf = VNFunc(GT_UMOD);
7045 case CORINFO_HELP_LLSH:
7046 vnf = VNFunc(GT_LSH);
7048 case CORINFO_HELP_LRSH:
7049 vnf = VNFunc(GT_RSH);
7051 case CORINFO_HELP_LRSZ:
7052 vnf = VNFunc(GT_RSZ);
7054 case CORINFO_HELP_LMUL:
7055 case CORINFO_HELP_LMUL_OVF:
7056 vnf = VNFunc(GT_MUL);
7058 case CORINFO_HELP_ULMUL_OVF:
7059 vnf = VNFunc(GT_MUL);
7060 break; // Is this the right thing?
7061 case CORINFO_HELP_LDIV:
7062 vnf = VNFunc(GT_DIV);
7064 case CORINFO_HELP_LMOD:
7065 vnf = VNFunc(GT_MOD);
7067 case CORINFO_HELP_ULDIV:
7068 vnf = VNFunc(GT_UDIV);
7070 case CORINFO_HELP_ULMOD:
7071 vnf = VNFunc(GT_UMOD);
// Numeric-conversion helpers (mappings on elided lines).
7074 case CORINFO_HELP_LNG2DBL:
7077 case CORINFO_HELP_ULNG2DBL:
7080 case CORINFO_HELP_DBL2INT:
7083 case CORINFO_HELP_DBL2INT_OVF:
7086 case CORINFO_HELP_DBL2LNG:
7089 case CORINFO_HELP_DBL2LNG_OVF:
7092 case CORINFO_HELP_DBL2UINT:
7095 case CORINFO_HELP_DBL2UINT_OVF:
7098 case CORINFO_HELP_DBL2ULNG:
7101 case CORINFO_HELP_DBL2ULNG_OVF:
7104 case CORINFO_HELP_FLTREM:
7105 vnf = VNFunc(GT_MOD);
7107 case CORINFO_HELP_DBLREM:
7108 vnf = VNFunc(GT_MOD);
7110 case CORINFO_HELP_FLTROUND:
7112 break; // Is this the right thing?
7113 case CORINFO_HELP_DBLROUND:
7115 break; // Is this the right thing?
7117 // These allocation operations probably require some augmentation -- perhaps allocSiteId,
7118 // something about array length...
7119 case CORINFO_HELP_NEW_CROSSCONTEXT:
7120 case CORINFO_HELP_NEWFAST:
7121 case CORINFO_HELP_NEWSFAST:
7122 case CORINFO_HELP_NEWSFAST_ALIGN8:
7126 case CORINFO_HELP_READYTORUN_NEW:
7127 vnf = VNF_JitReadyToRunNew;
7130 case CORINFO_HELP_NEWARR_1_DIRECT:
7131 case CORINFO_HELP_NEWARR_1_OBJ:
7132 case CORINFO_HELP_NEWARR_1_VC:
7133 case CORINFO_HELP_NEWARR_1_ALIGN8:
7134 vnf = VNF_JitNewArr;
7137 case CORINFO_HELP_READYTORUN_NEWARR_1:
7138 vnf = VNF_JitReadyToRunNewArr;
// Static-base helpers: each gets its own VNF symbol so distinct helpers never unify.
7141 case CORINFO_HELP_GETGENERICS_GCSTATIC_BASE:
7142 vnf = VNF_GetgenericsGcstaticBase;
7144 case CORINFO_HELP_GETGENERICS_NONGCSTATIC_BASE:
7145 vnf = VNF_GetgenericsNongcstaticBase;
7147 case CORINFO_HELP_GETSHARED_GCSTATIC_BASE:
7148 vnf = VNF_GetsharedGcstaticBase;
7150 case CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE:
7151 vnf = VNF_GetsharedNongcstaticBase;
7153 case CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR:
7154 vnf = VNF_GetsharedGcstaticBaseNoctor;
7156 case CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE_NOCTOR:
7157 vnf = VNF_GetsharedNongcstaticBaseNoctor;
7159 case CORINFO_HELP_READYTORUN_STATIC_BASE:
7160 vnf = VNF_ReadyToRunStaticBase;
7162 case CORINFO_HELP_GETSHARED_GCSTATIC_BASE_DYNAMICCLASS:
7163 vnf = VNF_GetsharedGcstaticBaseDynamicclass;
7165 case CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE_DYNAMICCLASS:
7166 vnf = VNF_GetsharedNongcstaticBaseDynamicclass;
7168 case CORINFO_HELP_CLASSINIT_SHARED_DYNAMICCLASS:
7169 vnf = VNF_ClassinitSharedDynamicclass;
7171 case CORINFO_HELP_GETGENERICS_GCTHREADSTATIC_BASE:
7172 vnf = VNF_GetgenericsGcthreadstaticBase;
7174 case CORINFO_HELP_GETGENERICS_NONGCTHREADSTATIC_BASE:
7175 vnf = VNF_GetgenericsNongcthreadstaticBase;
7177 case CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE:
7178 vnf = VNF_GetsharedGcthreadstaticBase;
7180 case CORINFO_HELP_GETSHARED_NONGCTHREADSTATIC_BASE:
7181 vnf = VNF_GetsharedNongcthreadstaticBase;
7183 case CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE_NOCTOR:
7184 vnf = VNF_GetsharedGcthreadstaticBaseNoctor;
7186 case CORINFO_HELP_GETSHARED_NONGCTHREADSTATIC_BASE_NOCTOR:
7187 vnf = VNF_GetsharedNongcthreadstaticBaseNoctor;
7189 case CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE_DYNAMICCLASS:
7190 vnf = VNF_GetsharedGcthreadstaticBaseDynamicclass;
7192 case CORINFO_HELP_GETSHARED_NONGCTHREADSTATIC_BASE_DYNAMICCLASS:
7193 vnf = VNF_GetsharedNongcthreadstaticBaseDynamicclass;
7195 case CORINFO_HELP_GETSTATICFIELDADDR_CONTEXT:
7196 vnf = VNF_GetStaticAddrContext;
7198 case CORINFO_HELP_GETSTATICFIELDADDR_TLS:
7199 vnf = VNF_GetStaticAddrTLS;
7202 case CORINFO_HELP_RUNTIMEHANDLE_METHOD:
7203 case CORINFO_HELP_RUNTIMEHANDLE_METHOD_LOG:
7204 vnf = VNF_RuntimeHandleMethod;
7207 case CORINFO_HELP_RUNTIMEHANDLE_CLASS:
7208 case CORINFO_HELP_RUNTIMEHANDLE_CLASS_LOG:
7209 vnf = VNF_RuntimeHandleClass;
7212 case CORINFO_HELP_STRCNS:
// Cast / type-test helpers.
7216 case CORINFO_HELP_CHKCASTCLASS:
7217 case CORINFO_HELP_CHKCASTCLASS_SPECIAL:
7218 case CORINFO_HELP_CHKCASTARRAY:
7219 case CORINFO_HELP_CHKCASTINTERFACE:
7220 case CORINFO_HELP_CHKCASTANY:
7221 vnf = VNF_CastClass;
7224 case CORINFO_HELP_READYTORUN_CHKCAST:
7225 vnf = VNF_ReadyToRunCastClass;
7228 case CORINFO_HELP_ISINSTANCEOFCLASS:
7229 case CORINFO_HELP_ISINSTANCEOFINTERFACE:
7230 case CORINFO_HELP_ISINSTANCEOFARRAY:
7231 case CORINFO_HELP_ISINSTANCEOFANY:
7232 vnf = VNF_IsInstanceOf;
7235 case CORINFO_HELP_READYTORUN_ISINSTANCEOF:
7236 vnf = VNF_ReadyToRunIsInstanceOf;
7239 case CORINFO_HELP_LDELEMA_REF:
7243 case CORINFO_HELP_UNBOX:
7247 // A constant within any method.
7248 case CORINFO_HELP_GETCURRENTMANAGEDTHREADID:
7249 vnf = VNF_ManagedThreadId;
7252 case CORINFO_HELP_GETREFANY:
7253 // TODO-CQ: This should really be interpreted as just a struct field reference, in terms of values.
7254 vnf = VNF_GetRefanyVal;
7257 case CORINFO_HELP_GETCLASSFROMMETHODPARAM:
7258 vnf = VNF_GetClassFromMethodParam;
7261 case CORINFO_HELP_GETSYNCFROMCLASSHANDLE:
7262 vnf = VNF_GetSyncFromClassHandle;
7265 case CORINFO_HELP_LOOP_CLONE_CHOICE_ADDR:
7266 vnf = VNF_LoopCloneChoiceAddr;
7269 case CORINFO_HELP_BOX_NULLABLE:
7270 vnf = VNF_BoxNullable;
// Every handled helper must have assigned a real VNFunc by now.
7277 assert(vnf != VNF_Boundary);
// Value-number a helper call. Builds the exception set implied by the helper
// (overflow-only, or a conservative "multiple exceptions" VN), then value-numbers
// pure/allocator helpers via fgValueNumberHelperCallFunc; everything else gets an
// opaque VN. Returns whether the helper may mutate the heap (the caller uses this
// to decide whether to apply an arbitrary heap side effect).
7281 bool Compiler::fgValueNumberHelperCall(GenTreeCall* call)
7283 CorInfoHelpFunc helpFunc = eeGetHelperNum(call->gtCallMethHnd);
7284 bool pure = s_helperCallProperties.IsPure(helpFunc);
7285 bool isAlloc = s_helperCallProperties.IsAllocator(helpFunc);
7286 bool modHeap = s_helperCallProperties.MutatesHeap(helpFunc);
7287 bool mayRunCctor = s_helperCallProperties.MayRunCctor(helpFunc);
7288 bool noThrow = s_helperCallProperties.NoThrow(helpFunc);
7290 ValueNumPair vnpExc = ValueNumStore::VNPForEmptyExcSet();
7292 // If the JIT helper can throw an exception make sure that we fill in
7293 // vnpExc with a Value Number that represents the exception(s) that can be thrown.
7296 // If the helper is known to throw only one particular exception
7297 // we can set vnpExc to that exception, otherwise we conservatively
7298 // model the JIT helper as possibly throwing multiple different exceptions
7302 case CORINFO_HELP_OVERFLOW:
7303 // This helper always throws the VNF_OverflowExc exception
7304 vnpExc = vnStore->VNPExcSetSingleton(vnStore->VNPairForFunc(TYP_REF, VNF_OverflowExc));
7308 // Setup vnpExc with the information that multiple different exceptions
7309 // could be generated by this helper
7310 vnpExc = vnStore->VNPExcSetSingleton(vnStore->VNPairForFunc(TYP_REF, VNF_HelperMultipleExc));
7314 ValueNumPair vnpNorm;
7316 if (call->TypeGet() == TYP_VOID)
7318 vnpNorm = ValueNumStore::VNPForVoid();
7322 // TODO-CQ: this is a list of helpers we're going to treat as non-pure,
7323 // because they raise complications. Eventually, we need to handle those complications...
7324 bool needsFurtherWork = false;
7327 case CORINFO_HELP_NEW_MDARR:
7328 // This is a varargs helper. We need to represent the array shape in the VN world somehow.
7329 needsFurtherWork = true;
7335 if (!needsFurtherWork && (pure || isAlloc))
7337 VNFunc vnf = fgValueNumberHelperMethVNFunc(helpFunc);
7341 if ((call->gtFlags & GTF_CALL_HOISTABLE) == 0)
7347 fgValueNumberHelperCallFunc(call, vnf, vnpExc);
// Not precisely modelable: give the call a fresh opaque VN.
7352 vnpNorm.SetBoth(vnStore->VNForExpr(compCurBB, call->TypeGet()));
7356 call->gtVNPair = vnStore->VNPWithExc(vnpNorm, vnpExc);
7361 // This method asserts that SSA name constraints specified are satisfied.
7362 // Until we figure out otherwise, all VN's are assumed to be liberal.
7363 // TODO-Cleanup: new JitTestLabels for lib vs cons vs both VN classes?
// Debug-only test-harness check: verifies that nodes annotated (via test data)
// as belonging to the same VN class actually received the same value number,
// by building and cross-checking a bijection between test labels and VNs.
7364 void Compiler::JitTestCheckVN()
7366 typedef SimplerHashTable<ssize_t, SmallPrimitiveKeyFuncs<ssize_t>, ValueNum, JitSimplerHashBehavior> LabelToVNMap;
7367 typedef SimplerHashTable<ValueNum, SmallPrimitiveKeyFuncs<ValueNum>, ssize_t, JitSimplerHashBehavior> VNToLabelMap;
7369 // If we have no test data, early out.
7370 if (m_nodeTestData == nullptr)
7375 NodeToTestDataMap* testData = GetNodeTestData();
7377 // First we have to know which nodes in the tree are reachable.
7378 typedef SimplerHashTable<GenTreePtr, PtrKeyFuncs<GenTree>, int, JitSimplerHashBehavior> NodeToIntMap;
7379 NodeToIntMap* reachable = FindReachableNodesInNodeTestData();
// The two maps must stay mutually inverse; both directions are checked below.
7381 LabelToVNMap* labelToVN = new (getAllocatorDebugOnly()) LabelToVNMap(getAllocatorDebugOnly());
7382 VNToLabelMap* vnToLabel = new (getAllocatorDebugOnly()) VNToLabelMap(getAllocatorDebugOnly());
7386 printf("\nJit Testing: Value numbering.\n");
7388 for (NodeToTestDataMap::KeyIterator ki = testData->Begin(); !ki.Equal(testData->End()); ++ki)
7390 TestLabelAndNum tlAndN;
7391 GenTreePtr node = ki.Get();
7392 ValueNum nodeVN = node->GetVN(VNK_Liberal);
7394 bool b = testData->Lookup(node, &tlAndN);
7396 if (tlAndN.m_tl == TL_VN || tlAndN.m_tl == TL_VNNorm)
// An annotated node that optimization made unreachable is a test-authoring error.
7399 if (!reachable->Lookup(node, &dummy))
7402 Compiler::printTreeID(node);
7403 printf(" had a test constraint declared, but has become unreachable at the time the constraint is "
7405 "(This is probably as a result of some optimization -- \n"
7406 "you may need to modify the test case to defeat this opt.)\n");
7413 Compiler::printTreeID(node);
7414 printf(" -- VN class %d.\n", tlAndN.m_num);
7417 if (tlAndN.m_tl == TL_VNNorm)
// TL_VNNorm compares normal values only, stripping any exception-set wrapper.
7419 nodeVN = vnStore->VNNormVal(nodeVN);
7423 if (labelToVN->Lookup(tlAndN.m_num, &vn))
7427 printf(" Already in hash tables.\n");
7429 // The mapping(s) must be one-to-one: if the label has a mapping, then the ssaNm must, as well.
7431 bool b = vnToLabel->Lookup(vn, &num2);
7432 // And the mappings must be the same.
7433 if (tlAndN.m_num != num2)
7436 Compiler::printTreeID(node);
7437 printf(", with value number " STR_VN "%x, was declared in VN class %d,\n", nodeVN, tlAndN.m_num);
7438 printf("but this value number " STR_VN
7439 "%x has already been associated with a different SSA name class: %d.\n",
7443 // And the current node must be of the specified SSA family.
7447 Compiler::printTreeID(node);
7448 printf(", " STR_VN "%x was declared in SSA name class %d,\n", nodeVN, tlAndN.m_num);
7449 printf("but that name class was previously bound to a different value number: " STR_VN "%x.\n", vn);
7456 // The mapping(s) must be one-to-one: if the label has no mapping, then the ssaNm may not, either.
7457 if (vnToLabel->Lookup(nodeVN, &num))
7460 Compiler::printTreeID(node);
7461 printf(", " STR_VN "%x was declared in value number class %d,\n", nodeVN, tlAndN.m_num);
7463 "but this value number has already been associated with a different value number class: %d.\n",
7467 // Add to both mappings.
7468 labelToVN->Set(tlAndN.m_num, nodeVN);
7469 vnToLabel->Set(nodeVN, tlAndN.m_num);
7472 printf(" added to hash tables.\n");
// Print a liberal/conservative VN pair: a single VN when both sides are equal,
// otherwise both VNs via vnPrint. 'level' > 0 requests a detailed dump of each VN.
7479 void Compiler::vnpPrint(ValueNumPair vnp, unsigned level)
7481 if (vnp.BothEqual())
7483 vnPrint(vnp.GetLiberal(), level);
7488 vnPrint(vnp.GetLiberal(), level);
7490 vnPrint(vnp.GetConservative(), level);
7495 void Compiler::vnPrint(ValueNum vn, unsigned level)
7498 if (ValueNumStore::isReservedVN(vn))
7500 printf(ValueNumStore::reservedName(vn));
7504 printf(STR_VN "%x", vn);
7507 vnStore->vnDump(this, vn);
7514 // Methods of ValueNumPair.
// Default constructor: both the liberal and conservative VNs start as NoVN (undefined).
7515 ValueNumPair::ValueNumPair() : m_liberal(ValueNumStore::NoVN), m_conservative(ValueNumStore::NoVN)
// Returns true only when both the liberal and conservative VNs have been assigned
// (i.e. neither is still NoVN).
7519 bool ValueNumPair::BothDefined() const
7521 return (m_liberal != ValueNumStore::NoVN) && (m_conservative != ValueNumStore::NoVN);