1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
5 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
10 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
11 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
20 #include "ssaconfig.h"
// Maps a genTreeOps oper (plus an "is unsigned" flag) to the VNFunc used to value-number it.
// NOTE(review): the embedded source line numbers jump (22 -> 24); the function body between
// these lines is elided in this listing — presumably it selects an unsigned VNFunc variant.
22 VNFunc GetVNFuncForOper(genTreeOps oper, bool isUnsigned)
// GT_EQ/GT_NE compare all bits identically for signed and unsigned operands,
// so the plain (signed) mapping is used for them even when isUnsigned is true.
24 if (!isUnsigned || (oper == GT_EQ) || (oper == GT_NE))
// Constructor: initializes all constant/function memoization maps to nullptr (they are
// lazily created), resets the chunk-allocation tables, reserves chunk 0 for special REF
// constants, and reads the map-select budget from the JIT configuration.
// NOTE(review): source line numbers jump throughout; some initializer-list entries and
// braces are elided in this listing.
53 ValueNumStore::ValueNumStore(Compiler* comp, CompAllocator* alloc)
62 , m_fixedPointMapSels(alloc, 8)
63 , m_checkedBoundVNs(comp)
65 , m_intCnsMap(nullptr)
66 , m_longCnsMap(nullptr)
67 , m_handleMap(nullptr)
68 , m_floatCnsMap(nullptr)
69 , m_doubleCnsMap(nullptr)
70 , m_byrefCnsMap(nullptr)
71 , m_VNFunc0Map(nullptr)
72 , m_VNFunc1Map(nullptr)
73 , m_VNFunc2Map(nullptr)
74 , m_VNFunc3Map(nullptr)
75 , m_VNFunc4Map(nullptr)
77 // We have no current allocation chunks.
78 for (unsigned i = 0; i < TYP_COUNT; i++)
// The second index spans the extra-attribute kinds plus one slot per loop number
// (see GetAllocChunk, which maps loop numbers past CEA_Count).
80 for (unsigned j = CEA_None; j <= CEA_Count + MAX_LOOP_NUM; j++)
82 m_curAllocChunk[i][j] = NoChunk;
// Small int constants are cached in a direct-mapped array (see VNForIntCon).
86 for (unsigned i = 0; i < SmallIntConstNum; i++)
88 m_VNsForSmallIntConsts[i] = NoVN;
90 // We will reserve chunk 0 to hold some special constants, like the constant NULL, the "exception" value, and the
// ZeroMap (the remainder of this comment is elided in this listing; see the allocation note below).
92 Chunk* specialConstChunk = new (m_alloc) Chunk(m_alloc, &m_nextChunkBase, TYP_REF, CEA_Const, MAX_LOOP_NUM);
93 specialConstChunk->m_numUsed +=
94 SRC_NumSpecialRefConsts; // Implicitly allocate 0 ==> NULL, and 1 ==> Exception, 2 ==> ZeroMap.
95 ChunkNum cn = m_chunks.Push(specialConstChunk);
98 m_mapSelectBudget = (int)JitConfig.JitVNMapSelBudget(); // We cast the unsigned DWORD to a signed int.
100 // This value must be non-negative and non-zero, reset the value to DEFAULT_MAP_SELECT_BUDGET if it isn't.
101 if (m_mapSelectBudget <= 0)
103 m_mapSelectBudget = DEFAULT_MAP_SELECT_BUDGET;
// EvalOp (unary): constant-folds a unary operation on a constant operand of type T.
// Falls through to EvalOpIntegral for operations that are integral-only.
// NOTE(review): switch bodies between the visible lines are elided in this listing.
108 template <typename T>
109 T ValueNumStore::EvalOp(VNFunc vnf, T v0)
111 genTreeOps oper = genTreeOps(vnf);
113 // Here we handle those unary ops that are the same for integral and floating-point types.
119 // Must be int-specific
120 return EvalOpIntegral(vnf, v0);
// EvalOpIntegral (unary): folds unary ops defined only on integral types.
124 template <typename T>
125 T ValueNumStore::EvalOpIntegral(VNFunc vnf, T v0)
127 genTreeOps oper = genTreeOps(vnf);
129 // Here we handle unary ops that are the same for all integral types.
// EvalOp (binary): folds a binary operation on two constants. For genTreeOps-range
// functions, division records DivideByZeroExc / ArithmeticExc into *pExcSet instead of
// trapping; VNF_-range functions handle the unsigned comparison/arithmetic variants.
140 template <typename T>
141 T ValueNumStore::EvalOp(VNFunc vnf, T v0, T v1, ValueNum* pExcSet)
143 if (vnf < VNF_Boundary)
145 genTreeOps oper = genTreeOps(vnf);
146 // Here we handle those that are the same for integral and floating-point types.
// Division by zero is represented as an exception-set VN, not evaluated.
158 *pExcSet = VNExcSetSingleton(VNForFunc(TYP_REF, VNF_DivideByZeroExc));
// MinInt / -1 overflows signed division; likewise represented as an exception.
161 if (IsOverflowIntDiv(v0, v1))
163 *pExcSet = VNExcSetSingleton(VNForFunc(TYP_REF, VNF_ArithmeticExc));
172 // Must be int-specific
173 return EvalOpIntegral(vnf, v0, v1, pExcSet);
176 else // must be a VNF_ function
// Unsigned variants: reinterpret both operands as unsigned before the operation.
// Doing add/sub/mul in unsigned avoids signed-overflow UB; the result bits are identical.
178 typedef typename jitstd::make_unsigned<T>::type UT;
182 return T(UT(v0) > UT(v1));
184 return T(UT(v0) >= UT(v1));
186 return T(UT(v0) < UT(v1));
188 return T(UT(v0) <= UT(v1));
190 return T(UT(v0) + UT(v1));
192 return T(UT(v0) - UT(v1));
194 return T(UT(v0) * UT(v1));
196 // Must be int-specific
197 return EvalOpIntegral(vnf, v0, v1, pExcSet);
// Quiet-NaN bit patterns, produced via memcpy to avoid strict-aliasing violations.
// 0xFFC00000 is the single-precision quiet NaN returned by FloatTraits::NaN()
// (enclosing function signatures are elided in this listing).
206 unsigned bits = 0xFFC00000u;
208 static_assert(sizeof(bits) == sizeof(result), "sizeof(unsigned) must equal sizeof(float)");
209 memcpy(&result, &bits, sizeof(result));
// 0xFFF8000000000000 is the double-precision quiet NaN for DoubleTraits::NaN().
218 unsigned long long bits = 0xFFF8000000000000ull;
220 static_assert(sizeof(bits) == sizeof(result), "sizeof(unsigned long long) must equal sizeof(double)");
221 memcpy(&result, &bits, sizeof(result));
// FpRem: floating-point remainder following the ECMA-335 'rem' semantics quoted below,
// rather than raw fmod (which differs for infinite divisors).
226 template <typename TFp, typename TFpTraits>
227 TFp FpRem(TFp dividend, TFp divisor)
229 // From the ECMA standard:
231 // If [divisor] is zero or [dividend] is infinity
232 // the result is NaN.
233 // If [divisor] is infinity,
234 // the result is [dividend]
236 if (divisor == 0 || !_finite(dividend))
238 return TFpTraits::NaN();
// Infinite (but non-NaN) divisor: ECMA says the result is the dividend
// (the return statement for this branch is elided in this listing).
240 else if (!_finite(divisor) && !_isnan(divisor))
245 return (TFp)fmod((double)dividend, (double)divisor);
248 // Specialize for double for floating operations, that doesn't involve unsigned.
// Full specialization of binary EvalOp for double: folds arithmetic directly; the
// remainder case dispatches to FpRem so rem-by-zero/infinity follow ECMA semantics.
// (Switch cases between the visible lines are elided in this listing.)
250 double ValueNumStore::EvalOp<double>(VNFunc vnf, double v0, double v1, ValueNum* pExcSet)
252 genTreeOps oper = genTreeOps(vnf);
253 // Here we handle those that are the same for floating-point types.
265 return FpRem<double, DoubleTraits>(v0, v1);
272 // Specialize for float for floating operations, that doesn't involve unsigned.
// Same specialization for float.
274 float ValueNumStore::EvalOp<float>(VNFunc vnf, float v0, float v1, ValueNum* pExcSet)
276 genTreeOps oper = genTreeOps(vnf);
277 // Here we handle those that are the same for floating-point types.
289 return FpRem<float, FloatTraits>(v0, v1);
// EvalComparison: folds a relational operation on two constants to 0/1.
// genTreeOps-range comparisons are signed; VNF_-range ones are the unsigned variants.
// NOTE(review): switch labels between the visible lines are elided in this listing.
296 template <typename T>
297 int ValueNumStore::EvalComparison(VNFunc vnf, T v0, T v1)
299 if (vnf < VNF_Boundary)
301 genTreeOps oper = genTreeOps(vnf);
302 // Here we handle those that are the same for floating-point types.
321 else // must be a VNF_ function
// Unsigned comparisons: reinterpret operands as unsigned.
326 return unsigned(v0) > unsigned(v1);
328 return unsigned(v0) >= unsigned(v1);
330 return unsigned(v0) < unsigned(v1);
332 return unsigned(v0) <= unsigned(v1);
// Ordered-only float comparison helper; callers must have filtered out NaNs
// (see the caveat in the comment below and the NaN check in VNForFunc).
340 template <typename T>
341 int ValueNumStore::EvalOrderedComparisonFloat(VNFunc vnf, T v0, T v1)
345 // All comparisons below are ordered comparisons.
347 // We should guard this function from unordered comparisons
348 // identified by the GTF_RELOP_NAN_UN flag. Either the flag
349 // should be bubbled (similar to GTF_UNSIGNED for ints)
350 // to this point or we should bail much earlier if any of
351 // the operands are NaN.
353 genTreeOps oper = genTreeOps(vnf);
354 // Here we handle those that are the same for floating-point types.
// Floating-point comparisons delegate to the ordered helper above.
375 int ValueNumStore::EvalComparison<double>(VNFunc vnf, double v0, double v1)
377 return EvalOrderedComparisonFloat(vnf, v0, v1);
381 int ValueNumStore::EvalComparison<float>(VNFunc vnf, float v0, float v1)
383 return EvalOrderedComparisonFloat(vnf, v0, v1);
// EvalOpIntegral (binary): folds integral-only binary ops — shifts, rotates,
// div/mod (with divide-by-zero / overflow recorded into *pExcSet), and the
// unsigned udiv/umod variants.
// NOTE(review): switch labels and several branch bodies are elided in this listing;
// the 0x3F shift masks suggest these visible cases are the 64-bit paths — confirm.
386 template <typename T>
387 T ValueNumStore::EvalOpIntegral(VNFunc vnf, T v0, T v1, ValueNum* pExcSet)
389 genTreeOps oper = genTreeOps(vnf);
// Shift counts are masked to the operand width, matching hardware behavior.
413 return v0 << (v1 & 0x3F);
422 return v0 >> (v1 & 0x3F);
// Logical (unsigned) right shift: cast through the unsigned type first.
431 return UINT64(v0) >> (v1 & 0x3F);
435 return UINT32(v0) >> v1;
// Rotates composed from two shifts.
440 return (v0 << v1) | (UINT64(v0) >> (64 - v1));
444 return (v0 << v1) | (UINT32(v0) >> (32 - v1));
450 return (v0 << (64 - v1)) | (UINT64(v0) >> v1);
454 return (v0 << (32 - v1)) | (UINT32(v0) >> v1);
// Signed div/mod: exceptions are value-numbered, not raised.
461 *pExcSet = VNExcSetSingleton(VNForFunc(TYP_REF, VNF_DivideByZeroExc));
463 else if (IsOverflowIntDiv(v0, v1))
465 *pExcSet = VNExcSetSingleton(VNForFunc(TYP_REF, VNF_ArithmeticExc));
468 else // We are not dividing by Zero, so we can calculate the exact result.
470 // Perform the appropriate operation.
475 else // Must be GT_MOD
// Unsigned div/mod: only divide-by-zero can fault (no MinInt/-1 overflow).
485 *pExcSet = VNExcSetSingleton(VNForFunc(TYP_REF, VNF_DivideByZeroExc));
488 else // We are not dividing by Zero, so we can calculate the exact result.
490 typedef typename jitstd::make_unsigned<T>::type UT;
491 // We need for force the source operands for the divide or mod operation
492 // to be considered unsigned.
496 // This is return unsigned(v0) / unsigned(v1) for both sizes of integers
497 return T(UT(v0) / UT(v1));
499 else // Must be GT_UMOD
501 // This is return unsigned(v0) % unsigned(v1) for both sizes of integers
502 return T(UT(v0) % UT(v1));
// VNExcSetSingleton: builds a one-element exception set as ExcSetCons(x, EmptySet).
510 ValueNum ValueNumStore::VNExcSetSingleton(ValueNum x)
512 ValueNum res = VNForFunc(TYP_REF, VNF_ExcSetCons, x, VNForEmptyExcSet());
// Debug-only tracing (the surrounding #ifdef DEBUG is elided in this listing).
514 if (m_pComp->verbose)
516 printf(" " STR_VN "%x = singleton exc set", res);
// Pair-wise wrapper over VNExcSetSingleton (liberal + conservative VNs).
524 ValueNumPair ValueNumStore::VNPExcSetSingleton(ValueNumPair xp)
526 return ValueNumPair(VNExcSetSingleton(xp.GetLiberal()), VNExcSetSingleton(xp.GetConservative()));
// VNExcSetUnion: merges two exception sets. Sets are sorted ExcSetCons lists ordered
// by element VN, so this is a sorted-list merge that drops duplicates.
529 ValueNum ValueNumStore::VNExcSetUnion(ValueNum xs0, ValueNum xs1 DEBUGARG(bool topLevel))
// Empty-set fast paths (the returns for these branches are elided in this listing).
531 if (xs0 == VNForEmptyExcSet())
535 else if (xs1 == VNForEmptyExcSet())
542 bool b0 = GetVNFunc(xs0, &funcXs0);
543 assert(b0 && funcXs0.m_func == VNF_ExcSetCons); // Precondition: xs0 is an exception set.
545 bool b1 = GetVNFunc(xs1, &funcXs1);
546 assert(b1 && funcXs1.m_func == VNF_ExcSetCons); // Precondition: xs1 is an exception set.
// Take the smaller head and recurse on the rest, preserving sorted order.
548 if (funcXs0.m_args[0] < funcXs1.m_args[0])
550 res = VNForFunc(TYP_REF, VNF_ExcSetCons, funcXs0.m_args[0],
551 VNExcSetUnion(funcXs0.m_args[1], xs1 DEBUGARG(false)));
553 else if (funcXs0.m_args[0] == funcXs1.m_args[0])
555 // Equal elements; only add one to the result.
556 res = VNExcSetUnion(funcXs0.m_args[1], xs1);
560 assert(funcXs0.m_args[0] > funcXs1.m_args[0]);
561 res = VNForFunc(TYP_REF, VNF_ExcSetCons, funcXs1.m_args[0],
562 VNExcSetUnion(xs0, funcXs1.m_args[1] DEBUGARG(false)));
// Pair-wise union (liberal with liberal, conservative with conservative).
569 ValueNumPair ValueNumStore::VNPExcSetUnion(ValueNumPair xs0vnp, ValueNumPair xs1vnp)
571 return ValueNumPair(VNExcSetUnion(xs0vnp.GetLiberal(), xs1vnp.GetLiberal()),
572 VNExcSetUnion(xs0vnp.GetConservative(), xs1vnp.GetConservative()));
// VNUnpackExc: splits a possibly-exception-wrapped VN into its normal value (*pvn)
// and its exception set (*pvnx). The non-ValWithExc path is elided in this listing.
575 void ValueNumStore::VNUnpackExc(ValueNum vnWx, ValueNum* pvn, ValueNum* pvnx)
577 assert(vnWx != NoVN);
579 if (GetVNFunc(vnWx, &funcApp) && funcApp.m_func == VNF_ValWithExc)
581 *pvn = funcApp.m_args[0];
582 *pvnx = funcApp.m_args[1];
// Pair-wise unpack.
590 void ValueNumStore::VNPUnpackExc(ValueNumPair vnWx, ValueNumPair* pvn, ValueNumPair* pvnx)
592 VNUnpackExc(vnWx.GetLiberal(), pvn->GetLiberalAddr(), pvnx->GetLiberalAddr());
593 VNUnpackExc(vnWx.GetConservative(), pvn->GetConservativeAddr(), pvnx->GetConservativeAddr());
// VNNormVal: strips a ValWithExc wrapper, returning just the "normal" value.
596 ValueNum ValueNumStore::VNNormVal(ValueNum vn)
599 if (GetVNFunc(vn, &funcApp) && funcApp.m_func == VNF_ValWithExc)
601 return funcApp.m_args[0];
// Pair-wise normal value.
609 ValueNumPair ValueNumStore::VNPNormVal(ValueNumPair vnp)
611 return ValueNumPair(VNNormVal(vnp.GetLiberal()), VNNormVal(vnp.GetConservative()));
// VNExcVal: extracts the exception set of a VN (empty set if none attached).
614 ValueNum ValueNumStore::VNExcVal(ValueNum vn)
617 if (GetVNFunc(vn, &funcApp) && funcApp.m_func == VNF_ValWithExc)
619 return funcApp.m_args[1];
623 return VNForEmptyExcSet();
// Pair-wise exception value.
627 ValueNumPair ValueNumStore::VNPExcVal(ValueNumPair vnp)
629 return ValueNumPair(VNExcVal(vnp.GetLiberal()), VNExcVal(vnp.GetConservative()));
632 // If vn "excSet" is not "VNForEmptyExcSet()", return "VNF_ValWithExc(vn, excSet)". Otherwise,
// return vn unchanged (the tail of this comment and that branch are elided in this listing).
634 ValueNum ValueNumStore::VNWithExc(ValueNum vn, ValueNum excSet)
636 if (excSet == VNForEmptyExcSet())
// If vn already carries exceptions, union the sets rather than nesting wrappers.
643 ValueNum vnX = VNForEmptyExcSet();
644 VNUnpackExc(vn, &vnNorm, &vnX);
645 return VNForFunc(TypeOfVN(vnNorm), VNF_ValWithExc, vnNorm, VNExcSetUnion(vnX, excSet));
// Pair-wise VNWithExc.
649 ValueNumPair ValueNumStore::VNPWithExc(ValueNumPair vnp, ValueNumPair excSetVNP)
651 return ValueNumPair(VNWithExc(vnp.GetLiberal(), excSetVNP.GetLiberal()),
652 VNWithExc(vnp.GetConservative(), excSetVNP.GetConservative()));
// IsKnownNonNull: true when vn is a function application whose VNFunc is flagged
// VNFOA_KnownNonNull in the static attribute table. (Early-out lines are elided here.)
655 bool ValueNumStore::IsKnownNonNull(ValueNum vn)
662 return GetVNFunc(vn, &funcAttr) && (s_vnfOpAttribs[funcAttr.m_func] & VNFOA_KnownNonNull) != 0;
// IsSharedStatic: same pattern, keyed on the VNFOA_SharedStatic attribute flag.
665 bool ValueNumStore::IsSharedStatic(ValueNum vn)
672 return GetVNFunc(vn, &funcAttr) && (s_vnfOpAttribs[funcAttr.m_func] & VNFOA_SharedStatic) != 0;
// Chunk constructor: allocates the per-chunk payload array ("m_defs") whose element
// type depends on the (typ, attribs) pair, then advances *pNextBaseVN by ChunkSize so
// consecutive chunks get disjoint VN ranges.
// NOTE(review): the switch/case labels between the visible assignments are elided in
// this listing; each m_defs assignment below belongs to a different case.
675 ValueNumStore::Chunk::Chunk(CompAllocator* alloc,
676 ValueNum* pNextBaseVN,
678 ChunkExtraAttribs attribs,
679 BasicBlock::loopNumber loopNum)
680 : m_defs(nullptr), m_numUsed(0), m_baseVN(*pNextBaseVN), m_typ(typ), m_attribs(attribs), m_loopNum(loopNum)
682 // Allocate "m_defs" here, according to the typ/attribs pair.
687 break; // Nothing to do.
// Constant chunks: payload is the raw constant representation for the type.
692 m_defs = new (alloc) Alloc<TYP_INT>::Type[ChunkSize];
695 m_defs = new (alloc) Alloc<TYP_FLOAT>::Type[ChunkSize];
698 m_defs = new (alloc) Alloc<TYP_LONG>::Type[ChunkSize];
701 m_defs = new (alloc) Alloc<TYP_DOUBLE>::Type[ChunkSize];
704 m_defs = new (alloc) Alloc<TYP_BYREF>::Type[ChunkSize];
707 // We allocate space for a single REF constant, NULL, so we can access these values uniformly.
708 // Since this value is always the same, we represent it as a static.
709 m_defs = &s_specialRefConsts[0];
710 break; // Nothing to do.
712 assert(false); // Should not reach here.
// Handle and function-application chunks: payload is the defining struct.
717 m_defs = new (alloc) VNHandle[ChunkSize];
721 m_defs = new (alloc) VNFunc[ChunkSize];
725 m_defs = new (alloc) VNDefFunc1Arg[ChunkSize];
728 m_defs = new (alloc) VNDefFunc2Arg[ChunkSize];
731 m_defs = new (alloc) VNDefFunc3Arg[ChunkSize];
734 m_defs = new (alloc) VNDefFunc4Arg[ChunkSize];
// Reserve this chunk's VN range.
739 *pNextBaseVN += ChunkSize;
// GetAllocChunk: returns the current not-yet-full chunk for (typ, attribs, loopNum),
// allocating a new one if the cached chunk is full or absent. The cache index is the
// attribute kind, or (for loop-attributed unique VNs) CEA_Count + loop number.
742 ValueNumStore::Chunk* ValueNumStore::GetAllocChunk(var_types typ,
743 ChunkExtraAttribs attribs,
744 BasicBlock::loopNumber loopNum)
748 if (loopNum == MAX_LOOP_NUM)
750 // Loop nest is unknown/irrelevant for this VN.
// (index = attribs here; the assignment is elided in this listing.)
755 // Loop nest is interesting. Since we know this is only true for unique VNs, we know attribs will
756 // be CEA_None and can just index based on loop number.
757 noway_assert(attribs == CEA_None);
758 // Map NOT_IN_LOOP -> MAX_LOOP_NUM to make the index range contiguous [0..MAX_LOOP_NUM]
759 index = CEA_Count + (loopNum == BasicBlock::NOT_IN_LOOP ? MAX_LOOP_NUM : loopNum);
// Reuse the cached chunk if it still has room.
761 ChunkNum cn = m_curAllocChunk[typ][index];
764 res = m_chunks.Get(cn);
765 if (res->m_numUsed < ChunkSize)
770 // Otherwise, must allocate a new one.
771 res = new (m_alloc) Chunk(m_alloc, &m_nextChunkBase, typ, attribs, loopNum);
772 cn = m_chunks.Push(res);
773 m_curAllocChunk[typ][index] = cn;
// VNForIntCon: VN for a 32-bit int constant. Small constants use a direct-mapped
// cache array; others go through the (lazily consulted) int-constant map.
777 ValueNum ValueNumStore::VNForIntCon(INT32 cnsVal)
779 if (IsSmallIntConst(cnsVal))
781 unsigned ind = cnsVal - SmallIntConstMin;
782 ValueNum vn = m_VNsForSmallIntConsts[ind];
// Cache miss (vn == NoVN check is elided in this listing): compute and memoize.
787 vn = GetVNForIntCon(cnsVal);
788 m_VNsForSmallIntConsts[ind] = vn;
793 return GetVNForIntCon(cnsVal);
// VNForLongCon: VN for a 64-bit constant. Lookup-or-allocate: consult the memo map,
// else carve a slot out of a TYP_LONG/CEA_Const chunk, store the payload, and memoize.
797 ValueNum ValueNumStore::VNForLongCon(INT64 cnsVal)
800 if (GetLongCnsMap()->Lookup(cnsVal, &res))
806 Chunk* c = GetAllocChunk(TYP_LONG, CEA_Const);
807 unsigned offsetWithinChunk = c->AllocVN();
808 res = c->m_baseVN + offsetWithinChunk;
809 reinterpret_cast<INT64*>(c->m_defs)[offsetWithinChunk] = cnsVal;
810 GetLongCnsMap()->Set(cnsVal, res);
// VNForFloatCon: same lookup-or-allocate pattern for float constants.
815 ValueNum ValueNumStore::VNForFloatCon(float cnsVal)
818 if (GetFloatCnsMap()->Lookup(cnsVal, &res))
824 Chunk* c = GetAllocChunk(TYP_FLOAT, CEA_Const);
825 unsigned offsetWithinChunk = c->AllocVN();
826 res = c->m_baseVN + offsetWithinChunk;
827 reinterpret_cast<float*>(c->m_defs)[offsetWithinChunk] = cnsVal;
828 GetFloatCnsMap()->Set(cnsVal, res);
// VNForDoubleCon: same pattern for double constants.
833 ValueNum ValueNumStore::VNForDoubleCon(double cnsVal)
836 if (GetDoubleCnsMap()->Lookup(cnsVal, &res))
842 Chunk* c = GetAllocChunk(TYP_DOUBLE, CEA_Const);
843 unsigned offsetWithinChunk = c->AllocVN();
844 res = c->m_baseVN + offsetWithinChunk;
845 reinterpret_cast<double*>(c->m_defs)[offsetWithinChunk] = cnsVal;
846 GetDoubleCnsMap()->Set(cnsVal, res);
// VNForByrefCon: same pattern for byref constants (stored as INT64 payloads).
851 ValueNum ValueNumStore::VNForByrefCon(INT64 cnsVal)
854 if (GetByrefCnsMap()->Lookup(cnsVal, &res))
860 Chunk* c = GetAllocChunk(TYP_BYREF, CEA_Const);
861 unsigned offsetWithinChunk = c->AllocVN();
862 res = c->m_baseVN + offsetWithinChunk;
863 reinterpret_cast<INT64*>(c->m_defs)[offsetWithinChunk] = cnsVal;
864 GetByrefCnsMap()->Set(cnsVal, res);
// VNForCastOper: encodes a cast descriptor (target type + src-unsignedness flag) as an
// int-constant VN: the type is shifted above VCA_BitCount reserved flag bits, and
// VCA_UnsignedSrc (0x01) marks an unsigned source.
869 ValueNum ValueNumStore::VNForCastOper(var_types castToType, bool srcIsUnsigned /*=false*/)
871 assert(castToType != TYP_STRUCT);
872 INT32 cnsVal = INT32(castToType) << INT32(VCA_BitCount);
873 assert((cnsVal & INT32(VCA_ReservedBits)) == 0);
// (The `if (srcIsUnsigned)` guard for the next line is elided in this listing.)
877 // We record the srcIsUnsigned by or-ing a 0x01
878 cnsVal |= INT32(VCA_UnsignedSrc);
880 ValueNum result = VNForIntCon(cnsVal);
// Debug-only tracing.
883 if (m_pComp->verbose)
885 printf(" VNForCastOper(%s%s) is " STR_VN "%x\n", varTypeName(castToType),
886 srcIsUnsigned ? ", unsignedSrc" : "", result);
// VNForHandle: VN for a handle constant (value + GTF_ICON handle-kind flags), using
// the same lookup-or-allocate pattern as the other constant factories, keyed on the
// (value, flags) pair so identical values with different kinds get distinct VNs.
893 ValueNum ValueNumStore::VNForHandle(ssize_t cnsVal, unsigned handleFlags)
895 assert((handleFlags & ~GTF_ICON_HDL_MASK) == 0);
899 VNHandle::Initialize(&handle, cnsVal, handleFlags);
900 if (GetHandleMap()->Lookup(handle, &res))
906 Chunk* c = GetAllocChunk(TYP_I_IMPL, CEA_Handle);
907 unsigned offsetWithinChunk = c->AllocVN();
908 res = c->m_baseVN + offsetWithinChunk;
909 reinterpret_cast<VNHandle*>(c->m_defs)[offsetWithinChunk] = handle;
910 GetHandleMap()->Set(handle, res);
915 // Returns the value number for zero of the given "typ".
916 // It has an unreached() for a "typ" that has no zero value, such as TYP_VOID.
// NOTE(review): the switch/case labels are elided in this listing; each return below
// corresponds to a different var_types case.
917 ValueNum ValueNumStore::VNZeroForType(var_types typ)
928 return VNForIntCon(0);
931 return VNForLongCon(0);
// With x87 doubles, float zero is represented as a double constant.
933 #if FEATURE_X87_DOUBLES
934 return VNForDoubleCon(0.0);
936 return VNForFloatCon(0.0f);
939 return VNForDoubleCon(0.0);
943 return VNForByrefCon(0);
946 // TODO-CQ: Improve value numbering for SIMD types.
951 #endif // FEATURE_SIMD
// Struct zero is modeled as the ZeroMap; the comment notes this is self-referential.
952 return VNForZeroMap(); // Recursion!
954 // These should be unreached.
956 unreached(); // Should handle all types.
960 // Returns the value number for one of the given "typ".
961 // It returns NoVN for a "typ" that has no one value, such as TYP_REF.
962 ValueNum ValueNumStore::VNOneForType(var_types typ)
973 return VNForIntCon(1);
976 return VNForLongCon(1);
978 return VNForFloatCon(1.0f);
980 return VNForDoubleCon(1.0);
// Static backing storage for the special REF constants reserved in chunk 0
// (NULL / Exception / ZeroMap — see SRC_NumSpecialRefConsts in the constructor).
987 class Object* ValueNumStore::s_specialRefConsts[] = {nullptr, nullptr, nullptr};
989 // Nullary operators (i.e., symbolic constants).
// Lookup-or-allocate for a 0-arity function application, memoized in VNFunc0Map.
990 ValueNum ValueNumStore::VNForFunc(var_types typ, VNFunc func)
992 assert(VNFuncArity(func) == 0);
993 assert(func != VNF_NotAField);
997 if (GetVNFunc0Map()->Lookup(func, &res))
1003 Chunk* c = GetAllocChunk(typ, CEA_Func0);
1004 unsigned offsetWithinChunk = c->AllocVN();
1005 res = c->m_baseVN + offsetWithinChunk;
1006 reinterpret_cast<VNFunc*>(c->m_defs)[offsetWithinChunk] = func;
1007 GetVNFunc0Map()->Set(func, res);
// Unary function application: constant-folds when possible, else memoizes a
// VNDefFunc1Arg in VNFunc1Map using the usual lookup-or-allocate pattern.
1012 ValueNum ValueNumStore::VNForFunc(var_types typ, VNFunc func, ValueNum arg0VN)
1014 assert(arg0VN == VNNormVal(arg0VN)); // Arguments don't carry exceptions.
1017 VNDefFunc1Arg fstruct(func, arg0VN);
1019 // Do constant-folding.
1020 if (CanEvalForConstantArgs(func) && IsVNConstant(arg0VN))
1022 return EvalFuncForConstantArgs(typ, func, arg0VN);
1025 if (GetVNFunc1Map()->Lookup(fstruct, &res))
1031 // Otherwise, create a new VN for this application.
1032 Chunk* c = GetAllocChunk(typ, CEA_Func1);
1033 unsigned offsetWithinChunk = c->AllocVN();
1034 res = c->m_baseVN + offsetWithinChunk;
1035 reinterpret_cast<VNDefFunc1Arg*>(c->m_defs)[offsetWithinChunk] = fstruct;
1036 GetVNFunc1Map()->Set(fstruct, res);
1041 // Windows x86 and Windows ARM/ARM64 may not define _isnanf() but they do define _isnan().
1042 // We will redirect the macros to these other functions if the macro is not defined for the
1043 // platform. This has the side effect of a possible implicit upcasting for arguments passed.
1044 #if (defined(_TARGET_X86_) || defined(_TARGET_ARM_) || defined(_TARGET_ARM64_)) && !defined(FEATURE_PAL)
1046 #if !defined(_isnanf)
1047 #define _isnanf _isnan
// Binary function application. Pipeline:
//   1) constant-fold when both args are foldable constants (with guards for handles,
//      mixed int/float operands, NaN comparisons, and TYP_BYREF results);
//   2) canonicalize commutative ops by VN order so (a op b) and (b op a) unify;
//   3) memo lookup in VNFunc2Map;
//   4) apply algebraic identities (x+0, x*1, x*0, x|0, x&0, x==x, x!=x, null checks);
//   5) otherwise allocate a fresh VNDefFunc2Arg and memoize it.
// NOTE(review): many case labels / closing braces are elided in this listing.
1052 ValueNum ValueNumStore::VNForFunc(var_types typ, VNFunc func, ValueNum arg0VN, ValueNum arg1VN)
1054 assert(arg0VN != NoVN && arg1VN != NoVN);
1055 assert(arg0VN == VNNormVal(arg0VN)); // Arguments carry no exceptions.
1056 assert(arg1VN == VNNormVal(arg1VN)); // Arguments carry no exceptions.
1057 assert(VNFuncArity(func) == 2);
1058 assert(func != VNF_MapSelect); // Precondition: use the special function VNForMapSelect defined for that.
1062 // Do constant-folding.
1063 if (CanEvalForConstantArgs(func) && IsVNConstant(arg0VN) && IsVNConstant(arg1VN))
1065 bool canFold = true; // Normally we will be able to fold this 'func'
1067 // Special case for VNF_Cast of constant handles
1068 // Don't allow eval/fold of a GT_CAST(non-I_IMPL, Handle)
1070 if ((func == VNF_Cast) && (typ != TYP_I_IMPL) && IsVNHandle(arg0VN))
1075 // It is possible for us to have mismatched types (see Bug 750863)
1076 // We don't try to fold a binary operation when one of the constant operands
1077 // is a floating-point constant and the other is not.
1079 var_types arg0VNtyp = TypeOfVN(arg0VN);
1080 bool arg0IsFloating = varTypeIsFloating(arg0VNtyp);
1082 var_types arg1VNtyp = TypeOfVN(arg1VN);
1083 bool arg1IsFloating = varTypeIsFloating(arg1VNtyp);
1085 if (arg0IsFloating != arg1IsFloating)
1090 // NaNs are unordered wrt to other floats. While an ordered
1091 // comparison would return false, an unordered comparison
1092 // will return true if any operands are a NaN. We only perform
1093 // ordered NaN comparison in EvalComparison.
1094 if ((arg0IsFloating && (((arg0VNtyp == TYP_FLOAT) && _isnanf(GetConstantSingle(arg0VN))) ||
1095 ((arg0VNtyp == TYP_DOUBLE) && _isnan(GetConstantDouble(arg0VN))))) ||
1096 (arg1IsFloating && (((arg1VNtyp == TYP_FLOAT) && _isnanf(GetConstantSingle(arg1VN))) ||
1097 ((arg1VNtyp == TYP_DOUBLE) && _isnan(GetConstantDouble(arg1VN))))))
1101 if (typ == TYP_BYREF)
1103 // We don't want to fold expressions that produce TYP_BYREF
// (The `if (canFold)` guard for the fold below is elided in this listing.)
1109 return EvalFuncForConstantArgs(typ, func, arg0VN, arg1VN);
1112 // We canonicalize commutative operations.
1113 // (Perhaps should eventually handle associative/commutative [AC] ops -- but that gets complicated...)
1114 if (VNFuncIsCommutative(func))
1116 // Order arg0 arg1 by numerical VN value.
1117 if (arg0VN > arg1VN)
1119 jitstd::swap(arg0VN, arg1VN);
1122 VNDefFunc2Arg fstruct(func, arg0VN, arg1VN);
1123 if (GetVNFunc2Map()->Lookup(fstruct, &res))
1129 // We have ways of evaluating some binary functions.
1130 if (func < VNF_Boundary)
1132 if (typ != TYP_BYREF) // We don't want/need to optimize a zero byref
1134 ValueNum resultVN = NoVN;
1135 ValueNum ZeroVN, OneVN; // We may need to create one of these in the switch below.
1136 switch (genTreeOps(func))
// GT_ADD case:
1139 // This identity does not apply for floating point (when x == -0.0)
1140 if (!varTypeIsFloating(typ))
1142 // (x + 0) == (0 + x) => x
1143 ZeroVN = VNZeroForType(typ);
1144 if (arg0VN == ZeroVN)
1148 else if (arg1VN == ZeroVN)
// GT_SUB case: (x - 0) => x.
1157 ZeroVN = VNZeroForType(typ);
1158 if (arg1VN == ZeroVN)
// GT_MUL case:
1165 // (x * 1) == (1 * x) => x
1166 OneVN = VNOneForType(typ);
1169 if (arg0VN == OneVN)
1173 else if (arg1VN == OneVN)
1179 if (!varTypeIsFloating(typ))
1181 // (x * 0) == (0 * x) => 0 (unless x is NaN, which we must assume a fp value may be)
1182 ZeroVN = VNZeroForType(typ);
1183 if (arg0VN == ZeroVN)
1187 else if (arg1VN == ZeroVN)
// GT_DIV/GT_UDIV case: (x / 1) => x.
1197 OneVN = VNOneForType(typ);
1200 if (arg1VN == OneVN)
// GT_OR / GT_XOR case:
1209 // (x | 0) == (0 | x) => x
1210 // (x ^ 0) == (0 ^ x) => x
1211 ZeroVN = VNZeroForType(typ);
1212 if (arg0VN == ZeroVN)
1216 else if (arg1VN == ZeroVN)
// GT_AND case:
1223 // (x & 0) == (0 & x) => 0
1224 ZeroVN = VNZeroForType(typ);
1225 if (arg0VN == ZeroVN)
1229 else if (arg1VN == ZeroVN)
// Shift/rotate case (presumably — labels elided): shift by zero => x.
1244 ZeroVN = VNZeroForType(typ);
1245 if (arg1VN == ZeroVN)
// GT_EQ case:
1252 // (x == x) => true (unless x is NaN)
1253 if (!varTypeIsFloating(TypeOfVN(arg0VN)) && (arg0VN != NoVN) && (arg0VN == arg1VN))
1255 resultVN = VNOneForType(typ);
// (null == non-null) => false.
1257 if ((arg0VN == VNForNull() && IsKnownNonNull(arg1VN)) ||
1258 (arg1VN == VNForNull() && IsKnownNonNull(arg0VN)))
1260 resultVN = VNZeroForType(typ);
// GT_NE case:
1264 // (x != x) => false (unless x is NaN)
1265 if (!varTypeIsFloating(TypeOfVN(arg0VN)) && (arg0VN != NoVN) && (arg0VN == arg1VN))
1267 resultVN = VNZeroForType(typ);
// (null != non-null) => true.
1269 if ((arg0VN == VNForNull() && IsKnownNonNull(arg1VN)) ||
1270 (arg1VN == VNForNull() && IsKnownNonNull(arg0VN)))
1272 resultVN = VNOneForType(typ);
// Only use an identity result whose type matches the requested type.
1280 if ((resultVN != NoVN) && (TypeOfVN(resultVN) == typ))
1286 else // must be a VNF_ function
1288 if (func == VNF_CastClass)
1290 // In terms of values, a castclass always returns its second argument, the object being cast.
1291 // The IL operation may also throw an exception
1292 return VNWithExc(arg1VN, VNExcSetSingleton(VNForFunc(TYP_REF, VNF_InvalidCastExc, arg1VN, arg0VN)));
1296 // Otherwise, assign a new VN for the function application.
1297 Chunk* c = GetAllocChunk(typ, CEA_Func2);
1298 unsigned offsetWithinChunk = c->AllocVN();
1299 res = c->m_baseVN + offsetWithinChunk;
1300 reinterpret_cast<VNDefFunc2Arg*>(c->m_defs)[offsetWithinChunk] = fstruct;
1301 GetVNFunc2Map()->Set(fstruct, res);
1306 //------------------------------------------------------------------------------
1307 // VNForMapStore : Evaluate VNF_MapStore with the given arguments.
1312 // arg0VN - Map value number
1313 // arg1VN - Index value number
1314 // arg2VN - New value for map[index]
1317 // Value number for the result of the evaluation.
// Thin wrapper over the ternary VNForFunc plus debug tracing.
1319 ValueNum ValueNumStore::VNForMapStore(var_types typ, ValueNum arg0VN, ValueNum arg1VN, ValueNum arg2VN)
1321 ValueNum result = VNForFunc(typ, VNF_MapStore, arg0VN, arg1VN, arg2VN);
// Debug-only tracing.
1323 if (m_pComp->verbose)
1325 printf(" VNForMapStore(" STR_VN "%x, " STR_VN "%x, " STR_VN "%x):%s returns ", arg0VN, arg1VN, arg2VN,
1327 m_pComp->vnPrint(result, 1);
1334 //------------------------------------------------------------------------------
1335 // VNForMapSelect : Evaluate VNF_MapSelect with the given arguments.
1339 // vnk - Value number kind
1341 // arg0VN - Map value number
1342 // arg1VN - Index value number
1345 // Value number for the result of the evaluation.
1348 // This requires a "ValueNumKind" because it will attempt, given "select(phi(m1, ..., mk), ind)", to evaluate
1349 // "select(m1, ind)", ..., "select(mk, ind)" to see if they agree. It needs to know which kind of value number
1350 // (liberal/conservative) to read from the SSA def referenced in the phi argument.
// Entry point: delegates to VNForMapSelectWork with a fresh budget so the recursive
// phi exploration cannot blow up compile time.
1352 ValueNum ValueNumStore::VNForMapSelect(ValueNumKind vnk, var_types typ, ValueNum arg0VN, ValueNum arg1VN)
1354 int budget = m_mapSelectBudget;
1355 bool usedRecursiveVN = false;
1356 ValueNum result = VNForMapSelectWork(vnk, typ, arg0VN, arg1VN, &budget, &usedRecursiveVN);
1358 // The remaining budget should always be between [0..m_mapSelectBudget]
1359 assert((budget >= 0) && (budget <= m_mapSelectBudget));
// Debug-only tracing.
1362 if (m_pComp->verbose)
1364 printf(" VNForMapSelect(" STR_VN "%x, " STR_VN "%x):%s returns ", arg0VN, arg1VN, varTypeName(typ));
1365 m_pComp->vnPrint(result, 1);
1372 //------------------------------------------------------------------------------
1373 // VNForMapSelectWork : A method that does the work for VNForMapSelect and may call itself recursively.
1377 // vnk - Value number kind
1379 // arg0VN - Zeroth argument
1380 // arg1VN - First argument
1381 // pBudget - Remaining budget for the outer evaluation
1382 // pUsedRecursiveVN - Out-parameter that is set to true iff RecursiveVN was returned from this method
1383 // or from a method called during one of recursive invocations.
1386 // Value number for the result of the evaluation.
1389 // This requires a "ValueNumKind" because it will attempt, given "select(phi(m1, ..., mk), ind)", to evaluate
1390 // "select(m1, ind)", ..., "select(mk, ind)" to see if they agree. It needs to know which kind of value number
1391 // (liberal/conservative) to read from the SSA def referenced in the phi argument.
// NOTE(review): several closing braces, a "TailCall:" label, and some branch bodies
// are elided in this listing (the source line numbers jump).
1393 ValueNum ValueNumStore::VNForMapSelectWork(
1394 ValueNumKind vnk, var_types typ, ValueNum arg0VN, ValueNum arg1VN, int* pBudget, bool* pUsedRecursiveVN)
1397 // This label allows us to directly implement a tail call by setting up the arguments, and doing a goto to here.
1398 assert(arg0VN != NoVN && arg1VN != NoVN);
1399 assert(arg0VN == VNNormVal(arg0VN)); // Arguments carry no exceptions.
1400 assert(arg1VN == VNNormVal(arg1VN)); // Arguments carry no exceptions.
1402 *pUsedRecursiveVN = false;
1405 // Provide a mechanism for writing tests that ensure we don't call this ridiculously often.
1408 // This printing is sometimes useful in debugging.
1409 // if ((m_numMapSels % 1000) == 0) printf("%d VNF_MapSelect applications.\n", m_numMapSels);
1411 unsigned selLim = JitConfig.JitVNMapSelLimit();
1412 assert(selLim == 0 || m_numMapSels < selLim);
// Memo lookup: a previously computed select over the same (map, index) is reused.
1416 VNDefFunc2Arg fstruct(VNF_MapSelect, arg0VN, arg1VN);
1417 if (GetVNFunc2Map()->Lookup(fstruct, &res))
1424 // Give up if we've run out of budget.
1425 if (--(*pBudget) <= 0)
1427 // We have to use 'nullptr' for the basic block here, because subsequent expressions
1428 // in different blocks may find this result in the VNFunc2Map -- other expressions in
1429 // the IR may "evaluate" to this same VNForExpr, so it is not "unique" in the sense
1430 // that permits the BasicBlock attribution.
1431 res = VNForExpr(nullptr, typ);
1432 GetVNFunc2Map()->Set(fstruct, res);
1436 // If it's recursive, stop the recursion.
1437 if (SelectIsBeingEvaluatedRecursively(arg0VN, arg1VN))
1439 *pUsedRecursiveVN = true;
// select(ZeroMap, ind) is zero of the requested type.
1443 if (arg0VN == VNForZeroMap())
1445 return VNZeroForType(typ);
1447 else if (IsVNFunc(arg0VN))
1450 GetVNFunc(arg0VN, &funcApp);
1451 if (funcApp.m_func == VNF_MapStore)
1453 // select(store(m, i, v), i) == v
1454 if (funcApp.m_args[1] == arg1VN)
1456 #if FEATURE_VN_TRACE_APPLY_SELECTORS
1457 JITDUMP(" AX1: select([" STR_VN "%x]store(" STR_VN "%x, " STR_VN "%x, " STR_VN "%x), " STR_VN
1458 "%x) ==> " STR_VN "%x.\n",
1459 funcApp.m_args[0], arg0VN, funcApp.m_args[1], funcApp.m_args[2], arg1VN, funcApp.m_args[2]);
1461 return funcApp.m_args[2];
1463 // i # j ==> select(store(m, i, v), j) == select(m, j)
1464 // Currently the only source of distinctions is when both indices are constants.
1465 else if (IsVNConstant(arg1VN) && IsVNConstant(funcApp.m_args[1]))
1467 assert(funcApp.m_args[1] != arg1VN); // we already checked this above.
1468 #if FEATURE_VN_TRACE_APPLY_SELECTORS
1469 JITDUMP(" AX2: " STR_VN "%x != " STR_VN "%x ==> select([" STR_VN "%x]store(" STR_VN
1470 "%x, " STR_VN "%x, " STR_VN "%x), " STR_VN "%x) ==> select(" STR_VN "%x, " STR_VN "%x).\n",
1471 arg1VN, funcApp.m_args[1], arg0VN, funcApp.m_args[0], funcApp.m_args[1], funcApp.m_args[2],
1472 arg1VN, funcApp.m_args[0], arg1VN);
1474 // This is the equivalent of the recursive tail call:
1475 // return VNForMapSelect(vnk, typ, funcApp.m_args[0], arg1VN);
1476 // Make sure we capture any exceptions from the "i" and "v" of the store...
// Skip past the store and loop back to the TailCall label (goto elided in listing).
1477 arg0VN = funcApp.m_args[0];
1481 else if (funcApp.m_func == VNF_PhiDef || funcApp.m_func == VNF_PhiMemoryDef)
// Phi handling: if selecting from every phi argument yields the same VN, that VN is
// the result; otherwise a fresh function application is created below.
1483 unsigned lclNum = BAD_VAR_NUM;
1484 bool isMemory = false;
1485 VNFuncApp phiFuncApp;
1486 bool defArgIsFunc = false;
1487 if (funcApp.m_func == VNF_PhiDef)
1489 lclNum = unsigned(funcApp.m_args[0]);
1490 defArgIsFunc = GetVNFunc(funcApp.m_args[2], &phiFuncApp);
1494 assert(funcApp.m_func == VNF_PhiMemoryDef);
1496 defArgIsFunc = GetVNFunc(funcApp.m_args[1], &phiFuncApp);
1498 if (defArgIsFunc && phiFuncApp.m_func == VNF_Phi)
1500 // select(phi(m1, m2), x): if select(m1, x) == select(m2, x), return that, else new fresh.
1501 // Get the first argument of the phi.
1503 // We need to be careful about breaking infinite recursion. Record the outer select.
1504 m_fixedPointMapSels.Push(VNDefFunc2Arg(VNF_MapSelect, arg0VN, arg1VN));
// Phi args are SSA numbers encoded as constant VNs.
1506 assert(IsVNConstant(phiFuncApp.m_args[0]));
1507 unsigned phiArgSsaNum = ConstantValue<unsigned>(phiFuncApp.m_args[0]);
// Resolve the SSA def's VN, from memory SSA or the local's SSA table as appropriate.
1511 phiArgVN = m_pComp->GetMemoryPerSsaData(phiArgSsaNum)->m_vnPair.Get(vnk);
1515 phiArgVN = m_pComp->lvaTable[lclNum].GetPerSsaData(phiArgSsaNum)->m_vnPair.Get(vnk);
1517 if (phiArgVN != ValueNumStore::NoVN)
1519 bool allSame = true;
1520 ValueNum argRest = phiFuncApp.m_args[1];
1521 ValueNum sameSelResult =
1522 VNForMapSelectWork(vnk, typ, phiArgVN, arg1VN, pBudget, pUsedRecursiveVN);
1524 // It is possible that we just now exceeded our budget, if so we need to force an early exit
1525 // and stop calling VNForMapSelectWork
1528 // We don't have any budget remaining to verify that all phiArgs are the same
1529 // so setup the default failure case now.
// Walk the (possibly nested) Phi list, selecting from each remaining argument.
1533 while (allSame && argRest != ValueNumStore::NoVN)
1535 ValueNum cur = argRest;
1536 VNFuncApp phiArgFuncApp;
1537 if (GetVNFunc(argRest, &phiArgFuncApp) && phiArgFuncApp.m_func == VNF_Phi)
1539 cur = phiArgFuncApp.m_args[0];
1540 argRest = phiArgFuncApp.m_args[1];
1544 argRest = ValueNumStore::NoVN; // Cause the loop to terminate.
1546 assert(IsVNConstant(cur));
1547 phiArgSsaNum = ConstantValue<unsigned>(cur);
1550 phiArgVN = m_pComp->GetMemoryPerSsaData(phiArgSsaNum)->m_vnPair.Get(vnk);
1554 phiArgVN = m_pComp->lvaTable[lclNum].GetPerSsaData(phiArgSsaNum)->m_vnPair.Get(vnk);
1556 if (phiArgVN == ValueNumStore::NoVN)
// Recurse with a local usedRecursiveVN so a recursive marker from one arg doesn't
// hide agreement among the others; fold it into the caller's flag afterwards.
1562 bool usedRecursiveVN = false;
1563 ValueNum curResult =
1564 VNForMapSelectWork(vnk, typ, phiArgVN, arg1VN, pBudget, &usedRecursiveVN);
1565 *pUsedRecursiveVN |= usedRecursiveVN;
1566 if (sameSelResult == ValueNumStore::RecursiveVN)
1568 sameSelResult = curResult;
1570 if (curResult != ValueNumStore::RecursiveVN && curResult != sameSelResult)
// (allSame = false here; the assignment is elided in this listing.)
1576 if (allSame && sameSelResult != ValueNumStore::RecursiveVN)
1578 // Make sure we're popping what we pushed.
1579 assert(FixedPointMapSelsTopHasValue(arg0VN, arg1VN));
1580 m_fixedPointMapSels.Pop();
1582 // To avoid exponential searches, we make sure that this result is memo-ized.
1583 // The result is always valid for memoization if we didn't rely on RecursiveVN to get it.
1584 // If RecursiveVN was used, we are processing a loop and we can't memo-ize this intermediate
1585 // result if, e.g., this block is in a multi-entry loop.
1586 if (!*pUsedRecursiveVN)
1588 GetVNFunc2Map()->Set(fstruct, sameSelResult);
1591 return sameSelResult;
1593 // Otherwise, fall through to creating the select(phi(m1, m2), x) function application.
1595 // Make sure we're popping what we pushed.
1596 assert(FixedPointMapSelsTopHasValue(arg0VN, arg1VN));
1597 m_fixedPointMapSels.Pop();
1602 // Otherwise, assign a new VN for the function application.
1603 Chunk* c = GetAllocChunk(typ, CEA_Func2);
1604 unsigned offsetWithinChunk = c->AllocVN();
1605 res = c->m_baseVN + offsetWithinChunk;
1606 reinterpret_cast<VNDefFunc2Arg*>(c->m_defs)[offsetWithinChunk] = fstruct;
1607 GetVNFunc2Map()->Set(fstruct, res);
// Constant-fold a unary VNFunc applied to a single constant operand VN, dispatching on the
// operand's type. A unary op on a handle constant produces a handle VN (handle-ness is
// preserved). For a TYP_REF operand the only legal constant is null and the only legal func
// is GT_ARR_LENGTH, which folds to void plus a NullPtrExc exception set.
1612 ValueNum ValueNumStore::EvalFuncForConstantArgs(var_types typ, VNFunc func, ValueNum arg0VN)
1614 assert(CanEvalForConstantArgs(func));
1615 assert(IsVNConstant(arg0VN));
1616 switch (TypeOfVN(arg0VN))
// 32-bit integer operand.
1620 int resVal = EvalOp(func, ConstantValue<int>(arg0VN));
1621 // Unary op on a handle results in a handle.
1622 return IsVNHandle(arg0VN) ? VNForHandle(ssize_t(resVal), GetHandleFlags(arg0VN)) : VNForIntCon(resVal);
// 64-bit integer operand.
1626 INT64 resVal = EvalOp(func, ConstantValue<INT64>(arg0VN));
1627 // Unary op on a handle results in a handle.
1628 return IsVNHandle(arg0VN) ? VNForHandle(ssize_t(resVal), GetHandleFlags(arg0VN)) : VNForLongCon(resVal);
// Floating-point operands fold directly to a new FP constant VN.
1631 return VNForFloatCon(EvalOp(func, ConstantValue<float>(arg0VN)));
1633 return VNForDoubleCon(EvalOp(func, ConstantValue<double>(arg0VN)));
1635 // If arg0 has a possible exception, it wouldn't have been constant.
1636 assert(!VNHasExc(arg0VN));
1638 assert(arg0VN == VNForNull()); // Only other REF constant.
1639 assert(func == VNFunc(GT_ARR_LENGTH)); // Only function we can apply to a REF constant!
// Array length of null: result is void with a null-pointer exception attached.
1640 return VNWithExc(VNForVoid(), VNExcSetSingleton(VNForFunc(TYP_REF, VNF_NullPtrExc, VNForNull())));
// Returns true iff a MapSelect of (map, ind) is already on the fixed-point evaluation stack
// (m_fixedPointMapSels) — i.e. the same select is currently being evaluated higher up the
// recursion, which is how infinite recursion through phi cycles is detected.
1646 bool ValueNumStore::SelectIsBeingEvaluatedRecursively(ValueNum map, ValueNum ind)
1648 for (unsigned i = 0; i < m_fixedPointMapSels.Size(); i++)
1650 VNDefFunc2Arg& elem = m_fixedPointMapSels.GetRef(i);
1651 assert(elem.m_func == VNF_MapSelect); // Only MapSelects are ever pushed on this stack.
1652 if (elem.m_arg0 == map && elem.m_arg1 == ind)
// Debug helper: checks that the top of the fixed-point MapSelect stack is exactly
// VNF_MapSelect(map, index). Used by asserts to verify push/pop pairing.
1661 bool ValueNumStore::FixedPointMapSelsTopHasValue(ValueNum map, ValueNum index)
1663 if (m_fixedPointMapSels.Size() == 0)
1667 VNDefFunc2Arg& top = m_fixedPointMapSels.TopRef();
1668 return top.m_func == VNF_MapSelect && top.m_arg0 == map && top.m_arg1 == index;
1672 // Given an integer constant value number return its value as an int.
// On 32-bit targets, REF/BYREF constants (stored as size_t) are also accepted
// and truncated to int.
1674 int ValueNumStore::GetConstantInt32(ValueNum argVN)
1676 assert(IsVNConstant(argVN));
1677 var_types argVNtyp = TypeOfVN(argVN);
1684 result = ConstantValue<int>(argVN);
1686 #ifndef _TARGET_64BIT_
1689 result = (int)ConstantValue<size_t>(argVN);
1698 // Given an integer constant value number return its value as an INT64.
// Accepts TYP_INT (sign-extended), TYP_LONG, and ref/byref constants (stored as size_t).
1700 INT64 ValueNumStore::GetConstantInt64(ValueNum argVN)
1702 assert(IsVNConstant(argVN));
1703 var_types argVNtyp = TypeOfVN(argVN);
1710 result = (INT64)ConstantValue<int>(argVN);
1713 result = ConstantValue<INT64>(argVN);
1717 result = (INT64)ConstantValue<size_t>(argVN);
1725 // Given a double constant value number return its value as a double.
// The VN must be a TYP_DOUBLE constant.
1727 double ValueNumStore::GetConstantDouble(ValueNum argVN)
1729 assert(IsVNConstant(argVN));
1730 assert(TypeOfVN(argVN) == TYP_DOUBLE);
1732 return ConstantValue<double>(argVN);
1735 // Given a float constant value number return its value as a float.
// The VN must be a TYP_FLOAT constant.
1737 float ValueNumStore::GetConstantSingle(ValueNum argVN)
1739 assert(IsVNConstant(argVN));
1740 assert(TypeOfVN(argVN) == TYP_FLOAT);
1742 return ConstantValue<float>(argVN);
1745 // Compute the proper value number when the VNFunc has all constant arguments
1746 // This essentially performs constant folding at value numbering time
// Casts are delegated to EvalCastForConstantArgs; all-floating operands are delegated to
// EvalFuncForConstantFPArgs. The remainder handles integer/ref/byref combinations, keeping
// handle-ness on the result when either operand is a handle, and threading any exception
// set produced by EvalOp (e.g. divide overflow) onto the result VN.
1748 ValueNum ValueNumStore::EvalFuncForConstantArgs(var_types typ, VNFunc func, ValueNum arg0VN, ValueNum arg1VN)
1750 assert(CanEvalForConstantArgs(func));
1751 assert(IsVNConstant(arg0VN) && IsVNConstant(arg1VN));
1752 assert(!VNHasExc(arg0VN) && !VNHasExc(arg1VN)); // Otherwise, would not be constant.
1754 // if our func is the VNF_Cast operation we handle it first
1755 if (func == VNF_Cast)
1757 return EvalCastForConstantArgs(typ, func, arg0VN, arg1VN);
1760 var_types arg0VNtyp = TypeOfVN(arg0VN);
1761 var_types arg1VNtyp = TypeOfVN(arg1VN);
1763 // When both arguments are floating point types
1764 // We defer to the EvalFuncForConstantFPArgs()
1765 if (varTypeIsFloating(arg0VNtyp) && varTypeIsFloating(arg1VNtyp))
1767 return EvalFuncForConstantFPArgs(typ, func, arg0VN, arg1VN);
1770 // after this we shouldn't have to deal with floating point types for arg0VN or arg1VN
1771 assert(!varTypeIsFloating(arg0VNtyp));
1772 assert(!varTypeIsFloating(arg1VNtyp));
1774 // Stack-normalize the result type.
1775 if (varTypeIsSmall(typ))
1780 ValueNum result; // left uninitialized, we are required to initialize it on all paths below.
1781 ValueNum excSet = VNForEmptyExcSet();
1783 // Are both args of the same type?
1784 if (arg0VNtyp == arg1VNtyp)
1786 if (arg0VNtyp == TYP_INT)
1788 int arg0Val = ConstantValue<int>(arg0VN);
1789 int arg1Val = ConstantValue<int>(arg1VN);
1791 assert(typ == TYP_INT);
1792 int resultVal = EvalOp(func, arg0Val, arg1Val, &excSet);
1793 // Bin op on a handle results in a handle.
1794 ValueNum handleVN = IsVNHandle(arg0VN) ? arg0VN : IsVNHandle(arg1VN) ? arg1VN : NoVN;
1795 ValueNum resultVN = (handleVN != NoVN)
1796 ? VNForHandle(ssize_t(resultVal), GetHandleFlags(handleVN)) // Use VN for Handle
1797 : VNForIntCon(resultVal);
1798 result = VNWithExc(resultVN, excSet);
1800 else if (arg0VNtyp == TYP_LONG)
1802 INT64 arg0Val = ConstantValue<INT64>(arg0VN);
1803 INT64 arg1Val = ConstantValue<INT64>(arg1VN);
// Comparisons of longs produce an INT (0/1) result, not a LONG.
1805 if (VNFuncIsComparison(func))
1807 assert(typ == TYP_INT);
1808 result = VNForIntCon(EvalComparison(func, arg0Val, arg1Val));
1812 assert(typ == TYP_LONG);
1813 INT64 resultVal = EvalOp(func, arg0Val, arg1Val, &excSet);
1814 ValueNum handleVN = IsVNHandle(arg0VN) ? arg0VN : IsVNHandle(arg1VN) ? arg1VN : NoVN;
1815 ValueNum resultVN = (handleVN != NoVN)
1816 ? VNForHandle(ssize_t(resultVal), GetHandleFlags(handleVN)) // Use VN for Handle
1817 : VNForLongCon(resultVal);
1818 result = VNWithExc(resultVN, excSet);
1821 else // both args are TYP_REF or both args are TYP_BYREF
1823 INT64 arg0Val = ConstantValue<size_t>(arg0VN); // We represent ref/byref constants as size_t's.
1824 INT64 arg1Val = ConstantValue<size_t>(arg1VN); // Also we consider null to be zero.
1826 if (VNFuncIsComparison(func))
1828 assert(typ == TYP_INT);
1829 result = VNForIntCon(EvalComparison(func, arg0Val, arg1Val));
1831 else if (typ == TYP_INT) // We could see GT_OR of a constant ByRef and Null
1833 int resultVal = (int)EvalOp(func, arg0Val, arg1Val, &excSet);
1834 result = VNWithExc(VNForIntCon(resultVal), excSet);
1836 else // We could see GT_OR of a constant ByRef and Null
1838 assert((typ == TYP_BYREF) || (typ == TYP_LONG));
1839 INT64 resultVal = EvalOp(func, arg0Val, arg1Val, &excSet);
1840 result = VNWithExc(VNForByrefCon(resultVal), excSet);
1844 else // We have args of different types
1846 // We represent ref/byref constants as size_t's.
1847 // Also we consider null to be zero.
// Widen both operands to INT64 so mixed int/long/ref/byref combinations fold uniformly.
1849 INT64 arg0Val = GetConstantInt64(arg0VN);
1850 INT64 arg1Val = GetConstantInt64(arg1VN);
1852 if (VNFuncIsComparison(func))
1854 assert(typ == TYP_INT);
1855 result = VNForIntCon(EvalComparison(func, arg0Val, arg1Val));
1857 else if (typ == TYP_INT) // We could see GT_OR of an int and constant ByRef or Null
1859 int resultVal = (int)EvalOp(func, arg0Val, arg1Val, &excSet);
1860 result = VNWithExc(VNForIntCon(resultVal), excSet);
1864 assert(typ != TYP_INT);
1865 ValueNum resultValx = VNForEmptyExcSet();
1866 INT64 resultVal = EvalOp(func, arg0Val, arg1Val, &resultValx);
1868 // check for the Exception case
1869 if (resultValx != VNForEmptyExcSet())
// The op raised an exception (e.g. overflow): result is void with the exception set.
1871 result = VNWithExc(VNForVoid(), resultValx);
1878 result = VNForByrefCon(resultVal);
1881 result = VNForLongCon(resultVal);
1884 assert(resultVal == 0); // Only valid REF constant
1885 result = VNForNull();
1897 // Compute the proper value number when the VNFunc has all constant floating-point arguments
1898 // This essentially must perform constant folding at value numbering time
// Both operands must be floating point and of the same type. Comparisons yield an INT (0/1)
// constant VN; arithmetic yields a constant of the same FP type as the operands.
1900 ValueNum ValueNumStore::EvalFuncForConstantFPArgs(var_types typ, VNFunc func, ValueNum arg0VN, ValueNum arg1VN)
1902 assert(CanEvalForConstantArgs(func));
1903 assert(IsVNConstant(arg0VN) && IsVNConstant(arg1VN));
1905 // We expect both argument types to be floating-point types
1906 var_types arg0VNtyp = TypeOfVN(arg0VN);
1907 var_types arg1VNtyp = TypeOfVN(arg1VN);
1909 assert(varTypeIsFloating(arg0VNtyp));
1910 assert(varTypeIsFloating(arg1VNtyp));
1912 // We also expect both arguments to be of the same floating-point type
1913 assert(arg0VNtyp == arg1VNtyp);
1915 ValueNum result; // left uninitialized, we are required to initialize it on all paths below.
1917 if (VNFuncIsComparison(func))
1919 assert(genActualType(typ) == TYP_INT);
1921 if (arg0VNtyp == TYP_FLOAT)
1923 result = VNForIntCon(EvalComparison(func, GetConstantSingle(arg0VN), GetConstantSingle(arg1VN)));
1927 assert(arg0VNtyp == TYP_DOUBLE);
1928 result = VNForIntCon(EvalComparison(func, GetConstantDouble(arg0VN), GetConstantDouble(arg1VN)));
1933 // We expect the return type to be the same as the argument type
1934 assert(varTypeIsFloating(typ));
1935 assert(arg0VNtyp == typ);
1937 ValueNum exception = VNForEmptyExcSet();
1939 if (typ == TYP_FLOAT)
1941 float floatResultVal = EvalOp(func, GetConstantSingle(arg0VN), GetConstantSingle(arg1VN), &exception);
1942 assert(exception == VNForEmptyExcSet()); // Floating point ops don't throw.
1943 result = VNForFloatCon(floatResultVal);
1947 assert(typ == TYP_DOUBLE);
1949 double doubleResultVal = EvalOp(func, GetConstantDouble(arg0VN), GetConstantDouble(arg1VN), &exception);
1950 assert(exception == VNForEmptyExcSet()); // Floating point ops don't throw.
1951 result = VNForDoubleCon(doubleResultVal);
1958 // Compute the proper value number for a VNF_Cast with constant arguments
1959 // This essentially must perform constant folding at value numbering time
// arg0VN is the constant being cast; arg1VN encodes the cast (castToType plus an
// unsigned-source flag packed by vnForCastOper: low bit VCA_UnsignedSrc, remaining
// bits the target var_types). The big switch dispatches on the *source* type.
1961 ValueNum ValueNumStore::EvalCastForConstantArgs(var_types typ, VNFunc func, ValueNum arg0VN, ValueNum arg1VN)
1963 assert(func == VNF_Cast);
1964 assert(IsVNConstant(arg0VN) && IsVNConstant(arg1VN));
1966 // Stack-normalize the result type.
1967 if (varTypeIsSmall(typ))
1972 var_types arg0VNtyp = TypeOfVN(arg0VN);
1973 var_types arg1VNtyp = TypeOfVN(arg1VN);
1975 // arg1VN is really the gtCastType that we are casting to
1976 assert(arg1VNtyp == TYP_INT);
1977 int arg1Val = ConstantValue<int>(arg1VN);
1978 assert(arg1Val >= 0);
1980 if (IsVNHandle(arg0VN))
1982 // We don't allow handles to be cast to random var_types.
1983 assert(typ == TYP_I_IMPL);
1986 // We previously encoded the castToType operation using vnForCastOper()
// Unpack the encoded cast: unsigned-source flag plus the target type.
1988 bool srcIsUnsigned = ((arg1Val & INT32(VCA_UnsignedSrc)) != 0);
1989 var_types castToType = var_types(arg1Val >> INT32(VCA_BitCount));
1991 var_types castFromType = arg0VNtyp;
1993 switch (castFromType) // GT_CAST source type
1995 #ifndef _TARGET_64BIT_
// --- Source is a 32-bit integer (incl. ref/byref on 32-bit targets) ---
2001 int arg0Val = GetConstantInt32(arg0VN);
2006 assert(typ == TYP_INT);
2007 return VNForIntCon(INT8(arg0Val));
2010 assert(typ == TYP_INT);
2011 return VNForIntCon(UINT8(arg0Val));
2013 assert(typ == TYP_INT);
2014 return VNForIntCon(INT16(arg0Val));
2016 assert(typ == TYP_INT);
2017 return VNForIntCon(UINT16(arg0Val));
2020 assert(typ == TYP_INT);
2024 assert(!IsVNHandle(arg0VN));
2025 #ifdef _TARGET_64BIT_
2026 if (typ == TYP_LONG)
// srcIsUnsigned selects zero-extension vs. sign-extension to 64 bits.
2030 return VNForLongCon(INT64(unsigned(arg0Val)));
2034 return VNForLongCon(INT64(arg0Val));
2039 assert(typ == TYP_BYREF);
2042 return VNForByrefCon(INT64(unsigned(arg0Val)));
2046 return VNForByrefCon(INT64(arg0Val));
2049 #else // TARGET_32BIT
2051 return VNForLongCon(INT64(unsigned(arg0Val)));
2053 return VNForLongCon(INT64(arg0Val));
2056 assert(typ == TYP_BYREF);
2057 return VNForByrefCon((INT64)arg0Val);
2059 assert(typ == TYP_FLOAT);
2062 return VNForFloatCon(float(unsigned(arg0Val)));
2066 return VNForFloatCon(float(arg0Val));
2069 assert(typ == TYP_DOUBLE);
2072 return VNForDoubleCon(double(unsigned(arg0Val)));
2076 return VNForDoubleCon(double(arg0Val));
2084 #ifdef _TARGET_64BIT_
// --- Source is a 64-bit integer (incl. ref/byref on 64-bit targets) ---
2089 INT64 arg0Val = GetConstantInt64(arg0VN);
2094 assert(typ == TYP_INT);
2095 return VNForIntCon(INT8(arg0Val));
2098 assert(typ == TYP_INT);
2099 return VNForIntCon(UINT8(arg0Val));
2101 assert(typ == TYP_INT);
2102 return VNForIntCon(INT16(arg0Val));
2104 assert(typ == TYP_INT);
2105 return VNForIntCon(UINT16(arg0Val));
2107 assert(typ == TYP_INT);
2108 return VNForIntCon(INT32(arg0Val));
2110 assert(typ == TYP_INT);
2111 return VNForIntCon(UINT32(arg0Val));
2114 assert(typ == TYP_LONG);
2117 assert(typ == TYP_BYREF);
2118 return VNForByrefCon((INT64)arg0Val);
2120 assert(typ == TYP_FLOAT);
// Unsigned 64-bit -> float needs the helper to round correctly for values > INT64_MAX.
2123 return VNForFloatCon(FloatingPointUtils::convertUInt64ToFloat(UINT64(arg0Val)));
2127 return VNForFloatCon(float(arg0Val));
2130 assert(typ == TYP_DOUBLE);
2133 return VNForDoubleCon(FloatingPointUtils::convertUInt64ToDouble(UINT64(arg0Val)));
2137 return VNForDoubleCon(double(arg0Val));
// --- Source is TYP_FLOAT ---
2145 float arg0Val = GetConstantSingle(arg0VN);
2150 assert(typ == TYP_INT);
2151 return VNForIntCon(INT8(arg0Val));
2154 assert(typ == TYP_INT);
2155 return VNForIntCon(UINT8(arg0Val));
2157 assert(typ == TYP_INT);
2158 return VNForIntCon(INT16(arg0Val));
2160 assert(typ == TYP_INT);
2161 return VNForIntCon(UINT16(arg0Val));
2163 assert(typ == TYP_INT);
2164 return VNForIntCon(INT32(arg0Val));
2166 assert(typ == TYP_INT);
2167 return VNForIntCon(UINT32(arg0Val));
2169 assert(typ == TYP_LONG);
2170 return VNForLongCon(INT64(arg0Val));
2172 assert(typ == TYP_LONG);
2173 return VNForLongCon(UINT64(arg0Val));
2175 assert(typ == TYP_FLOAT);
2176 return VNForFloatCon(arg0Val);
2178 assert(typ == TYP_DOUBLE);
2179 return VNForDoubleCon(double(arg0Val));
// --- Source is TYP_DOUBLE ---
2186 double arg0Val = GetConstantDouble(arg0VN);
2191 assert(typ == TYP_INT);
2192 return VNForIntCon(INT8(arg0Val));
2195 assert(typ == TYP_INT);
2196 return VNForIntCon(UINT8(arg0Val));
2198 assert(typ == TYP_INT);
2199 return VNForIntCon(INT16(arg0Val));
2201 assert(typ == TYP_INT);
2202 return VNForIntCon(UINT16(arg0Val));
2204 assert(typ == TYP_INT);
2205 return VNForIntCon(INT32(arg0Val));
2207 assert(typ == TYP_INT);
2208 return VNForIntCon(UINT32(arg0Val));
2210 assert(typ == TYP_LONG);
2211 return VNForLongCon(INT64(arg0Val));
2213 assert(typ == TYP_LONG);
2214 return VNForLongCon(UINT64(arg0Val));
2216 assert(typ == TYP_FLOAT);
2217 return VNForFloatCon(float(arg0Val));
2219 assert(typ == TYP_DOUBLE);
2220 return VNForDoubleCon(arg0Val);
// Returns whether this VNFunc can be constant-folded when all of its arguments are
// constant VNs. genTreeOps-valued funcs are foldable by default with a few listed
// exceptions; above VNF_Boundary only a small explicit set (e.g. VNF_Cast) is foldable.
2230 bool ValueNumStore::CanEvalForConstantArgs(VNFunc vnf)
2232 if (vnf < VNF_Boundary)
2234 // We'll refine this as we get counterexamples. But to
2235 // a first approximation, VNFuncs that are genTreeOps should
2236 // be things we can evaluate.
2237 genTreeOps oper = genTreeOps(vnf);
2238 // Some exceptions...
2241 case GT_MKREFANY: // We can't evaluate these.
// GT_MULHI should have been rewritten by lowering before value numbering sees it.
2248 assert(false && "Unexpected GT_MULHI node encountered before lowering");
2256 // some VNF_ that we can evaluate
2259 case VNF_Cast: // We can evaluate these.
2261 case VNF_ObjGetType:
// Returns the number of arguments the given VNFunc takes, decoded from the
// static per-func attribute table.
2269 unsigned ValueNumStore::VNFuncArity(VNFunc vnf)
2271 // Read the bit field out of the table...
2272 return (s_vnfOpAttribs[vnf] & VNFOA_ArityMask) >> VNFOA_ArityShift;
// IsOverflowIntDiv: detects the one two's-complement division that overflows,
// MIN_VALUE / -1, for 32- and 64-bit signed operands. The generic template
// (body not visible in this view) presumably covers the remaining types — TODO confirm.
2276 bool ValueNumStore::IsOverflowIntDiv(int v0, int v1)
2278 return (v1 == -1) && (v0 == INT32_MIN);
2281 bool ValueNumStore::IsOverflowIntDiv(INT64 v0, INT64 v1)
2283 return (v1 == -1) && (v0 == INT64_MIN);
2285 template <typename T>
2286 bool ValueNumStore::IsOverflowIntDiv(T v0, T v1)
// IsIntZero: zero tests used to guard integer division by zero; one overload per
// integral width/signedness plus a generic template (bodies elided in this view).
2292 bool ValueNumStore::IsIntZero(int v)
2297 bool ValueNumStore::IsIntZero(unsigned v)
2302 bool ValueNumStore::IsIntZero(INT64 v)
2307 bool ValueNumStore::IsIntZero(UINT64 v)
2311 template <typename T>
2312 bool ValueNumStore::IsIntZero(T v)
// EvalOpIntegral specializations for floating-point types. Unary integral ops on
// float/double are never expected — they assert. The binary forms support only the
// modulo operation (fmodf/fmod); anything else asserts.
2318 float ValueNumStore::EvalOpIntegral<float>(VNFunc vnf, float v0)
2320 assert(!"EvalOpIntegral<float>");
2325 double ValueNumStore::EvalOpIntegral<double>(VNFunc vnf, double v0)
2327 assert(!"EvalOpIntegral<double>");
2332 float ValueNumStore::EvalOpIntegral<float>(VNFunc vnf, float v0, float v1, ValueNum* pExcSet)
2334 genTreeOps oper = genTreeOps(vnf);
2338 return fmodf(v0, v1);
2340 // For any other values of 'oper', we will assert and return 0.0f
2343 assert(!"EvalOpIntegral<float> with pExcSet");
2348 double ValueNumStore::EvalOpIntegral<double>(VNFunc vnf, double v0, double v1, ValueNum* pExcSet)
2350 genTreeOps oper = genTreeOps(vnf);
2354 return fmod(v0, v1);
2356 // For any other value of 'oper', we will assert and return 0.0
2359 assert(!"EvalOpIntegral<double> with pExcSet");
// Returns the (hash-consed) VN for a 3-argument function application: looks up
// (func, arg0, arg1, arg2) in the Func3 map and allocates a new VN on a miss.
2363 ValueNum ValueNumStore::VNForFunc(var_types typ, VNFunc func, ValueNum arg0VN, ValueNum arg1VN, ValueNum arg2VN)
2365 assert(arg0VN != NoVN);
2366 assert(arg1VN != NoVN);
2367 assert(arg2VN != NoVN);
2368 assert(VNFuncArity(func) == 3);
2370 // Function arguments carry no exceptions.
2371 CLANG_FORMAT_COMMENT_ANCHOR;
2374 if (func != VNF_PhiDef)
2376 // For a phi definition first and second argument are "plain" local/ssa numbers.
2377 // (I don't know if having such non-VN arguments to a VN function is a good idea -- if we wanted to declare
2378 // ValueNum to be "short" it would be a problem, for example. But we'll leave it for now, with these explicit
2380 assert(arg0VN == VNNormVal(arg0VN));
2381 assert(arg1VN == VNNormVal(arg1VN));
2383 assert(arg2VN == VNNormVal(arg2VN));
2386 assert(VNFuncArity(func) == 3);
2389 VNDefFunc3Arg fstruct(func, arg0VN, arg1VN, arg2VN);
2390 if (GetVNFunc3Map()->Lookup(fstruct, &res))
// Cache miss: allocate a fresh VN from a Func3 chunk, record the definition, memoize.
2396 Chunk* c = GetAllocChunk(typ, CEA_Func3);
2397 unsigned offsetWithinChunk = c->AllocVN();
2398 res = c->m_baseVN + offsetWithinChunk;
2399 reinterpret_cast<VNDefFunc3Arg*>(c->m_defs)[offsetWithinChunk] = fstruct;
2400 GetVNFunc3Map()->Set(fstruct, res);
// Returns the (hash-consed) VN for a 4-argument function application: looks up the
// tuple in the Func4 map and allocates a new VN on a miss. All arguments must be
// exception-free ("normal") VNs.
2405 ValueNum ValueNumStore::VNForFunc(
2406 var_types typ, VNFunc func, ValueNum arg0VN, ValueNum arg1VN, ValueNum arg2VN, ValueNum arg3VN)
2408 assert(arg0VN != NoVN && arg1VN != NoVN && arg2VN != NoVN && arg3VN != NoVN);
2409 // Function arguments carry no exceptions.
2410 assert(arg0VN == VNNormVal(arg0VN));
2411 assert(arg1VN == VNNormVal(arg1VN));
2412 assert(arg2VN == VNNormVal(arg2VN));
2413 assert(arg3VN == VNNormVal(arg3VN));
2414 assert(VNFuncArity(func) == 4);
2417 VNDefFunc4Arg fstruct(func, arg0VN, arg1VN, arg2VN, arg3VN);
2418 if (GetVNFunc4Map()->Lookup(fstruct, &res))
// Cache miss: allocate a fresh VN from a Func4 chunk, record the definition, memoize.
2424 Chunk* c = GetAllocChunk(typ, CEA_Func4);
2425 unsigned offsetWithinChunk = c->AllocVN();
2426 res = c->m_baseVN + offsetWithinChunk;
2427 reinterpret_cast<VNDefFunc4Arg*>(c->m_defs)[offsetWithinChunk] = fstruct;
2428 GetVNFunc4Map()->Set(fstruct, res);
2433 //------------------------------------------------------------------------
2434 // VNForExpr: Opaque value number that is equivalent to itself but unique
2435 // from all other value numbers.
2438 // block - BasicBlock where the expression that produces this value occurs.
2439 // May be nullptr to force conservative "could be anywhere" interpretation.
2440 // typ - Type of the expression in the IR
2443 // A new value number distinct from any previously generated, that compares as equal
2444 // to itself, but not any other value number, and is annotated with the given
2447 ValueNum ValueNumStore::VNForExpr(BasicBlock* block, var_types typ)
2449 BasicBlock::loopNumber loopNum;
2450 if (block == nullptr)
// No block: tag with MAX_LOOP_NUM so the VN is treated as loop-unanchored.
2452 loopNum = MAX_LOOP_NUM;
2456 loopNum = block->bbNatLoopNum;
2459 // We always allocate a new, unique VN in this call.
2460 // The 'typ' is used to partition the allocation of VNs into different chunks.
2461 Chunk* c = GetAllocChunk(typ, CEA_None, loopNum);
2462 unsigned offsetWithinChunk = c->AllocVN();
2463 ValueNum result = c->m_baseVN + offsetWithinChunk;
// Applies a chain of field selectors to a map VN: for each real field in fieldSeq,
// performs a MapSelect keyed by the field-handle VN, recursing down m_next. Pseudo-fields
// ("FirstElem"/"ConstantIndex") are skipped. On return, *wbFinalStructSize (if non-null)
// receives the size of the last selected field when it is a struct.
2467 ValueNum ValueNumStore::VNApplySelectors(ValueNumKind vnk,
2469 FieldSeqNode* fieldSeq,
2470 size_t* wbFinalStructSize)
2472 if (fieldSeq == nullptr)
2478 assert(fieldSeq != FieldSeqStore::NotAField());
2480 // Skip any "FirstElem" pseudo-fields or any "ConstantIndex" pseudo-fields
2481 if (fieldSeq->IsPseudoField())
2483 return VNApplySelectors(vnk, map, fieldSeq->m_next, wbFinalStructSize);
2486 // Otherwise, is a real field handle.
2487 CORINFO_FIELD_HANDLE fldHnd = fieldSeq->m_fieldHnd;
2488 CORINFO_CLASS_HANDLE structHnd = NO_CLASS_HANDLE;
2489 ValueNum fldHndVN = VNForHandle(ssize_t(fldHnd), GTF_ICON_FIELD_HDL);
2490 noway_assert(fldHnd != nullptr);
2491 CorInfoType fieldCit = m_pComp->info.compCompHnd->getFieldType(fldHnd, &structHnd);
2492 var_types fieldType = JITtype2varType(fieldCit);
2494 size_t structSize = 0;
2495 if (varTypeIsStruct(fieldType))
2497 structSize = m_pComp->info.compCompHnd->getClassSize(structHnd);
2498 // We do not normalize the type field accesses during importation unless they
2499 // are used in a call, return or assignment.
2500 if ((fieldType == TYP_STRUCT) && (structSize <= m_pComp->largestEnregisterableStructSize()))
2502 fieldType = m_pComp->impNormStructType(structHnd);
2505 if (wbFinalStructSize != nullptr)
2507 *wbFinalStructSize = structSize;
// Debug-only dump of the field handle/type being selected.
2511 if (m_pComp->verbose)
2513 printf(" VNApplySelectors:\n");
2514 const char* modName;
2515 const char* fldName = m_pComp->eeGetFieldName(fldHnd, &modName);
2516 printf(" VNForHandle(Fseq[%s]) is " STR_VN "%x, fieldType is %s", fldName, fldHndVN,
2517 varTypeName(fieldType));
2518 if (varTypeIsStruct(fieldType))
2520 printf(", size = %d", structSize);
2526 if (fieldSeq->m_next != nullptr)
// More selectors remain: select this field's sub-map and recurse.
2528 ValueNum newMap = VNForMapSelect(vnk, fieldType, map, fldHndVN);
2529 return VNApplySelectors(vnk, newMap, fieldSeq->m_next, wbFinalStructSize);
2531 else // end of fieldSeq
2533 return VNForMapSelect(vnk, fieldType, map, fldHndVN);
// Reconciles the type of a value read out of a map ('elem', of type elemTyp) with the
// type of the reading indirection ('indType'). Compatible mismatches are bridged with a
// VNForCast; incompatible ones (reading past the end, or struct-typed reads) give up and
// return a fresh opaque VN.
2538 ValueNum ValueNumStore::VNApplySelectorsTypeCheck(ValueNum elem, var_types indType, size_t elemStructSize)
2540 var_types elemTyp = TypeOfVN(elem);
2542 // Check if the elemTyp is matching/compatible
2544 if (indType != elemTyp)
2546 bool isConstant = IsVNConstant(elem);
2547 if (isConstant && (elemTyp == genActualType(indType)))
2549 // (i.e. We recorded a constant of TYP_INT for a TYP_BYTE field)
2553 // We are trying to read from an 'elem' of type 'elemType' using 'indType' read
2555 size_t elemTypSize = (elemTyp == TYP_STRUCT) ? elemStructSize : genTypeSize(elemTyp);
2556 size_t indTypeSize = genTypeSize(indType);
2558 if ((indType == TYP_REF) && (varTypeIsStruct(elemTyp)))
2560 // indType is TYP_REF and elemTyp is TYP_STRUCT
2562 // We have a pointer to a static that is a Boxed Struct
2566 else if (indTypeSize > elemTypSize)
2568 // Reading beyong the end of 'elem'
2570 // return a new unique value number
2571 elem = VNForExpr(nullptr, indType);
2572 JITDUMP(" *** Mismatched types in VNApplySelectorsTypeCheck (reading beyond the end)\n");
2574 else if (varTypeIsStruct(indType))
2576 // indType is TYP_STRUCT
2578 // return a new unique value number
2579 elem = VNForExpr(nullptr, indType);
2580 JITDUMP(" *** Mismatched types in VNApplySelectorsTypeCheck (indType is TYP_STRUCT)\n");
2584 // We are trying to read an 'elem' of type 'elemType' using 'indType' read
2586 // insert a cast of elem to 'indType'
2587 elem = VNForCast(elem, indType, elemTyp);
// Store-side counterpart of VNApplySelectorsTypeCheck: reconciles the type of the value
// being stored ('elem') with the storing indirection type. Struct-typed mismatches yield
// a fresh opaque VN (annotated with 'block'); scalar mismatches are bridged with a cast.
2594 ValueNum ValueNumStore::VNApplySelectorsAssignTypeCoerce(ValueNum elem, var_types indType, BasicBlock* block)
2596 var_types elemTyp = TypeOfVN(elem);
2598 // Check if the elemTyp is matching/compatible
2600 if (indType != elemTyp)
2602 bool isConstant = IsVNConstant(elem);
2603 if (isConstant && (elemTyp == genActualType(indType)))
2605 // (i.e. We recorded a constant of TYP_INT for a TYP_BYTE field)
2609 // We are trying to write an 'elem' of type 'elemType' using 'indType' store
2611 if (varTypeIsStruct(indType))
2613 // return a new unique value number
2614 elem = VNForExpr(block, indType);
2615 JITDUMP(" *** Mismatched types in VNApplySelectorsAssignTypeCoerce (indType is TYP_STRUCT)\n");
2619 // We are trying to write an 'elem' of type 'elemType' using 'indType' store
2621 // insert a cast of elem to 'indType'
2622 elem = VNForCast(elem, indType, elemTyp);
2629 //------------------------------------------------------------------------
2630 // VNApplySelectorsAssign: Compute the value number corresponding to "map" but with
2631 // the element at "fieldSeq" updated to have type "elem"; this is the new memory
2632 // value for an assignment of value "elem" into the memory at location "fieldSeq"
2633 // that occurs in block "block" and has type "indType" (so long as the selectors
2634 // into that memory occupy disjoint locations, which is true for GcHeap).
2637 // vnk - Identifies whether to recurse to Conservative or Liberal value numbers
2638 // when recursing through phis
2639 // map - Value number for the field map before the assignment
2640 // elem - Value number for the value being stored (to the given field)
2641 // indType - Type of the indirection storing the value to the field
2642 // block - Block where the assignment occurs
2645 // The value number corresponding to memory after the assignment.
2647 ValueNum ValueNumStore::VNApplySelectorsAssign(
2648 ValueNumKind vnk, ValueNum map, FieldSeqNode* fieldSeq, ValueNum elem, var_types indType, BasicBlock* block)
2650 if (fieldSeq == nullptr)
// Empty selector chain: the stored value itself (type-coerced) is the new memory value.
2652 return VNApplySelectorsAssignTypeCoerce(elem, indType, block);
2656 assert(fieldSeq != FieldSeqStore::NotAField());
2658 // Skip any "FirstElem" pseudo-fields or any "ConstantIndex" pseudo-fields
2659 // These will occur, at least, in struct static expressions, for method table offsets.
2660 if (fieldSeq->IsPseudoField())
2662 return VNApplySelectorsAssign(vnk, map, fieldSeq->m_next, elem, indType, block);
2665 // Otherwise, fldHnd is a real field handle.
2666 CORINFO_FIELD_HANDLE fldHnd = fieldSeq->m_fieldHnd;
2667 CORINFO_CLASS_HANDLE structType = nullptr;
2668 noway_assert(fldHnd != nullptr);
2669 CorInfoType fieldCit = m_pComp->info.compCompHnd->getFieldType(fldHnd, &structType);
2670 var_types fieldType = JITtype2varType(fieldCit);
2672 ValueNum fieldHndVN = VNForHandle(ssize_t(fldHnd), GTF_ICON_FIELD_HDL);
// Debug-only dump of the handle and remaining field sequence.
2675 if (m_pComp->verbose)
2677 printf(" fieldHnd " STR_VN "%x is ", fieldHndVN);
2678 vnDump(m_pComp, fieldHndVN);
2681 ValueNum seqNextVN = VNForFieldSeq(fieldSeq->m_next);
2682 ValueNum fieldSeqVN = VNForFunc(TYP_REF, VNF_FieldSeq, fieldHndVN, seqNextVN);
2684 printf(" fieldSeq " STR_VN "%x is ", fieldSeqVN);
2685 vnDump(m_pComp, fieldSeqVN);
2691 if (fieldSeq->m_next)
// Inner fields remain: read the current sub-map, recursively store into it.
2693 ValueNum fseqMap = VNForMapSelect(vnk, fieldType, map, fieldHndVN);
2694 elemAfter = VNApplySelectorsAssign(vnk, fseqMap, fieldSeq->m_next, elem, indType, block);
2698 elemAfter = VNApplySelectorsAssignTypeCoerce(elem, indType, block);
// New memory value: the old map with this field's slot updated.
2701 ValueNum newMap = VNForMapStore(fieldType, map, fieldHndVN, elemAfter);
// Applies a field-selector chain to both halves of a VN pair: the liberal and
// conservative map VNs are selected and type-checked independently.
2706 ValueNumPair ValueNumStore::VNPairApplySelectors(ValueNumPair map, FieldSeqNode* fieldSeq, var_types indType)
2708 size_t structSize = 0;
2709 ValueNum liberalVN = VNApplySelectors(VNK_Liberal, map.GetLiberal(), fieldSeq, &structSize);
2710 liberalVN = VNApplySelectorsTypeCheck(liberalVN, indType, structSize);
2713 ValueNum conservVN = VNApplySelectors(VNK_Conservative, map.GetConservative(), fieldSeq, &structSize);
2714 conservVN = VNApplySelectorsTypeCheck(conservVN, indType, structSize);
2716 return ValueNumPair(liberalVN, conservVN);
// Returns true iff 'vn' was allocated from a CEA_NotAField chunk, i.e. it represents
// the "not a field sequence" marker.
2719 bool ValueNumStore::IsVNNotAField(ValueNum vn)
2721 return m_chunks.GetNoExpand(GetChunkNum(vn))->m_attribs == CEA_NotAField;
// Returns a VN representing the given field sequence. NotAField gets a fresh, unique VN
// each call (from a CEA_NotAField chunk); a real sequence becomes a VNF_FieldSeq cons
// cell of (field-handle VN, VN of the rest of the sequence).
2724 ValueNum ValueNumStore::VNForFieldSeq(FieldSeqNode* fieldSeq)
2726 if (fieldSeq == nullptr)
2730 else if (fieldSeq == FieldSeqStore::NotAField())
2732 // We always allocate a new, unique VN in this call.
2733 Chunk* c = GetAllocChunk(TYP_REF, CEA_NotAField);
2734 unsigned offsetWithinChunk = c->AllocVN();
2735 ValueNum result = c->m_baseVN + offsetWithinChunk;
2740 ssize_t fieldHndVal = ssize_t(fieldSeq->m_fieldHnd);
2741 ValueNum fieldHndVN = VNForHandle(fieldHndVal, GTF_ICON_FIELD_HDL);
2742 ValueNum seqNextVN = VNForFieldSeq(fieldSeq->m_next);
2743 ValueNum fieldSeqVN = VNForFunc(TYP_REF, VNF_FieldSeq, fieldHndVN, seqNextVN);
// Debug-only dump of the constructed field-sequence VN.
2746 if (m_pComp->verbose)
2748 printf(" fieldHnd " STR_VN "%x is ", fieldHndVN);
2749 vnDump(m_pComp, fieldHndVN);
2752 printf(" fieldSeq " STR_VN "%x is ", fieldSeqVN);
2753 vnDump(m_pComp, fieldSeqVN);
// Inverse of VNForFieldSeq: decodes a field-sequence VN back into a FieldSeqNode chain.
// VNForNull() encodes the empty sequence; VNF_NotAField maps to the NotAField marker;
// otherwise the VNF_FieldSeq cons cell is unpacked recursively.
2762 FieldSeqNode* ValueNumStore::FieldSeqVNToFieldSeq(ValueNum vn)
2764 if (vn == VNForNull())
2769 assert(IsVNFunc(vn));
2772 GetVNFunc(vn, &funcApp);
2773 if (funcApp.m_func == VNF_NotAField)
2775 return FieldSeqStore::NotAField();
2778 assert(funcApp.m_func == VNF_FieldSeq);
2779 const ssize_t fieldHndVal = ConstantValue<ssize_t>(funcApp.m_args[0]);
2780 FieldSeqNode* head =
2781 m_pComp->GetFieldSeqStore()->CreateSingleton(reinterpret_cast<CORINFO_FIELD_HANDLE>(fieldHndVal));
2782 FieldSeqNode* tail = FieldSeqVNToFieldSeq(funcApp.m_args[1]);
2783 return m_pComp->GetFieldSeqStore()->Append(head, tail);
// Concatenates two field-sequence VNs. If either side is NotAField the result is
// NotAField; otherwise the head of fsVN1 is re-consed onto the recursive append of
// its tail with fsVN2.
2786 ValueNum ValueNumStore::FieldSeqVNAppend(ValueNum fsVN1, ValueNum fsVN2)
2788 if (fsVN1 == VNForNull())
2793 assert(IsVNFunc(fsVN1));
2796 GetVNFunc(fsVN1, &funcApp1);
2798 if ((funcApp1.m_func == VNF_NotAField) || IsVNNotAField(fsVN2))
2800 return VNForFieldSeq(FieldSeqStore::NotAField());
2803 assert(funcApp1.m_func == VNF_FieldSeq);
2804 ValueNum tailRes = FieldSeqVNAppend(funcApp1.m_args[1], fsVN2);
2805 ValueNum fieldSeqVN = VNForFunc(TYP_REF, VNF_FieldSeq, funcApp1.m_args[0], tailRes);
// Debug-only dump of the appended field-sequence VN.
2808 if (m_pComp->verbose)
2810 printf(" fieldSeq " STR_VN "%x is ", fieldSeqVN);
2811 vnDump(m_pComp, fieldSeqVN);
// Convenience overload: if opB is an integer constant carrying a field sequence,
// extends opA's pointer VN by that sequence (delegating to the FieldSeqNode* overload).
2819 ValueNum ValueNumStore::ExtendPtrVN(GenTree* opA, GenTree* opB)
2821 if (opB->OperGet() == GT_CNS_INT)
2823 FieldSeqNode* fldSeq = opB->gtIntCon.gtFieldSeq;
2824 if (fldSeq != nullptr)
2826 return ExtendPtrVN(opA, opB->gtIntCon.gtFieldSeq);
// If opA's (liberal, exception-stripped) VN is one of the pointer-forming funcs
// (PtrToLoc / PtrToStatic / PtrToArrElem), returns a new VN of the same func with
// 'fldSeq' appended to its field-sequence argument and opA's exception set re-attached;
// returns NoVN (the initial 'res') when opA's VN is not such a func.
2832 ValueNum ValueNumStore::ExtendPtrVN(GenTree* opA, FieldSeqNode* fldSeq)
2834 assert(fldSeq != nullptr);
2836 ValueNum res = NoVN;
2838 ValueNum opAvnWx = opA->gtVNPair.GetLiberal();
2839 assert(VNIsValid(opAvnWx));
2841 ValueNum opAvnx = VNForEmptyExcSet();
2842 VNUnpackExc(opAvnWx, &opAvn, &opAvnx);
2843 assert(VNIsValid(opAvn) && VNIsValid(opAvnx));
2846 if (!GetVNFunc(opAvn, &funcApp))
2851 if (funcApp.m_func == VNF_PtrToLoc)
2854 // For PtrToLoc, lib == cons.
2855 VNFuncApp consFuncApp;
2856 assert(GetVNFunc(VNNormVal(opA->GetVN(VNK_Conservative)), &consFuncApp) && consFuncApp.Equals(funcApp));
2858 ValueNum fldSeqVN = VNForFieldSeq(fldSeq);
2859 res = VNForFunc(TYP_BYREF, VNF_PtrToLoc, funcApp.m_args[0], FieldSeqVNAppend(funcApp.m_args[1], fldSeqVN));
2861 else if (funcApp.m_func == VNF_PtrToStatic)
2863 ValueNum fldSeqVN = VNForFieldSeq(fldSeq);
2864 res = VNForFunc(TYP_BYREF, VNF_PtrToStatic, FieldSeqVNAppend(funcApp.m_args[0], fldSeqVN));
2866 else if (funcApp.m_func == VNF_PtrToArrElem)
2868 ValueNum fldSeqVN = VNForFieldSeq(fldSeq);
// PtrToArrElem keeps its (elemType, array, index) args; only the field seq is extended.
2869 res = VNForFunc(TYP_BYREF, VNF_PtrToArrElem, funcApp.m_args[0], funcApp.m_args[1], funcApp.m_args[2],
2870 FieldSeqVNAppend(funcApp.m_args[3], fldSeqVN));
// Re-attach any exception set stripped from opA's original VN.
2874 res = VNWithExc(res, opAvnx);
// fgValueNumberArrIndexAssign: compute the new GcHeap memory VN that results
// from storing "rhsVN" at arr[inx] (possibly through field sequence "fldSeq")
// in an array whose element type is "elemTypeEq". The heap is modeled as
// nested maps: heap[elemType][arr][inx]. Returns the VN for the updated heap.
2879 ValueNum Compiler::fgValueNumberArrIndexAssign(CORINFO_CLASS_HANDLE elemTypeEq,
2882 FieldSeqNode* fldSeq,
2886 bool invalidateArray = false;
2887 ValueNum elemTypeEqVN = vnStore->VNForHandle(ssize_t(elemTypeEq), GTF_ICON_CLASS_HDL);
2888 var_types arrElemType = DecodeElemType(elemTypeEq);
// Drill down heap -> arrays-of-this-elem-type -> this array -> this index.
2889 ValueNum hAtArrType = vnStore->VNForMapSelect(VNK_Liberal, TYP_REF, fgCurMemoryVN[GcHeap], elemTypeEqVN);
2890 ValueNum hAtArrTypeAtArr = vnStore->VNForMapSelect(VNK_Liberal, TYP_REF, hAtArrType, arrVN);
2891 ValueNum hAtArrTypeAtArrAtInx = vnStore->VNForMapSelect(VNK_Liberal, arrElemType, hAtArrTypeAtArr, inxVN);
2893 ValueNum newValAtInx = ValueNumStore::NoVN;
2894 ValueNum newValAtArr = ValueNumStore::NoVN;
2895 ValueNum newValAtArrType = ValueNumStore::NoVN;
2897 if (fldSeq == FieldSeqStore::NotAField())
2899 // This doesn't represent a proper array access
2900 JITDUMP(" *** NotAField sequence encountered in fgValueNumberArrIndexAssign\n");
2902 // Store a new unique value for newValAtArrType
2903 newValAtArrType = vnStore->VNForExpr(compCurBB, TYP_REF);
2904 invalidateArray = true;
2908 // Note that this does the right thing if "fldSeq" is null -- returns last "rhs" argument.
2909 // This is the value that should be stored at "arr[inx]".
2911 vnStore->VNApplySelectorsAssign(VNK_Liberal, hAtArrTypeAtArrAtInx, fldSeq, rhsVN, indType, compCurBB);
2913 var_types arrElemFldType = arrElemType; // Uses arrElemType unless we have a non-null fldSeq
2914 if (vnStore->IsVNFunc(newValAtInx))
2917 vnStore->GetVNFunc(newValAtInx, &funcApp);
2918 if (funcApp.m_func == VNF_MapStore)
// A field-level store: the stored value's type is the relevant one.
2920 arrElemFldType = vnStore->TypeOfVN(newValAtInx);
2924 if (indType != arrElemFldType)
2926 // Mismatched types: Store between different types (indType into array of arrElemFldType)
2929 JITDUMP(" *** Mismatched types in fgValueNumberArrIndexAssign\n");
2931 // Store a new unique value for newValAtArrType
2932 newValAtArrType = vnStore->VNForExpr(compCurBB, TYP_REF);
2933 invalidateArray = true;
// Rebuild the nested maps from the inside out with the new element value.
2937 if (!invalidateArray)
2939 newValAtArr = vnStore->VNForMapStore(indType, hAtArrTypeAtArr, inxVN, newValAtInx);
2940 newValAtArrType = vnStore->VNForMapStore(TYP_REF, hAtArrType, arrVN, newValAtArr);
// Dump-only diagnostics below (guarded by verbose in the full source).
2946 printf(" hAtArrType " STR_VN "%x is MapSelect(curGcHeap(" STR_VN "%x), ", hAtArrType, fgCurMemoryVN[GcHeap]);
2948 if (arrElemType == TYP_STRUCT)
2950 printf("%s[]).\n", eeGetClassName(elemTypeEq));
2954 printf("%s[]).\n", varTypeName(arrElemType));
2956 printf(" hAtArrTypeAtArr " STR_VN "%x is MapSelect(hAtArrType(" STR_VN "%x), arr=" STR_VN "%x)\n",
2957 hAtArrTypeAtArr, hAtArrType, arrVN);
2958 printf(" hAtArrTypeAtArrAtInx " STR_VN "%x is MapSelect(hAtArrTypeAtArr(" STR_VN "%x), inx=" STR_VN "%x):%s\n",
2959 hAtArrTypeAtArrAtInx, hAtArrTypeAtArr, inxVN, varTypeName(arrElemType));
2961 if (!invalidateArray)
2963 printf(" newValAtInd " STR_VN "%x is ", newValAtInx);
2964 vnStore->vnDump(this, newValAtInx);
2967 printf(" newValAtArr " STR_VN "%x is ", newValAtArr);
2968 vnStore->vnDump(this, newValAtArr);
2972 printf(" newValAtArrType " STR_VN "%x is ", newValAtArrType);
2973 vnStore->vnDump(this, newValAtArrType);
2976 printf(" fgCurMemoryVN assigned:\n");
// Final heap VN: heap with the per-elem-type map replaced.
2980 return vnStore->VNForMapStore(TYP_REF, fgCurMemoryVN[GcHeap], elemTypeEqVN, newValAtArrType);
// fgValueNumberArrIndexVal (VNFuncApp overload): unpack the four args of a
// VNF_PtrToArrElem application (elem type handle, array VN, index VN, field
// sequence VN) and forward to the explicit-argument overload below.
2983 ValueNum Compiler::fgValueNumberArrIndexVal(GenTree* tree, VNFuncApp* pFuncApp, ValueNum addrXvn)
2985 assert(vnStore->IsVNHandle(pFuncApp->m_args[0]));
2986 CORINFO_CLASS_HANDLE arrElemTypeEQ = CORINFO_CLASS_HANDLE(vnStore->ConstantValue<ssize_t>(pFuncApp->m_args[0]));
2987 ValueNum arrVN = pFuncApp->m_args[1];
2988 ValueNum inxVN = pFuncApp->m_args[2];
2989 FieldSeqNode* fldSeq = vnStore->FieldSeqVNToFieldSeq(pFuncApp->m_args[3]);
2990 return fgValueNumberArrIndexVal(tree, arrElemTypeEQ, arrVN, inxVN, addrXvn, fldSeq);
// fgValueNumberArrIndexVal: value-number a load of arr[inx] (plus optional
// field sequence) by selecting through the nested heap maps
// heap[elemType][arr][inx]. If "tree" is non-null its VN pair is updated:
// liberal = the selected element, conservative = a fresh opaque VN (we lack
// conservative VNs for the address operands). Returns the selected element VN
// with exception set "excVN" attached.
2993 ValueNum Compiler::fgValueNumberArrIndexVal(GenTree* tree,
2994 CORINFO_CLASS_HANDLE elemTypeEq,
2998 FieldSeqNode* fldSeq)
3000 assert(tree == nullptr || tree->OperIsIndir());
3002 // The VN inputs are required to be non-exceptional values.
3003 assert(arrVN == vnStore->VNNormVal(arrVN));
3004 assert(inxVN == vnStore->VNNormVal(inxVN));
3006 var_types elemTyp = DecodeElemType(elemTypeEq);
3007 var_types indType = (tree == nullptr) ? elemTyp : tree->TypeGet();
3008 ValueNum selectedElem;
3010 if (fldSeq == FieldSeqStore::NotAField())
3012 // This doesn't represent a proper array access
3013 JITDUMP(" *** NotAField sequence encountered in fgValueNumberArrIndexVal\n");
3015 // a new unique value number
3016 selectedElem = vnStore->VNForExpr(compCurBB, elemTyp);
3021 printf(" IND of PtrToArrElem is unique VN " STR_VN "%x.\n", selectedElem);
3025 if (tree != nullptr)
3027 tree->gtVNPair.SetBoth(selectedElem);
// Normal path: select through heap -> elem-type map -> array -> index.
3032 ValueNum elemTypeEqVN = vnStore->VNForHandle(ssize_t(elemTypeEq), GTF_ICON_CLASS_HDL);
3033 ValueNum hAtArrType = vnStore->VNForMapSelect(VNK_Liberal, TYP_REF, fgCurMemoryVN[GcHeap], elemTypeEqVN);
3034 ValueNum hAtArrTypeAtArr = vnStore->VNForMapSelect(VNK_Liberal, TYP_REF, hAtArrType, arrVN);
3035 ValueNum wholeElem = vnStore->VNForMapSelect(VNK_Liberal, elemTyp, hAtArrTypeAtArr, inxVN);
3040 printf(" hAtArrType " STR_VN "%x is MapSelect(curGcHeap(" STR_VN "%x), ", hAtArrType,
3041 fgCurMemoryVN[GcHeap]);
3042 if (elemTyp == TYP_STRUCT)
3044 printf("%s[]).\n", eeGetClassName(elemTypeEq));
3048 printf("%s[]).\n", varTypeName(elemTyp));
3051 printf(" hAtArrTypeAtArr " STR_VN "%x is MapSelect(hAtArrType(" STR_VN "%x), arr=" STR_VN "%x).\n",
3052 hAtArrTypeAtArr, hAtArrType, arrVN);
3054 printf(" wholeElem " STR_VN "%x is MapSelect(hAtArrTypeAtArr(" STR_VN "%x), ind=" STR_VN "%x).\n",
3055 wholeElem, hAtArrTypeAtArr, inxVN);
// Apply any struct-field selectors on top of the whole element, then
// type-check against the indirection type and re-attach exceptions.
3059 selectedElem = wholeElem;
3060 size_t elemStructSize = 0;
3063 selectedElem = vnStore->VNApplySelectors(VNK_Liberal, wholeElem, fldSeq, &elemStructSize);
3064 elemTyp = vnStore->TypeOfVN(selectedElem);
3066 selectedElem = vnStore->VNApplySelectorsTypeCheck(selectedElem, indType, elemStructSize);
3067 selectedElem = vnStore->VNWithExc(selectedElem, excVN);
3070 if (verbose && (selectedElem != wholeElem))
3072 printf(" selectedElem is " STR_VN "%x after applying selectors.\n", selectedElem);
3076 if (tree != nullptr)
3078 tree->gtVNPair.SetLiberal(selectedElem);
3079 // TODO-CQ: what to do here about exceptions? We don't have the array and ind conservative
3080 // values, so we don't have their exceptions. Maybe we should.
3081 tree->gtVNPair.SetConservative(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
3085 return selectedElem;
// fgValueNumberByrefExposedLoad: value-number a load through a byref-exposed
// pointer as VNF_ByrefExposedLoad(type, normalized pointer, current
// ByrefExposed memory VN). The elided tail presumably returns loadVN.
3088 ValueNum Compiler::fgValueNumberByrefExposedLoad(var_types type, ValueNum pointerVN)
3090 ValueNum memoryVN = fgCurMemoryVN[ByrefExposed];
3091 // The memoization for VNFunc applications does not factor in the result type, so
3092 // VNF_ByrefExposedLoad takes the loaded type as an explicit parameter.
3093 ValueNum typeVN = vnStore->VNForIntCon(type);
3094 ValueNum loadVN = vnStore->VNForFunc(type, VNF_ByrefExposedLoad, typeVN, vnStore->VNNormVal(pointerVN), memoryVN);
// TypeOfVN: return the var_types of a VN, looked up from its owning chunk
// (each chunk holds VNs of a single type).
3099 var_types ValueNumStore::TypeOfVN(ValueNum vn)
3106 Chunk* c = m_chunks.GetNoExpand(GetChunkNum(vn));
3110 //------------------------------------------------------------------------
3111 // LoopOfVN: If the given value number is an opaque one associated with a particular
3112 // expression in the IR, give the loop number where the expression occurs; otherwise,
3113 // returns MAX_LOOP_NUM.
3116 vn - Value number to query
3119 The corresponding block's bbNatLoopNum, which may be BasicBlock::NOT_IN_LOOP.
3120 Returns MAX_LOOP_NUM if this VN is not an opaque value number associated with
3121 a particular expression/location in the IR.
3123 BasicBlock::loopNumber ValueNumStore::LoopOfVN(ValueNum vn)
3127 return MAX_LOOP_NUM;
// Loop number is stored per-chunk at allocation time.
3130 Chunk* c = m_chunks.GetNoExpand(GetChunkNum(vn));
3131 return c->m_loopNum;
// IsVNConstant: true for VNs allocated in constant or handle chunks.
// VNForVoid() is explicitly excluded: it lives in a constant chunk but
// represents "no value".
3134 bool ValueNumStore::IsVNConstant(ValueNum vn)
3140 Chunk* c = m_chunks.GetNoExpand(GetChunkNum(vn));
3141 if (c->m_attribs == CEA_Const)
3143 return vn != VNForVoid(); // Void is not a "real" constant -- in the sense that it represents no value.
3147 return c->m_attribs == CEA_Handle;
// IsVNInt32Constant: true iff "vn" is a constant of type TYP_INT.
3151 bool ValueNumStore::IsVNInt32Constant(ValueNum vn)
3153 if (!IsVNConstant(vn))
3158 return TypeOfVN(vn) == TYP_INT;
// GetHandleFlags: return the GTF_ICON_* handle-kind flags stored with a
// handle VN. Precondition: IsVNHandle(vn).
3161 unsigned ValueNumStore::GetHandleFlags(ValueNum vn)
3163 assert(IsVNHandle(vn));
3164 Chunk* c = m_chunks.GetNoExpand(GetChunkNum(vn));
3165 unsigned offset = ChunkOffset(vn);
3166 VNHandle* handle = &reinterpret_cast<VNHandle*>(c->m_defs)[offset];
3167 return handle->m_flags;
// IsVNHandle: true iff "vn" was allocated from a CEA_Handle chunk.
3170 bool ValueNumStore::IsVNHandle(ValueNum vn)
3177 Chunk* c = m_chunks.GetNoExpand(GetChunkNum(vn));
3178 return c->m_attribs == CEA_Handle;
// IsVNConstantBound: does "vn" have the shape "x <relop> constant" or
// "constant <relop> x" (exactly one side an int32 constant), e.g. "var < 100"?
3181 bool ValueNumStore::IsVNConstantBound(ValueNum vn)
3183 // Do we have "var < 100"?
3190 if (!GetVNFunc(vn, &funcAttr))
3194 if (funcAttr.m_func != (VNFunc)GT_LE && funcAttr.m_func != (VNFunc)GT_GE && funcAttr.m_func != (VNFunc)GT_LT &&
3195 funcAttr.m_func != (VNFunc)GT_GT)
// XOR of the two sides: exactly one operand must be an int32 constant.
3200 return IsVNInt32Constant(funcAttr.m_args[0]) != IsVNInt32Constant(funcAttr.m_args[1]);
// GetConstantBoundInfo: decompose a constant-bound compare (see
// IsVNConstantBound) into (cmpOper, cmpOpVN, constVal). When the constant is
// on the left, the relop is swapped so the result is always "var <op> const".
3203 void ValueNumStore::GetConstantBoundInfo(ValueNum vn, ConstantBoundInfo* info)
3205 assert(IsVNConstantBound(vn));
3208 // Do we have var < 100?
3210 GetVNFunc(vn, &funcAttr);
3212 bool isOp1Const = IsVNInt32Constant(funcAttr.m_args[1]);
3216 info->cmpOper = funcAttr.m_func;
3217 info->cmpOpVN = funcAttr.m_args[0];
3218 info->constVal = GetConstantInt32(funcAttr.m_args[1]);
// Constant on the left: swap the relop to normalize.
3222 info->cmpOper = GenTree::SwapRelop((genTreeOps)funcAttr.m_func);
3223 info->cmpOpVN = funcAttr.m_args[1];
3224 info->constVal = GetConstantInt32(funcAttr.m_args[0]);
3228 //------------------------------------------------------------------------
3229 // IsVNArrLenUnsignedBound: Checks if the specified vn represents an expression
3230 // such as "(uint)i < (uint)len" that implies that the index is valid
3231 // (0 <= i && i < a.len).
3234 vn - Value number to query
3235 info - Pointer to an UnsignedCompareCheckedBoundInfo object to return information about
3236 the expression. Not populated if the vn expression isn't suitable (e.g. i <= len).
3237 This enables optCreateJTrueBoundAssertion to immediately create an OAK_NO_THROW
3238 assertion instead of the OAK_EQUAL/NOT_EQUAL assertions created by signed compares
3239 (IsVNCompareCheckedBound, IsVNCompareCheckedBoundArith) that require further processing.
3241 bool ValueNumStore::IsVNUnsignedCompareCheckedBound(ValueNum vn, UnsignedCompareCheckedBoundInfo* info)
3245 if (GetVNFunc(vn, &funcApp))
3247 if ((funcApp.m_func == VNF_LT_UN) || (funcApp.m_func == VNF_GE_UN))
3249 // We only care about "(uint)i < (uint)len" and its negation "(uint)i >= (uint)len"
3250 if (IsVNCheckedBound(funcApp.m_args[1]))
3252 info->vnIdx = funcApp.m_args[0];
3253 info->cmpOper = funcApp.m_func;
3254 info->vnBound = funcApp.m_args[1];
3258 else if ((funcApp.m_func == VNF_GT_UN) || (funcApp.m_func == VNF_LE_UN))
3260 // We only care about "(uint)a.len > (uint)i" and its negation "(uint)a.len <= (uint)i"
3261 if (IsVNCheckedBound(funcApp.m_args[0]))
3263 info->vnIdx = funcApp.m_args[1];
3264 // Let's keep a consistent operand order - it's always i < len, never len > i
3265 info->cmpOper = (funcApp.m_func == VNF_GT_UN) ? VNF_LT_UN : VNF_GE_UN;
3266 info->vnBound = funcApp.m_args[0];
// IsVNCompareCheckedBound: does "vn" have the shape "x <relop> len" or
// "len <relop> x" where one side is a known checked bound (e.g. array length)?
3275 bool ValueNumStore::IsVNCompareCheckedBound(ValueNum vn)
3277 // Do we have "var < len"?
3284 if (!GetVNFunc(vn, &funcAttr))
3288 if (funcAttr.m_func != (VNFunc)GT_LE && funcAttr.m_func != (VNFunc)GT_GE && funcAttr.m_func != (VNFunc)GT_LT &&
3289 funcAttr.m_func != (VNFunc)GT_GT)
3293 if (!IsVNCheckedBound(funcAttr.m_args[0]) && !IsVNCheckedBound(funcAttr.m_args[1]))
// GetCompareCheckedBound: decompose a compare-against-checked-bound (see
// IsVNCompareCheckedBound) into (cmpOper, cmpOp, vnBound), swapping the relop
// when the bound appears on the left so the result reads "var <op> bound".
3301 void ValueNumStore::GetCompareCheckedBound(ValueNum vn, CompareCheckedBoundArithInfo* info)
3303 assert(IsVNCompareCheckedBound(vn));
3305 // Do we have var < a.len?
3307 GetVNFunc(vn, &funcAttr);
3309 bool isOp1CheckedBound = IsVNCheckedBound(funcAttr.m_args[1]);
3310 if (isOp1CheckedBound)
3312 info->cmpOper = funcAttr.m_func;
3313 info->cmpOp = funcAttr.m_args[0];
3314 info->vnBound = funcAttr.m_args[1];
3318 info->cmpOper = GenTree::SwapRelop((genTreeOps)funcAttr.m_func);
3319 info->cmpOp = funcAttr.m_args[1];
3320 info->vnBound = funcAttr.m_args[0];
// IsVNCheckedBoundArith: does "vn" have the shape "bound +/- x" or
// "x +/- bound" where one operand is a known checked bound?
3324 bool ValueNumStore::IsVNCheckedBoundArith(ValueNum vn)
3326 // Do we have "a.len +or- var"
3334 return GetVNFunc(vn, &funcAttr) && // vn is a func.
3335 (funcAttr.m_func == (VNFunc)GT_ADD || funcAttr.m_func == (VNFunc)GT_SUB) && // the func is +/-
3336 (IsVNCheckedBound(funcAttr.m_args[0]) || IsVNCheckedBound(funcAttr.m_args[1])); // either op1 or op2 is a.len
// GetCheckedBoundArithInfo: decompose "bound +/- x" (see IsVNCheckedBoundArith)
// into (arrOper, arrOp, vnBound), independent of which side the bound is on.
// Note the operator itself is NOT swapped (a - len vs len - a differ; callers
// distinguish via operand position — assumption, confirm against full source).
3339 void ValueNumStore::GetCheckedBoundArithInfo(ValueNum vn, CompareCheckedBoundArithInfo* info)
3341 // Do we have a.len +/- var?
3342 assert(IsVNCheckedBoundArith(vn));
3343 VNFuncApp funcArith;
3344 GetVNFunc(vn, &funcArith);
3346 bool isOp1CheckedBound = IsVNCheckedBound(funcArith.m_args[1]);
3347 if (isOp1CheckedBound)
3349 info->arrOper = funcArith.m_func;
3350 info->arrOp = funcArith.m_args[0];
3351 info->vnBound = funcArith.m_args[1];
3355 info->arrOper = funcArith.m_func;
3356 info->arrOp = funcArith.m_args[1];
3357 info->vnBound = funcArith.m_args[0];
// IsVNCompareCheckedBoundArith: does "vn" have the shape
// "x <relop> (bound +/- y)" (either operand may be the arithmetic side)?
3361 bool ValueNumStore::IsVNCompareCheckedBoundArith(ValueNum vn)
3363 // Do we have: "var < a.len - var"
3370 if (!GetVNFunc(vn, &funcAttr))
3375 // Suitable comparator.
3376 if (funcAttr.m_func != (VNFunc)GT_LE && funcAttr.m_func != (VNFunc)GT_GE && funcAttr.m_func != (VNFunc)GT_LT &&
3377 funcAttr.m_func != (VNFunc)GT_GT)
3382 // Either the op0 or op1 is arr len arithmetic.
3383 if (!IsVNCheckedBoundArith(funcAttr.m_args[0]) && !IsVNCheckedBoundArith(funcAttr.m_args[1]))
// GetCompareCheckedBoundArithInfo: decompose "var <relop> (bound +/- y)" into
// the compare part (cmpOper/cmpOp) and, via GetCheckedBoundArithInfo, the
// arithmetic part. The relop is swapped when the arithmetic side is on the left.
3391 void ValueNumStore::GetCompareCheckedBoundArithInfo(ValueNum vn, CompareCheckedBoundArithInfo* info)
3393 assert(IsVNCompareCheckedBoundArith(vn));
3396 GetVNFunc(vn, &funcAttr);
3398 // Check whether op0 or op1 is checked bound arithmetic.
3399 bool isOp1CheckedBoundArith = IsVNCheckedBoundArith(funcAttr.m_args[1]);
3400 if (isOp1CheckedBoundArith)
3402 info->cmpOper = funcAttr.m_func;
3403 info->cmpOp = funcAttr.m_args[0];
3404 GetCheckedBoundArithInfo(funcAttr.m_args[1], info);
3408 info->cmpOper = GenTree::SwapRelop((genTreeOps)funcAttr.m_func);
3409 info->cmpOp = funcAttr.m_args[1];
3410 GetCheckedBoundArithInfo(funcAttr.m_args[0], info);
// GetArrForLenVn: if "vn" is GT_ARR_LENGTH(arr), return the array VN;
// the elided fall-through presumably returns NoVN.
3414 ValueNum ValueNumStore::GetArrForLenVn(ValueNum vn)
3422 if (GetVNFunc(vn, &funcAttr) && funcAttr.m_func == (VNFunc)GT_ARR_LENGTH)
3424 return funcAttr.m_args[0];
// IsVNNewArr: true iff "vn" is a JitNewArr / JitReadyToRunNewArr helper-call
// application; on success "funcApp" receives the decoded function.
3429 bool ValueNumStore::IsVNNewArr(ValueNum vn, VNFuncApp* funcApp)
3435 bool result = false;
3436 if (GetVNFunc(vn, funcApp))
3438 result = (funcApp->m_func == VNF_JitNewArr) || (funcApp->m_func == VNF_JitReadyToRunNewArr);
// GetNewArrSize: if "vn" is a new-array helper application whose size
// argument (arg 1) is an int constant, return that size.
3443 int ValueNumStore::GetNewArrSize(ValueNum vn)
3446 if (IsVNNewArr(vn, &funcApp))
3448 ValueNum arg1VN = funcApp.m_args[1];
3449 if (IsVNConstant(arg1VN) && TypeOfVN(arg1VN) == TYP_INT)
3451 return ConstantValue<int>(arg1VN);
// IsVNArrLen: true iff "vn" is a GT_ARR_LENGTH application.
3457 bool ValueNumStore::IsVNArrLen(ValueNum vn)
3464 return (GetVNFunc(vn, &funcAttr) && funcAttr.m_func == (VNFunc)GT_ARR_LENGTH);
// IsVNCheckedBound: true iff "vn" has been recorded as the length operand of a
// bounds check (m_checkedBoundVNs) or is an array-length VN.
3467 bool ValueNumStore::IsVNCheckedBound(ValueNum vn)
3470 if (m_checkedBoundVNs.TryGetValue(vn, &dummy))
3472 // This VN appeared as the conservative VN of the length argument of some
3473 // GT_ARR_BOUND node.
3478 // Even if we haven't seen this VN in a bounds check, if it is an array length
3479 // VN then consider it a checked bound VN. This facilitates better bounds check
3480 // removal by ensuring that compares against array lengths get put in the
3481 // optCseCheckedBoundMap; such an array length might get CSEd with one that was
3482 // directly used in a bounds check, and having the map entry will let us update
3483 // the compare's VN so that OptimizeRangeChecks can recognize such compares.
// SetVNIsCheckedBound: record "vn" as a checked bound (bounds-check length).
3490 void ValueNumStore::SetVNIsCheckedBound(ValueNum vn)
3492 // This is meant to flag VNs for lengths that aren't known at compile time, so we can
3493 // form and propagate assertions about them. Ensure that callers filter out constant
3494 // VNs since they're not what we're looking to flag, and assertion prop can reason
3495 // directly about constants.
3496 assert(!IsVNConstant(vn));
3497 m_checkedBoundVNs.AddOrUpdate(vn, true);
// EvalMathFuncUnary: value-number a unary math intrinsic. Constant-folds only
// when the argument is constant AND the intrinsic maps to target instructions
// (folding user-call-implemented intrinsics would lose precision); otherwise
// builds a symbolic VNF_* application.
3500 ValueNum ValueNumStore::EvalMathFuncUnary(var_types typ, CorInfoIntrinsics gtMathFN, ValueNum arg0VN)
3502 assert(arg0VN == VNNormVal(arg0VN));
3504 // If the math intrinsic is not implemented by target-specific instructions, such as implemented
3505 // by user calls, then don't do constant folding on it. This minimizes precision loss.
3507 if (IsVNConstant(arg0VN) && m_pComp->IsTargetIntrinsic(gtMathFN))
3509 assert(varTypeIsFloating(TypeOfVN(arg0VN)));
3511 if (typ == TYP_DOUBLE)
3513 // Both operand and its result must be of the same floating point type.
3514 assert(typ == TypeOfVN(arg0VN));
3515 double arg0Val = GetConstantDouble(arg0VN);
3520 case CORINFO_INTRINSIC_Sin:
3523 case CORINFO_INTRINSIC_Cos:
3526 case CORINFO_INTRINSIC_Sqrt:
3527 res = sqrt(arg0Val);
3529 case CORINFO_INTRINSIC_Abs:
3530 res = fabs(arg0Val);
3532 case CORINFO_INTRINSIC_Ceiling:
3533 res = ceil(arg0Val);
3535 case CORINFO_INTRINSIC_Floor:
3536 res = floor(arg0Val);
3538 case CORINFO_INTRINSIC_Round:
3539 res = FloatingPointUtils::round(arg0Val);
3542 unreached(); // the above are the only math intrinsics at the time of this writing.
3545 return VNForDoubleCon(res);
3547 else if (typ == TYP_FLOAT)
3549 // Both operand and its result must be of the same floating point type.
3550 assert(typ == TypeOfVN(arg0VN));
3551 float arg0Val = GetConstantSingle(arg0VN);
3556 case CORINFO_INTRINSIC_Sin:
3557 res = sinf(arg0Val);
3559 case CORINFO_INTRINSIC_Cos:
3560 res = cosf(arg0Val);
3562 case CORINFO_INTRINSIC_Sqrt:
3563 res = sqrtf(arg0Val);
3565 case CORINFO_INTRINSIC_Abs:
3566 res = fabsf(arg0Val);
3568 case CORINFO_INTRINSIC_Ceiling:
3569 res = ceilf(arg0Val);
3571 case CORINFO_INTRINSIC_Floor:
3572 res = floorf(arg0Val);
3574 case CORINFO_INTRINSIC_Round:
3575 res = FloatingPointUtils::round(arg0Val);
3578 unreached(); // the above are the only math intrinsics at the time of this writing.
3581 return VNForFloatCon(res);
3585 // CORINFO_INTRINSIC_Round is currently the only intrinsic that takes floating-point arguments
3586 // and that returns a non floating-point result.
3588 assert(typ == TYP_INT);
3589 assert(gtMathFN == CORINFO_INTRINSIC_Round);
// Round float/double constant to int based on the argument's type.
3593 switch (TypeOfVN(arg0VN))
3597 double arg0Val = GetConstantDouble(arg0VN);
3598 res = int(FloatingPointUtils::round(arg0Val));
3603 float arg0Val = GetConstantSingle(arg0VN);
3604 res = int(FloatingPointUtils::round(arg0Val));
3611 return VNForIntCon(res);
// Non-foldable path: map the intrinsic to a symbolic VNFunc.
3616 assert(typ == TYP_DOUBLE || typ == TYP_FLOAT || (typ == TYP_INT && gtMathFN == CORINFO_INTRINSIC_Round));
3618 VNFunc vnf = VNF_Boundary;
3621 case CORINFO_INTRINSIC_Sin:
3624 case CORINFO_INTRINSIC_Cos:
3627 case CORINFO_INTRINSIC_Cbrt:
3630 case CORINFO_INTRINSIC_Sqrt:
3633 case CORINFO_INTRINSIC_Abs:
3636 case CORINFO_INTRINSIC_Round:
3637 if (typ == TYP_DOUBLE)
3639 vnf = VNF_RoundDouble;
3641 else if (typ == TYP_FLOAT)
3643 vnf = VNF_RoundFloat;
3645 else if (typ == TYP_INT)
3651 noway_assert(!"Invalid INTRINSIC_Round");
3654 case CORINFO_INTRINSIC_Cosh:
3657 case CORINFO_INTRINSIC_Sinh:
3660 case CORINFO_INTRINSIC_Tan:
3663 case CORINFO_INTRINSIC_Tanh:
3666 case CORINFO_INTRINSIC_Asin:
3669 case CORINFO_INTRINSIC_Asinh:
3672 case CORINFO_INTRINSIC_Acos:
3675 case CORINFO_INTRINSIC_Acosh:
3678 case CORINFO_INTRINSIC_Atan:
3681 case CORINFO_INTRINSIC_Atanh:
3684 case CORINFO_INTRINSIC_Log10:
3687 case CORINFO_INTRINSIC_Exp:
3690 case CORINFO_INTRINSIC_Ceiling:
3693 case CORINFO_INTRINSIC_Floor:
3697 unreached(); // the above are the only math intrinsics at the time of this writing.
3700 return VNForFunc(typ, vnf, arg0VN);
// EvalMathFuncBinary: value-number a binary math intrinsic (Atan2, Pow).
// Never constant-folds: these are not target-instruction-backed, so folding
// would lose precision. Always builds a symbolic VNF_* application.
3704 ValueNum ValueNumStore::EvalMathFuncBinary(var_types typ, CorInfoIntrinsics gtMathFN, ValueNum arg0VN, ValueNum arg1VN)
3706 assert(varTypeIsFloating(typ));
3707 assert(arg0VN == VNNormVal(arg0VN));
3708 assert(arg1VN == VNNormVal(arg1VN));
3710 VNFunc vnf = VNF_Boundary;
3712 // Currently, none of the binary math intrinsic are implemented by target-specific instructions.
3713 // To minimize precision loss, do not do constant folding on them.
3717 case CORINFO_INTRINSIC_Atan2:
3721 case CORINFO_INTRINSIC_Pow:
3726 unreached(); // the above are the only binary math intrinsics at the time of this writing.
3729 return VNForFunc(typ, vnf, arg0VN, arg1VN);
// IsVNFunc: true iff "vn" was allocated from a function-application chunk
// (decided by the chunk's attribute kind in the elided switch).
3732 bool ValueNumStore::IsVNFunc(ValueNum vn)
3738 Chunk* c = m_chunks.GetNoExpand(GetChunkNum(vn));
3739 switch (c->m_attribs)
// GetVNFunc: if "vn" is a function application, decode it into *funcApp
// (func id, arity, args) and return true. Each arity has its own fixed-size
// chunk element type (VNDefFunc0Arg..VNDefFunc4Arg). The NotAField case is
// represented as a zero-arity pseudo-function.
3753 bool ValueNumStore::GetVNFunc(ValueNum vn, VNFuncApp* funcApp)
3760 Chunk* c = m_chunks.GetNoExpand(GetChunkNum(vn));
3761 unsigned offset = ChunkOffset(vn);
3762 assert(offset < c->m_numUsed);
3763 switch (c->m_attribs)
3767 VNDefFunc4Arg* farg4 = &reinterpret_cast<VNDefFunc4Arg*>(c->m_defs)[offset];
3768 funcApp->m_func = farg4->m_func;
3769 funcApp->m_arity = 4;
3770 funcApp->m_args[0] = farg4->m_arg0;
3771 funcApp->m_args[1] = farg4->m_arg1;
3772 funcApp->m_args[2] = farg4->m_arg2;
3773 funcApp->m_args[3] = farg4->m_arg3;
3778 VNDefFunc3Arg* farg3 = &reinterpret_cast<VNDefFunc3Arg*>(c->m_defs)[offset];
3779 funcApp->m_func = farg3->m_func;
3780 funcApp->m_arity = 3;
3781 funcApp->m_args[0] = farg3->m_arg0;
3782 funcApp->m_args[1] = farg3->m_arg1;
3783 funcApp->m_args[2] = farg3->m_arg2;
3788 VNDefFunc2Arg* farg2 = &reinterpret_cast<VNDefFunc2Arg*>(c->m_defs)[offset];
3789 funcApp->m_func = farg2->m_func;
3790 funcApp->m_arity = 2;
3791 funcApp->m_args[0] = farg2->m_arg0;
3792 funcApp->m_args[1] = farg2->m_arg1;
3797 VNDefFunc1Arg* farg1 = &reinterpret_cast<VNDefFunc1Arg*>(c->m_defs)[offset];
3798 funcApp->m_func = farg1->m_func;
3799 funcApp->m_arity = 1;
3800 funcApp->m_args[0] = farg1->m_arg0;
3805 VNDefFunc0Arg* farg0 = &reinterpret_cast<VNDefFunc0Arg*>(c->m_defs)[offset];
3806 funcApp->m_func = farg0->m_func;
3807 funcApp->m_arity = 0;
// NotAField chunks decode as the VNF_NotAField pseudo-function.
3812 funcApp->m_func = VNF_NotAField;
3813 funcApp->m_arity = 0;
// VNForRefInAddr: given a TYP_REF or TYP_BYREF address VN, recursively strip
// ADD/SUB offset arithmetic to find the underlying object-reference VN.
3821 ValueNum ValueNumStore::VNForRefInAddr(ValueNum vn)
3823 var_types vnType = TypeOfVN(vn);
3824 if (vnType == TYP_REF)
3829 assert(vnType == TYP_BYREF);
3831 if (GetVNFunc(vn, &funcApp))
3833 assert(funcApp.m_arity == 2 && (funcApp.m_func == VNFunc(GT_ADD) || funcApp.m_func == VNFunc(GT_SUB)));
3834 var_types vnArg0Type = TypeOfVN(funcApp.m_args[0]);
3835 if (vnArg0Type == TYP_REF || vnArg0Type == TYP_BYREF)
3837 return VNForRefInAddr(funcApp.m_args[0]);
// Ref must then be in arg1; SUB with a ref on the right is not legal.
3841 assert(funcApp.m_func == VNFunc(GT_ADD) &&
3842 (TypeOfVN(funcApp.m_args[1]) == TYP_REF || TypeOfVN(funcApp.m_args[1]) == TYP_BYREF));
3843 return VNForRefInAddr(funcApp.m_args[1]);
// Non-function byref: must be a constant address.
3848 assert(IsVNConstant(vn));
// VNIsValid: true iff "vn" refers to an allocated slot in an existing chunk.
3853 bool ValueNumStore::VNIsValid(ValueNum vn)
3855 ChunkNum cn = GetChunkNum(vn);
3856 if (cn >= m_chunks.Size())
3861 Chunk* c = m_chunks.GetNoExpand(cn);
3862 return ChunkOffset(vn) < c->m_numUsed;
// vnDump: debug-print a human-readable rendering of "vn" — handles, constants
// (by type, with special cases for Null/Void/ZeroMap), checked-bound compares,
// function applications (with special formatting for FieldSeq / MapSelect /
// MapStore), or just the raw VN otherwise.
3867 void ValueNumStore::vnDump(Compiler* comp, ValueNum vn, bool isPtr)
3874 else if (IsVNHandle(vn))
3876 ssize_t val = ConstantValue<ssize_t>(vn);
3877 printf("Hnd const: 0x%p", dspPtr(val));
3879 else if (IsVNConstant(vn))
3881 var_types vnt = TypeOfVN(vn);
3892 int val = ConstantValue<int>(vn);
3895 printf("PtrCns[%p]", dspPtr(val));
// Small values print in decimal, larger ones in hex.
3900 if ((val > -1000) && (val < 1000))
3902 printf(" %ld", val);
3906 printf(" 0x%X", val);
3914 INT64 val = ConstantValue<INT64>(vn);
3917 printf("LngPtrCns: 0x%p", dspPtr(val));
3922 if ((val > -1000) && (val < 1000))
3924 printf(" %ld", val);
3926 else if ((val & 0xFFFFFFFF00000000LL) == 0)
3928 printf(" 0x%X", val);
3932 printf(" 0x%llx", val);
3938 printf("FltCns[%f]", ConstantValue<float>(vn));
3941 printf("DblCns[%f]", ConstantValue<double>(vn));
3944 if (vn == VNForNull())
3948 else if (vn == VNForVoid())
3954 assert(vn == VNForZeroMap());
3967 #endif // FEATURE_SIMD
3968 printf("structVal");
3971 // These should be unreached.
3976 else if (IsVNCompareCheckedBound(vn))
3978 CompareCheckedBoundArithInfo info;
3979 GetCompareCheckedBound(vn, &info);
3982 else if (IsVNCompareCheckedBoundArith(vn))
3984 CompareCheckedBoundArithInfo info;
3985 GetCompareCheckedBoundArithInfo(vn, &info);
3988 else if (IsVNFunc(vn))
3991 GetVNFunc(vn, &funcApp);
3992 // A few special cases...
3993 switch (funcApp.m_func)
3996 vnDumpFieldSeq(comp, &funcApp, true);
3999 vnDumpMapSelect(comp, &funcApp);
4002 vnDumpMapStore(comp, &funcApp);
4005 printf("%s(", VNFuncName(funcApp.m_func));
4006 for (unsigned i = 0; i < funcApp.m_arity; i++)
4013 printf(STR_VN "%x", funcApp.m_args[i]);
4015 #if FEATURE_VN_DUMP_FUNC_ARGS
4017 vnDump(comp, funcApp.m_args[i]);
4025 // Otherwise, just a VN with no structure; print just the VN.
// vnDumpFieldSeq: debug-print a VNF_FieldSeq application as a field-name
// chain, recursing on the tail (arg 1) until it is VNForNull(). The pseudo
// fields FirstElem and ConstantIndex print as special tokens.
4031 void ValueNumStore::vnDumpFieldSeq(Compiler* comp, VNFuncApp* fieldSeq, bool isHead)
4033 assert(fieldSeq->m_func == VNF_FieldSeq); // Precondition.
4034 // First arg is the field handle VN.
4035 assert(IsVNConstant(fieldSeq->m_args[0]) && TypeOfVN(fieldSeq->m_args[0]) == TYP_I_IMPL);
4036 ssize_t fieldHndVal = ConstantValue<ssize_t>(fieldSeq->m_args[0]);
4037 bool hasTail = (fieldSeq->m_args[1] != VNForNull());
4039 if (isHead && hasTail)
4044 CORINFO_FIELD_HANDLE fldHnd = CORINFO_FIELD_HANDLE(fieldHndVal);
4045 if (fldHnd == FieldSeqStore::FirstElemPseudoField)
4047 printf("#FirstElem");
4049 else if (fldHnd == FieldSeqStore::ConstantIndexPseudoField)
4051 printf("#ConstantIndex");
4055 const char* modName;
4056 const char* fldName = m_pComp->eeGetFieldName(fldHnd, &modName);
4057 printf("%s", fldName);
// Recurse into the tail of the sequence.
4063 assert(IsVNFunc(fieldSeq->m_args[1]));
4065 GetVNFunc(fieldSeq->m_args[1], &tail);
4066 vnDumpFieldSeq(comp, &tail, false);
4069 if (isHead && hasTail)
// vnDumpMapSelect: debug-print a VNF_MapSelect application as map[index].
4075 void ValueNumStore::vnDumpMapSelect(Compiler* comp, VNFuncApp* mapSelect)
4077 assert(mapSelect->m_func == VNF_MapSelect); // Precondition.
4079 ValueNum mapVN = mapSelect->m_args[0]; // First arg is the map id
4080 ValueNum indexVN = mapSelect->m_args[1]; // Second arg is the index
4082 comp->vnPrint(mapVN, 0);
4084 comp->vnPrint(indexVN, 0);
// vnDumpMapStore: debug-print a VNF_MapStore application as map[index := val].
4088 void ValueNumStore::vnDumpMapStore(Compiler* comp, VNFuncApp* mapStore)
4090 assert(mapStore->m_func == VNF_MapStore); // Precondition.
4092 ValueNum mapVN = mapStore->m_args[0]; // First arg is the map id
4093 ValueNum indexVN = mapStore->m_args[1]; // Second arg is the index
4094 ValueNum newValVN = mapStore->m_args[2]; // Third arg is the new value
4096 comp->vnPrint(mapVN, 0);
4098 comp->vnPrint(indexVN, 0);
4100 comp->vnPrint(newValVN, 0);
4105 // Static fields, methods.
// Per-VNFunc attribute bits (arity, commutativity, etc.), filled in once by
// InitValueNumStoreStatics and exposed through s_vnfOpAttribs.
4106 static UINT8 vnfOpAttribs[VNF_COUNT];
// genTreeOps that must never be treated directly as VNFuncs, either because
// they touch memory, need special VN semantics, or produce no value.
4107 static genTreeOps genTreeOpsIllegalAsVNFunc[] = {GT_IND, // When we do heap memory.
4108 GT_NULLCHECK, GT_QMARK, GT_COLON, GT_LOCKADD, GT_XADD, GT_XCHG,
4109 GT_CMPXCHG, GT_LCLHEAP, GT_BOX,
4111 // These need special semantics:
4112 GT_COMMA, // == second argument (but with exception(s) from first).
4113 GT_ADDR, GT_ARR_BOUNDS_CHECK,
4114 GT_OBJ, // May reference heap memory.
4115 GT_BLK, // May reference heap memory.
4116 GT_INIT_VAL, // Not strictly a pass-through.
4118 // These control-flow operations need no values.
4119 GT_JTRUE, GT_RETURN, GT_SWITCH, GT_RETFILT, GT_CKFINITE};
4121 UINT8* ValueNumStore::s_vnfOpAttribs = nullptr;
// InitValueNumStoreStatics: one-time initialization of the vnfOpAttribs table.
// Fills in arity/commutativity bits for all genTreeOps, then for every VNF_*
// via the valuenumfuncs.h X-macro, and finally marks the ops in
// genTreeOpsIllegalAsVNFunc as illegal-as-VNFunc.
4123 void ValueNumStore::InitValueNumStoreStatics()
4125 // Make sure we've gotten constants right...
4126 assert(unsigned(VNFOA_Arity) == (1 << VNFOA_ArityShift));
4127 assert(unsigned(VNFOA_AfterArity) == (unsigned(VNFOA_Arity) << VNFOA_ArityBits));
4129 s_vnfOpAttribs = &vnfOpAttribs[0];
4130 for (unsigned i = 0; i < GT_COUNT; i++)
4132 genTreeOps gtOper = static_cast<genTreeOps>(i);
4134 if (GenTree::OperIsUnary(gtOper))
4138 else if (GenTree::OperIsBinary(gtOper))
4142 // Since GT_ARR_BOUNDS_CHECK is not currently GTK_BINOP
4143 else if (gtOper == GT_ARR_BOUNDS_CHECK)
4147 vnfOpAttribs[i] |= (arity << VNFOA_ArityShift);
4149 if (GenTree::OperIsCommutative(gtOper))
4151 vnfOpAttribs[i] |= VNFOA_Commutative;
4155 // I so wish this wasn't the best way to do this...
4157 int vnfNum = VNF_Boundary + 1; // The macro definition below will update this after using it.
4159 #define ValueNumFuncDef(vnf, arity, commute, knownNonNull, sharedStatic) \
4161 vnfOpAttribs[vnfNum] |= VNFOA_Commutative; \
4163 vnfOpAttribs[vnfNum] |= VNFOA_KnownNonNull; \
4165 vnfOpAttribs[vnfNum] |= VNFOA_SharedStatic; \
4166 vnfOpAttribs[vnfNum] |= (arity << VNFOA_ArityShift); \
4169 #include "valuenumfuncs.h"
4170 #undef ValueNumFuncDef
4172 for (unsigned i = 0; i < _countof(genTreeOpsIllegalAsVNFunc); i++)
4174 vnfOpAttribs[genTreeOpsIllegalAsVNFunc[i]] |= VNFOA_IllegalGenTreeOp;
4179 // Define the name array.
// X-macro expansion: one stringized name per VNF_* entry in valuenumfuncs.h.
4180 #define ValueNumFuncDef(vnf, arity, commute, knownNonNull, sharedStatic) #vnf,
4182 const char* ValueNumStore::VNFuncNameArr[] = {
4183 #include "valuenumfuncs.h"
4184 #undef ValueNumFuncDef
// VNFuncName: name of a VNFunc — genTreeOps name for funcs below VNF_Boundary,
// otherwise the VNFuncNameArr entry (array starts at VNF_Boundary + 1).
4188 const char* ValueNumStore::VNFuncName(VNFunc vnf)
4190 if (vnf < VNF_Boundary)
4192 return GenTree::OpName(genTreeOps(vnf));
4196 return VNFuncNameArr[vnf - (VNF_Boundary + 1)];
// Names of the reserved VNs, indexed from RecursiveVN (-2) upward; used by
// reservedName() below.
4200 static const char* s_reservedNameArr[] = {
4201 "$VN.Recursive", // -2 RecursiveVN
4202 "$VN.No", // -1 NoVN
4203 "$VN.Null", // 0 VNForNull()
4204 "$VN.ZeroMap", // 1 VNForZeroMap()
4205 "$VN.ReadOnlyHeap", // 2 VNForROH()
4206 "$VN.Void", // 3 VNForVoid()
4207 "$VN.EmptyExcSet" // 4 VNForEmptyExcSet()
4210 // Returns the string name of "vn" when it is a reserved value number, nullptr otherwise
4212 const char* ValueNumStore::reservedName(ValueNum vn)
4214 int val = vn - ValueNumStore::RecursiveVN; // Add two, making 'RecursiveVN' equal to zero
4215 int max = ValueNumStore::SRC_NumSpecialRefConsts - ValueNumStore::RecursiveVN;
4217 if ((val >= 0) && (val < max))
4219 return s_reservedNameArr[val];
4226 // Returns true if "vn" is a reserved value number
// Same range test as reservedName(): RecursiveVN..last special ref constant.
4229 bool ValueNumStore::isReservedVN(ValueNum vn)
4231 int val = vn - ValueNumStore::RecursiveVN; // Adding two, making 'RecursiveVN' equal to zero
4232 int max = ValueNumStore::SRC_NumSpecialRefConsts - ValueNumStore::RecursiveVN;
4234 if ((val >= 0) && (val < max))
// RunTests: debug-only self-test of ValueNumStore — checks memoization
// (same constant -> same VN), type queries, constant queries, and that
// VNForFunc constant-folds ADD of two int constants (1 + 100 == 101).
4242 void ValueNumStore::RunTests(Compiler* comp)
4244 VNFunc VNF_Add = GenTreeOpToVNFunc(GT_ADD);
4246 ValueNumStore* vns = new (comp->getAllocatorDebugOnly()) ValueNumStore(comp, comp->getAllocatorDebugOnly());
4247 ValueNum vnNull = VNForNull();
4248 assert(vnNull == VNForNull());
4250 ValueNum vnFor1 = vns->VNForIntCon(1);
4251 assert(vnFor1 == vns->VNForIntCon(1));
4252 assert(vns->TypeOfVN(vnFor1) == TYP_INT);
4253 assert(vns->IsVNConstant(vnFor1));
4254 assert(vns->ConstantValue<int>(vnFor1) == 1);
4256 ValueNum vnFor100 = vns->VNForIntCon(100);
4257 assert(vnFor100 == vns->VNForIntCon(100));
4258 assert(vnFor100 != vnFor1);
4259 assert(vns->TypeOfVN(vnFor100) == TYP_INT);
4260 assert(vns->IsVNConstant(vnFor100));
4261 assert(vns->ConstantValue<int>(vnFor100) == 100);
4263 ValueNum vnFor1F = vns->VNForFloatCon(1.0f);
4264 assert(vnFor1F == vns->VNForFloatCon(1.0f));
4265 assert(vnFor1F != vnFor1 && vnFor1F != vnFor100);
4266 assert(vns->TypeOfVN(vnFor1F) == TYP_FLOAT);
4267 assert(vns->IsVNConstant(vnFor1F));
4268 assert(vns->ConstantValue<float>(vnFor1F) == 1.0f);
4270 ValueNum vnFor1D = vns->VNForDoubleCon(1.0);
4271 assert(vnFor1D == vns->VNForDoubleCon(1.0));
4272 assert(vnFor1D != vnFor1F && vnFor1D != vnFor1 && vnFor1D != vnFor100);
4273 assert(vns->TypeOfVN(vnFor1D) == TYP_DOUBLE);
4274 assert(vns->IsVNConstant(vnFor1D));
4275 assert(vns->ConstantValue<double>(vnFor1D) == 1.0);
// Non-constant operand: ADD must memoize but not fold.
4277 ValueNum vnRandom1 = vns->VNForExpr(nullptr, TYP_INT);
4278 ValueNum vnForFunc2a = vns->VNForFunc(TYP_INT, VNF_Add, vnFor1, vnRandom1);
4279 assert(vnForFunc2a == vns->VNForFunc(TYP_INT, VNF_Add, vnFor1, vnRandom1));
4280 assert(vnForFunc2a != vnFor1D && vnForFunc2a != vnFor1F && vnForFunc2a != vnFor1 && vnForFunc2a != vnRandom1);
4281 assert(vns->TypeOfVN(vnForFunc2a) == TYP_INT);
4282 assert(!vns->IsVNConstant(vnForFunc2a));
4283 assert(vns->IsVNFunc(vnForFunc2a));
4285 bool b = vns->GetVNFunc(vnForFunc2a, &fa2a);
4287 assert(fa2a.m_func == VNF_Add && fa2a.m_arity == 2 && fa2a.m_args[0] == vnFor1 && fa2a.m_args[1] == vnRandom1);
// Both operands constant: ADD must constant-fold to 101.
4289 ValueNum vnForFunc2b = vns->VNForFunc(TYP_INT, VNF_Add, vnFor1, vnFor100);
4290 assert(vnForFunc2b == vns->VNForFunc(TYP_INT, VNF_Add, vnFor1, vnFor100));
4291 assert(vnForFunc2b != vnFor1D && vnForFunc2b != vnFor1F && vnForFunc2b != vnFor1 && vnForFunc2b != vnFor100);
4292 assert(vns->TypeOfVN(vnForFunc2b) == TYP_INT);
4293 assert(vns->IsVNConstant(vnForFunc2b));
4294 assert(vns->ConstantValue<int>(vnForFunc2b) == 101);
4296 // printf("Did ValueNumStore::RunTests.\n");
// Worklist stack of basic blocks used by the value-numbering driver below.
4300 typedef JitExpandArrayStack<BasicBlock*> BlockStack;
4302 // This represents the "to do" state of the value number computation.
4303 struct ValueNumberState
4305 // These two stacks collectively represent the set of blocks that are candidates for
4306 // processing, because at least one predecessor has been processed. Blocks on "m_toDoAllPredsDone"
4307 // have had *all* predecessors processed, and thus are candidates for some extra optimizations.
4308 // Blocks on "m_toDoNotAllPredsDone" have at least one predecessor that has not been processed.
4309 // Blocks initially on "m_toDoNotAllPredsDone" may be moved to "m_toDoAllPredsDone" when their last
4310 // unprocessed predecessor is processed, thus maintaining the invariants.
4311 BlockStack m_toDoAllPredsDone;
4312 BlockStack m_toDoNotAllPredsDone;
4316 // TBD: This should really be a bitset...
4318 // first bit indicates completed,
4319 // second bit indicates that it's been pushed on all-done stack,
4320 // third bit indicates that it's been pushed on not-all-done stack.
4326 BVB_onAllDone = 0x2,
4327 BVB_onNotAllDone = 0x4,
// Returns true iff the visit bit "bvb" is set for block number "bbNum".
4330 bool GetVisitBit(unsigned bbNum, BlockVisitBits bvb)
4332 return (m_visited[bbNum] & bvb) != 0;
// Sets the visit bit "bvb" for block number "bbNum" (bits are never cleared).
4334 void SetVisitBit(unsigned bbNum, BlockVisitBits bvb)
4336 m_visited[bbNum] |= bvb;
4339 ValueNumberState(Compiler* comp)
4340 : m_toDoAllPredsDone(comp->getAllocator(), /*minSize*/ 4)
4341 , m_toDoNotAllPredsDone(comp->getAllocator(), /*minSize*/ 4)
4343 , m_visited(new (comp, CMK_ValueNumber) BYTE[comp->fgBBNumMax + 1]())
// Pick a block from "m_toDoNotAllPredsDone" to process next, preferring a loop
// entry block whose non-loop predecessors are all complete (processing a loop
// entry breaks the cycle so the loop body becomes processable by the
// all-preds-done rule). May return nullptr if all candidates turn out to be
// already complete.
4347 BasicBlock* ChooseFromNotAllPredsDone()
4349 assert(m_toDoAllPredsDone.Size() == 0);
4350 // If we have no blocks with all preds done, then (ideally, if all cycles have been captured by loops)
4351 // we must have at least one block within a loop. We want to do the loops first. Doing a loop entry block
4352 // should break the cycle, making the rest of the body of the loop (unless there's a nested loop) doable by the
4353 // all-preds-done rule. If several loop entry blocks are available, at least one should have all non-loop preds
4354 // done -- we choose that.
4355 for (unsigned i = 0; i < m_toDoNotAllPredsDone.Size(); i++)
4357 BasicBlock* cand = m_toDoNotAllPredsDone.Get(i);
4359 // Skip any already-completed blocks (a block may have all its preds finished, get added to the
4360 // all-preds-done todo set, and get processed there). Do this by moving the last one down, to
4361 // keep the array compact.
4362 while (GetVisitBit(cand->bbNum, BVB_complete))
4364 if (i + 1 < m_toDoNotAllPredsDone.Size())
4366 cand = m_toDoNotAllPredsDone.Pop();
4367 m_toDoNotAllPredsDone.Set(i, cand);
4371 // "cand" is the last element; delete it.
4372 (void)m_toDoNotAllPredsDone.Pop();
4376 // We may have run out of non-complete candidates above. If so, we're done.
4377 if (i == m_toDoNotAllPredsDone.Size())
4382 // See if "cand" is a loop entry.
4384 if (m_comp->optBlockIsLoopEntry(cand, &lnum))
4386 // "lnum" is the innermost loop of which "cand" is the entry; find the outermost.
4387 unsigned lnumPar = m_comp->optLoopTable[lnum].lpParent;
4388 while (lnumPar != BasicBlock::NOT_IN_LOOP)
4390 if (m_comp->optLoopTable[lnumPar].lpEntry == cand)
4398 lnumPar = m_comp->optLoopTable[lnumPar].lpParent;
4401 bool allNonLoopPredsDone = true;
4402 for (flowList* pred = m_comp->BlockPredsWithEH(cand); pred != nullptr; pred = pred->flNext)
4404 BasicBlock* predBlock = pred->flBlock;
4405 if (!m_comp->optLoopTable[lnum].lpContains(predBlock))
4407 if (!GetVisitBit(predBlock->bbNum, BVB_complete))
4409 allNonLoopPredsDone = false;
4413 if (allNonLoopPredsDone)
4420 // If we didn't find a loop entry block with all non-loop preds done above, then return a random member (if
4422 if (m_toDoNotAllPredsDone.Size() == 0)
4428 return m_toDoNotAllPredsDone.Pop();
4432 // Debugging output that is too detailed for a normal JIT dump...
4433 #define DEBUG_VN_VISIT 0
4435 // Record that "blk" has been visited, and add any unvisited successors of "blk" to the appropriate todo set.
4436 void FinishVisit(BasicBlock* blk)
// FIX(review): these guards were "#ifdef DEBUG_VN_VISIT". Since the macro is
// *defined* (as 0) above, "#ifdef" was always true and this extra-detailed
// output was always compiled in, contradicting the 0/1 toggle. "#if" honors
// the macro's value, as the "#define DEBUG_VN_VISIT 0" clearly intends.
4438 #if DEBUG_VN_VISIT
4439 JITDUMP("finish(BB%02u).\n", blk->bbNum);
4440 #endif // DEBUG_VN_VISIT
4442 SetVisitBit(blk->bbNum, BVB_complete);
4444 for (BasicBlock* succ : blk->GetAllSuccs(m_comp))
4446 #if DEBUG_VN_VISIT
4447 JITDUMP(" Succ(BB%02u).\n", succ->bbNum);
4448 #endif // DEBUG_VN_VISIT
4450 if (GetVisitBit(succ->bbNum, BVB_complete))
4454 #if DEBUG_VN_VISIT
4455 JITDUMP(" Not yet completed.\n");
4456 #endif // DEBUG_VN_VISIT
4458 bool allPredsVisited = true;
4459 for (flowList* pred = m_comp->BlockPredsWithEH(succ); pred != nullptr; pred = pred->flNext)
4461 BasicBlock* predBlock = pred->flBlock;
4462 if (!GetVisitBit(predBlock->bbNum, BVB_complete))
4464 allPredsVisited = false;
4469 if (allPredsVisited)
4471 #if DEBUG_VN_VISIT
4472 JITDUMP(" All preds complete, adding to allDone.\n");
4473 #endif // DEBUG_VN_VISIT
4475 assert(!GetVisitBit(succ->bbNum, BVB_onAllDone)); // Only last completion of last succ should add to
4477 m_toDoAllPredsDone.Push(succ);
4478 SetVisitBit(succ->bbNum, BVB_onAllDone);
4482 #if DEBUG_VN_VISIT
4483 JITDUMP(" Not all preds complete Adding to notallDone, if necessary...\n");
4484 #endif // DEBUG_VN_VISIT
4486 if (!GetVisitBit(succ->bbNum, BVB_onNotAllDone))
4488 #if DEBUG_VN_VISIT
4489 JITDUMP(" Was necessary.\n");
4490 #endif // DEBUG_VN_VISIT
4491 m_toDoNotAllPredsDone.Push(succ);
4492 SetVisitBit(succ->bbNum, BVB_onNotAllDone);
4500 return m_toDoAllPredsDone.Size() > 0 || m_toDoNotAllPredsDone.Size() > 0;
// Top-level value-numbering driver for the method: allocates/resets the value
// number store, clears stale VNs, seeds initial VNs for parameters, must-init
// locals, and memory, then value-numbers every block via a two-worklist
// algorithm (see ValueNumberState).
4504 void Compiler::fgValueNumber()
4507 // This could be a JITDUMP, but some people find it convenient to set a breakpoint on the printf.
4510 printf("\n*************** In fgValueNumber()\n");
4514 // If we skipped SSA, skip VN as well.
4515 if (fgSsaPassesCompleted == 0)
4520 // Allocate the value number store.
4521 assert(fgVNPassesCompleted > 0 || vnStore == nullptr);
4522 if (fgVNPassesCompleted == 0)
4524 CompAllocator* allocator = new (this, CMK_ValueNumber) CompAllocator(this, CMK_ValueNumber);
4525 vnStore = new (this, CMK_ValueNumber) ValueNumStore(this, allocator);
// On a repeat pass the store already exists; wipe any VNs left from the prior pass.
4530 // Make sure the memory SSA names have no value numbers.
4531 for (unsigned i = 0; i < lvMemoryNumSsaNames; i++)
4533 lvMemoryPerSsaData.GetRef(i).m_vnPair = noVnp;
4535 for (BasicBlock* blk = fgFirstBB; blk != nullptr; blk = blk->bbNext)
4537 // Now iterate over the block's statements, and their trees.
4538 for (GenTree* stmts = blk->FirstNonPhiDef(); stmts != nullptr; stmts = stmts->gtNext)
4540 assert(stmts->IsStatement());
4541 for (GenTree* tree = stmts->gtStmt.gtStmtList; tree; tree = tree->gtNext)
4543 tree->gtVNPair.SetBoth(ValueNumStore::NoVN)
4549 // Compute the side effects of loops.
4550 optComputeLoopSideEffects();
4552 // At the block level, we will use a modified worklist algorithm. We will have two
4553 // "todo" sets of unvisited blocks. Blocks (other than the entry block) are put in a
4554 // todo set only when some predecessor has been visited, so all blocks have at least one
4555 // predecessor visited. The distinction between the two sets is whether *all* predecessors have
4556 // already been visited. We visit such blocks preferentially if they exist, since phi definitions
4557 // in such blocks will have all arguments defined, enabling a simplification in the case that all
4558 // arguments to the phi have the same VN. If no such blocks exist, we pick a block with at least
4559 // one unvisited predecessor. In this case, we assign a new VN for phi definitions.
4561 // Start by giving incoming arguments value numbers.
4562 // Also give must-init vars a zero of their type.
4563 for (unsigned i = 0; i < lvaCount; i++)
4565 LclVarDsc* varDsc = &lvaTable[i];
4566 if (varDsc->lvIsParam)
4568 // We assume that code equivalent to this variable initialization loop
4569 // has been performed when doing SSA naming, so that all the variables we give
4570 // initial VNs to here have been given initial SSA definitions there.
4571 // SSA numbers always start from FIRST_SSA_NUM, and we give the value number to SSA name FIRST_SSA_NUM.
4572 // We use the VNF_InitVal(i) from here so we know that this value is loop-invariant
4574 ValueNum initVal = vnStore->VNForFunc(varDsc->TypeGet(), VNF_InitVal, vnStore->VNForIntCon(i));
4575 LclSsaVarDsc* ssaDef = varDsc->GetPerSsaData(SsaConfig::FIRST_SSA_NUM);
4576 ssaDef->m_vnPair.SetBoth(initVal);
4577 ssaDef->m_defLoc.m_blk = fgFirstBB;
4579 else if (info.compInitMem || varDsc->lvMustInit ||
4580 (varDsc->lvTracked && VarSetOps::IsMember(this, fgFirstBB->bbLiveIn, varDsc->lvVarIndex)))
4582 // The last clause covers the use-before-def variables (the ones that are live-in to the first block),
4583 // these are variables that are read before being initialized (at least on some control flow paths)
4584 // if they are not must-init, then they get VNF_InitVal(i), as with the param case.)
4586 bool isZeroed = (info.compInitMem || varDsc->lvMustInit);
4587 ValueNum initVal = ValueNumStore::NoVN; // We must assign a new value to initVal
4588 var_types typ = varDsc->TypeGet();
4592 case TYP_LCLBLK: // The outgoing args area for arm and x64
4593 case TYP_BLK: // A blob of memory
4594 // TYP_BLK is used for the EHSlots LclVar on x86 (aka shadowSPslotsVar)
4595 // and for the lvaInlinedPInvokeFrameVar on x64, arm and x86
4596 // The stack associated with these LclVars are not zero initialized
4597 // thus we set 'initVN' to a new, unique VN.
4599 initVal = vnStore->VNForExpr(fgFirstBB);
4605 // LclVars of TYP_BYREF can be zero-inited.
4606 initVal = vnStore->VNForByrefCon(0);
4610 // Here we have uninitialized TYP_BYREF
4611 initVal = vnStore->VNForFunc(typ, VNF_InitVal, vnStore->VNForIntCon(i));
4618 // By default we will zero init these LclVars
4619 initVal = vnStore->VNZeroForType(typ);
4623 initVal = vnStore->VNForFunc(typ, VNF_InitVal, vnStore->VNForIntCon(i));
4628 bool isVarargParam = (i == lvaVarargsBaseOfStkArgs || i == lvaVarargsHandleArg);
4630 initVal = vnStore->VNForExpr(fgFirstBB); // a new, unique VN.
4632 assert(initVal != ValueNumStore::NoVN);
4634 LclSsaVarDsc* ssaDef = varDsc->GetPerSsaData(SsaConfig::FIRST_SSA_NUM);
4635 ssaDef->m_vnPair.SetBoth(initVal);
4636 ssaDef->m_defLoc.m_blk = fgFirstBB;
4639 // Give memory an initial value number (about which we know nothing).
4640 ValueNum memoryInitVal = vnStore->VNForFunc(TYP_REF, VNF_InitVal, vnStore->VNForIntCon(-1)); // Use -1 for memory.
4641 GetMemoryPerSsaData(SsaConfig::FIRST_SSA_NUM)->m_vnPair.SetBoth(memoryInitVal);
4645 printf("Memory Initial Value in BB01 is: " STR_VN "%x\n", memoryInitVal);
// Drive the per-block walk using the two worklists; the entry block has no
// preds, so it goes straight onto the all-preds-done stack.
4649 ValueNumberState vs(this);
4651 // Push the first block. This has no preds.
4652 vs.m_toDoAllPredsDone.Push(fgFirstBB);
4654 while (vs.ToDoExists())
4656 while (vs.m_toDoAllPredsDone.Size() > 0)
4658 BasicBlock* toDo = vs.m_toDoAllPredsDone.Pop();
4659 fgValueNumberBlock(toDo);
4660 // Record that we've visited "toDo", and add successors to the right sets.
4661 vs.FinishVisit(toDo);
4663 // OK, we've run out of blocks whose predecessors are done. Pick one whose predecessors are not all done,
4664 // process that. This may make more "all-done" blocks, so we'll go around the outer loop again --
4665 // note that this is an "if", not a "while" loop.
4666 if (vs.m_toDoNotAllPredsDone.Size() > 0)
4668 BasicBlock* toDo = vs.ChooseFromNotAllPredsDone();
4669 if (toDo == nullptr)
4671 continue; // We may have run out, because of completed blocks on the not-all-preds done list.
4674 fgValueNumberBlock(toDo);
4675 // Record that we've visited "toDo", and add successors to the right sets.
4676 vs.FinishVisit(toDo);
4684 fgVNPassesCompleted++;
// Value-number all statements of "blk": first the GT_PHI local definitions
// (when all phi args share the same VN pair, that VN is just passed through;
// otherwise a VNF_PhiDef is built), then the memory phi for each MemoryKind,
// then every remaining statement's trees in execution order, and finally the
// block's out-state for each MemoryKind is recorded.
4687 void Compiler::fgValueNumberBlock(BasicBlock* blk)
4692 compCurStmtNum = blk->bbStmtNum - 1; // Set compCurStmtNum
4695 unsigned outerLoopNum = BasicBlock::NOT_IN_LOOP;
4697 // First: visit phi's. If "newVNForPhis", give them new VN's. If not,
4698 // first check to see if all phi args have the same value.
4699 GenTree* firstNonPhi = blk->FirstNonPhiDef();
4700 for (GenTree* phiDefs = blk->bbTreeList; phiDefs != firstNonPhi; phiDefs = phiDefs->gtNext)
4702 // TODO-Cleanup: It has been proposed that we should have an IsPhiDef predicate. We would use it
4703 // in Block::FirstNonPhiDef as well.
4704 GenTree* phiDef = phiDefs->gtStmt.gtStmtExpr;
4705 assert(phiDef->OperGet() == GT_ASG);
4706 GenTreeLclVarCommon* newSsaVar = phiDef->gtOp.gtOp1->AsLclVarCommon();
4708 ValueNumPair phiAppVNP;
4709 ValueNumPair sameVNPair;
4711 GenTree* phiFunc = phiDef->gtOp.gtOp2;
4713 // At this point a GT_PHI node should never have a nullptr for gtOp1
4714 // and the gtOp1 should always be a GT_LIST node.
4715 GenTree* phiOp1 = phiFunc->gtOp.gtOp1;
4716 noway_assert(phiOp1 != nullptr);
4717 noway_assert(phiOp1->OperGet() == GT_LIST);
4719 GenTreeArgList* phiArgs = phiFunc->gtOp.gtOp1->AsArgList();
4721 // A GT_PHI node should have more than one argument.
4722 noway_assert(phiArgs->Rest() != nullptr);
// Seed the phi application with the first arg's SSA number, then fold the
// rest in one at a time, tracking whether all args agree (liberal/conservative
// tracked separately).
4724 GenTreeLclVarCommon* phiArg = phiArgs->Current()->AsLclVarCommon();
4725 phiArgs = phiArgs->Rest();
4727 phiAppVNP.SetBoth(vnStore->VNForIntCon(phiArg->gtSsaNum));
4728 bool allSameLib = true;
4729 bool allSameCons = true;
4730 sameVNPair = lvaTable[phiArg->gtLclNum].GetPerSsaData(phiArg->gtSsaNum)->m_vnPair;
4731 if (!sameVNPair.BothDefined())
4734 allSameCons = false;
4736 while (phiArgs != nullptr)
4738 phiArg = phiArgs->Current()->AsLclVarCommon();
4739 // Set the VN of the phi arg.
4740 phiArg->gtVNPair = lvaTable[phiArg->gtLclNum].GetPerSsaData(phiArg->gtSsaNum)->m_vnPair;
4741 if (phiArg->gtVNPair.BothDefined())
4743 if (phiArg->gtVNPair.GetLiberal() != sameVNPair.GetLiberal())
4747 if (phiArg->gtVNPair.GetConservative() != sameVNPair.GetConservative())
4749 allSameCons = false;
4755 allSameCons = false;
4757 ValueNumPair phiArgSsaVNP;
4758 phiArgSsaVNP.SetBoth(vnStore->VNForIntCon(phiArg->gtSsaNum));
4759 phiAppVNP = vnStore->VNPairForFunc(newSsaVar->TypeGet(), VNF_Phi, phiArgSsaVNP, phiAppVNP);
4760 phiArgs = phiArgs->Rest();
// Per-side selection: use the shared arg VN where all args agreed, the phi
// application otherwise.
4763 ValueNumPair newVNPair;
4766 newVNPair.SetLiberal(sameVNPair.GetLiberal());
4770 newVNPair.SetLiberal(phiAppVNP.GetLiberal());
4774 newVNPair.SetConservative(sameVNPair.GetConservative());
4778 newVNPair.SetConservative(phiAppVNP.GetConservative());
4781 LclSsaVarDsc* newSsaVarDsc = lvaTable[newSsaVar->gtLclNum].GetPerSsaData(newSsaVar->GetSsaNum());
4782 // If all the args of the phi had the same value(s) (liberal and conservative), then there wasn't really
4783 // a reason to have the phi -- just pass on that value.
4784 if (allSameLib && allSameCons)
4786 newSsaVarDsc->m_vnPair = newVNPair;
4790 printf("In SSA definition, incoming phi args all same, set VN of local %d/%d to ",
4791 newSsaVar->GetLclNum(), newSsaVar->GetSsaNum());
4792 vnpPrint(newVNPair, 1);
4799 // They were not the same; we need to create a phi definition.
4800 ValueNumPair lclNumVNP;
4801 lclNumVNP.SetBoth(ValueNum(newSsaVar->GetLclNum()));
4802 ValueNumPair ssaNumVNP;
4803 ssaNumVNP.SetBoth(ValueNum(newSsaVar->GetSsaNum()));
4804 ValueNumPair vnPhiDef =
4805 vnStore->VNPairForFunc(newSsaVar->TypeGet(), VNF_PhiDef, lclNumVNP, ssaNumVNP, phiAppVNP);
4806 newSsaVarDsc->m_vnPair = vnPhiDef;
4810 printf("SSA definition: set VN of local %d/%d to ", newSsaVar->GetLclNum(), newSsaVar->GetSsaNum());
4811 vnpPrint(vnPhiDef, 1);
4818 // Now do the same for each MemoryKind.
4819 for (MemoryKind memoryKind : allMemoryKinds())
4821 // Is there a phi for this block?
4822 if (blk->bbMemorySsaPhiFunc[memoryKind] == nullptr)
4824 fgCurMemoryVN[memoryKind] = GetMemoryPerSsaData(blk->bbMemorySsaNumIn[memoryKind])->m_vnPair.GetLiberal();
4825 assert(fgCurMemoryVN[memoryKind] != ValueNumStore::NoVN);
4829 if ((memoryKind == ByrefExposed) && byrefStatesMatchGcHeapStates)
4831 // The update for GcHeap will copy its result to ByrefExposed.
4832 assert(memoryKind < GcHeap);
4833 assert(blk->bbMemorySsaPhiFunc[memoryKind] == blk->bbMemorySsaPhiFunc[GcHeap]);
4838 ValueNum newMemoryVN;
// Loop entries get their memory state from the loop side-effect summary
// instead of a memory phi over predecessors.
4839 if (optBlockIsLoopEntry(blk, &loopNum))
4841 newMemoryVN = fgMemoryVNForLoopSideEffects(memoryKind, blk, loopNum);
4845 // Are all the VN's the same?
4846 BasicBlock::MemoryPhiArg* phiArgs = blk->bbMemorySsaPhiFunc[memoryKind];
4847 assert(phiArgs != BasicBlock::EmptyMemoryPhiDef);
4848 // There should be > 1 args to a phi.
4849 assert(phiArgs->m_nextArg != nullptr);
4850 ValueNum phiAppVN = vnStore->VNForIntCon(phiArgs->GetSsaNum());
4851 JITDUMP(" Building phi application: $%x = SSA# %d.\n", phiAppVN, phiArgs->GetSsaNum());
4852 bool allSame = true;
4853 ValueNum sameVN = GetMemoryPerSsaData(phiArgs->GetSsaNum())->m_vnPair.GetLiberal();
4854 if (sameVN == ValueNumStore::NoVN)
4858 phiArgs = phiArgs->m_nextArg;
4859 while (phiArgs != nullptr)
4861 ValueNum phiArgVN = GetMemoryPerSsaData(phiArgs->GetSsaNum())->m_vnPair.GetLiberal();
4862 if (phiArgVN == ValueNumStore::NoVN || phiArgVN != sameVN)
4867 ValueNum oldPhiAppVN = phiAppVN;
4869 unsigned phiArgSSANum = phiArgs->GetSsaNum();
4870 ValueNum phiArgSSANumVN = vnStore->VNForIntCon(phiArgSSANum);
4871 JITDUMP(" Building phi application: $%x = SSA# %d.\n", phiArgSSANumVN, phiArgSSANum);
4872 phiAppVN = vnStore->VNForFunc(TYP_REF, VNF_Phi, phiArgSSANumVN, phiAppVN);
4873 JITDUMP(" Building phi application: $%x = phi($%x, $%x).\n", phiAppVN, phiArgSSANumVN,
4875 phiArgs = phiArgs->m_nextArg;
4879 newMemoryVN = sameVN;
4884 vnStore->VNForFunc(TYP_REF, VNF_PhiMemoryDef, vnStore->VNForHandle(ssize_t(blk), 0), phiAppVN);
4887 GetMemoryPerSsaData(blk->bbMemorySsaNumIn[memoryKind])->m_vnPair.SetLiberal(newMemoryVN);
4888 fgCurMemoryVN[memoryKind] = newMemoryVN;
4889 if ((memoryKind == GcHeap) && byrefStatesMatchGcHeapStates)
4891 // Keep the CurMemoryVNs in sync
4892 fgCurMemoryVN[ByrefExposed] = newMemoryVN;
4898 printf("The SSA definition for %s (#%d) at start of BB%02u is ", memoryKindNames[memoryKind],
4899 blk->bbMemorySsaNumIn[memoryKind], blk->bbNum);
4900 vnPrint(fgCurMemoryVN[memoryKind], 1);
4906 // Now iterate over the remaining statements, and their trees.
4907 for (GenTree* stmt = firstNonPhi; stmt != nullptr; stmt = stmt->gtNext)
4909 assert(stmt->IsStatement());
4915 printf("\n***** BB%02u, stmt %d (before)\n", blk->bbNum, compCurStmtNum);
4916 gtDispTree(stmt->gtStmt.gtStmtExpr);
4921 for (GenTree* tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
4923 fgValueNumberTree(tree);
4929 printf("\n***** BB%02u, stmt %d (after)\n", blk->bbNum, compCurStmtNum);
4930 gtDispTree(stmt->gtStmt.gtStmtExpr);
4934 printf("---------\n");
// Publish the block's final memory state to the out SSA names.
4940 for (MemoryKind memoryKind : allMemoryKinds())
4942 if ((memoryKind == GcHeap) && byrefStatesMatchGcHeapStates)
4944 // The update to the shared SSA data will have already happened for ByrefExposed.
4945 assert(memoryKind > ByrefExposed);
4946 assert(blk->bbMemorySsaNumOut[memoryKind] == blk->bbMemorySsaNumOut[ByrefExposed]);
4947 assert(GetMemoryPerSsaData(blk->bbMemorySsaNumOut[memoryKind])->m_vnPair.GetLiberal() ==
4948 fgCurMemoryVN[memoryKind]);
4952 if (blk->bbMemorySsaNumOut[memoryKind] != blk->bbMemorySsaNumIn[memoryKind])
4954 GetMemoryPerSsaData(blk->bbMemorySsaNumOut[memoryKind])->m_vnPair.SetLiberal(fgCurMemoryVN[memoryKind]);
4958 compCurBB = nullptr;
// Compute the incoming memory VN for "entryBlock", the entry of the loop nest
// whose innermost loop is "innermostLoopNum". Returns a fresh opaque VN when
// the (outermost applicable) loop has memory havoc or the entry has multiple
// non-loop predecessors; otherwise starts from the single non-loop pred's
// memory post-state and (for GcHeap) invalidates every field and array-element
// type the loop modifies via map stores to unique VNs.
4961 ValueNum Compiler::fgMemoryVNForLoopSideEffects(MemoryKind memoryKind,
4962 BasicBlock* entryBlock,
4963 unsigned innermostLoopNum)
4965 // "loopNum" is the innermost loop for which "blk" is the entry; find the outermost one.
4966 assert(innermostLoopNum != BasicBlock::NOT_IN_LOOP);
4967 unsigned loopsInNest = innermostLoopNum;
4968 unsigned loopNum = innermostLoopNum;
4969 while (loopsInNest != BasicBlock::NOT_IN_LOOP)
4971 if (optLoopTable[loopsInNest].lpEntry != entryBlock)
4975 loopNum = loopsInNest;
4976 loopsInNest = optLoopTable[loopsInNest].lpParent;
4982 printf("Computing %s state for block BB%02u, entry block for loops %d to %d:\n", memoryKindNames[memoryKind],
4983 entryBlock->bbNum, innermostLoopNum, loopNum);
4987 // If this loop has memory havoc effects, just use a new, unique VN.
4988 if (optLoopTable[loopNum].lpLoopHasMemoryHavoc[memoryKind])
4990 ValueNum res = vnStore->VNForExpr(entryBlock, TYP_REF);
4994 printf(" Loop %d has memory havoc effect; heap state is new fresh $%x.\n", loopNum, res);
5000 // Otherwise, find the predecessors of the entry block that are not in the loop.
5001 // If there is only one such, use its memory value as the "base." If more than one,
5002 // use a new unique VN.
5003 BasicBlock* nonLoopPred = nullptr;
5004 bool multipleNonLoopPreds = false;
5005 for (flowList* pred = BlockPredsWithEH(entryBlock); pred != nullptr; pred = pred->flNext)
5007 BasicBlock* predBlock = pred->flBlock;
5008 if (!optLoopTable[loopNum].lpContains(predBlock))
5010 if (nonLoopPred == nullptr)
5012 nonLoopPred = predBlock;
5019 printf(" Entry block has >1 non-loop preds: (at least) BB%02u and BB%02u.\n", nonLoopPred->bbNum,
5023 multipleNonLoopPreds = true;
5028 if (multipleNonLoopPreds)
5030 ValueNum res = vnStore->VNForExpr(entryBlock, TYP_REF);
5034 printf(" Therefore, memory state is new, fresh $%x.\n", res);
5039 // Otherwise, there is a single non-loop pred.
5040 assert(nonLoopPred != nullptr);
5041 // What is its memory post-state?
5042 ValueNum newMemoryVN = GetMemoryPerSsaData(nonLoopPred->bbMemorySsaNumOut[memoryKind])->m_vnPair.GetLiberal();
5043 assert(newMemoryVN !=
5044 ValueNumStore::NoVN); // We must have processed the single non-loop pred before reaching the loop entry.
5049 printf(" Init %s state is $%x, with new, fresh VN at:\n", memoryKindNames[memoryKind], newMemoryVN);
5052 // Modify "base" by setting all the modified fields/field maps/array maps to unknown values.
5053 // These annotations apply specifically to the GcHeap, where we disambiguate across such stores.
5054 if (memoryKind == GcHeap)
5056 // First the fields/field maps.
5057 Compiler::LoopDsc::FieldHandleSet* fieldsMod = optLoopTable[loopNum].lpFieldsModified;
5058 if (fieldsMod != nullptr)
5060 for (Compiler::LoopDsc::FieldHandleSet::KeyIterator ki = fieldsMod->Begin(); !ki.Equal(fieldsMod->End());
5063 CORINFO_FIELD_HANDLE fldHnd = ki.Get();
5064 ValueNum fldHndVN = vnStore->VNForHandle(ssize_t(fldHnd), GTF_ICON_FIELD_HDL);
5069 const char* modName;
5070 const char* fldName = eeGetFieldName(fldHnd, &modName);
5071 printf(" VNForHandle(Fseq[%s]) is " STR_VN "%x\n", fldName, fldHndVN);
5073 printf(" fgCurMemoryVN assigned:\n");
5078 vnStore->VNForMapStore(TYP_REF, newMemoryVN, fldHndVN, vnStore->VNForExpr(entryBlock, TYP_REF));
5081 // Now do the array maps.
5082 Compiler::LoopDsc::ClassHandleSet* elemTypesMod = optLoopTable[loopNum].lpArrayElemTypesModified;
5083 if (elemTypesMod != nullptr)
5085 for (Compiler::LoopDsc::ClassHandleSet::KeyIterator ki = elemTypesMod->Begin();
5086 !ki.Equal(elemTypesMod->End()); ++ki)
5088 CORINFO_CLASS_HANDLE elemClsHnd = ki.Get();
5093 var_types elemTyp = DecodeElemType(elemClsHnd);
5094 if (varTypeIsStruct(elemTyp))
5096 printf(" Array map %s[]\n", eeGetClassName(elemClsHnd));
5100 printf(" Array map %s[]\n", varTypeName(elemTyp));
5102 printf(" fgCurMemoryVN assigned:\n");
5106 ValueNum elemTypeVN = vnStore->VNForHandle(ssize_t(elemClsHnd), GTF_ICON_CLASS_HDL);
5107 ValueNum uniqueVN = vnStore->VNForExpr(entryBlock, TYP_REF);
5108 newMemoryVN = vnStore->VNForMapStore(TYP_REF, newMemoryVN, elemTypeVN, uniqueVN);
5114 // If there were any fields/elements modified, this should have been recorded as havoc
5115 // for ByrefExposed.
5116 assert(memoryKind == ByrefExposed);
5117 assert((optLoopTable[loopNum].lpFieldsModified == nullptr) ||
5118 optLoopTable[loopNum].lpLoopHasMemoryHavoc[memoryKind]);
5119 assert((optLoopTable[loopNum].lpArrayElemTypesModified == nullptr) ||
5120 optLoopTable[loopNum].lpLoopHasMemoryHavoc[memoryKind]);
5126 printf(" Final %s state is $%x.\n", memoryKindNames[memoryKind], newMemoryVN);
// Record an arbitrary (opaque) mutation of the GC heap caused by "tree":
// assigns a fresh VNForExpr as the new GcHeap state via recordGcHeapStore.
5132 void Compiler::fgMutateGcHeap(GenTree* tree DEBUGARG(const char* msg))
5134 // Update the current memory VN, and if we're tracking the heap SSA # caused by this node, record it.
5135 recordGcHeapStore(tree, vnStore->VNForExpr(compCurBB, TYP_REF) DEBUGARG(msg));
// Record an arbitrary (opaque) mutation of an address-exposed local caused by
// "tree": assigns a fresh VNForExpr as the new ByrefExposed state.
5138 void Compiler::fgMutateAddressExposedLocal(GenTree* tree DEBUGARG(const char* msg))
5140 // Update the current ByrefExposed VN, and if we're tracking the heap SSA # caused by this node, record it.
5141 recordAddressExposedLocalStore(tree, vnStore->VNForExpr(compCurBB) DEBUGARG(msg));
// Set the current GcHeap VN to "gcHeapVN" for the store "curTree", propagate
// it to ByrefExposed (exactly when the two states are shared, otherwise
// ByrefExposed gets a fresh opaque VN), and record the node's memory SSA name.
5144 void Compiler::recordGcHeapStore(GenTree* curTree, ValueNum gcHeapVN DEBUGARG(const char* msg))
5146 // bbMemoryDef must include GcHeap for any block that mutates the GC Heap
5147 // and GC Heap mutations are also ByrefExposed mutations
5148 assert((compCurBB->bbMemoryDef & memoryKindSet(GcHeap, ByrefExposed)) == memoryKindSet(GcHeap, ByrefExposed));
5149 fgCurMemoryVN[GcHeap] = gcHeapVN;
5151 if (byrefStatesMatchGcHeapStates)
5153 // Since GcHeap and ByrefExposed share SSA nodes, they need to share
5154 // value numbers too.
5155 fgCurMemoryVN[ByrefExposed] = gcHeapVN;
5159 // GcHeap and ByrefExposed have different defnums and VNs. We conservatively
5160 // assume that this GcHeap store may alias any byref load/store, so don't
5161 // bother trying to record the map/select stuff, and instead just an opaque VN
5163 fgCurMemoryVN[ByrefExposed] = vnStore->VNForExpr(compCurBB);
5169 printf(" fgCurMemoryVN[GcHeap] assigned by %s at ", msg);
5170 Compiler::printTreeID(curTree);
5171 printf(" to VN: " STR_VN "%x.\n", gcHeapVN);
5175 // If byrefStatesMatchGcHeapStates is true, then since GcHeap and ByrefExposed share
5176 // their SSA map entries, the below will effectively update both.
5177 fgValueNumberRecordMemorySsa(GcHeap, curTree);
// Set the current ByrefExposed VN to "memoryVN" for the address-exposed local
// store "curTree", and record the node's memory SSA name. Only reachable when
// ByrefExposed is tracked separately from GcHeap (see assert below).
5180 void Compiler::recordAddressExposedLocalStore(GenTree* curTree, ValueNum memoryVN DEBUGARG(const char* msg))
5182 // This should only happen if GcHeap and ByrefExposed are being tracked separately;
5183 // otherwise we'd go through recordGcHeapStore.
5184 assert(!byrefStatesMatchGcHeapStates);
5186 // bbMemoryDef must include ByrefExposed for any block that mutates an address-exposed local
5187 assert((compCurBB->bbMemoryDef & memoryKindSet(ByrefExposed)) != 0);
5188 fgCurMemoryVN[ByrefExposed] = memoryVN;
5193 printf(" fgCurMemoryVN[ByrefExposed] assigned by %s at ", msg);
5194 Compiler::printTreeID(curTree);
5195 printf(" to VN: " STR_VN "%x.\n", memoryVN);
5199 fgValueNumberRecordMemorySsa(ByrefExposed, curTree);
// If "tree" defines a memory SSA name for "memoryKind", set that name's
// liberal VN to the current fgCurMemoryVN[memoryKind]; otherwise do nothing.
5202 void Compiler::fgValueNumberRecordMemorySsa(MemoryKind memoryKind, GenTree* tree)
5205 if (GetMemorySsaMap(memoryKind)->Lookup(tree, &ssaNum))
5207 GetMemoryPerSsaData(ssaNum)->m_vnPair.SetLiberal(fgCurMemoryVN[memoryKind]);
5212 Compiler::printTreeID(tree);
5213 printf(" sets %s SSA # %d to VN $%x: ", memoryKindNames[memoryKind], ssaNum, fgCurMemoryVN[memoryKind]);
5214 vnStore->vnDump(this, fgCurMemoryVN[memoryKind]);
5221 // The input 'tree' is a leaf node that is a constant
5222 // Assign the proper value number to the tree
// Dispatches on the constant's kind: handle constants get VNForHandle; integer
// constants get int/long VNs by width; floating constants get float/double
// VNs; a zero TYP_REF/TYP_BYREF gets the canonical null VN.
5223 void Compiler::fgValueNumberTreeConst(GenTree* tree)
5225 genTreeOps oper = tree->OperGet();
5226 var_types typ = tree->TypeGet();
5227 assert(GenTree::OperIsConst(oper));
5240 if (tree->IsCnsIntOrI() && tree->IsIconHandle())
5242 tree->gtVNPair.SetBoth(
5243 vnStore->VNForHandle(ssize_t(tree->gtIntConCommon.IconValue()), tree->GetIconHandleFlag()));
5245 else if ((typ == TYP_LONG) || (typ == TYP_ULONG))
5247 tree->gtVNPair.SetBoth(vnStore->VNForLongCon(INT64(tree->gtIntConCommon.LngValue())));
5251 tree->gtVNPair.SetBoth(vnStore->VNForIntCon(int(tree->gtIntConCommon.IconValue())));
5256 tree->gtVNPair.SetBoth(vnStore->VNForFloatCon((float)tree->gtDblCon.gtDconVal));
5259 tree->gtVNPair.SetBoth(vnStore->VNForDoubleCon(tree->gtDblCon.gtDconVal));
// TYP_REF constants: 0 is null; any other value must be a frozen string handle.
5262 if (tree->gtIntConCommon.IconValue() == 0)
5264 tree->gtVNPair.SetBoth(ValueNumStore::VNForNull());
5268 assert(tree->gtFlags == GTF_ICON_STR_HDL); // Constant object can be only frozen string.
5269 tree->gtVNPair.SetBoth(
5270 vnStore->VNForHandle(ssize_t(tree->gtIntConCommon.IconValue()), tree->GetIconHandleFlag()));
// TYP_BYREF constants: 0 is null; otherwise a handle or a byref constant.
5275 if (tree->gtIntConCommon.IconValue() == 0)
5277 tree->gtVNPair.SetBoth(ValueNumStore::VNForNull());
5281 assert(tree->IsCnsIntOrI());
5283 if (tree->IsIconHandle())
5285 tree->gtVNPair.SetBoth(
5286 vnStore->VNForHandle(ssize_t(tree->gtIntConCommon.IconValue()), tree->GetIconHandleFlag()));
5290 tree->gtVNPair.SetBoth(vnStore->VNForByrefCon(tree->gtIntConCommon.IconValue()));
5300 //------------------------------------------------------------------------
5301 // fgValueNumberBlockAssignment: Perform value numbering for block assignments.
5304 // tree - the block assignment to be value numbered.
5305 // evalAsgLhsInd - true iff we should value number the LHS of the assignment.
5311 // 'tree' must be a block assignment (GT_INITBLK, GT_COPYBLK, GT_COPYOBJ).
5313 void Compiler::fgValueNumberBlockAssignment(GenTree* tree, bool evalAsgLhsInd)
5315 GenTree* lhs = tree->gtGetOp1();
5316 GenTree* rhs = tree->gtGetOp2();
5318 // Sometimes we query the memory ssa map in an assertion, and need a dummy location for the ignored result.
5319 unsigned memorySsaNum;
5322 if (tree->OperIsInitBlkOp())
5324 GenTreeLclVarCommon* lclVarTree;
5327 if (tree->DefinesLocal(this, &lclVarTree, &isEntire))
5329 assert(lclVarTree->gtFlags & GTF_VAR_DEF);
5330 // Should not have been recorded as updating the GC heap.
5331 assert(!GetMemorySsaMap(GcHeap)->Lookup(tree, &memorySsaNum));
5333 unsigned lclNum = lclVarTree->GetLclNum();
5335 // Ignore vars that we excluded from SSA (for example, because they're address-exposed). They don't have
5336 // SSA names in which to store VN's on defs. We'll yield unique VN's when we read from them.
5337 if (!fgExcludeFromSsa(lclNum))
5339 // Should not have been recorded as updating ByrefExposed.
5340 assert(!GetMemorySsaMap(ByrefExposed)->Lookup(tree, &memorySsaNum));
5342 unsigned lclDefSsaNum = GetSsaNumForLocalVarDef(lclVarTree);
5344 ValueNum initBlkVN = ValueNumStore::NoVN;
5345 GenTree* initConst = rhs;
5346 if (isEntire && initConst->OperGet() == GT_CNS_INT)
5348 unsigned initVal = 0xFF & (unsigned)initConst->AsIntConCommon()->IconValue();
5351 initBlkVN = vnStore->VNZeroForType(lclVarTree->TypeGet());
5354 ValueNum lclVarVN = (initBlkVN != ValueNumStore::NoVN)
5356 : vnStore->VNForExpr(compCurBB, var_types(lvaTable[lclNum].lvType));
5358 lvaTable[lclNum].GetPerSsaData(lclDefSsaNum)->m_vnPair.SetBoth(lclVarVN);
5362 printf("N%03u ", tree->gtSeqNum);
5363 Compiler::printTreeID(tree);
5365 gtDispNodeName(tree);
5366 printf(" V%02u/%d => ", lclNum, lclDefSsaNum);
5367 vnPrint(lclVarVN, 1);
5372 else if (lvaVarAddrExposed(lclVarTree->gtLclNum))
5374 fgMutateAddressExposedLocal(tree DEBUGARG("INITBLK - address-exposed local"));
5379 // For now, arbitrary side effect on GcHeap/ByrefExposed.
5380 // TODO-CQ: Why not be complete, and get this case right?
5381 fgMutateGcHeap(tree DEBUGARG("INITBLK - non local"));
5383 // Initblock's are of type void. Give them the void "value" -- they may occur in argument lists, which we
5384 // want to be able to give VN's to.
5385 tree->gtVNPair.SetBoth(ValueNumStore::VNForVoid());
5389 assert(tree->OperIsCopyBlkOp());
5390 // TODO-Cleanup: We should factor things so that we uniformly rely on "PtrTo" VN's, and
5391 // the memory cases can be shared with assignments.
5392 GenTreeLclVarCommon* lclVarTree = nullptr;
5393 bool isEntire = false;
5394 // Note that we don't care about exceptions here, since we're only using the values
5395 // to perform an assignment (which happens after any exceptions are raised...)
5397 if (tree->DefinesLocal(this, &lclVarTree, &isEntire))
5399 // Should not have been recorded as updating the GC heap.
5400 assert(!GetMemorySsaMap(GcHeap)->Lookup(tree, &memorySsaNum));
5402 unsigned lhsLclNum = lclVarTree->GetLclNum();
5403 FieldSeqNode* lhsFldSeq = nullptr;
5404 // If it's excluded from SSA, don't need to do anything.
5405 if (!fgExcludeFromSsa(lhsLclNum))
5407 // Should not have been recorded as updating ByrefExposed.
5408 assert(!GetMemorySsaMap(ByrefExposed)->Lookup(tree, &memorySsaNum));
5410 unsigned lclDefSsaNum = GetSsaNumForLocalVarDef(lclVarTree);
5412 if (lhs->IsLocalExpr(this, &lclVarTree, &lhsFldSeq) ||
5413 (lhs->OperIsBlk() && (lhs->AsBlk()->gtBlkSize == lvaLclSize(lhsLclNum))))
5415 noway_assert(lclVarTree->gtLclNum == lhsLclNum);
5420 if (lhs->OperIsBlk())
5422 lhsAddr = lhs->AsBlk()->Addr();
5426 assert(lhs->OperGet() == GT_IND);
5427 lhsAddr = lhs->gtOp.gtOp1;
5430 // For addr-of-local expressions, lib/cons shouldn't matter.
5431 assert(lhsAddr->gtVNPair.BothEqual());
5432 ValueNum lhsAddrVN = lhsAddr->GetVN(VNK_Liberal);
5434 // Unpack the PtrToLoc value number of the address.
5435 assert(vnStore->IsVNFunc(lhsAddrVN));
5437 VNFuncApp lhsAddrFuncApp;
5438 vnStore->GetVNFunc(lhsAddrVN, &lhsAddrFuncApp);
5440 assert(lhsAddrFuncApp.m_func == VNF_PtrToLoc);
5441 assert(vnStore->IsVNConstant(lhsAddrFuncApp.m_args[0]) &&
5442 vnStore->ConstantValue<unsigned>(lhsAddrFuncApp.m_args[0]) == lhsLclNum);
5444 lhsFldSeq = vnStore->FieldSeqVNToFieldSeq(lhsAddrFuncApp.m_args[1]);
5447 // Now we need to get the proper RHS.
5448 GenTreeLclVarCommon* rhsLclVarTree = nullptr;
5449 LclVarDsc* rhsVarDsc = nullptr;
5450 FieldSeqNode* rhsFldSeq = nullptr;
5451 ValueNumPair rhsVNPair;
5452 bool isNewUniq = false;
5453 if (!rhs->OperIsIndir())
5455 if (rhs->IsLocalExpr(this, &rhsLclVarTree, &rhsFldSeq))
5457 unsigned rhsLclNum = rhsLclVarTree->GetLclNum();
5458 rhsVarDsc = &lvaTable[rhsLclNum];
5459 if (fgExcludeFromSsa(rhsLclNum) || rhsFldSeq == FieldSeqStore::NotAField())
5461 rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, rhsLclVarTree->TypeGet()));
5466 rhsVNPair = lvaTable[rhsLclVarTree->GetLclNum()]
5467 .GetPerSsaData(rhsLclVarTree->GetSsaNum())
5469 var_types indType = rhsLclVarTree->TypeGet();
5471 rhsVNPair = vnStore->VNPairApplySelectors(rhsVNPair, rhsFldSeq, indType);
5476 rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, rhs->TypeGet()));
5482 GenTree* srcAddr = rhs->AsIndir()->Addr();
5483 VNFuncApp srcAddrFuncApp;
5484 if (srcAddr->IsLocalAddrExpr(this, &rhsLclVarTree, &rhsFldSeq))
5486 unsigned rhsLclNum = rhsLclVarTree->GetLclNum();
5487 rhsVarDsc = &lvaTable[rhsLclNum];
5488 if (fgExcludeFromSsa(rhsLclNum) || rhsFldSeq == FieldSeqStore::NotAField())
5494 rhsVNPair = lvaTable[rhsLclVarTree->GetLclNum()]
5495 .GetPerSsaData(rhsLclVarTree->GetSsaNum())
5497 var_types indType = rhsLclVarTree->TypeGet();
5499 rhsVNPair = vnStore->VNPairApplySelectors(rhsVNPair, rhsFldSeq, indType);
5502 else if (vnStore->GetVNFunc(vnStore->VNNormVal(srcAddr->gtVNPair.GetLiberal()), &srcAddrFuncApp))
5504 if (srcAddrFuncApp.m_func == VNF_PtrToStatic)
5506 var_types indType = lclVarTree->TypeGet();
5507 ValueNum fieldSeqVN = srcAddrFuncApp.m_args[0];
5509 FieldSeqNode* zeroOffsetFldSeq = nullptr;
5510 if (GetZeroOffsetFieldMap()->Lookup(srcAddr, &zeroOffsetFldSeq))
5513 vnStore->FieldSeqVNAppend(fieldSeqVN, vnStore->VNForFieldSeq(zeroOffsetFldSeq));
5516 FieldSeqNode* fldSeqForStaticVar = vnStore->FieldSeqVNToFieldSeq(fieldSeqVN);
5518 if (fldSeqForStaticVar != FieldSeqStore::NotAField())
5520 // We model statics as indices into GcHeap (which is a subset of ByrefExposed).
5521 ValueNum selectedStaticVar;
5522 size_t structSize = 0;
5523 selectedStaticVar = vnStore->VNApplySelectors(VNK_Liberal, fgCurMemoryVN[GcHeap],
5524 fldSeqForStaticVar, &structSize);
5526 vnStore->VNApplySelectorsTypeCheck(selectedStaticVar, indType, structSize);
5528 rhsVNPair.SetLiberal(selectedStaticVar);
5529 rhsVNPair.SetConservative(vnStore->VNForExpr(compCurBB, indType));
5533 JITDUMP(" *** Missing field sequence info for Src/RHS of COPYBLK\n");
5534 rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, indType)); // a new unique value number
5537 else if (srcAddrFuncApp.m_func == VNF_PtrToArrElem)
5540 fgValueNumberArrIndexVal(nullptr, &srcAddrFuncApp, vnStore->VNForEmptyExcSet());
5541 rhsVNPair.SetLiberal(elemLib);
5542 rhsVNPair.SetConservative(vnStore->VNForExpr(compCurBB, lclVarTree->TypeGet()));
5555 if (lhsFldSeq == FieldSeqStore::NotAField())
5557 // We don't have proper field sequence information for the lhs
5559 JITDUMP(" *** Missing field sequence info for Dst/LHS of COPYBLK\n");
5562 else if (lhsFldSeq != nullptr && isEntire)
5564 // This can occur for structs with one field, itself of a struct type.
5565 // We won't promote these.
5566 // TODO-Cleanup: decide what exactly to do about this.
5567 // Always treat them as maps, making them use/def, or reconstitute the
5571 else if (!isNewUniq)
5573 ValueNumPair oldLhsVNPair = lvaTable[lhsLclNum].GetPerSsaData(lclVarTree->GetSsaNum())->m_vnPair;
5574 rhsVNPair = vnStore->VNPairApplySelectorsAssign(oldLhsVNPair, lhsFldSeq, rhsVNPair,
5575 lclVarTree->TypeGet(), compCurBB);
5580 rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lclVarTree->TypeGet()));
5583 lvaTable[lhsLclNum].GetPerSsaData(lclDefSsaNum)->m_vnPair = vnStore->VNPNormVal(rhsVNPair);
5589 Compiler::printTreeID(tree);
5590 printf(" assigned VN to local var V%02u/%d: ", lhsLclNum, lclDefSsaNum);
5593 printf("new uniq ");
5595 vnpPrint(rhsVNPair, 1);
5600 else if (lvaVarAddrExposed(lhsLclNum))
5602 fgMutateAddressExposedLocal(tree DEBUGARG("COPYBLK - address-exposed local"));
5607 // For now, arbitrary side effect on GcHeap/ByrefExposed.
5608 // TODO-CQ: Why not be complete, and get this case right?
5609 fgMutateGcHeap(tree DEBUGARG("COPYBLK - non local"));
5611 // Copyblock's are of type void. Give them the void "value" -- they may occur in argument lists, which we want
5612 // to be able to give VN's to.
5613 tree->gtVNPair.SetBoth(ValueNumStore::VNForVoid());
5617 void Compiler::fgValueNumberTree(GenTree* tree, bool evalAsgLhsInd)
5619 genTreeOps oper = tree->OperGet();
5622 // TODO-CQ: For now TYP_SIMD values are not handled by value numbering to be amenable for CSE'ing.
5623 if (oper == GT_SIMD)
5625 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, TYP_UNKNOWN));
5630 #ifdef FEATURE_HW_INTRINSICS
5631 if (oper == GT_HWIntrinsic)
5633 // TODO-CQ: For now hardware intrinsics are not handled by value numbering to be amenable for CSE'ing.
5634 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, TYP_UNKNOWN));
5636 GenTreeHWIntrinsic* hwIntrinsicNode = tree->AsHWIntrinsic();
5637 assert(hwIntrinsicNode != nullptr);
5639 // For safety/correctness we must mutate the global heap valuenumber
5640 // for any HW intrinsic that performs a memory store operation
5641 if (hwIntrinsicNode->OperIsMemoryStore())
5643 fgMutateGcHeap(tree DEBUGARG("HWIntrinsic - MemoryStore"));
5648 #endif // FEATURE_HW_INTRINSICS
5650 var_types typ = tree->TypeGet();
5651 if (GenTree::OperIsConst(oper))
5653 // If this is a struct assignment, with a constant rhs, it is an initBlk, and it is not
5654 // really useful to value number the constant.
5655 if (!varTypeIsStruct(tree))
5657 fgValueNumberTreeConst(tree);
5660 else if (GenTree::OperIsLeaf(oper))
5667 GenTreeLclVarCommon* lcl = tree->AsLclVarCommon();
5668 unsigned lclNum = lcl->gtLclNum;
5670 if ((lcl->gtFlags & GTF_VAR_DEF) == 0 ||
5671 (lcl->gtFlags & GTF_VAR_USEASG)) // If it is a "pure" def, it will be handled as part of the assignment.
5673 LclVarDsc* varDsc = &lvaTable[lcl->gtLclNum];
5674 if (varDsc->lvPromoted && varDsc->lvFieldCnt == 1)
5676 // If the promoted var has only one field var, treat like a use of the field var.
5677 lclNum = varDsc->lvFieldLclStart;
5680 // Initialize to the undefined value, so we know whether we hit any of the cases here.
5681 lcl->gtVNPair = ValueNumPair();
5683 if (lcl->gtSsaNum == SsaConfig::RESERVED_SSA_NUM)
5685 // Not an SSA variable.
5687 if (lvaVarAddrExposed(lclNum))
5689 // Address-exposed locals are part of ByrefExposed.
5690 ValueNum addrVN = vnStore->VNForFunc(TYP_BYREF, VNF_PtrToLoc, vnStore->VNForIntCon(lclNum),
5691 vnStore->VNForFieldSeq(nullptr));
5692 ValueNum loadVN = fgValueNumberByrefExposedLoad(typ, addrVN);
5694 lcl->gtVNPair.SetBoth(loadVN);
5698 // Assign odd cases a new, unique, VN.
5699 lcl->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lcl->TypeGet()));
5704 var_types varType = varDsc->TypeGet();
5705 ValueNumPair wholeLclVarVNP = varDsc->GetPerSsaData(lcl->gtSsaNum)->m_vnPair;
5707 // Check for mismatched LclVar size
5709 unsigned typSize = genTypeSize(genActualType(typ));
5710 unsigned varSize = genTypeSize(genActualType(varType));
5712 if (typSize == varSize)
5714 lcl->gtVNPair = wholeLclVarVNP;
5716 else // mismatched LclVar definition and LclVar use size
5718 if (typSize < varSize)
5720 // the indirection is reading less than the whole LclVar
5721 // create a new VN that represent the partial value
5723 ValueNumPair partialLclVarVNP = vnStore->VNPairForCast(wholeLclVarVNP, typ, varType);
5724 lcl->gtVNPair = partialLclVarVNP;
5728 assert(typSize > varSize);
5729 // the indirection is reading beyond the end of the field
5731 lcl->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, typ)); // return a new unique value
5736 // Temporary, to make progress.
5737 // TODO-CQ: This should become an assert again...
5738 if (lcl->gtVNPair.GetLiberal() == ValueNumStore::NoVN)
5740 assert(lcl->gtVNPair.GetConservative() == ValueNumStore::NoVN);
5742 // We don't want to fabricate arbitrary value numbers to things we can't reason about.
5743 // So far, we know about two of these cases:
5744 // Case 1) We have a local var who has never been defined but it's seen as a use.
5745 // This is the case of storeIndir(addr(lclvar)) = expr. In this case since we only
5746 // take the address of the variable, this doesn't mean it's a use nor do we have to
5747 // initialize it, so in this very rare case, we fabricate a value number.
5748 // Case 2) Local variables that represent structs which are assigned using CpBlk.
5749 GenTree* nextNode = lcl->gtNext;
5750 assert((nextNode->gtOper == GT_ADDR && nextNode->gtOp.gtOp1 == lcl) ||
5751 varTypeIsStruct(lcl->TypeGet()));
5752 lcl->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lcl->TypeGet()));
5754 assert(lcl->gtVNPair.BothDefined());
5757 // TODO-Review: For the short term, we have a workaround for copyblk/initblk. Those that use
5758 // addrSpillTemp will have a statement like "addrSpillTemp = addr(local)." If we previously decided
5759 // that this block operation defines the local, we will have labeled the "local" node as a DEF
5760 // This flag propagates to the "local" on the RHS. So we'll assume that this is correct,
5761 // and treat it as a def (to a new, unique VN).
5762 else if ((lcl->gtFlags & GTF_VAR_DEF) != 0)
5764 LclVarDsc* varDsc = &lvaTable[lcl->gtLclNum];
5765 if (lcl->gtSsaNum != SsaConfig::RESERVED_SSA_NUM)
5768 .GetPerSsaData(lcl->gtSsaNum)
5769 ->m_vnPair.SetBoth(vnStore->VNForExpr(compCurBB, lcl->TypeGet()));
5771 lcl->gtVNPair = ValueNumPair(); // Avoid confusion -- we don't set the VN of a lcl being defined.
5777 // Use the value of the function pointer (actually, a method handle.)
5778 tree->gtVNPair.SetBoth(
5779 vnStore->VNForHandle(ssize_t(tree->gtFptrVal.gtFptrMethod), GTF_ICON_METHOD_HDL));
5782 // This group passes through a value from a child node.
5784 tree->SetVNsFromNode(tree->gtRetExpr.gtInlineCandidate);
5789 GenTreeLclFld* lclFld = tree->AsLclFld();
5790 assert(fgExcludeFromSsa(lclFld->GetLclNum()) || lclFld->gtFieldSeq != nullptr);
5791 // If this is a (full) def, then the variable will be labeled with the new SSA number,
5792 // which will not have a value. We skip; it will be handled by one of the assignment-like
5793 // forms (assignment, or initBlk or copyBlk).
5794 if (((lclFld->gtFlags & GTF_VAR_DEF) == 0) || (lclFld->gtFlags & GTF_VAR_USEASG))
5796 unsigned lclNum = lclFld->GetLclNum();
5797 unsigned ssaNum = lclFld->GetSsaNum();
5798 LclVarDsc* varDsc = &lvaTable[lclNum];
5800 if (ssaNum == SsaConfig::UNINIT_SSA_NUM)
5802 if (varDsc->GetPerSsaData(ssaNum)->m_vnPair.GetLiberal() == ValueNumStore::NoVN)
5804 ValueNum vnForLcl = vnStore->VNForExpr(compCurBB, lclFld->TypeGet());
5805 varDsc->GetPerSsaData(ssaNum)->m_vnPair = ValueNumPair(vnForLcl, vnForLcl);
5809 var_types indType = tree->TypeGet();
5810 if (lclFld->gtFieldSeq == FieldSeqStore::NotAField() || fgExcludeFromSsa(lclFld->GetLclNum()))
5812 // This doesn't represent a proper field access or it's a struct
5813 // with overlapping fields that is hard to reason about; return a new unique VN.
5814 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, indType));
5818 ValueNumPair lclVNPair = varDsc->GetPerSsaData(ssaNum)->m_vnPair;
5819 tree->gtVNPair = vnStore->VNPairApplySelectors(lclVNPair, lclFld->gtFieldSeq, indType);
5825 // The ones below here all get a new unique VN -- but for various reasons, explained after each.
5827 // We know nothing about the value of a caught expression.
5828 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
5832 // Skip GT_CLS_VAR nodes that are the LHS of an assignment. (We labeled these earlier.)
5833 // We will "evaluate" this as part of the assignment. (Unless we're explicitly told by
5834 // the caller to evaluate anyway -- perhaps the assignment is an "op=" assignment.)
5836 if (((tree->gtFlags & GTF_CLS_VAR_ASG_LHS) == 0) || evalAsgLhsInd)
5838 bool isVolatile = (tree->gtFlags & GTF_FLD_VOLATILE) != 0;
5842 // For Volatile indirection, first mutate GcHeap/ByrefExposed
5843 fgMutateGcHeap(tree DEBUGARG("GTF_FLD_VOLATILE - read"));
5846 // We just mutate GcHeap/ByrefExposed if isVolatile is true, and then do the read as normal.
5850 // 2: volatile read s;
5853 // We should never assume that the values read by 1 and 2 are the same (because the heap was mutated
5854 // in between them)... but we *should* be able to prove that the values read in 2 and 3 are the
5858 ValueNumPair clsVarVNPair;
5860 // If the static field handle is for a struct type field, then the value of the static
5861 // is a "ref" to the boxed struct -- treat it as the address of the static (we assume that a
5862 // first element offset will be added to get to the actual struct...)
5863 GenTreeClsVar* clsVar = tree->AsClsVar();
5864 FieldSeqNode* fldSeq = clsVar->gtFieldSeq;
5865 assert(fldSeq != nullptr); // We need to have one.
5866 ValueNum selectedStaticVar = ValueNumStore::NoVN;
5867 if (gtIsStaticFieldPtrToBoxedStruct(clsVar->TypeGet(), fldSeq->m_fieldHnd))
5869 clsVarVNPair.SetBoth(
5870 vnStore->VNForFunc(TYP_BYREF, VNF_PtrToStatic, vnStore->VNForFieldSeq(fldSeq)));
5874 // This is a reference to heap memory.
5875 // We model statics as indices into GcHeap (which is a subset of ByrefExposed).
5877 FieldSeqNode* fldSeqForStaticVar =
5878 GetFieldSeqStore()->CreateSingleton(tree->gtClsVar.gtClsVarHnd);
5879 size_t structSize = 0;
5880 selectedStaticVar = vnStore->VNApplySelectors(VNK_Liberal, fgCurMemoryVN[GcHeap],
5881 fldSeqForStaticVar, &structSize);
5883 vnStore->VNApplySelectorsTypeCheck(selectedStaticVar, tree->TypeGet(), structSize);
5885 clsVarVNPair.SetLiberal(selectedStaticVar);
5886 // The conservative interpretation always gets a new, unique VN.
5887 clsVarVNPair.SetConservative(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
5890 // The ValueNum returned must represent the full-sized IL-Stack value
5891 // If we need to widen this value then we need to introduce a VNF_Cast here to represent
5892 // the widened value. This is necessary since the CSE package can replace all occurrences
5893 // of a given ValueNum with a LclVar that is a full-sized IL-Stack value
5895 if (varTypeIsSmall(tree->TypeGet()))
5897 var_types castToType = tree->TypeGet();
5898 clsVarVNPair = vnStore->VNPairForCast(clsVarVNPair, castToType, castToType);
5900 tree->gtVNPair = clsVarVNPair;
5904 case GT_MEMORYBARRIER: // Leaf
5905 // For MEMORYBARRIER add an arbitrary side effect on GcHeap/ByrefExposed.
5906 fgMutateGcHeap(tree DEBUGARG("MEMORYBARRIER"));
5909 // These do not represent values.
5911 case GT_JMP: // Control flow
5912 case GT_LABEL: // Control flow
5913 #if !FEATURE_EH_FUNCLETS
5914 case GT_END_LFIN: // Control flow
5917 // This node is a standin for an argument whose value will be computed later. (Perhaps it's
5918 // a register argument, and we don't want to preclude use of the register in arg evaluation yet.)
5919 // We give this a "fake" value number now; if the call in which it occurs cares about the
5920 // value (e.g., it's a helper call whose result is a function of argument values) we'll reset
5921 // this later, when the later args have been assigned VNs.
5922 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
5926 // This one is special because we should never process it in this method: it should
5927 // always be taken care of, when needed, during pre-processing of a blocks phi definitions.
5935 else if (GenTree::OperIsSimple(oper))
5938 // Sometimes we query the memory ssa map in an assertion, and need a dummy location for the ignored result.
5939 unsigned memorySsaNum;
5942 if (GenTree::OperIsAssignment(oper) && !varTypeIsStruct(tree))
5945 GenTree* lhs = tree->gtOp.gtOp1;
5946 GenTree* rhs = tree->gtOp.gtOp2;
5948 ValueNumPair rhsVNPair;
5951 rhsVNPair = rhs->gtVNPair;
5953 else // Must be an "op="
5955 #ifndef LEGACY_BACKEND
5958 // If the LHS is an IND, we didn't evaluate it when we visited it previously.
5959 // But we didn't know that the parent was an op=. We do now, so go back and evaluate it.
5960 // (We actually check if the effective val is the IND. We will have evaluated any non-last
5961 // args of an LHS comma already -- including their memory effects.)
5962 GenTree* lhsVal = lhs->gtEffectiveVal(/*commaOnly*/ true);
5963 if (lhsVal->OperIsIndir() || (lhsVal->OperGet() == GT_CLS_VAR))
5965 fgValueNumberTree(lhsVal, /*evalAsgLhsInd*/ true);
5967 // Now we can make this assertion:
5968 assert(lhsVal->gtVNPair.BothDefined());
5969 genTreeOps op = GenTree::OpAsgToOper(oper);
5970 if (GenTree::OperIsBinary(op))
5972 ValueNumPair lhsNormVNP;
5973 ValueNumPair lhsExcVNP;
5974 lhsExcVNP.SetBoth(ValueNumStore::VNForEmptyExcSet());
5975 vnStore->VNPUnpackExc(lhsVal->gtVNPair, &lhsNormVNP, &lhsExcVNP);
5976 assert(rhs->gtVNPair.BothDefined());
5977 ValueNumPair rhsNormVNP;
5978 ValueNumPair rhsExcVNP;
5979 rhsExcVNP.SetBoth(ValueNumStore::VNForEmptyExcSet());
5980 vnStore->VNPUnpackExc(rhs->gtVNPair, &rhsNormVNP, &rhsExcVNP);
5981 rhsVNPair = vnStore->VNPWithExc(vnStore->VNPairForFunc(tree->TypeGet(),
5982 GetVNFuncForOper(op, (tree->gtFlags &
5983 GTF_UNSIGNED) != 0),
5984 lhsNormVNP, rhsNormVNP),
5985 vnStore->VNPExcSetUnion(lhsExcVNP, rhsExcVNP));
5989 // As of now, GT_CHS ==> GT_NEG is the only pattern fitting this.
5990 assert(GenTree::OperIsUnary(op));
5991 ValueNumPair lhsNormVNP;
5992 ValueNumPair lhsExcVNP;
5993 lhsExcVNP.SetBoth(ValueNumStore::VNForEmptyExcSet());
5994 vnStore->VNPUnpackExc(lhsVal->gtVNPair, &lhsNormVNP, &lhsExcVNP);
5995 rhsVNPair = vnStore->VNPWithExc(vnStore->VNPairForFunc(tree->TypeGet(),
5996 GetVNFuncForOper(op, (tree->gtFlags &
5997 GTF_UNSIGNED) != 0),
6001 #endif // !LEGACY_BACKEND
6004 // Is the type being stored different from the type computed by the rhs?
6005 if (rhs->TypeGet() != lhs->TypeGet())
6007 // This means that there is an implicit cast on the rhs value
6009 // We will add a cast function to reflect the possible narrowing of the rhs value
6011 var_types castToType = lhs->TypeGet();
6012 var_types castFromType = rhs->TypeGet();
6013 bool isUnsigned = varTypeIsUnsigned(castFromType);
6015 rhsVNPair = vnStore->VNPairForCast(rhsVNPair, castToType, castFromType, isUnsigned);
6018 if (tree->TypeGet() != TYP_VOID)
6020 // Assignment operators, as expressions, return the value of the RHS.
6021 tree->gtVNPair = rhsVNPair;
6024 // Now that we've labeled the assignment as a whole, we don't care about exceptions.
6025 rhsVNPair = vnStore->VNPNormVal(rhsVNPair);
6027 // If the types of the rhs and lhs are different then we
6028 // may want to change the ValueNumber assigned to the lhs.
6030 if (rhs->TypeGet() != lhs->TypeGet())
6032 if (rhs->TypeGet() == TYP_REF)
6034 // If we have an unsafe IL assignment of a TYP_REF to a non-ref (typically a TYP_BYREF)
6035 // then don't propagate this ValueNumber to the lhs, instead create a new unique VN
6037 rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lhs->TypeGet()));
6041 // We have to handle the case where the LHS is a comma. In that case, we don't evaluate the comma,
6042 // so we give it VNForVoid, and we're really interested in the effective value.
6043 GenTree* lhsCommaIter = lhs;
6044 while (lhsCommaIter->OperGet() == GT_COMMA)
6046 lhsCommaIter->gtVNPair.SetBoth(vnStore->VNForVoid());
6047 lhsCommaIter = lhsCommaIter->gtOp.gtOp2;
6049 lhs = lhs->gtEffectiveVal();
6051 // Now, record the new VN for an assignment (performing the indicated "state update").
6052 // It's safe to use gtEffectiveVal here, because the non-last elements of a comma list on the
6053 // LHS will come before the assignment in evaluation order.
6054 switch (lhs->OperGet())
6059 GenTreeLclVarCommon* lcl = lhs->AsLclVarCommon();
6060 unsigned lclDefSsaNum = GetSsaNumForLocalVarDef(lcl);
6062 // Should not have been recorded as updating the GC heap.
6063 assert(!GetMemorySsaMap(GcHeap)->Lookup(tree, &memorySsaNum));
6065 if (lclDefSsaNum != SsaConfig::RESERVED_SSA_NUM)
6067 // Should not have been recorded as updating ByrefExposed mem.
6068 assert(!GetMemorySsaMap(ByrefExposed)->Lookup(tree, &memorySsaNum));
6070 assert(rhsVNPair.GetLiberal() != ValueNumStore::NoVN);
6072 lhs->gtVNPair = rhsVNPair;
6073 lvaTable[lcl->gtLclNum].GetPerSsaData(lclDefSsaNum)->m_vnPair = rhsVNPair;
6078 printf("N%03u ", lhs->gtSeqNum);
6079 Compiler::printTreeID(lhs);
6081 gtDispNodeName(lhs);
6082 gtDispLeaf(lhs, nullptr);
6084 vnpPrint(lhs->gtVNPair, 1);
6089 else if (lvaVarAddrExposed(lcl->gtLclNum))
6091 // We could use MapStore here and MapSelect on reads of address-exposed locals
6092 // (using the local nums as selectors) to get e.g. propagation of values
6093 // through address-taken locals in regions of code with no calls or byref
6095 // For now, just use a new opaque VN.
6096 ValueNum heapVN = vnStore->VNForExpr(compCurBB);
6097 recordAddressExposedLocalStore(tree, heapVN DEBUGARG("local assign"));
6105 Compiler::printTreeID(tree);
6106 printf(" assigns to non-address-taken local var V%02u; excluded from SSA, so value not "
6116 GenTreeLclFld* lclFld = lhs->AsLclFld();
6117 unsigned lclDefSsaNum = GetSsaNumForLocalVarDef(lclFld);
6119 // Should not have been recorded as updating the GC heap.
6120 assert(!GetMemorySsaMap(GcHeap)->Lookup(tree, &memorySsaNum));
6122 if (lclDefSsaNum != SsaConfig::RESERVED_SSA_NUM)
6124 ValueNumPair newLhsVNPair;
6125 // Is this a full definition?
6126 if ((lclFld->gtFlags & GTF_VAR_USEASG) == 0)
6128 assert(!lclFld->IsPartialLclFld(this));
6129 assert(rhsVNPair.GetLiberal() != ValueNumStore::NoVN);
6130 newLhsVNPair = rhsVNPair;
6134 // We should never have a null field sequence here.
6135 assert(lclFld->gtFieldSeq != nullptr);
6136 if (lclFld->gtFieldSeq == FieldSeqStore::NotAField())
6138 // We don't know what field this represents. Assign a new VN to the whole variable
6139 // (since we may be writing to an unknown portion of it.)
6140 newLhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lvaGetActualType(lclFld->gtLclNum)));
6144 // We do know the field sequence.
6145 // The "lclFld" node will be labeled with the SSA number of its "use" identity
6146 // (we looked in a side table above for its "def" identity). Look up that value.
6147 ValueNumPair oldLhsVNPair =
6148 lvaTable[lclFld->GetLclNum()].GetPerSsaData(lclFld->GetSsaNum())->m_vnPair;
6149 newLhsVNPair = vnStore->VNPairApplySelectorsAssign(oldLhsVNPair, lclFld->gtFieldSeq,
6150 rhsVNPair, // Pre-value.
6151 lclFld->TypeGet(), compCurBB);
6154 lvaTable[lclFld->GetLclNum()].GetPerSsaData(lclDefSsaNum)->m_vnPair = newLhsVNPair;
6155 lhs->gtVNPair = newLhsVNPair;
6159 if (lhs->gtVNPair.GetLiberal() != ValueNumStore::NoVN)
6161 printf("N%03u ", lhs->gtSeqNum);
6162 Compiler::printTreeID(lhs);
6164 gtDispNodeName(lhs);
6165 gtDispLeaf(lhs, nullptr);
6167 vnpPrint(lhs->gtVNPair, 1);
6173 else if (lvaVarAddrExposed(lclFld->gtLclNum))
6175 // This side-effects ByrefExposed. Just use a new opaque VN.
6176 // As with GT_LCL_VAR, we could probably use MapStore here and MapSelect at corresponding
6177 // loads, but to do so would have to identify the subset of address-exposed locals
6178 // whose fields can be disambiguated.
6179 ValueNum heapVN = vnStore->VNForExpr(compCurBB);
6180 recordAddressExposedLocalStore(tree, heapVN DEBUGARG("local field assign"));
6186 assert(false); // Phi arg cannot be LHS.
6192 bool isVolatile = (lhs->gtFlags & GTF_IND_VOLATILE) != 0;
6196 // For Volatile store indirection, first mutate GcHeap/ByrefExposed
6197 fgMutateGcHeap(lhs DEBUGARG("GTF_IND_VOLATILE - store"));
6198 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lhs->TypeGet()));
6201 GenTree* arg = lhs->gtOp.gtOp1;
6203 // Indicates whether the argument of the IND is the address of a local.
6204 bool wasLocal = false;
6206 lhs->gtVNPair = rhsVNPair;
6209 ValueNum argVN = arg->gtVNPair.GetLiberal();
6211 bool argIsVNFunc = vnStore->GetVNFunc(vnStore->VNNormVal(argVN), &funcApp);
6213 // Is this an assignment to a (field of, perhaps) a local?
6214 // If it is a PtrToLoc, lib and cons VNs will be the same.
6217 IndirectAssignmentAnnotation* pIndirAnnot =
6218 nullptr; // This will be used if "tree" is an "indirect assignment",
6220 if (funcApp.m_func == VNF_PtrToLoc)
6222 assert(arg->gtVNPair.BothEqual()); // If it's a PtrToLoc, lib/cons shouldn't differ.
6223 assert(vnStore->IsVNConstant(funcApp.m_args[0]));
6224 unsigned lclNum = vnStore->ConstantValue<unsigned>(funcApp.m_args[0]);
6228 if (!fgExcludeFromSsa(lclNum))
6230 FieldSeqNode* fieldSeq = vnStore->FieldSeqVNToFieldSeq(funcApp.m_args[1]);
6232 // Either "arg" is the address of (part of) a local itself, or the assignment is an
6233 // "indirect assignment", where an outer comma expression assigned the address of a
6234 // local to a temp, and that temp is our lhs, and we recorded this in a table when we
6235 // made the indirect assignment...or else we have a "rogue" PtrToLoc, one that should
6236 // have made the local in question address-exposed. Assert on that.
6237 GenTreeLclVarCommon* lclVarTree = nullptr;
6238 bool isEntire = false;
6239 unsigned lclDefSsaNum = SsaConfig::RESERVED_SSA_NUM;
6240 ValueNumPair newLhsVNPair;
6242 if (arg->DefinesLocalAddr(this, genTypeSize(lhs->TypeGet()), &lclVarTree, &isEntire))
6244 // The local #'s should agree.
6245 assert(lclNum == lclVarTree->GetLclNum());
6247 if (fieldSeq == FieldSeqStore::NotAField())
6249 // We don't know where we're storing, so give the local a new, unique VN.
6250 // Do this by considering it an "entire" assignment, with an unknown RHS.
6252 rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lclVarTree->TypeGet()));
6257 newLhsVNPair = rhsVNPair;
6258 lclDefSsaNum = lclVarTree->GetSsaNum();
6262 // Don't use the lclVarTree's VN: if it's a local field, it will
6263 // already be dereferenced by its field sequence.
6264 ValueNumPair oldLhsVNPair = lvaTable[lclVarTree->GetLclNum()]
6265 .GetPerSsaData(lclVarTree->GetSsaNum())
6267 lclDefSsaNum = GetSsaNumForLocalVarDef(lclVarTree);
6269 vnStore->VNPairApplySelectorsAssign(oldLhsVNPair, fieldSeq, rhsVNPair,
6270 lhs->TypeGet(), compCurBB);
6272 lvaTable[lclNum].GetPerSsaData(lclDefSsaNum)->m_vnPair = newLhsVNPair;
6274 else if (m_indirAssignMap != nullptr && GetIndirAssignMap()->Lookup(tree, &pIndirAnnot))
6276 // The local #'s should agree.
6277 assert(lclNum == pIndirAnnot->m_lclNum);
6278 assert(pIndirAnnot->m_defSsaNum != SsaConfig::RESERVED_SSA_NUM);
6279 lclDefSsaNum = pIndirAnnot->m_defSsaNum;
6280 // Does this assignment write the entire width of the local?
6281 if (genTypeSize(lhs->TypeGet()) == genTypeSize(var_types(lvaTable[lclNum].lvType)))
6283 assert(pIndirAnnot->m_useSsaNum == SsaConfig::RESERVED_SSA_NUM);
6284 assert(pIndirAnnot->m_isEntire);
6285 newLhsVNPair = rhsVNPair;
6289 assert(pIndirAnnot->m_useSsaNum != SsaConfig::RESERVED_SSA_NUM);
6290 assert(!pIndirAnnot->m_isEntire);
6291 assert(pIndirAnnot->m_fieldSeq == fieldSeq);
6292 ValueNumPair oldLhsVNPair =
6293 lvaTable[lclNum].GetPerSsaData(pIndirAnnot->m_useSsaNum)->m_vnPair;
6295 vnStore->VNPairApplySelectorsAssign(oldLhsVNPair, fieldSeq, rhsVNPair,
6296 lhs->TypeGet(), compCurBB);
6298 lvaTable[lclNum].GetPerSsaData(lclDefSsaNum)->m_vnPair = newLhsVNPair;
6302 unreached(); // "Rogue" PtrToLoc, as discussed above.
6308 Compiler::printTreeID(tree);
6309 printf(" assigned VN to local var V%02u/%d: VN ", lclNum, lclDefSsaNum);
6310 vnpPrint(newLhsVNPair, 1);
6315 else if (lvaVarAddrExposed(lclNum))
6317 // Need to record the effect on ByrefExposed.
6318 // We could use MapStore here and MapSelect on reads of address-exposed locals
6319 // (using the local nums as selectors) to get e.g. propagation of values
6320 // through address-taken locals in regions of code with no calls or byref
6322 // For now, just use a new opaque VN.
6323 ValueNum heapVN = vnStore->VNForExpr(compCurBB);
6324 recordAddressExposedLocalStore(tree, heapVN DEBUGARG("PtrToLoc indir"));
6329 // Was the argument of the GT_IND the address of a local, handled above?
6332 GenTree* obj = nullptr;
6333 GenTree* staticOffset = nullptr;
6334 FieldSeqNode* fldSeq = nullptr;
6336 // Is the LHS an array index expression?
6337 if (argIsVNFunc && funcApp.m_func == VNF_PtrToArrElem)
6339 CORINFO_CLASS_HANDLE elemTypeEq =
6340 CORINFO_CLASS_HANDLE(vnStore->ConstantValue<ssize_t>(funcApp.m_args[0]));
6341 ValueNum arrVN = funcApp.m_args[1];
6342 ValueNum inxVN = funcApp.m_args[2];
6343 FieldSeqNode* fldSeq = vnStore->FieldSeqVNToFieldSeq(funcApp.m_args[3]);
6345 // Does the child of the GT_IND 'arg' have an associated zero-offset field sequence?
6346 FieldSeqNode* addrFieldSeq = nullptr;
6347 if (GetZeroOffsetFieldMap()->Lookup(arg, &addrFieldSeq))
6349 fldSeq = GetFieldSeqStore()->Append(addrFieldSeq, fldSeq);
6356 Compiler::printTreeID(tree);
6357 printf(" assigns to an array element:\n");
6361 ValueNum heapVN = fgValueNumberArrIndexAssign(elemTypeEq, arrVN, inxVN, fldSeq,
6362 rhsVNPair.GetLiberal(), lhs->TypeGet());
6363 recordGcHeapStore(tree, heapVN DEBUGARG("Array element assignment"));
6365 // It may be that we haven't parsed it yet. Try.
6366 else if (lhs->gtFlags & GTF_IND_ARR_INDEX)
6369 bool b = GetArrayInfoMap()->Lookup(lhs, &arrInfo);
6371 ValueNum arrVN = ValueNumStore::NoVN;
6372 ValueNum inxVN = ValueNumStore::NoVN;
6373 FieldSeqNode* fldSeq = nullptr;
6376 GenTree* arr = nullptr;
6377 arg->ParseArrayAddress(this, &arrInfo, &arr, &inxVN, &fldSeq);
6380 fgMutateGcHeap(tree DEBUGARG("assignment to unparseable array expression"));
6383 // Otherwise, parsing succeeded.
6385 // Need to form H[arrType][arr][ind][fldSeq] = rhsVNPair.GetLiberal()
6387 // Get the element type equivalence class representative.
6388 CORINFO_CLASS_HANDLE elemTypeEq =
6389 EncodeElemType(arrInfo.m_elemType, arrInfo.m_elemStructType);
6390 arrVN = arr->gtVNPair.GetLiberal();
6392 FieldSeqNode* zeroOffsetFldSeq = nullptr;
6393 if (GetZeroOffsetFieldMap()->Lookup(arg, &zeroOffsetFldSeq))
6395 fldSeq = GetFieldSeqStore()->Append(fldSeq, zeroOffsetFldSeq);
6398 ValueNum heapVN = fgValueNumberArrIndexAssign(elemTypeEq, arrVN, inxVN, fldSeq,
6399 rhsVNPair.GetLiberal(), lhs->TypeGet());
6400 recordGcHeapStore(tree, heapVN DEBUGARG("assignment to unparseable array expression"));
6402 else if (arg->IsFieldAddr(this, &obj, &staticOffset, &fldSeq))
6404 if (fldSeq == FieldSeqStore::NotAField())
6406 fgMutateGcHeap(tree DEBUGARG("NotAField"));
6410 assert(fldSeq != nullptr);
6412 CORINFO_CLASS_HANDLE fldCls = info.compCompHnd->getFieldClass(fldSeq->m_fieldHnd);
6415 // Make sure that the class containing it is not a value class (as we are expecting
6416 // an instance field)
6417 assert((info.compCompHnd->getClassAttribs(fldCls) & CORINFO_FLG_VALUECLASS) == 0);
6418 assert(staticOffset == nullptr);
6421 // Get the first (instance or static) field from field seq. GcHeap[field] will yield
6423 if (fldSeq->IsFirstElemFieldSeq())
6425 fldSeq = fldSeq->m_next;
6426 assert(fldSeq != nullptr);
6429 // Get a field sequence for just the first field in the sequence
6431 FieldSeqNode* firstFieldOnly = GetFieldSeqStore()->CreateSingleton(fldSeq->m_fieldHnd);
6433 // The final field in the sequence will need to match the 'indType'
6434 var_types indType = lhs->TypeGet();
6436 vnStore->VNApplySelectors(VNK_Liberal, fgCurMemoryVN[GcHeap], firstFieldOnly);
6438 // The type of the field is "struct" if there are more fields in the sequence,
6439 // otherwise it is the type returned from VNApplySelectors above.
6440 var_types firstFieldType = vnStore->TypeOfVN(fldMapVN);
6443 rhsVNPair.GetLiberal(); // The value number from the rhs of the assignment
6444 ValueNum newFldMapVN = ValueNumStore::NoVN;
6446 // when (obj != nullptr) we have an instance field, otherwise a static field
6447 // when (staticOffset != nullptr) it represents a offset into a static or the call to
6448 // Shared Static Base
6449 if ((obj != nullptr) || (staticOffset != nullptr))
6451 ValueNum valAtAddr = fldMapVN;
6452 ValueNum normVal = ValueNumStore::NoVN;
6456 // construct the ValueNumber for 'fldMap at obj'
6457 normVal = vnStore->VNNormVal(obj->GetVN(VNK_Liberal));
6459 vnStore->VNForMapSelect(VNK_Liberal, firstFieldType, fldMapVN, normVal);
6461 else // (staticOffset != nullptr)
6463 // construct the ValueNumber for 'fldMap at staticOffset'
6464 normVal = vnStore->VNNormVal(staticOffset->GetVN(VNK_Liberal));
6466 vnStore->VNForMapSelect(VNK_Liberal, firstFieldType, fldMapVN, normVal);
6468 // Now get rid of any remaining struct field dereferences. (if they exist)
6472 vnStore->VNApplySelectorsAssign(VNK_Liberal, valAtAddr, fldSeq->m_next,
6473 storeVal, indType, compCurBB);
6476 // From which we can construct the new ValueNumber for 'fldMap at normVal'
6477 newFldMapVN = vnStore->VNForMapStore(vnStore->TypeOfVN(fldMapVN), fldMapVN, normVal,
6482 // plain static field
6484 // Now get rid of any remaining struct field dereferences. (if they exist)
6488 vnStore->VNApplySelectorsAssign(VNK_Liberal, fldMapVN, fldSeq->m_next,
6489 storeVal, indType, compCurBB);
6492 newFldMapVN = vnStore->VNApplySelectorsAssign(VNK_Liberal, fgCurMemoryVN[GcHeap],
6493 fldSeq, storeVal, indType, compCurBB);
6496 // It is not strictly necessary to set the lhs value number,
6497 // but the dumps read better with it set to the 'storeVal' that we just computed
6498 lhs->gtVNPair.SetBoth(storeVal);
6503 printf(" fgCurMemoryVN assigned:\n");
6506 // bbMemoryDef must include GcHeap for any block that mutates the GC heap
6507 assert((compCurBB->bbMemoryDef & memoryKindSet(GcHeap)) != 0);
6509 // Update the field map for firstField in GcHeap to this new value.
6511 vnStore->VNApplySelectorsAssign(VNK_Liberal, fgCurMemoryVN[GcHeap], firstFieldOnly,
6512 newFldMapVN, indType, compCurBB);
6514 recordGcHeapStore(tree, heapVN DEBUGARG("StoreField"));
6519 GenTreeLclVarCommon* lclVarTree = nullptr;
6520 bool isLocal = tree->DefinesLocal(this, &lclVarTree);
6522 if (isLocal && lvaVarAddrExposed(lclVarTree->gtLclNum))
6524 // Store to address-exposed local; need to record the effect on ByrefExposed.
6525 // We could use MapStore here and MapSelect on reads of address-exposed locals
6526 // (using the local nums as selectors) to get e.g. propagation of values
6527 // through address-taken locals in regions of code with no calls or byref
6529 // For now, just use a new opaque VN.
6530 ValueNum memoryVN = vnStore->VNForExpr(compCurBB);
6531 recordAddressExposedLocalStore(tree, memoryVN DEBUGARG("PtrToLoc indir"));
6535 // If it doesn't define a local, then it might update GcHeap/ByrefExposed.
6536 // For the new ByrefExposed VN, we could use an operator here like
6537 // VNF_ByrefExposedStore that carries the VNs of the pointer and RHS, then
6538 // at byref loads if the current ByrefExposed VN happens to be
6539 // VNF_ByrefExposedStore with the same pointer VN, we could propagate the
6540 // VN from the RHS to the VN for the load. This would e.g. allow tracking
6541 // values through assignments to out params. For now, just model this
6542 // as an opaque GcHeap/ByrefExposed mutation.
6543 fgMutateGcHeap(tree DEBUGARG("assign-of-IND"));
6548 // We don't actually evaluate an IND on the LHS, so give it the Void value.
6549 tree->gtVNPair.SetBoth(vnStore->VNForVoid());
6555 bool isVolatile = (lhs->gtFlags & GTF_FLD_VOLATILE) != 0;
6559 // For Volatile store indirection, first mutate GcHeap/ByrefExposed
6560 fgMutateGcHeap(lhs DEBUGARG("GTF_CLS_VAR - store")); // always change fgCurMemoryVN
6563 // We model statics as indices into GcHeap (which is a subset of ByrefExposed).
6564 FieldSeqNode* fldSeqForStaticVar = GetFieldSeqStore()->CreateSingleton(lhs->gtClsVar.gtClsVarHnd);
6565 assert(fldSeqForStaticVar != FieldSeqStore::NotAField());
6567 ValueNum storeVal = rhsVNPair.GetLiberal(); // The value number from the rhs of the assignment
6568 storeVal = vnStore->VNApplySelectorsAssign(VNK_Liberal, fgCurMemoryVN[GcHeap], fldSeqForStaticVar,
6569 storeVal, lhs->TypeGet(), compCurBB);
6571 // It is not strictly necessary to set the lhs value number,
6572 // but the dumps read better with it set to the 'storeVal' that we just computed
6573 lhs->gtVNPair.SetBoth(storeVal);
6577 printf(" fgCurMemoryVN assigned:\n");
6580 // bbMemoryDef must include GcHeap for any block that mutates the GC heap
6581 assert((compCurBB->bbMemoryDef & memoryKindSet(GcHeap)) != 0);
6583 // Update the field map for the fgCurMemoryVN and SSA for the tree
6584 recordGcHeapStore(tree, storeVal DEBUGARG("Static Field store"));
6589 assert(!"Unknown node for lhs of assignment!");
6591 // For Unknown stores, mutate GcHeap/ByrefExposed
6592 fgMutateGcHeap(lhs DEBUGARG("Unkwown Assignment - store")); // always change fgCurMemoryVN
6596 // Other kinds of assignment: initblk and copyblk.
6597 else if (oper == GT_ASG && varTypeIsStruct(tree))
6599 fgValueNumberBlockAssignment(tree, evalAsgLhsInd);
6601 else if (oper == GT_ADDR)
6603 // We have special representations for byrefs to lvalues.
6604 GenTree* arg = tree->gtOp.gtOp1;
6605 if (arg->OperIsLocal())
6607 FieldSeqNode* fieldSeq = nullptr;
6608 ValueNum newVN = ValueNumStore::NoVN;
6609 if (fgExcludeFromSsa(arg->gtLclVarCommon.GetLclNum()))
6611 newVN = vnStore->VNForExpr(compCurBB, TYP_BYREF);
6613 else if (arg->OperGet() == GT_LCL_FLD)
6615 fieldSeq = arg->AsLclFld()->gtFieldSeq;
6616 if (fieldSeq == nullptr)
6618 // Local field with unknown field seq -- not a precise pointer.
6619 newVN = vnStore->VNForExpr(compCurBB, TYP_BYREF);
6622 if (newVN == ValueNumStore::NoVN)
6624 assert(arg->gtLclVarCommon.GetSsaNum() != ValueNumStore::NoVN);
6625 newVN = vnStore->VNForFunc(TYP_BYREF, VNF_PtrToLoc,
6626 vnStore->VNForIntCon(arg->gtLclVarCommon.GetLclNum()),
6627 vnStore->VNForFieldSeq(fieldSeq));
6629 tree->gtVNPair.SetBoth(newVN);
6631 else if ((arg->gtOper == GT_IND) || arg->OperIsBlk())
6633 // Usually the ADDR and IND just cancel out...
6634 // except when this GT_ADDR has a valid zero-offset field sequence
6636 FieldSeqNode* zeroOffsetFieldSeq = nullptr;
6637 if (GetZeroOffsetFieldMap()->Lookup(tree, &zeroOffsetFieldSeq) &&
6638 (zeroOffsetFieldSeq != FieldSeqStore::NotAField()))
6640 ValueNum addrExtended = vnStore->ExtendPtrVN(arg->gtOp.gtOp1, zeroOffsetFieldSeq);
6641 if (addrExtended != ValueNumStore::NoVN)
6643 tree->gtVNPair.SetBoth(addrExtended); // We don't care about lib/cons differences for addresses.
6647 // ExtendPtrVN returned a failure result
6648 // So give this address a new unique value
6649 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, TYP_BYREF));
6654 // They just cancel, so fetch the ValueNumber from the op1 of the GT_IND node.
6656 GenTree* addr = arg->AsIndir()->Addr();
6657 tree->gtVNPair = addr->gtVNPair;
6659 // For the CSE phase mark the address as GTF_DONT_CSE
6660 // because it will end up with the same value number as tree (the GT_ADDR).
6661 addr->gtFlags |= GTF_DONT_CSE;
6666 // May be more cases to do here! But we'll punt for now.
6667 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, TYP_BYREF));
6670 else if ((oper == GT_IND) || GenTree::OperIsBlk(oper))
6672 // So far, we handle cases in which the address is a ptr-to-local, or if it's
6673 // a pointer to an object field or array alement. Other cases become uses of
6674 // the current ByrefExposed value and the pointer value, so that at least we
6675 // can recognize redundant loads with no stores between them.
6676 GenTree* addr = tree->AsIndir()->Addr();
6677 GenTreeLclVarCommon* lclVarTree = nullptr;
6678 FieldSeqNode* fldSeq1 = nullptr;
6679 FieldSeqNode* fldSeq2 = nullptr;
6680 GenTree* obj = nullptr;
6681 GenTree* staticOffset = nullptr;
6682 bool isVolatile = (tree->gtFlags & GTF_IND_VOLATILE) != 0;
6684 // See if the addr has any exceptional part.
6685 ValueNumPair addrNvnp;
6686 ValueNumPair addrXvnp = ValueNumPair(ValueNumStore::VNForEmptyExcSet(), ValueNumStore::VNForEmptyExcSet());
6687 vnStore->VNPUnpackExc(addr->gtVNPair, &addrNvnp, &addrXvnp);
6689 // Is the dereference immutable? If so, model it as referencing the read-only heap.
6690 if (tree->gtFlags & GTF_IND_INVARIANT)
6692 assert(!isVolatile); // We don't expect both volatile and invariant
6694 ValueNumPair(vnStore->VNForMapSelect(VNK_Liberal, TYP_REF, ValueNumStore::VNForROH(),
6695 addrNvnp.GetLiberal()),
6696 vnStore->VNForMapSelect(VNK_Conservative, TYP_REF, ValueNumStore::VNForROH(),
6697 addrNvnp.GetConservative()));
6698 tree->gtVNPair = vnStore->VNPWithExc(tree->gtVNPair, addrXvnp);
6700 else if (isVolatile)
6702 // For Volatile indirection, mutate GcHeap/ByrefExposed
6703 fgMutateGcHeap(tree DEBUGARG("GTF_IND_VOLATILE - read"));
6705 // The value read by the GT_IND can immediately change
6706 ValueNum newUniq = vnStore->VNForExpr(compCurBB, tree->TypeGet());
6707 tree->gtVNPair = vnStore->VNPWithExc(ValueNumPair(newUniq, newUniq), addrXvnp);
6709 // We always want to evaluate the LHS when the GT_IND node is marked with GTF_IND_ARR_INDEX
6710 // as this will relabel the GT_IND child correctly using the VNF_PtrToArrElem
6711 else if ((tree->gtFlags & GTF_IND_ARR_INDEX) != 0)
6714 bool b = GetArrayInfoMap()->Lookup(tree, &arrInfo);
6717 ValueNum inxVN = ValueNumStore::NoVN;
6718 FieldSeqNode* fldSeq = nullptr;
6720 // GenTree* addr = tree->gtOp.gtOp1;
6721 ValueNum addrVN = addrNvnp.GetLiberal();
6724 GenTree* arr = nullptr;
6725 addr->ParseArrayAddress(this, &arrInfo, &arr, &inxVN, &fldSeq);
6728 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6731 assert(fldSeq != FieldSeqStore::NotAField());
6734 // Need to form H[arrType][arr][ind][fldSeq]
6735 // Get the array element type equivalence class rep.
6736 CORINFO_CLASS_HANDLE elemTypeEq = EncodeElemType(arrInfo.m_elemType, arrInfo.m_elemStructType);
6737 ValueNum elemTypeEqVN = vnStore->VNForHandle(ssize_t(elemTypeEq), GTF_ICON_CLASS_HDL);
6739 // We take the "VNNormVal"s here, because if either has exceptional outcomes, they will be captured
6740 // as part of the value of the composite "addr" operation...
6741 ValueNum arrVN = vnStore->VNNormVal(arr->gtVNPair.GetLiberal());
6742 inxVN = vnStore->VNNormVal(inxVN);
6744 // Additionally, relabel the address with a PtrToArrElem value number.
6745 ValueNum fldSeqVN = vnStore->VNForFieldSeq(fldSeq);
6747 vnStore->VNForFunc(TYP_BYREF, VNF_PtrToArrElem, elemTypeEqVN, arrVN, inxVN, fldSeqVN);
6749 // The aggregate "addr" VN should have had all the exceptions bubble up...
6750 elemAddr = vnStore->VNWithExc(elemAddr, addrXvnp.GetLiberal());
6751 addr->gtVNPair.SetBoth(elemAddr);
6755 printf(" Relabeled IND_ARR_INDEX address node ");
6756 Compiler::printTreeID(addr);
6757 printf(" with l:" STR_VN "%x: ", elemAddr);
6758 vnStore->vnDump(this, elemAddr);
6760 if (vnStore->VNNormVal(elemAddr) != elemAddr)
6762 printf(" [" STR_VN "%x is: ", vnStore->VNNormVal(elemAddr));
6763 vnStore->vnDump(this, vnStore->VNNormVal(elemAddr));
6768 // We now need to retrieve the value number for the array element value
6769 // and give this value number to the GT_IND node 'tree'
6770 // We do this whenever we have an rvalue, or for the LHS when we have an "op=",
6771 // but we don't do it for a normal LHS assignment into an array element.
6773 if (evalAsgLhsInd || ((tree->gtFlags & GTF_IND_ASG_LHS) == 0))
6775 fgValueNumberArrIndexVal(tree, elemTypeEq, arrVN, inxVN, addrXvnp.GetLiberal(), fldSeq);
6778 else if (tree->gtFlags & GTF_IND_ARR_LEN)
6780 // It's an array length. The argument is the sum of an array ref with some integer values...
6781 ValueNum arrRefLib = vnStore->VNForRefInAddr(tree->gtOp.gtOp1->gtVNPair.GetLiberal());
6782 ValueNum arrRefCons = vnStore->VNForRefInAddr(tree->gtOp.gtOp1->gtVNPair.GetConservative());
6784 assert(vnStore->TypeOfVN(arrRefLib) == TYP_REF || vnStore->TypeOfVN(arrRefLib) == TYP_BYREF);
6785 if (vnStore->IsVNConstant(arrRefLib))
6787 // (or in weird cases, a REF or BYREF constant, in which case the result is an exception).
6788 tree->gtVNPair.SetLiberal(
6789 vnStore->VNWithExc(ValueNumStore::VNForVoid(),
6790 vnStore->VNExcSetSingleton(
6791 vnStore->VNForFunc(TYP_REF, VNF_NullPtrExc, arrRefLib))));
6795 tree->gtVNPair.SetLiberal(vnStore->VNForFunc(TYP_INT, VNFunc(GT_ARR_LENGTH), arrRefLib));
6797 assert(vnStore->TypeOfVN(arrRefCons) == TYP_REF || vnStore->TypeOfVN(arrRefCons) == TYP_BYREF);
6798 if (vnStore->IsVNConstant(arrRefCons))
6800 // (or in weird cases, a REF or BYREF constant, in which case the result is an exception).
6801 tree->gtVNPair.SetConservative(
6802 vnStore->VNWithExc(ValueNumStore::VNForVoid(),
6803 vnStore->VNExcSetSingleton(
6804 vnStore->VNForFunc(TYP_REF, VNF_NullPtrExc, arrRefCons))));
6808 tree->gtVNPair.SetConservative(vnStore->VNForFunc(TYP_INT, VNFunc(GT_ARR_LENGTH), arrRefCons));
6812 // In general we skip GT_IND nodes on that are the LHS of an assignment. (We labeled these earlier.)
6813 // We will "evaluate" this as part of the assignment. (Unless we're explicitly told by
6814 // the caller to evaluate anyway -- perhaps the assignment is an "op=" assignment.)
6815 else if (((tree->gtFlags & GTF_IND_ASG_LHS) == 0) || evalAsgLhsInd)
6817 FieldSeqNode* localFldSeq = nullptr;
6820 // Is it a local or a heap address?
6821 if (addr->IsLocalAddrExpr(this, &lclVarTree, &localFldSeq) &&
6822 !fgExcludeFromSsa(lclVarTree->GetLclNum()))
6824 unsigned lclNum = lclVarTree->GetLclNum();
6825 unsigned ssaNum = lclVarTree->GetSsaNum();
6826 LclVarDsc* varDsc = &lvaTable[lclNum];
6828 if ((localFldSeq == FieldSeqStore::NotAField()) || (localFldSeq == nullptr))
6830 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6834 var_types indType = tree->TypeGet();
6835 ValueNumPair lclVNPair = varDsc->GetPerSsaData(ssaNum)->m_vnPair;
6836 tree->gtVNPair = vnStore->VNPairApplySelectors(lclVNPair, localFldSeq, indType);
6839 tree->gtVNPair = vnStore->VNPWithExc(tree->gtVNPair, addrXvnp);
6841 else if (vnStore->GetVNFunc(addrNvnp.GetLiberal(), &funcApp) && funcApp.m_func == VNF_PtrToStatic)
6843 var_types indType = tree->TypeGet();
6844 ValueNum fieldSeqVN = funcApp.m_args[0];
6846 FieldSeqNode* fldSeqForStaticVar = vnStore->FieldSeqVNToFieldSeq(fieldSeqVN);
6848 if (fldSeqForStaticVar != FieldSeqStore::NotAField())
6850 ValueNum selectedStaticVar;
6851 // We model statics as indices into the GcHeap (which is a subset of ByrefExposed).
6852 size_t structSize = 0;
6853 selectedStaticVar = vnStore->VNApplySelectors(VNK_Liberal, fgCurMemoryVN[GcHeap],
6854 fldSeqForStaticVar, &structSize);
6855 selectedStaticVar = vnStore->VNApplySelectorsTypeCheck(selectedStaticVar, indType, structSize);
6857 tree->gtVNPair.SetLiberal(selectedStaticVar);
6858 tree->gtVNPair.SetConservative(vnStore->VNForExpr(compCurBB, indType));
6862 JITDUMP(" *** Missing field sequence info for VNF_PtrToStatic value GT_IND\n");
6863 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, indType)); // a new unique value number
6865 tree->gtVNPair = vnStore->VNPWithExc(tree->gtVNPair, addrXvnp);
6867 else if (vnStore->GetVNFunc(addrNvnp.GetLiberal(), &funcApp) && (funcApp.m_func == VNF_PtrToArrElem))
6869 fgValueNumberArrIndexVal(tree, &funcApp, addrXvnp.GetLiberal());
6871 else if (addr->IsFieldAddr(this, &obj, &staticOffset, &fldSeq2))
6873 if (fldSeq2 == FieldSeqStore::NotAField())
6875 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6877 else if (fldSeq2 != nullptr)
6879 // Get the first (instance or static) field from field seq. GcHeap[field] will yield the "field
6881 CLANG_FORMAT_COMMENT_ANCHOR;
6884 CORINFO_CLASS_HANDLE fldCls = info.compCompHnd->getFieldClass(fldSeq2->m_fieldHnd);
6887 // Make sure that the class containing it is not a value class (as we are expecting an
6889 assert((info.compCompHnd->getClassAttribs(fldCls) & CORINFO_FLG_VALUECLASS) == 0);
6890 assert(staticOffset == nullptr);
6893 // Get a field sequence for just the first field in the sequence
6895 FieldSeqNode* firstFieldOnly = GetFieldSeqStore()->CreateSingleton(fldSeq2->m_fieldHnd);
6896 size_t structSize = 0;
6898 vnStore->VNApplySelectors(VNK_Liberal, fgCurMemoryVN[GcHeap], firstFieldOnly, &structSize);
6900 // The final field in the sequence will need to match the 'indType'
6901 var_types indType = tree->TypeGet();
6903 // The type of the field is "struct" if there are more fields in the sequence,
6904 // otherwise it is the type returned from VNApplySelectors above.
6905 var_types firstFieldType = vnStore->TypeOfVN(fldMapVN);
6907 ValueNum valAtAddr = fldMapVN;
6910 // construct the ValueNumber for 'fldMap at obj'
6911 ValueNum objNormVal = vnStore->VNNormVal(obj->GetVN(VNK_Liberal));
6912 valAtAddr = vnStore->VNForMapSelect(VNK_Liberal, firstFieldType, fldMapVN, objNormVal);
6914 else if (staticOffset != nullptr)
6916 // construct the ValueNumber for 'fldMap at staticOffset'
6917 ValueNum offsetNormVal = vnStore->VNNormVal(staticOffset->GetVN(VNK_Liberal));
6918 valAtAddr = vnStore->VNForMapSelect(VNK_Liberal, firstFieldType, fldMapVN, offsetNormVal);
6921 // Now get rid of any remaining struct field dereferences.
6922 if (fldSeq2->m_next)
6924 valAtAddr = vnStore->VNApplySelectors(VNK_Liberal, valAtAddr, fldSeq2->m_next, &structSize);
6926 valAtAddr = vnStore->VNApplySelectorsTypeCheck(valAtAddr, indType, structSize);
6928 tree->gtVNPair.SetLiberal(valAtAddr);
6930 // The conservative value is a new, unique VN.
6931 tree->gtVNPair.SetConservative(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6932 tree->gtVNPair = vnStore->VNPWithExc(tree->gtVNPair, addrXvnp);
6936 // Occasionally we do an explicit null test on a REF, so we just dereference it with no
6937 // field sequence. The result is probably unused.
6938 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6939 tree->gtVNPair = vnStore->VNPWithExc(tree->gtVNPair, addrXvnp);
6942 else // We don't know where the address points, so it is an ByrefExposed load.
6944 ValueNum addrVN = addr->gtVNPair.GetLiberal();
6945 ValueNum loadVN = fgValueNumberByrefExposedLoad(typ, addrVN);
6946 tree->gtVNPair.SetLiberal(loadVN);
6947 tree->gtVNPair.SetConservative(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
6948 tree->gtVNPair = vnStore->VNPWithExc(tree->gtVNPair, addrXvnp);
6952 else if (tree->OperGet() == GT_CAST)
6954 fgValueNumberCastTree(tree);
6956 else if (tree->OperGet() == GT_INTRINSIC)
6958 fgValueNumberIntrinsic(tree);
6960 else if (ValueNumStore::VNFuncIsLegal(GetVNFuncForOper(oper, (tree->gtFlags & GTF_UNSIGNED) != 0)))
6962 if (GenTree::OperIsUnary(oper))
6964 if (tree->gtOp.gtOp1 != nullptr)
6966 if (tree->OperGet() == GT_NOP)
6968 // Pass through arg vn.
6969 tree->gtVNPair = tree->gtOp.gtOp1->gtVNPair;
6973 ValueNumPair op1VNP;
6974 ValueNumPair op1VNPx = ValueNumStore::VNPForEmptyExcSet();
6975 vnStore->VNPUnpackExc(tree->gtOp.gtOp1->gtVNPair, &op1VNP, &op1VNPx);
6977 // If we are fetching the array length for an array ref that came from global memory
6978 // then for CSE safety we must use the conservative value number for both
6980 if ((tree->OperGet() == GT_ARR_LENGTH) && ((tree->gtOp.gtOp1->gtFlags & GTF_GLOB_REF) != 0))
6982 // use the conservative value number for both when computing the VN for the ARR_LENGTH
6983 op1VNP.SetBoth(op1VNP.GetConservative());
6987 vnStore->VNPWithExc(vnStore->VNPairForFunc(tree->TypeGet(),
6988 GetVNFuncForOper(oper, (tree->gtFlags &
6989 GTF_UNSIGNED) != 0),
6994 else // Is actually nullary.
6996 // Mostly we'll leave these without a value number, assuming we'll detect these as VN failures
6997 // if they actually need to have values. With the exception of NOPs, which can sometimes have
6999 if (tree->OperGet() == GT_NOP)
7001 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
7007 assert(!GenTree::OperIsAssignment(oper)); // We handled assignments earlier.
7008 assert(GenTree::OperIsBinary(oper));
7009 // Standard binary operator.
7010 ValueNumPair op2VNPair;
7011 if (tree->gtOp.gtOp2 == nullptr)
7013 op2VNPair.SetBoth(ValueNumStore::VNForNull());
7017 op2VNPair = tree->gtOp.gtOp2->gtVNPair;
7019 // A few special case: if we add a field offset constant to a PtrToXXX, we get back a new PtrToXXX.
7020 ValueNum newVN = ValueNumStore::NoVN;
7022 ValueNumPair op1vnp;
7023 ValueNumPair op1Xvnp = ValueNumStore::VNPForEmptyExcSet();
7024 vnStore->VNPUnpackExc(tree->gtOp.gtOp1->gtVNPair, &op1vnp, &op1Xvnp);
7025 ValueNumPair op2vnp;
7026 ValueNumPair op2Xvnp = ValueNumStore::VNPForEmptyExcSet();
7027 vnStore->VNPUnpackExc(op2VNPair, &op2vnp, &op2Xvnp);
7028 ValueNumPair excSet = vnStore->VNPExcSetUnion(op1Xvnp, op2Xvnp);
7032 newVN = vnStore->ExtendPtrVN(tree->gtOp.gtOp1, tree->gtOp.gtOp2);
7033 if (newVN == ValueNumStore::NoVN)
7035 newVN = vnStore->ExtendPtrVN(tree->gtOp.gtOp2, tree->gtOp.gtOp1);
7038 if (newVN != ValueNumStore::NoVN)
7040 newVN = vnStore->VNWithExc(newVN, excSet.GetLiberal());
7041 // We don't care about differences between liberal and conservative for pointer values.
7042 tree->gtVNPair.SetBoth(newVN);
7047 ValueNumPair normalRes =
7048 vnStore->VNPairForFunc(tree->TypeGet(),
7049 GetVNFuncForOper(oper, (tree->gtFlags & GTF_UNSIGNED) != 0), op1vnp,
7051 // Overflow-checking operations add an overflow exception
7052 if (tree->gtOverflowEx())
7054 ValueNum overflowExcSet =
7055 vnStore->VNExcSetSingleton(vnStore->VNForFunc(TYP_REF, VNF_OverflowExc));
7056 excSet = vnStore->VNPExcSetUnion(excSet, ValueNumPair(overflowExcSet, overflowExcSet));
7058 tree->gtVNPair = vnStore->VNPWithExc(normalRes, excSet);
7062 else // ValueNumStore::VNFuncIsLegal returns false
7064 // Some of the genTreeOps that aren't legal VNFuncs so they get special handling.
7069 ValueNumPair op1vnp;
7070 ValueNumPair op1Xvnp = ValueNumStore::VNPForEmptyExcSet();
7071 vnStore->VNPUnpackExc(tree->gtOp.gtOp1->gtVNPair, &op1vnp, &op1Xvnp);
7072 ValueNumPair op2vnp;
7073 ValueNumPair op2Xvnp = ValueNumStore::VNPForEmptyExcSet();
7075 GenTree* op2 = tree->gtGetOp2();
7076 if (op2->OperIsIndir() && ((op2->gtFlags & GTF_IND_ASG_LHS) != 0))
7078 // If op2 represents the lhs of an assignment then we give a VNForVoid for the lhs
7079 op2vnp = ValueNumPair(ValueNumStore::VNForVoid(), ValueNumStore::VNForVoid());
7081 else if ((op2->OperGet() == GT_CLS_VAR) && (op2->gtFlags & GTF_CLS_VAR_ASG_LHS))
7083 // If op2 represents the lhs of an assignment then we give a VNForVoid for the lhs
7084 op2vnp = ValueNumPair(ValueNumStore::VNForVoid(), ValueNumStore::VNForVoid());
7088 vnStore->VNPUnpackExc(op2->gtVNPair, &op2vnp, &op2Xvnp);
7091 tree->gtVNPair = vnStore->VNPWithExc(op2vnp, vnStore->VNPExcSetUnion(op1Xvnp, op2Xvnp));
7096 // Explicit null check.
7098 vnStore->VNPWithExc(ValueNumPair(ValueNumStore::VNForVoid(), ValueNumStore::VNForVoid()),
7099 vnStore->VNPExcSetSingleton(
7100 vnStore->VNPairForFunc(TYP_REF, VNF_NullPtrExc,
7101 tree->gtOp.gtOp1->gtVNPair)));
7104 case GT_LOCKADD: // Binop
7105 case GT_XADD: // Binop
7106 case GT_XCHG: // Binop
7107 // For CMPXCHG and other intrinsics add an arbitrary side effect on GcHeap/ByrefExposed.
7108 fgMutateGcHeap(tree DEBUGARG("Interlocked intrinsic"));
7109 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
7114 // These nodes never need to have a ValueNumber
7115 tree->gtVNPair.SetBoth(ValueNumStore::NoVN);
7119 // BOX doesn't do anything at this point, the actual object allocation
7120 // and initialization happens separately (and not numbering BOX correctly
7121 // prevents seeing allocation related assertions through it)
7122 tree->gtVNPair = tree->gtGetOp1()->gtVNPair;
7126 // The default action is to give the node a new, unique VN.
7127 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
7134 assert(GenTree::OperIsSpecial(oper));
7136 // TBD: We must handle these individually. For now:
7140 fgValueNumberCall(tree->AsCall());
7143 case GT_ARR_BOUNDS_CHECK:
7146 #endif // FEATURE_SIMD
7147 #ifdef FEATURE_HW_INTRINSICS
7148 case GT_HW_INTRINSIC_CHK:
7149 #endif // FEATURE_HW_INTRINSICS
7151 // A bounds check node has no value, but may throw exceptions.
7152 ValueNumPair excSet = vnStore->VNPExcSetSingleton(
7153 vnStore->VNPairForFunc(TYP_REF, VNF_IndexOutOfRangeExc,
7154 vnStore->VNPNormVal(tree->AsBoundsChk()->gtIndex->gtVNPair),
7155 vnStore->VNPNormVal(tree->AsBoundsChk()->gtArrLen->gtVNPair)));
7156 excSet = vnStore->VNPExcSetUnion(excSet, vnStore->VNPExcVal(tree->AsBoundsChk()->gtIndex->gtVNPair));
7157 excSet = vnStore->VNPExcSetUnion(excSet, vnStore->VNPExcVal(tree->AsBoundsChk()->gtArrLen->gtVNPair));
7159 tree->gtVNPair = vnStore->VNPWithExc(vnStore->VNPForVoid(), excSet);
7161 // Record non-constant value numbers that are used as the length argument to bounds checks, so that
7162 // assertion prop will know that comparisons against them are worth analyzing.
7163 ValueNum lengthVN = tree->AsBoundsChk()->gtArrLen->gtVNPair.GetConservative();
7164 if ((lengthVN != ValueNumStore::NoVN) && !vnStore->IsVNConstant(lengthVN))
7166 vnStore->SetVNIsCheckedBound(lengthVN);
7171 case GT_CMPXCHG: // Specialop
7172 // For CMPXCHG and other intrinsics add an arbitrary side effect on GcHeap/ByrefExposed.
7173 fgMutateGcHeap(tree DEBUGARG("Interlocked intrinsic"));
7174 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
7178 tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet()));
7184 if (tree->gtVNPair.GetLiberal() != ValueNumStore::NoVN)
7186 printf("N%03u ", tree->gtSeqNum);
7189 gtDispNodeName(tree);
7190 if (tree->OperIsLeaf() || tree->OperIsLocalStore()) // local stores used to be leaves
7192 gtDispLeaf(tree, nullptr);
7195 vnpPrint(tree->gtVNPair, 1);
//------------------------------------------------------------------------
// fgValueNumberIntrinsic: assign a value number pair to a GT_INTRINSIC node.
//
// Math intrinsics are evaluated via EvalMathFuncUnary/EvalMathFuncBinary
// (which may fold to a constant value number); other intrinsics (e.g.
// Object.GetType) are modeled as VN functions over the operand's VN.
7202 void Compiler::fgValueNumberIntrinsic(GenTree* tree)
7204 assert(tree->OperGet() == GT_INTRINSIC);
7205 GenTreeIntrinsic* intrinsic = tree->AsIntrinsic();
7206 ValueNumPair arg0VNP, arg1VNP;
// Exception sets for the operands: start empty; VNPUnpackExc below fills
// them in from the operands' VN pairs.
7207 ValueNumPair arg0VNPx = ValueNumStore::VNPForEmptyExcSet();
7208 ValueNumPair arg1VNPx = ValueNumStore::VNPForEmptyExcSet();
// Split op1's VN pair into its normal value (arg0VNP) and exception set (arg0VNPx).
7210 vnStore->VNPUnpackExc(intrinsic->gtOp.gtOp1->gtVNPair, &arg0VNP, &arg0VNPx);
// op2 is optional; only unpack it when present.
7212 if (intrinsic->gtOp.gtOp2 != nullptr)
7214 vnStore->VNPUnpackExc(intrinsic->gtOp.gtOp2->gtVNPair, &arg1VNP, &arg1VNPx);
7217 if (IsMathIntrinsic(intrinsic->gtIntrinsicId))
7219 // GT_INTRINSIC is a currently a subtype of binary operators. But most of
7220 // the math intrinsics are actually unary operations.
7222 if (intrinsic->gtOp.gtOp2 == nullptr)
// Unary math intrinsic: value is EvalMathFuncUnary(...), re-attaching op1's
// exception set to the result.
7224 intrinsic->gtVNPair =
7225 vnStore->VNPWithExc(vnStore->EvalMathFuncUnary(tree->TypeGet(), intrinsic->gtIntrinsicId, arg0VNP),
// Binary math intrinsic: evaluate over both operands' normal values, then
// attach the union of both operands' exception sets.
7230 ValueNumPair newVNP =
7231 vnStore->EvalMathFuncBinary(tree->TypeGet(), intrinsic->gtIntrinsicId, arg0VNP, arg1VNP);
7232 ValueNumPair excSet = vnStore->VNPExcSetUnion(arg0VNPx, arg1VNPx);
7233 intrinsic->gtVNPair = vnStore->VNPWithExc(newVNP, excSet);
// Not a math intrinsic: handle the specific intrinsic ids that are modeled.
7238 switch (intrinsic->gtIntrinsicId)
7240 case CORINFO_INTRINSIC_Object_GetType:
// Model GetType as the VNF_ObjGetType function of the object's normal VN,
// carrying along op1's exception set.
7241 intrinsic->gtVNPair =
7242 vnStore->VNPWithExc(vnStore->VNPairForFunc(intrinsic->TypeGet(), VNF_ObjGetType, arg0VNP),
7252 void Compiler::fgValueNumberCastTree(GenTree* tree)
7254 assert(tree->OperGet() == GT_CAST);
7256 ValueNumPair srcVNPair = tree->gtOp.gtOp1->gtVNPair;
7257 var_types castToType = tree->CastToType();
7258 var_types castFromType = tree->CastFromType();
7259 bool srcIsUnsigned = ((tree->gtFlags & GTF_UNSIGNED) != 0);
7260 bool hasOverflowCheck = tree->gtOverflowEx();
7262 assert(genActualType(castToType) == genActualType(tree->TypeGet())); // Insure that the resultType is correct
7264 tree->gtVNPair = vnStore->VNPairForCast(srcVNPair, castToType, castFromType, srcIsUnsigned, hasOverflowCheck);
7267 // Compute the normal ValueNumber for a cast operation with no exceptions
7268 ValueNum ValueNumStore::VNForCast(ValueNum srcVN,
7269 var_types castToType,
7270 var_types castFromType,
7271 bool srcIsUnsigned /* = false */)
7273 // The resulting type after performingthe cast is always widened to a supported IL stack size
7274 var_types resultType = genActualType(castToType);
7276 // When we're considering actual value returned by a non-checking cast whether or not the source is
7277 // unsigned does *not* matter for non-widening casts. That is, if we cast an int or a uint to short,
7278 // we just extract the first two bytes from the source bit pattern, not worrying about the interpretation.
7279 // The same is true in casting between signed/unsigned types of the same width. Only when we're doing
7280 // a widening cast do we care about whether the source was unsigned,so we know whether to sign or zero extend it.
7282 bool srcIsUnsignedNorm = srcIsUnsigned;
7283 if (genTypeSize(castToType) <= genTypeSize(castFromType))
7285 srcIsUnsignedNorm = false;
7288 ValueNum castTypeVN = VNForCastOper(castToType, srcIsUnsigned);
7289 ValueNum resultVN = VNForFunc(resultType, VNF_Cast, srcVN, castTypeVN);
7292 if (m_pComp->verbose)
7294 printf(" VNForCast(" STR_VN "%x, " STR_VN "%x) returns ", srcVN, castTypeVN);
7295 m_pComp->vnPrint(resultVN, 1);
7303 // Compute the ValueNumberPair for a cast operation
// Returns the (liberal, conservative) VN pair for casting srcVNPair from
// castFromType to castToType. The source's exception set is propagated, and
// for overflow-checking casts a VNF_ConvOverflowExc entry is added as well.
7304 ValueNumPair ValueNumStore::VNPairForCast(ValueNumPair srcVNPair,
7305 var_types castToType,
7306 var_types castFromType,
7307 bool srcIsUnsigned, /* = false */
7308 bool hasOverflowCheck) /* = false */
7310 // The resulting type after performing the cast is always widened to a supported IL stack size
7311 var_types resultType = genActualType(castToType);
// Split the source pair into its normal value and its exception set.
7313 ValueNumPair castArgVNP;
7314 ValueNumPair castArgxVNP = ValueNumStore::VNPForEmptyExcSet();
7315 VNPUnpackExc(srcVNPair, &castArgVNP, &castArgxVNP);
7317 // When we're considering actual value returned by a non-checking cast (or a checking cast that succeeds),
7318 // whether or not the source is unsigned does *not* matter for non-widening casts.
7319 // That is, if we cast an int or a uint to short, we just extract the first two bytes from the source
7320 // bit pattern, not worrying about the interpretation. The same is true in casting between signed/unsigned
7321 // types of the same width. Only when we're doing a widening cast do we care about whether the source
7322 // was unsigned, so we know whether to sign or zero extend it.
7324 // Important: Casts to floating point cannot be optimized in this fashion. (bug 946768)
7326 bool srcIsUnsignedNorm = srcIsUnsigned;
7327 if (genTypeSize(castToType) <= genTypeSize(castFromType) && !varTypeIsFloating(castToType))
7329 srcIsUnsignedNorm = false;
// Build the normal-value VNF_Cast function VN using the normalized unsignedness,
// so equivalent casts hash to the same value number.
7332 ValueNum castTypeVN = VNForCastOper(castToType, srcIsUnsignedNorm);
7333 ValueNumPair castTypeVNPair(castTypeVN, castTypeVN);
7334 ValueNumPair castNormRes = VNPairForFunc(resultType, VNF_Cast, castArgVNP, castTypeVNPair);
// Re-attach the source's exception set to the cast result.
7336 ValueNumPair resultVNP = VNPWithExc(castNormRes, castArgxVNP);
7338 // If we have a check for overflow, add the exception information.
7339 if (hasOverflowCheck)
7341 // For overflow checking, we always need to know whether the source is unsigned.
7342 castTypeVNPair.SetBoth(VNForCastOper(castToType, srcIsUnsigned));
7343 ValueNumPair excSet =
7344 VNPExcSetSingleton(VNPairForFunc(TYP_REF, VNF_ConvOverflowExc, castArgVNP, castTypeVNPair))
7345 excSet = VNPExcSetUnion(excSet, castArgxVNP);
7346 resultVNP = VNPWithExc(castNormRes, excSet);
7352 void Compiler::fgValueNumberHelperCallFunc(GenTreeCall* call, VNFunc vnf, ValueNumPair vnpExc)
// Value-number a helper call whose semantics are modeled by the VNFunc 'vnf'.
// Sets call->gtVNPair to VNPairForFunc(vnf, args...), optionally mixing in a
// unique VN for allocator-style helpers, and unions argument exception sets
// into 'vnpExc', which is attached to the final pair.
7354 unsigned nArgs = ValueNumStore::VNFuncArity(vnf);
7355 assert(vnf != VNF_Boundary);
7356 GenTreeArgList* args = call->gtCallArgs;
// Allocators produce a fresh object each call, so their result must get a
// unique VN rather than being CSE'd with other calls to the same helper.
7357 bool generateUniqueVN = false;
7358 bool useEntryPointAddrAsArg0 = false;
7364 generateUniqueVN = true;
7365 vnpExc = ValueNumStore::VNPForEmptyExcSet();
7371 generateUniqueVN = true;
7372 ValueNumPair vnp1 = vnStore->VNPNormVal(args->Rest()->Current()->gtVNPair);
7374 // The New Array helper may throw an overflow exception
7375 vnpExc = vnStore->VNPExcSetSingleton(vnStore->VNPairForFunc(TYP_REF, VNF_NewArrOverflowExc, vnp1));
7380 case VNF_BoxNullable:
7382 // Generate a unique VN so VNForFunc generates a unique value number for box nullable.
7383 // Alternatively instead of using vnpUniq below in VNPairForFunc(...),
7384 // we could use the value number of what the byref arg0 points to.
7386 // But retrieving the value number of what the byref arg0 points to is quite a bit more work
7387 // and doing so only very rarely allows for an additional optimization.
7388 generateUniqueVN = true;
7392 case VNF_JitReadyToRunNew:
7394 generateUniqueVN = true;
7395 vnpExc = ValueNumStore::VNPForEmptyExcSet();
7396 useEntryPointAddrAsArg0 = true;
7400 case VNF_JitReadyToRunNewArr:
7402 generateUniqueVN = true;
7403 ValueNumPair vnp1 = vnStore->VNPNormVal(args->Current()->gtVNPair);
7405 // The New Array helper may throw an overflow exception
7406 vnpExc = vnStore->VNPExcSetSingleton(vnStore->VNPairForFunc(TYP_REF, VNF_NewArrOverflowExc, vnp1));
7407 useEntryPointAddrAsArg0 = true;
7411 case VNF_ReadyToRunStaticBase:
7412 case VNF_ReadyToRunGenericStaticBase:
7413 case VNF_ReadyToRunIsInstanceOf:
7414 case VNF_ReadyToRunCastClass:
// R2R helpers take their entry-point address as an extra (implicit) arg0.
7416 useEntryPointAddrAsArg0 = true;
7422 assert(s_helperCallProperties.IsPure(eeGetHelperNum(call->gtCallMethHnd)));
7427 if (generateUniqueVN)
7432 ValueNumPair vnpUniq;
7433 if (generateUniqueVN)
7435 // Generate a unique VN so VNForFunc generates a unique value number.
7436 vnpUniq.SetBoth(vnStore->VNForExpr(compCurBB, call->TypeGet()));
7439 #if defined(FEATURE_READYTORUN_COMPILER) && defined(_TARGET_ARMARCH_)
7440 if (call->IsR2RRelativeIndir())
7443 assert(args->Current()->OperGet() == GT_ARGPLACE);
7445 // Find the corresponding late arg.
7446 GenTree* indirectCellAddress = call->fgArgInfo->GetLateArg(0);
7447 assert(indirectCellAddress->IsCnsIntOrI() && indirectCellAddress->gtRegNum == REG_R2R_INDIRECT_PARAM);
7449 // For ARM indirectCellAddress is consumed by the call itself, so it should have been added as an implicit
7450 // argument in morph. So we do not need to use EntryPointAddrAsArg0, because arg0 is already an entry point addr.
7451 useEntryPointAddrAsArg0 = false;
7453 #endif // FEATURE_READYTORUN_COMPILER && _TARGET_ARMARCH_
7457 if (generateUniqueVN)
7459 call->gtVNPair = vnStore->VNPairForFunc(call->TypeGet(), vnf, vnpUniq);
7463 call->gtVNPair.SetBoth(vnStore->VNForFunc(call->TypeGet(), vnf));
// Helper to fetch the GenTree for argument 'currentIndex', looking through
// GTF_LATE_ARG placeholder/setup nodes to the late arg that holds the VN.
7468 auto getCurrentArg = [call, &args, useEntryPointAddrAsArg0](int currentIndex) {
7469 GenTree* arg = args->Current();
7470 if ((arg->gtFlags & GTF_LATE_ARG) != 0)
7472 // This arg is a setup node that moves the arg into position.
7473 // Value-numbering will have visited the separate late arg that
7474 // holds the actual value, and propagated/computed the value number
7475 // for this arg there.
7476 if (useEntryPointAddrAsArg0)
7478 // The args in the fgArgInfo don't include the entry point, so
7479 // index into them using one less than the requested index.
7482 return call->fgArgInfo->GetLateArg(currentIndex);
7486 // Has at least one argument.
7488 ValueNumPair vnp0x = ValueNumStore::VNPForEmptyExcSet();
7489 #ifdef FEATURE_READYTORUN_COMPILER
7490 if (useEntryPointAddrAsArg0)
// For R2R helpers, arg0 is the constant entry-point address itself.
7492 ValueNum callAddrVN = vnStore->VNForPtrSizeIntCon((ssize_t)call->gtCall.gtEntryPoint.addr);
7493 vnp0 = ValueNumPair(callAddrVN, callAddrVN);
7496 #endif // FEATURE_READYTORUN_COMPILER
7498 assert(!useEntryPointAddrAsArg0);
7499 ValueNumPair vnp0wx = getCurrentArg(0)->gtVNPair;
7500 vnStore->VNPUnpackExc(vnp0wx, &vnp0, &vnp0x);
7502 // Also include in the argument exception sets
7503 vnpExc = vnStore->VNPExcSetUnion(vnpExc, vnp0x);
7505 args = args->Rest();
7509 if (generateUniqueVN)
7511 call->gtVNPair = vnStore->VNPairForFunc(call->TypeGet(), vnf, vnp0, vnpUniq);
7515 call->gtVNPair = vnStore->VNPairForFunc(call->TypeGet(), vnf, vnp0);
7520 // Has at least two arguments.
7521 ValueNumPair vnp1wx = getCurrentArg(1)->gtVNPair;
7523 ValueNumPair vnp1x = ValueNumStore::VNPForEmptyExcSet();
7524 vnStore->VNPUnpackExc(vnp1wx, &vnp1, &vnp1x);
7525 vnpExc = vnStore->VNPExcSetUnion(vnpExc, vnp1x);
7527 args = args->Rest();
7530 if (generateUniqueVN)
7532 call->gtVNPair = vnStore->VNPairForFunc(call->TypeGet(), vnf, vnp0, vnp1, vnpUniq);
7536 call->gtVNPair = vnStore->VNPairForFunc(call->TypeGet(), vnf, vnp0, vnp1);
7541 ValueNumPair vnp2wx = getCurrentArg(2)->gtVNPair;
7543 ValueNumPair vnp2x = ValueNumStore::VNPForEmptyExcSet();
7544 vnStore->VNPUnpackExc(vnp2wx, &vnp2, &vnp2x);
7545 vnpExc = vnStore->VNPExcSetUnion(vnpExc, vnp2x);
7547 args = args->Rest();
7548 assert(nArgs == 3); // Our current maximum.
7549 assert(args == nullptr);
7550 if (generateUniqueVN)
7552 call->gtVNPair = vnStore->VNPairForFunc(call->TypeGet(), vnf, vnp0, vnp1, vnp2, vnpUniq);
7556 call->gtVNPair = vnStore->VNPairForFunc(call->TypeGet(), vnf, vnp0, vnp1, vnp2);
7560 // Add the accumulated exceptions.
7561 call->gtVNPair = vnStore->VNPWithExc(call->gtVNPair, vnpExc);
7563 assert(args == nullptr ||
7564 generateUniqueVN); // All arguments should be processed or we generate unique VN and do not care.
7567 void Compiler::fgValueNumberCall(GenTreeCall* call)
// Assign value numbers to a call node: first patch up GT_ARGPLACE placeholder
// VNs from their late args, then either model a helper call precisely
// (fgValueNumberHelperCall) or give an ordinary call a fresh opaque VN and an
// arbitrary heap mutation.
7569 // First: do value numbering of any argument placeholder nodes in the argument list
7570 // (by transferring from the VN of the late arg that they are standing in for...)
7572 GenTreeArgList* args = call->gtCallArgs;
7573 bool updatedArgPlace = false;
7574 while (args != nullptr)
7576 GenTree* arg = args->Current();
7577 if (arg->OperGet() == GT_ARGPLACE)
7579 // Find the corresponding late arg.
7580 GenTree* lateArg = call->fgArgInfo->GetLateArg(i);
7581 assert(lateArg->gtVNPair.BothDefined());
7582 arg->gtVNPair = lateArg->gtVNPair;
7583 updatedArgPlace = true;
7587 printf("VN of ARGPLACE tree ");
7588 Compiler::printTreeID(arg);
7589 printf(" updated to ");
7590 vnpPrint(arg->gtVNPair, 1);
7596 args = args->Rest();
7598 if (updatedArgPlace)
7600 // Now we have to update the VN's of the argument list nodes, since that will be used in determining
7602 fgUpdateArgListVNs(call->gtCallArgs);
7605 if (call->gtCallType == CT_HELPER)
7607 bool modHeap = fgValueNumberHelperCall(call);
7611 // For now, arbitrary side effect on GcHeap/ByrefExposed.
7612 fgMutateGcHeap(call DEBUGARG("HELPER - modifies heap"));
// Non-helper calls: void calls get the void VN, others get a unique opaque VN
// since we cannot reason about the callee's result.
7617 if (call->TypeGet() == TYP_VOID)
7619 call->gtVNPair.SetBoth(ValueNumStore::VNForVoid());
7623 call->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, call->TypeGet()));
7626 // For now, arbitrary side effect on GcHeap/ByrefExposed.
7627 fgMutateGcHeap(call DEBUGARG("CALL"));
7631 void Compiler::fgUpdateArgListVNs(GenTreeArgList* args)
// Recursively re-value-number each argument list node, processing the rest of
// the list first so every list node sees the updated VNs of its tail.
7633 if (args == nullptr)
7638 fgUpdateArgListVNs(args->Rest());
7639 fgValueNumberTree(args);
7642 VNFunc Compiler::fgValueNumberHelperMethVNFunc(CorInfoHelpFunc helpFunc)
// Map a pure or allocator JIT helper to the VNFunc used to model it in the
// value-number store. Arithmetic helpers reuse genTreeOps-based VNFuncs
// (e.g. VNFunc(GT_DIV)); the rest map to dedicated VNF_* symbols.
// Asserts if called for a helper that is neither pure nor an allocator.
7644 assert(s_helperCallProperties.IsPure(helpFunc) || s_helperCallProperties.IsAllocator(helpFunc));
7646 VNFunc vnf = VNF_Boundary; // An illegal value...
7649 // These translate to other function symbols:
7650 case CORINFO_HELP_DIV:
7651 vnf = VNFunc(GT_DIV);
7653 case CORINFO_HELP_MOD:
7654 vnf = VNFunc(GT_MOD);
7656 case CORINFO_HELP_UDIV:
7657 vnf = VNFunc(GT_UDIV);
7659 case CORINFO_HELP_UMOD:
7660 vnf = VNFunc(GT_UMOD);
7662 case CORINFO_HELP_LLSH:
7663 vnf = VNFunc(GT_LSH);
7665 case CORINFO_HELP_LRSH:
7666 vnf = VNFunc(GT_RSH);
7668 case CORINFO_HELP_LRSZ:
7669 vnf = VNFunc(GT_RSZ);
7671 case CORINFO_HELP_LMUL:
7672 case CORINFO_HELP_LMUL_OVF:
7673 vnf = VNFunc(GT_MUL);
7675 case CORINFO_HELP_ULMUL_OVF:
7676 vnf = VNFunc(GT_MUL);
7677 break; // Is this the right thing?
7678 case CORINFO_HELP_LDIV:
7679 vnf = VNFunc(GT_DIV);
7681 case CORINFO_HELP_LMOD:
7682 vnf = VNFunc(GT_MOD);
7684 case CORINFO_HELP_ULDIV:
7685 vnf = VNFunc(GT_UDIV);
7687 case CORINFO_HELP_ULMOD:
7688 vnf = VNFunc(GT_UMOD);
// Floating-point conversion helpers.
7691 case CORINFO_HELP_LNG2DBL:
7694 case CORINFO_HELP_ULNG2DBL:
7697 case CORINFO_HELP_DBL2INT:
7700 case CORINFO_HELP_DBL2INT_OVF:
7703 case CORINFO_HELP_DBL2LNG:
7706 case CORINFO_HELP_DBL2LNG_OVF:
7709 case CORINFO_HELP_DBL2UINT:
7712 case CORINFO_HELP_DBL2UINT_OVF:
7715 case CORINFO_HELP_DBL2ULNG:
7718 case CORINFO_HELP_DBL2ULNG_OVF:
7721 case CORINFO_HELP_FLTREM:
7722 vnf = VNFunc(GT_MOD);
7724 case CORINFO_HELP_DBLREM:
7725 vnf = VNFunc(GT_MOD);
7727 case CORINFO_HELP_FLTROUND:
7729 break; // Is this the right thing?
7730 case CORINFO_HELP_DBLROUND:
7732 break; // Is this the right thing?
7734 // These allocation operations probably require some augmentation -- perhaps allocSiteId,
7735 // something about array length...
7736 case CORINFO_HELP_NEW_CROSSCONTEXT:
7737 case CORINFO_HELP_NEWFAST:
7738 case CORINFO_HELP_NEWSFAST:
7739 case CORINFO_HELP_NEWSFAST_ALIGN8:
7743 case CORINFO_HELP_READYTORUN_NEW:
7744 vnf = VNF_JitReadyToRunNew;
7747 case CORINFO_HELP_NEWARR_1_DIRECT:
7748 case CORINFO_HELP_NEWARR_1_OBJ:
7749 case CORINFO_HELP_NEWARR_1_VC:
7750 case CORINFO_HELP_NEWARR_1_ALIGN8:
7751 vnf = VNF_JitNewArr;
7754 case CORINFO_HELP_NEWARR_1_R2R_DIRECT:
7755 case CORINFO_HELP_READYTORUN_NEWARR_1:
7756 vnf = VNF_JitReadyToRunNewArr;
// Static-base helpers: each distinct helper gets its own VNF_* symbol so
// calls to different helpers never alias in the VN store.
7759 case CORINFO_HELP_GETGENERICS_GCSTATIC_BASE:
7760 vnf = VNF_GetgenericsGcstaticBase;
7762 case CORINFO_HELP_GETGENERICS_NONGCSTATIC_BASE:
7763 vnf = VNF_GetgenericsNongcstaticBase;
7765 case CORINFO_HELP_GETSHARED_GCSTATIC_BASE:
7766 vnf = VNF_GetsharedGcstaticBase;
7768 case CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE:
7769 vnf = VNF_GetsharedNongcstaticBase;
7771 case CORINFO_HELP_GETSHARED_GCSTATIC_BASE_NOCTOR:
7772 vnf = VNF_GetsharedGcstaticBaseNoctor;
7774 case CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE_NOCTOR:
7775 vnf = VNF_GetsharedNongcstaticBaseNoctor;
7777 case CORINFO_HELP_READYTORUN_STATIC_BASE:
7778 vnf = VNF_ReadyToRunStaticBase;
7780 case CORINFO_HELP_READYTORUN_GENERIC_STATIC_BASE:
7781 vnf = VNF_ReadyToRunGenericStaticBase;
7783 case CORINFO_HELP_GETSHARED_GCSTATIC_BASE_DYNAMICCLASS:
7784 vnf = VNF_GetsharedGcstaticBaseDynamicclass;
7786 case CORINFO_HELP_GETSHARED_NONGCSTATIC_BASE_DYNAMICCLASS:
7787 vnf = VNF_GetsharedNongcstaticBaseDynamicclass;
7789 case CORINFO_HELP_CLASSINIT_SHARED_DYNAMICCLASS:
7790 vnf = VNF_ClassinitSharedDynamicclass;
7792 case CORINFO_HELP_GETGENERICS_GCTHREADSTATIC_BASE:
7793 vnf = VNF_GetgenericsGcthreadstaticBase;
7795 case CORINFO_HELP_GETGENERICS_NONGCTHREADSTATIC_BASE:
7796 vnf = VNF_GetgenericsNongcthreadstaticBase;
7798 case CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE:
7799 vnf = VNF_GetsharedGcthreadstaticBase;
7801 case CORINFO_HELP_GETSHARED_NONGCTHREADSTATIC_BASE:
7802 vnf = VNF_GetsharedNongcthreadstaticBase;
7804 case CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE_NOCTOR:
7805 vnf = VNF_GetsharedGcthreadstaticBaseNoctor;
7807 case CORINFO_HELP_GETSHARED_NONGCTHREADSTATIC_BASE_NOCTOR:
7808 vnf = VNF_GetsharedNongcthreadstaticBaseNoctor;
7810 case CORINFO_HELP_GETSHARED_GCTHREADSTATIC_BASE_DYNAMICCLASS:
7811 vnf = VNF_GetsharedGcthreadstaticBaseDynamicclass;
7813 case CORINFO_HELP_GETSHARED_NONGCTHREADSTATIC_BASE_DYNAMICCLASS:
7814 vnf = VNF_GetsharedNongcthreadstaticBaseDynamicclass;
7816 case CORINFO_HELP_GETSTATICFIELDADDR_CONTEXT:
7817 vnf = VNF_GetStaticAddrContext;
7819 case CORINFO_HELP_GETSTATICFIELDADDR_TLS:
7820 vnf = VNF_GetStaticAddrTLS;
7823 case CORINFO_HELP_RUNTIMEHANDLE_METHOD:
7824 case CORINFO_HELP_RUNTIMEHANDLE_METHOD_LOG:
7825 vnf = VNF_RuntimeHandleMethod;
7828 case CORINFO_HELP_RUNTIMEHANDLE_CLASS:
7829 case CORINFO_HELP_RUNTIMEHANDLE_CLASS_LOG:
7830 vnf = VNF_RuntimeHandleClass;
7833 case CORINFO_HELP_STRCNS:
// Cast/type-check helpers.
7837 case CORINFO_HELP_CHKCASTCLASS:
7838 case CORINFO_HELP_CHKCASTCLASS_SPECIAL:
7839 case CORINFO_HELP_CHKCASTARRAY:
7840 case CORINFO_HELP_CHKCASTINTERFACE:
7841 case CORINFO_HELP_CHKCASTANY:
7842 vnf = VNF_CastClass;
7845 case CORINFO_HELP_READYTORUN_CHKCAST:
7846 vnf = VNF_ReadyToRunCastClass;
7849 case CORINFO_HELP_ISINSTANCEOFCLASS:
7850 case CORINFO_HELP_ISINSTANCEOFINTERFACE:
7851 case CORINFO_HELP_ISINSTANCEOFARRAY:
7852 case CORINFO_HELP_ISINSTANCEOFANY:
7853 vnf = VNF_IsInstanceOf;
7856 case CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE:
7857 vnf = VNF_TypeHandleToRuntimeType;
7860 case CORINFO_HELP_READYTORUN_ISINSTANCEOF:
7861 vnf = VNF_ReadyToRunIsInstanceOf;
7864 case CORINFO_HELP_LDELEMA_REF:
7868 case CORINFO_HELP_UNBOX:
7872 // A constant within any method.
7873 case CORINFO_HELP_GETCURRENTMANAGEDTHREADID:
7874 vnf = VNF_ManagedThreadId;
7877 case CORINFO_HELP_GETREFANY:
7878 // TODO-CQ: This should really be interpreted as just a struct field reference, in terms of values.
7879 vnf = VNF_GetRefanyVal;
7882 case CORINFO_HELP_GETCLASSFROMMETHODPARAM:
7883 vnf = VNF_GetClassFromMethodParam;
7886 case CORINFO_HELP_GETSYNCFROMCLASSHANDLE:
7887 vnf = VNF_GetSyncFromClassHandle;
7890 case CORINFO_HELP_LOOP_CLONE_CHOICE_ADDR:
7891 vnf = VNF_LoopCloneChoiceAddr;
7894 case CORINFO_HELP_BOX:
7898 case CORINFO_HELP_BOX_NULLABLE:
7899 vnf = VNF_BoxNullable;
// Every recognized helper must have produced a legal VNFunc by now.
7906 assert(vnf != VNF_Boundary);
7910 bool Compiler::fgValueNumberHelperCall(GenTreeCall* call)
// Value-number a JIT helper call. Pure and allocator helpers are modeled
// precisely via fgValueNumberHelperCallFunc; others get an opaque VN. The
// helper's possible exceptions are attached to the call's VN pair.
// Returns whether the helper may mutate the heap (per s_helperCallProperties).
7912 CorInfoHelpFunc helpFunc = eeGetHelperNum(call->gtCallMethHnd);
7913 bool pure = s_helperCallProperties.IsPure(helpFunc);
7914 bool isAlloc = s_helperCallProperties.IsAllocator(helpFunc);
7915 bool modHeap = s_helperCallProperties.MutatesHeap(helpFunc);
7916 bool mayRunCctor = s_helperCallProperties.MayRunCctor(helpFunc);
7917 bool noThrow = s_helperCallProperties.NoThrow(helpFunc);
7919 ValueNumPair vnpExc = ValueNumStore::VNPForEmptyExcSet();
7921 // If the JIT helper can throw an exception make sure that we fill in
7922 // vnpExc with a Value Number that represents the exception(s) that can be thrown.
7925 // If the helper is known to throw only one particular exception
7926 // we can set vnpExc to that exception, otherwise we conservatively
7927 // model the JIT helper as possibly throwing multiple different exceptions
7931 case CORINFO_HELP_OVERFLOW:
7932 // This helper always throws the VNF_OverflowExc exception
7933 vnpExc = vnStore->VNPExcSetSingleton(vnStore->VNPairForFunc(TYP_REF, VNF_OverflowExc));
7937 // Setup vnpExc with the information that multiple different exceptions
7938 // could be generated by this helper
7939 vnpExc = vnStore->VNPExcSetSingleton(vnStore->VNPairForFunc(TYP_REF, VNF_HelperMultipleExc));
7943 ValueNumPair vnpNorm;
7945 if (call->TypeGet() == TYP_VOID)
7947 vnpNorm = ValueNumStore::VNPForVoid();
7951 // TODO-CQ: this is a list of helpers we're going to treat as non-pure,
7952 // because they raise complications. Eventually, we need to handle those complications...
7953 bool needsFurtherWork = false;
7956 case CORINFO_HELP_NEW_MDARR:
7957 // This is a varargs helper. We need to represent the array shape in the VN world somehow.
7958 needsFurtherWork = true;
7964 if (!needsFurtherWork && (pure || isAlloc))
7966 VNFunc vnf = fgValueNumberHelperMethVNFunc(helpFunc);
7970 if ((call->gtFlags & GTF_CALL_HOISTABLE) == 0)
7976 fgValueNumberHelperCallFunc(call, vnf, vnpExc);
// Fallback: helpers we cannot model precisely get a unique opaque VN.
7981 vnpNorm.SetBoth(vnStore->VNForExpr(compCurBB, call->TypeGet()));
7985 call->gtVNPair = vnStore->VNPWithExc(vnpNorm, vnpExc);
7990 // This method asserts that SSA name constraints specified are satisfied.
7991 // Until we figure out otherwise, all VN's are assumed to be liberal.
7992 // TODO-Cleanup: new JitTestLabels for lib vs cons vs both VN classes?
// Debug/test-only: verifies a one-to-one mapping between declared test labels
// (VN classes in the test data) and the value numbers actually computed for
// the labeled nodes, reporting any mismatch.
7993 void Compiler::JitTestCheckVN()
7995 typedef JitHashTable<ssize_t, JitSmallPrimitiveKeyFuncs<ssize_t>, ValueNum> LabelToVNMap;
7996 typedef JitHashTable<ValueNum, JitSmallPrimitiveKeyFuncs<ValueNum>, ssize_t> VNToLabelMap;
7998 // If we have no test data, early out.
7999 if (m_nodeTestData == nullptr)
8004 NodeToTestDataMap* testData = GetNodeTestData();
8006 // First we have to know which nodes in the tree are reachable.
8007 typedef JitHashTable<GenTree*, JitPtrKeyFuncs<GenTree>, int> NodeToIntMap;
8008 NodeToIntMap* reachable = FindReachableNodesInNodeTestData();
8010 LabelToVNMap* labelToVN = new (getAllocatorDebugOnly()) LabelToVNMap(getAllocatorDebugOnly());
8011 VNToLabelMap* vnToLabel = new (getAllocatorDebugOnly()) VNToLabelMap(getAllocatorDebugOnly());
8015 printf("\nJit Testing: Value numbering.\n");
8017 for (NodeToTestDataMap::KeyIterator ki = testData->Begin(); !ki.Equal(testData->End()); ++ki)
8019 TestLabelAndNum tlAndN;
8020 GenTree* node = ki.Get();
8021 ValueNum nodeVN = node->GetVN(VNK_Liberal);
8023 bool b = testData->Lookup(node, &tlAndN);
8025 if (tlAndN.m_tl == TL_VN || tlAndN.m_tl == TL_VNNorm)
8028 if (!reachable->Lookup(node, &dummy))
8031 Compiler::printTreeID(node);
8032 printf(" had a test constraint declared, but has become unreachable at the time the constraint is "
8034 "(This is probably as a result of some optimization -- \n"
8035 "you may need to modify the test case to defeat this opt.)\n");
8042 Compiler::printTreeID(node);
8043 printf(" -- VN class %d.\n", tlAndN.m_num);
8046 if (tlAndN.m_tl == TL_VNNorm)
// TL_VNNorm constraints compare the normal value, stripped of exceptions.
8048 nodeVN = vnStore->VNNormVal(nodeVN);
8052 if (labelToVN->Lookup(tlAndN.m_num, &vn))
8056 printf(" Already in hash tables.\n");
8058 // The mapping(s) must be one-to-one: if the label has a mapping, then the ssaNm must, as well.
8060 bool b = vnToLabel->Lookup(vn, &num2);
8061 // And the mappings must be the same.
8062 if (tlAndN.m_num != num2)
8065 Compiler::printTreeID(node);
8066 printf(", with value number " STR_VN "%x, was declared in VN class %d,\n", nodeVN, tlAndN.m_num);
8067 printf("but this value number " STR_VN
8068 "%x has already been associated with a different SSA name class: %d.\n",
8072 // And the current node must be of the specified SSA family.
8076 Compiler::printTreeID(node);
8077 printf(", " STR_VN "%x was declared in SSA name class %d,\n", nodeVN, tlAndN.m_num);
8078 printf("but that name class was previously bound to a different value number: " STR_VN "%x.\n", vn);
8085 // The mapping(s) must be one-to-one: if the label has no mapping, then the ssaNm may not, either.
8086 if (vnToLabel->Lookup(nodeVN, &num))
8089 Compiler::printTreeID(node);
8090 printf(", " STR_VN "%x was declared in value number class %d,\n", nodeVN, tlAndN.m_num);
8092 "but this value number has already been associated with a different value number class: %d.\n",
8096 // Add to both mappings.
8097 labelToVN->Set(tlAndN.m_num, nodeVN);
8098 vnToLabel->Set(nodeVN, tlAndN.m_num);
8101 printf(" added to hash tables.\n");
8108 void Compiler::vnpPrint(ValueNumPair vnp, unsigned level)
// Debug helper: print a VN pair. When both halves agree, print once;
// otherwise print the liberal then the conservative value number.
8110 if (vnp.BothEqual())
8112 vnPrint(vnp.GetLiberal(), level);
8117 vnPrint(vnp.GetLiberal(), level);
8119 vnPrint(vnp.GetConservative(), level);
8124 void Compiler::vnPrint(ValueNum vn, unsigned level)
// Debug helper: print a value number -- its reserved name for reserved VNs,
// otherwise the STR_VN"%x" form, with a fuller dump at higher 'level'.
8127 if (ValueNumStore::isReservedVN(vn))
// NOTE(review): non-literal printf format string; safe only as long as
// reservedName() never returns text containing '%' -- consider
// printf("%s", ...) to be defensive. TODO confirm against reservedName().
8129 printf(ValueNumStore::reservedName(vn));
8133 printf(STR_VN "%x", vn);
8136 vnStore->vnDump(this, vn);
8143 // Methods of ValueNumPair.
// Default-construct both the liberal and conservative halves to NoVN
// ("no value number assigned yet").
8144 ValueNumPair::ValueNumPair() : m_liberal(ValueNumStore::NoVN), m_conservative(ValueNumStore::NoVN)
8148 bool ValueNumPair::BothDefined() const
8150 return (m_liberal != ValueNumStore::NoVN) && (m_conservative != ValueNumStore::NoVN);