1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
8 #include "rangecheck.h"
10 // Max stack depth (path length) in walking the UD chain.
// Used to cap recursion in GetRange/IsMonotonicallyIncreasing/ComputeDoesOverflow
// (compared against m_pSearchPath->GetCount()).
11 static const int MAX_SEARCH_DEPTH = 100;
13 // Max nodes to visit in the UD chain for the current method being compiled.
// Initializes m_nVisitBudget in the constructor; IsOverBudget() reports exhaustion.
14 static const int MAX_VISIT_BUDGET = 8192;
16 // RangeCheck constructor.
// Constructor: all cache maps (range, overflow, def table) start null and are
// created lazily on first use; the visit budget starts at MAX_VISIT_BUDGET.
// NOTE(review): this excerpt elides lines (embedded numbering jumps), so the
// initializer list / body may not be complete as shown.
17 RangeCheck::RangeCheck(Compiler* pCompiler)
18 : m_pOverflowMap(nullptr)
19 , m_pRangeMap(nullptr)
20 , m_pSearchPath(nullptr)
22 , m_fMappedDefs(false)
23 , m_pDefTable(nullptr)
25 , m_pCompiler(pCompiler)
26 , m_alloc(pCompiler, CMK_RangeCheck)
27 , m_nVisitBudget(MAX_VISIT_BUDGET)
31 bool RangeCheck::IsOverBudget()
33 return (m_nVisitBudget <= 0);
36 // Get the range map in which computed ranges are cached.
37 RangeCheck::RangeMap* RangeCheck::GetRangeMap()
39 if (m_pRangeMap == nullptr)
41 m_pRangeMap = new (&m_alloc) RangeMap(&m_alloc);
46 // Get the overflow map in which computed overflows are cached.
47 RangeCheck::OverflowMap* RangeCheck::GetOverflowMap()
49 if (m_pOverflowMap == nullptr)
51 m_pOverflowMap = new (&m_alloc) OverflowMap(&m_alloc);
53 return m_pOverflowMap;
56 // Get the length of the array vn, if it is new.
57 int RangeCheck::GetArrLength(ValueNum vn)
59 ValueNum arrRefVN = m_pCompiler->vnStore->GetArrForLenVn(vn);
60 return m_pCompiler->vnStore->GetNewArrSize(arrRefVN);
63 // Check if the computed range is within bounds.
// Returns true when "range" provably lies within [lower, bound implied by
// "upper"), where "upper" is the bounds check's length tree (see the call
// at OptimizeRangeCheck with lower == 0 and upper == bndsChk->gtArrLen).
// NOTE(review): this excerpt elides lines (embedded numbering jumps); not all
// braces/early returns are visible here.
64 bool RangeCheck::BetweenBounds(Range& range, int lower, GenTree* upper)
67 if (m_pCompiler->verbose)
69 printf("%s BetweenBounds <%d, ", range.ToString(m_pCompiler->getAllocatorDebugOnly()), lower);
70 Compiler::printTreeID(upper);
75 ValueNumStore* vnStore = m_pCompiler->vnStore;
77 // Get the VN for the upper limit.
78 ValueNum uLimitVN = upper->gtVNPair.GetConservative();
81 JITDUMP("VN%04X upper bound is: ", uLimitVN);
82 if (m_pCompiler->verbose)
84 vnStore->vnDump(m_pCompiler, uLimitVN);
// Try to pin down a concrete array size: from a constant upper limit, from a
// known "new"-allocated array length, or bail if it is not a checked bound.
91 if (vnStore->IsVNConstant(uLimitVN))
93 ssize_t constVal = -1;
94 unsigned iconFlags = 0;
96 if (m_pCompiler->optIsTreeKnownIntValue(true, upper, &constVal, &iconFlags))
98 arrSize = (int)constVal;
101 else if (vnStore->IsVNArrLen(uLimitVN))
103 // Get the array reference from the length.
104 ValueNum arrRefVN = vnStore->GetArrForLenVn(uLimitVN);
105 // Check if array size can be obtained.
106 arrSize = vnStore->GetNewArrSize(arrRefVN);
108 else if (!vnStore->IsVNCheckedBound(uLimitVN))
110 // If the upper limit is not length, then bail.
114 JITDUMP("Array size is: %d\n", arrSize);
116 // Upper limit: len + ucns (upper limit constant).
117 if (range.UpperLimit().IsBinOpArray())
// The "len + ucns" form is only usable when it refers to the same length VN
// as this bounds check.
119 if (range.UpperLimit().vn != uLimitVN)
124 int ucns = range.UpperLimit().GetConstant();
126 // Upper limit: Len + [0..n]
132 // Since upper limit is bounded by the array, return true if lower bound is good.
133 if (range.LowerLimit().IsConstant() && range.LowerLimit().GetConstant() >= 0)
138 // Check if we have the array size allocated by new.
145 // upper limit = len + ucns. ucns < 0
146 // lower limit = len + lcns.
147 if (range.LowerLimit().IsBinOpArray())
149 int lcns = range.LowerLimit().GetConstant();
// lcns must be a negative offset no larger in magnitude than the array size,
// or "len + lcns" could be negative / not a tighter bound.
150 if (lcns >= 0 || -lcns > arrSize)
154 return (range.LowerLimit().vn == uLimitVN && lcns <= ucns);
157 // If upper limit is constant
158 else if (range.UpperLimit().IsConstant())
164 int ucns = range.UpperLimit().GetConstant();
169 if (range.LowerLimit().IsConstant())
171 int lcns = range.LowerLimit().GetConstant();
172 // Make sure lcns < ucns which is already less than arrSize.
173 return (lcns >= 0 && lcns <= ucns);
175 if (range.LowerLimit().IsBinOpArray())
177 int lcns = range.LowerLimit().GetConstant();
178 // len + lcns, make sure we don't subtract too much from len.
179 if (lcns >= 0 || -lcns > arrSize)
183 // Make sure a.len + lcns <= ucns.
184 return (range.LowerLimit().vn == uLimitVN && (arrSize + lcns) <= ucns);
// Attempts to remove a single bounds check: first the cheap constant-index /
// constant-length case, then the general path that computes the index's range
// and tests it against the bound via BetweenBounds.
// NOTE(review): this excerpt elides lines (embedded numbering jumps); early
// returns and some braces are not visible.
191 void RangeCheck::OptimizeRangeCheck(BasicBlock* block, GenTree* stmt, GenTree* treeParent)
193 // Check if we are dealing with a bounds check node.
194 if (treeParent->OperGet() != GT_COMMA)
199 // If we are not looking at array bounds check, bail.
200 GenTree* tree = treeParent->gtOp.gtOp1;
201 if (!tree->OperIsBoundsCheck())
206 GenTreeBoundsChk* bndsChk = tree->AsBoundsChk();
// Remembered so MergeEdgeAssertions can compare assertion VNs against this
// candidate's length VN.
207 m_pCurBndsChk = bndsChk;
208 GenTree* treeIndex = bndsChk->gtIndex;
210 // Take care of constant index first, like a[2], for example.
211 ValueNum idxVn = treeIndex->gtVNPair.GetConservative();
212 ValueNum arrLenVn = bndsChk->gtArrLen->gtVNPair.GetConservative();
215 if (m_pCompiler->vnStore->IsVNConstant(arrLenVn))
217 ssize_t constVal = -1;
218 unsigned iconFlags = 0;
220 if (m_pCompiler->optIsTreeKnownIntValue(true, bndsChk->gtArrLen, &constVal, &iconFlags))
222 arrSize = (int)constVal;
// SIMD/HWIntrinsic checked bounds don't come from an array "new", so only
// query the new-array size for ordinary bounds checks.
227 if (tree->gtOper != GT_SIMD_CHK
228 #ifdef FEATURE_HW_INTRINSICS
229 && tree->gtOper != GT_HW_INTRINSIC_CHK
230 #endif // FEATURE_HW_INTRINSICS
232 #endif // FEATURE_SIMD
234 arrSize = GetArrLength(arrLenVn);
237 JITDUMP("ArrSize for lengthVN:%03X = %d\n", arrLenVn, arrSize);
238 if (m_pCompiler->vnStore->IsVNConstant(idxVn) && (arrSize > 0))
241 unsigned iconFlags = 0;
242 if (!m_pCompiler->optIsTreeKnownIntValue(true, treeIndex, &idxVal, &iconFlags))
247 JITDUMP("[RangeCheck::OptimizeRangeCheck] Is index %d in <0, arrLenVn VN%X sz:%d>.\n", idxVal, arrLenVn,
249 if ((idxVal < arrSize) && (idxVal >= 0))
251 JITDUMP("Removing range check\n");
252 m_pCompiler->optRemoveRangeCheck(treeParent, stmt);
// General path: reset per-check caches, then compute the index's range.
257 GetRangeMap()->RemoveAll();
258 GetOverflowMap()->RemoveAll();
259 m_pSearchPath = new (&m_alloc) SearchPath(&m_alloc);
261 // Get the range for this index.
262 Range range = GetRange(block, treeIndex, false DEBUGARG(0));
264 // If upper or lower limit is found to be unknown (top), or it was found to
265 // be unknown because of over budget or a deep search, then return early.
266 if (range.UpperLimit().IsUnknown() || range.LowerLimit().IsUnknown())
268 // Note: If we had stack depth too deep in the GetRange call, we'd be
269 // too deep even in the DoesOverflow call. So return early.
273 if (DoesOverflow(block, treeIndex))
275 JITDUMP("Method determined to overflow.\n");
279 JITDUMP("Range value %s\n", range.ToString(m_pCompiler->getAllocatorDebugOnly()));
280 m_pSearchPath->RemoveAll();
// Widening may upgrade a Dependent/Unknown lower bound using monotonicity.
281 Widen(block, treeIndex, &range);
283 // If upper or lower limit is unknown, then return.
284 if (range.UpperLimit().IsUnknown() || range.LowerLimit().IsUnknown())
289 // Is the range between the lower and upper bound values.
290 if (BetweenBounds(range, 0, bndsChk->gtArrLen))
292 JITDUMP("[RangeCheck::OptimizeRangeCheck] Between bounds\n");
293 m_pCompiler->optRemoveRangeCheck(treeParent, stmt);
// If the range's lower limit is Dependent/Unknown, try to improve it by
// proving the tree increases monotonically, then recompute the range with
// the "monotonic" flag set (after clearing the range cache).
// NOTE(review): excerpt elides lines; braces around the recompute are not visible.
298 void RangeCheck::Widen(BasicBlock* block, GenTree* tree, Range* pRange)
301 if (m_pCompiler->verbose)
303 printf("[RangeCheck::Widen] BB%02d, \n", block->bbNum);
304 Compiler::printTreeID(tree);
309 Range& range = *pRange;
311 // Try to deduce the lower bound, if it is not known already.
312 if (range.LowerLimit().IsDependent() || range.LowerLimit().IsUnknown())
314 // To determine the lower bound, ask if the loop increases monotonically.
315 bool increasing = IsMonotonicallyIncreasing(tree, false);
316 JITDUMP("IsMonotonicallyIncreasing %d", increasing);
// Cached ranges were computed without the monotonic assumption; discard them
// before recomputing.
319 GetRangeMap()->RemoveAll();
320 *pRange = GetRange(block, tree, true DEBUGARG(0));
// Decide whether an add (var + var, or var + non-negative constant) only
// moves values upward. Presumably the elided switch cases are GT_LCL_VAR and
// GT_CNS_INT — TODO confirm against the full source.
325 bool RangeCheck::IsBinOpMonotonicallyIncreasing(GenTreeOp* binop)
327 #ifdef LEGACY_BACKEND
328 assert(binop->OperIs(GT_ADD, GT_ASG_ADD));
330 assert(binop->OperIs(GT_ADD));
333 GenTree* op1 = binop->gtGetOp1();
334 GenTree* op2 = binop->gtGetOp2();
336 JITDUMP("[RangeCheck::IsBinOpMonotonicallyIncreasing] [%06d], [%06d]\n", Compiler::dspTreeID(op1),
337 Compiler::dspTreeID(op2));
338 // Check if we have a var + const.
// Canonicalize so that op1 is the local variable (if either operand is one).
339 if (op2->OperGet() == GT_LCL_VAR)
341 jitstd::swap(op1, op2);
343 if (op1->OperGet() != GT_LCL_VAR)
345 JITDUMP("Not monotonic because op1 is not lclVar.\n");
348 switch (op2->OperGet())
351 // When adding two local variables, we also must ensure that any constant is non-negative.
352 return IsMonotonicallyIncreasing(op1, true) && IsMonotonicallyIncreasing(op2, true);
355 return (op2->AsIntConCommon()->IconValue() >= 0) && IsMonotonicallyIncreasing(op1, false);
358 JITDUMP("Not monotonic because expression is not recognized.\n");
363 // The parameter rejectNegativeConst is true when we are adding two local vars (see above)
// Recursively proves that "expr" never decreases along its SSA def chain:
// constants (non-negative when required), local defs whose RHS is increasing,
// increasing adds, and phis all of whose arguments are increasing.
// Uses m_pSearchPath both to bound depth and to break def-chain cycles.
364 bool RangeCheck::IsMonotonicallyIncreasing(GenTree* expr, bool rejectNegativeConst)
366 JITDUMP("[RangeCheck::IsMonotonicallyIncreasing] [%06d]\n", Compiler::dspTreeID(expr));
368 // Add hashtable entry for expr.
369 bool alreadyPresent = m_pSearchPath->Set(expr, nullptr);
375 // Remove hashtable entry for expr when we exit the present scope.
376 auto code = [this, expr] { m_pSearchPath->Remove(expr); };
377 jitstd::utility::scoped_code<decltype(code)> finally(code);
379 if (m_pSearchPath->GetCount() > MAX_SEARCH_DEPTH)
384 // If expr is constant, then it is not part of the dependency
385 // loop which has to increase monotonically.
386 ValueNum vn = expr->gtVNPair.GetConservative();
387 if (m_pCompiler->vnStore->IsVNInt32Constant(vn))
389 if (rejectNegativeConst)
391 int cons = m_pCompiler->vnStore->ConstantValue<int>(vn);
399 // If the rhs expr is local, then try to find the def of the local.
400 else if (expr->IsLocal())
402 BasicBlock* asgBlock;
403 GenTreeOp* asg = GetSsaDefAsg(expr->AsLclVarCommon(), &asgBlock);
409 switch (asg->OperGet())
// GT_ASG case (elided label): recurse into the RHS of the assignment.
412 return IsMonotonicallyIncreasing(asg->gtGetOp2(), rejectNegativeConst);
414 #ifdef LEGACY_BACKEND
416 return IsBinOpMonotonicallyIncreasing(asg);
420 #ifndef LEGACY_BACKEND
423 // All other 'asg->OperGet()' kinds, return false
426 JITDUMP("Unknown local definition type\n");
429 else if (expr->OperGet() == GT_ADD)
431 return IsBinOpMonotonicallyIncreasing(expr->AsOp());
433 else if (expr->OperGet() == GT_PHI)
435 for (GenTreeArgList* args = expr->gtOp.gtOp1->AsArgList(); args != nullptr; args = args->Rest())
437 // If the arg is already in the path, skip.
438 if (m_pSearchPath->Lookup(args->Current()))
442 if (!IsMonotonicallyIncreasing(args->Current(), rejectNegativeConst))
444 JITDUMP("Phi argument not monotonic\n");
450 JITDUMP("Unknown tree type\n");
454 // Given a lclvar use, try to find the lclvar's defining assignment and its containing block.
// Returns the GT_ASG node whose op1 is the SSA def of "lclUse" (nullptr when
// there is no def, e.g. parameters), and reports the defining block through
// "asgBlock".
455 GenTreeOp* RangeCheck::GetSsaDefAsg(GenTreeLclVarCommon* lclUse, BasicBlock** asgBlock)
457 unsigned ssaNum = lclUse->GetSsaNum();
459 if (ssaNum == SsaConfig::RESERVED_SSA_NUM)
464 LclSsaVarDsc* ssaData = m_pCompiler->lvaTable[lclUse->GetLclNum()].GetPerSsaData(ssaNum);
465 GenTree* lclDef = ssaData->m_defLoc.m_tree;
467 if (lclDef == nullptr)
472 // We have the def node but we also need the assignment node to get its source.
473 // gtGetParent can be used to get the assignment node but it's rather expensive
474 // and not strictly necessary here, there shouldn't be any other node between
475 // the assignment node and its destination node.
476 GenTree* asg = lclDef->gtNext;
478 if (!asg->OperIsAssignment() || (asg->gtGetOp1() != lclDef))
// Debug-only cross-check against the def table built by MapMethodDefs.
484 Location* loc = GetDef(lclUse);
485 assert(loc->parent == asg);
486 assert(loc->block == ssaData->m_defLoc.m_blk);
489 *asgBlock = ssaData->m_defLoc.m_blk;
494 UINT64 RangeCheck::HashCode(unsigned lclNum, unsigned ssaNum)
496 assert(ssaNum != SsaConfig::RESERVED_SSA_NUM);
497 return UINT64(lclNum) << 32 | ssaNum;
500 // Get the def location of a given variable.
// Looks up HashCode(lclNum, ssaNum) in the lazily-built def table; returns
// nullptr when the variable has no recorded def (or the table doesn't exist).
// NOTE(review): excerpt elides lines (early return and final return not visible).
501 RangeCheck::Location* RangeCheck::GetDef(unsigned lclNum, unsigned ssaNum)
503 Location* loc = nullptr;
504 if (ssaNum == SsaConfig::RESERVED_SSA_NUM)
513 if (m_pDefTable == nullptr)
517 m_pDefTable->Lookup(HashCode(lclNum, ssaNum), &loc);
521 RangeCheck::Location* RangeCheck::GetDef(GenTreeLclVarCommon* lcl)
523 return GetDef(lcl->GetLclNum(), lcl->GetSsaNum());
526 // Add the def location to the hash table.
// Creates the def table on first use. In the duplicate case the existing
// entry is dumped (debug) before being overwritten by Set below.
527 void RangeCheck::SetDef(UINT64 hash, Location* loc)
529 if (m_pDefTable == nullptr)
531 m_pDefTable = new (&m_alloc) VarToLocMap(&m_alloc);
535 if (m_pDefTable->Lookup(hash, &loc2))
537 JITDUMP("Already have BB%02d, [%06d], [%06d] for hash => %0I64X", loc2->block->bbNum,
538 Compiler::dspTreeID(loc2->stmt), Compiler::dspTreeID(loc2->tree), hash);
542 m_pDefTable->Set(hash, loc);
546 // Merge assertions on the edge flowing into the block about a variable.
// For each assertion that matches "lcl", extract a Limit and a compare oper,
// normalize it (reverse relop for "== 0", make bounds inclusive), and tighten
// pRange's lLimit/uLimit if the assertion refers to the current bounds-check
// candidate's length (m_pCurBndsChk).
// NOTE(review): excerpt elides lines (embedded numbering jumps); several
// "continue"/brace lines and the final switch labels are not visible.
547 void RangeCheck::MergeEdgeAssertions(GenTreeLclVarCommon* lcl, ASSERT_VALARG_TP assertions, Range* pRange)
549 if (BitVecOps::IsEmpty(m_pCompiler->apTraits, assertions))
554 if (lcl->gtSsaNum == SsaConfig::RESERVED_SSA_NUM)
558 // Walk through the "assertions" to check if they apply.
559 BitVecOps::Iter iter(m_pCompiler->apTraits, assertions);
561 while (iter.NextElem(&index))
563 AssertionIndex assertionIndex = GetAssertionIndex(index);
565 Compiler::AssertionDsc* curAssertion = m_pCompiler->optGetAssertion(assertionIndex);
567 Limit limit(Limit::keUndef);
568 genTreeOps cmpOper = GT_NONE;
570 // Current assertion is of the form (i < len - cns) != 0
571 if (curAssertion->IsCheckedBoundArithBound())
573 ValueNumStore::CompareCheckedBoundArithInfo info;
575 // Get i, len, cns and < as "info."
576 m_pCompiler->vnStore->GetCompareCheckedBoundArithInfo(curAssertion->op1.vn, &info);
// Bail unless the assertion is about this very SSA def's value number.
578 if (m_pCompiler->lvaTable[lcl->gtLclNum].GetPerSsaData(lcl->gtSsaNum)->m_vnPair.GetConservative() !=
584 if ((info.arrOper != GT_ADD) && (info.arrOper != GT_SUB))
589 // If the operand that operates on the bound is not constant, then done.
590 if (!m_pCompiler->vnStore->IsVNInt32Constant(info.arrOp))
595 int cons = m_pCompiler->vnStore->ConstantValue<int>(info.arrOp);
596 limit = Limit(Limit::keBinOpArray, info.vnBound, info.arrOper == GT_SUB ? -cons : cons);
597 cmpOper = (genTreeOps)info.cmpOper;
599 // Current assertion is of the form (i < len) != 0
600 else if (curAssertion->IsCheckedBoundBound())
602 ValueNumStore::CompareCheckedBoundArithInfo info;
604 // Get the info as "i", "<" and "len"
605 m_pCompiler->vnStore->GetCompareCheckedBound(curAssertion->op1.vn, &info);
608 m_pCompiler->lvaTable[lcl->gtLclNum].GetPerSsaData(lcl->gtSsaNum)->m_vnPair.GetConservative();
609 // If we don't have the same variable we are comparing against, bail.
610 if (lclVn != info.cmpOp)
614 limit = Limit(Limit::keBinOpArray, info.vnBound, 0);
615 cmpOper = (genTreeOps)info.cmpOper;
617 // Current assertion is of the form (i < 100) != 0
618 else if (curAssertion->IsConstantBound())
620 ValueNumStore::ConstantBoundInfo info;
622 // Get the info as "i", "<" and "100"
623 m_pCompiler->vnStore->GetConstantBoundInfo(curAssertion->op1.vn, &info);
626 m_pCompiler->lvaTable[lcl->gtLclNum].GetPerSsaData(lcl->gtSsaNum)->m_vnPair.GetConservative();
628 // If we don't have the same variable we are comparing against, bail.
629 if (lclVn != info.cmpOpVN)
634 limit = Limit(Limit::keConstant, info.constVal);
635 cmpOper = (genTreeOps)info.cmpOper;
637 // Current assertion is not supported, ignore it
643 assert(limit.IsBinOpArray() || limit.IsConstant());
645 // Make sure the assertion is of the form != 0 or == 0.
646 if (curAssertion->op2.vn != m_pCompiler->vnStore->VNZeroForType(TYP_INT))
651 if (m_pCompiler->verbose)
653 m_pCompiler->optPrintAssertion(curAssertion, assertionIndex);
657 ValueNum arrLenVN = m_pCurBndsChk->gtArrLen->gtVNPair.GetConservative();
659 if (m_pCompiler->vnStore->IsVNConstant(arrLenVN))
661 // Set arrLenVN to NoVN; this will make it match the "vn" recorded on
662 // constant limits (where we explicitly track the constant and don't
663 // redundantly store its VN in the "vn" field).
664 arrLenVN = ValueNumStore::NoVN;
667 // During assertion prop we add assertions of the form:
674 // At this point, we have detected that op1.vn is (i < length) or (i < length + cns) or
675 // (i < 100) and the op2.vn is 0.
677 // Now, let us check if we are == 0 (i.e., op1 assertion is false) or != 0 (op1 assertion
680 // If we have an assertion of the form == 0 (i.e., equals false), then reverse relop.
681 // The relop has to be reversed because we have: (i < length) is false which is the same
683 if (curAssertion->assertionKind == Compiler::OAK_EQUAL)
685 cmpOper = GenTree::ReverseRelop(cmpOper);
688 // Bounds are inclusive, so add -1 for upper bound when "<". But make sure we won't overflow.
689 if (cmpOper == GT_LT && !limit.AddConstant(-1))
693 // Bounds are inclusive, so add +1 for lower bound when ">". But make sure we won't overflow.
694 if (cmpOper == GT_GT && !limit.AddConstant(1))
699 // Doesn't tighten the current bound. So skip.
700 if (pRange->uLimit.IsConstant() && limit.vn != arrLenVN)
705 // Check if the incoming limit from assertions tightens the existing upper limit.
706 if (pRange->uLimit.IsBinOpArray() && (pRange->uLimit.vn == arrLenVN))
708 // We have checked the current range's (pRange's) upper limit is either of the form:
710 // and length == the bndsChkCandidate's arrLen
712 // We want to check if the incoming limit tightens the bound, and for that
713 // we need to make sure that incoming limit is also on the same length (or
714 // length + cns) and not some other length.
716 if (limit.vn != arrLenVN)
718 JITDUMP("Array length VN did not match cur=$%x, assert=$%x\n", arrLenVN, limit.vn);
722 int curCns = pRange->uLimit.cns;
723 int limCns = (limit.IsBinOpArray()) ? limit.cns : 0;
725 // Incoming limit doesn't tighten the existing upper limit.
726 if (limCns >= curCns)
728 JITDUMP("Bound limit %d doesn't tighten current bound %d\n", limCns, curCns);
734 // Current range's upper bound is not "length + cns" and the
735 // incoming limit is not on the same length as the bounds check candidate.
736 // So we could skip this assertion. But in cases, of Dependent or Unknown
737 // type of upper limit, the incoming assertion still tightens the upper
738 // bound to a saner value. So do not skip the assertion.
741 // cmpOp (loop index i) cmpOper len +/- cns
// The four assignments below correspond to the elided switch on cmpOper:
// presumably GT_LT/GT_LE update uLimit and GT_GT/GT_GE update lLimit —
// TODO confirm against the full source.
745 pRange->uLimit = limit;
749 pRange->lLimit = limit;
753 pRange->lLimit = limit;
757 pRange->uLimit = limit;
761 // All other 'cmpOper' kinds leave lLimit/uLimit unchanged
764 JITDUMP("The range after edge merging:");
765 JITDUMP(pRange->ToString(m_pCompiler->getAllocatorDebugOnly()));
770 // Merge assertions from the pred edges of the block, i.e., check for any assertions about "op's" value numbers for phi
771 // arguments. If not a phi argument, check if we assertions about local variables.
// Selects the proper assertion set (bbAssertionOut for a fall-through pred,
// bbJtrueAssertionOut for a taken-branch pred, or the block's bbAssertionIn
// for a plain local) and delegates to MergeEdgeAssertions.
772 void RangeCheck::MergeAssertion(BasicBlock* block, GenTree* op, Range* pRange DEBUGARG(int indent))
774 JITDUMP("Merging assertions from pred edges of BB%02d for op [%06d] $%03x\n", block->bbNum, Compiler::dspTreeID(op),
775 op->gtVNPair.GetConservative());
776 ASSERT_TP assertions = BitVecOps::UninitVal();
778 // If we have a phi arg, we can get to the block from it and use its assertion out.
779 if (op->gtOper == GT_PHI_ARG)
781 GenTreePhiArg* arg = (GenTreePhiArg*)op;
782 BasicBlock* pred = arg->gtPredBB;
783 if (pred->bbFallsThrough() && pred->bbNext == block)
785 assertions = pred->bbAssertionOut;
786 JITDUMP("Merge assertions from pred BB%02d edge: %s\n", pred->bbNum,
787 BitVecOps::ToString(m_pCompiler->apTraits, assertions));
789 else if ((pred->bbJumpKind == BBJ_COND || pred->bbJumpKind == BBJ_ALWAYS) && pred->bbJumpDest == block)
791 if (m_pCompiler->bbJtrueAssertionOut != nullptr)
793 assertions = m_pCompiler->bbJtrueAssertionOut[pred->bbNum];
794 JITDUMP("Merge assertions from pred BB%02d JTrue edge: %s\n", pred->bbNum,
795 BitVecOps::ToString(m_pCompiler->apTraits, assertions));
799 // Get assertions from bbAssertionIn.
800 else if (op->IsLocal())
802 assertions = block->bbAssertionIn;
805 if (!BitVecOps::MayBeUninit(assertions))
807 // Perform the merge step to fine tune the range value.
808 MergeEdgeAssertions(op->AsLclVarCommon(), assertions, pRange);
812 // Compute the range for a binary operation.
// Only GT_ADD (plus GT_ASG_ADD on the legacy backend) is supported: compute
// (or fetch cached) ranges for both operands — using a Dependent placeholder
// for operands already on the search path — then add them with RangeOps::Add.
// NOTE(review): excerpt elides lines; closing braces and the final return of
// "r" are not visible.
813 Range RangeCheck::ComputeRangeForBinOp(BasicBlock* block, GenTreeOp* binop, bool monotonic DEBUGARG(int indent))
815 #ifdef LEGACY_BACKEND
816 assert(binop->OperIs(GT_ADD, GT_ASG_ADD));
818 assert(binop->OperIs(GT_ADD));
821 GenTree* op1 = binop->gtGetOp1();
822 GenTree* op2 = binop->gtGetOp2();
824 Range* op1RangeCached = nullptr;
825 Range op1Range = Limit(Limit::keUndef);
826 // Check if the range value is already cached.
827 if (!GetRangeMap()->Lookup(op1, &op1RangeCached))
829 // If we already have the op in the path, then, just rely on assertions, else
831 if (m_pSearchPath->Lookup(op1))
833 op1Range = Range(Limit(Limit::keDependent));
837 op1Range = GetRange(block, op1, monotonic DEBUGARG(indent));
839 MergeAssertion(block, op1, &op1Range DEBUGARG(indent + 1));
843 op1Range = *op1RangeCached;
846 Range* op2RangeCached;
847 Range op2Range = Limit(Limit::keUndef);
848 // Check if the range value is already cached.
849 if (!GetRangeMap()->Lookup(op2, &op2RangeCached))
851 // If we already have the op in the path, then, just rely on assertions, else
853 if (m_pSearchPath->Lookup(op2))
855 op2Range = Range(Limit(Limit::keDependent));
859 op2Range = GetRange(block, op2, monotonic DEBUGARG(indent));
861 MergeAssertion(block, op2, &op2Range DEBUGARG(indent + 1));
865 op2Range = *op2RangeCached;
868 Range r = RangeOps::Add(op1Range, op2Range);
869 JITDUMP("BinOp add ranges %s %s = %s\n", op1Range.ToString(m_pCompiler->getAllocatorDebugOnly()),
870 op2Range.ToString(m_pCompiler->getAllocatorDebugOnly()), r.ToString(m_pCompiler->getAllocatorDebugOnly()));
874 // Compute the range for a local var definition.
// Finds the SSA defining assignment and computes the range of its RHS,
// further tightened by the use block's incoming assertions; returns Unknown
// when no def is found or the def kind is unsupported.
875 Range RangeCheck::ComputeRangeForLocalDef(BasicBlock* block,
876 GenTreeLclVarCommon* lcl,
877 bool monotonic DEBUGARG(int indent))
879 BasicBlock* asgBlock;
880 GenTreeOp* asg = GetSsaDefAsg(lcl, &asgBlock);
883 return Range(Limit(Limit::keUnknown));
886 if (m_pCompiler->verbose)
888 JITDUMP("----------------------------------------------------\n");
889 m_pCompiler->gtDispTree(asg);
890 JITDUMP("----------------------------------------------------\n");
893 switch (asg->OperGet())
895 // If the operator of the definition is assignment, then compute the range of the rhs.
// (GT_ASG case label elided from this excerpt.)
898 Range range = GetRange(asgBlock, asg->gtGetOp2(), monotonic DEBUGARG(indent));
899 if (!BitVecOps::MayBeUninit(block->bbAssertionIn))
901 JITDUMP("Merge assertions from BB%02d:%s for assignment about [%06d]\n", block->bbNum,
902 BitVecOps::ToString(m_pCompiler->apTraits, block->bbAssertionIn),
903 Compiler::dspTreeID(asg->gtGetOp1()));
904 MergeEdgeAssertions(asg->gtGetOp1()->AsLclVarCommon(), block->bbAssertionIn, &range);
905 JITDUMP("done merging\n");
910 #ifdef LEGACY_BACKEND
912 // If the operator of the definition is +=, then compute the range of the operands of +.
913 // Note that gtGetOp1 will return op1 to be the lhs; in the formulation of ssa, we have
914 // a side table for defs and the lhs of a += is considered to be a use for SSA numbering.
915 return ComputeRangeForBinOp(asgBlock, asg, monotonic DEBUGARG(indent));
919 #ifndef LEGACY_BACKEND
922 // All other 'asg->OperGet()' kinds, return Limit::keUnknown
925 return Range(Limit(Limit::keUnknown));
928 // https://msdn.microsoft.com/en-us/windows/apps/hh285054.aspx
929 // CLR throws IDS_EE_ARRAY_DIMENSIONS_EXCEEDED if array length is > INT_MAX.
930 // new byte[INT_MAX]; still throws OutOfMemoryException on my system with 32 GB RAM.
931 // I believe practical limits are still smaller than this number.
932 #define ARRLEN_MAX (0x7FFFFFFF)
934 // Get the limit's maximum possible value, treating array length to be ARRLEN_MAX.
// Returns false when the max cannot be determined or the "len + cns" sum
// would itself overflow; otherwise stores the max through pMax.
// NOTE(review): excerpt elides lines (switch header, array-length fallback to
// ARRLEN_MAX, and the function's return paths are not visible).
935 bool RangeCheck::GetLimitMax(Limit& limit, int* pMax)
940 case Limit::keConstant:
941 max1 = limit.GetConstant();
944 case Limit::keBinOpArray:
946 int tmp = GetArrLength(limit.vn);
951 if (IntAddOverflows(tmp, limit.GetConstant()))
955 max1 = tmp + limit.GetConstant();
965 // Check if the arithmetic overflows.
// Conservative: if either limit's max cannot be determined, the elided
// early-return paths treat the addition as (potentially) overflowing.
966 bool RangeCheck::AddOverflows(Limit& limit1, Limit& limit2)
969 if (!GetLimitMax(limit1, &max1))
975 if (!GetLimitMax(limit2, &max2))
980 return IntAddOverflows(max1, max2);
983 // Does the bin operation overflow.
// Checks each operand recursively (skipping ones already on the search path),
// then tests whether adding the operands' cached upper limits can overflow.
// NOTE(review): excerpt elides the early "return true" bodies and the final
// return; Lookup failures on the range map presumably bail conservatively —
// TODO confirm against the full source.
984 bool RangeCheck::DoesBinOpOverflow(BasicBlock* block, GenTreeOp* binop)
986 GenTree* op1 = binop->gtGetOp1();
987 GenTree* op2 = binop->gtGetOp2();
989 if (!m_pSearchPath->Lookup(op1) && DoesOverflow(block, op1))
994 if (!m_pSearchPath->Lookup(op2) && DoesOverflow(block, op2))
999 // Get the cached ranges of op1
1000 Range* op1Range = nullptr;
1001 if (!GetRangeMap()->Lookup(op1, &op1Range))
1005 // Get the cached ranges of op2
1006 Range* op2Range = nullptr;
1007 if (!GetRangeMap()->Lookup(op2, &op2Range))
1012 // If dependent, check if we can use some assertions.
1013 if (op1Range->UpperLimit().IsDependent())
1015 MergeAssertion(block, op1, op1Range DEBUGARG(0));
1018 // If dependent, check if we can use some assertions.
1019 if (op2Range->UpperLimit().IsDependent())
1021 MergeAssertion(block, op2, op2Range DEBUGARG(0));
1024 JITDUMP("Checking bin op overflow %s %s\n", op1Range->ToString(m_pCompiler->getAllocatorDebugOnly()),
1025 op2Range->ToString(m_pCompiler->getAllocatorDebugOnly()));
1027 if (!AddOverflows(op1Range->UpperLimit(), op2Range->UpperLimit()))
1034 // Check if the var definition the rhs involves arithmetic that overflows.
// Finds the SSA defining assignment and recurses into its RHS; conservatively
// returns true (per the comment at 1059) for unsupported def kinds.
1035 bool RangeCheck::DoesVarDefOverflow(GenTreeLclVarCommon* lcl)
1037 BasicBlock* asgBlock;
1038 GenTreeOp* asg = GetSsaDefAsg(lcl, &asgBlock);
1044 switch (asg->OperGet())
// (GT_ASG case label elided from this excerpt.)
1047 return DoesOverflow(asgBlock, asg->gtGetOp2());
1049 #ifdef LEGACY_BACKEND
1051 // For GT_ASG_ADD, op2 is use, op1 is also use since we side table for defs in useasg case.
1052 return DoesBinOpOverflow(asgBlock, asg);
1056 #ifndef LEGACY_BACKEND
1057 noway_assert(false);
1059 // All other 'asg->OperGet()' kinds, conservatively return true
// A phi overflows if any of its arguments (not already on the search path)
// overflows. NOTE(review): excerpt elides the "continue"/"return true"/final
// "return false" lines.
1065 bool RangeCheck::DoesPhiOverflow(BasicBlock* block, GenTree* expr)
1067 for (GenTreeArgList* args = expr->gtOp.gtOp1->AsArgList(); args != nullptr; args = args->Rest())
1069 GenTree* arg = args->Current();
1070 if (m_pSearchPath->Lookup(arg))
1074 if (DoesOverflow(block, arg))
// Memoizing front end: return the cached overflow answer for "expr" or
// compute it via ComputeDoesOverflow (which also populates the cache).
1082 bool RangeCheck::DoesOverflow(BasicBlock* block, GenTree* expr)
1084 bool overflows = false;
1085 if (!GetOverflowMap()->Lookup(expr, &overflows))
1087 overflows = ComputeDoesOverflow(block, expr);
// Computes whether evaluating "expr" can involve arithmetic overflow,
// defaulting to "true" (conservative) for depth overrun or unrecognized
// trees. Maintains m_pSearchPath around the recursion and caches the result.
1092 bool RangeCheck::ComputeDoesOverflow(BasicBlock* block, GenTree* expr)
1094 JITDUMP("Does overflow [%06d]?\n", Compiler::dspTreeID(expr));
1095 m_pSearchPath->Set(expr, block);
1097 bool overflows = true;
1099 if (m_pSearchPath->GetCount() > MAX_SEARCH_DEPTH)
1103 // If the definition chain resolves to a constant, it doesn't overflow.
1104 else if (m_pCompiler->vnStore->IsVNConstant(expr->gtVNPair.GetConservative()))
1108 // Check if the var def has rhs involving arithmetic that overflows.
1109 else if (expr->IsLocal())
1111 overflows = DoesVarDefOverflow(expr->AsLclVarCommon());
1113 // Check if add overflows.
1114 else if (expr->OperGet() == GT_ADD)
1116 overflows = DoesBinOpOverflow(block, expr->AsOp());
1118 // Walk through phi arguments to check if phi arguments involve arithmetic that overflows.
1119 else if (expr->OperGet() == GT_PHI)
1121 overflows = DoesPhiOverflow(block, expr);
1123 GetOverflowMap()->Set(expr, overflows);
1124 m_pSearchPath->Remove(expr);
1128 // Compute the range recursively by asking for the range of each variable in the dependency chain.
1129 // eg.: c = a + b; ask range of "a" and "b" and add the results.
1130 // If the result cannot be determined i.e., the dependency chain does not terminate in a value,
1131 // but continues to loop, which will happen with phi nodes. We end the looping by calling the
1132 // value as "dependent" (dep).
1133 // If the loop is proven to be "monotonic", then make liberal decisions while merging phi node.
1134 // eg.: merge((0, dep), (dep, dep)) = (0, dep)
// NOTE(review): excerpt elides lines (embedded numbering jumps); the budget
// decrement and several braces/else lines are not visible here.
1135 Range RangeCheck::ComputeRange(BasicBlock* block, GenTree* expr, bool monotonic DEBUGARG(int indent))
1137 bool newlyAdded = !m_pSearchPath->Set(expr, block);
1138 Range range = Limit(Limit::keUndef);
1140 ValueNum vn = expr->gtVNPair.GetConservative();
1141 // If newly added in the current search path, then reduce the budget.
1144 // Assert that we are not re-entrant for a node which has been
1145 // visited and resolved before and not currently on the search path.
1146 noway_assert(!GetRangeMap()->Lookup(expr));
1149 // Prevent quadratic behavior.
1152 // Set to unknown, since an Unknown range resolution, will stop further
1153 // searches. This is because anything that merges with Unknown will
1154 // yield Unknown. Unknown is lattice top.
1155 range = Range(Limit(Limit::keUnknown));
1156 JITDUMP("GetRange not tractable within max node visit budget.\n");
1158 // Prevent unbounded recursion.
1159 else if (m_pSearchPath->GetCount() > MAX_SEARCH_DEPTH)
1161 // Unknown is lattice top, anything that merges with Unknown will yield Unknown.
1162 range = Range(Limit(Limit::keUnknown));
1163 JITDUMP("GetRange not tractable within max stack depth.\n");
1165 // TODO-CQ: The current implementation is reliant on integer storage types
1166 // for constants. It could use INT64. Still, representing ULONG constants
1167 // might require preserving the var_type whether it is a un/signed 64-bit.
1168 // JIT64 doesn't do anything for "long" either. No asm diffs.
1169 else if (expr->TypeGet() == TYP_LONG || expr->TypeGet() == TYP_ULONG)
1171 range = Range(Limit(Limit::keUnknown));
1172 JITDUMP("GetRange long or ulong, setting to unknown value.\n");
1174 // If VN is constant return range as constant.
1175 else if (m_pCompiler->vnStore->IsVNConstant(vn))
1177 range = (m_pCompiler->vnStore->TypeOfVN(vn) == TYP_INT)
1178 ? Range(Limit(Limit::keConstant, m_pCompiler->vnStore->ConstantValue<int>(vn)))
1179 : Limit(Limit::keUnknown);
1181 // If local, find the definition from the def map and evaluate the range for rhs.
1182 else if (expr->IsLocal())
1184 range = ComputeRangeForLocalDef(block, expr->AsLclVarCommon(), monotonic DEBUGARG(indent + 1));
1185 MergeAssertion(block, expr, &range DEBUGARG(indent + 1));
1187 // If add, then compute the range for the operands and add them.
1188 else if (expr->OperGet() == GT_ADD)
1190 range = ComputeRangeForBinOp(block, expr->AsOp(), monotonic DEBUGARG(indent + 1));
1192 // If phi, then compute the range for arguments, calling the result "dependent" when looping begins.
1193 else if (expr->OperGet() == GT_PHI)
1195 for (GenTreeArgList* args = expr->gtOp.gtOp1->AsArgList(); args != nullptr; args = args->Rest())
1197 Range argRange = Range(Limit(Limit::keUndef));
1198 if (m_pSearchPath->Lookup(args->Current()))
1200 JITDUMP("PhiArg [%06d] is already being computed\n", Compiler::dspTreeID(args->Current()));
1201 argRange = Range(Limit(Limit::keDependent));
1205 argRange = GetRange(block, args->Current(), monotonic DEBUGARG(indent + 1));
1207 assert(!argRange.LowerLimit().IsUndef());
1208 assert(!argRange.UpperLimit().IsUndef());
1209 MergeAssertion(block, args->Current(), &argRange DEBUGARG(indent + 1));
1210 JITDUMP("Merging ranges %s %s:", range.ToString(m_pCompiler->getAllocatorDebugOnly()),
1211 argRange.ToString(m_pCompiler->getAllocatorDebugOnly()));
1212 range = RangeOps::Merge(range, argRange, monotonic);
1213 JITDUMP("%s\n", range.ToString(m_pCompiler->getAllocatorDebugOnly()));
1218 // The expression is not recognized, so the result is unknown.
1219 range = Range(Limit(Limit::keUnknown));
// Cache a heap copy of the computed range and pop expr off the search path.
1222 GetRangeMap()->Set(expr, new (&m_alloc) Range(range));
1223 m_pSearchPath->Remove(expr);
// Debug-dump helper: emits indentation proportional to the recursion depth.
// NOTE(review): the loop body (presumably a print of spaces) is elided here.
1228 void Indent(int indent)
1230 for (int i = 0; i < indent; ++i)
1237 // Get the range, if it is already computed, use the cached range value, else compute it.
// Thin memoizing wrapper over ComputeRange with verbose-mode tracing.
1238 Range RangeCheck::GetRange(BasicBlock* block, GenTree* expr, bool monotonic DEBUGARG(int indent))
1241 if (m_pCompiler->verbose)
1244 JITDUMP("[RangeCheck::GetRange] BB%02d", block->bbNum);
1245 m_pCompiler->gtDispTree(expr);
1247 JITDUMP("{\n", expr);
1251 Range* pRange = nullptr;
// Non-null pRange after the lookup means the cached copy was used.
1253 GetRangeMap()->Lookup(expr, &pRange) ? *pRange : ComputeRange(block, expr, monotonic DEBUGARG(indent));
1256 if (m_pCompiler->verbose)
1259 JITDUMP(" %s Range [%06d] => %s\n", (pRange == nullptr) ? "Computed" : "Cached", Compiler::dspTreeID(expr),
1260 range.ToString(m_pCompiler->getAllocatorDebugOnly()));
1262 JITDUMP("}\n", expr);
1269 // If this is a tree local definition add its location to the def map.
// Handles two def shapes: a use-as-def (GTF_VAR_USEASG, e.g. "+=" style
// nodes) where the correct SSA number must be fetched separately, and a
// plain def (GTF_VAR_DEF) under a GT_ASG parent.
1270 void RangeCheck::MapStmtDefs(const Location& loc)
1272 GenTreeLclVarCommon* tree = loc.tree;
1274 unsigned lclNum = tree->GetLclNum();
1275 unsigned ssaNum = tree->GetSsaNum();
1276 if (ssaNum == SsaConfig::RESERVED_SSA_NUM)
1281 // If useasg then get the correct ssaNum to add to the map.
1282 if (tree->gtFlags & GTF_VAR_USEASG)
// Note: this inner ssaNum intentionally shadows the outer one — the def's
// SSA number differs from the use's in the useasg case.
1284 unsigned ssaNum = m_pCompiler->GetSsaNumForLocalVarDef(tree);
1285 if (ssaNum != SsaConfig::RESERVED_SSA_NUM)
1287 // To avoid ind(addr) use asgs
1288 if (loc.parent->OperIsAssignment())
1290 SetDef(HashCode(lclNum, ssaNum), new (&m_alloc) Location(loc));
1294 // If def get the location and store it against the variable's ssaNum.
1295 else if (tree->gtFlags & GTF_VAR_DEF)
1297 if (loc.parent->OperGet() == GT_ASG)
1299 SetDef(HashCode(lclNum, ssaNum), new (&m_alloc) Location(loc));
// Callback payload for the MapMethodDefs tree walk: carries the RangeCheck
// instance plus the block/statement currently being walked.
// NOTE(review): the member declarations are elided from this excerpt; only
// the constructor line is visible.
1304 struct MapMethodDefsData
1310 MapMethodDefsData(RangeCheck* rc, BasicBlock* block, GenTree* stmt) : rc(rc), block(block), stmt(stmt)
1315 Compiler::fgWalkResult MapMethodDefsVisitor(GenTree** ptr, Compiler::fgWalkData* data)
1317 GenTree* tree = *ptr;
1318 MapMethodDefsData* rcd = ((MapMethodDefsData*)data->pCallbackData);
1320 if (tree->IsLocal())
1322 rcd->rc->MapStmtDefs(RangeCheck::Location(rcd->block, rcd->stmt, tree->AsLclVarCommon(), data->parent));
1325 return Compiler::WALK_CONTINUE;
// Walks every statement of every block, recording local-var def locations
// via MapMethodDefsVisitor, then marks the table as built (m_fMappedDefs).
1328 void RangeCheck::MapMethodDefs()
1330 // First, gather where all definitions occur in the program and store it in a map.
1331 for (BasicBlock* block = m_pCompiler->fgFirstBB; block; block = block->bbNext)
1333 for (GenTree* stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
1335 MapMethodDefsData data(this, block, stmt);
1336 m_pCompiler->fgWalkTreePre(&stmt->gtStmt.gtStmtExpr, MapMethodDefsVisitor, &data, false, true);
1339 m_fMappedDefs = true;
1343 // Entry point to range check optimizations.
// Requires SSA to have been built (bails when fgSsaPassesCompleted == 0),
// then visits every tree of every statement looking for bounds checks to
// optimize via OptimizeRangeCheck.
1344 void RangeCheck::OptimizeRangeChecks()
1346 if (m_pCompiler->fgSsaPassesCompleted == 0)
1351 if (m_pCompiler->verbose)
1353 JITDUMP("*************** In OptimizeRangeChecks()\n");
1354 JITDUMP("Blocks/trees before phase\n");
1355 m_pCompiler->fgDispBasicBlocks(true);
1359 // Walk through trees looking for arrBndsChk node and check if it can be optimized.
1360 for (BasicBlock* block = m_pCompiler->fgFirstBB; block; block = block->bbNext)
1362 for (GenTree* stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
1364 for (GenTree* tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
1370 OptimizeRangeCheck(block, stmt, tree);