if (conValTree != nullptr)
{
- if (tree->OperIs(GT_LCL_VAR))
+ if (!optIsProfitableToSubstitute(tree, block, conValTree))
{
- if (!optIsProfitableToSubstitute(tree->AsLclVar(), block, conValTree))
- {
- // Not profitable to substitute
- return nullptr;
- }
+ // Not profitable to substitute
+ return nullptr;
}
// Were able to optimize.
}
//------------------------------------------------------------------------------
-// optIsProfitableToSubstitute: Checks if value worth substituting to lcl location
+// optIsProfitableToSubstitute: Checks if value worth substituting to dest
//
// Arguments:
-// lcl - lcl to replace with value if profitable
-// lclBlock - Basic block lcl located in
-// value - value we plan to substitute to lcl
+// dest - destination to substitute value to
+// destBlock - Basic block of destination
+// value - value we plan to substitute
//
// Returns:
// False if it's likely not profitable to do substitution, True otherwise
//
-bool Compiler::optIsProfitableToSubstitute(GenTreeLclVarCommon* lcl, BasicBlock* lclBlock, GenTree* value)
+bool Compiler::optIsProfitableToSubstitute(GenTree* dest, BasicBlock* destBlock, GenTree* value)
{
+ // Declining to substitute these handle kinds has been shown to improve code size
+ if (value->IsIconHandle(GTF_ICON_STATIC_HDL, GTF_ICON_CLASS_HDL))
+ {
+ return false;
+ }
+
// A simple heuristic: If the constant is defined outside of a loop (not far from its head)
// and is used inside it - don't propagate.
// TODO: Extend on more kinds of trees
- if (!value->OperIs(GT_CNS_VEC, GT_CNS_DBL))
+ if (!value->OperIs(GT_CNS_VEC, GT_CNS_DBL) || !dest->OperIs(GT_LCL_VAR))
{
return true;
}
+ const GenTreeLclVar* lcl = dest->AsLclVar();
+
gtPrepareCost(value);
if ((value->GetCostEx() > 1) && (value->GetCostSz() > 1))
// NOTE: this currently does not take "a float living across a call" case into account
// where we might end up with spill/restore on ABIs without callee-saved registers
const weight_t defBlockWeight = defBlock->getBBWeight(this);
- const weight_t lclblockWeight = lclBlock->getBBWeight(this);
+ const weight_t lclblockWeight = destBlock->getBBWeight(this);
if ((defBlockWeight > 0) && ((lclblockWeight / defBlockWeight) >= BB_LOOP_WEIGHT_SCALE))
{
- JITDUMP("Constant propagation inside loop " FMT_BB " is not profitable\n", lclBlock->bbNum);
+ JITDUMP("Constant propagation inside loop " FMT_BB " is not profitable\n", destBlock->bbNum);
return false;
}
}
GenTree* optConstantAssertionProp(AssertionDsc* curAssertion,
GenTreeLclVarCommon* tree,
Statement* stmt DEBUGARG(AssertionIndex index));
- bool optIsProfitableToSubstitute(GenTreeLclVarCommon* lcl, BasicBlock* lclBlock, GenTree* value);
+ bool optIsProfitableToSubstitute(GenTree* dest, BasicBlock* destBlock, GenTree* value);
bool optZeroObjAssertionProp(GenTree* tree, ASSERT_VALARG_TP assertions);
// Assertion propagation functions.
, m_longCnsMap(nullptr)
, m_handleMap(nullptr)
, m_embeddedToCompileTimeHandleMap(alloc)
+ , m_fieldAddressToFieldSeqMap(alloc)
, m_floatCnsMap(nullptr)
, m_doubleCnsMap(nullptr)
, m_byrefCnsMap(nullptr)
case TYP_BOOL:
if (tree->IsIconHandle())
{
- const ssize_t embeddedHandle = tree->AsIntCon()->IconValue();
- tree->gtVNPair.SetBoth(vnStore->VNForHandle(embeddedHandle, tree->GetIconHandleFlag()));
- if (tree->GetIconHandleFlag() == GTF_ICON_CLASS_HDL)
+ const GenTreeIntCon* cns = tree->AsIntCon();
+ const GenTreeFlags handleFlags = tree->GetIconHandleFlag();
+ tree->gtVNPair.SetBoth(vnStore->VNForHandle(cns->IconValue(), handleFlags));
+ if (handleFlags == GTF_ICON_CLASS_HDL)
{
- const ssize_t compileTimeHandle = tree->AsIntCon()->gtCompileTimeHandle;
- vnStore->AddToEmbeddedHandleMap(embeddedHandle, compileTimeHandle);
+ vnStore->AddToEmbeddedHandleMap(cns->IconValue(), cns->gtCompileTimeHandle);
+ }
+ else if ((handleFlags == GTF_ICON_STATIC_HDL) && (cns->gtFieldSeq != nullptr) &&
+ (cns->gtFieldSeq->GetKind() == FieldSeq::FieldKind::SimpleStaticKnownAddress))
+ {
+ assert(cns->IconValue() == cns->gtFieldSeq->GetOffset());
+
+ // For now we're interested only in SimpleStaticKnownAddress
+ vnStore->AddToFieldAddressToFieldSeqMap(cns->gtVNPair.GetLiberal(), cns->gtFieldSeq);
}
}
else if ((typ == TYP_LONG) || (typ == TYP_ULONG))
ValueNum op1vn = op1->gtVNPair.GetLiberal();
ValueNum op2vn = op2->gtVNPair.GetLiberal();
- if (!op1->IsIconHandle(GTF_ICON_STATIC_HDL) && op1->gtVNPair.BothEqual() && vnStore->IsVNConstant(op1vn) &&
+ if (op1->gtVNPair.BothEqual() && vnStore->IsVNConstant(op1vn) && !vnStore->IsVNHandle(op1vn) &&
varTypeIsIntegral(vnStore->TypeOfVN(op1vn)))
{
val += vnStore->CoercedConstantValue<ssize_t>(op1vn);
tree = op2;
}
- else if (!op2->IsIconHandle(GTF_ICON_STATIC_HDL) && op2->gtVNPair.BothEqual() && vnStore->IsVNConstant(op2vn) &&
+ else if (op2->gtVNPair.BothEqual() && vnStore->IsVNConstant(op2vn) && !vnStore->IsVNHandle(op2vn) &&
varTypeIsIntegral(vnStore->TypeOfVN(op2vn)))
{
val += vnStore->CoercedConstantValue<ssize_t>(op2vn);
}
// Base address is expected to be static field's address
- if ((tree->IsCnsIntOrI()) && (tree->AsIntCon()->gtFieldSeq != nullptr) &&
- (tree->AsIntCon()->gtFieldSeq->GetKind() == FieldSeq::FieldKind::SimpleStaticKnownAddress))
+ ValueNum treeVN = tree->gtVNPair.GetLiberal();
+ if (tree->gtVNPair.BothEqual() && vnStore->IsVNHandle(treeVN) &&
+ (vnStore->GetHandleFlags(treeVN) == GTF_ICON_STATIC_HDL))
{
- *pFseq = tree->AsIntCon()->gtFieldSeq;
- *byteOffset = tree->AsIntCon()->IconValue() + val - tree->AsIntCon()->gtFieldSeq->GetOffset();
- return true;
+ FieldSeq* fldSeq = vnStore->GetFieldSeqFromAddress(treeVN);
+ if (fldSeq != nullptr)
+ {
+ assert(fldSeq->GetKind() == FieldSeq::FieldKind::SimpleStaticKnownAddress);
+ assert(fldSeq->GetOffset() == vnStore->CoercedConstantValue<ssize_t>(treeVN));
+
+ *pFseq = fldSeq;
+ *byteOffset = val;
+ return true;
+ }
}
return false;
}
bool srcIsUnsigned, /* = false */
bool hasOverflowCheck) /* = false */
{
+
+ if ((castFromType == TYP_I_IMPL) && (castToType == TYP_BYREF) && IsVNHandle(srcVN))
+ {
+ // Omit cast for (h)CNS_INT [TYP_I_IMPL -> TYP_BYREF]
+ return srcVN;
+ }
+
// The resulting type after performing the cast is always widened to a supported IL stack size
var_types resultType = genActualType(castToType);
bool srcIsUnsigned, /* = false */
bool hasOverflowCheck) /* = false */
{
- ValueNum srcLibVN = srcVNPair.GetLiberal();
- ValueNum srcConVN = srcVNPair.GetConservative();
+ ValueNum srcLibVN = srcVNPair.GetLiberal();
+ ValueNum srcConVN = srcVNPair.GetConservative();
+
ValueNum castLibVN = VNForCast(srcLibVN, castToType, castFromType, srcIsUnsigned, hasOverflowCheck);
ValueNum castConVN;
m_embeddedToCompileTimeHandleMap.AddOrUpdate(embeddedHandle, compileTimeHandle);
}
+ // Records the mapping from a static field address value number to its FieldSeq,
+ // so later address decomposition can recover the field sequence from the VN alone.
+ // Overwrites any existing entry for the same address VN.
+ void AddToFieldAddressToFieldSeqMap(ValueNum fldAddr, FieldSeq* fldSeq)
+ {
+ m_fieldAddressToFieldSeqMap.AddOrUpdate(fldAddr, fldSeq);
+ }
+
+ // Looks up the FieldSeq previously recorded (via AddToFieldAddressToFieldSeqMap)
+ // for the given static field address value number.
+ // Returns nullptr if no field sequence is known for this address VN.
+ FieldSeq* GetFieldSeqFromAddress(ValueNum fldAddr)
+ {
+ FieldSeq* fldSeq;
+ if (m_fieldAddressToFieldSeqMap.TryGetValue(fldAddr, &fldSeq))
+ {
+ return fldSeq;
+ }
+ return nullptr;
+ }
+
// And the single constant for an object reference type.
static ValueNum VNForNull()
{
typedef SmallHashTable<ssize_t, ssize_t> EmbeddedToCompileTimeHandleMap;
EmbeddedToCompileTimeHandleMap m_embeddedToCompileTimeHandleMap;
+ typedef SmallHashTable<ValueNum, FieldSeq*> FieldAddressToFieldSeqMap;
+ FieldAddressToFieldSeqMap m_fieldAddressToFieldSeqMap;
+
struct LargePrimitiveKeyFuncsFloat : public JitLargePrimitiveKeyFuncs<float>
{
static bool Equals(float x, float y)