* Enable checking of GTF_EXCEPT and GTF_ASG flags.
fgDebugCheckFlags is modified to check that GTF_EXCEPT and GTF_ASG are set precisely when needed.
It's also modified to handle several special operators correctly.
fgAddrCouldBeNull is updated to check for handles, implicit byref locals, and stack byrefs.
OperMayThrow is modified to handle several operators correctly.
GTF_IND_NONFAULTING is reused on operations for which OperIsIndir() is true and on GT_ARR_LENGTH.
Various places in morph are updated to set side effect flags correctly.
gtUpdateSideEffects is re-written so that it's precise for GTF_ASG and GTF_EXCEPT
and conservatively correct for the other side effects. It's now called from more places
to keep the flags up-to-date after transformations.
NoThrow in HelperCallProperties is updated and GTF_EXCEPT flag is set on helper calls according to
that property.
optRemoveRangeCheck is cleaned up and simplified.
if ((tree->gtGetOp1()->OperGet() == GT_ARR_BOUNDS_CHECK) &&
((tree->gtGetOp1()->gtFlags & GTF_ARR_BOUND_INBND) != 0))
{
- optRemoveRangeCheck(tree, stmt, true, GTF_ASG, true /* force remove */);
+ optRemoveRangeCheck(tree, stmt);
return optAssertionProp_Update(tree, tree, stmt);
}
return nullptr;
}
#endif
tree->gtFlags &= ~GTF_EXCEPT;
+ tree->gtFlags |= GTF_IND_NONFAULTING;
// Set this flag to prevent reordering
tree->gtFlags |= GTF_ORDER_SIDEEFF;
#endif
if (tree->gtFlags & GTF_IND_NONFAULTING)
{
- if ((op == GT_IND) || (op == GT_STOREIND))
+ if (tree->OperIsIndirOrArrLength())
{
chars += printf("[IND_NONFAULTING]");
}
GenTreeArgList* args,
IL_OFFSETX ilOffset = BAD_IL_OFFSET);
- GenTreeCall* gtNewHelperCallNode(unsigned helper,
- var_types type,
- unsigned flags = 0,
- GenTreeArgList* args = nullptr);
+ GenTreeCall* gtNewHelperCallNode(unsigned helper, var_types type, GenTreeArgList* args = nullptr);
GenTreePtr gtNewLclvNode(unsigned lnum, var_types type, IL_OFFSETX ILoffs = BAD_IL_OFFSET);
GenTreePtr gtNewIndexRef(var_types typ, GenTreePtr arrayOp, GenTreePtr indexOp);
+ GenTreeArrLen* gtNewArrLen(var_types typ, GenTree* arrayOp, int lenOffset);
+
+ GenTree* gtNewIndir(var_types typ, GenTree* addr);
+
GenTreeArgList* gtNewArgList(GenTreePtr op);
GenTreeArgList* gtNewArgList(GenTreePtr op1, GenTreePtr op2);
GenTreeArgList* gtNewArgList(GenTreePtr op1, GenTreePtr op2, GenTreePtr op3);
GenTreePtr gtReplaceTree(GenTreePtr stmt, GenTreePtr tree, GenTreePtr replacementTree);
- void gtUpdateSideEffects(GenTreePtr tree, unsigned oldGtFlags, unsigned newGtFlags);
+ void gtUpdateSideEffects(GenTree* stmt, GenTree* tree);
+
+ void gtUpdateTreeAncestorsSideEffects(GenTree* tree);
+
+ void gtUpdateStmtSideEffects(GenTree* stmt);
+
+ void gtResetNodeSideEffects(GenTree* tree);
// Returns "true" iff the complexity (not formally defined, but first interpretation
// is #of nodes in subtree) of "tree" is greater than "limit".
void fgFixupStructReturn(GenTreePtr call);
GenTreePtr fgMorphLocalVar(GenTreePtr tree, bool forceRemorph);
+
+public:
bool fgAddrCouldBeNull(GenTreePtr addr);
+
+private:
GenTreePtr fgMorphField(GenTreePtr tree, MorphAddrContext* mac);
bool fgCanFastTailCall(GenTreeCall* call);
void fgMorphTailCall(GenTreeCall* call);
void fgMarkAddressExposedLocals();
bool fgNodesMayInterfere(GenTree* store, GenTree* load);
+ static fgWalkPreFn fgUpdateSideEffectsPre;
+ static fgWalkPostFn fgUpdateSideEffectsPost;
+
// Returns true if the type of tree is of size at least "width", or if "tree" is not a
// local variable.
bool fgFitsInOrNotLoc(GenTreePtr tree, unsigned width);
LclVarDsc* optIsTrackedLocal(GenTreePtr tree);
public:
- void optRemoveRangeCheck(
- GenTreePtr tree, GenTreePtr stmt, bool updateCSEcounts, unsigned sideEffFlags = 0, bool forceRemove = false);
+ void optRemoveRangeCheck(GenTreePtr tree, GenTreePtr stmt);
bool optIsRangeCheckRemovable(GenTreePtr tree);
protected:
/*****************************************************************************/
-inline GenTreeCall* Compiler::gtNewHelperCallNode(unsigned helper, var_types type, unsigned flags, GenTreeArgList* args)
+//------------------------------------------------------------------------------
+// gtNewHelperCallNode : Helper to create a helper call node.
+//
+//
+// Arguments:
+// helper - Call helper
+// type - Type of the node
+// args - Call args
+//
+// Return Value:
+// New CT_HELPER node
+
+inline GenTreeCall* Compiler::gtNewHelperCallNode(unsigned helper, var_types type, GenTreeArgList* args)
{
+ unsigned flags = s_helperCallProperties.NoThrow((CorInfoHelpFunc)helper) ? 0 : GTF_EXCEPT;
GenTreeCall* result = gtNewCallNode(CT_HELPER, eeFindHelper(helper), type, args);
result->gtFlags |= flags;
return result;
}
+//------------------------------------------------------------------------------
+// gtNewArrLen : Helper to create an array length node.
+//
+//
+// Arguments:
+// typ - Type of the node
+// arrayOp - Array node
+// lenOffset - Offset of the length field
+//
+// Return Value:
+// New GT_ARR_LENGTH node
+
+inline GenTreeArrLen* Compiler::gtNewArrLen(var_types typ, GenTree* arrayOp, int lenOffset)
+{
+ GenTreeArrLen* arrLen = new (this, GT_ARR_LENGTH) GenTreeArrLen(typ, arrayOp, lenOffset);
+ static_assert_no_msg(GTF_ARRLEN_NONFAULTING == GTF_IND_NONFAULTING);
+ arrLen->SetIndirExceptionFlags(this);
+ return arrLen;
+}
+
+//------------------------------------------------------------------------------
+// gtNewIndir : Helper to create an indirection node.
+//
+// Arguments:
+// typ - Type of the node
+// addr - Address of the indirection
+//
+// Return Value:
+// New GT_IND node
+
+inline GenTree* Compiler::gtNewIndir(var_types typ, GenTree* addr)
+{
+ GenTree* indir = gtNewOperNode(GT_IND, typ, addr);
+ indir->SetIndirExceptionFlags(this);
+ return indir;
+}
+
/*****************************************************************************
*
* Create (and check for) a "nothing" node, i.e. a node that doesn't produce
{
assert(!OperIsConst(oper)); // use ChangeOperLeaf() instead
+ unsigned mask = GTF_COMMON_MASK;
+ if (this->OperIsIndirOrArrLength() && OperIsIndirOrArrLength(oper))
+ {
+ mask |= GTF_IND_NONFAULTING;
+ }
SetOper(oper, vnUpdate);
- gtFlags &= GTF_COMMON_MASK;
+ gtFlags &= mask;
// Do "oper"-specific initializations...
switch (oper)
inline void GenTree::ChangeOperUnchecked(genTreeOps oper)
{
+ unsigned mask = GTF_COMMON_MASK;
+ if (this->OperIsIndirOrArrLength() && OperIsIndirOrArrLength(oper))
+ {
+ mask |= GTF_IND_NONFAULTING;
+ }
SetOperRaw(oper); // Trust the caller and don't use SetOper()
- gtFlags &= GTF_COMMON_MASK;
+ gtFlags &= mask;
}
/*****************************************************************************
return score + ((preferOp2) ? 1 : -1);
}
-/**************************************************************************************
- *
- * Perform copy propagation on a given tree as we walk the graph and if it is a local
- * variable, then look up all currently live definitions and check if any of those
- * definitions share the same value number. If so, then we can make the replacement.
- *
- */
+//------------------------------------------------------------------------------
+// optCopyProp : Perform copy propagation on a given tree as we walk the graph and if it is a local
+// variable, then look up all currently live definitions and check if any of those
+// definitions share the same value number. If so, then we can make the replacement.
+//
+// Arguments:
+// block - Block the tree belongs to
+// stmt - Statement the tree belongs to
+// tree - The tree to perform copy propagation on
+// curSsaName - The map from lclNum to its recently live definitions as a stack
+
void Compiler::optCopyProp(BasicBlock* block, GenTreePtr stmt, GenTreePtr tree, LclNumToGenTreePtrStack* curSsaName)
{
// TODO-Review: EH successor/predecessor iteration seems broken.
lvaTable[newLclNum].incRefCnts(block->getBBWeight(this), this);
tree->gtLclVarCommon.SetLclNum(newLclNum);
tree->AsLclVarCommon()->SetSsaNum(newSsaNum);
+ gtUpdateSideEffects(stmt, tree);
#ifdef DEBUG
if (verbose)
{
return tree->IsLocal() && !fgExcludeFromSsa(tree->AsLclVarCommon()->GetLclNum());
}
-/**************************************************************************************
- *
- * Perform copy propagation using currently live definitions on the current block's
- * variables. Also as new definitions are encountered update the "curSsaName" which
- * tracks the currently live definitions.
- *
- */
+//------------------------------------------------------------------------------
+// optBlockCopyProp : Perform copy propagation using currently live definitions on the current block's
+// variables. Also as new definitions are encountered update the "curSsaName" which
+// tracks the currently live definitions.
+//
+// Arguments:
+// block - Block the tree belongs to
+// curSsaName - The map from lclNum to its recently live definitions as a stack
+
void Compiler::optBlockCopyProp(BasicBlock* block, LclNumToGenTreePtrStack* curSsaName)
{
JITDUMP("Copy Assertion for BB%02u\n", block->bbNum);
for (GenTreePtr tree = stmt->gtStmt.gtStmtList; tree; tree = tree->gtNext)
{
compUpdateLife</*ForCodeGen*/ false>(tree);
+
optCopyProp(block, stmt, tree, curSsaName);
// TODO-Review: Merge this loop with the following loop to correctly update the
GenTreeArgList* argList = m_compiler->gtNewArgList(loOp1, hiOp1, shiftByOp);
- GenTree* call = m_compiler->gtNewHelperCallNode(helper, TYP_LONG, 0, argList);
+ GenTree* call = m_compiler->gtNewHelperCallNode(helper, TYP_LONG, argList);
call->gtFlags |= shift->gtFlags & GTF_ALL_EFFECT;
if (shift->IsUnusedValue())
{
if (optEarlyPropRewriteTree(tree))
{
+ gtUpdateSideEffects(stmt, tree);
isRewritten = true;
}
}
- // Morph the stmt and update the evaluation order if the stmt has been rewritten.
+ // Update the evaluation order and the statement info if the stmt has been rewritten.
if (isRewritten)
{
gtSetStmtInfo(stmt);
// Set this flag to prevent reordering
nullCheckTree->gtFlags |= GTF_ORDER_SIDEEFF;
+ nullCheckTree->gtFlags |= GTF_IND_NONFAULTING;
defRHS->gtFlags &= ~(GTF_EXCEPT | GTF_DONT_CSE);
defRHS->gtFlags |=
// In such cases we still want to add the method entry callback node
GenTreeArgList* args = gtNewArgList(gtNewIconEmbMethHndNode(info.compMethodHnd));
- GenTreePtr call = gtNewHelperCallNode(CORINFO_HELP_BBT_FCN_ENTER, TYP_VOID, 0, args);
+ GenTreePtr call = gtNewHelperCallNode(CORINFO_HELP_BBT_FCN_ENTER, TYP_VOID, args);
stmt = gtNewStmt(call);
}
}
GenTreeArgList* args = gtNewArgList(arg);
- GenTreePtr call = gtNewHelperCallNode(CORINFO_HELP_BBT_FCN_ENTER, TYP_VOID, 0, args);
+ GenTreePtr call = gtNewHelperCallNode(CORINFO_HELP_BBT_FCN_ENTER, TYP_VOID, args);
GenTreePtr handle =
gtNewIconEmbHndNode((void*)&bbProfileBufferStart->ExecutionCount, nullptr, GTF_ICON_BBC_PTR);
argList = gtNewArgList(opModuleIDArg);
}
- if (!s_helperCallProperties.NoThrow(helper))
- {
- callFlags |= GTF_EXCEPT;
- }
+ GenTreeCall* result = gtNewHelperCallNode(helper, type, argList);
+ result->gtFlags |= callFlags;
- return gtNewHelperCallNode(helper, type, callFlags, argList);
+ return result;
}
GenTreeCall* Compiler::fgGetSharedCCtor(CORINFO_CLASS_HANDLE cls)
return fgGetStaticsCCtorHelper(cls, info.compCompHnd->getSharedCCtorHelper(cls));
}
+//------------------------------------------------------------------------------
+// fgAddrCouldBeNull : Check whether the address tree can represent null.
//
-// Returns true unless the address expression could
-// never represent a NULL
//
+// Arguments:
+// addr - Address to check
+//
+// Return Value:
+// True if address could be null; false otherwise
+
bool Compiler::fgAddrCouldBeNull(GenTreePtr addr)
{
- if (addr->gtOper == GT_ADDR)
+ if ((addr->gtOper == GT_CNS_INT) && addr->IsIconHandle())
+ {
+ return false;
+ }
+ else if (addr->gtOper == GT_LCL_VAR)
+ {
+ unsigned varNum = addr->AsLclVarCommon()->GetLclNum();
+
+ if (lvaIsImplicitByRefLocal(varNum))
+ {
+ return false;
+ }
+
+ LclVarDsc* varDsc = &lvaTable[varNum];
+
+ if (varDsc->lvStackByref)
+ {
+ return false;
+ }
+ }
+ else if (addr->gtOper == GT_ADDR)
{
if (addr->gtOp.gtOp1->gtOper == GT_CNS_INT)
{
return true;
}
}
- else if (addr->gtOp.gtOp1->OperIsLocalAddr())
- {
- return false;
- }
+
return false; // we can't have a null address
}
else if (addr->gtOper == GT_ADD)
helperArgs = gtNewArgList(thisPointer, targetObjPointers, ctxTree);
entryPoint = genericLookup;
}
- call = gtNewHelperCallNode(CORINFO_HELP_READYTORUN_DELEGATE_CTOR, TYP_VOID, GTF_EXCEPT, helperArgs);
+ call = gtNewHelperCallNode(CORINFO_HELP_READYTORUN_DELEGATE_CTOR, TYP_VOID, helperArgs);
call->setEntryPoint(entryPoint);
}
}
GenTreePtr targetObjPointers = call->gtCallArgs->Current();
GenTreeArgList* helperArgs = gtNewArgList(thisPointer, targetObjPointers);
- call = gtNewHelperCallNode(CORINFO_HELP_READYTORUN_DELEGATE_CTOR, TYP_VOID, GTF_EXCEPT, helperArgs);
+ call = gtNewHelperCallNode(CORINFO_HELP_READYTORUN_DELEGATE_CTOR, TYP_VOID, helperArgs);
CORINFO_LOOKUP entryPoint;
info.compCompHnd->getReadyToRunDelegateCtorHelper(ldftnToken, clsHnd, &entryPoint);
tree = gtNewLclvNode(info.compTypeCtxtArg, TYP_I_IMPL);
// Call helper CORINFO_HELP_GETCLASSFROMMETHODPARAM to get the class handle
// from the method handle.
- tree = gtNewHelperCallNode(CORINFO_HELP_GETCLASSFROMMETHODPARAM, TYP_I_IMPL, 0, gtNewArgList(tree));
+ tree = gtNewHelperCallNode(CORINFO_HELP_GETCLASSFROMMETHODPARAM, TYP_I_IMPL, gtNewArgList(tree));
break;
}
noway_assert(tree); // tree should now contain the CORINFO_CLASS_HANDLE for the exact class.
// Given the class handle, get the pointer to the Monitor.
- tree = gtNewHelperCallNode(CORINFO_HELP_GETSYNCFROMCLASSHANDLE, TYP_I_IMPL, 0, gtNewArgList(tree));
+ tree = gtNewHelperCallNode(CORINFO_HELP_GETSYNCFROMCLASSHANDLE, TYP_I_IMPL, gtNewArgList(tree));
}
noway_assert(tree);
if (info.compIsStatic)
{
tree = fgGetCritSectOfStaticMethod();
- tree = gtNewHelperCallNode(enter ? CORINFO_HELP_MON_ENTER_STATIC : CORINFO_HELP_MON_EXIT_STATIC, TYP_VOID, 0,
+ tree = gtNewHelperCallNode(enter ? CORINFO_HELP_MON_ENTER_STATIC : CORINFO_HELP_MON_EXIT_STATIC, TYP_VOID,
gtNewArgList(tree, varAddrNode));
}
else
{
tree = gtNewLclvNode(lvaThisVar, TYP_REF);
- tree = gtNewHelperCallNode(enter ? CORINFO_HELP_MON_ENTER : CORINFO_HELP_MON_EXIT, TYP_VOID, 0,
+ tree = gtNewHelperCallNode(enter ? CORINFO_HELP_MON_ENTER : CORINFO_HELP_MON_EXIT, TYP_VOID,
gtNewArgList(tree, varAddrNode));
}
tree = gtNewOperNode(GT_ADDR, TYP_I_IMPL, gtNewLclvNode(lvaReversePInvokeFrameVar, TYP_BLK));
- tree = gtNewHelperCallNode(CORINFO_HELP_JIT_REVERSE_PINVOKE_ENTER, TYP_VOID, 0, gtNewArgList(tree));
+ tree = gtNewHelperCallNode(CORINFO_HELP_JIT_REVERSE_PINVOKE_ENTER, TYP_VOID, gtNewArgList(tree));
fgEnsureFirstBBisScratch();
tree = gtNewOperNode(GT_ADDR, TYP_I_IMPL, gtNewLclvNode(lvaReversePInvokeFrameVar, TYP_BLK));
- tree = gtNewHelperCallNode(CORINFO_HELP_JIT_REVERSE_PINVOKE_EXIT, TYP_VOID, 0, gtNewArgList(tree));
+ tree = gtNewHelperCallNode(CORINFO_HELP_JIT_REVERSE_PINVOKE_EXIT, TYP_VOID, gtNewArgList(tree));
assert(genReturnBB != nullptr);
tree = gtNewIconEmbMethHndNode(info.compMethodHnd);
- tree = gtNewHelperCallNode(info.compCompHnd->getSecurityPrologHelper(info.compMethodHnd), TYP_VOID, 0,
+ tree = gtNewHelperCallNode(info.compCompHnd->getSecurityPrologHelper(info.compMethodHnd), TYP_VOID,
gtNewArgList(tree, gtNewOperNode(GT_ADDR, TYP_BYREF,
gtNewLclvNode(lvaSecurityObject, TYP_REF))));
{
tree = fgGetCritSectOfStaticMethod();
- tree = gtNewHelperCallNode(CORINFO_HELP_MON_ENTER_STATIC, TYP_VOID, 0, gtNewArgList(tree));
+ tree = gtNewHelperCallNode(CORINFO_HELP_MON_ENTER_STATIC, TYP_VOID, gtNewArgList(tree));
}
else
{
tree = gtNewLclvNode(info.compThisArg, TYP_REF);
- tree = gtNewHelperCallNode(CORINFO_HELP_MON_ENTER, TYP_VOID, 0, gtNewArgList(tree));
+ tree = gtNewHelperCallNode(CORINFO_HELP_MON_ENTER, TYP_VOID, gtNewArgList(tree));
}
/* Create a new basic block and stick the call in it */
{
tree = fgGetCritSectOfStaticMethod();
- tree = gtNewHelperCallNode(CORINFO_HELP_MON_EXIT_STATIC, TYP_VOID, 0, gtNewArgList(tree));
+ tree = gtNewHelperCallNode(CORINFO_HELP_MON_EXIT_STATIC, TYP_VOID, gtNewArgList(tree));
}
else
{
tree = gtNewLclvNode(info.compThisArg, TYP_REF);
- tree = gtNewHelperCallNode(CORINFO_HELP_MON_EXIT, TYP_VOID, 0, gtNewArgList(tree));
+ tree = gtNewHelperCallNode(CORINFO_HELP_MON_EXIT, TYP_VOID, gtNewArgList(tree));
}
fgInsertStmtAtEnd(genReturnBB, tree);
tree = gtNewIconEmbMethHndNode(info.compMethodHnd);
- tree = gtNewHelperCallNode(CORINFO_HELP_VERIFICATION_RUNTIME_CHECK, TYP_VOID, 0, gtNewArgList(tree));
+ tree = gtNewHelperCallNode(CORINFO_HELP_VERIFICATION_RUNTIME_CHECK, TYP_VOID, gtNewArgList(tree));
/* Create a new basic block and stick the call in it */
noway_assert(helper != CORINFO_HELP_UNDEF);
// Add the appropriate helper call.
- tree = gtNewHelperCallNode(helper, TYP_VOID, GTF_EXCEPT);
+ tree = gtNewHelperCallNode(helper, TYP_VOID);
// There are no args here but fgMorphArgs has side effects
// such as setting the outgoing arg area (which is necessary
chkFlags |= GTF_ORDER_SIDEEFF;
break;
+ case GT_MEMORYBARRIER:
+ chkFlags |= GTF_GLOB_REF | GTF_ASG;
+ break;
+
default:
break;
}
*/
}
- if (kind & GTK_ASGOP)
+ if (tree->OperRequiresAsgFlag())
{
chkFlags |= GTF_ASG;
}
- /* Note that it is OK for treeFlags not to have a GTF_EXCEPT,
- AssertionProp's non-Null may have cleared it */
- if (tree->OperMayThrow())
+ if (tree->OperMayThrow(this))
{
- chkFlags |= (treeFlags & GTF_EXCEPT);
+ chkFlags |= GTF_EXCEPT;
}
if (oper == GT_ADDR && (op1->OperIsLocal() || op1->gtOper == GT_CLS_VAR ||
else
{
+ if (tree->OperMayThrow(this))
+ {
+ chkFlags |= GTF_EXCEPT;
+ }
+
switch (tree->OperGet())
{
case GT_CALL:
chkFlags |= GTF_CALL;
- if ((treeFlags & GTF_EXCEPT) && !(chkFlags & GTF_EXCEPT))
- {
- switch (eeGetHelperNum(call->gtCallMethHnd))
- {
- // Is this a helper call that can throw an exception ?
- case CORINFO_HELP_LDIV:
- case CORINFO_HELP_LMOD:
- case CORINFO_HELP_METHOD_ACCESS_CHECK:
- case CORINFO_HELP_FIELD_ACCESS_CHECK:
- case CORINFO_HELP_CLASS_ACCESS_CHECK:
- case CORINFO_HELP_DELEGATE_SECURITY_CHECK:
- chkFlags |= GTF_EXCEPT;
- break;
- default:
- break;
- }
- }
-
if (call->gtCallObjp)
{
fgDebugCheckFlags(call->gtCallObjp);
break;
case GT_ARR_OFFSET:
+
fgDebugCheckFlags(tree->gtArrOffs.gtOffset);
chkFlags |= (tree->gtArrOffs.gtOffset->gtFlags & GTF_ALL_EFFECT);
fgDebugCheckFlags(tree->gtArrOffs.gtIndex);
chkFlags |= (tree->gtArrOffs.gtArrObj->gtFlags & GTF_ALL_EFFECT);
break;
+ case GT_ARR_BOUNDS_CHECK:
+#ifdef FEATURE_SIMD
+ case GT_SIMD_CHK:
+#endif // FEATURE_SIMD
+
+ GenTreeBoundsChk* bndsChk;
+ bndsChk = tree->AsBoundsChk();
+ fgDebugCheckFlags(bndsChk->gtIndex);
+ chkFlags |= (bndsChk->gtIndex->gtFlags & GTF_ALL_EFFECT);
+ fgDebugCheckFlags(bndsChk->gtArrLen);
+ chkFlags |= (bndsChk->gtArrLen->gtFlags & GTF_ALL_EFFECT);
+ break;
+
+ case GT_CMPXCHG:
+
+ chkFlags |= (GTF_GLOB_REF | GTF_ASG);
+ GenTreeCmpXchg* cmpXchg;
+ cmpXchg = tree->AsCmpXchg();
+ fgDebugCheckFlags(cmpXchg->gtOpLocation);
+ chkFlags |= (cmpXchg->gtOpLocation->gtFlags & GTF_ALL_EFFECT);
+ fgDebugCheckFlags(cmpXchg->gtOpValue);
+ chkFlags |= (cmpXchg->gtOpValue->gtFlags & GTF_ALL_EFFECT);
+ fgDebugCheckFlags(cmpXchg->gtOpComparand);
+ chkFlags |= (cmpXchg->gtOpComparand->gtFlags & GTF_ALL_EFFECT);
+ break;
+
+ case GT_STORE_DYN_BLK:
+ case GT_DYN_BLK:
+
+ GenTreeDynBlk* dynBlk;
+ dynBlk = tree->AsDynBlk();
+ fgDebugCheckFlags(dynBlk->gtDynamicSize);
+ chkFlags |= (dynBlk->gtDynamicSize->gtFlags & GTF_ALL_EFFECT);
+ fgDebugCheckFlags(dynBlk->Addr());
+ chkFlags |= (dynBlk->Addr()->gtFlags & GTF_ALL_EFFECT);
+ if (tree->OperGet() == GT_STORE_DYN_BLK)
+ {
+ fgDebugCheckFlags(dynBlk->Data());
+ chkFlags |= (dynBlk->Data()->gtFlags & GTF_ALL_EFFECT);
+ }
+ break;
+
default:
+
+#ifdef DEBUG
+ gtDispTree(tree);
+#endif
+
+ assert(!"Unknown operator for fgDebugCheckFlags");
break;
}
}
}
else if (treeFlags & ~chkFlags)
{
-#if 0
- // TODO-Cleanup:
- /* The tree has extra flags set. However, this will happen if we
- replace a subtree with something, but don't clear the flags up
- the tree. Can't flag this unless we start clearing flags above.
-
- Note: we need this working for GTF_CALL and CSEs, so I'm enabling
- it for calls.
- */
- if (tree->OperGet() != GT_CALL && (treeFlags & GTF_CALL) && !(chkFlags & GTF_CALL))
+ // TODO: We are currently only checking extra GTF_EXCEPT and GTF_ASG flags.
+ if ((treeFlags & ~chkFlags & ~GTF_GLOB_REF & ~GTF_ORDER_SIDEEFF & ~GTF_CALL) != 0)
{
// Print the tree so we can see it in the log.
- printf("Extra GTF_CALL flags on parent tree [%X]: ", tree);
+ printf("Extra flags on parent tree [%X]: ", tree);
GenTree::gtDispFlags(treeFlags & ~chkFlags, GTF_DEBUG_NONE);
printf("\n");
gtDispTree(tree);
noway_assert(!"Extra flags on tree");
// Print the tree again so we can see it right after we hook up the debugger.
- printf("Extra GTF_CALL flags on parent tree [%X]: ", tree);
+ printf("Extra flags on parent tree [%X]: ", tree);
GenTree::gtDispFlags(treeFlags & ~chkFlags, GTF_DEBUG_NONE);
printf("\n");
gtDispTree(tree);
- }
-#endif // 0
+ }
}
}
return parent;
}
-/*****************************************************************************
- *
- * Returns true if the given operator may cause an exception.
- */
+//------------------------------------------------------------------------------
+// OperRequiresAsgFlag : Check whether the operation requires the GTF_ASG flag
+// regardless of the children's flags.
+//
+
+bool GenTree::OperRequiresAsgFlag()
+{
+ return ((OperKind() & GTK_ASGOP) || (gtOper == GT_XADD) || (gtOper == GT_XCHG) || (gtOper == GT_LOCKADD) ||
+ (gtOper == GT_CMPXCHG) || (gtOper == GT_MEMORYBARRIER));
+}
+
+//------------------------------------------------------------------------------
+// OperMayThrow : Check whether the operation may throw.
+//
+//
+// Arguments:
+// comp - Compiler instance
+//
+// Return Value:
+// True if the given operator may cause an exception
-bool GenTree::OperMayThrow()
+bool GenTree::OperMayThrow(Compiler* comp)
{
GenTreePtr op;
}
return true;
- case GT_IND:
- op = gtOp.gtOp1;
-
- /* Indirections of handles are known to be safe */
- if (op->gtOper == GT_CNS_INT)
- {
- if (op->IsIconHandle())
- {
- /* No exception is thrown on this indirection */
- return false;
- }
- }
- if (this->gtFlags & GTF_IND_NONFAULTING)
- {
- return false;
- }
- // Non-Null AssertionProp will remove the GTF_EXCEPT flag and mark the GT_IND with GTF_ORDER_SIDEEFF flag
- if ((this->gtFlags & GTF_ALL_EFFECT) == GTF_ORDER_SIDEEFF)
- {
- return false;
- }
-
- return true;
-
case GT_INTRINSIC:
// If this is an intrinsic that represents the object.GetType(), it can throw an NullReferenceException.
// Report it as may throw.
break;
+ case GT_CALL:
+
+ CorInfoHelpFunc helper;
+ helper = comp->eeGetHelperNum(this->AsCall()->gtCallMethHnd);
+ return ((helper == CORINFO_HELP_UNDEF) || !comp->s_helperCallProperties.NoThrow(helper));
+
+ case GT_IND:
case GT_BLK:
case GT_OBJ:
case GT_DYN_BLK:
case GT_STORE_BLK:
- return !Compiler::fgIsIndirOfAddrOfLocal(this);
+ case GT_NULLCHECK:
+ return (((this->gtFlags & GTF_IND_NONFAULTING) == 0) && comp->fgAddrCouldBeNull(this->AsIndir()->Addr()));
+
+ case GT_ARR_LENGTH:
+ return (((this->gtFlags & GTF_IND_NONFAULTING) == 0) && comp->fgAddrCouldBeNull(gtOp.gtOp1));
case GT_ARR_BOUNDS_CHECK:
case GT_ARR_ELEM:
case GT_ARR_INDEX:
+ case GT_ARR_OFFSET:
case GT_CATCH_ARG:
- case GT_ARR_LENGTH:
case GT_LCLHEAP:
case GT_CKFINITE:
- case GT_NULLCHECK:
#ifdef FEATURE_SIMD
case GT_SIMD_CHK:
#endif // FEATURE_SIMD
if ((addr->gtFlags & GTF_GLOB_REF) == 0)
{
GenTreeLclVarCommon* lclNode = addr->IsLocalAddrExpr();
- if ((lclNode != nullptr) && !lvaIsImplicitByRefLocal(lclNode->gtLclNum))
+ if (lclNode != nullptr)
{
- newBlkOrObjNode->gtFlags &= ~GTF_GLOB_REF;
+ newBlkOrObjNode->gtFlags |= GTF_IND_NONFAULTING;
+ if (!lvaIsImplicitByRefLocal(lclNode->gtLclNum))
+ {
+ newBlkOrObjNode->gtFlags &= ~GTF_GLOB_REF;
+ }
}
}
return newBlkOrObjNode;
break;
case GT_ARR_LENGTH:
- copy = new (this, GT_ARR_LENGTH)
- GenTreeArrLen(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtArrLen.ArrLenOffset());
+ copy = gtNewArrLen(tree->TypeGet(), tree->gtOp.gtOp1, tree->gtArrLen.ArrLenOffset());
break;
case GT_ARR_INDEX:
//
// Notes:
// The caller must ensure that the original statement has been sequenced,
+// and the side effect flags are updated on the statement nodes,
// but this method will sequence 'replacementTree', and insert it into the
// proper place in the statement sequence.
treeLastNode->gtNext = treeNextNode;
treeNextNode->gtPrev = treeLastNode;
}
-
- // Propagate side-effect flags of "replacementTree" to its parents if needed.
- gtUpdateSideEffects(treeParent, tree->gtFlags, replacementTree->gtFlags);
}
return replacementTree;
}
//------------------------------------------------------------------------
-// gtUpdateSideEffects: Update the side effects for ancestors.
+// gtUpdateSideEffects: Update the side effects of a tree and its ancestors
//
// Arguments:
-// treeParent - The immediate parent node.
-// oldGtFlags - The stale gtFlags.
-// newGtFlags - The new gtFlags.
-//
+// stmt - The tree's statement
+// tree - Tree to update the side effects for
//
-// Assumptions:
-// Linear order of the stmt has been established.
+// Note: If tree's order hasn't been established, the method updates side effect
+// flags on all statement's nodes.
+
+void Compiler::gtUpdateSideEffects(GenTree* stmt, GenTree* tree)
+{
+ if (fgStmtListThreaded)
+ {
+ gtUpdateTreeAncestorsSideEffects(tree);
+ }
+ else
+ {
+ gtUpdateStmtSideEffects(stmt);
+ }
+}
+
+//------------------------------------------------------------------------
+// gtUpdateTreeAncestorsSideEffects: Update the side effects of a tree and its ancestors
+// when statement order has been established.
//
-// Notes:
-// The routine is used for updating the stale side effect flags for ancestor
-// nodes starting from treeParent up to the top-level stmt expr.
+// Arguments:
+// tree - Tree to update the side effects for
-void Compiler::gtUpdateSideEffects(GenTreePtr treeParent, unsigned oldGtFlags, unsigned newGtFlags)
+void Compiler::gtUpdateTreeAncestorsSideEffects(GenTree* tree)
{
assert(fgStmtListThreaded);
+ while (tree != nullptr)
+ {
+ gtResetNodeSideEffects(tree);
+ unsigned nChildren = tree->NumChildren();
+ for (unsigned childNum = 0; childNum < nChildren; childNum++)
+ {
+ tree->gtFlags |= (tree->GetChild(childNum)->gtFlags & GTF_ALL_EFFECT);
+ }
+ tree = tree->gtGetParent(nullptr);
+ }
+}
- oldGtFlags = oldGtFlags & GTF_ALL_EFFECT;
- newGtFlags = newGtFlags & GTF_ALL_EFFECT;
+//------------------------------------------------------------------------
+// gtUpdateStmtSideEffects: Update the side effects for statement tree nodes.
+//
+// Arguments:
+// stmt - The statement to update side effects on
- if (oldGtFlags != newGtFlags)
+void Compiler::gtUpdateStmtSideEffects(GenTree* stmt)
+{
+ fgWalkTree(&stmt->gtStmt.gtStmtExpr, fgUpdateSideEffectsPre, fgUpdateSideEffectsPost);
+}
+
+//------------------------------------------------------------------------
+// gtResetNodeSideEffects: Update the side effects based on the node operation.
+//
+// Arguments:
+// tree - Tree to update the side effects on
+//
+// Notes:
+// This method currently only updates GTF_EXCEPT and GTF_ASG flags. The other side effect
+// flags may remain unnecessarily (conservatively) set.
+// The caller of this method is expected to update the flags based on the children's flags.
+
+void Compiler::gtResetNodeSideEffects(GenTree* tree)
+{
+ if (tree->OperMayThrow(this))
+ {
+ tree->gtFlags |= GTF_EXCEPT;
+ }
+ else
{
- while (treeParent)
+ tree->gtFlags &= ~GTF_EXCEPT;
+ if (tree->OperIsIndirOrArrLength())
{
- treeParent->gtFlags &= ~oldGtFlags;
- treeParent->gtFlags |= newGtFlags;
- treeParent = treeParent->gtGetParent(nullptr);
+ tree->gtFlags |= GTF_IND_NONFAULTING;
}
}
+
+ if (tree->OperRequiresAsgFlag())
+ {
+ tree->gtFlags |= GTF_ASG;
+ }
+ else
+ {
+ tree->gtFlags &= ~GTF_ASG;
+ }
+}
+
+//------------------------------------------------------------------------
+// fgUpdateSideEffectsPre: Update the side effects based on the tree operation.
+//
+// Arguments:
+// pTree - Pointer to the tree to update the side effects
+// fgWalkPre - Walk data
+//
+// Notes:
+// This method currently only updates GTF_EXCEPT and GTF_ASG flags. The other side effect
+// flags may remain unnecessarily (conservatively) set.
+
+Compiler::fgWalkResult Compiler::fgUpdateSideEffectsPre(GenTree** pTree, fgWalkData* fgWalkPre)
+{
+ fgWalkPre->compiler->gtResetNodeSideEffects(*pTree);
+
+ return WALK_CONTINUE;
+}
+
+//------------------------------------------------------------------------
+// fgUpdateSideEffectsPost: Update the side effects of the parent based on the tree's flags.
+//
+// Arguments:
+// pTree - Pointer to the tree
+// fgWalkPost - Walk data
+//
+// Notes:
+// The routine is used as a post-order walk callback: it propagates the side
+// effect flags of the visited node up to its parent, so after the walk each
+// ancestor's flags include the effects of its entire subtree.
+
+Compiler::fgWalkResult Compiler::fgUpdateSideEffectsPost(GenTree** pTree, fgWalkData* fgWalkPost)
+{
+ GenTree* tree = *pTree;
+ GenTree* parent = fgWalkPost->parent;
+ if (parent != nullptr)
+ {
+ parent->gtFlags |= (tree->gtFlags & GTF_ALL_EFFECT);
+ }
+ return WALK_CONTINUE;
}
/*****************************************************************************
*
- * Comapres two trees and returns true when both trees are the same.
+ * Compares two trees and returns true when both trees are the same.
* Instead of fully comparing the two trees this method can just return false.
* Thus callers should not assume that the trees are different when false is returned.
* Only when true is returned can the caller perform code optimizations.
assert(op1);
op2 = op1;
- op1 = gtNewHelperCallNode(CORINFO_HELP_OVERFLOW, TYP_VOID, GTF_EXCEPT,
+ op1 = gtNewHelperCallNode(CORINFO_HELP_OVERFLOW, TYP_VOID,
gtNewArgList(gtNewIconNode(compCurBB->bbTryIndex)));
if (vnStore != nullptr)
CNS_LONG:
+ if (fieldSeq != FieldSeqStore::NotAField())
+ {
+ return tree;
+ }
+
#ifdef DEBUG
if (verbose)
{
args = gtNewListNode(objPtr, args);
}
- GenTreePtr tree = gtNewHelperCallNode(pFieldInfo->helper, genActualType(helperType), 0, args);
+ GenTreePtr tree = gtNewHelperCallNode(pFieldInfo->helper, genActualType(helperType), args);
if (pFieldInfo->fieldAccessor == CORINFO_FIELD_INSTANCE_HELPER)
{
if (flags & GTF_EXCEPT)
{
- if (tree->OperMayThrow())
+ if (tree->OperMayThrow(this))
{
return true;
}
#define GTF_IND_ARR_LEN 0x80000000 // GT_IND -- the indirection represents an array length (of the REF
// contribution to its argument).
#define GTF_IND_VOLATILE 0x40000000 // GT_IND -- the load or store must use volatile sematics (this is a nop on X86)
-#define GTF_IND_NONFAULTING 0x20000000 // GT_IND -- An indir that cannot fault.
+#define GTF_IND_NONFAULTING 0x20000000 // Operations for which OperIsIndir() is true -- An indir that cannot fault.
+ // Same as GTF_ARRLEN_NONFAULTING.
#define GTF_IND_TGTANYWHERE 0x10000000 // GT_IND -- the target could be anywhere
#define GTF_IND_TLS_REF 0x08000000 // GT_IND -- the target is accessed via TLS
#define GTF_IND_ASG_LHS 0x04000000 // GT_IND -- this GT_IND node is (the effective val) of the LHS of an
#define GTF_ARR_BOUND_INBND 0x80000000 // GT_ARR_BOUNDS_CHECK -- have proved this check is always in-bounds
#define GTF_ARRLEN_ARR_IDX 0x80000000 // GT_ARR_LENGTH -- Length which feeds into an array index expression
+#define GTF_ARRLEN_NONFAULTING 0x20000000 // GT_ARR_LENGTH -- An array length operation that cannot fault. Same as GTF_IND_NONFAULTING.
#define GTF_FIELD_LIST_HEAD 0x80000000 // GT_FIELD_LIST -- Indicates that this is the first field in a list of
// struct fields constituting a single call argument.
return gtOper == GT_IND || gtOper == GT_STOREIND || gtOper == GT_NULLCHECK || OperIsBlk(gtOper);
}
+ static bool OperIsIndirOrArrLength(genTreeOps gtOper)
+ {
+ return OperIsIndir(gtOper) || (gtOper == GT_ARR_LENGTH);
+ }
+
bool OperIsIndir() const
{
return OperIsIndir(gtOper);
}
+ // Instance-form convenience wrapper over the static OperIsIndirOrArrLength,
+ // applied to this node's own operator.
+ bool OperIsIndirOrArrLength() const
+ {
+ return OperIsIndirOrArrLength(gtOper);
+ }
+
static bool OperIsImplicitIndir(genTreeOps gtOper)
{
switch (gtOper)
// Returns true if it is a GT_COPY or GT_RELOAD of a multi-reg call node
inline bool IsCopyOrReloadOfMultiRegCall() const;
- bool OperMayThrow();
+ bool OperRequiresAsgFlag();
+
+ bool OperMayThrow(Compiler* comp);
unsigned GetScaleIndexMul();
unsigned GetScaleIndexShf();
gtFlags &= ~GTF_REUSE_REG_VAL;
}
+ // SetIndirExceptionFlags: set the exception-status flag on an indirection
+ // (or GT_ARR_LENGTH) node: GTF_EXCEPT if OperMayThrow reports the node can
+ // fault, GTF_IND_NONFAULTING otherwise.
+ //
+ // NOTE(review): this only ORs in the chosen bit; it assumes any stale
+ // GTF_EXCEPT has already been cleared by the caller (call sites in this
+ // change do `gtFlags &= ~GTF_EXCEPT` style masking first) -- confirm for
+ // any new call site.
+ void SetIndirExceptionFlags(Compiler* comp)
+ {
+ assert(OperIsIndirOrArrLength());
+ gtFlags |= OperMayThrow(comp) ? GTF_EXCEPT : GTF_IND_NONFAULTING;
+ }
+
#if MEASURE_NODE_SIZE
static void DumpNodeSizes(FILE* fp);
#endif
{
// There's no reason to do a compare-exchange on a local location, so we'll assume that all of these
// have global effects.
- gtFlags |= GTF_GLOB_EFFECT;
+ gtFlags |= (GTF_GLOB_REF | GTF_ASG);
}
#if DEBUGGABLE_GENTREE
GenTreeCmpXchg() : GenTree()
return nullptr;
}
- GenTreeCall* op1 = gtNewHelperCallNode(helper, type, GTF_EXCEPT, args);
+ GenTreeCall* op1 = gtNewHelperCallNode(helper, type, args);
op1->setEntryPoint(lookup);
gtNewArgList(ctxTree, gtNewIconEmbHndNode(pRuntimeLookup->signature, nullptr, GTF_ICON_TOKEN_HDL, 0,
nullptr, compileTimeHandle));
- return gtNewHelperCallNode(pRuntimeLookup->helper, TYP_I_IMPL, GTF_EXCEPT, helperArgs);
+ return gtNewHelperCallNode(pRuntimeLookup->helper, TYP_I_IMPL, helperArgs);
}
// Slot pointer
GenTreeArgList* helperArgs =
gtNewArgList(ctxTree, gtNewIconEmbHndNode(pRuntimeLookup->signature, nullptr, GTF_ICON_TOKEN_HDL, 0, nullptr,
compileTimeHandle));
- GenTreePtr helperCall = gtNewHelperCallNode(pRuntimeLookup->helper, TYP_I_IMPL, GTF_EXCEPT, helperArgs);
+ GenTreePtr helperCall = gtNewHelperCallNode(pRuntimeLookup->helper, TYP_I_IMPL, helperArgs);
// Check for null and possibly call helper
GenTreePtr relop = gtNewOperNode(GT_NE, TYP_INT, handle, gtNewIconNode(0, TYP_I_IMPL));
// on a local are probably pretty useless anyway, so we probably don't care.
op1 = gtNewOperNode(interlockedOperator, genActualType(callType), op1, op2);
- op1->gtFlags |= GTF_GLOB_EFFECT;
+ op1->gtFlags |= GTF_GLOB_REF | GTF_ASG;
retNode = op1;
break;
#endif // _TARGET_XARCH_
assert(sig->numArgs == 0);
op1 = new (this, GT_MEMORYBARRIER) GenTree(GT_MEMORYBARRIER, TYP_VOID);
- op1->gtFlags |= GTF_GLOB_EFFECT;
+ op1->gtFlags |= GTF_GLOB_REF | GTF_ASG;
retNode = op1;
break;
op1 = impPopStack().val;
if (!opts.MinOpts() && !opts.compDbgCode)
{
- GenTreeArrLen* arrLen =
- new (this, GT_ARR_LENGTH) GenTreeArrLen(TYP_INT, op1, offsetof(CORINFO_String, stringLen));
- op1 = arrLen;
+ GenTreeArrLen* arrLen = gtNewArrLen(TYP_INT, op1, offsetof(CORINFO_String, stringLen));
+ op1 = arrLen;
}
else
{
}
assert(verCurrentState.esStackDepth == 0);
- GenTreePtr op1 = gtNewHelperCallNode(CORINFO_HELP_VERIFICATION, TYP_VOID, GTF_EXCEPT,
- gtNewArgList(gtNewIconNode(block->bbCodeOffs)));
+ GenTreePtr op1 =
+ gtNewHelperCallNode(CORINFO_HELP_VERIFICATION, TYP_VOID, gtNewArgList(gtNewIconNode(block->bbCodeOffs)));
// verCurrentState.esStackDepth = 0;
impAppendTree(op1, (unsigned)CHECK_SPILL_NONE, impCurStmtOffs);
{
runtimeMethodHandle = gtNewIconEmbMethHndNode(pResolvedToken->hMethod);
}
- return gtNewHelperCallNode(CORINFO_HELP_GVMLOOKUP_FOR_SLOT, TYP_I_IMPL, GTF_EXCEPT,
+ return gtNewHelperCallNode(CORINFO_HELP_GVMLOOKUP_FOR_SLOT, TYP_I_IMPL,
gtNewArgList(thisPtr, runtimeMethodHandle));
}
{
if (!pCallInfo->exactContextNeedsRuntimeLookup)
{
- GenTreeCall* call = gtNewHelperCallNode(CORINFO_HELP_READYTORUN_VIRTUAL_FUNC_PTR, TYP_I_IMPL, GTF_EXCEPT,
- gtNewArgList(thisPtr));
+ GenTreeCall* call =
+ gtNewHelperCallNode(CORINFO_HELP_READYTORUN_VIRTUAL_FUNC_PTR, TYP_I_IMPL, gtNewArgList(thisPtr));
call->setEntryPoint(pCallInfo->codePointerLookup.constLookup);
// Call helper function. This gets the target address of the final destination callsite.
- return gtNewHelperCallNode(CORINFO_HELP_VIRTUAL_FUNC_PTR, TYP_I_IMPL, GTF_EXCEPT, helpArgs);
+ return gtNewHelperCallNode(CORINFO_HELP_VIRTUAL_FUNC_PTR, TYP_I_IMPL, helpArgs);
}
//------------------------------------------------------------------------
return;
}
- op1 = gtNewHelperCallNode(info.compCompHnd->getNewHelper(pResolvedToken, info.compMethodHnd), TYP_REF, 0,
+ op1 = gtNewHelperCallNode(info.compCompHnd->getNewHelper(pResolvedToken, info.compMethodHnd), TYP_REF,
gtNewArgList(op2));
}
}
GenTreeArgList* args = gtNewArgList(op2, impGetStructAddr(exprToBox, operCls, (unsigned)CHECK_SPILL_ALL, true));
- op1 = gtNewHelperCallNode(boxHelper, TYP_REF, GTF_EXCEPT, args);
+ op1 = gtNewHelperCallNode(boxHelper, TYP_REF, args);
}
/* Push the result back on the stack, */
args = gtNewListNode(classHandle, args);
- node = gtNewHelperCallNode(CORINFO_HELP_NEW_MDARR_NONVARARG, TYP_REF, 0, args);
+ node = gtNewHelperCallNode(CORINFO_HELP_NEW_MDARR_NONVARARG, TYP_REF, args);
}
else
{
unsigned argFlags = 0;
args = impPopList(pCallInfo->sig.numArgs, &pCallInfo->sig, args);
- node = gtNewHelperCallNode(CORINFO_HELP_NEW_MDARR, TYP_REF, 0, args);
+ node = gtNewHelperCallNode(CORINFO_HELP_NEW_MDARR, TYP_REF, args);
// varargs, so we pop the arguments
node->gtFlags |= GTF_CALL_POP_ARGS;
if (runtimeLookup)
{
- node = gtNewHelperCallNode(CORINFO_HELP_INITCLASS, TYP_VOID, 0, gtNewArgList(node));
+ node = gtNewHelperCallNode(CORINFO_HELP_INITCLASS, TYP_VOID, gtNewArgList(node));
}
else
{
break;
}
- op1 = gtNewHelperCallNode(pFieldInfo->helper, type, 0, gtNewArgList(op1));
+ op1 = gtNewHelperCallNode(pFieldInfo->helper, type, gtNewArgList(op1));
FieldSeqNode* fs = GetFieldSeqStore()->CreateSingleton(pResolvedToken->hField);
op1 = gtNewOperNode(GT_ADD, type, op1,
callFlags |= GTF_CALL_HOISTABLE;
}
- op1 = gtNewHelperCallNode(CORINFO_HELP_READYTORUN_STATIC_BASE, TYP_BYREF, callFlags);
+ op1 = gtNewHelperCallNode(CORINFO_HELP_READYTORUN_STATIC_BASE, TYP_BYREF);
+ op1->gtFlags |= callFlags;
op1->gtCall.setEntryPoint(pFieldInfo->fieldLookup);
}
callFlags |= GTF_CALL_HOISTABLE;
}
var_types type = TYP_BYREF;
- op1 = gtNewHelperCallNode(CORINFO_HELP_READYTORUN_GENERIC_STATIC_BASE, type, callFlags, args);
+ op1 = gtNewHelperCallNode(CORINFO_HELP_READYTORUN_GENERIC_STATIC_BASE, type, args);
+ op1->gtFlags |= callFlags;
op1->gtCall.setEntryPoint(pFieldInfo->fieldLookup);
FieldSeqNode* fs = GetFieldSeqStore()->CreateSingleton(pResolvedToken->hField);
* Mark as CSE'able, and hoistable. Consider marking hoistable unless you're in the inlinee.
* Also, consider sticking this in the first basic block.
*/
- GenTreePtr callout = gtNewHelperCallNode(helperInfo->helperNum, TYP_VOID, GTF_EXCEPT, args);
+ GenTreePtr callout = gtNewHelperCallNode(helperInfo->helperNum, TYP_VOID, args);
impAppendTree(callout, (unsigned)CHECK_SPILL_NONE, impCurStmtOffs);
}
//
op2->gtFlags |= GTF_DONT_CSE;
- return gtNewHelperCallNode(helper, TYP_REF, 0, gtNewArgList(op2, op1));
+ return gtNewHelperCallNode(helper, TYP_REF, gtNewArgList(op2, op1));
}
JITDUMP("\nExpanding %s inline\n", isCastClass ? "castclass" : "isinst");
//
const CorInfoHelpFunc specialHelper = CORINFO_HELP_CHKCASTCLASS_SPECIAL;
- condTrue = gtNewHelperCallNode(specialHelper, TYP_REF, 0, gtNewArgList(op2Var, gtClone(op1)));
+ condTrue = gtNewHelperCallNode(specialHelper, TYP_REF, gtNewArgList(op2Var, gtClone(op1)));
}
else
{
args = gtNewArgList(op1); // Type
args = gtNewListNode(impPopStack().val, args); // index
args = gtNewListNode(impPopStack().val, args); // array
- op1 = gtNewHelperCallNode(CORINFO_HELP_LDELEMA_REF, TYP_BYREF, GTF_EXCEPT, args);
+ op1 = gtNewHelperCallNode(CORINFO_HELP_LDELEMA_REF, TYP_BYREF, args);
impPushOnStack(op1, tiRetVal);
break;
STELEM_REF_POST_VERIFY:
/* Call a helper function to do the assignment */
- op1 = gtNewHelperCallNode(CORINFO_HELP_ARRADDR_ST, TYP_VOID, 0, impPopList(3, nullptr));
+ op1 = gtNewHelperCallNode(CORINFO_HELP_ARRADDR_ST, TYP_VOID, impPopList(3, nullptr));
goto SPILL_APPEND;
/* Special case: integer/long division may throw an exception */
- if (varTypeIsIntegral(op1->TypeGet()) && op1->OperMayThrow())
+ if (varTypeIsIntegral(op1->TypeGet()) && op1->OperMayThrow(this))
{
op1->gtFlags |= GTF_EXCEPT;
}
// Note that this only works for shared generic code because the same helper is used for all
// reference array types
- op1 =
- gtNewHelperCallNode(info.compCompHnd->getNewArrHelper(resolvedToken.hClass), TYP_REF, 0, args);
+ op1 = gtNewHelperCallNode(info.compCompHnd->getNewArrHelper(resolvedToken.hClass), TYP_REF, args);
}
op1->gtCall.compileTimeHelperArgumentHandle = (CORINFO_GENERIC_HANDLE)resolvedToken.hClass;
// Call helper GETREFANY(classHandle, op1);
args = gtNewArgList(op2, op1);
- op1 = gtNewHelperCallNode(CORINFO_HELP_GETREFANY, TYP_BYREF, 0, args);
+ op1 = gtNewHelperCallNode(CORINFO_HELP_GETREFANY, TYP_BYREF, args);
impPushOnStack(op1, tiRetVal);
break;
{
GenTreeArgList* helperArgs = gtNewArgList(op1);
- op1 = gtNewHelperCallNode(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE_MAYBENULL, TYP_STRUCT, GTF_EXCEPT,
- helperArgs);
+ op1 = gtNewHelperCallNode(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE_MAYBENULL, TYP_STRUCT, helperArgs);
// The handle struct is returned in register
op1->gtCall.gtReturnType = TYP_REF;
GenTreeArgList* helperArgs = gtNewArgList(op1);
- op1 = gtNewHelperCallNode(helper, TYP_STRUCT, GTF_EXCEPT, helperArgs);
+ op1 = gtNewHelperCallNode(helper, TYP_STRUCT, helperArgs);
// The handle struct is returned in register
op1->gtCall.gtReturnType = TYP_REF;
return;
}
args = gtNewArgList(op2, op1);
- op1 = gtNewHelperCallNode(helper, TYP_VOID, 0, args);
+ op1 = gtNewHelperCallNode(helper, TYP_VOID, args);
op1 = new (this, GT_COLON) GenTreeColon(TYP_VOID, gtNewNothingNode(), op1);
op1 = gtNewQmarkNode(TYP_VOID, condBox, op1);
{
JITDUMP("\n Importing %s as helper call because %s\n", opcode == CEE_UNBOX ? "UNBOX" : "UNBOX.ANY",
canExpandInline ? "want smaller code or faster jitting" : "inline expansion not legal");
- unsigned callFlags = (helper == CORINFO_HELP_UNBOX) ? 0 : GTF_EXCEPT;
// Don't optimize, just call the helper and be done with it
args = gtNewArgList(op2, op1);
- op1 = gtNewHelperCallNode(helper,
- (var_types)((helper == CORINFO_HELP_UNBOX) ? TYP_BYREF : TYP_STRUCT),
- callFlags, args);
+ op1 =
+ gtNewHelperCallNode(helper,
+ (var_types)((helper == CORINFO_HELP_UNBOX) ? TYP_BYREF : TYP_STRUCT), args);
}
assert(helper == CORINFO_HELP_UNBOX && op1->gtType == TYP_BYREF || // Unbox helper returns a byref.
block->bbSetRunRarely(); // any block with a throw is rare
/* Pop the exception object and create the 'throw' helper call */
- op1 = gtNewHelperCallNode(CORINFO_HELP_THROW, TYP_VOID, GTF_EXCEPT, gtNewArgList(impPopStack().val));
+ op1 = gtNewHelperCallNode(CORINFO_HELP_THROW, TYP_VOID, gtNewArgList(impPopStack().val));
EVAL_APPEND:
if (verCurrentState.esStackDepth > 0)
/* Create the 'rethrow' helper call */
- op1 = gtNewHelperCallNode(CORINFO_HELP_RETHROW, TYP_VOID, GTF_EXCEPT);
+ op1 = gtNewHelperCallNode(CORINFO_HELP_RETHROW, TYP_VOID);
goto EVAL_APPEND;
if (!opts.MinOpts() && !opts.compDbgCode)
{
/* Use GT_ARR_LENGTH operator so rng check opts see this */
- GenTreeArrLen* arrLen =
- new (this, GT_ARR_LENGTH) GenTreeArrLen(TYP_INT, op1, offsetof(CORINFO_Array, length));
+ GenTreeArrLen* arrLen = gtNewArrLen(TYP_INT, op1, offsetof(CORINFO_Array, length));
/* Mark the block as containing a length expression */
/* Create the expression "*(array_addr + ArrLenOffs)" */
op1 = gtNewOperNode(GT_ADD, TYP_BYREF, op1,
gtNewIconNode(offsetof(CORINFO_Array, length), TYP_I_IMPL));
- op1 = gtNewOperNode(GT_IND, TYP_INT, op1);
+ op1 = gtNewIndir(TYP_INT, op1);
op1->gtFlags |= GTF_IND_ARR_LEN;
}
- /* An indirection will cause a GPF if the address is null */
- op1->gtFlags |= GTF_EXCEPT;
-
/* Push the result back on the stack */
impPushOnStack(op1, tiRetVal);
break;
// confirm that the argument is a GC pointer (for debugging (GC stress))
GenTreeArgList* args = gtNewArgList(op2);
- op2 = gtNewHelperCallNode(CORINFO_HELP_CHECK_OBJ, TYP_REF, 0, args);
+ op2 = gtNewHelperCallNode(CORINFO_HELP_CHECK_OBJ, TYP_REF, args);
if (verbose)
{
if (!node->IsValue() || node->IsUnusedValue())
{
unsigned sideEffects = node->gtFlags & (GTF_SIDE_EFFECT | GTF_SET_FLAGS);
- if ((sideEffects == 0) || ((sideEffects == GTF_EXCEPT) && !node->OperMayThrow()))
+ if ((sideEffects == 0) || ((sideEffects == GTF_EXCEPT) && !node->OperMayThrow(this)))
{
JITDUMP("Removing dead node:\n");
DISPNODE(node);
{
gtSetStmtInfo(compCurStmt);
fgSetStmtSeq(compCurStmt);
+ gtUpdateStmtSideEffects(compCurStmt);
}
#ifdef DEBUG
// If asked for arrlen invoke arr length operator.
if (oper == ArrLen)
{
- GenTreePtr arrLen = new (comp, GT_ARR_LENGTH) GenTreeArrLen(TYP_INT, arr, offsetof(CORINFO_Array, length));
+ GenTreePtr arrLen = comp->gtNewArrLen(TYP_INT, arr, offsetof(CORINFO_Array, length));
return arrLen;
}
else
GenTreeArgList* argList = comp->gtNewArgList(frameAddr, PhysReg(REG_SECRET_STUB_PARAM));
#endif
- GenTree* call = comp->gtNewHelperCallNode(CORINFO_HELP_INIT_PINVOKE_FRAME, TYP_I_IMPL, 0, argList);
+ GenTree* call = comp->gtNewHelperCallNode(CORINFO_HELP_INIT_PINVOKE_FRAME, TYP_I_IMPL, argList);
// some sanity checks on the frame list root vardsc
LclVarDsc* varDsc = &comp->lvaTable[comp->info.compLvFrameListRoot];
// Insert call to CORINFO_HELP_JIT_PINVOKE_BEGIN
GenTree* helperCall =
- comp->gtNewHelperCallNode(CORINFO_HELP_JIT_PINVOKE_BEGIN, TYP_VOID, 0, comp->gtNewArgList(frameAddr));
+ comp->gtNewHelperCallNode(CORINFO_HELP_JIT_PINVOKE_BEGIN, TYP_VOID, comp->gtNewArgList(frameAddr));
comp->fgMorphTree(helperCall);
BlockRange().InsertBefore(insertBefore, LIR::SeqTree(comp, helperCall));
// Insert call to CORINFO_HELP_JIT_PINVOKE_END
GenTreeCall* helperCall =
- comp->gtNewHelperCallNode(CORINFO_HELP_JIT_PINVOKE_END, TYP_VOID, 0, comp->gtNewArgList(frameAddr));
+ comp->gtNewHelperCallNode(CORINFO_HELP_JIT_PINVOKE_END, TYP_VOID, comp->gtNewArgList(frameAddr));
comp->fgMorphTree(helperCall);
BlockRange().InsertAfter(call, LIR::SeqTree(comp, helperCall));
// The helper call ought to be semantically equivalent to the original node, so preserve its VN.
tree->ChangeOper(GT_CALL, GenTree::PRESERVE_VN);
- tree->gtFlags |= GTF_CALL;
- if (args)
- {
- tree->gtFlags |= (args->gtFlags & GTF_ALL_EFFECT);
- }
tree->gtCall.gtCallType = CT_HELPER;
tree->gtCall.gtCallMethHnd = eeFindHelper(helper);
tree->gtCall.gtCallArgs = args;
}
#endif // _TARGET_XXX_
+ if (tree->OperMayThrow(this))
+ {
+ tree->gtFlags |= GTF_EXCEPT;
+ }
+ else
+ {
+ tree->gtFlags &= ~GTF_EXCEPT;
+ }
+ tree->gtFlags |= GTF_CALL;
+ if (args)
+ {
+ tree->gtFlags |= (args->gtFlags & GTF_ALL_EFFECT);
+ }
+
/* Perform the morphing */
tree = fgMorphArgs(tree->AsCall());
if (unsignedSrc || !unsignedDst)
{
tree->gtFlags &= ~GTF_OVERFLOW;
+ if (!(oper->gtFlags & GTF_EXCEPT))
+ {
+ tree->gtFlags &= ~GTF_EXCEPT;
+ }
}
}
else // if (srcSize > dstSize)
argx = fgMorphTree(*parentArgx);
*parentArgx = argx;
- flagsSummary |= argx->gtFlags;
assert(args->OperIsList());
assert(argx == args->Current());
if (newArgEntry->isNonStandard)
{
+ flagsSummary |= args->Current()->gtFlags;
continue;
}
fgMakeOutgoingStructArgCopy(call, args, argIndex,
copyBlkClass FEATURE_UNIX_AMD64_STRUCT_PASSING_ONLY_ARG(&structDesc));
- // This can cause a GTF_EXCEPT flag to be set.
- // TODO-CQ: Fix the cases where this happens. We shouldn't be adding any new flags.
- // This currently occurs in the case where we are re-morphing the args on x86/RyuJIT, and
- // there are no register arguments. Then reMorphing is never true, so we keep re-copying
- // any struct arguments.
- // i.e. assert(((call->gtFlags & GTF_EXCEPT) != 0) || ((args->Current()->gtFlags & GTF_EXCEPT) == 0)
- flagsSummary |= (args->Current()->gtFlags & GTF_EXCEPT);
-
#ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
hasStackArgCopy = true;
#endif
{
argSlots += size;
}
+ flagsSummary |= args->Current()->gtFlags;
} // end foreach argument loop
if (!reMorphing)
call->gtFlags |= (flagsSummary & GTF_ALL_EFFECT);
+ if (!call->OperMayThrow(this) && ((flagsSummary & GTF_EXCEPT) == 0))
+ {
+ call->gtFlags &= ~GTF_EXCEPT;
+ }
+
// If the register arguments have already been determined
// or we have no register arguments then we don't need to
// call SortArgs() and EvalArgsToTemps()
{
curAddr = baseAddr;
}
- GenTreePtr curItem = gtNewOperNode(GT_IND, type[inx], curAddr);
+ GenTreePtr curItem = gtNewIndir(type[inx], curAddr);
// For safety all GT_IND should have at least GT_GLOB_REF set.
curItem->gtFlags |= GTF_GLOB_REF;
- if (fgAddrCouldBeNull(curItem))
- {
- // This indirection can cause a GPF if the address could be null.
- curItem->gtFlags |= GTF_EXCEPT;
- }
listEntry = new (this, GT_FIELD_LIST) GenTreeFieldList(curItem, offset, type[inx], listEntry);
if (newArg == nullptr)
// to ref counting of the lclVars.
lvaTable[tmp].incRefCnts(compCurBB->getBBWeight(this), this);
- GenTreePtr src;
if (argx->gtOper == GT_OBJ)
{
argx->gtFlags &= ~(GTF_ALL_EFFECT) | (argx->AsBlk()->Addr()->gtFlags & GTF_ALL_EFFECT);
+ argx->SetIndirExceptionFlags(this);
}
else
{
}
#endif // _TARGET_64BIT_
- GenTree* arrLen = new (this, GT_ARR_LENGTH) GenTreeArrLen(TYP_INT, arrRef, (int)lenOffs);
+ GenTree* arrLen = gtNewArrLen(TYP_INT, arrRef, (int)lenOffs);
if (bndsChkType != TYP_INT)
{
tree->SetOper(GT_IND);
tree->gtOp.gtOp1 = addr;
- if (fgAddrCouldBeNull(addr))
- {
- // This indirection can cause a GPF if the address could be null.
- tree->gtFlags |= GTF_EXCEPT;
- }
+ tree->gtFlags &= (~GTF_EXCEPT | addr->gtFlags);
+ tree->SetIndirExceptionFlags(this);
if (addExplicitNullCheck)
{
CORINFO_INTRINSIC_GetCurrentManagedThread)
{
// substitute expression with call to helper
- GenTreePtr newCall = gtNewHelperCallNode(CORINFO_HELP_GETCURRENTMANAGEDTHREADID, TYP_INT, 0);
+ GenTreePtr newCall = gtNewHelperCallNode(CORINFO_HELP_GETCURRENTMANAGEDTHREADID, TYP_INT);
JITDUMP("get_ManagedThreadId(get_CurrentThread) folding performed\n");
return fgMorphTree(newCall);
}
gtNewIconEmbScpHndNode(tree->gtStrCon.gtScpHnd));
}
- tree = gtNewHelperCallNode(helper, TYP_REF, 0, args);
+ tree = gtNewHelperCallNode(helper, TYP_REF, args);
return fgMorphTree(tree);
}
}
if (dest == lclVarTree)
{
- dest = gtNewOperNode(GT_IND, asgType, gtNewOperNode(GT_ADDR, TYP_BYREF, dest));
+ dest = gtNewIndir(asgType, gtNewOperNode(GT_ADDR, TYP_BYREF, dest));
}
}
}
if (!fgIsIndirOfAddrOfLocal(dest))
{
- dest->gtFlags |= (GTF_EXCEPT | GTF_GLOB_REF | GTF_IND_TGTANYWHERE);
- tree->gtFlags |= (GTF_EXCEPT | GTF_GLOB_REF | GTF_IND_TGTANYWHERE);
+ dest->gtFlags |= (GTF_GLOB_REF | GTF_IND_TGTANYWHERE);
+ tree->gtFlags |= GTF_GLOB_REF;
}
+
+ dest->gtFlags &= (~GTF_EXCEPT | dest->AsIndir()->Addr()->gtFlags);
+ dest->SetIndirExceptionFlags(this);
+ tree->gtFlags |= (dest->gtFlags & GTF_EXCEPT);
}
LclVarDsc* srcVarDsc = nullptr;
}
}
- if (src->OperIsIndir() && !fgIsIndirOfAddrOfLocal(src))
+ if (src->OperIsIndir())
{
- // If we have no information about the src, we have to assume it could
- // live anywhere (not just in the GC heap).
- // Mark the GT_IND node so that we use the correct write barrier helper in case
- // the field is a GC ref.
+ if (!fgIsIndirOfAddrOfLocal(src))
+ {
+ // If we have no information about the src, we have to assume it could
+ // live anywhere (not just in the GC heap).
+ // Mark the GT_IND node so that we use the correct write barrier helper in case
+ // the field is a GC ref.
+ src->gtFlags |= (GTF_GLOB_REF | GTF_IND_TGTANYWHERE);
+ }
- src->gtFlags |= (GTF_EXCEPT | GTF_GLOB_REF | GTF_IND_TGTANYWHERE);
+ src->gtFlags &= (~GTF_EXCEPT | src->AsIndir()->Addr()->gtFlags);
+ src->SetIndirExceptionFlags(this);
}
}
else
asg->ChangeType(asgType);
dest->gtFlags |= GTF_DONT_CSE;
+ asg->gtFlags &= ~GTF_EXCEPT;
asg->gtFlags |= ((dest->gtFlags | src->gtFlags) & GTF_ALL_EFFECT);
// Un-set GTF_REVERSE_OPS, and it will be set later if appropriate.
asg->gtFlags &= ~GTF_REVERSE_OPS;
else if (effectiveVal->TypeGet() != asgType)
{
GenTree* addr = gtNewOperNode(GT_ADDR, TYP_BYREF, effectiveVal);
- effectiveVal = gtNewOperNode(GT_IND, asgType, addr);
+ effectiveVal = gtNewIndir(asgType, addr);
}
}
else
}
else
{
- newTree = new (this, GT_IND) GenTreeIndir(GT_IND, asgType, addr, nullptr);
+ newTree = gtNewIndir(asgType, addr);
}
effectiveVal = newTree;
}
// When performing a field by field assignment we can have one of Source() or Dest treated as a blob of bytes
// and in such cases we will call lvaSetVarDoNotEnregister() on the one treated as a blob of bytes.
// if the Source() or Dest() is a a struct that has a "CustomLayout" and "ConstainsHoles" then we
-// can not use a field by field assignment and must the orginal block copy unmodified.
+// can not use a field by field assignment and must leave the original block copy unmodified.
GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
{
{
noway_assert(rhs->gtOper == GT_LCL_VAR);
GenTree* rhsAddr = gtNewOperNode(GT_ADDR, TYP_BYREF, rhs);
- rhs = gtNewOperNode(GT_IND, TYP_STRUCT, rhsAddr);
+ rhs = gtNewIndir(TYP_STRUCT, rhsAddr);
}
#endif // LEGACY_BACKEND
// Formerly, liveness did not consider copyblk arguments of simple types as being
dest = gtNewOperNode(GT_ADD, TYP_BYREF, dest, fieldOffsetNode);
- dest = gtNewOperNode(GT_IND, lvaTable[fieldLclNum].TypeGet(), dest);
+ dest = gtNewIndir(lvaTable[fieldLclNum].TypeGet(), dest);
// !!! The destination could be on stack. !!!
// This flag will let us choose the correct write barrier.
new (this, GT_CNS_INT)
GenTreeIntCon(TYP_I_IMPL, lvaTable[fieldLclNum].lvFldOffset, curFieldSeq));
- src = gtNewOperNode(GT_IND, lvaTable[fieldLclNum].TypeGet(), src);
+ src = gtNewIndir(lvaTable[fieldLclNum].TypeGet(), src);
}
}
tree = fgMorphToEmulatedFP(tree);
#endif
- /* Could this operator throw an exception? */
- if (fgGlobalMorph && tree->OperMayThrow())
- {
- if (((tree->OperGet() != GT_IND) && !tree->OperIsBlk()) || fgAddrCouldBeNull(tree->gtOp.gtOp1))
- {
- /* Mark the tree node as potentially throwing an exception */
- tree->gtFlags |= GTF_EXCEPT;
- }
- }
-
/*-------------------------------------------------------------------------
* Process the first operand, if any
*/
tree->gtFlags &= ~GTF_CALL;
}
- if (!tree->OperMayThrow())
- {
- tree->gtFlags &= ~GTF_EXCEPT;
- }
-
/* Propagate the new flags */
tree->gtFlags |= (op1->gtFlags & GTF_ALL_EFFECT);
DONE_MORPHING_CHILDREN:
+ if (tree->OperMayThrow(this))
+ {
+ // Mark the tree node as potentially throwing an exception
+ tree->gtFlags |= GTF_EXCEPT;
+ }
+ else
+ {
+ if (tree->OperIsIndirOrArrLength())
+ {
+ tree->gtFlags |= GTF_IND_NONFAULTING;
+ }
+ if (((op1 == nullptr) || ((op1->gtFlags & GTF_EXCEPT) == 0)) &&
+ ((op2 == nullptr) || ((op2->gtFlags & GTF_EXCEPT) == 0)))
+ {
+ tree->gtFlags &= ~GTF_EXCEPT;
+ }
+ }
+
+ if (tree->OperRequiresAsgFlag())
+ {
+ tree->gtFlags |= GTF_ASG;
+ }
+ else
+ {
+ if (((op1 == nullptr) || ((op1->gtFlags & GTF_ASG) == 0)) &&
+ ((op2 == nullptr) || ((op2->gtFlags & GTF_ASG) == 0)))
+ {
+ tree->gtFlags &= ~GTF_ASG;
+ }
+ }
/*-------------------------------------------------------------------------
* Now do POST-ORDER processing
*/
#endif
while (commaNode->gtOp.gtOp2->gtOper == GT_COMMA)
{
- commaNode = commaNode->gtOp.gtOp2;
- commaNode->gtType = typ;
- commaNode->gtFlags = (treeFlags & ~GTF_REVERSE_OPS); // Bashing the GT_COMMA flags here is
- // dangerous, clear the GTF_REVERSE_OPS at
- // least.
+ commaNode = commaNode->gtOp.gtOp2;
+ commaNode->gtType = typ;
+ commaNode->gtFlags =
+ (treeFlags & ~GTF_REVERSE_OPS & ~GTF_ASG); // Bashing the GT_COMMA flags here is
+ // dangerous, clear the GTF_REVERSE_OPS at
+ // least.
+ commaNode->gtFlags |=
+ ((commaNode->gtOp.gtOp1->gtFlags & GTF_ASG) | (commaNode->gtOp.gtOp2->gtFlags & GTF_ASG));
#ifdef DEBUG
commaNode->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED;
#endif
assert(b);
GetArrayInfoMap()->Remove(tree);
}
- tree = op1;
- op1 = gtNewOperNode(GT_IND, typ, commaNode->gtOp.gtOp2);
- op1->gtFlags = treeFlags;
+ tree = op1;
+ GenTree* addr = commaNode->gtOp.gtOp2;
+ op1 = gtNewIndir(typ, addr);
+ // This is very conservative
+ op1->gtFlags |= treeFlags & ~GTF_ALL_EFFECT & ~GTF_IND_NONFAULTING;
+ op1->gtFlags |= (addr->gtFlags & GTF_ALL_EFFECT);
+
if (wasArrIndex)
{
GetArrayInfoMap()->Set(op1, arrInfo);
op1->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED;
#endif
commaNode->gtOp.gtOp2 = op1;
+ commaNode->gtFlags |= (op1->gtFlags & GTF_ALL_EFFECT);
return tree;
}
if (commaOp2->gtOper == GT_IND)
{
commaOp2->gtFlags |= GTF_IND_NONFAULTING;
+ commaOp2->gtFlags &= ~GTF_EXCEPT;
+ commaOp2->gtFlags |= (commaOp2->gtOp.gtOp1->gtFlags & GTF_EXCEPT);
}
op1 = gtNewOperNode(GT_ADDR, TYP_BYREF, commaOp2);
commaNode = commaNode->gtOp.gtOp2;
}
+ tree->gtFlags &= ~GTF_EXCEPT;
+
+ // Propagate the new flags
+ tree->gtFlags |= (tree->gtOp.gtOp1->gtFlags & GTF_EXCEPT);
+ tree->gtFlags |= (tree->gtOp.gtOp2->gtFlags & GTF_EXCEPT);
+
return tree;
}
break;
case GT_CALL:
+ if (tree->OperMayThrow(this))
+ {
+ tree->gtFlags |= GTF_EXCEPT;
+ }
+ else
+ {
+ tree->gtFlags &= ~GTF_EXCEPT;
+ }
tree = fgMorphCall(tree->AsCall());
break;
case GT_ARR_ELEM:
tree->gtArrElem.gtArrObj = fgMorphTree(tree->gtArrElem.gtArrObj);
- tree->gtFlags |= tree->gtArrElem.gtArrObj->gtFlags & GTF_ALL_EFFECT;
unsigned dim;
for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
{
tree->gtArrElem.gtArrInds[dim] = fgMorphTree(tree->gtArrElem.gtArrInds[dim]);
+ }
+
+ tree->gtFlags |= tree->gtArrElem.gtArrObj->gtFlags & GTF_ALL_EFFECT;
+
+ for (dim = 0; dim < tree->gtArrElem.gtArrRank; dim++)
+ {
tree->gtFlags |= tree->gtArrElem.gtArrInds[dim]->gtFlags & GTF_ALL_EFFECT;
}
+
if (fgGlobalMorph)
{
fgSetRngChkTarget(tree, false);
case GT_ARR_OFFSET:
tree->gtArrOffs.gtOffset = fgMorphTree(tree->gtArrOffs.gtOffset);
+ tree->gtArrOffs.gtIndex = fgMorphTree(tree->gtArrOffs.gtIndex);
+ tree->gtArrOffs.gtArrObj = fgMorphTree(tree->gtArrOffs.gtArrObj);
+
tree->gtFlags |= tree->gtArrOffs.gtOffset->gtFlags & GTF_ALL_EFFECT;
- tree->gtArrOffs.gtIndex = fgMorphTree(tree->gtArrOffs.gtIndex);
tree->gtFlags |= tree->gtArrOffs.gtIndex->gtFlags & GTF_ALL_EFFECT;
- tree->gtArrOffs.gtArrObj = fgMorphTree(tree->gtArrOffs.gtArrObj);
tree->gtFlags |= tree->gtArrOffs.gtArrObj->gtFlags & GTF_ALL_EFFECT;
if (fgGlobalMorph)
{
tree->gtCmpXchg.gtOpLocation = fgMorphTree(tree->gtCmpXchg.gtOpLocation);
tree->gtCmpXchg.gtOpValue = fgMorphTree(tree->gtCmpXchg.gtOpValue);
tree->gtCmpXchg.gtOpComparand = fgMorphTree(tree->gtCmpXchg.gtOpComparand);
+
+ tree->gtFlags &= ~GTF_EXCEPT;
+
+ tree->gtFlags |= tree->gtCmpXchg.gtOpLocation->gtFlags & GTF_ALL_EFFECT;
+ tree->gtFlags |= tree->gtCmpXchg.gtOpValue->gtFlags & GTF_ALL_EFFECT;
+ tree->gtFlags |= tree->gtCmpXchg.gtOpComparand->gtFlags & GTF_ALL_EFFECT;
break;
case GT_STORE_DYN_BLK:
- tree->gtDynBlk.Data() = fgMorphTree(tree->gtDynBlk.Data());
- __fallthrough;
case GT_DYN_BLK:
+ if (tree->OperGet() == GT_STORE_DYN_BLK)
+ {
+ tree->gtDynBlk.Data() = fgMorphTree(tree->gtDynBlk.Data());
+ }
tree->gtDynBlk.Addr() = fgMorphTree(tree->gtDynBlk.Addr());
tree->gtDynBlk.gtDynamicSize = fgMorphTree(tree->gtDynBlk.gtDynamicSize);
+
+ tree->gtFlags &= ~GTF_EXCEPT;
+ tree->SetIndirExceptionFlags(this);
+
+ if (tree->OperGet() == GT_STORE_DYN_BLK)
+ {
+ tree->gtFlags |= tree->gtDynBlk.Data()->gtFlags & GTF_ALL_EFFECT;
+ }
+ tree->gtFlags |= tree->gtDynBlk.Addr()->gtFlags & GTF_ALL_EFFECT;
+ tree->gtFlags |= tree->gtDynBlk.gtDynamicSize->gtFlags & GTF_ALL_EFFECT;
break;
case GT_INDEX_ADDR:
vtTree->gtFlags |= GTF_EXCEPT; // Null-pointer exception
GenTreePtr methodHnd = gtNewIconEmbMethHndNode(info.compMethodHnd);
- return gtNewHelperCallNode(CORINFO_HELP_INITINSTCLASS, TYP_VOID, 0,
- gtNewArgList(vtTree, methodHnd));
+ return gtNewHelperCallNode(CORINFO_HELP_INITINSTCLASS, TYP_VOID, gtNewArgList(vtTree, methodHnd));
}
case CORINFO_LOOKUP_CLASSPARAM:
{
GenTreePtr vtTree = gtNewLclvNode(info.compTypeCtxtArg, TYP_I_IMPL);
- return gtNewHelperCallNode(CORINFO_HELP_INITCLASS, TYP_VOID, 0, gtNewArgList(vtTree));
+ return gtNewHelperCallNode(CORINFO_HELP_INITCLASS, TYP_VOID, gtNewArgList(vtTree));
}
case CORINFO_LOOKUP_METHODPARAM:
{
GenTreePtr methHndTree = gtNewLclvNode(info.compTypeCtxtArg, TYP_I_IMPL);
- return gtNewHelperCallNode(CORINFO_HELP_INITINSTCLASS, TYP_VOID, 0,
+ return gtNewHelperCallNode(CORINFO_HELP_INITINSTCLASS, TYP_VOID,
gtNewArgList(gtNewIconNode(0), methHndTree));
}
}
// confirm that the argument is a GC pointer (for debugging (GC stress))
GenTreePtr op = gtNewLclvNode(i, TYP_REF);
GenTreeArgList* args = gtNewArgList(op);
- op = gtNewHelperCallNode(CORINFO_HELP_CHECK_OBJ, TYP_VOID, 0, args);
+ op = gtNewHelperCallNode(CORINFO_HELP_CHECK_OBJ, TYP_VOID, args);
fgEnsureFirstBBisScratch();
fgInsertStmtAtEnd(fgFirstBB, op);
{
assert(simdStructNode->OperIsLocal());
assert(lvaIsImplicitByRefLocal(simdStructNode->AsLclVarCommon()->gtLclNum));
- simdStructNode = gtNewOperNode(GT_IND, simdType, simdStructNode);
+ simdStructNode = gtNewIndir(simdType, simdStructNode);
}
else
{
{
LcJaggedArrayOptInfo* arrIndexInfo = optInfo->AsLcJaggedArrayOptInfo();
compCurBB = arrIndexInfo->arrIndex.useBlock;
- optRemoveRangeCheck(arrIndexInfo->arrIndex.bndsChks[arrIndexInfo->dim], arrIndexInfo->stmt, true,
- GTF_ASG, true);
+ optRemoveRangeCheck(arrIndexInfo->arrIndex.bndsChks[arrIndexInfo->dim], arrIndexInfo->stmt);
DBEXEC(dynamicPath, optDebugLogLoopCloning(arrIndexInfo->arrIndex.useBlock, arrIndexInfo->stmt));
}
break;
fgWalkTreePre(&deadTree, optRemoveTreeVisitor, (void*)keepList);
}
-/*****************************************************************************
- *
- * Given an array index node, mark it as not needing a range check.
- */
+//------------------------------------------------------------------------------
+// optRemoveRangeCheck : Given an array index node, mark it as not needing a range check.
+//
+// Arguments:
+// tree - Range check tree
+// stmt - Statement the tree belongs to
-void Compiler::optRemoveRangeCheck(
- GenTreePtr tree, GenTreePtr stmt, bool updateCSEcounts, unsigned sideEffFlags, bool forceRemove)
+void Compiler::optRemoveRangeCheck(GenTreePtr tree, GenTreePtr stmt)
{
- GenTreePtr add1;
- GenTreePtr* addp;
-
- GenTreePtr nop1;
- GenTreePtr* nopp;
-
- GenTreePtr icon;
- GenTreePtr mult;
-
- GenTreePtr base;
-
- ssize_t ival;
-
#if !REARRANGE_ADDS
noway_assert(!"can't remove range checks without REARRANGE_ADDS right now");
#endif
noway_assert(stmt->gtOper == GT_STMT);
noway_assert(tree->gtOper == GT_COMMA);
- noway_assert(tree->gtOp.gtOp1->OperIsBoundsCheck());
- noway_assert(forceRemove || optIsRangeCheckRemovable(tree->gtOp.gtOp1));
+
+ GenTree* bndsChkTree = tree->gtOp.gtOp1;
+
+ noway_assert(bndsChkTree->OperIsBoundsCheck());
GenTreeBoundsChk* bndsChk = tree->gtOp.gtOp1->AsBoundsChk();
#endif
GenTreePtr sideEffList = nullptr;
- if (sideEffFlags)
- {
- gtExtractSideEffList(tree->gtOp.gtOp1, &sideEffList, sideEffFlags);
- }
+
+ gtExtractSideEffList(bndsChkTree, &sideEffList, GTF_ASG);
// Decrement the ref counts for any LclVars that are being deleted
//
- optRemoveTree(tree->gtOp.gtOp1, sideEffList);
+ optRemoveTree(bndsChkTree, sideEffList);
// Just replace the bndsChk with a NOP as an operand to the GT_COMMA, if there are no side effects.
tree->gtOp.gtOp1 = (sideEffList != nullptr) ? sideEffList : gtNewNothingNode();
-
// TODO-CQ: We should also remove the GT_COMMA, but in any case we can no longer CSE the GT_COMMA.
tree->gtFlags |= GTF_DONT_CSE;
+ gtUpdateSideEffects(stmt, tree);
+
/* Recalculate the gtCostSz, etc... */
gtSetStmtInfo(stmt);
compCurBB = block;
for (GenTreePtr stmt = block->bbTreeList; stmt; stmt = stmt->gtNext)
{
- info.stmt = stmt;
- fgWalkTreePre(&stmt->gtStmt.gtStmtExpr, optCanOptimizeByLoopCloningVisitor, &info, false, false);
+ info.stmt = stmt;
+ const bool lclVarsOnly = false;
+ const bool computeStack = false;
+ fgWalkTreePre(&stmt->gtStmt.gtStmtExpr, optCanOptimizeByLoopCloningVisitor, &info, lclVarsOnly,
+ computeStack);
}
}
if (arrSize > 0 && idxVal < arrSize && idxVal >= 0)
{
JITDUMP("Removing range check\n");
- m_pCompiler->optRemoveRangeCheck(treeParent, stmt, true, GTF_ASG, true /* force remove */);
+ m_pCompiler->optRemoveRangeCheck(treeParent, stmt);
return;
}
}
if (BetweenBounds(range, 0, bndsChk->gtArrLen))
{
JITDUMP("[RangeCheck::OptimizeRangeCheck] Between bounds\n");
- m_pCompiler->optRemoveRangeCheck(treeParent, stmt, true, GTF_ASG, true /* force remove */);
+ m_pCompiler->optRemoveRangeCheck(treeParent, stmt);
}
return;
}
// The length for boundary check should be the maximum index number which should be
// (first argument's index number) + (how many array arguments we have) - 1
// = indexVal + arrayElementsCount - 1
- unsigned arrayElementsCount = simdSize / genTypeSize(baseType);
- checkIndexExpr = new (this, GT_CNS_INT) GenTreeIntCon(TYP_INT, indexVal + arrayElementsCount - 1);
- GenTreeArrLen* arrLen =
- new (this, GT_ARR_LENGTH) GenTreeArrLen(TYP_INT, arrayRef, (int)offsetof(CORINFO_Array, length));
+ unsigned arrayElementsCount = simdSize / genTypeSize(baseType);
+ checkIndexExpr = new (this, GT_CNS_INT) GenTreeIntCon(TYP_INT, indexVal + arrayElementsCount - 1);
+ GenTreeArrLen* arrLen = gtNewArrLen(TYP_INT, arrayRef, (int)offsetof(CORINFO_Array, length));
GenTreeBoundsChk* arrBndsChk = new (this, GT_ARR_BOUNDS_CHECK)
GenTreeBoundsChk(GT_ARR_BOUNDS_CHECK, TYP_VOID, checkIndexExpr, arrLen, SCK_RNGCHK_FAIL);
op3 = gtCloneExpr(index);
}
- GenTreeArrLen* arrLen = new (this, GT_ARR_LENGTH)
- GenTreeArrLen(TYP_INT, arrayRefForArgRngChk, (int)offsetof(CORINFO_Array, length));
+ GenTreeArrLen* arrLen =
+ gtNewArrLen(TYP_INT, arrayRefForArgRngChk, (int)offsetof(CORINFO_Array, length));
argRngChk = new (this, GT_ARR_BOUNDS_CHECK)
GenTreeBoundsChk(GT_ARR_BOUNDS_CHECK, TYP_VOID, index, arrLen, op3CheckKind);
// Now, clone op3 to create another node for the argChk
{
op2CheckKind = SCK_ARG_EXCPN;
}
- GenTreeArrLen* arrLen = new (this, GT_ARR_LENGTH)
- GenTreeArrLen(TYP_INT, arrayRefForArgChk, (int)offsetof(CORINFO_Array, length));
+ GenTreeArrLen* arrLen = gtNewArrLen(TYP_INT, arrayRefForArgChk, (int)offsetof(CORINFO_Array, length));
GenTreeBoundsChk* argChk = new (this, GT_ARR_BOUNDS_CHECK)
GenTreeBoundsChk(GT_ARR_BOUNDS_CHECK, TYP_VOID, checkIndexExpr, arrLen, op2CheckKind);
// This (or these) are not pure, in that they have "VM side effects"...but they don't mutate the heap.
case CORINFO_HELP_ENDCATCH:
+
+ noThrow = true;
break;
// Arithmetic helpers that may throw
break;
// helpers that return internal handle
- // TODO-ARM64-Bug?: Can these throw or not?
case CORINFO_HELP_GETCLASSFROMMETHODPARAM:
case CORINFO_HELP_GETSYNCFROMCLASSHANDLE:
- isPure = true;
+ isPure = true;
+ noThrow = true;
break;
// Helpers that load the base address for static variables.
case CORINFO_HELP_THROWNULLREF:
case CORINFO_HELP_THROW:
case CORINFO_HELP_RETHROW:
+ case CORINFO_HELP_THROW_ARGUMENTEXCEPTION:
+ case CORINFO_HELP_THROW_ARGUMENTOUTOFRANGEEXCEPTION:
break;
case CORINFO_HELP_FIELD_ACCESS_CHECK:
case CORINFO_HELP_CLASS_ACCESS_CHECK:
case CORINFO_HELP_DELEGATE_SECURITY_CHECK:
+ case CORINFO_HELP_MON_EXIT_STATIC:
break;
noThrow = true;
break;
+ case CORINFO_HELP_DBG_IS_JUST_MY_CODE:
+ case CORINFO_HELP_BBT_FCN_ENTER:
+ case CORINFO_HELP_POLL_GC:
+ case CORINFO_HELP_MON_ENTER:
+ case CORINFO_HELP_MON_EXIT:
+ case CORINFO_HELP_MON_ENTER_STATIC:
+ case CORINFO_HELP_JIT_REVERSE_PINVOKE_ENTER:
+ case CORINFO_HELP_JIT_REVERSE_PINVOKE_EXIT:
+ case CORINFO_HELP_SECURITY_PROLOG:
+ case CORINFO_HELP_SECURITY_PROLOG_FRAMED:
+ case CORINFO_HELP_VERIFICATION_RUNTIME_CHECK:
+ case CORINFO_HELP_GETFIELDADDR:
+ case CORINFO_HELP_INIT_PINVOKE_FRAME:
+ case CORINFO_HELP_JIT_PINVOKE_BEGIN:
+ case CORINFO_HELP_JIT_PINVOKE_END:
+ case CORINFO_HELP_GETCURRENTMANAGEDTHREADID:
+
+ noThrow = true;
+ break;
+
// Not sure how to handle optimization involving the rest of these helpers
default: