compLocallocUsed = false;
compLocallocOptimized = false;
compQmarkRationalized = false;
+ compAssignmentRationalized = false;
compQmarkUsed = false;
compFloatingPointUsed = false;
//
DoPhase(this, PHASE_DETERMINE_FIRST_COLD_BLOCK, &Compiler::fgDetermineFirstColdBlock);
+ DoPhase(this, PHASE_RATIONALIZE_ASSIGNMENTS, &Compiler::fgRationalizeAssignments);
+
#ifdef DEBUG
// Stash the current estimate of the function's size if necessary.
if (verbose)
GenTree* gtNewOneConNode(var_types type, var_types simdBaseType = TYP_UNDEF);
- GenTreeLclVar* gtNewStoreLclVar(unsigned dstLclNum, GenTree* src);
+ GenTreeLclVar* gtNewStoreLclVarNode(unsigned lclNum, GenTree* data);
+
+ GenTreeLclFld* gtNewStoreLclFldNode(unsigned lclNum, var_types type, unsigned offset, GenTree* data);
GenTree* gtNewBlkOpNode(GenTree* dst, GenTree* srcOrFillVal, bool isVolatile = false);
void fgExpandQmarkStmt(BasicBlock* block, Statement* stmt);
void fgExpandQmarkNodes();
+ PhaseStatus fgRationalizeAssignments();
+ GenTree* fgRationalizeAssignment(GenTreeOp* assignment);
+
// Do "simple lowering." This functionality is (conceptually) part of "general"
// lowering that is distributed between fgMorph and the lowering phase of LSRA.
PhaseStatus fgSimpleLowering();
bool compLocallocOptimized; // Does the method have an optimized localloc
bool compQmarkUsed; // Does the method use GT_QMARK/GT_COLON
bool compQmarkRationalized; // Is it allowed to use a GT_QMARK/GT_COLON node.
+ bool compAssignmentRationalized; // Have the ASG nodes been turned into their store equivalents?
bool compHasBackwardJump; // Does the method (or some inlinee) have a lexically backwards jump?
bool compHasBackwardJumpInHandler; // Does the method have a lexically backwards jump in a handler?
bool compSwitchedToOptimized; // Codegen initially was Tier0 but jit switched to FullOpts
CompPhaseNameMacro(PHASE_VN_BASED_DEAD_STORE_REMOVAL, "VN-based dead store removal", false, -1, false)
CompPhaseNameMacro(PHASE_OPT_UPDATE_FLOW_GRAPH, "Update flow graph opt pass", false, -1, false)
CompPhaseNameMacro(PHASE_COMPUTE_EDGE_WEIGHTS2, "Compute edge weights (2, false)",false, -1, false)
+CompPhaseNameMacro(PHASE_RATIONALIZE_ASSIGNMENTS, "Rationalize assignments", false, -1, false)
CompPhaseNameMacro(PHASE_STRESS_SPLIT_TREE, "Stress gtSplitTree", false, -1, false)
CompPhaseNameMacro(PHASE_EXPAND_RTLOOKUPS, "Expand runtime lookups", false, -1, true)
CompPhaseNameMacro(PHASE_EXPAND_STATIC_INIT, "Expand static init", false, -1, true)
bool ShouldLink(GenTree* node)
{
- return node->OperIsLocal() || node->OperIs(GT_LCL_ADDR);
+ return node->OperIsAnyLocal();
}
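+ // Note: OperIsAnyLocal() folds the GT_LCL_ADDR check into OperIsLocal(),
+ // matching local loads, local stores and local addresses with one predicate.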
public:
int nodeIndex = 0;
for (GenTree* cur = first; cur != nullptr; cur = cur->gtNext)
{
- success &= cur->OperIsLocal() || cur->OperIs(GT_LCL_ADDR);
+ success &= cur->OperIsAnyLocal();
success &= (nodeIndex < expected->Height()) && (cur == expected->Bottom(nodeIndex));
nodeIndex++;
}
}
//------------------------------------------------------------------------
+// fgRationalizeAssignments: Rewrite assignment nodes into stores.
+//
+// TODO-ASG: delete.
+//
+PhaseStatus Compiler::fgRationalizeAssignments()
+{
+ class AssignmentRationalizationVisitor : public GenTreeVisitor<AssignmentRationalizationVisitor>
+ {
+ public:
+ enum
+ {
+ DoPreOrder = true
+ };
+
+ AssignmentRationalizationVisitor(Compiler* compiler) : GenTreeVisitor(compiler)
+ {
+ }
+
+ fgWalkResult PreOrderVisit(GenTree** use, GenTree* user)
+ {
+ GenTree* node = *use;
+
+ // GTF_ASG is sometimes not propagated from setup arg assignments so we have to check for GTF_CALL too.
+ if ((node->gtFlags & (GTF_ASG | GTF_CALL)) == 0)
+ {
+ return fgWalkResult::WALK_SKIP_SUBTREES;
+ }
+
+ if (node->OperIs(GT_ASG))
+ {
+ GenTreeFlags lhsRhsFlags = node->gtGetOp1()->gtFlags | node->gtGetOp2()->gtFlags;
+ *use = m_compiler->fgRationalizeAssignment(node->AsOp());
+
+ // TP: return early quickly for simple assignments.
+ if ((lhsRhsFlags & (GTF_ASG | GTF_CALL)) == 0)
+ {
+ return fgWalkResult::WALK_SKIP_SUBTREES;
+ }
+ }
+
+ return fgWalkResult::WALK_CONTINUE;
+ }
+ };
+
+ AssignmentRationalizationVisitor visitor(this);
+ for (BasicBlock* block : Blocks())
+ {
+ for (Statement* stmt : block->Statements())
+ {
+ GenTree** use = stmt->GetRootNodePointer();
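+ // Visit the root directly first: if it reports WALK_SKIP_SUBTREES (no
+ // nested assignments or calls to process), the full tree walk is skipped.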
+ if (visitor.PreOrderVisit(use, nullptr) == fgWalkResult::WALK_CONTINUE)
+ {
+ visitor.WalkTree(use, nullptr);
+ }
+ }
+ }
+
+ compAssignmentRationalized = true;
+
+#ifdef DEBUG
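+ // Under JitStressMorphStores, re-morph every statement now that stores
+ // exist, to exercise the store-aware morph paths early.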
+ if (JitConfig.JitStressMorphStores())
+ {
+ for (BasicBlock* block : Blocks())
+ {
+ for (Statement* stmt : block->Statements())
+ {
+ fgMorphBlockStmt(block, stmt DEBUGARG("fgRationalizeAssignments"));
+ }
+ }
+ }
+#endif // DEBUG
+
+ return PhaseStatus::MODIFIED_EVERYTHING;
+}
+
+//------------------------------------------------------------------------
+// fgRationalizeAssignment: Rewrite GT_ASG into a store node.
+//
+// Arguments:
+// assignment - The assignment node to rewrite
+//
+// Return Value:
+// Assignment's location, turned into the appropriate store node.
+//
+GenTree* Compiler::fgRationalizeAssignment(GenTreeOp* assignment)
+{
+ assert(assignment->OperGet() == GT_ASG);
+
+ bool isReverseOp = assignment->IsReverseOp();
+ GenTree* location = assignment->gtGetOp1();
+ GenTree* value = assignment->gtGetOp2();
+ if (location->OperIsLocal())
+ {
+ assert((location->gtFlags & GTF_VAR_DEF) != 0);
+ }
+ else if (value->OperIs(GT_LCL_VAR))
+ {
+ assert((value->gtFlags & GTF_VAR_DEF) == 0);
+ }
+
+ if (assignment->OperIsInitBlkOp())
+ {
+ // No SIMD types are allowed for InitBlks (including zero-inits).
+ assert(assignment->TypeIs(TYP_STRUCT) && location->TypeIs(TYP_STRUCT));
+ }
+
+ genTreeOps storeOp;
+ switch (location->OperGet())
+ {
+ case GT_LCL_VAR:
+ storeOp = GT_STORE_LCL_VAR;
+ break;
+ case GT_LCL_FLD:
+ storeOp = GT_STORE_LCL_FLD;
+ break;
+ case GT_BLK:
+ storeOp = GT_STORE_BLK;
+ break;
+ case GT_IND:
+ storeOp = GT_STOREIND;
+ break;
+ default:
+ unreached();
+ }
+
+ JITDUMP("Rewriting GT_ASG(%s, X) to %s(X)\n", GenTree::OpName(location->OperGet()), GenTree::OpName(storeOp));
+
+ GenTree* store = location;
+ store->SetOperRaw(storeOp);
+ store->Data() = value;
+ store->gtFlags |= GTF_ASG;
+ store->AddAllEffectsFlags(value);
+ store->AddAllEffectsFlags(assignment->gtFlags & GTF_GLOB_REF); // TODO-ASG: zero-diff quirk, delete.
+ if (isReverseOp && !store->OperIsLocalStore())
+ {
+ store->SetReverseOp();
+ }
+
+ if (storeOp == GT_STOREIND)
+ {
+ store->AsStoreInd()->SetRMWStatusDefault();
+ }
+
+ // [..., LHS, ..., RHS, ASG] -> [..., ..., RHS, LHS<STORE>] (normal)
+ // [..., RHS, ..., LHS, ASG] -> [..., RHS, ..., LHS<STORE>] (reversed)
+ if (assignment->gtPrev != nullptr)
+ {
+ assert(fgNodeThreading == NodeThreading::AllTrees);
+ if (isReverseOp)
+ {
+ GenTree* nextNode = assignment->gtNext;
+ store->gtNext = nextNode;
+ if (nextNode != nullptr)
+ {
+ nextNode->gtPrev = store;
+ }
+ }
+ else
+ {
+ if (store->gtPrev != nullptr)
+ {
+ store->gtPrev->gtNext = store->gtNext;
+ }
+ store->gtNext->gtPrev = store->gtPrev;
+
+ store->gtPrev = assignment->gtPrev;
+ store->gtNext = assignment->gtNext;
+ store->gtPrev->gtNext = store;
+ if (store->gtNext != nullptr)
+ {
+ store->gtNext->gtPrev = store;
+ }
+ }
+ }
+
+ DISPNODE(store);
+ JITDUMP("\n");
+
+ return store;
+}
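+
+// For illustration, a (hypothetical) assignment tree such as
+//
+//   ASG(LCL_VAR V02, MUL(LCL_VAR V00, CNS_INT 3))
+//
+// leaves this function as
+//
+//   STORE_LCL_VAR V02 (MUL(LCL_VAR V00, CNS_INT 3))
+//
+// with the location node reused as the store and the ASG node abandoned.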
+
+//------------------------------------------------------------------------
// fgSimpleLowering: do full walk of all IR, lowering selected operations
// and computing lvaOutgoingArgSpaceSize.
//
LocalsGenTreeList::iterator LocalsGenTreeList::begin() const
{
GenTree* first = m_stmt->GetTreeList();
- assert((first == nullptr) || first->OperIsLocal() || first->OperIs(GT_LCL_ADDR));
+ assert((first == nullptr) || first->OperIsAnyLocal());
return iterator(static_cast<GenTreeLclVarCommon*>(first));
}
// these should be included in the comparison.
switch (oper)
{
+ case GT_STORE_LCL_FLD:
+ if ((op1->AsLclFld()->GetLclOffs() != op2->AsLclFld()->GetLclOffs()) ||
+ (op1->AsLclFld()->GetLayout() != op2->AsLclFld()->GetLayout()))
+ {
+ return false;
+ }
+ FALLTHROUGH;
+ case GT_STORE_LCL_VAR:
+ if (op1->AsLclVarCommon()->GetLclNum() != op2->AsLclVarCommon()->GetLclNum())
+ {
+ return false;
+ }
+ break;
+
case GT_ARR_LENGTH:
if (op1->AsArrLen()->ArrLenOffset() != op2->AsArrLen()->ArrLenOffset())
{
// these should be included in the comparison.
switch (oper)
{
+ case GT_STORE_BLK:
+ if (op1->AsBlk()->GetLayout() != op2->AsBlk()->GetLayout())
+ {
+ return false;
+ }
+ FALLTHROUGH;
+
+ case GT_STOREIND:
+ if ((op1->gtFlags & GTF_IND_FLAGS) != (op2->gtFlags & GTF_IND_FLAGS))
+ {
+ return false;
+ }
+ break;
+
case GT_INTRINSIC:
if (op1->AsIntrinsic()->gtIntrinsicName != op2->AsIntrinsic()->gtIntrinsicName)
{
if (tree->OperIsLeaf())
{
- if ((tree->OperIsLocal() || tree->OperIs(GT_LCL_ADDR)) && (tree->AsLclVarCommon()->GetLclNum() == lclNum))
+ if (tree->OperIsAnyLocal() && (tree->AsLclVarCommon()->GetLclNum() == lclNum))
{
return true;
}
// these should be included in the hash code.
switch (oper)
{
+ case GT_STORE_LCL_VAR:
+ hash = genTreeHashAdd(hash, tree->AsLclVar()->GetLclNum());
+ break;
+ case GT_STORE_LCL_FLD:
+ hash = genTreeHashAdd(hash, tree->AsLclFld()->GetLclNum());
+ hash = genTreeHashAdd(hash, tree->AsLclFld()->GetLclOffs());
+ hash = genTreeHashAdd(hash, tree->AsLclFld()->GetLayout());
+ break;
+ case GT_STOREIND:
+ hash = genTreeHashAdd(hash, tree->AsStoreInd()->GetRMWStatus());
+ break;
case GT_ARR_LENGTH:
hash += tree->AsArrLen()->ArrLenOffset();
break;
bool GenTree::OperRequiresAsgFlag()
{
- if (OperIs(GT_ASG, GT_STORE_DYN_BLK) ||
- OperIs(GT_XADD, GT_XORR, GT_XAND, GT_XCHG, GT_LOCKADD, GT_CMPXCHG, GT_MEMORYBARRIER))
+ switch (OperGet())
{
- return true;
- }
+ case GT_STORE_LCL_VAR:
+ case GT_STORE_LCL_FLD:
+ case GT_STOREIND:
+ case GT_STORE_BLK:
+ case GT_STORE_DYN_BLK:
+ case GT_ASG:
+ case GT_XADD:
+ case GT_XORR:
+ case GT_XAND:
+ case GT_XCHG:
+ case GT_LOCKADD:
+ case GT_CMPXCHG:
+ case GT_MEMORYBARRIER:
+ return true;
+
+ // If the call has a return buffer argument, it produces a definition and
+ // hence should be marked with GTF_ASG.
+ case GT_CALL:
+ return AsCall()->IsOptimizingRetBufAsLocal();
#ifdef FEATURE_HW_INTRINSICS
- if (gtOper == GT_HWINTRINSIC)
- {
- return AsHWIntrinsic()->OperRequiresAsgFlag();
- }
+ case GT_HWINTRINSIC:
+ return AsHWIntrinsic()->OperRequiresAsgFlag();
#endif // FEATURE_HW_INTRINSICS
- if (gtOper == GT_CALL)
- {
- // If the call has return buffer argument, it produced a definition and hence
- // should be marked with assignment.
- return AsCall()->IsOptimizingRetBufAsLocal();
+ default:
+ assert(!OperIsStore() && !OperIsAtomicOp());
+ return false;
}
- return false;
}
//------------------------------------------------------------------------------
{
switch (gtOper)
{
+ case GT_ADD:
+ case GT_SUB:
+ case GT_MUL:
+ case GT_CAST:
+#ifndef TARGET_64BIT
+ case GT_ADD_HI:
+ case GT_SUB_HI:
+#endif // !TARGET_64BIT
+ return gtOverflow() ? ExceptionSetFlags::OverflowException : ExceptionSetFlags::None;
+
case GT_MOD:
case GT_DIV:
case GT_UMOD:
return ExceptionSetFlags::All;
case GT_IND:
+ case GT_STOREIND:
case GT_BLK:
case GT_NULLCHECK:
case GT_STORE_BLK:
#endif // FEATURE_HW_INTRINSICS
default:
- if (gtOverflowEx())
- {
- return ExceptionSetFlags::OverflowException;
- }
-
+ assert(!OperMayOverflow() && !OperIsIndirOrArrMetaData());
return ExceptionSetFlags::None;
}
}
}
}
-GenTreeLclVar* Compiler::gtNewStoreLclVar(unsigned dstLclNum, GenTree* src)
+GenTreeLclVar* Compiler::gtNewStoreLclVarNode(unsigned lclNum, GenTree* data)
+{
+ LclVarDsc* varDsc = lvaGetDesc(lclNum);
+ var_types type = varDsc->lvNormalizeOnLoad() ? varDsc->TypeGet() : genActualType(varDsc);
+ GenTreeLclVar* store = new (this, GT_STORE_LCL_VAR) GenTreeLclVar(type, lclNum, data);
+ store->gtFlags |= (GTF_VAR_DEF | GTF_ASG);
+ if (varDsc->IsAddressExposed())
+ {
+ store->gtFlags |= GTF_GLOB_REF;
+ }
+
+ return store;
+}
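+
+// Usage sketch (illustrative only; assumes a hypothetical int-typed local V05):
+//
+//   GenTree*       one   = gtNewIconNode(1);             // value to store
+//   GenTreeLclVar* store = gtNewStoreLclVarNode(5, one); // V05 = 1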
+
+GenTreeLclFld* Compiler::gtNewStoreLclFldNode(unsigned lclNum, var_types type, unsigned offset, GenTree* data)
{
- GenTreeLclVar* store = new (this, GT_STORE_LCL_VAR) GenTreeLclVar(GT_STORE_LCL_VAR, src->TypeGet(), dstLclNum);
- store->gtOp1 = src;
- store->gtFlags = (src->gtFlags & GTF_COMMON_MASK);
- store->gtFlags |= GTF_VAR_DEF | GTF_ASG;
+ assert((genActualType(type) == genActualType(data)) || ((type == TYP_STRUCT) && data->TypeIs(TYP_INT)));
+
+ ClassLayout* layout = (type == TYP_STRUCT) ? data->GetLayout(this) : nullptr;
+ GenTreeLclFld* store = new (this, GT_STORE_LCL_FLD) GenTreeLclFld(type, lclNum, offset, data, layout);
+ store->gtFlags |= (GTF_VAR_DEF | GTF_ASG);
+ if (store->IsPartialLclFld(this))
+ {
+ store->gtFlags |= GTF_VAR_USEASG;
+ }
+ if (lvaGetDesc(lclNum)->IsAddressExposed())
+ {
+ store->gtFlags |= GTF_GLOB_REF;
+ }
+
return store;
}
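+
+// Usage sketch (illustrative only; the local number, type and offset are
+// hypothetical): store an INT into the 4 bytes at offset 8 of V05:
+//
+//   GenTree*       val   = gtNewLclvNode(0, TYP_INT);
+//   GenTreeLclFld* store = gtNewStoreLclFldNode(5, TYP_INT, 8, val);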
GenTreeLclFld* Compiler::gtNewLclFldNode(unsigned lnum, var_types type, unsigned offset)
{
- GenTreeLclFld* node = new (this, GT_LCL_FLD) GenTreeLclFld(GT_LCL_FLD, type, lnum, offset);
+ GenTreeLclFld* node = new (this, GT_LCL_FLD) GenTreeLclFld(GT_LCL_FLD, type, lnum, offset, nullptr);
return node;
}
switch (oper)
{
+ case GT_STORE_LCL_VAR:
+ copy = new (this, GT_STORE_LCL_VAR)
+ GenTreeLclVar(tree->TypeGet(), tree->AsLclVar()->GetLclNum(), tree->AsLclVar()->Data());
+ break;
+
+ case GT_STORE_LCL_FLD:
+ copy = new (this, GT_STORE_LCL_FLD)
+ GenTreeLclFld(tree->TypeGet(), tree->AsLclFld()->GetLclNum(), tree->AsLclFld()->GetLclOffs(),
+ tree->AsLclFld()->Data(), tree->AsLclFld()->GetLayout());
+ break;
+
/* These nodes sometimes get bashed to "fat" ones */
case GT_MUL:
}
break;
+ case GT_STOREIND:
+ copy = new (this, GT_STOREIND)
+ GenTreeStoreInd(tree->TypeGet(), tree->AsIndir()->Addr(), tree->AsIndir()->Data());
+ copy->AsStoreInd()->SetRMWStatus(tree->AsStoreInd()->GetRMWStatus());
+ break;
+
+ case GT_STORE_BLK:
+ copy = new (this, GT_STORE_BLK) GenTreeBlk(GT_STORE_BLK, tree->TypeGet(), tree->AsBlk()->Addr(),
+ tree->AsBlk()->Data(), tree->AsBlk()->GetLayout());
+ break;
+
case GT_ARR_ADDR:
copy = new (this, GT_ARR_ADDR)
GenTreeArrAddr(tree->AsArrAddr()->Addr(), tree->AsArrAddr()->GetElemType(),
//
void GenTree::SetIndirExceptionFlags(Compiler* comp)
{
- assert(OperIsIndirOrArrMetaData() && OperIsUnary());
+ assert(OperIsIndirOrArrMetaData() && OperIsSimple());
if (IndirMayFault(comp))
{
gtFlags |= GTF_EXCEPT;
return;
}

GenTree* addr = GetIndirOrArrMetaDataAddr();

gtFlags |= GTF_IND_NONFAULTING;
gtFlags &= ~GTF_EXCEPT;
gtFlags |= addr->gtFlags & GTF_EXCEPT;
+ if (OperIsBinary())
+ {
+ gtFlags |= gtGetOp2()->gtFlags & GTF_EXCEPT;
+ }
}
#ifdef DEBUG
DISPTREE(tree);
assert(cond->TypeIs(TYP_INT));
- assert((tree->gtFlags & GTF_SIDE_EFFECT & ~GTF_ASG) == 0);
- assert((tree->gtFlags & GTF_ORDER_SIDEEFF) == 0);
GenTree* replacement = nullptr;
if (cond->IsIntegralConst(0))
return tree;
}
- if (tree->OperIs(GT_NOP, GT_ALLOCOBJ, GT_RUNTIMELOOKUP))
- {
- return tree;
- }
-
-#ifdef FEATURE_HW_INTRINSICS
- if (tree->OperIs(GT_HWINTRINSIC))
+ if (tree->OperIs(GT_NOP, GT_ALLOCOBJ, GT_RUNTIMELOOKUP) || tree->OperIsStore() || tree->OperIsHWIntrinsic())
{
return tree;
}
-#endif
if (tree->OperIsUnary())
{
GenTree* asg;
GenTree* dest = gtNewLclvNode(tmp, dstTyp);
- if (val->IsInitVal())
- {
- asg = gtNewAssignNode(dest, val);
- }
- else if (varTypeIsStruct(varDsc))
+ if (varTypeIsStruct(varDsc) && !val->IsInitVal())
{
asg = impAssignStruct(dest, val, CHECK_SPILL_NONE, pAfterStmt, di, block);
}
else
{
- assert(!varTypeIsStruct(valTyp));
asg = gtNewAssignNode(dest, val);
}
- if (compRationalIRForm)
+ if (compAssignmentRationalized)
{
- Rationalizer::RewriteAssignmentIntoStoreLcl(asg->AsOp());
+ asg = fgRationalizeAssignment(asg->AsOp());
}
return asg;
{
if (flags & GTF_ASG)
{
- // TODO-Bug: This only checks for GT_ASG/GT_STORE_DYN_BLK but according to OperRequiresAsgFlag
- // there are many more opers that are considered to have an assignment side effect: atomic ops
- // (GT_CMPXCHG & co.), GT_MEMORYBARRIER (not classified as an atomic op) and HW intrinsic
- // memory stores. Atomic ops have special handling in gtExtractSideEffList but the others
- // will simply be dropped is they are ever subject to an "extract side effects" operation.
- // It is possible that the reason no bugs have yet been observed in this area is that the
- // other nodes are likely to always be tree roots.
- if (tree->OperIs(GT_ASG, GT_STORE_DYN_BLK))
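+ // Keying this check off OperRequiresAsgFlag means atomic ops, GT_MEMORYBARRIER,
+ // HW intrinsic stores and calls with a retbuf definition are now recognized
+ // as assignments here instead of being silently dropped.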
+ if (tree->OperRequiresAsgFlag())
{
return true;
}
return Compiler::WALK_SKIP_SUBTREES;
}
- // TODO-Cleanup: These have GTF_ASG set but for some reason gtNodeHasSideEffects ignores
- // them. See the related gtNodeHasSideEffects comment as well.
- // Also, these nodes must always be preserved, no matter what side effect flags are passed
- // in. But then it should never be the case that gtExtractSideEffList gets called without
- // specifying GTF_ASG so there doesn't seem to be any reason to be inconsistent with
- // gtNodeHasSideEffects and make this check unconditionally.
- if (node->OperIsAtomicOp())
- {
- Append(node);
- return Compiler::WALK_SKIP_SUBTREES;
- }
-
if (node->OperIs(GT_QMARK))
{
GenTree* prevSideEffects = m_result;
bool GenTree::IsPhiDefn()
{
- bool res = OperIs(GT_ASG) && AsOp()->gtOp2->OperIs(GT_PHI);
- assert(!res || AsOp()->gtOp1->OperIs(GT_LCL_VAR));
- return res;
+ if (OperIs(GT_ASG))
+ {
+ return AsOp()->gtOp2->OperIs(GT_PHI);
+ }
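+ // TODO-ASG: while GT_ASG and the store forms coexist, both kinds of PHI
+ // definition must be recognized here.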
+ if (OperIs(GT_STORE_LCL_VAR))
+ {
+ return AsLclVar()->Data()->OperIs(GT_PHI);
+ }
+ return false;
}
bool GenTree::IsLclVarAddr() const
GTK_EXOP = 0x10, // Indicates that an oper for a node type that extends GenTreeOp (or GenTreeUnOp)
// by adding non-node fields to unary or binary operator.
GTK_NOVALUE = 0x20, // node does not produce a value
+ GTK_STORE = 0x40, // node represents a store
GTK_MASK = 0xFF
};
return OperIsAtomicOp(gtOper);
}
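+ // Note: GTK_STORE covers only STORE_LCL_VAR, STORE_LCL_FLD, STOREIND and
+ // STORE_BLK; atomic ops and STORE_DYN_BLK, which the old list below
+ // included, now have to be checked for explicitly by callers.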
- bool OperIsStore() const
+ static bool OperIsStore(genTreeOps gtOper)
{
- return OperIsStore(gtOper);
+ return (OperKind(gtOper) & GTK_STORE) != 0;
}
- static bool OperIsStore(genTreeOps gtOper)
+ bool OperIsStore() const
{
- return (gtOper == GT_STOREIND || gtOper == GT_STORE_LCL_VAR || gtOper == GT_STORE_LCL_FLD ||
- OperIsStoreBlk(gtOper) || OperIsAtomicOp(gtOper));
+ return OperIsStore(gtOper);
}
static bool OperIsMultiOp(genTreeOps gtOper)
SetLclNum(lclNum);
}
+ GenTreeLclVarCommon(genTreeOps oper, var_types type, unsigned lclNum, GenTree* data)
+ : GenTreeUnOp(oper, type, data DEBUGARG(/* largeNode */ false))
+ {
+ assert(OperIsLocalStore());
+ SetLclNum(lclNum);
+ }
+
GenTree*& Data()
{
assert(OperIsLocalStore());
MultiRegSpillFlags gtSpillFlags;
public:
- INDEBUG(IL_OFFSET gtLclILoffs;) // instr offset of ref (only for JIT dumps)
+ INDEBUG(IL_OFFSET gtLclILoffs = BAD_IL_OFFSET;) // instr offset of ref (only for JIT dumps)
// Multireg support
bool IsMultiReg() const
assert(OperIsScalarLocal(oper));
}
+ GenTreeLclVar(var_types type, unsigned lclNum, GenTree* data)
+ : GenTreeLclVarCommon(GT_STORE_LCL_VAR, type, lclNum, data)
+ {
+ }
+
#if DEBUGGABLE_GENTREE
GenTreeLclVar() : GenTreeLclVarCommon()
{
SetLayout(layout);
}
+ GenTreeLclFld(var_types type, unsigned lclNum, unsigned lclOffs, GenTree* data, ClassLayout* layout)
+ : GenTreeLclVarCommon(GT_STORE_LCL_FLD, type, lclNum, data), m_lclOffs(static_cast<uint16_t>(lclOffs))
+ {
+ assert(lclOffs <= UINT16_MAX);
+ SetLayout(layout);
+ }
+
uint16_t GetLclOffs() const
{
return m_lclOffs;
inline bool GenTree::OperIsBlkOp()
{
- return ((gtOper == GT_ASG) && varTypeIsStruct(AsOp()->gtOp1)) || OperIsStoreBlk();
+ if (OperIs(GT_STORE_DYN_BLK))
+ {
+ return true;
+ }
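+ // Any struct-typed assignment or store (e.g. a TYP_STRUCT STORE_LCL_VAR
+ // or STORE_BLK) is a block op; non-struct stores are not.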
+ if (OperIs(GT_ASG) || OperIsStore())
+ {
+ return varTypeIsStruct(this);
+ }
+
+ return false;
}
inline bool GenTree::OperIsInitBlkOp()
{
return false;
}
- GenTree* src;
- if (gtOper == GT_ASG)
- {
- src = gtGetOp2();
- }
- else
- {
- src = AsBlk()->Data()->gtSkipReloadOrCopy();
- }
- return src->OperIsInitVal() || src->IsIntegralConst();
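+ // An init block store is identified by its TYP_INT source (the init
+ // pattern), as the assert below cross-checks.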
+ GenTree* src = Data();
+ bool isInitBlk = src->TypeIs(TYP_INT);
+ assert(isInitBlk == src->gtSkipReloadOrCopy()->IsInitVal());
+
+ return isInitBlk;
}
inline bool GenTree::OperIsCopyBlkOp()
inline GenTree*& GenTree::Data()
{
- assert(OperIsStore() && (OperIsIndir() || OperIsLocal()));
- return OperIsLocalStore() ? AsLclVarCommon()->Data() : AsIndir()->Data();
+ assert(OperIsStore() || OperIs(GT_STORE_DYN_BLK, GT_ASG));
+ return OperIsLocalStore() ? AsLclVarCommon()->Data() : static_cast<GenTreeOp*>(this)->gtOp2;
}
inline GenTree* GenTree::gtEffectiveVal(bool commaOnly /* = false */)
{
GenTree* tree = *use;
- if (tree->OperIsLocal() || tree->OperIs(GT_LCL_ADDR))
+ if (tree->OperIsAnyLocal())
{
unsigned int lclNum = tree->AsLclVarCommon()->GetLclNum();
unsigned int shadowLclNum = m_compiler->gsShadowVarInfo[lclNum].shadowCopy;
GTNODE(PHI_ARG , GenTreePhiArg ,0,GTK_LEAF) // phi(phiarg, phiarg, phiarg)
GTNODE(LCL_VAR , GenTreeLclVar ,0,GTK_LEAF) // local variable
GTNODE(LCL_FLD , GenTreeLclFld ,0,GTK_LEAF) // field in a non-primitive variable
-GTNODE(STORE_LCL_VAR , GenTreeLclVar ,0,GTK_UNOP|GTK_NOVALUE) // store to local variable
-GTNODE(STORE_LCL_FLD , GenTreeLclFld ,0,GTK_UNOP|GTK_NOVALUE) // store to a part of the variable
+GTNODE(STORE_LCL_VAR , GenTreeLclVar ,0,GTK_UNOP|GTK_EXOP|GTK_NOVALUE|GTK_STORE) // store to local variable
+GTNODE(STORE_LCL_FLD , GenTreeLclFld ,0,GTK_UNOP|GTK_EXOP|GTK_NOVALUE|GTK_STORE) // store to a part of the variable
GTNODE(LCL_ADDR , GenTreeLclFld ,0,GTK_LEAF) // local address
//-----------------------------------------------------------------------------
GTNODE(INTRINSIC , GenTreeIntrinsic ,0,GTK_BINOP|GTK_EXOP)
+GTNODE(ASG , GenTreeOp ,0,GTK_BINOP|DBK_NOTLIR)
GTNODE(LOCKADD , GenTreeOp ,0,GTK_BINOP|GTK_NOVALUE|DBK_NOTHIR)
GTNODE(XAND , GenTreeOp ,0,GTK_BINOP)
GTNODE(XORR , GenTreeOp ,0,GTK_BINOP)
GTNODE(BOUNDS_CHECK , GenTreeBoundsChk ,0,GTK_BINOP|GTK_EXOP|GTK_NOVALUE) // a bounds check - for arrays/spans/SIMDs/HWINTRINSICs
-GTNODE(IND , GenTreeIndir ,0,GTK_UNOP) // Load indirection
-GTNODE(STOREIND , GenTreeStoreInd ,0,GTK_BINOP|GTK_NOVALUE) // Store indirection
-GTNODE(BLK , GenTreeBlk ,0,GTK_UNOP|GTK_EXOP) // Struct load
-GTNODE(STORE_BLK , GenTreeBlk ,0,GTK_BINOP|GTK_EXOP|GTK_NOVALUE) // Struct store
-GTNODE(STORE_DYN_BLK , GenTreeStoreDynBlk ,0,GTK_SPECIAL|GTK_NOVALUE) // Dynamically sized block store, with native uint size
-GTNODE(NULLCHECK , GenTreeIndir ,0,GTK_UNOP|GTK_NOVALUE) // Null checks the source
+GTNODE(IND , GenTreeIndir ,0,GTK_UNOP) // Load indirection
+GTNODE(STOREIND , GenTreeStoreInd ,0,GTK_BINOP|GTK_EXOP|GTK_NOVALUE|GTK_STORE) // Store indirection
+GTNODE(BLK , GenTreeBlk ,0,GTK_UNOP|GTK_EXOP) // Struct load
+GTNODE(STORE_BLK , GenTreeBlk ,0,GTK_BINOP|GTK_EXOP|GTK_NOVALUE|GTK_STORE) // Struct store
+GTNODE(STORE_DYN_BLK , GenTreeStoreDynBlk ,0,GTK_SPECIAL|GTK_NOVALUE) // Dynamically sized block store, with native uint size
+GTNODE(NULLCHECK , GenTreeIndir ,0,GTK_UNOP|GTK_NOVALUE) // Null checks the source
GTNODE(ARR_LENGTH , GenTreeArrLen ,0,GTK_UNOP|GTK_EXOP) // single-dimension (SZ) array length
GTNODE(MDARR_LENGTH , GenTreeMDArr ,0,GTK_UNOP|GTK_EXOP) // multi-dimension (MD) array length of a specific dimension
GTNODE(ROL , GenTreeOp ,0,GTK_BINOP)
GTNODE(ROR , GenTreeOp ,0,GTK_BINOP)
-GTNODE(ASG , GenTreeOp ,0,GTK_BINOP|DBK_NOTLIR)
GTNODE(EQ , GenTreeOp ,0,GTK_BINOP)
GTNODE(NE , GenTreeOp ,0,GTK_BINOP)
GTNODE(LT , GenTreeOp ,0,GTK_BINOP)
CONFIG_INTEGER(JitStressProcedureSplitting, W("JitStressProcedureSplitting"), 0) // Always split after the first basic
// block. Skips functions with EH
// for simplicity.
+CONFIG_INTEGER(JitStressMorphStores, W("JitStressMorphStores"), 0) // Morph trees after assignment rationalization
CONFIG_INTEGER(JitStressRegs, W("JitStressRegs"), 0)
CONFIG_STRING(JitStressRegsRange, W("JitStressRegsRange")) // Only apply JitStressRegs to methods in this hash range
else
{
assert(lastNode->gtNext == nullptr);
- assert(lastNode->OperIsLocal() || lastNode->OperIs(GT_LCL_ADDR));
+ assert(lastNode->OperIsAnyLocal());
}
firstNode->gtPrev = nullptr;
fgWalkResult PostOrderVisit(GenTree** use, GenTree* user)
{
GenTree* node = *use;
- if (node->OperIsLocal() || node->OperIs(GT_LCL_ADDR))
+ if (node->OperIsAnyLocal())
{
SequenceLocal(node->AsLclVarCommon());
}
MorphLocalField(node, user);
}
- if (node->OperIsLocal() || node->OperIs(GT_LCL_ADDR))
+ if (node->OperIsAnyLocal())
{
unsigned const lclNum = node->AsLclVarCommon()->GetLclNum();
LclVarDsc* const varDsc = m_compiler->lvaGetDesc(lclNum);
{
for (GenTree* cur = stmt->GetTreeListEnd(); cur != nullptr;)
{
- assert(cur->OperIsLocal() || cur->OperIs(GT_LCL_ADDR));
+ assert(cur->OperIsAnyLocal());
bool isDef = ((cur->gtFlags & GTF_VAR_DEF) != 0) && ((cur->gtFlags & GTF_VAR_USEASG) == 0);
bool conditional = cur != dst;
// Ignore conditional defs that would otherwise
{
for (GenTree* cur = stmt->GetTreeListEnd(); cur != nullptr;)
{
- assert(cur->OperIsLocal() || cur->OperIs(GT_LCL_ADDR));
+ assert(cur->OperIsAnyLocal());
if (!fgComputeLifeLocal(life, keepAliveVars, cur))
{
cur = cur->gtPrev;
unsigned lclNum = comp->lvaGrabTemp(true DEBUGARG("Lowering is creating a new local variable"));
comp->lvaTable[lclNum].lvType = rhs->TypeGet();
- GenTreeLclVar* store = comp->gtNewStoreLclVar(lclNum, rhs);
+ GenTreeLclVar* store = comp->gtNewStoreLclVarNode(lclNum, rhs);
switchBBRange.InsertAfter(node, store);
switchBBRange.Remove(node);
{
comp->lvaSetStruct(tmpLclNum, comp->lvaGetDesc(lclNum)->GetLayout(), false);
}
- GenTreeLclVar* storeLclVar = comp->gtNewStoreLclVar(tmpLclNum, value);
+ GenTreeLclVar* storeLclVar = comp->gtNewStoreLclVarNode(tmpLclNum, value);
BlockRange().InsertBefore(insertTempBefore, LIR::SeqTree(comp, storeLclVar));
ContainCheckRange(value, storeLclVar);
LowerNode(storeLclVar);
comp->lvaSetVarDoNotEnregister(spillNum DEBUGARG(DoNotEnregisterReason::LocalField));
CORINFO_CLASS_HANDLE retClsHnd = call->gtRetClsHnd;
comp->lvaSetStruct(spillNum, retClsHnd, false);
- GenTreeLclFld* spill = new (comp, GT_STORE_LCL_FLD) GenTreeLclFld(GT_STORE_LCL_FLD, call->gtType, spillNum, 0);
- spill->gtOp1 = call;
- spill->gtFlags |= GTF_VAR_DEF;
+ GenTreeLclFld* spill = comp->gtNewStoreLclFldNode(spillNum, call->TypeGet(), 0, call);
BlockRange().InsertAfter(call, spill);
ContainCheckStoreLoc(spill);
// --------------------------------------------------------
// InlinedCallFrame.m_pCallSiteSP = @RSP;
- GenTreeLclFld* storeSP = new (comp, GT_STORE_LCL_FLD)
- GenTreeLclFld(GT_STORE_LCL_FLD, TYP_I_IMPL, comp->lvaInlinedPInvokeFrameVar, callFrameInfo.offsetOfCallSiteSP);
- storeSP->gtOp1 = PhysReg(REG_SPBASE);
- storeSP->gtFlags |= GTF_VAR_DEF;
+ GenTree* spValue = PhysReg(REG_SPBASE);
+ GenTreeLclFld* storeSP = comp->gtNewStoreLclFldNode(comp->lvaInlinedPInvokeFrameVar, TYP_I_IMPL,
+ callFrameInfo.offsetOfCallSiteSP, spValue);
assert(inlinedPInvokeDsc->lvDoNotEnregister);
firstBlockRange.InsertBefore(insertionPoint, LIR::SeqTree(comp, storeSP));
// --------------------------------------------------------
// InlinedCallFrame.m_pCalleeSavedEBP = @RBP;
- GenTreeLclFld* storeFP =
- new (comp, GT_STORE_LCL_FLD) GenTreeLclFld(GT_STORE_LCL_FLD, TYP_I_IMPL, comp->lvaInlinedPInvokeFrameVar,
- callFrameInfo.offsetOfCalleeSavedFP);
+ GenTree* fpValue = PhysReg(REG_FPBASE);
+ GenTreeLclFld* storeFP = comp->gtNewStoreLclFldNode(comp->lvaInlinedPInvokeFrameVar, TYP_I_IMPL,
+ callFrameInfo.offsetOfCalleeSavedFP, fpValue);
assert(inlinedPInvokeDsc->lvDoNotEnregister);
- storeFP->gtOp1 = PhysReg(REG_FPBASE);
- storeFP->gtFlags |= GTF_VAR_DEF;
-
firstBlockRange.InsertBefore(insertionPoint, LIR::SeqTree(comp, storeFP));
DISPTREERANGE(firstBlockRange, storeFP);
#endif // !defined(TARGET_ARM)
if (src != nullptr)
{
// Store into InlinedCallFrame.m_Datum, the offset of which is given by offsetOfCallTarget.
- GenTreeLclFld* store =
- new (comp, GT_STORE_LCL_FLD) GenTreeLclFld(GT_STORE_LCL_FLD, TYP_I_IMPL, comp->lvaInlinedPInvokeFrameVar,
- callFrameInfo.offsetOfCallTarget);
- store->gtOp1 = src;
- store->gtFlags |= GTF_VAR_DEF;
+ GenTreeLclFld* store = comp->gtNewStoreLclFldNode(comp->lvaInlinedPInvokeFrameVar, TYP_I_IMPL,
+ callFrameInfo.offsetOfCallTarget, src);
InsertTreeBeforeAndContainCheck(insertBefore, store);
}
// ----------------------------------------------------------------------------------
// InlinedCallFrame.m_pCallSiteSP = SP
- GenTreeLclFld* storeCallSiteSP = new (comp, GT_STORE_LCL_FLD)
- GenTreeLclFld(GT_STORE_LCL_FLD, TYP_I_IMPL, comp->lvaInlinedPInvokeFrameVar, callFrameInfo.offsetOfCallSiteSP);
-
- storeCallSiteSP->gtOp1 = PhysReg(REG_SPBASE);
- storeCallSiteSP->gtFlags |= GTF_VAR_DEF;
+ GenTree* callSiteSP = PhysReg(REG_SPBASE);
+ GenTreeLclFld* storeCallSiteSP = comp->gtNewStoreLclFldNode(comp->lvaInlinedPInvokeFrameVar, TYP_I_IMPL,
+ callFrameInfo.offsetOfCallSiteSP, callSiteSP);
InsertTreeBeforeAndContainCheck(insertBefore, storeCallSiteSP);
// ----------------------------------------------------------------------------------
// InlinedCallFrame.m_pCallerReturnAddress = &label (the address of the instruction immediately following the call)
- GenTreeLclFld* storeLab =
- new (comp, GT_STORE_LCL_FLD) GenTreeLclFld(GT_STORE_LCL_FLD, TYP_I_IMPL, comp->lvaInlinedPInvokeFrameVar,
- callFrameInfo.offsetOfReturnAddress);
-
- storeLab->gtOp1 = new (comp, GT_LABEL) GenTree(GT_LABEL, TYP_I_IMPL);
- storeLab->gtFlags |= GTF_VAR_DEF;
+ GenTree* label = new (comp, GT_LABEL) GenTree(GT_LABEL, TYP_I_IMPL);
+ GenTreeLclFld* storeLab = comp->gtNewStoreLclFldNode(comp->lvaInlinedPInvokeFrameVar, TYP_I_IMPL,
+ callFrameInfo.offsetOfReturnAddress, label);
InsertTreeBeforeAndContainCheck(insertBefore, storeLab);
// ----------------------------------------------------------------------------------
// InlinedCallFrame.m_pCallerReturnAddress = nullptr
- GenTreeLclFld* const storeCallSiteTracker =
- new (comp, GT_STORE_LCL_FLD) GenTreeLclFld(GT_STORE_LCL_FLD, TYP_I_IMPL, comp->lvaInlinedPInvokeFrameVar,
- callFrameInfo.offsetOfReturnAddress);
-
- GenTreeIntCon* const constantZero = new (comp, GT_CNS_INT) GenTreeIntCon(TYP_I_IMPL, 0);
-
- storeCallSiteTracker->gtOp1 = constantZero;
- storeCallSiteTracker->gtFlags |= GTF_VAR_DEF;
+ GenTreeIntCon* const zero = comp->gtNewIconNode(0, TYP_I_IMPL);
+ GenTreeLclFld* const storeCallSiteTracker = comp->gtNewStoreLclFldNode(comp->lvaInlinedPInvokeFrameVar, TYP_I_IMPL,
+ callFrameInfo.offsetOfReturnAddress, zero);
- BlockRange().InsertBefore(insertionPoint, constantZero, storeCallSiteTracker);
+ BlockRange().InsertBefore(insertionPoint, zero, storeCallSiteTracker);
ContainCheckStoreLoc(storeCallSiteTracker);
#endif // USE_PER_FRAME_PINVOKE_INIT
}
{
if (!arg->IsValue() && !arg->OperIsPutArgStk())
{
- assert(arg->OperIsStore() || arg->OperIsCopyBlkOp());
+ assert(arg->OperIsStore());
return;
}
//
void Lowering::TryRetypingFloatingPointStoreToIntegerStore(GenTree* store)
{
- assert(store->OperIsStore() && !store->OperIsAtomicOp());
+ assert(store->OperIsStore());
if (!varTypeIsFloating(store))
{
//
regMaskTP LinearScan::getKillSetForBlockStore(GenTreeBlk* blkNode)
{
- assert(blkNode->OperIsStore());
+ assert(blkNode->OperIsStoreBlk());
regMaskTP killMask = RBM_NONE;
bool isCopyBlk = varTypeIsStruct(blkNode->Data());
// This must be one of the operand types that are neither contained nor produce a value.
// Stores and void-typed operands may be encountered when processing call nodes, which contain
// pointers to argument setup stores.
- assert(operand->OperIsStore() || operand->OperIsBlkOp() || operand->OperIsPutArgStk() ||
- operand->TypeIs(TYP_VOID));
+ assert(operand->OperIsStore() || operand->OperIsPutArgStk() || operand->TypeIs(TYP_VOID));
return 0;
}
}
GenTree* argObj = argx->gtEffectiveVal(true /*commaOnly*/);
bool makeOutArgCopy = false;
- if (isStructArg && varTypeIsStruct(argObj) && !argObj->OperIs(GT_ASG, GT_MKREFANY, GT_FIELD_LIST))
+ if (isStructArg && !reMorphing && !argObj->OperIs(GT_MKREFANY))
{
unsigned originalSize;
if (argObj->TypeGet() == TYP_STRUCT)
{
originalSize = argObj->GetLayout(this)->GetSize();
}
else
{
originalSize = genTypeSize(argx);
}
+ assert(argx->TypeGet() == arg.GetSignatureType());
assert(originalSize == info.compCompHnd->getClassSize(arg.GetSignatureClassHandle()));
// First, handle the case where the argument is passed by reference.
#pragma hdrstop
#endif
-// return op that is the store equivalent of the given load opcode
-genTreeOps storeForm(genTreeOps loadForm)
-{
- switch (loadForm)
- {
- case GT_LCL_VAR:
- return GT_STORE_LCL_VAR;
- case GT_LCL_FLD:
- return GT_STORE_LCL_FLD;
- default:
- noway_assert(!"not a data load opcode\n");
- unreached();
- }
-}
-
-// copy the flags determined by mask from src to dst
-void copyFlags(GenTree* dst, GenTree* src, GenTreeFlags mask)
-{
- dst->gtFlags &= ~mask;
- dst->gtFlags |= (src->gtFlags & mask);
-}
-
// RewriteNodeAsCall : Replace the given tree node by a GT_CALL.
//
// Arguments:
for (Statement* const stmt : block->Statements())
{
ValidateStatement(stmt, block);
-
- for (GenTree* const tree : stmt->TreeList())
- {
- // QMARK nodes should have been removed before this phase.
- assert(!tree->OperIs(GT_QMARK));
-
- if (tree->OperGet() == GT_ASG)
- {
- if (tree->gtGetOp1()->OperGet() == GT_LCL_VAR)
- {
- assert(tree->gtGetOp1()->gtFlags & GTF_VAR_DEF);
- }
- else if (tree->gtGetOp2()->OperGet() == GT_LCL_VAR)
- {
- assert(!(tree->gtGetOp2()->gtFlags & GTF_VAR_DEF));
- }
-
- if (tree->OperIsInitBlkOp())
- {
- // No SIMD types are allowed for InitBlks (including zero-inits).
- assert(tree->TypeIs(TYP_STRUCT) && tree->gtGetOp1()->TypeIs(TYP_STRUCT));
- }
- }
- }
}
}
}
#endif // DEBUG
-static void RewriteAssignmentIntoStoreLclCore(GenTreeOp* assignment,
- GenTree* location,
- GenTree* value,
- genTreeOps locationOp)
-{
- assert(assignment != nullptr);
- assert(assignment->OperGet() == GT_ASG);
- assert(location != nullptr);
- assert(value != nullptr);
-
- genTreeOps storeOp = storeForm(locationOp);
-
-#ifdef DEBUG
- JITDUMP("rewriting asg(%s, X) to %s(X)\n", GenTree::OpName(locationOp), GenTree::OpName(storeOp));
-#endif // DEBUG
-
- assignment->SetOper(storeOp);
- GenTreeLclVarCommon* store = assignment->AsLclVarCommon();
-
- GenTreeLclVarCommon* var = location->AsLclVarCommon();
- store->SetLclNum(var->GetLclNum());
- store->SetSsaNum(var->GetSsaNum());
-
- if (locationOp == GT_LCL_FLD)
- {
- store->AsLclFld()->SetLclOffs(var->AsLclFld()->GetLclOffs());
- store->AsLclFld()->SetLayout(var->AsLclFld()->GetLayout());
- }
-
- copyFlags(store, var, (GTF_LIVENESS_MASK | GTF_VAR_MULTIREG));
- store->gtFlags &= ~GTF_REVERSE_OPS;
-
- store->gtType = var->TypeGet();
- store->gtOp1 = value;
-
- DISPNODE(store);
- JITDUMP("\n");
-}
-
-void Rationalizer::RewriteAssignmentIntoStoreLcl(GenTreeOp* assignment)
-{
- assert(assignment != nullptr);
- assert(assignment->OperGet() == GT_ASG);
-
- GenTree* location = assignment->gtGetOp1();
- GenTree* value = assignment->gtGetOp2();
-
- RewriteAssignmentIntoStoreLclCore(assignment, location, value, location->OperGet());
-}
-
-void Rationalizer::RewriteAssignment(LIR::Use& use)
-{
- assert(use.IsInitialized());
-
- GenTreeOp* assignment = use.Def()->AsOp();
- assert(assignment->OperGet() == GT_ASG);
-
- GenTree* location = assignment->gtGetOp1();
- GenTree* value = assignment->gtGetOp2();
-
- genTreeOps locationOp = location->OperGet();
-
- switch (locationOp)
- {
- case GT_LCL_VAR:
- case GT_LCL_FLD:
- RewriteAssignmentIntoStoreLclCore(assignment, location, value, locationOp);
- BlockRange().Remove(location);
- break;
-
- case GT_IND:
- {
- GenTreeStoreInd* store =
- new (comp, GT_STOREIND) GenTreeStoreInd(location->TypeGet(), location->gtGetOp1(), value);
-
- copyFlags(store, assignment, GTF_ALL_EFFECT);
- copyFlags(store, location, GTF_IND_FLAGS);
-
- // TODO: JIT dump
-
- // Remove the GT_IND node and replace the assignment node with the store
- BlockRange().Remove(location);
- BlockRange().InsertBefore(assignment, store);
- use.ReplaceWith(store);
- BlockRange().Remove(assignment);
- }
- break;
-
- case GT_BLK:
- {
- assert(varTypeIsStruct(location));
- JITDUMP("Rewriting GT_ASG(%s(X), Y) to STORE_BLK(X,Y):\n", GenTree::OpName(location->gtOper));
-
- GenTreeBlk* storeBlk = location->AsBlk();
- storeBlk->SetOperRaw(GT_STORE_BLK);
- storeBlk->gtFlags &= ~GTF_DONT_CSE;
- storeBlk->gtFlags |= (assignment->gtFlags & (GTF_ALL_EFFECT | GTF_DONT_CSE));
- storeBlk->AsBlk()->Data() = value;
-
- // Remove the block node from its current position and replace the assignment node with it
- // (now in its store form).
- BlockRange().Remove(storeBlk);
- BlockRange().InsertBefore(assignment, storeBlk);
- use.ReplaceWith(storeBlk);
- BlockRange().Remove(assignment);
- DISPTREERANGE(BlockRange(), use.Def());
- JITDUMP("\n");
- }
- break;
-
- default:
- unreached();
- break;
- }
-}
-
Compiler::fgWalkResult Rationalizer::RewriteNode(GenTree** useEdge, Compiler::GenTreeStack& parentStack)
{
assert(useEdge != nullptr);
assert(node == use.Def());
switch (node->OperGet())
{
- case GT_ASG:
- RewriteAssignment(use);
- break;
-
case GT_CALL:
// In linear order we no longer need to retain the stores in early
// args as these have now been sequenced.
}
else
{
- if (!node->OperIsStore())
+ if (((node->gtFlags & GTF_ASG) != 0) && !node->OperRequiresAsgFlag())
{
- // Clear the GTF_ASG flag for all nodes but stores
+ // Clear the GTF_ASG flag for all nodes that do not require it
node->gtFlags &= ~GTF_ASG;
}
virtual PhaseStatus DoPhase() override;
- static void RewriteAssignmentIntoStoreLcl(GenTreeOp* assignment);
-
private:
inline LIR::Range& BlockRange() const
{
void RewriteIntrinsicAsUserCall(GenTree** use, Compiler::GenTreeStack& parents);
- // Other transformations
- void RewriteAssignment(LIR::Use& use);
-
#ifdef TARGET_ARM64
void RewriteSubLshDiv(GenTree** use);
#endif
// Is the operation a write? If so, set `node` to the location that is being written to.
bool isWrite = false;
- if (node->OperIs(GT_ASG))
- {
- isWrite = true;
- node = node->gtGetOp1();
- }
- else if (node->OperIsStore() || node->OperIs(GT_MEMORYBARRIER))
+ if (node->OperIsStore() || node->OperIs(GT_STORE_DYN_BLK, GT_MEMORYBARRIER))
{
isWrite = true;
}