From 8adbca4855b6a12a057c6176d1e909c66cb1f01d Mon Sep 17 00:00:00 2001
From: SingleAccretion <62474226+SingleAccretion@users.noreply.github.com>
Date: Tue, 9 May 2023 20:54:44 +0300
Subject: [PATCH] Move assignment rationalization to before global morph
 (#85872)

* Fix printing of volatile/unaligned

* Fix missing GTF_ASG

* QMARK expansion

* Global morph

* Tentatively fix stress

* Another stress fix

The assert is too strict; a better version is coming in future changes.
---
 src/coreclr/jit/assertionprop.cpp | 251 +++++++++-------
 src/coreclr/jit/compiler.cpp      |   4 +-
 src/coreclr/jit/compiler.h        |  14 +-
 src/coreclr/jit/fgdiagnostic.cpp  |   6 +
 src/coreclr/jit/fgprofile.cpp     |   2 +-
 src/coreclr/jit/flowgraph.cpp     |  72 +----
 src/coreclr/jit/gentree.cpp       | 141 ++++-----
 src/coreclr/jit/gentree.h         |  35 +--
 src/coreclr/jit/jitconfigvalues.h |   1 -
 src/coreclr/jit/lclvars.cpp       |  45 +--
 src/coreclr/jit/morph.cpp         | 608 ++++++++++++++++++--------------------
 src/coreclr/jit/morphblock.cpp    | 294 +++++++-----------
 src/coreclr/jit/patchpoint.cpp    |   4 +-
 src/coreclr/jit/promotion.cpp     |   1 +
 14 files changed, 652 insertions(+), 826 deletions(-)

diff --git a/src/coreclr/jit/assertionprop.cpp b/src/coreclr/jit/assertionprop.cpp
index 978e169..0b8e04b 100644
--- a/src/coreclr/jit/assertionprop.cpp
+++ b/src/coreclr/jit/assertionprop.cpp
@@ -1054,7 +1054,7 @@ AssertionIndex Compiler::optCreateAssertion(GenTree* op1,
     //
     // Are we making an assertion about a local variable?
     //
-    else if (op1->gtOper == GT_LCL_VAR)
+    else if (op1->OperIsScalarLocal())
     {
         unsigned const   lclNum = op1->AsLclVarCommon()->GetLclNum();
         LclVarDsc* const lclVar = lvaGetDesc(lclNum);
@@ -2255,34 +2255,25 @@ void Compiler::optAssertionGen(GenTree* tree)
     // the assertion is true after the tree is processed
     bool          assertionProven = true;
     AssertionInfo assertionInfo;
-    switch (tree->gtOper)
+    switch (tree->OperGet())
     {
-        case GT_ASG:
-            // An indirect store - we can create a non-null assertion. Note that we do not lose out
-            // on the dataflow assertions here as local propagation only deals with LCL_VAR LHSs.
-            if (tree->AsOp()->gtGetOp1()->OperIsIndir())
+        case GT_STORE_LCL_VAR:
+            // VN takes care of non local assertions for assignments and data flow.
+            if (optLocalAssertionProp)
             {
-                assertionInfo = optCreateAssertion(tree->AsOp()->gtGetOp1()->AsIndir()->Addr(), nullptr, OAK_NOT_EQUAL);
+                assertionInfo = optCreateAssertion(tree, tree->AsLclVar()->Data(), OAK_EQUAL);
             }
-            // VN takes care of non local assertions for assignments and data flow.
-            else if (optLocalAssertionProp)
+            else
             {
-                assertionInfo = optCreateAssertion(tree->AsOp()->gtOp1, tree->AsOp()->gtOp2, OAK_EQUAL);
+                assertionInfo = optAssertionGenPhiDefn(tree);
             }
             break;

-        case GT_STORE_LCL_VAR:
-            assertionInfo = optAssertionGenPhiDefn(tree);
-            break;
-
         case GT_BLK:
         case GT_IND:
         case GT_STOREIND:
         case GT_STORE_BLK:
-            // R-value indirections create non-null assertions, but not all indirections are R-values.
-            // Those under ADDR nodes or on the LHS of ASGs are "locations", and will not end up
-            // dereferencing their operands. We cannot reliably detect them here, however, and so
-            // will have to rely on the conservative approximation of the GTF_NO_CSE flag.
+            // Dynamic block copy sources should not generate non-null assertions; we detect them via NO_CSE.
if (tree->CanCSE()) { assertionInfo = optCreateAssertion(tree->AsIndir()->Addr(), nullptr, OAK_NOT_EQUAL); @@ -2292,7 +2283,7 @@ void Compiler::optAssertionGen(GenTree* tree) case GT_ARR_LENGTH: case GT_MDARR_LENGTH: case GT_MDARR_LOWER_BOUND: - // An array meta-data access is an (always R-value) indirection (but doesn't derive from GenTreeIndir). + // An array meta-data access is an indirection (but doesn't derive from GenTreeIndir). assertionInfo = optCreateAssertion(tree->AsArrCommon()->ArrRef(), nullptr, OAK_NOT_EQUAL); break; @@ -3276,6 +3267,7 @@ GenTree* Compiler::optAssertionProp_LclVar(ASSERT_VALARG_TP assertions, GenTreeL // If we have a var definition then bail or // If this is the address of the var then it will have the GTF_DONT_CSE // flag set and we don't want to assertion prop on it. + // TODO-ASG: delete. if (tree->gtFlags & (GTF_VAR_DEF | GTF_DONT_CSE)) { return nullptr; @@ -3381,6 +3373,7 @@ GenTree* Compiler::optAssertionProp_LclFld(ASSERT_VALARG_TP assertions, GenTreeL // If we have a var definition then bail or // If this is the address of the var then it will have the GTF_DONT_CSE // flag set and we don't want to assertion prop on it. + // TODO-ASG: delete. if (tree->gtFlags & (GTF_VAR_DEF | GTF_DONT_CSE)) { return nullptr; @@ -3420,108 +3413,114 @@ GenTree* Compiler::optAssertionProp_LclFld(ASSERT_VALARG_TP assertions, GenTreeL } //------------------------------------------------------------------------ -// optAssertionProp_Asg: Try and optimize an assignment via assertions. +// optAssertionProp_LocalStore: Try and optimize a local store via assertions. // -// Propagates ZEROOBJ for the RHS. +// Propagates ZEROOBJ for the value. Suppresses no-op stores. // // Arguments: // assertions - set of live assertions -// asg - the store to optimize -// stmt - statement containing "asg" +// store - the store to optimize +// stmt - statement containing "store" // // Returns: -// Updated "asg", or "nullptr" +// Updated "store", or "nullptr" // // Notes: // stmt may be nullptr during local assertion prop // -GenTree* Compiler::optAssertionProp_Asg(ASSERT_VALARG_TP assertions, GenTreeOp* asg, Statement* stmt) +GenTree* Compiler::optAssertionProp_LocalStore(ASSERT_VALARG_TP assertions, GenTreeLclVarCommon* store, Statement* stmt) { - GenTree* rhs = asg->gtGetOp2(); + if (!optLocalAssertionProp) + { + // No ZEROOBJ assertions in global propagation. + return nullptr; + } - // Try and simplify the RHS. + // Try and simplify the value. // - bool madeChanges = false; - if (asg->OperIsCopyBlkOp()) + bool madeChanges = false; + GenTree* value = store->Data(); + if (value->TypeIs(TYP_STRUCT) && optZeroObjAssertionProp(value, assertions)) { - if (optZeroObjAssertionProp(rhs, assertions)) - { - madeChanges = true; - rhs = asg->gtGetOp2(); - } + madeChanges = true; } - // If we're assigning a value to a lcl/field that already has - // that value, suppress the assignment. + // If we're storing a value to a lcl/field that already has that value, suppress the store. // // For now we just check for zero. // - // In particular we want to make sure that for struct S the - // "redundant init" pattern + // In particular we want to make sure that for struct S the "redundant init" pattern // // S s = new S(); // s.field = 0; // // does not kill the zerobj assertion for s. 
    //
-    if (optLocalAssertionProp)
+    unsigned const       dstLclNum      = store->GetLclNum();
+    bool const           dstLclIsStruct = lvaGetDesc(dstLclNum)->TypeGet() == TYP_STRUCT;
+    AssertionIndex const dstIndex =
+        optLocalAssertionIsEqualOrNotEqual(O1K_LCLVAR, dstLclNum, dstLclIsStruct ? O2K_ZEROOBJ : O2K_CONST_INT, 0,
+                                           assertions);
+    if (dstIndex != NO_ASSERTION_INDEX)
     {
-        GenTreeLclVarCommon* lhsVarTree = nullptr;
-        if (asg->DefinesLocal(this, &lhsVarTree))
+        AssertionDsc* const dstAssertion = optGetAssertion(dstIndex);
+        if ((dstAssertion->assertionKind == OAK_EQUAL) && (dstAssertion->op2.u1.iconVal == 0))
         {
-            unsigned const   lhsLclNum      = lhsVarTree->GetLclNum();
-            LclVarDsc* const lhsLclDsc      = lvaGetDesc(lhsLclNum);
-            bool const       lhsLclIsStruct = lhsLclDsc->TypeGet() == TYP_STRUCT;
-            AssertionIndex const lhsIndex =
-                optLocalAssertionIsEqualOrNotEqual(O1K_LCLVAR, lhsLclNum, lhsLclIsStruct ? O2K_ZEROOBJ : O2K_CONST_INT,
-                                                   0, assertions);
-            if (lhsIndex != NO_ASSERTION_INDEX)
+            // Destination is zero. Is value a literal zero? If so we don't need the store.
+            //
+            // The latter part of the if below is a heuristic.
+            //
+            // If we eliminate a zero store for integral lclVars it can lead to unnecessary
+            // cloning. We need to make sure `optExtractInitTestIncr` still sees zero loop
+            // iter lower bounds.
+            //
+            if (value->IsIntegralConst(0) && (dstLclIsStruct || varTypeIsGC(store)))
             {
-                AssertionDsc* const lhsAssertion = optGetAssertion(lhsIndex);
-                if ((lhsAssertion->assertionKind == OAK_EQUAL) && (lhsAssertion->op2.u1.iconVal == 0))
-                {
-                    bool canOptimize = false;
-
-                    // LHS is zero. Is RHS a literal zero? If so we don't need the assignment.
-                    //
-                    // The latter part of the if below is a heuristic.
-                    //
-                    // If we elimiate a zero assignment for integral lclVars it can lead to
-                    // unnecessary cloning. We need to make sure `optExtractInitTestIncr`
-                    // still sees zero loop iter lower bounds.
-                    //
-                    if (rhs->IsIntegralConst(0) && (lhsLclIsStruct || varTypeIsGC(lhsVarTree)))
-                    {
-                        JITDUMP(
-                            "[%06u] is assigning a constant zero to a struct field or gc local that is already zero\n",
-                            dspTreeID(asg));
-                        JITDUMPEXEC(optPrintAssertion(lhsAssertion));
-                        canOptimize = true;
-                    }
+                JITDUMP("[%06u] is assigning a constant zero to a struct field or gc local that is already zero\n",
+                        dspTreeID(store));
+                JITDUMPEXEC(optPrintAssertion(dstAssertion));

-                    if (canOptimize)
-                    {
-                        GenTree* list = nullptr;
-                        gtExtractSideEffList(asg, &list, GTF_SIDE_EFFECT, /* ignoreRoot */ true);
-
-                        if (list != nullptr)
-                        {
-                            return optAssertionProp_Update(list, asg, stmt);
-                        }
-
-                        asg->gtBashToNOP();
-                        return optAssertionProp_Update(asg, asg, stmt);
-                    }
-                }
+                store->gtBashToNOP();
+                return optAssertionProp_Update(store, store, stmt);
             }
         }
     }

-    // We might have simplified the RHS but were not able to remove the assignment
+    // We might have simplified the value but were not able to remove the store
     //
     if (madeChanges)
     {
-        return optAssertionProp_Update(asg, asg, stmt);
+        return optAssertionProp_Update(store, store, stmt);
+    }
+
+    return nullptr;
+}
+
+//------------------------------------------------------------------------
+// optAssertionProp_BlockStore: Try and optimize a struct store via assertions.
+//
+// Propagates ZEROOBJ for the value. Propagates non-null assertions.
+// +// Arguments: +// assertions - set of live assertions +// store - the store to optimize +// stmt - statement containing "store" +// +// Returns: +// Updated "store", or "nullptr" +// +// Notes: +// stmt may be nullptr during local assertion prop +// +GenTree* Compiler::optAssertionProp_BlockStore(ASSERT_VALARG_TP assertions, GenTreeBlk* store, Statement* stmt) +{ + assert(store->OperIs(GT_STORE_BLK)); + + bool didZeroObjProp = optZeroObjAssertionProp(store->Data(), assertions); + bool didNonNullProp = optNonNullAssertionProp_Ind(assertions, store); + if (didZeroObjProp || didNonNullProp) + { + return optAssertionProp_Update(store, store, stmt); } return nullptr; @@ -4209,33 +4208,8 @@ GenTree* Compiler::optAssertionProp_Comma(ASSERT_VALARG_TP assertions, GenTree* // GenTree* Compiler::optAssertionProp_Ind(ASSERT_VALARG_TP assertions, GenTree* tree, Statement* stmt) { - assert(tree->OperIsIndir()); - - if (!(tree->gtFlags & GTF_EXCEPT)) + if (optNonNullAssertionProp_Ind(assertions, tree)) { - return nullptr; - } - -#ifdef DEBUG - bool vnBased = false; - AssertionIndex index = NO_ASSERTION_INDEX; -#endif - if (optAssertionIsNonNull(tree->AsIndir()->Addr(), assertions DEBUGARG(&vnBased) DEBUGARG(&index))) - { -#ifdef DEBUG - if (verbose) - { - (vnBased) ? printf("\nVN based non-null prop in " FMT_BB ":\n", compCurBB->bbNum) - : printf("\nNon-null prop for index #%02u in " FMT_BB ":\n", index, compCurBB->bbNum); - gtDispTree(tree, nullptr, nullptr, true); - } -#endif - tree->gtFlags &= ~GTF_EXCEPT; - tree->gtFlags |= GTF_IND_NONFAULTING; - - // Set this flag to prevent reordering - tree->gtFlags |= GTF_ORDER_SIDEEFF; - return optAssertionProp_Update(tree, tree, stmt); } @@ -4443,6 +4417,51 @@ GenTree* Compiler::optNonNullAssertionProp_Call(ASSERT_VALARG_TP assertions, Gen return nullptr; } +//------------------------------------------------------------------------ +// optNonNullAssertionProp_Ind: Possibly prove an indirection non-faulting. +// +// Arguments: +// assertions - Active assertions +// indir - The indirection +// +// Return Value: +// Whether the indirection was found to be non-faulting and marked as such. +// +bool Compiler::optNonNullAssertionProp_Ind(ASSERT_VALARG_TP assertions, GenTree* indir) +{ + assert(indir->OperIsIndir()); + + if (!(indir->gtFlags & GTF_EXCEPT)) + { + return false; + } + +#ifdef DEBUG + bool vnBased = false; + AssertionIndex index = NO_ASSERTION_INDEX; +#endif + if (optAssertionIsNonNull(indir->AsIndir()->Addr(), assertions DEBUGARG(&vnBased) DEBUGARG(&index))) + { +#ifdef DEBUG + if (verbose) + { + (vnBased) ? 
printf("\nVN based non-null prop in " FMT_BB ":\n", compCurBB->bbNum)
+                      : printf("\nNon-null prop for index #%02u in " FMT_BB ":\n", index, compCurBB->bbNum);
+            gtDispTree(indir, nullptr, nullptr, true);
+        }
+#endif
+        indir->gtFlags &= ~GTF_EXCEPT;
+        indir->gtFlags |= GTF_IND_NONFAULTING;
+
+        // Set this flag to prevent reordering
+        indir->gtFlags |= GTF_ORDER_SIDEEFF;
+
+        return true;
+    }
+
+    return false;
+}
+
 /*****************************************************************************
  *
  *  Given a tree consisting of a call and a set of available assertions, we
@@ -4733,8 +4752,12 @@ GenTree* Compiler::optAssertionProp(ASSERT_VALARG_TP assertions, GenTree* tree,
         case GT_LCL_FLD:
             return optAssertionProp_LclFld(assertions, tree->AsLclVarCommon(), stmt);

-        case GT_ASG:
-            return optAssertionProp_Asg(assertions, tree->AsOp(), stmt);
+        case GT_STORE_LCL_VAR:
+        case GT_STORE_LCL_FLD:
+            return optAssertionProp_LocalStore(assertions, tree->AsLclVarCommon(), stmt);
+
+        case GT_STORE_BLK:
+            return optAssertionProp_BlockStore(assertions, tree->AsBlk(), stmt);

         case GT_RETURN:
             return optAssertionProp_Return(assertions, tree->AsUnOp(), stmt);
@@ -4742,7 +4765,6 @@ GenTree* Compiler::optAssertionProp(ASSERT_VALARG_TP assertions, GenTree* tree,
         case GT_BLK:
         case GT_IND:
         case GT_STOREIND:
-        case GT_STORE_BLK:
         case GT_NULLCHECK:
         case GT_STORE_DYN_BLK:
             return optAssertionProp_Ind(assertions, tree, stmt);
@@ -5677,6 +5699,7 @@ GenTree* Compiler::optVNConstantPropOnJTrue(BasicBlock* block, GenTree* test)
 Compiler::fgWalkResult Compiler::optVNConstantPropCurStmt(BasicBlock* block, Statement* stmt, GenTree* tree)
 {
     // Don't perform const prop on expressions marked with GTF_DONT_CSE
+    // TODO-ASG: delete.
     if (!tree->CanCSE())
     {
         return WALK_CONTINUE;
diff --git a/src/coreclr/jit/compiler.cpp b/src/coreclr/jit/compiler.cpp
index bc1b7b0..b1a87a6 100644
--- a/src/coreclr/jit/compiler.cpp
+++ b/src/coreclr/jit/compiler.cpp
@@ -4755,6 +4755,8 @@ void Compiler::compCompile(void** methodCodePtr, uint32_t* methodCodeSize, JitFl
     // Locals tree list is no longer kept valid.
     fgNodeThreading = NodeThreading::None;

+    DoPhase(this, PHASE_RATIONALIZE_ASSIGNMENTS, &Compiler::fgRationalizeAssignments);
+
     // Apply the type update to implicit byref parameters; also choose (based on address-exposed
     // analysis) which implicit byref promotions to keep (requires copy to initialize) or discard.
     //
@@ -4797,8 +4799,6 @@ void Compiler::compCompile(void** methodCodePtr, uint32_t* methodCodeSize, JitFl
     };
     DoPhase(this, PHASE_MORPH_GLOBAL, morphGlobalPhase);

-    DoPhase(this, PHASE_RATIONALIZE_ASSIGNMENTS, &Compiler::fgRationalizeAssignments);
-
     // GS security checks for unsafe buffers
     //
     DoPhase(this, PHASE_GS_COOKIE, &Compiler::gsPhase);
diff --git a/src/coreclr/jit/compiler.h b/src/coreclr/jit/compiler.h
index 53cf934..4d4fb2b 100644
--- a/src/coreclr/jit/compiler.h
+++ b/src/coreclr/jit/compiler.h
@@ -1093,7 +1093,6 @@ public:
     bool lvNormalizeOnLoad() const
     {
         return varTypeIsSmall(TypeGet()) &&
-               // lvIsStructField is treated the same as the aliased local, see fgDoNormalizeOnStore.
               // OSR exposed locals were normalized on load in the Tier0 frame so must be so for OSR too.
               (lvIsParam || m_addrExposed || lvIsStructField || lvIsOSRExposedLocal);
     }
@@ -1101,7 +1100,6 @@ public:
     bool lvNormalizeOnStore() const
     {
         return varTypeIsSmall(TypeGet()) &&
-               // lvIsStructField is treated the same as the aliased local, see fgDoNormalizeOnStore.
               // OSR exposed locals were normalized on load in the Tier0 frame so must be so for OSR too.
!(lvIsParam || m_addrExposed || lvIsStructField || lvIsOSRExposedLocal); } @@ -5548,7 +5546,6 @@ public: inline void fgConvertBBToThrowBB(BasicBlock* block); bool fgCastNeeded(GenTree* tree, var_types toType); - GenTree* fgDoNormalizeOnStore(GenTree* tree); // The following check for loops that don't execute calls bool fgLoopCallMarked; @@ -5893,12 +5890,12 @@ private: void fgMakeOutgoingStructArgCopy(GenTreeCall* call, CallArg* arg); void fgMarkGlobalUses(Statement* stmt); - GenTree* fgMorphLocal(GenTreeLclVarCommon* lclNode); + GenTree* fgMorphLeafLocal(GenTreeLclVarCommon* lclNode); #ifdef TARGET_X86 GenTree* fgMorphExpandStackArgForVarArgs(GenTreeLclVarCommon* lclNode); #endif // TARGET_X86 GenTree* fgMorphExpandImplicitByRefArg(GenTreeLclVarCommon* lclNode); - GenTree* fgMorphLocalVar(GenTree* tree); + GenTree* fgMorphExpandLocal(GenTreeLclVarCommon* lclNode); public: bool fgAddrCouldBeNull(GenTree* addr); @@ -5965,7 +5962,7 @@ public: private: GenTree* fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac, bool* optAssertionPropDone = nullptr); void fgTryReplaceStructLocalWithField(GenTree* tree); - GenTree* fgOptimizeIndir(GenTreeIndir* indir); + GenTree* fgMorphFinalizeIndir(GenTreeIndir* indir); GenTree* fgOptimizeCast(GenTreeCast* cast); GenTree* fgOptimizeCastOnStore(GenTree* store); GenTree* fgOptimizeBitCast(GenTreeUnOp* bitCast); @@ -7622,19 +7619,20 @@ public: GenTree* optAssertionProp(ASSERT_VALARG_TP assertions, GenTree* tree, Statement* stmt, BasicBlock* block); GenTree* optAssertionProp_LclVar(ASSERT_VALARG_TP assertions, GenTreeLclVarCommon* tree, Statement* stmt); GenTree* optAssertionProp_LclFld(ASSERT_VALARG_TP assertions, GenTreeLclVarCommon* tree, Statement* stmt); - GenTree* optAssertionProp_Asg(ASSERT_VALARG_TP assertions, GenTreeOp* asg, Statement* stmt); + GenTree* optAssertionProp_LocalStore(ASSERT_VALARG_TP assertions, GenTreeLclVarCommon* store, Statement* stmt); + GenTree* optAssertionProp_BlockStore(ASSERT_VALARG_TP assertions, GenTreeBlk* store, Statement* stmt); GenTree* optAssertionProp_Return(ASSERT_VALARG_TP assertions, GenTreeUnOp* ret, Statement* stmt); GenTree* optAssertionProp_Ind(ASSERT_VALARG_TP assertions, GenTree* tree, Statement* stmt); GenTree* optAssertionProp_Cast(ASSERT_VALARG_TP assertions, GenTreeCast* cast, Statement* stmt); GenTree* optAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCall* call, Statement* stmt); GenTree* optAssertionProp_RelOp(ASSERT_VALARG_TP assertions, GenTree* tree, Statement* stmt); - GenTree* optAssertionProp_ConditionalOp(ASSERT_VALARG_TP assertions, GenTree* tree, Statement* stmt); GenTree* optAssertionProp_Comma(ASSERT_VALARG_TP assertions, GenTree* tree, Statement* stmt); GenTree* optAssertionProp_BndsChk(ASSERT_VALARG_TP assertions, GenTree* tree, Statement* stmt); GenTree* optAssertionPropGlobal_RelOp(ASSERT_VALARG_TP assertions, GenTree* tree, Statement* stmt); GenTree* optAssertionPropLocal_RelOp(ASSERT_VALARG_TP assertions, GenTree* tree, Statement* stmt); GenTree* optAssertionProp_Update(GenTree* newTree, GenTree* tree, Statement* stmt); GenTree* optNonNullAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCall* call); + bool optNonNullAssertionProp_Ind(ASSERT_VALARG_TP assertions, GenTree* indir); // Implied assertion functions. 
void optImpliedAssertions(AssertionIndex assertionIndex, ASSERT_TP& activeAssertions); diff --git a/src/coreclr/jit/fgdiagnostic.cpp b/src/coreclr/jit/fgdiagnostic.cpp index 4cac849..a639469 100644 --- a/src/coreclr/jit/fgdiagnostic.cpp +++ b/src/coreclr/jit/fgdiagnostic.cpp @@ -3072,6 +3072,12 @@ void Compiler::fgDebugCheckFlags(GenTree* tree) switch (tree->OperGet()) { + case GT_STORE_LCL_VAR: + case GT_STORE_LCL_FLD: + assert((tree->gtFlags & GTF_VAR_DEF) != 0); + assert(((tree->gtFlags & GTF_VAR_USEASG) != 0) == tree->IsPartialLclFld(this)); + break; + case GT_CATCH_ARG: expectedFlags |= GTF_ORDER_SIDEEFF; break; diff --git a/src/coreclr/jit/fgprofile.cpp b/src/coreclr/jit/fgprofile.cpp index 33f9223..a3a467a 100644 --- a/src/coreclr/jit/fgprofile.cpp +++ b/src/coreclr/jit/fgprofile.cpp @@ -2135,7 +2135,7 @@ public: GenTree* const tmpNode2 = compiler->gtNewLclvNode(tmpNum, TYP_REF); GenTree* const callCommaNode = compiler->gtNewOperNode(GT_COMMA, TYP_REF, helperCallNode, tmpNode2); GenTree* const tmpNode3 = compiler->gtNewLclvNode(tmpNum, TYP_REF); - GenTree* const asgNode = compiler->gtNewOperNode(GT_ASG, TYP_REF, tmpNode3, objUse->GetNode()); + GenTree* const asgNode = compiler->gtNewAssignNode(tmpNode3, objUse->GetNode()); GenTree* const asgCommaNode = compiler->gtNewOperNode(GT_COMMA, TYP_REF, asgNode, callCommaNode); // Update the call diff --git a/src/coreclr/jit/flowgraph.cpp b/src/coreclr/jit/flowgraph.cpp index 48be820..32013dc 100644 --- a/src/coreclr/jit/flowgraph.cpp +++ b/src/coreclr/jit/flowgraph.cpp @@ -1227,47 +1227,6 @@ bool Compiler::fgCastNeeded(GenTree* tree, var_types toType) return true; } -// If assigning to a local var, add a cast if the target is -// marked as NormalizedOnStore. Returns true if any change was made -GenTree* Compiler::fgDoNormalizeOnStore(GenTree* tree) -{ - // - // Only normalize the stores in the global morph phase - // - if (fgGlobalMorph) - { - noway_assert(tree->OperGet() == GT_ASG); - - GenTree* op1 = tree->AsOp()->gtOp1; - GenTree* op2 = tree->AsOp()->gtOp2; - - if (op1->gtOper == GT_LCL_VAR && genActualType(op1->TypeGet()) == TYP_INT) - { - // Small-typed arguments and aliased locals are normalized on load. - // Other small-typed locals are normalized on store. - // If it is an assignment to one of the latter, insert the cast on RHS - LclVarDsc* varDsc = lvaGetDesc(op1->AsLclVarCommon()->GetLclNum()); - - if (varDsc->lvNormalizeOnStore()) - { - noway_assert(op1->gtType <= TYP_INT); - op1->gtType = TYP_INT; - - if (fgCastNeeded(op2, varDsc->TypeGet())) - { - op2 = gtNewCastNode(TYP_INT, op2, false, varDsc->TypeGet()); - tree->AsOp()->gtOp2 = op2; - - // Propagate GTF_COLON_COND - op2->gtFlags |= (tree->gtFlags & GTF_COLON_COND); - } - } - } - } - - return tree; -} - /***************************************************************************** * * Mark whether the edge "srcBB -> dstBB" forms a loop that will always @@ -1814,8 +1773,8 @@ GenTree* Compiler::fgCreateMonitorTree(unsigned lvaMonAcquired, unsigned lvaThis if (block->bbJumpKind == BBJ_RETURN && block->lastStmt()->GetRootNode()->gtOper == GT_RETURN) { - GenTree* retNode = block->lastStmt()->GetRootNode(); - GenTree* retExpr = retNode->AsOp()->gtOp1; + GenTreeUnOp* retNode = block->lastStmt()->GetRootNode()->AsUnOp(); + GenTree* retExpr = retNode->gtOp1; if (retExpr != nullptr) { @@ -1823,16 +1782,16 @@ GenTree* Compiler::fgCreateMonitorTree(unsigned lvaMonAcquired, unsigned lvaThis // ret(...) 
-> // ret(comma(comma(tmp=...,call mon_exit), tmp)) // - GenTree* temp = fgInsertCommaFormTemp(&retNode->AsOp()->gtOp1); - GenTree* lclVar = retNode->AsOp()->gtOp1->AsOp()->gtOp2; - - // The return can't handle all of the trees that could be on the right-hand-side of an assignment, - // especially in the case of a struct. Therefore, we need to propagate GTF_DONT_CSE. - // If we don't, assertion propagation may, e.g., change a return of a local to a return of "CNS_INT struct - // 0", - // which downstream phases can't handle. + TempInfo tempInfo = fgMakeTemp(retExpr); + GenTree* lclVar = tempInfo.load; + + // TODO-1stClassStructs: delete this NO_CSE propagation. Requires handling multi-regs in copy prop. lclVar->gtFlags |= (retExpr->gtFlags & GTF_DONT_CSE); - retNode->AsOp()->gtOp1->AsOp()->gtOp2 = gtNewOperNode(GT_COMMA, retExpr->TypeGet(), tree, lclVar); + + retExpr = gtNewOperNode(GT_COMMA, lclVar->TypeGet(), tree, lclVar); + retExpr = gtNewOperNode(GT_COMMA, lclVar->TypeGet(), tempInfo.asg, retExpr); + retNode->gtOp1 = retExpr; + retNode->AddAllEffectsFlags(retExpr); } else { @@ -2902,14 +2861,11 @@ PhaseStatus Compiler::fgRationalizeAssignments() compAssignmentRationalized = true; #ifdef DEBUG - if (JitConfig.JitStressMorphStores()) + for (BasicBlock* block : Blocks()) { - for (BasicBlock* block : Blocks()) + for (Statement* stmt : block->Statements()) { - for (Statement* stmt : block->Statements()) - { - fgMorphBlockStmt(block, stmt DEBUGARG("fgRationalizeAssignments")); - } + assert(!gtTreeContainsOper(stmt->GetRootNode(), GT_ASG)); } } #endif // DEBUG diff --git a/src/coreclr/jit/gentree.cpp b/src/coreclr/jit/gentree.cpp index ead839c..1fd6edf 100644 --- a/src/coreclr/jit/gentree.cpp +++ b/src/coreclr/jit/gentree.cpp @@ -3328,6 +3328,14 @@ AGAIN: // these should be included in the hash code. switch (oper) { + case GT_STOREIND: + hash = genTreeHashAdd(hash, tree->AsStoreInd()->GetRMWStatus()); + break; + + case GT_STORE_BLK: + hash = genTreeHashAdd(hash, tree->AsBlk()->GetLayout()); + break; + case GT_INTRINSIC: hash += tree->AsIntrinsic()->gtIntrinsicName; break; @@ -3339,10 +3347,6 @@ AGAIN: hash = genTreeHashAdd(hash, tree->AsBoundsChk()->gtThrowKind); break; - case GT_STORE_BLK: - hash ^= PtrToUlong(tree->AsBlk()->GetLayout()); - break; - // For the ones below no extra argument matters for comparison. case GT_ARR_INDEX: case GT_QMARK: @@ -7770,8 +7774,6 @@ GenTreeLclVar* Compiler::gtNewStoreLclVarNode(unsigned lclNum, GenTree* data) GenTreeLclFld* Compiler::gtNewStoreLclFldNode(unsigned lclNum, var_types type, unsigned offset, GenTree* data) { - assert((genActualType(type) == genActualType(data)) || ((type == TYP_STRUCT) && data->TypeIs(TYP_INT))); - ClassLayout* layout = (type == TYP_STRUCT) ? 
data->GetLayout(this) : nullptr; GenTreeLclFld* store = new (this, GT_STORE_LCL_FLD) GenTreeLclFld(type, lclNum, offset, data, layout); store->gtFlags |= (GTF_VAR_DEF | GTF_ASG); @@ -8196,8 +8198,7 @@ GenTreeBlk* Compiler::gtNewStoreBlkNode(ClassLayout* layout, GenTree* addr, GenT // GenTreeStoreInd* Compiler::gtNewStoreIndNode(var_types type, GenTree* addr, GenTree* data, GenTreeFlags indirFlags) { - assert((indirFlags & GTF_IND_INVARIANT) == 0); - assert((type != TYP_STRUCT) && (genActualType(type) == genActualType(data))); + assert(((indirFlags & GTF_IND_INVARIANT) == 0) && (type != TYP_STRUCT)); GenTreeStoreInd* store = new (this, GT_STOREIND) GenTreeStoreInd(type, addr, data); store->gtFlags |= GTF_ASG; @@ -10882,7 +10883,19 @@ void Compiler::gtDispNode(GenTree* tree, IndentStack* indentStack, _In_ _In_opt_ break; } } - FALLTHROUGH; + if (tree->gtFlags & GTF_IND_VOLATILE) + { + printf("V"); + --msgLength; + break; + } + if (tree->gtFlags & GTF_IND_UNALIGNED) + { + printf("U"); + --msgLength; + break; + } + goto DASH; case GT_ASG: if (tree->OperIsInitBlkOp()) @@ -14382,21 +14395,21 @@ GenTree* Compiler::gtTryRemoveBoxUpstreamEffects(GenTree* op, BoxRemovalOptions assert(op->IsBoxedValue()); // grab related parts for the optimization - GenTreeBox* box = op->AsBox(); - Statement* asgStmt = box->gtAsgStmtWhenInlinedBoxValue; - Statement* copyStmt = box->gtCopyStmtWhenInlinedBoxValue; + GenTreeBox* box = op->AsBox(); + Statement* allocStmt = box->gtAsgStmtWhenInlinedBoxValue; + Statement* copyStmt = box->gtCopyStmtWhenInlinedBoxValue; JITDUMP("gtTryRemoveBoxUpstreamEffects: %s to %s of BOX (valuetype)" " [%06u] (assign/newobj " FMT_STMT " copy " FMT_STMT "\n", (options == BR_DONT_REMOVE) ? "checking if it is possible" : "attempting", (options == BR_MAKE_LOCAL_COPY) ? "make local unboxed version" : "remove side effects", dspTreeID(op), - asgStmt->GetID(), copyStmt->GetID()); + allocStmt->GetID(), copyStmt->GetID()); - // If we don't recognize the form of the assign, bail. - GenTree* asg = asgStmt->GetRootNode(); - if (asg->gtOper != GT_ASG) + // If we don't recognize the form of the store, bail. + GenTree* boxLclDef = allocStmt->GetRootNode(); + if (!boxLclDef->OperIsStoreLclVar()) { - JITDUMP(" bailing; unexpected assignment op %s\n", GenTree::OpName(asg->gtOper)); + JITDUMP(" bailing; unexpected alloc def op %s\n", GenTree::OpName(boxLclDef->OperGet())); return nullptr; } @@ -14411,20 +14424,20 @@ GenTree* Compiler::gtTryRemoveBoxUpstreamEffects(GenTree* op, BoxRemovalOptions GenTree* boxTypeHandle = nullptr; if ((options == BR_REMOVE_AND_NARROW_WANT_TYPE_HANDLE) || (options == BR_DONT_REMOVE_WANT_TYPE_HANDLE)) { - GenTree* asgSrc = asg->AsOp()->gtOp2; - genTreeOps asgSrcOper = asgSrc->OperGet(); + GenTree* defSrc = boxLclDef->Data(); + genTreeOps defSrcOper = defSrc->OperGet(); // Allocation may be via AllocObj or via helper call, depending // on when this is invoked and whether the jit is using AllocObj // for R2R allocations. - if (asgSrcOper == GT_ALLOCOBJ) + if (defSrcOper == GT_ALLOCOBJ) { - GenTreeAllocObj* allocObj = asgSrc->AsAllocObj(); + GenTreeAllocObj* allocObj = defSrc->AsAllocObj(); boxTypeHandle = allocObj->AsOp()->gtOp1; } - else if (asgSrcOper == GT_CALL) + else if (defSrcOper == GT_CALL) { - GenTreeCall* newobjCall = asgSrc->AsCall(); + GenTreeCall* newobjCall = defSrc->AsCall(); // In R2R expansions the handle may not be an explicit operand to the helper, // so we can't remove the box. 
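
Aside: the direction of the conversion in the hunks above -- a definition that used to be spelled ASG(LCL_VAR<N>, value) becomes the single node STORE_LCL_VAR<N>(value) -- can be sketched with a small toy model. Everything below is invented for illustration; the real GenTree nodes carry types, flags, and layouts that this model omits.

    // Toy sketch of assignment rationalization: ASG(LCL_VAR<N>, value)
    // becomes STORE_LCL_VAR<N>(value). Invented, simplified node shapes.
    #include <cassert>

    enum class Oper { LclVar, CnsInt, Asg, StoreLclVar };

    struct Node
    {
        Oper     oper;
        unsigned lclNum = 0;       // valid for LclVar/StoreLclVar
        Node*    op1    = nullptr; // ASG destination, or the stored value
        Node*    op2    = nullptr; // ASG value
    };

    void RationalizeLocalAsg(Node* node)
    {
        assert((node->oper == Oper::Asg) && (node->op1->oper == Oper::LclVar));
        node->lclNum = node->op1->lclNum; // the destination local moves into the store node itself
        node->op1    = node->op2;         // the stored value becomes the single operand, i.e. "Data()"
        node->op2    = nullptr;
        node->oper   = Oper::StoreLclVar;
    }

    int main()
    {
        Node value{Oper::CnsInt};
        Node dest{Oper::LclVar, 5};
        Node asg{Oper::Asg, 0, &dest, &value};

        RationalizeLocalAsg(&asg);
        assert((asg.oper == Oper::StoreLclVar) && (asg.lclNum == 5) && (asg.op1 == &value));
        return 0;
    }

After the rewrite the stored value is the node's only operand, which is what the Data() accessors used throughout this patch retrieve.
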
@@ -14447,7 +14460,7 @@ GenTree* Compiler::gtTryRemoveBoxUpstreamEffects(GenTree* op, BoxRemovalOptions // If we don't recognize the form of the copy, bail. GenTree* copy = copyStmt->GetRootNode(); - if (copy->gtOper != GT_ASG) + if (!copy->OperIs(GT_ASG, GT_STOREIND, GT_STORE_BLK)) { // GT_RET_EXPR is a tolerable temporary failure. // The jit will revisit this optimization after @@ -14458,10 +14471,8 @@ GenTree* Compiler::gtTryRemoveBoxUpstreamEffects(GenTree* op, BoxRemovalOptions } else { - // Anything else is a missed case we should - // figure out how to handle. One known case - // is GT_COMMAs enclosing the GT_ASG we are - // looking for. + // Anything else is a missed case we should figure out how to handle. + // One known case is GT_COMMAs enclosing the store we are looking for. JITDUMP(" bailing; unexpected copy op %s\n", GenTree::OpName(copy->gtOper)); } return nullptr; @@ -14483,9 +14494,10 @@ GenTree* Compiler::gtTryRemoveBoxUpstreamEffects(GenTree* op, BoxRemovalOptions // // The shape here is constrained to the patterns we produce // over in impImportAndPushBox for the inlined box case. - GenTree* copyDst = copy->AsOp()->gtOp1; + bool copyIsAsg = copy->OperIs(GT_ASG); + GenTree* copyDst = copyIsAsg ? copy->AsOp()->gtOp1 : copy; - if (!copyDst->OperIs(GT_BLK, GT_IND)) + if (copyIsAsg && !copyDst->OperIs(GT_BLK, GT_IND)) { JITDUMP("Unexpected copy dest operator %s\n", GenTree::OpName(copyDst->gtOper)); return nullptr; @@ -14521,8 +14533,8 @@ GenTree* Compiler::gtTryRemoveBoxUpstreamEffects(GenTree* op, BoxRemovalOptions lvaSetStruct(boxTempLcl, boxClass, isUnsafeValueClass); // Remove the newobj and assignment to box temp - JITDUMP("Bashing NEWOBJ [%06u] to NOP\n", dspTreeID(asg)); - asg->gtBashToNOP(); + JITDUMP("Bashing NEWOBJ [%06u] to NOP\n", dspTreeID(boxLclDef)); + boxLclDef->gtBashToNOP(); // Update the copy from the value to be boxed to the box temp copyDst->AsOp()->gtOp1 = gtNewLclVarAddrNode(boxTempLcl, TYP_BYREF); @@ -14533,9 +14545,8 @@ GenTree* Compiler::gtTryRemoveBoxUpstreamEffects(GenTree* op, BoxRemovalOptions return retValue; } - // If the copy is a struct copy, make sure we know how to isolate - // any source side effects. - GenTree* copySrc = copy->AsOp()->gtOp2; + // If the copy is a struct copy, make sure we know how to isolate any source side effects. + GenTree* copySrc = copy->Data(); // If the copy source is from a pending inline, wait for it to resolve. if (copySrc->gtOper == GT_RET_EXPR) @@ -14551,7 +14562,7 @@ GenTree* Compiler::gtTryRemoveBoxUpstreamEffects(GenTree* op, BoxRemovalOptions { hasSrcSideEffect = true; - if (varTypeIsStruct(copySrc->gtType)) + if (varTypeIsStruct(copySrc)) { isStructCopy = true; @@ -14579,8 +14590,8 @@ GenTree* Compiler::gtTryRemoveBoxUpstreamEffects(GenTree* op, BoxRemovalOptions // Otherwise, proceed with the optimization. // // Change the assignment expression to a NOP. - JITDUMP("\nBashing NEWOBJ [%06u] to NOP\n", dspTreeID(asg)); - asg->gtBashToNOP(); + JITDUMP("\nBashing NEWOBJ [%06u] to NOP\n", dspTreeID(boxLclDef)); + boxLclDef->gtBashToNOP(); // Change the copy expression so it preserves key // source side effects. 
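
Aside: the "bash to NOP" pattern above -- deleting a definition that is no longer needed while keeping a side-effecting source alive -- can be sketched in miniature. ToyStmt and RemoveDef below are invented stand-ins, not JIT APIs; gtBashToNOP and gtExtractSideEffList are the real mechanisms this loosely models.

    // Toy illustration of removing a dead definition while preserving the
    // side effects of its source. All types here are invented for the sketch.
    #include <cassert>

    struct ToyStmt
    {
        bool isNop                = false;
        bool sourceHasSideEffects = false;
        bool sourceKept           = false;
    };

    // If the source has side effects, keep it as a free-standing expression
    // (the analogue of extracting a side-effect list); otherwise the whole
    // statement can simply become a NOP (the analogue of gtBashToNOP).
    void RemoveDef(ToyStmt& stmt)
    {
        if (stmt.sourceHasSideEffects)
        {
            stmt.sourceKept = true;
        }
        else
        {
            stmt.isNop = true;
        }
    }

    int main()
    {
        ToyStmt pure{};      // e.g. the source is a constant
        ToyStmt effectful{}; // e.g. the source is a call
        effectful.sourceHasSideEffects = true;

        RemoveDef(pure);
        RemoveDef(effectful);
        assert(pure.isNop && !effectful.isNop && effectful.sourceKept);
        return 0;
    }
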
@@ -14623,7 +14634,7 @@ GenTree* Compiler::gtTryRemoveBoxUpstreamEffects(GenTree* op, BoxRemovalOptions

     if (fgNodeThreading == NodeThreading::AllTrees)
     {
-        fgSetStmtSeq(asgStmt);
+        fgSetStmtSeq(allocStmt);
         fgSetStmtSeq(copyStmt);
     }

@@ -16082,6 +16093,14 @@ GenTree* Compiler::gtNewTempAssign(
     if (compAssignmentRationalized)
     {
         store = gtNewStoreLclVarNode(tmp, val);
+
+#ifdef UNIX_AMD64_ABI
+        if (val->IsCall())
+        {
+            // TODO-ASG: delete this zero-diff quirk.
+            varDsc->lvIsMultiRegRet = true;
+        }
+#endif // UNIX_AMD64_ABI
     }
     else if (varTypeIsStruct(varDsc) && !val->IsInitVal())
     {
@@ -17417,49 +17436,7 @@ bool GenTree::DefinesLocal(
         return true;
     }

-    if (OperIs(GT_ASG))
-    {
-        GenTree* lhs = AsOp()->gtGetOp1();
-
-        if (lhs->OperIs(GT_LCL_VAR))
-        {
-            *pLclVarTree = lhs->AsLclVarCommon();
-            if (pIsEntire != nullptr)
-            {
-                *pIsEntire = true;
-            }
-            if (pOffset != nullptr)
-            {
-                *pOffset = 0;
-            }
-            if (pSize != nullptr)
-            {
-                *pSize = comp->lvaLclExactSize(lhs->AsLclVarCommon()->GetLclNum());
-            }
-
-            return true;
-        }
-
-        if (lhs->OperIs(GT_LCL_FLD))
-        {
-            *pLclVarTree = lhs->AsLclVarCommon();
-            if (pIsEntire != nullptr)
-            {
-                *pIsEntire = !lhs->AsLclFld()->IsPartialLclFld(comp);
-            }
-            if (pOffset != nullptr)
-            {
-                *pOffset = lhs->AsLclFld()->GetLclOffs();
-            }
-            if (pSize != nullptr)
-            {
-                *pSize = lhs->AsLclFld()->GetSize();
-            }
-
-            return true;
-        }
-    }
-    else if (OperIs(GT_CALL))
+    if (OperIs(GT_CALL))
     {
         GenTreeLclVarCommon* lclAddr = comp->gtCallGetDefinedRetBufLclAddr(AsCall());
         if (lclAddr == nullptr)
diff --git a/src/coreclr/jit/gentree.h b/src/coreclr/jit/gentree.h
index ee6948a..80ebe02 100644
--- a/src/coreclr/jit/gentree.h
+++ b/src/coreclr/jit/gentree.h
@@ -426,14 +426,13 @@ enum GenTreeFlags : unsigned int
 //  well to make sure it's the right operator for the particular flag.
 //---------------------------------------------------------------------

-// These flags are also used by GT_LCL_FLD, and the last-use (DEATH) flags are also used by GenTreeCopyOrReload.
+    GTF_VAR_DEF    = 0x80000000, // GT_STORE_LCL_VAR/GT_STORE_LCL_FLD/GT_LCL_ADDR -- this is a definition
+    GTF_VAR_USEASG = 0x40000000, // GT_STORE_LCL_VAR/GT_STORE_LCL_FLD/GT_LCL_ADDR -- this is a partial definition, a use of
+                                 // the previous definition is implied. A partial definition usually occurs when a struct
+                                 // field is assigned to (s.f = ...) or when a scalar typed variable is assigned to via a
+                                 // narrow store (*((byte*)&i) = ...).

-    GTF_VAR_DEF    = 0x80000000, // GT_LCL_VAR -- this is a definition
-    GTF_VAR_USEASG = 0x40000000, // GT_LCL_VAR -- this is a partial definition, a use of the previous definition is implied
-                                 // A partial definition usually occurs when a struct field is assigned to (s.f = ...) or
-                                 // when a scalar typed variable is assigned to via a narrow store (*((byte*)&i) = ...).
-
-// Last-use bits.
+// Last-use bits. Also used by GenTreeCopyOrReload.
 // Note that a node marked GTF_VAR_MULTIREG can only be a pure definition of all the fields, or a pure use of all the fields,
 // so we don't need the equivalent of GTF_VAR_USEASG.
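
Aside: a minimal, self-contained sketch of the "partial definition" test that GTF_VAR_USEASG encodes, using invented stand-in types rather than the real IsPartialLclFld implementation:

    // Toy model: a store to a local is a partial definition (and therefore
    // implies a use of the previous definition) when it does not cover the
    // local in its entirety. Types and names invented for illustration.
    #include <cassert>
    #include <cstdio>

    struct ToyLclStore
    {
        unsigned lclSize;   // total size of the local being stored to
        unsigned storeOffs; // offset of the store within the local
        unsigned storeSize; // size of the value being stored
    };

    bool IsPartialDef(const ToyLclStore& s)
    {
        return (s.storeOffs != 0) || (s.storeSize != s.lclSize);
    }

    int main()
    {
        assert(!IsPartialDef({8, 0, 8})); // full-width store: plain definition
        assert(IsPartialDef({8, 4, 4}));  // field store "s.f = ...": partial
        assert(IsPartialDef({4, 0, 1}));  // narrow store "*((byte*)&i) = ...": partial
        printf("partial-definition checks passed\n");
        return 0;
    }

This mirrors the new fgDebugCheckFlags assert earlier in the patch, which requires GTF_VAR_USEASG on a local store exactly when IsPartialLclFld reports a partial definition.
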
@@ -1635,7 +1634,7 @@ public: bool OperIsSsaDef() const { - return OperIs(GT_ASG, GT_CALL) || OperIsLocalStore(); + return OperIs(GT_CALL) || OperIsLocalStore(); } static bool OperIsHWIntrinsic(genTreeOps gtOper) @@ -1689,7 +1688,7 @@ public: return OperIs(GT_JCC, GT_SETCC, GT_SELECTCC); } - bool OperIsStoreLclVar(unsigned* pLclNum); + bool OperIsStoreLclVar(unsigned* pLclNum = nullptr); bool OperIsStoreLcl(unsigned* pLclNum); #ifdef DEBUG @@ -1785,8 +1784,6 @@ public: // The returned pointer might be nullptr if the node is not binary, or if non-null op2 is not required. inline GenTree* gtGetOp2IfPresent() const; - inline GenTree* GetStoreDestination(); - inline GenTree*& Data(); bool TryGetUse(GenTree* operand, GenTree*** pUse); @@ -8849,12 +8846,18 @@ inline bool GenTree::OperIsStoreLclVar(unsigned* pLclNum) // TODO-ASG: delete. { if (OperIs(GT_STORE_LCL_VAR)) { - *pLclNum = AsLclVar()->GetLclNum(); + if (pLclNum != nullptr) + { + *pLclNum = AsLclVar()->GetLclNum(); + } return true; } if (OperIs(GT_ASG) && gtGetOp1()->OperIs(GT_LCL_VAR)) { - *pLclNum = gtGetOp1()->AsLclVar()->GetLclNum(); + if (pLclNum != nullptr) + { + *pLclNum = gtGetOp1()->AsLclVar()->GetLclNum(); + } return true; } @@ -9260,12 +9263,6 @@ inline GenTree* GenTree::gtGetOp2IfPresent() const return op2; } -inline GenTree* GenTree::GetStoreDestination() // TODO-ASG: delete. -{ - assert(OperIs(GT_ASG) || OperIsStore()); - return OperIs(GT_ASG) ? gtGetOp1() : this; -} - inline GenTree*& GenTree::Data() { assert(OperIsStore() || OperIs(GT_STORE_DYN_BLK, GT_ASG)); diff --git a/src/coreclr/jit/jitconfigvalues.h b/src/coreclr/jit/jitconfigvalues.h index 578edeb..0b980d7 100644 --- a/src/coreclr/jit/jitconfigvalues.h +++ b/src/coreclr/jit/jitconfigvalues.h @@ -170,7 +170,6 @@ CONFIG_INTEGER(JitStressModeNamesOnly, W("JitStressModeNamesOnly"), 0) // Intern CONFIG_INTEGER(JitStressProcedureSplitting, W("JitStressProcedureSplitting"), 0) // Always split after the first basic // block. Skips functions with EH // for simplicity. -CONFIG_INTEGER(JitStressMorphStores, W("JitStressMorphStores"), 0) // Morph trees after assignment rationalization CONFIG_INTEGER(JitStressRegs, W("JitStressRegs"), 0) CONFIG_STRING(JitStressRegsRange, W("JitStressRegsRange")) // Only apply JitStressRegs to methods in this hash range diff --git a/src/coreclr/jit/lclvars.cpp b/src/coreclr/jit/lclvars.cpp index d9ce7d1..30f7154 100644 --- a/src/coreclr/jit/lclvars.cpp +++ b/src/coreclr/jit/lclvars.cpp @@ -8061,12 +8061,7 @@ unsigned Compiler::lvaStressLclFldPadding(unsigned lclNum) Compiler::fgWalkResult Compiler::lvaStressLclFldCB(GenTree** pTree, fgWalkData* data) { GenTree* const tree = *pTree; - GenTreeLclVarCommon* lcl = nullptr; - - if (tree->OperIs(GT_LCL_VAR, GT_LCL_FLD, GT_LCL_ADDR)) - { - lcl = tree->AsLclVarCommon(); - } + GenTreeLclVarCommon* lcl = tree->OperIsAnyLocal() ? 
tree->AsLclVarCommon() : nullptr; if (lcl == nullptr) { @@ -8083,23 +8078,24 @@ Compiler::fgWalkResult Compiler::lvaStressLclFldCB(GenTree** pTree, fgWalkData* if (varDsc->lvNoLclFldStress) { // Already determined we can't do anything for this var - return WALK_SKIP_SUBTREES; + return WALK_CONTINUE; } if (bFirstPass) { // Ignore locals that already have field appearances - if (lcl->OperIs(GT_LCL_FLD) || (lcl->OperIs(GT_LCL_ADDR) && (lcl->AsLclFld()->GetLclOffs() != 0))) + if (lcl->OperIs(GT_LCL_FLD, GT_STORE_LCL_FLD) || + (lcl->OperIs(GT_LCL_ADDR) && (lcl->AsLclFld()->GetLclOffs() != 0))) { varDsc->lvNoLclFldStress = true; - return WALK_SKIP_SUBTREES; + return WALK_CONTINUE; } // Ignore arguments and temps if (varDsc->lvIsParam || lclNum >= pComp->info.compLocalsCount) { varDsc->lvNoLclFldStress = true; - return WALK_SKIP_SUBTREES; + return WALK_CONTINUE; } // Ignore OSR locals; if in memory, they will live on the @@ -8108,7 +8104,7 @@ Compiler::fgWalkResult Compiler::lvaStressLclFldCB(GenTree** pTree, fgWalkData* if (pComp->lvaIsOSRLocal(lclNum)) { varDsc->lvNoLclFldStress = true; - return WALK_SKIP_SUBTREES; + return WALK_CONTINUE; } // Likewise for Tier0 methods with patchpoints -- @@ -8117,7 +8113,7 @@ Compiler::fgWalkResult Compiler::lvaStressLclFldCB(GenTree** pTree, fgWalkData* if (pComp->doesMethodHavePatchpoints() || pComp->doesMethodHavePartialCompilationPatchpoints()) { varDsc->lvNoLclFldStress = true; - return WALK_SKIP_SUBTREES; + return WALK_CONTINUE; } // Converting tail calls to loops may require insertion of explicit @@ -8128,21 +8124,21 @@ Compiler::fgWalkResult Compiler::lvaStressLclFldCB(GenTree** pTree, fgWalkData* if (pComp->compMayConvertTailCallToLoop) { varDsc->lvNoLclFldStress = true; - return WALK_SKIP_SUBTREES; + return WALK_CONTINUE; } // Fix for lcl_fld stress mode if (varDsc->lvKeepType) { varDsc->lvNoLclFldStress = true; - return WALK_SKIP_SUBTREES; + return WALK_CONTINUE; } // Can't have GC ptrs in block layouts. 
if (!varTypeIsArithmetic(lclType)) { varDsc->lvNoLclFldStress = true; - return WALK_SKIP_SUBTREES; + return WALK_CONTINUE; } // The noway_assert in the second pass below, requires that these types match @@ -8150,7 +8146,7 @@ Compiler::fgWalkResult Compiler::lvaStressLclFldCB(GenTree** pTree, fgWalkData* if (varType != lclType) { varDsc->lvNoLclFldStress = true; - return WALK_SKIP_SUBTREES; + return WALK_CONTINUE; } // Weed out "small" types like TYP_BYTE as we don't mark the GT_LCL_VAR @@ -8160,7 +8156,7 @@ Compiler::fgWalkResult Compiler::lvaStressLclFldCB(GenTree** pTree, fgWalkData* if (genTypeSize(varType) != genTypeSize(genActualType(varType))) { varDsc->lvNoLclFldStress = true; - return WALK_SKIP_SUBTREES; + return WALK_CONTINUE; } // Offset some of the local variable by a "random" non-zero amount @@ -8169,7 +8165,7 @@ Compiler::fgWalkResult Compiler::lvaStressLclFldCB(GenTree** pTree, fgWalkData* if (padding == 0) { varDsc->lvNoLclFldStress = true; - return WALK_SKIP_SUBTREES; + return WALK_CONTINUE; } } else @@ -8204,13 +8200,22 @@ Compiler::fgWalkResult Compiler::lvaStressLclFldCB(GenTree** pTree, fgWalkData* // Update the trees if (tree->OperIs(GT_LCL_VAR)) { - tree->ChangeOper(GT_LCL_FLD); + tree->SetOper(GT_LCL_FLD); + } + else if (tree->OperIs(GT_STORE_LCL_VAR)) + { + tree->SetOper(GT_STORE_LCL_FLD); } tree->AsLclFld()->SetLclOffs(padding); + + if (tree->OperIs(GT_STORE_LCL_FLD) && tree->IsPartialLclFld(pComp)) + { + tree->gtFlags |= GTF_VAR_USEASG; + } } - return WALK_SKIP_SUBTREES; + return WALK_CONTINUE; } /*****************************************************************************/ diff --git a/src/coreclr/jit/morph.cpp b/src/coreclr/jit/morph.cpp index 0272159..1005e57 100644 --- a/src/coreclr/jit/morph.cpp +++ b/src/coreclr/jit/morph.cpp @@ -1596,7 +1596,7 @@ GenTree* CallArgs::MakeTmpArgNode(Compiler* comp, CallArg* arg) // This is the main function responsible for assigning late nodes in arguments. // After this function we may have the following shapes of early and late // nodes in arguments: -// 1. Early: GT_ASG, Late: GT_LCL_VAR. +// 1. Early: GT_STORE_LCL_VAR, Late: GT_LCL_VAR. // When the argument needs to be evaluated early (e.g. because it has // side effects, or because it is a struct copy that requires it) it // will be assigned to a temp in the early node and passed as the local @@ -1855,7 +1855,7 @@ void CallArgs::SetNeedsTemp(CallArg* arg) // rhs - The right-hand side expression. // // Return Value: -// 'TempInfo' data that contains the GT_ASG and GT_LCL_VAR nodes for assignment +// 'TempInfo' data that contains the GT_STORE_LCL_VAR and GT_LCL_VAR nodes for store // and variable load respectively. 
// TempInfo Compiler::fgMakeTemp(GenTree* rhs) @@ -3392,17 +3392,15 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call) lvaSetStruct(tmp, impGetRefAnyClass(), false); lvaSetVarAddrExposed(tmp DEBUGARG(AddressExposedReason::TOO_CONSERVATIVE)); - // Build the mkrefany as a comma node: - // (tmp.ptr=argx),(tmp.type=handle) - GenTreeLclFld* destPtrSlot = gtNewLclFldNode(tmp, TYP_I_IMPL, OFFSETOF__CORINFO_TypedReference__dataPtr); - GenTreeLclFld* destTypeSlot = gtNewLclFldNode(tmp, TYP_I_IMPL, OFFSETOF__CORINFO_TypedReference__type); - - GenTree* asgPtrSlot = gtNewAssignNode(destPtrSlot, argx->AsOp()->gtOp1); - GenTree* asgTypeSlot = gtNewAssignNode(destTypeSlot, argx->AsOp()->gtOp2); - GenTree* asg = gtNewOperNode(GT_COMMA, TYP_VOID, asgPtrSlot, asgTypeSlot); + // Build the mkrefany as a comma node: (tmp.ptr=argx),(tmp.type=handle) + GenTree* storePtrSlot = + gtNewStoreLclFldNode(tmp, TYP_BYREF, OFFSETOF__CORINFO_TypedReference__dataPtr, argx->AsOp()->gtOp1); + GenTree* storeTypeSlot = + gtNewStoreLclFldNode(tmp, TYP_I_IMPL, OFFSETOF__CORINFO_TypedReference__type, argx->AsOp()->gtOp2); + GenTree* store = gtNewOperNode(GT_COMMA, TYP_VOID, storePtrSlot, storeTypeSlot); // Change the expression to "(tmp=val)" - arg.SetEarlyNode(asg); + arg.SetEarlyNode(store); call->gtArgs.SetTemp(&arg, tmp); hasMultiregStructArgs |= ((arg.AbiInfo.ArgType == TYP_STRUCT) && !arg.AbiInfo.PassedByRef); #endif // !TARGET_X86 @@ -3456,7 +3454,7 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call) if (call->gtCallType == CT_INDIRECT) { call->gtCallAddr = fgMorphTree(call->gtCallAddr); - // Const CSE may create an assignment node here + // Const CSE may create a store node here flagsSummary |= call->gtCallAddr->gtFlags; } @@ -4049,8 +4047,7 @@ void Compiler::fgMakeOutgoingStructArgCopy(GenTreeCall* call, CallArg* arg) } // Copy the valuetype to the temp - GenTree* dest = gtNewLclvNode(tmp, lvaGetDesc(tmp)->TypeGet()); - GenTree* copyBlk = gtNewAssignNode(dest, argx); + GenTree* copyBlk = gtNewStoreLclVarNode(tmp, argx); copyBlk = fgMorphCopyBlock(copyBlk); call->gtArgs.SetTemp(arg, tmp); @@ -4632,7 +4629,7 @@ GenTree* Compiler::fgMorphIndexAddr(GenTreeIndexAddr* indexAddr) } //------------------------------------------------------------------------ -// fgMorphLocal: Fully morph a local node. +// fgMorphLeafLocal: Fully morph a leaf local node. // // Arguments: // lclNode - The node to morph @@ -4640,17 +4637,11 @@ GenTree* Compiler::fgMorphIndexAddr(GenTreeIndexAddr* indexAddr) // Return Value: // The fully morphed tree. 
// -GenTree* Compiler::fgMorphLocal(GenTreeLclVarCommon* lclNode) +GenTree* Compiler::fgMorphLeafLocal(GenTreeLclVarCommon* lclNode) { - assert(lclNode->OperIs(GT_LCL_VAR, GT_LCL_FLD) || lclNode->OperIs(GT_LCL_ADDR)); - - GenTree* expandedTree = nullptr; -#ifdef TARGET_X86 - expandedTree = fgMorphExpandStackArgForVarArgs(lclNode); -#else - expandedTree = fgMorphExpandImplicitByRefArg(lclNode); -#endif + assert(lclNode->OperIs(GT_LCL_VAR, GT_LCL_FLD, GT_LCL_ADDR)); + GenTree* expandedTree = fgMorphExpandLocal(lclNode); if (expandedTree != nullptr) { expandedTree = fgMorphTree(expandedTree); @@ -4664,16 +4655,50 @@ GenTree* Compiler::fgMorphLocal(GenTreeLclVarCommon* lclNode) return lclNode; } - assert(lclNode->OperIs(GT_LCL_VAR, GT_LCL_FLD)); - - if (lclNode->OperIs(GT_LCL_VAR)) + LclVarDsc* varDsc = lvaGetDesc(lclNode); + if (varDsc->IsAddressExposed()) { - return fgMorphLocalVar(lclNode); + lclNode->gtFlags |= GTF_GLOB_REF; } - if (lvaGetDesc(lclNode)->IsAddressExposed()) + // Small-typed arguments and aliased locals are normalized on load. Other small-typed locals are + // normalized on store. If this is one of the former, insert a narrowing cast on the load. + // ie. Convert: var-short --> cast-short(var-int) + // + if (fgGlobalMorph && lclNode->OperIs(GT_LCL_VAR) && varDsc->lvNormalizeOnLoad() && + /* TODO-ASG: delete this zero-diff quirk */ lclNode->CanCSE()) { - lclNode->gtFlags |= GTF_GLOB_REF; + // TYP_BOOL quirk: previously, the code in optAssertionIsSubrange did not handle TYP_BOOL. + // Now it does, but this leads to some regressions because we lose the uniform VNs for trees + // that represent the "reduced" normalize-on-load locals, i. e. LCL_VAR(small type V00), created + // here with local assertions, and "expanded", i. e. CAST(small type <- LCL_VAR(int V00)). + // This is a pretty fundamental problem with how normalize-on-load locals appear to the optimizer. + // This quirk preserves the previous behavior. + // TODO-CQ: fix the VNs for normalize-on-load locals and remove this quirk. + var_types lclVarType = varDsc->TypeGet(); + bool isBoolQuirk = lclVarType == TYP_BOOL; + + // Assertion prop can tell us to omit adding a cast here. This is useful when the local is a small-typed + // parameter that is passed in a register: in that case, the ABI specifies that the upper bits might be + // invalid, but the assertion guarantees us that we have normalized when we wrote it. + if (optLocalAssertionProp && !isBoolQuirk && + optAssertionIsSubrange(lclNode, IntegralRange::ForType(lclVarType), apFull) != NO_ASSERTION_INDEX) + { + // The previous assertion can guarantee us that if this node gets + // assigned a register, it will be normalized already. It is still + // possible that this node ends up being in memory, in which case + // normalization will still be needed, so we better have the right + // type. + assert(lclNode->TypeGet() == varDsc->TypeGet()); + return lclNode; + } + + lclNode->gtType = TYP_INT; + fgMorphTreeDone(lclNode); + GenTree* cast = gtNewCastNode(TYP_INT, lclNode, false, lclVarType); + fgMorphTreeDone(cast); + + return cast; } return lclNode; @@ -4708,13 +4733,23 @@ GenTree* Compiler::fgMorphExpandStackArgForVarArgs(GenTreeLclVarCommon* lclNode) GenTree* offsetNode = gtNewIconNode(offset, TYP_I_IMPL); GenTree* argAddr = gtNewOperNode(GT_SUB, TYP_I_IMPL, argsBaseAddr, offsetNode); - if (lclNode->OperIs(GT_LCL_ADDR)) + GenTree* argNode; + if (lclNode->OperIsLocalStore()) + { + GenTree* data = lclNode->Data(); + argNode = lclNode->TypeIs(TYP_STRUCT) ? 
gtNewStoreBlkNode(lclNode->GetLayout(this), argAddr, data) + : gtNewStoreIndNode(lclNode->TypeGet(), argAddr, data)->AsIndir(); + } + else if (lclNode->OperIsLocalRead()) { - return argAddr; + argNode = lclNode->TypeIs(TYP_STRUCT) ? gtNewBlkIndir(lclNode->GetLayout(this), argAddr) + : gtNewIndir(lclNode->TypeGet(), argAddr); + } + else + { + argNode = argAddr; } - GenTree* argNode = lclNode->TypeIs(TYP_STRUCT) ? gtNewBlkIndir(lclNode->GetLayout(this), argAddr) - : gtNewIndir(lclNode->TypeGet(), argAddr); return argNode; } #endif @@ -4793,8 +4828,7 @@ GenTree* Compiler::fgMorphExpandImplicitByRefArg(GenTreeLclVarCommon* lclNode) } else if (varDsc->lvIsStructField && lvaIsImplicitByRefLocal(varDsc->lvParentLcl)) { - // This was a field reference to an implicit-by-reference struct parameter that was - // dependently promoted. + // This was a field reference to an implicit-by-reference struct parameter that was dependently promoted. newLclNum = varDsc->lvParentLcl; fieldOffset = varDsc->lvFldOffset; } @@ -4805,16 +4839,13 @@ GenTree* Compiler::fgMorphExpandImplicitByRefArg(GenTreeLclVarCommon* lclNode) // Add a level of indirection to this node. The "base" will be a local node referring to "newLclNum". // We will also add an offset, and, if the original "lclNode" represents a location, a dereference. - bool isAddress = lclNode->OperIs(GT_LCL_ADDR); + GenTree* data = lclNode->OperIsLocalStore() ? lclNode->Data() : nullptr; + bool isLoad = lclNode->OperIsLocalRead(); unsigned offset = lclNode->GetLclOffs() + fieldOffset; var_types argNodeType = lclNode->TypeGet(); - ClassLayout* argNodeLayout = nullptr; - if (argNodeType == TYP_STRUCT) - { - argNodeLayout = lclNode->GetLayout(this); - } + ClassLayout* argNodeLayout = (argNodeType == TYP_STRUCT) ? lclNode->GetLayout(this) : nullptr; - JITDUMP("\nRewriting an implicit by-ref parameter %s:\n", isAddress ? "address" : "reference"); + JITDUMP("\nRewriting an implicit by-ref parameter reference:\n"); DISPTREE(lclNode); lclNode->ChangeType(TYP_BYREF); @@ -4833,12 +4864,18 @@ GenTree* Compiler::fgMorphExpandImplicitByRefArg(GenTreeLclVarCommon* lclNode) addrNode = gtNewOperNode(GT_ADD, TYP_BYREF, addrNode, gtNewIconNode(offset, TYP_I_IMPL)); } + // Note: currently, we have to conservatively treat all indirections off of implicit byrefs + // as global. This is because we lose the information on whether the original local's address + // was exposed when we retype it in "fgRetypeImplicitByRefArgs". + // GenTree* newArgNode; - if (!isAddress) + if (data != nullptr) + { + newArgNode = (argNodeType == TYP_STRUCT) ? gtNewStoreBlkNode(argNodeLayout, addrNode, data) + : gtNewStoreIndNode(argNodeType, addrNode, data)->AsIndir(); + } + else if (isLoad) { - // Note: currently, we have to conservatively treat all indirections off of implicit byrefs as global. This - // is because we lose the information on whether the original local's address was exposed when we retype it - // in "fgRetypeImplicitByRefArgs". newArgNode = (argNodeType == TYP_STRUCT) ? gtNewBlkIndir(argNodeLayout, addrNode) : gtNewIndir(argNodeType, addrNode); } @@ -4854,75 +4891,42 @@ GenTree* Compiler::fgMorphExpandImplicitByRefArg(GenTreeLclVarCommon* lclNode) return newArgNode; } -/***************************************************************************** - * - * Transform the given GT_LCL_VAR tree for code generation. 
- */ - -GenTree* Compiler::fgMorphLocalVar(GenTree* tree) +GenTree* Compiler::fgMorphExpandLocal(GenTreeLclVarCommon* lclNode) { - assert(tree->OperIs(GT_LCL_VAR)); - - LclVarDsc* varDsc = lvaGetDesc(tree->AsLclVarCommon()); - - if (varDsc->IsAddressExposed()) - { - tree->gtFlags |= GTF_GLOB_REF; - } + GenTree* expandedTree = nullptr; +#ifdef TARGET_X86 + expandedTree = fgMorphExpandStackArgForVarArgs(lclNode); +#else + expandedTree = fgMorphExpandImplicitByRefArg(lclNode); +#endif - // If not during the global morphing phase bail. - if (!fgGlobalMorph) + if (expandedTree != nullptr) { - return tree; + return expandedTree; } - bool isLocation = (tree->gtFlags & GTF_DONT_CSE) != 0; - - noway_assert(!(tree->gtFlags & GTF_VAR_DEF) || isLocation); // GTF_VAR_DEF should always imply isLocation. - - if (!isLocation && varDsc->lvNormalizeOnLoad()) + // Small-typed arguments and aliased locals are normalized on load. Other small-typed locals are + // normalized on store. If it is an assignment to one of the latter, insert the cast on source. + if (fgGlobalMorph && lclNode->OperIs(GT_STORE_LCL_VAR) && genActualTypeIsInt(lclNode)) { - // TYP_BOOL quirk: previously, the code in optAssertionIsSubrange did not handle TYP_BOOL. - // Now it does, but this leads to some regressions because we lose the uniform VNs for trees - // that represent the "reduced" normalize-on-load locals, i. e. LCL_VAR(small type V00), created - // here with local assertions, and "expanded", i. e. CAST(small type <- LCL_VAR(int V00)). - // This is a pretty fundamental problem with how normalize-on-load locals appear to the optimizer. - // This quirk preserves the previous behavior. - // TODO-CQ: fix the VNs for normalize-on-load locals and remove this quirk. - var_types lclVarType = varDsc->TypeGet(); - bool isBoolQuirk = lclVarType == TYP_BOOL; + LclVarDsc* varDsc = lvaGetDesc(lclNode); - // Assertion prop can tell us to omit adding a cast here. This is - // useful when the local is a small-typed parameter that is passed in a - // register: in that case, the ABI specifies that the upper bits might - // be invalid, but the assertion guarantees us that we have normalized - // when we wrote it. - if (optLocalAssertionProp && !isBoolQuirk && - optAssertionIsSubrange(tree, IntegralRange::ForType(lclVarType), apFull) != NO_ASSERTION_INDEX) + if (varDsc->lvNormalizeOnStore()) { - // The previous assertion can guarantee us that if this node gets - // assigned a register, it will be normalized already. It is still - // possible that this node ends up being in memory, in which case - // normalization will still be needed, so we better have the right - // type. - assert(tree->TypeGet() == varDsc->TypeGet()); - return tree; - } + GenTree* value = lclNode->Data(); + noway_assert(genActualTypeIsInt(value)); - // Small-typed arguments and aliased locals are normalized on load. - // Other small-typed locals are normalized on store. - // Also, under the debugger as the debugger could write to the variable. - // If this is one of the former, insert a narrowing cast on the load. - // ie. 
Convert: var-short --> cast-short(var-int) + lclNode->gtType = TYP_INT; - tree->gtType = TYP_INT; - fgMorphTreeDone(tree); - tree = gtNewCastNode(TYP_INT, tree, false, lclVarType); - fgMorphTreeDone(tree); - return tree; + if (fgCastNeeded(value, varDsc->TypeGet())) + { + lclNode->Data() = gtNewCastNode(TYP_INT, value, false, varDsc->TypeGet()); + return lclNode; + } + } } - return tree; + return nullptr; } //------------------------------------------------------------------------ @@ -5099,16 +5103,16 @@ GenTree* Compiler::fgMorphExpandInstanceField(GenTree* tree, MorphAddrContext* m / \ / \ / \ / \ / \ / \ - +-----+-----+ +-----+-----+ +---------+ +-----------+ - asg | GT_ASG | ind | GT_IND | | tmpLcl | | fldOffset | - +-----+-----+ +-----+-----+ +---------+ +-----------+ + +------------+-----------| +-----+-----+ +---------+ +-----------+ + | STORE_LCL_VAR tmpLcl | ind | GT_IND | | tmpLcl | | fldOffset | + +------------+-----------| +-----+-----+ +---------+ +-----------+ + | | + | | | | - / \ | - / \ | - / \ | - +-----+-----+ +-----+-----+ +-----------+ - | tmpLcl | | objRef | | tmpLcl | - +-----------+ +-----------+ +-----------+ + | | + +-----------+ +-----------+ + | objRef | | tmpLcl | + +-----------+ +-----------+ */ @@ -6247,7 +6251,7 @@ GenTree* Compiler::fgMorphPotentialTailCall(GenTreeCall* call) } else { - assert(stmtOper == GT_RETURN || stmtOper == GT_ASG || stmtOper == GT_COMMA); + assert(stmtOper == GT_RETURN || stmtOper == GT_STORE_LCL_VAR || stmtOper == GT_COMMA); GenTree* treeWithCall; if (stmtOper == GT_RETURN) { @@ -6261,7 +6265,7 @@ GenTree* Compiler::fgMorphPotentialTailCall(GenTreeCall* call) } else { - treeWithCall = stmtExpr->gtGetOp2(); + treeWithCall = stmtExpr->AsLclVar()->Data(); } // Peel off casts @@ -6414,7 +6418,8 @@ void Compiler::fgValidateIRForTailCall(GenTreeCall* call) class TailCallIRValidatorVisitor final : public GenTreeVisitor { GenTreeCall* m_tailcall; - GenTree* m_prevVal; + unsigned m_lclNum; + bool m_active; public: enum @@ -6424,7 +6429,7 @@ void Compiler::fgValidateIRForTailCall(GenTreeCall* call) }; TailCallIRValidatorVisitor(Compiler* comp, GenTreeCall* tailcall) - : GenTreeVisitor(comp), m_tailcall(tailcall), m_prevVal(nullptr) + : GenTreeVisitor(comp), m_tailcall(tailcall), m_lclNum(BAD_VAR_NUM), m_active(false) { } @@ -6433,11 +6438,11 @@ void Compiler::fgValidateIRForTailCall(GenTreeCall* call) GenTree* tree = *use; // Wait until we get to the actual call... - if (m_prevVal == nullptr) + if (!m_active) { if (tree == m_tailcall) { - m_prevVal = m_tailcall; + m_active = true; } return WALK_CONTINUE; @@ -6450,35 +6455,31 @@ void Compiler::fgValidateIRForTailCall(GenTreeCall* call) return WALK_ABORT; } - // GT_NOP might appear due to assignments that end up as - // self-assignments, which get morphed to GT_NOP. + // GT_NOP might appear due to stores that end up as + // self-stores, which get morphed to GT_NOP. if (tree->OperIs(GT_NOP)) { } - // We might see arbitrary chains of assignments that trivially + // We might see arbitrary chains of stores that trivially // propagate the result. 
Example: // - // * ASG ref - // +--* LCL_VAR ref V05 tmp5 + // * STORE_LCL_VAR ref V05 tmp5 // \--* CALL ref CultureInfo.InitializeUserDefaultUICulture // (in a new statement/BB) - // * ASG ref - // +--* LCL_VAR ref V02 tmp2 + // * STORE_LCL_VAR ref V02 tmp2 // \--* LCL_VAR ref V05 tmp5 // (in a new statement/BB) // * RETURN ref // \--* LCL_VAR ref V02 tmp2 // - else if (tree->OperIs(GT_ASG)) + else if (tree->OperIs(GT_STORE_LCL_VAR)) { - assert(tree->gtGetOp1()->OperIs(GT_LCL_VAR) && ValidateUse(tree->gtGetOp2()) && - "Expected LHS of assignment to be local and RHS of assignment to be result of tailcall"); - m_prevVal = tree->gtGetOp1(); + assert(ValidateUse(tree->AsLclVar()->Data()) && "Expected value of store to be result of tailcall"); + m_lclNum = tree->AsLclVar()->GetLclNum(); } else if (tree->OperIs(GT_LCL_VAR)) { - assert((ValidateUse(tree) || (user->OperIs(GT_ASG) && user->gtGetOp1() == tree)) && - "Expected use of local to be tailcall value or LHS of assignment"); + assert(ValidateUse(tree) && "Expected use of local to be tailcall value"); } else { @@ -6491,37 +6492,28 @@ void Compiler::fgValidateIRForTailCall(GenTreeCall* call) bool ValidateUse(GenTree* node) { - if (m_prevVal->OperIs(GT_LCL_VAR)) + if (m_lclNum != BAD_VAR_NUM) { - return node->OperIs(GT_LCL_VAR) && - (node->AsLclVar()->GetLclNum() == m_prevVal->AsLclVar()->GetLclNum()); + return node->OperIs(GT_LCL_VAR) && (node->AsLclVar()->GetLclNum() == m_lclNum); } - else if (m_prevVal == m_tailcall) - { - if (node == m_tailcall) - { - return true; - } - - // If we do not use the call value directly we might have - // passed this function's ret buffer arg, so verify that is - // being used. - CallArg* retBufferArg = m_tailcall->gtArgs.GetRetBufferArg(); - if (retBufferArg != nullptr) - { - GenTree* retBufferNode = retBufferArg->GetNode(); - return retBufferNode->OperIs(GT_LCL_VAR) && - (retBufferNode->AsLclVar()->GetLclNum() == m_compiler->info.compRetBuffArg) && - node->OperIs(GT_LCL_VAR) && - (node->AsLclVar()->GetLclNum() == m_compiler->info.compRetBuffArg); - } - return false; + if (node == m_tailcall) + { + return true; } - else + + // If we do not use the call value directly we might have passed + // this function's ret buffer arg, so verify that is being used. + CallArg* retBufferArg = m_tailcall->gtArgs.GetRetBufferArg(); + if (retBufferArg != nullptr) { - return node == m_prevVal; + GenTree* retBufferNode = retBufferArg->GetNode(); + return retBufferNode->OperIs(GT_LCL_VAR) && + (retBufferNode->AsLclVar()->GetLclNum() == m_compiler->info.compRetBuffArg) && + node->OperIs(GT_LCL_VAR) && (node->AsLclVar()->GetLclNum() == m_compiler->info.compRetBuffArg); } + + return false; } }; @@ -7449,9 +7441,7 @@ void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCa // block won't be in the loop (it's assumed to have no predecessors), we need to update the special local here. 
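An aside on the validator above: once the tail call has been seen, the only legal IR shapes are stores that trivially propagate the result and a final return of it, so a bare local number plus an "active" flag is sufficient state. A minimal sketch of that state machine, using hypothetical stand-in types rather than the JIT's GenTreeVisitor (Op, Stmt and ValidateTailCallResultFlow are illustrative only):

    #include <cassert>
    #include <vector>

    enum class Op
    {
        Call,
        StoreLclVar,
        ReturnLcl,
        Nop
    };

    struct Stmt
    {
        Op       op;
        unsigned dstLcl = 0; // defined local, for StoreLclVar
        unsigned srcLcl = 0; // used local, for StoreLclVar/ReturnLcl
    };

    // Mirrors the validator's state machine: wait for the call, then accept only
    // copies of the tracked local and a final return of it.
    bool ValidateTailCallResultFlow(const std::vector<Stmt>& stmts, unsigned callResultLcl)
    {
        bool     active = false;         // corresponds to m_active
        unsigned lclNum = callResultLcl; // corresponds to m_lclNum

        for (const Stmt& s : stmts)
        {
            if (!active)
            {
                active = (s.op == Op::Call); // wait until we get to the actual call
                continue;
            }

            switch (s.op)
            {
                case Op::Nop: // self-stores morph into NOPs; skip them
                    break;
                case Op::StoreLclVar: // must trivially propagate the tracked local
                    if (s.srcLcl != lclNum)
                    {
                        return false;
                    }
                    lclNum = s.dstLcl;
                    break;
                case Op::ReturnLcl: // must return the tracked local
                    return s.srcLcl == lclNum;
                default: // anything else (e.g. a second call) is illegal here
                    return false;
            }
        }
        return false;
    }

    int main()
    {
        // tmp5 = CALL; tmp2 = tmp5; RETURN tmp2 -- the shape from the comment above.
        std::vector<Stmt> stmts = {{Op::Call}, {Op::StoreLclVar, 2, 5}, {Op::ReturnLcl, 0, 2}};
        assert(ValidateTailCallResultFlow(stmts, 5));
        return 0;
    }

Tracking the local number instead of the previously seen node (the old m_prevVal) also handles chains in which every link is a fresh LCL_VAR node, since only the number is compared.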
if (!info.compIsStatic && (lvaArg0Var != info.compThisArg)) { - var_types thisType = lvaTable[info.compThisArg].TypeGet(); - GenTree* arg0 = gtNewLclvNode(lvaArg0Var, thisType); - GenTree* arg0Assignment = gtNewAssignNode(arg0, gtNewLclvNode(info.compThisArg, thisType)); + GenTree* arg0Assignment = gtNewStoreLclVarNode(lvaArg0Var, gtNewLclVarNode(info.compThisArg)); Statement* arg0AssignmentStmt = gtNewStmt(arg0Assignment, callDI); fgInsertStmtBefore(block, paramAssignmentInsertionPoint, arg0AssignmentStmt); } @@ -7505,9 +7495,14 @@ void Compiler::fgMorphRecursiveFastTailCallIntoLoop(BasicBlock* block, GenTreeCa bool hadSuppressedInit = varDsc->lvSuppressedZeroInit; if ((info.compInitMem && (isUserLocal || structWithGCFields)) || hadSuppressedInit) { - GenTree* lcl = gtNewLclvNode(varNum, lclType); - GenTree* zero = gtNewZeroConNode((lclType == TYP_STRUCT) ? TYP_INT : lclType); - GenTree* init = gtNewAssignNode(lcl, zero); + GenTree* zero = (lclType == TYP_STRUCT) ? gtNewIconNode(0) : gtNewZeroConNode(lclType); + GenTree* init = gtNewStoreLclVarNode(varNum, zero); + init->gtType = lclType; // TODO-ASG: delete this zero-diff quirk. + if (lclType == TYP_STRUCT) + { + init = fgMorphInitBlock(init); + } + Statement* initStmt = gtNewStmt(init, callDI); fgInsertStmtBefore(block, lastStmt, initStmt); } @@ -7608,22 +7603,19 @@ Statement* Compiler::fgAssignRecursiveCallArgToCallerParam(GenTree* arg, // The argument is not assigned to a temp. We need to create a new temp and insert an assignment. // TODO: we can avoid a temp assignment if we can prove that the argument tree // doesn't involve any caller parameters. - unsigned tmpNum = lvaGrabTemp(true DEBUGARG("arg temp")); - lvaTable[tmpNum].lvType = arg->gtType; - GenTree* tempSrc = arg; - GenTree* tempDest = gtNewLclvNode(tmpNum, tempSrc->gtType); - GenTree* tmpAssignNode = gtNewAssignNode(tempDest, tempSrc); - Statement* tmpAssignStmt = gtNewStmt(tmpAssignNode, callDI); - fgInsertStmtBefore(block, tmpAssignmentInsertionPoint, tmpAssignStmt); + unsigned tmpNum = lvaGrabTemp(true DEBUGARG("arg temp")); + lvaTable[tmpNum].lvType = arg->gtType; + GenTree* tempSrc = arg; + GenTree* tmpStoreNode = gtNewStoreLclVarNode(tmpNum, tempSrc); + Statement* tmpStoreStmt = gtNewStmt(tmpStoreNode, callDI); + fgInsertStmtBefore(block, tmpAssignmentInsertionPoint, tmpStoreStmt); argInTemp = gtNewLclvNode(tmpNum, tempSrc->gtType); } // Now assign the temp to the parameter. - const LclVarDsc* paramDsc = lvaGetDesc(lclParamNum); - assert(paramDsc->lvIsParam); - GenTree* paramDest = gtNewLclvNode(lclParamNum, paramDsc->lvType); - GenTree* paramAssignNode = gtNewAssignNode(paramDest, argInTemp); - paramAssignStmt = gtNewStmt(paramAssignNode, callDI); + assert(lvaGetDesc(lclParamNum)->lvIsParam); + GenTree* paramStoreNode = gtNewStoreLclVarNode(lclParamNum, argInTemp); + paramAssignStmt = gtNewStmt(paramStoreNode, callDI); fgInsertStmtBefore(block, paramAssignmentInsertionPoint, paramAssignStmt); } @@ -7669,14 +7661,12 @@ GenTree* Compiler::fgMorphCall(GenTreeCall* call) assert(structHandle != NO_CLASS_HANDLE); const bool unsafeValueClsCheck = false; lvaSetStruct(tmpNum, structHandle, unsafeValueClsCheck); - var_types structType = lvaTable[tmpNum].lvType; - GenTree* dst = gtNewLclvNode(tmpNum, structType); - GenTree* assg = gtNewAssignNode(dst, call); - assg = fgMorphTree(assg); + GenTree* store = gtNewStoreLclVarNode(tmpNum, call); + store = fgMorphTree(store); - // Create the assignment statement and insert it before the current statement. 
- Statement* assgStmt = gtNewStmt(assg, compCurStmt->GetDebugInfo()); - fgInsertStmtBefore(compCurBB, compCurStmt, assgStmt); + // Create the store statement and insert it before the current statement. + Statement* storeStmt = gtNewStmt(store, compCurStmt->GetDebugInfo()); + fgInsertStmtBefore(compCurBB, compCurStmt, storeStmt); // Return the temp. GenTree* result = gtNewLclvNode(tmpNum, lvaTable[tmpNum].lvType); @@ -7684,14 +7674,10 @@ GenTree* Compiler::fgMorphCall(GenTreeCall* call) compCurBB->bbFlags |= BBF_HAS_CALL; // This block has a call -#ifdef DEBUG - if (verbose) - { - printf("\nInserting assignment of a multi-reg call result to a temp:\n"); - gtDispStmt(assgStmt); - } - result->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED; -#endif // DEBUG + JITDUMP("\nInserting assignment of a multi-reg call result to a temp:\n"); + DISPSTMT(storeStmt); + INDEBUG(result->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED); + return result; } #endif @@ -7888,17 +7874,8 @@ GenTree* Compiler::fgMorphCall(GenTreeCall* call) #endif // DEBUG GenTree* indexAddr = gtNewArrayIndexAddr(arr, index, TYP_REF, NO_CLASS_HANDLE); - GenTree* store; - if (compAssignmentRationalized) - { - store = gtNewStoreIndNode(TYP_REF, indexAddr, value); - } - else - { - store = gtNewAssignNode(gtNewIndir(TYP_REF, indexAddr), value); - } - - GenTree* result = fgMorphTree(store); + GenTree* store = gtNewStoreIndNode(TYP_REF, indexAddr, value); + GenTree* result = fgMorphTree(store); if (argSetup != nullptr) { result = new (this, GT_COMMA) GenTreeOp(GT_COMMA, TYP_VOID, argSetup, result); @@ -8131,9 +8108,9 @@ GenTree* Compiler::fgMorphLeaf(GenTree* tree) { assert(tree->OperIsLeaf()); - if (tree->OperIsNonPhiLocal() || tree->OperIs(GT_LCL_ADDR)) + if (tree->OperIs(GT_LCL_VAR, GT_LCL_FLD, GT_LCL_ADDR)) { - tree = fgMorphLocal(tree->AsLclVarCommon()); + tree = fgMorphLeafLocal(tree->AsLclVarCommon()); } else if (tree->OperIs(GT_FTN_ADDR)) { @@ -8489,15 +8466,30 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac, bool* optA // Some arithmetic operators need to use a helper call to the EE int helper; - case GT_ASG: - tree = fgDoNormalizeOnStore(tree); - /* fgDoNormalizeOnStore can change op2 */ - noway_assert(op1 == tree->AsOp()->gtOp1); - op2 = tree->AsOp()->gtOp2; + case GT_STORE_LCL_VAR: + case GT_STORE_LCL_FLD: + { + if (lvaGetDesc(tree->AsLclVarCommon())->IsAddressExposed()) + { + tree->AddAllEffectsFlags(GTF_GLOB_REF); + } - // Location nodes cannot be CSEd. - op1->gtFlags |= GTF_DONT_CSE; - break; + if (tree->IsPartialLclFld(this)) + { + tree->gtFlags |= GTF_VAR_USEASG; + } + + GenTree* expandedTree = fgMorphExpandLocal(tree->AsLclVarCommon()); + if (expandedTree != nullptr) + { + DBEXEC(tree != expandedTree, expandedTree->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED); + tree = expandedTree; + oper = tree->OperGet(); + op1 = tree->gtGetOp1(); + op2 = tree->gtGetOp2IfPresent(); + } + } + break; case GT_QMARK: case GT_JTRUE: @@ -9291,32 +9283,16 @@ DONE_MORPHING_CHILDREN: switch (oper) { - case GT_ASG: - fgAssignSetVarDef(tree); - - if (op2->OperIs(GT_CAST)) - { - tree = fgOptimizeCastOnStore(tree); - - assert(tree->OperIs(GT_ASG)); - op1 = tree->gtGetOp1(); - op2 = tree->gtGetOp2(); - } - - // Location nodes cannot be CSEd. 
- op1->gtFlags |= GTF_DONT_CSE; - break; - - case GT_STOREIND: case GT_STORE_LCL_VAR: case GT_STORE_LCL_FLD: + case GT_STOREIND: tree = fgOptimizeCastOnStore(tree); op1 = tree->gtGetOp1(); op2 = tree->gtGetOp2IfPresent(); if (tree->OperIs(GT_STOREIND)) { - GenTree* optimizedTree = fgOptimizeIndir(tree->AsIndir()); + GenTree* optimizedTree = fgMorphFinalizeIndir(tree->AsIndir()); if (optimizedTree != nullptr) { return optimizedTree; @@ -9667,33 +9643,15 @@ DONE_MORPHING_CHILDREN: tree->gtFlags |= (GTF_IND_INVARIANT | GTF_IND_NONFAULTING | GTF_IND_NONNULL); } - GenTree* optimizedTree = fgOptimizeIndir(tree->AsIndir()); + GenTree* optimizedTree = fgMorphFinalizeIndir(tree->AsIndir()); if (optimizedTree != nullptr) { return optimizedTree; } -#ifdef TARGET_ARM - GenTree* effOp1 = op1->gtEffectiveVal(true); - // Check for a misalignment floating point indirection. - if (effOp1->OperIs(GT_ADD) && varTypeIsFloating(typ)) - { - GenTree* addOp2 = effOp1->gtGetOp2(); - if (addOp2->IsCnsIntOrI()) - { - ssize_t offset = addOp2->AsIntCon()->gtIconVal; - if ((offset % emitTypeSize(TYP_FLOAT)) != 0) - { - tree->gtFlags |= GTF_IND_UNALIGNED; - } - } - } -#endif // TARGET_ARM - // Only do this optimization when we are in the global optimizer. Doing this after value numbering // could result in an invalid value number for the newly generated GT_IND node. - // We skip INDs with GTF_DONT_CSE which is set if the IND is a location. - if (!varTypeIsSIMD(tree) && op1->OperIs(GT_COMMA) && fgGlobalMorph && ((tree->gtFlags & GTF_DONT_CSE) == 0)) + if (!varTypeIsStruct(tree) && op1->OperIs(GT_COMMA) && fgGlobalMorph) { // Perform the transform IND(COMMA(x, ..., z)) -> COMMA(x, ..., IND(z)). GenTree* commaNode = op1; @@ -9711,7 +9669,7 @@ DONE_MORPHING_CHILDREN: commaNode->gtType = typ; commaNode->gtFlags = (treeFlags & ~GTF_REVERSE_OPS & ~GTF_ASG & ~GTF_CALL); // Bashing the GT_COMMA flags here is - // dangerous, clear the GTF_REVERSE_OPS, GT_ASG, and GT_CALL at + // dangerous, clear the GTF_REVERSE_OPS, GTF_ASG, and GTF_CALL at // least. commaNode->gtFlags |= ((commaNode->AsOp()->gtOp1->gtFlags | commaNode->AsOp()->gtOp2->gtFlags) & (GTF_ASG | GTF_CALL)); @@ -9783,7 +9741,7 @@ DONE_MORPHING_CHILDREN: case GT_COMMA: /* Special case: trees that don't produce a value */ - if (op2->OperIs(GT_ASG) || (op2->OperGet() == GT_COMMA && op2->TypeGet() == TYP_VOID) || fgIsThrow(op2)) + if (op2->OperIsStore() || (op2->OperGet() == GT_COMMA && op2->TypeGet() == TYP_VOID) || fgIsThrow(op2)) { typ = tree->gtType = TYP_VOID; } @@ -9926,7 +9884,8 @@ DONE_MORPHING_CHILDREN: // Propagate comma throws. // If we are in the Valuenum CSE phase then don't morph away anything as these // nodes may have CSE defs/uses in them. - if (fgGlobalMorph && (oper != GT_ASG) && (oper != GT_COLON)) + if (fgGlobalMorph && (oper != GT_COLON) && + /* TODO-ASG-Cleanup: delete this zero-diff quirk */ !GenTree::OperIsStore(oper)) { if ((op1 != nullptr) && fgIsCommaThrow(op1, true)) { @@ -10004,21 +9963,40 @@ void Compiler::fgTryReplaceStructLocalWithField(GenTree* tree) } //------------------------------------------------------------------------ -// fgOptimizeIndir: Optimize an indirection. +// fgMorphFinalizeIndir: Finalize morphing an indirection. // // Turns indirections off of local addresses into local field nodes. +// Adds UNALIGNED for some accesses on ARM for backwards compatibility. 
//
// Arguments:
-//    indir - The indirection to optimize (can be a store)
+//    indir - The indirection to morph (can be a store)
//
// Return Value:
-//    The optimized tree or "nullptr" if no transformations were performed.
+//    The optimized tree or "nullptr" if no transformations that would
+//    replace it were performed.
//
-GenTree* Compiler::fgOptimizeIndir(GenTreeIndir* indir)
+GenTree* Compiler::fgMorphFinalizeIndir(GenTreeIndir* indir)
 {
     assert(indir->isIndir());
     GenTree* addr = indir->Addr();

+#ifdef TARGET_ARM
+    GenTree* effAddr = addr->gtEffectiveVal(true);
+    // Check for a misaligned floating point indirection.
+    if (effAddr->OperIs(GT_ADD) && varTypeIsFloating(indir))
+    {
+        GenTree* addOp2 = effAddr->gtGetOp2();
+        if (addOp2->IsCnsIntOrI())
+        {
+            ssize_t offset = addOp2->AsIntCon()->IconValue();
+            if ((offset % genTypeSize(TYP_FLOAT)) != 0)
+            {
+                indir->gtFlags |= GTF_IND_UNALIGNED;
+            }
+        }
+    }
+#endif // TARGET_ARM
+
     if (!indir->IsVolatile() && !indir->TypeIs(TYP_STRUCT) && addr->OperIs(GT_LCL_ADDR) && !optValnumCSE_phase)
     {
         unsigned size = indir->Size();
@@ -10171,17 +10149,14 @@ GenTree* Compiler::fgOptimizeCast(GenTreeCast* cast)
 //
 GenTree* Compiler::fgOptimizeCastOnStore(GenTree* store)
 {
-    assert(store->OperIs(GT_ASG) || store->OperIsStore());
+    assert(store->OperIsStore());

     GenTree* const src = store->Data();

     if (!src->OperIs(GT_CAST))
         return store;

-    GenTree* const dst = store->GetStoreDestination();
-
-    // TODO-ASG-Cleanup: delete the GT_LCL_VAR check.
-    if (dst->OperIs(GT_LCL_VAR, GT_STORE_LCL_VAR) && !lvaGetDesc(dst->AsLclVarCommon())->lvNormalizeOnLoad())
+    if (store->OperIs(GT_STORE_LCL_VAR) && !lvaGetDesc(store->AsLclVarCommon())->lvNormalizeOnLoad())
         return store;

     if (src->gtOverflow())
@@ -10197,7 +10172,7 @@ GenTree* Compiler::fgOptimizeCastOnStore(GenTree* store)
     if (gtIsActiveCSE_Candidate(cast->CastOp()))
         return store;

-    if (!varTypeIsSmall(dst))
+    if (!varTypeIsSmall(store))
         return store;

     if (!varTypeIsSmall(castToType))
@@ -10209,7 +10184,7 @@ GenTree* Compiler::fgOptimizeCastOnStore(GenTree* store)
     // If we are performing a narrowing cast and
     // castToType is larger or the same as op1's type
     // then we can discard the cast.
-    if (genTypeSize(castToType) < genTypeSize(dst))
+    if (genTypeSize(castToType) < genTypeSize(store))
         return store;

     if (genActualType(castFromType) == genActualType(castToType))
@@ -11463,13 +11438,6 @@ GenTree* Compiler::fgPropagateCommaThrow(GenTree* parent, GenTreeOp* commaThrow,
     assert(fgGlobalMorph);
     assert(fgIsCommaThrow(commaThrow));

-    bool mightBeLocation = parent->OperIsIndir() && ((parent->gtFlags & GTF_DONT_CSE) != 0);
-
-    if (mightBeLocation)
-    {
-        return nullptr;
-    }
-
     if ((commaThrow->gtFlags & GTF_COLON_COND) == 0)
     {
         fgRemoveRestOfBlock = true;
@@ -11652,7 +11620,6 @@ GenTree* Compiler::fgMorphSmpOpOptional(GenTreeOp* tree, bool* optAssertionPropD

     switch (oper)
     {
-        case GT_ASG:
         case GT_STOREIND:
         case GT_STORE_BLK:
         case GT_STORE_LCL_VAR:
@@ -11690,7 +11657,7 @@ GenTree* Compiler::fgMorphSmpOpOptional(GenTreeOp* tree, bool* optAssertionPropD
             // one cast and sometimes there is another one after it that gets removed by this
             // code. fgMorphSmp should be improved to remove all redundant casts so this code
             // can be removed.
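A note on fgOptimizeCastOnStore above: a narrowing cast is redundant when the store's own small type performs the same truncation, and it must be kept when the cast is narrower than the store, which is what the genTypeSize comparison guards. A minimal arithmetic sketch of both cases in plain C++ (assuming two's-complement truncation, which C++20 guarantees for static_cast):

    #include <cassert>
    #include <cstdint>

    int main()
    {
        int32_t value = 0x12345678;

        // A narrowing cast feeding a store of the same small type is redundant:
        // the small-typed location truncates to 16 bits anyway.
        int16_t withCast    = static_cast<int16_t>(static_cast<int16_t>(value)); // cast, then store-truncate
        int16_t withoutCast = static_cast<int16_t>(value);                       // store-truncate only
        assert(withCast == withoutCast); // both hold 0x5678

        // The "genTypeSize(castToType) < genTypeSize(store)" guard exists because
        // a cast *narrower* than the store type is not redundant:
        int16_t narrowThenStore = static_cast<int16_t>(static_cast<int8_t>(value)); // 0x0078
        assert(narrowThenStore != withoutCast);                                     // vs. 0x5678
        return 0;
    }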
- if ((tree->OperIs(GT_ASG) && op1->OperIs(GT_IND)) || tree->OperIs(GT_STOREIND)) + if (tree->OperIs(GT_STOREIND)) { if (typ == TYP_LONG) { @@ -11702,7 +11669,7 @@ GenTree* Compiler::fgMorphSmpOpOptional(GenTreeOp* tree, bool* optAssertionPropD break; } - if ((op2->gtFlags & GTF_CALL) && (op1->gtFlags & GTF_ALL_EFFECT)) + if (op2->gtFlags & GTF_CALL) { break; } @@ -12937,25 +12904,10 @@ void Compiler::fgMorphTreeDone(GenTree* tree, bool optAssertionPropDone, bool is // Kill active assertions // - if (optAssertionCount > 0) + GenTreeLclVarCommon* lclVarTree = nullptr; + if ((optAssertionCount > 0) && tree->DefinesLocal(this, &lclVarTree)) { - GenTreeLclVarCommon* lclVarTree = nullptr; - - // The check below will miss LIR-style assignments. - // - // But we shouldn't be running local assertion prop on these, - // as local prop gets disabled when we run global prop. - assert(!tree->OperIs(GT_STORE_LCL_VAR, GT_STORE_LCL_FLD)); - - // DefinesLocal can return true for some BLK op uses, so - // check what gets assigned only when we're at an assignment. - // - if (tree->OperIsSsaDef() && tree->DefinesLocal(this, &lclVarTree)) - { - const unsigned lclNum = lclVarTree->GetLclNum(); - noway_assert(lclNum < lvaCount); - fgKillDependentAssertions(lclNum DEBUGARG(tree)); - } + fgKillDependentAssertions(lclVarTree->GetLclNum() DEBUGARG(tree)); } // Generate new assertions @@ -13882,7 +13834,7 @@ void Compiler::fgMergeBlockReturn(BasicBlock* block) } if (genReturnLocal != BAD_VAR_NUM) { - // replace the GT_RETURN node to be a GT_ASG that stores the return value into genReturnLocal. + // replace the GT_RETURN node to be a STORE_LCL_VAR that stores the return value into genReturnLocal. // Method must be returning a value other than TYP_VOID. noway_assert(compMethodHasRetVal()); @@ -14211,8 +14163,8 @@ void Compiler::fgPostExpandQmarkChecks() // // Arguments: // expr - the tree, a root node that may contain a top level qmark. -// ppDst - [optional] if the top level GT_QMARK node is assigned ot a -// GT_LCL_VAR, then this is that local node. Otherwise nullptr. +// ppDst - [optional] if the top level GT_QMARK node is stored into +// a local, then this is that store node. Otherwise nullptr. // // Returns: // The GT_QMARK node, or nullptr if there is no top level qmark. @@ -14230,14 +14182,13 @@ GenTree* Compiler::fgGetTopLevelQmark(GenTree* expr, GenTree** ppDst /* = NULL * { topQmark = expr; } - else if (expr->OperIs(GT_ASG) && expr->gtGetOp2()->OperIs(GT_QMARK) && - expr->gtGetOp1()->OperIs(GT_LCL_VAR, GT_LCL_FLD)) + else if (expr->OperIsLocalStore() && expr->AsLclVarCommon()->Data()->OperIs(GT_QMARK)) { - topQmark = expr->gtGetOp2(); + topQmark = expr->AsLclVarCommon()->Data(); if (ppDst != nullptr) { - *ppDst = expr->gtGetOp1(); + *ppDst = expr; } } @@ -14290,8 +14241,7 @@ void Compiler::fgExpandQmarkForCastInstOf(BasicBlock* block, Statement* stmt) GenTree* qmark = fgGetTopLevelQmark(expr, &dst); noway_assert(dst != nullptr); - assert(dst->OperIs(GT_LCL_VAR, GT_LCL_FLD)); - + assert(dst->OperIsLocalStore()); assert(qmark->gtFlags & GTF_QMARK_CAST_INSTOF); // Get cond, true, false exprs for the qmark. @@ -14388,15 +14338,23 @@ void Compiler::fgExpandQmarkForCastInstOf(BasicBlock* block, Statement* stmt) jmpStmt = fgNewStmtFromTree(jmpTree, stmt->GetDebugInfo()); fgInsertStmtAtEnd(cond2Block, jmpStmt); - // AsgBlock should get tmp = op1 assignment. 
- trueExpr = gtNewAssignNode(gtClone(dst), trueExpr); - Statement* trueStmt = fgNewStmtFromTree(trueExpr, stmt->GetDebugInfo()); + unsigned dstLclNum = dst->AsLclVarCommon()->GetLclNum(); + + // AsgBlock should get tmp = op1. + GenTree* trueExprStore = + dst->OperIs(GT_STORE_LCL_FLD) + ? gtNewStoreLclFldNode(dstLclNum, dst->TypeGet(), dst->AsLclFld()->GetLclOffs(), trueExpr) + : gtNewStoreLclVarNode(dstLclNum, trueExpr)->AsLclVarCommon(); + Statement* trueStmt = fgNewStmtFromTree(trueExprStore, stmt->GetDebugInfo()); fgInsertStmtAtEnd(asgBlock, trueStmt); // Since we are adding helper in the JTRUE false path, reverse the cond2 and add the helper. gtReverseCond(cond2Expr); - GenTree* helperExpr = gtNewAssignNode(gtClone(dst), true2Expr); - Statement* helperStmt = fgNewStmtFromTree(helperExpr, stmt->GetDebugInfo()); + GenTree* helperExprStore = + dst->OperIs(GT_STORE_LCL_FLD) + ? gtNewStoreLclFldNode(dstLclNum, dst->TypeGet(), dst->AsLclFld()->GetLclOffs(), true2Expr) + : gtNewStoreLclVarNode(dstLclNum, true2Expr)->AsLclVarCommon(); + Statement* helperStmt = fgNewStmtFromTree(helperExprStore, stmt->GetDebugInfo()); fgInsertStmtAtEnd(helperBlock, helperStmt); // Finally remove the nested qmark stmt. @@ -14608,11 +14566,12 @@ void Compiler::fgExpandQmarkStmt(BasicBlock* block, Statement* stmt) fgRemoveStmt(block, stmt); // Since we have top level qmarks, we either have a dst for it in which case - // we need to create tmps for true and falseExprs, else just don't bother - // assigning. + // we need to create tmps for true and falseExprs, else just don't bother assigning. + unsigned dstLclNum = BAD_VAR_NUM; if (dst != nullptr) { - assert(dst->OperIs(GT_LCL_VAR, GT_LCL_FLD)); + dstLclNum = dst->AsLclVarCommon()->GetLclNum(); + assert(dst->OperIsLocalStore()); } else { @@ -14623,7 +14582,9 @@ void Compiler::fgExpandQmarkStmt(BasicBlock* block, Statement* stmt) { if (dst != nullptr) { - trueExpr = gtNewAssignNode(gtClone(dst), trueExpr); + trueExpr = dst->OperIs(GT_STORE_LCL_FLD) + ? gtNewStoreLclFldNode(dstLclNum, dst->TypeGet(), dst->AsLclFld()->GetLclOffs(), trueExpr) + : gtNewStoreLclVarNode(dstLclNum, trueExpr)->AsLclVarCommon(); } Statement* trueStmt = fgNewStmtFromTree(trueExpr, stmt->GetDebugInfo()); fgInsertStmtAtEnd(thenBlock, trueStmt); @@ -14634,7 +14595,9 @@ void Compiler::fgExpandQmarkStmt(BasicBlock* block, Statement* stmt) { if (dst != nullptr) { - falseExpr = gtNewAssignNode(gtClone(dst), falseExpr); + falseExpr = dst->OperIs(GT_STORE_LCL_FLD) + ? gtNewStoreLclFldNode(dstLclNum, dst->TypeGet(), dst->AsLclFld()->GetLclOffs(), falseExpr) + : gtNewStoreLclVarNode(dstLclNum, falseExpr)->AsLclVarCommon(); } Statement* falseStmt = fgNewStmtFromTree(falseExpr, stmt->GetDebugInfo()); fgInsertStmtAtEnd(elseBlock, falseStmt); @@ -14980,15 +14943,12 @@ PhaseStatus Compiler::fgRetypeImplicitByRefArgs() if (!undoPromotion) { // Insert IR that initializes the temp from the parameter. - // LHS is a simple reference to the temp. fgEnsureFirstBBisScratch(); - GenTree* lhs = gtNewLclvNode(newLclNum, varDsc->lvType); - // RHS is an indirection (using GT_BLK) off the parameter. GenTree* addr = gtNewLclvNode(lclNum, TYP_BYREF); - GenTree* rhs = (varDsc->TypeGet() == TYP_STRUCT) ? gtNewBlkIndir(varDsc->GetLayout(), addr) - : gtNewIndir(varDsc->TypeGet(), addr); - GenTree* assign = gtNewAssignNode(lhs, rhs); - fgNewStmtAtBeg(fgFirstBB, assign); + GenTree* data = (varDsc->TypeGet() == TYP_STRUCT) ? 
gtNewBlkIndir(varDsc->GetLayout(), addr)
+                                     : gtNewIndir(varDsc->TypeGet(), addr);
+                GenTree* store = gtNewStoreLclVarNode(newLclNum, data);
+                fgNewStmtAtBeg(fgFirstBB, store);
             }

             // Update the locals corresponding to the promoted fields.
@@ -15190,7 +15150,7 @@ bool Compiler::fgCheckStmtAfterTailCall()
     if (nextMorphStmt != nullptr)
     {
         GenTree* callExpr = callStmt->GetRootNode();
-        if (callExpr->gtOper != GT_ASG)
+        if (!callExpr->OperIs(GT_STORE_LCL_VAR))
         {
             // The next stmt can be GT_RETURN(TYP_VOID) or GT_RETURN(lclVar),
             // where lclVar was the return buffer in the call for structs or simd.
@@ -15202,8 +15162,8 @@ bool Compiler::fgCheckStmtAfterTailCall()
         }
         else
         {
-            noway_assert(callExpr->gtGetOp1()->OperIsLocal());
-            unsigned callResultLclNumber = callExpr->gtGetOp1()->AsLclVarCommon()->GetLclNum();
+            noway_assert(callExpr->OperIs(GT_STORE_LCL_VAR));
+            unsigned callResultLclNumber = callExpr->AsLclVar()->GetLclNum();

 #if FEATURE_TAILCALL_OPT_SHARED_RETURN

@@ -15213,7 +15173,7 @@ bool Compiler::fgCheckStmtAfterTailCall()
             //
             // And if we're returning a small type we may see a cast
             // on the source side.
-            while ((nextMorphStmt != nullptr) && (nextMorphStmt->GetRootNode()->OperIs(GT_ASG, GT_NOP)))
+            while ((nextMorphStmt != nullptr) && (nextMorphStmt->GetRootNode()->OperIs(GT_STORE_LCL_VAR, GT_NOP)))
             {
                 if (nextMorphStmt->GetRootNode()->OperIs(GT_NOP))
                 {
@@ -15222,11 +15182,9 @@ bool Compiler::fgCheckStmtAfterTailCall()
                 }
                 Statement* moveStmt = nextMorphStmt;
                 GenTree*   moveExpr = nextMorphStmt->GetRootNode();
-                GenTree*   moveDest = moveExpr->gtGetOp1();
-                noway_assert(moveDest->OperIsLocal());

                 // Tunnel through any casts on the source side.
-                GenTree* moveSource = moveExpr->gtGetOp2();
+                GenTree* moveSource = moveExpr->AsLclVar()->Data();
                 while (moveSource->OperIs(GT_CAST))
                 {
                     noway_assert(!moveSource->gtOverflow());
@@ -15238,7 +15196,7 @@ bool Compiler::fgCheckStmtAfterTailCall()
                 // along the chain.
                 const unsigned srcLclNum = moveSource->AsLclVarCommon()->GetLclNum();
                 noway_assert(srcLclNum == callResultLclNumber);
-                const unsigned dstLclNum = moveDest->AsLclVarCommon()->GetLclNum();
+                const unsigned dstLclNum = moveExpr->AsLclVar()->GetLclNum();
                 callResultLclNumber = dstLclNum;

                 nextMorphStmt = moveStmt->GetNextStmt();
diff --git a/src/coreclr/jit/morphblock.cpp b/src/coreclr/jit/morphblock.cpp
index f72ed24..2270b26 100644
--- a/src/coreclr/jit/morphblock.cpp
+++ b/src/coreclr/jit/morphblock.cpp
@@ -36,9 +36,8 @@ protected:
     Compiler* m_comp;
     bool      m_initBlock;

-    GenTree* m_asg = nullptr;
-    GenTree* m_dst = nullptr;
-    GenTree* m_src = nullptr;
+    GenTree* m_store = nullptr;
+    GenTree* m_src   = nullptr;

     unsigned     m_blockSize   = 0;
     ClassLayout* m_blockLayout = nullptr;
@@ -69,7 +68,7 @@ protected:
 //
 // Arguments:
 //    comp - a compiler instance;
-//    tree - A GT_ASG tree that performs block initialization.
+//    tree - A store tree that performs block initialization.
 //
 // Return Value:
 //    A possibly modified tree to perform the initialization.
@@ -88,7 +87,7 @@ GenTree* MorphInitBlockHelper::MorphInitBlock(Compiler* comp, GenTree* tree)
 // Arguments:
 //    comp - a compiler instance;
 //    initBlock - true if this is an init block op, false if it is a copy block;
-//    asg - GT_ASG node to morph.
+//    store - store node to morph.
 //
 // Notes:
 //    Most class members are initialized via in-class member initializers.
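With the separate m_dst field gone, the helper's bookkeeping is expressible entirely in terms of the store node: local stores carry the destination local themselves, and the block size comes from the store's layout or scalar type. A simplified model of that classification (Oper, StoreNode and ComputeBlockSize are hypothetical illustration types, not the JIT's):

    #include <cassert>

    enum class Oper
    {
        StoreLclVar,
        StoreLclFld,
        StoreInd,
        StoreBlk
    };

    struct StoreNode
    {
        Oper     oper;
        unsigned structSize; // layout size when the store is struct-typed, else 0
        unsigned scalarSize; // primitive size otherwise (genTypeSize equivalent)

        bool IsStruct() const
        {
            return structSize != 0;
        }
        bool IsLocalStore() const
        {
            return (oper == Oper::StoreLclVar) || (oper == Oper::StoreLclFld);
        }
    };

    // With stores as single nodes there is no separate "destination" tree: the
    // helper classifies the store itself and derives the block size from it.
    unsigned ComputeBlockSize(const StoreNode& store)
    {
        unsigned size = store.IsStruct() ? store.structSize : store.scalarSize;
        assert(size != 0); // mirrors "assert(m_blockSize != 0)"
        return size;
    }

    int main()
    {
        assert(ComputeBlockSize({Oper::StoreBlk, 16, 0}) == 16);  // struct-typed block store
        assert(ComputeBlockSize({Oper::StoreLclVar, 0, 4}) == 4); // int-typed local store
        assert(StoreNode{Oper::StoreLclFld, 0, 2}.IsLocalStore());
        return 0;
    }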
@@ -96,9 +95,9 @@ GenTree* MorphInitBlockHelper::MorphInitBlock(Compiler* comp, GenTree* tree) MorphInitBlockHelper::MorphInitBlockHelper(Compiler* comp, GenTree* store, bool initBlock = true) : m_comp(comp), m_initBlock(initBlock) { - assert(store->OperIs(GT_ASG) || store->OperIsStore()); + assert(store->OperIsStore()); assert((m_initBlock == store->OperIsInitBlkOp()) && (!m_initBlock == store->OperIsCopyBlkOp())); - m_asg = store; + m_store = store; } //------------------------------------------------------------------------ @@ -137,7 +136,7 @@ GenTree* MorphInitBlockHelper::Morph() #ifdef DEBUG // If we are going to return a different node than the input then morph // expects us to have set GTF_DEBUG_NODE_MORPHED. - if ((m_result != m_asg) || (sideEffects != nullptr)) + if ((m_result != m_store) || (sideEffects != nullptr)) { m_result->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED; } @@ -179,24 +178,9 @@ GenTree* MorphInitBlockHelper::Morph() // void MorphInitBlockHelper::PrepareDst() { - m_dst = m_asg->GetStoreDestination(); - - // Commas cannot be destinations. - assert(!m_dst->OperIs(GT_COMMA)); - - // TODO-ASG: delete this retyping. - if (m_asg->TypeGet() != m_dst->TypeGet()) - { - assert(!m_initBlock && "the store type should be final for an init block."); - JITDUMP("changing type of store from %-6s to %-6s\n", varTypeName(m_asg->TypeGet()), - varTypeName(m_dst->TypeGet())); - - m_asg->ChangeType(m_dst->TypeGet()); - } - - if (m_dst->IsLocal()) + if (m_store->OperIsLocalStore()) { - m_dstLclNode = m_dst->AsLclVarCommon(); + m_dstLclNode = m_store->AsLclVarCommon(); m_dstLclOffset = m_dstLclNode->GetLclOffs(); m_dstLclNum = m_dstLclNode->GetLclNum(); m_dstVarDsc = m_comp->lvaGetDesc(m_dstLclNum); @@ -204,23 +188,22 @@ void MorphInitBlockHelper::PrepareDst() // Kill everything about m_dstLclNum (and its field locals) if (m_comp->optLocalAssertionProp && (m_comp->optAssertionCount > 0)) { - m_comp->fgKillDependentAssertions(m_dstLclNum DEBUGARG(m_asg)); + m_comp->fgKillDependentAssertions(m_dstLclNum DEBUGARG(m_store)); } } else { - assert(m_dst == m_dst->gtEffectiveVal() && "the commas were skipped in MorphBlock"); - assert(m_dst->OperIsIndir() && (!m_dst->isIndir() || !m_dst->TypeIs(TYP_STRUCT))); + assert(m_store->OperIs(GT_STOREIND, GT_STORE_BLK)); } - if (m_dst->TypeIs(TYP_STRUCT)) + if (m_store->TypeIs(TYP_STRUCT)) { - m_blockLayout = m_dst->GetLayout(m_comp); + m_blockLayout = m_store->GetLayout(m_comp); m_blockSize = m_blockLayout->GetSize(); } else { - m_blockSize = genTypeSize(m_dst); + m_blockSize = genTypeSize(m_store); } assert(m_blockSize != 0); @@ -228,7 +211,7 @@ void MorphInitBlockHelper::PrepareDst() #if defined(DEBUG) if (m_comp->verbose) { - printf("PrepareDst for [%06u] ", m_comp->dspTreeID(m_dst)); + printf("PrepareDst for [%06u] ", m_comp->dspTreeID(m_store)); if (m_dstLclNode != nullptr) { printf("have found a local var V%02u.\n", m_dstLclNum); @@ -254,7 +237,7 @@ void MorphInitBlockHelper::PropagateBlockAssertions() { if (m_comp->optLocalAssertionProp) { - m_comp->optAssertionGen(m_asg); + m_comp->optAssertionGen(m_store); } } @@ -263,7 +246,7 @@ void MorphInitBlockHelper::PropagateBlockAssertions() // expanded tree // // Notes: -// After the copy/init is expanded, we may see additional expansions +// After the copy/init is expanded, we may see additional assertions // to generate. 
// void MorphInitBlockHelper::PropagateExpansionAssertions() @@ -272,7 +255,7 @@ void MorphInitBlockHelper::PropagateExpansionAssertions() // if (m_comp->optLocalAssertionProp && (m_transformationDecision == BlockTransformation::OneAsgBlock)) { - m_comp->optAssertionGen(m_asg); + m_comp->optAssertionGen(m_store); } } @@ -282,7 +265,7 @@ void MorphInitBlockHelper::PropagateExpansionAssertions() // void MorphInitBlockHelper::PrepareSrc() { - m_src = m_asg->Data(); + m_src = m_store->Data(); } //------------------------------------------------------------------------ @@ -317,13 +300,12 @@ void MorphInitBlockHelper::MorphStructCases() if (m_transformationDecision == BlockTransformation::Undefined) { - m_result = m_asg; + m_result = m_store; m_transformationDecision = BlockTransformation::StructBlock; if (m_dstVarDsc != nullptr) { - // TODO-ASG: delete the GT_LCL_FLD check on "m_dst". - if (m_dst->OperIs(GT_LCL_FLD, GT_STORE_LCL_FLD)) + if (m_store->OperIs(GT_STORE_LCL_FLD)) { m_comp->lvaSetVarDoNotEnregister(m_dstLclNum DEBUGARG(DoNotEnregisterReason::LocalField)); } @@ -475,8 +457,8 @@ void MorphInitBlockHelper::TryInitFieldByField() //------------------------------------------------------------------------ // TryPrimitiveInit: Replace block zero-initialization with a primitive store. // -// Transforms patterns like "ASG(BLK(ADDR(LCL_VAR int)), 0)" into simple -// assignments: "ASG(LCL_VAR int, 0)". +// Transforms patterns like "STORE_BLK(LCL_VAR_ADDR, 0)" into simple +// stores: "STORE_LCL_VAR(0)". // // If successful, will set "m_transformationDecision" to "OneAsgBlock". // @@ -494,27 +476,12 @@ void MorphInitBlockHelper::TryPrimitiveInit() m_src->BashToZeroConst(lclVarType); } - if (m_asg->OperIs(GT_ASG)) - { - m_dst->ChangeType(m_dstVarDsc->lvNormalizeOnLoad() ? lclVarType : genActualType(lclVarType)); - m_dst->ChangeOper(GT_LCL_VAR); - m_dst->AsLclVar()->SetLclNum(m_dstLclNum); - m_dst->gtFlags |= GTF_VAR_DEF; - - m_asg->ChangeType(m_dst->TypeGet()); - m_asg->AsOp()->gtOp1 = m_dst; - m_asg->AsOp()->gtOp2 = m_src; - } - else - { - m_asg->ChangeType(m_dstVarDsc->lvNormalizeOnLoad() ? lclVarType : genActualType(lclVarType)); - m_asg->ChangeOper(GT_STORE_LCL_VAR); - m_asg->AsLclVar()->SetLclNum(m_dstLclNum); - m_asg->gtFlags |= GTF_VAR_DEF; - } - INDEBUG(m_dst->AsLclVar()->ResetLclILoffs()); + m_store->ChangeType(m_dstVarDsc->lvNormalizeOnLoad() ? lclVarType : genActualType(lclVarType)); + m_store->ChangeOper(GT_STORE_LCL_VAR); + m_store->AsLclVar()->SetLclNum(m_dstLclNum); + m_store->gtFlags |= GTF_VAR_DEF; - m_result = m_asg; + m_result = m_store; m_transformationDecision = BlockTransformation::OneAsgBlock; } } @@ -533,37 +500,32 @@ void MorphInitBlockHelper::TryPrimitiveInit() // the nodes. // // Notes: -// We have a tree like the following (note that location-valued commas are -// illegal, so there cannot be a comma on the left): +// We have a tree like the following: // -// ASG STOREIND -// / \ / \. -// IND COMMA or B COMMA -// | / \ / \. -// B C D C D +// STOREIND +// / \. +// B COMMA +// / \. +// C D // // We'd like downstream code to just see and expand ASG(IND(B), D). // We will produce: // -// COMMA COMMA -// / \. / \. -// ASG COMMA STORE_LCL_VAR COMMA -// / \ / \. or / / \. -// tmp B C ASG B C STOREIND -// / \. / \. -// IND D tmp D -// | -// tmp +// COMMA +// / \. +// STORE_LCL_VAR COMMA +// / / \. +// B C STOREIND +// / \. +// tmp D // // If the store has GTF_REVERSE_OPS then we will produce: // -// COMMA COMMA -// / \. / \. -// C ASG or C STOREIND -// / \. / \. 
-// IND D B D -// | -// B +// COMMA +// / \. +// C STOREIND +// / \. +// B D // // While keeping the GTF_REVERSE_OPS. // @@ -588,11 +550,9 @@ GenTree* MorphInitBlockHelper::EliminateCommas(GenTree** commaPool) *commaPool = comma; }; - GenTree* dst = m_asg->GetStoreDestination(); - GenTree* src = m_asg->Data(); - assert(dst->OperIsIndir() || dst->OperIsLocal()); + GenTree* src = m_store->Data(); - if (m_asg->IsReverseOp()) + if (m_store->IsReverseOp()) { while (src->OperIs(GT_COMMA)) { @@ -602,16 +562,16 @@ GenTree* MorphInitBlockHelper::EliminateCommas(GenTree** commaPool) } else { - if (dst->OperIsIndir() && src->OperIs(GT_COMMA)) + if (m_store->OperIsIndir() && src->OperIs(GT_COMMA)) { - GenTree* addr = dst->AsIndir()->Addr(); + GenTree* addr = m_store->AsIndir()->Addr(); if (((addr->gtFlags & GTF_ALL_EFFECT) != 0) || (((src->gtFlags & GTF_ASG) != 0) && !addr->IsInvariant())) { unsigned lhsAddrLclNum = m_comp->lvaGrabTemp(true DEBUGARG("Block morph LHS addr")); addSideEffect(m_comp->gtNewTempAssign(lhsAddrLclNum, addr)); - dst->AsUnOp()->gtOp1 = m_comp->gtNewLclvNode(lhsAddrLclNum, genActualType(addr)); - m_comp->gtUpdateNodeSideEffects(dst); + m_store->AsUnOp()->gtOp1 = m_comp->gtNewLclvNode(lhsAddrLclNum, genActualType(addr)); + m_comp->gtUpdateNodeSideEffects(m_store); } } @@ -624,8 +584,8 @@ GenTree* MorphInitBlockHelper::EliminateCommas(GenTree** commaPool) if (sideEffects != nullptr) { - m_asg->Data() = src; - m_comp->gtUpdateNodeSideEffects(m_asg); + m_store->Data() = src; + m_comp->gtUpdateNodeSideEffects(m_store); } return sideEffects; @@ -672,7 +632,7 @@ private: // // Arguments: // comp - a compiler instance; -// tree - A GT_ASG tree that performs block copy. +// tree - A store tree that performs block copy. // // Return Value: // A possibly modified tree to perform the copy. @@ -689,12 +649,12 @@ GenTree* MorphCopyBlockHelper::MorphCopyBlock(Compiler* comp, GenTree* tree) // // Arguments: // comp - a compiler instance; -// asg - GT_ASG node to morph. +// store - store node to morph. // // Notes: // Most class members are initialized via in-class member initializers. // -MorphCopyBlockHelper::MorphCopyBlockHelper(Compiler* comp, GenTree* asg) : MorphInitBlockHelper(comp, asg, false) +MorphCopyBlockHelper::MorphCopyBlockHelper(Compiler* comp, GenTree* store) : MorphInitBlockHelper(comp, store, false) { } @@ -704,7 +664,7 @@ MorphCopyBlockHelper::MorphCopyBlockHelper(Compiler* comp, GenTree* asg) : Morph // void MorphCopyBlockHelper::PrepareSrc() { - m_src = m_asg->Data(); + m_src = m_store->Data(); if (m_src->IsLocal()) { @@ -715,8 +675,8 @@ void MorphCopyBlockHelper::PrepareSrc() } // Verify that the types of the store and data match. 
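The assert that follows leans on layout compatibility for struct-typed copies. A hypothetical stand-in for the shape of the ClassLayout::AreCompatible check (the real method also considers class handles; this sketch models only size and GC-slot agreement):

    #include <cassert>
    #include <vector>

    struct Layout
    {
        unsigned          size;
        std::vector<bool> gcPtrMap; // one entry per pointer-sized slot: true = GC ref
    };

    // Same size and the same GC-pointer map make a copy safe to retype or decompose.
    bool AreCompatible(const Layout& a, const Layout& b)
    {
        return (a.size == b.size) && (a.gcPtrMap == b.gcPtrMap);
    }

    int main()
    {
        Layout noRefs{16, {false, false}};
        Layout refFirst{16, {true, false}};
        assert(AreCompatible(noRefs, noRefs));
        assert(!AreCompatible(noRefs, refFirst)); // GC layout differs: keep the exact block copy
        return 0;
    }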
- assert(m_dst->TypeGet() == m_src->TypeGet()); - if (m_dst->TypeIs(TYP_STRUCT)) + assert(m_store->TypeGet() == m_src->TypeGet()); + if (m_store->TypeIs(TYP_STRUCT)) { assert(ClassLayout::AreCompatible(m_blockLayout, m_src->GetLayout(m_comp))); } @@ -729,19 +689,19 @@ void MorphCopyBlockHelper::TrySpecialCases() { if (m_src->IsMultiRegNode()) { - assert(m_dst->OperIsScalarLocal()); + assert(m_store->OperIs(GT_STORE_LCL_VAR)); m_dstVarDsc->lvIsMultiRegRet = true; JITDUMP("Not morphing a multireg node return\n"); m_transformationDecision = BlockTransformation::SkipMultiRegSrc; - m_result = m_asg; + m_result = m_store; } - else if (m_src->IsCall() && m_dst->OperIsScalarLocal() && m_dstVarDsc->CanBeReplacedWithItsField(m_comp)) + else if (m_src->IsCall() && m_store->OperIs(GT_STORE_LCL_VAR) && m_dstVarDsc->CanBeReplacedWithItsField(m_comp)) { JITDUMP("Not morphing a single reg call return\n"); m_transformationDecision = BlockTransformation::SkipSingleRegCallSrc; - m_result = m_asg; + m_result = m_store; } } @@ -755,7 +715,7 @@ void MorphCopyBlockHelper::TrySpecialCases() void MorphCopyBlockHelper::MorphStructCases() { JITDUMP("block assignment to morph:\n"); - DISPTREE(m_asg); + DISPTREE(m_store); if (m_dstVarDsc != nullptr) { @@ -815,8 +775,7 @@ void MorphCopyBlockHelper::MorphStructCases() // If either src or dest is a reg-sized non-field-addressed struct, keep the copyBlock; // this will avoid having to DNER the enregisterable local when creating LCL_FLD nodes. - // TODO-ASG: delete the GT_LCL_VAR check on "m_dst". - if ((m_dst->OperIs(GT_STORE_LCL_VAR, GT_LCL_VAR) && m_dstVarDsc->lvRegStruct) || + if ((m_store->OperIs(GT_STORE_LCL_VAR) && m_dstVarDsc->lvRegStruct) || (m_src->OperIs(GT_LCL_VAR) && m_srcVarDsc->lvRegStruct)) { requiresCopyBlock = true; @@ -839,7 +798,7 @@ void MorphCopyBlockHelper::MorphStructCases() } #if defined(TARGET_ARM) - if ((m_dst->OperIsIndir()) && m_dst->AsIndir()->IsUnaligned()) + if ((m_store->OperIsIndir()) && m_store->AsIndir()->IsUnaligned()) { JITDUMP(" store is unaligned"); requiresCopyBlock = true; @@ -1001,7 +960,7 @@ void MorphCopyBlockHelper::MorphStructCases() if (m_transformationDecision == BlockTransformation::Undefined) { - m_result = m_asg; + m_result = m_store; m_transformationDecision = BlockTransformation::StructBlock; } } @@ -1015,8 +974,7 @@ void MorphCopyBlockHelper::MorphStructCases() // if (!m_dstDoFldAsg && (m_dstVarDsc != nullptr) && !m_dstSingleLclVarAsg) { - // TODO-ASG: delete the GT_LCL_FLD check on "m_dst". - if (m_dst->OperIs(GT_LCL_FLD, GT_STORE_LCL_FLD)) + if (m_store->OperIs(GT_STORE_LCL_FLD)) { m_comp->lvaSetVarDoNotEnregister(m_dstLclNum DEBUGARG(DoNotEnregisterReason::LocalField)); } @@ -1047,7 +1005,7 @@ void MorphCopyBlockHelper::MorphStructCases() // void MorphCopyBlockHelper::TryPrimitiveCopy() { - if (!m_dst->TypeIs(TYP_STRUCT)) + if (!m_store->TypeIs(TYP_STRUCT)) { return; } @@ -1057,27 +1015,26 @@ void MorphCopyBlockHelper::TryPrimitiveCopy() return; } - var_types asgType = TYP_UNDEF; + var_types storeType = TYP_UNDEF; // Can we use the LHS local directly? - // TODO-ASG: delete the GT_LCL_FLD check on "m_dst". 
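For context on the hunk below: TryPrimitiveCopy fires when a TYP_STRUCT copy is exactly the size of a primitive, retyping the block copy into a single scalar move. A plain-C++ sketch of the underlying idea (BlockCopy and PrimitiveCopy are illustrative, not JIT code):

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    struct S
    {
        int16_t a;
        int16_t b;
    }; // 4 bytes: the same size as int32_t

    // "STORE_BLK"-style copy: moves the struct as an opaque block.
    void BlockCopy(S* dst, const S* src)
    {
        std::memcpy(dst, src, sizeof(S));
    }

    // The retyped form TryPrimitiveCopy aims for: one primitive-sized move.
    void PrimitiveCopy(S* dst, const S* src)
    {
        int32_t tmp; // reinterpret the 4-byte struct as a 4-byte scalar
        std::memcpy(&tmp, src, sizeof(tmp));
        std::memcpy(dst, &tmp, sizeof(tmp));
    }

    int main()
    {
        S src{1, 2};
        S d1{};
        S d2{};
        BlockCopy(&d1, &src);
        PrimitiveCopy(&d2, &src);
        assert(d1.a == d2.a && d1.b == d2.b); // both copies produce the same destination
        return 0;
    }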
- if (m_dst->OperIs(GT_LCL_FLD, GT_STORE_LCL_FLD)) + if (m_store->OperIs(GT_STORE_LCL_FLD)) { if (m_blockSize == genTypeSize(m_dstVarDsc)) { - asgType = m_dstVarDsc->TypeGet(); + storeType = m_dstVarDsc->TypeGet(); } } - else if (!m_dst->OperIsIndir()) + else if (!m_store->OperIsIndir()) { return; } if (m_srcVarDsc != nullptr) { - if ((asgType == TYP_UNDEF) && (m_blockSize == genTypeSize(m_srcVarDsc))) + if ((storeType == TYP_UNDEF) && (m_blockSize == genTypeSize(m_srcVarDsc))) { - asgType = m_srcVarDsc->TypeGet(); + storeType = m_srcVarDsc->TypeGet(); } } else if (!m_src->OperIsIndir()) @@ -1085,18 +1042,18 @@ void MorphCopyBlockHelper::TryPrimitiveCopy() return; } - if (asgType == TYP_UNDEF) + if (storeType == TYP_UNDEF) { return; } - auto doRetypeNode = [asgType](GenTree* op, LclVarDsc* varDsc, bool isUse) { + auto doRetypeNode = [storeType](GenTree* op, LclVarDsc* varDsc, bool isUse) { if (op->OperIsIndir()) { op->SetOper(isUse ? GT_IND : GT_STOREIND); - op->ChangeType(asgType); + op->ChangeType(storeType); } - else if (varDsc->TypeGet() == asgType) + else if (varDsc->TypeGet() == storeType) { op->SetOper(isUse ? GT_LCL_VAR : GT_STORE_LCL_VAR); op->ChangeType(varDsc->lvNormalizeOnLoad() ? varDsc->TypeGet() : genActualType(varDsc)); @@ -1108,16 +1065,14 @@ void MorphCopyBlockHelper::TryPrimitiveCopy() { op->SetOper(isUse ? GT_LCL_FLD : GT_STORE_LCL_FLD); } - op->ChangeType(asgType); + op->ChangeType(storeType); } }; - doRetypeNode(m_dst, m_dstVarDsc, /* isUse */ m_asg->OperIs(GT_ASG)); + doRetypeNode(m_store, m_dstVarDsc, /* isUse */ false); doRetypeNode(m_src, m_srcVarDsc, /* isUse */ true); - // TODO-ASG: delete. - m_asg->ChangeType(asgType); - m_result = m_asg; + m_result = m_store; m_transformationDecision = BlockTransformation::OneAsgBlock; } @@ -1199,7 +1154,7 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() if (!m_dstUseLclFld) { - dstAddr = m_dst->AsIndir()->Addr(); + dstAddr = m_store->AsIndir()->Addr(); // "dstAddr" might be a complex expression that we need to clone // and spill, unless we only end up using the address once. @@ -1248,7 +1203,6 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() // We may have allocated a temp above, and that may have caused the lvaTable to be expanded. // So, beyond this point we cannot rely on the old values of 'm_srcVarDsc' and 'm_dstVarDsc'. - bool useAsg = m_asg->OperIs(GT_ASG); for (unsigned i = 0; i < fieldCnt; ++i) { if (m_dstDoFldAsg && m_comp->fgGlobalMorph && m_dstLclNode->IsLastUse(i)) @@ -1369,26 +1323,13 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() } assert(srcFld != nullptr); - GenTree* dstFld; + GenTree* dstFldStore; if (m_dstDoFldAsg) { noway_assert((m_dstLclNum != BAD_VAR_NUM) && (dstAddr == nullptr)); unsigned dstFieldLclNum = m_comp->lvaGetDesc(m_dstLclNum)->lvFieldLclStart + i; - if (useAsg) - { - dstFld = m_comp->gtNewLclvNode(dstFieldLclNum, m_comp->lvaGetDesc(dstFieldLclNum)->TypeGet()); - - // If it had been labeled a "USEASG", assignments to the individual promoted fields are not. - dstFld->gtFlags |= m_dstLclNode->gtFlags & ~(GTF_NODE_MASK | GTF_VAR_USEASG | GTF_VAR_DEATH_MASK); - - // Don't CSE the lhs of an assignment. 
- dstFld->gtFlags |= GTF_DONT_CSE; - } - else - { - dstFld = m_comp->gtNewStoreLclVarNode(dstFieldLclNum, srcFld); - } + dstFldStore = m_comp->gtNewStoreLclVarNode(dstFieldLclNum, srcFld); } else { @@ -1400,14 +1341,7 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() noway_assert(m_dstVarDsc != nullptr); noway_assert(addrSpill == nullptr); - if (useAsg) - { - dstFld = m_comp->gtNewLclvNode(m_dstLclNum, m_dstVarDsc->TypeGet()); - } - else - { - dstFld = m_comp->gtNewStoreLclVarNode(m_dstLclNum, srcFld); - } + dstFldStore = m_comp->gtNewStoreLclVarNode(m_dstLclNum, srcFld); } else { @@ -1458,14 +1392,7 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() dstAddrClone = m_comp->gtNewOperNode(GT_ADD, TYP_BYREF, dstAddrClone, fieldOffsetNode); } - if (useAsg) - { - dstFld = m_comp->gtNewIndir(srcType, dstAddrClone); - } - else - { - dstFld = m_comp->gtNewStoreIndNode(srcType, dstAddrClone, srcFld); - } + dstFldStore = m_comp->gtNewStoreIndNode(srcType, dstAddrClone, srcFld); } else { @@ -1474,41 +1401,32 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() // If the dst was a struct type field "B" in a struct "A" then we add // add offset of ("B" in "A") + current offset in "B". unsigned totalOffset = m_dstLclOffset + srcFieldOffset; - if (useAsg) - { - dstFld = m_comp->gtNewLclFldNode(m_dstLclNum, srcType, totalOffset); - } - else - { - dstFld = m_comp->gtNewStoreLclFldNode(m_dstLclNum, srcType, totalOffset, srcFld); - } + dstFldStore = m_comp->gtNewStoreLclFldNode(m_dstLclNum, srcType, totalOffset, srcFld); // TODO-1stClassStructs: remove this and implement storing to a field in a struct in a reg. m_comp->lvaSetVarDoNotEnregister(m_dstLclNum DEBUGARG(DoNotEnregisterReason::LocalField)); } } } - noway_assert(dstFld->TypeGet() == srcFld->TypeGet()); - - GenTree* storeOneFld = useAsg ? m_comp->gtNewAssignNode(dstFld, srcFld) : dstFld; + noway_assert(dstFldStore->TypeGet() == srcFld->TypeGet()); if (m_comp->optLocalAssertionProp) { - m_comp->optAssertionGen(storeOneFld); + m_comp->optAssertionGen(dstFldStore); } if (addrSpillStore != nullptr) { - result = m_comp->gtNewOperNode(GT_COMMA, TYP_VOID, addrSpillStore, storeOneFld); + result = m_comp->gtNewOperNode(GT_COMMA, TYP_VOID, addrSpillStore, dstFldStore); addrSpillStore = nullptr; } else if (result != nullptr) { - result = m_comp->gtNewOperNode(GT_COMMA, TYP_VOID, result, storeOneFld); + result = m_comp->gtNewOperNode(GT_COMMA, TYP_VOID, result, dstFldStore); } else { - result = storeOneFld; + result = dstFldStore; } } @@ -1576,7 +1494,7 @@ bool MorphCopyBlockHelper::CanReuseAddressForDecomposedStore(GenTree* addrNode) // fgMorphCopyBlock: Perform the morphing of a block copy. // // Arguments: -// tree - a block copy (i.e. an assignment with a block op on the lhs). +// tree - a block copy (i.e. a store with a struct type). // // Return Value: // We can return the original block copy unmodified (least desirable, but always correct) @@ -1604,18 +1522,18 @@ GenTree* Compiler::fgMorphCopyBlock(GenTree* tree) // fgMorphInitBlock: Morph a block initialization assignment tree. // // Arguments: -// tree - A GT_ASG tree that performs block initialization. +// tree - A store tree that performs block initialization. // // Return Value: // If the destination is a promoted struct local variable then we will try to // perform a field by field assignment for each of the promoted struct fields. // This is not always possible (e.g. if the struct is address exposed). 
// -// Otherwise the original GT_ASG tree is returned unmodified, note that the +// Otherwise the original store tree is returned unmodified, note that the // nodes can still be changed. // // Assumptions: -// GT_ASG's children have already been morphed. +// store's children have already been morphed. // GenTree* Compiler::fgMorphInitBlock(GenTree* tree) { @@ -1632,7 +1550,7 @@ GenTree* Compiler::fgMorphInitBlock(GenTree* tree) // // Return Value: // In case the size turns into a constant - the store, transformed -// into an "ordinary" ASG(BLK, Data()) one, and further morphed by +// into an "ordinary" STORE_BLK one, and further morphed by // "fgMorphInitBlock"/"fgMorphCopyBlock". Otherwise, the original // tree (fully morphed). // @@ -1663,22 +1581,10 @@ GenTree* Compiler::fgMorphStoreDynBlock(GenTreeStoreDynBlk* tree) src->AsBlk()->Initialize(layout); } - GenTree* store; - if (compAssignmentRationalized) - { - store = gtNewStoreValueNode(layout, tree->Addr(), src, tree->gtFlags & GTF_IND_FLAGS); - } - else - { - GenTree* dst = gtNewLoadValueNode(layout, tree->Addr(), tree->gtFlags & GTF_IND_FLAGS); - dst->gtFlags |= GTF_GLOB_REF; - store = gtNewAssignNode(dst, src); - } + GenTree* store = gtNewStoreValueNode(layout, tree->Addr(), src, tree->gtFlags & GTF_IND_FLAGS); store->AddAllEffectsFlags(tree); INDEBUG(store->gtDebugFlags |= GTF_DEBUG_NODE_MORPHED); - fgAssignSetVarDef(store); - JITDUMP("MorphStoreDynBlock: transformed STORE_DYN_BLK into ASG(BLK, Data())\n"); return tree->OperIsCopyBlkOp() ? fgMorphCopyBlock(store) : fgMorphInitBlock(store); diff --git a/src/coreclr/jit/patchpoint.cpp b/src/coreclr/jit/patchpoint.cpp index e3a22a2..e16af87 100644 --- a/src/coreclr/jit/patchpoint.cpp +++ b/src/coreclr/jit/patchpoint.cpp @@ -165,7 +165,7 @@ private: GenTree* ppCounterAfter = compiler->gtNewLclvNode(ppCounterLclNum, TYP_INT); GenTree* one = compiler->gtNewIconNode(1, TYP_INT); GenTree* ppCounterSub = compiler->gtNewOperNode(GT_SUB, TYP_INT, ppCounterBefore, one); - GenTree* ppCounterAsg = compiler->gtNewOperNode(GT_ASG, TYP_INT, ppCounterAfter, ppCounterSub); + GenTree* ppCounterAsg = compiler->gtNewAssignNode(ppCounterAfter, ppCounterSub); compiler->fgNewStmtAtEnd(block, ppCounterAsg); @@ -202,7 +202,7 @@ private: GenTree* initialCounterNode = compiler->gtNewIconNode(initialCounterValue, TYP_INT); GenTree* ppCounterRef = compiler->gtNewLclvNode(ppCounterLclNum, TYP_INT); - GenTree* ppCounterAsg = compiler->gtNewOperNode(GT_ASG, TYP_INT, ppCounterRef, initialCounterNode); + GenTree* ppCounterAsg = compiler->gtNewAssignNode(ppCounterRef, initialCounterNode); compiler->fgNewStmtNearEnd(block, ppCounterAsg); } diff --git a/src/coreclr/jit/promotion.cpp b/src/coreclr/jit/promotion.cpp index 1ece87d..d2df196 100644 --- a/src/coreclr/jit/promotion.cpp +++ b/src/coreclr/jit/promotion.cpp @@ -830,6 +830,7 @@ void ReplaceVisitor::ReplaceLocal(GenTree** use, GenTree* user) if ((lcl->gtFlags & GTF_VAR_DEF) != 0) { + (*use)->gtFlags |= GTF_VAR_DEF; // TODO-ASG: delete. rep.NeedsWriteBack = true; rep.NeedsReadBack = false; } -- 2.7.4
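Taken together, these hunks finish migrating morph and its helpers from GT_ASG trees to store-form IR, where the destination is encoded in the store node itself. A toy model of the per-definition node saving (Node is a hypothetical stand-in for GenTree, for illustration only):

    #include <cassert>

    struct Node
    {
        const char* oper;
        unsigned    lclNum = 0;
        Node*       op1    = nullptr; // value (or location, for ASG)
        Node*       op2    = nullptr; // value, for ASG
    };

    int CountNodes(const Node* n)
    {
        return (n == nullptr) ? 0 : 1 + CountNodes(n->op1) + CountNodes(n->op2);
    }

    int main()
    {
        Node value{"CNS_INT"};

        // Old shape: ASG(LCL_VAR V00, CNS_INT) -- the destination is a separate "location" node.
        Node location{"LCL_VAR", 0};
        Node asg{"ASG", 0, &location, &value};

        // New shape: STORE_LCL_VAR V00 (CNS_INT) -- the store owns the value directly.
        Node store{"STORE_LCL_VAR", 0, &value};

        assert(CountNodes(&asg) == CountNodes(&store) + 1); // one fewer node per local definition
        return 0;
    }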