}
}
- if (tree->gtOper == GT_ASG)
+ if (tree->OperIs(GT_ASG))
{
// For an assignment to a local variable, all references of that
// variable have to be spilled. If it is aliased, all calls and
// indirect accesses have to be spilled
- if (tree->AsOp()->gtOp1->gtOper == GT_LCL_VAR)
+ if (tree->AsOp()->gtOp1->OperIsLocal())
{
unsigned lclNum = tree->AsOp()->gtOp1->AsLclVarCommon()->GetLclNum();
for (unsigned level = 0; level < chkLevel; level++)
{
- assert(!gtHasRef(verCurrentState.esStack[level].val, lclNum));
- assert(!lvaTable[lclNum].IsAddressExposed() ||
- (verCurrentState.esStack[level].val->gtFlags & GTF_SIDE_EFFECT) == 0);
+ GenTree* stkTree = verCurrentState.esStack[level].val;
+ assert(!gtHasRef(stkTree, lclNum) || impIsInvariant(stkTree));
+ assert(!lvaTable[lclNum].IsAddressExposed() || ((stkTree->gtFlags & GTF_SIDE_EFFECT) == 0));
}
}
-
// If the access may be to global memory, all side effects have to be spilled.
-
else if (tree->AsOp()->gtOp1->gtFlags & GTF_GLOB_REF)
{
for (unsigned level = 0; level < chkLevel; level++)
// Arguments:
// stmt - The statement to add.
// chkLevel - [0..chkLevel) is the portion of the stack which we will check
-// for interference with stmt and spill if needed.
+// for interference with stmt and spill if needed.
// checkConsumedDebugInfo - Whether to check for consumption of impCurStmtDI. impCurStmtDI
// marks the debug info of the current boundary and is set when we
// start importing IL at that boundary. If this parameter is true,
{
assert(chkLevel <= verCurrentState.esStackDepth);
- /* If the statement being appended has any side-effects, check the stack
- to see if anything needs to be spilled to preserve correct ordering. */
-
+ // If the statement being appended has any side-effects, check the stack to see if anything
+ // needs to be spilled to preserve correct ordering.
+ //
GenTree* expr = stmt->GetRootNode();
GenTreeFlags flags = expr->gtFlags & GTF_GLOB_EFFECT;
- // Assignment to (unaliased) locals don't count as a side-effect as
- // we handle them specially using impSpillLclRefs(). Temp locals should
- // be fine too.
-
- if ((expr->gtOper == GT_ASG) && (expr->AsOp()->gtOp1->gtOper == GT_LCL_VAR) &&
- ((expr->AsOp()->gtOp1->gtFlags & GTF_GLOB_REF) == 0) && !gtHasLocalsWithAddrOp(expr->AsOp()->gtOp2))
+ // Assignments to unaliased locals require special handling. Here, we look for trees that
+ // can modify them and spill the references. In doing so, we make two assumptions:
+ //
+ // 1. All locals which can be modified indirectly are marked as address-exposed or with
+ // "lvHasLdAddrOp" -- we will rely on "impSpillSideEffects(spillGlobEffects: true)"
+ // below to spill them.
+ // 2. Trees that assign to unaliased locals are always top-level (this avoids having to
+ // walk down the tree here), and are a subset of what is recognized here.
+ //
+ // If any of the above are violated (say for some temps), the relevant code must spill
+ // things manually.
+ //
+ LclVarDsc* dstVarDsc = nullptr;
+ if (expr->OperIs(GT_ASG) && expr->AsOp()->gtOp1->OperIsLocal())
{
- GenTreeFlags op2Flags = expr->AsOp()->gtOp2->gtFlags & GTF_GLOB_EFFECT;
- assert(flags == (op2Flags | GTF_ASG));
- flags = op2Flags;
+ dstVarDsc = lvaGetDesc(expr->AsOp()->gtOp1->AsLclVarCommon());
}
-
- if (flags != 0)
+ else if (expr->OperIs(GT_CALL, GT_RET_EXPR)) // The special case of calls with return buffers.
{
- bool spillGlobEffects = false;
+ GenTree* call = expr->OperIs(GT_RET_EXPR) ? expr->AsRetExpr()->gtInlineCandidate : expr;
- if ((flags & GTF_CALL) != 0)
+ if (call->TypeIs(TYP_VOID) && call->AsCall()->TreatAsShouldHaveRetBufArg(this))
{
- // If there is a call, we have to spill global refs
- spillGlobEffects = true;
- }
- else if (!expr->OperIs(GT_ASG))
- {
- if ((flags & GTF_ASG) != 0)
+ GenTree* retBuf;
+ if (call->AsCall()->ShouldHaveRetBufArg())
{
- // The expression is not an assignment node but it has an assignment side effect, it
- // must be an atomic op, HW intrinsic or some other kind of node that stores to memory.
- // Since we don't know what it assigns to, we need to spill global refs.
- spillGlobEffects = true;
+ assert(call->AsCall()->gtArgs.HasRetBuffer());
+ retBuf = call->AsCall()->gtArgs.GetRetBufferArg()->GetNode();
}
- }
- else
- {
- GenTree* lhs = expr->gtGetOp1();
- GenTree* rhs = expr->gtGetOp2();
-
- if (((rhs->gtFlags | lhs->gtFlags) & GTF_ASG) != 0)
+ else
{
- // Either side of the assignment node has an assignment side effect.
- // Since we don't know what it assigns to, we need to spill global refs.
- spillGlobEffects = true;
+ assert(!call->AsCall()->gtArgs.HasThisPointer());
+ retBuf = call->AsCall()->gtArgs.GetArgByIndex(0)->GetNode();
}
- else if ((lhs->gtFlags & GTF_GLOB_REF) != 0)
+
+ assert(retBuf->TypeIs(TYP_I_IMPL, TYP_BYREF));
+
+ GenTreeLclVarCommon* lclNode = retBuf->IsLocalAddrExpr();
+ if (lclNode != nullptr)
{
- spillGlobEffects = true;
+ dstVarDsc = lvaGetDesc(lclNode);
}
}
+ }
+
+ if ((dstVarDsc != nullptr) && !dstVarDsc->IsAddressExposed() && !dstVarDsc->lvHasLdAddrOp)
+ {
+ impSpillLclRefs(lvaGetLclNum(dstVarDsc), chkLevel);
+
+ if (expr->OperIs(GT_ASG))
+ {
+ // For assignments, limit the checking to what the RHS could modify/interfere with.
+ flags = expr->AsOp()->gtOp2->gtFlags & GTF_GLOB_EFFECT;
+ }
+ }
- impSpillSideEffects(spillGlobEffects, chkLevel DEBUGARG("impAppendStmt"));
+ if (flags != 0)
+ {
+ impSpillSideEffects((flags & (GTF_ASG | GTF_CALL)) != 0, chkLevel DEBUGARG("impAppendStmt"));
}
else
{
{
dest = gtNewObjNode(structHnd, destAddr);
gtSetObjGcInfo(dest->AsObj());
- // Although an obj as a call argument was always assumed to be a globRef
- // (which is itself overly conservative), that is not true of the operands
- // of a block assignment.
- dest->gtFlags &= ~GTF_GLOB_REF;
- dest->gtFlags |= (destAddr->gtFlags & GTF_GLOB_REF);
}
else
{
return gtNewLclvNode(tmp, TYP_I_IMPL);
}
-/******************************************************************************
- * Spills the stack at verCurrentState.esStack[level] and replaces it with a temp.
- * If tnum!=BAD_VAR_NUM, the temp var used to replace the tree is tnum,
- * else, grab a new temp.
- * For structs (which can be pushed on the stack using obj, etc),
- * special handling is needed
- */
-
struct RecursiveGuard
{
public:
}
}
-/*****************************************************************************
- *
- * If the stack contains any trees with references to local #lclNum, assign
- * those trees to temps and replace their place on the stack with refs to
- * their temps.
- */
-
-void Compiler::impSpillLclRefs(unsigned lclNum)
+//------------------------------------------------------------------------
+// impSpillLclRefs: Spill all trees referencing the given local.
+//
+// Arguments:
+// lclNum - The local's number
+// chkLevel - Height (exclusive) of the portion of the stack to check
+//
+void Compiler::impSpillLclRefs(unsigned lclNum, unsigned chkLevel)
{
- /* Before we make any appends to the tree list we must spill the
- * "special" side effects (GTF_ORDER_SIDEEFF) - GT_CATCH_ARG */
-
+ // Before we make any appends to the tree list we must spill the
+ // "special" side effects (GTF_ORDER_SIDEEFF) - GT_CATCH_ARG.
impSpillSpecialSideEff();
- for (unsigned level = 0; level < verCurrentState.esStackDepth; level++)
+ if (chkLevel == CHECK_SPILL_ALL)
+ {
+ chkLevel = verCurrentState.esStackDepth;
+ }
+
+ assert(chkLevel <= verCurrentState.esStackDepth);
+
+ for (unsigned level = 0; level < chkLevel; level++)
{
GenTree* tree = verCurrentState.esStack[level].val;
goto DECODE_OPCODE;
SPILL_APPEND:
-
- // We need to call impSpillLclRefs() for a struct type lclVar.
- // This is because there may be loads of that lclVar on the evaluation stack, and
- // we need to ensure that those loads are completed before we modify it.
- if ((op1->OperGet() == GT_ASG) && varTypeIsStruct(op1->gtGetOp1()))
- {
- GenTree* lhs = op1->gtGetOp1();
- GenTreeLclVarCommon* lclVar = nullptr;
- if (lhs->gtOper == GT_LCL_VAR)
- {
- lclVar = lhs->AsLclVarCommon();
- }
- else if (lhs->OperIsBlk())
- {
- // Check if LHS address is within some struct local, to catch
- // cases where we're updating the struct by something other than a stfld
- GenTree* addr = lhs->AsBlk()->Addr();
-
- // Catches ADDR(LCL_VAR), or ADD(ADDR(LCL_VAR),CNS_INT))
- lclVar = addr->IsLocalAddrExpr();
-
- // Catches ADDR(FIELD(... ADDR(LCL_VAR)))
- if (lclVar == nullptr)
- {
- GenTree* lclTree = nullptr;
- if (impIsAddressInLocal(addr, &lclTree))
- {
- lclVar = lclTree->AsLclVarCommon();
- }
- }
- }
- if (lclVar != nullptr)
- {
- impSpillLclRefs(lclVar->GetLclNum());
- }
- }
-
- /* Append 'op1' to the list of statements */
impAppendTree(op1, (unsigned)CHECK_SPILL_ALL, impCurStmtDI);
goto DONE_APPEND;
APPEND:
-
- /* Append 'op1' to the list of statements */
-
impAppendTree(op1, (unsigned)CHECK_SPILL_NONE, impCurStmtDI);
goto DONE_APPEND;
DONE_APPEND:
-
#ifdef DEBUG
// Remember at which BC offset the tree was finished
impNoteLastILoffs();
}
}
- /* Create the assignment node */
-
op2 = gtNewLclvNode(lclNum, lclTyp DEBUGARG(opcodeOffs + sz + 1));
- /* If the local is aliased or pinned, we need to spill calls and
- indirections from the stack. */
-
- if ((lvaTable[lclNum].IsAddressExposed() || lvaTable[lclNum].lvHasLdAddrOp ||
- lvaTable[lclNum].lvPinned) &&
- (verCurrentState.esStackDepth > 0))
+ // Stores to pinned locals can have the implicit side effect of "unpinning", so we must spill
+ // things that could depend on the pin. TODO-Bug: which can actually be anything, including
+ // unpinned unaliased locals, not just side-effecting trees.
+ if (lvaTable[lclNum].lvPinned)
{
- impSpillSideEffects(false,
- (unsigned)CHECK_SPILL_ALL DEBUGARG("Local could be aliased or is pinned"));
+ impSpillSideEffects(false, CHECK_SPILL_ALL DEBUGARG("Spill before store to pinned local"));
}
- /* Spill any refs to the local from the stack */
-
- impSpillLclRefs(lclNum);
-
// We can generate an assignment to a TYP_FLOAT from a TYP_DOUBLE
// We insert a cast to the dest 'op2' type
//
if (varTypeIsStruct(lclTyp))
{
- op1 = impAssignStruct(op2, op1, clsHnd, (unsigned)CHECK_SPILL_ALL);
+ op1 = impAssignStruct(op2, op1, clsHnd, CHECK_SPILL_ALL);
}
else
{
op1 = gtNewAssignNode(op2, op1);
}
-
goto SPILL_APPEND;
case CEE_LDLOCA:
#endif
op1 = gtNewOperNode(GT_IND, lclTyp, op1);
+ op1->gtFlags |= GTF_EXCEPT | GTF_GLOB_REF;
if (prefixFlags & PREFIX_VOLATILE)
{
}
op1 = gtNewAssignNode(op1, op2);
- op1->gtFlags |= GTF_EXCEPT | GTF_GLOB_REF;
-
- // Spill side-effects AND global-data-accesses
- if (verCurrentState.esStackDepth > 0)
- {
- impSpillSideEffects(true, (unsigned)CHECK_SPILL_ALL DEBUGARG("spill side effects before STIND"));
- }
-
- goto APPEND;
+ goto SPILL_APPEND;
case CEE_LDIND_I1:
lclTyp = TYP_BYTE;
assert(!"Unexpected fieldAccessor");
}
- // "impAssignStruct" will back-substitute the field address tree into calls that return things via
- // return buffers, so we have to delay calling it until after we have spilled everything needed.
- bool deferStructAssign = (lclTyp == TYP_STRUCT);
-
- if (!deferStructAssign)
+ if (lclTyp != TYP_STRUCT)
{
assert(op1->OperIs(GT_FIELD, GT_IND));
op1 = gtNewAssignNode(op1, op2);
}
- /* Check if the class needs explicit initialization */
-
+ // Check if the class needs explicit initialization.
if (fieldInfo.fieldFlags & CORINFO_FLG_FIELD_INITCLASS)
{
GenTree* helperNode = impInitClass(&resolvedToken);
}
}
- // An indirect store such as "st[s]fld" interferes with indirect accesses, so we must spill
- // global refs and potentially aliased locals.
- impSpillSideEffects(true, (unsigned)CHECK_SPILL_ALL DEBUGARG("spill side effects before STFLD"));
-
- if (deferStructAssign)
+ if (lclTyp == TYP_STRUCT)
{
- op1 = impAssignStruct(op1, op2, clsHnd, (unsigned)CHECK_SPILL_ALL);
+ op1 = impAssignStruct(op1, op2, clsHnd, CHECK_SPILL_ALL);
}
+ goto SPILL_APPEND;
}
- goto APPEND;
case CEE_NEWARR:
{
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+.assembly extern System.Runtime { }
+.assembly extern System.Console { }
+.assembly extern System.Numerics.Vectors { }
+
+.assembly Runtime_72133 { }
+
+#define TRUE "1"
+#define FALSE "0"
+
+.typedef [System.Numerics.Vectors]System.Numerics.Vector2 as Vector2
+
+.class Runtime_72133 extends [System.Runtime]System.Object
+{
+ // Entry point. "result" starts at 100 (the runtime-test convention for
+ // success — TODO confirm harness expectation) and ReportFailure bumps it
+ // once per failing repro, so any JIT mis-spill yields a non-100 exit code.
+ // Each Problem* helper returns TRUE (1) when the stale-value bug reproduced.
+ .method static int32 Main()
+ {
+ .entrypoint
+ .locals (int32 result)
+
+ ldc.i4 100
+ stloc result
+
+ call bool .this::ProblemWithStructStObj()
+ brfalse VECTOR_STOBJ
+
+ ldloca result
+ ldstr "STRUCT_STOBJ failed"
+ call void .this::ReportFailure(int32*, string)
+
+ VECTOR_STOBJ:
+ call bool .this::ProblemWithVectorStObj()
+ brfalse CPBLK
+
+ ldloca result
+ ldstr "VECTOR_STOBJ failed"
+ call void .this::ReportFailure(int32*, string)
+
+ CPBLK:
+ call bool .this::ProblemWithCpBlk()
+ brfalse INITBLK
+
+ ldloca result
+ ldstr "CPBLK failed"
+ call void .this::ReportFailure(int32*, string)
+
+ INITBLK:
+ call bool .this::ProblemWithInitBlk()
+ brfalse INITOBJ
+
+ ldloca result
+ ldstr "INITBLK failed"
+ call void .this::ReportFailure(int32*, string)
+
+ INITOBJ:
+ call bool .this::ProblemWithInitObj()
+ brfalse DIRECT_INITOBJ
+
+ ldloca result
+ ldstr "INITOBJ failed"
+ call void .this::ReportFailure(int32*, string)
+
+ DIRECT_INITOBJ:
+ call bool .this::ProblemWithDirectInitObj()
+ brfalse RET_BUF_CALL
+
+ ldloca result
+ ldstr "DIRECT_INITOBJ failed"
+ call void .this::ReportFailure(int32*, string)
+
+ RET_BUF_CALL:
+ call bool .this::ProblemWithRetBufCall()
+ brfalse RETURN
+
+ ldloca result
+ ldstr "RET_BUF_CALL failed"
+ call void .this::ReportFailure(int32*, string)
+
+ RETURN:
+ ldloc result
+ ret
+ }
+
+ // Repro: push the value of 'a' (== 1) on the IL stack, then overwrite 'a'
+ // with 'b' (== 2) via "stobj StructWithInt" through the computed address
+ // "&a + offs" (offs == 0; the add obscures the destination local from the
+ // importer). The importer must spill the pending "ldloc a" before appending
+ // the store; if it does not, the comparison below sees 2 and we return TRUE.
+ .method private static bool ProblemWithStructStObj() noinlining
+ {
+ .locals (int32 a, int32 b, int32 offs)
+
+ // Values come through the opaque Get<T> helper so they are not constants.
+ ldc.i4 1
+ call !!0 .this::Get<int32>(!!0)
+ stloc a
+
+ ldc.i4 2
+ call !!0 .this::Get<int32>(!!0)
+ stloc b
+
+ ldc.i4 0
+ call !!0 .this::Get<int32>(!!0)
+ stloc offs
+
+ ldloc a
+
+ ldloca a
+ ldloc offs
+ add
+ ldloca b
+ ldobj StructWithInt
+ stobj StructWithInt
+
+ // Stale pushed value must still be the pre-store 1.
+ ldc.i4 1
+ bne.un FAILURE
+ ldc.i4 FALSE
+ ret
+
+ FAILURE:
+ ldc.i4 TRUE
+ ret
+ }
+
+ // Same shape as ProblemWithStructStObj, but the store is an 8-byte
+ // "stobj Vector2" (SIMD-typed on SIMD-capable targets — TODO confirm)
+ // copying 'b' (== 2) over int64 local 'a' (== 1) through "&a + offs".
+ // Returns TRUE if the pushed "ldloc a" was not spilled before the store.
+ .method private static bool ProblemWithVectorStObj() noinlining
+ {
+ .locals (int64 a, int64 b, int32 offs)
+
+ ldc.i8 1
+ call !!0 .this::Get<int64>(!!0)
+ stloc a
+
+ ldc.i8 2
+ call !!0 .this::Get<int64>(!!0)
+ stloc b
+
+ ldc.i4 0
+ call !!0 .this::Get<int32>(!!0)
+ stloc offs
+
+ ldloc a
+
+ ldloca a
+ ldloc offs
+ add
+ ldloca b
+ ldobj Vector2
+ stobj Vector2
+
+ ldc.i8 1
+ bne.un FAILURE
+ ldc.i4 FALSE
+ ret
+
+ FAILURE:
+ ldc.i4 TRUE
+ ret
+ }
+
+ // Same repro via "cpblk": copies sizeof(int32) bytes from &b (== 2) to
+ // "&a + offs" (offs == 0) while the pre-store value of 'a' (== 1) is still
+ // pending on the IL stack. Returns TRUE if that pending load observed 2.
+ .method private static bool ProblemWithCpBlk() noinlining
+ {
+ .locals (int32 a, int32 b, int32 offs)
+
+ ldc.i4 1
+ call !!0 .this::Get<int32>(!!0)
+ stloc a
+
+ ldc.i4 2
+ call !!0 .this::Get<int32>(!!0)
+ stloc b
+
+ ldc.i4 0
+ call !!0 .this::Get<int32>(!!0)
+ stloc offs
+
+ ldloc a
+
+ ldloca a
+ ldloc offs
+ add
+ ldloca b
+ sizeof int32
+ cpblk
+
+ ldc.i4 1
+ bne.un FAILURE
+ ldc.i4 FALSE
+ ret
+
+ FAILURE:
+ ldc.i4 TRUE
+ ret
+ }
+
+ // Same repro via "initblk": fills sizeof(int32) bytes at "&a + offs" with
+ // the byte value of 'b' (== 2, so 'a' becomes 0x02020202) while the
+ // pre-store value of 'a' (== 1) is pending on the IL stack.
+ .method private static bool ProblemWithInitBlk() noinlining
+ {
+ .locals (int32 a, int32 b, int32 offs)
+
+ ldc.i4 1
+ call !!0 .this::Get<int32>(!!0)
+ stloc a
+
+ ldc.i4 2
+ call !!0 .this::Get<int32>(!!0)
+ stloc b
+
+ ldc.i4 0
+ call !!0 .this::Get<int32>(!!0)
+ stloc offs
+
+ ldloc a
+
+ ldloca a
+ ldloc offs
+ add
+ ldloc b
+ sizeof int32
+ initblk
+
+ ldc.i4 1
+ bne.un FAILURE
+ ldc.i4 FALSE
+ ret
+
+ FAILURE:
+ ldc.i4 TRUE
+ ret
+ }
+
+ // Same repro via "initobj" on a GC ref: push the non-null object in 'a',
+ // then null 'a' through "&a + offs". If the pushed load was not spilled it
+ // re-reads 'a' as null, the "beq" against ldnull takes FAILURE, and we
+ // return TRUE.
+ .method private static bool ProblemWithInitObj() noinlining
+ {
+ .locals (object a, int32 offs)
+
+ newobj instance void object::.ctor()
+ call !!0 .this::Get<object>(!!0)
+ stloc a
+
+ ldc.i4 0
+ call !!0 .this::Get<int32>(!!0)
+ stloc offs
+
+ ldloc a
+
+ ldloca a
+ ldloc offs
+ add
+ initobj object
+
+ ldnull
+ beq FAILURE
+ ldc.i4 FALSE
+ ret
+
+ FAILURE:
+ ldc.i4 TRUE
+ ret
+ }
+
+ // Repro with a DIRECT "ldloca a; initobj" destination, but an INDIRECT
+ // pending load: a.Value is set to 1, the address of 'a' escapes into 'pA',
+ // and "pA->Value" is pushed through the pointer. The store to 'a' must
+ // spill that indirection (the local is address-taken — presumably marked
+ // lvHasLdAddrOp/address-exposed by the importer; see the C++ hunks above)
+ // or the comparison sees the zeroed field instead of 1.
+ .method private static bool ProblemWithDirectInitObj() noinlining
+ {
+ .locals (valuetype StructWithInt a, valuetype StructWithInt* pA)
+
+ ldloca a
+ ldflda int32 StructWithInt::Value
+ ldc.i4 1
+ call !!0 .this::Get<int32>(!!0)
+ stfld int32 StructWithInt::Value
+
+ ldloca a
+ stloc pA
+
+ ldloc pA
+ ldfld int32 StructWithInt::Value
+
+ ldloca a
+ initobj StructWithInt
+
+ ldc.i4 1
+ bne.un FAILURE
+ ldc.i4 FALSE
+ ret
+
+ FAILURE:
+ ldc.i4 TRUE
+ ret
+ }
+
+ // Repro for calls that write a local through a hidden return buffer (the
+ // GT_CALL/GT_RET_EXPR case in the impAppendStmt hunk above): a == b ==
+ // CreateNoinline(1); the value of 'a' is pushed, then CreateInline(2)
+ // stores its 24-byte result into 'a'. The pushed copy must be spilled
+ // before that call's ret-buf store, so Equals(pushed, b) stays true.
+ // Repeated with a fresh copy and CreateInline(3) for the second pattern.
+ .method private static bool ProblemWithRetBufCall() noinlining
+ {
+ .locals (valuetype LargeStruct a, valuetype LargeStruct b)
+
+ ldc.i8 1
+ call valuetype LargeStruct LargeStruct::CreateNoinline(int64)
+ dup
+ stloc a
+ stloc b
+
+ ldloc a
+
+ ldc.i8 2
+ call valuetype LargeStruct LargeStruct::CreateInline(int64)
+ stloc a
+
+ ldloc b
+ call bool LargeStruct::Equals(valuetype LargeStruct, valuetype LargeStruct)
+ brfalse FAILURE
+
+ ldloc b
+ stloc a
+
+ ldloc a
+
+ ldc.i8 3
+ call valuetype LargeStruct LargeStruct::CreateInline(int64)
+ stloc a
+
+ ldloc b
+ call bool LargeStruct::Equals(valuetype LargeStruct, valuetype LargeStruct)
+ brfalse FAILURE
+
+ ldc.i4 FALSE
+ ret
+
+ FAILURE:
+ ldc.i4 TRUE
+ ret
+ }
+
+ // Opaque identity helper: "noinlining" keeps the returned value from being
+ // treated as a compile-time constant at the call sites above.
+ .method private static !!T Get<T>(!!T arg) noinlining
+ {
+ ldarg arg
+ ret
+ }
+
+ // Increments the failure counter (*pResult += 1) and logs the failing
+ // scenario's name to the console.
+ .method private static void ReportFailure(int32* pResult, string msg) noinlining
+ {
+ ldarg pResult
+ ldarg pResult
+ ldind.i4
+ ldc.i4 1
+ add
+ stind.i4
+
+ ldarg msg
+ call void [System.Console]System.Console::WriteLine(string)
+
+ ret
+ }
+}
+
+// Minimal 4-byte struct used as the stobj/initobj target type.
+.class sealed sequential StructWithInt extends [System.Runtime]System.ValueType
+{
+ .field public int32 Value
+}
+
+// 24-byte struct (three int64 fields) — large enough that returning it by
+// value presumably goes through a hidden return buffer (ABI-dependent; TODO
+// confirm), which is what ProblemWithRetBufCall exercises.
+.class sealed sequential LargeStruct extends [System.Runtime]System.ValueType
+{
+ .field public int64 LongOne
+ .field public int64 LongTwo
+ .field public int64 LongThree
+
+ // Field-wise equality; noinlining so the comparison itself stays opaque.
+ .method public static bool Equals(valuetype LargeStruct left, valuetype LargeStruct right) noinlining
+ {
+ ldarg left
+ ldfld int64 .this::LongOne
+ ldarg right
+ ldfld int64 .this::LongOne
+ bne.un NOT_EQUAL
+
+ ldarg left
+ ldfld int64 .this::LongTwo
+ ldarg right
+ ldfld int64 .this::LongTwo
+ bne.un NOT_EQUAL
+
+ ldarg left
+ ldfld int64 .this::LongThree
+ ldarg right
+ ldfld int64 .this::LongThree
+ bne.un NOT_EQUAL
+
+ ldc.i4 TRUE
+ ret
+
+ NOT_EQUAL:
+ ldc.i4 FALSE
+ ret
+ }
+
+ // Inlinable factory: sets all three fields to fieldValue. Left inlinable
+ // on purpose so the caller sees the ret-buf store after inlining.
+ .method public static valuetype LargeStruct CreateInline(int64 fieldValue)
+ {
+ .locals (valuetype LargeStruct result)
+
+ ldloca result
+ ldarg fieldValue
+ stfld int64 .this::LongOne
+
+ ldloca result
+ ldarg fieldValue
+ stfld int64 .this::LongTwo
+
+ ldloca result
+ ldarg fieldValue
+ stfld int64 .this::LongThree
+
+ ldloc result
+ ret
+ }
+
+ // Opaque wrapper over CreateInline for producing the initial values.
+ .method public static valuetype LargeStruct CreateNoinline(int64 fieldValue) noinlining
+ {
+ ldarg fieldValue
+ call valuetype LargeStruct .this::CreateInline(int64)
+ ret
+ }
+}