//------------------------------------------------------------------------
// genConsumeBlockSize: Consume (and, if needed, materialize) the size operand
// of a block node into 'sizeReg'.
//
// Arguments:
//    blkNode - the block node whose size operand is being consumed
//    sizeReg - the target register for the size, or REG_NA if none is needed
//
// NOTE(review): this is a stitched diff hunk — context lines appear to be
// omitted between the constant-size branch and the 'else' below, so the
// brace structure shown here is incomplete.
void CodeGen::genConsumeBlockSize(GenTreeBlk* blkNode, regNumber sizeReg)
{
- unsigned blockSize = blkNode->Size();
if (sizeReg != REG_NA)
{
+ unsigned blockSize = blkNode->Size();
if (blockSize != 0)
{
// Constant-size case: the size register must be the single register
// reserved on the node for this purpose.
assert(blkNode->gtRsvdRegs == genRegMask(sizeReg));
else
{
// Non-constant size: only GT_STORE_DYN_BLK carries a dynamic size operand.
noway_assert(blkNode->gtOper == GT_STORE_DYN_BLK);
- genConsumeRegAndCopy(blkNode->AsDynBlk()->gtDynamicSize, sizeReg);
+ genConsumeReg(blkNode->AsDynBlk()->gtDynamicSize);
}
}
}
}
#ifdef LEGACY_BACKEND
-/*****************************************************************************
- *
- * For GT_INITBLK and GT_COPYBLK, the tree looks like this :
- * tree->gtOp
- * / \
- * / \.
- * GT_LIST [size/clsHnd]
- * / \
- * / \
- * [dest] [val/src]
- *
- * ie. they are ternary operators. However we use nested binary trees so that
- * GTF_REVERSE_OPS will be set just like for other binary operators. As the
- * operands need to end up in specific registers to issue the "rep stos" or
- * the "rep movs" instruction, if we don't allow the order of evaluation of
- * the 3 operands to be mixed, we may generate really bad code.
- *
- * eg. For "rep stos", [val] has to be in EAX. Then if [size]
- * has a division, we will have to spill [val] from EAX. It will be better to
- * evaluate [size] and the evaluate [val] into EAX.
- *
- * This function stores the operands in the order to be evaluated
- * into opsPtr[]. The regsPtr[] contains reg0,reg1,reg2 in the correspondingly
- * switched order.
- */
+//------------------------------------------------------------------------
+// fgOrderBlockOps: Get the execution order for a block assignment
+//
+// Arguments:
+// tree - The block assignment
+// reg0 - The register for the destination
+// reg1 - The register for the source
+// reg2 - The register for the size
+// opsPtr - An array of 3 GenTreePtr's, an out argument for the operands, in order
+// regsPtr - An array of three regMaskTP - an out argument for the registers, in order
+//
+// Return Value:
+// The return values go into the arrays that are passed in, and provide the
+// operands and associated registers, in execution order.
+//
+// Notes:
+// This method is somewhat convoluted in order to preserve old behavior from when
+// block assignments had their dst and src in a GT_LIST as op1, and their size as op2.
+// The old tree was like this:
+// tree->gtOp
+// / \
+// GT_LIST [size/clsHnd]
+// / \
+// [dest] [val/src]
+//
+// The new tree looks like this:
+// GT_ASG
+// / \
+// blk/obj [val/src]
+// / \
+// [destAddr] [*size/clsHnd] *only for GT_DYN_BLK
+//
+// For the (usual) case of GT_BLK or GT_OBJ, the size is always "evaluated" (i.e.
+// instantiated into a register) last. In those cases, the GTF_REVERSE_OPS flag
+// on the assignment works as usual.
+// In order to preserve previous possible orderings, the order for evaluating
+// the size of a GT_DYN_BLK node is controlled by its gtEvalSizeFirst flag. If
+// that is set, the size is evaluated first, and then the src and dst are evaluated
+// according to the GTF_REVERSE_OPS flag on the assignment.
void Compiler::fgOrderBlockOps(GenTreePtr tree,
regMaskTP reg0,
return asg;
}
-// Creates a new Obj node.
+//------------------------------------------------------------------------
+// gtNewObjNode: Creates a new Obj node.
+//
+// Arguments:
+// structHnd - The class handle of the struct type.
+// addr - The address of the struct.
+//
+// Return Value:
+// Returns a node representing the struct value at the given address.
+//
+// Assumptions:
+// 'structHnd' is the class handle for the struct type of the value at
+// 'addr'.
+//
+// Notes:
+// It will currently return a GT_OBJ node for any struct type, but may
+// return a GT_IND or a non-indirection for a scalar type.
+// The node will not yet have its GC info initialized. This is because
+// we may not need this info if this is an r-value.
+
GenTree* Compiler::gtNewObjNode(CORINFO_CLASS_HANDLE structHnd, GenTree* addr)
{
var_types nodeType = impNormStructType(structHnd);
void Compiler::gtSetObjGcInfo(GenTreeObj* objNode)
{
CORINFO_CLASS_HANDLE structHnd = objNode->gtClass;
- var_types nodeType = impNormStructType(structHnd);
+ var_types nodeType = objNode->TypeGet();
unsigned size = objNode->gtBlkSize;
unsigned slots = 0;
unsigned gcPtrCount = 0;
assert(varTypeIsStruct(nodeType));
assert(size == info.compCompHnd->getClassSize(structHnd));
+ assert(nodeType == impNormStructType(structHnd));
if (nodeType == TYP_STRUCT)
{
// Creates a new assignment node for a CpObj.
// Parameters (exactly the same as MSIL CpObj):
//
-// dst - The target to copy the struct to
-// src - The source to copy the struct from
+// dstAddr - The target to copy the struct to
+// srcAddr - The source to copy the struct from
// structHnd - A class token that represents the type of object being copied. May be null
// if FEATURE_SIMD is enabled and the source has a SIMD type.
// isVolatile - Is this marked as volatile memory?
-GenTree* Compiler::gtNewCpObjNode(GenTreePtr dst, GenTreePtr src, CORINFO_CLASS_HANDLE structHnd, bool isVolatile)
+GenTree* Compiler::gtNewCpObjNode(GenTreePtr dstAddr, GenTreePtr srcAddr, CORINFO_CLASS_HANDLE structHnd, bool isVolatile)
{
- GenTreePtr lhs = gtNewStructVal(structHnd, dst);
+ GenTreePtr lhs = gtNewStructVal(structHnd, dstAddr);
+ GenTree* src = nullptr;
unsigned size;
+
if (lhs->OperIsBlk())
{
size = lhs->AsBlk()->gtBlkSize;
size = genTypeSize(lhs->gtType);
}
- if (src->OperGet() == GT_ADDR)
+ if (srcAddr->OperGet() == GT_ADDR)
{
- src = src->gtOp.gtOp1;
+ src = srcAddr->gtOp.gtOp1;
}
else
{
- src = gtNewOperNode(GT_IND, lhs->TypeGet(), src);
+ src = gtNewOperNode(GT_IND, lhs->TypeGet(), srcAddr);
}
GenTree* result = gtNewBlkOpNode(lhs, src, size, isVolatile, true);
}
}
-// Initializes a BlkOp GenTree
-// Preconditions:
-// - Result is an assignment that is newly constructed
+//
+//------------------------------------------------------------------------
+// gtBlockOpInit: Initializes a BlkOp GenTree
//
-// Parameters:
-// - result is an assignment node that is to be initialized.
-// - dst is the target (destination) we want to either initialize or copy to
-// - src is the init value for IniBlk or the source struct for CpBlk/CpObj
-// - isVolatile specifies whether this node is a volatile memory operation.
+// Arguments:
+// result - an assignment node that is to be initialized.
+// dst - the target (destination) we want to either initialize or copy to.
+// src - the init value for InitBlk or the source struct for CpBlk/CpObj.
+// isVolatile - specifies whether this node is a volatile memory operation.
+//
+// Assumptions:
+// 'result' is an assignment that is newly constructed.
+// If 'dst' is TYP_STRUCT, then it must be a block node or lclVar.
//
-// This procedure centralizes all the logic to both enforce proper structure and
-// to properly construct any InitBlk/CpBlk node.
+// Notes:
+// This procedure centralizes all the logic to both enforce proper structure and
+// to properly construct any InitBlk/CpBlk node.
+
void Compiler::gtBlockOpInit(GenTreePtr result, GenTreePtr dst, GenTreePtr srcOrFillVal, bool isVolatile)
{
if (!result->OperIsBlkOp())
{
+ assert(dst->TypeGet() != TYP_STRUCT);
return;
}
#ifdef DEBUG
//
// Notes:
// If size is zero, the dst must be a GT_OBJ with the class handle.
+// 'dst' must be a block node or lclVar.
//
GenTree* Compiler::gtNewBlkOpNode(
GenTreePtr dst, GenTreePtr srcOrFillVal, unsigned size, bool isVolatile, bool isCopyBlock)
{
+ assert(dst->OperIsBlk() || dst->OperIsLocal());
if (isCopyBlock)
{
srcOrFillVal->gtFlags |= GTF_DONT_CSE;
return OperIsBlk(OperGet());
}
+ // OperIsStoreBlk: Returns true if 'gtOper' is one of the block-store opers
+ // (GT_STORE_BLK, GT_STORE_OBJ or GT_STORE_DYN_BLK).
+ static bool OperIsStoreBlk(genTreeOps gtOper)
+ {
+ return ((gtOper == GT_STORE_BLK) || (gtOper == GT_STORE_OBJ) || (gtOper == GT_STORE_DYN_BLK));
+ }
+
+ // OperIsStoreBlk: Returns true if this node's oper is a block store.
+ bool OperIsStoreBlk() const
+ {
+ return OperIsStoreBlk(OperGet());
+ }
+
bool OperIsPutArgStk() const
{
return gtOper == GT_PUTARG_STK;
return (varTypeIsStruct(gtGetOp1()) && ((gtFlags & GTF_BLK_INIT) == 0));
}
#ifndef LEGACY_BACKEND
- else if (OperIsBlk())
+ else if (OperIsStoreBlk())
{
- return ((AsBlk()->Data() != nullptr) && ((gtFlags & GTF_BLK_INIT) == 0));
+ return ((gtFlags & GTF_BLK_INIT) == 0);
}
#endif
return false;
return (varTypeIsStruct(gtGetOp1()) && ((gtFlags & GTF_BLK_INIT) != 0));
}
#ifndef LEGACY_BACKEND
- else if (OperIsBlk())
+ else if (OperIsStoreBlk())
{
- return ((AsBlk()->Data() != nullptr) && ((gtFlags & GTF_BLK_INIT) != 0));
+ return ((gtFlags & GTF_BLK_INIT) != 0);
}
#endif
return false;
op1 = new (this, GT_DYN_BLK) GenTreeDynBlk(op1, op3);
size = 0;
}
- op2 = gtNewOperNode(GT_IND, TYP_STRUCT, op2);
+ if (op2->OperGet() == GT_ADDR)
+ {
+ op2 = op2->gtOp.gtOp1;
+ }
+ else
+ {
+ op2 = gtNewOperNode(GT_IND, TYP_STRUCT, op2);
+ }
op1 = gtNewBlkOpNode(op1, op2, size, (prefixFlags & PREFIX_VOLATILE) != 0, true);
goto SPILL_APPEND;
{
}
-// there is not much lowering to do with storing a local but
-// we do some handling of contained immediates and widening operations of unsigneds
+//------------------------------------------------------------------------
+// LowerStoreLoc: Lower a store of a lclVar
+//
+// Arguments:
+// storeLoc - the local store (GT_STORE_LCL_FLD or GT_STORE_LCL_VAR)
+//
+// Notes:
+// This involves:
+// - Setting the appropriate candidates for a store of a multi-reg call return value.
+// - Requesting an internal register for SIMD12 stores.
+// - Handling of contained immediates and widening operations of unsigneds.
+
void Lowering::LowerStoreLoc(GenTreeLclVarCommon* storeLoc)
{
TreeNodeInfo* info = &(storeLoc->gtLsraInfo);
// as that is how FEATURE_UNIX_AMD64_STRUCT_PASSING works.
// Pass by value in two registers, using a regular GT_OBJ node, created below.
+ // TODO-1stClassStructs: We should not need to set the GTF_DONT_CSE flag here;
+ // this is only to preserve former behavior (though some CSE'ing of struct
+ // values can be pessimizing, so enabling this may require some additional tuning).
arg->gtFlags |= GTF_DONT_CSE;
}
#endif // _TARGET_ARM64_
}
}
+//------------------------------------------------------------------------
+// fgMorphOneAsgBlockOp: Attempt to replace a block assignment with a scalar assignment
+//
+// Arguments:
+// tree - The block assignment to be possibly morphed
+//
+// Return Value:
+// The modified tree if successful, nullptr otherwise.
+//
+// Assumptions:
+// 'tree' must be a block assignment.
+//
+// Notes:
+// If successful, this method always returns the incoming tree, modifying only
+// its arguments.
+
GenTreePtr Compiler::fgMorphOneAsgBlockOp(GenTreePtr tree)
{
// This must be a block assignment.
#if FEATURE_SIMD
if (varTypeIsSIMD(asgType))
{
- assert(!isCopyBlock); // Else we would have returned the tree bove.
+ assert(!isCopyBlock); // Else we would have returned the tree above.
noway_assert(src->IsIntegralConst(0));
noway_assert(destVarDsc != nullptr);
GenTree* effectiveVal = tree->gtEffectiveVal();
// TODO-1stClassStructs: We would like to transform non-TYP_STRUCT nodes to
- // either plan lclVars or GT_INDs. However, for now we want to preserve most
+ // either plain lclVars or GT_INDs. However, for now we want to preserve most
// of the block nodes until the Rationalizer.
if (!varTypeIsStruct(asgType))
}
//------------------------------------------------------------------------
-// fgMorphCopyBlock: Perform the Morphing of a GT_COPYBLK and GT_COPYOBJ nodes
+// fgMorphCopyBlock: Perform the Morphing of block copy
//
// Arguments:
-// tree - a tree node with a gtOper of GT_COPYBLK or GT_COPYOBJ
-// the child nodes for tree have already been Morphed
+// tree - a block copy (i.e. an assignment with a block op on the lhs).
//
// Return Value:
-// We can return the orginal GT_COPYBLK or GT_COPYOBJ unmodified (least desirable, but always correct)
-// We can return a single assignment, when fgMorphOneAsgBlockOp transforms it (most desirable)
+// We can return the original block copy unmodified (least desirable, but always correct).
+// We can return a single assignment, when fgMorphOneAsgBlockOp transforms it (most desirable).
// If we have performed struct promotion of the Source() or the Dest() then we will try to
-// perform a field by field assignment for each of the promoted struct fields
+// perform a field by field assignment for each of the promoted struct fields.
+//
+// Assumptions:
+// The child nodes for tree have already been Morphed.
//
// Notes:
-// If we leave it as a GT_COPYBLK or GT_COPYOBJ we will call lvaSetVarDoNotEnregister() on both Source() and Dest()
+// If we leave it as a block copy we will call lvaSetVarDoNotEnregister() on both Source() and Dest().
// When performing a field by field assignment we can have one of Source() or Dest() treated as a blob of bytes
// and in such cases we will call lvaSetVarDoNotEnregister() on the one treated as a blob of bytes.
// if the Source() or Dest() is a struct that has a "CustomLayout" and "ContainsHoles" then we
-// can not use a field by field assignment and must the orginal GT_COPYBLK unmodified.
+// cannot use a field by field assignment and must use the original block copy unmodified.
GenTreePtr Compiler::fgMorphCopyBlock(GenTreePtr tree)
{
#if FEATURE_MULTIREG_RET
// If this is a multi-reg return, we will not do any morphing of this node.
- if ((rhs->OperGet() == GT_CALL) && rhs->AsCall()->HasMultiRegRetVal())
+ if (rhs->IsMultiRegCall())
{
assert(dest->OperGet() == GT_LCL_VAR);
JITDUMP(" not morphing a multireg call return\n");
if (destLclVar->lvType == TYP_STRUCT)
{
// It would be nice if lvExactSize always corresponded to the size of the struct,
- // but it does not for the spill temps.
+ // but it doesn't always for the temps that the importer creates when it spills side
+ // effects.
+ // TODO-Cleanup: Determine when this happens, and whether it can be changed.
blockWidth = info.compCompHnd->getClassSize(destLclVar->lvVerTypeInfo.GetClassHandle());
}
else
// lclVar if possible.
//
// Arguments:
-// ppTree - A pointer-to-a-pointer for the GT_BLK or GT_OBJ
-// fgWalkData - A pointer to tree walk data providing the context
+// use - A use reference for a block node
+// keepBlk - True if this should remain a block node if it is not a lclVar
//
// Return Value:
// None.