GTNODE(INDEX_ADDR , GenTreeIndexAddr ,0,GTK_BINOP|GTK_EXOP) // Addr of SZ-array-element; used when aiming to minimize compile times.
GTNODE(MKREFANY , GenTreeOp ,0,GTK_BINOP|DBK_NOTLIR)
-GTNODE(LEA , GenTreeAddrMode ,0,GTK_BINOP|GTK_EXOP)
+GTNODE(LEA , GenTreeAddrMode ,0,GTK_BINOP|GTK_EXOP|DBK_NOTHIR)
#if !defined(TARGET_64BIT)
// A GT_LONG node simply represents the long value produced by the concatenation
}
noway_assert(cloned != nullptr);
- GenTree* newArg = new (comp, GT_ADDR)
- GenTreeAddrMode(TYP_BYREF, cloned, nullptr, 0, comp->eeGetEEInfo()->offsetOfWrapperDelegateIndirectCell);
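+ // Build the indirection-cell address as "cloned + offsetOfWrapperDelegateIndirectCell" with a plain byref ADD instead of an address-mode node.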
+ GenTree* offsetNode = comp->gtNewIconNode(comp->eeGetEEInfo()->offsetOfWrapperDelegateIndirectCell, TYP_I_IMPL);
+ GenTree* newArg = comp->gtNewOperNode(GT_ADD, TYP_BYREF, cloned, offsetNode);
// Append newArg as the last arg
PushBack(comp, NewCallArg::Primitive(newArg).WellKnown(WellKnownArg::WrapperDelegateCell));
{
setLclRelatedToSIMDIntrinsic(simdStructNode);
}
- GenTree* copyBlkAddr = copyBlkDst;
- if (copyBlkAddr->gtOper == GT_LEA)
- {
- copyBlkAddr = copyBlkAddr->AsAddrMode()->Base();
- }
- GenTreeLclVarCommon* localDst = copyBlkAddr->IsLocalAddrExpr();
+
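+ // copyBlkDst is no longer wrapped in an address-mode node, so we can query it for a local address directly.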
+ GenTreeLclVarCommon* localDst = copyBlkDst->IsLocalAddrExpr();
if (localDst != nullptr)
{
setLclRelatedToSIMDIntrinsic(localDst);
}
//--------------------------------------------------------------------------------------------------------
-// createAddressNodeForSIMDInit: Generate the address node(GT_LEA) if we want to intialize vector2, vector3 or vector4
+// createAddressNodeForSIMDInit: Generate the address node if we want to initialize Vector2, Vector3 or Vector4
// from first argument's address.
//
// Arguments:
// TODO-CQ:
// 1. Currently just support for GT_FIELD and GT_INDEX, because we can only verify the GT_INDEX node or GT_Field
// are located contiguously or not. In future we should support more cases.
-// 2. Though it happens to just work fine front-end phases are not aware of GT_LEA node. Therefore, convert these
-// to use GT_ADDR.
+//
GenTree* Compiler::createAddressNodeForSIMDInit(GenTree* tree, unsigned simdSize)
{
assert(tree->OperGet() == GT_FIELD || tree->OperGet() == GT_INDEX);
- GenTree* byrefNode = nullptr;
- GenTree* startIndex = nullptr;
- unsigned offset = 0;
- var_types baseType = tree->gtType;
+ GenTree* byrefNode = nullptr;
+ unsigned offset = 0;
+ var_types baseType = tree->gtType;
if (tree->OperGet() == GT_FIELD)
{
{
unreached();
}
- GenTree* address =
- new (this, GT_LEA) GenTreeAddrMode(TYP_BYREF, byrefNode, startIndex, genTypeSize(tree->TypeGet()), offset);
+
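+ // Build the element address as "byrefNode + offset" with a byref ADD; no address-mode node is needed in the front end.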
+ GenTree* address = gtNewOperNode(GT_ADD, TYP_BYREF, byrefNode, gtNewIconNode(offset, TYP_I_IMPL));
+
return address;
}
assert(op1->TypeGet() == simdType);
// copy vector (op1) to array (op2) starting at index (op3)
- simdTree = op1;
+ simdTree = op1;
+ copyBlkDst = op2;
+ if (op3 != nullptr)
+ {
+#ifdef TARGET_64BIT
+ // Upcast the index: it is safe to use a zero-extending cast since we've bounds checked it above.
+ op3 = gtNewCastNode(TYP_I_IMPL, op3, /* fromUnsigned */ true, TYP_I_IMPL);
+#endif // TARGET_64BIT
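+ // Scale the index by the element size and add it to the destination address.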
+ GenTree* elemSizeNode = gtNewIconNode(genTypeSize(simdBaseType), TYP_I_IMPL);
+ GenTree* indexOffs = gtNewOperNode(GT_MUL, TYP_I_IMPL, op3, elemSizeNode);
+ copyBlkDst = gtNewOperNode(GT_ADD, TYP_BYREF, copyBlkDst, indexOffs);
+ }
- // TODO-Cleanup: Though it happens to just work fine front-end phases are not aware of GT_LEA node.
- // Therefore, convert these to use GT_ADDR .
- copyBlkDst = new (this, GT_LEA)
- GenTreeAddrMode(TYP_BYREF, op2, op3, genTypeSize(simdBaseType), OFFSETOF__CORINFO_Array__data);
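+ // Finally, add the offset of the first element so that copyBlkDst points at the array data.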
+ copyBlkDst = gtNewOperNode(GT_ADD, TYP_BYREF, copyBlkDst,
+ gtNewIconNode(OFFSETOF__CORINFO_Array__data, TYP_I_IMPL));
doCopyBlk = true;
}
}
{
assert(oper != GT_ASG); // We handled assignments earlier.
assert(GenTree::OperIsBinary(oper));
- // Standard binary operator.
- ValueNumPair op2VNPair;
- if (tree->AsOp()->gtOp2 == nullptr)
- {
- // Handle any GT_LEA nodes as they can have a nullptr for op2.
- op2VNPair.SetBoth(ValueNumStore::VNForNull());
- }
- else
- {
- op2VNPair = tree->AsOp()->gtOp2->gtVNPair;
- }
// Handle a few special cases: if we add a field offset constant to a PtrToXXX, we will get back a
// new
ValueNumPair op2vnp;
ValueNumPair op2Xvnp;
- vnStore->VNPUnpackExc(op2VNPair, &op2vnp, &op2Xvnp);
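+ // GT_LEA nodes no longer appear in HIR, so every binary operator reaching this point has a non-null op2.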
+ vnStore->VNPUnpackExc(tree->AsOp()->gtOp2->gtVNPair, &op2vnp, &op2Xvnp);
ValueNumPair excSetPair = vnStore->VNPExcSetUnion(op1Xvnp, op2Xvnp);
ValueNum newVN = ValueNumStore::NoVN;
--- /dev/null
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System.Numerics;
+using System.Runtime.CompilerServices;
+
+public unsafe class Runtime_64375
+{
+ public static int Main()
+ {
+ var a = new StructWithFloats { FloatOne = 1, FloatThree = 2 };
+
+ return Problem(&a) ? 101 : 100;
+ }
+
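+    // Building Vector2 values from adjacent struct fields exercises the SIMD init-from-contiguous-memory path touched by this change.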
+ [MethodImpl(MethodImplOptions.NoInlining)]
+ private static bool Problem(StructWithFloats* p1)
+ {
+ var a = new Vector2(p1->FloatOne, p1->FloatTwo);
+ var b = new Vector2(p1->FloatThree, p1->FloatFour);
+
+ return a == b;
+ }
+
+ struct StructWithFloats
+ {
+ public float FloatOne;
+ public float FloatTwo;
+ public float FloatThree;
+ public float FloatFour;
+ }
+}
--- /dev/null
+<Project Sdk="Microsoft.NET.Sdk">
+ <PropertyGroup>
+ <OutputType>Exe</OutputType>
+ <Optimize>True</Optimize>
+ <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
+ </PropertyGroup>
+ <ItemGroup>
+ <Compile Include="$(MSBuildProjectName).cs" />
+ </ItemGroup>
+</Project>
\ No newline at end of file