--- /dev/null
+/* { dg-do run } */
+/* { dg-require-effective-target int32plus } */
+/* { dg-options "-O3" } */
+
+typedef __INT32_TYPE__ int32_t;
+
+#define BIG_ENDIAN __attribute__((scalar_storage_order("big-endian")))
+
+/* Host order version (little endian).  */
+struct _ip6_addr {
+ union {
+ char addr8[16];
+ int32_t addr32[4];
+ } u;
+};
+
+typedef struct _ip6_addr t_ip6_addr;
+
+struct _net_addr {
+ char is_v4;
+ union {
+ int32_t addr;
+ t_ip6_addr addr6;
+ } u;
+};
+
+typedef struct _net_addr t_net_addr;
+
+/* Big endian version.  */
+struct _be_ip6_addr {
+ union {
+ char addr8[16];
+ } BIG_ENDIAN u;
+} BIG_ENDIAN;
+
+typedef struct _be_ip6_addr t_be_ip6_addr;
+
+struct _be_net_addr {
+ char is_v4;
+ union {
+ t_be_ip6_addr addr6;
+ int32_t addr;
+ } BIG_ENDIAN u;
+} BIG_ENDIAN;
+
+typedef struct _be_net_addr t_be_net_addr;
+
+/* Convert host order to big-endian, one byte at a time.  */
+t_be_ip6_addr be_ip6_addr(const t_ip6_addr ip6)
+{
+ t_be_ip6_addr rc = {
+ .u.addr8[0] = ip6.u.addr8[0],
+ .u.addr8[1] = ip6.u.addr8[1],
+ .u.addr8[2] = ip6.u.addr8[2],
+ .u.addr8[3] = ip6.u.addr8[3],
+ .u.addr8[4] = ip6.u.addr8[4],
+ .u.addr8[5] = ip6.u.addr8[5],
+ .u.addr8[6] = ip6.u.addr8[6],
+ .u.addr8[7] = ip6.u.addr8[7],
+ .u.addr8[8] = ip6.u.addr8[8],
+ .u.addr8[9] = ip6.u.addr8[9],
+ .u.addr8[10] = ip6.u.addr8[10],
+ .u.addr8[11] = ip6.u.addr8[11],
+ .u.addr8[12] = ip6.u.addr8[12],
+ .u.addr8[13] = ip6.u.addr8[13],
+ .u.addr8[14] = ip6.u.addr8[14],
+ .u.addr8[15] = ip6.u.addr8[15],
+ };
+ return rc;
+}
+
+t_be_net_addr __attribute__((noipa)) be_net_addr(const t_net_addr ip)
+{
+ t_be_net_addr rc = {.is_v4 = ip.is_v4 };
+ if (ip.is_v4) {
+ rc.u.addr = ip.u.addr;
+ } else {
+ rc.u.addr6 = be_ip6_addr(ip.u.addr6);
+ }
+ return rc;
+}
+
+int main(void)
+{
+ t_be_net_addr out = { };
+
+ t_net_addr in = {
+ .is_v4 = 0,
+ .u.addr6.u.addr8 =
+ { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 }
+ };
+
+ out = be_net_addr(in);
+
+ /* When miscompiled, the first 4 bytes come out byte-swapped, so
+ checking byte 0 is enough.  */
+ if (in.u.addr6.u.addr8[0] != out.u.addr6.u.addr8[0])
+ __builtin_abort();
+
+ return 0;
+}
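For context (not part of the patch): scalar_storage_order fixes the byte
order of a struct's scalar members independently of the host, which is what
the test relies on.  A minimal standalone sketch, with invented names:

typedef __UINT32_TYPE__ uint32_t;

struct be_word {
  uint32_t w;
} __attribute__((scalar_storage_order("big-endian")));

int main(void)
{
  struct be_word b = { 0x00010203u };
  unsigned char p[4];
  __builtin_memcpy(p, &b, 4);   /* a byte-level copy is order-agnostic */
  /* The scalar is stored most-significant byte first on every host.  */
  return (p[0] == 0 && p[1] == 1 && p[2] == 2 && p[3] == 3) ? 0 : 1;
}

The hunks below are against gcc/tree-ssa-sccvn.cc, starting in
vn_reference_eq: value numbering previously ignored this byte-order
difference when matching and forwarding accesses.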
vn_reference_op_t vro1, vro2;
vn_reference_op_s tem1, tem2;
bool deref1 = false, deref2 = false;
+ bool reverse1 = false, reverse2 = false;
for (; vr1->operands.iterate (i, &vro1); i++)
{
if (vro1->opcode == MEM_REF)
deref1 = true;
/* Do not look through a storage order barrier. */
else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
return false;
+ reverse1 |= vro1->reverse;
if (known_eq (vro1->off, -1))
break;
off1 += vro1->off;
}
for (; vr2->operands.iterate (j, &vro2); j++)
{
if (vro2->opcode == MEM_REF)
deref2 = true;
/* Do not look through a storage order barrier. */
else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
return false;
+ reverse2 |= vro2->reverse;
if (known_eq (vro2->off, -1))
break;
off2 += vro2->off;
}
- if (maybe_ne (off1, off2))
+ if (maybe_ne (off1, off2) || reverse1 != reverse2)
return false;
if (deref1 && vro1->opcode == ADDR_EXPR)
{
[...]
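The reverse1/reverse2 tracking above makes vn_reference_eq reject operand
walks that reach the same offset in different storage orders.  Identical
bytes decode to different values under the two orders, as this standalone
sketch (invented names) shows:

#include <string.h>

typedef __INT32_TYPE__ int32_t;

struct ho_s { int32_t v; };                /* host order */
struct be_s { int32_t v; } __attribute__((scalar_storage_order("big-endian")));

int main(void)
{
  unsigned char bytes[4] = { 0, 1, 2, 3 };
  struct ho_s h;
  struct be_s b;
  memcpy(&h, bytes, 4);   /* same bytes ... */
  memcpy(&b, bytes, 4);   /* ... two storage orders */
  /* b.v is 0x00010203 everywhere; on a little-endian host h.v is
     0x03020100, so equating the two loads would be wrong.  */
  return b.v == 0x00010203 ? 0 : 1;
}

The next hunks are in copy_reference_ops_from_ref, which builds the operand
vectors that vn_reference_eq compares.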
temp.type = NULL_TREE;
temp.op0 = TREE_OPERAND (ref, 1);
temp.op1 = TREE_OPERAND (ref, 2);
+ temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
+ && TYPE_REVERSE_STORAGE_ORDER
+ (TREE_TYPE (TREE_OPERAND (ref, 0))));
{
tree this_offset = component_ref_field_offset (ref);
if (this_offset
&& poly_int_tree_p (this_offset))
[...]
break;
case ARRAY_RANGE_REF:
case ARRAY_REF:
[...]
* vn_ref_op_align_unit (&temp));
off.to_shwi (&temp.off);
}
+ temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
+ && TYPE_REVERSE_STORAGE_ORDER
+ (TREE_TYPE (TREE_OPERAND (ref, 0))));
}
break;
case VAR_DECL:
[...]
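The two hunks above make copy_reference_ops_from_ref record on each
COMPONENT_REF and ARRAY_RANGE_REF/ARRAY_REF operand whether its base
aggregate has TYPE_REVERSE_STORAGE_ORDER.  On a little-endian target, both
accesses in this sketch (invented names) now carry reverse = true:

typedef __INT32_TYPE__ int32_t;

struct be_rec {
  int32_t a;
  int32_t arr[2];
} __attribute__((scalar_storage_order("big-endian")));

int32_t sum(struct be_rec *p)
{
  return p->a        /* COMPONENT_REF with a reverse-order base */
       + p->arr[1];  /* ARRAY_REF with a reverse-order base */
}

int main(void)
{
  struct be_rec r = { 1, { 2, 3 } };
  return sum(&r) == 4 ? 0 : 1;
}

The context lines that follow are the tail of
contains_storage_order_barrier_p; the new predicate is inserted right
after it.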
return false;
}
+/* Return true if OPS represent an access with reverse storage order. */
+
+static bool
+reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
+{
+ unsigned i = 0;
+ if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
+ ++i;
+ switch (ops[i].opcode)
+ {
+ case ARRAY_REF:
+ case COMPONENT_REF:
+ case BIT_FIELD_REF:
+ case MEM_REF:
+ return ops[i].reverse;
+ default:
+ return false;
+ }
+}
+
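The leading REALPART_EXPR/IMAGPART_EXPR skip covers part loads from complex
fields, which wrap the reverse-order COMPONENT_REF.  A sketch, assuming
_Complex members are accepted under the attribute (invented names):

struct be_c {
  _Complex float z;
} __attribute__((scalar_storage_order("big-endian")));

float realpart(struct be_c *p)
{
  /* Gimplifies to REALPART_EXPR <COMPONENT_REF ...>, so the predicate
     must classify the operand one past the REALPART_EXPR.  */
  return __real__ p->z;
}

int main(void)
{
  struct be_c c = { 1.0f };
  return realpart(&c) == 1.0f ? 0 : 1;
}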
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
structures into their value numbers. This is done in-place, and
the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
whether any of the operands were valueized.  */
[...]
routines to extract the assigned bits. */
else if (known_eq (ref->size, maxsize)
&& is_gimple_reg_type (vr->type)
+ && !reverse_storage_order_for_component_p (vr->operands)
&& !contains_storage_order_barrier_p (vr->operands)
&& gimple_assign_single_p (def_stmt)
&& CHAR_BIT == 8
[...]
to access pieces from or we can combine to a larger entity. */
else if (known_eq (ref->size, maxsize)
&& is_gimple_reg_type (vr->type)
+ && !reverse_storage_order_for_component_p (vr->operands)
&& !contains_storage_order_barrier_p (vr->operands)
&& gimple_assign_single_p (def_stmt)
&& TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
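Both guarded transforms forward bits from a dominating store by
reinterpreting them in the target's native byte order (native
encode/interpret for constants, pieced SSA copies otherwise), which is
unsafe for a byte-reversed access, so they now punt.  A reduced shape of
the hazard (invented names):

typedef __INT32_TYPE__ int32_t;

#define BE __attribute__((scalar_storage_order("big-endian")))

struct be_buf {
  union {
    unsigned char b[4];
    int32_t w;
  } BE u;
} BE;

int main(void)
{
  struct be_buf x;
  x.u.w = 0x00010203;
  /* The stored bytes are 00 01 02 03 on every host.  Forwarding the
     stored value to the byte load in native order would yield 3 on a
     little-endian machine; the correct x.u.b[0] is 0.  */
  if (x.u.b[0] != 0)
    __builtin_abort();
  return 0;
}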