From f5ee5d05f367d6221b76f7a3ef7dad96605dbf04 Mon Sep 17 00:00:00 2001
From: Richard Biener
Date: Thu, 23 Jan 2020 12:43:26 +0100
Subject: [PATCH] tree-optimization/93354 FRE redundant store removal validity
 fix

This fixes tracking of the alias-set of partial defs for use by
redundant store removal.

2020-01-23  Richard Biener

	PR tree-optimization/93381
	* tree-ssa-sccvn.c (vn_walk_cb_data::push_partial_def): Take
	alias-set of the def as argument and record the first one.
	(vn_walk_cb_data::first_set): New member.
	(vn_reference_lookup_3): Pass the alias-set of the current def
	to push_partial_def.  Fix alias-set used in the aggregate copy
	case.
	(vn_reference_lookup): Consistently set *last_vuse_ptr.
	* real.c (clear_significand_below): Fix out-of-bound access.

	* gcc.dg/torture/pr93354.c: New testcase.
---
 gcc/ChangeLog                          | 12 ++++++++++++
 gcc/real.c                             |  5 ++++-
 gcc/testsuite/ChangeLog                |  5 +++++
 gcc/testsuite/gcc.dg/torture/pr93354.c | 22 ++++++++++++++++++++++
 gcc/tree-ssa-sccvn.c                   | 34 ++++++++++++++++++++++------------
 5 files changed, 65 insertions(+), 13 deletions(-)
 create mode 100644 gcc/testsuite/gcc.dg/torture/pr93354.c

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index e62d3c0..cf8e212 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,15 @@
+2020-01-23  Richard Biener
+
+	PR tree-optimization/93381
+	* tree-ssa-sccvn.c (vn_walk_cb_data::push_partial_def): Take
+	alias-set of the def as argument and record the first one.
+	(vn_walk_cb_data::first_set): New member.
+	(vn_reference_lookup_3): Pass the alias-set of the current def
+	to push_partial_def.  Fix alias-set used in the aggregate copy
+	case.
+	(vn_reference_lookup): Consistently set *last_vuse_ptr.
+	* real.c (clear_significand_below): Fix out-of-bound access.
+
 2020-01-23  Jakub Jelinek
 
 	PR target/93346
diff --git a/gcc/real.c b/gcc/real.c
index a57e5df..46d8126 100644
--- a/gcc/real.c
+++ b/gcc/real.c
@@ -420,7 +420,10 @@ clear_significand_below (REAL_VALUE_TYPE *r, unsigned int n)
   for (i = 0; i < w; ++i)
     r->sig[i] = 0;
 
-  r->sig[w] &= ~(((unsigned long)1 << (n % HOST_BITS_PER_LONG)) - 1);
+  /* We are actually passing N == SIGNIFICAND_BITS which would result
+     in an out-of-bound access below.  */
+  if (n % HOST_BITS_PER_LONG != 0)
+    r->sig[w] &= ~(((unsigned long)1 << (n % HOST_BITS_PER_LONG)) - 1);
 }
 
 /* Divide the significands of A and B, placing the result in R.  Return
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index dbcd8dd..68b0ea6 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,8 @@
+2020-01-23  Richard Biener
+
+	PR tree-optimization/93381
+	* gcc.dg/torture/pr93354.c: New testcase.
+
 2020-01-23  Jakub Jelinek
 
 	PR target/93346
diff --git a/gcc/testsuite/gcc.dg/torture/pr93354.c b/gcc/testsuite/gcc.dg/torture/pr93354.c
new file mode 100644
index 0000000..8ccf1e6
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr93354.c
@@ -0,0 +1,22 @@
+/* { dg-do run } */
+
+typedef __INT32_TYPE__ int32_t;
+typedef __INT64_TYPE__ int64_t;
+struct X { int32_t i; int32_t j; };
+void foo (int64_t *z)
+{
+  ((struct X *)z)->i = 0x05060708;
+  ((struct X *)z)->j = 0x01020304;
+  *z = 0x0102030405060708;
+}
+
+int main()
+{
+  int64_t l = 0;
+  int64_t *p;
+  asm ("" : "=r" (p) : "0" (&l));
+  foo (p);
+  if (l != 0x0102030405060708)
+    __builtin_abort ();
+  return 0;
+}
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index 0b8ee58..6e0b220 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -1693,7 +1693,8 @@ struct vn_walk_cb_data
       ao_ref_init (&orig_ref, orig_ref_);
     }
   ~vn_walk_cb_data ();
-  void *push_partial_def (const pd_data& pd, tree, HOST_WIDE_INT);
+  void *push_partial_def (const pd_data& pd, tree,
+			  alias_set_type, HOST_WIDE_INT);
 
   vn_reference_t vr;
   ao_ref orig_ref;
@@ -1706,6 +1707,7 @@ struct vn_walk_cb_data
   /* The first defs range to avoid splay tree setup in most cases.  */
   pd_range first_range;
   tree first_vuse;
+  alias_set_type first_set;
   splay_tree known_ranges;
   obstack ranges_obstack;
 };
@@ -1746,13 +1748,13 @@ pd_tree_dealloc (void *, void *)
 }
 
 /* Push PD to the vector of partial definitions returning a
-   value when we are ready to combine things with VUSE and MAXSIZEI,
+   value when we are ready to combine things with VUSE, SET and MAXSIZEI,
    NULL when we want to continue looking for partial defs or -1
    on failure.  */
 
 void *
 vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse,
-				   HOST_WIDE_INT maxsizei)
+				   alias_set_type set, HOST_WIDE_INT maxsizei)
 {
   const HOST_WIDE_INT bufsize = 64;
   /* We're using a fixed buffer for encoding so fail early if the object
@@ -1773,6 +1775,7 @@ vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse,
       first_range.offset = pd.offset;
       first_range.size = pd.size;
       first_vuse = vuse;
+      first_set = set;
       last_vuse_ptr = NULL;
       /* Continue looking for partial defs.  */
       return NULL;
@@ -1903,10 +1906,10 @@ vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse,
       if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Successfully combined %u partial definitions\n", ndefs);
-      /* ??? If we track partial defs alias-set we could use that if it
-	 is the same for all.  Use zero for now.  */
+      /* We are using the alias-set of the first store we encounter which
+	 should be appropriate here.  */
       return vn_reference_lookup_or_insert_for_pieces
-	  (first_vuse, 0, vr->type, vr->operands, val);
+	  (first_vuse, first_set, vr->type, vr->operands, val);
     }
   else
     {
@@ -2492,7 +2495,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
	      pd.rhs = build_constructor (NULL_TREE, NULL);
	      pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
	      pd.size = leni;
-	      return data->push_partial_def (pd, vuse, maxsizei);
+	      return data->push_partial_def (pd, vuse, 0, maxsizei);
	    }
	}
 
@@ -2553,7 +2556,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
	      pd.rhs = gimple_assign_rhs1 (def_stmt);
	      pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
	      pd.size = size2i / BITS_PER_UNIT;
-	      return data->push_partial_def (pd, vuse, maxsizei);
+	      return data->push_partial_def (pd, vuse, get_alias_set (lhs),
+					     maxsizei);
	    }
	}
     }
@@ -2665,7 +2669,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
	      pd.rhs = rhs;
	      pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
	      pd.size = size2i / BITS_PER_UNIT;
-	      return data->push_partial_def (pd, vuse, maxsizei);
+	      return data->push_partial_def (pd, vuse, get_alias_set (lhs),
+					     maxsizei);
	    }
	}
     }
@@ -2751,7 +2756,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
	      pd.rhs = SSA_VAL (def_rhs);
	      pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
	      pd.size = size2i / BITS_PER_UNIT;
-	      return data->push_partial_def (pd, vuse, maxsizei);
+	      return data->push_partial_def (pd, vuse, get_alias_set (lhs),
+					     maxsizei);
	    }
	}
     }
@@ -2764,6 +2770,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
	   || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	   || handled_component_p (gimple_assign_rhs1 (def_stmt))))
     {
+      tree lhs = gimple_assign_lhs (def_stmt);
       tree base2;
       int i, j, k;
       auto_vec<vn_reference_op_s> rhs;
@@ -2870,7 +2877,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
	{
	  if (data->partial_defs.is_empty ())
	    return vn_reference_lookup_or_insert_for_pieces
-		(vuse, get_alias_set (rhs1), vr->type, vr->operands, val);
+		(vuse, get_alias_set (lhs), vr->type, vr->operands, val);
	  /* This is the only interesting case for partial-def handling
	     coming from targets that like to gimplify init-ctors as
	     aggregate copies from constant data like aarch64 for PR83518.  */
@@ -2882,7 +2889,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
	      pd.rhs = val;
	      pd.offset = 0;
	      pd.size = maxsizei / BITS_PER_UNIT;
-	      return data->push_partial_def (pd, vuse, maxsizei);
+	      return data->push_partial_def (pd, vuse, get_alias_set (lhs),
+					     maxsizei);
	    }
	}
 
@@ -3205,6 +3213,8 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
	  return NULL_TREE;
	}
 
+  if (last_vuse_ptr)
+    *last_vuse_ptr = vr1.vuse;
   return vn_reference_lookup_1 (&vr1, vnresult);
 }
 
-- 
2.7.4
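Note on the real.c hunk: the following is a minimal standalone sketch (not GCC source) of the indexing hazard the patch guards against in clear_significand_below.  The significand is modeled as a bare array; SIGSZ's value, the toy_* names and main() are illustrative assumptions, and only the added "n % HOST_BITS_PER_LONG != 0" guard mirrors the actual fix.

/* Sketch of the out-of-bound access fixed in clear_significand_below.  */
#include <assert.h>
#include <limits.h>

#define HOST_BITS_PER_LONG ((int) sizeof (long) * CHAR_BIT)
#define SIGSZ 3   /* assumed word count; real.c derives it from the format */
#define SIGNIFICAND_BITS (SIGSZ * HOST_BITS_PER_LONG)

struct toy_real
{
  unsigned long sig[SIGSZ];
};

/* Clear all significand bits below bit N.  With N == SIGNIFICAND_BITS,
   W equals SIGSZ, so the unconditional masking of sig[W] that the patch
   removes would write one element past the end of the array.  The guard
   skips the partial-word mask exactly when no partial word exists.  */
static void
toy_clear_significand_below (struct toy_real *r, unsigned int n)
{
  unsigned int i, w = n / HOST_BITS_PER_LONG;

  for (i = 0; i < w; ++i)
    r->sig[i] = 0;

  if (n % HOST_BITS_PER_LONG != 0)
    r->sig[w] &= ~(((unsigned long) 1 << (n % HOST_BITS_PER_LONG)) - 1);
}

int
main (void)
{
  struct toy_real r = { { ~0UL, ~0UL, ~0UL } };
  unsigned int i;

  /* The call pattern the patch comment refers to: N is a multiple of the
     word size, so the loop already clears every word and sig[w] must not
     be touched at all.  */
  toy_clear_significand_below (&r, SIGNIFICAND_BITS);
  for (i = 0; i < SIGSZ; ++i)
    assert (r.sig[i] == 0);
  return 0;
}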