tree-optimization/100923 - fix alias-ref construction wrt availability
author Richard Biener <rguenther@suse.de>
Tue, 8 Jun 2021 10:52:12 +0000 (12:52 +0200)
committer Richard Biener <rguenther@suse.de>
Tue, 8 Jun 2021 13:09:18 +0000 (15:09 +0200)
This PR shows that building an ao_ref from value numbers is prone to
exposing bogus contextual alias info to the oracle.  The following makes
sure to construct ao_refs only from SSA names that are available at the
program point.

Along the way it reworks the awkward valueize_refs[_1] API.
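To illustrate, here is a condensed, hand-written sketch paraphrasing the
hunks below (GCC-internal declarations elided, so it is not compilable on
its own): the valueization helpers now mutate the operand vector in place
through a pointer instead of returning a possibly reallocated copy, and a
caller that is about to build an ao_ref re-valueizes a private copy with
with_avail = true so that only available SSA names can end up in the
reference handed to the alias oracle.

  /* Old API: the (possibly reallocated) vector was returned by value.  */
  static vec<vn_reference_op_s>
  valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
                   bool with_avail = false);

  /* New API: the vector is valueized in place.  */
  static void
  valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
                   bool with_avail = false);

  /* Caller pattern in vn_reference_lookup (condensed from the hunk below):
     build the ao_ref from a copy re-valueized with availability checking
     rather than from vr1.operands, which may contain value numbers that
     are not available at this program point.  */
  auto_vec<vn_reference_op_s> ops_for_ref;
  if (valueized_anything)
    {
      copy_reference_ops_from_ref (op, &ops_for_ref);
      bool tem;
      valueize_refs_1 (&ops_for_ref, &tem, /*with_avail=*/true);
    }
  if (!valueized_anything
      || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
                                         vr1.type, ops_for_ref))
    ao_ref_init (&r, op);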

2021-06-08  Richard Biener  <rguenther@suse.de>

PR tree-optimization/100923
* tree-ssa-sccvn.c (valueize_refs_1): Take a pointer to
the operand vector to be valueized.
(valueize_refs): Likewise.
(valueize_shared_reference_ops_from_ref): Adjust.
(valueize_shared_reference_ops_from_call): Likewise.
(vn_reference_lookup_3): Likewise.
(vn_reference_lookup_pieces): Likewise.  Re-valueize
with honoring availability when we are about to create
the ao_ref and valueized before.
(vn_reference_lookup): Likewise.
(vn_reference_insert_pieces): Adjust.

* gcc.dg/torture/pr100923.c: New testcase.

gcc/testsuite/gcc.dg/torture/pr100923.c [new file with mode: 0644]
gcc/tree-ssa-sccvn.c

diff --git a/gcc/testsuite/gcc.dg/torture/pr100923.c b/gcc/testsuite/gcc.dg/torture/pr100923.c
new file mode 100644
index 0000000..05a6341
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr100923.c
@@ -0,0 +1,25 @@
+/* { dg-do run } */
+
+int a = 1, b, c, *d = &a, *e = &a, f;
+void g(int h) {}
+void k(int *l)
+{
+  int ***j;
+  if (c)
+    {
+      *j = &l;
+      ***j;
+    }
+  g(*l);
+  *e = f;
+  if (*l)
+    {
+      int i = b / a;
+      a = i;
+    }
+}
+int main()
+{
+  k(d);
+  return 0;
+}
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index e876121..64e3a70 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -553,7 +553,7 @@ vuse_ssa_val (tree x)
   return x;
 }
 
-/* Similar to the above but used as callback for walk_non_aliases_vuses
+/* Similar to the above but used as callback for walk_non_aliased_vuses
    and thus should stop at unvisited VUSE to not walk across region
    boundaries.  */
 
@@ -1579,8 +1579,8 @@ contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
    the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
    whether any operands were valueized.  */
 
-static vec<vn_reference_op_s> 
-valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
+static void
+valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
                 bool with_avail = false)
 {
   vn_reference_op_t vro;
@@ -1588,7 +1588,7 @@ valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
 
   *valueized_anything = false;
 
-  FOR_EACH_VEC_ELT (orig, i, vro)
+  FOR_EACH_VEC_ELT (*orig, i, vro)
     {
       if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
@@ -1627,16 +1627,16 @@ valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
       if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
-         && orig[i - 1].opcode == MEM_REF)
+         && (*orig)[i - 1].opcode == MEM_REF)
        {
-         if (vn_reference_fold_indirect (&orig, &i))
+         if (vn_reference_fold_indirect (orig, &i))
            *valueized_anything = true;
        }
       else if (i > 0
               && vro->opcode == SSA_NAME
-              && orig[i - 1].opcode == MEM_REF)
+              && (*orig)[i - 1].opcode == MEM_REF)
        {
-         if (vn_reference_maybe_forwprop_address (&orig, &i))
+         if (vn_reference_maybe_forwprop_address (orig, &i))
            *valueized_anything = true;
        }
       /* If it transforms a non-constant ARRAY_REF into a constant
@@ -1654,15 +1654,13 @@ valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
          off.to_shwi (&vro->off);
        }
     }
-
-  return orig;
 }
 
-static vec<vn_reference_op_s> 
-valueize_refs (vec<vn_reference_op_s> orig)
+static void
+valueize_refs (vec<vn_reference_op_s> *orig)
 {
   bool tem;
-  return valueize_refs_1 (orig, &tem);
+  valueize_refs_1 (orig, &tem);
 }
 
 static vec<vn_reference_op_s> shared_lookup_references;
@@ -1679,8 +1677,7 @@ valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
     return vNULL;
   shared_lookup_references.truncate (0);
   copy_reference_ops_from_ref (ref, &shared_lookup_references);
-  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
-                                             valueized_anything);
+  valueize_refs_1 (&shared_lookup_references, valueized_anything);
   return shared_lookup_references;
 }
 
@@ -1695,7 +1692,7 @@ valueize_shared_reference_ops_from_call (gcall *call)
     return vNULL;
   shared_lookup_references.truncate (0);
   copy_reference_ops_from_call (call, &shared_lookup_references);
-  shared_lookup_references = valueize_refs (shared_lookup_references);
+  valueize_refs (&shared_lookup_references);
   return shared_lookup_references;
 }
 
@@ -2546,7 +2543,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
       if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
        {
          copy_reference_ops_from_ref (lhs, &lhs_ops);
-         lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
+         valueize_refs_1 (&lhs_ops, &valueized_anything, true);
        }
       vn_context_bb = saved_rpo_bb;
       ao_ref_init (&lhs_ref, lhs);
@@ -3225,7 +3222,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
        vr->operands.truncate (i + 1 + rhs.length ());
       FOR_EACH_VEC_ELT (rhs, j, vro)
        vr->operands[i + 1 + j] = *vro;
-      vr->operands = valueize_refs (vr->operands);
+      valueize_refs (&vr->operands);
       if (old == shared_lookup_references)
        shared_lookup_references = vr->operands;
       vr->hashcode = vn_reference_compute_hash (vr);
@@ -3526,8 +3523,9 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set,
          operands.address (),
          sizeof (vn_reference_op_s)
          * operands.length ());
-  vr1.operands = operands = shared_lookup_references
-    = valueize_refs (shared_lookup_references);
+  bool valueized_p;
+  valueize_refs_1 (&shared_lookup_references, &valueized_p);
+  vr1.operands = shared_lookup_references;
   vr1.type = type;
   vr1.set = set;
   vr1.base_set = base_set;
@@ -3543,13 +3541,31 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set,
       ao_ref r;
       unsigned limit = param_sccvn_max_alias_queries_per_access;
       vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE);
+      vec<vn_reference_op_s> ops_for_ref;
+      if (!valueized_p)
+       ops_for_ref = vr1.operands;
+      else
+       {
+         /* For ao_ref_from_mem we have to ensure only available SSA names
+            end up in base and the only convenient way to make this work
+            for PRE is to re-valueize with that in mind.  */
+         ops_for_ref.create (operands.length ());
+         ops_for_ref.quick_grow (operands.length ());
+         memcpy (ops_for_ref.address (),
+                 operands.address (),
+                 sizeof (vn_reference_op_s)
+                 * operands.length ());
+         valueize_refs_1 (&ops_for_ref, &valueized_p, true);
+       }
       if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
-                                        vr1.operands))
+                                        ops_for_ref))
        *vnresult
          = ((vn_reference_t)
             walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
                                     vn_reference_lookup_3, vuse_valueize,
                                     limit, &data));
+      if (ops_for_ref != shared_lookup_references)
+       ops_for_ref.release ();
       gcc_checking_assert (vr1.operands == shared_lookup_references);
     }
 
@@ -3578,14 +3594,14 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
 {
   vec<vn_reference_op_s> operands;
   struct vn_reference_s vr1;
-  bool valuezied_anything;
+  bool valueized_anything;
 
   if (vnresult)
     *vnresult = NULL;
 
   vr1.vuse = vuse_ssa_val (vuse);
   vr1.operands = operands
-    = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
+    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
   vr1.type = TREE_TYPE (op);
   ao_ref op_ref;
   ao_ref_init (&op_ref, op);
@@ -3601,11 +3617,18 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
       vn_reference_t wvnresult;
       ao_ref r;
       unsigned limit = param_sccvn_max_alias_queries_per_access;
+      auto_vec<vn_reference_op_s> ops_for_ref;
+      if (valueized_anything)
+       {
+         copy_reference_ops_from_ref (op, &ops_for_ref);
+         bool tem;
+         valueize_refs_1 (&ops_for_ref, &tem, true);
+       }
       /* Make sure to use a valueized reference if we valueized anything.
          Otherwise preserve the full reference for advanced TBAA.  */
-      if (!valuezied_anything
+      if (!valueized_anything
          || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
-                                            vr1.type, vr1.operands))
+                                            vr1.type, ops_for_ref))
        ao_ref_init (&r, op);
       vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
                            last_vuse_ptr, kind, tbaa_p, mask);
@@ -3733,7 +3756,8 @@ vn_reference_insert_pieces (tree vuse, alias_set_type set,
   vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
   vr1->value_id = value_id;
   vr1->vuse = vuse_ssa_val (vuse);
-  vr1->operands = valueize_refs (operands);
+  vr1->operands = operands;
+  valueize_refs (&vr1->operands);
   vr1->type = type;
   vr1->punned = false;
   vr1->set = set;