2014-05-20  Richard Biener  <rguenther@suse.de>

+	PR tree-optimization/61221
+	* tree-ssa-pre.c (el_to_update): Remove.
+	(eliminate_dom_walker::before_dom_children): Handle released
+	VDEFs by value-numbering them to the associated VUSE.  Update
+	stmt immediately for substituted call address.
+	(eliminate): Remove delayed stmt updating code.
+	* tree-ssa-sccvn.c (vuse_ssa_val): New function valueizing
+	possibly late re-numbered vuses.
+	(vn_reference_lookup_2): Adjust.
+	(vn_reference_lookup_pieces): Likewise.
+	(vn_reference_lookup): Likewise.
+
+2014-05-20  Richard Biener  <rguenther@suse.de>
+
	* config.gcc: Remove need_64bit_hwint.
	* configure.ac: Do not define NEED_64BIT_HOST_WIDE_INT.
	* hwint.h: Do not check NEED_64BIT_HOST_WIDE_INT but assume
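
The fix can be pictured with a toy model. When elimination substitutes a call address so that the statement no longer clobbers memory (e.g. # .MEM_5 = VDEF <.MEM_3> becoming a plain VUSE <.MEM_3>), update_stmt releases .MEM_5; the patch value-numbers the released name to its VUSE and lets lookups chase through such names. The standalone sketch below models only that chase; valnum, in_free_list, toy_vuse_ssa_val and the SSA versions used are invented for illustration and are not GCC code.

#include <assert.h>
#include <stdbool.h>

#define N_NAMES 8

/* Toy lattice (not GCC's representation): valnum[] maps an SSA
   version to its value number, in_free_list[] marks names that
   have been released.  */
static int valnum[N_NAMES];
static bool in_free_list[N_NAMES];

/* Models vuse_ssa_val: chase value numbers until a name that is
   not in the free list is reached.  */
static int
toy_vuse_ssa_val (int x)
{
  do
    x = valnum[x];
  while (in_free_list[x]);
  return x;
}

int
main (void)
{
  for (int i = 0; i < N_NAMES; i++)
    valnum[i] = i;		/* Every name starts as its own value.  */

  /* .MEM_5 = VDEF <.MEM_3> loses its VDEF: release name 5 and
     value-number it to its VUSE, name 3.  */
  in_free_list[5] = true;
  valnum[5] = 3;

  /* A lookup still holding name 5 resolves to name 3.  */
  assert (toy_vuse_ssa_val (5) == 3);
  return 0;
}
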
/* Local state for the eliminate domwalk. */
static vec<gimple> el_to_remove;
-static vec<gimple> el_to_update;
static unsigned int el_todo;
static vec<tree> el_avail;
static vec<tree> el_avail_stack;
print_gimple_stmt (dump_file, stmt, 0, 0);
}
pre_stats.eliminations++;
+
+ tree vdef = gimple_vdef (stmt);
+ tree vuse = gimple_vuse (stmt);
propagate_tree_value_into_stmt (&gsi, sprime);
stmt = gsi_stmt (gsi);
update_stmt (stmt);
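+ /* If propagation removed the statement's VDEF, the released
+ name would leave the VN lattice stale; value-number it to the
+ incoming VUSE so lookups can still resolve through it.  */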
+ if (vdef != gimple_vdef (stmt))
+ VN_INFO (vdef)->valnum = vuse;
/* If we removed EH side-effects from the statement, clean
its EH information. */
sprime = fold_convert (gimple_expr_type (stmt), sprime);
pre_stats.eliminations++;
+
+ tree vdef = gimple_vdef (stmt);
+ tree vuse = gimple_vuse (stmt);
propagate_tree_value_into_stmt (&gsi, sprime);
stmt = gsi_stmt (gsi);
update_stmt (stmt);
+ if (vdef != gimple_vdef (stmt))
+ VN_INFO (vdef)->valnum = vuse;
/* If we removed EH side-effects from the statement, clean
its EH information. */
}
gimple_call_set_fn (stmt, fn);
- el_to_update.safe_push (stmt);
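+ /* Updating the stmt immediately is now safe: if the call lost
+ its VDEF, the released name is value-numbered to the VUSE
+ below, so the VN lattice stays valid.  */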
+ tree vdef = gimple_vdef (stmt);
+ tree vuse = gimple_vuse (stmt);
+ update_stmt (stmt);
+ if (vdef != gimple_vdef (stmt))
+ VN_INFO (vdef)->valnum = vuse;
/* When changing a call into a noreturn call, cfg cleanup
is needed to fix up the noreturn call. */
need_ab_cleanup = BITMAP_ALLOC (NULL);
el_to_remove.create (0);
- el_to_update.create (0);
el_todo = 0;
el_avail.create (0);
el_avail_stack.create (0);
}
el_to_remove.release ();
- /* We cannot update call statements with virtual operands during
- SSA walk. This might remove them which in turn makes our
- VN lattice invalid. */
- FOR_EACH_VEC_ELT (el_to_update, i, stmt)
- update_stmt (stmt);
- el_to_update.release ();
-
return el_todo;
}
#define SSA_VAL(x) (VN_INFO ((x))->valnum)
+/* Return the SSA value of the VUSE x, supporting released VDEFs
+ during elimination which will value-number the VDEF to the
+ associated VUSE (but not substitute in the whole lattice). */
+
+static inline tree
+vuse_ssa_val (tree x)
+{
+ if (!x)
+ return NULL_TREE;
+
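+ /* A released VDEF is value-numbered to its VUSE, which may
+ itself be released already, so iterate until a live name is
+ reached.  */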
+ do
+ {
+ x = SSA_VAL (x);
+ }
+ while (SSA_NAME_IN_FREE_LIST (x));
+
+ return x;
+}
+
/* This represents the top of the VN lattice, which is the universal
value. */
/* Fixup vuse and hash. */
if (vr->vuse)
vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
- vr->vuse = SSA_VAL (vuse);
+ vr->vuse = vuse_ssa_val (vuse);
if (vr->vuse)
vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
vnresult = &tmp;
*vnresult = NULL;
- vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
+ vr1.vuse = vuse_ssa_val (vuse);
shared_lookup_references.truncate (0);
shared_lookup_references.safe_grow (operands.length ());
memcpy (shared_lookup_references.address (),
if (vnresult)
*vnresult = NULL;
- vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
+ vr1.vuse = vuse_ssa_val (vuse);
vr1.operands = operands
= valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
vr1.type = TREE_TYPE (op);