return x;
}
-/* Similar to the above but used as callback for walk_non_aliases_vuses
+/* Similar to the above but used as callback for walk_non_aliased_vuses
and thus should stop at unvisited VUSE to not walk across region
boundaries. */
-   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
-   whether any operands were valueized. */
+   the vector passed in is updated in place.  *VALUEIZED_ANYTHING will
+   specify whether any operands were valueized. */
-static vec<vn_reference_op_s>
-valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
+static void
+valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
bool with_avail = false)
{
vn_reference_op_t vro;
*valueized_anything = false;
- FOR_EACH_VEC_ELT (orig, i, vro)
+ FOR_EACH_VEC_ELT (*orig, i, vro)
{
if (vro->opcode == SSA_NAME
|| (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
if (i > 0
&& vro->op0
&& TREE_CODE (vro->op0) == ADDR_EXPR
- && orig[i - 1].opcode == MEM_REF)
+ && (*orig)[i - 1].opcode == MEM_REF)
{
- if (vn_reference_fold_indirect (&orig, &i))
+ if (vn_reference_fold_indirect (orig, &i))
*valueized_anything = true;
}
else if (i > 0
&& vro->opcode == SSA_NAME
- && orig[i - 1].opcode == MEM_REF)
+ && (*orig)[i - 1].opcode == MEM_REF)
{
- if (vn_reference_maybe_forwprop_address (&orig, &i))
+ if (vn_reference_maybe_forwprop_address (orig, &i))
*valueized_anything = true;
}
/* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
off.to_shwi (&vro->off);
}
}
-
- return orig;
}
-static vec<vn_reference_op_s>
-valueize_refs (vec<vn_reference_op_s> orig)
+static void
+valueize_refs (vec<vn_reference_op_s> *orig)
{
bool tem;
- return valueize_refs_1 (orig, &tem);
+ valueize_refs_1 (orig, &tem);
}
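
The hunks above change valueize_refs_1 and valueize_refs from returning the
possibly reallocated vector to mutating it through a pointer.  GCC's vec<> is
a small value-copyable descriptor over a heap buffer, so when a callee grows
the vector the caller's by-value copy of the descriptor goes stale; the old
API therefore had to hand the new descriptor back, and every caller had to
remember to reassign it.  Below is a minimal sketch of that hazard, with a
hypothetical MiniVec standing in for vec<> (illustration only, not GCC code):

/* MiniVec: a cheap, value-copyable descriptor over a heap buffer,
   like GCC's vec<>.  Growing it may reallocate, so any by-value copy
   of the descriptor can go stale.  */

#include <cstdlib>
#include <cstdio>

struct MiniVec
{
  int *data;
  unsigned len, cap;

  void push (int v)
  {
    if (len == cap)
      {
        cap = cap ? cap * 2 : 4;
        data = static_cast<int *> (realloc (data, cap * sizeof (int)));
      }
    data[len++] = v;
  }
};

/* Old style: the callee may reallocate, so it must return the possibly
   new descriptor and every caller must remember to reassign it.  */
static MiniVec
transform_old (MiniVec v)
{
  v.push (42);    /* May realloc; the caller's descriptor is now stale.  */
  return v;       /* Dropping this return value is a silent bug.  */
}

/* New style, as in the patch: mutate through a pointer, return void.
   There is no descriptor to forget to reassign.  */
static void
transform_new (MiniVec *v)
{
  v->push (42);
}

int
main ()
{
  MiniVec a = { nullptr, 0, 0 };
  a = transform_old (a);    /* Reassignment is mandatory here.  */
  transform_new (&a);       /* Here the caller just works.  */
  printf ("%u elements\n", a.len);
  free (a.data);
  return 0;
}

With the by-pointer form there is no return value to drop on the floor, which
is what lets the later hunks valueize shared_lookup_references and
vr->operands in place.
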
static vec<vn_reference_op_s> shared_lookup_references;
return vNULL;
shared_lookup_references.truncate (0);
copy_reference_ops_from_ref (ref, &shared_lookup_references);
- shared_lookup_references = valueize_refs_1 (shared_lookup_references,
- valueized_anything);
+ valueize_refs_1 (&shared_lookup_references, valueized_anything);
return shared_lookup_references;
}
return vNULL;
shared_lookup_references.truncate (0);
copy_reference_ops_from_call (call, &shared_lookup_references);
- shared_lookup_references = valueize_refs (shared_lookup_references);
+ valueize_refs (&shared_lookup_references);
return shared_lookup_references;
}
if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
{
copy_reference_ops_from_ref (lhs, &lhs_ops);
- lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
+ valueize_refs_1 (&lhs_ops, &valueized_anything, true);
}
vn_context_bb = saved_rpo_bb;
ao_ref_init (&lhs_ref, lhs);
vr->operands.truncate (i + 1 + rhs.length ());
FOR_EACH_VEC_ELT (rhs, j, vro)
vr->operands[i + 1 + j] = *vro;
- vr->operands = valueize_refs (vr->operands);
+ valueize_refs (&vr->operands);
if (old == shared_lookup_references)
shared_lookup_references = vr->operands;
vr->hashcode = vn_reference_compute_hash (vr);
operands.address (),
sizeof (vn_reference_op_s)
* operands.length ());
- vr1.operands = operands = shared_lookup_references
- = valueize_refs (shared_lookup_references);
+ bool valueized_p;
+ valueize_refs_1 (&shared_lookup_references, &valueized_p);
+ vr1.operands = shared_lookup_references;
vr1.type = type;
vr1.set = set;
vr1.base_set = base_set;
ao_ref r;
unsigned limit = param_sccvn_max_alias_queries_per_access;
vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE);
+ vec<vn_reference_op_s> ops_for_ref;
+ if (!valueized_p)
+ ops_for_ref = vr1.operands;
+ else
+ {
+ /* For ao_ref_from_mem we have to ensure only available SSA names
+ end up in base and the only convenient way to make this work
+ for PRE is to re-valueize with that in mind. */
+ ops_for_ref.create (operands.length ());
+ ops_for_ref.quick_grow (operands.length ());
+ memcpy (ops_for_ref.address (),
+ operands.address (),
+ sizeof (vn_reference_op_s)
+ * operands.length ());
+ valueize_refs_1 (&ops_for_ref, &valueized_p, true);
+ }
if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
- vr1.operands))
+ ops_for_ref))
*vnresult
= ((vn_reference_t)
walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
vn_reference_lookup_3, vuse_valueize,
limit, &data));
+ if (ops_for_ref != shared_lookup_references)
+ ops_for_ref.release ();
gcc_checking_assert (vr1.operands == shared_lookup_references);
}
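
The ops_for_ref logic above either aliases the shared operand vector (when
nothing was valueized) or re-valueizes a private copy with with_avail = true
so that only available SSA names end up in the ao_ref base, and releases that
copy afterwards only if it is not the shared vector.  A rough sketch of the
ownership discipline, with std::vector standing in for vec<> and a
placeholder transform standing in for valueize_refs_1 (the names here are
illustrative, not GCC's):

#include <vector>
#include <cstdio>

static std::vector<int> shared_scratch;  /* like shared_lookup_references */

static void
lookup (bool valueized_p)
{
  shared_scratch = { 1, 2, 3 };

  /* Use the shared vector directly when nothing was valueized,
     otherwise re-valueize a private copy of the operands.  */
  std::vector<int> private_copy;
  std::vector<int> *ops_for_ref = &shared_scratch;
  if (valueized_p)
    {
      private_copy = shared_scratch;
      for (int &op : private_copy)
        op = -op;    /* placeholder for valueize_refs_1 (..., true) */
      ops_for_ref = &private_copy;
    }

  printf ("%s vector, first op %d\n",
          ops_for_ref == &shared_scratch ? "shared" : "private",
          (*ops_for_ref)[0]);

  /* std::vector frees private_copy automatically on scope exit; with
     GCC's manually managed vec<> this is the explicit
     "if (ops_for_ref != shared_lookup_references) ops_for_ref.release ();"
     above, which must not fire for the shared vector.  */
}

int
main ()
{
  lookup (false);
  lookup (true);
  return 0;
}
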
{
vec<vn_reference_op_s> operands;
struct vn_reference_s vr1;
- bool valuezied_anything;
+ bool valueized_anything;
if (vnresult)
*vnresult = NULL;
vr1.vuse = vuse_ssa_val (vuse);
vr1.operands = operands
- = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
+ = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
vr1.type = TREE_TYPE (op);
ao_ref op_ref;
ao_ref_init (&op_ref, op);
vn_reference_t wvnresult;
ao_ref r;
unsigned limit = param_sccvn_max_alias_queries_per_access;
+ auto_vec<vn_reference_op_s> ops_for_ref;
+ if (valueized_anything)
+ {
+ copy_reference_ops_from_ref (op, &ops_for_ref);
+ bool tem;
+ valueize_refs_1 (&ops_for_ref, &tem, true);
+ }
/* Make sure to use a valueized reference if we valueized anything.
Otherwise preserve the full reference for advanced TBAA. */
- if (!valuezied_anything
+ if (!valueized_anything
|| !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
- vr1.type, vr1.operands))
+ vr1.type, ops_for_ref))
ao_ref_init (&r, op);
vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
last_vuse_ptr, kind, tbaa_p, mask);
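
In this second lookup path the copy lives in an auto_vec, so unlike the
previous hunk no manual release is needed, and the re-valueized operands are
consulted only when valueization actually changed something; otherwise, or
when no ao_ref can be built from them, the original reference is kept so the
full TBAA information survives.  A condensed sketch of that fallback, with
try_build_ref as a hypothetical stand-in for ao_ref_init_from_vn_reference
(none of these names are GCC's):

#include <cstdio>

struct ref { const char *desc; };

static bool
try_build_ref (ref *r, bool have_ops)
{
  if (!have_ops)
    return false;    /* Building a ref from operands can fail.  */
  r->desc = "from valueized operands";
  return true;
}

static ref
build_ref (bool valueized_anything, bool have_ops)
{
  ref r;
  /* Use a valueized reference only if valueization changed anything;
     otherwise keep the original reference for full TBAA.  */
  if (!valueized_anything || !try_build_ref (&r, have_ops))
    r.desc = "original reference";
  return r;
}

int
main ()
{
  printf ("%s\n", build_ref (false, true).desc);
  printf ("%s\n", build_ref (true, false).desc);
  printf ("%s\n", build_ref (true, true).desc);
  return 0;
}
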
vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
vr1->value_id = value_id;
vr1->vuse = vuse_ssa_val (vuse);
- vr1->operands = valueize_refs (operands);
+ vr1->operands = operands;
+ valueize_refs (&vr1->operands);
vr1->type = type;
vr1->punned = false;
vr1->set = set;