struct ipa_modref_summary;
+/* Parm indexes greater than or equal to 0 are normal parms.
+ Some negative values have special meaning. */
+enum modref_special_parms {
+ MODREF_UNKNOWN_PARM = -1,
+ MODREF_STATIC_CHAIN_PARM = -2,
+ MODREF_RETSLOT_PARM = -3,
+ /* Used in modref_parm_map to take references which can be removed
+ from the summary during summary update since they now point to local
+ memory. */
+ MODREF_LOCAL_MEMORY_PARM = -4
+};
+
/* Memory access. */
struct GTY(()) modref_access_node
{
/* Return true if access node holds no useful info. */
bool useful_p () const
{
- return parm_index != -1;
+ return parm_index != MODREF_UNKNOWN_PARM;
}
/* Return true if range info is useful. */
bool range_info_useful_p () const
{
- return parm_index != -1 && parm_offset_known
+ return parm_index != MODREF_UNKNOWN_PARM && parm_offset_known
&& (known_size_p (size)
|| known_size_p (max_size)
|| known_ge (offset, 0));
{
if (parm_index != a.parm_index)
return false;
- if (parm_index >= 0)
+ if (parm_index != MODREF_UNKNOWN_PARM)
{
if (parm_offset_known != a.parm_offset_known)
return false;
bool contains (const modref_access_node &a) const
{
poly_int64 aoffset_adj = 0;
- if (parm_index >= 0)
+ if (parm_index != MODREF_UNKNOWN_PARM)
{
if (parm_index != a.parm_index)
return false;
/* We assume that containment was tested earlier. */
gcc_checking_assert (!contains (a) && !a.contains (*this));
- if (parm_index >= 0)
+ if (parm_index != MODREF_UNKNOWN_PARM)
{
if (parm_index != a.parm_index)
return false;
{
if (parm_index != a.parm_index)
{
- gcc_checking_assert (parm_index != -1);
- parm_index = -1;
+ gcc_checking_assert (parm_index != MODREF_UNKNOWN_PARM);
+ parm_index = MODREF_UNKNOWN_PARM;
return;
}
/* Access node specifying no useful info. */
const modref_access_node unspecified_modref_access_node
- = {0, -1, -1, 0, -1, false, 0};
+ = {0, -1, -1, 0, MODREF_UNKNOWN_PARM, false, 0};
template <typename T>
struct GTY((user)) modref_ref_node
size_t i, j;
modref_access_node *a2;
+ /* Only the following kinds of parameters need to be tracked.
+ We do not track return slots because they are seen as a direct store
+ in the caller. */
+ gcc_checking_assert (a.parm_index >= 0
+ || a.parm_index == MODREF_STATIC_CHAIN_PARM
+ || a.parm_index == MODREF_UNKNOWN_PARM);
if (flag_checking)
verify ();
struct modref_parm_map
{
/* Index of parameter we translate to.
- -1 indicates that parameter is unknown
- -2 indicates that parameter points to local memory and access can be
- discarded. */
+ Values from the modref_special_parms enum are permitted too. */
int parm_index;
bool parm_offset_known;
poly_int64 parm_offset;
}
/* Merge OTHER into the tree.
- PARM_MAP, if non-NULL, maps parm indexes of callee to caller. -2 is used
- to signalize that parameter is local and does not need to be tracked.
+ PARM_MAP, if non-NULL, maps parm indexes of callee to caller.
+ Similarly, CHAIN_MAP, if non-NULL, maps the static chain of the callee to the caller.
Return true if something has changed. */
bool merge (modref_tree <T> *other, vec <modref_parm_map> *parm_map,
+ modref_parm_map *static_chain_map,
bool record_accesses)
{
if (!other || every_base)
{
modref_access_node a = *access_node;
- if (a.parm_index != -1 && parm_map)
+ if (a.parm_index != MODREF_UNKNOWN_PARM && parm_map)
{
if (a.parm_index >= (int)parm_map->length ())
- a.parm_index = -1;
- else if ((*parm_map) [a.parm_index].parm_index == -2)
- continue;
+ a.parm_index = MODREF_UNKNOWN_PARM;
else
{
- a.parm_offset
- += (*parm_map) [a.parm_index].parm_offset;
- a.parm_offset_known
- &= (*parm_map)
- [a.parm_index].parm_offset_known;
- a.parm_index
- = (*parm_map) [a.parm_index].parm_index;
+ modref_parm_map &m
+ = a.parm_index == MODREF_STATIC_CHAIN_PARM
+ ? *static_chain_map
+ : (*parm_map) [a.parm_index];
+ if (m.parm_index == MODREF_LOCAL_MEMORY_PARM)
+ continue;
+ a.parm_offset += m.parm_offset;
+ a.parm_offset_known &= m.parm_offset_known;
+ a.parm_index = m.parm_index;
}
}
changed |= insert (base_node->base, ref_node->ref, a,
/* Copy OTHER to THIS. */
void copy_from (modref_tree <T> *other)
{
- merge (other, NULL, false);
+ merge (other, NULL, NULL, false);
}
/* Search BASE in tree; return NULL if failed. */
if (ref_node->every_access)
return true;
FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
- if (access_node->parm_index < 0)
+ if (access_node->parm_index == MODREF_UNKNOWN_PARM)
return true;
}
}
if (access_node->parm_index < (int)map->length ())
access_node->parm_index = (*map)[access_node->parm_index];
else
- access_node->parm_index = -1;
+ access_node->parm_index = MODREF_UNKNOWN_PARM;
}
}
}
dump_access (modref_access_node *a, FILE *out)
{
fprintf (out, " access:");
- if (a->parm_index != -1)
+ if (a->parm_index != MODREF_UNKNOWN_PARM)
{
- fprintf (out, " Parm %i", a->parm_index);
+ if (a->parm_index >= 0)
+ fprintf (out, " Parm %i", a->parm_index);
+ else if (a->parm_index == MODREF_STATIC_CHAIN_PARM)
+ fprintf (out, " Static chain");
+ else
+ gcc_unreachable ();
if (a->parm_offset_known)
{
fprintf (out, " param offset:");
base = ao_ref_base (ref);
modref_access_node a = {ref->offset, ref->size, ref->max_size,
- 0, -1, false, 0};
+ 0, MODREF_UNKNOWN_PARM, false, 0};
if (TREE_CODE (base) == MEM_REF || TREE_CODE (base) == TARGET_MEM_REF)
{
tree memref = base;
base = TREE_OPERAND (base, 0);
+
if (TREE_CODE (base) == SSA_NAME
&& SSA_NAME_IS_DEFAULT_DEF (base)
&& TREE_CODE (SSA_NAME_VAR (base)) == PARM_DECL)
{
a.parm_index = 0;
- for (tree t = DECL_ARGUMENTS (current_function_decl);
- t != SSA_NAME_VAR (base); t = DECL_CHAIN (t))
- {
- if (!t)
- {
- a.parm_index = -1;
- break;
- }
- a.parm_index++;
- }
- if (TREE_CODE (memref) == MEM_REF)
- {
- a.parm_offset_known
- = wi::to_poly_wide (TREE_OPERAND
- (memref, 1)).to_shwi (&a.parm_offset);
- }
+ if (cfun->static_chain_decl
+ && base == ssa_default_def (cfun, cfun->static_chain_decl))
+ a.parm_index = MODREF_STATIC_CHAIN_PARM;
else
- a.parm_offset_known = false;
+ for (tree t = DECL_ARGUMENTS (current_function_decl);
+ t != SSA_NAME_VAR (base); t = DECL_CHAIN (t))
+ a.parm_index++;
+ }
+ else
+ a.parm_index = MODREF_UNKNOWN_PARM;
+
+ if (a.parm_index != MODREF_UNKNOWN_PARM
+ && TREE_CODE (memref) == MEM_REF)
+ {
+ a.parm_offset_known
+ = wi::to_poly_wide (TREE_OPERAND
+ (memref, 1)).to_shwi (&a.parm_offset);
}
else
- a.parm_index = -1;
+ a.parm_offset_known = false;
}
else
- a.parm_index = -1;
+ a.parm_index = MODREF_UNKNOWN_PARM;
return a;
}
return false;
}
-/* Determine parm_map for argument I of STMT. */
+/* Determine parm_map for argument OP. */
modref_parm_map
-parm_map_for_arg (gimple *stmt, int i)
+parm_map_for_arg (tree op)
{
- tree op = gimple_call_arg (stmt, i);
bool offset_known;
poly_int64 offset;
struct modref_parm_map parm_map;
{
if (!t)
{
- index = -1;
+ index = MODREF_UNKNOWN_PARM;
break;
}
index++;
parm_map.parm_offset = offset;
}
else if (points_to_local_or_readonly_memory_p (op))
- parm_map.parm_index = -2;
+ parm_map.parm_index = MODREF_LOCAL_MEMORY_PARM;
else
- parm_map.parm_index = -1;
+ parm_map.parm_index = MODREF_UNKNOWN_PARM;
return parm_map;
}
bool record_adjustments)
{
auto_vec <modref_parm_map, 32> parm_map;
+ modref_parm_map chain_map;
bool changed = false;
/* We can not safely optimize based on summary of callee if it does
parm_map.safe_grow_cleared (gimple_call_num_args (stmt), true);
for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
{
- parm_map[i] = parm_map_for_arg (stmt, i);
+ parm_map[i] = parm_map_for_arg (gimple_call_arg (stmt, i));
if (dump_file)
{
fprintf (dump_file, " %i", parm_map[i].parm_index);
}
}
}
+ if (gimple_call_chain (stmt))
+ {
+ chain_map = parm_map_for_arg (gimple_call_chain (stmt));
+ if (dump_file)
+ {
+ fprintf (dump_file, "static chain %i", chain_map.parm_index);
+ if (chain_map.parm_offset_known)
+ {
+ fprintf (dump_file, " offset:");
+ print_dec ((poly_int64_pod)chain_map.parm_offset,
+ dump_file, SIGNED);
+ }
+ }
+ }
if (dump_file)
fprintf (dump_file, "\n");
/* Merge with callee's summary. */
changed |= cur_summary->loads->merge (callee_summary->loads, &parm_map,
- record_adjustments);
+ &chain_map, record_adjustments);
if (!ignore_stores)
{
changed |= cur_summary->stores->merge (callee_summary->stores,
- &parm_map,
+ &parm_map, &chain_map,
record_adjustments);
if (!cur_summary->writes_errno
&& callee_summary->writes_errno)
else if (!fnspec.arg_specified_p (i)
|| fnspec.arg_maybe_read_p (i))
{
- modref_parm_map map = parm_map_for_arg (call, i);
+ modref_parm_map map = parm_map_for_arg
+ (gimple_call_arg (call, i));
- if (map.parm_index == -2)
+ if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
continue;
- if (map.parm_index == -1)
+ if (map.parm_index == MODREF_UNKNOWN_PARM)
{
collapse_loads (cur_summary, cur_summary_lto);
break;
else if (!fnspec.arg_specified_p (i)
|| fnspec.arg_maybe_written_p (i))
{
- modref_parm_map map = parm_map_for_arg (call, i);
+ modref_parm_map map = parm_map_for_arg
+ (gimple_call_arg (call, i));
- if (map.parm_index == -2)
+ if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
continue;
- if (map.parm_index == -1)
+ if (map.parm_index == MODREF_UNKNOWN_PARM)
{
collapse_stores (cur_summary, cur_summary_lto);
break;
struct escape_point
{
- /* Extra hidden args we keep track of. */
- enum hidden_args
- {
- retslot_arg = -1,
- static_chain_arg = -2
- };
/* Value escapes to this call. */
gcall *call;
/* Argument it escapes to. */
if (summary_lto)
summary_lto->retslot_flags = flags;
eaf_analysis.record_escape_points (retslot,
- escape_point::retslot_arg, flags);
+ MODREF_RETSLOT_PARM, flags);
}
}
if (static_chain)
if (summary_lto)
summary_lto->static_chain_flags = flags;
eaf_analysis.record_escape_points (static_chain,
- escape_point::static_chain_arg,
+ MODREF_STATIC_CHAIN_PARM,
flags);
}
}
{
if (es && es->param[i].points_to_local_or_readonly_memory)
{
- (*parm_map)[i].parm_index = -2;
+ (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
continue;
}
(callee_pi, i));
if (cst && points_to_local_or_readonly_memory_p (cst))
{
- (*parm_map)[i].parm_index = -2;
+ (*parm_map)[i].parm_index = MODREF_LOCAL_MEMORY_PARM;
continue;
}
}
|| fnspec.arg_maybe_read_p (i))
{
modref_parm_map map = parm_map[i];
- if (map.parm_index == -2)
+ if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
continue;
- if (map.parm_index == -1)
+ if (map.parm_index == MODREF_UNKNOWN_PARM)
{
collapse_loads (cur_summary, cur_summary_lto);
break;
|| fnspec.arg_maybe_written_p (i))
{
modref_parm_map map = parm_map[i];
- if (map.parm_index == -2)
+ if (map.parm_index == MODREF_LOCAL_MEMORY_PARM)
continue;
- if (map.parm_index == -1)
+ if (map.parm_index == MODREF_UNKNOWN_PARM)
{
collapse_stores (cur_summary, cur_summary_lto);
break;
auto_vec <modref_parm_map, 32> parm_map;
+ modref_parm_map chain_map;
+ /* TODO: Once we get jump functions for static chains we could
+ compute this. */
+ chain_map.parm_index = MODREF_UNKNOWN_PARM;
compute_parm_map (callee_edge, &parm_map);
if (callee_summary)
{
changed |= cur_summary->loads->merge
- (callee_summary->loads, &parm_map, !first);
+ (callee_summary->loads, &parm_map,
+ &chain_map, !first);
if (!ignore_stores)
{
changed |= cur_summary->stores->merge
(callee_summary->stores, &parm_map,
- !first);
+ &chain_map, !first);
if (!cur_summary->writes_errno
&& callee_summary->writes_errno)
{
{
changed |= cur_summary_lto->loads->merge
(callee_summary_lto->loads, &parm_map,
- !first);
+ &chain_map, !first);
if (!ignore_stores)
{
changed |= cur_summary_lto->stores->merge
(callee_summary_lto->stores, &parm_map,
- !first);
+ &chain_map, !first);
if (!cur_summary_lto->writes_errno
&& callee_summary_lto->writes_errno)
{
if (!(flags & EAF_UNUSED)
&& cur_summary && ee->parm_index < (int)cur_summary->arg_flags.length ())
{
- eaf_flags_t &f = ee->parm_index == escape_point::retslot_arg
+ eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
? cur_summary->retslot_flags
- : ee->parm_index == escape_point::static_chain_arg
+ : ee->parm_index == MODREF_STATIC_CHAIN_PARM
? cur_summary->static_chain_flags
: cur_summary->arg_flags[ee->parm_index];
if ((f & flags) != f)
&& cur_summary_lto
&& ee->parm_index < (int)cur_summary_lto->arg_flags.length ())
{
- eaf_flags_t &f = ee->parm_index == escape_point::retslot_arg
+ eaf_flags_t &f = ee->parm_index == MODREF_RETSLOT_PARM
? cur_summary_lto->retslot_flags
- : ee->parm_index == escape_point::static_chain_arg
+ : ee->parm_index == MODREF_STATIC_CHAIN_PARM
? cur_summary_lto->static_chain_flags
: cur_summary_lto->arg_flags[ee->parm_index];
if ((f & flags_lto) != f)
if (callee_info || callee_info_lto)
{
auto_vec <modref_parm_map, 32> parm_map;
+ modref_parm_map chain_map;
+ /* TODO: Once we get jump functions for static chains we could
+ compute this. */
+ chain_map.parm_index = MODREF_UNKNOWN_PARM;
compute_parm_map (edge, &parm_map);
if (!ignore_stores)
{
if (to_info && callee_info)
- to_info->stores->merge (callee_info->stores, &parm_map, false);
+ to_info->stores->merge (callee_info->stores, &parm_map,
+ &chain_map, false);
if (to_info_lto && callee_info_lto)
to_info_lto->stores->merge (callee_info_lto->stores, &parm_map,
- false);
+ &chain_map, false);
}
if (!(flags & (ECF_CONST | ECF_NOVOPS)))
{
if (to_info && callee_info)
- to_info->loads->merge (callee_info->loads, &parm_map, false);
+ to_info->loads->merge (callee_info->loads, &parm_map,
+ &chain_map, false);
if (to_info_lto && callee_info_lto)
to_info_lto->loads->merge (callee_info_lto->loads, &parm_map,
- false);
+ &chain_map, false);
}
}