MODREF_UNKNOWN_PARM = -1,
MODREF_STATIC_CHAIN_PARM = -2,
MODREF_RETSLOT_PARM = -3,
+ /* Used for bases that point to memory that escapes from the function.  */
+ MODREF_GLOBAL_MEMORY_PARM = -4,
/* Used in modref_parm_map to take references which can be removed
from the summary during summary update since they now point to local
memory. */
- MODREF_LOCAL_MEMORY_PARM = -4
+ MODREF_LOCAL_MEMORY_PARM = -5
};
/* Modref record accesses relative to function parameters.
bool useful_for_kill_p () const
{
return parm_offset_known && parm_index != MODREF_UNKNOWN_PARM
+ && parm_index != MODREF_GLOBAL_MEMORY_PARM
&& parm_index != MODREF_RETSLOT_PARM && known_size_p (size)
&& known_eq (max_size, size)
&& known_gt (size, 0);
in the caller. */
gcc_checking_assert (a.parm_index >= 0
|| a.parm_index == MODREF_STATIC_CHAIN_PARM
+ || a.parm_index == MODREF_GLOBAL_MEMORY_PARM
|| a.parm_index == MODREF_UNKNOWN_PARM);
if (!a.useful_p ())
unsigned int max_accesses,
modref_tree <T> *other, vec <modref_parm_map> *parm_map,
modref_parm_map *static_chain_map,
- bool record_accesses)
+ bool record_accesses,
+ bool promote_unknown_to_global = false)
{
if (!other || every_base)
return false;
{
modref_access_node a = *access_node;
- if (a.parm_index != MODREF_UNKNOWN_PARM && parm_map)
+ if (a.parm_index != MODREF_UNKNOWN_PARM
+ && a.parm_index != MODREF_GLOBAL_MEMORY_PARM
+ && parm_map)
{
if (a.parm_index >= (int)parm_map->length ())
a.parm_index = MODREF_UNKNOWN_PARM;
a.parm_index = m.parm_index;
}
}
+ if (a.parm_index == MODREF_UNKNOWN_PARM
+ && promote_unknown_to_global)
+ a.parm_index = MODREF_GLOBAL_MEMORY_PARM;
changed |= insert (max_bases, max_refs, max_accesses,
base_node->base, ref_node->ref,
a, record_accesses);
bool merge (tree fndecl,
modref_tree <T> *other, vec <modref_parm_map> *parm_map,
modref_parm_map *static_chain_map,
- bool record_accesses)
+ bool record_accesses,
+ bool promote_unknown_to_global = false)
{
return merge (opt_for_fn (fndecl, param_modref_max_bases),
opt_for_fn (fndecl, param_modref_max_refs),
opt_for_fn (fndecl, param_modref_max_accesses),
- other, parm_map, static_chain_map, record_accesses);
+ other, parm_map, static_chain_map, record_accesses,
+ promote_unknown_to_global);
}
/* Copy OTHER to THIS. */
if (ref_node->every_access)
return true;
FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
- if (access_node->parm_index == MODREF_UNKNOWN_PARM)
+ if (access_node->parm_index == MODREF_UNKNOWN_PARM
+ || access_node->parm_index == MODREF_GLOBAL_MEMORY_PARM)
return true;
}
}
return parm_map;
}
+/* Return true if ARG with EAF flags FLAGS can not make any caller's parameter
+   used (if LOAD is true we check loads, otherwise stores).  */
+
+static bool
+verify_arg (tree arg, int flags, bool load)
+{
+  /* An argument the callee never uses can not access caller's memory.  */
+  if (flags & EAF_UNUSED)
+    return true;
+  /* Consult the EAF flags matching the kind of access being checked.  */
+  if (load)
+    {
+      if (flags & EAF_NO_DIRECT_READ)
+	return true;
+    }
+  else if ((flags & (EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER))
+	   == (EAF_NO_DIRECT_CLOBBER | EAF_NO_INDIRECT_CLOBBER))
+    return true;
+  /* Constants and read-only declarations are safe by themselves.  */
+  if (is_gimple_constant (arg))
+    return true;
+  if (DECL_P (arg) && TREE_READONLY (arg))
+    return true;
+  /* So are addresses of constants and of read-only or function
+     declarations.  */
+  if (TREE_CODE (arg) == ADDR_EXPR)
+    {
+      tree base = get_base_address (TREE_OPERAND (arg, 0));
+      if (is_gimple_constant (base))
+	return true;
+      if (DECL_P (base)
+	  && (TREE_READONLY (base) || TREE_CODE (base) == FUNCTION_DECL))
+	return true;
+    }
+  return false;
+}
+
+/* Return true if CALL may access memory that is pointed to by parameters
+   of caller and which is not seen as an escape by PTA.
+   CALLEE_ECF_FLAGS are ECF flags of callee.  If LOAD is true then by access
+   we mean load, otherwise we mean store.  */
+
+static bool
+may_access_nonescaping_parm_p (gcall *call, int callee_ecf_flags, bool load)
+{
+  int implicit_flags = 0;
+
+  /* ECF flags of the callee imply extra EAF flags on every argument.  */
+  if (ignore_stores_p (current_function_decl, callee_ecf_flags))
+    implicit_flags |= ignore_stores_eaf_flags;
+  if (callee_ecf_flags & ECF_PURE)
+    implicit_flags |= implicit_pure_eaf_flags;
+  if (callee_ecf_flags & (ECF_CONST | ECF_NOVOPS))
+    implicit_flags |= implicit_const_eaf_flags;
+
+  /* The static chain is passed like a hidden argument; verify it first.  */
+  tree chain = gimple_call_chain (call);
+  if (chain
+      && !verify_arg (chain,
+		      gimple_call_static_chain_flags (call) | implicit_flags,
+		      load))
+    return true;
+
+  /* Then verify every actual argument of the call.  */
+  unsigned int nargs = gimple_call_num_args (call);
+  for (unsigned int i = 0; i < nargs; i++)
+    if (!verify_arg (gimple_call_arg (call, i),
+		     gimple_call_arg_flags (call, i) | implicit_flags,
+		     load))
+      return true;
+  return false;
+}
+
+
/* Analyze memory accesses (loads, stores and kills) performed
by the function. Set also side_effects, calls_interposable
and nondeterminism flags. */
bool record_access_p (tree);
bool record_unknown_load ();
bool record_unknown_store ();
+ bool record_global_memory_load ();
+ bool record_global_memory_store ();
bool merge_call_side_effects (gimple *, modref_summary *,
cgraph_node *, bool);
modref_access_node get_access_for_fnspec (gcall *, attr_fnspec &,
return changed;
}
+/* Record unknown load from global memory, i.e. memory that escapes from
+   the function and thus can not be attributed to any parameter.
+   Return true if either summary changed.  */
+
+bool
+modref_access_analysis::record_global_memory_load ()
+{
+  bool changed = false;
+  /* Access with parm_index MODREF_GLOBAL_MEMORY_PARM and unknown
+     offset/size represents any escaping memory.  */
+  modref_access_node a = {0, -1, -1,
+			  0, MODREF_GLOBAL_MEMORY_PARM, false, 0};
+
+  /* Nothing to do once the load tree has already collapsed to
+     every_base.  */
+  if (m_summary && !m_summary->loads->every_base)
+    changed |= m_summary->loads->insert (current_function_decl, 0, 0, a, false);
+  if (m_summary_lto && !m_summary_lto->loads->every_base)
+    changed |= m_summary_lto->loads->insert (current_function_decl,
+					     0, 0, a, false);
+  return changed;
+}
+
+/* Record unknown store to global memory, i.e. memory that escapes from
+   the function and thus can not be attributed to any parameter.
+   Return true if either summary changed.  */
+
+bool
+modref_access_analysis::record_global_memory_store ()
+{
+  bool changed = false;
+  /* Access with parm_index MODREF_GLOBAL_MEMORY_PARM and unknown
+     offset/size represents any escaping memory.  */
+  modref_access_node a = {0, -1, -1,
+			  0, MODREF_GLOBAL_MEMORY_PARM, false, 0};
+
+  /* Nothing to do once the store tree has already collapsed to
+     every_base.  */
+  if (m_summary && !m_summary->stores->every_base)
+    changed |= m_summary->stores->insert (current_function_decl,
+					  0, 0, a, false);
+  if (m_summary_lto && !m_summary_lto->stores->every_base)
+    changed |= m_summary_lto->stores->insert (current_function_decl,
+					      0, 0, a, false);
+  return changed;
+}
+
/* Merge side effects of call STMT to function with CALLEE_SUMMARY.
Return true if something changed.
If IGNORE_STORES is true, do not merge stores.
(gimple *stmt, modref_summary *callee_summary,
cgraph_node *callee_node, bool record_adjustments)
{
- int flags = gimple_call_flags (stmt);
+ gcall *call = as_a <gcall *> (stmt);
+ int flags = gimple_call_flags (call);
/* Nothing to do for non-looping const functions. */
if ((flags & (ECF_CONST | ECF_NOVOPS))
fprintf (dump_file, " Parm map:");
auto_vec <modref_parm_map, 32> parm_map;
- parm_map.safe_grow_cleared (gimple_call_num_args (stmt), true);
- for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
+ parm_map.safe_grow_cleared (gimple_call_num_args (call), true);
+ for (unsigned i = 0; i < gimple_call_num_args (call); i++)
{
- parm_map[i] = parm_map_for_ptr (gimple_call_arg (stmt, i));
+ parm_map[i] = parm_map_for_ptr (gimple_call_arg (call, i));
if (dump_file)
{
fprintf (dump_file, " %i", parm_map[i].parm_index);
}
modref_parm_map chain_map;
- if (gimple_call_chain (stmt))
+ if (gimple_call_chain (call))
{
- chain_map = parm_map_for_ptr (gimple_call_chain (stmt));
+ chain_map = parm_map_for_ptr (gimple_call_chain (call));
if (dump_file)
{
fprintf (dump_file, "static chain %i", chain_map.parm_index);
if (m_always_executed
&& callee_summary->kills.length ()
&& (!cfun->can_throw_non_call_exceptions
- || !stmt_could_throw_p (cfun, stmt)))
+ || !stmt_could_throw_p (cfun, call)))
{
/* Watch for self recursive updates. */
auto_vec<modref_access_node, 32> saved_kills;
changed |= m_summary->loads->merge (current_function_decl,
callee_summary->loads,
&parm_map, &chain_map,
- record_adjustments);
+ record_adjustments,
+ !may_access_nonescaping_parm_p
+ (call, flags, true));
/* Merge in stores. */
if (!ignore_stores_p (current_function_decl, flags))
{
changed |= m_summary->stores->merge (current_function_decl,
callee_summary->stores,
&parm_map, &chain_map,
- record_adjustments);
+ record_adjustments,
+ !may_access_nonescaping_parm_p
+ (call, flags, false));
if (!m_summary->writes_errno
&& callee_summary->writes_errno)
{
}
return a;
}
-
/* Apply side effects of call STMT to CUR_SUMMARY using FNSPEC.
If IGNORE_STORES is true ignore them.
Return false if no useful summary can be produced. */
if (dump_file && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
fprintf (dump_file, " Builtin with no fnspec: %s\n",
IDENTIFIER_POINTER (DECL_NAME (gimple_call_fndecl (call))));
- record_unknown_load ();
if (!ignore_stores_p (current_function_decl, flags))
- record_unknown_store ();
+ {
+ if (!may_access_nonescaping_parm_p (call, flags, false))
+ record_global_memory_store ();
+ else
+ record_unknown_store ();
+ if (!may_access_nonescaping_parm_p (call, flags, true))
+ record_global_memory_load ();
+ else
+ record_unknown_load ();
+ }
+ else
+ {
+ if (!may_access_nonescaping_parm_p (call, flags, true))
+ record_global_memory_load ();
+ else
+ record_unknown_load ();
+ }
return;
}
/* Process fnspec. */
if (fnspec.global_memory_read_p ())
- record_unknown_load ();
+ {
+ if (may_access_nonescaping_parm_p (call, flags, true))
+ record_unknown_load ();
+ else
+ record_global_memory_load ();
+ }
else
{
for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
if (ignore_stores_p (current_function_decl, flags))
return;
if (fnspec.global_memory_written_p ())
- record_unknown_store ();
+ {
+ if (may_access_nonescaping_parm_p (call, flags, false))
+ record_unknown_store ();
+ else
+ record_global_memory_store ();
+ }
else
{
for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
simplified. */
int flags = gimple_call_flags (stmt);
+ if (dump_file)
+ {
+ fprintf (dump_file, " - Analyzing call:");
+ print_gimple_stmt (dump_file, stmt, 0);
+ }
+
if ((flags & (ECF_CONST | ECF_NOVOPS))
&& !(flags & ECF_LOOPING_CONST_OR_PURE))
{