{
cgraph_node *first_clone = cgraph_node::get (decl);
- if (first_clone && !first_clone->global.inlined_to)
+ if (first_clone && !first_clone->inlined_to)
return first_clone;
cgraph_node *node = cgraph_node::create (decl);
node = node->next_sharing_asm_name)
{
cgraph_node *cn = dyn_cast <cgraph_node *> (node);
- if (cn && !cn->global.inlined_to)
+ if (cn && !cn->inlined_to)
return cn;
}
return NULL;
{
cgraph_node *n = cgraph_node::get (decl);
if (!n
- || (!n->clones && !n->clone_of && !n->global.inlined_to
+ || (!n->clones && !n->clone_of && !n->inlined_to
&& ((symtab->global_info_ready || in_lto_p)
&& (TREE_ASM_WRITTEN (n->decl)
|| DECL_EXTERNAL (n->decl)
{
/* Indirect inlining can figure out that all uses of the address are
inlined. */
- if (global.inlined_to)
+ if (inlined_to)
{
gcc_assert (cfun->after_inlining);
gcc_assert (callers->indirect_inlining_edge);
dump_base (f);
- if (global.inlined_to)
+ if (inlined_to)
fprintf (f, " Function %s is inline copy in %s\n",
dump_name (),
- global.inlined_to->dump_name ());
+ inlined_to->dump_name ());
if (clone_of)
fprintf (f, " Clone of %s\n", clone_of->dump_asm_name ());
if (symtab->function_flags_ready)
if (dyn_cast <cgraph_node *> (ref->referring)->count.initialized_p ())
sum += dyn_cast <cgraph_node *> (ref->referring)->count.ipa ();
- if (global.inlined_to
+ if (inlined_to
|| (symtab->state < EXPANSION
&& ultimate_alias_target () == this && only_called_directly_p ()))
ok = !count.ipa ().differs_from_p (sum);
{
cgraph_node *cref = dyn_cast <cgraph_node *> (ref);
if (cref)
- ref = cref->global.inlined_to;
+ ref = cref->inlined_to;
}
enum availability avail;
if (!analyzed)
avail = AVAIL_NOT_AVAILABLE;
else if (local.local)
avail = AVAIL_LOCAL;
- else if (global.inlined_to)
+ else if (inlined_to)
avail = AVAIL_AVAILABLE;
else if (transparent_alias)
ultimate_alias_target (&avail, ref);
cgraph_node::will_be_removed_from_program_if_no_direct_calls_p
(bool will_inline)
{
- gcc_assert (!global.inlined_to);
+ gcc_assert (!inlined_to);
if (DECL_EXTERNAL (decl))
return true;
{
cgraph_node *node;
- if (!decl || callee->global.inlined_to)
+ if (!decl || callee->inlined_to)
return false;
if (symtab->state == LTO_STREAMING)
return false;
error ("cgraph count invalid");
error_found = true;
}
- if (global.inlined_to && same_comdat_group)
+ if (inlined_to && same_comdat_group)
{
error ("inline clone in same comdat group list");
error_found = true;
error ("local symbols must be defined");
error_found = true;
}
- if (global.inlined_to && externally_visible)
+ if (inlined_to && externally_visible)
{
error ("externally visible inline clone");
error_found = true;
}
- if (global.inlined_to && address_taken)
+ if (inlined_to && address_taken)
{
error ("inline clone with address taken");
error_found = true;
}
- if (global.inlined_to && force_output)
+ if (inlined_to && force_output)
{
error ("inline clone is forced to output");
error_found = true;
}
if (!e->inline_failed)
{
- if (global.inlined_to
- != (e->caller->global.inlined_to
- ? e->caller->global.inlined_to : e->caller))
+ if (inlined_to
+ != (e->caller->inlined_to
+ ? e->caller->inlined_to : e->caller))
{
error ("inlined_to pointer is wrong");
error_found = true;
}
}
else
- if (global.inlined_to)
+ if (inlined_to)
{
error ("inlined_to pointer set for noninline callers");
error_found = true;
if (e->verify_count ())
error_found = true;
if (gimple_has_body_p (e->caller->decl)
- && !e->caller->global.inlined_to
+ && !e->caller->inlined_to
&& !e->speculative
/* Optimized out calls are redirected to __builtin_unreachable. */
&& (e->count.nonzero_p ()
if (e->verify_count ())
error_found = true;
if (gimple_has_body_p (e->caller->decl)
- && !e->caller->global.inlined_to
+ && !e->caller->inlined_to
&& !e->speculative
&& e->count.ipa_p ()
&& count
error_found = true;
}
}
- if (!callers && global.inlined_to)
+ if (!callers && inlined_to)
{
error ("inlined_to pointer is set but no predecessors found");
error_found = true;
}
- if (global.inlined_to == this)
+ if (inlined_to == this)
{
error ("inlined_to pointer refers to itself");
error_found = true;
error ("More than one edge out of thunk node");
error_found = true;
}
- if (gimple_has_body_p (decl) && !global.inlined_to)
+ if (gimple_has_body_p (decl) && !inlined_to)
{
error ("Thunk is not supposed to have body");
error_found = true;
}
else if (analyzed && gimple_has_body_p (decl)
&& !TREE_ASM_WRITTEN (decl)
- && (!DECL_EXTERNAL (decl) || global.inlined_to)
+ && (!DECL_EXTERNAL (decl) || inlined_to)
&& !flag_wpa)
{
if (this_cfun->cfg)
early.
TODO: Materializing clones here will likely lead to smaller LTRANS
footprint. */
- gcc_assert (!global.inlined_to && !clone_of);
+ gcc_assert (!inlined_to && !clone_of);
if (ipa_transforms_to_apply.exists ())
{
opt_pass *saved_current_pass = current_pass;
sreal
cgraph_edge::sreal_frequency ()
{
- return count.to_sreal_scale (caller->global.inlined_to
- ? caller->global.inlined_to->count
+ return count.to_sreal_scale (caller->inlined_to
+ ? caller->inlined_to->count
: caller->count);
}
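/* An illustrative reading with made-up numbers: if the edge executes 50
   times and the function the caller is ultimately inlined into is entered
   100 times, the result is 50/100 = 0.5 executions per entry.  Scaling by
   caller->count alone would be wrong for inline clones, whose local count
   is only the inlined share of the root function's profile.  */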
unsigned tm_may_enter_irr : 1;
};
-/* Information about the function that needs to be computed globally
- once compilation is finished. Available only with -funit-at-a-time. */
-
-struct GTY(()) cgraph_global_info {
- /* For inline clones this points to the function they will be
- inlined into. */
- cgraph_node *inlined_to;
-};
-
/* Represent which DECL tree (or reference to such tree)
will be replaced by another tree while versioning. */
struct GTY(()) ipa_replace_map
If the new node is being inlined into another one, NEW_INLINED_TO should be
the outline function the new one is (even indirectly) inlined to.
- All hooks will see this in node's global.inlined_to, when invoked.
+ All hooks will see this in node's inlined_to, when invoked.
Can be NULL if the node is not inlined. SUFFIX is string that is appended
to the original name. */
cgraph_node *create_clone (tree decl, profile_count count,
vec<ipa_opt_pass> GTY((skip)) ipa_transforms_to_apply;
cgraph_local_info local;
- cgraph_global_info global;
+
+ /* For inline clones this points to the function they will be
+ inlined into. */
+ cgraph_node *inlined_to;
+
struct cgraph_rtl_info *rtl;
cgraph_clone_info clone;
cgraph_thunk_info thunk;
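/* The hunks below repeatedly spell out the idiom "the function this code
   will ultimately land in".  A minimal sketch of that idiom, using a
   hypothetical helper name that is not part of this patch:  */
static inline cgraph_node *
outermost_caller (cgraph_node *node)
{
  /* inlined_to names the outermost function an inline clone is
     (transitively) inlined into; it is NULL for offline functions.  */
  return node->inlined_to ? node->inlined_to : node;
}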
if (!is_a <cgraph_node *> (this))
return true;
cnode = dyn_cast <cgraph_node *> (this);
- if (cnode->global.inlined_to)
+ if (cnode->inlined_to)
return false;
return true;
}
if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
{
- if (cn->global.inlined_to)
- source = cn->global.inlined_to;
+ if (cn->inlined_to)
+ source = cn->inlined_to;
}
if (cgraph_node *cn = dyn_cast <cgraph_node *> (target))
{
- if (cn->global.inlined_to)
- target = cn->global.inlined_to;
+ if (cn->inlined_to)
+ target = cn->inlined_to;
}
return source->get_comdat_group () == target->get_comdat_group ();
inline bool
cgraph_node::only_called_directly_or_aliased_p (void)
{
- gcc_assert (!global.inlined_to);
+ gcc_assert (!inlined_to);
return (!force_output && !address_taken
&& !ifunc_resolver
&& !used_from_other_partition
inline bool
cgraph_node::can_remove_if_no_direct_calls_and_refs_p (void)
{
- gcc_checking_assert (!global.inlined_to);
+ gcc_checking_assert (!inlined_to);
/* Extern inlines can always go, we will use the external definition. */
if (DECL_EXTERNAL (decl))
return true;
cgraph_edge::recursive_p (void)
{
cgraph_node *c = callee->ultimate_alias_target ();
- if (caller->global.inlined_to)
- return caller->global.inlined_to->decl == c->decl;
+ if (caller->inlined_to)
+ return caller->inlined_to->decl == c->decl;
else
return caller->decl == c->decl;
}
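/* The inlined_to case above matters because a call edge inside an inline
   clone counts as recursive when it targets the function the clone was
   inlined into -- the body it now lives in -- not the clone's own shared
   decl.  */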
inline int
cgraph_edge::frequency ()
{
- return count.to_cgraph_frequency (caller->global.inlined_to
- ? caller->global.inlined_to->count
+ return count.to_cgraph_frequency (caller->inlined_to
+ ? caller->inlined_to->count
: caller->count);
}
cgraph_node::mark_force_output (void)
{
force_output = 1;
- gcc_checking_assert (!global.inlined_to);
+ gcc_checking_assert (!inlined_to);
}
/* Return true if function should be optimized for size. */
node->record_stmt_references (gsi_stmt (gsi));
}
record_eh_tables (node, cfun);
- gcc_assert (!node->global.inlined_to);
+ gcc_assert (!node->inlined_to);
return 0;
}
If the new node is being inlined into another one, NEW_INLINED_TO should be
the outline function the new one is (even indirectly) inlined to. All hooks
- will see this in node's global.inlined_to, when invoked. Can be NULL if the
+ will see this in node's inlined_to, when invoked. Can be NULL if the
node is not inlined.
If PARAM_ADJUSTMENTS is non-NULL, the parameter manipulation information
new_node->externally_visible = false;
new_node->no_reorder = no_reorder;
new_node->local.local = true;
- new_node->global = global;
- new_node->global.inlined_to = new_inlined_to;
+ new_node->inlined_to = new_inlined_to;
new_node->rtl = rtl;
new_node->frequency = frequency;
new_node->tp_first_run = tp_first_run;
new_version->externally_visible = false;
new_version->no_reorder = no_reorder;
new_version->local.local = new_version->definition;
- new_version->global = global;
+ new_version->inlined_to = inlined_to;
new_version->rtl = rtl;
new_version->count = count;
/* Reset our data structures so we can analyze the function again. */
memset (&local, 0, sizeof (local));
- memset (&global, 0, sizeof (global));
+ inlined_to = NULL;
memset (&rtl, 0, sizeof (rtl));
analyzed = false;
definition = false;
if (node->analyzed
&& !node->thunk.thunk_p
&& !node->alias
- && !node->global.inlined_to
+ && !node->inlined_to
&& !TREE_ASM_WRITTEN (decl)
&& !DECL_EXTERNAL (decl))
{
{
/* We should've reclaimed all functions that are not needed. */
if (flag_checking
- && !node->global.inlined_to
+ && !node->inlined_to
&& gimple_has_body_p (decl)
/* FIXME: in an ltrans unit when the offline copy is outside the partition but inline
copies are inside the partition, we can end up not removing the body since we no longer
node->debug ();
internal_error ("failed to reclaim unneeded function");
}
- gcc_assert (node->global.inlined_to
+ gcc_assert (node->inlined_to
|| !gimple_has_body_p (decl)
|| node->in_other_partition
|| node->clones
if (node->same_comdat_group && !node->process)
{
tree decl = node->decl;
- if (!node->global.inlined_to
+ if (!node->inlined_to
&& gimple_has_body_p (decl)
/* FIXME: in an ltrans unit when the offline copy is outside a
partition but inline copies are inside a partition, we can
for (e = callers; e;)
if (e->caller->thunk.thunk_p
- && !e->caller->global.inlined_to)
+ && !e->caller->inlined_to)
{
cgraph_node *thunk = e->caller;
location_t saved_loc;
/* We ought to not compile any inline clones. */
- gcc_assert (!global.inlined_to);
+ gcc_assert (!inlined_to);
/* __RTL functions are compiled as soon as they are parsed, so don't
do it again. */
bool error_found = false;
FOR_EACH_DEFINED_FUNCTION (node)
- if (node->global.inlined_to
+ if (node->inlined_to
|| gimple_has_body_p (node->decl))
{
error_found = true;
if (!snode || !snode->definition)
return false;
node = dyn_cast <cgraph_node *> (snode);
- return !node || !node->global.inlined_to;
+ return !node || !node->inlined_to;
}
/* We will later output the initializer, so we can refer to it.
|| (!snode->forced_by_abi && !snode->force_output))))
return false;
node = dyn_cast <cgraph_node *> (snode);
- return !node || !node->global.inlined_to;
+ return !node || !node->inlined_to;
}
/* Create a temporary for TYPE for a statement STMT. If the current function
if (cgraph_node * cn = dyn_cast <cgraph_node *> (symbol2))
{
- if (cn->global.inlined_to)
- symbol2 = cn->global.inlined_to;
+ if (cn->inlined_to)
+ symbol2 = cn->inlined_to;
}
/* The actual merge operation. */
/* If we see inline clone, its comdat group actually
corresponds to the comdat group of the function it
is inlined to. */
- if (cn->global.inlined_to)
- symbol2 = cn->global.inlined_to;
+ if (cn->inlined_to)
+ symbol2 = cn->inlined_to;
}
/* The actual merge operation. */
|| target_node->definition)
&& target_node->real_symbol_p ())
{
- gcc_assert (!target_node->global.inlined_to);
+ gcc_assert (!target_node->inlined_to);
gcc_assert (target_node->real_symbol_p ());
/* When sanitizing, do not assume that __cxa_pure_virtual is not called
by a valid program. */
class ipa_call_summary *es = ipa_call_summaries->get (e);
int i, count = ipa_get_cs_argument_count (args);
- if (e->caller->global.inlined_to)
- caller_parms_info = IPA_NODE_REF (e->caller->global.inlined_to);
+ if (e->caller->inlined_to)
+ caller_parms_info = IPA_NODE_REF (e->caller->inlined_to);
else
caller_parms_info = IPA_NODE_REF (e->caller);
callee_pi = IPA_NODE_REF (e->callee);
set_hint_predicate (&info->loop_stride, p);
}
}
- if (!dst->global.inlined_to)
+ if (!dst->inlined_to)
ipa_update_overall_fn_summary (dst);
}
struct cgraph_node *node;
FOR_EACH_DEFINED_FUNCTION (node)
- if (!node->global.inlined_to)
+ if (!node->inlined_to)
ipa_dump_fn_summary (f, node);
}
HOST_WIDE_INT self_stack_size;
struct cgraph_edge *e;
- gcc_assert (!node->global.inlined_to);
+ gcc_assert (!node->inlined_to);
if (!ipa_fn_summaries)
ipa_fn_summary_alloc ();
ipa_get_stack_frame_offset (struct cgraph_node *node)
{
HOST_WIDE_INT offset = 0;
- if (!node->global.inlined_to)
+ if (!node->inlined_to)
return 0;
node = node->callers->caller;
while (true)
{
offset += ipa_size_summaries->get (node)->estimated_self_stack_size;
- if (!node->global.inlined_to)
+ if (!node->inlined_to)
return offset;
node = node->callers->caller;
}
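/* A worked example for the walk above, assuming a chain in which C is
   inlined into B and B into A (names illustrative): starting from C, the
   loop adds B's and then A's estimated_self_stack_size and stops at A,
   whose inlined_to is NULL.  Following node->callers->caller is safe
   because an inline clone has exactly one caller edge.  */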
ipa_merge_fn_summary_after_inlining (struct cgraph_edge *edge)
{
ipa_fn_summary *callee_info = ipa_fn_summaries->get (edge->callee);
- struct cgraph_node *to = (edge->caller->global.inlined_to
- ? edge->caller->global.inlined_to : edge->caller);
+ struct cgraph_node *to = (edge->caller->inlined_to
+ ? edge->caller->inlined_to : edge->caller);
class ipa_fn_summary *info = ipa_fn_summaries->get (to);
clause_t clause = 0; /* not_inline is known to be false. */
size_time_entry *e;
simple_edge_hints (struct cgraph_edge *edge)
{
int hints = 0;
- struct cgraph_node *to = (edge->caller->global.inlined_to
- ? edge->caller->global.inlined_to : edge->caller);
+ struct cgraph_node *to = (edge->caller->inlined_to
+ ? edge->caller->inlined_to : edge->caller);
struct cgraph_node *callee = edge->callee->ultimate_alias_target ();
int to_scc_no = ipa_fn_summaries->get (to)->scc_no;
int callee_scc_no = ipa_fn_summaries->get (callee)->scc_no;
may hurt optimization of the caller's hot path. */
if (edge->count.ipa ().initialized_p () && edge->maybe_hot_p ()
&& (edge->count.ipa ().apply_scale (2, 1)
- > (edge->caller->global.inlined_to
- ? edge->caller->global.inlined_to->count.ipa ()
+ > (edge->caller->inlined_to
+ ? edge->caller->inlined_to->count.ipa ()
: edge->caller->count.ipa ())))
hints |= INLINE_HINT_known_hot;
struct cgraph_node *inlining_into;
struct cgraph_edge *next;
- if (e->caller->global.inlined_to)
- inlining_into = e->caller->global.inlined_to;
+ if (e->caller->inlined_to)
+ inlining_into = e->caller->inlined_to;
else
inlining_into = e->caller;
For now we keep the other functions in the group in the program until
cgraph_remove_unreachable_functions gets rid of them. */
- gcc_assert (!e->callee->global.inlined_to);
+ gcc_assert (!e->callee->inlined_to);
e->callee->remove_from_same_comdat_group ();
if (e->callee->definition
&& inline_account_function_p (e->callee))
else
e->callee->remove_from_same_comdat_group ();
- e->callee->global.inlined_to = inlining_into;
+ e->callee->inlined_to = inlining_into;
/* Recursively clone all bodies. */
for (e = e->callee->callees; e; e = next)
/* Don't inline inlined edges. */
gcc_assert (e->inline_failed);
/* Don't even think of inlining an inline clone. */
- gcc_assert (!callee->global.inlined_to);
+ gcc_assert (!callee->inlined_to);
to = e->caller;
- if (to->global.inlined_to)
- to = to->global.inlined_to;
+ if (to->inlined_to)
+ to = to->inlined_to;
if (to->thunk.thunk_p)
{
struct cgraph_node *target = to->callees->callee;
clone_inlined_nodes (e, true, update_original, overall_size);
- gcc_assert (curr->callee->global.inlined_to == to);
+ gcc_assert (curr->callee->inlined_to == to);
old_size = ipa_size_summaries->get (to)->size;
ipa_merge_fn_summary_after_inlining (e);
limit = size_info->self_size;
if (stack_size_limit < size_info->estimated_self_stack_size)
stack_size_limit = size_info->estimated_self_stack_size;
- if (to->global.inlined_to)
+ if (to->inlined_to)
to = to->callers->caller;
else
break;
bool inlinable = true;
enum availability avail;
- cgraph_node *caller = e->caller->global.inlined_to
- ? e->caller->global.inlined_to : e->caller;
+ cgraph_node *caller = (e->caller->inlined_to
+ ? e->caller->inlined_to : e->caller);
cgraph_node *callee = e->callee->ultimate_alias_target (&avail, caller);
if (!callee->definition)
bool inlinable = true;
enum availability avail;
- cgraph_node *caller = e->caller->global.inlined_to
- ? e->caller->global.inlined_to : e->caller;
+ cgraph_node *caller = (e->caller->inlined_to
+ ? e->caller->inlined_to : e->caller);
cgraph_node *callee = e->callee->ultimate_alias_target (&avail, caller);
tree caller_tree = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (caller->decl);
tree callee_tree
compute_uninlined_call_time (struct cgraph_edge *edge,
sreal uninlined_call_time)
{
- cgraph_node *caller = (edge->caller->global.inlined_to
- ? edge->caller->global.inlined_to
+ cgraph_node *caller = (edge->caller->inlined_to
+ ? edge->caller->inlined_to
: edge->caller);
sreal freq = edge->sreal_frequency ();
compute_inlined_call_time (struct cgraph_edge *edge,
sreal time)
{
- cgraph_node *caller = (edge->caller->global.inlined_to
- ? edge->caller->global.inlined_to
+ cgraph_node *caller = (edge->caller->inlined_to
+ ? edge->caller->inlined_to
: edge->caller);
sreal caller_time = ipa_fn_summaries->get (caller)->time;
sreal spec_time = estimate_edge_time (e, &unspec_time);
sreal time = compute_uninlined_call_time (e, unspec_time);
sreal inlined_time = compute_inlined_call_time (e, spec_time);
- cgraph_node *caller = e->caller->global.inlined_to
- ? e->caller->global.inlined_to
- : e->caller;
+ cgraph_node *caller = (e->caller->inlined_to
+ ? e->caller->inlined_to
+ : e->caller);
int limit = opt_for_fn (caller->decl, optimize) >= 3
? PARAM_VALUE (PARAM_INLINE_MIN_SPEEDUP)
: PARAM_VALUE (PARAM_INLINE_MIN_SPEEDUP_O2);
reason = "--param max-inline-recursive-depth exceeded.";
want_inline = false;
}
- else if (outer_node->global.inlined_to
+ else if (outer_node->inlined_to
&& (caller_freq = outer_node->callers->sreal_frequency ()) == 0)
{
reason = "caller frequency is 0";
if (node->alias)
return false;
/* Already inlined? */
- if (node->global.inlined_to)
+ if (node->inlined_to)
return false;
/* Does it have callers? */
if (!node->call_for_symbol_and_aliases (has_caller_p, NULL, true))
struct cgraph_node *callee = edge->callee->ultimate_alias_target ();
class ipa_fn_summary *callee_info = ipa_fn_summaries->get (callee);
ipa_hints hints;
- cgraph_node *caller = (edge->caller->global.inlined_to
- ? edge->caller->global.inlined_to
+ cgraph_node *caller = (edge->caller->inlined_to
+ ? edge->caller->inlined_to
: edge->caller);
growth = estimate_edge_growth (edge);
if (growth > overall_growth
/* ... and having only one caller which is not inlined ... */
&& callee_info->single_caller
- && !edge->caller->global.inlined_to
+ && !edge->caller->inlined_to
/* ... and edges executed only conditionally ... */
&& edge->sreal_frequency () < 1
/* ... consider case where callee is not inline but caller is ... */
struct cgraph_node *where = node;
struct ipa_ref *ref;
- if (where->global.inlined_to)
- where = where->global.inlined_to;
+ if (where->inlined_to)
+ where = where->inlined_to;
if (edge_growth_cache != NULL)
for (edge = where->callers; edge; edge = edge->next_caller)
struct ipa_ref *ref;
if ((!node->alias && !ipa_fn_summaries->get (node)->inlinable)
- || node->global.inlined_to)
+ || node->inlined_to)
return;
if (!bitmap_set_bit (updated_nodes, node->get_uid ()))
return;
int n = 0;
node = edge->caller;
- if (node->global.inlined_to)
- node = node->global.inlined_to;
+ if (node->inlined_to)
+ node = node->inlined_to;
if (DECL_DECLARED_INLINE_P (node->decl))
limit = PARAM_VALUE (PARAM_MAX_INLINE_INSNS_RECURSIVE);
depth = 1;
for (cnode = curr->caller;
- cnode->global.inlined_to; cnode = cnode->callers->caller)
+ cnode->inlined_to; cnode = cnode->callers->caller)
if (node->decl
== curr->callee->ultimate_alias_target ()->decl)
depth++;
node = next)
{
next = symtab->next_function (node);
- if (node->global.inlined_to == master_clone)
+ if (node->inlined_to == master_clone)
node->remove ();
}
master_clone->remove ();
if (edge->speculative && !speculation_useful_p (edge, false))
{
struct cgraph_node *node = edge->caller;
- struct cgraph_node *where = node->global.inlined_to
- ? node->global.inlined_to : node;
+ struct cgraph_node *where = node->inlined_to
+ ? node->inlined_to : node;
auto_bitmap updated_nodes;
if (edge->count.ipa ().initialized_p ())
free (order);
FOR_EACH_DEFINED_FUNCTION (node)
- if (!node->global.inlined_to)
+ if (!node->inlined_to)
{
if (!node->alias && node->analyzed
&& (node->has_gimple_body_p () || node->thunk.thunk_p)
if (opt_for_fn (n2->decl, optimize))
{
ipa_fn_summary *info2 = ipa_fn_summaries->get
- (n2->global.inlined_to ? n2->global.inlined_to : n2);
+ (n2->inlined_to ? n2->inlined_to : n2);
if (info2->scc_no)
break;
info2->scc_no = id;
}
if (update)
{
- struct cgraph_node *where = node->global.inlined_to
- ? node->global.inlined_to : node;
+ struct cgraph_node *where = node->inlined_to
+ ? node->inlined_to : node;
ipa_update_overall_fn_summary (where);
reset_edge_caches (where);
update_caller_keys (&edge_heap, where,
if (edge->recursive_p ())
{
where = edge->caller;
- if (where->global.inlined_to)
- where = where->global.inlined_to;
+ if (where->inlined_to)
+ where = where->inlined_to;
if (!recursive_inlining (edge,
opt_for_fn (edge->caller->decl,
flag_indirect_inlining)
selective. */
where = edge->caller;
- while (where->global.inlined_to)
+ while (where->inlined_to)
{
if (where->decl == callee->decl)
outer_node = where, depth++;
else if (depth && dump_file)
fprintf (dump_file, " Peeling recursion with depth %i\n", depth);
- gcc_checking_assert (!callee->global.inlined_to);
+ gcc_checking_assert (!callee->inlined_to);
inline_call (edge, true, &new_indirect_edges, &overall_size, true);
add_new_edges_to_heap (&edge_heap, new_indirect_edges);
update_callee_keys (&edge_heap, where, updated_nodes);
}
where = edge->caller;
- if (where->global.inlined_to)
- where = where->global.inlined_to;
+ if (where->inlined_to)
+ where = where->inlined_to;
/* Our profitability metric can depend on local properties
such as number of inlinable calls and size of the function body.
node->aux = NULL;
if (update)
- ipa_update_overall_fn_summary (node->global.inlined_to
- ? node->global.inlined_to : node);
+ ipa_update_overall_fn_summary (node->inlined_to
+ ? node->inlined_to : node);
}
/* Inline NODE to all callers. Worker for cgraph_for_node_and_aliases.
int *num_calls = (int *)data;
bool callee_removed = false;
- while (node->callers && !node->global.inlined_to)
+ while (node->callers && !node->inlined_to)
{
struct cgraph_node *caller = node->callers->caller;
struct cgraph_node *node;
FOR_EACH_DEFINED_FUNCTION (node)
- if (!node->global.inlined_to
+ if (!node->inlined_to
&& !node->alias)
{
ipa_fn_summary *s = ipa_fn_summaries->get (node);
}
if (update)
{
- struct cgraph_node *where = node->global.inlined_to
- ? node->global.inlined_to : node;
+ struct cgraph_node *where = node->inlined_to
+ ? node->inlined_to : node;
reset_edge_caches (where);
ipa_update_overall_fn_summary (where);
}
if (profile_info
&& !(edge->callee->count.ipa () == profile_count::zero ())
&& (edge->caller->frequency != NODE_FREQUENCY_UNLIKELY_EXECUTED
- || (edge->caller->global.inlined_to
- && edge->caller->global.inlined_to->frequency
+ || (edge->caller->inlined_to
+ && edge->caller->inlined_to->frequency
!= NODE_FREQUENCY_UNLIKELY_EXECUTED)))
d->maybe_unlikely_executed = false;
if (edge->count.ipa ().initialized_p ()
/* Because may-edges are not explicitly represented and the vtable may be external,
we may create the first reference to the object in the unit. */
- if (!callee || callee->global.inlined_to)
+ if (!callee || callee->inlined_to)
{
/* We had better ensure we can refer to it.
/* We cannot make edges to inline clones. It is a bug that someone removed
the cgraph node too early. */
- gcc_assert (!callee->global.inlined_to);
+ gcc_assert (!callee->inlined_to);
if (dump_file && !unreachable)
{
ipa_check_create_edge_args ();
top = IPA_EDGE_REF (cs);
- new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
- ? cs->caller->global.inlined_to
+ new_root_info = IPA_NODE_REF (cs->caller->inlined_to
+ ? cs->caller->inlined_to
: cs->caller);
inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
class ipa_edge_args *args = IPA_EDGE_REF (cs);
if (!args)
return;
- struct cgraph_node *new_root = cs->caller->global.inlined_to
- ? cs->caller->global.inlined_to : cs->caller;
+ struct cgraph_node *new_root = cs->caller->inlined_to
+ ? cs->caller->inlined_to : cs->caller;
class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
int count, i;
gcc_checking_assert (ok);
clone = cs->caller;
- while (clone->global.inlined_to
+ while (clone->inlined_to
&& clone != rdesc->cs->caller
&& IPA_NODE_REF (clone)->ipcp_orig_node)
{
We need to find the duplicate that refers to our tree of
inline clones. */
- gcc_assert (dst->caller->global.inlined_to);
+ gcc_assert (dst->caller->inlined_to);
for (dst_rdesc = src_rdesc->next_duplicate;
dst_rdesc;
dst_rdesc = dst_rdesc->next_duplicate)
{
struct cgraph_node *top;
- top = dst_rdesc->cs->caller->global.inlined_to
- ? dst_rdesc->cs->caller->global.inlined_to
+ top = dst_rdesc->cs->caller->inlined_to
+ ? dst_rdesc->cs->caller->inlined_to
: dst_rdesc->cs->caller;
- if (dst->caller->global.inlined_to == top)
+ if (dst->caller->inlined_to == top)
break;
}
gcc_assert (dst_rdesc);
else if (dst_jf->type == IPA_JF_PASS_THROUGH
&& src->caller == dst->caller)
{
- struct cgraph_node *inline_root = dst->caller->global.inlined_to
- ? dst->caller->global.inlined_to : dst->caller;
+ struct cgraph_node *inline_root = dst->caller->inlined_to
+ ? dst->caller->inlined_to : dst->caller;
class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
/* Inline clones share declaration with their offline copies;
do not modify their declarations since the offline copy may
be different. */
- if (!w->global.inlined_to)
+ if (!w->inlined_to)
switch (this_state)
{
case IPA_CONST:
/* Inline clones share declaration with their offline copies;
do not modify their declarations since the offline copy may
be different. */
- if (!w->global.inlined_to)
+ if (!w->inlined_to)
{
w->set_nothrow_flag (true);
if (dump_file)
funct_state l = funct_state_summaries->get (node);
if (!node->alias
&& l->malloc_state == STATE_MALLOC
- && !node->global.inlined_to)
+ && !node->inlined_to)
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Function %s found to be malloc\n",
ipa_reference_global_vars_info_t node_g;
/* No need to produce summaries for inline clones. */
- if (node->global.inlined_to)
+ if (node->inlined_to)
continue;
node_info = get_reference_vars_info (node);
ipa_reference_optimization_summary_t info;
/* See if we have (non-empty) info. */
- if (!node->definition || node->global.inlined_to)
+ if (!node->definition || node->inlined_to)
return false;
info = get_reference_optimization_summary (node);
if (!info)
if (!node->aux
&& (pass
|| (!node->address_taken
- && !node->global.inlined_to
+ && !node->inlined_to
&& !node->alias && !node->thunk.thunk_p
&& !node->only_called_directly_p ())))
{
|| DECL_EXTERNAL (node->decl));
if (cgraph_externally_visible_p (node, whole_program))
{
- gcc_assert (!node->global.inlined_to);
+ gcc_assert (!node->inlined_to);
node->externally_visible = true;
}
else
{
struct cgraph_edge *e;
for (e = node->callees; e; e = e->next_callee)
- if (e->callee->global.inlined_to)
+ if (e->callee->inlined_to)
{
- e->callee->global.inlined_to = inlined_to;
+ e->callee->inlined_to = inlined_to;
update_inlined_to_pointer (e->callee, inlined_to);
}
}
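/* update_inlined_to_pointer recursively re-points every inline clone
   reachable through NODE's callee edges at the new root.  The re-rooting
   below relies on it: when an inline clone loses its last caller,
   clearing its inlined_to turns it back into an offline root, and all
   inline clones nested inside it must follow.  */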
node->used_as_abstract_origin = false;
node->indirect_call_target = false;
if (node->definition
- && !node->global.inlined_to
+ && !node->inlined_to
&& !node->in_other_partition
&& !node->can_remove_if_no_direct_calls_and_refs_p ())
{
- gcc_assert (!node->global.inlined_to);
+ gcc_assert (!node->inlined_to);
reachable.add (node);
enqueue_node (node, &first, &reachable);
}
/* When an inline clone exists, mark the body to be preserved so that when
removing the offline copy of the function we don't kill it. */
- if (cnode->global.inlined_to)
+ if (cnode->inlined_to)
body_needed_for_clonning.add (cnode->decl);
/* For non-inline clones, force their origins to the boundary and ensure
to turn it into a normal clone. */
FOR_EACH_FUNCTION (node)
{
- if (node->global.inlined_to
+ if (node->inlined_to
&& !node->callers)
{
gcc_assert (node->clones);
- node->global.inlined_to = NULL;
+ node->inlined_to = NULL;
update_inlined_to_pointer (node, node);
}
node->aux = NULL;
struct cgraph_node *cnode = dyn_cast <cgraph_node *> (ref->referring);
if (cnode)
{
- if (cnode->global.inlined_to)
- cnode = cnode->global.inlined_to;
+ if (cnode->inlined_to)
+ cnode = cnode->inlined_to;
if (!function)
function = cnode;
else if (function != cnode)
struct cgraph_edge *e;
if (!node->definition)
return false;
- if (node->global.inlined_to)
+ if (node->inlined_to)
return false;
for (e = node->callers; e; e = e->next_caller)
{
boundary_p = !lto_symtab_encoder_in_partition_p (encoder, node);
if (node->analyzed && (!boundary_p || node->alias
- || (node->thunk.thunk_p && !node->global.inlined_to)))
+ || (node->thunk.thunk_p && !node->inlined_to)))
tag = LTO_symtab_analyzed_node;
else
tag = LTO_symtab_unavail_node;
&& node->get_partitioning_class () == SYMBOL_PARTITION)
{
/* Inline clones cannot be part of boundary.
- gcc_assert (!node->global.inlined_to);
+ gcc_assert (!node->inlined_to);
FIXME: At the moment they can be, when a partition contains an inline
clone that is a clone of an inline clone from outside the partition. We can
if (tag == LTO_symtab_analyzed_node)
{
- if (node->global.inlined_to)
+ if (node->inlined_to)
{
- ref = lto_symtab_encoder_lookup (encoder, node->global.inlined_to);
+ ref = lto_symtab_encoder_lookup (encoder, node->inlined_to);
gcc_assert (ref != LCC_NOT_FOUND);
}
else
if (!lto_symtab_encoder_in_partition_p (encoder, callee))
{
/* We should have moved all the inlines. */
- gcc_assert (!callee->global.inlined_to);
+ gcc_assert (!callee->inlined_to);
add_node_to (encoder, callee, false);
}
}
&& !lto_symtab_encoder_in_partition_p
(encoder, callee))
{
- gcc_assert (!callee->global.inlined_to);
+ gcc_assert (!callee->inlined_to);
add_node_to (encoder, callee, false);
}
}
if (node->alias && node->analyzed)
create_references (encoder, node);
if (cnode
- && cnode->thunk.thunk_p && !cnode->global.inlined_to)
+ && cnode->thunk.thunk_p && !cnode->inlined_to)
add_node_to (encoder, cnode->callees->callee, false);
while (node->transparent_alias && node->analyzed)
{
{
node = dyn_cast <cgraph_node *> (lto_symtab_encoder_deref (encoder, i));
if (node
- && ((node->thunk.thunk_p && !node->global.inlined_to)
+ && ((node->thunk.thunk_p && !node->inlined_to)
|| lto_symtab_encoder_in_partition_p (encoder, node)))
{
output_outgoing_cgraph_edges (node->callees, ob, encoder);
input_overwrite_node (file_data, node, tag, &bp);
/* Store a reference for now, and fix up later to be a pointer. */
- node->global.inlined_to = (cgraph_node *) (intptr_t) ref;
+ node->inlined_to = (cgraph_node *) (intptr_t) ref;
if (group)
{
int ref;
if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
{
- ref = (int) (intptr_t) cnode->global.inlined_to;
+ ref = (int) (intptr_t) cnode->inlined_to;
/* We share the declaration of builtins, so we may read the same node twice. */
if (!node->aux)
/* Fixup inlined_to from reference to pointer. */
if (ref != LCC_NOT_FOUND)
- dyn_cast<cgraph_node *> (node)->global.inlined_to
+ dyn_cast<cgraph_node *> (node)->inlined_to
= dyn_cast<cgraph_node *> (nodes[ref]);
else
- cnode->global.inlined_to = NULL;
+ cnode->inlined_to = NULL;
}
ref = (int) (intptr_t) node->same_comdat_group;
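/* A minimal sketch of the stream-as-index pattern used in the hunks above
   (simplified; the real streamer goes through the encoder's lookup table):

     write:  ref = index of node->inlined_to in the encoder, or LCC_NOT_FOUND;
     read:   node->inlined_to = (cgraph_node *) (intptr_t) ref;
     fixup:  node->inlined_to = ref != LCC_NOT_FOUND ? nodes[ref] : NULL;

   The pointer field holds the integer index only until every node in the
   section has been materialized.  */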
/* Add all thunks associated with the function. */
for (e = cnode->callers; e; e = e->next_caller)
- if (e->caller->thunk.thunk_p && !e->caller->global.inlined_to)
+ if (e->caller->thunk.thunk_p && !e->caller->inlined_to)
add_symbol_to_partition_1 (part, e->caller);
}
if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
{
cnode = cnode->function_symbol ();
- if (cnode->global.inlined_to)
- cnode = cnode->global.inlined_to;
+ if (cnode->inlined_to)
+ cnode = cnode->inlined_to;
return cnode;
}
else if (varpool_node *vnode = dyn_cast <varpool_node *> (node))
prevailing_node->forced_by_abi = true;
if (node->address_taken)
{
- gcc_assert (!prevailing_node->global.inlined_to);
+ gcc_assert (!prevailing_node->inlined_to);
prevailing_node->mark_address_taken ();
}
if (node->definition && prevailing_node->definition
cgraph_node *ce = dyn_cast <cgraph_node *> (e);
if ((!TREE_PUBLIC (e->decl) && !DECL_EXTERNAL (e->decl))
- || (ce != NULL && ce->global.inlined_to))
+ || (ce != NULL && ce->inlined_to))
continue;
symtab_node *to = symtab_node::get (lto_symtab_prevailing_decl (e->decl));
tree attr = lookup_attribute ("omp declare simd",
DECL_ATTRIBUTES (node->decl));
if (attr == NULL_TREE
- || node->global.inlined_to
+ || node->inlined_to
|| lookup_attribute ("noclone", DECL_ATTRIBUTES (node->decl)))
return;
continue;
if (TREE_ASM_WRITTEN (e->caller->decl))
continue;
- if (!e->caller->process && !e->caller->global.inlined_to)
+ if (!e->caller->process && !e->caller->inlined_to)
break;
}
if (dump_file && e)
if (DECL_ABSTRACT_P (decl))
return SYMBOL_EXTERNAL;
- if (cnode && cnode->global.inlined_to)
+ if (cnode && cnode->inlined_to)
return SYMBOL_DUPLICATE;
/* Transparent aliases are always duplicated. */
return true;
/* Inline clones always binds locally. */
- if (cnode && cnode->global.inlined_to)
+ if (cnode && cnode->inlined_to)
return true;
if (DECL_EXTERNAL (decl))
{
cgraph_node *cref = dyn_cast <cgraph_node *> (ref);
if (cref)
- ref = cref->global.inlined_to;
+ ref = cref->inlined_to;
}
/* If this is a reference from symbol itself and there are no aliases, we
{
if ((node->alias
|| (node->thunk.thunk_p
- && ! node->global.inlined_to))
+ && ! node->inlined_to))
&& node->analyzed
&& !node->ifunc_resolver)
insert_vi_for_tree (node->decl, (varinfo_t)data);
/* Nodes without a body are not interesting. Especially do not
visit clones at this point for now - we get duplicate decls
there for inline clones at least. */
- if (!node->has_gimple_body_p () || node->global.inlined_to)
+ if (!node->has_gimple_body_p () || node->inlined_to)
continue;
node->get_body ();