1 /* Callgraph handling code.
2 Copyright (C) 2003-2019 Free Software Foundation, Inc.
3 Contributed by Jan Hubicka
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains basic routines manipulating call graph
23 The call-graph is a data structure designed for inter-procedural
24 optimization. It represents a multi-graph where nodes are functions
25 (symbols within symbol table) and edges are call sites. */
29 #include "coretypes.h"
36 #include "alloc-pool.h"
37 #include "gimple-ssa.h"
39 #include "lto-streamer.h"
40 #include "fold-const.h"
43 #include "print-tree.h"
44 #include "langhooks.h"
47 #include "gimple-iterator.h"
50 #include "value-prof.h"
51 #include "ipa-utils.h"
52 #include "symbol-summary.h"
55 #include "ipa-fnsummary.h"
57 #include "gimple-pretty-print.h"
63 #include "stringpool.h"
67 /* FIXME: Only for PROP_loops, but cgraph shouldn't have to know about this. */
68 #include "tree-pass.h"
70 /* Queue of cgraph nodes scheduled to be lowered. */
71 symtab_node *x_cgraph_nodes_queue;
72 #define cgraph_nodes_queue ((cgraph_node *)x_cgraph_nodes_queue)
74 /* Symbol table global context. */
77 /* List of hooks triggered on cgraph_edge events. */
78 struct cgraph_edge_hook_list {
79 cgraph_edge_hook hook;
81 struct cgraph_edge_hook_list *next;
84 /* List of hooks triggered on cgraph_node events. */
85 struct cgraph_node_hook_list {
86 cgraph_node_hook hook;
88 struct cgraph_node_hook_list *next;
91 /* List of hooks triggered on events involving two cgraph_edges. */
92 struct cgraph_2edge_hook_list {
93 cgraph_2edge_hook hook;
95 struct cgraph_2edge_hook_list *next;
98 /* List of hooks triggered on events involving two cgraph_nodes. */
99 struct cgraph_2node_hook_list {
100 cgraph_2node_hook hook;
102 struct cgraph_2node_hook_list *next;
105 /* Hash descriptor for cgraph_function_version_info. */
107 struct function_version_hasher : ggc_ptr_hash<cgraph_function_version_info>
109 static hashval_t hash (cgraph_function_version_info *);
110 static bool equal (cgraph_function_version_info *,
111 cgraph_function_version_info *);
114 /* Map a cgraph_node to cgraph_function_version_info using this htab.
115 The cgraph_function_version_info has a THIS_NODE field that is the
116 corresponding cgraph_node.. */
118 static GTY(()) hash_table<function_version_hasher> *cgraph_fnver_htab = NULL;
120 /* Hash function for cgraph_fnver_htab. */
122 function_version_hasher::hash (cgraph_function_version_info *ptr)
124 int uid = ptr->this_node->get_uid ();
125 return (hashval_t)(uid);
128 /* eq function for cgraph_fnver_htab. */
130 function_version_hasher::equal (cgraph_function_version_info *n1,
131 cgraph_function_version_info *n2)
133 return n1->this_node->get_uid () == n2->this_node->get_uid ();
136 /* Mark as GC root all allocated nodes. */
137 static GTY(()) struct cgraph_function_version_info *
138 version_info_node = NULL;
140 /* Return true if NODE's address can be compared. */
143 symtab_node::address_can_be_compared_p ()
145 /* Address of virtual tables and functions is never compared. */
146 if (DECL_VIRTUAL_P (decl))
148 /* Address of C++ cdtors is never compared. */
149 if (is_a <cgraph_node *> (this)
150 && (DECL_CXX_CONSTRUCTOR_P (decl)
151 || DECL_CXX_DESTRUCTOR_P (decl)))
153 /* Constant pool symbols addresses are never compared.
154 flag_merge_constants permits us to assume the same on readonly vars. */
155 if (is_a <varpool_node *> (this)
156 && (DECL_IN_CONSTANT_POOL (decl)
157 || (flag_merge_constants >= 2
158 && TREE_READONLY (decl) && !TREE_THIS_VOLATILE (decl))))
163 /* Get the cgraph_function_version_info node corresponding to node. */
164 cgraph_function_version_info *
165 cgraph_node::function_version (void)
167 cgraph_function_version_info key;
168 key.this_node = this;
170 if (cgraph_fnver_htab == NULL)
173 return cgraph_fnver_htab->find (&key);
176 /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
177 corresponding to cgraph_node NODE. */
178 cgraph_function_version_info *
179 cgraph_node::insert_new_function_version (void)
181 version_info_node = NULL;
182 version_info_node = ggc_cleared_alloc<cgraph_function_version_info> ();
183 version_info_node->this_node = this;
185 if (cgraph_fnver_htab == NULL)
186 cgraph_fnver_htab = hash_table<function_version_hasher>::create_ggc (2);
188 *cgraph_fnver_htab->find_slot (version_info_node, INSERT)
190 return version_info_node;
193 /* Remove the cgraph_function_version_info node given by DECL_V. */
195 delete_function_version (cgraph_function_version_info *decl_v)
200 if (decl_v->prev != NULL)
201 decl_v->prev->next = decl_v->next;
203 if (decl_v->next != NULL)
204 decl_v->next->prev = decl_v->prev;
206 if (cgraph_fnver_htab != NULL)
207 cgraph_fnver_htab->remove_elt (decl_v);
210 /* Remove the cgraph_function_version_info and cgraph_node for DECL. This
211 DECL is a duplicate declaration. */
213 cgraph_node::delete_function_version_by_decl (tree decl)
215 cgraph_node *decl_node = cgraph_node::get (decl);
217 if (decl_node == NULL)
220 delete_function_version (decl_node->function_version ());
222 decl_node->remove ();
225 /* Record that DECL1 and DECL2 are semantically identical function
228 cgraph_node::record_function_versions (tree decl1, tree decl2)
230 cgraph_node *decl1_node = cgraph_node::get_create (decl1);
231 cgraph_node *decl2_node = cgraph_node::get_create (decl2);
232 cgraph_function_version_info *decl1_v = NULL;
233 cgraph_function_version_info *decl2_v = NULL;
234 cgraph_function_version_info *before;
235 cgraph_function_version_info *after;
237 gcc_assert (decl1_node != NULL && decl2_node != NULL);
238 decl1_v = decl1_node->function_version ();
239 decl2_v = decl2_node->function_version ();
241 if (decl1_v != NULL && decl2_v != NULL)
245 decl1_v = decl1_node->insert_new_function_version ();
248 decl2_v = decl2_node->insert_new_function_version ();
250 /* Chain decl2_v and decl1_v. All semantically identical versions
251 will be chained together. */
256 while (before->next != NULL)
257 before = before->next;
259 while (after->prev != NULL)
262 before->next = after;
263 after->prev = before;
266 /* Initialize callgraph dump file. */
269 symbol_table::initialize (void)
272 dump_file = dump_begin (TDI_cgraph, NULL);
274 if (!ipa_clones_dump_file)
275 ipa_clones_dump_file = dump_begin (TDI_clones, NULL);
278 /* Allocate new callgraph node and insert it into basic data structures. */
281 symbol_table::create_empty (void)
283 cgraph_node *node = allocate_cgraph_symbol ();
285 node->type = SYMTAB_FUNCTION;
286 node->frequency = NODE_FREQUENCY_NORMAL;
287 node->count_materialization_scale = REG_BR_PROB_BASE;
293 /* Register HOOK to be called with DATA on each removed edge. */
294 cgraph_edge_hook_list *
295 symbol_table::add_edge_removal_hook (cgraph_edge_hook hook, void *data)
297 cgraph_edge_hook_list *entry;
298 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
300 entry = (cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
310 /* Remove ENTRY from the list of hooks called on removing edges. */
312 symbol_table::remove_edge_removal_hook (cgraph_edge_hook_list *entry)
314 cgraph_edge_hook_list **ptr = &m_first_edge_removal_hook;
316 while (*ptr != entry)
322 /* Call all edge removal hooks. */
324 symbol_table::call_edge_removal_hooks (cgraph_edge *e)
326 cgraph_edge_hook_list *entry = m_first_edge_removal_hook;
329 entry->hook (e, entry->data);
334 /* Register HOOK to be called with DATA on each removed node. */
335 cgraph_node_hook_list *
336 symbol_table::add_cgraph_removal_hook (cgraph_node_hook hook, void *data)
338 cgraph_node_hook_list *entry;
339 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
341 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
351 /* Remove ENTRY from the list of hooks called on removing nodes. */
353 symbol_table::remove_cgraph_removal_hook (cgraph_node_hook_list *entry)
355 cgraph_node_hook_list **ptr = &m_first_cgraph_removal_hook;
357 while (*ptr != entry)
363 /* Call all node removal hooks. */
365 symbol_table::call_cgraph_removal_hooks (cgraph_node *node)
367 cgraph_node_hook_list *entry = m_first_cgraph_removal_hook;
370 entry->hook (node, entry->data);
375 /* Call all node removal hooks. */
377 symbol_table::call_cgraph_insertion_hooks (cgraph_node *node)
379 cgraph_node_hook_list *entry = m_first_cgraph_insertion_hook;
382 entry->hook (node, entry->data);
388 /* Register HOOK to be called with DATA on each inserted node. */
389 cgraph_node_hook_list *
390 symbol_table::add_cgraph_insertion_hook (cgraph_node_hook hook, void *data)
392 cgraph_node_hook_list *entry;
393 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
395 entry = (cgraph_node_hook_list *) xmalloc (sizeof (*entry));
405 /* Remove ENTRY from the list of hooks called on inserted nodes. */
407 symbol_table::remove_cgraph_insertion_hook (cgraph_node_hook_list *entry)
409 cgraph_node_hook_list **ptr = &m_first_cgraph_insertion_hook;
411 while (*ptr != entry)
417 /* Register HOOK to be called with DATA on each duplicated edge. */
418 cgraph_2edge_hook_list *
419 symbol_table::add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
421 cgraph_2edge_hook_list *entry;
422 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
424 entry = (cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
434 /* Remove ENTRY from the list of hooks called on duplicating edges. */
436 symbol_table::remove_edge_duplication_hook (cgraph_2edge_hook_list *entry)
438 cgraph_2edge_hook_list **ptr = &m_first_edge_duplicated_hook;
440 while (*ptr != entry)
446 /* Call all edge duplication hooks. */
448 symbol_table::call_edge_duplication_hooks (cgraph_edge *cs1, cgraph_edge *cs2)
450 cgraph_2edge_hook_list *entry = m_first_edge_duplicated_hook;
453 entry->hook (cs1, cs2, entry->data);
458 /* Register HOOK to be called with DATA on each duplicated node. */
459 cgraph_2node_hook_list *
460 symbol_table::add_cgraph_duplication_hook (cgraph_2node_hook hook, void *data)
462 cgraph_2node_hook_list *entry;
463 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
465 entry = (cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
475 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
477 symbol_table::remove_cgraph_duplication_hook (cgraph_2node_hook_list *entry)
479 cgraph_2node_hook_list **ptr = &m_first_cgraph_duplicated_hook;
481 while (*ptr != entry)
487 /* Call all node duplication hooks. */
489 symbol_table::call_cgraph_duplication_hooks (cgraph_node *node,
492 cgraph_2node_hook_list *entry = m_first_cgraph_duplicated_hook;
495 entry->hook (node, node2, entry->data);
500 /* Return cgraph node assigned to DECL. Create new one when needed. */
503 cgraph_node::create (tree decl)
505 cgraph_node *node = symtab->create_empty ();
506 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
510 node->count = profile_count::uninitialized ();
512 if ((flag_openacc || flag_openmp)
513 && lookup_attribute ("omp declare target", DECL_ATTRIBUTES (decl)))
515 node->offloadable = 1;
516 if (ENABLE_OFFLOADING)
517 g->have_offload = true;
520 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (decl)))
521 node->ifunc_resolver = true;
523 node->register_symbol ();
525 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
527 node->origin = cgraph_node::get_create (DECL_CONTEXT (decl));
528 node->next_nested = node->origin->nested;
529 node->origin->nested = node;
534 /* Try to find a call graph node for declaration DECL and if it does not exist
535 or if it corresponds to an inline clone, create a new one. */
538 cgraph_node::get_create (tree decl)
540 cgraph_node *first_clone = cgraph_node::get (decl);
542 if (first_clone && !first_clone->global.inlined_to)
545 cgraph_node *node = cgraph_node::create (decl);
548 first_clone->clone_of = node;
549 node->clones = first_clone;
550 symtab->symtab_prevail_in_asm_name_hash (node);
551 node->decl->decl_with_vis.symtab_node = node;
553 fprintf (dump_file, "Introduced new external node "
554 "(%s) and turned into root of the clone tree.\n",
558 fprintf (dump_file, "Introduced new external node "
559 "(%s).\n", node->dump_name ());
563 /* Mark ALIAS as an alias to DECL. DECL_NODE is cgraph node representing
564 the function body is associated with (not necessarily cgraph_node (DECL). */
567 cgraph_node::create_alias (tree alias, tree target)
569 cgraph_node *alias_node;
571 gcc_assert (TREE_CODE (target) == FUNCTION_DECL
572 || TREE_CODE (target) == IDENTIFIER_NODE);
573 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
574 alias_node = cgraph_node::get_create (alias);
575 gcc_assert (!alias_node->definition);
576 alias_node->alias_target = target;
577 alias_node->definition = true;
578 alias_node->alias = true;
579 if (lookup_attribute ("weakref", DECL_ATTRIBUTES (alias)) != NULL)
580 alias_node->transparent_alias = alias_node->weakref = true;
581 if (lookup_attribute ("ifunc", DECL_ATTRIBUTES (alias)))
582 alias_node->ifunc_resolver = true;
586 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
588 Same body aliases are output whenever the body of DECL is output,
589 and cgraph_node::get (ALIAS) transparently returns
590 cgraph_node::get (DECL). */
593 cgraph_node::create_same_body_alias (tree alias, tree decl)
597 /* If aliases aren't supported by the assembler, fail. */
598 if (!TARGET_SUPPORTS_ALIASES)
601 /* Langhooks can create same body aliases of symbols not defined.
602 Those are useless. Drop them on the floor. */
603 if (symtab->global_info_ready)
606 n = cgraph_node::create_alias (alias, decl);
607 n->cpp_implicit_alias = true;
608 if (symtab->cpp_implicit_aliases_done)
609 n->resolve_alias (cgraph_node::get (decl));
613 /* Add thunk alias into callgraph. The alias declaration is ALIAS and it
614 aliases DECL with an adjustments made into the first parameter.
615 See comments in struct cgraph_thunk_info for detail on the parameters. */
618 cgraph_node::create_thunk (tree alias, tree, bool this_adjusting,
619 HOST_WIDE_INT fixed_offset,
620 HOST_WIDE_INT virtual_value,
621 HOST_WIDE_INT indirect_offset,
627 node = cgraph_node::get (alias);
631 node = cgraph_node::create (alias);
633 /* Make sure that if VIRTUAL_OFFSET is in sync with VIRTUAL_VALUE. */
634 gcc_checking_assert (virtual_offset
635 ? virtual_value == wi::to_wide (virtual_offset)
636 : virtual_value == 0);
638 node->thunk.fixed_offset = fixed_offset;
639 node->thunk.virtual_value = virtual_value;
640 node->thunk.indirect_offset = indirect_offset;
641 node->thunk.alias = real_alias;
642 node->thunk.this_adjusting = this_adjusting;
643 node->thunk.virtual_offset_p = virtual_offset != NULL;
644 node->thunk.thunk_p = true;
645 node->definition = true;
650 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
651 Return NULL if there's no such node. */
654 cgraph_node::get_for_asmname (tree asmname)
656 /* We do not want to look at inline clones. */
657 for (symtab_node *node = symtab_node::get_for_asmname (asmname);
659 node = node->next_sharing_asm_name)
661 cgraph_node *cn = dyn_cast <cgraph_node *> (node);
662 if (cn && !cn->global.inlined_to)
668 /* Returns a hash value for X (which really is a cgraph_edge). */
671 cgraph_edge_hasher::hash (cgraph_edge *e)
673 /* This is a really poor hash function, but it is what htab_hash_pointer
675 return (hashval_t) ((intptr_t)e->call_stmt >> 3);
678 /* Returns a hash value for X (which really is a cgraph_edge). */
681 cgraph_edge_hasher::hash (gimple *call_stmt)
683 /* This is a really poor hash function, but it is what htab_hash_pointer
685 return (hashval_t) ((intptr_t)call_stmt >> 3);
688 /* Return nonzero if the call_stmt of cgraph_edge X is stmt *Y. */
691 cgraph_edge_hasher::equal (cgraph_edge *x, gimple *y)
693 return x->call_stmt == y;
696 /* Add call graph edge E to call site hash of its caller. */
699 cgraph_update_edge_in_call_site_hash (cgraph_edge *e)
701 gimple *call = e->call_stmt;
702 *e->caller->call_site_hash->find_slot_with_hash
703 (call, cgraph_edge_hasher::hash (call), INSERT) = e;
706 /* Add call graph edge E to call site hash of its caller. */
709 cgraph_add_edge_to_call_site_hash (cgraph_edge *e)
711 /* There are two speculative edges for every statement (one direct,
712 one indirect); always hash the direct one. */
713 if (e->speculative && e->indirect_unknown_callee)
715 cgraph_edge **slot = e->caller->call_site_hash->find_slot_with_hash
716 (e->call_stmt, cgraph_edge_hasher::hash (e->call_stmt), INSERT);
719 gcc_assert (((cgraph_edge *)*slot)->speculative);
724 gcc_assert (!*slot || e->speculative);
728 /* Return the callgraph edge representing the GIMPLE_CALL statement
732 cgraph_node::get_edge (gimple *call_stmt)
738 return call_site_hash->find_with_hash
739 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
741 /* This loop may turn out to be performance problem. In such case adding
742 hashtables into call nodes with very many edges is probably best
743 solution. It is not good idea to add pointer into CALL_EXPR itself
744 because we want to make possible having multiple cgraph nodes representing
745 different clones of the same body before the body is actually cloned. */
746 for (e = callees; e; e = e->next_callee)
748 if (e->call_stmt == call_stmt)
754 for (e = indirect_calls; e; e = e->next_callee)
756 if (e->call_stmt == call_stmt)
763 call_site_hash = hash_table<cgraph_edge_hasher>::create_ggc (120);
764 for (e2 = callees; e2; e2 = e2->next_callee)
765 cgraph_add_edge_to_call_site_hash (e2);
766 for (e2 = indirect_calls; e2; e2 = e2->next_callee)
767 cgraph_add_edge_to_call_site_hash (e2);
774 /* Change field call_stmt of edge to NEW_STMT.
775 If UPDATE_SPECULATIVE and E is any component of speculative
776 edge, then update all components. */
779 cgraph_edge::set_call_stmt (gcall *new_stmt, bool update_speculative)
783 /* Speculative edges has three component, update all of them
785 if (update_speculative && speculative)
787 cgraph_edge *direct, *indirect;
790 speculative_call_info (direct, indirect, ref);
791 direct->set_call_stmt (new_stmt, false);
792 indirect->set_call_stmt (new_stmt, false);
793 ref->stmt = new_stmt;
797 /* Only direct speculative edges go to call_site_hash. */
798 if (caller->call_site_hash
799 && (!speculative || !indirect_unknown_callee))
801 caller->call_site_hash->remove_elt_with_hash
802 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
805 cgraph_edge *e = this;
807 call_stmt = new_stmt;
808 if (indirect_unknown_callee
809 && (decl = gimple_call_fndecl (new_stmt)))
811 /* Constant propagation (and possibly also inlining?) can turn an
812 indirect call into a direct one. */
813 cgraph_node *new_callee = cgraph_node::get (decl);
815 gcc_checking_assert (new_callee);
816 e = make_direct (new_callee);
819 function *fun = DECL_STRUCT_FUNCTION (e->caller->decl);
820 e->can_throw_external = stmt_can_throw_external (fun, new_stmt);
821 if (e->caller->call_site_hash)
822 cgraph_add_edge_to_call_site_hash (e);
825 /* Allocate a cgraph_edge structure and fill it with data according to the
826 parameters of which only CALLEE can be NULL (when creating an indirect call
830 symbol_table::create_edge (cgraph_node *caller, cgraph_node *callee,
831 gcall *call_stmt, profile_count count,
832 bool indir_unknown_callee)
836 /* LTO does not actually have access to the call_stmt since these
837 have not been loaded yet. */
840 /* This is a rather expensive check possibly triggering
841 construction of call stmt hashtable. */
843 gcc_checking_assert (!(e = caller->get_edge (call_stmt))
846 gcc_assert (is_gimple_call (call_stmt));
852 free_edges = NEXT_FREE_EDGE (edge);
856 edge = ggc_alloc<cgraph_edge> ();
857 edge->m_summary_id = -1;
862 gcc_assert (++edges_max_uid != 0);
863 edge->m_uid = edges_max_uid;
865 edge->caller = caller;
866 edge->callee = callee;
867 edge->prev_caller = NULL;
868 edge->next_caller = NULL;
869 edge->prev_callee = NULL;
870 edge->next_callee = NULL;
871 edge->lto_stmt_uid = 0;
875 edge->call_stmt = call_stmt;
876 edge->can_throw_external
877 = call_stmt ? stmt_can_throw_external (DECL_STRUCT_FUNCTION (caller->decl),
880 && callee && callee->decl
881 && !gimple_check_call_matching_types (call_stmt, callee->decl,
884 edge->inline_failed = CIF_MISMATCHED_ARGUMENTS;
885 edge->call_stmt_cannot_inline_p = true;
889 edge->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
890 edge->call_stmt_cannot_inline_p = false;
893 edge->indirect_info = NULL;
894 edge->indirect_inlining_edge = 0;
895 edge->speculative = false;
896 edge->indirect_unknown_callee = indir_unknown_callee;
897 if (opt_for_fn (edge->caller->decl, flag_devirtualize)
898 && call_stmt && DECL_STRUCT_FUNCTION (caller->decl))
899 edge->in_polymorphic_cdtor
900 = decl_maybe_in_construction_p (NULL, NULL, call_stmt,
903 edge->in_polymorphic_cdtor = caller->thunk.thunk_p;
904 if (call_stmt && caller->call_site_hash)
905 cgraph_add_edge_to_call_site_hash (edge);
910 /* Create edge from a given function to CALLEE in the cgraph. */
913 cgraph_node::create_edge (cgraph_node *callee,
914 gcall *call_stmt, profile_count count)
916 cgraph_edge *edge = symtab->create_edge (this, callee, call_stmt, count,
919 initialize_inline_failed (edge);
921 edge->next_caller = callee->callers;
923 callee->callers->prev_caller = edge;
924 edge->next_callee = callees;
926 callees->prev_callee = edge;
928 callee->callers = edge;
933 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
935 cgraph_indirect_call_info *
936 cgraph_allocate_init_indirect_info (void)
938 cgraph_indirect_call_info *ii;
940 ii = ggc_cleared_alloc<cgraph_indirect_call_info> ();
941 ii->param_index = -1;
945 /* Create an indirect edge with a yet-undetermined callee where the call
946 statement destination is a formal parameter of the caller with index
950 cgraph_node::create_indirect_edge (gcall *call_stmt, int ecf_flags,
952 bool compute_indirect_info)
954 cgraph_edge *edge = symtab->create_edge (this, NULL, call_stmt,
958 initialize_inline_failed (edge);
960 edge->indirect_info = cgraph_allocate_init_indirect_info ();
961 edge->indirect_info->ecf_flags = ecf_flags;
962 edge->indirect_info->vptr_changed = true;
964 /* Record polymorphic call info. */
965 if (compute_indirect_info
967 && (target = gimple_call_fn (call_stmt))
968 && virtual_method_call_p (target))
970 ipa_polymorphic_call_context context (decl, target, call_stmt);
972 /* Only record types can have virtual calls. */
973 edge->indirect_info->polymorphic = true;
974 edge->indirect_info->param_index = -1;
975 edge->indirect_info->otr_token
976 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
977 edge->indirect_info->otr_type = obj_type_ref_class (target);
978 gcc_assert (TREE_CODE (edge->indirect_info->otr_type) == RECORD_TYPE);
979 edge->indirect_info->context = context;
982 edge->next_callee = indirect_calls;
984 indirect_calls->prev_callee = edge;
985 indirect_calls = edge;
990 /* Remove the edge from the list of the callees of the caller. */
993 cgraph_edge::remove_caller (void)
996 prev_callee->next_callee = next_callee;
998 next_callee->prev_callee = prev_callee;
1001 if (indirect_unknown_callee)
1002 caller->indirect_calls = next_callee;
1004 caller->callees = next_callee;
1006 if (caller->call_site_hash)
1007 caller->call_site_hash->remove_elt_with_hash
1008 (call_stmt, cgraph_edge_hasher::hash (call_stmt));
1011 /* Put the edge onto the free list. */
1014 symbol_table::free_edge (cgraph_edge *e)
1016 if (e->indirect_info)
1017 ggc_free (e->indirect_info);
1019 /* Clear out the edge so we do not dangle pointers. */
1020 int summary_id = e->m_summary_id;
1021 memset (e, 0, sizeof (*e));
1022 e->m_summary_id = summary_id;
1023 NEXT_FREE_EDGE (e) = free_edges;
1028 /* Remove the edge in the cgraph. */
1031 cgraph_edge::remove (void)
1033 /* Call all edge removal hooks. */
1034 symtab->call_edge_removal_hooks (this);
1036 if (!indirect_unknown_callee)
1037 /* Remove from callers list of the callee. */
1040 /* Remove from callees list of the callers. */
1043 /* Put the edge onto the free list. */
1044 symtab->free_edge (this);
1047 /* Turn edge into speculative call calling N2. Update
1048 the profile so the direct call is taken COUNT times
1051 At clone materialization time, the indirect call E will
1054 if (call_dest == N2)
1059 At this time the function just creates the direct call,
1060 the referencd representing the if conditional and attaches
1061 them all to the orginal indirect call statement.
1063 Return direct edge created. */
1066 cgraph_edge::make_speculative (cgraph_node *n2, profile_count direct_count)
1068 cgraph_node *n = caller;
1069 ipa_ref *ref = NULL;
1073 fprintf (dump_file, "Indirect call -> speculative call %s => %s\n",
1074 n->dump_name (), n2->dump_name ());
1076 e2 = n->create_edge (n2, call_stmt, direct_count);
1077 initialize_inline_failed (e2);
1078 e2->speculative = true;
1079 if (TREE_NOTHROW (n2->decl))
1080 e2->can_throw_external = false;
1082 e2->can_throw_external = can_throw_external;
1083 e2->lto_stmt_uid = lto_stmt_uid;
1084 e2->in_polymorphic_cdtor = in_polymorphic_cdtor;
1086 symtab->call_edge_duplication_hooks (this, e2);
1087 ref = n->create_reference (n2, IPA_REF_ADDR, call_stmt);
1088 ref->lto_stmt_uid = lto_stmt_uid;
1089 ref->speculative = speculative;
1090 n2->mark_address_taken ();
1094 /* Speculative call consist of three components:
1095 1) an indirect edge representing the original call
1096 2) an direct edge representing the new call
1097 3) ADDR_EXPR reference representing the speculative check.
1098 All three components are attached to single statement (the indirect
1099 call) and if one of them exists, all of them must exist.
1101 Given speculative call edge, return all three components.
1105 cgraph_edge::speculative_call_info (cgraph_edge *&direct,
1106 cgraph_edge *&indirect,
1107 ipa_ref *&reference)
1112 cgraph_edge *e = this;
1114 if (!e->indirect_unknown_callee)
1115 for (e2 = e->caller->indirect_calls;
1116 e2->call_stmt != e->call_stmt || e2->lto_stmt_uid != e->lto_stmt_uid;
1117 e2 = e2->next_callee)
1122 /* We can take advantage of the call stmt hash. */
1125 e = e->caller->get_edge (e2->call_stmt);
1126 gcc_assert (e->speculative && !e->indirect_unknown_callee);
1129 for (e = e->caller->callees;
1130 e2->call_stmt != e->call_stmt
1131 || e2->lto_stmt_uid != e->lto_stmt_uid;
1135 gcc_assert (e->speculative && e2->speculative);
1140 for (i = 0; e->caller->iterate_reference (i, ref); i++)
1141 if (ref->speculative
1142 && ((ref->stmt && ref->stmt == e->call_stmt)
1143 || (!ref->stmt && ref->lto_stmt_uid == e->lto_stmt_uid)))
1149 /* Speculative edge always consist of all three components - direct edge,
1150 indirect and reference. */
1152 gcc_assert (e && e2 && ref);
1155 /* Speculative call edge turned out to be direct call to CALLE_DECL.
1156 Remove the speculative call sequence and return edge representing the call.
1157 It is up to caller to redirect the call as appropriate. */
1160 cgraph_edge::resolve_speculation (tree callee_decl)
1162 cgraph_edge *edge = this;
1166 gcc_assert (edge->speculative);
1167 edge->speculative_call_info (e2, edge, ref);
1169 || !ref->referred->semantically_equivalent_p
1170 (symtab_node::get (callee_decl)))
1176 fprintf (dump_file, "Speculative indirect call %s => %s has "
1177 "turned out to have contradicting known target ",
1178 edge->caller->dump_name (),
1179 e2->callee->dump_name ());
1180 print_generic_expr (dump_file, callee_decl);
1181 fprintf (dump_file, "\n");
1185 fprintf (dump_file, "Removing speculative call %s => %s\n",
1186 edge->caller->dump_name (),
1187 e2->callee->dump_name ());
1193 cgraph_edge *tmp = edge;
1195 fprintf (dump_file, "Speculative call turned into direct call.\n");
1198 /* FIXME: If EDGE is inlined, we should scale up the frequencies and counts
1199 in the functions inlined through it. */
1201 edge->count += e2->count;
1202 edge->speculative = false;
1203 e2->speculative = false;
1204 ref->remove_reference ();
1205 if (e2->indirect_unknown_callee || e2->inline_failed)
1208 e2->callee->remove_symbol_and_inline_clones ();
1209 if (edge->caller->call_site_hash)
1210 cgraph_update_edge_in_call_site_hash (edge);
1214 /* Make an indirect edge with an unknown callee an ordinary edge leading to
1215 CALLEE. DELTA is an integer constant that is to be added to the this
1216 pointer (first parameter) to compensate for skipping a thunk adjustment. */
1219 cgraph_edge::make_direct (cgraph_node *callee)
1221 cgraph_edge *edge = this;
1222 gcc_assert (indirect_unknown_callee);
1224 /* If we are redirecting speculative call, make it non-speculative. */
1225 if (indirect_unknown_callee && speculative)
1227 edge = edge->resolve_speculation (callee->decl);
1229 /* On successful speculation just return the pre existing direct edge. */
1230 if (!indirect_unknown_callee)
1234 indirect_unknown_callee = 0;
1235 ggc_free (indirect_info);
1236 indirect_info = NULL;
1238 /* Get the edge out of the indirect edge list. */
1240 prev_callee->next_callee = next_callee;
1242 next_callee->prev_callee = prev_callee;
1244 caller->indirect_calls = next_callee;
1246 /* Put it into the normal callee list */
1248 next_callee = caller->callees;
1249 if (caller->callees)
1250 caller->callees->prev_callee = edge;
1251 caller->callees = edge;
1253 /* Insert to callers list of the new callee. */
1254 edge->set_callee (callee);
1257 && !gimple_check_call_matching_types (call_stmt, callee->decl, false))
1259 call_stmt_cannot_inline_p = true;
1260 inline_failed = CIF_MISMATCHED_ARGUMENTS;
1263 /* We need to re-determine the inlining status of the edge. */
1264 initialize_inline_failed (edge);
1268 /* If necessary, change the function declaration in the call statement
1269 associated with E so that it corresponds to the edge callee. */
1272 cgraph_edge::redirect_call_stmt_to_callee (void)
1274 cgraph_edge *e = this;
1276 tree decl = gimple_call_fndecl (e->call_stmt);
1278 gimple_stmt_iterator gsi;
1286 e->speculative_call_info (e, e2, ref);
1287 /* If there already is an direct call (i.e. as a result of inliner's
1288 substitution), forget about speculating. */
1290 e = e->resolve_speculation (decl);
1291 /* If types do not match, speculation was likely wrong.
1292 The direct edge was possibly redirected to the clone with a different
1293 signature. We did not update the call statement yet, so compare it
1294 with the reference that still points to the proper type. */
1295 else if (!gimple_check_call_matching_types (e->call_stmt,
1296 ref->referred->decl,
1300 fprintf (dump_file, "Not expanding speculative call of %s -> %s\n"
1302 e->caller->dump_name (),
1303 e->callee->dump_name ());
1304 e = e->resolve_speculation ();
1305 /* We are producing the final function body and will throw away the
1306 callgraph edges really soon. Reset the counts/frequencies to
1307 keep verifier happy in the case of roundoff errors. */
1308 e->count = gimple_bb (e->call_stmt)->count;
1310 /* Expand speculation into GIMPLE code. */
1316 "Expanding speculative call of %s -> %s count: ",
1317 e->caller->dump_name (),
1318 e->callee->dump_name ());
1319 e->count.dump (dump_file);
1320 fprintf (dump_file, "\n");
1322 gcc_assert (e2->speculative);
1323 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
1325 profile_probability prob = e->count.probability_in (e->count
1327 if (!prob.initialized_p ())
1328 prob = profile_probability::even ();
1329 new_stmt = gimple_ic (e->call_stmt,
1330 dyn_cast<cgraph_node *> (ref->referred),
1332 e->speculative = false;
1333 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt,
1335 e->count = gimple_bb (e->call_stmt)->count;
1336 e2->speculative = false;
1337 e2->count = gimple_bb (e2->call_stmt)->count;
1338 ref->speculative = false;
1340 /* Indirect edges are not both in the call site hash.
1342 if (e->caller->call_site_hash)
1343 cgraph_update_edge_in_call_site_hash (e2);
1345 /* Continue redirecting E to proper target. */
1350 if (e->indirect_unknown_callee
1351 || decl == e->callee->decl)
1352 return e->call_stmt;
1354 if (flag_checking && decl)
1356 cgraph_node *node = cgraph_node::get (decl);
1357 gcc_assert (!node || !node->clone.combined_args_to_skip);
1360 if (symtab->dump_file)
1362 fprintf (symtab->dump_file, "updating call of %s -> %s: ",
1363 e->caller->dump_name (), e->callee->dump_name ());
1364 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1365 if (e->callee->clone.combined_args_to_skip)
1367 fprintf (symtab->dump_file, " combined args to skip: ");
1368 dump_bitmap (symtab->dump_file,
1369 e->callee->clone.combined_args_to_skip);
1373 if (e->callee->clone.combined_args_to_skip)
1377 new_stmt = e->call_stmt;
1378 if (e->callee->clone.combined_args_to_skip)
1380 = gimple_call_copy_skip_args (new_stmt,
1381 e->callee->clone.combined_args_to_skip);
1382 tree old_fntype = gimple_call_fntype (e->call_stmt);
1383 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1384 cgraph_node *origin = e->callee;
1385 while (origin->clone_of)
1386 origin = origin->clone_of;
1388 if ((origin->former_clone_of
1389 && old_fntype == TREE_TYPE (origin->former_clone_of))
1390 || old_fntype == TREE_TYPE (origin->decl))
1391 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1394 bitmap skip = e->callee->clone.combined_args_to_skip;
1395 tree t = cgraph_build_function_type_skip_args (old_fntype, skip,
1397 gimple_call_set_fntype (new_stmt, t);
1400 if (gimple_vdef (new_stmt)
1401 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1402 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1404 gsi = gsi_for_stmt (e->call_stmt);
1406 /* For optimized away parameters, add on the caller side
1408 DEBUG D#X => parm_Y(D)
1409 stmts and associate D#X with parm in decl_debug_args_lookup
1410 vector to say for debug info that if parameter parm had been passed,
1411 it would have value parm_Y(D). */
1412 if (e->callee->clone.combined_args_to_skip && MAY_HAVE_DEBUG_BIND_STMTS)
1414 vec<tree, va_gc> **debug_args
1415 = decl_debug_args_lookup (e->callee->decl);
1416 tree old_decl = gimple_call_fndecl (e->call_stmt);
1417 if (debug_args && old_decl)
1420 unsigned i = 0, num;
1421 unsigned len = vec_safe_length (*debug_args);
1422 unsigned nargs = gimple_call_num_args (e->call_stmt);
1423 for (parm = DECL_ARGUMENTS (old_decl), num = 0;
1424 parm && num < nargs;
1425 parm = DECL_CHAIN (parm), num++)
1426 if (bitmap_bit_p (e->callee->clone.combined_args_to_skip, num)
1427 && is_gimple_reg (parm))
1431 while (i < len && (**debug_args)[i] != DECL_ORIGIN (parm))
1437 && (**debug_args)[i] != DECL_ORIGIN (parm))
1442 tree ddecl = (**debug_args)[i + 1];
1443 tree arg = gimple_call_arg (e->call_stmt, num);
1444 if (!useless_type_conversion_p (TREE_TYPE (ddecl),
1448 if (!fold_convertible_p (TREE_TYPE (ddecl), arg))
1450 if (TREE_CODE (arg) == SSA_NAME
1451 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg))
1453 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (arg)))
1454 && useless_type_conversion_p (TREE_TYPE (ddecl),
1458 arg = fold_convert (TREE_TYPE (ddecl), arg);
1462 = gimple_build_debug_bind (ddecl, unshare_expr (arg),
1464 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
1469 gsi_replace (&gsi, new_stmt, false);
1470 /* We need to defer cleaning EH info on the new statement to
1471 fixup-cfg. We may not have dominator information at this point
1472 and thus would end up with unreachable blocks and have no way
1473 to communicate that we need to run CFG cleanup then. */
1474 lp_nr = lookup_stmt_eh_lp (e->call_stmt);
1477 remove_stmt_from_eh_lp (e->call_stmt);
1478 add_stmt_to_eh_lp (new_stmt, lp_nr);
1483 new_stmt = e->call_stmt;
1484 gimple_call_set_fndecl (new_stmt, e->callee->decl);
1485 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1488 /* If changing the call to __cxa_pure_virtual or similar noreturn function,
1489 adjust gimple_call_fntype too. */
1490 if (gimple_call_noreturn_p (new_stmt)
1491 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (e->callee->decl)))
1492 && TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl))
1493 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (e->callee->decl)))
1495 gimple_call_set_fntype (new_stmt, TREE_TYPE (e->callee->decl));
1497 /* If the call becomes noreturn, remove the LHS if possible. */
1498 tree lhs = gimple_call_lhs (new_stmt);
1500 && gimple_call_noreturn_p (new_stmt)
1501 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (new_stmt)))
1502 || should_remove_lhs_p (lhs)))
1504 if (TREE_CODE (lhs) == SSA_NAME)
1506 tree var = create_tmp_reg_fn (DECL_STRUCT_FUNCTION (e->caller->decl),
1507 TREE_TYPE (lhs), NULL);
1508 var = get_or_create_ssa_default_def
1509 (DECL_STRUCT_FUNCTION (e->caller->decl), var);
1510 gimple *set_stmt = gimple_build_assign (lhs, var);
1511 gsi = gsi_for_stmt (new_stmt);
1512 gsi_insert_before_without_update (&gsi, set_stmt, GSI_SAME_STMT);
1513 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), set_stmt);
1515 gimple_call_set_lhs (new_stmt, NULL_TREE);
1516 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1519 /* If new callee has no static chain, remove it. */
1520 if (gimple_call_chain (new_stmt) && !DECL_STATIC_CHAIN (e->callee->decl))
1522 gimple_call_set_chain (new_stmt, NULL);
1523 update_stmt_fn (DECL_STRUCT_FUNCTION (e->caller->decl), new_stmt);
1526 maybe_remove_unused_call_args (DECL_STRUCT_FUNCTION (e->caller->decl),
1529 e->caller->set_call_stmt_including_clones (e->call_stmt, new_stmt, false);
1531 if (symtab->dump_file)
1533 fprintf (symtab->dump_file, " updated to:");
1534 print_gimple_stmt (symtab->dump_file, e->call_stmt, 0, dump_flags);
1539 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1540 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1541 of OLD_STMT if it was previously call statement.
1542 If NEW_STMT is NULL, the call has been dropped without any
1546 cgraph_update_edges_for_call_stmt_node (cgraph_node *node,
1547 gimple *old_stmt, tree old_call,
1550 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1551 ? gimple_call_fndecl (new_stmt) : 0;
1553 /* We are seeing indirect calls, then there is nothing to update. */
1554 if (!new_call && !old_call)
1556 /* See if we turned indirect call into direct call or folded call to one builtin
1557 into different builtin. */
1558 if (old_call != new_call)
1560 cgraph_edge *e = node->get_edge (old_stmt);
1561 cgraph_edge *ne = NULL;
1562 profile_count count;
1566 /* Keep calls marked as dead dead. */
1567 if (new_stmt && is_gimple_call (new_stmt) && e->callee
1568 && fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
1570 node->get_edge (old_stmt)->set_call_stmt
1571 (as_a <gcall *> (new_stmt));
1574 /* See if the edge is already there and has the correct callee. It
1575 might be so because of indirect inlining has already updated
1576 it. We also might've cloned and redirected the edge. */
1577 if (new_call && e->callee)
1579 cgraph_node *callee = e->callee;
1582 if (callee->decl == new_call
1583 || callee->former_clone_of == new_call)
1585 e->set_call_stmt (as_a <gcall *> (new_stmt));
1588 callee = callee->clone_of;
1592 /* Otherwise remove edge and create new one; we can't simply redirect
1593 since function has changed, so inline plan and other information
1594 attached to edge is invalid. */
1596 if (e->indirect_unknown_callee || e->inline_failed)
1599 e->callee->remove_symbol_and_inline_clones ();
1603 /* We are seeing new direct call; compute profile info based on BB. */
1604 basic_block bb = gimple_bb (new_stmt);
1610 ne = node->create_edge (cgraph_node::get_create (new_call),
1611 as_a <gcall *> (new_stmt), count);
1612 gcc_assert (ne->inline_failed);
1615 /* We only updated the call stmt; update pointer in cgraph edge.. */
1616 else if (old_stmt != new_stmt)
1617 node->get_edge (old_stmt)->set_call_stmt (as_a <gcall *> (new_stmt));
1620 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1621 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1622 of OLD_STMT before it was updated (updating can happen inplace). */
1625 cgraph_update_edges_for_call_stmt (gimple *old_stmt, tree old_decl,
1628 cgraph_node *orig = cgraph_node::get (cfun->decl);
1631 gcc_checking_assert (orig);
1632 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
1634 for (node = orig->clones; node != orig;)
1636 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1638 node = node->clones;
1639 else if (node->next_sibling_clone)
1640 node = node->next_sibling_clone;
1643 while (node != orig && !node->next_sibling_clone)
1644 node = node->clone_of;
1646 node = node->next_sibling_clone;
1652 /* Remove all callees from the node. */
1655 cgraph_node::remove_callees (void)
1659 /* It is sufficient to remove the edges from the lists of callers of
1660 the callees. The callee list of the node can be zapped with one
1662 for (e = callees; e; e = f)
1665 symtab->call_edge_removal_hooks (e);
1666 if (!e->indirect_unknown_callee)
1667 e->remove_callee ();
1668 symtab->free_edge (e);
1670 for (e = indirect_calls; e; e = f)
1673 symtab->call_edge_removal_hooks (e);
1674 if (!e->indirect_unknown_callee)
1675 e->remove_callee ();
1676 symtab->free_edge (e);
1678 indirect_calls = NULL;
1682 call_site_hash->empty ();
1683 call_site_hash = NULL;
1687 /* Remove all callers from the node. */
1690 cgraph_node::remove_callers (void)
1694 /* It is sufficient to remove the edges from the lists of callees of
1695 the callers. The caller list of the node can be zapped with one
1697 for (e = callers; e; e = f)
1700 symtab->call_edge_removal_hooks (e);
1701 e->remove_caller ();
1702 symtab->free_edge (e);
1707 /* Helper function for cgraph_release_function_body and free_lang_data.
1708 It releases body from function DECL without having to inspect its
1709 possibly non-existent symtab node. */
1712 release_function_body (tree decl)
1714 function *fn = DECL_STRUCT_FUNCTION (decl);
1718 && loops_for_fn (fn))
1720 fn->curr_properties &= ~PROP_loops;
1721 loop_optimizer_finalize (fn);
1725 delete_tree_ssa (fn);
1730 gcc_assert (!dom_info_available_p (fn, CDI_DOMINATORS));
1731 gcc_assert (!dom_info_available_p (fn, CDI_POST_DOMINATORS));
1732 delete_tree_cfg_annotations (fn);
1736 if (fn->value_histograms)
1737 free_histograms (fn);
1738 gimple_set_body (decl, NULL);
1739 /* Struct function hangs a lot of data that would leak if we didn't
1740 removed all pointers to it. */
1742 DECL_STRUCT_FUNCTION (decl) = NULL;
1744 DECL_SAVED_TREE (decl) = NULL;
1747 /* Release memory used to represent body of function.
1748 Use this only for functions that are released before being translated to
1749 target code (i.e. RTL). Functions that are compiled to RTL and beyond
1750 are free'd in final.c via free_after_compilation().
1751 KEEP_ARGUMENTS are useful only if you want to rebuild body as thunk. */
1754 cgraph_node::release_body (bool keep_arguments)
1756 ipa_transforms_to_apply.release ();
1757 if (!used_as_abstract_origin && symtab->state != PARSING)
1759 DECL_RESULT (decl) = NULL;
1761 if (!keep_arguments)
1762 DECL_ARGUMENTS (decl) = NULL;
1764 /* If the node is abstract and needed, then do not clear
1765 DECL_INITIAL of its associated function declaration because it's
1766 needed to emit debug info later. */
1767 if (!used_as_abstract_origin && DECL_INITIAL (decl))
1768 DECL_INITIAL (decl) = error_mark_node;
1769 release_function_body (decl);
1772 lto_free_function_in_decl_state_for_node (this);
1773 lto_file_data = NULL;
1777 /* Remove function from symbol table. */
1780 cgraph_node::remove (void)
1784 if (symtab->ipa_clones_dump_file && symtab->cloned_nodes.contains (this))
1785 fprintf (symtab->ipa_clones_dump_file,
1786 "Callgraph removal;%s;%d;%s;%d;%d\n", asm_name (), order,
1787 DECL_SOURCE_FILE (decl), DECL_SOURCE_LINE (decl),
1788 DECL_SOURCE_COLUMN (decl));
1790 symtab->call_cgraph_removal_hooks (this);
1793 ipa_transforms_to_apply.release ();
1794 delete_function_version (function_version ());
1796 /* Incremental inlining access removed nodes stored in the postorder list.
1798 force_output = false;
1799 forced_by_abi = false;
1800 for (n = nested; n; n = n->next_nested)
1805 cgraph_node **node2 = &origin->nested;
1807 while (*node2 != this)
1808 node2 = &(*node2)->next_nested;
1809 *node2 = next_nested;
1812 if (prev_sibling_clone)
1813 prev_sibling_clone->next_sibling_clone = next_sibling_clone;
1815 clone_of->clones = next_sibling_clone;
1816 if (next_sibling_clone)
1817 next_sibling_clone->prev_sibling_clone = prev_sibling_clone;
1820 cgraph_node *n, *next;
1824 for (n = clones; n->next_sibling_clone; n = n->next_sibling_clone)
1825 n->clone_of = clone_of;
1826 n->clone_of = clone_of;
1827 n->next_sibling_clone = clone_of->clones;
1828 if (clone_of->clones)
1829 clone_of->clones->prev_sibling_clone = n;
1830 clone_of->clones = clones;
1834 /* We are removing node with clones. This makes clones inconsistent,
1835 but assume they will be removed subsequently and just keep clone
1836 tree intact. This can happen in unreachable function removal since
1837 we remove unreachable functions in random order, not by bottom-up
1838 walk of clone trees. */
1839 for (n = clones; n; n = next)
1841 next = n->next_sibling_clone;
1842 n->next_sibling_clone = NULL;
1843 n->prev_sibling_clone = NULL;
1849 /* While all the clones are removed after being proceeded, the function
1850 itself is kept in the cgraph even after it is compiled. Check whether
1851 we are done with this body and reclaim it proactively if this is the case.
1853 if (symtab->state != LTO_STREAMING)
1855 n = cgraph_node::get (decl);
1857 || (!n->clones && !n->clone_of && !n->global.inlined_to
1858 && ((symtab->global_info_ready || in_lto_p)
1859 && (TREE_ASM_WRITTEN (n->decl)
1860 || DECL_EXTERNAL (n->decl)
1862 || (!flag_wpa && n->in_other_partition)))))
1867 lto_free_function_in_decl_state_for_node (this);
1868 lto_file_data = NULL;
1874 call_site_hash->empty ();
1875 call_site_hash = NULL;
1878 symtab->release_symbol (this);
1881 /* Likewise indicate that a node is having address taken. */
1884 cgraph_node::mark_address_taken (void)
1886 /* Indirect inlining can figure out that all uses of the address are
1888 if (global.inlined_to)
1890 gcc_assert (cfun->after_inlining);
1891 gcc_assert (callers->indirect_inlining_edge);
1894 /* FIXME: address_taken flag is used both as a shortcut for testing whether
1895 IPA_REF_ADDR reference exists (and thus it should be set on node
1896 representing alias we take address of) and as a test whether address
1897 of the object was taken (and thus it should be set on node alias is
1898 referring to). We should remove the first use and the remove the
1901 cgraph_node *node = ultimate_alias_target ();
1902 node->address_taken = 1;
1905 /* Return local info for the compiled function. */
1908 cgraph_node::local_info (tree decl)
1910 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1911 cgraph_node *node = get (decl);
1914 return &node->ultimate_alias_target ()->local;
1917 /* Return local info for the compiled function. */
1920 cgraph_node::rtl_info (tree decl)
1922 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1923 cgraph_node *node = get (decl);
1926 enum availability avail;
1927 node = node->ultimate_alias_target (&avail);
1928 if (decl != current_function_decl
1929 && (avail < AVAIL_AVAILABLE
1930 || (node->decl != current_function_decl
1931 && !TREE_ASM_WRITTEN (node->decl))))
1933 /* Allocate if it doesn't exist. */
1934 if (node->rtl == NULL)
1935 node->rtl = ggc_cleared_alloc<cgraph_rtl_info> ();
1939 /* Return a string describing the failure REASON. */
1942 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
1945 #define DEFCIFCODE(code, type, string) string,
1947 static const char *cif_string_table[CIF_N_REASONS] = {
1948 #include "cif-code.def"
1951 /* Signedness of an enum type is implementation defined, so cast it
1952 to unsigned before testing. */
1953 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1954 return cif_string_table[reason];
1957 /* Return a type describing the failure REASON. */
1959 cgraph_inline_failed_type_t
1960 cgraph_inline_failed_type (cgraph_inline_failed_t reason)
1963 #define DEFCIFCODE(code, type, string) type,
1965 static cgraph_inline_failed_type_t cif_type_table[CIF_N_REASONS] = {
1966 #include "cif-code.def"
1969 /* Signedness of an enum type is implementation defined, so cast it
1970 to unsigned before testing. */
1971 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1972 return cif_type_table[reason];
1975 /* Names used to print out the availability enum. */
1976 const char * const cgraph_availability_names[] =
1977 {"unset", "not_available", "overwritable", "available", "local"};
1979 /* Output flags of edge to a file F. */
1982 cgraph_edge::dump_edge_flags (FILE *f)
1985 fprintf (f, "(speculative) ");
1987 fprintf (f, "(inlined) ");
1988 if (call_stmt_cannot_inline_p)
1989 fprintf (f, "(call_stmt_cannot_inline_p) ");
1990 if (indirect_inlining_edge)
1991 fprintf (f, "(indirect_inlining) ");
1992 if (count.initialized_p ())
1997 fprintf (f, "%.2f per call) ", sreal_frequency ().to_double ());
1999 if (can_throw_external)
2000 fprintf (f, "(can throw external) ");
2003 /* Dump call graph node to file F. */
2006 cgraph_node::dump (FILE *f)
2012 if (global.inlined_to)
2013 fprintf (f, " Function %s is inline copy in %s\n",
2015 global.inlined_to->dump_name ());
2017 fprintf (f, " Clone of %s\n", clone_of->dump_asm_name ());
2018 if (symtab->function_flags_ready)
2019 fprintf (f, " Availability: %s\n",
2020 cgraph_availability_names [get_availability ()]);
2023 fprintf (f, " Profile id: %i\n",
2025 cgraph_function_version_info *vi = function_version ();
2028 fprintf (f, " Version info: ");
2029 if (vi->prev != NULL)
2031 fprintf (f, "prev: ");
2032 fprintf (f, "%s ", vi->prev->this_node->dump_asm_name ());
2034 if (vi->next != NULL)
2036 fprintf (f, "next: ");
2037 fprintf (f, "%s ", vi->next->this_node->dump_asm_name ());
2039 if (vi->dispatcher_resolver != NULL_TREE)
2040 fprintf (f, "dispatcher: %s",
2041 lang_hooks.decl_printable_name (vi->dispatcher_resolver, 2));
2045 fprintf (f, " Function flags:");
2046 if (count.initialized_p ())
2048 fprintf (f, " count:");
2051 if (tp_first_run > 0)
2052 fprintf (f, " first_run:%i", tp_first_run);
2054 fprintf (f, " nested in:%s", origin->asm_name ());
2055 if (gimple_has_body_p (decl))
2056 fprintf (f, " body");
2058 fprintf (f, " process");
2060 fprintf (f, " local");
2061 if (local.redefined_extern_inline)
2062 fprintf (f, " redefined_extern_inline");
2063 if (only_called_at_startup)
2064 fprintf (f, " only_called_at_startup");
2065 if (only_called_at_exit)
2066 fprintf (f, " only_called_at_exit");
2068 fprintf (f, " tm_clone");
2069 if (calls_comdat_local)
2070 fprintf (f, " calls_comdat_local");
2072 fprintf (f, " icf_merged");
2074 fprintf (f, " merged_comdat");
2076 fprintf (f, " split_part");
2077 if (indirect_call_target)
2078 fprintf (f, " indirect_call_target");
2080 fprintf (f, " nonfreeing_fn");
2081 if (DECL_STATIC_CONSTRUCTOR (decl))
2082 fprintf (f," static_constructor (priority:%i)", get_init_priority ());
2083 if (DECL_STATIC_DESTRUCTOR (decl))
2084 fprintf (f," static_destructor (priority:%i)", get_fini_priority ());
2085 if (frequency == NODE_FREQUENCY_HOT)
2086 fprintf (f, " hot");
2087 if (frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2088 fprintf (f, " unlikely_executed");
2089 if (frequency == NODE_FREQUENCY_EXECUTED_ONCE)
2090 fprintf (f, " executed_once");
2091 if (opt_for_fn (decl, optimize_size))
2092 fprintf (f, " optimize_size");
2093 if (parallelized_function)
2094 fprintf (f, " parallelized_function");
2100 fprintf (f, " Thunk");
2102 fprintf (f, " of %s (asm:%s)",
2103 lang_hooks.decl_printable_name (thunk.alias, 2),
2104 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2105 fprintf (f, " fixed offset %i virtual value %i indirect_offset %i "
2106 "has virtual offset %i\n",
2107 (int)thunk.fixed_offset,
2108 (int)thunk.virtual_value,
2109 (int)thunk.indirect_offset,
2110 (int)thunk.virtual_offset_p);
2112 else if (former_thunk_p ())
2113 fprintf (f, " Former thunk fixed offset %i virtual value %i "
2114 "indirect_offset %i has virtual offset %i\n",
2115 (int)thunk.fixed_offset,
2116 (int)thunk.virtual_value,
2117 (int)thunk.indirect_offset,
2118 (int)thunk.virtual_offset_p);
2119 if (alias && thunk.alias
2120 && DECL_P (thunk.alias))
2122 fprintf (f, " Alias of %s",
2123 lang_hooks.decl_printable_name (thunk.alias, 2));
2124 if (DECL_ASSEMBLER_NAME_SET_P (thunk.alias))
2125 fprintf (f, " (asm:%s)",
2126 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk.alias)));
2130 fprintf (f, " Called by: ");
2132 profile_count sum = profile_count::zero ();
2133 for (edge = callers; edge; edge = edge->next_caller)
2135 fprintf (f, "%s ", edge->caller->dump_name ());
2136 edge->dump_edge_flags (f);
2137 if (edge->count.initialized_p ())
2138 sum += edge->count.ipa ();
2141 fprintf (f, "\n Calls: ");
2142 for (edge = callees; edge; edge = edge->next_callee)
2144 fprintf (f, "%s ", edge->callee->dump_name ());
2145 edge->dump_edge_flags (f);
2149 if (count.ipa ().initialized_p ())
2155 FOR_EACH_ALIAS (this, ref)
2156 if (dyn_cast <cgraph_node *> (ref->referring)->count.initialized_p ())
2157 sum += dyn_cast <cgraph_node *> (ref->referring)->count.ipa ();
2159 if (global.inlined_to
2160 || (symtab->state < EXPANSION
2161 && ultimate_alias_target () == this && only_called_directly_p ()))
2162 ok = !count.ipa ().differs_from_p (sum);
2163 else if (count.ipa () > profile_count::from_gcov_type (100)
2164 && count.ipa () < sum.apply_scale (99, 100))
2165 ok = false, min = true;
2168 fprintf (f, " Invalid sum of caller counts ");
2171 fprintf (f, ", should be at most ");
2173 fprintf (f, ", should be ");
2174 count.ipa ().dump (f);
2179 for (edge = indirect_calls; edge; edge = edge->next_callee)
2181 if (edge->indirect_info->polymorphic)
2183 fprintf (f, " Polymorphic indirect call of type ");
2184 print_generic_expr (f, edge->indirect_info->otr_type, TDF_SLIM);
2185 fprintf (f, " token:%i", (int) edge->indirect_info->otr_token);
2188 fprintf (f, " Indirect call");
2189 edge->dump_edge_flags (f);
2190 if (edge->indirect_info->param_index != -1)
2192 fprintf (f, " of param:%i", edge->indirect_info->param_index);
2193 if (edge->indirect_info->agg_contents)
2194 fprintf (f, " loaded from %s %s at offset %i",
2195 edge->indirect_info->member_ptr ? "member ptr" : "aggregate",
2196 edge->indirect_info->by_ref ? "passed by reference":"",
2197 (int)edge->indirect_info->offset);
2198 if (edge->indirect_info->vptr_changed)
2199 fprintf (f, " (vptr maybe changed)");
2202 if (edge->indirect_info->polymorphic)
2203 edge->indirect_info->context.dump (f);
2207 /* Dump call graph node NODE to stderr. */
2210 cgraph_node::debug (void)
2215 /* Dump the callgraph to file F. */
2218 cgraph_node::dump_cgraph (FILE *f)
2222 fprintf (f, "callgraph:\n\n");
2223 FOR_EACH_FUNCTION (node)
2227 /* Return true when the DECL can possibly be inlined. */
2230 cgraph_function_possibly_inlined_p (tree decl)
2232 if (!symtab->global_info_ready)
2233 return !DECL_UNINLINABLE (decl);
2234 return DECL_POSSIBLY_INLINED (decl);
2237 /* cgraph_node is no longer nested function; update cgraph accordingly. */
2239 cgraph_node::unnest (void)
2241 cgraph_node **node2 = &origin->nested;
2242 gcc_assert (origin);
2244 while (*node2 != this)
2245 node2 = &(*node2)->next_nested;
2246 *node2 = next_nested;
2250 /* Return function availability. See cgraph.h for description of individual
2253 cgraph_node::get_availability (symtab_node *ref)
2257 cgraph_node *cref = dyn_cast <cgraph_node *> (ref);
2259 ref = cref->global.inlined_to;
2261 enum availability avail;
2263 avail = AVAIL_NOT_AVAILABLE;
2264 else if (local.local)
2265 avail = AVAIL_LOCAL;
2266 else if (global.inlined_to)
2267 avail = AVAIL_AVAILABLE;
2268 else if (transparent_alias)
2269 ultimate_alias_target (&avail, ref);
2270 else if (ifunc_resolver
2271 || lookup_attribute ("noipa", DECL_ATTRIBUTES (decl)))
2272 avail = AVAIL_INTERPOSABLE;
2273 else if (!externally_visible)
2274 avail = AVAIL_AVAILABLE;
2275 /* If this is a reference from symbol itself and there are no aliases, we
2276 may be sure that the symbol was not interposed by something else because
2277 the symbol itself would be unreachable otherwise.
2279 Also comdat groups are always resolved in groups. */
2280 else if ((this == ref && !has_aliases_p ())
2281 || (ref && get_comdat_group ()
2282 && get_comdat_group () == ref->get_comdat_group ()))
2283 avail = AVAIL_AVAILABLE;
2284 /* Inline functions are safe to be analyzed even if their symbol can
2285 be overwritten at runtime. It is not meaningful to enforce any sane
2286 behavior on replacing inline function by different body. */
2287 else if (DECL_DECLARED_INLINE_P (decl))
2288 avail = AVAIL_AVAILABLE;
2290 /* If the function can be overwritten, return OVERWRITABLE. Take
2291 care at least of two notable extensions - the COMDAT functions
2292 used to share template instantiations in C++ (this is symmetric
2293 to code cp_cannot_inline_tree_fn and probably shall be shared and
2294 the inlinability hooks completely eliminated). */
2296 else if (decl_replaceable_p (decl) && !DECL_EXTERNAL (decl))
2297 avail = AVAIL_INTERPOSABLE;
2298 else avail = AVAIL_AVAILABLE;
2303 /* Worker for cgraph_node_can_be_local_p. */
2305 cgraph_node_cannot_be_local_p_1 (cgraph_node *node, void *)
2307 return !(!node->force_output
2308 && ((DECL_COMDAT (node->decl)
2309 && !node->forced_by_abi
2310 && !node->used_from_object_file_p ()
2311 && !node->same_comdat_group)
2312 || !node->externally_visible));
2315 /* Return true if cgraph_node can be made local for API change.
2316 Extern inline functions and C++ COMDAT functions can be made local
2317 at the expense of possible code size growth if function is used in multiple
2318 compilation units. */
2320 cgraph_node::can_be_local_p (void)
2322 return (!address_taken
2323 && !call_for_symbol_thunks_and_aliases (cgraph_node_cannot_be_local_p_1,
2327 /* Call callback on cgraph_node, thunks and aliases associated to cgraph_node.
2328 When INCLUDE_OVERWRITABLE is false, overwritable symbols are
2329 skipped. When EXCLUDE_VIRTUAL_THUNKS is true, virtual thunks are
2332 cgraph_node::call_for_symbol_thunks_and_aliases (bool (*callback)
2333 (cgraph_node *, void *),
2335 bool include_overwritable,
2336 bool exclude_virtual_thunks)
2340 enum availability avail = AVAIL_AVAILABLE;
2342 if (include_overwritable
2343 || (avail = get_availability ()) > AVAIL_INTERPOSABLE)
2345 if (callback (this, data))
2348 FOR_EACH_ALIAS (this, ref)
2350 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2351 if (include_overwritable
2352 || alias->get_availability () > AVAIL_INTERPOSABLE)
2353 if (alias->call_for_symbol_thunks_and_aliases (callback, data,
2354 include_overwritable,
2355 exclude_virtual_thunks))
2358 if (avail <= AVAIL_INTERPOSABLE)
2360 for (e = callers; e; e = e->next_caller)
2361 if (e->caller->thunk.thunk_p
2362 && (include_overwritable
2363 || e->caller->get_availability () > AVAIL_INTERPOSABLE)
2364 && !(exclude_virtual_thunks
2365 && e->caller->thunk.virtual_offset_p))
2366 if (e->caller->call_for_symbol_thunks_and_aliases (callback, data,
2367 include_overwritable,
2368 exclude_virtual_thunks))
2374 /* Worker to bring NODE local. */
2377 cgraph_node::make_local (cgraph_node *node, void *)
2379 gcc_checking_assert (node->can_be_local_p ());
2380 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2382 node->make_decl_local ();
2383 node->set_section (NULL);
2384 node->set_comdat_group (NULL);
2385 node->externally_visible = false;
2386 node->forced_by_abi = false;
2387 node->local.local = true;
2388 node->set_section (NULL);
2389 node->unique_name = ((node->resolution == LDPR_PREVAILING_DEF_IRONLY
2390 || node->resolution == LDPR_PREVAILING_DEF_IRONLY_EXP)
2391 && !flag_incremental_link);
2392 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2393 gcc_assert (node->get_availability () == AVAIL_LOCAL);
2398 /* Bring cgraph node local. */
2401 cgraph_node::make_local (void)
2403 call_for_symbol_thunks_and_aliases (cgraph_node::make_local, NULL, true);
2406 /* Worker to set nothrow flag. */
2409 set_nothrow_flag_1 (cgraph_node *node, bool nothrow, bool non_call,
2414 if (nothrow && !TREE_NOTHROW (node->decl))
2416 /* With non-call exceptions we can't say for sure if other function body
2417 was not possibly optimized to stil throw. */
2418 if (!non_call || node->binds_to_current_def_p ())
2420 TREE_NOTHROW (node->decl) = true;
2422 for (e = node->callers; e; e = e->next_caller)
2423 e->can_throw_external = false;
2426 else if (!nothrow && TREE_NOTHROW (node->decl))
2428 TREE_NOTHROW (node->decl) = false;
2432 FOR_EACH_ALIAS (node, ref)
2434 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2435 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2436 set_nothrow_flag_1 (alias, nothrow, non_call, changed);
2438 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2439 if (e->caller->thunk.thunk_p
2440 && (!nothrow || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2441 set_nothrow_flag_1 (e->caller, nothrow, non_call, changed);
2444 /* Set TREE_NOTHROW on NODE's decl and on aliases of NODE
2445 if any to NOTHROW. */
2448 cgraph_node::set_nothrow_flag (bool nothrow)
2450 bool changed = false;
2451 bool non_call = opt_for_fn (decl, flag_non_call_exceptions);
2453 if (!nothrow || get_availability () > AVAIL_INTERPOSABLE)
2454 set_nothrow_flag_1 (this, nothrow, non_call, &changed);
2459 FOR_EACH_ALIAS (this, ref)
2461 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2462 if (!nothrow || alias->get_availability () > AVAIL_INTERPOSABLE)
2463 set_nothrow_flag_1 (alias, nothrow, non_call, &changed);
2469 /* Worker to set malloc flag. */
2471 set_malloc_flag_1 (cgraph_node *node, bool malloc_p, bool *changed)
2473 if (malloc_p && !DECL_IS_MALLOC (node->decl))
2475 DECL_IS_MALLOC (node->decl) = true;
2480 FOR_EACH_ALIAS (node, ref)
2482 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2483 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2484 set_malloc_flag_1 (alias, malloc_p, changed);
2487 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2488 if (e->caller->thunk.thunk_p
2489 && (!malloc_p || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2490 set_malloc_flag_1 (e->caller, malloc_p, changed);
2493 /* Set DECL_IS_MALLOC on NODE's decl and on NODE's aliases if any. */
2496 cgraph_node::set_malloc_flag (bool malloc_p)
2498 bool changed = false;
2500 if (!malloc_p || get_availability () > AVAIL_INTERPOSABLE)
2501 set_malloc_flag_1 (this, malloc_p, &changed);
2506 FOR_EACH_ALIAS (this, ref)
2508 cgraph_node *alias = dyn_cast<cgraph_node *> (ref->referring);
2509 if (!malloc_p || alias->get_availability () > AVAIL_INTERPOSABLE)
2510 set_malloc_flag_1 (alias, malloc_p, &changed);
2516 /* Worker to set_const_flag. */
2519 set_const_flag_1 (cgraph_node *node, bool set_const, bool looping,
2522 /* Static constructors and destructors without a side effect can be
2524 if (set_const && !looping)
2526 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2528 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2531 if (DECL_STATIC_DESTRUCTOR (node->decl))
2533 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2539 if (TREE_READONLY (node->decl))
2541 TREE_READONLY (node->decl) = 0;
2542 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2548 /* Consider function:
2555 During early optimization we will turn this into:
2562 Now if this function will be detected as CONST however when interposed
2563 it may end up being just pure. We always must assume the worst
2565 if (TREE_READONLY (node->decl))
2567 if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2569 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2573 else if (node->binds_to_current_def_p ())
2575 TREE_READONLY (node->decl) = true;
2576 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2577 DECL_PURE_P (node->decl) = false;
2582 if (dump_file && (dump_flags & TDF_DETAILS))
2583 fprintf (dump_file, "Dropping state to PURE because function does "
2584 "not bind to current def.\n");
2585 if (!DECL_PURE_P (node->decl))
2587 DECL_PURE_P (node->decl) = true;
2588 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = looping;
2591 else if (!looping && DECL_LOOPING_CONST_OR_PURE_P (node->decl))
2593 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2600 FOR_EACH_ALIAS (node, ref)
2602 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2603 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2604 set_const_flag_1 (alias, set_const, looping, changed);
2606 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
2607 if (e->caller->thunk.thunk_p
2608 && (!set_const || e->caller->get_availability () > AVAIL_INTERPOSABLE))
2610 /* Virtual thunks access virtual offset in the vtable, so they can
2611 only be pure, never const. */
2613 && (e->caller->thunk.virtual_offset_p
2614 || !node->binds_to_current_def_p (e->caller)))
2615 *changed |= e->caller->set_pure_flag (true, looping);
2617 set_const_flag_1 (e->caller, set_const, looping, changed);
2621 /* If SET_CONST is true, mark function, aliases and thunks to be ECF_CONST.
2622 If SET_CONST if false, clear the flag.
2624 When setting the flag be careful about possible interposition and
2625 do not set the flag for functions that can be interposet and set pure
2626 flag for functions that can bind to other definition.
2628 Return true if any change was done. */
2631 cgraph_node::set_const_flag (bool set_const, bool looping)
2633 bool changed = false;
2634 if (!set_const || get_availability () > AVAIL_INTERPOSABLE)
2635 set_const_flag_1 (this, set_const, looping, &changed);
2640 FOR_EACH_ALIAS (this, ref)
2642 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
2643 if (!set_const || alias->get_availability () > AVAIL_INTERPOSABLE)
2644 set_const_flag_1 (alias, set_const, looping, &changed);
2650 /* Info used by set_pure_flag_1. */
2652 struct set_pure_flag_info
2659 /* Worker to set_pure_flag. */
2662 set_pure_flag_1 (cgraph_node *node, void *data)
2664 struct set_pure_flag_info *info = (struct set_pure_flag_info *)data;
2665 /* Static constructors and destructors without a side effect can be
2667 if (info->pure && !info->looping)
2669 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2671 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2672 info->changed = true;
2674 if (DECL_STATIC_DESTRUCTOR (node->decl))
2676 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
2677 info->changed = true;
2682 if (!DECL_PURE_P (node->decl) && !TREE_READONLY (node->decl))
2684 DECL_PURE_P (node->decl) = true;
2685 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = info->looping;
2686 info->changed = true;
2688 else if (DECL_LOOPING_CONST_OR_PURE_P (node->decl)
2691 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2692 info->changed = true;
2697 if (DECL_PURE_P (node->decl))
2699 DECL_PURE_P (node->decl) = false;
2700 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = false;
2701 info->changed = true;
2707 /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
2710 When setting the flag, be careful about possible interposition.
2711 Return true if any change was done. */
2714 cgraph_node::set_pure_flag (bool pure, bool looping)
2716 struct set_pure_flag_info info = {pure, looping, false};
2717 call_for_symbol_thunks_and_aliases (set_pure_flag_1, &info, !pure, true);
2718 return info.changed;
2721 /* Return true when cgraph_node cannot return or throw and thus
2722 it is safe to ignore its side effects for IPA analysis. */
2725 cgraph_node::cannot_return_p (void)
2727 int flags = flags_from_decl_or_type (decl);
2728 if (!opt_for_fn (decl, flag_exceptions))
2729 return (flags & ECF_NORETURN) != 0;
2731 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2732 == (ECF_NORETURN | ECF_NOTHROW));
2735 /* Return true when call of edge cannot lead to return from caller
2736 and thus it is safe to ignore its side effects for IPA analysis
2737 when computing side effects of the caller.
2738 FIXME: We could actually mark all edges that have no reaching
2739 patch to the exit block or throw to get better results. */
2741 cgraph_edge::cannot_lead_to_return_p (void)
2743 if (caller->cannot_return_p ())
2745 if (indirect_unknown_callee)
2747 int flags = indirect_info->ecf_flags;
2748 if (!opt_for_fn (caller->decl, flag_exceptions))
2749 return (flags & ECF_NORETURN) != 0;
2751 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2752 == (ECF_NORETURN | ECF_NOTHROW));
2755 return callee->cannot_return_p ();
2758 /* Return true if the call can be hot. */
2761 cgraph_edge::maybe_hot_p (void)
2763 if (!maybe_hot_count_p (NULL, count.ipa ()))
2765 if (caller->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED
2767 && callee->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED))
2769 if (caller->frequency > NODE_FREQUENCY_UNLIKELY_EXECUTED
2771 && callee->frequency <= NODE_FREQUENCY_EXECUTED_ONCE))
2773 if (opt_for_fn (caller->decl, optimize_size))
2775 if (caller->frequency == NODE_FREQUENCY_HOT)
2777 /* If profile is now known yet, be conservative.
2778 FIXME: this predicate is used by early inliner and can do better there. */
2779 if (symtab->state < IPA_SSA)
2781 if (caller->frequency == NODE_FREQUENCY_EXECUTED_ONCE
2782 && sreal_frequency () * 2 < 3)
2784 if (PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) == 0
2785 || sreal_frequency () * PARAM_VALUE (HOT_BB_FREQUENCY_FRACTION) <= 1)
2790 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2793 nonremovable_p (cgraph_node *node, void *)
2795 return !node->can_remove_if_no_direct_calls_and_refs_p ();
2798 /* Return true if whole comdat group can be removed if there are no direct
2802 cgraph_node::can_remove_if_no_direct_calls_p (bool will_inline)
2804 struct ipa_ref *ref;
2806 /* For local symbols or non-comdat group it is the same as
2807 can_remove_if_no_direct_calls_p. */
2808 if (!externally_visible || !same_comdat_group)
2810 if (DECL_EXTERNAL (decl))
2814 return !call_for_symbol_and_aliases (nonremovable_p, NULL, true);
2817 if (will_inline && address_taken)
2820 /* Otheriwse check if we can remove the symbol itself and then verify
2821 that only uses of the comdat groups are direct call to THIS
2823 if (!can_remove_if_no_direct_calls_and_refs_p ())
2826 /* Check that all refs come from within the comdat group. */
2827 for (int i = 0; iterate_referring (i, ref); i++)
2828 if (ref->referring->get_comdat_group () != get_comdat_group ())
2831 struct cgraph_node *target = ultimate_alias_target ();
2832 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2833 next != this; next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2835 if (!externally_visible)
2838 && !next->can_remove_if_no_direct_calls_and_refs_p ())
2841 /* If we see different symbol than THIS, be sure to check calls. */
2842 if (next->ultimate_alias_target () != target)
2843 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2844 if (e->caller->get_comdat_group () != get_comdat_group ()
2848 /* If function is not being inlined, we care only about
2849 references outside of the comdat group. */
2851 for (int i = 0; next->iterate_referring (i, ref); i++)
2852 if (ref->referring->get_comdat_group () != get_comdat_group ())
2858 /* Return true when function cgraph_node can be expected to be removed
2859 from program when direct calls in this compilation unit are removed.
2861 As a special case COMDAT functions are
2862 cgraph_can_remove_if_no_direct_calls_p while the are not
2863 cgraph_only_called_directly_p (it is possible they are called from other
2866 This function behaves as cgraph_only_called_directly_p because eliminating
2867 all uses of COMDAT function does not make it necessarily disappear from
2868 the program unless we are compiling whole program or we do LTO. In this
2869 case we know we win since dynamic linking will not really discard the
2870 linkonce section. */
2873 cgraph_node::will_be_removed_from_program_if_no_direct_calls_p
2876 gcc_assert (!global.inlined_to);
2877 if (DECL_EXTERNAL (decl))
2880 if (!in_lto_p && !flag_whole_program)
2882 /* If the symbol is in comdat group, we need to verify that whole comdat
2883 group becomes unreachable. Technically we could skip references from
2884 within the group, too. */
2885 if (!only_called_directly_p ())
2887 if (same_comdat_group && externally_visible)
2889 struct cgraph_node *target = ultimate_alias_target ();
2891 if (will_inline && address_taken)
2893 for (cgraph_node *next = dyn_cast<cgraph_node *> (same_comdat_group);
2895 next = dyn_cast<cgraph_node *> (next->same_comdat_group))
2897 if (!externally_visible)
2900 && !next->only_called_directly_p ())
2903 /* If we see different symbol than THIS,
2904 be sure to check calls. */
2905 if (next->ultimate_alias_target () != target)
2906 for (cgraph_edge *e = next->callers; e; e = e->next_caller)
2907 if (e->caller->get_comdat_group () != get_comdat_group ()
2915 return can_remove_if_no_direct_calls_p (will_inline);
2919 /* Worker for cgraph_only_called_directly_p. */
2922 cgraph_not_only_called_directly_p_1 (cgraph_node *node, void *)
2924 return !node->only_called_directly_or_aliased_p ();
2927 /* Return true when function cgraph_node and all its aliases are only called
2929 i.e. it is not externally visible, address was not taken and
2930 it is not used in any other non-standard way. */
2933 cgraph_node::only_called_directly_p (void)
2935 gcc_assert (ultimate_alias_target () == this);
2936 return !call_for_symbol_and_aliases (cgraph_not_only_called_directly_p_1,
2941 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
2944 collect_callers_of_node_1 (cgraph_node *node, void *data)
2946 vec<cgraph_edge *> *redirect_callers = (vec<cgraph_edge *> *)data;
2948 enum availability avail;
2949 node->ultimate_alias_target (&avail);
2951 if (avail > AVAIL_INTERPOSABLE)
2952 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
2953 if (!cs->indirect_inlining_edge
2954 && !cs->caller->thunk.thunk_p)
2955 redirect_callers->safe_push (cs);
2959 /* Collect all callers of cgraph_node and its aliases that are known to lead to
2960 cgraph_node (i.e. are not overwritable). */
2963 cgraph_node::collect_callers (void)
2965 vec<cgraph_edge *> redirect_callers = vNULL;
2966 call_for_symbol_thunks_and_aliases (collect_callers_of_node_1,
2967 &redirect_callers, false);
2968 return redirect_callers;
2972 /* Return TRUE if NODE2 a clone of NODE or is equivalent to it. Return
2973 optimistically true if this cannot be determined. */
2976 clone_of_p (cgraph_node *node, cgraph_node *node2)
2978 node = node->ultimate_alias_target ();
2979 node2 = node2->ultimate_alias_target ();
2981 if (node2->clone_of == node
2982 || node2->former_clone_of == node->decl)
2985 if (!node->thunk.thunk_p && !node->former_thunk_p ())
2987 while (node2 && node->decl != node2->decl)
2988 node2 = node2->clone_of;
2989 return node2 != NULL;
2992 /* There are no virtual clones of thunks so check former_clone_of or if we
2993 might have skipped thunks because this adjustments are no longer
2995 while (node->thunk.thunk_p || node->former_thunk_p ())
2997 if (!node->thunk.this_adjusting)
2999 /* In case of instrumented expanded thunks, which can have multiple calls
3000 in them, we do not know how to continue and just have to be
3002 if (node->callees->next_callee)
3004 node = node->callees->callee->ultimate_alias_target ();
3006 if (!node2->clone.args_to_skip
3007 || !bitmap_bit_p (node2->clone.args_to_skip, 0))
3009 if (node2->former_clone_of == node->decl)
3012 cgraph_node *n2 = node2;
3013 while (n2 && node->decl != n2->decl)
3022 /* Verify edge count and frequency. */
3025 cgraph_edge::verify_count ()
3027 bool error_found = false;
3028 if (!count.verify ())
3030 error ("caller edge count invalid");
3036 /* Switch to THIS_CFUN if needed and print STMT to stderr. */
3038 cgraph_debug_gimple_stmt (function *this_cfun, gimple *stmt)
3040 bool fndecl_was_null = false;
3041 /* debug_gimple_stmt needs correct cfun */
3042 if (cfun != this_cfun)
3043 set_cfun (this_cfun);
3044 /* ...and an actual current_function_decl */
3045 if (!current_function_decl)
3047 current_function_decl = this_cfun->decl;
3048 fndecl_was_null = true;
3050 debug_gimple_stmt (stmt);
3051 if (fndecl_was_null)
3052 current_function_decl = NULL;
3055 /* Verify that call graph edge corresponds to DECL from the associated
3056 statement. Return true if the verification should fail. */
3059 cgraph_edge::verify_corresponds_to_fndecl (tree decl)
3063 if (!decl || callee->global.inlined_to)
3065 if (symtab->state == LTO_STREAMING)
3067 node = cgraph_node::get (decl);
3069 /* We do not know if a node from a different partition is an alias or what it
3070 aliases and therefore cannot do the former_clone_of check reliably. When
3071 body_removed is set, we have lost all information about what was alias or
3072 thunk of and also cannot proceed. */
3074 || node->body_removed
3075 || node->in_other_partition
3076 || callee->icf_merged
3077 || callee->in_other_partition)
3080 node = node->ultimate_alias_target ();
3082 /* Optimizers can redirect unreachable calls or calls triggering undefined
3083 behavior to builtin_unreachable. */
3085 if (fndecl_built_in_p (callee->decl, BUILT_IN_UNREACHABLE))
3088 if (callee->former_clone_of != node->decl
3089 && (node != callee->ultimate_alias_target ())
3090 && !clone_of_p (node, callee))
3096 /* Disable warnings about missing quoting in GCC diagnostics for
3097 the verification errors. Their format strings don't follow GCC
3098 diagnostic conventions and the calls are ultimately followed by
3099 one to internal_error. */
3101 # pragma GCC diagnostic push
3102 # pragma GCC diagnostic ignored "-Wformat-diag"
3105 /* Verify cgraph nodes of given cgraph node. */
3107 cgraph_node::verify_node (void)
3110 function *this_cfun = DECL_STRUCT_FUNCTION (decl);
3111 basic_block this_block;
3112 gimple_stmt_iterator gsi;
3113 bool error_found = false;
3118 timevar_push (TV_CGRAPH_VERIFY);
3119 error_found |= verify_base ();
3120 for (e = callees; e; e = e->next_callee)
3123 error ("aux field set for edge %s->%s",
3124 identifier_to_locale (e->caller->name ()),
3125 identifier_to_locale (e->callee->name ()));
3128 if (!count.verify ())
3130 error ("cgraph count invalid");
3133 if (global.inlined_to && same_comdat_group)
3135 error ("inline clone in same comdat group list");
3138 if (!definition && !in_other_partition && local.local)
3140 error ("local symbols must be defined");
3143 if (global.inlined_to && externally_visible)
3145 error ("externally visible inline clone");
3148 if (global.inlined_to && address_taken)
3150 error ("inline clone with address taken");
3153 if (global.inlined_to && force_output)
3155 error ("inline clone is forced to output");
3158 for (e = indirect_calls; e; e = e->next_callee)
3162 error ("aux field set for indirect edge from %s",
3163 identifier_to_locale (e->caller->name ()));
3166 if (!e->indirect_unknown_callee
3167 || !e->indirect_info)
3169 error ("An indirect edge from %s is not marked as indirect or has "
3170 "associated indirect_info, the corresponding statement is: ",
3171 identifier_to_locale (e->caller->name ()));
3172 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3176 bool check_comdat = comdat_local_p ();
3177 for (e = callers; e; e = e->next_caller)
3179 if (e->verify_count ())
3182 && !in_same_comdat_group_p (e->caller))
3184 error ("comdat-local function called by %s outside its comdat",
3185 identifier_to_locale (e->caller->name ()));
3188 if (!e->inline_failed)
3190 if (global.inlined_to
3191 != (e->caller->global.inlined_to
3192 ? e->caller->global.inlined_to : e->caller))
3194 error ("inlined_to pointer is wrong");
3197 if (callers->next_caller)
3199 error ("multiple inline callers");
3204 if (global.inlined_to)
3206 error ("inlined_to pointer set for noninline callers");
3210 for (e = callees; e; e = e->next_callee)
3212 if (e->verify_count ())
3214 if (gimple_has_body_p (e->caller->decl)
3215 && !e->caller->global.inlined_to
3217 /* Optimized out calls are redirected to __builtin_unreachable. */
3218 && (e->count.nonzero_p ()
3219 || ! e->callee->decl
3220 || !fndecl_built_in_p (e->callee->decl, BUILT_IN_UNREACHABLE))
3222 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3223 && (!e->count.ipa_p ()
3224 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3226 error ("caller edge count does not match BB count");
3227 fprintf (stderr, "edge count: ");
3228 e->count.dump (stderr);
3229 fprintf (stderr, "\n bb count: ");
3230 gimple_bb (e->call_stmt)->count.dump (stderr);
3231 fprintf (stderr, "\n");
3235 for (e = indirect_calls; e; e = e->next_callee)
3237 if (e->verify_count ())
3239 if (gimple_has_body_p (e->caller->decl)
3240 && !e->caller->global.inlined_to
3242 && e->count.ipa_p ()
3244 == ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (decl))->count
3245 && (!e->count.ipa_p ()
3246 && e->count.differs_from_p (gimple_bb (e->call_stmt)->count)))
3248 error ("indirect call count does not match BB count");
3249 fprintf (stderr, "edge count: ");
3250 e->count.dump (stderr);
3251 fprintf (stderr, "\n bb count: ");
3252 gimple_bb (e->call_stmt)->count.dump (stderr);
3253 fprintf (stderr, "\n");
3257 if (!callers && global.inlined_to)
3259 error ("inlined_to pointer is set but no predecessors found");
3262 if (global.inlined_to == this)
3264 error ("inlined_to pointer refers to itself");
3270 cgraph_node *first_clone = clone_of->clones;
3271 if (first_clone != this)
3273 if (prev_sibling_clone->clone_of != clone_of)
3275 error ("cgraph_node has wrong clone_of");
3283 for (n = clones; n; n = n->next_sibling_clone)
3284 if (n->clone_of != this)
3288 error ("cgraph_node has wrong clone list");
3292 if ((prev_sibling_clone || next_sibling_clone) && !clone_of)
3294 error ("cgraph_node is in clone list but it is not clone");
3297 if (!prev_sibling_clone && clone_of && clone_of->clones != this)
3299 error ("cgraph_node has wrong prev_clone pointer");
3302 if (prev_sibling_clone && prev_sibling_clone->next_sibling_clone != this)
3304 error ("double linked list of clones corrupted");
3308 if (analyzed && alias)
3310 bool ref_found = false;
3312 ipa_ref *ref = NULL;
3316 error ("Alias has call edges");
3319 for (i = 0; iterate_reference (i, ref); i++)
3320 if (ref->use != IPA_REF_ALIAS)
3322 error ("Alias has non-alias reference");
3327 error ("Alias has more than one alias reference");
3334 error ("Analyzed alias has no reference");
3339 if (analyzed && thunk.thunk_p)
3343 error ("No edge out of thunk node");
3346 else if (callees->next_callee)
3348 error ("More than one edge out of thunk node");
3351 if (gimple_has_body_p (decl) && !global.inlined_to)
3353 error ("Thunk is not supposed to have body");
3357 else if (analyzed && gimple_has_body_p (decl)
3358 && !TREE_ASM_WRITTEN (decl)
3359 && (!DECL_EXTERNAL (decl) || global.inlined_to)
3364 hash_set<gimple *> stmts;
3366 ipa_ref *ref = NULL;
3368 /* Reach the trees by walking over the CFG, and note the
3369 enclosing basic-blocks in the call edges. */
3370 FOR_EACH_BB_FN (this_block, this_cfun)
3372 for (gsi = gsi_start_phis (this_block);
3373 !gsi_end_p (gsi); gsi_next (&gsi))
3374 stmts.add (gsi_stmt (gsi));
3375 for (gsi = gsi_start_bb (this_block);
3379 gimple *stmt = gsi_stmt (gsi);
3381 if (is_gimple_call (stmt))
3383 cgraph_edge *e = get_edge (stmt);
3384 tree decl = gimple_call_fndecl (stmt);
3389 error ("shared call_stmt:");
3390 cgraph_debug_gimple_stmt (this_cfun, stmt);
3393 if (!e->indirect_unknown_callee)
3395 if (e->verify_corresponds_to_fndecl (decl))
3397 error ("edge points to wrong declaration:");
3398 debug_tree (e->callee->decl);
3399 fprintf (stderr," Instead of:");
3406 error ("an indirect edge with unknown callee "
3407 "corresponding to a call_stmt with "
3408 "a known declaration:");
3410 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3416 error ("missing callgraph edge for call stmt:");
3417 cgraph_debug_gimple_stmt (this_cfun, stmt);
3423 for (i = 0; iterate_reference (i, ref); i++)
3424 if (ref->stmt && !stmts.contains (ref->stmt))
3426 error ("reference to dead statement");
3427 cgraph_debug_gimple_stmt (this_cfun, ref->stmt);
3432 /* No CFG available?! */
3435 for (e = callees; e; e = e->next_callee)
3439 error ("edge %s->%s has no corresponding call_stmt",
3440 identifier_to_locale (e->caller->name ()),
3441 identifier_to_locale (e->callee->name ()));
3442 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3447 for (e = indirect_calls; e; e = e->next_callee)
3449 if (!e->aux && !e->speculative)
3451 error ("an indirect edge from %s has no corresponding call_stmt",
3452 identifier_to_locale (e->caller->name ()));
3453 cgraph_debug_gimple_stmt (this_cfun, e->call_stmt);
3462 internal_error ("verify_cgraph_node failed");
3464 timevar_pop (TV_CGRAPH_VERIFY);
3467 /* Verify whole cgraph structure. */
3469 cgraph_node::verify_cgraph_nodes (void)
3476 FOR_EACH_FUNCTION (node)
3481 # pragma GCC diagnostic pop
3484 /* Walk the alias chain to return the function cgraph_node is alias of.
3485 Walk through thunks, too.
3486 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3487 When REF is non-NULL, assume that reference happens in symbol REF
3488 when determining the availability. */
3491 cgraph_node::function_symbol (enum availability *availability,
3492 struct symtab_node *ref)
3494 cgraph_node *node = ultimate_alias_target (availability, ref);
3496 while (node->thunk.thunk_p)
3499 node = node->callees->callee;
3502 enum availability a;
3503 a = node->get_availability (ref);
3504 if (a < *availability)
3507 node = node->ultimate_alias_target (availability, ref);
3512 /* Walk the alias chain to return the function cgraph_node is alias of.
3513 Walk through non virtual thunks, too. Thus we return either a function
3514 or a virtual thunk node.
3515 When AVAILABILITY is non-NULL, get minimal availability in the chain.
3516 When REF is non-NULL, assume that reference happens in symbol REF
3517 when determining the availability. */
3520 cgraph_node::function_or_virtual_thunk_symbol
3521 (enum availability *availability,
3522 struct symtab_node *ref)
3524 cgraph_node *node = ultimate_alias_target (availability, ref);
3526 while (node->thunk.thunk_p && !node->thunk.virtual_offset_p)
3529 node = node->callees->callee;
3532 enum availability a;
3533 a = node->get_availability (ref);
3534 if (a < *availability)
3537 node = node->ultimate_alias_target (availability, ref);
3542 /* When doing LTO, read cgraph_node's body from disk if it is not already
3546 cgraph_node::get_untransformed_body (void)
3548 lto_file_decl_data *file_data;
3549 const char *data, *name;
3551 tree decl = this->decl;
3553 /* Check if body is already there. Either we have gimple body or
3554 the function is thunk and in that case we set DECL_ARGUMENTS. */
3555 if (DECL_ARGUMENTS (decl) || gimple_has_body_p (decl))
3558 gcc_assert (in_lto_p && !DECL_RESULT (decl));
3560 timevar_push (TV_IPA_LTO_GIMPLE_IN);
3562 file_data = lto_file_data;
3563 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
3565 /* We may have renamed the declaration, e.g., a static function. */
3566 name = lto_get_decl_name_mapping (file_data, name);
3567 struct lto_in_decl_state *decl_state
3568 = lto_get_function_in_decl_state (file_data, decl);
3570 data = lto_get_section_data (file_data, LTO_section_function_body,
3571 name, &len, decl_state->compressed);
3573 fatal_error (input_location, "%s: section %s is missing",
3574 file_data->file_name,
3577 gcc_assert (DECL_STRUCT_FUNCTION (decl) == NULL);
3580 fprintf (stderr, " in:%s", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)));
3581 lto_input_function_body (file_data, this, data);
3582 lto_stats.num_function_bodies++;
3583 lto_free_section_data (file_data, LTO_section_function_body, name,
3584 data, len, decl_state->compressed);
3585 lto_free_function_in_decl_state_for_node (this);
3586 /* Keep lto file data so ipa-inline-analysis knows about cross module
3589 timevar_pop (TV_IPA_LTO_GIMPLE_IN);
3594 /* Prepare function body. When doing LTO, read cgraph_node's body from disk
3595 if it is not already present. When some IPA transformations are scheduled,
3599 cgraph_node::get_body (void)
3603 updated = get_untransformed_body ();
3605 /* Getting transformed body makes no sense for inline clones;
3606 we should never use this on real clones because they are materialized
3608 TODO: Materializing clones here will likely lead to smaller LTRANS
3610 gcc_assert (!global.inlined_to && !clone_of);
3611 if (ipa_transforms_to_apply.exists ())
3613 opt_pass *saved_current_pass = current_pass;
3614 FILE *saved_dump_file = dump_file;
3615 const char *saved_dump_file_name = dump_file_name;
3616 dump_flags_t saved_dump_flags = dump_flags;
3617 dump_file_name = NULL;
3618 set_dump_file (NULL);
3620 push_cfun (DECL_STRUCT_FUNCTION (decl));
3621 execute_all_ipa_transforms ();
3622 cgraph_edge::rebuild_edges ();
3623 free_dominance_info (CDI_DOMINATORS);
3624 free_dominance_info (CDI_POST_DOMINATORS);
3628 current_pass = saved_current_pass;
3629 set_dump_file (saved_dump_file);
3630 dump_file_name = saved_dump_file_name;
3631 dump_flags = saved_dump_flags;
3636 /* Return the DECL_STRUCT_FUNCTION of the function. */
3639 cgraph_node::get_fun (void)
3641 cgraph_node *node = this;
3642 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
3644 while (!fun && node->clone_of)
3646 node = node->clone_of;
3647 fun = DECL_STRUCT_FUNCTION (node->decl);
3653 /* Verify if the type of the argument matches that of the function
3654 declaration. If we cannot verify this or there is a mismatch,
3658 gimple_check_call_args (gimple *stmt, tree fndecl, bool args_count_match)
3661 unsigned int i, nargs;
3663 /* Calls to internal functions always match their signature. */
3664 if (gimple_call_internal_p (stmt))
3667 nargs = gimple_call_num_args (stmt);
3669 /* Get argument types for verification. */
3671 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3673 parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
3675 /* Verify if the type of the argument matches that of the function
3676 declaration. If we cannot verify this or there is a mismatch,
3678 if (fndecl && DECL_ARGUMENTS (fndecl))
3680 for (i = 0, p = DECL_ARGUMENTS (fndecl);
3682 i++, p = DECL_CHAIN (p))
3685 /* We cannot distinguish a varargs function from the case
3686 of excess parameters, still deferring the inlining decision
3687 to the callee is possible. */
3690 arg = gimple_call_arg (stmt, i);
3691 if (p == error_mark_node
3692 || DECL_ARG_TYPE (p) == error_mark_node
3693 || arg == error_mark_node
3694 || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
3695 && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
3698 if (args_count_match && p)
3703 for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
3706 /* If this is a varargs function defer inlining decision
3710 arg = gimple_call_arg (stmt, i);
3711 if (TREE_VALUE (p) == error_mark_node
3712 || arg == error_mark_node
3713 || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
3714 || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
3715 && !fold_convertible_p (TREE_VALUE (p), arg)))
3727 /* Verify if the type of the argument and lhs of CALL_STMT matches
3728 that of the function declaration CALLEE. If ARGS_COUNT_MATCH is
3729 true, the arg count needs to be the same.
3730 If we cannot verify this or there is a mismatch, return false. */
3733 gimple_check_call_matching_types (gimple *call_stmt, tree callee,
3734 bool args_count_match)
3738 if ((DECL_RESULT (callee)
3739 && !DECL_BY_REFERENCE (DECL_RESULT (callee))
3740 && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
3741 && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
3743 && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
3744 || !gimple_check_call_args (call_stmt, callee, args_count_match))
3749 /* Reset all state within cgraph.c so that we can rerun the compiler
3750 within the same process. For use by toplev::finalize. */
3753 cgraph_c_finalize (void)
3757 x_cgraph_nodes_queue = NULL;
3759 cgraph_fnver_htab = NULL;
3760 version_info_node = NULL;
3763 /* A wroker for call_for_symbol_and_aliases. */
3766 cgraph_node::call_for_symbol_and_aliases_1 (bool (*callback) (cgraph_node *,
3769 bool include_overwritable)
3772 FOR_EACH_ALIAS (this, ref)
3774 cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
3775 if (include_overwritable
3776 || alias->get_availability () > AVAIL_INTERPOSABLE)
3777 if (alias->call_for_symbol_and_aliases (callback, data,
3778 include_overwritable))
3784 /* Return true if NODE has thunk. */
3787 cgraph_node::has_thunk_p (cgraph_node *node, void *)
3789 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
3790 if (e->caller->thunk.thunk_p)
3795 /* Expected frequency of executions within the function. */
3798 cgraph_edge::sreal_frequency ()
3800 return count.to_sreal_scale (caller->global.inlined_to
3801 ? caller->global.inlined_to->count
3806 /* During LTO stream in this can be used to check whether call can possibly
3807 be internal to the current translation unit. */
3810 cgraph_edge::possibly_call_in_translation_unit_p (void)
3812 gcc_checking_assert (in_lto_p && caller->prevailing_p ());
3814 /* While incremental linking we may end up getting function body later. */
3815 if (flag_incremental_link == INCREMENTAL_LINK_LTO)
3818 /* We may be smarter here and avoid stremaing in indirect calls we can't
3819 track, but that would require arranging stremaing the indirect call
3824 /* If calle is local to the original translation unit, it will be defined. */
3825 if (!TREE_PUBLIC (callee->decl) && !DECL_EXTERNAL (callee->decl))
3828 /* Otherwise we need to lookup prevailing symbol (symbol table is not merged,
3829 yet) and see if it is a definition. In fact we may also resolve aliases,
3830 but that is probably not too important. */
3831 symtab_node *node = callee;
3832 for (int n = 10; node->previous_sharing_asm_name && n ; n--)
3833 node = node->previous_sharing_asm_name;
3834 if (node->previous_sharing_asm_name)
3835 node = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (callee->decl));
3836 gcc_assert (TREE_PUBLIC (node->decl));
3837 return node->get_availability () >= AVAIL_AVAILABLE;
3840 /* A stashed copy of "symtab" for use by selftest::symbol_table_test.
3841 This needs to be a global so that it can be a GC root, and thus
3842 prevent the stashed copy from being garbage-collected if the GC runs
3843 during a symbol_table_test. */
3845 symbol_table *saved_symtab;
3849 namespace selftest {
3851 /* class selftest::symbol_table_test. */
3853 /* Constructor. Store the old value of symtab, and create a new one. */
3855 symbol_table_test::symbol_table_test ()
3857 gcc_assert (saved_symtab == NULL);
3858 saved_symtab = symtab;
3859 symtab = new (ggc_cleared_alloc <symbol_table> ()) symbol_table ();
3862 /* Destructor. Restore the old value of symtab. */
3864 symbol_table_test::~symbol_table_test ()
3866 gcc_assert (saved_symtab != NULL);
3867 symtab = saved_symtab;
3868 saved_symtab = NULL;
3871 /* Verify that symbol_table_test works. */
3874 test_symbol_table_test ()
3876 /* Simulate running two selftests involving symbol tables. */
3877 for (int i = 0; i < 2; i++)
3879 symbol_table_test stt;
3880 tree test_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
3881 get_identifier ("test_decl"),
3882 build_function_type_list (void_type_node,
3884 cgraph_node *node = cgraph_node::get_create (test_decl);
3887 /* Verify that the node has order 0 on both iterations,
3888 and thus that nodes have predictable dump names in selftests. */
3889 ASSERT_EQ (node->order, 0);
3890 ASSERT_STREQ (node->dump_name (), "test_decl/0");
3894 /* Run all of the selftests within this file. */
3899 test_symbol_table_test ();
3902 } // namespace selftest
3904 #endif /* CHECKING_P */
3906 #include "gt-cgraph.h"