1 /* Callgraph handling code.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains basic routines manipulating call graph
26 The call-graph is data structure designed for intra-procedural optimization
27 but it is also used in non-unit-at-a-time compilation to allow easier code
30 The call-graph consist of nodes and edges represented via linked lists.
31 Each function (external or not) corresponds to the unique node.
33 The mapping from declarations to call-graph nodes is done using hash table
34 based on DECL_UID. The call-graph nodes are created lazily using
35 cgraph_node function when called for unknown declaration.
37 The callgraph at the moment does not represent all indirect calls or calls
38 from other compilation units. Flag NEEDED is set for each node that may be
39 accessed in such an invisible way and it shall be considered an entry point
42 On the other hand, the callgraph currently does contain some edges for
43 indirect calls with unknown callees which can be accessed through
44 indirect_calls field of a node. It should be noted however that at the
45 moment only calls which are potential candidates for indirect inlining are
48 Interprocedural information:
50 Callgraph is place to store data needed for interprocedural optimization.
51 All data structures are divided into three components: local_info that
52 is produced while analyzing the function, global_info that is result
53 of global walking of the callgraph on the end of compilation and
54 rtl_info used by RTL backend to propagate data from already compiled
55 functions to their callers.
57 Moreover, each node has a uid which can be used to keep information in
58 on-the-side arrays. UIDs are reused and therefore reasonably dense.
62 The function inlining information is decided in advance and maintained
63 in the callgraph as so called inline plan.
64 For each inlined call, the callee's node is cloned to represent the
65 new function copy produced by inliner.
66 Each inlined call gets a unique corresponding clone node of the callee
67 and the data structure is updated while inlining is performed, so
68 the clones are eliminated and their callee edges redirected to the
71 Each edge has "inline_failed" field. When the field is set to NULL,
72 the call will be inlined. When it is non-NULL it contains a reason
73 why inlining wasn't performed. */
77 #include "coretypes.h"
80 #include "tree-inline.h"
81 #include "langhooks.h"
88 #include "basic-block.h"
93 #include "tree-dump.h"
94 #include "tree-flow.h"
95 #include "value-prof.h"
97 #include "diagnostic-core.h"
99 #include "ipa-utils.h"
100 #include "lto-streamer.h"
101 #include "ipa-inline.h"
104 const char * const ld_plugin_symbol_resolution_names[]=
109 "prevailing_def_ironly",
115 "prevailing_def_ironly_exp"
118 static void cgraph_node_remove_callers (struct cgraph_node *node);
119 static inline void cgraph_edge_remove_caller (struct cgraph_edge *e);
120 static inline void cgraph_edge_remove_callee (struct cgraph_edge *e);
122 /* Hash table used to convert declarations into nodes. */
123 static GTY((param_is (struct cgraph_node))) htab_t cgraph_hash;
124 /* Hash table used to convert assembler names into nodes. */
125 static GTY((param_is (struct cgraph_node))) htab_t assembler_name_hash;
127 /* The linked list of cgraph nodes. */
128 struct cgraph_node *cgraph_nodes;
130 /* Queue of cgraph nodes scheduled to be lowered. */
131 struct cgraph_node *cgraph_nodes_queue;
133 /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
134 secondary queue used during optimization to accommodate passes that
135 may generate new functions that need to be optimized and expanded. */
136 struct cgraph_node *cgraph_new_nodes;
138 /* Number of nodes in existence. */
141 /* Maximal uid used in cgraph nodes. */
144 /* Maximal uid used in cgraph edges. */
145 int cgraph_edge_max_uid;
147 /* Set when whole unit has been analyzed so we can access global info. */
148 bool cgraph_global_info_ready = false;
150 /* What state callgraph is in right now. */
151 enum cgraph_state cgraph_state = CGRAPH_STATE_CONSTRUCTION;
153 /* Set when the cgraph is fully build and the basic flags are computed. */
154 bool cgraph_function_flags_ready = false;
156 /* Linked list of cgraph asm nodes. */
157 struct cgraph_asm_node *cgraph_asm_nodes;
159 /* Last node in cgraph_asm_nodes. */
160 static GTY(()) struct cgraph_asm_node *cgraph_asm_last_node;
162 /* The order index of the next cgraph node to be created. This is
163 used so that we can sort the cgraph nodes in order by when we saw
164 them, to support -fno-toplevel-reorder. */
167 /* List of hooks triggered on cgraph_edge events. */
168 struct cgraph_edge_hook_list {
169 cgraph_edge_hook hook;
171 struct cgraph_edge_hook_list *next;
174 /* List of hooks triggered on cgraph_node events. */
175 struct cgraph_node_hook_list {
176 cgraph_node_hook hook;
178 struct cgraph_node_hook_list *next;
181 /* List of hooks triggered on events involving two cgraph_edges. */
182 struct cgraph_2edge_hook_list {
183 cgraph_2edge_hook hook;
185 struct cgraph_2edge_hook_list *next;
188 /* List of hooks triggered on events involving two cgraph_nodes. */
189 struct cgraph_2node_hook_list {
190 cgraph_2node_hook hook;
192 struct cgraph_2node_hook_list *next;
195 /* List of hooks triggered when an edge is removed. */
196 struct cgraph_edge_hook_list *first_cgraph_edge_removal_hook;
197 /* List of hooks triggered when a node is removed. */
198 struct cgraph_node_hook_list *first_cgraph_node_removal_hook;
199 /* List of hooks triggered when an edge is duplicated. */
200 struct cgraph_2edge_hook_list *first_cgraph_edge_duplicated_hook;
201 /* List of hooks triggered when a node is duplicated. */
202 struct cgraph_2node_hook_list *first_cgraph_node_duplicated_hook;
203 /* List of hooks triggered when an function is inserted. */
204 struct cgraph_node_hook_list *first_cgraph_function_insertion_hook;
206 /* Head of a linked list of unused (freed) call graph nodes.
207 Do not GTY((delete)) this list so UIDs gets reliably recycled. */
208 static GTY(()) struct cgraph_node *free_nodes;
209 /* Head of a linked list of unused (freed) call graph edges.
210 Do not GTY((delete)) this list so UIDs gets reliably recycled. */
211 static GTY(()) struct cgraph_edge *free_edges;
213 /* Did procss_same_body_aliases run? */
214 bool same_body_aliases_done;
216 /* Macros to access the next item in the list of free cgraph nodes and
218 #define NEXT_FREE_NODE(NODE) (NODE)->next
219 #define NEXT_FREE_EDGE(EDGE) (EDGE)->prev_caller
221 /* Register HOOK to be called with DATA on each removed edge. */
222 struct cgraph_edge_hook_list *
223 cgraph_add_edge_removal_hook (cgraph_edge_hook hook, void *data)
225 struct cgraph_edge_hook_list *entry;
226 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
228 entry = (struct cgraph_edge_hook_list *) xmalloc (sizeof (*entry));
238 /* Remove ENTRY from the list of hooks called on removing edges. */
240 cgraph_remove_edge_removal_hook (struct cgraph_edge_hook_list *entry)
242 struct cgraph_edge_hook_list **ptr = &first_cgraph_edge_removal_hook;
244 while (*ptr != entry)
250 /* Call all edge removal hooks. */
252 cgraph_call_edge_removal_hooks (struct cgraph_edge *e)
254 struct cgraph_edge_hook_list *entry = first_cgraph_edge_removal_hook;
257 entry->hook (e, entry->data);
262 /* Register HOOK to be called with DATA on each removed node. */
263 struct cgraph_node_hook_list *
264 cgraph_add_node_removal_hook (cgraph_node_hook hook, void *data)
266 struct cgraph_node_hook_list *entry;
267 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
269 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
279 /* Remove ENTRY from the list of hooks called on removing nodes. */
281 cgraph_remove_node_removal_hook (struct cgraph_node_hook_list *entry)
283 struct cgraph_node_hook_list **ptr = &first_cgraph_node_removal_hook;
285 while (*ptr != entry)
291 /* Call all node removal hooks. */
293 cgraph_call_node_removal_hooks (struct cgraph_node *node)
295 struct cgraph_node_hook_list *entry = first_cgraph_node_removal_hook;
298 entry->hook (node, entry->data);
303 /* Register HOOK to be called with DATA on each inserted node. */
304 struct cgraph_node_hook_list *
305 cgraph_add_function_insertion_hook (cgraph_node_hook hook, void *data)
307 struct cgraph_node_hook_list *entry;
308 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
310 entry = (struct cgraph_node_hook_list *) xmalloc (sizeof (*entry));
320 /* Remove ENTRY from the list of hooks called on inserted nodes. */
322 cgraph_remove_function_insertion_hook (struct cgraph_node_hook_list *entry)
324 struct cgraph_node_hook_list **ptr = &first_cgraph_function_insertion_hook;
326 while (*ptr != entry)
332 /* Call all node insertion hooks. */
334 cgraph_call_function_insertion_hooks (struct cgraph_node *node)
336 struct cgraph_node_hook_list *entry = first_cgraph_function_insertion_hook;
339 entry->hook (node, entry->data);
344 /* Register HOOK to be called with DATA on each duplicated edge. */
345 struct cgraph_2edge_hook_list *
346 cgraph_add_edge_duplication_hook (cgraph_2edge_hook hook, void *data)
348 struct cgraph_2edge_hook_list *entry;
349 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
351 entry = (struct cgraph_2edge_hook_list *) xmalloc (sizeof (*entry));
361 /* Remove ENTRY from the list of hooks called on duplicating edges. */
363 cgraph_remove_edge_duplication_hook (struct cgraph_2edge_hook_list *entry)
365 struct cgraph_2edge_hook_list **ptr = &first_cgraph_edge_duplicated_hook;
367 while (*ptr != entry)
373 /* Call all edge duplication hooks. */
375 cgraph_call_edge_duplication_hooks (struct cgraph_edge *cs1,
376 struct cgraph_edge *cs2)
378 struct cgraph_2edge_hook_list *entry = first_cgraph_edge_duplicated_hook;
381 entry->hook (cs1, cs2, entry->data);
386 /* Register HOOK to be called with DATA on each duplicated node. */
387 struct cgraph_2node_hook_list *
388 cgraph_add_node_duplication_hook (cgraph_2node_hook hook, void *data)
390 struct cgraph_2node_hook_list *entry;
391 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
393 entry = (struct cgraph_2node_hook_list *) xmalloc (sizeof (*entry));
403 /* Remove ENTRY from the list of hooks called on duplicating nodes. */
405 cgraph_remove_node_duplication_hook (struct cgraph_2node_hook_list *entry)
407 struct cgraph_2node_hook_list **ptr = &first_cgraph_node_duplicated_hook;
409 while (*ptr != entry)
415 /* Call all node duplication hooks. */
417 cgraph_call_node_duplication_hooks (struct cgraph_node *node1,
418 struct cgraph_node *node2)
420 struct cgraph_2node_hook_list *entry = first_cgraph_node_duplicated_hook;
423 entry->hook (node1, node2, entry->data);
428 /* Returns a hash code for P. */
431 hash_node (const void *p)
433 const struct cgraph_node *n = (const struct cgraph_node *) p;
434 return (hashval_t) DECL_UID (n->decl);
438 /* Returns nonzero if P1 and P2 are equal. */
441 eq_node (const void *p1, const void *p2)
443 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
444 const struct cgraph_node *n2 = (const struct cgraph_node *) p2;
445 return DECL_UID (n1->decl) == DECL_UID (n2->decl);
448 /* Allocate new callgraph node. */
450 static inline struct cgraph_node *
451 cgraph_allocate_node (void)
453 struct cgraph_node *node;
458 free_nodes = NEXT_FREE_NODE (node);
462 node = ggc_alloc_cleared_cgraph_node ();
463 node->uid = cgraph_max_uid++;
469 /* Allocate new callgraph node and insert it into basic data structures. */
471 static struct cgraph_node *
472 cgraph_create_node_1 (void)
474 struct cgraph_node *node = cgraph_allocate_node ();
476 node->next = cgraph_nodes;
477 node->order = cgraph_order++;
479 cgraph_nodes->previous = node;
480 node->previous = NULL;
481 node->frequency = NODE_FREQUENCY_NORMAL;
482 node->count_materialization_scale = REG_BR_PROB_BASE;
483 ipa_empty_ref_list (&node->ref_list);
489 /* Return cgraph node assigned to DECL. Create new one when needed. */
492 cgraph_create_node (tree decl)
494 struct cgraph_node key, *node, **slot;
496 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
499 cgraph_hash = htab_create_ggc (10, hash_node, eq_node, NULL);
502 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key, INSERT);
505 node = cgraph_create_node_1 ();
508 if (DECL_CONTEXT (decl) && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL)
510 node->origin = cgraph_get_create_node (DECL_CONTEXT (decl));
511 node->next_nested = node->origin->nested;
512 node->origin->nested = node;
514 if (assembler_name_hash)
517 tree name = DECL_ASSEMBLER_NAME (decl);
519 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
520 decl_assembler_name_hash (name),
522 /* We can have multiple declarations with same assembler name. For C++
523 it is __builtin_strlen and strlen, for instance. Do we need to
524 record them all? Original implementation marked just first one
525 so lets hope for the best. */
532 /* Try to find a call graph node for declaration DECL and if it does not exist,
536 cgraph_get_create_node (tree decl)
538 struct cgraph_node *node;
540 node = cgraph_get_node (decl);
544 return cgraph_create_node (decl);
547 /* Mark ALIAS as an alias to DECL. DECL_NODE is cgraph node representing
548 the function body is associated with (not neccesarily cgraph_node (DECL). */
551 cgraph_create_function_alias (tree alias, tree decl)
553 struct cgraph_node *alias_node;
555 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
556 gcc_assert (TREE_CODE (alias) == FUNCTION_DECL);
557 alias_node = cgraph_get_create_node (alias);
558 gcc_assert (!alias_node->local.finalized);
559 alias_node->thunk.alias = decl;
560 alias_node->local.finalized = true;
561 alias_node->alias = 1;
563 if ((TREE_PUBLIC (alias) && !DECL_COMDAT (alias) && !DECL_EXTERNAL (alias))
564 || (DECL_VIRTUAL_P (alias)
565 && (DECL_COMDAT (alias) || DECL_EXTERNAL (alias))))
566 cgraph_mark_reachable_node (alias_node);
570 /* Attempt to mark ALIAS as an alias to DECL. Return alias node if successful
572 Same body aliases are output whenever the body of DECL is output,
573 and cgraph_get_node (ALIAS) transparently returns cgraph_get_node (DECL). */
576 cgraph_same_body_alias (struct cgraph_node *decl_node ATTRIBUTE_UNUSED, tree alias, tree decl)
578 struct cgraph_node *n;
579 #ifndef ASM_OUTPUT_DEF
580 /* If aliases aren't supported by the assembler, fail. */
583 /* Langhooks can create same body aliases of symbols not defined.
584 Those are useless. Drop them on the floor. */
585 if (cgraph_global_info_ready)
588 n = cgraph_create_function_alias (alias, decl);
589 n->same_body_alias = true;
590 if (same_body_aliases_done)
591 ipa_record_reference (n, NULL, cgraph_get_node (decl), NULL, IPA_REF_ALIAS,
596 /* Add thunk alias into callgraph. The alias declaration is ALIAS and it
597 aliases DECL with an adjustments made into the first parameter.
598 See comments in thunk_adjust for detail on the parameters. */
601 cgraph_add_thunk (struct cgraph_node *decl_node ATTRIBUTE_UNUSED,
602 tree alias, tree decl,
604 HOST_WIDE_INT fixed_offset, HOST_WIDE_INT virtual_value,
608 struct cgraph_node *node;
610 node = cgraph_get_node (alias);
613 gcc_assert (node->local.finalized);
614 gcc_assert (!node->alias);
615 gcc_assert (!node->thunk.thunk_p);
616 cgraph_remove_node (node);
619 node = cgraph_create_node (alias);
620 gcc_checking_assert (!virtual_offset
621 || double_int_equal_p
622 (tree_to_double_int (virtual_offset),
623 shwi_to_double_int (virtual_value)));
624 node->thunk.fixed_offset = fixed_offset;
625 node->thunk.this_adjusting = this_adjusting;
626 node->thunk.virtual_value = virtual_value;
627 node->thunk.virtual_offset_p = virtual_offset != NULL;
628 node->thunk.alias = real_alias;
629 node->thunk.thunk_p = true;
630 node->local.finalized = true;
632 if (cgraph_decide_is_function_needed (node, decl))
633 cgraph_mark_needed_node (node);
635 if ((TREE_PUBLIC (decl) && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
636 || (DECL_VIRTUAL_P (decl)
637 && (DECL_COMDAT (decl) || DECL_EXTERNAL (decl))))
638 cgraph_mark_reachable_node (node);
643 /* Returns the cgraph node assigned to DECL or NULL if no cgraph node
647 cgraph_get_node (const_tree decl)
649 struct cgraph_node key, *node = NULL, **slot;
651 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
656 key.decl = CONST_CAST2 (tree, const_tree, decl);
658 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, &key,
666 /* Insert already constructed node into hashtable. */
669 cgraph_insert_node_to_hashtable (struct cgraph_node *node)
671 struct cgraph_node **slot;
673 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, node, INSERT);
679 /* Returns a hash code for P. */
682 hash_node_by_assembler_name (const void *p)
684 const struct cgraph_node *n = (const struct cgraph_node *) p;
685 return (hashval_t) decl_assembler_name_hash (DECL_ASSEMBLER_NAME (n->decl));
688 /* Returns nonzero if P1 and P2 are equal. */
691 eq_assembler_name (const void *p1, const void *p2)
693 const struct cgraph_node *n1 = (const struct cgraph_node *) p1;
694 const_tree name = (const_tree)p2;
695 return (decl_assembler_name_equal (n1->decl, name));
698 /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
699 Return NULL if there's no such node. */
702 cgraph_node_for_asm (tree asmname)
704 struct cgraph_node *node;
707 if (!assembler_name_hash)
709 assembler_name_hash =
710 htab_create_ggc (10, hash_node_by_assembler_name, eq_assembler_name,
712 for (node = cgraph_nodes; node; node = node->next)
713 if (!node->global.inlined_to)
715 tree name = DECL_ASSEMBLER_NAME (node->decl);
716 slot = htab_find_slot_with_hash (assembler_name_hash, name,
717 decl_assembler_name_hash (name),
719 /* We can have multiple declarations with same assembler name. For C++
720 it is __builtin_strlen and strlen, for instance. Do we need to
721 record them all? Original implementation marked just first one
722 so lets hope for the best. */
728 slot = htab_find_slot_with_hash (assembler_name_hash, asmname,
729 decl_assembler_name_hash (asmname),
734 node = (struct cgraph_node *) *slot;
740 /* Returns a hash value for X (which really is a die_struct). */
743 edge_hash (const void *x)
745 return htab_hash_pointer (((const struct cgraph_edge *) x)->call_stmt);
748 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
751 edge_eq (const void *x, const void *y)
753 return ((const struct cgraph_edge *) x)->call_stmt == y;
756 /* Add call graph edge E to call site hash of its caller. */
759 cgraph_add_edge_to_call_site_hash (struct cgraph_edge *e)
762 slot = htab_find_slot_with_hash (e->caller->call_site_hash,
764 htab_hash_pointer (e->call_stmt),
770 /* Return the callgraph edge representing the GIMPLE_CALL statement
774 cgraph_edge (struct cgraph_node *node, gimple call_stmt)
776 struct cgraph_edge *e, *e2;
779 if (node->call_site_hash)
780 return (struct cgraph_edge *)
781 htab_find_with_hash (node->call_site_hash, call_stmt,
782 htab_hash_pointer (call_stmt));
784 /* This loop may turn out to be performance problem. In such case adding
785 hashtables into call nodes with very many edges is probably best
786 solution. It is not good idea to add pointer into CALL_EXPR itself
787 because we want to make possible having multiple cgraph nodes representing
788 different clones of the same body before the body is actually cloned. */
789 for (e = node->callees; e; e = e->next_callee)
791 if (e->call_stmt == call_stmt)
797 for (e = node->indirect_calls; e; e = e->next_callee)
799 if (e->call_stmt == call_stmt)
806 node->call_site_hash = htab_create_ggc (120, edge_hash, edge_eq, NULL);
807 for (e2 = node->callees; e2; e2 = e2->next_callee)
808 cgraph_add_edge_to_call_site_hash (e2);
809 for (e2 = node->indirect_calls; e2; e2 = e2->next_callee)
810 cgraph_add_edge_to_call_site_hash (e2);
817 /* Change field call_stmt of edge E to NEW_STMT. */
820 cgraph_set_call_stmt (struct cgraph_edge *e, gimple new_stmt)
824 if (e->caller->call_site_hash)
826 htab_remove_elt_with_hash (e->caller->call_site_hash,
828 htab_hash_pointer (e->call_stmt));
831 e->call_stmt = new_stmt;
832 if (e->indirect_unknown_callee
833 && (decl = gimple_call_fndecl (new_stmt)))
835 /* Constant propagation (and possibly also inlining?) can turn an
836 indirect call into a direct one. */
837 struct cgraph_node *new_callee = cgraph_get_node (decl);
839 gcc_checking_assert (new_callee);
840 cgraph_make_edge_direct (e, new_callee);
843 push_cfun (DECL_STRUCT_FUNCTION (e->caller->decl));
844 e->can_throw_external = stmt_can_throw_external (new_stmt);
846 if (e->caller->call_site_hash)
847 cgraph_add_edge_to_call_site_hash (e);
850 /* Like cgraph_set_call_stmt but walk the clone tree and update all
851 clones sharing the same function body. */
854 cgraph_set_call_stmt_including_clones (struct cgraph_node *orig,
855 gimple old_stmt, gimple new_stmt)
857 struct cgraph_node *node;
858 struct cgraph_edge *edge = cgraph_edge (orig, old_stmt);
861 cgraph_set_call_stmt (edge, new_stmt);
867 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
869 cgraph_set_call_stmt (edge, new_stmt);
872 else if (node->next_sibling_clone)
873 node = node->next_sibling_clone;
876 while (node != orig && !node->next_sibling_clone)
877 node = node->clone_of;
879 node = node->next_sibling_clone;
884 /* Like cgraph_create_edge walk the clone tree and update all clones sharing
885 same function body. If clones already have edge for OLD_STMT; only
886 update the edge same way as cgraph_set_call_stmt_including_clones does.
888 TODO: COUNT and LOOP_DEPTH should be properly distributed based on relative
889 frequencies of the clones. */
892 cgraph_create_edge_including_clones (struct cgraph_node *orig,
893 struct cgraph_node *callee,
895 gimple stmt, gcov_type count,
897 cgraph_inline_failed_t reason)
899 struct cgraph_node *node;
900 struct cgraph_edge *edge;
902 if (!cgraph_edge (orig, stmt))
904 edge = cgraph_create_edge (orig, callee, stmt, count, freq);
905 edge->inline_failed = reason;
912 struct cgraph_edge *edge = cgraph_edge (node, old_stmt);
914 /* It is possible that clones already contain the edge while
915 master didn't. Either we promoted indirect call into direct
916 call in the clone or we are processing clones of unreachable
917 master where edges has been removed. */
919 cgraph_set_call_stmt (edge, stmt);
920 else if (!cgraph_edge (node, stmt))
922 edge = cgraph_create_edge (node, callee, stmt, count,
924 edge->inline_failed = reason;
929 else if (node->next_sibling_clone)
930 node = node->next_sibling_clone;
933 while (node != orig && !node->next_sibling_clone)
934 node = node->clone_of;
936 node = node->next_sibling_clone;
941 /* Allocate a cgraph_edge structure and fill it with data according to the
942 parameters of which only CALLEE can be NULL (when creating an indirect call
945 static struct cgraph_edge *
946 cgraph_create_edge_1 (struct cgraph_node *caller, struct cgraph_node *callee,
947 gimple call_stmt, gcov_type count, int freq)
949 struct cgraph_edge *edge;
951 /* LTO does not actually have access to the call_stmt since these
952 have not been loaded yet. */
955 /* This is a rather expensive check possibly triggering
956 construction of call stmt hashtable. */
957 gcc_checking_assert (!cgraph_edge (caller, call_stmt));
959 gcc_assert (is_gimple_call (call_stmt));
965 free_edges = NEXT_FREE_EDGE (edge);
969 edge = ggc_alloc_cgraph_edge ();
970 edge->uid = cgraph_edge_max_uid++;
974 edge->caller = caller;
975 edge->callee = callee;
976 edge->prev_caller = NULL;
977 edge->next_caller = NULL;
978 edge->prev_callee = NULL;
979 edge->next_callee = NULL;
982 gcc_assert (count >= 0);
983 edge->frequency = freq;
984 gcc_assert (freq >= 0);
985 gcc_assert (freq <= CGRAPH_FREQ_MAX);
987 edge->call_stmt = call_stmt;
988 push_cfun (DECL_STRUCT_FUNCTION (caller->decl));
989 edge->can_throw_external
990 = call_stmt ? stmt_can_throw_external (call_stmt) : false;
993 && callee && callee->decl
994 && !gimple_check_call_matching_types (call_stmt, callee->decl))
995 edge->call_stmt_cannot_inline_p = true;
997 edge->call_stmt_cannot_inline_p = false;
998 if (call_stmt && caller->call_site_hash)
999 cgraph_add_edge_to_call_site_hash (edge);
1001 edge->indirect_info = NULL;
1002 edge->indirect_inlining_edge = 0;
1007 /* Create edge from CALLER to CALLEE in the cgraph. */
1009 struct cgraph_edge *
1010 cgraph_create_edge (struct cgraph_node *caller, struct cgraph_node *callee,
1011 gimple call_stmt, gcov_type count, int freq)
1013 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, callee, call_stmt,
1016 edge->indirect_unknown_callee = 0;
1017 initialize_inline_failed (edge);
1019 edge->next_caller = callee->callers;
1020 if (callee->callers)
1021 callee->callers->prev_caller = edge;
1022 edge->next_callee = caller->callees;
1023 if (caller->callees)
1024 caller->callees->prev_callee = edge;
1025 caller->callees = edge;
1026 callee->callers = edge;
1031 /* Allocate cgraph_indirect_call_info and set its fields to default values. */
1033 struct cgraph_indirect_call_info *
1034 cgraph_allocate_init_indirect_info (void)
1036 struct cgraph_indirect_call_info *ii;
1038 ii = ggc_alloc_cleared_cgraph_indirect_call_info ();
1039 ii->param_index = -1;
1043 /* Create an indirect edge with a yet-undetermined callee where the call
1044 statement destination is a formal parameter of the caller with index
1047 struct cgraph_edge *
1048 cgraph_create_indirect_edge (struct cgraph_node *caller, gimple call_stmt,
1050 gcov_type count, int freq)
1052 struct cgraph_edge *edge = cgraph_create_edge_1 (caller, NULL, call_stmt,
1055 edge->indirect_unknown_callee = 1;
1056 initialize_inline_failed (edge);
1058 edge->indirect_info = cgraph_allocate_init_indirect_info ();
1059 edge->indirect_info->ecf_flags = ecf_flags;
1061 edge->next_callee = caller->indirect_calls;
1062 if (caller->indirect_calls)
1063 caller->indirect_calls->prev_callee = edge;
1064 caller->indirect_calls = edge;
1069 /* Remove the edge E from the list of the callers of the callee. */
1072 cgraph_edge_remove_callee (struct cgraph_edge *e)
1074 gcc_assert (!e->indirect_unknown_callee);
1076 e->prev_caller->next_caller = e->next_caller;
1078 e->next_caller->prev_caller = e->prev_caller;
1079 if (!e->prev_caller)
1080 e->callee->callers = e->next_caller;
1083 /* Remove the edge E from the list of the callees of the caller. */
1086 cgraph_edge_remove_caller (struct cgraph_edge *e)
1089 e->prev_callee->next_callee = e->next_callee;
1091 e->next_callee->prev_callee = e->prev_callee;
1092 if (!e->prev_callee)
1094 if (e->indirect_unknown_callee)
1095 e->caller->indirect_calls = e->next_callee;
1097 e->caller->callees = e->next_callee;
1099 if (e->caller->call_site_hash)
1100 htab_remove_elt_with_hash (e->caller->call_site_hash,
1102 htab_hash_pointer (e->call_stmt));
1105 /* Put the edge onto the free list. */
1108 cgraph_free_edge (struct cgraph_edge *e)
1112 /* Clear out the edge so we do not dangle pointers. */
1113 memset (e, 0, sizeof (*e));
1115 NEXT_FREE_EDGE (e) = free_edges;
1119 /* Remove the edge E in the cgraph. */
1122 cgraph_remove_edge (struct cgraph_edge *e)
1124 /* Call all edge removal hooks. */
1125 cgraph_call_edge_removal_hooks (e);
1127 if (!e->indirect_unknown_callee)
1128 /* Remove from callers list of the callee. */
1129 cgraph_edge_remove_callee (e);
1131 /* Remove from callees list of the callers. */
1132 cgraph_edge_remove_caller (e);
1134 /* Put the edge onto the free list. */
1135 cgraph_free_edge (e);
1138 /* Set callee of call graph edge E and add it to the corresponding set of
1142 cgraph_set_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
1144 e->prev_caller = NULL;
1146 n->callers->prev_caller = e;
1147 e->next_caller = n->callers;
/* Redirect callee of E to N.  The function does not update underlying
   call expression.  */

void
cgraph_redirect_edge_callee (struct cgraph_edge *e, struct cgraph_node *n)
{
  /* Remove from callers list of the current callee.  */
  cgraph_edge_remove_callee (e);

  /* Insert to callers list of the new callee.  */
  cgraph_set_edge_callee (e, n);
}
1165 /* Make an indirect EDGE with an unknown callee an ordinary edge leading to
1166 CALLEE. DELTA is an integer constant that is to be added to the this
1167 pointer (first parameter) to compensate for skipping a thunk adjustment. */
1170 cgraph_make_edge_direct (struct cgraph_edge *edge, struct cgraph_node *callee)
1172 edge->indirect_unknown_callee = 0;
1174 /* Get the edge out of the indirect edge list. */
1175 if (edge->prev_callee)
1176 edge->prev_callee->next_callee = edge->next_callee;
1177 if (edge->next_callee)
1178 edge->next_callee->prev_callee = edge->prev_callee;
1179 if (!edge->prev_callee)
1180 edge->caller->indirect_calls = edge->next_callee;
1182 /* Put it into the normal callee list */
1183 edge->prev_callee = NULL;
1184 edge->next_callee = edge->caller->callees;
1185 if (edge->caller->callees)
1186 edge->caller->callees->prev_callee = edge;
1187 edge->caller->callees = edge;
1189 /* Insert to callers list of the new callee. */
1190 cgraph_set_edge_callee (edge, callee);
1192 if (edge->call_stmt)
1193 edge->call_stmt_cannot_inline_p
1194 = !gimple_check_call_matching_types (edge->call_stmt, callee->decl);
1196 /* We need to re-determine the inlining status of the edge. */
1197 initialize_inline_failed (edge);
1201 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1202 OLD_STMT changed into NEW_STMT. OLD_CALL is gimple_call_fndecl
1203 of OLD_STMT if it was previously call statement.
1204 If NEW_STMT is NULL, the call has been dropped without any
/* NOTE(review): original line numbers jump (1204 -> 1208 etc.); lines are
   elided from this listing, so braces/returns are not all visible.  */
1208 cgraph_update_edges_for_call_stmt_node (struct cgraph_node *node,
1209 gimple old_stmt, tree old_call,
1212 tree new_call = (new_stmt && is_gimple_call (new_stmt))
1213 ? gimple_call_fndecl (new_stmt) : 0;
1215 /* We are seeing indirect calls, then there is nothing to update. */
1216 if (!new_call && !old_call)
1218 /* See if we turned indirect call into direct call or folded call to one builtin
1219 into different builtin. */
1220 if (old_call != new_call)
1222 struct cgraph_edge *e = cgraph_edge (node, old_stmt);
1223 struct cgraph_edge *ne = NULL;
1229 /* See if the edge is already there and has the correct callee. It
1230 might be so because of indirect inlining has already updated
1231 it. We also might've cloned and redirected the edge. */
1232 if (new_call && e->callee)
1234 struct cgraph_node *callee = e->callee;
/* Walk up the clone chain: a clone (or former clone) of NEW_CALL's node
   also counts as the correct callee.  */
1237 if (callee->decl == new_call
1238 || callee->former_clone_of == new_call)
1240 callee = callee->clone_of;
1244 /* Otherwise remove edge and create new one; we can't simply redirect
1245 since function has changed, so inline plan and other information
1246 attached to edge is invalid. */
1248 frequency = e->frequency;
1249 cgraph_remove_edge (e);
1253 /* We are seeing new direct call; compute profile info based on BB. */
1254 basic_block bb = gimple_bb (new_stmt);
1256 frequency = compute_call_stmt_bb_frequency (current_function_decl,
/* Newly created edges must start out not-inlined.  */
1262 ne = cgraph_create_edge (node, cgraph_get_create_node (new_call),
1263 new_stmt, count, frequency);
1264 gcc_assert (ne->inline_failed);
1267 /* We only updated the call stmt; update pointer in cgraph edge.. */
1268 else if (old_stmt != new_stmt)
1269 cgraph_set_call_stmt (cgraph_edge (node, old_stmt), new_stmt);
1272 /* Update or remove the corresponding cgraph edge if a GIMPLE_CALL
1273 OLD_STMT changed into NEW_STMT. OLD_DECL is gimple_call_fndecl
1274 of OLD_STMT before it was updated (updating can happen inplace). */
/* NOTE(review): lines are elided from this listing (numbering jumps);
   the body below is not complete code.  */
1277 cgraph_update_edges_for_call_stmt (gimple old_stmt, tree old_decl, gimple new_stmt)
1279 struct cgraph_node *orig = cgraph_get_node (cfun->decl);
1280 struct cgraph_node *node;
1282 gcc_checking_assert (orig);
1283 cgraph_update_edges_for_call_stmt_node (orig, old_stmt, old_decl, new_stmt);
/* Walk the whole clone tree of ORIG (descend via ->clones, advance via
   ->next_sibling_clone, back up via ->clone_of) so every clone's edges
   stay in sync with the statement change.  */
1285 for (node = orig->clones; node != orig;)
1287 cgraph_update_edges_for_call_stmt_node (node, old_stmt, old_decl, new_stmt);
1289 node = node->clones;
1290 else if (node->next_sibling_clone)
1291 node = node->next_sibling_clone;
1294 while (node != orig && !node->next_sibling_clone)
1295 node = node->clone_of;
1297 node = node->next_sibling_clone;
1303 /* Remove all callees from the node. */
/* NOTE(review): numbering jumps indicate elided lines in this listing.  */
1306 cgraph_node_remove_callees (struct cgraph_node *node)
1308 struct cgraph_edge *e, *f;
1310 /* It is sufficient to remove the edges from the lists of callers of
1311 the callees. The callee list of the node can be zapped with one
/* F caches the next edge before E is freed, so iteration survives
   cgraph_free_edge.  */
1313 for (e = node->callees; e; e = f)
1316 cgraph_call_edge_removal_hooks (e);
1317 if (!e->indirect_unknown_callee)
1318 cgraph_edge_remove_callee (e);
1319 cgraph_free_edge (e);
/* Same treatment for edges representing indirect calls.  */
1321 for (e = node->indirect_calls; e; e = f)
1324 cgraph_call_edge_removal_hooks (e);
1325 if (!e->indirect_unknown_callee)
1326 cgraph_edge_remove_callee (e);
1327 cgraph_free_edge (e);
1329 node->indirect_calls = NULL;
1330 node->callees = NULL;
/* The call-site hash indexes the freed edges; drop it too.  */
1331 if (node->call_site_hash)
1333 htab_delete (node->call_site_hash);
1334 node->call_site_hash = NULL;
1338 /* Remove all callers from the node. */
/* NOTE(review): lines are elided from this listing (numbering jumps).  */
1341 cgraph_node_remove_callers (struct cgraph_node *node)
1343 struct cgraph_edge *e, *f;
1345 /* It is sufficient to remove the edges from the lists of callees of
1346 the callers. The caller list of the node can be zapped with one
/* F caches the next edge before E is freed.  */
1348 for (e = node->callers; e; e = f)
1351 cgraph_call_edge_removal_hooks (e);
1352 cgraph_edge_remove_caller (e);
1353 cgraph_free_edge (e);
1355 node->callers = NULL;
1358 /* Release memory used to represent body of function NODE. */
/* NOTE(review): numbering jumps indicate elided lines; pop_cfun and several
   conditionals are not visible in this listing.  */
1361 cgraph_release_function_body (struct cgraph_node *node)
1363 if (DECL_STRUCT_FUNCTION (node->decl))
1365 tree old_decl = current_function_decl;
/* Enter the function's context so the cleanup routines below operate
   on the right cfun.  */
1366 push_cfun (DECL_STRUCT_FUNCTION (node->decl))
1370 cfun->curr_properties &= ~PROP_loops;
1371 loop_optimizer_finalize ();
1373 if (cfun->gimple_df)
1375 current_function_decl = node->decl;
1377 delete_tree_cfg_annotations ();
1379 current_function_decl = old_decl;
1383 gcc_assert (dom_computed[0] == DOM_NONE);
1384 gcc_assert (dom_computed[1] == DOM_NONE);
1387 if (cfun->value_histograms)
1390 gimple_set_body (node->decl, NULL);
1391 VEC_free (ipa_opt_pass, heap,
1392 node->ipa_transforms_to_apply);
1393 /* Struct function hangs a lot of data that would leak if we didn't
1394 removed all pointers to it. */
1395 ggc_free (DECL_STRUCT_FUNCTION (node->decl));
1396 DECL_STRUCT_FUNCTION (node->decl) = NULL;
1398 DECL_SAVED_TREE (node->decl) = NULL;
1399 /* If the node is abstract and needed, then do not clear DECL_INITIAL
1400 of its associated function function declaration because it's
1401 needed to emit debug info later. */
1402 if (!node->abstract_and_needed)
1403 DECL_INITIAL (node->decl) = error_mark_node;
1406 /* Remove the node from cgraph. */
/* NOTE(review): this listing elides many lines (numbering jumps), including
   the declarations of SLOT and several control-flow lines; read against the
   full cgraph.c before drawing conclusions.  */
1409 cgraph_remove_node (struct cgraph_node *node)
1412 bool kill_body = false;
1413 struct cgraph_node *n;
1414 int uid = node->uid;
/* Detach NODE from every cgraph data structure: hooks, edges, refs.  */
1416 cgraph_call_node_removal_hooks (node);
1417 cgraph_node_remove_callers (node);
1418 cgraph_node_remove_callees (node);
1419 ipa_remove_all_references (&node->ref_list);
1420 ipa_remove_all_refering (&node->ref_list);
1421 VEC_free (ipa_opt_pass, heap,
1422 node->ipa_transforms_to_apply);
1424 /* Incremental inlining access removed nodes stored in the postorder list.
1426 node->needed = node->reachable = false;
/* Unlink NODE from the nested-function lists of its origin.  */
1427 for (n = node->nested; n; n = n->next_nested)
1429 node->nested = NULL;
1432 struct cgraph_node **node2 = &node->origin->nested;
1434 while (*node2 != node)
1435 node2 = &(*node2)->next_nested;
1436 *node2 = node->next_nested;
/* Unlink NODE from the global doubly-linked cgraph_nodes chain.  */
1439 node->previous->next = node->next;
1441 cgraph_nodes = node->next;
1443 node->next->previous = node->previous;
1445 node->previous = NULL;
1446 slot = htab_find_slot (cgraph_hash, node, NO_INSERT);
1449 struct cgraph_node *next_inline_clone;
/* Look for an inline clone sharing NODE's decl that can take over
   NODE's hash-table slot.  */
1451 for (next_inline_clone = node->clones;
1452 next_inline_clone && next_inline_clone->decl != node->decl;
1453 next_inline_clone = next_inline_clone->next_sibling_clone)
1456 /* If there is inline clone of the node being removed, we need
1457 to put it into the position of removed node and reorganize all
1458 other clones to be based on it. */
1459 if (next_inline_clone)
1461 struct cgraph_node *n;
1462 struct cgraph_node *new_clones;
1464 *slot = next_inline_clone;
1466 /* Unlink inline clone from the list of clones of removed node. */
1467 if (next_inline_clone->next_sibling_clone)
1468 next_inline_clone->next_sibling_clone->prev_sibling_clone
1469 = next_inline_clone->prev_sibling_clone;
1470 if (next_inline_clone->prev_sibling_clone)
1472 gcc_assert (node->clones != next_inline_clone);
1473 next_inline_clone->prev_sibling_clone->next_sibling_clone
1474 = next_inline_clone->next_sibling_clone;
1478 gcc_assert (node->clones == next_inline_clone);
1479 node->clones = next_inline_clone->next_sibling_clone;
1482 new_clones = node->clones;
1483 node->clones = NULL;
1485 /* Copy clone info. */
1486 next_inline_clone->clone = node->clone;
1488 /* Now place it into clone tree at same level at NODE. */
1489 next_inline_clone->clone_of = node->clone_of;
1490 next_inline_clone->prev_sibling_clone = NULL;
1491 next_inline_clone->next_sibling_clone = NULL;
1494 if (node->clone_of->clones)
1495 node->clone_of->clones->prev_sibling_clone = next_inline_clone;
1496 next_inline_clone->next_sibling_clone = node->clone_of->clones;
1497 node->clone_of->clones = next_inline_clone;
1500 /* Merge the clone list. */
1503 if (!next_inline_clone->clones)
1504 next_inline_clone->clones = new_clones;
/* Append NEW_CLONES at the tail of the survivor's clone list.  */
1507 n = next_inline_clone->clones;
1508 while (n->next_sibling_clone)
1509 n = n->next_sibling_clone;
1510 n->next_sibling_clone = new_clones;
1511 new_clones->prev_sibling_clone = n;
1515 /* Update clone_of pointers. */
1519 n->clone_of = next_inline_clone;
1520 n = n->next_sibling_clone;
/* No replacement clone: the decl leaves the hash entirely.  */
1525 htab_clear_slot (cgraph_hash, slot);
1530 if (node->prev_sibling_clone)
1531 node->prev_sibling_clone->next_sibling_clone = node->next_sibling_clone;
1532 else if (node->clone_of)
1533 node->clone_of->clones = node->next_sibling_clone;
1534 if (node->next_sibling_clone)
1535 node->next_sibling_clone->prev_sibling_clone = node->prev_sibling_clone;
1538 struct cgraph_node *n, *next;
/* Splice NODE's clones into its parent's clone list.  */
1542 for (n = node->clones; n->next_sibling_clone; n = n->next_sibling_clone)
1543 n->clone_of = node->clone_of;
1544 n->clone_of = node->clone_of;
1545 n->next_sibling_clone = node->clone_of->clones;
1546 if (node->clone_of->clones)
1547 node->clone_of->clones->prev_sibling_clone = n;
1548 node->clone_of->clones = node->clones;
1552 /* We are removing node with clones. this makes clones inconsistent,
1553 but assume they will be removed subsequently and just keep clone
1554 tree intact. This can happen in unreachable function removal since
1555 we remove unreachable functions in random order, not by bottom-up
1556 walk of clone trees. */
1557 for (n = node->clones; n; n = next)
1559 next = n->next_sibling_clone;
1560 n->next_sibling_clone = NULL;
1561 n->prev_sibling_clone = NULL;
/* Unlink NODE from its circular same_comdat_group ring.  */
1567 if (node->same_comdat_group)
1569 struct cgraph_node *prev;
1570 for (prev = node->same_comdat_group;
1571 prev->same_comdat_group != node;
1572 prev = prev->same_comdat_group)
1574 if (node->same_comdat_group == prev)
1575 prev->same_comdat_group = NULL;
1577 prev->same_comdat_group = node->same_comdat_group;
1578 node->same_comdat_group = NULL;
1581 /* While all the clones are removed after being proceeded, the function
1582 itself is kept in the cgraph even after it is compiled. Check whether
1583 we are done with this body and reclaim it proactively if this is the case.
1585 if (!kill_body && *slot)
1587 struct cgraph_node *n = (struct cgraph_node *) *slot;
1588 if (!n->clones && !n->clone_of && !n->global.inlined_to
1589 && (cgraph_global_info_ready
1590 && (TREE_ASM_WRITTEN (n->decl) || DECL_EXTERNAL (n->decl)
1591 || n->in_other_partition)))
1594 if (assembler_name_hash)
1596 tree name = DECL_ASSEMBLER_NAME (node->decl);
1597 slot = htab_find_slot_with_hash (assembler_name_hash, name,
1598 decl_assembler_name_hash (name),
1600 /* Inline clones are not hashed. */
1601 if (slot && *slot == node)
1602 htab_clear_slot (assembler_name_hash, slot);
1606 cgraph_release_function_body (node);
1608 if (node->call_site_hash)
1610 htab_delete (node->call_site_hash);
1611 node->call_site_hash = NULL;
1615 /* Clear out the node to NULL all pointers and add the node to the free
/* The node memory is recycled via the free_nodes free list, hence the
   memset rather than ggc_free.  */
1617 memset (node, 0, sizeof(*node));
1619 NEXT_FREE_NODE (node) = free_nodes;
1623 /* Add NEW_ to the same comdat group that OLD is in. */
/* NOTE(review): lines are elided from this listing (numbering jumps).  */
1626 cgraph_add_to_same_comdat_group (struct cgraph_node *new_,
1627 struct cgraph_node *old)
1629 gcc_assert (DECL_ONE_ONLY (old->decl));
1630 gcc_assert (!new_->same_comdat_group);
1631 gcc_assert (new_ != old);
1633 DECL_COMDAT_GROUP (new_->decl) = DECL_COMDAT_GROUP (old->decl);
1634 new_->same_comdat_group = old;
/* same_comdat_group is a circular list; either start a two-element ring
   or insert NEW_ just before OLD in the existing ring.  */
1635 if (!old->same_comdat_group)
1636 old->same_comdat_group = new_;
1639 struct cgraph_node *n;
1640 for (n = old->same_comdat_group;
1641 n->same_comdat_group != old;
1642 n = n->same_comdat_group)
1644 n->same_comdat_group = new_;
1648 /* Remove the node from cgraph and all inline clones inlined into it.
1649 Skip however removal of FORBIDDEN_NODE and return true if it needs to be
1650 removed. This allows to call the function from outer loop walking clone
/* NOTE(review): numbering jumps indicate elided lines (declaration of FOUND,
   returns) in this listing.  */
1654 cgraph_remove_node_and_inline_clones (struct cgraph_node *node, struct cgraph_node *forbidden_node)
1656 struct cgraph_edge *e, *next;
1659 if (node == forbidden_node)
/* Recurse into callees whose edge is inlined (!inline_failed); those
   bodies live inside NODE and must go with it.  */
1661 for (e = node->callees; e; e = next)
1663 next = e->next_callee;
1664 if (!e->inline_failed)
1665 found |= cgraph_remove_node_and_inline_clones (e->callee, forbidden_node);
1667 cgraph_remove_node (node);
1671 /* Notify finalize_compilation_unit that given node is reachable. */
/* NOTE(review): lines are elided from this listing (numbering jumps).  */
1674 cgraph_mark_reachable_node (struct cgraph_node *node)
1676 if (!node->reachable && node->local.finalized)
1678 if (cgraph_global_info_ready)
1680 /* Verify that function does not appear to be needed out of blue
1681 during the optimization process. This can happen for extern
1682 inlines when bodies was removed after inlining. */
1683 gcc_assert ((node->analyzed || node->in_other_partition
1684 || DECL_EXTERNAL (node->decl)));
1687 notice_global_symbol (node->decl);
1688 node->reachable = 1;
/* Queue the node for processing by the analysis loop.  */
1690 node->next_needed = cgraph_nodes_queue;
1691 cgraph_nodes_queue = node;
1695 /* Likewise indicate that a node is needed, i.e. reachable via some
/* NOTE(review): the line setting node->needed (orig. ~1701) is elided from
   this listing.  */
1699 cgraph_mark_needed_node (struct cgraph_node *node)
1702 gcc_assert (!node->global.inlined_to);
1703 cgraph_mark_reachable_node (node);
1706 /* Likewise indicate that a node is having address taken. */
/* NOTE(review): lines are elided from this listing (numbering jumps).  */
1709 cgraph_mark_address_taken_node (struct cgraph_node *node)
1711 gcc_assert (!node->global.inlined_to);
1712 cgraph_mark_reachable_node (node);
1713 /* FIXME: address_taken flag is used both as a shortcut for testing whether
1714 IPA_REF_ADDR reference exists (and thus it should be set on node
1715 representing alias we take address of) and as a test whether address
1716 of the object was taken (and thus it should be set on node alias is
1717 referring to). We should remove the first use and the remove the
/* Set the flag on both the alias and its ultimate target (see FIXME).  */
1719 node->address_taken = 1;
1720 node = cgraph_function_or_thunk_node (node, NULL);
1721 node->address_taken = 1;
1724 /* Return local info for the compiled function. */
/* NOTE(review): the NULL-node check (orig. lines 1733-1734) is elided from
   this listing.  */
1726 struct cgraph_local_info *
1727 cgraph_local_info (tree decl)
1729 struct cgraph_node *node;
1731 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1732 node = cgraph_get_node (decl);
1735 return &node->local;
1738 /* Return local info for the compiled function. */
/* NOTE(review): comment above says "local" but this returns the global
   info; inherited from upstream.  Lines are elided from this listing.  */
1740 struct cgraph_global_info *
1741 cgraph_global_info (tree decl)
1743 struct cgraph_node *node;
1745 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL && cgraph_global_info_ready);
1746 node = cgraph_get_node (decl);
1749 return &node->global;
1752 /* Return local info for the compiled function. */
/* NOTE(review): comment above says "local" but this returns the rtl info;
   the return statements (orig. ~1764-1765) are elided from this listing.  */
1754 struct cgraph_rtl_info *
1755 cgraph_rtl_info (tree decl)
1757 struct cgraph_node *node;
1759 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
1760 node = cgraph_get_node (decl);
1762 || (decl != current_function_decl
1763 && !TREE_ASM_WRITTEN (node->decl)))
1768 /* Return a string describing the failure REASON. */
/* NOTE(review): lines are elided from this listing (numbering jumps).  */
1771 cgraph_inline_failed_string (cgraph_inline_failed_t reason)
/* The table is generated from cif-code.def via the DEFCIFCODE macro, so
   it stays in sync with the cgraph_inline_failed_t enum.  */
1774 #define DEFCIFCODE(code, string) string,
1776 static const char *cif_string_table[CIF_N_REASONS] = {
1777 #include "cif-code.def"
1780 /* Signedness of an enum type is implementation defined, so cast it
1781 to unsigned before testing. */
1782 gcc_assert ((unsigned) reason < CIF_N_REASONS);
1783 return cif_string_table[reason];
1786 /* Return name of the node used in debug output. */
/* NOTE(review): the return type line is elided from this listing.  */
1788 cgraph_node_name (struct cgraph_node *node)
1790 return lang_hooks.decl_printable_name (node->decl, 2);
1793 /* Names used to print out the availability enum. */
/* NOTE(review): order presumably mirrors enum availability in cgraph.h --
   verify before reordering.  */
1794 const char * const cgraph_availability_names[] =
1795 {"unset", "not_available", "overwritable", "available", "local"};
1798 /* Dump call graph node NODE to file F. */
/* NOTE(review): many lines are elided from this listing (numbering jumps);
   several fprintf calls below lost their guarding conditions.  */
1801 dump_cgraph_node (FILE *f, struct cgraph_node *node)
1803 struct cgraph_edge *edge;
1804 int indirect_calls_count = 0;
/* Identity: printable name, uid, address, assembler name.  */
1806 fprintf (f, "%s/%i", cgraph_node_name (node), node->uid);
1807 dump_addr (f, " @", (void *)node);
1808 if (DECL_ASSEMBLER_NAME_SET_P (node->decl))
1809 fprintf (f, " (asm: %s)", IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->decl)));
1810 if (node->global.inlined_to)
1811 fprintf (f, " (inline copy in %s/%i)",
1812 cgraph_node_name (node->global.inlined_to),
1813 node->global.inlined_to->uid);
1814 if (node->same_comdat_group)
1815 fprintf (f, " (same comdat group as %s/%i)",
1816 cgraph_node_name (node->same_comdat_group),
1817 node->same_comdat_group->uid);
1819 fprintf (f, " (clone of %s/%i)",
1820 cgraph_node_name (node->clone_of),
1821 node->clone_of->uid);
1822 if (cgraph_function_flags_ready)
1823 fprintf (f, " availability:%s",
1824 cgraph_availability_names [cgraph_function_body_availability (node)]);
/* One flag word per node property that is set.  */
1826 fprintf (f, " analyzed");
1827 if (node->in_other_partition)
1828 fprintf (f, " in_other_partition");
1830 fprintf (f, " executed "HOST_WIDEST_INT_PRINT_DEC"x",
1831 (HOST_WIDEST_INT)node->count);
1833 fprintf (f, " nested in: %s", cgraph_node_name (node->origin));
1835 fprintf (f, " needed");
1836 if (node->address_taken)
1837 fprintf (f, " address_taken");
1838 else if (node->reachable)
1839 fprintf (f, " reachable");
1840 else if (node->reachable_from_other_partition)
1841 fprintf (f, " reachable_from_other_partition");
1842 if (gimple_has_body_p (node->decl))
1843 fprintf (f, " body");
1845 fprintf (f, " process");
1846 if (node->local.local)
1847 fprintf (f, " local");
1848 if (node->local.externally_visible)
1849 fprintf (f, " externally_visible");
1850 if (node->resolution != LDPR_UNKNOWN)
1852 ld_plugin_symbol_resolution_names[(int)node->resolution]);
1853 if (node->local.finalized)
1854 fprintf (f, " finalized");
1855 if (node->local.redefined_extern_inline)
1856 fprintf (f, " redefined_extern_inline");
1857 if (TREE_ASM_WRITTEN (node->decl))
1858 fprintf (f, " asm_written");
1859 if (node->only_called_at_startup)
1860 fprintf (f, " only_called_at_startup");
1861 if (node->only_called_at_exit)
1862 fprintf (f, " only_called_at_exit");
1863 else if (node->alias)
1864 fprintf (f, " alias");
1866 fprintf (f, " tm_clone");
/* Thunk / alias details.  */
1870 if (node->thunk.thunk_p)
1872 fprintf (f, " thunk of %s (asm: %s) fixed offset %i virtual value %i has "
1873 "virtual offset %i)\n",
1874 lang_hooks.decl_printable_name (node->thunk.alias, 2),
1875 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)),
1876 (int)node->thunk.fixed_offset,
1877 (int)node->thunk.virtual_value,
1878 (int)node->thunk.virtual_offset_p);
1880 if (node->alias && node->thunk.alias)
1882 fprintf (f, " alias of %s",
1883 lang_hooks.decl_printable_name (node->thunk.alias, 2));
1884 if (DECL_ASSEMBLER_NAME_SET_P (node->thunk.alias))
1885 fprintf (f, " (asm: %s)",
1886 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (node->thunk.alias)));
/* Incoming edges (callers), with profile and inline annotations.  */
1890 fprintf (f, " called by: ");
1892 for (edge = node->callers; edge; edge = edge->next_caller)
1894 fprintf (f, "%s/%i ", cgraph_node_name (edge->caller),
1897 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1898 (HOST_WIDEST_INT)edge->count);
1899 if (edge->frequency)
1900 fprintf (f, "(%.2f per call) ",
1901 edge->frequency / (double)CGRAPH_FREQ_BASE);
1902 if (!edge->inline_failed)
1903 fprintf(f, "(inlined) ");
1904 if (edge->indirect_inlining_edge)
1905 fprintf(f, "(indirect_inlining) ");
1906 if (edge->can_throw_external)
1907 fprintf(f, "(can throw external) ");
/* Outgoing edges (callees).  */
1910 fprintf (f, "\n calls: ");
1911 for (edge = node->callees; edge; edge = edge->next_callee)
1913 fprintf (f, "%s/%i ", cgraph_node_name (edge->callee),
1915 if (!edge->inline_failed)
1916 fprintf(f, "(inlined) ");
1917 if (edge->indirect_inlining_edge)
1918 fprintf(f, "(indirect_inlining) ");
1920 fprintf (f, "("HOST_WIDEST_INT_PRINT_DEC"x) ",
1921 (HOST_WIDEST_INT)edge->count);
1922 if (edge->frequency)
1923 fprintf (f, "(%.2f per call) ",
1924 edge->frequency / (double)CGRAPH_FREQ_BASE);
1925 if (edge->can_throw_external)
1926 fprintf(f, "(can throw external) ");
1929 fprintf (f, " References: ");
1930 ipa_dump_references (f, &node->ref_list);
1931 fprintf (f, " Refering this function: ");
1932 ipa_dump_refering (f, &node->ref_list);
/* Indirect calls are only counted, not listed individually.  */
1934 for (edge = node->indirect_calls; edge; edge = edge->next_callee)
1935 indirect_calls_count++;
1936 if (indirect_calls_count)
1937 fprintf (f, " has %i outgoing edges for indirect calls.\n",
1938 indirect_calls_count);
1942 /* Dump call graph node NODE to stderr. */
/* NOTE(review): the DEBUG_FUNCTION/return-type lines are elided from this
   listing.  */
1945 debug_cgraph_node (struct cgraph_node *node)
1947 dump_cgraph_node (stderr, node);
1951 /* Dump the callgraph to file F. */
/* NOTE(review): lines are elided from this listing (numbering jumps).  */
1954 dump_cgraph (FILE *f)
1956 struct cgraph_node *node;
1958 fprintf (f, "callgraph:\n\n");
1959 for (node = cgraph_nodes; node; node = node->next)
1960 dump_cgraph_node (f, node);
1964 /* Dump the call graph to stderr. */
/* NOTE(review): the function header for debug_cgraph (orig. lines
   ~1965-1968) is missing from this listing; only the body line remains.  */
1969 dump_cgraph (stderr);
1973 /* Set the DECL_ASSEMBLER_NAME and update cgraph hashtables. */
/* NOTE(review): lines are elided from this listing (numbering jumps),
   including the declaration of SLOT and several returns/braces.  */
1976 change_decl_assembler_name (tree decl, tree name)
1978 struct cgraph_node *node;
/* First assignment of an assembler name needs no hash maintenance.  */
1980 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
1981 SET_DECL_ASSEMBLER_NAME (decl, name)
1984 if (name == DECL_ASSEMBLER_NAME (decl))
/* Remove the stale entry keyed by the old assembler name.  */
1987 if (assembler_name_hash
1988 && TREE_CODE (decl) == FUNCTION_DECL
1989 && (node = cgraph_get_node (decl)) != NULL)
1991 tree old_name = DECL_ASSEMBLER_NAME (decl);
1992 slot = htab_find_slot_with_hash (assembler_name_hash, old_name,
1993 decl_assembler_name_hash (old_name),
1995 /* Inline clones are not hashed. */
1996 if (slot && *slot == node)
1997 htab_clear_slot (assembler_name_hash, slot);
1999 if (TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl))
2000 && DECL_RTL_SET_P (decl))
2001 warning (0, "%D renamed after being referenced in assembly", decl);
2003 SET_DECL_ASSEMBLER_NAME (decl, name);
/* Re-insert under the new assembler name.  */
2005 if (assembler_name_hash
2006 && TREE_CODE (decl) == FUNCTION_DECL
2007 && (node = cgraph_get_node (decl)) != NULL)
2009 slot = htab_find_slot_with_hash (assembler_name_hash, name,
2010 decl_assembler_name_hash (name),
2012 gcc_assert (!*slot);
2017 /* Add a top-level asm statement to the list. */
/* NOTE(review): lines are elided from this listing (numbering jumps).  */
2019 struct cgraph_asm_node *
2020 cgraph_add_asm_node (tree asm_str)
2022 struct cgraph_asm_node *node;
2024 node = ggc_alloc_cleared_cgraph_asm_node ();
2025 node->asm_str = asm_str;
/* Order records the position relative to functions/variables so the
   asm is emitted in source order.  */
2026 node->order = cgraph_order++;
2028 if (cgraph_asm_nodes == NULL)
2029 cgraph_asm_nodes = node;
2031 cgraph_asm_last_node->next = node;
2032 cgraph_asm_last_node = node;
2036 /* Return true when the DECL can possibly be inlined. */
/* NOTE(review): the return-type line is elided from this listing.  */
2038 cgraph_function_possibly_inlined_p (tree decl)
/* Before global info is ready, fall back to the uninlinable flag;
   afterwards, the inliner has recorded the definitive answer.  */
2040 if (!cgraph_global_info_ready)
2041 return !DECL_UNINLINABLE (decl);
2042 return DECL_POSSIBLY_INLINED (decl);
2045 /* Create clone of E in the node N represented by CALL_EXPR the callgraph. */
/* NOTE(review): lines are elided from this listing (numbering jumps).  */
2046 struct cgraph_edge *
2047 cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
2048 gimple call_stmt, unsigned stmt_uid, gcov_type count_scale,
2049 int freq_scale, bool update_original)
2051 struct cgraph_edge *new_edge;
/* Scale profile count/frequency of the clone by the caller ratios.  */
2052 gcov_type count = e->count * count_scale / REG_BR_PROB_BASE;
2055 /* We do not want to ignore loop nest after frequency drops to 0. */
2058 freq = e->frequency * (gcov_type) freq_scale / CGRAPH_FREQ_BASE;
2059 if (freq > CGRAPH_FREQ_MAX)
2060 freq = CGRAPH_FREQ_MAX;
2062 if (e->indirect_unknown_callee)
/* The indirect call may have been turned direct in the clone's
   statement; if so create a direct edge instead.  */
2066 if (call_stmt && (decl = gimple_call_fndecl (call_stmt)))
2068 struct cgraph_node *callee = cgraph_get_node (decl);
2069 gcc_checking_assert (callee);
2070 new_edge = cgraph_create_edge (n, callee, call_stmt, count, freq);
2074 new_edge = cgraph_create_indirect_edge (n, call_stmt,
2075 e->indirect_info->ecf_flags,
2077 *new_edge->indirect_info = *e->indirect_info;
2082 new_edge = cgraph_create_edge (n, e->callee, call_stmt, count, freq);
2083 if (e->indirect_info)
2085 new_edge->indirect_info
2086 = ggc_alloc_cleared_cgraph_indirect_call_info ();
2087 *new_edge->indirect_info = *e->indirect_info;
2091 new_edge->inline_failed = e->inline_failed;
2092 new_edge->indirect_inlining_edge = e->indirect_inlining_edge;
2093 new_edge->lto_stmt_uid = stmt_uid;
2094 /* Clone flags that depend on call_stmt availability manually. */
2095 new_edge->can_throw_external = e->can_throw_external;
2096 new_edge->call_stmt_cannot_inline_p = e->call_stmt_cannot_inline_p;
/* Move the cloned portion of the profile out of the original edge.  */
2097 if (update_original)
2099 e->count -= new_edge->count;
2103 cgraph_call_edge_duplication_hooks (e, new_edge);
2108 /* Create node representing clone of N executed COUNT times. Decrease
2109 the execution counts from original node too.
2110 The new clone will have decl set to DECL that may or may not be the same
2113 When UPDATE_ORIGINAL is true, the counts are subtracted from the original
2114 function's profile to reflect the fact that part of execution is handled
2116 When CALL_DUPLICATOIN_HOOK is true, the ipa passes are acknowledged about
2117 the new clone. Otherwise the caller is responsible for doing so later. */
/* NOTE(review): lines are elided from this listing (numbering jumps).  */
2119 struct cgraph_node *
2120 cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
2121 bool update_original,
2122 VEC(cgraph_edge_p,heap) *redirect_callers,
2123 bool call_duplication_hook)
2125 struct cgraph_node *new_node = cgraph_create_node_1 ();
2126 struct cgraph_edge *e;
2127 gcov_type count_scale;
/* Copy the bookkeeping of N onto the clone, then override the bits
   that differ: a clone is local and not externally visible.  */
2130 new_node->decl = decl;
2131 new_node->origin = n->origin;
2132 if (new_node->origin)
2134 new_node->next_nested = new_node->origin->nested;
2135 new_node->origin->nested = new_node;
2137 new_node->analyzed = n->analyzed;
2138 new_node->local = n->local;
2139 new_node->local.externally_visible = false;
2140 new_node->local.local = true;
2141 new_node->global = n->global;
2142 new_node->rtl = n->rtl;
2143 new_node->count = count;
2144 new_node->frequency = n->frequency;
2145 new_node->clone = n->clone;
2146 new_node->clone.tree_map = 0;
/* Ratio used to scale edge counts; clamped so it never exceeds 1.  */
2149 if (new_node->count > n->count)
2150 count_scale = REG_BR_PROB_BASE;
2152 count_scale = new_node->count * REG_BR_PROB_BASE / n->count;
2156 if (update_original)
2163 FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
2165 /* Redirect calls to the old version node to point to its new
2167 cgraph_redirect_edge_callee (e, new_node);
/* Clone all outgoing edges, direct and indirect.  */
2171 for (e = n->callees;e; e=e->next_callee)
2172 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2173 count_scale, freq, update_original);
2175 for (e = n->indirect_calls; e; e = e->next_callee)
2176 cgraph_clone_edge (e, new_node, e->call_stmt, e->lto_stmt_uid,
2177 count_scale, freq, update_original);
2178 ipa_clone_references (new_node, NULL, &n->ref_list);
/* Link the clone as the head of N's clone list.  */
2180 new_node->next_sibling_clone = n->clones;
2182 n->clones->prev_sibling_clone = new_node;
2183 n->clones = new_node;
2184 new_node->clone_of = n;
/* A clone with a fresh decl gets its own hash-table entries.  */
2186 if (n->decl != decl)
2188 struct cgraph_node **slot;
2189 slot = (struct cgraph_node **) htab_find_slot (cgraph_hash, new_node, INSERT);
2190 gcc_assert (!*slot);
2192 if (assembler_name_hash)
2195 tree name = DECL_ASSEMBLER_NAME (decl);
2197 aslot = htab_find_slot_with_hash (assembler_name_hash, name,
2198 decl_assembler_name_hash (name),
2200 gcc_assert (!*aslot);
2205 if (call_duplication_hook)
2206 cgraph_call_node_duplication_hooks (n, new_node);
2210 /* Create a new name for clone of DECL, add SUFFIX. Returns an identifier. */
/* NOTE(review): lines are elided from this listing (numbering jumps); the
   separator character written at prefix[len] is not visible here.  */
2212 static GTY(()) unsigned int clone_fn_id_num;
2215 clone_function_name (tree decl, const char *suffix)
2217 tree name = DECL_ASSEMBLER_NAME (decl);
2218 size_t len = IDENTIFIER_LENGTH (name);
2219 char *tmp_name, *prefix;
/* +2: one byte for the separator inserted at prefix[len], one for NUL.  */
2221 prefix = XALLOCAVEC (char, len + strlen (suffix) + 2);
2222 memcpy (prefix, IDENTIFIER_POINTER (name), len);
2223 strcpy (prefix + len + 1, suffix);
2224 #ifndef NO_DOT_IN_LABEL
2226 #elif !defined NO_DOLLAR_IN_LABEL
/* clone_fn_id_num makes each generated clone name unique.  */
2231 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix, clone_fn_id_num++);
2232 return get_identifier (tmp_name);
2235 /* Create callgraph node clone with new declaration. The actual body will
2236 be copied later at compilation stage.
2238 TODO: after merging in ipa-sra use function call notes instead of args_to_skip
/* NOTE(review): lines are elided from this listing (numbering jumps).  */
2241 struct cgraph_node *
2242 cgraph_create_virtual_clone (struct cgraph_node *old_node,
2243 VEC(cgraph_edge_p,heap) *redirect_callers,
2244 VEC(ipa_replace_map_p,gc) *tree_map,
2245 bitmap args_to_skip,
2246 const char * suffix)
2248 tree old_decl = old_node->decl;
2249 struct cgraph_node *new_node = NULL;
2252 struct ipa_replace_map *map;
2255 gcc_checking_assert (tree_versionable_function_p (old_decl));
2257 gcc_assert (old_node->local.can_change_signature || !args_to_skip);
2259 /* Make a new FUNCTION_DECL tree node */
2261 new_decl = copy_node (old_decl);
2263 new_decl = build_function_decl_skip_args (old_decl, args_to_skip, false);
2264 DECL_STRUCT_FUNCTION (new_decl) = NULL;
2266 /* Generate a new name for the new version. */
2267 DECL_NAME (new_decl) = clone_function_name (old_decl, suffix);
2268 SET_DECL_ASSEMBLER_NAME (new_decl, DECL_NAME (new_decl));
2269 SET_DECL_RTL (new_decl, NULL);
2271 new_node = cgraph_clone_node (old_node, new_decl, old_node->count,
2272 CGRAPH_FREQ_BASE, false,
2273 redirect_callers, false);
2274 /* Update the properties.
2275 Make clone visible only within this translation unit. Make sure
2276 that is not weak also.
2277 ??? We cannot use COMDAT linkage because there is no
2278 ABI support for this. */
2279 DECL_EXTERNAL (new_node->decl) = 0;
2280 if (DECL_ONE_ONLY (old_decl))
2281 DECL_SECTION_NAME (new_node->decl) = NULL;
2282 DECL_COMDAT_GROUP (new_node->decl) = 0;
2283 TREE_PUBLIC (new_node->decl) = 0;
2284 DECL_COMDAT (new_node->decl) = 0;
2285 DECL_WEAK (new_node->decl) = 0;
2286 DECL_STATIC_CONSTRUCTOR (new_node->decl) = 0;
2287 DECL_STATIC_DESTRUCTOR (new_node->decl) = 0;
2288 new_node->clone.tree_map = tree_map;
2289 new_node->clone.args_to_skip = args_to_skip;
2290 FOR_EACH_VEC_ELT (ipa_replace_map_p, tree_map, i, map)
2292 tree var = map->new_tree;
2295 if (TREE_CODE (var) != ADDR_EXPR)
2297 var = get_base_var (var);
2301 /* Record references of the future statement initializing the constant
2303 if (TREE_CODE (var) == FUNCTION_DECL)
2305 struct cgraph_node *ref_node = cgraph_get_node (var);
2306 gcc_checking_assert (ref_node);
2307 ipa_record_reference (new_node, NULL, ref_node, NULL, IPA_REF_ADDR,
2310 else if (TREE_CODE (var) == VAR_DECL)
2311 ipa_record_reference (new_node, NULL, NULL, varpool_node (var),
2312 IPA_REF_ADDR, NULL);
/* Combine ARGS_TO_SKIP with any skip-bitmap already on OLD_NODE,
   renumbering new indices to positions in the original signature.  */
2315 new_node->clone.combined_args_to_skip = old_node->clone.combined_args_to_skip;
2316 else if (old_node->clone.combined_args_to_skip)
2318 int newi = 0, oldi = 0;
2320 bitmap new_args_to_skip = BITMAP_GGC_ALLOC ();
2321 struct cgraph_node *orig_node;
2322 for (orig_node = old_node; orig_node->clone_of; orig_node = orig_node->clone_of)
2324 for (arg = DECL_ARGUMENTS (orig_node->decl); arg; arg = DECL_CHAIN (arg), oldi++)
2326 if (bitmap_bit_p (old_node->clone.combined_args_to_skip, oldi))
2328 bitmap_set_bit (new_args_to_skip, oldi);
2331 if (bitmap_bit_p (args_to_skip, newi))
2332 bitmap_set_bit (new_args_to_skip, oldi);
2335 new_node->clone.combined_args_to_skip = new_args_to_skip;
2338 new_node->clone.combined_args_to_skip = args_to_skip;
2339 new_node->local.externally_visible = 0;
2340 new_node->local.local = 1;
2341 new_node->lowered = true;
2342 new_node->reachable = true;
2344 cgraph_call_node_duplication_hooks (old_node, new_node);
2350 /* NODE is no longer nested function; update cgraph accordingly. */
/* NOTE(review): lines are elided; as listed, NODE2 dereferences
   node->origin before the assert -- verify ordering in the full source.  */
2352 cgraph_unnest_node (struct cgraph_node *node)
2354 struct cgraph_node **node2 = &node->origin->nested;
2355 gcc_assert (node->origin);
/* Unlink NODE from its origin's singly-linked nested list.  */
2357 while (*node2 != node)
2358 node2 = &(*node2)->next_nested;
2359 *node2 = node->next_nested;
2360 node->origin = NULL;
2363 /* Return function availability. See cgraph.h for description of individual
/* NOTE(review): lines are elided from this listing (numbering jumps).  */
2366 cgraph_function_body_availability (struct cgraph_node *node)
2368 enum availability avail;
2369 gcc_assert (cgraph_function_flags_ready);
/* Classify from strongest (LOCAL) to weakest (NOT_AVAILABLE) based on
   visibility and replaceability of the body.  */
2370 if (!node->analyzed)
2371 avail = AVAIL_NOT_AVAILABLE;
2372 else if (node->local.local)
2373 avail = AVAIL_LOCAL;
2374 else if (!node->local.externally_visible)
2375 avail = AVAIL_AVAILABLE;
2376 /* Inline functions are safe to be analyzed even if their symbol can
2377 be overwritten at runtime. It is not meaningful to enforce any sane
2378 behaviour on replacing inline function by different body. */
2379 else if (DECL_DECLARED_INLINE_P (node->decl))
2380 avail = AVAIL_AVAILABLE;
2382 /* If the function can be overwritten, return OVERWRITABLE. Take
2383 care at least of two notable extensions - the COMDAT functions
2384 used to share template instantiations in C++ (this is symmetric
2385 to code cp_cannot_inline_tree_fn and probably shall be shared and
2386 the inlinability hooks completely eliminated).
2388 ??? Does the C++ one definition rule allow us to always return
2389 AVAIL_AVAILABLE here? That would be good reason to preserve this
2392 else if (decl_replaceable_p (node->decl) && !DECL_EXTERNAL (node->decl))
2393 avail = AVAIL_OVERWRITABLE;
2394 else avail = AVAIL_AVAILABLE;
2399 /* Add the function FNDECL to the call graph.
2400 Unlike cgraph_finalize_function, this function is intended to be used
2401 by middle end and allows insertion of new function at arbitrary point
2402 of compilation. The function can be either in high, low or SSA form
2405 The function is assumed to be reachable and have address taken (so no
2406 API breaking optimizations are performed on it).
2408 Main work done by this function is to enqueue the function for later
2409 processing to avoid need the passes to be re-entrant. */
/* NOTE(review): lines are elided from this listing (numbering jumps);
   break statements and pop_cfun calls are not all visible.  */
2412 cgraph_add_new_function (tree fndecl, bool lowered)
2414 struct cgraph_node *node;
/* How much work is needed depends on how far compilation has progressed
   when the new function appears.  */
2415 switch (cgraph_state)
2417 case CGRAPH_STATE_CONSTRUCTION:
2418 /* Just enqueue function to be processed at nearest occurrence. */
2419 node = cgraph_create_node (fndecl);
2420 node->next_needed = cgraph_new_nodes;
2422 node->lowered = true;
2423 cgraph_new_nodes = node;
2426 case CGRAPH_STATE_IPA:
2427 case CGRAPH_STATE_IPA_SSA:
2428 case CGRAPH_STATE_EXPANSION:
2429 /* Bring the function into finalized state and enqueue for later
2430 analyzing and compilation. */
2431 node = cgraph_get_create_node (fndecl);
2432 node->local.local = false;
2433 node->local.finalized = true;
2434 node->reachable = node->needed = true;
2435 if (!lowered && cgraph_state == CGRAPH_STATE_EXPANSION)
/* Run lowering and early-local passes now, since the normal
   lowering phase is already over.  */
2437 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2438 current_function_decl = fndecl;
2439 gimple_register_cfg_hooks ();
2440 bitmap_obstack_initialize (NULL);
2441 execute_pass_list (all_lowering_passes);
2442 execute_pass_list (pass_early_local_passes.pass.sub);
2443 bitmap_obstack_release (NULL);
2445 current_function_decl = NULL;
2450 node->lowered = true;
2451 node->next_needed = cgraph_new_nodes;
2452 cgraph_new_nodes = node;
2455 case CGRAPH_STATE_FINISHED:
2456 /* At the very end of compilation we have to do all the work up
2458 node = cgraph_create_node (fndecl);
2460 node->lowered = true;
2461 cgraph_analyze_function (node);
2462 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
2463 current_function_decl = fndecl;
2464 gimple_register_cfg_hooks ();
2465 bitmap_obstack_initialize (NULL);
2466 if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
2467 execute_pass_list (pass_early_local_passes.pass.sub);
2468 bitmap_obstack_release (NULL);
2469 tree_rest_of_compilation (fndecl);
2471 current_function_decl = NULL;
2475 /* Set a personality if required and we already passed EH lowering. */
2477 && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
2478 == eh_personality_lang))
2479 DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
2482 /* Worker for cgraph_node_can_be_local_p. */
2484 cgraph_node_cannot_be_local_p_1 (struct cgraph_node *node,
2485 void *data ATTRIBUTE_UNUSED)
2487 return !(!node->needed
2488 && ((DECL_COMDAT (node->decl) && !node->same_comdat_group)
2489 || !node->local.externally_visible));
2492 /* Return true if NODE can be made local for API change.
2493 Extern inline functions and C++ COMDAT functions can be made local
2494 at the expense of possible code size growth if function is used in multiple
2495 compilation units. */
2497 cgraph_node_can_be_local_p (struct cgraph_node *node)
     /* Localizable only when the address is never taken and neither the node
        nor any of its aliases objects (per the worker callback).  */
2499 return (!node->address_taken
2500 && !cgraph_for_node_and_aliases (node,
2501 cgraph_node_cannot_be_local_p_1,
2505 /* Make DECL local. FIXME: We shouldn't need to mess with rtl this early,
2506 but other code such as notice_global_symbol generates rtl. */
2508 cgraph_make_decl_local (tree decl)
2512 if (TREE_CODE (decl) == VAR_DECL)
2513 DECL_COMMON (decl) = 0;
     /* Only variables and functions are expected here.  */
2514 else gcc_assert (TREE_CODE (decl) == FUNCTION_DECL);
2516 if (DECL_ONE_ONLY (decl) || DECL_COMDAT (decl))
2518 /* It is possible that we are linking against library defining same COMDAT
2519 function. To avoid conflict we need to rename our local name of the
2520 function just in the case WHOPR partitioning decide to make it hidden
2521 to avoid cross partition references. */
2524 const char *old_name;
2526 old_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
2527 if (TREE_CODE (decl) == FUNCTION_DECL)
2529 struct cgraph_node *node = cgraph_get_node (decl);
     /* Rename with a ".local" clone suffix and record the mapping so LTO
        streaming can still resolve the old assembler name.  */
2530 change_decl_assembler_name (decl,
2531 clone_function_name (decl, "local"));
2532 if (node->local.lto_file_data)
2533 lto_record_renamed_decl (node->local.lto_file_data,
2536 (DECL_ASSEMBLER_NAME (decl)));
2538 else if (TREE_CODE (decl) == VAR_DECL)
2540 struct varpool_node *vnode = varpool_get_node (decl);
2541 /* change_decl_assembler_name will warn here on vtables because
2542 C++ frontend still sets TREE_SYMBOL_REFERENCED on them. */
2543 SET_DECL_ASSEMBLER_NAME (decl,
2544 clone_function_name (decl, "local"));
2545 if (vnode->lto_file_data)
2546 lto_record_renamed_decl (vnode->lto_file_data,
2549 (DECL_ASSEMBLER_NAME (decl)));
     /* Strip every flag that makes the symbol globally visible.  */
2552 DECL_SECTION_NAME (decl) = 0;
2553 DECL_COMDAT (decl) = 0;
2555 DECL_COMDAT_GROUP (decl) = 0;
2556 DECL_WEAK (decl) = 0;
2557 DECL_EXTERNAL (decl) = 0;
2558 TREE_PUBLIC (decl) = 0;
2559 if (!DECL_RTL_SET_P (decl))
2562 /* Update rtl flags. */
2563 make_decl_rtl (decl);
2565 rtl = DECL_RTL (decl);
2569 symbol = XEXP (rtl, 0);
2570 if (GET_CODE (symbol) != SYMBOL_REF)
     /* Keep the already-emitted SYMBOL_REF consistent with the cleared
        DECL_WEAK flag.  */
2573 SYMBOL_REF_WEAK (symbol) = DECL_WEAK (decl);
2576 /* Call callback on NODE, thunks and aliases associated to NODE.
2577 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
     skipped.  Stops and propagates a true result from CALLBACK.  */
2581 cgraph_for_node_thunks_and_aliases (struct cgraph_node *node,
2582 bool (*callback) (struct cgraph_node *, void *),
2584 bool include_overwritable)
2586 struct cgraph_edge *e;
2588 struct ipa_ref *ref;
2590 if (callback (node, data))
     /* Thunks show up as callers of NODE with thunk.thunk_p set; recurse
        into each one (and transitively its thunks/aliases).  */
2592 for (e = node->callers; e; e = e->next_caller)
2593 if (e->caller->thunk.thunk_p
2594 && (include_overwritable
2595 || cgraph_function_body_availability (e->caller) > AVAIL_OVERWRITABLE))
2596 if (cgraph_for_node_thunks_and_aliases (e->caller, callback, data,
2597 include_overwritable))
     /* Aliases are found through incoming IPA_REF_ALIAS references.  */
2599 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
2600 if (ref->use == IPA_REF_ALIAS)
2602 struct cgraph_node *alias = ipa_ref_refering_node (ref);
2603 if (include_overwritable
2604 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2605 if (cgraph_for_node_thunks_and_aliases (alias, callback, data,
2606 include_overwritable))
2612 /* Call callback on NODE and aliases associated to NODE.
2613 When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
     skipped.  Like cgraph_for_node_thunks_and_aliases but without
     visiting thunks.  */
2617 cgraph_for_node_and_aliases (struct cgraph_node *node,
2618 bool (*callback) (struct cgraph_node *, void *),
2620 bool include_overwritable)
2623 struct ipa_ref *ref;
2625 if (callback (node, data))
     /* Walk incoming IPA_REF_ALIAS references and recurse into each alias
        whose body availability qualifies.  */
2627 for (i = 0; ipa_ref_list_refering_iterate (&node->ref_list, i, ref); i++)
2628 if (ref->use == IPA_REF_ALIAS)
2630 struct cgraph_node *alias = ipa_ref_refering_node (ref);
2631 if (include_overwritable
2632 || cgraph_function_body_availability (alias) > AVAIL_OVERWRITABLE)
2633 if (cgraph_for_node_and_aliases (alias, callback, data,
2634 include_overwritable))
2640 /* Worker to bring NODE local. */
2643 cgraph_make_node_local_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2645 gcc_checking_assert (cgraph_node_can_be_local_p (node));
     /* Only COMDAT/external decls need their linkage flags stripped.  */
2646 if (DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
2648 cgraph_make_decl_local (node->decl);
2650 node->local.externally_visible = false;
2651 node->local.local = true;
     /* A localized symbol is by definition the IR-only prevailing def.  */
2652 node->resolution = LDPR_PREVAILING_DEF_IRONLY;
2653 gcc_assert (cgraph_function_body_availability (node) == AVAIL_LOCAL);
2658 /* Bring NODE local.  Applies cgraph_make_node_local_1 to NODE and its
     thunks and aliases.  */
2661 cgraph_make_node_local (struct cgraph_node *node)
2663 cgraph_for_node_thunks_and_aliases (node, cgraph_make_node_local_1,
2667 /* Worker to set nothrow flag. */
2670 cgraph_set_nothrow_flag_1 (struct cgraph_node *node, void *data)
2672 struct cgraph_edge *e;
     /* DATA is the boolean flag smuggled through the opaque pointer.  */
2674 TREE_NOTHROW (node->decl) = data != NULL;
     /* NOTE(review): clearing can_throw_external on caller edges is only
        meaningful when nothrow is being SET — confirm the guard on this
        loop.  */
2677 for (e = node->callers; e; e = e->next_caller)
2678 e->can_throw_external = false;
2682 /* Set TREE_NOTHROW on NODE's decl and on aliases of NODE
2683 if any to NOTHROW. */
2686 cgraph_set_nothrow_flag (struct cgraph_node *node, bool nothrow)
2688 cgraph_for_node_thunks_and_aliases (node, cgraph_set_nothrow_flag_1,
2689 (void *)(size_t)nothrow, false);
2692 /* Worker to set const flag. */
2695 cgraph_set_const_flag_1 (struct cgraph_node *node, void *data)
2697 /* Static constructors and destructors without a side effect can be
     optimized out entirely once marked const.  */
2699 if (data && !((size_t)data & 2))
2701 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2702 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2703 if (DECL_STATIC_DESTRUCTOR (node->decl))
2704 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
     /* DATA encodes two bits: non-NULL means const, bit 1 means looping.  */
2706 TREE_READONLY (node->decl) = data != NULL;
2707 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2711 /* Set TREE_READONLY on NODE's decl and on aliases of NODE
2712 if any to READONLY.  LOOPING selects the "looping const" variant.  */
2715 cgraph_set_const_flag (struct cgraph_node *node, bool readonly, bool looping)
     /* Pack READONLY into bit 0 and LOOPING into bit 1 of the data
        pointer; the worker unpacks them.  */
2717 cgraph_for_node_thunks_and_aliases (node, cgraph_set_const_flag_1,
2718 (void *)(size_t)(readonly + (int)looping * 2),
2722 /* Worker to set pure flag. */
2725 cgraph_set_pure_flag_1 (struct cgraph_node *node, void *data)
2727 /* Static constructors and destructors without a side effect can be
     optimized out entirely once marked pure.  */
2729 if (data && !((size_t)data & 2))
2731 if (DECL_STATIC_CONSTRUCTOR (node->decl))
2732 DECL_STATIC_CONSTRUCTOR (node->decl) = 0;
2733 if (DECL_STATIC_DESTRUCTOR (node->decl))
2734 DECL_STATIC_DESTRUCTOR (node->decl) = 0;
     /* DATA encodes two bits: non-NULL means pure, bit 1 means looping.  */
2736 DECL_PURE_P (node->decl) = data != NULL;
2737 DECL_LOOPING_CONST_OR_PURE_P (node->decl) = ((size_t)data & 2) != 0;
2741 /* Set DECL_PURE_P on NODE's decl and on aliases of NODE
     if any to PURE.  LOOPING selects the "looping pure" variant.  */
2745 cgraph_set_pure_flag (struct cgraph_node *node, bool pure, bool looping)
     /* Pack PURE into bit 0 and LOOPING into bit 1, mirroring
        cgraph_set_const_flag.  */
2747 cgraph_for_node_thunks_and_aliases (node, cgraph_set_pure_flag_1,
2748 (void *)(size_t)(pure + (int)looping * 2),
2752 /* Data used by cgraph_propagate_frequency. */
2754 struct cgraph_propagate_frequency_data
     /* All flags start out true and are cleared by
        cgraph_propagate_frequency_1 as caller edges provide
        counter-evidence.  */
2756 bool maybe_unlikely_executed;
2757 bool maybe_executed_once;
2758 bool only_called_at_startup;
2759 bool only_called_at_exit;
2762 /* Worker for cgraph_propagate_frequency_1.  Inspect all caller edges of
     NODE and clear the flags in DATA (a cgraph_propagate_frequency_data)
     that the callers contradict.  */
2765 cgraph_propagate_frequency_1 (struct cgraph_node *node, void *data)
2767 struct cgraph_propagate_frequency_data *d;
2768 struct cgraph_edge *edge;
2770 d = (struct cgraph_propagate_frequency_data *)data;
     /* Bail out of the walk early once every flag has been cleared —
        nothing further can change.  */
2771 for (edge = node->callers;
2772 edge && (d->maybe_unlikely_executed || d->maybe_executed_once
2773 || d->only_called_at_startup || d->only_called_at_exit);
2774 edge = edge->next_caller)
     /* Self-recursion tells us nothing about startup/exit placement.  */
2776 if (edge->caller != node)
2778 d->only_called_at_startup &= edge->caller->only_called_at_startup;
2779 /* It makes sense to put main() together with the static constructors.
2780 It will be executed for sure, but rest of functions called from
2781 main are definitely not at startup only. */
2782 if (MAIN_NAME_P (DECL_NAME (edge->caller->decl)))
2783 d->only_called_at_startup = 0;
2784 d->only_called_at_exit &= edge->caller->only_called_at_exit;
     /* Edges with zero frequency carry no execution evidence.  */
2786 if (!edge->frequency)
2788 switch (edge->caller->frequency)
2790 case NODE_FREQUENCY_UNLIKELY_EXECUTED:
2792 case NODE_FREQUENCY_EXECUTED_ONCE:
2793 if (dump_file && (dump_flags & TDF_DETAILS))
2794 fprintf (dump_file, " Called by %s that is executed once\n",
2795 cgraph_node_name (edge->caller));
2796 d->maybe_unlikely_executed = false;
     /* A call inside a loop of a once-executed caller may still run many
        times.  */
2797 if (inline_edge_summary (edge)->loop_depth)
2799 d->maybe_executed_once = false;
2800 if (dump_file && (dump_flags & TDF_DETAILS))
2801 fprintf (dump_file, " Called in loop\n");
2804 case NODE_FREQUENCY_HOT:
2805 case NODE_FREQUENCY_NORMAL:
2806 if (dump_file && (dump_flags & TDF_DETAILS))
2807 fprintf (dump_file, " Called by %s that is normal or hot\n",
2808 cgraph_node_name (edge->caller));
2809 d->maybe_unlikely_executed = false;
2810 d->maybe_executed_once = false;
2814 return edge != NULL;
2817 /* See if the frequency of NODE can be updated based on frequencies of its
     callers; likewise the only-called-at-startup/exit flags.  */
2820 cgraph_propagate_frequency (struct cgraph_node *node)
2822 struct cgraph_propagate_frequency_data d = {true, true, true, true};
2823 bool changed = false;
     /* Only local (not externally callable) nodes can be reasoned about
        purely from their visible callers.  */
2825 if (!node->local.local)
2827 gcc_assert (node->analyzed);
2828 if (dump_file && (dump_flags & TDF_DETAILS))
2829 fprintf (dump_file, "Processing frequency %s\n", cgraph_node_name (node));
2831 cgraph_for_node_and_aliases (node, cgraph_propagate_frequency_1, &d, true);
     /* The startup/exit flags are mutually exclusive by construction:
        promote only when exactly one of them survived the walk.  */
2833 if ((d.only_called_at_startup && !d.only_called_at_exit)
2834 && !node->only_called_at_startup)
2836 node->only_called_at_startup = true;
2838 fprintf (dump_file, "Node %s promoted to only called at startup.\n",
2839 cgraph_node_name (node));
2842 if ((d.only_called_at_exit && !d.only_called_at_startup)
2843 && !node->only_called_at_exit)
2845 node->only_called_at_exit = true;
2847 fprintf (dump_file, "Node %s promoted to only called at exit.\n",
2848 cgraph_node_name (node));
2851 /* These come either from profile or user hints; never update them. */
2852 if (node->frequency == NODE_FREQUENCY_HOT
2853 || node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
2855 if (d.maybe_unlikely_executed)
2857 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
2859 fprintf (dump_file, "Node %s promoted to unlikely executed.\n",
2860 cgraph_node_name (node));
2863 else if (d.maybe_executed_once && node->frequency != NODE_FREQUENCY_EXECUTED_ONCE)
2865 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
2867 fprintf (dump_file, "Node %s promoted to executed once.\n",
2868 cgraph_node_name (node));
2874 /* Return true when NODE can not return or throw and thus
2875 it is safe to ignore its side effects for IPA analysis. */
2878 cgraph_node_cannot_return (struct cgraph_node *node)
2880 int flags = flags_from_decl_or_type (node->decl);
2881 if (!flag_exceptions)
2882 return (flags & ECF_NORETURN) != 0;
2884 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2885 == (ECF_NORETURN | ECF_NOTHROW));
2888 /* Return true when call of E can not lead to return from caller
2889 and thus it is safe to ignore its side effects for IPA analysis
2890 when computing side effects of the caller.
2891 FIXME: We could actually mark all edges that have no reaching
2892 path to EXIT_BLOCK_PTR or throw to get better results. */
2894 cgraph_edge_cannot_lead_to_return (struct cgraph_edge *e)
     /* If the caller itself never returns, no call inside it can lead to a
        return either.  */
2896 if (cgraph_node_cannot_return (e->caller))
     /* For indirect calls with unknown callee, fall back to the ECF flags
        recorded on the edge.  */
2898 if (e->indirect_unknown_callee)
2900 int flags = e->indirect_info->ecf_flags;
2901 if (!flag_exceptions)
2902 return (flags & ECF_NORETURN) != 0;
2904 return ((flags & (ECF_NORETURN | ECF_NOTHROW))
2905 == (ECF_NORETURN | ECF_NOTHROW));
2908 return cgraph_node_cannot_return (e->callee);
2911 /* Return true when function NODE can be removed from callgraph
2912 if all direct calls are eliminated. */
2915 cgraph_can_remove_if_no_direct_calls_and_refs_p (struct cgraph_node *node)
2917 gcc_assert (!node->global.inlined_to);
2918 /* Extern inlines can always go, we will use the external definition. */
2919 if (DECL_EXTERNAL (node->decl))
2921 /* When function is needed, we can not remove it. */
2922 if (node->needed || node->reachable_from_other_partition)
     /* Constructors/destructors are invoked by the runtime, not by direct
        calls, so they must stay.  */
2924 if (DECL_STATIC_CONSTRUCTOR (node->decl)
2925 || DECL_STATIC_DESTRUCTOR (node->decl))
2927 /* Only COMDAT functions can be removed if externally visible. */
2928 if (node->local.externally_visible
2929 && (!DECL_COMDAT (node->decl)
2930 || cgraph_used_from_object_file_p (node)))
2935 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2938 nonremovable_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2940 return !cgraph_can_remove_if_no_direct_calls_and_refs_p (node);
2943 /* Return true when function NODE and its aliases can be removed from callgraph
2944 if all direct calls are eliminated. */
2947 cgraph_can_remove_if_no_direct_calls_p (struct cgraph_node *node)
2949 /* Extern inlines can always go, we will use the external definition. */
2950 if (DECL_EXTERNAL (node->decl))
     /* An address-taken function may be called indirectly.  */
2952 if (node->address_taken)
     /* Removable only when the node and every alias are removable.  */
2954 return !cgraph_for_node_and_aliases (node, nonremovable_p, NULL, true);
2957 /* Worker for cgraph_can_remove_if_no_direct_calls_p. */
2960 used_from_object_file_p (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2962 return cgraph_used_from_object_file_p (node);
2965 /* Return true when function NODE can be expected to be removed
2966 from program when direct calls in this compilation unit are removed.
2968 As a special case COMDAT functions are
2969 cgraph_can_remove_if_no_direct_calls_p while the are not
2970 cgraph_only_called_directly_p (it is possible they are called from other
2973 This function behaves as cgraph_only_called_directly_p because eliminating
2974 all uses of COMDAT function does not make it necessarily disappear from
2975 the program unless we are compiling whole program or we do LTO. In this
2976 case we know we win since dynamic linking will not really discard the
2977 linkonce section. */
2980 cgraph_will_be_removed_from_program_if_no_direct_calls (struct cgraph_node *node)
2982 gcc_assert (!node->global.inlined_to);
     /* If the node or any alias is referenced from a non-LTO object file,
        removing our calls cannot remove it from the program.  */
2983 if (cgraph_for_node_and_aliases (node, used_from_object_file_p, NULL, true))
2985 if (!in_lto_p && !flag_whole_program)
2986 return cgraph_only_called_directly_p (node);
2989 if (DECL_EXTERNAL (node->decl))
2991 return cgraph_can_remove_if_no_direct_calls_p (node);
2995 /* Return true when RESOLUTION indicate that linker will use
2996 the symbol from non-LTO object files. */
2999 resolution_used_from_other_file_p (enum ld_plugin_symbol_resolution resolution)
3001 return (resolution == LDPR_PREVAILING_DEF
3002 || resolution == LDPR_PREEMPTED_REG
3003 || resolution == LDPR_RESOLVED_EXEC
3004 || resolution == LDPR_RESOLVED_DYN);
3008 /* Return true when NODE is known to be used from other (non-LTO) object file.
3009 Known only when doing LTO via linker plugin. */
3012 cgraph_used_from_object_file_p (struct cgraph_node *node)
3014 gcc_assert (!node->global.inlined_to);
     /* Non-public or external symbols cannot be the definition another
        object file binds to.  */
3015 if (!TREE_PUBLIC (node->decl) || DECL_EXTERNAL (node->decl))
3017 if (resolution_used_from_other_file_p (node->resolution))
3022 /* Worker for cgraph_only_called_directly_p. */
3025 cgraph_not_only_called_directly_p_1 (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3027 return !cgraph_only_called_directly_or_aliased_p (node);
3030 /* Return true when function NODE and all its aliases are only called
3032 i.e. it is not externally visible, address was not taken and
3033 it is not used in any other non-standard way. */
3036 cgraph_only_called_directly_p (struct cgraph_node *node)
     /* Must be called on the function/thunk node itself, not an alias.  */
3038 gcc_assert (cgraph_function_or_thunk_node (node, NULL) == node);
3039 return !cgraph_for_node_and_aliases (node, cgraph_not_only_called_directly_p_1,
3044 /* Collect all callers of NODE. Worker for collect_callers_of_node. */
3047 collect_callers_of_node_1 (struct cgraph_node *node, void *data)
     /* DATA points at the caller-edge vector being accumulated.  */
3049 VEC (cgraph_edge_p, heap) ** redirect_callers = (VEC (cgraph_edge_p, heap) **)data;
3050 struct cgraph_edge *cs;
3051 enum availability avail;
3052 cgraph_function_or_thunk_node (node, &avail);
     /* Only collect callers when the body is guaranteed not to be
        overwritten, so the edges surely lead here.  Indirect-inlining
        edges are skipped — they are not real direct call sites.  */
3054 if (avail > AVAIL_OVERWRITABLE)
3055 for (cs = node->callers; cs != NULL; cs = cs->next_caller)
3056 if (!cs->indirect_inlining_edge)
3057 VEC_safe_push (cgraph_edge_p, heap, *redirect_callers, cs);
3061 /* Collect all callers of NODE and its aliases that are known to lead to NODE
3062 (i.e. are not overwritable). */
3064 VEC (cgraph_edge_p, heap) *
3065 collect_callers_of_node (struct cgraph_node *node)
3067 VEC (cgraph_edge_p, heap) * redirect_callers = NULL;
3068 cgraph_for_node_and_aliases (node, collect_callers_of_node_1,
3069 &redirect_callers, false);
3070 return redirect_callers;
3073 #include "gt-cgraph.h"