1 /* Callgraph construction.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
27 #include "tree-flow.h"
28 #include "langhooks.h"
29 #include "pointer-set.h"
33 #include "tree-pass.h"
34 #include "ipa-utils.h"
37 /* Context of record_reference. */
38 struct record_reference_ctx
/* NOTE(review): struct body is only partially visible here.  Based on the
   aggregate initializer `{false, NULL}` used in
   record_references_in_initializer and the `ctx.only_vars` assignment there,
   the struct also appears to carry a leading `bool only_vars` field — confirm
   against the full source.  */
41 struct varpool_node *varpool_node;
44 /* Walk tree and record all calls and references to functions/variables.
45 Called via walk_tree: TP is pointer to tree to be examined.
46 When DATA is non-null, record references to callgraph.
50 record_reference (tree *tp, int *walk_subtrees, void *data)
/* DATA is the record_reference_ctx set up by the caller; it names the
   varpool node whose initializer is being scanned.  */
54 struct record_reference_ctx *ctx = (struct record_reference_ctx *)data;
/* Dispatch on the tree code of the node being visited.  NOTE(review):
   the case labels are not visible in this view; the first visible arm
   presumably handles ADDR_EXPR — confirm against full source.  */
56 switch (TREE_CODE (t))
65 /* Record dereferences to the functions. This makes the
66 functions reachable unconditionally. */
67 decl = get_base_var (*tp);
68 if (TREE_CODE (decl) == FUNCTION_DECL)
/* Taking a function's address forces it to be kept: mark it address-taken
   and record an IPA address reference from the owning variable.  */
71 cgraph_mark_address_taken_node (cgraph_node (decl));
72 ipa_record_reference (NULL, ctx->varpool_node,
73 cgraph_node (decl), NULL,
77 if (TREE_CODE (decl) == VAR_DECL)
79 struct varpool_node *vnode = varpool_node (decl);
/* Give the frontend a chance to analyze the expression first.  */
80 if (lang_hooks.callgraph.analyze_expr)
81 lang_hooks.callgraph.analyze_expr (&decl, walk_subtrees);
82 varpool_mark_needed_node (vnode);
/* For an alias with an extra name, attribute the reference to the
   extra-name node rather than the alias itself.  */
83 if (vnode->alias && vnode->extra_name)
84 vnode = vnode->extra_name;
85 ipa_record_reference (NULL, ctx->varpool_node,
93 /* Save some cycles by not walking types and declaration as we
94 won't find anything useful there anyway. */
95 if (IS_TYPE_OR_DECL_P (*tp))
/* Language-specific tree codes are handed to the frontend hook.  */
101 if ((unsigned int) TREE_CODE (t) >= LAST_AND_UNUSED_TREE_CODE)
102 return lang_hooks.callgraph.analyze_expr (tp, walk_subtrees);
109 /* Record references to typeinfos in the type list LIST. */
112 record_type_list (struct cgraph_node *node, tree list)
/* Walk the TREE_LIST chain; each element's value is an exception type
   (or a runtime typeinfo object once lowered).  */
114 for (; list; list = TREE_CHAIN (list))
116 tree type = TREE_VALUE (list);
/* Map the source-level type to its runtime (typeinfo) representation.  */
119 type = lookup_type_for_runtime (type);
121 if (TREE_CODE (type) == ADDR_EXPR)
123 type = TREE_OPERAND (type, 0);
/* A typeinfo implemented as a variable must be kept and recorded as an
   address reference from NODE.  */
124 if (TREE_CODE (type) == VAR_DECL)
126 struct varpool_node *vnode = varpool_node (type);
127 varpool_mark_needed_node (vnode);
128 ipa_record_reference (node, NULL,
136 /* Record all references we will introduce by producing EH tables
140 record_eh_tables (struct cgraph_node *node, struct function *fun)
/* Depth-first walk of FUN's EH region tree, recording the typeinfo
   references each region's table will emit.  NOTE(review): several arms
   and the traversal stepping code are not visible in this view.  */
144 i = fun->eh->region_tree;
153 case ERT_MUST_NOT_THROW:
/* A try region references the typeinfos of every catch handler.  */
159 for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
160 record_type_list (node, c->type_list);
164 case ERT_ALLOWED_EXCEPTIONS:
165 record_type_list (node, i->u.allowed.type_list);
168 /* If there are sub-regions, process them. */
171 /* If there are peers, process them. */
172 else if (i->next_peer)
174 /* Otherwise, step back up the tree to the next peer. */
183 while (i->next_peer == NULL);
189 /* Reset inlining information of all incoming call edges of NODE. */
192 reset_inline_failed (struct cgraph_node *node)
194 struct cgraph_edge *e;
196 for (e = node->callers; e; e = e->next_caller)
/* The caller is no longer inlined into anything.  */
198 e->callee->global.inlined_to = NULL;
/* Pick the most specific reason available for why this edge cannot
   (currently) be inlined; order goes from hard to soft failures.
   NOTE(review): the condition guarding CIF_BODY_NOT_AVAILABLE is not
   visible in this view.  */
200 e->inline_failed = CIF_BODY_NOT_AVAILABLE;
201 else if (node->local.redefined_extern_inline)
202 e->inline_failed = CIF_REDEFINED_EXTERN_INLINE;
203 else if (!node->local.inlinable)
204 e->inline_failed = CIF_FUNCTION_NOT_INLINABLE;
205 else if (e->call_stmt_cannot_inline_p)
206 e->inline_failed = CIF_MISMATCHED_ARGUMENTS;
208 e->inline_failed = CIF_FUNCTION_NOT_CONSIDERED;
212 /* Computes the frequency of the call statement so that it can be stored in
213 cgraph_edge. BB is the basic block of the call statement. */
215 compute_call_stmt_bb_frequency (tree decl, basic_block bb)
/* Scale BB's frequency relative to the function entry block frequency,
   expressed in CGRAPH_FREQ_BASE units.  */
217 int entry_freq = ENTRY_BLOCK_PTR_FOR_FUNCTION
218 (DECL_STRUCT_FUNCTION (decl))->frequency;
219 int freq = bb->frequency;
/* Without profile data every call gets the neutral base frequency.  */
221 if (profile_status_for_function (DECL_STRUCT_FUNCTION (decl)) == PROFILE_ABSENT)
222 return CGRAPH_FREQ_BASE;
/* Avoid division by zero; bump freq too so the ratio stays meaningful.
   NOTE(review): the guarding condition (presumably entry_freq == 0) is
   not visible in this view.  */
225 entry_freq = 1, freq++;
227 freq = freq * CGRAPH_FREQ_BASE / entry_freq;
/* Clamp to the representable maximum.  */
228 if (freq > CGRAPH_FREQ_MAX)
229 freq = CGRAPH_FREQ_MAX;
234 /* Mark address taken in STMT. */
237 mark_address (gimple stmt ATTRIBUTE_UNUSED, tree addr,
238 void *data ATTRIBUTE_UNUSED)
/* Callback for walk_stmt_load_store_addr_ops; DATA is the cgraph node of
   the function containing STMT (despite the ATTRIBUTE_UNUSED marking,
   DATA is used below).  */
240 if (TREE_CODE (addr) == FUNCTION_DECL)
/* Address of a function: keep it reachable and record an ADDR reference.  */
242 struct cgraph_node *node = cgraph_node (addr);
243 cgraph_mark_address_taken_node (node);
244 ipa_record_reference ((struct cgraph_node *)data, NULL,
/* Otherwise look through the expression for a static/external variable.  */
250 addr = get_base_address (addr);
251 if (addr && TREE_CODE (addr) == VAR_DECL
252 && (TREE_STATIC (addr) || DECL_EXTERNAL (addr)))
254 struct varpool_node *vnode = varpool_node (addr);
257 if (lang_hooks.callgraph.analyze_expr)
258 lang_hooks.callgraph.analyze_expr (&addr, &walk_subtrees);
259 varpool_mark_needed_node (vnode);
/* Attribute the reference to the extra-name node of an alias.  */
260 if (vnode->alias && vnode->extra_name)
261 vnode = vnode->extra_name;
262 ipa_record_reference ((struct cgraph_node *)data, NULL,
271 /* Mark load of T. */
274 mark_load (gimple stmt ATTRIBUTE_UNUSED, tree t,
275 void *data ATTRIBUTE_UNUSED)
/* Callback for walk_stmt_load_store_addr_ops; records an IPA_REF_LOAD
   style reference when T reads a static or external variable.  DATA is
   the cgraph node of the function containing STMT.  */
277 t = get_base_address (t);
278 if (t && TREE_CODE (t) == VAR_DECL
279 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
281 struct varpool_node *vnode = varpool_node (t);
284 if (lang_hooks.callgraph.analyze_expr)
285 lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
286 varpool_mark_needed_node (vnode);
/* Attribute the reference to the extra-name node of an alias.  */
287 if (vnode->alias && vnode->extra_name)
288 vnode = vnode->extra_name;
289 ipa_record_reference ((struct cgraph_node *)data, NULL,
296 /* Mark store of T. */
299 mark_store (gimple stmt ATTRIBUTE_UNUSED, tree t,
300 void *data ATTRIBUTE_UNUSED)
/* Callback for walk_stmt_load_store_addr_ops; records an IPA_REF_STORE
   reference when T writes a static or external variable.  Mirrors
   mark_load except for the reference kind.  */
302 t = get_base_address (t);
303 if (t && TREE_CODE (t) == VAR_DECL
304 && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
306 struct varpool_node *vnode = varpool_node (t);
309 if (lang_hooks.callgraph.analyze_expr)
310 lang_hooks.callgraph.analyze_expr (&t, &walk_subtrees);
311 varpool_mark_needed_node (vnode);
/* Attribute the reference to the extra-name node of an alias.  */
312 if (vnode->alias && vnode->extra_name)
313 vnode = vnode->extra_name;
314 ipa_record_reference ((struct cgraph_node *)data, NULL,
316 IPA_REF_STORE, NULL);
321 /* Create cgraph edges for function calls.
322 Also look for functions and variables having addresses taken. */
325 build_cgraph_edges (void)
328 struct cgraph_node *node = cgraph_node (current_function_decl);
329 struct pointer_set_t *visited_nodes = pointer_set_create ();
330 gimple_stmt_iterator gsi;
333 /* Create the callgraph edges and record the nodes referenced by the function.
/* Scan every statement of every basic block.  NOTE(review): the
   enclosing FOR_EACH_BB loop is not visible in this view.  */
337 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
339 gimple stmt = gsi_stmt (gsi);
/* Direct calls get a cgraph edge; calls without a known fndecl become
   indirect edges.  */
342 if (is_gimple_call (stmt))
344 int freq = compute_call_stmt_bb_frequency (current_function_decl,
346 decl = gimple_call_fndecl (stmt);
348 cgraph_create_edge (node, cgraph_node (decl), stmt,
352 cgraph_create_indirect_edge (node, stmt,
353 gimple_call_flags (stmt),
/* Record loads, stores and address-takings made by this statement.  */
357 walk_stmt_load_store_addr_ops (stmt, node, mark_load,
358 mark_store, mark_address);
/* OMP outlined bodies are reached through runtime calls, so their
   child functions must be explicitly marked needed.  */
359 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
360 && gimple_omp_parallel_child_fn (stmt))
362 tree fn = gimple_omp_parallel_child_fn (stmt);
363 cgraph_mark_needed_node (cgraph_node (fn));
365 if (gimple_code (stmt) == GIMPLE_OMP_TASK)
367 tree fn = gimple_omp_task_child_fn (stmt);
369 cgraph_mark_needed_node (cgraph_node (fn));
370 fn = gimple_omp_task_copy_fn (stmt);
372 cgraph_mark_needed_node (cgraph_node (fn));
/* PHI arguments can also reference variables.  */
375 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
376 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
377 mark_load, mark_store, mark_address);
380 /* Look for initializers of constant variables and private statics. */
381 for (step = cfun->local_decls;
383 step = TREE_CHAIN (step))
385 tree decl = TREE_VALUE (step);
/* Finalize defined statics so their initializers are processed.  */
386 if (TREE_CODE (decl) == VAR_DECL
387 && (TREE_STATIC (decl) && !DECL_EXTERNAL (decl)))
388 varpool_finalize_decl (decl);
/* EH tables reference typeinfos that must also be kept.  */
390 record_eh_tables (node, cfun);
392 pointer_set_destroy (visited_nodes);
/* Pass descriptor: runs build_cgraph_edges once the CFG is available.
   NOTE(review): several initializer fields are not visible in this view.  */
396 struct gimple_opt_pass pass_build_cgraph_edges =
400 "*build_cgraph_edges", /* name */
402 build_cgraph_edges, /* execute */
405 0, /* static_pass_number */
407 PROP_cfg, /* properties_required */
408 0, /* properties_provided */
409 0, /* properties_destroyed */
410 0, /* todo_flags_start */
411 0 /* todo_flags_finish */
415 /* Record references to functions and other variables present in the
416 initial value of DECL, a variable.
417 When ONLY_VARS is true, we mark needed only variables, not functions. */
420 record_references_in_initializer (tree decl, bool only_vars)
/* visited_nodes dedupes tree nodes during the walk so shared subtrees
   are scanned once.  */
422 struct pointer_set_t *visited_nodes = pointer_set_create ();
423 struct varpool_node *node = varpool_node (decl);
424 struct record_reference_ctx ctx = {false, NULL};
426 ctx.varpool_node = node;
427 ctx.only_vars = only_vars;
/* record_reference is invoked on every subtree of the initializer.  */
428 walk_tree (&DECL_INITIAL (decl), record_reference,
429 &ctx, visited_nodes);
430 pointer_set_destroy (visited_nodes);
433 /* Rebuild cgraph edges for current function node. This needs to be run after
434 passes that don't update the cgraph. */
437 rebuild_cgraph_edges (void)
440 struct cgraph_node *node = cgraph_node (current_function_decl);
441 gimple_stmt_iterator gsi;
/* Drop stale outgoing edges and references before rescanning the body.  */
443 cgraph_node_remove_callees (node);
444 ipa_remove_all_references (&node->ref_list);
/* Refresh the node's profile count from the entry block.  */
446 node->count = ENTRY_BLOCK_PTR->count;
/* Rescan every statement; mirrors the scan in build_cgraph_edges minus
   the OMP and local-statics handling.  NOTE(review): the enclosing
   FOR_EACH_BB loop is not visible in this view.  */
450 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
452 gimple stmt = gsi_stmt (gsi);
455 if (is_gimple_call (stmt))
457 int freq = compute_call_stmt_bb_frequency (current_function_decl,
459 decl = gimple_call_fndecl (stmt);
461 cgraph_create_edge (node, cgraph_node (decl), stmt,
465 cgraph_create_indirect_edge (node, stmt,
466 gimple_call_flags (stmt),
470 walk_stmt_load_store_addr_ops (stmt, node, mark_load,
471 mark_store, mark_address);
/* PHI arguments can also reference variables.  */
474 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
475 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
476 mark_load, mark_store, mark_address);
478 record_eh_tables (node, cfun);
/* A function we are rebuilding edges for must not itself be inlined.  */
479 gcc_assert (!node->global.inlined_to);
484 /* Rebuild cgraph edges for current function node. This needs to be run after
485 passes that don't update the cgraph. */
/* NOTE(review): despite the copied comment above, this variant rebuilds
   only the IPA reference list (loads/stores/address-takings), not call
   edges — callees are left untouched.  */
488 cgraph_rebuild_references (void)
491 struct cgraph_node *node = cgraph_node (current_function_decl);
492 gimple_stmt_iterator gsi;
494 ipa_remove_all_references (&node->ref_list);
/* Refresh the node's profile count from the entry block.  */
496 node->count = ENTRY_BLOCK_PTR->count;
/* NOTE(review): the enclosing FOR_EACH_BB loop is not visible in this view.  */
500 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
502 gimple stmt = gsi_stmt (gsi);
504 walk_stmt_load_store_addr_ops (stmt, node, mark_load,
505 mark_store, mark_address);
/* PHI arguments can also reference variables.  */
508 for (gsi = gsi_start (phi_nodes (bb)); !gsi_end_p (gsi); gsi_next (&gsi))
509 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), node,
510 mark_load, mark_store, mark_address);
512 record_eh_tables (node, cfun);
/* Pass descriptor: reruns edge construction after cgraph-invalidating
   transforms.  NOTE(review): several initializer fields are not visible
   in this view.  */
515 struct gimple_opt_pass pass_rebuild_cgraph_edges =
519 "*rebuild_cgraph_edges", /* name */
521 rebuild_cgraph_edges, /* execute */
524 0, /* static_pass_number */
526 PROP_cfg, /* properties_required */
527 0, /* properties_provided */
528 0, /* properties_destroyed */
529 0, /* todo_flags_start */
530 0, /* todo_flags_finish */
/* Pass callback: drop all outgoing call edges of the current function,
   typically so a later pass can rebuild them from scratch.  */
536 remove_cgraph_callee_edges (void)
538 cgraph_node_remove_callees (cgraph_node (current_function_decl));
/* Pass descriptor wrapping remove_cgraph_callee_edges.  NOTE(review):
   several initializer fields are not visible in this view.  */
542 struct gimple_opt_pass pass_remove_cgraph_callee_edges =
546 "*remove_cgraph_callee_edges", /* name */
548 remove_cgraph_callee_edges, /* execute */
551 0, /* static_pass_number */
553 0, /* properties_required */
554 0, /* properties_provided */
555 0, /* properties_destroyed */
556 0, /* todo_flags_start */
557 0, /* todo_flags_finish */