1 /* Callgraph based analysis of static variables.
2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file marks functions as being either const (TREE_READONLY) or
23 pure (DECL_PURE_P). It can also set a variant of these that
24 are allowed to loop indefinitely (DECL_LOOPING_CONST_PURE_P).
26 This must be run after inlining decisions have been made since
27 otherwise, the local sets will not contain information that is
28 consistent with post inlined state. The global sets are not prone
29 to this problem since they are by definition transitive. */
31 /* The code in this module is called by the ipa pass manager. It
32 should be one of the later passes since its information is used by
33 the rest of the compilation. */
37 #include "coretypes.h"
40 #include "tree-flow.h"
41 #include "tree-inline.h"
42 #include "tree-pass.h"
43 #include "langhooks.h"
44 #include "pointer-set.h"
46 #include "ipa-utils.h"
53 #include "diagnostic.h"
54 #include "gimple-pretty-print.h"
55 #include "langhooks.h"
57 #include "lto-streamer.h"
59 #include "tree-scalar-evolution.h"
/* Shared scratch set of nodes already visited while scanning initializers
   of shared static declarations (created/destroyed by the summary code).  */
63 static struct pointer_set_t *visited_nodes;
65 /* Lattice values for const and pure functions.  Everything starts out
66 being const, then may drop to pure and then neither depending on
68 enum pure_const_state_e
/* Printable names for the lattice values; order must match the enum.  */
75 const char *pure_const_names[3] = {"const", "pure", "neither"};
77 /* Holder for the const_state. There is one of these per function
/* Lattice value computed for the function body.  */
82 enum pure_const_state_e pure_const_state;
83 /* What user set here; we can be always sure about this. */
84 enum pure_const_state_e state_previously_known;
85 bool looping_previously_known;
87 /* True if the function could possibly infinite loop. There are a
88 lot of ways that this could be determined. We are pretty
89 conservative here. While it is possible to cse pure and const
90 calls, it is not legal to have dce get rid of the call if there
91 is a possibility that the call could infinite loop since this is
92 a behavioral change. */
98 typedef struct funct_state_d * funct_state;
100 /* The storage of the funct_state is abstracted because there is the
101 possibility that it may be desirable to move this to the cgraph
104 /* Array, indexed by cgraph node uid, of function states. */
106 DEF_VEC_P (funct_state);
107 DEF_VEC_ALLOC_P (funct_state, heap);
108 static VEC (funct_state, heap) *funct_state_vec;
110 /* Holders of ipa cgraph hooks: */
111 static struct cgraph_node_hook_list *function_insertion_hook_holder;
112 static struct cgraph_2node_hook_list *node_duplication_hook_holder;
113 static struct cgraph_node_hook_list *node_removal_hook_holder;
115 /* Try to guess if function body will always be visible to compiler
116 when compiling the call and whether compiler will be able
117 to propagate the information by itself. */
/* True when DECL is not public or is declared inline, i.e. every caller
   in this unit can see the body.  */
120 function_always_visible_to_compiler_p (tree decl)
122 return (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl));
125 /* Emit suggestion about attribute ATTRIB_NAME for DECL. KNOWN_FINITE
126 is true if the function is known to be finite. The diagnostic is
127 controlled by OPTION. WARNED_ABOUT is a pointer_set unique for
128 OPTION, this function may initialize it and it is always returned
131 static struct pointer_set_t *
132 suggest_attribute (int option, tree decl, bool known_finite,
133 struct pointer_set_t *warned_about,
134 const char * attrib_name)
/* Nothing to do when the warning option is disabled.  */
136 if (!option_enabled (option))
/* Volatile (noreturn) decls, and known-finite always-visible functions,
   gain nothing from the attribute — the compiler can see for itself.  */
138 if (TREE_THIS_VOLATILE (decl)
139 || (known_finite && function_always_visible_to_compiler_p (decl)))
/* Lazily create the per-option "already warned" set.  */
143 warned_about = pointer_set_create ();
/* Warn about each decl at most once.  */
144 if (pointer_set_contains (warned_about, decl))
146 pointer_set_insert (warned_about, decl);
147 warning_at (DECL_SOURCE_LOCATION (decl),
150 ? _("function might be candidate for attribute %<%s%>")
151 : _("function might be candidate for attribute %<%s%>"
152 " if it is known to return normally"), attrib_name);
156 /* Emit suggestion about __attribute__((pure)) for DECL. KNOWN_FINITE
157 is true if the function is known to be finite. */
160 warn_function_pure (tree decl, bool known_finite)
/* Cache of decls already warned about for -Wsuggest-attribute=pure.  */
162 static struct pointer_set_t *warned_about;
165 = suggest_attribute (OPT_Wsuggest_attribute_pure, decl,
166 known_finite, warned_about, "pure");
169 /* Emit suggestion about __attribute__((const)) for DECL. KNOWN_FINITE
170 is true if the function is known to be finite. */
173 warn_function_const (tree decl, bool known_finite)
/* Cache of decls already warned about for -Wsuggest-attribute=const.  */
175 static struct pointer_set_t *warned_about;
177 = suggest_attribute (OPT_Wsuggest_attribute_const, decl,
178 known_finite, warned_about, "const");
/* Emit suggestion about __attribute__((noreturn)) for DECL, unless the
   language says a missing noreturn is acceptable for this decl.  */
182 warn_function_noreturn (tree decl)
184 static struct pointer_set_t *warned_about;
185 if (!lang_hooks.missing_noreturn_ok_p (decl))
187 = suggest_attribute (OPT_Wsuggest_attribute_noreturn, decl,
188 true, warned_about, "noreturn");
190 /* Init the function state. */
/* NOTE(review): lines are elided between 190 and 195 — the free() below
   belongs to teardown of funct_state_vec (a finish_state helper), not to
   initialization; verify against the complete file.  */
195 free (funct_state_vec);
199 /* Return true if we have a function state for NODE. */
202 has_function_state (struct cgraph_node *node)
/* Out-of-range uid means no state was ever recorded.  */
205 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
207 return VEC_index (funct_state, funct_state_vec, node->uid) != NULL;
210 /* Return the function state from NODE. */
212 static inline funct_state
213 get_function_state (struct cgraph_node *node)
/* Conservative fallback returned when no state is recorded: everything
   unknown/worst-case (IPA_NEITHER, looping, can_throw).  */
215 static struct funct_state_d varying
216 = { IPA_NEITHER, IPA_NEITHER, true, true, true };
218 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
219 /* We might want to put correct previously_known state into varying. */
221 return VEC_index (funct_state, funct_state_vec, node->uid);
224 /* Set the function state S for NODE. */
227 set_function_state (struct cgraph_node *node, funct_state s)
/* Grow the uid-indexed vector (zero-filled) on demand before storing.  */
230 || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
231 VEC_safe_grow_cleared (funct_state, heap, funct_state_vec, node->uid + 1);
232 VEC_replace (funct_state, funct_state_vec, node->uid, s);
235 /* Check to see if the use (or definition when CHECKING_WRITE is true)
236 variable T is legal in a function that is either pure or const. */
239 check_decl (funct_state local,
240 tree t, bool checking_write, bool ipa)
242 /* Do not want to do anything with volatile except mark any
243 function that uses one to be not const or pure. */
244 if (TREE_THIS_VOLATILE (t))
246 local->pure_const_state = IPA_NEITHER;
248 fprintf (dump_file, " Volatile operand is not const/pure");
252 /* Do not care about a local automatic that is not static. */
253 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
256 /* If the variable has the "used" attribute, treat it as if it had
257 been touched by the devil. */
258 if (DECL_PRESERVE_P (t))
260 local->pure_const_state = IPA_NEITHER;
262 fprintf (dump_file, " Used static/global variable is not const/pure\n");
266 /* In IPA mode we are not interested in checking actual loads and stores;
267 they will be processed at propagation time using ipa_ref. */
271 /* Since we have dealt with the locals and params cases above, if we
272 are CHECKING_WRITE, this cannot be a pure or constant
276 local->pure_const_state = IPA_NEITHER;
278 fprintf (dump_file, " static/global memory write is not const/pure\n");
282 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
284 /* Readonly reads are safe. */
285 if (TREE_READONLY (t) && !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (t)))
286 return; /* Read of a constant, do not change the function state. */
290 fprintf (dump_file, " global memory read is not const\n");
291 /* Just a regular read: demote const to pure, pure stays pure.  */
292 if (local->pure_const_state == IPA_CONST)
293 local->pure_const_state = IPA_PURE;
298 /* Compilation level statics can be read if they are readonly
300 if (TREE_READONLY (t))
304 fprintf (dump_file, " static memory read is not const\n");
305 /* Just a regular read. */
306 if (local->pure_const_state == IPA_CONST)
307 local->pure_const_state = IPA_PURE;
312 /* Check to see if the use (or definition when CHECKING_WRITE is true)
313 variable T is legal in a function that is either pure or const. */
316 check_op (funct_state local, tree t, bool checking_write)
/* Only the base of the reference matters for volatility/aliasing.  */
318 t = get_base_address (t);
319 if (t && TREE_THIS_VOLATILE (t))
321 local->pure_const_state = IPA_NEITHER;
323 fprintf (dump_file, " Volatile indirect ref is not const/pure\n");
/* A dereference of an SSA pointer that provably cannot alias global
   memory only touches local state — harmless for const/pure.  */
327 && INDIRECT_REF_P (t)
328 && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
329 && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
332 fprintf (dump_file, " Indirect ref to local memory is OK\n");
335 else if (checking_write)
337 local->pure_const_state = IPA_NEITHER;
339 fprintf (dump_file, " Indirect ref write is not const/pure\n");
345 fprintf (dump_file, " Indirect ref read is not const\n");
/* Reads through a possibly-global pointer demote const to pure.  */
346 if (local->pure_const_state == IPA_CONST)
347 local->pure_const_state = IPA_PURE;
351 /* compute state based on ECF FLAGS and store to STATE and LOOPING. */
354 state_from_flags (enum pure_const_state_e *state, bool *looping,
355 int flags, bool cannot_lead_to_return)
358 if (flags & ECF_LOOPING_CONST_OR_PURE)
361 if (dump_file && (dump_flags & TDF_DETAILS))
362 fprintf (dump_file, " looping");
364 if (flags & ECF_CONST)
367 if (dump_file && (dump_flags & TDF_DETAILS))
368 fprintf (dump_file, " const\n");
370 else if (flags & ECF_PURE)
373 if (dump_file && (dump_flags & TDF_DETAILS))
374 fprintf (dump_file, " pure\n");
376 else if (cannot_lead_to_return)
380 if (dump_file && (dump_flags & TDF_DETAILS))
381 fprintf (dump_file, " ignoring side effects->pure looping\n");
385 if (dump_file && (dump_flags & TDF_DETAILS))
386 fprintf (dump_file, " neihter\n");
387 *state = IPA_NEITHER;
392 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
393 into STATE and LOOPING better of the two variants.
394 Be sure to merge looping correctly. IPA_NEITHER functions
395 have looping 0 even if they don't have to return. */
398 better_state (enum pure_const_state_e *state, bool *looping,
399 enum pure_const_state_e state2, bool looping2)
/* When the current state is IPA_NEITHER, adopt the better STATE2.  */
403 if (*state == IPA_NEITHER)
406 *looping = MIN (*looping, looping2);
/* Otherwise keep the better (smaller) looping flag, provided STATE2
   is meaningful (not IPA_NEITHER, whose looping flag is unreliable).  */
408 else if (state2 != IPA_NEITHER)
409 *looping = MIN (*looping, looping2);
412 /* Merge STATE and STATE2 and LOOPING and LOOPING2 and store
413 into STATE and LOOPING worse of the two variants. */
416 worse_state (enum pure_const_state_e *state, bool *looping,
417 enum pure_const_state_e state2, bool looping2)
/* Lattice order const < pure < neither; MAX picks the worse value.  */
419 *state = MAX (*state, state2);
420 *looping = MAX (*looping, looping2);
423 /* Check the parameters of a function call to CALL_EXPR to see if
424 there are any references in the parameters that are not allowed for
425 pure or const functions. Also check to see if this is either an
426 indirect call, a call outside the compilation unit, or has special
427 attributes that may also affect the purity. The CALL_EXPR node for
428 the entire call expression. */
431 check_call (funct_state local, gimple call, bool ipa)
433 int flags = gimple_call_flags (call);
434 tree callee_t = gimple_call_fndecl (call);
435 bool possibly_throws = stmt_could_throw_p (call);
436 bool possibly_throws_externally = (possibly_throws
437 && stmt_can_throw_external (call));
/* Any operand of the call that could itself throw taints looping /
   can_throw, independent of the callee.  */
442 for (i = 0; i < gimple_num_ops (call); i++)
443 if (gimple_op (call, i)
444 && tree_could_throw_p (gimple_op (call, i)))
446 if (possibly_throws && cfun->can_throw_non_call_exceptions)
449 fprintf (dump_file, " operand can throw; looping\n");
450 local->looping = true;
452 if (possibly_throws_externally)
455 fprintf (dump_file, " operand can throw externally\n");
456 local->can_throw = true;
461 /* The const and pure flags are set by a variety of places in the
462 compiler (including here). If someone has already set the flags
463 for the callee, (such as for some of the builtins) we will use
464 them, otherwise we will compute our own information.
466 Const and pure functions have less clobber effects than other
467 functions so we process these first. Otherwise if it is a call
468 outside the compilation unit or an indirect call we punt. This
469 leaves local calls which will be processed by following the call
473 /* built_in_return is really just a return statement. */
474 if (gimple_call_builtin_p (call, BUILT_IN_RETURN))
476 /* When bad things happen to bad functions, they cannot be const
478 if (setjmp_call_p (callee_t))
481 fprintf (dump_file, " setjmp is not const/pure\n");
482 local->looping = true;
483 local->pure_const_state = IPA_NEITHER;
486 if (DECL_BUILT_IN_CLASS (callee_t) == BUILT_IN_NORMAL)
487 switch (DECL_FUNCTION_CODE (callee_t))
489 case BUILT_IN_LONGJMP:
490 case BUILT_IN_NONLOCAL_GOTO:
492 fprintf (dump_file, " longjmp and nonlocal goto is not const/pure\n");
493 local->pure_const_state = IPA_NEITHER;
494 local->looping = true;
501 /* When not in IPA mode, we can still handle self recursion. */
502 if (!ipa && callee_t == current_function_decl)
505 fprintf (dump_file, " Recursive call can loop.\n");
506 local->looping = true;
508 /* Either callee is unknown or we are doing local analysis.
509 Look to see if there are any bits available for the callee (such as by
510 declaration or because it is builtin) and process solely on the basis of
514 enum pure_const_state_e call_state;
516 if (possibly_throws && cfun->can_throw_non_call_exceptions)
519 fprintf (dump_file, " can throw; looping\n");
520 local->looping = true;
522 if (possibly_throws_externally)
526 fprintf (dump_file, " can throw externally to lp %i\n",
527 lookup_stmt_eh_lp (call));
529 fprintf (dump_file, " callee:%s\n",
530 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (callee_t)));
532 local->can_throw = true;
534 if (dump_file && (dump_flags & TDF_DETAILS))
535 fprintf (dump_file, " checking flags for call:");
/* A noreturn+nothrow call (or noreturn with -fno-exceptions) can never
   lead back to a return, so its side effects may be ignored.  */
536 state_from_flags (&call_state, &call_looping, flags,
537 ((flags & (ECF_NORETURN | ECF_NOTHROW))
538 == (ECF_NORETURN | ECF_NOTHROW))
539 || (!flag_exceptions && (flags & ECF_NORETURN)));
540 worse_state (&local->pure_const_state, &local->looping,
541 call_state, call_looping);
543 /* Direct function calls are handled by IPA propagation. */
546 /* Wrapper around check_decl for loads in local mode. */
/* Callback for walk_stmt_load_store_ops; DATA is the funct_state.  */
549 check_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
552 check_decl ((funct_state)data, op, false, false);
554 check_op ((funct_state)data, op, false);
558 /* Wrapper around check_decl for stores in local mode. */
561 check_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
564 check_decl ((funct_state)data, op, true, false);
566 check_op ((funct_state)data, op, true);
570 /* Wrapper around check_decl for loads in ipa mode. */
573 check_ipa_load (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
576 check_decl ((funct_state)data, op, false, true);
578 check_op ((funct_state)data, op, false);
582 /* Wrapper around check_decl for stores in ipa mode. */
585 check_ipa_store (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
588 check_decl ((funct_state)data, op, true, true);
590 check_op ((funct_state)data, op, true);
594 /* Look into pointer pointed to by GSIP and figure out what interesting side
597 check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
599 gimple stmt = gsi_stmt (*gsip);
/* Debug statements have no effect on purity.  */
602 if (is_gimple_debug (stmt))
607 fprintf (dump_file, " scanning: ");
608 print_gimple_stmt (dump_file, stmt, 0, 0);
611 /* Look for loads and stores. */
612 walk_stmt_load_store_ops (stmt, local,
613 ipa ? check_ipa_load : check_load,
614 ipa ? check_ipa_store : check_store);
/* Non-call statements that may throw; calls handle throwing in
   check_call itself.  */
616 if (gimple_code (stmt) != GIMPLE_CALL
617 && stmt_could_throw_p (stmt))
619 if (cfun->can_throw_non_call_exceptions)
622 fprintf (dump_file, " can throw; looping");
623 local->looping = true;
625 if (stmt_can_throw_external (stmt))
628 fprintf (dump_file, " can throw externally");
629 local->can_throw = true;
632 switch (gimple_code (stmt))
635 check_call (local, stmt, ipa);
638 if (DECL_NONLOCAL (gimple_label_label (stmt)))
639 /* Target of long jump. */
642 fprintf (dump_file, " nonlocal label is not const/pure");
643 local->pure_const_state = IPA_NEITHER;
/* Inline asm: a "memory" clobber or a volatile asm kills purity.  */
647 for (i = 0; i < gimple_asm_nclobbers (stmt); i++)
649 tree op = gimple_asm_clobber_op (stmt, i);
650 if (strcmp (TREE_STRING_POINTER (TREE_VALUE (op)), "memory") == 0)
653 fprintf (dump_file, " memory asm clobber is not const/pure");
654 /* Abandon all hope, ye who enter here. */
655 local->pure_const_state = IPA_NEITHER;
658 if (gimple_asm_volatile_p (stmt))
661 fprintf (dump_file, " volatile is not const/pure");
662 /* Abandon all hope, ye who enter here. */
663 local->pure_const_state = IPA_NEITHER;
664 local->looping = true;
673 /* This is the main routine for finding the reference patterns for
674 global variables within a function FN. */
677 analyze_function (struct cgraph_node *fn, bool ipa)
679 tree decl = fn->decl;
680 tree old_decl = current_function_decl;
682 basic_block this_block;
/* Start optimistic (const, non-looping); scanning only demotes.  */
684 l = XCNEW (struct funct_state_d);
685 l->pure_const_state = IPA_CONST;
686 l->state_previously_known = IPA_NEITHER;
687 l->looping_previously_known = true;
689 l->can_throw = false;
693 fprintf (dump_file, "\n\n local analysis of %s\n ",
694 cgraph_node_name (fn));
697 push_cfun (DECL_STRUCT_FUNCTION (decl));
698 current_function_decl = decl;
/* Scan every statement of every basic block.  */
700 FOR_EACH_BB (this_block)
702 gimple_stmt_iterator gsi;
703 struct walk_stmt_info wi;
705 memset (&wi, 0, sizeof(wi));
706 for (gsi = gsi_start_bb (this_block);
710 check_stmt (&gsi, l, ipa);
/* Once everything is worst-case, further scanning is pointless.  */
711 if (l->pure_const_state == IPA_NEITHER && l->looping && l->can_throw)
717 if (l->pure_const_state != IPA_NEITHER)
719 /* Const functions cannot have back edges (an
720 indication of possible infinite loop side
722 if (mark_dfs_back_edges ())
724 /* Preheaders are needed for SCEV to work.
725 Simple latches and recorded exits improve chances that loop will
726 be proved to be finite in testcases such as in loop-15.c and loop-24.c */
727 loop_optimizer_init (LOOPS_NORMAL
728 | LOOPS_HAVE_RECORDED_EXITS);
729 if (dump_file && (dump_flags & TDF_DETAILS))
730 flow_loops_dump (dump_file, NULL, 0);
731 if (mark_irreducible_loops ())
734 fprintf (dump_file, " has irreducible loops\n");
/* Every natural loop must be provably finite, else set looping.  */
742 FOR_EACH_LOOP (li, loop, 0)
743 if (!finite_loop_p (loop))
746 fprintf (dump_file, " can not prove finiteness of loop %i\n", loop->num);
752 loop_optimizer_finalize ();
756 if (dump_file && (dump_flags & TDF_DETAILS))
757 fprintf (dump_file, " checking previously known:");
758 state_from_flags (&l->state_previously_known, &l->looping_previously_known,
759 flags_from_decl_or_type (fn->decl),
760 cgraph_node_cannot_return (fn));
/* User-declared attributes can only improve the computed state.  */
762 better_state (&l->pure_const_state, &l->looping,
763 l->state_previously_known,
764 l->looping_previously_known);
765 if (TREE_NOTHROW (decl))
766 l->can_throw = false;
769 current_function_decl = old_decl;
773 fprintf (dump_file, "Function is locally looping.\n");
775 fprintf (dump_file, "Function is locally throwing.\n");
776 if (l->pure_const_state == IPA_CONST)
777 fprintf (dump_file, "Function is locally const.\n");
778 if (l->pure_const_state == IPA_PURE)
779 fprintf (dump_file, "Function is locally pure.\n");
784 /* Called when new function is inserted to callgraph late. */
786 add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
/* Bodies we may not even see cannot be analyzed at all.  */
788 if (cgraph_function_body_availability (node) < AVAIL_OVERWRITABLE)
790 /* There are some shared nodes, in particular the initializers on
791 static declarations. We do not need to scan them more than once
792 since all we would be interested in are the addressof
794 visited_nodes = pointer_set_create ();
795 if (cgraph_function_body_availability (node) > AVAIL_OVERWRITABLE)
796 set_function_state (node, analyze_function (node, true));
797 pointer_set_destroy (visited_nodes);
798 visited_nodes = NULL;
801 /* Called when new clone is inserted to callgraph late. */
804 duplicate_node_data (struct cgraph_node *src, struct cgraph_node *dst,
805 void *data ATTRIBUTE_UNUSED)
806 
807 if (has_function_state (src))
/* Deep-copy SRC's state; DST owns the new allocation (freed in
   remove_node_data).  */
809 funct_state l = XNEW (struct funct_state_d);
810 gcc_assert (!has_function_state (dst));
811 memcpy (l, get_function_state (src), sizeof (*l));
812 set_function_state (dst, l);
816 /* Called when a node is removed from the callgraph late; releases the
   function state owned for NODE.  (Original comment was a copy-paste
   of the clone-insertion hook.)  */
819 remove_node_data (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
821 if (has_function_state (node))
823 free (get_function_state (node));
824 set_function_state (node, NULL);
/* Register the cgraph hooks (insertion, duplication, removal) exactly
   once; holders are the file-scope hook list pointers above.  */
830 register_hooks (void)
/* Guard so repeated calls do not register the hooks twice.  */
832 static bool init_p = false;
839 node_removal_hook_holder =
840 cgraph_add_node_removal_hook (&remove_node_data, NULL);
841 node_duplication_hook_holder =
842 cgraph_add_node_duplication_hook (&duplicate_node_data, NULL);
843 function_insertion_hook_holder =
844 cgraph_add_function_insertion_hook (&add_new_function, NULL);
848 /* Analyze each function in the cgraph to see if it is locally PURE or
852 generate_summary (void)
854 struct cgraph_node *node;
858 /* There are some shared nodes, in particular the initializers on
859 static declarations. We do not need to scan them more than once
860 since all we would be interested in are the addressof
862 visited_nodes = pointer_set_create ();
864 /* Process all of the functions.
866 We process AVAIL_OVERWRITABLE functions. We can not use the results
867 by default, but the info can be used at LTO with -fwhole-program or
868 when function got cloned and the clone is AVAILABLE. */
870 for (node = cgraph_nodes; node; node = node->next)
871 if (cgraph_function_body_availability (node) >= AVAIL_OVERWRITABLE)
872 set_function_state (node, analyze_function (node, true));
874 pointer_set_destroy (visited_nodes);
875 visited_nodes = NULL;
879 /* Serialize the ipa info for lto. */
882 pure_const_write_summary (cgraph_node_set set,
883 varpool_node_set vset ATTRIBUTE_UNUSED)
885 struct cgraph_node *node;
886 struct lto_simple_output_block *ob
887 = lto_create_simple_output_block (LTO_section_ipa_pure_const);
888 unsigned int count = 0;
889 cgraph_node_set_iterator csi;
/* First pass: count the records so the reader knows how many follow.  */
891 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
893 node = csi_node (csi);
894 if (node->analyzed && has_function_state (node))
898 lto_output_uleb128_stream (ob->main_stream, count);
900 /* Process all of the functions. */
901 for (csi = csi_start (set); !csi_end_p (csi); csi_next (&csi))
903 node = csi_node (csi);
904 if (node->analyzed && has_function_state (node))
906 struct bitpack_d *bp;
909 lto_cgraph_encoder_t encoder;
911 fs = get_function_state (node);
913 encoder = ob->decl_state->cgraph_node_encoder;
914 node_ref = lto_cgraph_encoder_encode (encoder, node);
915 lto_output_uleb128_stream (ob->main_stream, node_ref);
917 /* Note that flags will need to be read in the opposite
918 order as we are pushing the bitflags into FLAGS. */
919 bp = bitpack_create ();
920 bp_pack_value (bp, fs->pure_const_state, 2);
921 bp_pack_value (bp, fs->state_previously_known, 2);
922 bp_pack_value (bp, fs->looping_previously_known, 1);
923 bp_pack_value (bp, fs->looping, 1);
924 bp_pack_value (bp, fs->can_throw, 1);
925 lto_output_bitpack (ob->main_stream, bp);
930 lto_destroy_simple_output_block (ob);
934 /* Deserialize the ipa info for lto. */
937 pure_const_read_summary (void)
939 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
940 struct lto_file_decl_data *file_data;
944 while ((file_data = file_data_vec[j++]))
948 struct lto_input_block *ib
949 = lto_create_simple_input_block (file_data,
950 LTO_section_ipa_pure_const,
955 unsigned int count = lto_input_uleb128 (ib);
957 for (i = 0; i < count; i++)
960 struct cgraph_node *node;
961 struct bitpack_d *bp;
963 lto_cgraph_encoder_t encoder;
965 fs = XCNEW (struct funct_state_d);
966 index = lto_input_uleb128 (ib);
967 encoder = file_data->cgraph_node_encoder;
968 node = lto_cgraph_encoder_deref (encoder, index);
969 set_function_state (node, fs);
971 /* Note that the flags must be read in the opposite
972 order in which they were written (the bitflags were
973 pushed into FLAGS). */
974 bp = lto_input_bitpack (ib);
976 = (enum pure_const_state_e) bp_unpack_value (bp, 2);
977 fs->state_previously_known
978 = (enum pure_const_state_e) bp_unpack_value (bp, 2);
979 fs->looping_previously_known = bp_unpack_value (bp, 1);
980 fs->looping = bp_unpack_value (bp, 1);
981 fs->can_throw = bp_unpack_value (bp, 1);
984 int flags = flags_from_decl_or_type (node->decl);
985 fprintf (dump_file, "Read info for %s/%i ",
986 cgraph_node_name (node),
988 if (flags & ECF_CONST)
989 fprintf (dump_file, " const");
990 if (flags & ECF_PURE)
991 fprintf (dump_file, " pure");
992 if (flags & ECF_NOTHROW)
993 fprintf (dump_file, " nothrow");
994 fprintf (dump_file, "\n pure const state: %s\n",
995 pure_const_names[fs->pure_const_state]);
996 fprintf (dump_file, " previously known state: %s\n",
/* NOTE(review): indexing pure_const_names[] with the *bool*
   looping_previously_known looks wrong — the line above prints the
   "previously known state", so this should almost certainly be
   fs->state_previously_known.  Dump-output bug only; verify and fix.  */
997 pure_const_names[fs->looping_previously_known]);
999 fprintf (dump_file," function is locally looping\n");
1000 if (fs->looping_previously_known)
1001 fprintf (dump_file," function is previously known looping\n");
1003 fprintf (dump_file," function is locally throwing\n");
1005 bitpack_delete (bp);
1008 lto_destroy_simple_input_block (file_data,
1009 LTO_section_ipa_pure_const,
/* Edge filter for the nothrow reduced-inorder walk: ignore edges that
   cannot throw externally.  */
1017 ignore_edge (struct cgraph_edge *e)
1019 return (!e->can_throw_external);
1022 /* Return true if NODE is self recursive function. */
1025 self_recursive_p (struct cgraph_node *node)
1027 struct cgraph_edge *e;
/* A direct call edge back to NODE itself means self recursion.  */
1028 for (e = node->callees; e; e = e->next_callee)
1029 if (e->callee == node)
1034 /* Produce transitive closure over the callgraph and compute pure/const
1038 propagate_pure_const (void)
1040 struct cgraph_node *node;
1041 struct cgraph_node *w;
1042 struct cgraph_node **order =
1043 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1046 struct ipa_dfs_info * w_info;
/* Topologically sorted strongly connected components of the callgraph;
   order[i] is the representative of the i-th cycle.  */
1048 order_pos = ipa_utils_reduced_inorder (order, true, false, NULL);
1051 dump_cgraph (dump_file);
1052 ipa_utils_print_order(dump_file, "reduced", order, order_pos);
1055 /* Propagate the local information thru the call graph to produce
1056 the global information. All the nodes within a cycle will have
1057 the same info so we collapse cycles first. Then we can do the
1058 propagation in one pass from the leaves to the roots. */
1059 for (i = 0; i < order_pos; i++ )
1061 enum pure_const_state_e pure_const_state = IPA_CONST;
1062 bool looping = false;
1066 if (dump_file && (dump_flags & TDF_DETAILS))
1067 fprintf (dump_file, "Starting cycle\n");
1069 /* Find the worst state for any node in the cycle. */
1071 while (w && pure_const_state != IPA_NEITHER)
1073 struct cgraph_edge *e;
1074 struct cgraph_edge *ie;
1076 struct ipa_ref *ref;
1078 funct_state w_l = get_function_state (w);
1079 if (dump_file && (dump_flags & TDF_DETAILS))
1080 fprintf (dump_file, " Visiting %s/%i state:%s looping %i\n",
1081 cgraph_node_name (w),
1083 pure_const_names[w_l->pure_const_state],
1086 /* First merge in function body properties. */
1087 worse_state (&pure_const_state, &looping,
1088 w_l->pure_const_state, w_l->looping)
1089 if (pure_const_state == IPA_NEITHER)
1092 /* For overwritable nodes we can not assume anything. */
1093 if (cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
/* Only the declared (previously known) state can be trusted when
   the body may be replaced at link time.  */
1095 worse_state (&pure_const_state, &looping,
1096 w_l->state_previously_known,
1097 w_l->looping_previously_known);
1098 if (dump_file && (dump_flags & TDF_DETAILS))
1101 " Overwritable. state %s looping %i\n",
1102 pure_const_names[w_l->state_previously_known],
1103 w_l->looping_previously_known);
1110 /* We consider recursive cycles as possibly infinite.
1111 This might be relaxed since infinite recursion leads to stack
1116 /* Now walk the edges and merge in callee properties. */
1117 for (e = w->callees; e; e = e->next_callee)
1119 struct cgraph_node *y = e->callee;
1120 enum pure_const_state_e edge_state = IPA_CONST;
1121 bool edge_looping = false;
1123 if (dump_file && (dump_flags & TDF_DETAILS))
1127 cgraph_node_name (e->callee),
1130 if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
1132 funct_state y_l = get_function_state (y);
1133 if (dump_file && (dump_flags & TDF_DETAILS))
1136 " state:%s looping:%i\n",
1137 pure_const_names[y_l->pure_const_state],
1140 if (y_l->pure_const_state > IPA_PURE
1141 && cgraph_edge_cannot_lead_to_return (e))
1143 if (dump_file && (dump_flags & TDF_DETAILS))
1145 " Ignoring side effects"
1146 " -> pure, looping\n");
1147 edge_state = IPA_PURE;
1148 edge_looping = true;
1152 edge_state = y_l->pure_const_state;
1153 edge_looping = y_l->looping;
/* Callee body not available: fall back to its declared ECF flags.  */
1157 state_from_flags (&edge_state, &edge_looping,
1158 flags_from_decl_or_type (y->decl),
1159 cgraph_edge_cannot_lead_to_return (e));
1161 /* Merge the results with what we already know. */
1162 better_state (&edge_state, &edge_looping,
1163 w_l->state_previously_known,
1164 w_l->looping_previously_known);
1165 worse_state (&pure_const_state, &looping,
1166 edge_state, edge_looping);
1167 if (pure_const_state == IPA_NEITHER)
1170 if (pure_const_state == IPA_NEITHER)
1173 /* Now process the indirect call. */
/* NOTE(review): this iterates NODE's (the cycle representative's)
   indirect calls while the enclosing loop visits each member W of the
   cycle — should this be w->indirect_calls?  Verify.  */
1174 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1176 enum pure_const_state_e edge_state = IPA_CONST;
1177 bool edge_looping = false;
1179 if (dump_file && (dump_flags & TDF_DETAILS))
1180 fprintf (dump_file, " Indirect call");
1181 state_from_flags (&edge_state, &edge_looping,
1182 ie->indirect_info->ecf_flags,
1183 cgraph_edge_cannot_lead_to_return (ie));
1184 /* Merge the results with what we already know. */
1185 better_state (&edge_state, &edge_looping,
1186 w_l->state_previously_known,
1187 w_l->looping_previously_known);
1188 worse_state (&pure_const_state, &looping,
1189 edge_state, edge_looping);
1190 if (pure_const_state == IPA_NEITHER)
1193 if (pure_const_state == IPA_NEITHER)
1196 /* And finally all loads and stores. */
/* NOTE(review): same concern as above — node->ref_list vs w->ref_list.  */
1197 for (i = 0; ipa_ref_list_reference_iterate (&node->ref_list, i, ref); i++)
1199 enum pure_const_state_e ref_state = IPA_CONST;
1200 bool ref_looping = false;
1204 /* readonly reads are safe. */
1205 if (TREE_READONLY (ipa_ref_varpool_node (ref)->decl))
1207 if (dump_file && (dump_flags & TDF_DETAILS))
1208 fprintf (dump_file, " nonreadonly global var read\n");
1209 ref_state = IPA_PURE;
1212 if (ipa_ref_cannot_lead_to_return (ref))
1214 ref_state = IPA_NEITHER;
1215 if (dump_file && (dump_flags & TDF_DETAILS))
1216 fprintf (dump_file, " global var write\n");
1221 better_state (&ref_state, &ref_looping,
1222 w_l->state_previously_known,
1223 w_l->looping_previously_known);
1224 worse_state (&pure_const_state, &looping,
1225 ref_state, ref_looping);
1226 if (pure_const_state == IPA_NEITHER)
/* Advance to the next member of this cycle via the DFS info in aux.  */
1229 w_info = (struct ipa_dfs_info *) w->aux;
1230 w = w_info->next_cycle;
1232 if (dump_file && (dump_flags & TDF_DETAILS))
1233 fprintf (dump_file, "Result %s looping %i\n",
1234 pure_const_names [pure_const_state],
1237 /* Copy back the region's pure_const_state which is shared by
1238 all nodes in the region. */
1242 funct_state w_l = get_function_state (w);
1243 enum pure_const_state_e this_state = pure_const_state;
1244 bool this_looping = looping;
/* Never publish a state worse than what the user declared.  */
1246 if (w_l->state_previously_known != IPA_NEITHER
1247 && this_state > w_l->state_previously_known)
1249 this_state = w_l->state_previously_known;
1250 this_looping |= w_l->looping_previously_known;
1252 if (!this_looping && self_recursive_p (w))
1253 this_looping = true;
1254 if (!w_l->looping_previously_known)
1255 this_looping = false;
1257 /* All nodes within a cycle share the same info. */
1258 w_l->pure_const_state = this_state;
1259 w_l->looping = this_looping;
/* Newly discovered const: warn about the missing attribute and mark
   the node.  */
1264 if (!TREE_READONLY (w->decl))
1266 warn_function_const (w->decl, !this_looping);
1268 fprintf (dump_file, "Function found to be %sconst: %s\n",
1269 this_looping ? "looping " : "",
1270 cgraph_node_name (w));
1272 cgraph_set_readonly_flag (w, true);
1273 cgraph_set_looping_const_or_pure_flag (w, this_looping);
/* Newly discovered pure: likewise.  */
1277 if (!DECL_PURE_P (w->decl))
1279 warn_function_pure (w->decl, !this_looping);
1281 fprintf (dump_file, "Function found to be %spure: %s\n",
1282 this_looping ? "looping " : "",
1283 cgraph_node_name (w));
1285 cgraph_set_pure_flag (w, true);
1286 cgraph_set_looping_const_or_pure_flag (w, this_looping);
1292 w_info = (struct ipa_dfs_info *) w->aux;
1293 w = w_info->next_cycle;
1298 for (node = cgraph_nodes; node; node = node->next)
1300 /* Get rid of the aux information. */
1303 w_info = (struct ipa_dfs_info *) node->aux;
1312 /* Produce transitive closure over the callgraph and compute nothrow
/* NOTE(review): this excerpt elides many original lines (the return
   type, braces, and several statements).  Only comments are added
   below; every code byte is untouched.  */
1316 propagate_nothrow (void)
1318 struct cgraph_node *node;
1319 struct cgraph_node *w;
/* Worklist of callgraph nodes in reduced (cycle-collapsed) order.  */
1320 struct cgraph_node **order =
1321 XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
1324 struct ipa_dfs_info * w_info;
/* Collapse strongly connected components first; every node inside a
   cycle must end up with the same nothrow state.  */
1326 order_pos = ipa_utils_reduced_inorder (order, true, false, ignore_edge);
1329 dump_cgraph (dump_file);
1330 ipa_utils_print_order(dump_file, "reduced for nothrow", order, order_pos);
1333 /* Propagate the local information thru the call graph to produce
1334 the global information. All the nodes within a cycle will have
1335 the same info so we collapse cycles first. Then we can do the
1336 propagation in one pass from the leaves to the roots. */
1337 for (i = 0; i < order_pos; i++ )
1339 bool can_throw = false;
1342 /* Find the worst state for any node in the cycle. */
1346 struct cgraph_edge *e, *ie;
1347 funct_state w_l = get_function_state (w);
/* A body we cannot analyze, or one that may be overridden at link
   time, must conservatively be assumed to throw.  */
1350 || cgraph_function_body_availability (w) == AVAIL_OVERWRITABLE)
1356 for (e = w->callees; e; e = e->next_callee)
1358 struct cgraph_node *y = e->callee;
/* If the callee's body is analyzable, use its computed state;
   otherwise fall back to the TREE_NOTHROW flag on its decl.  */
1360 if (cgraph_function_body_availability (y) > AVAIL_OVERWRITABLE)
1362 funct_state y_l = get_function_state (y);
1366 if (y_l->can_throw && !TREE_NOTHROW (w->decl)
1367 && e->can_throw_external)
1370 else if (e->can_throw_external && !TREE_NOTHROW (y->decl))
/* Indirect calls with unknown targets may throw.
   NOTE(review): this walks NODE's indirect calls while the loop is
   visiting cycle member W -- looks like it should be
   w->indirect_calls; confirm against the full source.  */
1373 for (ie = node->indirect_calls; ie; ie = ie->next_callee)
1374 if (ie->can_throw_external)
/* Advance to the next member of the collapsed cycle.  */
1376 w_info = (struct ipa_dfs_info *) w->aux;
1377 w = w_info->next_cycle;
1380 /* Copy back the region's pure_const_state which is shared by
1381 all nodes in the region. */
1385 funct_state w_l = get_function_state (w);
1386 if (!can_throw && !TREE_NOTHROW (w->decl))
1388 struct cgraph_edge *e;
1389 cgraph_set_nothrow_flag (w, true);
/* A nothrow callee means exceptions can no longer escape to the
   callers through these edges.  */
1390 for (e = w->callers; e; e = e->next_caller)
1391 e->can_throw_external = false;
1393 fprintf (dump_file, "Function found to be nothrow: %s\n",
1394 cgraph_node_name (w));
1396 else if (can_throw && !TREE_NOTHROW (w->decl))
1397 w_l->can_throw = true;
1398 w_info = (struct ipa_dfs_info *) w->aux;
1399 w = w_info->next_cycle;
/* Cleanup: the DFS info hung off node->aux is no longer needed.  */
1404 for (node = cgraph_nodes; node; node = node->next)
1406 /* Get rid of the aux information. */
1409 w_info = (struct ipa_dfs_info *) node->aux;
1419 /* Produce the global information by performing a transitive closure
1420 on the local information that was produced by generate_summary.
     This is the execute hook of the IPA pass: it unregisters the
     summary-maintenance hooks, runs both propagation phases, and then
     frees all per-function state.
     NOTE(review): the excerpt elides the function signature and some
     surrounding lines; code bytes below are untouched.  */
1425 struct cgraph_node *node;
/* Summaries are final now; stop tracking node insertions, clones and
   removals.  */
1427 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
1428 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
1429 cgraph_remove_node_removal_hook (node_removal_hook_holder);
1431 /* Nothrow makes more function to not lead to return and improve
     later analysis, so it runs before the pure/const propagation.  */
1433 propagate_nothrow ();
1434 propagate_pure_const ();
/* Release the per-function state for every node that has one.  */
1437 for (node = cgraph_nodes; node; node = node->next)
1438 if (has_function_state (node))
1439 free (get_function_state (node));
1440 VEC_free (funct_state, heap, funct_state_vec);
/* Gate shared by the IPA and local pure/const passes: run only when
   -fipa-pure-const is enabled.
   NOTE(review): the excerpt cuts the return expression off after the
   flag test; the remaining conjunct(s) are not visible here.  */
1446 gate_pure_const (void)
1448 return (flag_ipa_pure_const
1449 /* Don't bother doing anything if the program has errors. */
/* IPA pass descriptor for the whole-program pure/const/nothrow
   propagation.  The second group of fields wires in the LTO summary
   streaming hooks; no per-function transform is needed because the
   flags are set directly on the decls during propagation.  */
1453 struct ipa_opt_pass_d pass_ipa_pure_const =
1457 "pure-const", /* name */
1458 gate_pure_const, /* gate */
1459 propagate, /* execute */
1462 0, /* static_pass_number */
1463 TV_IPA_PURE_CONST, /* tv_id */
1464 0, /* properties_required */
1465 0, /* properties_provided */
1466 0, /* properties_destroyed */
1467 0, /* todo_flags_start */
1468 0 /* todo_flags_finish */
1470 generate_summary, /* generate_summary */
1471 pure_const_write_summary, /* write_summary */
1472 pure_const_read_summary, /* read_summary */
1473 NULL, /* write_optimization_summary */
1474 NULL, /* read_optimization_summary */
1475 NULL, /* stmt_fixup */
1477 NULL, /* function_transform */
1478 NULL /* variable_transform */
1481 /* Return true if function should be skipped for local pure const analysis. */
1484 skip_function_for_local_pure_const (struct cgraph_node *node)
1486 /* Because we do not schedule pass_fixup_cfg over whole program after early optimizations
1487 we must not promote functions that are called by already processed functions. */
1489 if (function_called_by_processed_nodes_p ())
1492 fprintf (dump_file, "Function called in recursive cycle; ignoring\n");
/* Bodies that are unavailable or may be overridden at link time cannot
   safely be given stronger attributes.  */
1495 if (cgraph_function_body_availability (node) <= AVAIL_OVERWRITABLE)
1498 fprintf (dump_file, "Function is not available or overwrittable; not analyzing.\n");
1504 /* Simple local pass for pure const discovery reusing the analysis from
1505 ipa_pure_const. This pass is effective when executed together with
1506 other optimization passes in early optimization pass queue.
     Operates on the current function (cfun) only: discovers NORETURN,
     then const/pure, then nothrow, setting the decl/cgraph flags and
     emitting -Wsuggest-attribute diagnostics.
     NOTE(review): this excerpt elides the return type, braces and a
     number of statements; only comments are added here.  */
1509 local_pure_const (void)
1511 bool changed = false;
1514 struct cgraph_node *node;
1516 node = cgraph_node (current_function_decl);
/* Even when promotion must be skipped, the analysis may still run for
   the sake of the -Wsuggest-attribute-* warnings below.  */
1517 skip = skip_function_for_local_pure_const (node);
1518 if (!warn_suggest_attribute_const
1519 && !warn_suggest_attribute_pure
1523 /* First do NORETURN discovery. */
/* No edge reaches the exit block => the function never returns.  */
1524 if (!skip && !TREE_THIS_VOLATILE (current_function_decl)
1525 && EDGE_COUNT (EXIT_BLOCK_PTR->preds) == 0)
1527 warn_function_noreturn (cfun->decl);
1529 fprintf (dump_file, "Function found to be noreturn: %s\n",
1530 lang_hooks.decl_printable_name (current_function_decl, 2));
1532 /* Update declaration and reduce profile to executed once. */
/* TREE_THIS_VOLATILE on a FUNCTION_DECL is how noreturn is recorded. */
1533 TREE_THIS_VOLATILE (current_function_decl) = 1;
1534 if (node->frequency > NODE_FREQUENCY_EXECUTED_ONCE)
1535 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
/* Reuse the IPA pass's per-function analysis (no propagation here).  */
1539 l = analyze_function (node, false);
1541 switch (l->pure_const_state)
/* IPA_CONST case: promote to const, or drop a stale "looping".  */
1544 if (!TREE_READONLY (current_function_decl))
1546 warn_function_const (current_function_decl, !l->looping);
1549 cgraph_set_readonly_flag (node, true);
1550 cgraph_set_looping_const_or_pure_flag (node, l->looping);
1554 fprintf (dump_file, "Function found to be %sconst: %s\n",
1555 l->looping ? "looping " : "",
1556 lang_hooks.decl_printable_name (current_function_decl,
/* Already const: we may still learn that it cannot loop forever.  */
1559 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1564 cgraph_set_looping_const_or_pure_flag (node, false);
1568 fprintf (dump_file, "Function found to be non-looping: %s\n",
1569 lang_hooks.decl_printable_name (current_function_decl,
/* IPA_PURE case: same pattern as const above.  */
1575 if (!DECL_PURE_P (current_function_decl))
1579 cgraph_set_pure_flag (node, true);
1580 cgraph_set_looping_const_or_pure_flag (node, l->looping);
1583 warn_function_pure (current_function_decl, !l->looping);
1585 fprintf (dump_file, "Function found to be %spure: %s\n",
1586 l->looping ? "looping " : "",
1587 lang_hooks.decl_printable_name (current_function_decl,
1590 else if (DECL_LOOPING_CONST_OR_PURE_P (current_function_decl)
1595 cgraph_set_looping_const_or_pure_flag (node, false);
1599 fprintf (dump_file, "Function found to be non-looping: %s\n",
1600 lang_hooks.decl_printable_name (current_function_decl,
/* Finally, nothrow discovery for this function alone.  */
1608 if (!l->can_throw && !TREE_NOTHROW (current_function_decl))
1610 struct cgraph_edge *e;
1612 cgraph_set_nothrow_flag (node, true);
/* Callers can no longer see an exception escape through these edges. */
1613 for (e = node->callers; e; e = e->next_caller)
1614 e->can_throw_external = false;
1617 fprintf (dump_file, "Function found to be nothrow: %s\n",
1618 lang_hooks.decl_printable_name (current_function_decl,
/* Flags changed on the decl; let fixup_cfg propagate them into the
   function body's call statements.  */
1624 return execute_fixup_cfg ();
1629 struct gimple_opt_pass pass_local_pure_const =
1633 "local-pure-const", /* name */
1634 gate_pure_const, /* gate */
1635 local_pure_const, /* execute */
1638 0, /* static_pass_number */
1639 TV_IPA_PURE_CONST, /* tv_id */
1640 0, /* properties_required */
1641 0, /* properties_provided */
1642 0, /* properties_destroyed */
1643 0, /* todo_flags_start */
1644 0 /* todo_flags_finish */