1 /* Branch prediction routines for the GNU compiler.
2 Copyright (C) 2000-2020 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 [1] "Branch Prediction for Free"
23 Ball and Larus; PLDI '93.
24 [2] "Static Branch Frequency and Program Profile Analysis"
25 Wu and Larus; MICRO-27.
26 [3] "Corpus-based Static Branch Prediction"
27 Calder, Grunwald, Lindsay, Martin, Mozer, and Zorn; PLDI '95. */
32 #include "coretypes.h"
38 #include "tree-pass.h"
44 #include "diagnostic-core.h"
45 #include "gimple-predict.h"
46 #include "fold-const.h"
52 #include "gimple-iterator.h"
54 #include "tree-ssa-loop-niter.h"
55 #include "tree-ssa-loop.h"
56 #include "tree-scalar-evolution.h"
57 #include "ipa-utils.h"
58 #include "gimple-pretty-print.h"
61 #include "stringpool.h"
64 /* Enum with reasons why a predictor is ignored. */
70 REASON_SINGLE_EDGE_DUPLICATE,
71 REASON_EDGE_PAIR_DUPLICATE
74 /* String messages for the aforementioned enum. */
76 static const char *reason_messages[] = {"", " (ignored)",
77 " (single edge duplicate)", " (edge pair duplicate)"};
80 static void combine_predictions_for_insn (rtx_insn *, basic_block);
81 static void dump_prediction (FILE *, enum br_predictor, int, basic_block,
82 enum predictor_reason, edge);
83 static void predict_paths_leading_to (basic_block, enum br_predictor,
85 class loop *in_loop = NULL);
86 static void predict_paths_leading_to_edge (edge, enum br_predictor,
88 class loop *in_loop = NULL);
89 static bool can_predict_insn_p (const rtx_insn *);
90 static HOST_WIDE_INT get_predictor_value (br_predictor, HOST_WIDE_INT);
91 static void determine_unlikely_bbs ();
93 /* Information we hold about each branch predictor.
94 Filled using information from predict.def. */
98 const char *const name; /* Name used in the debugging dumps. */
99 const int hitrate; /* Expected hitrate used by
100 predict_insn_def call. */
104 /* Use the given predictor without Dempster-Shafer theory if it matches
105 using first_match heuristics. */
106 #define PRED_FLAG_FIRST_MATCH 1
108 /* Recompute hitrate in percent to our representation. */
110 #define HITRATE(VAL) ((int) ((VAL) * REG_BR_PROB_BASE + 50) / 100)
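/* For illustration, assuming REG_BR_PROB_BASE is 10000: HITRATE (95)
   evaluates to (95 * 10000 + 50) / 100 = 9500, i.e. a hit rate given as 95%
   is stored as 9500 in the fixed-point probability representation.  */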
112 #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) {NAME, HITRATE, FLAGS},
113 static const struct predictor_info predictor_info[]= {
114 #include "predict.def"
116 /* Upper bound on predictors. */
121 static gcov_type min_count = -1;
123 /* Determine the threshold for hot BB counts. */
126 get_hot_bb_threshold ()
130 const int hot_frac = param_hot_bb_count_fraction;
131 const gcov_type min_hot_count
133 ? profile_info->sum_max / hot_frac
134 : (gcov_type)profile_count::max_count;
135 set_hot_bb_threshold (min_hot_count);
137 fprintf (dump_file, "Setting hotness threshold to %" PRId64 ".\n",
143 /* Set the threshold for hot BB counts. */
146 set_hot_bb_threshold (gcov_type min)
151 /* Return TRUE if COUNT is considered to be hot in function FUN. */
154 maybe_hot_count_p (struct function *fun, profile_count count)
156 if (!count.initialized_p ())
158 if (count.ipa () == profile_count::zero ())
162 struct cgraph_node *node = cgraph_node::get (fun->decl);
163 if (!profile_info || profile_status_for_fn (fun) != PROFILE_READ)
165 if (node->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED)
167 if (node->frequency == NODE_FREQUENCY_HOT)
170 if (profile_status_for_fn (fun) == PROFILE_ABSENT)
172 if (node->frequency == NODE_FREQUENCY_EXECUTED_ONCE
173 && count < (ENTRY_BLOCK_PTR_FOR_FN (fun)->count.apply_scale (2, 3)))
175 if (count.apply_scale (param_hot_bb_frequency_fraction, 1)
176 < ENTRY_BLOCK_PTR_FOR_FN (fun)->count)
180 /* Code executed at most once is not hot. */
181 if (count <= MAX (profile_info ? profile_info->runs : 1, 1))
183 return (count >= get_hot_bb_threshold ());
186 /* Return true if basic block BB of function FUN can be CPU intensive
187 and should thus be optimized for maximum performance. */
190 maybe_hot_bb_p (struct function *fun, const_basic_block bb)
192 gcc_checking_assert (fun);
193 return maybe_hot_count_p (fun, bb->count);
196 /* Return true if edge E can be CPU intensive and should thus be optimized
197 for maximum performance. */
200 maybe_hot_edge_p (edge e)
202 return maybe_hot_count_p (cfun, e->count ());
205 /* Return true if COUNT is considered to be never executed in function FUN
206 or if function FUN is considered so in the static profile. */
209 probably_never_executed (struct function *fun, profile_count count)
211 gcc_checking_assert (fun);
212 if (count.ipa () == profile_count::zero ())
214 /* Do not trust adjusted counts.  Trusting them would make us drop code with
215 a low execution count resulting from inlining into the cold section.  Such
216 low counts are not safe even with a read profile and may lead us to placing
217 code that actually gets executed into the cold section of the binary.  */
219 if (count.precise_p () && profile_status_for_fn (fun) == PROFILE_READ)
221 const int unlikely_frac = param_unlikely_bb_count_fraction;
222 if (count.apply_scale (unlikely_frac, 1) >= profile_info->runs)
226 if ((!profile_info || profile_status_for_fn (fun) != PROFILE_READ)
227 && (cgraph_node::get (fun->decl)->frequency
228 == NODE_FREQUENCY_UNLIKELY_EXECUTED))
233 /* Return true if basic block BB of function FUN is probably never executed. */
236 probably_never_executed_bb_p (struct function *fun, const_basic_block bb)
238 return probably_never_executed (fun, bb->count);
241 /* Return true if edge E is unlikely executed for obvious reasons. */
244 unlikely_executed_edge_p (edge e)
246 return (e->count () == profile_count::zero ()
247 || e->probability == profile_probability::never ())
248 || (e->flags & (EDGE_EH | EDGE_FAKE));
251 /* Return true if edge E of function FUN is probably never executed. */
254 probably_never_executed_edge_p (struct function *fun, edge e)
256 if (unlikely_executed_edge_p (e))
258 return probably_never_executed (fun, e->count ());
261 /* Return true if function FUN should always be optimized for size. */
264 optimize_function_for_size_p (struct function *fun)
266 if (!fun || !fun->decl)
267 return optimize_size;
268 cgraph_node *n = cgraph_node::get (fun->decl);
269 return n && n->optimize_for_size_p ();
272 /* Return true if function FUN should always be optimized for speed. */
275 optimize_function_for_speed_p (struct function *fun)
277 return !optimize_function_for_size_p (fun);
280 /* Return the optimization type that should be used for function FUN. */
283 function_optimization_type (struct function *fun)
285 return (optimize_function_for_speed_p (fun)
287 : OPTIMIZE_FOR_SIZE);
290 /* Return TRUE if basic block BB should be optimized for size. */
293 optimize_bb_for_size_p (const_basic_block bb)
295 return (optimize_function_for_size_p (cfun)
296 || (bb && !maybe_hot_bb_p (cfun, bb)));
299 /* Return TRUE if basic block BB should be optimized for speed. */
302 optimize_bb_for_speed_p (const_basic_block bb)
304 return !optimize_bb_for_size_p (bb);
307 /* Return the optimization type that should be used for basic block BB. */
310 bb_optimization_type (const_basic_block bb)
312 return (optimize_bb_for_speed_p (bb)
314 : OPTIMIZE_FOR_SIZE);
317 /* Return TRUE if edge E should be optimized for size. */
320 optimize_edge_for_size_p (edge e)
322 return optimize_function_for_size_p (cfun) || !maybe_hot_edge_p (e);
325 /* Return TRUE if edge E should be optimized for speed. */
328 optimize_edge_for_speed_p (edge e)
330 return !optimize_edge_for_size_p (e);
333 /* Return TRUE if the insn currently being expanded should be optimized for size. */
336 optimize_insn_for_size_p (void)
338 return optimize_function_for_size_p (cfun) || !crtl->maybe_hot_insn_p;
341 /* Return TRUE if the insn currently being expanded should be optimized for speed. */
344 optimize_insn_for_speed_p (void)
346 return !optimize_insn_for_size_p ();
349 /* Return TRUE if LOOP should be optimized for size. */
352 optimize_loop_for_size_p (class loop *loop)
354 return optimize_bb_for_size_p (loop->header);
357 /* Return TRUE if LOOP should be optimized for speed. */
360 optimize_loop_for_speed_p (class loop *loop)
362 return optimize_bb_for_speed_p (loop->header);
365 /* Return TRUE if nest rooted at LOOP should be optimized for speed. */
368 optimize_loop_nest_for_speed_p (class loop *loop)
370 class loop *l = loop;
371 if (optimize_loop_for_speed_p (loop))
374 while (l && l != loop)
376 if (optimize_loop_for_speed_p (l))
384 while (l != loop && !l->next)
393 /* Return TRUE if nest rooted at LOOP should be optimized for size. */
396 optimize_loop_nest_for_size_p (class loop *loop)
398 return !optimize_loop_nest_for_speed_p (loop);
401 /* Return true if edge E is likely to be well predictable by branch
405 predictable_edge_p (edge e)
407 if (!e->probability.initialized_p ())
409 if ((e->probability.to_reg_br_prob_base ()
410 <= param_predictable_branch_outcome * REG_BR_PROB_BASE / 100)
411 || (REG_BR_PROB_BASE - e->probability.to_reg_br_prob_base ()
412 <= param_predictable_branch_outcome * REG_BR_PROB_BASE / 100))
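/* Worked example, assuming the default --param predictable-branch-outcome=2:
   an edge counts as predictable when its probability is at most 2% or at
   least 98%, i.e. within 2 * REG_BR_PROB_BASE / 100 of either 0 or
   REG_BR_PROB_BASE.  */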
418 /* Set RTL expansion for BB profile. */
421 rtl_profile_for_bb (basic_block bb)
423 crtl->maybe_hot_insn_p = maybe_hot_bb_p (cfun, bb);
426 /* Set RTL expansion for edge profile. */
429 rtl_profile_for_edge (edge e)
431 crtl->maybe_hot_insn_p = maybe_hot_edge_p (e);
434 /* Set RTL expansion to default mode (i.e. when profile info is not known). */
436 default_rtl_profile (void)
438 crtl->maybe_hot_insn_p = true;
441 /* Return true if one of the outgoing edges is already predicted by
445 rtl_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
448 if (!INSN_P (BB_END (bb)))
450 for (note = REG_NOTES (BB_END (bb)); note; note = XEXP (note, 1))
451 if (REG_NOTE_KIND (note) == REG_BR_PRED
452 && INTVAL (XEXP (XEXP (note, 0), 0)) == (int)predictor)
457 /* Structure representing predictions in tree level. */
459 struct edge_prediction {
460 struct edge_prediction *ep_next;
462 enum br_predictor ep_predictor;
466 /* This map contains for a basic block the list of predictions for the
469 static hash_map<const_basic_block, edge_prediction *> *bb_predictions;
471 /* Return true if one of the outgoing edges is already predicted by
475 gimple_predicted_by_p (const_basic_block bb, enum br_predictor predictor)
477 struct edge_prediction *i;
478 edge_prediction **preds = bb_predictions->get (bb);
483 for (i = *preds; i; i = i->ep_next)
484 if (i->ep_predictor == predictor)
489 /* Return true if edge E is already predicted by PREDICTOR
490 with the outcome TAKEN. */
493 edge_predicted_by_p (edge e, enum br_predictor predictor, bool taken)
495 struct edge_prediction *i;
496 basic_block bb = e->src;
497 edge_prediction **preds = bb_predictions->get (bb);
501 int probability = predictor_info[(int) predictor].hitrate;
504 probability = REG_BR_PROB_BASE - probability;
506 for (i = *preds; i; i = i->ep_next)
507 if (i->ep_predictor == predictor
509 && i->ep_probability == probability)
514 /* Same predicate as above, working on edges. */
516 edge_probability_reliable_p (const_edge e)
518 return e->probability.probably_reliable_p ();
521 /* Same predicate as edge_probability_reliable_p, working on notes. */
523 br_prob_note_reliable_p (const_rtx note)
525 gcc_assert (REG_NOTE_KIND (note) == REG_BR_PROB);
526 return profile_probability::from_reg_br_prob_note
527 (XINT (note, 0)).probably_reliable_p ();
531 predict_insn (rtx_insn *insn, enum br_predictor predictor, int probability)
533 gcc_assert (any_condjump_p (insn));
534 if (!flag_guess_branch_prob)
537 add_reg_note (insn, REG_BR_PRED,
538 gen_rtx_CONCAT (VOIDmode,
539 GEN_INT ((int) predictor),
540 GEN_INT ((int) probability)));
543 /* Predict insn by given predictor. */
546 predict_insn_def (rtx_insn *insn, enum br_predictor predictor,
547 enum prediction taken)
549 int probability = predictor_info[(int) predictor].hitrate;
550 gcc_assert (probability != PROB_UNINITIALIZED);
553 probability = REG_BR_PROB_BASE - probability;
555 predict_insn (insn, predictor, probability);
558 /* Predict edge E with given probability if possible. */
561 rtl_predict_edge (edge e, enum br_predictor predictor, int probability)
564 last_insn = BB_END (e->src);
566 /* We can store the branch prediction information only about
567 conditional jumps. */
568 if (!any_condjump_p (last_insn))
571 /* We always store probability of branching. */
572 if (e->flags & EDGE_FALLTHRU)
573 probability = REG_BR_PROB_BASE - probability;
575 predict_insn (last_insn, predictor, probability);
578 /* Predict edge E with the given PROBABILITY. */
580 gimple_predict_edge (edge e, enum br_predictor predictor, int probability)
582 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
583 && EDGE_COUNT (e->src->succs) > 1
584 && flag_guess_branch_prob
587 struct edge_prediction *i = XNEW (struct edge_prediction);
588 edge_prediction *&preds = bb_predictions->get_or_insert (e->src);
592 i->ep_probability = probability;
593 i->ep_predictor = predictor;
598 /* Filter edge predictions PREDS by a function FILTER. DATA is passed
599 to the filter function. */
602 filter_predictions (edge_prediction **preds,
603 bool (*filter) (edge_prediction *, void *), void *data)
610 struct edge_prediction **prediction = preds;
611 struct edge_prediction *next;
615 if ((*filter) (*prediction, data))
616 prediction = &((*prediction)->ep_next);
619 next = (*prediction)->ep_next;
627 /* Filter function predicate that returns true for an edge prediction P
628 if its edge is equal to DATA. */
631 equal_edge_p (edge_prediction *p, void *data)
633 return p->ep_edge == (edge)data;
636 /* Remove all predictions on given basic block that are attached
639 remove_predictions_associated_with_edge (edge e)
644 edge_prediction **preds = bb_predictions->get (e->src);
645 filter_predictions (preds, equal_edge_p, e);
648 /* Clears the list of predictions stored for BB. */
651 clear_bb_predictions (basic_block bb)
653 edge_prediction **preds = bb_predictions->get (bb);
654 struct edge_prediction *pred, *next;
659 for (pred = *preds; pred; pred = next)
661 next = pred->ep_next;
667 /* Return true when we can store a prediction on insn INSN.
668 At the moment we represent predictions only on conditional
669 jumps, not on computed jumps or other complicated cases. */
671 can_predict_insn_p (const rtx_insn *insn)
673 return (JUMP_P (insn)
674 && any_condjump_p (insn)
675 && EDGE_COUNT (BLOCK_FOR_INSN (insn)->succs) >= 2);
678 /* Predict edge E by given predictor if possible. */
681 predict_edge_def (edge e, enum br_predictor predictor,
682 enum prediction taken)
684 int probability = predictor_info[(int) predictor].hitrate;
687 probability = REG_BR_PROB_BASE - probability;
689 predict_edge (e, predictor, probability);
692 /* Invert all branch predictions or probability notes in the INSN. This needs
693 to be done each time we invert the condition used by the jump. */
696 invert_br_probabilities (rtx insn)
700 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
701 if (REG_NOTE_KIND (note) == REG_BR_PROB)
702 XINT (note, 0) = profile_probability::from_reg_br_prob_note
703 (XINT (note, 0)).invert ().to_reg_br_prob_note ();
704 else if (REG_NOTE_KIND (note) == REG_BR_PRED)
705 XEXP (XEXP (note, 0), 1)
706 = GEN_INT (REG_BR_PROB_BASE - INTVAL (XEXP (XEXP (note, 0), 1)));
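/* For illustration, assuming REG_BR_PROB_BASE is 10000: a REG_BR_PRED note
   carrying probability 9000 (90% taken) becomes 10000 - 9000 = 1000
   (10% taken) once the jump condition is inverted, while REG_BR_PROB notes
   are inverted through profile_probability::invert above.  */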
709 /* Dump information about the branch prediction to the output file. */
712 dump_prediction (FILE *file, enum br_predictor predictor, int probability,
713 basic_block bb, enum predictor_reason reason = REASON_NONE,
723 FOR_EACH_EDGE (e, ei, bb->succs)
724 if (! (e->flags & EDGE_FALLTHRU))
727 char edge_info_str[128];
729 sprintf (edge_info_str, " of edge %d->%d", ep_edge->src->index,
730 ep_edge->dest->index);
732 edge_info_str[0] = '\0';
734 fprintf (file, " %s heuristics%s%s: %.2f%%",
735 predictor_info[predictor].name,
736 edge_info_str, reason_messages[reason],
737 probability * 100.0 / REG_BR_PROB_BASE);
739 if (bb->count.initialized_p ())
741 fprintf (file, " exec ");
742 bb->count.dump (file);
745 fprintf (file, " hit ");
746 e->count ().dump (file);
747 fprintf (file, " (%.1f%%)", e->count ().to_gcov_type() * 100.0
748 / bb->count.to_gcov_type ());
752 fprintf (file, "\n");
754 /* Print output that can be easily read by the analyze_brprob.py script. We
755 are interested only in counts that are read from GCDA files. */
756 if (dump_file && (dump_flags & TDF_DETAILS)
757 && bb->count.precise_p ()
758 && reason == REASON_NONE)
760 fprintf (file, ";;heuristics;%s;%" PRId64 ";%" PRId64 ";%.1f;\n",
761 predictor_info[predictor].name,
762 bb->count.to_gcov_type (), e->count ().to_gcov_type (),
763 probability * 100.0 / REG_BR_PROB_BASE);
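/* An illustrative line in this format, with made-up numbers, would be
   ";;heuristics;loop exit;1000;900;90.0;" meaning the "loop exit" predictor
   saw a block executed 1000 times, the predicted edge taken 900 times, and
   guessed a probability of 90%.  */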
767 /* Return true if STMT is known to be unlikely executed. */
770 unlikely_executed_stmt_p (gimple *stmt)
772 if (!is_gimple_call (stmt))
774 /* NORETURN attribute alone is not strong enough: exit() may be quite
775 likely executed once during program run. */
776 if (gimple_call_fntype (stmt)
777 && lookup_attribute ("cold",
778 TYPE_ATTRIBUTES (gimple_call_fntype (stmt)))
779 && !lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl)))
781 tree decl = gimple_call_fndecl (stmt);
784 if (lookup_attribute ("cold", DECL_ATTRIBUTES (decl))
785 && !lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl)))
788 cgraph_node *n = cgraph_node::get (decl);
793 n = n->ultimate_alias_target (&avail);
794 if (avail < AVAIL_AVAILABLE)
797 || n->decl == current_function_decl)
799 return n->frequency == NODE_FREQUENCY_UNLIKELY_EXECUTED;
802 /* Return true if BB is unlikely executed. */
805 unlikely_executed_bb_p (basic_block bb)
807 if (bb->count == profile_count::zero ())
809 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun) || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
811 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
812 !gsi_end_p (gsi); gsi_next (&gsi))
814 if (unlikely_executed_stmt_p (gsi_stmt (gsi)))
816 if (stmt_can_terminate_bb_p (gsi_stmt (gsi)))
822 /* We cannot predict the probabilities of outgoing edges of bb. Set them
823 evenly and hope for the best. If UNLIKELY_EDGES is not null, the edges in
824 the set are given PROB_VERY_UNLIKELY probability and the remaining probability
825 is distributed evenly among the edges not mentioned in the set. Similarly for
826 LIKELY_EDGES: if we have exactly one likely edge, predict the other edges as not probable. */
830 set_even_probabilities (basic_block bb,
831 hash_set<edge> *unlikely_edges = NULL,
832 hash_set<edge_prediction *> *likely_edges = NULL)
834 unsigned nedges = 0, unlikely_count = 0;
837 profile_probability all = profile_probability::always ();
839 FOR_EACH_EDGE (e, ei, bb->succs)
840 if (e->probability.initialized_p ())
841 all -= e->probability;
842 else if (!unlikely_executed_edge_p (e))
845 if (unlikely_edges != NULL && unlikely_edges->contains (e))
847 all -= profile_probability::very_unlikely ();
852 /* If all edges are unlikely, make the distribution even. */
853 unsigned likely_count = likely_edges ? likely_edges->elements () : 0;
854 if (unlikely_count == nedges)
856 unlikely_edges = NULL;
860 /* If we have one likely edge, then use its probability and distribute
861 the remaining probability evenly. */
862 if (likely_count == 1)
864 FOR_EACH_EDGE (e, ei, bb->succs)
865 if (e->probability.initialized_p ())
867 else if (!unlikely_executed_edge_p (e))
869 edge_prediction *prediction = *likely_edges->begin ();
870 int p = prediction->ep_probability;
871 profile_probability prob
872 = profile_probability::from_reg_br_prob_base (p);
874 if (prediction->ep_edge == e)
875 e->probability = prob;
876 else if (unlikely_edges != NULL && unlikely_edges->contains (e))
877 e->probability = profile_probability::very_unlikely ();
880 profile_probability remainder = prob.invert ();
881 remainder -= profile_probability::very_unlikely ()
882 .apply_scale (unlikely_count, 1);
883 int count = nedges - unlikely_count - 1;
884 gcc_assert (count >= 0);
886 e->probability = remainder.apply_scale (1, count);
890 e->probability = profile_probability::never ();
894 /* Make all unlikely edges unlikely and the rest will have even
896 unsigned scale = nedges - unlikely_count;
897 FOR_EACH_EDGE (e, ei, bb->succs)
898 if (e->probability.initialized_p ())
900 else if (!unlikely_executed_edge_p (e))
902 if (unlikely_edges != NULL && unlikely_edges->contains (e))
903 e->probability = profile_probability::very_unlikely ();
905 e->probability = all.apply_scale (1, scale);
908 e->probability = profile_probability::never ();
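/* Illustration of the distribution above: for a block with four successor
   edges of which one is in UNLIKELY_EDGES, that edge gets
   profile_probability::very_unlikely () and each of the remaining three
   edges receives a third of the leftover probability; with no hints at all,
   each of the four edges simply gets 1/4.  */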
912 /* Add REG_BR_PROB note to JUMP with PROB. */
915 add_reg_br_prob_note (rtx_insn *jump, profile_probability prob)
917 gcc_checking_assert (JUMP_P (jump) && !find_reg_note (jump, REG_BR_PROB, 0));
918 add_int_reg_note (jump, REG_BR_PROB, prob.to_reg_br_prob_note ());
921 /* Combine all REG_BR_PRED notes into single probability and attach REG_BR_PROB
922 note if not already present. Remove now useless REG_BR_PRED notes. */
925 combine_predictions_for_insn (rtx_insn *insn, basic_block bb)
930 int best_probability = PROB_EVEN;
931 enum br_predictor best_predictor = END_PREDICTORS;
932 int combined_probability = REG_BR_PROB_BASE / 2;
934 bool first_match = false;
937 if (!can_predict_insn_p (insn))
939 set_even_probabilities (bb);
943 prob_note = find_reg_note (insn, REG_BR_PROB, 0);
944 pnote = &REG_NOTES (insn);
946 fprintf (dump_file, "Predictions for insn %i bb %i\n", INSN_UID (insn),
949 /* We implement "first match" heuristics and use the probability guessed
950 by the predictor with the smallest index. */
951 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
952 if (REG_NOTE_KIND (note) == REG_BR_PRED)
954 enum br_predictor predictor = ((enum br_predictor)
955 INTVAL (XEXP (XEXP (note, 0), 0)));
956 int probability = INTVAL (XEXP (XEXP (note, 0), 1));
959 if (best_predictor > predictor
960 && predictor_info[predictor].flags & PRED_FLAG_FIRST_MATCH)
961 best_probability = probability, best_predictor = predictor;
963 d = (combined_probability * probability
964 + (REG_BR_PROB_BASE - combined_probability)
965 * (REG_BR_PROB_BASE - probability));
967 /* Use FP math to avoid overflows of 32-bit integers. */
969 /* If one probability is 0% and one 100%, avoid division by zero. */
970 combined_probability = REG_BR_PROB_BASE / 2;
972 combined_probability = (((double) combined_probability) * probability
973 * REG_BR_PROB_BASE / d + 0.5);
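/* Worked example of the Dempster-Shafer combination above, assuming
   REG_BR_PROB_BASE is 10000: starting from the neutral value 5000 and
   feeding in two independent predictions of 7500 (75%), the first step
   yields 7500 and the second yields
   10000 * 7500 * 7500 / (7500 * 7500 + 2500 * 2500) = 9000, i.e. two
   agreeing predictors reinforce each other to roughly 90%.  */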
976 /* Decide which heuristic to use. If we didn't match anything,
977 use the no_prediction heuristic; if we did match, use either
978 first match or Dempster-Shafer theory depending on the flags. */
980 if (best_predictor != END_PREDICTORS)
984 dump_prediction (dump_file, PRED_NO_PREDICTION,
985 combined_probability, bb);
989 dump_prediction (dump_file, PRED_DS_THEORY, combined_probability,
990 bb, !first_match ? REASON_NONE : REASON_IGNORED);
992 dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability,
993 bb, first_match ? REASON_NONE : REASON_IGNORED);
997 combined_probability = best_probability;
998 dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb);
1002 if (REG_NOTE_KIND (*pnote) == REG_BR_PRED)
1004 enum br_predictor predictor = ((enum br_predictor)
1005 INTVAL (XEXP (XEXP (*pnote, 0), 0)));
1006 int probability = INTVAL (XEXP (XEXP (*pnote, 0), 1));
1008 dump_prediction (dump_file, predictor, probability, bb,
1009 (!first_match || best_predictor == predictor)
1010 ? REASON_NONE : REASON_IGNORED);
1011 *pnote = XEXP (*pnote, 1);
1014 pnote = &XEXP (*pnote, 1);
1019 profile_probability p
1020 = profile_probability::from_reg_br_prob_base (combined_probability);
1021 add_reg_br_prob_note (insn, p);
1023 /* Save the prediction into the CFG in case we are looking at a
1024 non-degenerate conditional jump. */
1025 if (!single_succ_p (bb))
1027 BRANCH_EDGE (bb)->probability = p;
1028 FALLTHRU_EDGE (bb)->probability
1029 = BRANCH_EDGE (bb)->probability.invert ();
1032 else if (!single_succ_p (bb))
1034 profile_probability prob = profile_probability::from_reg_br_prob_note
1035 (XINT (prob_note, 0));
1037 BRANCH_EDGE (bb)->probability = prob;
1038 FALLTHRU_EDGE (bb)->probability = prob.invert ();
1041 single_succ_edge (bb)->probability = profile_probability::always ();
1044 /* Edge prediction hash traits. */
1046 struct predictor_hash: pointer_hash <edge_prediction>
1049 static inline hashval_t hash (const edge_prediction *);
1050 static inline bool equal (const edge_prediction *, const edge_prediction *);
1053 /* Calculate hash value of an edge prediction P based on predictor and
1054 normalized probability. */
1057 predictor_hash::hash (const edge_prediction *p)
1059 inchash::hash hstate;
1060 hstate.add_int (p->ep_predictor);
1062 int prob = p->ep_probability;
1063 if (prob > REG_BR_PROB_BASE / 2)
1064 prob = REG_BR_PROB_BASE - prob;
1066 hstate.add_int (prob);
1068 return hstate.end ();
1071 /* Return true if edge predictions P1 and P2 use the same predictor and
1072 have equal (or opposite) probabilities. */
1075 predictor_hash::equal (const edge_prediction *p1, const edge_prediction *p2)
1077 return (p1->ep_predictor == p2->ep_predictor
1078 && (p1->ep_probability == p2->ep_probability
1079 || p1->ep_probability == REG_BR_PROB_BASE - p2->ep_probability));
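/* Note that, because the hash folds probability P together with
   REG_BR_PROB_BASE - P and equal () accepts either, two hints from the same
   predictor that predict opposite outcomes (e.g. 3000 and 7000) land in the
   same slot; this is what lets prune_predictions_for_bb detect the
   single-edge and edge-pair duplicates below.  */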
1082 struct predictor_hash_traits: predictor_hash,
1083 typed_noop_remove <edge_prediction *> {};
1085 /* Return true if edge prediction P is not in DATA hash set. */
1088 not_removed_prediction_p (edge_prediction *p, void *data)
1090 hash_set<edge_prediction *> *remove = (hash_set<edge_prediction *> *) data;
1091 return !remove->contains (p);
1094 /* Prune predictions for a basic block BB. Currently we do the following
1097 1) remove a duplicate prediction that is guessed with the same probability
1098 (different from 1/2) for both outgoing edges
1099 2) remove duplicates for a prediction that belongs with the same probability
1105 prune_predictions_for_bb (basic_block bb)
1107 edge_prediction **preds = bb_predictions->get (bb);
1111 hash_table <predictor_hash_traits> s (13);
1112 hash_set <edge_prediction *> remove;
1114 /* Step 1: identify predictors that should be removed. */
1115 for (edge_prediction *pred = *preds; pred; pred = pred->ep_next)
1117 edge_prediction *existing = s.find (pred);
1120 if (pred->ep_edge == existing->ep_edge
1121 && pred->ep_probability == existing->ep_probability)
1123 /* Remove a duplicate predictor. */
1124 dump_prediction (dump_file, pred->ep_predictor,
1125 pred->ep_probability, bb,
1126 REASON_SINGLE_EDGE_DUPLICATE, pred->ep_edge);
1130 else if (pred->ep_edge != existing->ep_edge
1131 && pred->ep_probability == existing->ep_probability
1132 && pred->ep_probability != REG_BR_PROB_BASE / 2)
1134 /* Remove both predictors as they predict the same
1136 dump_prediction (dump_file, existing->ep_predictor,
1137 pred->ep_probability, bb,
1138 REASON_EDGE_PAIR_DUPLICATE,
1140 dump_prediction (dump_file, pred->ep_predictor,
1141 pred->ep_probability, bb,
1142 REASON_EDGE_PAIR_DUPLICATE,
1145 remove.add (existing);
1150 edge_prediction **slot2 = s.find_slot (pred, INSERT);
1154 /* Step 2: Remove predictors. */
1155 filter_predictions (preds, not_removed_prediction_p, &remove);
1159 /* Combine predictions into single probability and store them into CFG.
1160 Remove now useless prediction entries.
1161 If DRY_RUN is set, only produce dumps and do not modify profile. */
1164 combine_predictions_for_bb (basic_block bb, bool dry_run)
1166 int best_probability = PROB_EVEN;
1167 enum br_predictor best_predictor = END_PREDICTORS;
1168 int combined_probability = REG_BR_PROB_BASE / 2;
1170 bool first_match = false;
1172 struct edge_prediction *pred;
1174 edge e, first = NULL, second = NULL;
1179 FOR_EACH_EDGE (e, ei, bb->succs)
1181 if (!unlikely_executed_edge_p (e))
1184 if (first && !second)
1189 else if (!e->probability.initialized_p ())
1190 e->probability = profile_probability::never ();
1191 if (!e->probability.initialized_p ())
1193 else if (e->probability == profile_probability::never ())
1197 /* When there is no successor or only one choice, prediction is easy.
1199 When we have a basic block with more than 2 successors, the situation
1200 is more complicated as DS theory cannot be used literally.
1201 More precisely, let's assume we predicted edge e1 with probability p1,
1202 thus: m1({b1}) = p1. As we're going to combine more than 2 edges, we
1203 need to find probability of e.g. m1({b2}), which we don't know.
1204 The only approximation is to equally distribute 1-p1 to all edges
1207 According to numbers we've got from the SPEC2006 benchmark, there's only
1208 one interesting reliable predictor (noreturn call), which can be
1209 handled with a somewhat simpler approach. */
1212 hash_set<edge> unlikely_edges (4);
1213 hash_set<edge_prediction *> likely_edges (4);
1215 /* Identify all edges that have a probability close to very unlikely.
1216 Extending the approach beyond the very unlikely case is not worth doing, as
1217 there's no such probability in the SPEC2006 benchmark. */
1218 edge_prediction **preds = bb_predictions->get (bb);
1220 for (pred = *preds; pred; pred = pred->ep_next)
1222 if (pred->ep_probability <= PROB_VERY_UNLIKELY
1223 || pred->ep_predictor == PRED_COLD_LABEL)
1224 unlikely_edges.add (pred->ep_edge);
1225 else if (pred->ep_probability >= PROB_VERY_LIKELY
1226 || pred->ep_predictor == PRED_BUILTIN_EXPECT
1227 || pred->ep_predictor == PRED_HOT_LABEL)
1228 likely_edges.add (pred);
1231 /* It can happen that an edge is both in likely_edges and unlikely_edges.
1232 Clear both sets in that situation. */
1233 for (hash_set<edge_prediction *>::iterator it = likely_edges.begin ();
1234 it != likely_edges.end (); ++it)
1235 if (unlikely_edges.contains ((*it)->ep_edge))
1237 likely_edges.empty ();
1238 unlikely_edges.empty ();
1243 set_even_probabilities (bb, &unlikely_edges, &likely_edges);
1244 clear_bb_predictions (bb);
1247 fprintf (dump_file, "Predictions for bb %i\n", bb->index);
1248 if (unlikely_edges.is_empty ())
1250 "%i edges in bb %i predicted to even probabilities\n",
1255 "%i edges in bb %i predicted with some unlikely edges\n",
1257 FOR_EACH_EDGE (e, ei, bb->succs)
1258 if (!unlikely_executed_edge_p (e))
1259 dump_prediction (dump_file, PRED_COMBINED,
1260 e->probability.to_reg_br_prob_base (), bb, REASON_NONE, e);
1267 fprintf (dump_file, "Predictions for bb %i\n", bb->index);
1269 prune_predictions_for_bb (bb);
1271 edge_prediction **preds = bb_predictions->get (bb);
1275 /* We implement "first match" heuristics and use the probability guessed
1276 by the predictor with the smallest index. */
1277 for (pred = *preds; pred; pred = pred->ep_next)
1279 enum br_predictor predictor = pred->ep_predictor;
1280 int probability = pred->ep_probability;
1282 if (pred->ep_edge != first)
1283 probability = REG_BR_PROB_BASE - probability;
1286 /* First match heuristics would be wildly confused if we predicted
1288 if (best_predictor > predictor
1289 && predictor_info[predictor].flags & PRED_FLAG_FIRST_MATCH)
1291 struct edge_prediction *pred2;
1292 int prob = probability;
1294 for (pred2 = (struct edge_prediction *) *preds;
1295 pred2; pred2 = pred2->ep_next)
1296 if (pred2 != pred && pred2->ep_predictor == pred->ep_predictor)
1298 int probability2 = pred2->ep_probability;
1300 if (pred2->ep_edge != first)
1301 probability2 = REG_BR_PROB_BASE - probability2;
1303 if ((probability < REG_BR_PROB_BASE / 2) !=
1304 (probability2 < REG_BR_PROB_BASE / 2))
1307 /* If the same predictor later gave better result, go for it! */
1308 if ((probability >= REG_BR_PROB_BASE / 2 && (probability2 > probability))
1309 || (probability <= REG_BR_PROB_BASE / 2 && (probability2 < probability)))
1310 prob = probability2;
1313 best_probability = prob, best_predictor = predictor;
1316 d = (combined_probability * probability
1317 + (REG_BR_PROB_BASE - combined_probability)
1318 * (REG_BR_PROB_BASE - probability));
1320 /* Use FP math to avoid overflows of 32-bit integers. */
1322 /* If one probability is 0% and one 100%, avoid division by zero. */
1323 combined_probability = REG_BR_PROB_BASE / 2;
1325 combined_probability = (((double) combined_probability)
1327 * REG_BR_PROB_BASE / d + 0.5);
1331 /* Decide which heuristic to use. If we didn't match anything,
1332 use the no_prediction heuristic; if we did match, use either
1333 first match or Dempster-Shafer theory depending on the flags. */
1335 if (best_predictor != END_PREDICTORS)
1339 dump_prediction (dump_file, PRED_NO_PREDICTION, combined_probability, bb);
1343 dump_prediction (dump_file, PRED_DS_THEORY, combined_probability, bb,
1344 !first_match ? REASON_NONE : REASON_IGNORED);
1346 dump_prediction (dump_file, PRED_FIRST_MATCH, best_probability, bb,
1347 first_match ? REASON_NONE : REASON_IGNORED);
1351 combined_probability = best_probability;
1352 dump_prediction (dump_file, PRED_COMBINED, combined_probability, bb);
1356 for (pred = (struct edge_prediction *) *preds; pred; pred = pred->ep_next)
1358 enum br_predictor predictor = pred->ep_predictor;
1359 int probability = pred->ep_probability;
1361 dump_prediction (dump_file, predictor, probability, bb,
1362 (!first_match || best_predictor == predictor)
1363 ? REASON_NONE : REASON_IGNORED, pred->ep_edge);
1366 clear_bb_predictions (bb);
1369 /* If we have only one successor which is unknown, we can compute missing
1373 profile_probability prob = profile_probability::always ();
1374 edge missing = NULL;
1376 FOR_EACH_EDGE (e, ei, bb->succs)
1377 if (e->probability.initialized_p ())
1378 prob -= e->probability;
1379 else if (missing == NULL)
1383 missing->probability = prob;
1385 /* If nothing is unknown, we have nothing to update. */
1386 else if (!nunknown && nzero != (int)EDGE_COUNT (bb->succs))
1391 = profile_probability::from_reg_br_prob_base (combined_probability);
1392 second->probability = first->probability.invert ();
1396 /* Check if T1 and T2 satisfy the IV_COMPARE condition.
1397 Return the SSA_NAME if the condition is satisfied, NULL otherwise.
1399 T1 and T2 should be one of the following cases:
1400 1. T1 is SSA_NAME, T2 is NULL
1401 2. T1 is SSA_NAME, T2 is INTEGER_CST between [-4, 4]
1402 3. T2 is SSA_NAME, T1 is INTEGER_CST between [-4, 4] */
1405 strips_small_constant (tree t1, tree t2)
1412 else if (TREE_CODE (t1) == SSA_NAME)
1414 else if (tree_fits_shwi_p (t1))
1415 value = tree_to_shwi (t1);
1421 else if (tree_fits_shwi_p (t2))
1422 value = tree_to_shwi (t2);
1423 else if (TREE_CODE (t2) == SSA_NAME)
1431 if (value <= 4 && value >= -4)
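/* Illustration with a hypothetical SSA name: for t1 = i_5 and t2 = 3 the
   function returns i_5, since |3| <= 4; with a constant of, say, 7 it
   returns NULL because the constant is no longer considered small.  */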
1437 /* Return the SSA_NAME in T or T's operands.
1438 Return NULL if SSA_NAME cannot be found. */
1441 get_base_value (tree t)
1443 if (TREE_CODE (t) == SSA_NAME)
1446 if (!BINARY_CLASS_P (t))
1449 switch (TREE_OPERAND_LENGTH (t))
1452 return strips_small_constant (TREE_OPERAND (t, 0), NULL);
1454 return strips_small_constant (TREE_OPERAND (t, 0),
1455 TREE_OPERAND (t, 1));
1461 /* Check the compare STMT in LOOP. If it compares an induction
1462 variable to a loop invariant, return true, and save
1463 LOOP_INVARIANT, COMPARE_CODE and LOOP_STEP.
1464 Otherwise return false and set LOOP_INVARIANT to NULL. */
1467 is_comparison_with_loop_invariant_p (gcond *stmt, class loop *loop,
1468 tree *loop_invariant,
1469 enum tree_code *compare_code,
1473 tree op0, op1, bound, base;
1475 enum tree_code code;
1478 code = gimple_cond_code (stmt);
1479 *loop_invariant = NULL;
1495 op0 = gimple_cond_lhs (stmt);
1496 op1 = gimple_cond_rhs (stmt);
1498 if ((TREE_CODE (op0) != SSA_NAME && TREE_CODE (op0) != INTEGER_CST)
1499 || (TREE_CODE (op1) != SSA_NAME && TREE_CODE (op1) != INTEGER_CST))
1501 if (!simple_iv (loop, loop_containing_stmt (stmt), op0, &iv0, true))
1503 if (!simple_iv (loop, loop_containing_stmt (stmt), op1, &iv1, true))
1505 if (TREE_CODE (iv0.step) != INTEGER_CST
1506 || TREE_CODE (iv1.step) != INTEGER_CST)
1508 if ((integer_zerop (iv0.step) && integer_zerop (iv1.step))
1509 || (!integer_zerop (iv0.step) && !integer_zerop (iv1.step)))
1512 if (integer_zerop (iv0.step))
1514 if (code != NE_EXPR && code != EQ_EXPR)
1515 code = invert_tree_comparison (code, false);
1518 if (tree_fits_shwi_p (iv1.step))
1527 if (tree_fits_shwi_p (iv0.step))
1533 if (TREE_CODE (bound) != INTEGER_CST)
1534 bound = get_base_value (bound);
1537 if (TREE_CODE (base) != INTEGER_CST)
1538 base = get_base_value (base);
1542 *loop_invariant = bound;
1543 *compare_code = code;
1545 *loop_iv_base = base;
1549 /* Compare two SSA_NAMEs: returns TRUE if T1 and T2 are value coherent. */
1552 expr_coherent_p (tree t1, tree t2)
1555 tree ssa_name_1 = NULL;
1556 tree ssa_name_2 = NULL;
1558 gcc_assert (TREE_CODE (t1) == SSA_NAME || TREE_CODE (t1) == INTEGER_CST);
1559 gcc_assert (TREE_CODE (t2) == SSA_NAME || TREE_CODE (t2) == INTEGER_CST);
1564 if (TREE_CODE (t1) == INTEGER_CST && TREE_CODE (t2) == INTEGER_CST)
1566 if (TREE_CODE (t1) == INTEGER_CST || TREE_CODE (t2) == INTEGER_CST)
1569 /* Check to see if t1 is expressed/defined with t2. */
1570 stmt = SSA_NAME_DEF_STMT (t1);
1571 gcc_assert (stmt != NULL);
1572 if (is_gimple_assign (stmt))
1574 ssa_name_1 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
1575 if (ssa_name_1 && ssa_name_1 == t2)
1579 /* Check to see if t2 is expressed/defined with t1. */
1580 stmt = SSA_NAME_DEF_STMT (t2);
1581 gcc_assert (stmt != NULL);
1582 if (is_gimple_assign (stmt))
1584 ssa_name_2 = SINGLE_SSA_TREE_OPERAND (stmt, SSA_OP_USE);
1585 if (ssa_name_2 && ssa_name_2 == t1)
1589 /* Compare if t1 and t2's def_stmts are identical. */
1590 if (ssa_name_2 != NULL && ssa_name_1 == ssa_name_2)
1596 /* Return true if BB is predicted by one of the loop heuristics. */
1599 predicted_by_loop_heuristics_p (basic_block bb)
1601 struct edge_prediction *i;
1602 edge_prediction **preds = bb_predictions->get (bb);
1607 for (i = *preds; i; i = i->ep_next)
1608 if (i->ep_predictor == PRED_LOOP_ITERATIONS_GUESSED
1609 || i->ep_predictor == PRED_LOOP_ITERATIONS_MAX
1610 || i->ep_predictor == PRED_LOOP_ITERATIONS
1611 || i->ep_predictor == PRED_LOOP_EXIT
1612 || i->ep_predictor == PRED_LOOP_EXIT_WITH_RECURSION
1613 || i->ep_predictor == PRED_LOOP_EXTRA_EXIT)
1618 /* Predict branch probability of BB when BB contains a branch that compares
1619 an induction variable in LOOP with LOOP_IV_BASE_VAR to LOOP_BOUND_VAR. The
1620 loop exit is compared using LOOP_BOUND_CODE, with step of LOOP_BOUND_STEP.
1623 for (int i = 0; i < bound; i++) {
       if (i < bound - 2)
         computation_1 ();
       else
         computation_2 ();
     }

1630 In this loop, we will predict the branch inside the loop to be taken. */
1633 predict_iv_comparison (class loop *loop, basic_block bb,
1634 tree loop_bound_var,
1635 tree loop_iv_base_var,
1636 enum tree_code loop_bound_code,
1637 int loop_bound_step)
1640 tree compare_var, compare_base;
1641 enum tree_code compare_code;
1642 tree compare_step_var;
1646 if (predicted_by_loop_heuristics_p (bb))
1649 stmt = last_stmt (bb);
1650 if (!stmt || gimple_code (stmt) != GIMPLE_COND)
1652 if (!is_comparison_with_loop_invariant_p (as_a <gcond *> (stmt),
1659 /* Find the taken edge. */
1660 FOR_EACH_EDGE (then_edge, ei, bb->succs)
1661 if (then_edge->flags & EDGE_TRUE_VALUE)
1664 /* When comparing an IV to a loop invariant, NE is more likely to be
1665 taken while EQ is more likely to be not-taken. */
1666 if (compare_code == NE_EXPR)
1668 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1671 else if (compare_code == EQ_EXPR)
1673 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1677 if (!expr_coherent_p (loop_iv_base_var, compare_base))
1680 /* If loop bound, base and compare bound are all constants, we can
1681 calculate the probability directly. */
1682 if (tree_fits_shwi_p (loop_bound_var)
1683 && tree_fits_shwi_p (compare_var)
1684 && tree_fits_shwi_p (compare_base))
1687 wi::overflow_type overflow;
1688 bool overall_overflow = false;
1689 widest_int compare_count, tem;
1691 /* (loop_bound - base) / compare_step */
1692 tem = wi::sub (wi::to_widest (loop_bound_var),
1693 wi::to_widest (compare_base), SIGNED, &overflow);
1694 overall_overflow |= overflow;
1695 widest_int loop_count = wi::div_trunc (tem,
1696 wi::to_widest (compare_step_var),
1698 overall_overflow |= overflow;
1700 if (!wi::neg_p (wi::to_widest (compare_step_var))
1701 ^ (compare_code == LT_EXPR || compare_code == LE_EXPR))
1703 /* (loop_bound - compare_bound) / compare_step */
1704 tem = wi::sub (wi::to_widest (loop_bound_var),
1705 wi::to_widest (compare_var), SIGNED, &overflow);
1706 overall_overflow |= overflow;
1707 compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
1709 overall_overflow |= overflow;
1713 /* (compare_bound - base) / compare_step */
1714 tem = wi::sub (wi::to_widest (compare_var),
1715 wi::to_widest (compare_base), SIGNED, &overflow);
1716 overall_overflow |= overflow;
1717 compare_count = wi::div_trunc (tem, wi::to_widest (compare_step_var),
1719 overall_overflow |= overflow;
1721 if (compare_code == LE_EXPR || compare_code == GE_EXPR)
1723 if (loop_bound_code == LE_EXPR || loop_bound_code == GE_EXPR)
1725 if (wi::neg_p (compare_count))
1727 if (wi::neg_p (loop_count))
1729 if (loop_count == 0)
1731 else if (wi::cmps (compare_count, loop_count) == 1)
1732 probability = REG_BR_PROB_BASE;
1735 tem = compare_count * REG_BR_PROB_BASE;
1736 tem = wi::udiv_trunc (tem, loop_count);
1737 probability = tem.to_uhwi ();
1740 /* FIXME: The branch prediction seems broken. It has only 20% hitrate. */
1741 if (!overall_overflow)
1742 predict_edge (then_edge, PRED_LOOP_IV_COMPARE, probability);
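/* Worked example for the constant case above: for a loop
   "for (i = 0; i < 100; i++)" containing "if (i < 30)", loop_count is
   (100 - 0) / 1 = 100 and compare_count is (30 - 0) / 1 = 30, so the then
   edge is predicted with probability 30 * REG_BR_PROB_BASE / 100,
   i.e. roughly 30%.  */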
1747 if (expr_coherent_p (loop_bound_var, compare_var))
1749 if ((loop_bound_code == LT_EXPR || loop_bound_code == LE_EXPR)
1750 && (compare_code == LT_EXPR || compare_code == LE_EXPR))
1751 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1752 else if ((loop_bound_code == GT_EXPR || loop_bound_code == GE_EXPR)
1753 && (compare_code == GT_EXPR || compare_code == GE_EXPR))
1754 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1755 else if (loop_bound_code == NE_EXPR)
1757 /* If the loop backedge condition is "(i != bound)", we do
1758 the comparison based on the step of IV:
1759 * step < 0 : backedge condition is like (i > bound)
1760 * step > 0 : backedge condition is like (i < bound) */
1761 gcc_assert (loop_bound_step != 0);
1762 if (loop_bound_step > 0
1763 && (compare_code == LT_EXPR
1764 || compare_code == LE_EXPR))
1765 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1766 else if (loop_bound_step < 0
1767 && (compare_code == GT_EXPR
1768 || compare_code == GE_EXPR))
1769 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1771 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1774 /* The branch is predicted not-taken if loop_bound_code is
1775 the opposite of compare_code. */
1776 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1778 else if (expr_coherent_p (loop_iv_base_var, compare_var))
1781 for (i = s; i < h; i++)
       if (i > s + 2) ....
1783 The branch should be predicted taken. */
1784 if (loop_bound_step > 0
1785 && (compare_code == GT_EXPR || compare_code == GE_EXPR))
1786 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1787 else if (loop_bound_step < 0
1788 && (compare_code == LT_EXPR || compare_code == LE_EXPR))
1789 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, TAKEN);
1791 predict_edge_def (then_edge, PRED_LOOP_IV_COMPARE_GUESS, NOT_TAKEN);
1795 /* Predict for extra loop exits that will lead to EXIT_EDGE. The extra loop
1796 exits result from short-circuit conditions that will generate an
1799 if (foo() || global > 10)
1802 This will be translated into:

     BB3:
       loop header...
     BB4:
1807   if foo() goto BB6 else goto BB5
     BB5:
1809   if global > 10 goto BB6 else goto BB7
     BB6:
       goto BB7
     BB7:
1813   iftmp = (PHI 0(BB5), 1(BB6))
1814   if iftmp == 1 goto BB8 else goto BB3
     BB8:
1816   outside of the loop...
1818 The edge BB7->BB8 is loop exit because BB8 is outside of the loop.
1819 From the dataflow, we can infer that BB4->BB6 and BB5->BB6 are also loop
1820 exits. This function takes BB7->BB8 as input, and finds out the extra loop
1821 exits to predict them using PRED_LOOP_EXTRA_EXIT. */
1824 predict_extra_loop_exits (edge exit_edge)
1827 bool check_value_one;
1828 gimple *lhs_def_stmt;
1830 tree cmp_rhs, cmp_lhs;
1834 last = last_stmt (exit_edge->src);
1837 cmp_stmt = dyn_cast <gcond *> (last);
1841 cmp_rhs = gimple_cond_rhs (cmp_stmt);
1842 cmp_lhs = gimple_cond_lhs (cmp_stmt);
1843 if (!TREE_CONSTANT (cmp_rhs)
1844 || !(integer_zerop (cmp_rhs) || integer_onep (cmp_rhs)))
1846 if (TREE_CODE (cmp_lhs) != SSA_NAME)
1849 /* If check_value_one is true, only the phi_args with value '1' will lead
1850 to loop exit. Otherwise, only the phi_args with value '0' will lead to
1852 check_value_one = (((integer_onep (cmp_rhs))
1853 ^ (gimple_cond_code (cmp_stmt) == EQ_EXPR))
1854 ^ ((exit_edge->flags & EDGE_TRUE_VALUE) != 0));
1856 lhs_def_stmt = SSA_NAME_DEF_STMT (cmp_lhs);
1860 phi_stmt = dyn_cast <gphi *> (lhs_def_stmt);
1864 for (i = 0; i < gimple_phi_num_args (phi_stmt); i++)
1868 tree val = gimple_phi_arg_def (phi_stmt, i);
1869 edge e = gimple_phi_arg_edge (phi_stmt, i);
1871 if (!TREE_CONSTANT (val) || !(integer_zerop (val) || integer_onep (val)))
1873 if ((check_value_one ^ integer_onep (val)) == 1)
1875 if (EDGE_COUNT (e->src->succs) != 1)
1877 predict_paths_leading_to_edge (e, PRED_LOOP_EXTRA_EXIT, NOT_TAKEN);
1881 FOR_EACH_EDGE (e1, ei, e->src->preds)
1882 predict_paths_leading_to_edge (e1, PRED_LOOP_EXTRA_EXIT, NOT_TAKEN);
1887 /* Predict edge probabilities by exploiting loop structure. */
1890 predict_loops (void)
1894 hash_set <class loop *> with_recursion(10);
1896 FOR_EACH_BB_FN (bb, cfun)
1898 gimple_stmt_iterator gsi;
1901 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1902 if (is_gimple_call (gsi_stmt (gsi))
1903 && (decl = gimple_call_fndecl (gsi_stmt (gsi))) != NULL
1904 && recursive_call_p (current_function_decl, decl))
1906 loop = bb->loop_father;
1907 while (loop && !with_recursion.add (loop))
1908 loop = loop_outer (loop);
1912 /* Try to predict out blocks in a loop that are not part of a
1914 FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
1916 basic_block bb, *bbs;
1917 unsigned j, n_exits = 0;
1919 class tree_niter_desc niter_desc;
1921 class nb_iter_bound *nb_iter;
1922 enum tree_code loop_bound_code = ERROR_MARK;
1923 tree loop_bound_step = NULL;
1924 tree loop_bound_var = NULL;
1925 tree loop_iv_base = NULL;
1927 bool recursion = with_recursion.contains (loop);
1929 exits = get_loop_exit_edges (loop);
1930 FOR_EACH_VEC_ELT (exits, j, ex)
1931 if (!unlikely_executed_edge_p (ex) && !(ex->flags & EDGE_ABNORMAL_CALL))
1939 if (dump_file && (dump_flags & TDF_DETAILS))
1940 fprintf (dump_file, "Predicting loop %i%s with %i exits.\n",
1941 loop->num, recursion ? " (with recursion)":"", n_exits);
1942 if (dump_file && (dump_flags & TDF_DETAILS)
1943 && max_loop_iterations_int (loop) >= 0)
1946 "Loop %d iterates at most %i times.\n", loop->num,
1947 (int)max_loop_iterations_int (loop));
1949 if (dump_file && (dump_flags & TDF_DETAILS)
1950 && likely_max_loop_iterations_int (loop) >= 0)
1952 fprintf (dump_file, "Loop %d likely iterates at most %i times.\n",
1953 loop->num, (int)likely_max_loop_iterations_int (loop));
1956 FOR_EACH_VEC_ELT (exits, j, ex)
1959 HOST_WIDE_INT nitercst;
1960 int max = param_max_predicted_iterations;
1962 enum br_predictor predictor;
1965 if (unlikely_executed_edge_p (ex)
1966 || (ex->flags & EDGE_ABNORMAL_CALL))
1968 /* Loop heuristics do not expect the exit conditional to be inside an
1969 inner loop. We predict from the innermost to the outermost loop. */
1970 if (predicted_by_loop_heuristics_p (ex->src))
1972 if (dump_file && (dump_flags & TDF_DETAILS))
1973 fprintf (dump_file, "Skipping exit %i->%i because "
1974 "it is already predicted.\n",
1975 ex->src->index, ex->dest->index);
1978 predict_extra_loop_exits (ex);
1980 if (number_of_iterations_exit (loop, ex, &niter_desc, false, false))
1981 niter = niter_desc.niter;
1982 if (!niter || TREE_CODE (niter_desc.niter) != INTEGER_CST)
1983 niter = loop_niter_by_eval (loop, ex);
1984 if (dump_file && (dump_flags & TDF_DETAILS)
1985 && TREE_CODE (niter) == INTEGER_CST)
1987 fprintf (dump_file, "Exit %i->%i %d iterates ",
1988 ex->src->index, ex->dest->index,
1990 print_generic_expr (dump_file, niter, TDF_SLIM);
1991 fprintf (dump_file, " times.\n");
1994 if (TREE_CODE (niter) == INTEGER_CST)
1996 if (tree_fits_uhwi_p (niter)
1998 && compare_tree_int (niter, max - 1) == -1)
1999 nitercst = tree_to_uhwi (niter) + 1;
2002 predictor = PRED_LOOP_ITERATIONS;
2004 /* If we have just one exit and we can derive some information about
2005 the number of iterations of the loop from the statements inside
2006 the loop, use it to predict this exit. */
2007 else if (n_exits == 1
2008 && estimated_stmt_executions (loop, &nit))
2010 if (wi::gtu_p (nit, max))
2013 nitercst = nit.to_shwi ();
2014 predictor = PRED_LOOP_ITERATIONS_GUESSED;
2016 /* If we have a likely upper bound, trust it for very small iteration
2017 counts. Such loops would otherwise get mispredicted by standard
2018 LOOP_EXIT heuristics. */
2019 else if (n_exits == 1
2020 && likely_max_stmt_executions (loop, &nit)
2022 RDIV (REG_BR_PROB_BASE,
2026 ? PRED_LOOP_EXIT_WITH_RECURSION
2027 : PRED_LOOP_EXIT].hitrate)))
2029 nitercst = nit.to_shwi ();
2030 predictor = PRED_LOOP_ITERATIONS_MAX;
2034 if (dump_file && (dump_flags & TDF_DETAILS))
2035 fprintf (dump_file, "Nothing known about exit %i->%i.\n",
2036 ex->src->index, ex->dest->index);
2040 if (dump_file && (dump_flags & TDF_DETAILS))
2041 fprintf (dump_file, "Recording prediction to %i iterations by %s.\n",
2042 (int)nitercst, predictor_info[predictor].name);
2043 /* If the prediction for number of iterations is zero, do not
2044 predict the exit edges. */
2048 probability = RDIV (REG_BR_PROB_BASE, nitercst);
2049 predict_edge (ex, predictor, probability);
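/* For example, an exit predicted to be reached after nitercst == 10
   iterations gets probability RDIV (REG_BR_PROB_BASE, 10), i.e. about 10%,
   leaving roughly 90% for staying in the loop.  */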
2053 /* Find information about loop bound variables. */
2054 for (nb_iter = loop->bounds; nb_iter;
2055 nb_iter = nb_iter->next)
2057 && gimple_code (nb_iter->stmt) == GIMPLE_COND)
2059 stmt = as_a <gcond *> (nb_iter->stmt);
2062 if (!stmt && last_stmt (loop->header)
2063 && gimple_code (last_stmt (loop->header)) == GIMPLE_COND)
2064 stmt = as_a <gcond *> (last_stmt (loop->header));
2066 is_comparison_with_loop_invariant_p (stmt, loop,
2072 bbs = get_loop_body (loop);
2074 for (j = 0; j < loop->num_nodes; j++)
2081 /* Bypass loop heuristics on continue statements. These
2082 statements construct loops via "non-loop" constructs
2083 in the source language and are better handled
2085 if (predicted_by_p (bb, PRED_CONTINUE))
2087 if (dump_file && (dump_flags & TDF_DETAILS))
2088 fprintf (dump_file, "BB %i predicted by continue.\n",
2093 /* If we already used more reliable loop exit predictors, do not
2094 bother with PRED_LOOP_EXIT. */
2095 if (!predicted_by_loop_heuristics_p (bb))
2097 /* For loops with many exits we don't want to predict all exits
2098 with a fairly large probability, because if all exits are
2099 considered in a row, the loop would be predicted to iterate
2100 almost never. The code dividing the probability by the number of
2101 exits is very rough. It should compute the number of exits
2102 taken in each path through the function (not the overall number
2103 of exits, which might be a lot higher for loops with wide switch
2104 statements in them) and compute the n-th root of it.
2106 We limit the minimal probability to 2% to avoid
2107 EDGE_PROBABILITY_RELIABLE from trusting the branch prediction
2108 as this was causing a regression in the perl benchmark containing such
2111 int probability = ((REG_BR_PROB_BASE
2114 ? PRED_LOOP_EXIT_WITH_RECURSION
2115 : PRED_LOOP_EXIT].hitrate)
2117 if (probability < HITRATE (2))
2118 probability = HITRATE (2);
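/* Illustration with assumed numbers: if the LOOP_EXIT predictor's hitrate
   were 90%, a loop with 5 such exits would give each exit edge
   (REG_BR_PROB_BASE - 9000) / 5 = 200, which already sits at the 2% floor
   applied above.  */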
2119 FOR_EACH_EDGE (e, ei, bb->succs)
2120 if (e->dest->index < NUM_FIXED_BLOCKS
2121 || !flow_bb_inside_loop_p (loop, e->dest))
2123 if (dump_file && (dump_flags & TDF_DETAILS))
2125 "Predicting exit %i->%i with prob %i.\n",
2126 e->src->index, e->dest->index, probability);
2128 recursion ? PRED_LOOP_EXIT_WITH_RECURSION
2129 : PRED_LOOP_EXIT, probability);
2133 predict_iv_comparison (loop, bb, loop_bound_var, loop_iv_base,
2135 tree_to_shwi (loop_bound_step));
2138 /* In the following code
       for (loop1)
         if (cond)
           for (loop2)
             body;
2143 guess that cond is unlikely. */
2144 if (loop_outer (loop)->num)
2146 basic_block bb = NULL;
2147 edge preheader_edge = loop_preheader_edge (loop);
2149 if (single_pred_p (preheader_edge->src)
2150 && single_succ_p (preheader_edge->src))
2151 preheader_edge = single_pred_edge (preheader_edge->src);
2153 gimple *stmt = last_stmt (preheader_edge->src);
2154 /* Pattern match the Fortran loop preheader:
2155 _16 = BUILTIN_EXPECT (_15, 1, PRED_FORTRAN_LOOP_PREHEADER);
2156 _17 = (logical(kind=4)) _16;
2162 Loop guard branch prediction says nothing about duplicated loop
2163 headers produced by the Fortran frontend, and in this case we want
2164 to predict paths leading to this preheader. */
2167 && gimple_code (stmt) == GIMPLE_COND
2168 && gimple_cond_code (stmt) == NE_EXPR
2169 && TREE_CODE (gimple_cond_lhs (stmt)) == SSA_NAME
2170 && integer_zerop (gimple_cond_rhs (stmt)))
2172 gimple *call_stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (stmt));
2173 if (gimple_code (call_stmt) == GIMPLE_ASSIGN
2174 && gimple_expr_code (call_stmt) == NOP_EXPR
2175 && TREE_CODE (gimple_assign_rhs1 (call_stmt)) == SSA_NAME)
2176 call_stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (call_stmt));
2177 if (gimple_call_internal_p (call_stmt, IFN_BUILTIN_EXPECT)
2178 && TREE_CODE (gimple_call_arg (call_stmt, 2)) == INTEGER_CST
2179 && tree_fits_uhwi_p (gimple_call_arg (call_stmt, 2))
2180 && tree_to_uhwi (gimple_call_arg (call_stmt, 2))
2181 == PRED_FORTRAN_LOOP_PREHEADER)
2182 bb = preheader_edge->src;
2186 if (!dominated_by_p (CDI_DOMINATORS,
2187 loop_outer (loop)->latch, loop->header))
2188 predict_paths_leading_to_edge (loop_preheader_edge (loop),
2190 ? PRED_LOOP_GUARD_WITH_RECURSION
2197 if (!dominated_by_p (CDI_DOMINATORS,
2198 loop_outer (loop)->latch, bb))
2199 predict_paths_leading_to (bb,
2201 ? PRED_LOOP_GUARD_WITH_RECURSION
2208 /* Free basic blocks from get_loop_body. */
2213 /* Attempt to predict probabilities of BB outgoing edges using local
2216 bb_estimate_probability_locally (basic_block bb)
2218 rtx_insn *last_insn = BB_END (bb);
2221 if (! can_predict_insn_p (last_insn))
2223 cond = get_condition (last_insn, NULL, false, false);
2227 /* Try "pointer heuristic."
2228 A comparison ptr == 0 is predicted as false.
2229 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
2230 if (COMPARISON_P (cond)
2231 && ((REG_P (XEXP (cond, 0)) && REG_POINTER (XEXP (cond, 0)))
2232 || (REG_P (XEXP (cond, 1)) && REG_POINTER (XEXP (cond, 1)))))
2234 if (GET_CODE (cond) == EQ)
2235 predict_insn_def (last_insn, PRED_POINTER, NOT_TAKEN);
2236 else if (GET_CODE (cond) == NE)
2237 predict_insn_def (last_insn, PRED_POINTER, TAKEN);
2241 /* Try "opcode heuristic."
2242 EQ tests are usually false and NE tests are usually true. Also,
2243 most quantities are positive, so we can make the appropriate guesses
2244 about signed comparisons against zero. */
2245 switch (GET_CODE (cond))
2248 /* Unconditional branch. */
2249 predict_insn_def (last_insn, PRED_UNCONDITIONAL,
2250 cond == const0_rtx ? NOT_TAKEN : TAKEN);
2255 /* Floating point comparisons appear to behave in a very
2256 unpredictable way because of the special role of = tests in
2258 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
2260 /* Comparisons with 0 are often used for booleans and there is
2261 nothing useful to predict about them. */
2262 else if (XEXP (cond, 1) == const0_rtx
2263 || XEXP (cond, 0) == const0_rtx)
2266 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, NOT_TAKEN);
2271 /* Floating point comparisons appear to behave in a very
2272 unpredictable way because of the special role of = tests in
2274 if (FLOAT_MODE_P (GET_MODE (XEXP (cond, 0))))
2276 /* Comparisons with 0 are often used for booleans and there is
2277 nothing useful to predict about them. */
2278 else if (XEXP (cond, 1) == const0_rtx
2279 || XEXP (cond, 0) == const0_rtx)
2282 predict_insn_def (last_insn, PRED_OPCODE_NONEQUAL, TAKEN);
2286 predict_insn_def (last_insn, PRED_FPOPCODE, TAKEN);
2290 predict_insn_def (last_insn, PRED_FPOPCODE, NOT_TAKEN);
2295 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
2296 || XEXP (cond, 1) == constm1_rtx)
2297 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, NOT_TAKEN);
2302 if (XEXP (cond, 1) == const0_rtx || XEXP (cond, 1) == const1_rtx
2303 || XEXP (cond, 1) == constm1_rtx)
2304 predict_insn_def (last_insn, PRED_OPCODE_POSITIVE, TAKEN);
2312 /* Set edge->probability for each successor edge of BB. */
2314 guess_outgoing_edge_probabilities (basic_block bb)
2316 bb_estimate_probability_locally (bb);
2317 combine_predictions_for_insn (BB_END (bb), bb);
2320 static tree expr_expected_value (tree, bitmap, enum br_predictor *predictor,
2321 HOST_WIDE_INT *probability);
2323 /* Helper function for expr_expected_value. */
2326 expr_expected_value_1 (tree type, tree op0, enum tree_code code,
2327 tree op1, bitmap visited, enum br_predictor *predictor,
2328 HOST_WIDE_INT *probability)
2332 /* Reset returned probability value. */
2334 *predictor = PRED_UNCONDITIONAL;
2336 if (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS)
2338 if (TREE_CONSTANT (op0))
2341 if (code == IMAGPART_EXPR)
2343 if (TREE_CODE (TREE_OPERAND (op0, 0)) == SSA_NAME)
2345 def = SSA_NAME_DEF_STMT (TREE_OPERAND (op0, 0));
2346 if (is_gimple_call (def)
2347 && gimple_call_internal_p (def)
2348 && (gimple_call_internal_fn (def)
2349 == IFN_ATOMIC_COMPARE_EXCHANGE))
2351 /* Assume that any given atomic operation has low contention,
2352 and thus the compare-and-swap operation succeeds. */
2353 *predictor = PRED_COMPARE_AND_SWAP;
2354 return build_one_cst (TREE_TYPE (op0));
2359 if (code != SSA_NAME)
2362 def = SSA_NAME_DEF_STMT (op0);
2364 /* If we were already here, break the infinite cycle. */
2365 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
2368 if (gimple_code (def) == GIMPLE_PHI)
2370 /* All the arguments of the PHI node must have the same constant
2372 int i, n = gimple_phi_num_args (def);
2373 tree val = NULL, new_val;
2375 for (i = 0; i < n; i++)
2377 tree arg = PHI_ARG_DEF (def, i);
2378 enum br_predictor predictor2;
2380 /* If this PHI has itself as an argument, we cannot
2381 determine the expected value of this argument. However,
2382 if we can find an expected constant value for the other
2383 PHI args then we can still be sure that this is
2384 likely a constant. So be optimistic and just
2385 continue with the next argument. */
2386 if (arg == PHI_RESULT (def))
2389 HOST_WIDE_INT probability2;
2390 new_val = expr_expected_value (arg, visited, &predictor2,
2393 /* It is difficult to combine value predictors. Simply assume
2394 that the later predictor is weaker and take its prediction. */
2395 if (*predictor < predictor2)
2397 *predictor = predictor2;
2398 *probability = probability2;
2404 else if (!operand_equal_p (val, new_val, false))
2409 if (is_gimple_assign (def))
2411 if (gimple_assign_lhs (def) != op0)
2414 return expr_expected_value_1 (TREE_TYPE (gimple_assign_lhs (def)),
2415 gimple_assign_rhs1 (def),
2416 gimple_assign_rhs_code (def),
2417 gimple_assign_rhs2 (def),
2418 visited, predictor, probability);
2421 if (is_gimple_call (def))
2423 tree decl = gimple_call_fndecl (def);
2426 if (gimple_call_internal_p (def)
2427 && gimple_call_internal_fn (def) == IFN_BUILTIN_EXPECT)
2429 gcc_assert (gimple_call_num_args (def) == 3);
2430 tree val = gimple_call_arg (def, 0);
2431 if (TREE_CONSTANT (val))
2433 tree val2 = gimple_call_arg (def, 2);
2434 gcc_assert (TREE_CODE (val2) == INTEGER_CST
2435 && tree_fits_uhwi_p (val2)
2436 && tree_to_uhwi (val2) < END_PREDICTORS);
2437 *predictor = (enum br_predictor) tree_to_uhwi (val2);
2438 if (*predictor == PRED_BUILTIN_EXPECT)
2440 = HITRATE (param_builtin_expect_probability);
2441 return gimple_call_arg (def, 1);
2446 if (DECL_IS_MALLOC (decl) || DECL_IS_OPERATOR_NEW_P (decl))
2449 *predictor = PRED_MALLOC_NONNULL;
2450 return boolean_true_node;
2453 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
2454 switch (DECL_FUNCTION_CODE (decl))
2456 case BUILT_IN_EXPECT:
2459 if (gimple_call_num_args (def) != 2)
2461 val = gimple_call_arg (def, 0);
2462 if (TREE_CONSTANT (val))
2464 *predictor = PRED_BUILTIN_EXPECT;
2466 = HITRATE (param_builtin_expect_probability);
2467 return gimple_call_arg (def, 1);
2469 case BUILT_IN_EXPECT_WITH_PROBABILITY:
2472 if (gimple_call_num_args (def) != 3)
2474 val = gimple_call_arg (def, 0);
2475 if (TREE_CONSTANT (val))
2477 /* Compute final probability as:
2478 probability * REG_BR_PROB_BASE. */
2479 tree prob = gimple_call_arg (def, 2);
2480 tree t = TREE_TYPE (prob);
2481 tree base = build_int_cst (integer_type_node,
2483 base = build_real_from_int_cst (t, base);
2484 tree r = fold_build2_initializer_loc (UNKNOWN_LOCATION,
2485 MULT_EXPR, t, prob, base);
2486 if (TREE_CODE (r) != REAL_CST)
2488 error_at (gimple_location (def),
2489 "probability %qE must be "
2490 "constant floating-point expression", prob);
2494 = real_to_integer (TREE_REAL_CST_PTR (r));
2495 if (probi >= 0 && probi <= REG_BR_PROB_BASE)
2497 *predictor = PRED_BUILTIN_EXPECT_WITH_PROBABILITY;
2498 *probability = probi;
2501 error_at (gimple_location (def),
2502 "probability %qE is outside "
2503 "the range [0.0, 1.0]", prob);
2505 return gimple_call_arg (def, 1);
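/* Illustrative sketch (assumption, not part of the original sources):
   assuming REG_BR_PROB_BASE is 10000, a call such as

     if (__builtin_expect_with_probability (x > 0, 1, 0.9))
       do_work ();

   has its third argument folded to a REAL_CST, probi becomes
   0.9 * 10000 == 9000, and the edge gets predictor
   PRED_BUILTIN_EXPECT_WITH_PROBABILITY with that fixed probability;
   values outside [0.0, 1.0] are rejected by the error above.
   do_work is a hypothetical placeholder.  */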
2508 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_N:
2509 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
2510 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
2511 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
2512 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
2513 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
2514 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE:
2515 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_N:
2516 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
2517 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
2518 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
2519 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
2520 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
2521 /* Assume that any given atomic operation has low contention,
2522 and thus the compare-and-swap operation succeeds. */
2523 *predictor = PRED_COMPARE_AND_SWAP;
2524 return boolean_true_node;
2525 case BUILT_IN_REALLOC:
2527 *predictor = PRED_MALLOC_NONNULL;
2528 return boolean_true_node;
2537 if (get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS)
2540 enum br_predictor predictor2;
2541 HOST_WIDE_INT probability2;
2542 op0 = expr_expected_value (op0, visited, predictor, probability);
2545 op1 = expr_expected_value (op1, visited, &predictor2, &probability2);
2548 res = fold_build2 (code, type, op0, op1);
2549 if (TREE_CODE (res) == INTEGER_CST
2550 && TREE_CODE (op0) == INTEGER_CST
2551 && TREE_CODE (op1) == INTEGER_CST)
2553 /* Combine binary predictions. */
2554 if (*probability != -1 || probability2 != -1)
2556 HOST_WIDE_INT p1 = get_predictor_value (*predictor, *probability);
2557 HOST_WIDE_INT p2 = get_predictor_value (predictor2, probability2);
2558 *probability = RDIV (p1 * p2, REG_BR_PROB_BASE);
2561 if (*predictor < predictor2)
2562 *predictor = predictor2;
2568 if (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS)
2571 op0 = expr_expected_value (op0, visited, predictor, probability);
2574 res = fold_build1 (code, type, op0);
2575 if (TREE_CONSTANT (res))
2582 /* Return the constant value EXPR is likely to have at execution time, or NULL if unknown.
2583 The function is used by the builtin_expect branch predictor, so the evidence
2584 must come from this construct and possibly additional constant folding.
2586 We may want to implement a more involved value guess (such as value range
2587 propagation based prediction), but such tricks shall go to a new
2591 expr_expected_value (tree expr, bitmap visited,
2592 enum br_predictor *predictor,
2593 HOST_WIDE_INT *probability)
2595 enum tree_code code;
2598 if (TREE_CONSTANT (expr))
2600 *predictor = PRED_UNCONDITIONAL;
2605 extract_ops_from_tree (expr, &code, &op0, &op1);
2606 return expr_expected_value_1 (TREE_TYPE (expr),
2607 op0, code, op1, visited, predictor,
2612 /* Return the probability of a PREDICTOR. If the predictor has variable
2613 probability, return the passed PROBABILITY. */
2615 static HOST_WIDE_INT
2616 get_predictor_value (br_predictor predictor, HOST_WIDE_INT probability)
2620 case PRED_BUILTIN_EXPECT:
2621 case PRED_BUILTIN_EXPECT_WITH_PROBABILITY:
2622 gcc_assert (probability != -1);
2625 gcc_assert (probability == -1);
2626 return predictor_info[(int) predictor].hitrate;
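/* Illustrative sketch (not original source): for the two variable-probability
   predictors the caller-supplied value is returned unchanged, e.g.
   get_predictor_value (PRED_BUILTIN_EXPECT, 9000) == 9000, whereas any fixed
   predictor ignores the -1 placeholder and yields the hitrate recorded for it
   in predict.def.  */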
2630 /* Predict using opcode of the last statement in basic block. */
2632 tree_predict_by_opcode (basic_block bb)
2634 gimple *stmt = last_stmt (bb);
2641 enum br_predictor predictor;
2642 HOST_WIDE_INT probability;
2647 if (gswitch *sw = dyn_cast <gswitch *> (stmt))
2649 tree index = gimple_switch_index (sw);
2650 tree val = expr_expected_value (index, auto_bitmap (),
2651 &predictor, &probability);
2652 if (val && TREE_CODE (val) == INTEGER_CST)
2654 edge e = find_taken_edge_switch_expr (sw, val);
2655 if (predictor == PRED_BUILTIN_EXPECT)
2657 int percent = param_builtin_expect_probability;
2658 gcc_assert (percent >= 0 && percent <= 100);
2659 predict_edge (e, PRED_BUILTIN_EXPECT,
2663 predict_edge_def (e, predictor, TAKEN);
2667 if (gimple_code (stmt) != GIMPLE_COND)
2669 FOR_EACH_EDGE (then_edge, ei, bb->succs)
2670 if (then_edge->flags & EDGE_TRUE_VALUE)
2672 op0 = gimple_cond_lhs (stmt);
2673 op1 = gimple_cond_rhs (stmt);
2674 cmp = gimple_cond_code (stmt);
2675 type = TREE_TYPE (op0);
2676 val = expr_expected_value_1 (boolean_type_node, op0, cmp, op1, auto_bitmap (),
2677 &predictor, &probability);
2678 if (val && TREE_CODE (val) == INTEGER_CST)
2680 HOST_WIDE_INT prob = get_predictor_value (predictor, probability);
2681 if (integer_zerop (val))
2682 prob = REG_BR_PROB_BASE - prob;
2683 predict_edge (then_edge, predictor, prob);
2685 /* Try "pointer heuristic."
2686 A comparison ptr == 0 is predicted as false.
2687 Similarly, a comparison ptr1 == ptr2 is predicted as false. */
2688 if (POINTER_TYPE_P (type))
2691 predict_edge_def (then_edge, PRED_TREE_POINTER, NOT_TAKEN);
2692 else if (cmp == NE_EXPR)
2693 predict_edge_def (then_edge, PRED_TREE_POINTER, TAKEN);
2697 /* Try "opcode heuristic."
2698 EQ tests are usually false and NE tests are usually true. Also,
2699 most quantities are positive, so we can make the appropriate guesses
2700 about signed comparisons against zero. */
2705 /* Floating point comparisons appear to behave in a very
2706 unpredictable way because of the special role of = tests in
2708 if (FLOAT_TYPE_P (type))
2710 /* Comparisons with 0 are often used for booleans and there is
2711 nothing useful to predict about them. */
2712 else if (integer_zerop (op0) || integer_zerop (op1))
2715 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, NOT_TAKEN);
2720 /* Floating point comparisons appear to behave in a very
2721 unpredictable way because of the special role of = tests in
2723 if (FLOAT_TYPE_P (type))
2725 /* Comparisons with 0 are often used for booleans and there is
2726 nothing useful to predict about them. */
2727 else if (integer_zerop (op0)
2728 || integer_zerop (op1))
2731 predict_edge_def (then_edge, PRED_TREE_OPCODE_NONEQUAL, TAKEN);
2735 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, TAKEN);
2738 case UNORDERED_EXPR:
2739 predict_edge_def (then_edge, PRED_TREE_FPOPCODE, NOT_TAKEN);
2744 if (integer_zerop (op1)
2745 || integer_onep (op1)
2746 || integer_all_onesp (op1)
2749 || real_minus_onep (op1))
2750 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, NOT_TAKEN);
2755 if (integer_zerop (op1)
2756 || integer_onep (op1)
2757 || integer_all_onesp (op1)
2760 || real_minus_onep (op1))
2761 predict_edge_def (then_edge, PRED_TREE_OPCODE_POSITIVE, TAKEN);
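/* Source-level sketch of the heuristics above (illustrative only, not part
   of the original sources):

     if (p == NULL)   // pointer heuristic: the EQ test is predicted false
       handle_error ();
     if (a != b)      // opcode heuristic: NE on non-zero operands predicted taken
       process (a);
     if (n < 0)       // sign heuristic: comparison against 0/1/-1 predicted not taken
       report (n);

   handle_error, process and report are hypothetical placeholders.  */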
2769 /* Returns TRUE if STMT is an exit(0)-like statement. */
2772 is_exit_with_zero_arg (const gimple *stmt)
2774 /* This is not exit, _exit or _Exit. */
2775 if (!gimple_call_builtin_p (stmt, BUILT_IN_EXIT)
2776 && !gimple_call_builtin_p (stmt, BUILT_IN__EXIT)
2777 && !gimple_call_builtin_p (stmt, BUILT_IN__EXIT2))
2780 /* The argument is an integer zero. */
2781 return integer_zerop (gimple_call_arg (stmt, 0));
2784 /* Try to guess whether the return value means an error code. */
2786 static enum br_predictor
2787 return_prediction (tree val, enum prediction *prediction)
2791 return PRED_NO_PREDICTION;
2792 /* Different heuristics for pointers and scalars. */
2793 if (POINTER_TYPE_P (TREE_TYPE (val)))
2795 /* NULL is usually not returned. */
2796 if (integer_zerop (val))
2798 *prediction = NOT_TAKEN;
2799 return PRED_NULL_RETURN;
2802 else if (INTEGRAL_TYPE_P (TREE_TYPE (val)))
2804 /* Negative return values are often used to indicate
2806 if (TREE_CODE (val) == INTEGER_CST
2807 && tree_int_cst_sgn (val) < 0)
2809 *prediction = NOT_TAKEN;
2810 return PRED_NEGATIVE_RETURN;
2812 /* Constant return values seem to be commonly taken.
2813 Zero/one often represent booleans so exclude them from the
2815 if (TREE_CONSTANT (val)
2816 && (!integer_zerop (val) && !integer_onep (val)))
2818 *prediction = NOT_TAKEN;
2819 return PRED_CONST_RETURN;
2822 return PRED_NO_PREDICTION;
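/* Illustrative sketch of the mapping above (not original source):
   "return NULL;" yields PRED_NULL_RETURN with *prediction = NOT_TAKEN,
   "return -1;" yields PRED_NEGATIVE_RETURN with NOT_TAKEN, an integral
   constant such as "return 42;" yields PRED_CONST_RETURN, while
   "return 0;" and "return 1;" of integral type are left as
   PRED_NO_PREDICTION because they typically represent booleans.  */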
2825 /* Return zero if phi result could have values other than -1, 0 or 1,
2826 otherwise return a bitmask, with bits 0, 1 and 2 set if -1, 0 and 1
2827 values are used or likely. */
2830 zero_one_minusone (gphi *phi, int limit)
2832 int phi_num_args = gimple_phi_num_args (phi);
2834 for (int i = 0; i < phi_num_args; i++)
2836 tree t = PHI_ARG_DEF (phi, i);
2837 if (TREE_CODE (t) != INTEGER_CST)
2839 wide_int w = wi::to_wide (t);
2849 for (int i = 0; i < phi_num_args; i++)
2851 tree t = PHI_ARG_DEF (phi, i);
2852 if (TREE_CODE (t) == INTEGER_CST)
2854 if (TREE_CODE (t) != SSA_NAME)
2856 gimple *g = SSA_NAME_DEF_STMT (t);
2857 if (gimple_code (g) == GIMPLE_PHI && limit > 0)
2858 if (int r = zero_one_minusone (as_a <gphi *> (g), limit - 1))
2863 if (!is_gimple_assign (g))
2865 if (gimple_assign_cast_p (g))
2867 tree rhs1 = gimple_assign_rhs1 (g);
2868 if (TREE_CODE (rhs1) != SSA_NAME
2869 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
2870 || TYPE_PRECISION (TREE_TYPE (rhs1)) != 1
2871 || !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
2876 if (TREE_CODE_CLASS (gimple_assign_rhs_code (g)) != tcc_comparison)
2883 /* Find the basic block with the return expression and look for a possible
2884 return value, trying to apply RETURN_PREDICTION heuristics. */
2886 apply_return_prediction (void)
2888 greturn *return_stmt = NULL;
2892 int phi_num_args, i;
2893 enum br_predictor pred;
2894 enum prediction direction;
2897 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
2899 gimple *last = last_stmt (e->src);
2901 && gimple_code (last) == GIMPLE_RETURN)
2903 return_stmt = as_a <greturn *> (last);
2909 return_val = gimple_return_retval (return_stmt);
2912 if (TREE_CODE (return_val) != SSA_NAME
2913 || !SSA_NAME_DEF_STMT (return_val)
2914 || gimple_code (SSA_NAME_DEF_STMT (return_val)) != GIMPLE_PHI)
2916 phi = as_a <gphi *> (SSA_NAME_DEF_STMT (return_val));
2917 phi_num_args = gimple_phi_num_args (phi);
2918 pred = return_prediction (PHI_ARG_DEF (phi, 0), &direction);
2920 /* Avoid the case where the function returns -1, 0 and 1 values and
2921 nothing else. Those could be qsort etc. comparison functions
2922 where the negative return isn't less probable than positive.
2923 For this, require that the function returns at least -1 or 1,
2924 or -1 and a boolean value or comparison result, so that functions
2925 returning just -1 and 0 are treated as if -1 represents an error value. */
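/* For illustration (hypothetical example, not original source): a qsort
   comparator such as

     int cmp (const void *a, const void *b)
     {
       int x = *(const int *) a, y = *(const int *) b;
       return x < y ? -1 : x > y;
     }

   returns only -1, 0 and 1, so zero_one_minusone reports bits for all three
   values and the negative-return heuristic is skipped below.  */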
2926 if (INTEGRAL_TYPE_P (TREE_TYPE (return_val))
2927 && !TYPE_UNSIGNED (TREE_TYPE (return_val))
2928 && TYPE_PRECISION (TREE_TYPE (return_val)) > 1)
2929 if (int r = zero_one_minusone (phi, 3))
2930 if ((r & (1 | 4)) == (1 | 4))
2933 /* Avoid the degenerate case where all return values from the function
2934 belong to the same category (i.e. they are all positive constants)
2935 so we can hardly say anything about them. */
2936 for (i = 1; i < phi_num_args; i++)
2937 if (pred != return_prediction (PHI_ARG_DEF (phi, i), &direction))
2939 if (i != phi_num_args)
2940 for (i = 0; i < phi_num_args; i++)
2942 pred = return_prediction (PHI_ARG_DEF (phi, i), &direction);
2943 if (pred != PRED_NO_PREDICTION)
2944 predict_paths_leading_to_edge (gimple_phi_arg_edge (phi, i), pred,
2949 /* Look for basic blocks that contain unlikely-to-happen events
2950 (such as noreturn calls) and mark all paths leading to execution
2951 of these basic blocks as unlikely. */
2954 tree_bb_level_predictions (void)
2957 bool has_return_edges = false;
2961 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
2962 if (!unlikely_executed_edge_p (e) && !(e->flags & EDGE_ABNORMAL_CALL))
2964 has_return_edges = true;
2968 apply_return_prediction ();
2970 FOR_EACH_BB_FN (bb, cfun)
2972 gimple_stmt_iterator gsi;
2974 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2976 gimple *stmt = gsi_stmt (gsi);
2979 if (is_gimple_call (stmt))
2981 if (gimple_call_noreturn_p (stmt)
2983 && !is_exit_with_zero_arg (stmt))
2984 predict_paths_leading_to (bb, PRED_NORETURN,
2986 decl = gimple_call_fndecl (stmt);
2988 && lookup_attribute ("cold",
2989 DECL_ATTRIBUTES (decl)))
2990 predict_paths_leading_to (bb, PRED_COLD_FUNCTION,
2992 if (decl && recursive_call_p (current_function_decl, decl))
2993 predict_paths_leading_to (bb, PRED_RECURSIVE_CALL,
2996 else if (gimple_code (stmt) == GIMPLE_PREDICT)
2998 predict_paths_leading_to (bb, gimple_predict_predictor (stmt),
2999 gimple_predict_outcome (stmt));
3000 /* Keep GIMPLE_PREDICT around so early inlining will propagate
3001 hints to callers. */
3007 /* Callback for hash_map::traverse, asserts that the pointer map is
3011 assert_is_empty (const_basic_block const &, edge_prediction *const &value,
3014 gcc_assert (!value);
3018 /* Predict branch probabilities and estimate profile for basic block BB.
3019 When LOCAL_ONLY is set do not use any global properties of CFG. */
3022 tree_estimate_probability_bb (basic_block bb, bool local_only)
3027 FOR_EACH_EDGE (e, ei, bb->succs)
3029 /* Look for a block we are guarding (i.e. we dominate it,
3030 but it doesn't postdominate us). */
3031 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun) && e->dest != bb
3033 && dominated_by_p (CDI_DOMINATORS, e->dest, e->src)
3034 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e->dest))
3036 gimple_stmt_iterator bi;
3038 /* The call heuristic claims that a guarded function call
3039 is improbable. This is because such calls are often used
3040 to signal exceptional situations such as printing error
3042 for (bi = gsi_start_bb (e->dest); !gsi_end_p (bi);
3045 gimple *stmt = gsi_stmt (bi);
3046 if (is_gimple_call (stmt)
3047 && !gimple_inexpensive_call_p (as_a <gcall *> (stmt))
3048 /* Constant and pure calls are hardly used to signal
3049 something exceptional. */
3050 && gimple_has_side_effects (stmt))
3052 if (gimple_call_fndecl (stmt))
3053 predict_edge_def (e, PRED_CALL, NOT_TAKEN);
3054 else if (virtual_method_call_p (gimple_call_fn (stmt)))
3055 predict_edge_def (e, PRED_POLYMORPHIC_CALL, NOT_TAKEN);
3057 predict_edge_def (e, PRED_INDIR_CALL, TAKEN);
3063 tree_predict_by_opcode (bb);
3066 /* Predict branch probabilities and estimate profile of the tree CFG.
3067 This function can be called from the loop optimizers to recompute
3068 the profile information.
3069 If DRY_RUN is set, do not modify CFG and only produce dump files. */
3072 tree_estimate_probability (bool dry_run)
3076 add_noreturn_fake_exit_edges ();
3077 connect_infinite_loops_to_exit ();
3078 /* We use loop_niter_by_eval, which requires that the loops have
3080 create_preheaders (CP_SIMPLE_PREHEADERS);
3081 calculate_dominance_info (CDI_POST_DOMINATORS);
3082 /* Decide which edges are known to be unlikely. This improves later
3083 branch prediction. */
3084 determine_unlikely_bbs ();
3086 bb_predictions = new hash_map<const_basic_block, edge_prediction *>;
3087 tree_bb_level_predictions ();
3088 record_loop_exits ();
3090 if (number_of_loops (cfun) > 1)
3093 FOR_EACH_BB_FN (bb, cfun)
3094 tree_estimate_probability_bb (bb, false);
3096 FOR_EACH_BB_FN (bb, cfun)
3097 combine_predictions_for_bb (bb, dry_run);
3100 bb_predictions->traverse<void *, assert_is_empty> (NULL);
3102 delete bb_predictions;
3103 bb_predictions = NULL;
3106 estimate_bb_frequencies (false);
3107 free_dominance_info (CDI_POST_DOMINATORS);
3108 remove_fake_exit_edges ();
3111 /* Set edge->probability for each successor edge of BB. */
3113 tree_guess_outgoing_edge_probabilities (basic_block bb)
3115 bb_predictions = new hash_map<const_basic_block, edge_prediction *>;
3116 tree_estimate_probability_bb (bb, true);
3117 combine_predictions_for_bb (bb, false);
3119 bb_predictions->traverse<void *, assert_is_empty> (NULL);
3120 delete bb_predictions;
3121 bb_predictions = NULL;
3124 /* Predict edges to successors of CUR whose sources are not postdominated by
3125 BB using PRED, and recurse to all postdominators. */
3128 predict_paths_for_bb (basic_block cur, basic_block bb,
3129 enum br_predictor pred,
3130 enum prediction taken,
3131 bitmap visited, class loop *in_loop = NULL)
3137 /* If we exited the loop or CUR is unconditional in the loop, there is
3140 && (!flow_bb_inside_loop_p (in_loop, cur)
3141 || dominated_by_p (CDI_DOMINATORS, in_loop->latch, cur)))
3144 /* We are looking for all edges forming an edge cut induced by
3145 the set of all blocks postdominated by BB. */
3146 FOR_EACH_EDGE (e, ei, cur->preds)
3147 if (e->src->index >= NUM_FIXED_BLOCKS
3148 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, bb))
3154 /* Ignore fake edges and EH edges; we predict them as not taken anyway. */
3155 if (unlikely_executed_edge_p (e))
3157 gcc_assert (bb == cur || dominated_by_p (CDI_POST_DOMINATORS, cur, bb));
3159 /* See if there is an edge from e->src that is not abnormal
3160 and does not lead to BB and does not exit the loop. */
3161 FOR_EACH_EDGE (e2, ei2, e->src->succs)
3163 && !unlikely_executed_edge_p (e2)
3164 && !dominated_by_p (CDI_POST_DOMINATORS, e2->dest, bb)
3165 && (!in_loop || !loop_exit_edge_p (in_loop, e2)))
3171 /* If there is a non-abnormal path leaving e->src, predict the edge
3172 using the predictor. Otherwise we need to look for paths
3175 The second may lead to an infinite loop in case we are predicting
3176 regions that are only reachable by abnormal edges. We simply
3177 prevent visiting a given BB twice. */
3180 if (!edge_predicted_by_p (e, pred, taken))
3181 predict_edge_def (e, pred, taken);
3183 else if (bitmap_set_bit (visited, e->src->index))
3184 predict_paths_for_bb (e->src, e->src, pred, taken, visited, in_loop);
3186 for (son = first_dom_son (CDI_POST_DOMINATORS, cur);
3188 son = next_dom_son (CDI_POST_DOMINATORS, son))
3189 predict_paths_for_bb (son, bb, pred, taken, visited, in_loop);
3192 /* Sets branch probabilities according to PREDiction and
3196 predict_paths_leading_to (basic_block bb, enum br_predictor pred,
3197 enum prediction taken, class loop *in_loop)
3199 predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop);
3202 /* Like predict_paths_leading_to but take edge instead of basic block. */
3205 predict_paths_leading_to_edge (edge e, enum br_predictor pred,
3206 enum prediction taken, class loop *in_loop)
3208 bool has_nonloop_edge = false;
3212 basic_block bb = e->src;
3213 FOR_EACH_EDGE (e2, ei, bb->succs)
3214 if (e2->dest != e->src && e2->dest != e->dest
3215 && !unlikely_executed_edge_p (e2)
3216 && !dominated_by_p (CDI_POST_DOMINATORS, e->src, e2->dest))
3218 has_nonloop_edge = true;
3222 if (!has_nonloop_edge)
3223 predict_paths_for_bb (bb, bb, pred, taken, auto_bitmap (), in_loop);
3225 predict_edge_def (e, pred, taken);
3228 /* This is used to carry information about basic blocks. It is
3229 attached to the AUX field of the standard CFG block. */
3234 /* Estimated frequency of execution of basic_block. */
3237 /* To keep queue of basic blocks to process. */
3240 /* Number of predecessors we need to visit first. */
3244 /* Similar information for edges. */
3245 class edge_prob_info
3248 /* In case the edge is a loopback edge, the probability that the edge will
3249 be reached given that the header is. The estimated number of iterations
3250 of the loop can then be computed as 1 / (1 - back_edge_prob). */
3251 sreal back_edge_prob;
3252 /* True if the edge is a loopback edge in the natural loop. */
3253 unsigned int back_edge:1;
3256 #define BLOCK_INFO(B) ((block_info *) (B)->aux)
3258 #define EDGE_INFO(E) ((edge_prob_info *) (E)->aux)
3260 /* Helper function for estimate_bb_frequencies.
3261 Propagate the frequencies in blocks marked in
3262 TOVISIT, starting in HEAD. */
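/* Illustrative numeric sketch (assumption, not original source): for a
   simple single-latch loop whose back edge carries probability 0.9,
   cyclic_probability accumulates to roughly 0.9, so the header frequency
   becomes frequency / (1 - 0.9) = 10 times the frequency flowing in from
   the preheader, matching an expected ten iterations.  */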
3265 propagate_freq (basic_block head, bitmap tovisit,
3266 sreal max_cyclic_prob)
3275 /* For each basic block we need to visit, count the number of its
3276 predecessors that we need to visit first. */
3277 EXECUTE_IF_SET_IN_BITMAP (tovisit, 0, i, bi)
3282 bb = BASIC_BLOCK_FOR_FN (cfun, i);
3284 FOR_EACH_EDGE (e, ei, bb->preds)
3286 bool visit = bitmap_bit_p (tovisit, e->src->index);
3288 if (visit && !(e->flags & EDGE_DFS_BACK))
3290 else if (visit && dump_file && !EDGE_INFO (e)->back_edge)
3292 "Irreducible region hit, ignoring edge to %i->%i\n",
3293 e->src->index, bb->index);
3295 BLOCK_INFO (bb)->npredecessors = count;
3296 /* When the function never returns, we will never process the exit block. */
3297 if (!count && bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
3298 bb->count = profile_count::zero ();
3301 BLOCK_INFO (head)->frequency = 1;
3303 for (bb = head; bb; bb = nextbb)
3306 sreal cyclic_probability = 0;
3307 sreal frequency = 0;
3309 nextbb = BLOCK_INFO (bb)->next;
3310 BLOCK_INFO (bb)->next = NULL;
3312 /* Compute frequency of basic block. */
3316 FOR_EACH_EDGE (e, ei, bb->preds)
3317 gcc_assert (!bitmap_bit_p (tovisit, e->src->index)
3318 || (e->flags & EDGE_DFS_BACK));
3320 FOR_EACH_EDGE (e, ei, bb->preds)
3321 if (EDGE_INFO (e)->back_edge)
3322 cyclic_probability += EDGE_INFO (e)->back_edge_prob;
3323 else if (!(e->flags & EDGE_DFS_BACK))
3325 /* FIXME: Graphite is producing edges with no profile. Once
3326 this is fixed, drop this. */
3327 sreal tmp = e->probability.initialized_p () ?
3328 e->probability.to_sreal () : 0;
3329 frequency += tmp * BLOCK_INFO (e->src)->frequency;
3332 if (cyclic_probability == 0)
3334 BLOCK_INFO (bb)->frequency = frequency;
3338 if (cyclic_probability > max_cyclic_prob)
3342 "cyclic probability of bb %i is %f (capped to %f)"
3343 "; turning freq %f",
3344 bb->index, cyclic_probability.to_double (),
3345 max_cyclic_prob.to_double (),
3346 frequency.to_double ());
3348 cyclic_probability = max_cyclic_prob;
3352 "cyclic probability of bb %i is %f; turning freq %f",
3353 bb->index, cyclic_probability.to_double (),
3354 frequency.to_double ());
3356 BLOCK_INFO (bb)->frequency = frequency
3357 / (sreal (1) - cyclic_probability);
3359 fprintf (dump_file, " to %f\n",
3360 BLOCK_INFO (bb)->frequency.to_double ());
3364 bitmap_clear_bit (tovisit, bb->index);
3366 e = find_edge (bb, head);
3369 /* FIXME: Graphite is producing edges with no profile. Once
3370 this is fixed, drop this. */
3371 sreal tmp = e->probability.initialized_p () ?
3372 e->probability.to_sreal () : 0;
3373 EDGE_INFO (e)->back_edge_prob = tmp * BLOCK_INFO (bb)->frequency;
3376 /* Propagate to successor blocks. */
3377 FOR_EACH_EDGE (e, ei, bb->succs)
3378 if (!(e->flags & EDGE_DFS_BACK)
3379 && BLOCK_INFO (e->dest)->npredecessors)
3381 BLOCK_INFO (e->dest)->npredecessors--;
3382 if (!BLOCK_INFO (e->dest)->npredecessors)
3387 BLOCK_INFO (last)->next = e->dest;
3395 /* Estimate frequencies in loops at same nest level. */
3398 estimate_loops_at_level (class loop *first_loop, sreal max_cyclic_prob)
3402 for (loop = first_loop; loop; loop = loop->next)
3407 auto_bitmap tovisit;
3409 estimate_loops_at_level (loop->inner, max_cyclic_prob);
3411 /* Find current loop back edge and mark it. */
3412 e = loop_latch_edge (loop);
3413 EDGE_INFO (e)->back_edge = 1;
3415 bbs = get_loop_body (loop);
3416 for (i = 0; i < loop->num_nodes; i++)
3417 bitmap_set_bit (tovisit, bbs[i]->index);
3419 propagate_freq (loop->header, tovisit, max_cyclic_prob);
3423 /* Propagates frequencies through structure of loops. */
3426 estimate_loops (void)
3428 auto_bitmap tovisit;
3430 sreal max_cyclic_prob = (sreal)1
3431 - (sreal)1 / (param_max_predicted_iterations + 1);
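  /* E.g. with --param max-predicted-iterations at its (assumed) default of
     100, max_cyclic_prob is 1 - 1/101, which caps any predicted loop at
     roughly 100 iterations.  */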
3433 /* Start by estimating the frequencies in the loops. */
3434 if (number_of_loops (cfun) > 1)
3435 estimate_loops_at_level (current_loops->tree_root->inner, max_cyclic_prob);
3437 /* Now propagate the frequencies through all the blocks. */
3438 FOR_ALL_BB_FN (bb, cfun)
3440 bitmap_set_bit (tovisit, bb->index);
3442 propagate_freq (ENTRY_BLOCK_PTR_FOR_FN (cfun), tovisit, max_cyclic_prob);
3445 /* Drop the profile for NODE to guessed, and update its frequency based on
3446 whether it is expected to be hot given the CALL_COUNT. */
3449 drop_profile (struct cgraph_node *node, profile_count call_count)
3451 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
3452 /* In the case where this was called by another function with a
3453 dropped profile, call_count will be 0. Since there are no
3454 non-zero call counts to this function, we don't know for sure
3455 whether it is hot, and therefore it will be marked normal below. */
3456 bool hot = maybe_hot_count_p (NULL, call_count);
3460 "Dropping 0 profile for %s. %s based on calls.\n",
3462 hot ? "Function is hot" : "Function is normal");
3463 /* We only expect to miss profiles for functions that are reached
3464 via non-zero call edges in cases where the function may have
3465 been linked from another module or library (COMDATs and extern
3466 templates). See the comments below for handle_missing_profiles.
3467 Also, only warn in cases where the missing counts exceed the
3468 number of training runs. In certain cases with an execv followed
3469 by a no-return call the profile for the no-return call is not
3470 dumped and there can be a mismatch. */
3471 if (!DECL_COMDAT (node->decl) && !DECL_EXTERNAL (node->decl)
3472 && call_count > profile_info->runs)
3474 if (flag_profile_correction)
3478 "Missing counts for called function %s\n",
3479 node->dump_name ());
3482 warning (0, "Missing counts for called function %s",
3483 node->dump_name ());
3487 if (opt_for_fn (node->decl, flag_guess_branch_prob))
3490 = !ENTRY_BLOCK_PTR_FOR_FN (fn)->count.nonzero_p ();
3491 FOR_ALL_BB_FN (bb, fn)
3492 if (clear_zeros || !(bb->count == profile_count::zero ()))
3493 bb->count = bb->count.guessed_local ();
3494 fn->cfg->count_max = fn->cfg->count_max.guessed_local ();
3498 FOR_ALL_BB_FN (bb, fn)
3499 bb->count = profile_count::uninitialized ();
3500 fn->cfg->count_max = profile_count::uninitialized ();
3503 struct cgraph_edge *e;
3504 for (e = node->callees; e; e = e->next_callee)
3505 e->count = gimple_bb (e->call_stmt)->count;
3506 for (e = node->indirect_calls; e; e = e->next_callee)
3507 e->count = gimple_bb (e->call_stmt)->count;
3508 node->count = ENTRY_BLOCK_PTR_FOR_FN (fn)->count;
3510 profile_status_for_fn (fn)
3511 = (flag_guess_branch_prob ? PROFILE_GUESSED : PROFILE_ABSENT);
3513 = hot ? NODE_FREQUENCY_HOT : NODE_FREQUENCY_NORMAL;
3516 /* In the case of COMDAT routines, multiple object files will contain the same
3517 function and the linker will select one for the binary. In that case
3518 all the other copies from the profile-instrumented binary will be missing
3519 profile counts. Look for cases where this happened, due to non-zero
3520 call counts going to 0-count functions, and drop the profile to guessed
3521 so that we can use the estimated probabilities and avoid optimizing only
3524 The other case where the profile may be missing is when the routine
3525 is not going to be emitted to the object file, e.g. for "extern template"
3526 class methods. Those will be marked DECL_EXTERNAL. Emit a warning in
3527 all other cases of non-zero calls to 0-count functions. */
3530 handle_missing_profiles (void)
3532 const int unlikely_frac = param_unlikely_bb_count_fraction;
3533 struct cgraph_node *node;
3534 auto_vec<struct cgraph_node *, 64> worklist;
3536 /* See if 0 count function has non-0 count callers. In this case we
3537 lost some profile. Drop its function profile to PROFILE_GUESSED. */
3538 FOR_EACH_DEFINED_FUNCTION (node)
3540 struct cgraph_edge *e;
3541 profile_count call_count = profile_count::zero ();
3542 gcov_type max_tp_first_run = 0;
3543 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
3545 if (node->count.ipa ().nonzero_p ())
3547 for (e = node->callers; e; e = e->next_caller)
3548 if (e->count.ipa ().initialized_p () && e->count.ipa () > 0)
3550 call_count = call_count + e->count.ipa ();
3552 if (e->caller->tp_first_run > max_tp_first_run)
3553 max_tp_first_run = e->caller->tp_first_run;
3556 /* If the time profile is missing, assign the maximum that comes from
3557 caller functions. */
3558 if (!node->tp_first_run && max_tp_first_run)
3559 node->tp_first_run = max_tp_first_run + 1;
3563 && call_count.apply_scale (unlikely_frac, 1) >= profile_info->runs)
3565 drop_profile (node, call_count);
3566 worklist.safe_push (node);
3570 /* Propagate the profile dropping to other 0-count COMDATs that are
3571 potentially called by COMDATs we already dropped the profile on. */
3572 while (worklist.length () > 0)
3574 struct cgraph_edge *e;
3576 node = worklist.pop ();
3577 for (e = node->callees; e; e = e->next_caller)
3579 struct cgraph_node *callee = e->callee;
3580 struct function *fn = DECL_STRUCT_FUNCTION (callee->decl);
3582 if (!(e->count.ipa () == profile_count::zero ())
3583 && callee->count.ipa ().nonzero_p ())
3585 if ((DECL_COMDAT (callee->decl) || DECL_EXTERNAL (callee->decl))
3587 && profile_status_for_fn (fn) == PROFILE_READ)
3589 drop_profile (node, profile_count::zero ());
3590 worklist.safe_push (callee);
3596 /* Record the maximal basic block count of the function in the CFG's count_max.
3597 Return nonzero iff there was any nonzero execution count. */
3600 update_max_bb_count (void)
3602 profile_count true_count_max = profile_count::uninitialized ();
3605 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
3606 true_count_max = true_count_max.max (bb->count);
3608 cfun->cfg->count_max = true_count_max;
3610 return true_count_max.ipa ().nonzero_p ();
3613 /* Return true if the function is likely to be expensive, so there is no point
3614 in optimizing the performance of the prologue or epilogue, or doing inlining
3615 at the expense of code size growth. THRESHOLD is the limit on the number of
3616 instructions the function can execute on average to still be considered not expensive. */
3619 expensive_function_p (int threshold)
3623 /* If the profile was scaled so that the entry block has count 0, then the
3624 function is definitely taking a lot of time. */
3625 if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.nonzero_p ())
3628 profile_count limit = ENTRY_BLOCK_PTR_FOR_FN
3629 (cfun)->count.apply_scale (threshold, 1);
3630 profile_count sum = profile_count::zero ();
3631 FOR_EACH_BB_FN (bb, cfun)
3635 if (!bb->count.initialized_p ())
3638 fprintf (dump_file, "Function is considered expensive because"
3639 " count of bb %i is not initialized\n", bb->index);
3643 FOR_BB_INSNS (bb, insn)
3644 if (active_insn_p (insn))
3655 /* All basic blocks that are reachable only from unlikely basic blocks are
3659 propagate_unlikely_bbs_forward (void)
3661 auto_vec<basic_block, 64> worklist;
3666 if (!(ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ()))
3668 ENTRY_BLOCK_PTR_FOR_FN (cfun)->aux = (void *)(size_t) 1;
3669 worklist.safe_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3671 while (worklist.length () > 0)
3673 bb = worklist.pop ();
3674 FOR_EACH_EDGE (e, ei, bb->succs)
3675 if (!(e->count () == profile_count::zero ())
3676 && !(e->dest->count == profile_count::zero ())
3679 e->dest->aux = (void *)(size_t) 1;
3680 worklist.safe_push (e->dest);
3685 FOR_ALL_BB_FN (bb, cfun)
3689 if (!(bb->count == profile_count::zero ())
3690 && (dump_file && (dump_flags & TDF_DETAILS)))
3692 "Basic block %i is marked unlikely by forward prop\n",
3694 bb->count = profile_count::zero ();
3701 /* Determine basic blocks/edges that are known to be unlikely executed and set
3702 their counters to zero.
3703 This is done by first identifying obviously unlikely BBs/edges and then
3704 propagating in both directions. */
3707 determine_unlikely_bbs ()
3710 auto_vec<basic_block, 64> worklist;
3714 FOR_EACH_BB_FN (bb, cfun)
3716 if (!(bb->count == profile_count::zero ())
3717 && unlikely_executed_bb_p (bb))
3719 if (dump_file && (dump_flags & TDF_DETAILS))
3720 fprintf (dump_file, "Basic block %i is locally unlikely\n",
3722 bb->count = profile_count::zero ();
3725 FOR_EACH_EDGE (e, ei, bb->succs)
3726 if (!(e->probability == profile_probability::never ())
3727 && unlikely_executed_edge_p (e))
3729 if (dump_file && (dump_flags & TDF_DETAILS))
3730 fprintf (dump_file, "Edge %i->%i is locally unlikely\n",
3731 bb->index, e->dest->index);
3732 e->probability = profile_probability::never ();
3735 gcc_checking_assert (!bb->aux);
3737 propagate_unlikely_bbs_forward ();
3739 auto_vec<int, 64> nsuccs;
3740 nsuccs.safe_grow_cleared (last_basic_block_for_fn (cfun));
3741 FOR_ALL_BB_FN (bb, cfun)
3742 if (!(bb->count == profile_count::zero ())
3743 && bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3745 nsuccs[bb->index] = 0;
3746 FOR_EACH_EDGE (e, ei, bb->succs)
3747 if (!(e->probability == profile_probability::never ())
3748 && !(e->dest->count == profile_count::zero ()))
3749 nsuccs[bb->index]++;
3750 if (!nsuccs[bb->index])
3751 worklist.safe_push (bb);
3753 while (worklist.length () > 0)
3755 bb = worklist.pop ();
3756 if (bb->count == profile_count::zero ())
3758 if (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun))
3761 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
3762 !gsi_end_p (gsi); gsi_next (&gsi))
3763 if (stmt_can_terminate_bb_p (gsi_stmt (gsi))
3764 /* stmt_can_terminate_bb_p special cases noreturns because it
3765 assumes that fake edges are created. We want to know that
3766 noreturn alone does not imply BB to be unlikely. */
3767 || (is_gimple_call (gsi_stmt (gsi))
3768 && (gimple_call_flags (gsi_stmt (gsi)) & ECF_NORETURN)))
3776 if (dump_file && (dump_flags & TDF_DETAILS))
3778 "Basic block %i is marked unlikely by backward prop\n",
3780 bb->count = profile_count::zero ();
3781 FOR_EACH_EDGE (e, ei, bb->preds)
3782 if (!(e->probability == profile_probability::never ()))
3784 if (!(e->src->count == profile_count::zero ()))
3786 gcc_checking_assert (nsuccs[e->src->index] > 0);
3787 nsuccs[e->src->index]--;
3788 if (!nsuccs[e->src->index])
3789 worklist.safe_push (e->src);
3793 /* Finally all edges from non-0 regions to 0 are unlikely. */
3794 FOR_ALL_BB_FN (bb, cfun)
3796 if (!(bb->count == profile_count::zero ()))
3797 FOR_EACH_EDGE (e, ei, bb->succs)
3798 if (!(e->probability == profile_probability::never ())
3799 && e->dest->count == profile_count::zero ())
3801 if (dump_file && (dump_flags & TDF_DETAILS))
3802 fprintf (dump_file, "Edge %i->%i is unlikely because "
3803 "it enters unlikely block\n",
3804 bb->index, e->dest->index);
3805 e->probability = profile_probability::never ();
3810 FOR_EACH_EDGE (e, ei, bb->succs)
3811 if (e->probability == profile_probability::never ())
3821 && !(other->probability == profile_probability::always ()))
3823 if (dump_file && (dump_flags & TDF_DETAILS))
3824 fprintf (dump_file, "Edge %i->%i is locally likely\n",
3825 bb->index, other->dest->index);
3826 other->probability = profile_probability::always ();
3829 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count == profile_count::zero ())
3830 cgraph_node::get (current_function_decl)->count = profile_count::zero ();
3833 /* Estimate and propagate basic block frequencies using the given branch
3834 probabilities. If FORCE is true, the frequencies are used to estimate
3835 the counts even when there are already non-zero profile counts. */
3838 estimate_bb_frequencies (bool force)
3843 determine_unlikely_bbs ();
3845 if (force || profile_status_for_fn (cfun) != PROFILE_READ
3846 || !update_max_bb_count ())
3849 mark_dfs_back_edges ();
3851 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->probability =
3852 profile_probability::always ();
3854 /* Set up block info for each basic block. */
3855 alloc_aux_for_blocks (sizeof (block_info));
3856 alloc_aux_for_edges (sizeof (edge_prob_info));
3857 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
3862 FOR_EACH_EDGE (e, ei, bb->succs)
3864 /* FIXME: Graphite is producing edges with no profile. Once
3865 this is fixed, drop this. */
3866 if (e->probability.initialized_p ())
3867 EDGE_INFO (e)->back_edge_prob
3868 = e->probability.to_sreal ();
3870 /* back_edge_prob = 0.5 */
3871 EDGE_INFO (e)->back_edge_prob = sreal (1, -1);
3875 /* First compute frequencies locally for each loop from innermost
3876 to outermost to examine frequencies for back edges. */
3880 FOR_EACH_BB_FN (bb, cfun)
3881 if (freq_max < BLOCK_INFO (bb)->frequency)
3882 freq_max = BLOCK_INFO (bb)->frequency;
3884 /* Scaling frequencies up to the maximal profile count may result in
3885 frequent overflows, especially when inlining loops.
3886 Small scaling results in unnecessary precision loss. Stay in
3887 half of the (exponential) range. */
3888 freq_max = (sreal (1) << (profile_count::n_bits / 2)) / freq_max;
3891 profile_count ipa_count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ();
3892 cfun->cfg->count_max = profile_count::uninitialized ();
3893 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
3895 sreal tmp = BLOCK_INFO (bb)->frequency * freq_max + sreal (1, -1);
3896 profile_count count = profile_count::from_gcov_type (tmp.to_int ());
3898 /* If we have profile feedback in which this function was never
3899 executed, then preserve this info. */
3900 if (!(bb->count == profile_count::zero ()))
3901 bb->count = count.guessed_local ().combine_with_ipa_count (ipa_count);
3902 cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count);
3905 free_aux_for_blocks ();
3906 free_aux_for_edges ();
3908 compute_function_frequency ();
3911 /* Decide whether function is hot, cold or unlikely executed. */
3913 compute_function_frequency (void)
3916 struct cgraph_node *node = cgraph_node::get (current_function_decl);
3918 if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
3919 || MAIN_NAME_P (DECL_NAME (current_function_decl)))
3920 node->only_called_at_startup = true;
3921 if (DECL_STATIC_DESTRUCTOR (current_function_decl))
3922 node->only_called_at_exit = true;
3924 if (!ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa_p ())
3926 int flags = flags_from_decl_or_type (current_function_decl);
3927 if (lookup_attribute ("cold", DECL_ATTRIBUTES (current_function_decl))
3929 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
3930 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (current_function_decl))
3932 node->frequency = NODE_FREQUENCY_HOT;
3933 else if (flags & ECF_NORETURN)
3934 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
3935 else if (MAIN_NAME_P (DECL_NAME (current_function_decl)))
3936 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
3937 else if (DECL_STATIC_CONSTRUCTOR (current_function_decl)
3938 || DECL_STATIC_DESTRUCTOR (current_function_decl))
3939 node->frequency = NODE_FREQUENCY_EXECUTED_ONCE;
3943 node->frequency = NODE_FREQUENCY_UNLIKELY_EXECUTED;
3944 warn_function_cold (current_function_decl);
3945 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa() == profile_count::zero ())
3947 FOR_EACH_BB_FN (bb, cfun)
3949 if (maybe_hot_bb_p (cfun, bb))
3951 node->frequency = NODE_FREQUENCY_HOT;
3954 if (!probably_never_executed_bb_p (cfun, bb))
3955 node->frequency = NODE_FREQUENCY_NORMAL;
3959 /* Build PREDICT_EXPR. */
3961 build_predict_expr (enum br_predictor predictor, enum prediction taken)
3963 tree t = build1 (PREDICT_EXPR, void_type_node,
3964 build_int_cst (integer_type_node, predictor));
3965 SET_PREDICT_EXPR_OUTCOME (t, taken);
3970 predictor_name (enum br_predictor predictor)
3972 return predictor_info[predictor].name;
3975 /* Predict branch probabilities and estimate profile of the tree CFG. */
3979 const pass_data pass_data_profile =
3981 GIMPLE_PASS, /* type */
3982 "profile_estimate", /* name */
3983 OPTGROUP_NONE, /* optinfo_flags */
3984 TV_BRANCH_PROB, /* tv_id */
3985 PROP_cfg, /* properties_required */
3986 0, /* properties_provided */
3987 0, /* properties_destroyed */
3988 0, /* todo_flags_start */
3989 0, /* todo_flags_finish */
3992 class pass_profile : public gimple_opt_pass
3995 pass_profile (gcc::context *ctxt)
3996 : gimple_opt_pass (pass_data_profile, ctxt)
3999 /* opt_pass methods: */
4000 virtual bool gate (function *) { return flag_guess_branch_prob; }
4001 virtual unsigned int execute (function *);
4003 }; // class pass_profile
4006 pass_profile::execute (function *fun)
4010 if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
4013 loop_optimizer_init (LOOPS_NORMAL);
4014 if (dump_file && (dump_flags & TDF_DETAILS))
4015 flow_loops_dump (dump_file, NULL, 0);
4017 mark_irreducible_loops ();
4019 nb_loops = number_of_loops (fun);
4023 tree_estimate_probability (false);
4028 loop_optimizer_finalize ();
4029 if (dump_file && (dump_flags & TDF_DETAILS))
4030 gimple_dump_cfg (dump_file, dump_flags);
4031 if (profile_status_for_fn (fun) == PROFILE_ABSENT)
4032 profile_status_for_fn (fun) = PROFILE_GUESSED;
4033 if (dump_file && (dump_flags & TDF_DETAILS))
4036 FOR_EACH_LOOP (loop, LI_FROM_INNERMOST)
4037 if (loop->header->count.initialized_p ())
4038 fprintf (dump_file, "Loop got predicted %d to iterate %i times.\n",
4040 (int)expected_loop_iterations_unbounded (loop));
4048 make_pass_profile (gcc::context *ctxt)
4050 return new pass_profile (ctxt);
4053 /* Return true when the PRED predictor should be removed after early
4054 tree passes. Most of the predictors are beneficial to survive,
4055 as early inlining can also distribute them into callers' bodies. */
4058 strip_predictor_early (enum br_predictor pred)
4062 case PRED_TREE_EARLY_RETURN:
4069 /* Get rid of all builtin_expect calls and GIMPLE_PREDICT statements
4070 we no longer need. EARLY is set to true when called from early
4074 strip_predict_hints (function *fun, bool early)
4079 bool changed = false;
4081 FOR_EACH_BB_FN (bb, fun)
4083 gimple_stmt_iterator bi;
4084 for (bi = gsi_start_bb (bb); !gsi_end_p (bi);)
4086 gimple *stmt = gsi_stmt (bi);
4088 if (gimple_code (stmt) == GIMPLE_PREDICT)
4091 || strip_predictor_early (gimple_predict_predictor (stmt)))
4093 gsi_remove (&bi, true);
4098 else if (is_gimple_call (stmt))
4100 tree fndecl = gimple_call_fndecl (stmt);
4103 && ((fndecl != NULL_TREE
4104 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
4105 && gimple_call_num_args (stmt) == 2)
4106 || (fndecl != NULL_TREE
4107 && fndecl_built_in_p (fndecl,
4108 BUILT_IN_EXPECT_WITH_PROBABILITY)
4109 && gimple_call_num_args (stmt) == 3)
4110 || (gimple_call_internal_p (stmt)
4111 && gimple_call_internal_fn (stmt) == IFN_BUILTIN_EXPECT)))
4113 var = gimple_call_lhs (stmt);
4118 = gimple_build_assign (var, gimple_call_arg (stmt, 0));
4119 gsi_replace (&bi, ass_stmt, true);
4123 gsi_remove (&bi, true);
4131 return changed ? TODO_cleanup_cfg : 0;
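/* Illustrative sketch of the rewrite performed above (not original source):
   a GIMPLE sequence such as

     t_1 = __builtin_expect (cond_2, 1);
     if (t_1 != 0) goto <bb likely>; else goto <bb unlikely>;

   keeps its control flow, but the call is replaced by the plain copy
   "t_1 = cond_2;" (or simply removed when it has no LHS), since the
   expectation hint is no longer needed once branch probabilities have
   been estimated.  */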
4136 const pass_data pass_data_strip_predict_hints =
4138 GIMPLE_PASS, /* type */
4139 "*strip_predict_hints", /* name */
4140 OPTGROUP_NONE, /* optinfo_flags */
4141 TV_BRANCH_PROB, /* tv_id */
4142 PROP_cfg, /* properties_required */
4143 0, /* properties_provided */
4144 0, /* properties_destroyed */
4145 0, /* todo_flags_start */
4146 0, /* todo_flags_finish */
4149 class pass_strip_predict_hints : public gimple_opt_pass
4152 pass_strip_predict_hints (gcc::context *ctxt)
4153 : gimple_opt_pass (pass_data_strip_predict_hints, ctxt)
4156 /* opt_pass methods: */
4157 opt_pass * clone () { return new pass_strip_predict_hints (m_ctxt); }
4158 void set_pass_param (unsigned int n, bool param)
4160 gcc_assert (n == 0);
4164 virtual unsigned int execute (function *);
4169 }; // class pass_strip_predict_hints
4172 pass_strip_predict_hints::execute (function *fun)
4174 return strip_predict_hints (fun, early_p);
4180 make_pass_strip_predict_hints (gcc::context *ctxt)
4182 return new pass_strip_predict_hints (ctxt);
4185 /* Rebuild function frequencies. Passes are in general expected to
4186 maintain the profile by hand; however, in some cases this is not possible:
4187 for example, when inlining several functions with loops, frequencies might run
4188 out of scale and thus need to be recomputed. */
4191 rebuild_frequencies (void)
4193 timevar_push (TV_REBUILD_FREQUENCIES);
4195 /* When the max bb count in the function is small, there is a higher
4196 chance that there were truncation errors in the integer scaling
4197 of counts by inlining and other optimizations. This could lead
4198 to incorrect classification of code as being cold when it isn't.
4199 In that case, force the estimation of bb counts/frequencies from the
4200 branch probabilities, rather than computing frequencies from counts,
4201 which may also lead to frequencies incorrectly reduced to 0. There
4202 is less precision in the probabilities, so we only do this for small
4204 cfun->cfg->count_max = profile_count::uninitialized ();
4206 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL, next_bb)
4207 cfun->cfg->count_max = cfun->cfg->count_max.max (bb->count);
4209 if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
4211 loop_optimizer_init (0);
4212 add_noreturn_fake_exit_edges ();
4213 mark_irreducible_loops ();
4214 connect_infinite_loops_to_exit ();
4215 estimate_bb_frequencies (true);
4216 remove_fake_exit_edges ();
4217 loop_optimizer_finalize ();
4219 else if (profile_status_for_fn (cfun) == PROFILE_READ)
4220 update_max_bb_count ();
4221 else if (profile_status_for_fn (cfun) == PROFILE_ABSENT
4222 && !flag_guess_branch_prob)
4226 timevar_pop (TV_REBUILD_FREQUENCIES);
4229 /* Perform a dry run of the branch prediction pass and report a comparison of
4230 the predicted and real profile into the dump file. */
4233 report_predictor_hitrates (void)
4237 loop_optimizer_init (LOOPS_NORMAL);
4238 if (dump_file && (dump_flags & TDF_DETAILS))
4239 flow_loops_dump (dump_file, NULL, 0);
4241 mark_irreducible_loops ();
4243 nb_loops = number_of_loops (cfun);
4247 tree_estimate_probability (true);
4252 loop_optimizer_finalize ();
4255 /* Force edge E to be cold.
4256 If IMPOSSIBLE is true, force the edge to have count and probability 0;
4257 otherwise keep a low probability to represent a possible error in a guess.
4258 This is used e.g. in case we predict a loop to likely iterate a given
4259 number of times but we are not 100% sure.
4261 This function locally updates the profile without attempting to keep global
4262 consistency, which cannot be reached in full generality without a full profile
4263 rebuild from probabilities alone. Doing so is not necessarily a good idea
4264 because frequencies and counts may be more realistic than probabilities.
4266 In some cases (such as the elimination of early exits during full loop
4267 unrolling) the caller can ensure that the profile will get consistent
4271 force_edge_cold (edge e, bool impossible)
4273 profile_count count_sum = profile_count::zero ();
4274 profile_probability prob_sum = profile_probability::never ();
4277 bool uninitialized_exit = false;
4279 /* When branch probability guesses are not known, then do nothing. */
4280 if (!impossible && !e->count ().initialized_p ())
4283 profile_probability goal = (impossible ? profile_probability::never ()
4284 : profile_probability::very_unlikely ());
4286 /* If the edge is already improbable or cold, just return. */
4287 if (e->probability <= goal
4288 && (!impossible || e->count () == profile_count::zero ()))
4290 FOR_EACH_EDGE (e2, ei, e->src->succs)
4293 if (e->flags & EDGE_FAKE)
4295 if (e2->count ().initialized_p ())
4296 count_sum += e2->count ();
4297 if (e2->probability.initialized_p ())
4298 prob_sum += e2->probability;
4300 uninitialized_exit = true;
4303 /* If we are not guessing profiles but have some other edges out,
4304 just assume the control flow goes elsewhere. */
4305 if (uninitialized_exit)
4306 e->probability = goal;
4307 /* If there are other edges out of e->src, redistribute probability
4309 else if (prob_sum > profile_probability::never ())
4311 if (!(e->probability < goal))
4312 e->probability = goal;
4314 profile_probability prob_comp = prob_sum / e->probability.invert ();
4316 if (dump_file && (dump_flags & TDF_DETAILS))
4317 fprintf (dump_file, "Making edge %i->%i %s by redistributing "
4318 "probability to other edges.\n",
4319 e->src->index, e->dest->index,
4320 impossible ? "impossible" : "cold");
4321 FOR_EACH_EDGE (e2, ei, e->src->succs)
4324 e2->probability /= prob_comp;
4326 if (current_ir_type () != IR_GIMPLE
4327 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
4328 update_br_prob_note (e->src);
4330 /* If all edges out of e->src are unlikely, the basic block itself
4334 if (prob_sum == profile_probability::never ())
4335 e->probability = profile_probability::always ();
4339 e->probability = profile_probability::never ();
4340 /* If BB has some edges out that are not impossible, we cannot
4341 assume that BB itself is. */
4344 if (current_ir_type () != IR_GIMPLE
4345 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
4346 update_br_prob_note (e->src);
4347 if (e->src->count == profile_count::zero ())
4349 if (count_sum == profile_count::zero () && impossible)
4352 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
4354 else if (current_ir_type () == IR_GIMPLE)
4355 for (gimple_stmt_iterator gsi = gsi_start_bb (e->src);
4356 !gsi_end_p (gsi); gsi_next (&gsi))
4358 if (stmt_can_terminate_bb_p (gsi_stmt (gsi)))
4364 /* FIXME: Implement RTL path. */
4369 if (dump_file && (dump_flags & TDF_DETAILS))
4371 "Making bb %i impossible and dropping count to 0.\n",
4373 e->src->count = profile_count::zero ();
4374 FOR_EACH_EDGE (e2, ei, e->src->preds)
4375 force_edge_cold (e2, impossible);
4380 /* If we did not adjust anything, the source basic block has no likely edges
4381 leaving in the other direction. In that case force that bb cold, too.
4382 This in general is a difficult task to do, but handle the special case when
4383 BB has only one predecessor. This is the common case when we are updating
4384 the profile after loop transforms. */
4385 if (!(prob_sum > profile_probability::never ())
4386 && count_sum == profile_count::zero ()
4387 && single_pred_p (e->src) && e->src->count.to_frequency (cfun)
4388 > (impossible ? 0 : 1))
4390 int old_frequency = e->src->count.to_frequency (cfun);
4391 if (dump_file && (dump_flags & TDF_DETAILS))
4392 fprintf (dump_file, "Making bb %i %s.\n", e->src->index,
4393 impossible ? "impossible" : "cold");
4394 int new_frequency = MIN (e->src->count.to_frequency (cfun),
4395 impossible ? 0 : 1);
4397 e->src->count = profile_count::zero ();
4399 e->src->count = e->count ().apply_scale (new_frequency,
4401 force_edge_cold (single_pred_edge (e->src), impossible);
4403 else if (dump_file && (dump_flags & TDF_DETAILS)
4404 && maybe_hot_bb_p (cfun, e->src))
4405 fprintf (dump_file, "Giving up on making bb %i %s.\n", e->src->index,
4406 impossible ? "impossible" : "cold");
4412 namespace selftest {
4414 /* Test that the value range of predictor values defined in predict.def is
4415 within the range [50, 100]. */
4417 struct branch_predictor
4423 #define DEF_PREDICTOR(ENUM, NAME, HITRATE, FLAGS) { NAME, HITRATE },
4426 test_prediction_value_range ()
4428 branch_predictor predictors[] = {
4429 #include "predict.def"
4430 { NULL, PROB_UNINITIALIZED }
4433 for (unsigned i = 0; predictors[i].name != NULL; i++)
4435 if (predictors[i].probability == PROB_UNINITIALIZED)
4438 unsigned p = 100 * predictors[i].probability / REG_BR_PROB_BASE;
4439 ASSERT_TRUE (p >= 50 && p <= 100);
4443 #undef DEF_PREDICTOR
4445 /* Run all of the selftests within this file. */
4450 test_prediction_value_range ();
4453 } // namespace selftest
4454 #endif /* CHECKING_P. */