1 /* Transformations based on profile information for values.
2 Copyright (C) 2003-2022 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
31 #include "data-streamer.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "tree-nested.h"
37 #include "value-prof.h"
40 #include "gimple-iterator.h"
42 #include "gimple-pretty-print.h"
/* This file implements value-profile-based optimizations.  Currently the
47 following optimizations are implemented (for more detailed descriptions
48 see comments at value_profile_transformations):
1) Division/modulo specialization.  Provided that we can determine that the
   operands of the division have some special properties, we may use this to
   produce more effective code (see the sketch after this list).
2) Indirect/virtual call specialization.  If we can determine the most
   common callee of an indirect/virtual call, we can use this
   information to improve code effectiveness (especially for the inliner).
59 3) Speculative prefetching. If we are able to determine that the difference
60 between addresses accessed by a memory reference is usually constant, we
61 may add the prefetch instructions.
   FIXME: This transformation was removed together with RTL based value profiling.
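
   As an illustration of 1) (a sketch, not a verbatim dump of the generated
   GIMPLE), the division specialization roughly rewrites

     x = a / b;        where the profile says b is almost always N

   into

     if (b == N)
       x = a / N;      division by a known constant, cheaper code
     else
       x = a / b;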
66 Value profiling internals
67 ==========================
69 Every value profiling transformation starts with defining what values
70 to profile. There are different histogram types (see HIST_TYPE_* in
71 value-prof.h) and each transformation can request one or more histogram
72 types per GIMPLE statement. The function gimple_find_values_to_profile()
73 collects the values to profile in a vec, and adds the number of counters
74 required for the different histogram types.
76 For a -fprofile-generate run, the statements for which values should be
recorded are instrumented in instrument_values().  The instrumentation
78 is done by helper functions that can be found in tree-profile.cc, where
79 new types of histograms can be added if necessary.
For a -fprofile-use run, the value profiling data is read back in by
82 compute_value_histograms() that translates the collected data to
83 histograms and attaches them to the profiled statements via
84 gimple_add_histogram_value(). Histograms are stored in a hash table
that is attached to every instrumented function, see VALUE_HISTOGRAMS
88 The value-profile transformations driver is the function
89 gimple_value_profile_transformations(). It traverses all statements in
90 the to-be-transformed function, and looks for statements with one or
91 more histograms attached to it. If a statement has histograms, the
92 transformation functions are called on the statement.
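
   As a sketch (using names that exist in this file), a transformation
   function typically looks up the histogram it needs, consumes it, and
   rewrites the statement:

     histogram_value h
       = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_POW2);
     if (h)
       {
         ... inspect h->hvalue.counters and decide whether it pays off ...
         gimple_remove_histogram_value (cfun, stmt, h);
         ... emit the specialized code before STMT ...
       }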
94 Limitations / FIXME / TODO:
95 * Only one histogram of each type can be associated with a statement.
96 * Some value profile transformations are done in builtins.cc (?!)
97 * Updating of histograms needs some TLC.
98 * The value profiling code could be used to record analysis results
from non-profiling passes (e.g. VRP).
100 * Adding new profilers should be simplified, starting with a cleanup
101 of what-happens-where and with making gimple_find_values_to_profile
102 and gimple_value_profile_transformations table-driven, perhaps...
105 static bool gimple_divmod_fixed_value_transform (gimple_stmt_iterator *);
106 static bool gimple_mod_pow2_value_transform (gimple_stmt_iterator *);
107 static bool gimple_mod_subtract_transform (gimple_stmt_iterator *);
108 static bool gimple_stringops_transform (gimple_stmt_iterator *);
109 static void dump_ic_profile (gimple_stmt_iterator *gsi);
111 /* Allocate histogram value. */
114 gimple_alloc_histogram_value (struct function *fun ATTRIBUTE_UNUSED,
115 enum hist_type type, gimple *stmt, tree value)
117 histogram_value hist = (histogram_value) xcalloc (1, sizeof (*hist));
118 hist->hvalue.value = value;
119 hist->hvalue.stmt = stmt;
124 /* Hash value for histogram. */
127 histogram_hash (const void *x)
129 return htab_hash_pointer (((const_histogram_value)x)->hvalue.stmt);
132 /* Return nonzero if statement for histogram_value X is Y. */
135 histogram_eq (const void *x, const void *y)
137 return ((const_histogram_value) x)->hvalue.stmt == (const gimple *) y;
140 /* Set histogram for STMT. */
143 set_histogram_value (struct function *fun, gimple *stmt, histogram_value hist)
146 if (!hist && !VALUE_HISTOGRAMS (fun))
148 if (!VALUE_HISTOGRAMS (fun))
149 VALUE_HISTOGRAMS (fun) = htab_create (1, histogram_hash,
151 loc = htab_find_slot_with_hash (VALUE_HISTOGRAMS (fun), stmt,
152 htab_hash_pointer (stmt),
153 hist ? INSERT : NO_INSERT);
157 htab_clear_slot (VALUE_HISTOGRAMS (fun), loc);
163 /* Get histogram list for STMT. */
166 gimple_histogram_value (struct function *fun, gimple *stmt)
168 if (!VALUE_HISTOGRAMS (fun))
170 return (histogram_value) htab_find_with_hash (VALUE_HISTOGRAMS (fun), stmt,
171 htab_hash_pointer (stmt));
174 /* Add histogram for STMT. */
177 gimple_add_histogram_value (struct function *fun, gimple *stmt,
178 histogram_value hist)
180 hist->hvalue.next = gimple_histogram_value (fun, stmt);
181 set_histogram_value (fun, stmt, hist);
185 /* Remove histogram HIST from STMT's histogram list. */
188 gimple_remove_histogram_value (struct function *fun, gimple *stmt,
189 histogram_value hist)
191 histogram_value hist2 = gimple_histogram_value (fun, stmt);
194 set_histogram_value (fun, stmt, hist->hvalue.next);
198 while (hist2->hvalue.next != hist)
199 hist2 = hist2->hvalue.next;
200 hist2->hvalue.next = hist->hvalue.next;
202 free (hist->hvalue.counters);
204 memset (hist, 0xab, sizeof (*hist));
/* Lookup the histogram of type TYPE for statement STMT.  */
211 gimple_histogram_value_of_type (struct function *fun, gimple *stmt,
214 histogram_value hist;
215 for (hist = gimple_histogram_value (fun, stmt); hist;
216 hist = hist->hvalue.next)
217 if (hist->type == type)
222 /* Dump information about HIST to DUMP_FILE. */
225 dump_histogram_value (FILE *dump_file, histogram_value hist)
229 case HIST_TYPE_INTERVAL:
230 if (hist->hvalue.counters)
232 fprintf (dump_file, "Interval counter range [%d,%d]: [",
233 hist->hdata.intvl.int_start,
234 (hist->hdata.intvl.int_start
235 + hist->hdata.intvl.steps - 1));
238 for (i = 0; i < hist->hdata.intvl.steps; i++)
240 fprintf (dump_file, "%d:%" PRId64,
241 hist->hdata.intvl.int_start + i,
242 (int64_t) hist->hvalue.counters[i]);
243 if (i != hist->hdata.intvl.steps - 1)
244 fprintf (dump_file, ", ");
246 fprintf (dump_file, "] outside range: %" PRId64 ".\n",
247 (int64_t) hist->hvalue.counters[i]);
252 if (hist->hvalue.counters)
253 fprintf (dump_file, "Pow2 counter pow2:%" PRId64
254 " nonpow2:%" PRId64 ".\n",
255 (int64_t) hist->hvalue.counters[1],
256 (int64_t) hist->hvalue.counters[0]);
259 case HIST_TYPE_TOPN_VALUES:
260 case HIST_TYPE_INDIR_CALL:
261 if (hist->hvalue.counters)
264 (hist->type == HIST_TYPE_TOPN_VALUES
265 ? "Top N value counter" : "Indirect call counter"));
266 if (hist->hvalue.counters)
268 unsigned count = hist->hvalue.counters[1];
269 fprintf (dump_file, " all: %" PRId64 ", %" PRId64 " values: ",
270 (int64_t) hist->hvalue.counters[0], (int64_t) count);
271 for (unsigned i = 0; i < count; i++)
273 fprintf (dump_file, "[%" PRId64 ":%" PRId64 "]",
274 (int64_t) hist->hvalue.counters[2 * i + 2],
275 (int64_t) hist->hvalue.counters[2 * i + 3]);
277 fprintf (dump_file, ", ");
279 fprintf (dump_file, ".\n");
284 case HIST_TYPE_AVERAGE:
285 if (hist->hvalue.counters)
286 fprintf (dump_file, "Average value sum:%" PRId64
287 " times:%" PRId64 ".\n",
288 (int64_t) hist->hvalue.counters[0],
289 (int64_t) hist->hvalue.counters[1]);
293 if (hist->hvalue.counters)
294 fprintf (dump_file, "IOR value ior:%" PRId64 ".\n",
295 (int64_t) hist->hvalue.counters[0]);
298 case HIST_TYPE_TIME_PROFILE:
299 if (hist->hvalue.counters)
300 fprintf (dump_file, "Time profile time:%" PRId64 ".\n",
301 (int64_t) hist->hvalue.counters[0]);
/* Stream out histogram HIST to the output block OB.  */
311 stream_out_histogram_value (struct output_block *ob, histogram_value hist)
316 bp = bitpack_create (ob->main_stream);
317 bp_pack_enum (&bp, hist_type, HIST_TYPE_MAX, hist->type);
318 bp_pack_value (&bp, hist->hvalue.next != NULL, 1);
319 streamer_write_bitpack (&bp);
322 case HIST_TYPE_INTERVAL:
323 streamer_write_hwi (ob, hist->hdata.intvl.int_start);
324 streamer_write_uhwi (ob, hist->hdata.intvl.steps);
329 for (i = 0; i < hist->n_counters; i++)
/* When the user uses an unsigned type with a big value, the constant
   converted to gcov_type (a signed type) can be negative.  */
333 gcov_type value = hist->hvalue.counters[i];
334 streamer_write_gcov_count (ob, value);
336 if (hist->hvalue.next)
337 stream_out_histogram_value (ob, hist->hvalue.next);
/* Stream in histograms from IB and attach them to STMT.  */
343 stream_in_histogram_value (class lto_input_block *ib, gimple *stmt)
346 unsigned int ncounters = 0;
349 histogram_value new_val;
351 histogram_value *next_p = NULL;
355 bp = streamer_read_bitpack (ib);
356 type = bp_unpack_enum (&bp, hist_type, HIST_TYPE_MAX);
357 next = bp_unpack_value (&bp, 1);
358 new_val = gimple_alloc_histogram_value (cfun, type, stmt);
361 case HIST_TYPE_INTERVAL:
362 new_val->hdata.intvl.int_start = streamer_read_hwi (ib);
363 new_val->hdata.intvl.steps = streamer_read_uhwi (ib);
364 ncounters = new_val->hdata.intvl.steps + 2;
368 case HIST_TYPE_AVERAGE:
372 case HIST_TYPE_TOPN_VALUES:
373 case HIST_TYPE_INDIR_CALL:
377 case HIST_TYPE_TIME_PROFILE:
/* TOP N counters have a variable number of counters.  */
386 if (type == HIST_TYPE_INDIR_CALL || type == HIST_TYPE_TOPN_VALUES)
388 gcov_type total = streamer_read_gcov_count (ib);
389 gcov_type ncounters = streamer_read_gcov_count (ib);
390 new_val->hvalue.counters = XNEWVAR (gcov_type,
391 sizeof (*new_val->hvalue.counters)
392 * (2 + 2 * ncounters));
393 new_val->hvalue.counters[0] = total;
394 new_val->hvalue.counters[1] = ncounters;
395 new_val->n_counters = 2 + 2 * ncounters;
396 for (i = 0; i < 2 * ncounters; i++)
397 new_val->hvalue.counters[2 + i] = streamer_read_gcov_count (ib);
401 new_val->hvalue.counters = XNEWVAR (gcov_type,
402 sizeof (*new_val->hvalue.counters)
404 new_val->n_counters = ncounters;
405 for (i = 0; i < ncounters; i++)
406 new_val->hvalue.counters[i] = streamer_read_gcov_count (ib);
410 gimple_add_histogram_value (cfun, stmt, new_val);
413 next_p = &new_val->hvalue.next;
418 /* Dump all histograms attached to STMT to DUMP_FILE. */
421 dump_histograms_for_stmt (struct function *fun, FILE *dump_file, gimple *stmt)
423 histogram_value hist;
424 for (hist = gimple_histogram_value (fun, stmt); hist; hist = hist->hvalue.next)
425 dump_histogram_value (dump_file, hist);
428 /* Remove all histograms associated with STMT. */
431 gimple_remove_stmt_histograms (struct function *fun, gimple *stmt)
434 while ((val = gimple_histogram_value (fun, stmt)) != NULL)
435 gimple_remove_histogram_value (fun, stmt, val);
/* Duplicate all histograms associated with OSTMT to STMT.  */
441 gimple_duplicate_stmt_histograms (struct function *fun, gimple *stmt,
442 struct function *ofun, gimple *ostmt)
445 for (val = gimple_histogram_value (ofun, ostmt); val != NULL; val = val->hvalue.next)
447 histogram_value new_val = gimple_alloc_histogram_value (fun, val->type);
448 memcpy (new_val, val, sizeof (*val));
449 new_val->hvalue.stmt = stmt;
450 new_val->hvalue.counters = XNEWVAR (gcov_type, sizeof (*new_val->hvalue.counters) * new_val->n_counters);
451 memcpy (new_val->hvalue.counters, val->hvalue.counters, sizeof (*new_val->hvalue.counters) * new_val->n_counters);
452 gimple_add_histogram_value (fun, stmt, new_val);
456 /* Move all histograms associated with OSTMT to STMT. */
459 gimple_move_stmt_histograms (struct function *fun, gimple *stmt, gimple *ostmt)
461 histogram_value val = gimple_histogram_value (fun, ostmt);
464 /* The following three statements can't be reordered,
465 because histogram hashtab relies on stmt field value
466 for finding the exact slot. */
467 set_histogram_value (fun, ostmt, NULL);
468 for (; val != NULL; val = val->hvalue.next)
469 val->hvalue.stmt = stmt;
470 set_histogram_value (fun, stmt, val);
474 static bool error_found = false;
476 /* Helper function for verify_histograms. For each histogram reachable via htab
477 walk verify that it was reached via statement walk. */
480 visit_hist (void **slot, void *data)
482 hash_set<histogram_value> *visited = (hash_set<histogram_value> *) data;
483 histogram_value hist = *(histogram_value *) slot;
485 if (!visited->contains (hist)
486 && hist->type != HIST_TYPE_TIME_PROFILE)
488 error ("dead histogram");
489 dump_histogram_value (stderr, hist);
490 debug_gimple_stmt (hist->hvalue.stmt);
496 /* Verify sanity of the histograms. */
499 verify_histograms (void)
502 gimple_stmt_iterator gsi;
503 histogram_value hist;
506 hash_set<histogram_value> visited_hists;
507 FOR_EACH_BB_FN (bb, cfun)
508 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
510 gimple *stmt = gsi_stmt (gsi);
512 for (hist = gimple_histogram_value (cfun, stmt); hist;
513 hist = hist->hvalue.next)
515 if (hist->hvalue.stmt != stmt)
517 error ("histogram value statement does not correspond to "
518 "the statement it is associated with");
519 debug_gimple_stmt (stmt);
520 dump_histogram_value (stderr, hist);
523 visited_hists.add (hist);
526 if (VALUE_HISTOGRAMS (cfun))
527 htab_traverse (VALUE_HISTOGRAMS (cfun), visit_hist, &visited_hists);
529 internal_error ("%qs failed", __func__);
/* Helper function for free_histograms.  Free the histogram in *SLOT
   together with its counters.  */
536 free_hist (void **slot, void *data ATTRIBUTE_UNUSED)
538 histogram_value hist = *(histogram_value *) slot;
539 free (hist->hvalue.counters);
545 free_histograms (struct function *fn)
547 if (VALUE_HISTOGRAMS (fn))
549 htab_traverse (VALUE_HISTOGRAMS (fn), free_hist, NULL);
550 htab_delete (VALUE_HISTOGRAMS (fn));
551 VALUE_HISTOGRAMS (fn) = NULL;
/* The overall number of invocations of the counter should match the
   execution count of the basic block.  Report a mismatch as an error
   rather than an internal error, as it might mean that the user has
   misused the profile somehow.  */
561 check_counter (gimple *stmt, const char * name,
562 gcov_type *count, gcov_type *all, profile_count bb_count_d)
564 gcov_type bb_count = bb_count_d.ipa ().to_gcov_type ();
565 if (*all != bb_count || *count > *all)
567 dump_user_location_t locus;
568 locus = ((stmt != NULL)
569 ? dump_user_location_t (stmt)
570 : dump_user_location_t::from_function_decl
571 (current_function_decl));
572 if (flag_profile_correction)
574 if (dump_enabled_p ())
575 dump_printf_loc (MSG_MISSED_OPTIMIZATION, locus,
576 "correcting inconsistent value profile: %s "
577 "profiler overall count (%d) does not match BB "
578 "count (%d)\n", name, (int)*all, (int)bb_count);
586 error_at (locus.get_location_t (), "corrupted value profile: %s "
587 "profile counter (%d out of %d) inconsistent with "
588 "basic-block count (%d)",
600 /* GIMPLE based transformations. */
603 gimple_value_profile_transformations (void)
606 gimple_stmt_iterator gsi;
607 bool changed = false;
609 FOR_EACH_BB_FN (bb, cfun)
611 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
613 gimple *stmt = gsi_stmt (gsi);
614 histogram_value th = gimple_histogram_value (cfun, stmt);
620 fprintf (dump_file, "Trying transformations on stmt ");
621 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
622 dump_histograms_for_stmt (cfun, dump_file, stmt);
625 /* Transformations: */
626 /* The order of things in this conditional controls which
627 transformation is used when more than one is applicable. */
628 /* It is expected that any code added by the transformations
629 will be added before the current statement, and that the
630 current statement remain valid (although possibly
631 modified) upon return. */
632 if (gimple_mod_subtract_transform (&gsi)
633 || gimple_divmod_fixed_value_transform (&gsi)
634 || gimple_mod_pow2_value_transform (&gsi)
635 || gimple_stringops_transform (&gsi))
637 stmt = gsi_stmt (gsi);
639 /* Original statement may no longer be in the same block. */
640 if (bb != gimple_bb (stmt))
642 bb = gimple_bb (stmt);
643 gsi = gsi_for_stmt (stmt);
/* The function never transforms a GIMPLE statement.  */
648 if (dump_enabled_p ())
649 dump_ic_profile (&gsi);
656 /* Generate code for transformation 1 (with parent gimple assignment
657 STMT and probability of taking the optimal path PROB, which is
658 equivalent to COUNT/ALL within roundoff error). This generates the
659 result into a temp and returns the temp; it does not replace or
660 alter the original STMT. */
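
/* A sketch of the code generated below (illustrative, shown for a modulo
   statement "lhs = op1 % op2" with profiled divisor VALUE):

     tmp0 = VALUE;
     tmp1 = op2;
     if (tmp1 != tmp0)
       tmp2 = op1 % op2;        the generic path
     else
       tmp2 = op1 % tmp0;       divisor known to equal VALUE

   TMP2 is what this function returns; the caller replaces the RHS of the
   original statement with it.  */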
663 gimple_divmod_fixed_value (gassign *stmt, tree value, profile_probability prob,
664 gcov_type count, gcov_type all)
666 gassign *stmt1, *stmt2;
668 tree tmp0, tmp1, tmp2;
669 gimple *bb1end, *bb2end, *bb3end;
670 basic_block bb, bb2, bb3, bb4;
671 tree optype, op1, op2;
672 edge e12, e13, e23, e24, e34;
673 gimple_stmt_iterator gsi;
675 gcc_assert (is_gimple_assign (stmt)
676 && (gimple_assign_rhs_code (stmt) == TRUNC_DIV_EXPR
677 || gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR));
679 optype = TREE_TYPE (gimple_assign_lhs (stmt));
680 op1 = gimple_assign_rhs1 (stmt);
681 op2 = gimple_assign_rhs2 (stmt);
683 bb = gimple_bb (stmt);
684 gsi = gsi_for_stmt (stmt);
686 tmp0 = make_temp_ssa_name (optype, NULL, "PROF");
687 tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
688 stmt1 = gimple_build_assign (tmp0, fold_convert (optype, value));
689 stmt2 = gimple_build_assign (tmp1, op2);
690 stmt3 = gimple_build_cond (NE_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
691 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
692 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
693 gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
696 tmp2 = create_tmp_reg (optype, "PROF");
697 stmt1 = gimple_build_assign (tmp2, gimple_assign_rhs_code (stmt), op1, tmp0);
698 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
701 stmt1 = gimple_build_assign (tmp2, gimple_assign_rhs_code (stmt), op1, op2);
702 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
706 /* Edge e23 connects bb2 to bb3, etc. */
707 e12 = split_block (bb, bb1end);
709 bb2->count = profile_count::from_gcov_type (count);
710 e23 = split_block (bb2, bb2end);
712 bb3->count = profile_count::from_gcov_type (all - count);
713 e34 = split_block (bb3, bb3end);
715 bb4->count = profile_count::from_gcov_type (all);
717 e12->flags &= ~EDGE_FALLTHRU;
718 e12->flags |= EDGE_FALSE_VALUE;
719 e12->probability = prob;
721 e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
722 e13->probability = prob.invert ();
726 e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
727 e24->probability = profile_probability::always ();
729 e34->probability = profile_probability::always ();
/* Return the n-th value count of a TOPN_VALUES histogram.  If
   there's a value, return true and set the VALUE and COUNT arguments.
738 Counters have the following meaning.
740 abs (counters[0]) is the number of executions
741 for i in 0 ... TOPN-1
742 counters[2 * i + 2] is target
743 counters[2 * i + 3] is corresponding hitrate counter.
   The value of counters[0] is negative when the counter became
   full during merging and some values were lost.  */
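
/* For example (an illustrative counter array, not real profile data),

     counters = { 100, 2, 0x401000, 60, 0x402000, 30 }

   means 100 executions were recorded, two values are tracked, value
   0x401000 was seen 60 times and value 0x402000 was seen 30 times;
   the remaining 10 executions hit values that are not in the table.  */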
749 get_nth_most_common_value (gimple *stmt, const char *counter_type,
750 histogram_value hist, gcov_type *value,
751 gcov_type *count, gcov_type *all, unsigned n)
753 unsigned counters = hist->hvalue.counters[1];
760 gcov_type read_all = abs_hwi (hist->hvalue.counters[0]);
761 gcov_type covered = 0;
762 for (unsigned i = 0; i < counters; ++i)
763 covered += hist->hvalue.counters[2 * i + 3];
765 gcov_type v = hist->hvalue.counters[2 * n + 2];
766 gcov_type c = hist->hvalue.counters[2 * n + 3];
768 if (hist->hvalue.counters[0] < 0
769 && flag_profile_reproducible == PROFILE_REPRODUCIBILITY_PARALLEL_RUNS)
772 fprintf (dump_file, "Histogram value dropped in '%s' mode\n",
773 "-fprofile-reproducible=parallel-runs");
776 else if (covered != read_all
777 && flag_profile_reproducible == PROFILE_REPRODUCIBILITY_MULTITHREADED)
780 fprintf (dump_file, "Histogram value dropped in '%s' mode\n",
781 "-fprofile-reproducible=multithreaded");
785 /* Indirect calls can't be verified. */
787 && check_counter (stmt, counter_type, &c, &read_all,
788 gimple_bb (stmt)->count))
/* Do transform 1) on the statement pointed to by SI if applicable.  */
801 gimple_divmod_fixed_value_transform (gimple_stmt_iterator *si)
803 histogram_value histogram;
805 gcov_type val, count, all;
806 tree result, value, tree_val;
807 profile_probability prob;
810 stmt = dyn_cast <gassign *> (gsi_stmt (*si));
814 if (!INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt))))
817 code = gimple_assign_rhs_code (stmt);
819 if (code != TRUNC_DIV_EXPR && code != TRUNC_MOD_EXPR)
822 histogram = gimple_histogram_value_of_type (cfun, stmt,
823 HIST_TYPE_TOPN_VALUES);
827 if (!get_nth_most_common_value (stmt, "divmod", histogram, &val, &count,
831 value = histogram->hvalue.value;
832 gimple_remove_histogram_value (cfun, stmt, histogram);
834 /* We require that count is at least half of all. */
835 if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
837 || optimize_bb_for_size_p (gimple_bb (stmt)))
840 /* Compute probability of taking the optimal path. */
842 prob = profile_probability::probability_in_gcov_type (count, all);
844 prob = profile_probability::never ();
846 if (sizeof (gcov_type) == sizeof (HOST_WIDE_INT))
847 tree_val = build_int_cst (get_gcov_type (), val);
851 a[0] = (unsigned HOST_WIDE_INT) val;
852 a[1] = val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1;
854 tree_val = wide_int_to_tree (get_gcov_type (), wide_int::from_array (a, 2,
855 TYPE_PRECISION (get_gcov_type ()), false));
857 result = gimple_divmod_fixed_value (stmt, tree_val, prob, count, all);
859 if (dump_enabled_p ())
860 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
861 "Transformation done: div/mod by constant %T\n", tree_val);
863 gimple_assign_set_rhs_from_tree (si, result);
864 update_stmt (gsi_stmt (*si));
869 /* Generate code for transformation 2 (with parent gimple assign STMT and
870 probability of taking the optimal path PROB, which is equivalent to COUNT/ALL
871 within roundoff error). This generates the result into a temp and returns
872 the temp; it does not replace or alter the original STMT. */
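
/* A sketch of the code generated below (illustrative, for
   "lhs = op1 % op2" where the profile says op2 is usually a power of 2):

     tmp2 = op2 - 1;
     tmp3 = tmp2 & op2;
     if (tmp3 != 0)
       result = op1 % op2;      op2 is not a power of 2
     else
       result = op1 & tmp2;     op2 is a power of 2, use the mask

   RESULT is what this function returns; the caller replaces the RHS of
   the original statement with it.  */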
875 gimple_mod_pow2 (gassign *stmt, profile_probability prob, gcov_type count, gcov_type all)
877 gassign *stmt1, *stmt2, *stmt3;
880 gimple *bb1end, *bb2end, *bb3end;
881 basic_block bb, bb2, bb3, bb4;
882 tree optype, op1, op2;
883 edge e12, e13, e23, e24, e34;
884 gimple_stmt_iterator gsi;
887 gcc_assert (is_gimple_assign (stmt)
888 && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);
890 optype = TREE_TYPE (gimple_assign_lhs (stmt));
891 op1 = gimple_assign_rhs1 (stmt);
892 op2 = gimple_assign_rhs2 (stmt);
894 bb = gimple_bb (stmt);
895 gsi = gsi_for_stmt (stmt);
897 result = create_tmp_reg (optype, "PROF");
898 tmp2 = make_temp_ssa_name (optype, NULL, "PROF");
899 tmp3 = make_temp_ssa_name (optype, NULL, "PROF");
900 stmt2 = gimple_build_assign (tmp2, PLUS_EXPR, op2,
901 build_int_cst (optype, -1));
902 stmt3 = gimple_build_assign (tmp3, BIT_AND_EXPR, tmp2, op2);
903 stmt4 = gimple_build_cond (NE_EXPR, tmp3, build_int_cst (optype, 0),
904 NULL_TREE, NULL_TREE);
905 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
906 gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
907 gsi_insert_before (&gsi, stmt4, GSI_SAME_STMT);
910 /* tmp2 == op2-1 inherited from previous block. */
911 stmt1 = gimple_build_assign (result, BIT_AND_EXPR, op1, tmp2);
912 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
915 stmt1 = gimple_build_assign (result, gimple_assign_rhs_code (stmt),
917 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
921 /* Edge e23 connects bb2 to bb3, etc. */
922 e12 = split_block (bb, bb1end);
924 bb2->count = profile_count::from_gcov_type (count);
925 e23 = split_block (bb2, bb2end);
927 bb3->count = profile_count::from_gcov_type (all - count);
928 e34 = split_block (bb3, bb3end);
930 bb4->count = profile_count::from_gcov_type (all);
932 e12->flags &= ~EDGE_FALLTHRU;
933 e12->flags |= EDGE_FALSE_VALUE;
934 e12->probability = prob;
936 e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
937 e13->probability = prob.invert ();
941 e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
942 e24->probability = profile_probability::always ();
944 e34->probability = profile_probability::always ();
/* Do transform 2) on the statement pointed to by SI if applicable.  */
952 gimple_mod_pow2_value_transform (gimple_stmt_iterator *si)
954 histogram_value histogram;
956 gcov_type count, wrong_values, all;
957 tree lhs_type, result, value;
958 profile_probability prob;
961 stmt = dyn_cast <gassign *> (gsi_stmt (*si));
965 lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
966 if (!INTEGRAL_TYPE_P (lhs_type))
969 code = gimple_assign_rhs_code (stmt);
971 if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
974 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_POW2);
978 value = histogram->hvalue.value;
979 wrong_values = histogram->hvalue.counters[0];
980 count = histogram->hvalue.counters[1];
982 gimple_remove_histogram_value (cfun, stmt, histogram);
/* We require that we hit a power of 2 in at least half of all evaluations.  */
985 if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
986 || count < wrong_values
987 || optimize_bb_for_size_p (gimple_bb (stmt)))
990 /* Compute probability of taking the optimal path. */
991 all = count + wrong_values;
993 if (check_counter (stmt, "pow2", &count, &all, gimple_bb (stmt)->count))
996 if (dump_enabled_p ())
997 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
998 "Transformation done: mod power of 2\n");
1001 prob = profile_probability::probability_in_gcov_type (count, all);
1003 prob = profile_probability::never ();
1005 result = gimple_mod_pow2 (stmt, prob, count, all);
1007 gimple_assign_set_rhs_from_tree (si, result);
1008 update_stmt (gsi_stmt (*si));
/* Generate code for transformations 3 and 4 (with parent gimple assign STMT
   and NCOUNTS the number of cases to support; currently only NCOUNTS == 0 or 1
   is supported and this is built into this interface).  The probabilities of
   taking the optimal paths are PROB1 and PROB2, which are equivalent to
   COUNT1/ALL and COUNT2/ALL respectively within roundoff error.  This
   generates the result into a temp and returns the temp; it does not replace
   or alter the original STMT.  */
1020 /* FIXME: Generalize the interface to handle NCOUNTS > 1. */
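
/* A sketch of the code generated below for NCOUNTS == 1 (illustrative,
   for "lhs = op1 % op2"):

     result = op1;
     tmp1 = op2;
     if (result < tmp1)
       goto done;               op1 % op2 == op1
     result = result - tmp1;
     if (result < tmp1)
       goto done;               op1 % op2 == op1 - op2
     result = op1 % op2;        fall back to the real modulo
   done:

   RESULT is what this function returns; the caller replaces the RHS of
   the original statement with it.  */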
1023 gimple_mod_subtract (gassign *stmt, profile_probability prob1,
1024 profile_probability prob2, int ncounts,
1025 gcov_type count1, gcov_type count2, gcov_type all)
1031 gimple *bb1end, *bb2end = NULL, *bb3end;
1032 basic_block bb, bb2, bb3, bb4;
1033 tree optype, op1, op2;
1034 edge e12, e23 = 0, e24, e34, e14;
1035 gimple_stmt_iterator gsi;
1038 gcc_assert (is_gimple_assign (stmt)
1039 && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);
1041 optype = TREE_TYPE (gimple_assign_lhs (stmt));
1042 op1 = gimple_assign_rhs1 (stmt);
1043 op2 = gimple_assign_rhs2 (stmt);
1045 bb = gimple_bb (stmt);
1046 gsi = gsi_for_stmt (stmt);
1048 result = create_tmp_reg (optype, "PROF");
1049 tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
1050 stmt1 = gimple_build_assign (result, op1);
1051 stmt2 = gimple_build_assign (tmp1, op2);
1052 stmt3 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
1053 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
1054 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
1055 gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
1058 if (ncounts) /* Assumed to be 0 or 1 */
1060 stmt1 = gimple_build_assign (result, MINUS_EXPR, result, tmp1);
1061 stmt2 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
1062 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
1063 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
1067 /* Fallback case. */
1068 stmt1 = gimple_build_assign (result, gimple_assign_rhs_code (stmt),
1070 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
1074 /* Edge e23 connects bb2 to bb3, etc. */
1075 /* However block 3 is optional; if it is not there, references
1076 to 3 really refer to block 2. */
1077 e12 = split_block (bb, bb1end);
1079 bb2->count = profile_count::from_gcov_type (all - count1);
1081 if (ncounts) /* Assumed to be 0 or 1. */
1083 e23 = split_block (bb2, bb2end);
1085 bb3->count = profile_count::from_gcov_type (all - count1 - count2);
1088 e34 = split_block (ncounts ? bb3 : bb2, bb3end);
1090 bb4->count = profile_count::from_gcov_type (all);
1092 e12->flags &= ~EDGE_FALLTHRU;
1093 e12->flags |= EDGE_FALSE_VALUE;
1094 e12->probability = prob1.invert ();
1096 e14 = make_edge (bb, bb4, EDGE_TRUE_VALUE);
1097 e14->probability = prob1;
1099 if (ncounts) /* Assumed to be 0 or 1. */
1101 e23->flags &= ~EDGE_FALLTHRU;
1102 e23->flags |= EDGE_FALSE_VALUE;
1103 e23->probability = prob2.invert ();
1105 e24 = make_edge (bb2, bb4, EDGE_TRUE_VALUE);
1106 e24->probability = prob2;
1109 e34->probability = profile_probability::always ();
1114 /* Do transforms 3) and 4) on the statement pointed-to by SI if applicable. */
1117 gimple_mod_subtract_transform (gimple_stmt_iterator *si)
1119 histogram_value histogram;
1120 enum tree_code code;
1121 gcov_type count, wrong_values, all;
1122 tree lhs_type, result;
1123 profile_probability prob1, prob2;
1124 unsigned int i, steps;
1125 gcov_type count1, count2;
1127 stmt = dyn_cast <gassign *> (gsi_stmt (*si));
1131 lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
1132 if (!INTEGRAL_TYPE_P (lhs_type))
1135 code = gimple_assign_rhs_code (stmt);
1137 if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
1140 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INTERVAL);
1146 for (i = 0; i < histogram->hdata.intvl.steps; i++)
1147 all += histogram->hvalue.counters[i];
1149 wrong_values += histogram->hvalue.counters[i];
1150 wrong_values += histogram->hvalue.counters[i+1];
1151 steps = histogram->hdata.intvl.steps;
1152 all += wrong_values;
1153 count1 = histogram->hvalue.counters[0];
1154 count2 = histogram->hvalue.counters[1];
1156 if (check_counter (stmt, "interval", &count1, &all, gimple_bb (stmt)->count))
1158 gimple_remove_histogram_value (cfun, stmt, histogram);
1162 if (flag_profile_correction && count1 + count2 > all)
1163 all = count1 + count2;
1165 gcc_assert (count1 + count2 <= all);
/* We require that we use just subtractions in at least 50% of all
   evaluations.  */
1170 for (i = 0; i < histogram->hdata.intvl.steps; i++)
1172 count += histogram->hvalue.counters[i];
1173 if (count * 2 >= all)
1177 || optimize_bb_for_size_p (gimple_bb (stmt)))
1180 gimple_remove_histogram_value (cfun, stmt, histogram);
1181 if (dump_enabled_p ())
1182 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
1183 "Transformation done: mod subtract\n");
1185 /* Compute probability of taking the optimal path(s). */
1188 prob1 = profile_probability::probability_in_gcov_type (count1, all);
1189 prob2 = profile_probability::probability_in_gcov_type (count2, all);
1193 prob1 = prob2 = profile_probability::never ();
1196 /* In practice, "steps" is always 2. This interface reflects this,
1197 and will need to be changed if "steps" can change. */
1198 result = gimple_mod_subtract (stmt, prob1, prob2, i, count1, count2, all);
1200 gimple_assign_set_rhs_from_tree (si, result);
1201 update_stmt (gsi_stmt (*si));
1206 typedef int_hash <unsigned int, 0, UINT_MAX> profile_id_hash;
1208 static hash_map<profile_id_hash, cgraph_node *> *cgraph_node_map = 0;
/* Return true if the cgraph node map is initialized.  This
   is used to test whether profile_id has been created
   for cgraph_nodes.  */
1215 coverage_node_map_initialized_p (void)
1217 return cgraph_node_map != 0;
/* Initialize the map from PROFILE_ID to CGRAPH_NODE.
   When LOCAL is true, the PROFILE_IDs are computed; when it is false, we
   assume that the PROFILE_IDs were already assigned.  */
1225 init_node_map (bool local)
1227 struct cgraph_node *n;
1228 cgraph_node_map = new hash_map<profile_id_hash, cgraph_node *>;
1230 FOR_EACH_DEFINED_FUNCTION (n)
1231 if (n->has_gimple_body_p () || n->thunk)
1234 dump_user_location_t loc
1235 = dump_user_location_t::from_function_decl (n->decl);
1238 n->profile_id = coverage_compute_profile_id (n);
1239 while ((val = cgraph_node_map->get (n->profile_id))
1242 if (dump_enabled_p ())
1243 dump_printf_loc (MSG_MISSED_OPTIMIZATION, loc,
1244 "Local profile-id %i conflict"
1245 " with nodes %s %s\n",
1248 (*val)->dump_name ());
1249 n->profile_id = (n->profile_id + 1) & 0x7fffffff;
1252 else if (!n->profile_id)
1254 if (dump_enabled_p ())
1255 dump_printf_loc (MSG_MISSED_OPTIMIZATION, loc,
1256 "Node %s has no profile-id"
1257 " (profile feedback missing?)\n",
1261 else if ((val = cgraph_node_map->get (n->profile_id)))
1263 if (dump_enabled_p ())
1264 dump_printf_loc (MSG_MISSED_OPTIMIZATION, loc,
1265 "Node %s has IP profile-id %i conflict. "
1267 n->dump_name (), n->profile_id);
1271 cgraph_node_map->put (n->profile_id, n);
1275 /* Delete the CGRAPH_NODE_MAP. */
1280 delete cgraph_node_map;
/* Return the cgraph node for the function with profile id PROFILE_ID.  */
1286 find_func_by_profile_id (int profile_id)
1288 cgraph_node **val = cgraph_node_map->get (profile_id);
/* Do the transformation

     if (actual_callee_address == address_of_most_common_function/method)
       do a direct call;
     else
       do the old indirect call;

   where DIRECT_CALL is the cgraph node of the most common target and PROB
   is the probability of selecting it.  */
1304 gimple_ic (gcall *icall_stmt, struct cgraph_node *direct_call,
1305 profile_probability prob)
1310 tree tmp0, tmp1, tmp;
1311 basic_block cond_bb, dcall_bb, icall_bb, join_bb = NULL;
1312 edge e_cd, e_ci, e_di, e_dj = NULL, e_ij;
1313 gimple_stmt_iterator gsi;
1318 cond_bb = gimple_bb (icall_stmt);
1319 gsi = gsi_for_stmt (icall_stmt);
1321 tmp0 = make_temp_ssa_name (ptr_type_node, NULL, "PROF");
1322 tmp1 = make_temp_ssa_name (ptr_type_node, NULL, "PROF");
1323 tmp = unshare_expr (gimple_call_fn (icall_stmt));
1324 load_stmt = gimple_build_assign (tmp0, tmp);
1325 gsi_insert_before (&gsi, load_stmt, GSI_SAME_STMT);
1327 tmp = fold_convert (ptr_type_node, build_addr (direct_call->decl));
1328 load_stmt = gimple_build_assign (tmp1, tmp);
1329 gsi_insert_before (&gsi, load_stmt, GSI_SAME_STMT);
1331 cond_stmt = gimple_build_cond (EQ_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
1332 gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
1334 if (TREE_CODE (gimple_vdef (icall_stmt)) == SSA_NAME)
1336 unlink_stmt_vdef (icall_stmt);
1337 release_ssa_name (gimple_vdef (icall_stmt));
1339 gimple_set_vdef (icall_stmt, NULL_TREE);
1340 gimple_set_vuse (icall_stmt, NULL_TREE);
1341 update_stmt (icall_stmt);
1342 dcall_stmt = as_a <gcall *> (gimple_copy (icall_stmt));
1343 gimple_call_set_fndecl (dcall_stmt, direct_call->decl);
1344 dflags = flags_from_decl_or_type (direct_call->decl);
1345 if ((dflags & ECF_NORETURN) != 0
1346 && should_remove_lhs_p (gimple_call_lhs (dcall_stmt)))
1347 gimple_call_set_lhs (dcall_stmt, NULL_TREE);
1348 gsi_insert_before (&gsi, dcall_stmt, GSI_SAME_STMT);
1351 /* Edge e_cd connects cond_bb to dcall_bb, etc; note the first letters. */
1352 e_cd = split_block (cond_bb, cond_stmt);
1353 dcall_bb = e_cd->dest;
1354 dcall_bb->count = cond_bb->count.apply_probability (prob);
1356 e_di = split_block (dcall_bb, dcall_stmt);
1357 icall_bb = e_di->dest;
1358 icall_bb->count = cond_bb->count - dcall_bb->count;
1360 /* Do not disturb existing EH edges from the indirect call. */
1361 if (!stmt_ends_bb_p (icall_stmt))
1362 e_ij = split_block (icall_bb, icall_stmt);
1365 e_ij = find_fallthru_edge (icall_bb->succs);
1366 /* The indirect call might be noreturn. */
1369 e_ij->probability = profile_probability::always ();
1370 e_ij = single_pred_edge (split_edge (e_ij));
1375 join_bb = e_ij->dest;
1376 join_bb->count = cond_bb->count;
1379 e_cd->flags = (e_cd->flags & ~EDGE_FALLTHRU) | EDGE_TRUE_VALUE;
1380 e_cd->probability = prob;
1382 e_ci = make_edge (cond_bb, icall_bb, EDGE_FALSE_VALUE);
1383 e_ci->probability = prob.invert ();
1389 if ((dflags & ECF_NORETURN) == 0)
1391 e_dj = make_edge (dcall_bb, join_bb, EDGE_FALLTHRU);
1392 e_dj->probability = profile_probability::always ();
1394 e_ij->probability = profile_probability::always ();
1397 /* Insert PHI node for the call result if necessary. */
1398 if (gimple_call_lhs (icall_stmt)
1399 && TREE_CODE (gimple_call_lhs (icall_stmt)) == SSA_NAME
1400 && (dflags & ECF_NORETURN) == 0)
1402 tree result = gimple_call_lhs (icall_stmt);
1403 gphi *phi = create_phi_node (result, join_bb);
1404 gimple_call_set_lhs (icall_stmt,
1405 duplicate_ssa_name (result, icall_stmt));
1406 add_phi_arg (phi, gimple_call_lhs (icall_stmt), e_ij, UNKNOWN_LOCATION);
1407 gimple_call_set_lhs (dcall_stmt,
1408 duplicate_ssa_name (result, dcall_stmt));
1409 add_phi_arg (phi, gimple_call_lhs (dcall_stmt), e_dj, UNKNOWN_LOCATION);
1412 /* Build an EH edge for the direct call if necessary. */
1413 lp_nr = lookup_stmt_eh_lp (icall_stmt);
1414 if (lp_nr > 0 && stmt_could_throw_p (cfun, dcall_stmt))
1416 add_stmt_to_eh_lp (dcall_stmt, lp_nr);
1419 FOR_EACH_EDGE (e_eh, ei, icall_bb->succs)
1420 if (e_eh->flags & (EDGE_EH | EDGE_ABNORMAL))
1422 e = make_edge (dcall_bb, e_eh->dest, e_eh->flags);
1423 e->probability = e_eh->probability;
1424 for (gphi_iterator psi = gsi_start_phis (e_eh->dest);
1425 !gsi_end_p (psi); gsi_next (&psi))
1427 gphi *phi = psi.phi ();
1428 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1429 PHI_ARG_DEF_FROM_EDGE (phi, e_eh));
1432 if (!stmt_could_throw_p (cfun, dcall_stmt))
1433 gimple_purge_dead_eh_edges (dcall_bb);
1437 /* Dump info about indirect call profile. */
1440 dump_ic_profile (gimple_stmt_iterator *gsi)
1443 histogram_value histogram;
1444 gcov_type val, count, all;
1445 struct cgraph_node *direct_call;
1447 stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
1451 if (gimple_call_fndecl (stmt) != NULL_TREE)
1454 if (gimple_call_internal_p (stmt))
1457 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INDIR_CALL);
1462 all = histogram->hvalue.counters[0];
1464 for (unsigned j = 0; j < GCOV_TOPN_MAXIMUM_TRACKED_VALUES; j++)
1466 if (!get_nth_most_common_value (NULL, "indirect call", histogram, &val,
1472 direct_call = find_func_by_profile_id ((int) val);
1474 if (direct_call == NULL)
1476 MSG_MISSED_OPTIMIZATION, stmt,
1477 "Indirect call -> direct call from other "
1478 "module %T=> %i (will resolve by ipa-profile only with LTO)\n",
1479 gimple_call_fn (stmt), (int) val);
1481 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
1482 "Indirect call -> direct call "
1483 "%T => %T (will resolve by ipa-profile)\n",
1484 gimple_call_fn (stmt), direct_call->decl);
1485 dump_printf_loc (MSG_NOTE, stmt,
1486 "hist->count %" PRId64 " hist->all %" PRId64 "\n",
/* Return true if the stringop CALL should be profiled.  SIZE_ARG is
   set to the argument index for the size of the string operation.  */
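
/* For example, for memcpy (dest, src, n) the size is the third argument,
   so *SIZE_ARG ends up as 2, while for bzero (dest, n) it is 1.  */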
1495 interesting_stringop_to_profile_p (gcall *call, int *size_arg)
1497 enum built_in_function fcode;
1499 fcode = DECL_FUNCTION_CODE (gimple_call_fndecl (call));
1502 case BUILT_IN_MEMCPY:
1503 case BUILT_IN_MEMPCPY:
1504 case BUILT_IN_MEMMOVE:
1506 return validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
1507 INTEGER_TYPE, VOID_TYPE);
1508 case BUILT_IN_MEMSET:
1510 return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
1511 INTEGER_TYPE, VOID_TYPE);
1512 case BUILT_IN_BZERO:
1514 return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
/* Convert   stringop (..., vcall_size)
   into
     if (vcall_size == icall_size)
       stringop (..., icall_size);
     else
       stringop (..., vcall_size);
1527 assuming we'll propagate a true constant into ICALL_SIZE later. */
1530 gimple_stringop_fixed_value (gcall *vcall_stmt, tree icall_size, profile_probability prob,
1531 gcov_type count, gcov_type all)
1536 tree tmp0, tmp1, vcall_size, optype;
1537 basic_block cond_bb, icall_bb, vcall_bb, join_bb;
1538 edge e_ci, e_cv, e_iv, e_ij, e_vj;
1539 gimple_stmt_iterator gsi;
1542 if (!interesting_stringop_to_profile_p (vcall_stmt, &size_arg))
1545 cond_bb = gimple_bb (vcall_stmt);
1546 gsi = gsi_for_stmt (vcall_stmt);
1548 vcall_size = gimple_call_arg (vcall_stmt, size_arg);
1549 optype = TREE_TYPE (vcall_size);
1551 tmp0 = make_temp_ssa_name (optype, NULL, "PROF");
1552 tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
1553 tmp_stmt = gimple_build_assign (tmp0, fold_convert (optype, icall_size));
1554 gsi_insert_before (&gsi, tmp_stmt, GSI_SAME_STMT);
1556 tmp_stmt = gimple_build_assign (tmp1, vcall_size);
1557 gsi_insert_before (&gsi, tmp_stmt, GSI_SAME_STMT);
1559 cond_stmt = gimple_build_cond (EQ_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
1560 gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
1562 if (TREE_CODE (gimple_vdef (vcall_stmt)) == SSA_NAME)
1564 unlink_stmt_vdef (vcall_stmt);
1565 release_ssa_name (gimple_vdef (vcall_stmt));
1567 gimple_set_vdef (vcall_stmt, NULL);
1568 gimple_set_vuse (vcall_stmt, NULL);
1569 update_stmt (vcall_stmt);
1570 icall_stmt = as_a <gcall *> (gimple_copy (vcall_stmt));
1571 gimple_call_set_arg (icall_stmt, size_arg,
1572 fold_convert (optype, icall_size));
1573 gsi_insert_before (&gsi, icall_stmt, GSI_SAME_STMT);
1576 /* Edge e_ci connects cond_bb to icall_bb, etc. */
1577 e_ci = split_block (cond_bb, cond_stmt);
1578 icall_bb = e_ci->dest;
1579 icall_bb->count = profile_count::from_gcov_type (count);
1581 e_iv = split_block (icall_bb, icall_stmt);
1582 vcall_bb = e_iv->dest;
1583 vcall_bb->count = profile_count::from_gcov_type (all - count);
1585 e_vj = split_block (vcall_bb, vcall_stmt);
1586 join_bb = e_vj->dest;
1587 join_bb->count = profile_count::from_gcov_type (all);
1589 e_ci->flags = (e_ci->flags & ~EDGE_FALLTHRU) | EDGE_TRUE_VALUE;
1590 e_ci->probability = prob;
1592 e_cv = make_edge (cond_bb, vcall_bb, EDGE_FALSE_VALUE);
1593 e_cv->probability = prob.invert ();
1597 e_ij = make_edge (icall_bb, join_bb, EDGE_FALLTHRU);
1598 e_ij->probability = profile_probability::always ();
1600 e_vj->probability = profile_probability::always ();
1602 /* Insert PHI node for the call result if necessary. */
1603 if (gimple_call_lhs (vcall_stmt)
1604 && TREE_CODE (gimple_call_lhs (vcall_stmt)) == SSA_NAME)
1606 tree result = gimple_call_lhs (vcall_stmt);
1607 gphi *phi = create_phi_node (result, join_bb);
1608 gimple_call_set_lhs (vcall_stmt,
1609 duplicate_ssa_name (result, vcall_stmt));
1610 add_phi_arg (phi, gimple_call_lhs (vcall_stmt), e_vj, UNKNOWN_LOCATION);
1611 gimple_call_set_lhs (icall_stmt,
1612 duplicate_ssa_name (result, icall_stmt));
1613 add_phi_arg (phi, gimple_call_lhs (icall_stmt), e_ij, UNKNOWN_LOCATION);
1616 /* Because these are all string op builtins, they're all nothrow. */
1617 gcc_assert (!stmt_could_throw_p (cfun, vcall_stmt));
1618 gcc_assert (!stmt_could_throw_p (cfun, icall_stmt));
/* Do the stringop transformation on the statement pointed to by GSI if a
   single block size dominates the profile.  */
1625 gimple_stringops_transform (gimple_stmt_iterator *gsi)
1629 enum built_in_function fcode;
1630 histogram_value histogram;
1631 gcov_type count, all, val;
1633 unsigned int dest_align, src_align;
1634 profile_probability prob;
1638 stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
1642 if (!gimple_call_builtin_p (gsi_stmt (*gsi), BUILT_IN_NORMAL))
1645 if (!interesting_stringop_to_profile_p (stmt, &size_arg))
1648 blck_size = gimple_call_arg (stmt, size_arg);
1649 if (TREE_CODE (blck_size) == INTEGER_CST)
1652 histogram = gimple_histogram_value_of_type (cfun, stmt,
1653 HIST_TYPE_TOPN_VALUES);
1657 if (!get_nth_most_common_value (stmt, "stringops", histogram, &val, &count,
1661 gimple_remove_histogram_value (cfun, stmt, histogram);
1663 /* We require that count is at least half of all. */
1664 if (2 * count < all || optimize_bb_for_size_p (gimple_bb (stmt)))
1666 if (check_counter (stmt, "value", &count, &all, gimple_bb (stmt)->count))
1669 prob = profile_probability::probability_in_gcov_type (count, all);
1671 prob = profile_probability::never ();
1673 dest = gimple_call_arg (stmt, 0);
1674 dest_align = get_pointer_alignment (dest);
1675 fcode = DECL_FUNCTION_CODE (gimple_call_fndecl (stmt));
1678 case BUILT_IN_MEMCPY:
1679 case BUILT_IN_MEMPCPY:
1680 case BUILT_IN_MEMMOVE:
1681 src = gimple_call_arg (stmt, 1);
1682 src_align = get_pointer_alignment (src);
1683 if (!can_move_by_pieces (val, MIN (dest_align, src_align)))
1686 case BUILT_IN_MEMSET:
1687 if (!can_store_by_pieces (val, builtin_memset_read_str,
1688 gimple_call_arg (stmt, 1),
1692 case BUILT_IN_BZERO:
1693 if (!can_store_by_pieces (val, builtin_memset_read_str,
1702 if (sizeof (gcov_type) == sizeof (HOST_WIDE_INT))
1703 tree_val = build_int_cst (get_gcov_type (), val);
1707 a[0] = (unsigned HOST_WIDE_INT) val;
1708 a[1] = val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1;
1710 tree_val = wide_int_to_tree (get_gcov_type (), wide_int::from_array (a, 2,
1711 TYPE_PRECISION (get_gcov_type ()), false));
1714 if (dump_enabled_p ())
1715 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
1716 "Transformation done: single value %i stringop for %s\n",
1717 (int)val, built_in_names[(int)fcode]);
1719 gimple_stringop_fixed_value (stmt, tree_val, prob, count, all);
1725 stringop_block_profile (gimple *stmt, unsigned int *expected_align,
1726 HOST_WIDE_INT *expected_size)
1728 histogram_value histogram;
1729 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_AVERAGE);
1732 *expected_size = -1;
1733 else if (!histogram->hvalue.counters[1])
1735 *expected_size = -1;
1736 gimple_remove_histogram_value (cfun, stmt, histogram);
1741 size = ((histogram->hvalue.counters[0]
1742 + histogram->hvalue.counters[1] / 2)
1743 / histogram->hvalue.counters[1]);
/* Even if we can hold a bigger value in SIZE, INT_MAX is a safe
   "infinity" for code generation strategies.  */
1748 *expected_size = size;
1749 gimple_remove_histogram_value (cfun, stmt, histogram);
1752 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_IOR);
1755 *expected_align = 0;
1756 else if (!histogram->hvalue.counters[0])
1758 gimple_remove_histogram_value (cfun, stmt, histogram);
1759 *expected_align = 0;
1764 unsigned int alignment;
1766 count = histogram->hvalue.counters[0];
1768 while (!(count & alignment)
1769 && (alignment <= UINT_MAX / 2 / BITS_PER_UNIT))
1771 *expected_align = alignment * BITS_PER_UNIT;
1772 gimple_remove_histogram_value (cfun, stmt, histogram);
/* Find values inside STMT for which we want to measure histograms for
   division/modulo optimization.  */
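
/* For instance, for "x = a % b" with an SSA_NAME divisor the code below
   requests a HIST_TYPE_TOPN_VALUES histogram on b, a HIST_TYPE_POW2
   histogram on b (unsigned modulo only) and a HIST_TYPE_INTERVAL
   histogram with two steps starting at 0 on the value a / b.  */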
1781 gimple_divmod_values_to_profile (gimple *stmt, histogram_values *values)
1783 tree lhs, divisor, op0, type;
1784 histogram_value hist;
1786 if (gimple_code (stmt) != GIMPLE_ASSIGN)
1789 lhs = gimple_assign_lhs (stmt);
1790 type = TREE_TYPE (lhs);
1791 if (!INTEGRAL_TYPE_P (type))
1794 switch (gimple_assign_rhs_code (stmt))
1796 case TRUNC_DIV_EXPR:
1797 case TRUNC_MOD_EXPR:
1798 divisor = gimple_assign_rhs2 (stmt);
1799 op0 = gimple_assign_rhs1 (stmt);
1801 if (TREE_CODE (divisor) == SSA_NAME)
/* Check for the case where the divisor is the same value most
   of the time.  */
1804 values->safe_push (gimple_alloc_histogram_value (cfun,
1805 HIST_TYPE_TOPN_VALUES,
1808 /* For mod, check whether it is not often a noop (or replaceable by
1809 a few subtractions). */
1810 if (gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR
1811 && TYPE_UNSIGNED (type)
1812 && TREE_CODE (divisor) == SSA_NAME)
/* Check for a special case where the divisor is a power of 2.  */
1816 values->safe_push (gimple_alloc_histogram_value (cfun,
1819 val = build2 (TRUNC_DIV_EXPR, type, op0, divisor);
1820 hist = gimple_alloc_histogram_value (cfun, HIST_TYPE_INTERVAL,
1822 hist->hdata.intvl.int_start = 0;
1823 hist->hdata.intvl.steps = 2;
1824 values->safe_push (hist);
/* Find calls inside STMT for which we want to measure histograms for
   indirect/virtual call optimization.  */
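
/* For example, a call through a function pointer, "p (x)", gets a
   HIST_TYPE_INDIR_CALL histogram on the SSA name of P, recording the
   most common call targets.  */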
1837 gimple_indirect_call_to_profile (gimple *stmt, histogram_values *values)
1841 if (gimple_code (stmt) != GIMPLE_CALL
1842 || gimple_call_internal_p (stmt)
1843 || gimple_call_fndecl (stmt) != NULL_TREE)
1846 callee = gimple_call_fn (stmt);
1847 histogram_value v = gimple_alloc_histogram_value (cfun, HIST_TYPE_INDIR_CALL,
1849 values->safe_push (v);
/* Find values inside STMT for which we want to measure histograms for
   string operations.  */
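
/* For example, for memcpy (dest, src, n) with a non-constant N, the code
   below requests a HIST_TYPE_TOPN_VALUES and a HIST_TYPE_AVERAGE
   histogram on N, and a HIST_TYPE_IOR histogram on DEST (used later to
   derive the expected alignment).  */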
1858 gimple_stringops_values_to_profile (gimple *gs, histogram_values *values)
1865 stmt = dyn_cast <gcall *> (gs);
1869 if (!gimple_call_builtin_p (gs, BUILT_IN_NORMAL))
1872 if (!interesting_stringop_to_profile_p (stmt, &size_arg))
1875 dest = gimple_call_arg (stmt, 0);
1876 blck_size = gimple_call_arg (stmt, size_arg);
1878 if (TREE_CODE (blck_size) != INTEGER_CST)
1880 values->safe_push (gimple_alloc_histogram_value (cfun,
1881 HIST_TYPE_TOPN_VALUES,
1883 values->safe_push (gimple_alloc_histogram_value (cfun, HIST_TYPE_AVERAGE,
1887 if (TREE_CODE (blck_size) != INTEGER_CST)
1888 values->safe_push (gimple_alloc_histogram_value (cfun, HIST_TYPE_IOR,
/* Find values inside STMT for which we want to measure histograms, and add
   them to the list VALUES.  */
1896 gimple_values_to_profile (gimple *stmt, histogram_values *values)
1898 gimple_divmod_values_to_profile (stmt, values);
1899 gimple_stringops_values_to_profile (stmt, values);
1900 gimple_indirect_call_to_profile (stmt, values);
1904 gimple_find_values_to_profile (histogram_values *values)
1907 gimple_stmt_iterator gsi;
1909 histogram_value hist = NULL;
1912 FOR_EACH_BB_FN (bb, cfun)
1913 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1914 gimple_values_to_profile (gsi_stmt (gsi), values);
1916 values->safe_push (gimple_alloc_histogram_value (cfun,
1917 HIST_TYPE_TIME_PROFILE));
1919 FOR_EACH_VEC_ELT (*values, i, hist)
1923 case HIST_TYPE_INTERVAL:
1924 hist->n_counters = hist->hdata.intvl.steps + 2;
1927 case HIST_TYPE_POW2:
1928 hist->n_counters = 2;
1931 case HIST_TYPE_TOPN_VALUES:
1932 case HIST_TYPE_INDIR_CALL:
1933 hist->n_counters = GCOV_TOPN_MEM_COUNTERS;
1936 case HIST_TYPE_TIME_PROFILE:
1937 hist->n_counters = 1;
1940 case HIST_TYPE_AVERAGE:
1941 hist->n_counters = 2;
1945 hist->n_counters = 1;
1951 if (dump_file && hist->hvalue.stmt != NULL)
1953 fprintf (dump_file, "Stmt ");
1954 print_gimple_stmt (dump_file, hist->hvalue.stmt, 0, TDF_SLIM);
1955 dump_histogram_value (dump_file, hist);