/* The tracer pass for the GNU compiler.
Contributed by Jan Hubicka, SuSE Labs.
- Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007
+ Adapted to work on GIMPLE instead of RTL by Robert Kidd, UIUC.
+ Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
Free Software Foundation, Inc.
This file is part of GCC.
#include "params.h"
#include "coverage.h"
#include "tree-pass.h"
+#include "tree-flow.h"
+#include "tree-inline.h"
static int count_insns (basic_block);
-static bool ignore_bb_p (basic_block);
-static bool better_p (edge, edge);
+static bool ignore_bb_p (const_basic_block);
+static bool better_p (const_edge, const_edge);
static edge find_best_successor (basic_block);
static edge find_best_predecessor (basic_block);
static int find_trace (basic_block, basic_block *);
static void tail_duplicate (void);
-static void layout_superblocks (void);
/* Minimal outgoing edge probability considered for superblock formation. */
static int probability_cutoff;
static int branch_ratio_cutoff;
-/* Return true if BB has been seen - it is connected to some trace
- already. */
+/* A bit BB->index is set if BB has already been seen, i.e. it is
+ connected to some trace already. */
+static sbitmap bb_seen;
-#define seen(bb) (bb->il.rtl->visited || bb->aux)
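+/* Mark basic block BB as seen.  The bitmap is sized for the CFG at
+ allocation time, so it must grow once tail duplication creates
+ blocks with larger indices.  */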
+static inline void
+mark_bb_seen (basic_block bb)
+{
+ unsigned int size = SBITMAP_SIZE_BYTES (bb_seen) * 8;
+
+ if ((unsigned int)bb->index >= size)
+ bb_seen = sbitmap_resize (bb_seen, size * 2, 0);
+
+ SET_BIT (bb_seen, bb->index);
+}
+
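+/* Return true if BB has already been connected to some trace.  */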
+static inline bool
+bb_seen_p (basic_block bb)
+{
+ return TEST_BIT (bb_seen, bb->index);
+}
/* Return true if we should ignore the basic block for purposes of tracing. */
static bool
-ignore_bb_p (basic_block bb)
+ignore_bb_p (const_basic_block bb)
{
if (bb->index < NUM_FIXED_BLOCKS)
return true;
- if (!maybe_hot_bb_p (bb))
+ if (optimize_bb_for_size_p (bb))
return true;
return false;
}
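+/* Return the estimated size of basic block BB in "instructions", the
+ unit in which the duplication limits are expressed.  */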
static int
count_insns (basic_block bb)
{
- rtx insn;
+ gimple_stmt_iterator gsi;
+ gimple stmt;
int n = 0;
- for (insn = BB_HEAD (bb);
- insn != NEXT_INSN (BB_END (bb));
- insn = NEXT_INSN (insn))
- if (active_insn_p (insn))
- n++;
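+ /* Sum the statement size estimates; eni_size_weights makes
+ estimate_num_insns approximate code size rather than execution
+ time.  */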
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ stmt = gsi_stmt (gsi);
+ n += estimate_num_insns (stmt, &eni_size_weights);
+ }
return n;
}
/* Return true if E1 is more frequent than E2. */
static bool
-better_p (edge e1, edge e2)
+better_p (const_edge e1, const_edge e2)
{
if (e1->count != e2->count)
return e1->count > e2->count;
while ((e = find_best_predecessor (bb)) != NULL)
{
basic_block bb2 = e->src;
- if (seen (bb2) || (e->flags & (EDGE_DFS_BACK | EDGE_COMPLEX))
+ if (bb_seen_p (bb2) || (e->flags & (EDGE_DFS_BACK | EDGE_COMPLEX))
|| find_best_successor (bb2) != e)
break;
if (dump_file)
while ((e = find_best_successor (bb)) != NULL)
{
bb = e->dest;
- if (seen (bb) || (e->flags & (EDGE_DFS_BACK | EDGE_COMPLEX))
+ if (bb_seen_p (bb) || (e->flags & (EDGE_DFS_BACK | EDGE_COMPLEX))
|| find_best_predecessor (bb) != e)
break;
if (dump_file)
int max_dup_insns;
basic_block bb;
+ /* Create an oversized sbitmap to reduce the chance that we need to
+ resize it. */
+ bb_seen = sbitmap_alloc (last_basic_block * 2);
+ sbitmap_zero (bb_seen);
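+ /* duplicate_block records each original/copy pair in these tables;
+ add_phi_args_after_copy consults them when filling in PHI
+ arguments.  */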
+ initialize_original_copy_tables ();
+
if (profile_info && flag_branch_probabilities)
probability_cutoff = PARAM_VALUE (TRACER_MIN_BRANCH_PROBABILITY_FEEDBACK);
else
while (traced_insns < cover_insns && nduplicated < max_dup_insns
&& !fibheap_empty (heap))
{
- basic_block bb = fibheap_extract_min (heap);
+ basic_block bb = (basic_block) fibheap_extract_min (heap);
int n, pos;
if (!bb)
break;
if (ignore_bb_p (bb))
continue;
- gcc_assert (!seen (bb));
+ gcc_assert (!bb_seen_p (bb));
n = find_trace (bb, trace);
&& can_duplicate_block_p (bb2))
{
edge e;
- basic_block old = bb2;
+ basic_block copy;
+
+ nduplicated += counts [bb2->index];
e = find_edge (bb, bb2);
+
+ copy = duplicate_block (bb2, e, bb);
+ flush_pending_stmts (e);
- nduplicated += counts [bb2->index];
- bb2 = duplicate_block (bb2, e, bb);
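+ /* duplicate_block does not fill in the PHI arguments on the copy's
+ outgoing edges; recover them from the original block.  */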
+ add_phi_args_after_copy (&copy, 1, NULL);
/* Reconsider the original copy of the block we've duplicated.
Removing its most common predecessor may allow it to become a
trace head.  */
- blocks[old->index] =
- fibheap_insert (heap, -old->frequency, old);
+ blocks[bb2->index] =
+ fibheap_insert (heap, -bb2->frequency, bb2);
if (dump_file)
fprintf (dump_file, "Duplicated %i as %i [%i]\n",
- old->index, bb2->index, bb2->frequency);
+ bb2->index, copy->index, copy->frequency);
+
+ bb2 = copy;
}
- bb->aux = bb2;
- bb2->il.rtl->visited = 1;
+ mark_bb_seen (bb2);
bb = bb2;
/* In case the trace became infrequent, stop duplicating. */
if (ignore_bb_p (bb))
fprintf (dump_file, "Duplicated %i insns (%i%%)\n", nduplicated,
nduplicated * 100 / ninsns);
+ free_original_copy_tables ();
+ sbitmap_free (bb_seen);
free (blocks);
free (trace);
free (counts);
fibheap_delete (heap);
}
-/* Connect the superblocks into linear sequence. At the moment we attempt to keep
- the original order as much as possible, but the algorithm may be made smarter
- later if needed. BB reordering pass should void most of the benefits of such
- change though. */
-
-static void
-layout_superblocks (void)
-{
- basic_block end = single_succ (ENTRY_BLOCK_PTR);
- basic_block bb = end->next_bb;
-
- while (bb != EXIT_BLOCK_PTR)
- {
- edge_iterator ei;
- edge e, best = NULL;
- while (end->aux)
- end = end->aux;
-
- FOR_EACH_EDGE (e, ei, end->succs)
- if (e->dest != EXIT_BLOCK_PTR
- && e->dest != single_succ (ENTRY_BLOCK_PTR)
- && !e->dest->il.rtl->visited
- && (!best || EDGE_FREQUENCY (e) > EDGE_FREQUENCY (best)))
- best = e;
-
- if (best)
- {
- end->aux = best->dest;
- best->dest->il.rtl->visited = 1;
- }
- else
- for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
- {
- if (!bb->il.rtl->visited)
- {
- end->aux = bb;
- bb->il.rtl->visited = 1;
- break;
- }
- }
- }
-}
-
/* Main entry point to this file. */
-void
+static unsigned int
tracer (void)
{
- gcc_assert (current_ir_type () == IR_RTL_CFGLAYOUT);
+ gcc_assert (current_ir_type () == IR_GIMPLE);
if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
- return;
+ return 0;
mark_dfs_back_edges ();
if (dump_file)
dump_flow_info (dump_file, dump_flags);
+
+ /* Trace formation is done on the fly inside tail_duplicate.  */
tail_duplicate ();
- layout_superblocks ();
- relink_block_chain (/*stay_in_cfglayout_mode=*/true);
-
+
+ /* FIXME: We really only need to do this when we know tail duplication
+ has altered the CFG. */
+ free_dominance_info (CDI_DOMINATORS);
if (dump_file)
dump_flow_info (dump_file, dump_flags);
- /* Merge basic blocks in duplicated traces. */
- cleanup_cfg (0);
+ return 0;
}
\f
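+/* The pass no longer lays out the superblocks itself, so tail
+ duplication only pays off when the RTL basic block reordering pass
+ will run; hence the check of flag_reorder_blocks.  */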
static bool
-gate_handle_tracer (void)
+gate_tracer (void)
{
- return (optimize > 0 && flag_tracer);
+ return (optimize > 0 && flag_tracer && flag_reorder_blocks);
}
-/* Run tracer. */
-static unsigned int
-rest_of_handle_tracer (void)
-{
- if (dump_file)
- dump_flow_info (dump_file, dump_flags);
- tracer ();
- return 0;
-}
-
-struct tree_opt_pass pass_tracer =
+struct gimple_opt_pass pass_tracer =
{
+ {
+ GIMPLE_PASS,
"tracer", /* name */
- gate_handle_tracer, /* gate */
- rest_of_handle_tracer, /* execute */
+ gate_tracer, /* gate */
+ tracer, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
TV_TRACER, /* tv_id */
0, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
- 'T' /* letter */
+ TODO_dump_func
+ | TODO_update_ssa
+ | TODO_verify_ssa /* todo_flags_finish */
+ }
};
-