* sched-int.h (schedule_block): Adjust declaration.
* sched-rgn.c (bb_state_array, bb_state): New static variables.
(sched_rgn_init): Initialize them.
(sched_rgn_free): Free them.
(schedule_region): Save scheduling state for future blocks, and
pass such state to schedule_block.
* params.def (PARAM_SCHED_STATE_EDGE_PROB_CUTOFF): New.
* doc/invoke.texi (--param): Document it.
* haifa-sched.c (schedule_block): New arg init_state. Use it to
initialize state if nonnull. All callers changed.
Call advance_one_cycle after scheduling.
From-SVN: r192203
+2012-10-08 Bernd Schmidt <bernds@codesourcery.com>
+
+ * sched-int.h (schedule_block): Adjust declaration.
+ * sched-rgn.c (bb_state_array, bb_state): New static variables.
+ (sched_rgn_init): Initialize them.
+ (sched_rgn_free): Free them.
+ (schedule_region): Save scheduling state for future blocks, and
+ pass such state to schedule_block.
+ * params.def (PARAM_SCHED_STATE_EDGE_PROB_CUTOFF): New.
+ * doc/invoke.texi (--param): Document it.
+ * haifa-sched.c (schedule_block): New arg init_state. Use it to
+ initialize state if nonnull. All callers changed.
+ Call advance_one_cycle after scheduling.
+
2012-10-08 Georg-Johann Lay <avr@gjlay.de>
PR target/54854
speculative insns are scheduled.
The default value is 40.
+@item sched-state-edge-prob-cutoff
+The minimum probability an edge must have for the scheduler to save its
+state across it.
+The default value is 10.
+
@item sched-mem-true-dep-cost
Minimal distance (in CPU cycles) between store and load targeting same
memory locations. The default value is 1.
region. */
bool
-schedule_block (basic_block *target_bb)
+schedule_block (basic_block *target_bb, state_t init_state)
{
int i;
bool success = modulo_ii == 0;
if (sched_verbose)
dump_new_block_header (0, *target_bb, head, tail);
- state_reset (curr_state);
+ if (init_state == NULL)
+ state_reset (curr_state);
+ else
+ memcpy (curr_state, init_state, dfa_state_size);
/* Clear the ready list. */
ready.first = ready.veclen - 1;
if (ls.modulo_epilogue)
success = true;
end_schedule:
+ advance_one_cycle ();
perform_replacements_new_cycle ();
if (modulo_ii > 0)
{
"The minimal probability of speculation success (in percents), so that speculative insn will be scheduled.",
40, 0, 100)
+DEFPARAM(PARAM_SCHED_STATE_EDGE_PROB_CUTOFF,
+ "sched-state-edge-prob-cutoff",
+ "The minimum probability an edge must have for the scheduler to save its state across it.",
+ 10, 0, 100)
+
DEFPARAM(PARAM_SELSCHED_MAX_LOOKAHEAD,
"selsched-max-lookahead",
"The maximum size of the lookahead window of selective scheduling",
/* Make ready list big enough to hold all the instructions from the ebb. */
sched_extend_ready_list (rgn_n_insns);
- success = schedule_block (&target_bb);
+ success = schedule_block (&target_bb, NULL);
gcc_assert (success || modulo_scheduling);
/* Free ready list. */
extern int set_priorities (rtx, rtx);
extern void sched_setup_bb_reg_pressure_info (basic_block, rtx);
-extern bool schedule_block (basic_block *);
+extern bool schedule_block (basic_block *, state_t);
extern int cycle_issued_insns;
extern int issue_rate;
static basic_block *bblst_table;
static int bblst_size, bblst_last;
+static char *bb_state_array;
+static state_t *bb_state;
+
/* Target info declarations.
The block currently being scheduled is referred to as the "target" block,
curr_bb = first_bb;
if (dbg_cnt (sched_block))
{
- schedule_block (&curr_bb);
+ edge f;
+
+ schedule_block (&curr_bb, bb_state[first_bb->index]);
gcc_assert (EBB_FIRST_BB (bb) == first_bb);
sched_rgn_n_insns += sched_n_insns;
+ f = find_fallthru_edge (last_bb->succs);
+ if (f && f->probability * 100 / REG_BR_PROB_BASE >=
+ PARAM_VALUE (PARAM_SCHED_STATE_EDGE_PROB_CUTOFF))
+ {
+ memcpy (bb_state[f->dest->index], curr_state,
+ dfa_state_size);
+ if (sched_verbose >= 5)
+ fprintf (sched_dump, "saving state for edge %d->%d\n",
+ f->src->index, f->dest->index);
+ }
}
else
{
void
sched_rgn_init (bool single_blocks_p)
{
+ int i;
+
min_spec_prob = ((PARAM_VALUE (PARAM_MIN_SPEC_PROB) * REG_BR_PROB_BASE)
/ 100);
CONTAINING_RGN (ENTRY_BLOCK) = -1;
CONTAINING_RGN (EXIT_BLOCK) = -1;
+ if (!sel_sched_p ())
+ {
+ bb_state_array = (char *) xmalloc (last_basic_block * dfa_state_size);
+ bb_state = XNEWVEC (state_t, last_basic_block);
+ for (i = 0; i < last_basic_block; i++)
+ {
+ bb_state[i] = (state_t) (bb_state_array + i * dfa_state_size);
+
+ state_reset (bb_state[i]);
+ }
+ }
+ else
+ {
+ bb_state_array = NULL;
+ bb_state = NULL;
+ }
+
/* Compute regions for scheduling. */
if (single_blocks_p
|| n_basic_blocks == NUM_FIXED_BLOCKS + 1
void
sched_rgn_finish (void)
{
+ free (bb_state_array);
+ free (bb_state);
+
/* Reposition the prologue and epilogue notes in case we moved the
prologue/epilogue insns. */
if (reload_completed)