/* Perform branch target register load optimizations.
- Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ Free Software Foundation, Inc.
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 2, or (at your option) any later
+Software Foundation; either version 3, or (at your option) any later
version.
GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to the Free
-Software Foundation, 59 Temple Place - Suite 330, Boston, MA
-02111-1307, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
-#include "bitmap.h"
-#include "sbitmap.h"
#include "rtl.h"
#include "hard-reg-set.h"
-#include "basic-block.h"
#include "regs.h"
-#include "obstack.h"
#include "fibheap.h"
#include "output.h"
#include "target.h"
#include "function.h"
#include "except.h"
#include "tm_p.h"
+#include "toplev.h"
+#include "tree-pass.h"
+#include "recog.h"
+#include "df.h"
/* Target register optimizations - these are performed after reload. */
as appropriate. */
char other_btr_uses_before_def;
char other_btr_uses_after_use;
+ /* We set own_end when we have moved a definition into a dominator.
+ Thus, when a later combination removes this definition again, we know
+     to clear out btrs_live_at_end again.  */
+ char own_end;
bitmap live_range;
} *btr_def;
static int issue_rate;
-static int basic_block_freq (basic_block);
-static int insn_sets_btr_p (rtx, int, int *);
+static int basic_block_freq (const_basic_block);
+static int insn_sets_btr_p (const_rtx, int, int *);
static rtx *find_btr_use (rtx);
static int btr_referenced_p (rtx, rtx *);
static int find_btr_reference (rtx *, void *);
static void build_btr_def_use_webs (fibheap_t);
static int block_at_edge_of_live_range_p (int, btr_def);
static void clear_btr_from_live_range (btr_def def);
-static void add_btr_to_live_range (btr_def);
+static void add_btr_to_live_range (btr_def, int);
static void augment_live_range (bitmap, HARD_REG_SET *, basic_block,
- basic_block);
+ basic_block, int);
static int choose_btr (HARD_REG_SET);
static void combine_btr_defs (btr_def, HARD_REG_SET *);
static void btr_def_live_range (btr_def, HARD_REG_SET *);
static void move_btr_def (basic_block, int, btr_def, bitmap, HARD_REG_SET *);
static int migrate_btr_def (btr_def, int);
static void migrate_btr_defs (enum reg_class, int);
-static int can_move_up (basic_block, rtx, int);
-static void note_btr_set (rtx, rtx, void *);
+static int can_move_up (const_basic_block, const_rtx, int);
+static void note_btr_set (rtx, const_rtx, void *);
\f
/* The following code performs code motion of target load instructions
(instructions that set branch target registers), to move them
/* Return an estimate of the frequency of execution of block bb. */
static int
-basic_block_freq (basic_block bb)
+basic_block_freq (const_basic_block bb)
{
return bb->frequency;
}
find_btr_reference (rtx *px, void *preg)
{
rtx x;
- int regno, i;
if (px == preg)
return -1;
x = *px;
if (!REG_P (x))
return 0;
- regno = REGNO (x);
- for (i = hard_regno_nregs[regno][GET_MODE (x)] - 1; i >= 0; i--)
- if (TEST_HARD_REG_BIT (all_btrs, regno+i))
- {
- btr_reference_found = px;
- return 1;
- }
+ if (overlaps_hard_reg_set_p (all_btrs, GET_MODE (x), REGNO (x)))
+ {
+ btr_reference_found = px;
+ return 1;
+ }
return -1;
}
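+/* A minimal sketch of what the overlaps_hard_reg_set_p call above
+   computes (hypothetical helper, not part of this file): whether any
+   of the hard registers spanned by (MODE, REGNO) is in the set.  */
+static inline bool
+btr_overlaps_set_sketch_p (const HARD_REG_SET regs, enum machine_mode mode,
+                           unsigned int regno)
+{
+  /* One past the last hard register occupied by (MODE, REGNO).  */
+  unsigned int end_regno = regno + hard_regno_nregs[regno][mode];
+
+  while (regno < end_regno)
+    if (TEST_HARD_REG_BIT (regs, regno++))
+      return true;
+  return false;
+}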
If such a set is found and REGNO is nonzero, assign the register number
of the destination register to *REGNO. */
static int
-insn_sets_btr_p (rtx insn, int check_const, int *regno)
+insn_sets_btr_p (const_rtx insn, int check_const, int *regno)
{
rtx set;
- if (GET_CODE (insn) == INSN
+ if (NONJUMP_INSN_P (insn)
&& (set = single_set (insn)))
{
rtx dest = SET_DEST (set);
if (REG_P (dest)
&& TEST_HARD_REG_BIT (all_btrs, REGNO (dest)))
{
- if (btr_referenced_p (src, NULL))
- abort();
+ gcc_assert (!btr_referenced_p (src, NULL));
+
if (!check_const || CONSTANT_P (src))
{
if (regno)
if (!this_group)
{
- this_group = obstack_alloc (&migrate_btrl_obstack,
- sizeof (struct btr_def_group_s));
+ this_group = XOBNEW (&migrate_btrl_obstack, struct btr_def_group_s);
this_group->src = def_src;
this_group->members = NULL;
this_group->next = *all_btr_def_groups;
unsigned int dest_reg, int other_btr_uses_before_def,
btr_def_group *all_btr_def_groups)
{
- btr_def this
- = obstack_alloc (&migrate_btrl_obstack, sizeof (struct btr_def_s));
- this->bb = bb;
- this->luid = insn_luid;
- this->insn = insn;
- this->btr = dest_reg;
- this->cost = basic_block_freq (bb);
- this->has_ambiguous_use = 0;
- this->other_btr_uses_before_def = other_btr_uses_before_def;
- this->other_btr_uses_after_use = 0;
- this->next_this_bb = NULL;
- this->next_this_group = NULL;
- this->uses = NULL;
- this->live_range = NULL;
- find_btr_def_group (all_btr_def_groups, this);
-
- fibheap_insert (all_btr_defs, -this->cost, this);
+ btr_def this_def = XOBNEW (&migrate_btrl_obstack, struct btr_def_s);
+ this_def->bb = bb;
+ this_def->luid = insn_luid;
+ this_def->insn = insn;
+ this_def->btr = dest_reg;
+ this_def->cost = basic_block_freq (bb);
+ this_def->has_ambiguous_use = 0;
+ this_def->other_btr_uses_before_def = other_btr_uses_before_def;
+ this_def->other_btr_uses_after_use = 0;
+ this_def->next_this_bb = NULL;
+ this_def->next_this_group = NULL;
+ this_def->uses = NULL;
+ this_def->live_range = NULL;
+ find_btr_def_group (all_btr_def_groups, this_def);
+
+ fibheap_insert (all_btr_defs, -this_def->cost, this_def);
if (dump_file)
fprintf (dump_file,
"Found target reg definition: sets %u { bb %d, insn %d }%s priority %d\n",
- dest_reg, bb->index, INSN_UID (insn), (this->group ? "" : ":not const"),
- this->cost);
+ dest_reg, bb->index, INSN_UID (insn),
+ (this_def->group ? "" : ":not const"), this_def->cost);
- return this;
+ return this_def;
}
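+/* XOBNEW comes from libiberty.h and is a typed wrapper around
+   obstack_alloc, roughly:
+
+     #define XOBNEW(O, T) ((T *) obstack_alloc ((O), sizeof (T)))
+
+   so the allocations above are unchanged in behavior; they just gain
+   the cast that C++ compilation requires.  */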
/* Create a new target register user structure, for a use in block BB,
usep = NULL;
}
use = usep ? *usep : NULL_RTX;
- user = obstack_alloc (&migrate_btrl_obstack, sizeof (struct btr_user_s));
+ user = XOBNEW (&migrate_btrl_obstack, struct btr_user_s);
user->bb = bb;
user->luid = insn_luid;
user->insn = insn;
straightforward definitions. DATA points to information about the
current basic block that needs updating. */
static void
-note_btr_set (rtx dest, rtx set ATTRIBUTE_UNUSED, void *data)
+note_btr_set (rtx dest, const_rtx set ATTRIBUTE_UNUSED, void *data)
{
- defs_uses_info *info = data;
+ defs_uses_info *info = (defs_uses_info *) data;
int regno, end_regno;
if (!REG_P (dest))
return;
regno = REGNO (dest);
- end_regno = regno + hard_regno_nregs[regno][GET_MODE (dest)];
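+  /* END_HARD_REGNO (dest) is REGNO (dest) plus the number of hard
+     registers DEST occupies in its mode, so the loop below visits
+     exactly the registers written by this set.  */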
+ end_regno = END_HARD_REGNO (dest);
for (; regno < end_regno; regno++)
if (TEST_HARD_REG_BIT (all_btrs, regno))
{
btr_def_group all_btr_def_groups = NULL;
defs_uses_info info;
- sbitmap_vector_zero (bb_gen, n_basic_blocks);
- for (i = 0; i < n_basic_blocks; i++)
+ sbitmap_vector_zero (bb_gen, last_basic_block);
+ for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
{
basic_block bb = BASIC_BLOCK (i);
int reg;
CLEAR_HARD_REG_SET (info.btrs_written_in_block);
for (reg = first_btr; reg <= last_btr; reg++)
if (TEST_HARD_REG_BIT (all_btrs, reg)
- && REGNO_REG_SET_P (bb->global_live_at_start, reg))
+ && REGNO_REG_SET_P (df_get_live_in (bb), reg))
SET_HARD_REG_BIT (info.btrs_live_in_block, reg);
for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb));
SET_BIT (btr_defset[regno - first_btr], insn_uid);
note_other_use_this_block (regno, info.users_this_bb);
}
+ /* Check for the blockage emitted by expand_nl_goto_receiver. */
+ else if (cfun->has_nonlocal_label
+ && GET_CODE (PATTERN (insn)) == UNSPEC_VOLATILE)
+ {
+ btr_user user;
+
+ /* Do the equivalent of calling note_other_use_this_block
+ for every target register. */
+ for (user = info.users_this_bb; user != NULL;
+ user = user->next)
+ if (user->use)
+ user->other_use_this_block = 1;
+ IOR_HARD_REG_SET (info.btrs_written_in_block, all_btrs);
+ IOR_HARD_REG_SET (info.btrs_live_in_block, all_btrs);
+ sbitmap_zero (info.bb_gen);
+ }
else
{
if (btr_referenced_p (PATTERN (insn), NULL))
user->next = info.users_this_bb;
info.users_this_bb = user;
}
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
HARD_REG_SET *clobbered = &call_used_reg_set;
HARD_REG_SET call_saved;
COPY_HARD_REG_SET (btrs_live[i], info.btrs_live_in_block);
COPY_HARD_REG_SET (btrs_written[i], info.btrs_written_in_block);
- REG_SET_TO_HARD_REG_SET (btrs_live_at_end[i], bb->global_live_at_end);
+ REG_SET_TO_HARD_REG_SET (btrs_live_at_end[i], df_get_live_out (bb));
/* If this block ends in a jump insn, add any uses or even clobbers
of branch target registers that it might have. */
for (insn = BB_END (bb); insn != BB_HEAD (bb) && ! INSN_P (insn); )
IOR_HARD_REG_SET (btrs_live_at_end[i], tmp);
can_throw = 1;
}
- if (can_throw || GET_CODE (insn) == JUMP_INSN)
+ if (can_throw || JUMP_P (insn))
{
int regno;
/* For each basic block, form the set BB_KILL - the set
of definitions that the block kills. */
- sbitmap_vector_zero (bb_kill, n_basic_blocks);
- for (i = 0; i < n_basic_blocks; i++)
+ sbitmap_vector_zero (bb_kill, last_basic_block);
+ for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
{
for (regno = first_btr; regno <= last_btr; regno++)
if (TEST_HARD_REG_BIT (all_btrs, regno)
int changed;
sbitmap bb_in = sbitmap_alloc (max_uid);
- for (i = 0; i < n_basic_blocks; i++)
+ for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
sbitmap_copy (bb_out[i], bb_gen[i]);
changed = 1;
while (changed)
{
changed = 0;
- for (i = 0; i < n_basic_blocks; i++)
+ for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
{
sbitmap_union_of_preds (bb_in, bb_out, i);
changed |= sbitmap_union_of_diff_cg (bb_out[i], bb_gen[i],
/* Link uses to the uses lists of all of their reaching defs.
Count up the number of reaching defs of each use. */
- for (i = 0; i < n_basic_blocks; i++)
+ for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
{
basic_block bb = BASIC_BLOCK (i);
rtx insn;
{
/* Find all the reaching defs for this use. */
sbitmap reaching_defs_of_reg = sbitmap_alloc(max_uid);
- int uid;
+ unsigned int uid = 0;
+ sbitmap_iterator sbi;
if (user->use)
sbitmap_a_and_b (
reaching_defs,
btr_defset[reg - first_btr]);
}
- EXECUTE_IF_SET_IN_SBITMAP (reaching_defs_of_reg, 0, uid,
+ EXECUTE_IF_SET_IN_SBITMAP (reaching_defs_of_reg, 0, uid, sbi)
{
btr_def def = def_array[uid];
def->other_btr_uses_after_use = 1;
user->next = def->uses;
def->uses = user;
- });
+ }
sbitmap_free (reaching_defs_of_reg);
}
- if (GET_CODE (insn) == CALL_INSN)
+ if (CALL_P (insn))
{
int regno;
build_btr_def_use_webs (fibheap_t all_btr_defs)
{
const int max_uid = get_max_uid ();
- btr_def *def_array = xcalloc (max_uid, sizeof (btr_def));
- btr_user *use_array = xcalloc (max_uid, sizeof (btr_user));
+ btr_def *def_array = XCNEWVEC (btr_def, max_uid);
+ btr_user *use_array = XCNEWVEC (btr_user, max_uid);
sbitmap *btr_defset = sbitmap_vector_alloc (
(last_btr - first_btr) + 1, max_uid);
- sbitmap *bb_gen = sbitmap_vector_alloc (n_basic_blocks, max_uid);
- HARD_REG_SET *btrs_written = xcalloc (n_basic_blocks, sizeof (HARD_REG_SET));
+ sbitmap *bb_gen = sbitmap_vector_alloc (last_basic_block, max_uid);
+ HARD_REG_SET *btrs_written = XCNEWVEC (HARD_REG_SET, last_basic_block);
sbitmap *bb_kill;
sbitmap *bb_out;
compute_defs_uses_and_gen (all_btr_defs, def_array, use_array, btr_defset,
bb_gen, btrs_written);
- bb_kill = sbitmap_vector_alloc (n_basic_blocks, max_uid);
+ bb_kill = sbitmap_vector_alloc (last_basic_block, max_uid);
compute_kill (bb_kill, btr_defset, btrs_written);
free (btrs_written);
- bb_out = sbitmap_vector_alloc (n_basic_blocks, max_uid);
+ bb_out = sbitmap_vector_alloc (last_basic_block, max_uid);
compute_out (bb_out, bb_gen, bb_kill, max_uid);
sbitmap_vector_free (bb_gen);
static void
clear_btr_from_live_range (btr_def def)
{
- int bb;
-
- EXECUTE_IF_SET_IN_BITMAP
- (def->live_range, 0, bb,
- {
- if ((!def->other_btr_uses_before_def
- && !def->other_btr_uses_after_use)
- || !block_at_edge_of_live_range_p (bb, def))
- {
- CLEAR_HARD_REG_BIT (btrs_live[bb], def->btr);
- CLEAR_HARD_REG_BIT (btrs_live_at_end[bb], def->btr);
- if (dump_file)
- dump_btrs_live (bb);
- }
- });
+ unsigned bb;
+ bitmap_iterator bi;
+
+ EXECUTE_IF_SET_IN_BITMAP (def->live_range, 0, bb, bi)
+ {
+ if ((!def->other_btr_uses_before_def
+ && !def->other_btr_uses_after_use)
+ || !block_at_edge_of_live_range_p (bb, def))
+ {
+ CLEAR_HARD_REG_BIT (btrs_live[bb], def->btr);
+ CLEAR_HARD_REG_BIT (btrs_live_at_end[bb], def->btr);
+ if (dump_file)
+ dump_btrs_live (bb);
+ }
+ }
+ if (def->own_end)
+ CLEAR_HARD_REG_BIT (btrs_live_at_end[def->bb->index], def->btr);
}
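+/* The EXECUTE_IF_SET_IN_BITMAP form used above takes an explicit
+   bitmap_iterator and a statement body instead of a trailing code
+   argument; it expands to an ordinary for loop, roughly:
+
+     for (bmp_iter_set_init (&bi, map, min, &bit);
+          bmp_iter_set (&bi, &bit);
+          bmp_iter_next (&bi, &bit))
+       body;
+
+   with the names as in bitmap.h.  */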
/* We are adding the def/use web DEF. Add the target register used
in this web to the live set of all of the basic blocks that contain
- the live range of the web. */
+ the live range of the web.
+ If OWN_END is set, also show that the register is live from our
+ definitions at the end of the basic block where it is defined. */
static void
-add_btr_to_live_range (btr_def def)
+add_btr_to_live_range (btr_def def, int own_end)
{
- int bb;
- EXECUTE_IF_SET_IN_BITMAP
- (def->live_range, 0, bb,
- {
- SET_HARD_REG_BIT (btrs_live[bb], def->btr);
- SET_HARD_REG_BIT (btrs_live_at_end[bb], def->btr);
- if (dump_file)
- dump_btrs_live (bb);
- });
+ unsigned bb;
+ bitmap_iterator bi;
+
+ EXECUTE_IF_SET_IN_BITMAP (def->live_range, 0, bb, bi)
+ {
+ SET_HARD_REG_BIT (btrs_live[bb], def->btr);
+ SET_HARD_REG_BIT (btrs_live_at_end[bb], def->btr);
+ if (dump_file)
+ dump_btrs_live (bb);
+ }
+ if (own_end)
+ {
+ SET_HARD_REG_BIT (btrs_live_at_end[def->bb->index], def->btr);
+ def->own_end = 1;
+ }
}
/* Update a live range to contain the basic block NEW_BB, and all
   blocks on paths between the existing live range and NEW_BB.
Also add to the set BTRS_LIVE_IN_RANGE all target registers that
are live in the blocks that we add to the live range.
+ If FULL_RANGE is set, include the full live range of NEW_BB;
+ otherwise, if NEW_BB dominates HEAD_BB, only add registers that
+   are live at the end of NEW_BB for NEW_BB itself.
   It is a precondition that either NEW_BB dominates HEAD_BB, or
   HEAD_BB dominates NEW_BB.  This is used to speed up the
implementation of this function. */
static void
augment_live_range (bitmap live_range, HARD_REG_SET *btrs_live_in_range,
- basic_block head_bb, basic_block new_bb)
+ basic_block head_bb, basic_block new_bb, int full_range)
{
basic_block *worklist, *tos;
- tos = worklist = xmalloc (sizeof (basic_block) * (n_basic_blocks + 1));
+ tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
if (dominated_by_p (CDI_DOMINATORS, new_bb, head_bb))
- *tos++ = new_bb;
- else if (dominated_by_p (CDI_DOMINATORS, head_bb, new_bb))
+ {
+ if (new_bb == head_bb)
+ {
+ if (full_range)
+ IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[new_bb->index]);
+ free (tos);
+ return;
+ }
+ *tos++ = new_bb;
+ }
+ else
{
edge e;
+ edge_iterator ei;
int new_block = new_bb->index;
+ gcc_assert (dominated_by_p (CDI_DOMINATORS, head_bb, new_bb));
+
+ IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[head_bb->index]);
bitmap_set_bit (live_range, new_block);
- if (flag_btr_bb_exclusive)
+ /* A previous btr migration could have caused a register to be
+      live just at the end of new_block, which we need in full, so
+      use btrs_live_at_end even if full_range is set.  */
+ IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live_at_end[new_block]);
+ if (full_range)
IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[new_block]);
- else
- {
- IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live_at_end[new_block]);
- IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[head_bb->index]);
- }
if (dump_file)
{
fprintf (dump_file,
dump_hard_reg_set (*btrs_live_in_range);
fprintf (dump_file, "\n");
}
- for (e = head_bb->pred; e; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, head_bb->preds)
*tos++ = e->src;
}
- else
- abort();
while (tos != worklist)
{
if (!bitmap_bit_p (live_range, bb->index))
{
edge e;
+ edge_iterator ei;
bitmap_set_bit (live_range, bb->index);
IOR_HARD_REG_SET (*btrs_live_in_range,
btrs_live[bb->index]);
+ /* A previous btr migration could have caused a register to be
+ live just at the end of a block which we need in full. */
+ IOR_HARD_REG_SET (*btrs_live_in_range,
+ btrs_live_at_end[bb->index]);
if (dump_file)
{
fprintf (dump_file,
fprintf (dump_file, "\n");
}
- for (e = bb->pred; e != NULL; e = e->pred_next)
+ FOR_EACH_EDGE (e, ei, bb->preds)
{
basic_block pred = e->src;
if (!bitmap_bit_p (live_range, pred->index))
choose_btr (HARD_REG_SET used_btrs)
{
int i;
- GO_IF_HARD_REG_SUBSET (all_btrs, used_btrs, give_up);
- for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
- {
+ if (!hard_reg_set_subset_p (all_btrs, used_btrs))
+ for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
+ {
#ifdef REG_ALLOC_ORDER
- int regno = reg_alloc_order[i];
+ int regno = reg_alloc_order[i];
#else
- int regno = i;
+ int regno = i;
#endif
- if (TEST_HARD_REG_BIT (all_btrs, regno)
- && !TEST_HARD_REG_BIT (used_btrs, regno))
- return regno;
- }
-give_up:
+ if (TEST_HARD_REG_BIT (all_btrs, regno)
+ && !TEST_HARD_REG_BIT (used_btrs, regno))
+ return regno;
+ }
return -1;
}
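+/* hard_reg_set_subset_p (A, B) is the predicate form of the old
+   GO_IF_HARD_REG_SUBSET (A, B, LABEL) macro: it returns true iff every
+   register in A is also in B (elementwise A & ~B == 0), so the early
+   exit above becomes a plain if instead of a goto.  */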
{
btr_user user;
- def->live_range = BITMAP_XMALLOC ();
+ def->live_range = BITMAP_ALLOC (NULL);
bitmap_set_bit (def->live_range, def->bb->index);
- if (flag_btr_bb_exclusive)
- COPY_HARD_REG_SET (*btrs_live_in_range, btrs_live[def->bb->index]);
- else
- COPY_HARD_REG_SET (*btrs_live_in_range,
- btrs_live_at_end[def->bb->index]);
+ COPY_HARD_REG_SET (*btrs_live_in_range,
+ (flag_btr_bb_exclusive
+ ? btrs_live : btrs_live_at_end)[def->bb->index]);
for (user = def->uses; user != NULL; user = user->next)
augment_live_range (def->live_range, btrs_live_in_range,
- def->bb, user->bb);
+ def->bb, user->bb,
+ (flag_btr_bb_exclusive
+ || user->insn != BB_END (def->bb)
+ || !JUMP_P (user->insn)));
}
else
{
the set of target registers live over it, because migration
of other PT instructions may have affected it.
*/
- int bb;
- int def_bb = def->bb->index;
+ unsigned bb;
+ unsigned def_bb = flag_btr_bb_exclusive ? -1 : def->bb->index;
+ bitmap_iterator bi;
CLEAR_HARD_REG_SET (*btrs_live_in_range);
- if (flag_btr_bb_exclusive)
- EXECUTE_IF_SET_IN_BITMAP
- (def->live_range, 0, bb,
- {
- IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[bb]);
- });
- else
- EXECUTE_IF_SET_IN_BITMAP
- (def->live_range, 0, bb,
- {
- IOR_HARD_REG_SET (*btrs_live_in_range,
- (def_bb == bb
- ? btrs_live_at_end : btrs_live) [bb]);
- });
+ EXECUTE_IF_SET_IN_BITMAP (def->live_range, 0, bb, bi)
+ {
+ IOR_HARD_REG_SET (*btrs_live_in_range,
+ (def_bb == bb
+ ? btrs_live_at_end : btrs_live) [bb]);
+ }
}
if (!def->other_btr_uses_before_def &&
!def->other_btr_uses_after_use)
target registers live over the merged range. */
int btr;
HARD_REG_SET combined_btrs_live;
- bitmap combined_live_range = BITMAP_XMALLOC ();
+ bitmap combined_live_range = BITMAP_ALLOC (NULL);
btr_user user;
if (other_def->live_range == NULL)
for (user = other_def->uses; user != NULL; user = user->next)
augment_live_range (combined_live_range, &combined_btrs_live,
- def->bb, user->bb);
+ def->bb, user->bb,
+ (flag_btr_bb_exclusive
+ || user->insn != BB_END (def->bb)
+ || !JUMP_P (user->insn)));
btr = choose_btr (combined_btrs_live);
if (btr != -1)
clear_btr_from_live_range (other_def);
other_def->uses = NULL;
bitmap_copy (def->live_range, combined_live_range);
- if (other_def->other_btr_uses_after_use)
+ if (other_def->btr == btr && other_def->other_btr_uses_after_use)
def->other_btr_uses_after_use = 1;
COPY_HARD_REG_SET (*btrs_live_in_range, combined_btrs_live);
delete_insn (other_def->insn);
}
- BITMAP_XFREE (combined_live_range);
+ BITMAP_FREE (combined_live_range);
}
}
}
def->bb = new_def_bb;
def->luid = 0;
def->cost = basic_block_freq (new_def_bb);
- def->other_btr_uses_before_def
- = TEST_HARD_REG_BIT (btrs_live[b->index], btr) ? 1 : 0;
bitmap_copy (def->live_range, live_range);
combine_btr_defs (def, btrs_live_in_range);
btr = def->btr;
- add_btr_to_live_range (def);
- if (GET_CODE (insp) == CODE_LABEL)
+ def->other_btr_uses_before_def
+ = TEST_HARD_REG_BIT (btrs_live[b->index], btr) ? 1 : 0;
+ add_btr_to_live_range (def, 1);
+ if (LABEL_P (insp))
insp = NEXT_INSN (insp);
/* N.B.: insp is expected to be NOTE_INSN_BASIC_BLOCK now. Some
optimizations can result in insp being both first and last insn of
{
insp = BB_END (b);
for (insp = BB_END (b); ! INSN_P (insp); insp = PREV_INSN (insp))
- if (insp == BB_HEAD (b))
- abort ();
- if (GET_CODE (insp) == JUMP_INSN || can_throw_internal (insp))
+ gcc_assert (insp != BB_HEAD (b));
+
+ if (JUMP_P (insp) || can_throw_internal (insp))
insp = PREV_INSN (insp);
}
/* Insert target register initialization at head of basic block. */
def->insn = emit_insn_after (new_insn, insp);
- regs_ever_live[btr] = 1;
+ df_set_regs_ever_live (btr, true);
if (dump_file)
fprintf (dump_file, "New pt is insn %d, inserted after insn %d\n",
replacement_rtx = btr_rtx;
else
replacement_rtx = gen_rtx_REG (GET_MODE (user->use), btr);
- replace_rtx (user->insn, user->use, replacement_rtx);
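+      /* Unlike replace_rtx, validate_replace_rtx re-recognizes the
+         modified insn and backs the change out if the result is not a
+         valid instruction (hence the new "recog.h" include above).  */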
+ validate_replace_rtx (user->use, replacement_rtx, user->insn);
user->use = replacement_rtx;
}
}
/* We anticipate intra-block scheduling to be done. See if INSN could move
up within BB by N_INSNS. */
static int
-can_move_up (basic_block bb, rtx insn, int n_insns)
+can_move_up (const_basic_block bb, const_rtx insn, int n_insns)
{
while (insn != BB_HEAD (bb) && n_insns > 0)
{
HARD_REG_SET btrs_live_in_range;
int btr_used_near_def = 0;
int def_basic_block_freq;
- basic_block try;
+ basic_block attempt;
int give_up = 0;
int def_moved = 0;
btr_user user;
- int def_latency = 1;
+ int def_latency;
if (dump_file)
fprintf (dump_file,
}
btr_def_live_range (def, &btrs_live_in_range);
- live_range = BITMAP_XMALLOC ();
+ live_range = BITMAP_ALLOC (NULL);
bitmap_copy (live_range, def->live_range);
#ifdef INSN_SCHEDULING
- if (targetm.sched.use_dfa_pipeline_interface ())
- def_latency = insn_default_latency (def->insn);
- else
- def_latency = result_ready_cost (def->insn);
+ def_latency = insn_default_latency (def->insn) * issue_rate;
+#else
+ def_latency = issue_rate;
#endif
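+  /* Scaling cycles by ISSUE_RATE approximates the latency in
+     instruction slots, the unit in which insn luids are compared
+     below.  */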
- def_latency *= issue_rate;
-
for (user = def->uses; user != NULL; user = user->next)
{
if (user->bb == def->bb
def_basic_block_freq = basic_block_freq (def->bb);
- for (try = get_immediate_dominator (CDI_DOMINATORS, def->bb);
- !give_up && try && try != ENTRY_BLOCK_PTR && def->cost >= min_cost;
- try = get_immediate_dominator (CDI_DOMINATORS, try))
+ for (attempt = get_immediate_dominator (CDI_DOMINATORS, def->bb);
+ !give_up && attempt && attempt != ENTRY_BLOCK_PTR && def->cost >= min_cost;
+ attempt = get_immediate_dominator (CDI_DOMINATORS, attempt))
{
/* Try to move the instruction that sets the target register into
- basic block TRY. */
- int try_freq = basic_block_freq (try);
+ basic block ATTEMPT. */
+ int try_freq = basic_block_freq (attempt);
+ edge_iterator ei;
+ edge e;
+
+ /* If ATTEMPT has abnormal edges, skip it. */
+ FOR_EACH_EDGE (e, ei, attempt->succs)
+ if (e->flags & EDGE_COMPLEX)
+ break;
+ if (e)
+ continue;
if (dump_file)
- fprintf (dump_file, "trying block %d ...", try->index);
+ fprintf (dump_file, "trying block %d ...", attempt->index);
if (try_freq < def_basic_block_freq
|| (try_freq == def_basic_block_freq && btr_used_near_def))
{
int btr;
- augment_live_range (live_range, &btrs_live_in_range, def->bb, try);
+ augment_live_range (live_range, &btrs_live_in_range, def->bb, attempt,
+ flag_btr_bb_exclusive);
if (dump_file)
{
fprintf (dump_file, "Now btrs live in range are: ");
btr = choose_btr (btrs_live_in_range);
if (btr != -1)
{
- move_btr_def (try, btr, def, live_range, &btrs_live_in_range);
+ move_btr_def (attempt, btr, def, live_range, &btrs_live_in_range);
bitmap_copy(live_range, def->live_range);
btr_used_near_def = 0;
def_moved = 1;
if (dump_file)
fprintf (dump_file, "failed to move\n");
}
- BITMAP_XFREE (live_range);
+ BITMAP_FREE (live_range);
return !give_up;
}
{
int i;
- for (i = 0; i < n_basic_blocks; i++)
+ for (i = NUM_FIXED_BLOCKS; i < last_basic_block; i++)
{
basic_block bb = BASIC_BLOCK (i);
fprintf(dump_file,
CLEAR_HARD_REG_SET (all_btrs);
for (first_btr = -1, reg = 0; reg < FIRST_PSEUDO_REGISTER; reg++)
if (TEST_HARD_REG_BIT (reg_class_contents[(int) btr_class], reg)
- && (allow_callee_save || call_used_regs[reg] || regs_ever_live[reg]))
+ && (allow_callee_save || call_used_regs[reg]
+ || df_regs_ever_live_p (reg)))
{
SET_HARD_REG_BIT (all_btrs, reg);
last_btr = reg;
first_btr = reg;
}
- btrs_live = xcalloc (n_basic_blocks, sizeof (HARD_REG_SET));
- btrs_live_at_end = xcalloc (n_basic_blocks, sizeof (HARD_REG_SET));
+ btrs_live = XCNEWVEC (HARD_REG_SET, last_basic_block);
+ btrs_live_at_end = XCNEWVEC (HARD_REG_SET, last_basic_block);
build_btr_def_use_webs (all_btr_defs);
while (!fibheap_empty (all_btr_defs))
{
- btr_def def = fibheap_extract_min (all_btr_defs);
+ btr_def def = (btr_def) fibheap_extract_min (all_btr_defs);
int min_cost = -fibheap_min_key (all_btr_defs);
if (migrate_btr_def (def, min_cost))
{
}
}
else
- {
- if (def->live_range)
- BITMAP_XFREE (def->live_range);
- }
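+        /* BITMAP_FREE is safe on a NULL bitmap and clears the pointer
+           afterwards, so the explicit guard is no longer needed.  */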
+ BITMAP_FREE (def->live_range);
}
free (btrs_live);
fibheap_delete (all_btr_defs);
}
-void
+static void
branch_target_load_optimize (bool after_prologue_epilogue_gen)
{
- enum reg_class class = targetm.branch_target_register_class ();
- if (class != NO_REGS)
+ enum reg_class klass = targetm.branch_target_register_class ();
+ if (klass != NO_REGS)
{
/* Initialize issue_rate. */
if (targetm.sched.issue_rate)
else
issue_rate = 1;
- /* Build the CFG for migrate_btr_defs. */
+ if (!after_prologue_epilogue_gen)
+ {
+ /* Build the CFG for migrate_btr_defs. */
#if 1
- /* This may or may not be needed, depending on where we
- run this phase. */
- cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
+ /* This may or may not be needed, depending on where we
+ run this phase. */
+ cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);
#endif
+ }
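+      /* The df machinery now computes the liveness information that
+         life_analysis used to provide; it is read back above via
+         df_get_live_in and df_get_live_out.  */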
+ df_analyze ();
- life_analysis (NULL, 0);
/* Dominator info is also needed for migrate_btr_def. */
calculate_dominance_info (CDI_DOMINATORS);
- migrate_btr_defs (class,
+ migrate_btr_defs (klass,
(targetm.branch_target_register_callee_saved
(after_prologue_epilogue_gen)));
free_dominance_info (CDI_DOMINATORS);
+ }
+}
+\f
+static bool
+gate_handle_branch_target_load_optimize1 (void)
+{
+ return flag_branch_target_load_optimize;
+}
+
+
+static unsigned int
+rest_of_handle_branch_target_load_optimize1 (void)
+{
+ branch_target_load_optimize (epilogue_completed);
+ return 0;
+}
- update_life_info (NULL, UPDATE_LIFE_GLOBAL_RM_NOTES,
- PROP_DEATH_NOTES | PROP_REG_INFO);
+struct rtl_opt_pass pass_branch_target_load_optimize1 =
+{
+ {
+ RTL_PASS,
+ "btl1", /* name */
+ gate_handle_branch_target_load_optimize1, /* gate */
+ rest_of_handle_branch_target_load_optimize1, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_NONE, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func |
+ TODO_verify_rtl_sharing |
+ TODO_ggc_collect, /* todo_flags_finish */
+ }
+};
+
+static bool
+gate_handle_branch_target_load_optimize2 (void)
+{
+ return (optimize > 0 && flag_branch_target_load_optimize2);
+}
+
+
+static unsigned int
+rest_of_handle_branch_target_load_optimize2 (void)
+{
+ static int warned = 0;
+
+  /* Leave this a warning for now so that it is possible to experiment
+     with running this pass twice.  In a future release, we should either
+     make this an error, or use separate dump files.  */
+ if (flag_branch_target_load_optimize
+ && flag_branch_target_load_optimize2
+ && !warned)
+ {
+ warning (0, "branch target register load optimization is not intended "
+ "to be run twice");
+
+ warned = 1;
}
+
+ branch_target_load_optimize (epilogue_completed);
+ return 0;
}
+
+struct rtl_opt_pass pass_branch_target_load_optimize2 =
+{
+ {
+ RTL_PASS,
+ "btl2", /* name */
+ gate_handle_branch_target_load_optimize2, /* gate */
+ rest_of_handle_branch_target_load_optimize2, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_NONE, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func |
+ TODO_ggc_collect, /* todo_flags_finish */
+ }
+};