+2008-01-18 Kenneth Zadeck <zadeck@naturalbridge.com>
+ Steven Bosscher <stevenb.gcc@gmail.com>
+
+ PR rtl-optimization/26854
+ PR rtl-optimization/34400
+ * df-problems.c (df_live_scratch): New scratch bitmap.
+ (df_live_alloc): Allocate df_live_scratch when doing df_live.
+ (df_live_reset): Clear the proper bitmaps.
+ (df_live_bb_local_compute): Only process the artificial defs once
+ since the order is not important.
+ (df_live_init): Init the df_live sets only with the variables
+ found live by df_lr.
+ (df_live_transfer_function): Use the df_lr sets to prune the
+ df_live sets as they are being computed.
+ (df_live_free): Free df_live_scratch.
+
2008-01-18 Ian Lance Taylor <iant@google.com>
* common.opt: Add fmerge-debug-strings.
bitmap *out;
};
+/* Scratch var used by transfer functions. This is used to implement
+ an optimization to reduce the amount of space used to compute the
+ combined lr and live analysis. */
+static bitmap df_live_scratch;
/* Set basic block info. */
if (!df_live->block_pool)
df_live->block_pool = create_alloc_pool ("df_live_block pool",
sizeof (struct df_live_bb_info), 100);
+ if (!df_live_scratch)
+ df_live_scratch = BITMAP_ALLOC (NULL);
df_grow_bb_info (df_live);
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
+ struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
gcc_assert (bb_info);
bitmap_clear (bb_info->in);
bitmap_clear (bb_info->out);
struct df_ref **def_rec;
int luid = 0;
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
- }
-
FOR_BB_INSNS (bb, insn)
{
unsigned int uid = INSN_UID (insn);
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
+ bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
}
}
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
+ struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
- bitmap_copy (bb_info->out, bb_info->gen);
+ /* No register may reach a location where it is not used. Thus
+ we trim the df_live result to the places where it is used. */
+ bitmap_and (bb_info->out, bb_info->gen, bb_lr_info->out);
bitmap_clear (bb_info->in);
}
}
df_live_transfer_function (int bb_index)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
+ struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
bitmap in = bb_info->in;
bitmap out = bb_info->out;
bitmap gen = bb_info->gen;
bitmap kill = bb_info->kill;
- return bitmap_ior_and_compl (out, gen, in, kill);
+ /* We need to use a scratch set here so that the value returned from
+ this function invocation properly reflects if the sets changed in
+ a significant way; i.e. not just because the lr set was anded
+ in. */
+ bitmap_and (df_live_scratch, gen, bb_lr_info->out);
+ /* No register may reach a location where it is not used. Thus
+ we trim the df_live result to the places where it is used. */
+ bitmap_and_into (in, bb_lr_info->in);
+
+ return bitmap_ior_and_compl (out, df_live_scratch, in, kill);
}
free_alloc_pool (df_live->block_pool);
df_live->block_info_size = 0;
free (df_live->block_info);
+
+ if (df_live_scratch)
+ BITMAP_FREE (df_live_scratch);
}
BITMAP_FREE (df_live->out_of_date_transfer_functions);
free (df_live);