/* Liveness for SSA trees.
- Copyright (C) 2003, 2004, 2005, 2007, 2008, 2009, 2010
- Free Software Foundation, Inc.
+ Copyright (C) 2003-2013 Free Software Foundation, Inc.
Contributed by Andrew MacLeod <amacleod@redhat.com>
This file is part of GCC.
#include "config.h"
#include "system.h"
#include "coretypes.h"
+#include "hash-table.h"
#include "tm.h"
#include "tree.h"
-#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "bitmap.h"
-#include "tree-flow.h"
-#include "tree-dump.h"
+#include "sbitmap.h"
+#include "basic-block.h"
+#include "tree-ssa-alias.h"
+#include "internal-fn.h"
+#include "gimple-expr.h"
+#include "is-a.h"
+#include "gimple.h"
+#include "gimple-iterator.h"
+#include "gimple-ssa.h"
+#include "tree-phinodes.h"
+#include "ssa-iterators.h"
+#include "stringpool.h"
+#include "tree-ssanames.h"
+#include "expr.h"
+#include "tree-dfa.h"
+#include "timevar.h"
+#include "dumpfile.h"
#include "tree-ssa-live.h"
#include "diagnostic-core.h"
-#include "toplev.h"
#include "debug.h"
#include "flags.h"
-#include "gimple.h"
#ifdef ENABLE_CHECKING
static void verify_live_on_entry (tree_live_info_p);
ssa_name or variable, and vice versa. */
+/* Hashtable helpers. */
+
+struct tree_int_map_hasher : typed_noop_remove <tree_int_map>
+{
+ typedef tree_int_map value_type;
+ typedef tree_int_map compare_type;
+ static inline hashval_t hash (const value_type *);
+ static inline bool equal (const value_type *, const compare_type *);
+};
+
+inline hashval_t
+tree_int_map_hasher::hash (const value_type *v)
+{
+ return tree_map_base_hash (v);
+}
+
+inline bool
+tree_int_map_hasher::equal (const value_type *v, const compare_type *c)
+{
+ return tree_int_map_eq (v, c);
+}
+
+
/* This routine will initialize the basevar fields of MAP. */
static void
var_map_base_init (var_map map)
{
- int x, num_part, num;
+ int x, num_part;
tree var;
- var_ann_t ann;
+ hash_table <tree_int_map_hasher> tree_to_index;
+ struct tree_int_map *m, *mapstorage;
- num = 0;
num_part = num_var_partitions (map);
+ tree_to_index.create (num_part);
+ /* We can have at most num_part entries in the hash tables, so it's
+ enough to allocate so many map elements once, saving some malloc
+ calls. */
+ mapstorage = m = XNEWVEC (struct tree_int_map, num_part);
/* If a base table already exists, clear it, otherwise create it. */
- if (map->partition_to_base_index != NULL)
- {
- free (map->partition_to_base_index);
- VEC_truncate (tree, map->basevars, 0);
- }
- else
- map->basevars = VEC_alloc (tree, heap, MAX (40, (num_part / 10)));
-
+ free (map->partition_to_base_index);
map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);
/* Build the base variable list, and point partitions at their bases. */
for (x = 0; x < num_part; x++)
{
+ struct tree_int_map **slot;
+ unsigned baseindex;
var = partition_to_var (map, x);
- if (TREE_CODE (var) == SSA_NAME)
- var = SSA_NAME_VAR (var);
- ann = var_ann (var);
+ if (SSA_NAME_VAR (var)
+ && (!VAR_P (SSA_NAME_VAR (var))
+ || !DECL_IGNORED_P (SSA_NAME_VAR (var))))
+ m->base.from = SSA_NAME_VAR (var);
+ else
+ /* This restricts what anonymous SSA names we can coalesce
+ as it restricts the sets we compute conflicts for.
+ Using TREE_TYPE to generate sets is the easiest as
+ type equivalency also holds for SSA names with the same
+ underlying decl.
+
+ Check gimple_can_coalesce_p when changing this code. */
+ m->base.from = (TYPE_CANONICAL (TREE_TYPE (var))
+ ? TYPE_CANONICAL (TREE_TYPE (var))
+ : TREE_TYPE (var));
/* If base variable hasn't been seen, set it up. */
- if (!ann->base_var_processed)
- {
- ann->base_var_processed = 1;
- VAR_ANN_BASE_INDEX (ann) = num++;
- VEC_safe_push (tree, heap, map->basevars, var);
+ slot = tree_to_index.find_slot (m, INSERT);
+ if (!*slot)
+ {
+ baseindex = m - mapstorage;
+ m->to = baseindex;
+ *slot = m;
+ m++;
}
- map->partition_to_base_index[x] = VAR_ANN_BASE_INDEX (ann);
+ else
+ baseindex = (*slot)->to;
+ map->partition_to_base_index[x] = baseindex;
}
- map->num_basevars = num;
-
- /* Now clear the processed bit. */
- for (x = 0; x < num; x++)
- {
- var = VEC_index (tree, map->basevars, x);
- var_ann (var)->base_var_processed = 0;
- }
+ map->num_basevars = m - mapstorage;
-#ifdef ENABLE_CHECKING
- for (x = 0; x < num_part; x++)
- {
- tree var2;
- var = SSA_NAME_VAR (partition_to_var (map, x));
- var2 = VEC_index (tree, map->basevars, basevar_index (map, x));
- gcc_assert (var == var2);
- }
-#endif
+ free (mapstorage);
+ tree_to_index.dispose ();
}
/* Free the basevar info if it is present. */
if (map->partition_to_base_index != NULL)
{
- VEC_free (tree, heap, map->basevars);
free (map->partition_to_base_index);
map->partition_to_base_index = NULL;
map->num_basevars = 0;
map->partition_size = size;
map->num_basevars = 0;
map->partition_to_base_index = NULL;
- map->basevars = NULL;
return map;
}
{
var_map_base_fini (map);
partition_delete (map->var_partition);
- if (map->partition_to_view)
- free (map->partition_to_view);
- if (map->view_to_partition)
- free (map->view_to_partition);
+ free (map->partition_to_view);
+ free (map->view_to_partition);
free (map);
}
for (x = 0; x < map->partition_size; x++)
{
tmp = partition_find (map->var_partition, x);
- if (ssa_name (tmp) != NULL_TREE && is_gimple_reg (ssa_name (tmp))
+ if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
&& (!has_zero_uses (ssa_name (tmp))
|| !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
bitmap_set_bit (used, tmp);
/* Create a partition view which includes all the used partitions in MAP. If
WANT_BASES is true, create the base variable map as well. */
-extern void
+void
partition_view_normal (var_map map, bool want_bases)
{
bitmap used;
the bitmap ONLY. If WANT_BASES is true, create the base variable map
as well. */
-extern void
+void
partition_view_bitmap (var_map map, bitmap only, bool want_bases)
{
bitmap used;
}
partition_view_fini (map, new_partitions);
- BITMAP_FREE (used);
if (want_bases)
var_map_base_init (map);
else
}
-static inline void mark_all_vars_used (tree *, void *data);
+static bitmap usedvars;
+
+/* Mark VAR as used, so that it'll be preserved during rtl expansion.
+ Returns true if VAR wasn't marked before. */
+
+static inline bool
+set_is_used (tree var)
+{
+ return bitmap_set_bit (usedvars, DECL_UID (var));
+}
+
+/* Return true if VAR is marked as used. */
+
+static inline bool
+is_used_p (tree var)
+{
+ return bitmap_bit_p (usedvars, DECL_UID (var));
+}
+
+static inline void mark_all_vars_used (tree *);
/* Helper function for mark_all_vars_used, called via walk_tree. */
static tree
-mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data)
+mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
tree t = *tp;
enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
tree b;
if (TREE_CODE (t) == SSA_NAME)
- t = SSA_NAME_VAR (t);
+ {
+ *walk_subtrees = 0;
+ t = SSA_NAME_VAR (t);
+ if (!t)
+ return NULL;
+ }
if (IS_EXPR_CODE_CLASS (c)
&& (b = TREE_BLOCK (t)) != NULL)
fields do not contain vars. */
if (TREE_CODE (t) == TARGET_MEM_REF)
{
- mark_all_vars_used (&TMR_BASE (t), data);
- mark_all_vars_used (&TMR_INDEX (t), data);
- mark_all_vars_used (&TMR_INDEX2 (t), data);
+ mark_all_vars_used (&TMR_BASE (t));
+ mark_all_vars_used (&TMR_INDEX (t));
+ mark_all_vars_used (&TMR_INDEX2 (t));
*walk_subtrees = 0;
return NULL;
}
eliminated as unused. */
if (TREE_CODE (t) == VAR_DECL)
{
- if (data != NULL && bitmap_clear_bit ((bitmap) data, DECL_UID (t)))
- mark_all_vars_used (&DECL_INITIAL (t), data);
- set_is_used (t);
+ /* When a global var becomes used for the first time also walk its
+ initializer (non global ones don't have any). */
+ if (set_is_used (t) && is_global_var (t))
+ mark_all_vars_used (&DECL_INITIAL (t));
}
/* remove_unused_scope_block_p requires information about labels
which are not DECL_IGNORED_P to tell if they might be used in the IL. */
- if (TREE_CODE (t) == LABEL_DECL)
+ else if (TREE_CODE (t) == LABEL_DECL)
/* Although the TREE_USED values that the frontend uses would be
acceptable (albeit slightly over-conservative) for our purposes,
init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
{
tree *t, *next;
bool unused = !TREE_USED (scope);
- var_ann_t ann;
int nsubblocks = 0;
for (t = &BLOCK_VARS (scope); *t; t = next)
info about optimized-out variables in the scope blocks.
Exception are the scope blocks not containing any instructions
at all so user can't get into the scopes at first place. */
- else if ((ann = var_ann (*t)) != NULL
- && ann->used)
+ else if (is_used_p (*t))
unused = false;
else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
/* For labels that are still used in the IL, the decision to
can be considered dead. We only want to keep around blocks user can
breakpoint into and ask about value of optimized out variables.
- Similarly we need to keep around types at least until all variables of
- all nested blocks are gone. We track no information on whether given
- type is used or not. */
+ Similarly we need to keep around types at least until all
+ variables of all nested blocks are gone. We track no
+ information on whether given type is used or not, so we have
+ to keep them even when not emitting debug information,
+ otherwise we may end up remapping variables and their (local)
+ types in different orders depending on whether debug
+ information is being generated. */
- else if (debug_info_level == DINFO_LEVEL_NORMAL
+ else if (TREE_CODE (*t) == TYPE_DECL
+ || debug_info_level == DINFO_LEVEL_NORMAL
|| debug_info_level == DINFO_LEVEL_VERBOSE)
;
else
eliminated. */
else if (!nsubblocks)
;
- /* For terse debug info we can eliminate info on unused variables. */
- else if (debug_info_level == DINFO_LEVEL_NONE
- || debug_info_level == DINFO_LEVEL_TERSE)
+ /* When not generating debug info we can eliminate info on unused
+ variables. */
+ else if (debug_info_level == DINFO_LEVEL_NONE)
{
- /* Even for -g0/-g1 don't prune outer scopes from artificial
+ /* Even for -g0 don't prune outer scopes from artificial
functions, otherwise diagnostics using tree_nonartificial_location
will not be emitted properly. */
if (inlined_function_outer_scope_p (scope))
else
/* Verfify that only blocks with source location set
are entry points to the inlined functions. */
- gcc_assert (BLOCK_SOURCE_LOCATION (scope) == UNKNOWN_LOCATION);
+ gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
+ == UNKNOWN_LOCATION);
TREE_USED (scope) = !unused;
return unused;
eliminated during the tree->rtl conversion process. */
static inline void
-mark_all_vars_used (tree *expr_p, void *data)
+mark_all_vars_used (tree *expr_p)
{
- walk_tree (expr_p, mark_all_vars_used_1, data, NULL);
+ walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
+}
+
+/* Helper function for clear_unused_block_pointer, called via walk_tree. */
+
+static tree
+clear_unused_block_pointer_1 (tree *tp, int *, void *)
+{
+ if (EXPR_P (*tp) && TREE_BLOCK (*tp)
+ && !TREE_USED (TREE_BLOCK (*tp)))
+ TREE_SET_BLOCK (*tp, NULL);
+ return NULL_TREE;
}
+/* Set all block pointers in debug or clobber stmts to NULL if the block
+ is unused, so that they will not be streamed out. */
+
+static void
+clear_unused_block_pointer (void)
+{
+ basic_block bb;
+ gimple_stmt_iterator gsi;
+
+ FOR_EACH_BB (bb)
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ unsigned i;
+ tree b;
+ gimple stmt = gsi_stmt (gsi);
+
+ if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
+ continue;
+ b = gimple_block (stmt);
+ if (b && !TREE_USED (b))
+ gimple_set_block (stmt, NULL);
+ for (i = 0; i < gimple_num_ops (stmt); i++)
+ walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
+ NULL, NULL);
+ }
+}
/* Dump scope blocks starting at SCOPE to FILE. INDENT is the
indentation level and FLAGS is as in print_generic_expr. */
fprintf (file, "\n%*s{ Scope block #%i%s%s",indent, "" , BLOCK_NUMBER (scope),
TREE_USED (scope) ? "" : " (unused)",
BLOCK_ABSTRACT (scope) ? " (abstract)": "");
- if (BLOCK_SOURCE_LOCATION (scope) != UNKNOWN_LOCATION)
+ if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
{
expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
fprintf (file, " %s:%i", s.file, s.line);
fprintf (file, " \n");
for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
{
- bool used = false;
- var_ann_t ann;
-
- if ((ann = var_ann (var))
- && ann->used)
- used = true;
-
- fprintf (file, "%*s",indent, "");
+ fprintf (file, "%*s", indent, "");
print_generic_decl (file, var, flags);
- fprintf (file, "%s\n", used ? "" : " (unused)");
+ fprintf (file, "\n");
}
for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
{
remove_unused_locals (void)
{
basic_block bb;
- tree var, t;
- referenced_var_iterator rvi;
- var_ann_t ann;
- bitmap global_unused_vars = NULL;
+ tree var;
unsigned srcidx, dstidx, num;
+ bool have_local_clobbers = false;
/* Removing declarations from lexical blocks when not optimizing is
not only a waste of time, it actually causes differences in stack
if (!optimize)
return;
+ timevar_push (TV_REMOVE_UNUSED);
+
mark_scope_block_unused (DECL_INITIAL (current_function_decl));
- /* Assume all locals are unused. */
- FOR_EACH_REFERENCED_VAR (t, rvi)
- var_ann (t)->used = false;
+ usedvars = BITMAP_ALLOC (NULL);
/* Walk the CFG marking all referenced symbols. */
FOR_EACH_BB (bb)
if (is_gimple_debug (stmt))
continue;
+ if (gimple_clobber_p (stmt))
+ {
+ have_local_clobbers = true;
+ continue;
+ }
+
if (b)
TREE_USED (b) = true;
for (i = 0; i < gimple_num_ops (stmt); i++)
- mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i), NULL);
+ mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
}
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
tree def;
gimple phi = gsi_stmt (gsi);
- /* No point processing globals. */
- if (is_global_var (SSA_NAME_VAR (gimple_phi_result (phi))))
+ if (virtual_operand_p (gimple_phi_result (phi)))
continue;
def = gimple_phi_result (phi);
- mark_all_vars_used (&def, NULL);
+ mark_all_vars_used (&def);
FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
{
tree arg = USE_FROM_PTR (arg_p);
- mark_all_vars_used (&arg, NULL);
+ int index = PHI_ARG_INDEX_FROM_USE (arg_p);
+ tree block =
+ LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
+ if (block != NULL)
+ TREE_USED (block) = true;
+ mark_all_vars_used (&arg);
}
}
FOR_EACH_EDGE (e, ei, bb->succs)
- if (e->goto_locus)
- TREE_USED (e->goto_block) = true;
+ if (LOCATION_BLOCK (e->goto_locus) != NULL)
+ TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
}
+ /* We do a two-pass approach about the out-of-scope clobbers. We want
+ to remove them if they are the only references to a local variable,
+ but we want to retain them when there's any other. So the first pass
+ ignores them, and the second pass (if there were any) tries to remove
+ them. */
+ if (have_local_clobbers)
+ FOR_EACH_BB (bb)
+ {
+ gimple_stmt_iterator gsi;
+
+ for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
+ {
+ gimple stmt = gsi_stmt (gsi);
+ tree b = gimple_block (stmt);
+
+ if (gimple_clobber_p (stmt))
+ {
+ tree lhs = gimple_assign_lhs (stmt);
+ tree base = get_base_address (lhs);
+ /* Remove clobbers referencing unused vars, or clobbers
+ with MEM_REF lhs referencing uninitialized pointers. */
+ if ((TREE_CODE (base) == VAR_DECL && !is_used_p (base))
+ || (TREE_CODE (lhs) == MEM_REF
+ && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
+ && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
+ && (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
+ != PARM_DECL)))
+ {
+ unlink_stmt_vdef (stmt);
+ gsi_remove (&gsi, true);
+ release_defs (stmt);
+ continue;
+ }
+ if (b)
+ TREE_USED (b) = true;
+ }
+ gsi_next (&gsi);
+ }
+ }
+
cfun->has_local_explicit_reg_vars = false;
- /* Remove unmarked local vars from local_decls. */
- num = VEC_length (tree, cfun->local_decls);
+ /* Remove unmarked local and global vars from local_decls. */
+ num = vec_safe_length (cfun->local_decls);
for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
{
- var = VEC_index (tree, cfun->local_decls, srcidx);
- if (TREE_CODE (var) != FUNCTION_DECL
- && (!(ann = var_ann (var))
- || !ann->used))
+ var = (*cfun->local_decls)[srcidx];
+ if (TREE_CODE (var) == VAR_DECL)
{
- if (is_global_var (var))
+ if (!is_used_p (var))
{
- if (global_unused_vars == NULL)
- global_unused_vars = BITMAP_ALLOC (NULL);
- bitmap_set_bit (global_unused_vars, DECL_UID (var));
+ tree def;
+ if (cfun->nonlocal_goto_save_area
+ && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
+ cfun->nonlocal_goto_save_area = NULL;
+ /* Release any default def associated with var. */
+ if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
+ {
+ set_ssa_default_def (cfun, var, NULL_TREE);
+ release_ssa_name (def);
+ }
+ continue;
}
- else
- continue;
}
- else if (TREE_CODE (var) == VAR_DECL
- && DECL_HARD_REGISTER (var)
- && !is_global_var (var))
+ if (TREE_CODE (var) == VAR_DECL
+ && DECL_HARD_REGISTER (var)
+ && !is_global_var (var))
cfun->has_local_explicit_reg_vars = true;
if (srcidx != dstidx)
- VEC_replace (tree, cfun->local_decls, dstidx, var);
+ (*cfun->local_decls)[dstidx] = var;
dstidx++;
}
if (dstidx != num)
- VEC_truncate (tree, cfun->local_decls, dstidx);
-
- /* Remove unmarked global vars from local_decls. */
- if (global_unused_vars != NULL)
{
- tree var;
- unsigned ix;
- FOR_EACH_LOCAL_DECL (cfun, ix, var)
- if (TREE_CODE (var) == VAR_DECL
- && is_global_var (var)
- && (ann = var_ann (var)) != NULL
- && ann->used)
- mark_all_vars_used (&DECL_INITIAL (var), global_unused_vars);
-
- num = VEC_length (tree, cfun->local_decls);
- for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
- {
- var = VEC_index (tree, cfun->local_decls, srcidx);
- if (TREE_CODE (var) == VAR_DECL
- && is_global_var (var)
- && bitmap_bit_p (global_unused_vars, DECL_UID (var)))
- continue;
-
- if (srcidx != dstidx)
- VEC_replace (tree, cfun->local_decls, dstidx, var);
- dstidx++;
- }
- if (dstidx != num)
- VEC_truncate (tree, cfun->local_decls, dstidx);
- BITMAP_FREE (global_unused_vars);
+ statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
+ cfun->local_decls->truncate (dstidx);
}
- /* Remove unused variables from REFERENCED_VARs. */
- FOR_EACH_REFERENCED_VAR (t, rvi)
- if (!is_global_var (t)
- && TREE_CODE (t) != PARM_DECL
- && TREE_CODE (t) != RESULT_DECL
- && !(ann = var_ann (t))->used
- && !ann->is_heapvar)
- remove_referenced_var (t);
remove_unused_scope_block_p (DECL_INITIAL (current_function_decl));
+ clear_unused_block_pointer ();
+
+ BITMAP_FREE (usedvars);
+
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Scope blocks after cleanups:\n");
dump_scope_blocks (dump_file, dump_flags);
}
+
+ timevar_pop (TV_REMOVE_UNUSED);
}
+/* Obstack for global liveness info bitmaps.  We don't want to put these
+ on the default obstack because these bitmaps can grow quite large and
+ we'll hold on to all that memory until the end of the compiler run.
+ As a bonus, delete_tree_live_info can destroy all the bitmaps by just
+ releasing the whole obstack. */
+static bitmap_obstack liveness_bitmap_obstack;
/* Allocate and return a new live range information object base on MAP. */
new_tree_live_info (var_map map)
{
tree_live_info_p live;
- unsigned x;
+ basic_block bb;
- live = (tree_live_info_p) xmalloc (sizeof (struct tree_live_info_d));
+ live = XNEW (struct tree_live_info_d);
live->map = map;
live->num_blocks = last_basic_block;
- live->livein = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap));
- for (x = 0; x < (unsigned)last_basic_block; x++)
- live->livein[x] = BITMAP_ALLOC (NULL);
+ live->livein = XNEWVEC (bitmap_head, last_basic_block);
+ FOR_EACH_BB (bb)
+ bitmap_initialize (&live->livein[bb->index], &liveness_bitmap_obstack);
- live->liveout = (bitmap *)xmalloc (last_basic_block * sizeof (bitmap));
- for (x = 0; x < (unsigned)last_basic_block; x++)
- live->liveout[x] = BITMAP_ALLOC (NULL);
+ live->liveout = XNEWVEC (bitmap_head, last_basic_block);
+ FOR_EACH_BB (bb)
+ bitmap_initialize (&live->liveout[bb->index], &liveness_bitmap_obstack);
live->work_stack = XNEWVEC (int, last_basic_block);
live->stack_top = live->work_stack;
- live->global = BITMAP_ALLOC (NULL);
+ live->global = BITMAP_ALLOC (&liveness_bitmap_obstack);
return live;
}
void
delete_tree_live_info (tree_live_info_p live)
{
- int x;
-
- BITMAP_FREE (live->global);
+ bitmap_obstack_release (&liveness_bitmap_obstack);
free (live->work_stack);
-
- for (x = live->num_blocks - 1; x >= 0; x--)
- BITMAP_FREE (live->liveout[x]);
free (live->liveout);
-
- for (x = live->num_blocks - 1; x >= 0; x--)
- BITMAP_FREE (live->livein[x]);
free (live->livein);
-
free (live);
}
edge_iterator ei;
basic_block pred_bb;
bitmap loe;
- gcc_assert (!TEST_BIT (visited, bb->index));
- SET_BIT (visited, bb->index);
+ gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
+ bitmap_set_bit (visited, bb->index);
+
loe = live_on_entry (live, bb);
FOR_EACH_EDGE (e, ei, bb->preds)
{
pred_bb = e->src;
- if (pred_bb == ENTRY_BLOCK_PTR)
+ if (pred_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
continue;
/* TMP is variables live-on-entry from BB that aren't defined in the
predecessor block. This should be the live on entry vars to pred.
Note that liveout is the DEFs in a block while live on entry is
being calculated. */
- bitmap_and_compl (tmp, loe, live->liveout[pred_bb->index]);
+ bitmap_and_compl (tmp, loe, &live->liveout[pred_bb->index]);
/* Add these bits to live-on-entry for the pred. if there are any
changes, and pred_bb has been visited already, add it to the
revisit stack. */
change = bitmap_ior_into (live_on_entry (live, pred_bb), tmp);
- if (TEST_BIT (visited, pred_bb->index) && change)
+ if (bitmap_bit_p (visited, pred_bb->index) && change)
{
- RESET_BIT (visited, pred_bb->index);
+ bitmap_clear_bit (visited, pred_bb->index);
*(live->stack_top)++ = pred_bb->index;
}
}
unsigned b;
basic_block bb;
sbitmap visited = sbitmap_alloc (last_basic_block + 1);
- bitmap tmp = BITMAP_ALLOC (NULL);
+ bitmap tmp = BITMAP_ALLOC (&liveness_bitmap_obstack);
- sbitmap_zero (visited);
+ bitmap_clear (visited);
/* Visit all the blocks in reverse order and propagate live on entry values
into the predecessors blocks. */
def_bb = gimple_bb (stmt);
/* Mark defs in liveout bitmap temporarily. */
if (def_bb)
- bitmap_set_bit (live->liveout[def_bb->index], p);
+ bitmap_set_bit (&live->liveout[def_bb->index], p);
}
else
- def_bb = ENTRY_BLOCK_PTR;
+ def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
/* Visit each use of SSA_NAME and if it isn't in the same block as the def,
add it to the list of live on entry blocks. */
defined in that block, or whether its live on entry. */
int index = PHI_ARG_INDEX_FROM_USE (use);
edge e = gimple_phi_arg_edge (use_stmt, index);
- if (e->src != ENTRY_BLOCK_PTR)
+ if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
{
if (e->src != def_bb)
add_block = e->src;
if (add_block)
{
global = true;
- bitmap_set_bit (live->livein[add_block->index], p);
+ bitmap_set_bit (&live->livein[add_block->index], p);
}
}
/* live on entry calculations used liveout vectors for defs, clear them. */
FOR_EACH_BB (bb)
- bitmap_clear (liveinfo->liveout[bb->index]);
+ bitmap_clear (&liveinfo->liveout[bb->index]);
/* Set all the live-on-exit bits for uses in PHIs. */
FOR_EACH_BB (bb)
if (p == NO_PARTITION)
continue;
e = gimple_phi_arg_edge (phi, i);
- if (e->src != ENTRY_BLOCK_PTR)
- bitmap_set_bit (liveinfo->liveout[e->src->index], p);
+ if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
+ bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
}
}
/* Add each successors live on entry to this bock live on exit. */
FOR_EACH_EDGE (e, ei, bb->succs)
- if (e->dest != EXIT_BLOCK_PTR)
- bitmap_ior_into (liveinfo->liveout[bb->index],
+ if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
+ bitmap_ior_into (&liveinfo->liveout[bb->index],
live_on_entry (liveinfo, e->dest));
}
}
unsigned i;
tree_live_info_p live;
+ bitmap_obstack_initialize (&liveness_bitmap_obstack);
live = new_tree_live_info (map);
for (i = 0; i < num_var_partitions (map); i++)
{
else
p = x;
- if (ssa_name (p) == NULL_TREE)
+ if (ssa_name (p) == NULL_TREE
+ || virtual_operand_p (ssa_name (p)))
continue;
t = 0;
{
if (t++ == 0)
{
- fprintf(f, "Partition %d (", x);
+ fprintf (f, "Partition %d (", x);
print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
fprintf (f, " - ");
}
}
+/* Generic dump for the above. */
+
+DEBUG_FUNCTION void
+debug (_var_map &ref)
+{
+ dump_var_map (stderr, &ref);
+}
+
+DEBUG_FUNCTION void
+debug (_var_map *ptr)
+{
+ if (ptr)
+ debug (*ptr);
+ else
+ fprintf (stderr, "<nil>\n");
+}
+
+
/* Output live range info LIVE to file F, controlled by FLAG. */
void
FOR_EACH_BB (bb)
{
fprintf (f, "\nLive on entry to BB%d : ", bb->index);
- EXECUTE_IF_SET_IN_BITMAP (live->livein[bb->index], 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
{
print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
fprintf (f, " ");
FOR_EACH_BB (bb)
{
fprintf (f, "\nLive on exit from BB%d : ", bb->index);
- EXECUTE_IF_SET_IN_BITMAP (live->liveout[bb->index], 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
{
print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
fprintf (f, " ");
}
}
-struct GTY(()) numbered_tree_d
-{
- tree t;
- int num;
-};
-typedef struct numbered_tree_d numbered_tree;
-
-DEF_VEC_O (numbered_tree);
-DEF_VEC_ALLOC_O (numbered_tree, heap);
-/* Compare two declarations references by their DECL_UID / sequence number.
- Called via qsort. */
+/* Generic dump for the above. */
-static int
-compare_decls_by_uid (const void *pa, const void *pb)
+DEBUG_FUNCTION void
+debug (tree_live_info_d &ref)
{
- const numbered_tree *nt_a = ((const numbered_tree *)pa);
- const numbered_tree *nt_b = ((const numbered_tree *)pb);
-
- if (DECL_UID (nt_a->t) != DECL_UID (nt_b->t))
- return DECL_UID (nt_a->t) - DECL_UID (nt_b->t);
- return nt_a->num - nt_b->num;
+ dump_live_info (stderr, &ref, 0);
}
-/* Called via walk_gimple_stmt / walk_gimple_op by dump_enumerated_decls. */
-static tree
-dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
+DEBUG_FUNCTION void
+debug (tree_live_info_d *ptr)
{
- struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
- VEC (numbered_tree, heap) **list = (VEC (numbered_tree, heap) **) &wi->info;
- numbered_tree nt;
-
- if (!DECL_P (*tp))
- return NULL_TREE;
- nt.t = *tp;
- nt.num = VEC_length (numbered_tree, *list);
- VEC_safe_push (numbered_tree, heap, *list, &nt);
- *walk_subtrees = 0;
- return NULL_TREE;
+ if (ptr)
+ debug (*ptr);
+ else
+ fprintf (stderr, "<nil>\n");
}
-/* Find all the declarations used by the current function, sort them by uid,
- and emit the sorted list. Each declaration is tagged with a sequence
- number indicating when it was found during statement / tree walking,
- so that TDF_NOUID comparisons of anonymous declarations are still
- meaningful. Where a declaration was encountered more than once, we
- emit only the sequence number of the first encounter.
- FILE is the dump file where to output the list and FLAGS is as in
- print_generic_expr. */
-void
-dump_enumerated_decls (FILE *file, int flags)
-{
- basic_block bb;
- struct walk_stmt_info wi;
- VEC (numbered_tree, heap) *decl_list = VEC_alloc (numbered_tree, heap, 40);
-
- memset (&wi, '\0', sizeof (wi));
- wi.info = (void*) decl_list;
- FOR_EACH_BB (bb)
- {
- gimple_stmt_iterator gsi;
-
- for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
- if (!is_gimple_debug (gsi_stmt (gsi)))
- walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
- }
- decl_list = (VEC (numbered_tree, heap) *) wi.info;
- VEC_qsort (numbered_tree, decl_list, compare_decls_by_uid);
- if (VEC_length (numbered_tree, decl_list))
- {
- unsigned ix;
- numbered_tree *ntp;
- tree last = NULL_TREE;
-
- fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
- current_function_name ());
- FOR_EACH_VEC_ELT (numbered_tree, decl_list, ix, ntp)
- {
- if (ntp->t == last)
- continue;
- fprintf (file, "%d: ", ntp->num);
- print_generic_decl (file, ntp->t, flags);
- fprintf (file, "\n");
- last = ntp->t;
- }
- }
- VEC_free (numbered_tree, heap, decl_list);
-}
#ifdef ENABLE_CHECKING
/* Verify that SSA_VAR is a non-virtual SSA_NAME. */
register_ssa_partition_check (tree ssa_var)
{
gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
- if (!is_gimple_reg (SSA_NAME_VAR (ssa_var)))
+ if (virtual_operand_p (ssa_var))
{
fprintf (stderr, "Illegally registering a virtual SSA name :");
print_generic_expr (stderr, ssa_var, TDF_SLIM);
/* Check for live on entry partitions and report those with a DEF in
the program. This will typically mean an optimization has done
something wrong. */
- bb = ENTRY_BLOCK_PTR;
+ bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
num = 0;
FOR_EACH_EDGE (e, ei, bb->succs)
{
int entry_block = e->dest->index;
- if (e->dest == EXIT_BLOCK_PTR)
+ if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
continue;
for (i = 0; i < (unsigned)num_var_partitions (map); i++)
{
basic_block tmp;
- tree d;
+ tree d = NULL_TREE;
bitmap loe;
var = partition_to_var (map, i);
stmt = SSA_NAME_DEF_STMT (var);
tmp = gimple_bb (stmt);
- d = gimple_default_def (cfun, SSA_NAME_VAR (var));
+ if (SSA_NAME_VAR (var))
+ d = ssa_default_def (cfun, SSA_NAME_VAR (var));
loe = live_on_entry (live, e->dest);
if (loe && bitmap_bit_p (loe, i))