/* Liveness for SSA trees.
   Copyright (C) 2003-2015 Free Software Foundation, Inc.
   Contributed by Andrew MacLeod <amacleod@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "fold-const.h"
#include "gimple-pretty-print.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "statistics.h"
#include "insn-config.h"
#include "tree-ssa-live.h"
#include "diagnostic-core.h"
#include "lto-streamer.h"
#include "ipa-utils.h"

#ifdef ENABLE_CHECKING
static void verify_live_on_entry (tree_live_info_p);
#endif

/* VARMAP maintains a mapping from SSA version number to real variables.

   All SSA_NAMEs are divided into partitions.  Initially each ssa_name is the
   only member of its own partition.  Coalescing will attempt to group any
   ssa_names which occur in a copy or in a PHI node into the same partition.

   At the end of out-of-ssa, each partition becomes a "real" variable and is
   rewritten as a compiler variable.

   The var_map data structure is used to manage these partitions.  It allows
   partitions to be combined, and determines which partition belongs to what
   ssa_name or variable, and vice versa.  */
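
/* As an illustration, a hypothetical out-of-SSA driver (a sketch, not
   code from this file; NAME1 and NAME2 stand for arbitrary non-virtual
   SSA_NAMEs) would use the var_map API roughly as follows:

     var_map map = init_var_map (num_ssa_names);
     if (var_union (map, name1, name2) != NO_PARTITION)
       ;                                -- name1/name2 now share a partition
     partition_view_normal (map, true); -- compact, and build base variables
     int p = var_to_partition (map, name1);
     tree repr = partition_to_var (map, p);
     delete_var_map (map);
*/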

/* Hashtable helpers.  */

struct tree_int_map_hasher : typed_noop_remove <tree_int_map>
{
  typedef tree_int_map *value_type;
  typedef tree_int_map *compare_type;
  static inline hashval_t hash (const tree_int_map *);
  static inline bool equal (const tree_int_map *, const tree_int_map *);
};

inline hashval_t
tree_int_map_hasher::hash (const tree_int_map *v)
{
  return tree_map_base_hash (v);
}

inline bool
tree_int_map_hasher::equal (const tree_int_map *v, const tree_int_map *c)
{
  return tree_int_map_eq (v, c);
}

/* This routine will initialize the basevar fields of MAP.  */

static void
var_map_base_init (var_map map)
{
  int x, num_part;
  tree var;
  struct tree_int_map *m, *mapstorage;

  num_part = num_var_partitions (map);
  hash_table<tree_int_map_hasher> tree_to_index (num_part);

  /* We can have at most num_part entries in the hash tables, so it's
     enough to allocate so many map elements once, saving some malloc
     calls.  */
  mapstorage = m = XNEWVEC (struct tree_int_map, num_part);

  /* If a base table already exists, clear it, otherwise create it.  */
  free (map->partition_to_base_index);
  map->partition_to_base_index = (int *) xmalloc (sizeof (int) * num_part);

  /* Build the base variable list, and point partitions at their bases.  */
  for (x = 0; x < num_part; x++)
    {
      struct tree_int_map **slot;
      unsigned baseindex;
      var = partition_to_var (map, x);
      if (SSA_NAME_VAR (var)
	  && (!VAR_P (SSA_NAME_VAR (var))
	      || !DECL_IGNORED_P (SSA_NAME_VAR (var))))
	m->base.from = SSA_NAME_VAR (var);
      else
	/* This restricts what anonymous SSA names we can coalesce
	   as it restricts the sets we compute conflicts for.
	   Using TREE_TYPE to generate sets is the easiest, as
	   type equivalency also holds for SSA names with the same
	   underlying decl.

	   Check gimple_can_coalesce_p when changing this code.  */
	m->base.from = (TYPE_CANONICAL (TREE_TYPE (var))
			? TYPE_CANONICAL (TREE_TYPE (var))
			: TREE_TYPE (var));
      /* If base variable hasn't been seen, set it up.  */
      slot = tree_to_index.find_slot (m, INSERT);
      if (!*slot)
	{
	  baseindex = m - mapstorage;
	  m->to = baseindex;
	  *slot = m;
	  m++;
	}
      else
	baseindex = (*slot)->to;
      map->partition_to_base_index[x] = baseindex;
    }

  map->num_basevars = m - mapstorage;

  free (mapstorage);
}

/* Remove the base table in MAP.  */

static void
var_map_base_fini (var_map map)
{
  /* Free the basevar info if it is present.  */
  if (map->partition_to_base_index != NULL)
    {
      free (map->partition_to_base_index);
      map->partition_to_base_index = NULL;
      map->num_basevars = 0;
    }
}

/* Create a variable partition map of SIZE, initialize and return it.  */

var_map
init_var_map (int size)
{
  var_map map;

  map = (var_map) xmalloc (sizeof (struct _var_map));
  map->var_partition = partition_new (size);

  map->partition_to_view = NULL;
  map->view_to_partition = NULL;
  map->num_partitions = size;
  map->partition_size = size;
  map->num_basevars = 0;
  map->partition_to_base_index = NULL;
  return map;
}

/* Free memory associated with MAP.  */

void
delete_var_map (var_map map)
{
  var_map_base_fini (map);
  partition_delete (map->var_partition);
  free (map->partition_to_view);
  free (map->view_to_partition);
  free (map);
}

/* This function will combine the partitions in MAP for VAR1 and VAR2.  It
   returns the partition which represents the new partition.  If the two
   partitions cannot be combined, NO_PARTITION is returned.  */

int
var_union (var_map map, tree var1, tree var2)
{
  int p1, p2, p3;

  gcc_assert (TREE_CODE (var1) == SSA_NAME);
  gcc_assert (TREE_CODE (var2) == SSA_NAME);

  /* This is independent of partition_to_view.  If partition_to_view is
     on, then whichever one of these partitions is absorbed will never have a
     dereference into the partition_to_view array any more.  */

  p1 = partition_find (map->var_partition, SSA_NAME_VERSION (var1));
  p2 = partition_find (map->var_partition, SSA_NAME_VERSION (var2));

  gcc_assert (p1 != NO_PARTITION);
  gcc_assert (p2 != NO_PARTITION);

  if (p1 == p2)
    p3 = p1;
  else
    p3 = partition_union (map->var_partition, p1, p2);

  if (map->partition_to_view)
    p3 = map->partition_to_view[p3];

  return p3;
}

/* Compress the partition numbers in MAP such that they fall in the range
   0..(num_partitions-1) instead of wherever they turned out during
   the partitioning exercise.  This removes any references to unused
   partitions, thereby allowing bitmaps and other vectors to be much
   denser.

   This is implemented such that compaction doesn't affect partitioning.
   I.e., once partitions are created and possibly merged, running one
   or more different kinds of compaction will not affect the partitions
   themselves.  Their index might change, but all the same variables will
   still be members of the same partition group.  This allows work on reduced
   sets, and no loss of information when a larger set is later desired.

   In particular, coalescing can work on partitions which have 2 or more
   definitions, and then 'recompact' later to include all the single
   definitions for assignment to program variables.  */
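
/* For example (a sketch, not code from this file; CANDIDATES is a
   hypothetical bitmap of partitions an optimizer cares about), a pass
   can narrow the view to just those partitions and later widen it again:

     partition_view_bitmap (map, candidates, true);
     ...work with the reduced partition view...
     partition_view_normal (map, true);  -- recompact to all used partitions

   Partition contents are unchanged by either call; only the index
   mapping (the "view") differs.  */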

/* Set MAP back to the initial state of having no partition view.  Return a
   bitmap which has a bit set for each partition number which is in use in
   the varmap.  */

static bitmap
partition_view_init (var_map map)
{
  bitmap used;
  int tmp;
  unsigned x;

  used = BITMAP_ALLOC (NULL);

  /* Already in a view?  Abandon the old one.  */
  if (map->partition_to_view)
    {
      free (map->partition_to_view);
      map->partition_to_view = NULL;
    }
  if (map->view_to_partition)
    {
      free (map->view_to_partition);
      map->view_to_partition = NULL;
    }

  /* Find out which partitions are actually referenced.  */
  for (x = 0; x < map->partition_size; x++)
    {
      tmp = partition_find (map->var_partition, x);
      if (ssa_name (tmp) != NULL_TREE && !virtual_operand_p (ssa_name (tmp))
	  && (!has_zero_uses (ssa_name (tmp))
	      || !SSA_NAME_IS_DEFAULT_DEF (ssa_name (tmp))))
	bitmap_set_bit (used, tmp);
    }

  map->num_partitions = map->partition_size;
  return used;
}

/* This routine will finalize the view data for MAP based on the partitions
   set in SELECTED.  This is either the same bitmap returned from
   partition_view_init, or a trimmed down version if some of those partitions
   were not desired in this view.  SELECTED is freed before returning.  */

static void
partition_view_fini (var_map map, bitmap selected)
{
  bitmap_iterator bi;
  unsigned count, i, x, limit;

  gcc_assert (selected);

  count = bitmap_count_bits (selected);
  limit = map->partition_size;

  /* If it's a one-to-one ratio, we don't need any view compaction.  */
  if (count < limit)
    {
      map->partition_to_view = (int *) xmalloc (limit * sizeof (int));
      memset (map->partition_to_view, 0xff, (limit * sizeof (int)));
      map->view_to_partition = (int *) xmalloc (count * sizeof (int));

      i = 0;
      /* Give each selected partition an index.  */
      EXECUTE_IF_SET_IN_BITMAP (selected, 0, x, bi)
	{
	  map->partition_to_view[x] = i;
	  map->view_to_partition[i] = x;
	  i++;
	}
      gcc_assert (i == count);
      map->num_partitions = i;
    }

  BITMAP_FREE (selected);
}

/* Create a partition view which includes all the used partitions in MAP.  If
   WANT_BASES is true, create the base variable map as well.  */

void
partition_view_normal (var_map map, bool want_bases)
{
  bitmap used;

  used = partition_view_init (map);
  partition_view_fini (map, used);

  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}

/* Create a partition view in MAP which includes just partitions which occur in
   the bitmap ONLY.  If WANT_BASES is true, create the base variable map
   as well.  */

void
partition_view_bitmap (var_map map, bitmap only, bool want_bases)
{
  bitmap used;
  bitmap new_partitions = BITMAP_ALLOC (NULL);
  unsigned x, p;
  bitmap_iterator bi;

  used = partition_view_init (map);
  EXECUTE_IF_SET_IN_BITMAP (only, 0, x, bi)
    {
      p = partition_find (map->var_partition, x);
      gcc_assert (bitmap_bit_p (used, p));
      bitmap_set_bit (new_partitions, p);
    }
  partition_view_fini (map, new_partitions);

  if (want_bases)
    var_map_base_init (map);
  else
    var_map_base_fini (map);
}

static bitmap usedvars;

/* Mark VAR as used, so that it'll be preserved during rtl expansion.
   Returns true if VAR wasn't marked before.  */

static inline bool
set_is_used (tree var)
{
  return bitmap_set_bit (usedvars, DECL_UID (var));
}

/* Return true if VAR is marked as used.  */

static inline bool
is_used_p (tree var)
{
  return bitmap_bit_p (usedvars, DECL_UID (var));
}

static inline void mark_all_vars_used (tree *);

/* Helper function for mark_all_vars_used, called via walk_tree.  */

static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));
  tree b;

  if (TREE_CODE (t) == SSA_NAME)
    {
      *walk_subtrees = 0;
      t = SSA_NAME_VAR (t);
      if (!t)
	return NULL;
    }

  if (IS_EXPR_CODE_CLASS (c)
      && (b = TREE_BLOCK (t)) != NULL)
    TREE_USED (b) = true;

  /* Ignore TMR_OFFSET and TMR_STEP for TARGET_MEM_REFS, as those
     fields do not contain vars.  */
  if (TREE_CODE (t) == TARGET_MEM_REF)
    {
      mark_all_vars_used (&TMR_BASE (t));
      mark_all_vars_used (&TMR_INDEX (t));
      mark_all_vars_used (&TMR_INDEX2 (t));
      *walk_subtrees = 0;
      return NULL;
    }

  /* Only need to mark VAR_DECLS; parameters and return results are not
     eliminated as unused.  */
  if (TREE_CODE (t) == VAR_DECL)
    {
      /* When a global var becomes used for the first time also walk its
	 initializer (non global ones don't have any).  */
      if (set_is_used (t) && is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	mark_all_vars_used (&DECL_INITIAL (t));
    }
  /* remove_unused_scope_block_p requires information about labels
     which are not DECL_IGNORED_P to tell if they might be used in the IL.  */
  else if (TREE_CODE (t) == LABEL_DECL)
    /* Although the TREE_USED values that the frontend uses would be
       acceptable (albeit slightly over-conservative) for our purposes,
       init_vars_expansion clears TREE_USED for LABEL_DECLs too, so we
       must re-compute it here.  */
    TREE_USED (t) = 1;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}

/* Mark the scope block SCOPE and its subblocks unused when they can be
   possibly eliminated if dead.  */

static void
mark_scope_block_unused (tree scope)
{
  tree t;
  TREE_USED (scope) = false;
  if (!(*debug_hooks->ignore_block) (scope))
    TREE_USED (scope) = true;
  for (t = BLOCK_SUBBLOCKS (scope); t; t = BLOCK_CHAIN (t))
    mark_scope_block_unused (t);
}

/* Look if the block is dead (by possibly eliminating its dead subblocks)
   and return true if so.
   A block is declared dead if:
     1) No statements are associated with it.
     2) It declares no live variables.
     3) All subblocks are dead, or there is precisely one subblock and
	the block has the same abstract origin as the outer block and
	declares no variables, so it is a pure wrapper.
   When we are not outputting full debug info, we also eliminate dead
   variables out of scope blocks to let them be recycled by GGC and to
   save copying work done by the inliner.  */
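
/* For instance, a source fragment that after optimization looks like

     {                   -- block A: pure wrapper, same origin as parent
       int tmp;          -- unused, so pruned from BLOCK_VARS
       { use (x); }      -- block B: contains statements, stays
     }

   lets block A be eliminated: it has no statements, declares nothing
   live, and its single subblock B is re-parented to A's supercontext.
   (Illustrative sketch only; `use' and `x' are placeholders.)  */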

static bool
remove_unused_scope_block_p (tree scope, bool in_ctor_dtor_block)
{
  tree *t, *next;
  bool unused = !TREE_USED (scope);
  int nsubblocks = 0;

  /* For ipa-polymorphic-call.c purposes, preserve blocks:
     1) with BLOCK_ABSTRACT_ORIGIN of a ctor/dtor or their clones  */
  if (inlined_polymorphic_ctor_dtor_block_p (scope, true))
    {
      in_ctor_dtor_block = true;
      unused = false;
    }
  /* 2) inside such blocks, the outermost block with BLOCK_ABSTRACT_ORIGIN
     being a FUNCTION_DECL.  */
  else if (in_ctor_dtor_block
	   && BLOCK_ABSTRACT_ORIGIN (scope)
	   && TREE_CODE (BLOCK_ABSTRACT_ORIGIN (scope)) == FUNCTION_DECL)
    {
      in_ctor_dtor_block = false;
      unused = false;
    }

  for (t = &BLOCK_VARS (scope); *t; t = next)
    {
      next = &DECL_CHAIN (*t);

      /* Debug info of nested function refers to the block of the
	 function.  We might still call it even if all statements
	 of the function it was nested into were eliminated.

	 TODO: We can actually look into cgraph to see if function
	 will be output to file.  */
      if (TREE_CODE (*t) == FUNCTION_DECL)
	unused = false;

      /* If a decl has a value expr, we need to instantiate it
	 regardless of debug info generation, to avoid codegen
	 differences in memory overlap tests.  update_equiv_regs() may
	 indirectly call validate_equiv_mem() to test whether a
	 SET_DEST overlaps with others, and if the value expr changes
	 by virtual register instantiation, we may end up with
	 different results.  */
      else if (TREE_CODE (*t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*t))
	unused = false;

      /* Remove everything we don't generate debug info for.  */
      else if (DECL_IGNORED_P (*t))
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}

      /* When we are outputting debug info, we usually want to output
	 info about optimized-out variables in the scope blocks.
	 Exceptions are the scope blocks not containing any instructions
	 at all, so the user can't get into the scopes in the first place.  */
      else if (is_used_p (*t))
	unused = false;
      else if (TREE_CODE (*t) == LABEL_DECL && TREE_USED (*t))
	/* For labels that are still used in the IL, the decision to
	   preserve them must not depend on DEBUG_INFO_LEVEL, otherwise we
	   risk having different ordering in debug vs. non-debug builds
	   during inlining or versioning.
	   A label appearing here (we have already checked DECL_IGNORED_P)
	   should not be used in the IL unless it has been explicitly used
	   before, so we use TREE_USED as an approximation.  */
	/* In principle, we should do the same here as for the debug case
	   below, however, when debugging, there might be additional nested
	   levels that keep an upper level with a label live, so we have to
	   force this block to be considered used, too.  */
	unused = false;

      /* When we are not doing full debug info, we however can keep around
	 only the used variables for cfgexpand's memory packing, saving quite
	 a lot of memory.

	 For sake of -g3, we keep around those vars but we don't count this as
	 use of block, so innermost block with no used vars and no instructions
	 can be considered dead.  We only want to keep around blocks user can
	 breakpoint into and ask about value of optimized out variables.

	 Similarly we need to keep around types at least until all
	 variables of all nested blocks are gone.  We track no
	 information on whether given type is used or not, so we have
	 to keep them even when not emitting debug information,
	 otherwise we may end up remapping variables and their (local)
	 types in different orders depending on whether debug
	 information is being generated.  */

      else if (TREE_CODE (*t) == TYPE_DECL
	       || debug_info_level == DINFO_LEVEL_NORMAL
	       || debug_info_level == DINFO_LEVEL_VERBOSE)
	;
      else
	{
	  *t = DECL_CHAIN (*t);
	  next = t;
	}
    }

  for (t = &BLOCK_SUBBLOCKS (scope); *t ;)
    if (remove_unused_scope_block_p (*t, in_ctor_dtor_block))
      {
	if (BLOCK_SUBBLOCKS (*t))
	  {
	    tree next = BLOCK_CHAIN (*t);
	    tree supercontext = BLOCK_SUPERCONTEXT (*t);

	    *t = BLOCK_SUBBLOCKS (*t);
	    while (BLOCK_CHAIN (*t))
	      {
		BLOCK_SUPERCONTEXT (*t) = supercontext;
		t = &BLOCK_CHAIN (*t);
	      }
	    BLOCK_CHAIN (*t) = next;
	    BLOCK_SUPERCONTEXT (*t) = supercontext;
	    t = &BLOCK_CHAIN (*t);
	    nsubblocks++;
	  }
	else
	  *t = BLOCK_CHAIN (*t);
      }
    else
      {
	t = &BLOCK_CHAIN (*t);
	nsubblocks++;
      }

  if (!unused)
    ;
  /* Outer scope is always used.  */
  else if (!BLOCK_SUPERCONTEXT (scope)
	   || TREE_CODE (BLOCK_SUPERCONTEXT (scope)) == FUNCTION_DECL)
    unused = false;
  /* Innermost blocks with no live variables nor statements can always be
     eliminated.  */
  else if (!nsubblocks)
    ;
  /* When not generating debug info we can eliminate info on unused
     variables.  */
  else if (!flag_auto_profile && debug_info_level == DINFO_LEVEL_NONE)
    {
      /* Even for -g0 don't prune outer scopes from artificial
	 functions, otherwise diagnostics using tree_nonartificial_location
	 will not be emitted properly.  */
      if (inlined_function_outer_scope_p (scope))
	{
	  tree ao = scope;

	  while (ao
		 && TREE_CODE (ao) == BLOCK
		 && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	    ao = BLOCK_ABSTRACT_ORIGIN (ao);
	  if (ao
	      && TREE_CODE (ao) == FUNCTION_DECL
	      && DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    unused = false;
	}
    }
  else if (BLOCK_VARS (scope) || BLOCK_NUM_NONLOCALIZED_VARS (scope))
    unused = false;
  /* See if this block is important for representation of inlined function.
     Inlined functions are always represented by block with
     block_ultimate_origin being set to FUNCTION_DECL and DECL_SOURCE_LOCATION
     set.  */
  else if (inlined_function_outer_scope_p (scope))
    unused = false;
  else
    /* Verify that only blocks with source location set
       are entry points to the inlined functions.  */
    gcc_assert (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope))
		== UNKNOWN_LOCATION);

  TREE_USED (scope) = !unused;
  return unused;
}

/* Mark all VAR_DECLS under *EXPR_P as used, so that they won't be
   eliminated during the tree->rtl conversion process.  */

static inline void
mark_all_vars_used (tree *expr_p)
{
  walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
}

/* Helper function for clear_unused_block_pointer, called via walk_tree.  */

static tree
clear_unused_block_pointer_1 (tree *tp, int *, void *)
{
  if (EXPR_P (*tp) && TREE_BLOCK (*tp)
      && !TREE_USED (TREE_BLOCK (*tp)))
    TREE_SET_BLOCK (*tp, NULL);
  return NULL_TREE;
}

/* Set all block pointers in debug or clobber stmts to NULL if the block
   is unused, so that they will not be streamed out.  */

static void
clear_unused_block_pointer (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	unsigned i;
	tree b;
	gimple stmt = gsi_stmt (gsi);

	if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
	  continue;
	b = gimple_block (stmt);
	if (b && !TREE_USED (b))
	  gimple_set_block (stmt, NULL);
	for (i = 0; i < gimple_num_ops (stmt); i++)
	  walk_tree (gimple_op_ptr (stmt, i), clear_unused_block_pointer_1,
		     NULL, NULL);
      }
}

/* Dump scope blocks starting at SCOPE to FILE.  INDENT is the
   indentation level and FLAGS is as in print_generic_expr.  */

void
dump_scope_block (FILE *file, int indent, tree scope, int flags)
{
  tree var, t;
  unsigned int i;

  fprintf (file, "\n%*s{ Scope block #%i%s%s", indent, "", BLOCK_NUMBER (scope),
	   TREE_USED (scope) ? "" : " (unused)",
	   BLOCK_ABSTRACT (scope) ? " (abstract)" : "");
  if (LOCATION_LOCUS (BLOCK_SOURCE_LOCATION (scope)) != UNKNOWN_LOCATION)
    {
      expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (scope));
      fprintf (file, " %s:%i", s.file, s.line);
    }
  if (BLOCK_ABSTRACT_ORIGIN (scope))
    {
      tree origin = block_ultimate_origin (scope);
      if (origin)
	{
	  fprintf (file, " Originating from :");
	  if (DECL_P (origin))
	    print_generic_decl (file, origin, flags);
	  else
	    fprintf (file, "#%i", BLOCK_NUMBER (origin));
	}
    }
  fprintf (file, " \n");
  for (var = BLOCK_VARS (scope); var; var = DECL_CHAIN (var))
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, var, flags);
      fprintf (file, "\n");
    }
  for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (scope); i++)
    {
      fprintf (file, "%*s", indent, "");
      print_generic_decl (file, BLOCK_NONLOCALIZED_VAR (scope, i),
			  flags);
      fprintf (file, " (nonlocalized)\n");
    }
  for (t = BLOCK_SUBBLOCKS (scope); t; t = BLOCK_CHAIN (t))
    dump_scope_block (file, indent + 2, t, flags);
  fprintf (file, "\n%*s}\n", indent, "");
}

/* Dump the tree of lexical scopes starting at SCOPE to stderr.  FLAGS
   is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_block (tree scope, int flags)
{
  dump_scope_block (stderr, 0, scope, flags);
}

/* Dump the tree of lexical scopes of current_function_decl to FILE.
   FLAGS is as in print_generic_expr.  */

void
dump_scope_blocks (FILE *file, int flags)
{
  dump_scope_block (file, 0, DECL_INITIAL (current_function_decl), flags);
}

/* Dump the tree of lexical scopes of current_function_decl to stderr.
   FLAGS is as in print_generic_expr.  */

DEBUG_FUNCTION void
debug_scope_blocks (int flags)
{
  dump_scope_blocks (stderr, flags);
}

/* Remove local variables that are not referenced in the IL.  */

void
remove_unused_locals (void)
{
  basic_block bb;
  tree var;
  unsigned srcidx, dstidx, num;
  bool have_local_clobbers = false;

  /* Removing declarations from lexical blocks when not optimizing is
     not only a waste of time, it actually causes differences in stack
     layout.  */
  if (!optimize)
    return;

  timevar_push (TV_REMOVE_UNUSED);

  mark_scope_block_unused (DECL_INITIAL (current_function_decl));

  usedvars = BITMAP_ALLOC (NULL);

  /* Walk the CFG marking all referenced symbols.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      size_t i;
      edge_iterator ei;
      edge e;

      /* Walk the statements.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  tree b = gimple_block (stmt);

	  if (is_gimple_debug (stmt))
	    continue;

	  if (gimple_clobber_p (stmt))
	    {
	      have_local_clobbers = true;
	      continue;
	    }

	  if (b)
	    TREE_USED (b) = true;

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    mark_all_vars_used (gimple_op_ptr (gsi_stmt (gsi), i));
	}

      for (gphi_iterator gpi = gsi_start_phis (bb);
	   !gsi_end_p (gpi);
	   gsi_next (&gpi))
	{
	  use_operand_p arg_p;
	  ssa_op_iter i;
	  tree def;
	  gphi *phi = gpi.phi ();

	  if (virtual_operand_p (gimple_phi_result (phi)))
	    continue;

	  def = gimple_phi_result (phi);
	  mark_all_vars_used (&def);

	  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
	    {
	      tree arg = USE_FROM_PTR (arg_p);
	      int index = PHI_ARG_INDEX_FROM_USE (arg_p);
	      tree block =
		LOCATION_BLOCK (gimple_phi_arg_location (phi, index));
	      if (block != NULL)
		TREE_USED (block) = true;
	      mark_all_vars_used (&arg);
	    }
	}

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (LOCATION_BLOCK (e->goto_locus) != NULL)
	  TREE_USED (LOCATION_BLOCK (e->goto_locus)) = true;
    }

  /* We do a two-pass approach about the out-of-scope clobbers.  We want
     to remove them if they are the only references to a local variable,
     but we want to retain them when there's any other.  So the first pass
     ignores them, and the second pass (if there were any) tries to remove
     them.  */
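
  /* A "clobber" here is a gimple assignment marking the end of a
     variable's lifetime, e.g. for a local `c' going out of scope:

       c = {CLOBBER};

     If `c' is otherwise unreferenced, pass one never marks it used, and
     this second pass deletes the clobber itself (illustrative sketch;
     `c' is a placeholder).  */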
  if (have_local_clobbers)
    FOR_EACH_BB_FN (bb, cfun)
      {
	gimple_stmt_iterator gsi;

	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
	  {
	    gimple stmt = gsi_stmt (gsi);
	    tree b = gimple_block (stmt);

	    if (gimple_clobber_p (stmt))
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree base = get_base_address (lhs);
		/* Remove clobbers referencing unused vars, or clobbers
		   with MEM_REF lhs referencing uninitialized pointers.  */
		if ((TREE_CODE (base) == VAR_DECL && !is_used_p (base))
		    || (TREE_CODE (lhs) == MEM_REF
			&& TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
			&& SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0))
			&& (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (lhs, 0)))
			    != PARM_DECL)))
		  {
		    unlink_stmt_vdef (stmt);
		    gsi_remove (&gsi, true);
		    release_defs (stmt);
		    continue;
		  }
		if (b)
		  TREE_USED (b) = true;
	      }
	    gsi_next (&gsi);
	  }
      }

  cfun->has_local_explicit_reg_vars = false;

  /* Remove unmarked local and global vars from local_decls.  */
  num = vec_safe_length (cfun->local_decls);
  for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
    {
      var = (*cfun->local_decls)[srcidx];
      if (TREE_CODE (var) == VAR_DECL)
	{
	  if (!is_used_p (var))
	    {
	      tree def;
	      if (cfun->nonlocal_goto_save_area
		  && TREE_OPERAND (cfun->nonlocal_goto_save_area, 0) == var)
		cfun->nonlocal_goto_save_area = NULL;
	      /* Release any default def associated with var.  */
	      if ((def = ssa_default_def (cfun, var)) != NULL_TREE)
		{
		  set_ssa_default_def (cfun, var, NULL_TREE);
		  release_ssa_name (def);
		}
	      continue;
	    }
	}
      if (TREE_CODE (var) == VAR_DECL
	  && DECL_HARD_REGISTER (var)
	  && !is_global_var (var))
	cfun->has_local_explicit_reg_vars = true;

      if (srcidx != dstidx)
	(*cfun->local_decls)[dstidx] = var;
      dstidx++;
    }
  if (dstidx != num)
    {
      statistics_counter_event (cfun, "unused VAR_DECLs removed", num - dstidx);
      cfun->local_decls->truncate (dstidx);
    }

  remove_unused_scope_block_p (DECL_INITIAL (current_function_decl), false);
  clear_unused_block_pointer ();

  BITMAP_FREE (usedvars);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks after cleanups:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }

  timevar_pop (TV_REMOVE_UNUSED);
}

/* Allocate and return a new live range information object based on MAP.  */

static tree_live_info_p
new_tree_live_info (var_map map)
{
  tree_live_info_p live;
  basic_block bb;

  live = XNEW (struct tree_live_info_d);
  live->map = map;
  live->num_blocks = last_basic_block_for_fn (cfun);

  bitmap_obstack_initialize (&live->livein_obstack);
  bitmap_obstack_initialize (&live->liveout_obstack);
  live->livein = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_initialize (&live->livein[bb->index], &live->livein_obstack);

  live->liveout = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_initialize (&live->liveout[bb->index], &live->liveout_obstack);

  live->work_stack = XNEWVEC (int, last_basic_block_for_fn (cfun));
  live->stack_top = live->work_stack;

  live->global = BITMAP_ALLOC (NULL);
  return live;
}

/* Free storage for live range info object LIVE.  */

void
delete_tree_live_info (tree_live_info_p live)
{
  if (live->livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
    }
  if (live->liveout)
    {
      bitmap_obstack_release (&live->liveout_obstack);
      free (live->liveout);
    }
  BITMAP_FREE (live->global);
  free (live->work_stack);
  free (live);
}

/* Visit basic block BB and propagate any required live on entry bits from
   LIVE into the predecessors.  VISITED is the bitmap of visited blocks.  */

static void
loe_visit_block (tree_live_info_p live, basic_block bb, sbitmap visited)
{
  edge e;
  bool change;
  edge_iterator ei;
  basic_block pred_bb;
  bitmap loe;

  gcc_checking_assert (!bitmap_bit_p (visited, bb->index));
  bitmap_set_bit (visited, bb->index);

  loe = live_on_entry (live, bb);

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      pred_bb = e->src;
      if (pred_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;
      /* Variables live-on-entry from BB that aren't defined in the
	 predecessor block.  This should be the live on entry vars to pred.
	 Note that liveout is the DEFs in a block while live on entry is
	 being calculated.
	 Add these bits to live-on-entry for the pred.  If there are any
	 changes, and pred_bb has been visited already, add it to the
	 revisit stack.  */
      change = bitmap_ior_and_compl_into (live_on_entry (live, pred_bb),
					  loe, &live->liveout[pred_bb->index]);
      if (change
	  && bitmap_bit_p (visited, pred_bb->index))
	{
	  bitmap_clear_bit (visited, pred_bb->index);
	  *(live->stack_top)++ = pred_bb->index;
	}
    }
}

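/* In dataflow terms, the propagation above computes, for each edge
   PRED -> BB, the backward step

     LIVEIN (PRED) |= LIVEIN (BB) & ~DEF (PRED)

   where DEF (PRED) is stored temporarily in PRED's liveout bitmap at
   this stage; live_worklist below iterates this to a fixpoint.  */
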
/* Using LIVE, fill in all the live-on-entry blocks between the defs and uses
   of all the variables.  */

static void
live_worklist (tree_live_info_p live)
{
  int b;
  basic_block bb;
  sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun) + 1);

  bitmap_clear (visited);

  /* Visit all the blocks in reverse order and propagate live on entry values
     into the predecessor blocks.  */
  FOR_EACH_BB_REVERSE_FN (bb, cfun)
    loe_visit_block (live, bb, visited);

  /* Process any blocks which require further iteration.  */
  while (live->stack_top != live->work_stack)
    {
      b = *--(live->stack_top);
      loe_visit_block (live, BASIC_BLOCK_FOR_FN (cfun, b), visited);
    }

  sbitmap_free (visited);
}

/* Calculate the initial live on entry vector for SSA_NAME using immediate_use
   links.  Set the live on entry fields in LIVE.  Defs are marked temporarily
   in the liveout vector.  */

static void
set_var_live_on_entry (tree ssa_name, tree_live_info_p live)
{
  int p;
  gimple stmt;
  use_operand_p use;
  basic_block def_bb = NULL;
  imm_use_iterator imm_iter;
  bool global = false;

  p = var_to_partition (live->map, ssa_name);
  if (p == NO_PARTITION)
    return;

  stmt = SSA_NAME_DEF_STMT (ssa_name);
  if (stmt)
    {
      def_bb = gimple_bb (stmt);
      /* Mark defs in liveout bitmap temporarily.  */
      if (def_bb)
	bitmap_set_bit (&live->liveout[def_bb->index], p);
    }
  else
    def_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);

  /* An undefined local variable does not need to be very alive.  */
  if (ssa_undefined_value_p (ssa_name, false))
    return;

  /* Visit each use of SSA_NAME and if it isn't in the same block as the def,
     add it to the list of live on entry blocks.  */
  FOR_EACH_IMM_USE_FAST (use, imm_iter, ssa_name)
    {
      gimple use_stmt = USE_STMT (use);
      basic_block add_block = NULL;

      if (gimple_code (use_stmt) == GIMPLE_PHI)
	{
	  /* Uses in PHIs are considered to be live at exit of the SRC block
	     as this is where a copy would be inserted.  Check to see if it is
	     defined in that block, or whether it's live on entry.  */
	  int index = PHI_ARG_INDEX_FROM_USE (use);
	  edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt), index);
	  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    {
	      if (e->src != def_bb)
		add_block = e->src;
	    }
	}
      else if (is_gimple_debug (use_stmt))
	continue;
      else
	{
	  /* If it's not defined in this block, it's live on entry.  */
	  basic_block use_bb = gimple_bb (use_stmt);
	  if (use_bb != def_bb)
	    add_block = use_bb;
	}

      /* If there was a live on entry use, set the bit.  */
      if (add_block)
	{
	  global = true;
	  bitmap_set_bit (&live->livein[add_block->index], p);
	}
    }

  /* If SSA_NAME is live on entry to at least one block, fill in all the live
     on entry blocks between the def and all the uses.  */
  if (global)
    bitmap_set_bit (live->global, p);
}

/* Calculate the live on exit vectors based on the entry info in LIVEINFO.  */

static void
calculate_live_on_exit (tree_live_info_p liveinfo)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* Live on entry calculations used liveout vectors for defs, clear them.  */
  FOR_EACH_BB_FN (bb, cfun)
    bitmap_clear (&liveinfo->liveout[bb->index]);

  /* Set all the live-on-exit bits for uses in PHIs.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gphi_iterator gsi;
      size_t i;

      /* Mark the PHI arguments which are live on exit to the pred block.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree t = PHI_ARG_DEF (phi, i);
	      int p;

	      if (TREE_CODE (t) != SSA_NAME)
		continue;

	      p = var_to_partition (liveinfo->map, t);
	      if (p == NO_PARTITION)
		continue;
	      e = gimple_phi_arg_edge (phi, i);
	      if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
		bitmap_set_bit (&liveinfo->liveout[e->src->index], p);
	    }
	}

      /* Add each successor's live on entry to this block's live on exit.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	  bitmap_ior_into (&liveinfo->liveout[bb->index],
			   live_on_entry (liveinfo, e->dest));
    }
}

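/* The net effect of the two loops above is

     LIVEOUT (BB) = (union of LIVEIN (S) over each CFG successor S)
		    | (partitions of PHI arguments flowing out of BB)

   since a PHI argument is regarded as live at the exit of the edge's
   source block, where the out-of-SSA copy would be inserted.  */
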
/* Given partition map MAP, calculate all the live on entry bitmaps for
   each partition.  Return a new live info object.  */

tree_live_info_p
calculate_live_ranges (var_map map, bool want_livein)
{
  tree var;
  unsigned i;
  tree_live_info_p live;

  live = new_tree_live_info (map);
  for (i = 0; i < num_var_partitions (map); i++)
    {
      var = partition_to_var (map, i);
      if (var != NULL_TREE)
	set_var_live_on_entry (var, live);
    }

  live_worklist (live);

#ifdef ENABLE_CHECKING
  verify_live_on_entry (live);
#endif

  calculate_live_on_exit (live);

  if (!want_livein)
    {
      bitmap_obstack_release (&live->livein_obstack);
      free (live->livein);
      live->livein = NULL;
    }

  return live;
}

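/* A minimal usage sketch (hypothetical caller; the out-of-SSA and
   coalescing code does the equivalent, and SOME_BB stands for any basic
   block of the current function):

     tree_live_info_p live = calculate_live_ranges (map, true);
     bitmap b = live_on_entry (live, some_bb);  -- query per-block bitmaps
     ...
     delete_tree_live_info (live);
*/
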
/* Output partition map MAP to file F.  */

void
dump_var_map (FILE *f, var_map map)
{
  int t;
  unsigned x, y;
  int p;

  fprintf (f, "\nPartition map \n\n");

  for (x = 0; x < map->num_partitions; x++)
    {
      if (map->view_to_partition != NULL)
	p = map->view_to_partition[x];
      else
	p = x;

      if (ssa_name (p) == NULL_TREE
	  || virtual_operand_p (ssa_name (p)))
	continue;

      t = 0;
      for (y = 1; y < num_ssa_names; y++)
	{
	  p = partition_find (map->var_partition, y);
	  if (map->partition_to_view)
	    p = map->partition_to_view[p];
	  if (p == (int) x)
	    {
	      if (t++ == 0)
		{
		  fprintf (f, "Partition %d (", x);
		  print_generic_expr (f, partition_to_var (map, p), TDF_SLIM);
		  fprintf (f, " - ");
		}
	      fprintf (f, "%d ", y);
	    }
	}
      if (t != 0)
	fprintf (f, ")\n");
    }
  fprintf (f, "\n");
}

/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (_var_map &ref)
{
  dump_var_map (stderr, &ref);
}

DEBUG_FUNCTION void
debug (_var_map *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

/* Output live range info LIVE to file F, controlled by FLAG.  */

void
dump_live_info (FILE *f, tree_live_info_p live, int flag)
{
  basic_block bb;
  unsigned i;
  var_map map = live->map;
  bitmap_iterator bi;

  if ((flag & LIVEDUMP_ENTRY) && live->livein)
    {
      FOR_EACH_BB_FN (bb, cfun)
	{
	  fprintf (f, "\nLive on entry to BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (&live->livein[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, "  ");
	    }
	  fprintf (f, "\n");
	}
    }

  if ((flag & LIVEDUMP_EXIT) && live->liveout)
    {
      FOR_EACH_BB_FN (bb, cfun)
	{
	  fprintf (f, "\nLive on exit from BB%d : ", bb->index);
	  EXECUTE_IF_SET_IN_BITMAP (&live->liveout[bb->index], 0, i, bi)
	    {
	      print_generic_expr (f, partition_to_var (map, i), TDF_SLIM);
	      fprintf (f, "  ");
	    }
	  fprintf (f, "\n");
	}
    }
}

/* Generic dump for the above.  */

DEBUG_FUNCTION void
debug (tree_live_info_d &ref)
{
  dump_live_info (stderr, &ref, 0);
}

DEBUG_FUNCTION void
debug (tree_live_info_d *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

#ifdef ENABLE_CHECKING
/* Verify that SSA_VAR is a non-virtual SSA_NAME.  */

void
register_ssa_partition_check (tree ssa_var)
{
  gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
  if (virtual_operand_p (ssa_var))
    {
      fprintf (stderr, "Illegally registering a virtual SSA name :");
      print_generic_expr (stderr, ssa_var, TDF_SLIM);
      fprintf (stderr, " in the SSA->Normal phase.\n");
      internal_error ("SSA corruption");
    }
}

/* Verify that the info in LIVE matches the current cfg.  */

static void
verify_live_on_entry (tree_live_info_p live)
{
  unsigned i;
  tree var;
  gimple stmt;
  basic_block bb;
  edge e;
  int num;
  edge_iterator ei;
  var_map map = live->map;

  /* Check for live on entry partitions and report those with a DEF in
     the program.  This will typically mean an optimization has done
     something wrong.  */
  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
  num = 0;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      int entry_block = e->dest->index;
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;
      for (i = 0; i < (unsigned)num_var_partitions (map); i++)
	{
	  basic_block tmp;
	  tree d = NULL_TREE;
	  bitmap loe;
	  var = partition_to_var (map, i);
	  stmt = SSA_NAME_DEF_STMT (var);
	  tmp = gimple_bb (stmt);
	  if (SSA_NAME_VAR (var))
	    d = ssa_default_def (cfun, SSA_NAME_VAR (var));

	  loe = live_on_entry (live, e->dest);
	  if (loe && bitmap_bit_p (loe, i))
	    {
	      if (!gimple_nop_p (stmt))
		{
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is defined ");
		  if (tmp)
		    fprintf (stderr, " in BB%d, ", tmp->index);
		  fprintf (stderr, "by:\n");
		  print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
		  fprintf (stderr, "\nIt is also live-on-entry to entry BB %d",
			   entry_block);
		  fprintf (stderr, " So it appears to have multiple defs.\n");
		}
	      else if (d != var)
		{
		  num++;
		  print_generic_expr (stderr, var, TDF_SLIM);
		  fprintf (stderr, " is live-on-entry to BB%d ",
			   entry_block);
		  if (d)
		    {
		      fprintf (stderr, " but is not the default def of ");
		      print_generic_expr (stderr, d, TDF_SLIM);
		      fprintf (stderr, "\n");
		    }
		  else
		    fprintf (stderr, " and there is no default def.\n");
		}
	    }
	  else if (d == var)
	    {
	      /* An undefined local variable does not need to be very
		 alive.  */
	      if (ssa_undefined_value_p (var, false))
		continue;

	      /* The only way this var shouldn't be marked live on entry is
		 if it occurs in a PHI argument of the block.  */
	      size_t z;
	      bool ok = false;
	      gphi_iterator gsi;
	      for (gsi = gsi_start_phis (e->dest);
		   !gsi_end_p (gsi) && !ok;
		   gsi_next (&gsi))
		{
		  gphi *phi = gsi.phi ();
		  for (z = 0; z < gimple_phi_num_args (phi); z++)
		    if (var == gimple_phi_arg_def (phi, z))
		      {
			ok = true;
			break;
		      }
		}
	      if (ok)
		continue;
	      num++;
	      print_generic_expr (stderr, var, TDF_SLIM);
	      fprintf (stderr, " is not marked live-on-entry to entry BB%d ",
		       entry_block);
	      fprintf (stderr, "but it is a default def so it should be.\n");
	    }
	}
    }
  gcc_assert (num <= 0);
}
#endif