/* CFG cleanup for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "langhooks.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "tree-scalar-evolution.h"
/* The set of blocks in which at least one of the following changes happened:
   -- the statement at the end of the block was changed
   -- the block was newly created
   -- the set of the predecessors of the block changed
   -- the set of the successors of the block changed
   ??? Maybe we could track these changes separately, since they determine
       what cleanups it makes sense to try on the block.  */
bitmap cfgcleanup_altered_bbs;
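/* A note on usage (see cleanup_tree_cfg_1 below): this bitmap doubles as a
   worklist.  The per-block cleanups record the indices of blocks they touch
   here, and cleanup_tree_cfg_1 keeps popping set bits and re-running the
   cleanups on those blocks until the bitmap is empty.  */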
/* Remove any fallthru edge from EV.  Return true if an edge was removed.  */

static bool
remove_fallthru_edge (vec<edge, va_gc> *ev)
{
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, ev)
    if ((e->flags & EDGE_FALLTHRU) != 0)
      {
        if (e->flags & EDGE_COMPLEX)
          e->flags &= ~EDGE_FALLTHRU;
        else
          remove_edge_and_dominated_blocks (e);
        return true;
      }
  return false;
}
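/* Note that a fallthru edge that is also complex (abnormal or EH) cannot
   simply be deleted, so only its EDGE_FALLTHRU flag is cleared; a plain
   fallthru edge is removed together with any blocks that only it reached.  */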
/* Disconnect an unreachable block in the control expression starting
   at block BB.  */

static bool
cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
{
  edge taken_edge;
  bool retval = false;
  gimple stmt = gsi_stmt (gsi);
  tree val;

  if (!single_succ_p (bb))
    {
      edge e;
      edge_iterator ei;
      bool warned = false;
      location_t loc;

      fold_defer_overflow_warnings ();
      loc = gimple_location (stmt);
      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          val = fold_binary_loc (loc, gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));
          break;

        case GIMPLE_SWITCH:
          val = gimple_switch_index (stmt);
          break;

        default:
          val = NULL_TREE;
        }
      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
        {
          fold_undefer_and_ignore_overflow_warnings ();
          return false;
        }

      /* Remove all the edges except the one that is always executed.  */
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e != taken_edge)
            {
              if (!warned)
                {
                  fold_undefer_overflow_warnings
                    (true, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  warned = true;
                }

              taken_edge->probability += e->probability;
              taken_edge->count += e->count;
              remove_edge_and_dominated_blocks (e);
              retval = true;
            }
          else
            ei_next (&ei);
        }
      if (!warned)
        fold_undefer_and_ignore_overflow_warnings ();
      if (taken_edge->probability > REG_BR_PROB_BASE)
        taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = single_succ_edge (bb);

  bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
  gsi_remove (&gsi, true);
  taken_edge->flags = EDGE_FALLTHRU;

  return retval;
}
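/* For instance (block numbers purely illustrative), if bb 3 ends in
     if (0 != 0) goto bb 4; else goto bb 5;
   the condition folds to a constant, find_taken_edge returns the edge to
   bb 5, the edge to bb 4 (and anything reachable only through it) is
   removed, and the GIMPLE_COND itself is deleted, leaving a plain fallthru
   edge to bb 5.  */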
/* Try to remove superfluous control structures in basic block BB.  Returns
   true if anything changes.  */

static bool
cleanup_control_flow_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  bool retval = false;
  gimple stmt;

  /* If the last statement of the block could throw and now cannot,
     we need to prune cfg.  */
  retval |= gimple_purge_dead_eh_edges (bb);

  gsi = gsi_last_bb (bb);
  if (gsi_end_p (gsi))
    return retval;

  stmt = gsi_stmt (gsi);

  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_SWITCH)
    retval |= cleanup_control_expr_graph (bb, gsi);
  else if (gimple_code (stmt) == GIMPLE_GOTO
           && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
               == LABEL_DECL))
    {
      /* If we had a computed goto which has a compile-time determinable
         destination, then we can eliminate the goto.  */
      edge e;
      tree label;
      edge_iterator ei;
      basic_block target_block;

      /* First look at all the outgoing edges.  Delete any outgoing
         edges which do not go to the right block.  For the one
         edge which goes to the right block, fix up its flags.  */
      label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
      target_block = label_to_block (label);
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->dest != target_block)
            remove_edge_and_dominated_blocks (e);
          else
            {
              /* Turn off the EDGE_ABNORMAL flag.  */
              e->flags &= ~EDGE_ABNORMAL;

              /* And set EDGE_FALLTHRU.  */
              e->flags |= EDGE_FALLTHRU;
              ei_next (&ei);
            }
        }

      bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
      bitmap_set_bit (cfgcleanup_altered_bbs, target_block->index);

      /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
         relevant information we need.  */
      gsi_remove (&gsi, true);
      retval = true;
    }

  /* Check for indirect calls that have been turned into
     noreturn calls.  */
  else if (is_gimple_call (stmt)
           && gimple_call_noreturn_p (stmt)
           && remove_fallthru_edge (bb->succs))
    retval = true;

  return retval;
}
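/* For example (the label name is only illustrative), a computed goto such as
     goto *&&lab;
   whose destination is the address of the single label LAB lets the code
   above drop every outgoing edge except the one to LAB's block, clear
   EDGE_ABNORMAL on that edge, mark it as fallthru and delete the goto
   statement itself.  */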
/* Return true if basic block BB does nothing except pass control
   flow to another block and we can safely insert a label at
   the start of the successor block.

   As a precondition, we require that BB not be equal to
   ENTRY_BLOCK_PTR.  */

static bool
tree_forwarder_block_p (basic_block bb, bool phi_wanted)
{
  gimple_stmt_iterator gsi;
  location_t locus;

  /* BB must have a single outgoing edge.  */
  if (!single_succ_p (bb)
      /* If PHI_WANTED is false, BB must not have any PHI nodes.
         Otherwise, BB must have PHI nodes.  */
      || gimple_seq_empty_p (phi_nodes (bb)) == phi_wanted
      /* BB may not be a predecessor of EXIT_BLOCK_PTR.  */
      || single_succ (bb) == EXIT_BLOCK_PTR
      /* Nor should this be an infinite loop.  */
      || single_succ (bb) == bb
      /* BB may not have an abnormal outgoing edge.  */
      || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    return false;

  gcc_checking_assert (bb != ENTRY_BLOCK_PTR);

  locus = single_succ_edge (bb)->goto_locus;

  /* There should not be an edge coming from entry, or an EH edge.  */
  {
    edge_iterator ei;
    edge e;

    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src == ENTRY_BLOCK_PTR || (e->flags & EDGE_EH))
        return false;
      /* If goto_locus of any of the edges differs, prevent removing
         the forwarder block for -O0.  */
      else if (optimize == 0 && e->goto_locus != locus)
        return false;
  }

  /* Now walk through the statements backward.  We can ignore labels,
     anything else means this is not a forwarder block.  */
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      switch (gimple_code (stmt))
        {
        case GIMPLE_LABEL:
          if (DECL_NONLOCAL (gimple_label_label (stmt)))
            return false;
          if (optimize == 0 && gimple_location (stmt) != locus)
            return false;
          break;

          /* ??? For now, hope there's a corresponding debug
             assignment at the destination.  */
        case GIMPLE_DEBUG:
          break;

        default:
          return false;
        }
    }

  if (current_loops)
    {
      basic_block dest;
      /* Protect loop latches, headers and preheaders.  */
      if (bb->loop_father->header == bb)
        return false;
      dest = EDGE_SUCC (bb, 0)->dest;

      if (dest->loop_father->header == dest)
        return false;
    }

  return true;
}
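/* A typical forwarder block (numbers purely illustrative) is an empty bb 4
   whose only purpose is to transfer control:

     <bb 4>:
       goto <bb 7>;

   It has a single successor, no PHI nodes (unless PHI_WANTED), and contains
   at most labels and debug statements, so control entering bb 4 can instead
   be sent straight to bb 7.  */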
/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
   those alternatives are equal in each of the PHI nodes, then return
   true, else return false.  */

static bool
phi_alternatives_equal (basic_block dest, edge e1, edge e2)
{
  int n1 = e1->dest_idx;
  int n2 = e2->dest_idx;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      tree val1 = gimple_phi_arg_def (phi, n1);
      tree val2 = gimple_phi_arg_def (phi, n2);

      gcc_assert (val1 != NULL_TREE);
      gcc_assert (val2 != NULL_TREE);

      if (!operand_equal_for_phi_arg_p (val1, val2))
        return false;
    }

  return true;
}
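/* E.g. for  x_3 = PHI <a_1(E1), a_1(E2), b_2(E3)>  (SSA names purely
   illustrative) the alternatives for E1 and E2 are both a_1 and therefore
   compare equal, whereas a PHI such as  y_4 = PHI <a_1(E1), b_2(E2), ...>
   would make this function return false.  */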
/* Removes forwarder block BB.  Returns false if this failed.  */

static bool
remove_forwarder_block (basic_block bb)
{
  edge succ = single_succ_edge (bb), e, s;
  basic_block dest = succ->dest;
  gimple label;
  edge_iterator ei;
  gimple_stmt_iterator gsi, gsi_to;
  bool can_move_debug_stmts;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label or is an
     EH landing pad, do not merge it.  */
  label = first_stmt (dest);
  if (label
      && gimple_code (label) == GIMPLE_LABEL
      && (DECL_NONLOCAL (gimple_label_label (label))
          || EH_LANDING_PAD_NR (gimple_label_label (label)) != 0))
    return false;

  /* If there is an abnormal edge to basic block BB, but not into
     dest, problems might occur during removal of the PHI nodes at
     out-of-SSA time due to overlapping live ranges of registers.

     If there is an abnormal edge in DEST, the problems would occur
     anyway since cleanup_dead_labels would then merge the labels for
     two different eh regions, and rest of exception handling code
     does not like it.

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.  */
  if (bb_has_abnormal_pred (bb)
      && (bb_has_abnormal_pred (dest)
          || !gimple_seq_empty_p (phi_nodes (dest))))
    return false;

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.  */
  if (!gimple_seq_empty_p (phi_nodes (dest)))
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          s = find_edge (e->src, dest);
          if (!s)
            continue;

          if (!phi_alternatives_equal (dest, succ, s))
            return false;
        }
    }

  can_move_debug_stmts = MAY_HAVE_DEBUG_STMTS && single_pred_p (dest);

  /* Redirect the edges.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);

      if (e->flags & EDGE_ABNORMAL)
        {
          /* If there is an abnormal edge, redirect it anyway, and
             move the labels to the new block to make it legal.  */
          s = redirect_edge_succ_nodup (e, dest);
        }
      else
        s = redirect_edge_and_branch (e, dest);

      if (s == e)
        {
          /* Create arguments for the phi nodes, since the edge was not
             here before.  */
          for (gsi = gsi_start_phis (dest);
               !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gimple phi = gsi_stmt (gsi);
              source_location l = gimple_phi_arg_location_from_edge (phi, succ);
              tree def = gimple_phi_arg_def (phi, succ->dest_idx);
              add_phi_arg (phi, unshare_expr (def), s, l);
            }
        }
    }

  /* Move nonlocal labels and computed goto targets as well as user
     defined labels and labels with an EH landing pad number to the
     new block, so that the redirection of the abnormal edges works,
     jump targets end up in a sane place and debug information for
     labels is retained.  */
  gsi_to = gsi_start_bb (dest);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
    {
      tree decl;
      label = gsi_stmt (gsi);
      if (is_gimple_debug (label))
        break;
      decl = gimple_label_label (label);
      if (EH_LANDING_PAD_NR (decl) != 0
          || DECL_NONLOCAL (decl)
          || FORCED_LABEL (decl)
          || !DECL_ARTIFICIAL (decl))
        {
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, label, GSI_SAME_STMT);
        }
      else
        gsi_next (&gsi);
    }

  /* Move debug statements if the destination has a single predecessor.  */
  if (can_move_debug_stmts)
    {
      gsi_to = gsi_after_labels (dest);
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); )
        {
          gimple debug = gsi_stmt (gsi);
          if (!is_gimple_debug (debug))
            break;
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, debug, GSI_SAME_STMT);
        }
    }

  bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);

  /* Update the dominators.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      basic_block dom, dombb, domdest;

      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
      if (domdest == bb)
        {
          /* Shortcut to avoid calling (relatively expensive)
             nearest_common_dominator unless necessary.  */
          dom = dombb;
        }
      else
        dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

      set_immediate_dominator (CDI_DOMINATORS, dest, dom);
    }

  /* And kill the forwarder block.  */
  delete_basic_block (bb);

  return true;
}
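/* Roughly (block numbers purely illustrative), a CFG of the shape

     bb 2 --> bb 4 --> bb 5        where bb 4 is an empty forwarder
     bb 3 --> bb 4

   becomes

     bb 2 --> bb 5
     bb 3 --> bb 5

   with bb 4 deleted, PHI arguments in bb 5 copied over from the old bb 4
   edge where needed, and the immediate dominator of bb 5 recomputed when
   dominance information is up to date.  */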
/* STMT is a call that has been discovered noreturn.  Fixup the CFG
   and remove LHS.  Return true if something changed.  */

bool
fixup_noreturn_call (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  bool changed = false;

  if (gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    return false;

  /* First split basic block if stmt is not last.  */
  if (stmt != gsi_stmt (gsi_last_bb (bb)))
    split_block (bb, stmt);

  changed |= remove_fallthru_edge (bb->succs);

  /* If there is an LHS, remove it.  */
  if (gimple_call_lhs (stmt))
    {
      tree op = gimple_call_lhs (stmt);
      gimple_call_set_lhs (stmt, NULL_TREE);

      /* We need to remove the SSA name to avoid checking errors.
         All uses are dominated by the noreturn call and thus will
         be removed afterwards.
         We proactively remove affected non-PHI statements to avoid
         fixup_cfg from trying to update them and crashing.  */
      if (TREE_CODE (op) == SSA_NAME)
        {
          use_operand_p use_p;
          imm_use_iterator iter;
          gimple use_stmt;
          bitmap_iterator bi;
          unsigned int bb_index;

          bitmap blocks = BITMAP_ALLOC (NULL);

          FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
            {
              if (gimple_code (use_stmt) != GIMPLE_PHI)
                bitmap_set_bit (blocks, gimple_bb (use_stmt)->index);
              else
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, error_mark_node);
            }
          EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
            delete_basic_block (BASIC_BLOCK (bb_index));
          BITMAP_FREE (blocks);
          release_ssa_name (op);
        }
      update_stmt (stmt);
      changed = true;
    }
  /* Similarly remove VDEF if there is any.  */
  else if (gimple_vdef (stmt))
    update_stmt (stmt);

  return changed;
}
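/* For example (function and SSA names purely illustrative), if
     res_5 = call_something ();
   is later discovered to be a noreturn call, the block is split after the
   call, the now-dead fallthru edge is removed, the LHS is dropped so the
   statement becomes  call_something ();  and res_5 is released, with any
   non-PHI users deleted as unreachable.  */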
/* Split basic blocks at calls in the middle of a basic block that are now
   known not to return, and remove the unreachable code.  */

static bool
split_bbs_on_noreturn_calls (void)
{
  bool changed = false;
  gimple stmt;
  basic_block bb;

  /* Detect cases where a mid-block call is now known not to return.  */
  if (cfun->gimple_df)
    while (vec_safe_length (MODIFIED_NORETURN_CALLS (cfun)))
      {
        stmt = MODIFIED_NORETURN_CALLS (cfun)->pop ();
        bb = gimple_bb (stmt);
        /* BB might be deleted at this point, so verify first
           BB is present in the cfg.  */
        if (bb == NULL
            || bb->index < NUM_FIXED_BLOCKS
            || bb->index >= last_basic_block
            || BASIC_BLOCK (bb->index) != bb
            || !gimple_call_noreturn_p (stmt))
          continue;

        changed |= fixup_noreturn_call (stmt);
      }

  return changed;
}
/* Tries to clean up the CFG in basic block BB.  Returns true if anything
   changes.  */

static bool
cleanup_tree_cfg_bb (basic_block bb)
{
  bool retval = cleanup_control_flow_bb (bb);

  if (tree_forwarder_block_p (bb, false)
      && remove_forwarder_block (bb))
    return true;

  /* Merging the blocks may create new opportunities for folding
     conditional branches (due to the elimination of single-valued PHI
     nodes).  */
  if (single_succ_p (bb)
      && can_merge_blocks_p (bb, single_succ (bb)))
    {
      merge_blocks (bb, single_succ (bb));
      return true;
    }

  return retval;
}
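/* For instance (block numbers purely illustrative), once a forwarder bb 4 is
   gone, bb 3 may be left with a single mergeable successor bb 5; the
   single-valued PHI nodes eliminated by that merge may in turn expose
   constants to the branch folding done by cleanup_control_flow_bb on a
   later iteration.  */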
/* Iterate the cfg cleanups while anything changes.  */

static bool
cleanup_tree_cfg_1 (void)
{
  bool retval = false;
  basic_block bb;
  unsigned i, n;

  retval |= split_bbs_on_noreturn_calls ();

  /* Prepare the worklists of altered blocks.  */
  cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL);

  /* During forwarder block cleanup, we may redirect edges out of
     SWITCH_EXPRs, which can get expensive.  So we want to enable
     recording of edge to CASE_LABEL_EXPR.  */
  start_recording_case_labels ();

  /* Start by iterating over all basic blocks.  We cannot use FOR_EACH_BB,
     since the basic blocks may get removed.  */
  n = last_basic_block;
  for (i = NUM_FIXED_BLOCKS; i < n; i++)
    {
      bb = BASIC_BLOCK (i);
      if (bb)
        retval |= cleanup_tree_cfg_bb (bb);
    }

  /* Now process the altered blocks, as long as any are available.  */
  while (!bitmap_empty_p (cfgcleanup_altered_bbs))
    {
      i = bitmap_first_set_bit (cfgcleanup_altered_bbs);
      bitmap_clear_bit (cfgcleanup_altered_bbs, i);
      if (i < NUM_FIXED_BLOCKS)
        continue;

      bb = BASIC_BLOCK (i);
      if (!bb)
        continue;

      retval |= cleanup_tree_cfg_bb (bb);

      /* Rerun split_bbs_on_noreturn_calls, in case we have altered any
         noreturn calls.  */
      retval |= split_bbs_on_noreturn_calls ();
    }

  end_recording_case_labels ();
  BITMAP_FREE (cfgcleanup_altered_bbs);
  return retval;
}
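/* Note that a block recorded in CFGCLEANUP_ALTERED_BBS may already have been
   deleted or merged away by the time its index is popped from the worklist,
   which is why the drain loop above re-validates the index and the block
   before cleaning it up again.  */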
/* Remove unreachable blocks and do other miscellaneous cleanup work.
   Return true if the flowgraph was modified, false otherwise.  */

static bool
cleanup_tree_cfg_noloop (void)
{
  bool changed;

  timevar_push (TV_TREE_CLEANUP_CFG);

  /* Iterate until there are no more cleanups left to do.  If any
     iteration changed the flowgraph, set CHANGED to true.

     If dominance information is available, there cannot be any unreachable
     blocks.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      changed = delete_unreachable_blocks ();
      calculate_dominance_info (CDI_DOMINATORS);
    }
  else
    {
#ifdef ENABLE_CHECKING
      verify_dominators (CDI_DOMINATORS);
#endif
      changed = false;
    }

  changed |= cleanup_tree_cfg_1 ();

  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  compact_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  timevar_pop (TV_TREE_CLEANUP_CFG);

  if (changed && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return changed;
}
/* Repairs loop structures.  */

static void
repair_loop_structures (void)
{
  bitmap changed_bbs;
  unsigned n_new_loops;

  calculate_dominance_info (CDI_DOMINATORS);

  timevar_push (TV_REPAIR_LOOPS);
  changed_bbs = BITMAP_ALLOC (NULL);
  n_new_loops = fix_loop_structure (changed_bbs);

  /* This usually does nothing.  But sometimes parts of cfg that originally
     were inside a loop get out of it due to edge removal (since they
     become unreachable by back edges from the latch).  Also a former
     irreducible loop can become reducible - in this case force a full
     rewrite into loop-closed SSA form.  */
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    rewrite_into_loop_closed_ssa (n_new_loops ? NULL : changed_bbs,
                                  TODO_update_ssa);

  BITMAP_FREE (changed_bbs);

#ifdef ENABLE_CHECKING
  verify_loop_structure ();
#endif
  scev_reset ();

  timevar_pop (TV_REPAIR_LOOPS);
}
/* Clean up the CFG and repair loop structures.  */

bool
cleanup_tree_cfg (void)
{
  bool changed = cleanup_tree_cfg_noloop ();

  if (current_loops != NULL
      && loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    repair_loop_structures ();

  return changed;
}
/* Tries to merge the PHI nodes at BB into those at BB's sole successor.
   Returns true if successful.  */

static bool
remove_forwarder_block_with_phi (basic_block bb)
{
  edge succ = single_succ_edge (bb);
  basic_block dest = succ->dest;
  gimple label;
  basic_block dombb, domdest, dom;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label, do not
     merge it.  */
  label = first_stmt (dest);
  if (label
      && gimple_code (label) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (label)))
    return false;

  /* Redirect each incoming edge to BB to DEST.  */
  while (EDGE_COUNT (bb->preds) > 0)
    {
      edge e = EDGE_PRED (bb, 0), s;
      gimple_stmt_iterator gsi;

      s = find_edge (e->src, dest);
      if (s)
        {
          /* We already have an edge S from E->src to DEST.  If S and
             E->dest's sole successor edge have the same PHI arguments
             at DEST, redirect S to DEST.  */
          if (phi_alternatives_equal (dest, s, succ))
            {
              e = redirect_edge_and_branch (e, dest);
              redirect_edge_var_map_clear (e);
              continue;
            }

          /* PHI arguments are different.  Create a forwarder block by
             splitting E so that we can merge PHI arguments on E to
             DEST.  */
          e = single_succ_edge (split_edge (e));
        }

      s = redirect_edge_and_branch (e, dest);

      /* redirect_edge_and_branch must not create a new edge.  */
      gcc_assert (s == e);

      /* Add to the PHI nodes at DEST each PHI argument removed at the
         destination of E.  */
      for (gsi = gsi_start_phis (dest);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple phi = gsi_stmt (gsi);
          tree def = gimple_phi_arg_def (phi, succ->dest_idx);
          source_location locus = gimple_phi_arg_location_from_edge (phi, succ);

          if (TREE_CODE (def) == SSA_NAME)
            {
              edge_var_map_vector *head;
              edge_var_map *vm;
              size_t i;

              /* If DEF is one of the results of PHI nodes removed during
                 redirection, replace it with the PHI argument that used
                 to be on E.  */
              head = redirect_edge_var_map_vector (e);
              FOR_EACH_VEC_SAFE_ELT (head, i, vm)
                {
                  tree old_arg = redirect_edge_var_map_result (vm);
                  tree new_arg = redirect_edge_var_map_def (vm);

                  if (def == old_arg)
                    {
                      def = new_arg;
                      locus = redirect_edge_var_map_location (vm);
                      break;
                    }
                }
            }

          add_phi_arg (phi, def, s, locus);
        }

      redirect_edge_var_map_clear (e);
    }

  /* Update the dominators.  */
  dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
  domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
  if (domdest == bb)
    {
      /* Shortcut to avoid calling (relatively expensive)
         nearest_common_dominator unless necessary.  */
      dom = dombb;
    }
  else
    dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

  set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Remove BB since all of BB's incoming edges have been redirected
     to DEST.  */
  delete_basic_block (bb);

  return true;
}
/* This pass merges PHI nodes if one feeds into another.  For example,
   suppose we have the following (the first PHI is in basic block 9, the
   second in its single successor):

     # tem_6 = PHI <tem_17(8), tem_23(7)>;

     # tem_3 = PHI <tem_6(9), tem_2(5)>;

   Then we merge the first PHI node into the second one like so:

     # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
*/

static unsigned int
merge_phi_nodes (void)
{
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks);
  basic_block *current = worklist;
  basic_block bb;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Find all PHI nodes that we may be able to merge.  */
  FOR_EACH_BB (bb)
    {
      basic_block dest;

      /* Look for a forwarder block with PHI nodes.  */
      if (!tree_forwarder_block_p (bb, true))
        continue;

      dest = single_succ (bb);

      /* We have to feed into another basic block with PHI
         nodes.  */
      if (gimple_seq_empty_p (phi_nodes (dest))
          /* We don't want to deal with a basic block with
             abnormal edges.  */
          || bb_has_abnormal_pred (bb))
        continue;

      if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
        {
          /* If BB does not dominate DEST, then the PHI nodes at
             DEST must be the only users of the results of the PHI
             nodes at BB.  */
          *current++ = bb;
        }
      else
        {
          gimple_stmt_iterator gsi;
          unsigned int dest_idx = single_succ_edge (bb)->dest_idx;

          /* BB dominates DEST.  There may be many users of the PHI
             nodes in BB.  However, there is still a trivial case we
             can handle.  If the result of every PHI in BB is used
             only by a PHI in DEST, then we can trivially merge the
             PHI nodes from BB into DEST.  */
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gimple phi = gsi_stmt (gsi);
              tree result = gimple_phi_result (phi);
              use_operand_p imm_use;
              gimple use_stmt;

              /* If the PHI's result is never used, then we can just
                 ignore it.  */
              if (has_zero_uses (result))
                continue;

              /* Get the single use of the result of this PHI node.  */
              if (!single_imm_use (result, &imm_use, &use_stmt)
                  || gimple_code (use_stmt) != GIMPLE_PHI
                  || gimple_bb (use_stmt) != dest
                  || gimple_phi_arg_def (use_stmt, dest_idx) != result)
                break;
            }

          /* If the loop above iterated through all the PHI nodes
             in BB, then we can merge the PHIs from BB into DEST.  */
          if (gsi_end_p (gsi))
            *current++ = bb;
        }
    }

  /* Now let's drain WORKLIST.  */
  bool changed = false;
  while (current != worklist)
    {
      bb = *--current;
      changed |= remove_forwarder_block_with_phi (bb);
    }
  free (worklist);

  /* Removing forwarder blocks can cause formerly irreducible loops
     to become reducible if we merged two entry blocks.  */
  if (changed
      && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return 0;
}
static bool
gate_merge_phi (void)
{
  return 1;
}

namespace {

const pass_data pass_data_merge_phi =
{
  GIMPLE_PASS, /* type */
  "mergephi", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_MERGE_PHI, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_merge_phi : public gimple_opt_pass
{
public:
  pass_merge_phi (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_merge_phi, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_merge_phi (m_ctxt); }
  bool gate () { return gate_merge_phi (); }
  unsigned int execute () { return merge_phi_nodes (); }

}; // class pass_merge_phi

} // anon namespace

gimple_opt_pass *
make_pass_merge_phi (gcc::context *ctxt)
{
  return new pass_merge_phi (ctxt);
}
/* Pass: cleanup the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  */

static unsigned int
execute_cleanup_cfg_post_optimizing (void)
{
  unsigned int todo = 0;
  if (cleanup_tree_cfg ())
    todo |= TODO_update_ssa;
  maybe_remove_unreachable_handlers ();
  cleanup_dead_labels ();
  group_case_labels ();
  if ((flag_compare_debug_opt || flag_compare_debug)
      && flag_dump_final_insns)
    {
      FILE *final_output = fopen (flag_dump_final_insns, "a");

      if (!final_output)
        {
          error ("could not open final insn dump file %qs: %m",
                 flag_dump_final_insns);
          flag_dump_final_insns = NULL;
        }
      else
        {
          int save_unnumbered = flag_dump_unnumbered;
          int save_noaddr = flag_dump_noaddr;

          flag_dump_noaddr = flag_dump_unnumbered = 1;
          fprintf (final_output, "\n");
          dump_enumerated_decls (final_output, dump_flags | TDF_NOUID);
          flag_dump_noaddr = save_noaddr;
          flag_dump_unnumbered = save_unnumbered;
          if (fclose (final_output))
            {
              error ("could not close final insn dump file %qs: %m",
                     flag_dump_final_insns);
              flag_dump_final_insns = NULL;
            }
        }
    }
  return todo;
}
namespace {

const pass_data pass_data_cleanup_cfg_post_optimizing =
{
  GIMPLE_PASS, /* type */
  "optimized", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CLEANUP_CFG, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_remove_unused_locals, /* todo_flags_finish */
};

class pass_cleanup_cfg_post_optimizing : public gimple_opt_pass
{
public:
  pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cleanup_cfg_post_optimizing, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () {
    return execute_cleanup_cfg_post_optimizing ();
  }

}; // class pass_cleanup_cfg_post_optimizing

} // anon namespace

gimple_opt_pass *
make_pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
{
  return new pass_cleanup_cfg_post_optimizing (ctxt);
}