/* CFG cleanup for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "except.h"
#include "cfgloop.h"
#include "hashtab.h"
#include "tree-ssa-propagate.h"
#include "tree-scalar-evolution.h"

/* The set of blocks in which at least one of the following changes
   happened:
   -- the statement at the end of the block was changed
   -- the block was newly created
   -- the set of the predecessors of the block changed
   -- the set of the successors of the block changed
   ??? Maybe we could track these changes separately, since they determine
       what cleanups it makes sense to try on the block.  */
bitmap cfgcleanup_altered_bbs;

/* Remove any fallthru edge from EV.  Return true if an edge was removed.  */

static bool
remove_fallthru_edge (vec<edge, va_gc> *ev)
{
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, ev)
    if ((e->flags & EDGE_FALLTHRU) != 0)
      {
        remove_edge_and_dominated_blocks (e);
        return true;
      }
  return false;
}

/* Disconnect an unreachable block in the control expression starting
   at block BB.  */

static bool
cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
{
  edge taken_edge;
  bool retval = false;
  gimple stmt = gsi_stmt (gsi);
  tree val;

  if (!single_succ_p (bb))
    {
      edge e;
      edge_iterator ei;
      bool warned;
      location_t loc;

      fold_defer_overflow_warnings ();
      loc = gimple_location (stmt);
      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          val = fold_binary_loc (loc, gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));
          break;

        case GIMPLE_SWITCH:
          val = gimple_switch_index (stmt);
          break;

        default:
          val = NULL_TREE;
        }
      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
        {
          fold_undefer_and_ignore_overflow_warnings ();
          return false;
        }

      /* Remove all the edges except the one that is always executed.  */
      warned = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e != taken_edge)
            {
              if (!warned)
                {
                  fold_undefer_overflow_warnings
                    (true, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  warned = true;
                }

              taken_edge->probability += e->probability;
              taken_edge->count += e->count;
              remove_edge_and_dominated_blocks (e);
              retval = true;
            }
          else
            ei_next (&ei);
        }
      if (!warned)
        fold_undefer_and_ignore_overflow_warnings ();
      if (taken_edge->probability > REG_BR_PROB_BASE)
        taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = single_succ_edge (bb);

  bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
  gsi_remove (&gsi, true);
  taken_edge->flags = EDGE_FALLTHRU;

  return retval;
}
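
/* For illustration: if BB ends in

       if (0 != 0) goto <bb 4>; else goto <bb 5>;

   the condition folds to a constant, find_taken_edge returns the edge
   to <bb 5>, the edge to <bb 4> is removed together with the blocks it
   dominates, and the GIMPLE_COND itself is deleted in favor of a
   fallthru edge.  (Schematic example, not taken from a real dump.)  */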

/* Try to remove superfluous control structures in basic block BB.  Returns
   true if anything changes.  */

static bool
cleanup_control_flow_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  bool retval = false;
  gimple stmt;

  /* If the last statement of the block could throw and now cannot,
     we need to prune cfg.  */
  retval |= gimple_purge_dead_eh_edges (bb);

  gsi = gsi_last_bb (bb);
  if (gsi_end_p (gsi))
    return retval;

  stmt = gsi_stmt (gsi);

  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_SWITCH)
    retval |= cleanup_control_expr_graph (bb, gsi);
  else if (gimple_code (stmt) == GIMPLE_GOTO
           && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
               == LABEL_DECL))
    {
      /* If we had a computed goto which has a compile-time determinable
         destination, then we can eliminate the goto.  */
      edge e;
      tree label;
      edge_iterator ei;
      basic_block target_block;

      /* First look at all the outgoing edges.  Delete any outgoing
         edges which do not go to the right block.  For the one
         edge which goes to the right block, fix up its flags.  */
      label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
      target_block = label_to_block (label);
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->dest != target_block)
            remove_edge_and_dominated_blocks (e);
          else
            {
              /* Turn off the EDGE_ABNORMAL flag.  */
              e->flags &= ~EDGE_ABNORMAL;

              /* And set EDGE_FALLTHRU.  */
              e->flags |= EDGE_FALLTHRU;
              ei_next (&ei);
            }
        }

      bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
      bitmap_set_bit (cfgcleanup_altered_bbs, target_block->index);

      /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
         relevant information we need.  */
      gsi_remove (&gsi, true);
      retval = true;
    }

  /* Check for indirect calls that have been turned into
     noreturn calls.  */
  else if (is_gimple_call (stmt)
           && gimple_call_noreturn_p (stmt)
           && remove_fallthru_edge (bb->succs))
    retval = true;

  return retval;
}
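
/* For illustration: a computed goto whose destination turned out to be
   a single known label, e.g.

       goto &L;

   keeps only the outgoing edge to L's block; that edge loses
   EDGE_ABNORMAL, gains EDGE_FALLTHRU, and the goto statement itself
   is deleted.  (Schematic example.)  */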

/* Return true if basic block BB does nothing except pass control
   flow to another block and that we can safely insert a label at
   the start of the successor block.

   As a precondition, we require that BB be not equal to
   ENTRY_BLOCK_PTR.  */

static bool
tree_forwarder_block_p (basic_block bb, bool phi_wanted)
{
  gimple_stmt_iterator gsi;
  location_t locus;

  /* BB must have a single outgoing edge.  */
  if (single_succ_p (bb) != 1
      /* If PHI_WANTED is false, BB must not have any PHI nodes.
         Otherwise, BB must have PHI nodes.  */
      || gimple_seq_empty_p (phi_nodes (bb)) == phi_wanted
      /* BB may not be a predecessor of EXIT_BLOCK_PTR.  */
      || single_succ (bb) == EXIT_BLOCK_PTR
      /* Nor should this be an infinite loop.  */
      || single_succ (bb) == bb
      /* BB may not have an abnormal outgoing edge.  */
      || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    return false;

  gcc_checking_assert (bb != ENTRY_BLOCK_PTR);

  locus = single_succ_edge (bb)->goto_locus;

  /* There should not be an edge coming from entry, or an EH edge.  */
  {
    edge_iterator ei;
    edge e;

    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src == ENTRY_BLOCK_PTR || (e->flags & EDGE_EH))
        return false;
      /* If goto_locus of any of the edges differs, prevent removing
         the forwarder block for -O0.  */
      else if (optimize == 0 && e->goto_locus != locus)
        return false;
  }

  /* Now walk through the statements backward.  We can ignore labels,
     anything else means this is not a forwarder block.  */
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      switch (gimple_code (stmt))
        {
        case GIMPLE_LABEL:
          if (DECL_NONLOCAL (gimple_label_label (stmt)))
            return false;
          if (optimize == 0 && gimple_location (stmt) != locus)
            return false;
          break;

          /* ??? For now, hope there's a corresponding debug
             assignment at the destination.  */
        case GIMPLE_DEBUG:
          break;

        default:
          return false;
        }
    }

  if (current_loops)
    {
      basic_block dest;
      /* Protect loop latches, headers and preheaders.  */
      if (bb->loop_father->header == bb)
        return false;
      dest = EDGE_SUCC (bb, 0)->dest;

      if (dest->loop_father->header == dest)
        return false;
    }
  return true;
}
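
/* A block accepted by the predicate above looks like, e.g.,

       <bb 5>:
       goto <bb 7>;

   i.e. it contains nothing but ignorable labels and debug stmts and
   has a single normal outgoing edge.  (Schematic example.)  */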

/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
   those alternatives are equal in each of the PHI nodes, then return
   true, else return false.  */

static bool
phi_alternatives_equal (basic_block dest, edge e1, edge e2)
{
  int n1 = e1->dest_idx;
  int n2 = e2->dest_idx;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      tree val1 = gimple_phi_arg_def (phi, n1);
      tree val2 = gimple_phi_arg_def (phi, n2);

      gcc_assert (val1 != NULL_TREE);
      gcc_assert (val2 != NULL_TREE);

      if (!operand_equal_for_phi_arg_p (val1, val2))
        return false;
    }

  return true;
}
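
/* E.g. with  # x_1 = PHI <a_2(E1), a_2(E2), b_3(E3)>  the E1/E2
   alternatives compare equal, while E1/E3 would not.  (Schematic
   example.)  */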

/* Removes forwarder block BB.  Returns false if this failed.  */

static bool
remove_forwarder_block (basic_block bb)
{
  edge succ = single_succ_edge (bb), e, s;
  basic_block dest = succ->dest;
  gimple label;
  edge_iterator ei;
  gimple_stmt_iterator gsi, gsi_to;
  bool can_move_debug_stmts;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label or is a
     EH landing pad, do not merge it.  */
  label = first_stmt (dest);
  if (label
      && gimple_code (label) == GIMPLE_LABEL
      && (DECL_NONLOCAL (gimple_label_label (label))
          || EH_LANDING_PAD_NR (gimple_label_label (label)) != 0))
    return false;

  /* If there is an abnormal edge to basic block BB, but not into
     dest, problems might occur during removal of the phi node at out
     of ssa due to overlapping live ranges of registers.

     If there is an abnormal edge in DEST, the problems would occur
     anyway since cleanup_dead_labels would then merge the labels for
     two different eh regions, and rest of exception handling code
     does not like it.

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.  */
  if (bb_has_abnormal_pred (bb)
      && (bb_has_abnormal_pred (dest)
          || !gimple_seq_empty_p (phi_nodes (dest))))
    return false;

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.  */
  if (!gimple_seq_empty_p (phi_nodes (dest)))
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          s = find_edge (e->src, dest);
          if (!s)
            continue;

          if (!phi_alternatives_equal (dest, succ, s))
            return false;
        }
    }

  can_move_debug_stmts = MAY_HAVE_DEBUG_STMTS && single_pred_p (dest);

  /* Redirect the edges.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);

      if (e->flags & EDGE_ABNORMAL)
        {
          /* If there is an abnormal edge, redirect it anyway, and
             move the labels to the new block to make it legal.  */
          s = redirect_edge_succ_nodup (e, dest);
        }
      else
        s = redirect_edge_and_branch (e, dest);

      if (s == e)
        {
          /* Create arguments for the phi nodes, since the edge was not
             here before.  */
          for (gsi = gsi_start_phis (dest);
               !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gimple phi = gsi_stmt (gsi);
              source_location l = gimple_phi_arg_location_from_edge (phi, succ);
              tree def = gimple_phi_arg_def (phi, succ->dest_idx);
              add_phi_arg (phi, unshare_expr (def), s, l);
            }
        }
    }

  /* Move nonlocal labels and computed goto targets as well as user
     defined labels and labels with an EH landing pad number to the
     new block, so that the redirection of the abnormal edges works,
     jump targets end up in a sane place and debug information for
     labels is retained.  */
  gsi_to = gsi_start_bb (dest);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
    {
      tree decl;
      label = gsi_stmt (gsi);
      if (is_gimple_debug (label))
        break;
      decl = gimple_label_label (label);
      if (EH_LANDING_PAD_NR (decl) != 0
          || DECL_NONLOCAL (decl)
          || FORCED_LABEL (decl)
          || !DECL_ARTIFICIAL (decl))
        {
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, label, GSI_SAME_STMT);
        }
      else
        gsi_next (&gsi);
    }

  /* Move debug statements if the destination has a single predecessor.  */
  if (can_move_debug_stmts)
    {
      gsi_to = gsi_after_labels (dest);
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); )
        {
          gimple debug = gsi_stmt (gsi);
          if (!is_gimple_debug (debug))
            break;
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, debug, GSI_SAME_STMT);
        }
    }

  bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);

  /* Update the dominators.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      basic_block dom, dombb, domdest;

      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
      if (domdest == bb)
        {
          /* Shortcut to avoid calling (relatively expensive)
             nearest_common_dominator unless necessary.  */
          dom = dombb;
        }
      else
        dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

      set_immediate_dominator (CDI_DOMINATORS, dest, dom);
    }

  /* And kill the forwarder block.  */
  delete_basic_block (bb);

  return true;
}
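
/* Schematically, removing forwarder BB redirects its predecessors
   straight to DEST:

       A   B               A   B
        \ /                 \ /
        BB       --->       DEST
         |
       DEST

   and DEST's new immediate dominator is the nearest common dominator
   of its old one and BB's.  */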

/* STMT is a call that has been discovered noreturn.  Fixup the CFG
   and remove LHS.  Return true if something changed.  */

bool
fixup_noreturn_call (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  bool changed = false;

  if (gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    return false;

  /* First split basic block if stmt is not last.  */
  if (stmt != gsi_stmt (gsi_last_bb (bb)))
    split_block (bb, stmt);

  changed |= remove_fallthru_edge (bb->succs);

  /* If there is LHS, remove it.  */
  if (gimple_call_lhs (stmt))
    {
      tree op = gimple_call_lhs (stmt);
      gimple_call_set_lhs (stmt, NULL_TREE);

      /* We need to remove SSA name to avoid checking errors.
         All uses are dominated by the noreturn and thus will
         be removed afterwards.
         We proactively remove affected non-PHI statements to avoid
         fixup_cfg from trying to update them and crashing.  */
      if (TREE_CODE (op) == SSA_NAME)
        {
          use_operand_p use_p;
          imm_use_iterator iter;
          gimple use_stmt;
          bitmap_iterator bi;
          unsigned int bb_index;

          bitmap blocks = BITMAP_ALLOC (NULL);

          FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
            {
              if (gimple_code (use_stmt) != GIMPLE_PHI)
                bitmap_set_bit (blocks, gimple_bb (use_stmt)->index);
              else
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, error_mark_node);
            }
          EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
            delete_basic_block (BASIC_BLOCK (bb_index));
          BITMAP_FREE (blocks);
          release_ssa_name (op);
        }
      update_stmt (stmt);
      changed = true;
    }
  /* Similarly remove VDEF if there is any.  */
  else if (gimple_vdef (stmt))
    update_stmt (stmt);
  return changed;
}
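
/* For illustration: once  x_1 = foo ();  is discovered noreturn, the
   LHS x_1 is dropped; its non-PHI uses are all dominated by the call,
   hence unreachable, so their blocks are deleted, and PHI uses are
   overwritten with error_mark_node before x_1 is released.
   (Hypothetical statement for exposition.)  */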

/* Split basic blocks on calls in the middle of a basic block that are now
   known not to return, and remove the unreachable code.  */

static bool
split_bbs_on_noreturn_calls (void)
{
  bool changed = false;
  gimple stmt;
  basic_block bb;

  /* Detect cases where a mid-block call is now known not to return.  */
  if (cfun->gimple_df)
    while (vec_safe_length (MODIFIED_NORETURN_CALLS (cfun)))
      {
        stmt = MODIFIED_NORETURN_CALLS (cfun)->pop ();
        bb = gimple_bb (stmt);
        /* BB might be deleted at this point, so verify first
           BB is present in the cfg.  */
        if (bb == NULL
            || bb->index < NUM_FIXED_BLOCKS
            || bb->index >= last_basic_block
            || BASIC_BLOCK (bb->index) != bb
            || !gimple_call_noreturn_p (stmt))
          continue;

        changed |= fixup_noreturn_call (stmt);
      }

  return changed;
}

/* Tries to cleanup cfg in basic block BB.  Returns true if anything
   changes.  */

static bool
cleanup_tree_cfg_bb (basic_block bb)
{
  bool retval = cleanup_control_flow_bb (bb);

  if (tree_forwarder_block_p (bb, false)
      && remove_forwarder_block (bb))
    return true;

  /* Merging the blocks may create new opportunities for folding
     conditional branches (due to the elimination of single-valued PHI
     nodes).  */
  if (single_succ_p (bb)
      && can_merge_blocks_p (bb, single_succ (bb)))
    {
      merge_blocks (bb, single_succ (bb));
      return true;
    }

  return retval;
}

/* Iterate the cfg cleanups, while anything changes.  */

static bool
cleanup_tree_cfg_1 (void)
{
  bool retval = false;
  basic_block bb;
  unsigned i, n;

  retval |= split_bbs_on_noreturn_calls ();

  /* Prepare the worklists of altered blocks.  */
  cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL);

  /* During forwarder block cleanup, we may redirect edges out of
     SWITCH_EXPRs, which can get expensive.  So we want to enable
     recording of edge to CASE_LABEL_EXPR.  */
  start_recording_case_labels ();

  /* Start by iterating over all basic blocks.  We cannot use FOR_EACH_BB,
     since the basic blocks may get removed.  */
  n = last_basic_block;
  for (i = NUM_FIXED_BLOCKS; i < n; i++)
    {
      bb = BASIC_BLOCK (i);
      if (bb)
        retval |= cleanup_tree_cfg_bb (bb);
    }

  /* Now process the altered blocks, as long as any are available.  */
  while (!bitmap_empty_p (cfgcleanup_altered_bbs))
    {
      i = bitmap_first_set_bit (cfgcleanup_altered_bbs);
      bitmap_clear_bit (cfgcleanup_altered_bbs, i);
      if (i < NUM_FIXED_BLOCKS)
        continue;

      bb = BASIC_BLOCK (i);
      if (!bb)
        continue;

      retval |= cleanup_tree_cfg_bb (bb);

      /* Rerun split_bbs_on_noreturn_calls, in case we have altered any noreturn
         calls.  */
      retval |= split_bbs_on_noreturn_calls ();
    }

  end_recording_case_labels ();
  BITMAP_FREE (cfgcleanup_altered_bbs);
  return retval;
}
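
/* Note the worklist discipline above: the cleanups record every block
   whose edges or final statement changed in CFGCLEANUP_ALTERED_BBS, so
   the while loop revisits exactly those blocks instead of rescanning
   the whole function on each iteration.  */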

/* Remove unreachable blocks and other miscellaneous clean up work.
   Return true if the flowgraph was modified, false otherwise.  */

static bool
cleanup_tree_cfg_noloop (void)
{
  bool changed;

  timevar_push (TV_TREE_CLEANUP_CFG);

  /* Iterate until there are no more cleanups left to do.  If any
     iteration changed the flowgraph, set CHANGED to true.

     If dominance information is available, there cannot be any unreachable
     blocks.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      changed = delete_unreachable_blocks ();
      calculate_dominance_info (CDI_DOMINATORS);
    }
  else
    {
#ifdef ENABLE_CHECKING
      verify_dominators (CDI_DOMINATORS);
#endif
      changed = false;
    }

  changed |= cleanup_tree_cfg_1 ();

  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  compact_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  timevar_pop (TV_TREE_CLEANUP_CFG);

  if (changed && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return changed;
}

/* Repairs loop structures.  */

static void
repair_loop_structures (void)
{
  bitmap changed_bbs;
  unsigned n_new_loops;

  calculate_dominance_info (CDI_DOMINATORS);

  timevar_push (TV_REPAIR_LOOPS);
  changed_bbs = BITMAP_ALLOC (NULL);
  n_new_loops = fix_loop_structure (changed_bbs);

  /* This usually does nothing.  But sometimes parts of cfg that originally
     were inside a loop get out of it due to edge removal (since they
     become unreachable by back edges from latch).  Also a former
     irreducible loop can become reducible - in this case force a full
     rewrite into loop-closed SSA form.  */
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    rewrite_into_loop_closed_ssa (n_new_loops ? NULL : changed_bbs,
                                  TODO_update_ssa);

  BITMAP_FREE (changed_bbs);

#ifdef ENABLE_CHECKING
  verify_loop_structure ();
#endif
  scev_reset ();

  timevar_pop (TV_REPAIR_LOOPS);
}

/* Cleanup cfg and repair loop structures.  */

bool
cleanup_tree_cfg (void)
{
  bool changed = cleanup_tree_cfg_noloop ();

  if (current_loops != NULL
      && loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    repair_loop_structures ();

  return changed;
}

/* Merge the PHI nodes at BB into those at BB's sole successor.  */

static void
remove_forwarder_block_with_phi (basic_block bb)
{
  edge succ = single_succ_edge (bb);
  basic_block dest = succ->dest;
  gimple label;
  basic_block dombb, domdest, dom;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return;

  /* If the destination block consists of a nonlocal label, do not
     merge it.  */
  label = first_stmt (dest);
  if (label
      && gimple_code (label) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (label)))
    return;

  /* Redirect each incoming edge to BB to DEST.  */
  while (EDGE_COUNT (bb->preds) > 0)
    {
      edge e = EDGE_PRED (bb, 0), s;
      gimple_stmt_iterator gsi;

      s = find_edge (e->src, dest);
      if (s)
        {
          /* We already have an edge S from E->src to DEST.  If S and
             E->dest's sole successor edge have the same PHI arguments
             at DEST, redirect S to DEST.  */
          if (phi_alternatives_equal (dest, s, succ))
            {
              e = redirect_edge_and_branch (e, dest);
              redirect_edge_var_map_clear (e);
              continue;
            }

          /* PHI arguments are different.  Create a forwarder block by
             splitting E so that we can merge PHI arguments on E to
             DEST.  */
          e = single_succ_edge (split_edge (e));
        }

      s = redirect_edge_and_branch (e, dest);

      /* redirect_edge_and_branch must not create a new edge.  */
      gcc_assert (s == e);

      /* Add to the PHI nodes at DEST each PHI argument removed at the
         destination of E.  */
      for (gsi = gsi_start_phis (dest);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple phi = gsi_stmt (gsi);
          tree def = gimple_phi_arg_def (phi, succ->dest_idx);
          source_location locus = gimple_phi_arg_location_from_edge (phi, succ);

          if (TREE_CODE (def) == SSA_NAME)
            {
              edge_var_map_vector *head;
              edge_var_map *vm;
              size_t i;

              /* If DEF is one of the results of PHI nodes removed during
                 redirection, replace it with the PHI argument that used
                 to be on E.  */
              head = redirect_edge_var_map_vector (e);
              FOR_EACH_VEC_SAFE_ELT (head, i, vm)
                {
                  tree old_arg = redirect_edge_var_map_result (vm);
                  tree new_arg = redirect_edge_var_map_def (vm);

                  if (def == old_arg)
                    {
                      def = new_arg;
                      locus = redirect_edge_var_map_location (vm);
                      break;
                    }
                }
            }

          add_phi_arg (phi, def, s, locus);
        }

      redirect_edge_var_map_clear (e);
    }

  /* Update the dominators.  */
  dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
  domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
  if (domdest == bb)
    {
      /* Shortcut to avoid calling (relatively expensive)
         nearest_common_dominator unless necessary.  */
      dom = dombb;
    }
  else
    dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

  set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Remove BB since all of BB's incoming edges have been redirected
     to DEST.  */
  delete_basic_block (bb);
}

/* This pass merges PHI nodes if one feeds into another.  For example,
   suppose we have the following:

  goto <bb 9> (<L9>);

<L8>:;
  tem_17 = foo ();

  # tem_6 = PHI <tem_17(8), tem_23(7)>;
<L9>:;

  # tem_3 = PHI <tem_6(9), tem_2(5)>;
<L10>:;

  Then we merge the first PHI node into the second one like so:

  goto <bb 9> (<L10>);

<L8>:;
  tem_17 = foo ();

  # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
<L10>:;
*/

static unsigned int
merge_phi_nodes (void)
{
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks);
  basic_block *current = worklist;
  basic_block bb;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Find all PHI nodes that we may be able to merge.  */
  FOR_EACH_BB (bb)
    {
      basic_block dest;

      /* Look for a forwarder block with PHI nodes.  */
      if (!tree_forwarder_block_p (bb, true))
        continue;

      dest = single_succ (bb);

      /* We have to feed into another basic block with PHI
         nodes.  */
      if (gimple_seq_empty_p (phi_nodes (dest))
          /* We don't want to deal with a basic block with
             abnormal edges.  */
          || bb_has_abnormal_pred (bb))
        continue;

      if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
        {
          /* If BB does not dominate DEST, then the PHI nodes at
             DEST must be the only users of the results of the PHI
             nodes at BB.  */
          *current++ = bb;
        }
      else
        {
          gimple_stmt_iterator gsi;
          unsigned int dest_idx = single_succ_edge (bb)->dest_idx;

          /* BB dominates DEST.  There may be many users of the PHI
             nodes in BB.  However, there is still a trivial case we
             can handle.  If the result of every PHI in BB is used
             only by a PHI in DEST, then we can trivially merge the
             PHI nodes from BB into DEST.  */
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gimple phi = gsi_stmt (gsi);
              tree result = gimple_phi_result (phi);
              use_operand_p imm_use;
              gimple use_stmt;

              /* If the PHI's result is never used, then we can just
                 ignore it.  */
              if (has_zero_uses (result))
                continue;

              /* Get the single use of the result of this PHI node.  */
              if (!single_imm_use (result, &imm_use, &use_stmt)
                  || gimple_code (use_stmt) != GIMPLE_PHI
                  || gimple_bb (use_stmt) != dest
                  || gimple_phi_arg_def (use_stmt, dest_idx) != result)
                break;
            }

          /* If the loop above iterated through all the PHI nodes
             in BB, then we can merge the PHIs from BB into DEST.  */
          if (gsi_end_p (gsi))
            *current++ = bb;
        }
    }

  /* Now let's drain WORKLIST.  */
  while (current != worklist)
    {
      bb = *--current;
      remove_forwarder_block_with_phi (bb);
    }
  free (worklist);

  return 0;
}

static bool
gate_merge_phi (void)
{
  return true;
}

struct gimple_opt_pass pass_merge_phi =
{
 {
  GIMPLE_PASS,
  "mergephi",                   /* name */
  OPTGROUP_NONE,                /* optinfo_flags */
  gate_merge_phi,               /* gate */
  merge_phi_nodes,              /* execute */
  NULL,                         /* sub */
  NULL,                         /* next */
  0,                            /* static_pass_number */
  TV_TREE_MERGE_PHI,            /* tv_id */
  PROP_cfg | PROP_ssa,          /* properties_required */
  0,                            /* properties_provided */
  0,                            /* properties_destroyed */
  0,                            /* todo_flags_start */
  TODO_ggc_collect              /* todo_flags_finish */
 }
};