1 /* Control flow functions for trees.
2 Copyright (C) 2001-2014 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
24 #include "hash-table.h"
27 #include "trans-mem.h"
28 #include "stor-layout.h"
29 #include "print-tree.h"
31 #include "basic-block.h"
34 #include "gimple-pretty-print.h"
35 #include "pointer-set.h"
36 #include "tree-ssa-alias.h"
37 #include "internal-fn.h"
38 #include "gimple-fold.h"
40 #include "gimple-expr.h"
43 #include "gimple-iterator.h"
44 #include "gimplify-me.h"
45 #include "gimple-walk.h"
46 #include "gimple-ssa.h"
49 #include "tree-phinodes.h"
50 #include "ssa-iterators.h"
51 #include "stringpool.h"
52 #include "tree-ssanames.h"
53 #include "tree-ssa-loop-manip.h"
54 #include "tree-ssa-loop-niter.h"
55 #include "tree-into-ssa.h"
59 #include "tree-dump.h"
60 #include "tree-pass.h"
61 #include "diagnostic-core.h"
64 #include "tree-ssa-propagate.h"
65 #include "value-prof.h"
66 #include "tree-inline.h"
68 #include "tree-ssa-live.h"
70 #include "tree-cfgcleanup.h"
72 /* This file contains functions for building the Control Flow Graph (CFG)
73 for a function tree. */
75 /* Local declarations. */
77 /* Initial capacity for the basic block array. */
78 static const int initial_cfg_capacity = 20;
80 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
81 which use a particular edge. The CASE_LABEL_EXPRs are chained together
82 via their CASE_CHAIN field, which we clear after we're done with the
83 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
85 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
86 update the case vector in response to edge redirections.
88 Right now this table is set up and torn down at key points in the
89 compilation process. It would be nice if we could make the table
90 more persistent. The key is getting notification of changes to
91 the CFG (particularly edge removal, creation and redirection). */
93 static struct pointer_map_t *edge_to_cases;
95 /* If we record edge_to_cases, this bitmap will hold indexes
96 of basic blocks that end in a GIMPLE_SWITCH which we touched
97 due to edge manipulations. */
99 static bitmap touched_switch_bbs;
101 /* CFG statistics. */
104 long num_merged_labels;
107 static struct cfg_stats_d cfg_stats;
109 /* Hash table to store last discriminator assigned for each locus. */
110 struct locus_discrim_map
113 /* Different calls belonging to the same source line will be assigned
114 different discriminators. But we want to keep the discriminator of
115 the first call in the same source line to be 0, in order to reduce
116 the .debug_line section size. needs_increment is used for this
117 purpose. It is initialized as false and will be set to true after
118 the first call is seen. */
119 bool needs_increment:1;
120 int discriminator:31;
123 /* Hashtable helpers. */
125 struct locus_discrim_hasher : typed_free_remove <locus_discrim_map>
127 typedef locus_discrim_map value_type;
128 typedef locus_discrim_map compare_type;
129 static inline hashval_t hash (const value_type *);
130 static inline bool equal (const value_type *, const compare_type *);
133 /* Trivial hash function for a location_t. ITEM is a pointer to
134 a hash table entry that maps a location_t to a discriminator. */
137 locus_discrim_hasher::hash (const value_type *item)
139 return LOCATION_LINE (item->locus);
142 /* Equality function for the locus-to-discriminator map. A and B
143 point to the two hash table entries to compare. */
146 locus_discrim_hasher::equal (const value_type *a, const compare_type *b)
148 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
151 static hash_table <locus_discrim_hasher> discriminator_per_locus;
153 /* Basic blocks and flowgraphs. */
154 static void make_blocks (gimple_seq);
157 static void make_edges (void);
158 static void assign_discriminators (void);
159 static void make_cond_expr_edges (basic_block);
160 static void make_gimple_switch_edges (basic_block);
161 static bool make_goto_expr_edges (basic_block);
162 static void make_gimple_asm_edges (basic_block);
163 static edge gimple_redirect_edge_and_branch (edge, basic_block);
164 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
166 /* Various helpers. */
167 static inline bool stmt_starts_bb_p (gimple, gimple);
168 static int gimple_verify_flow_info (void);
169 static void gimple_make_forwarder_block (edge);
170 static gimple first_non_label_stmt (basic_block);
171 static bool verify_gimple_transaction (gimple);
172 static bool call_can_make_abnormal_goto (gimple);
174 /* Flowgraph optimization and cleanup. */
175 static void gimple_merge_blocks (basic_block, basic_block);
176 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
177 static void remove_bb (basic_block);
178 static edge find_taken_edge_computed_goto (basic_block, tree);
179 static edge find_taken_edge_cond_expr (basic_block, tree);
180 static edge find_taken_edge_switch_expr (basic_block, tree);
181 static tree find_case_label_for_value (gimple, tree);
184 init_empty_tree_cfg_for_function (struct function *fn)
186 /* Initialize the basic block array. */
188 profile_status_for_fn (fn) = PROFILE_ABSENT;
189 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
190 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
191 vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
192 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
193 initial_cfg_capacity);
195 /* Build a mapping of labels to their associated blocks. */
196 vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
197 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
198 initial_cfg_capacity);
200 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
201 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
203 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
204 = EXIT_BLOCK_PTR_FOR_FN (fn);
205 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
206 = ENTRY_BLOCK_PTR_FOR_FN (fn);
210 init_empty_tree_cfg (void)
212 init_empty_tree_cfg_for_function (cfun);
215 /*---------------------------------------------------------------------------
217 ---------------------------------------------------------------------------*/
219 /* Entry point to the CFG builder for trees. SEQ is the sequence of
220 statements to be added to the flowgraph. */
223 build_gimple_cfg (gimple_seq seq)
225 /* Register specific gimple functions. */
226 gimple_register_cfg_hooks ();
228 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
230 init_empty_tree_cfg ();
234 /* Make sure there is always at least one block, even if it's empty. */
235 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
236 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
238 /* Adjust the size of the array. */
239 if (basic_block_info_for_fn (cfun)->length ()
240 < (size_t) n_basic_blocks_for_fn (cfun))
241 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
242 n_basic_blocks_for_fn (cfun));
244 /* To speed up statement iterator walks, we first purge dead labels. */
245 cleanup_dead_labels ();
247 /* Group case nodes to reduce the number of edges.
248 We do this after cleaning up dead labels because otherwise we miss
249 a lot of obvious case merging opportunities. */
250 group_case_labels ();
252 /* Create the edges of the flowgraph. */
253 discriminator_per_locus.create (13);
255 assign_discriminators ();
256 cleanup_dead_labels ();
257 discriminator_per_locus.dispose ();
261 /* Search for ANNOTATE call with annot_expr_ivdep_kind; if found, remove
262 it and set loop->safelen to INT_MAX. We assume that the annotation
263 comes immediately before the condition. */
266 replace_loop_annotate ()
270 gimple_stmt_iterator gsi;
273 FOR_EACH_LOOP (loop, 0)
275 gsi = gsi_last_bb (loop->header);
276 stmt = gsi_stmt (gsi);
277 if (stmt && gimple_code (stmt) == GIMPLE_COND)
279 gsi_prev_nondebug (&gsi);
282 stmt = gsi_stmt (gsi);
283 if (gimple_code (stmt) != GIMPLE_CALL)
285 if (!gimple_call_internal_p (stmt)
286 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
288 if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
289 != annot_expr_ivdep_kind)
291 stmt = gimple_build_assign (gimple_call_lhs (stmt),
292 gimple_call_arg (stmt, 0));
293 gsi_replace (&gsi, stmt, true);
294 loop->safelen = INT_MAX;
298 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
299 FOR_EACH_BB_FN (bb, cfun)
301 gsi = gsi_last_bb (bb);
302 stmt = gsi_stmt (gsi);
303 if (stmt && gimple_code (stmt) == GIMPLE_COND)
304 gsi_prev_nondebug (&gsi);
307 stmt = gsi_stmt (gsi);
308 if (gimple_code (stmt) != GIMPLE_CALL)
310 if (!gimple_call_internal_p (stmt)
311 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
313 if ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))
314 != annot_expr_ivdep_kind)
316 warning_at (gimple_location (stmt), 0, "ignoring %<GCC ivdep%> "
318 stmt = gimple_build_assign (gimple_call_lhs (stmt),
319 gimple_call_arg (stmt, 0));
320 gsi_replace (&gsi, stmt, true);
326 execute_build_cfg (void)
328 gimple_seq body = gimple_body (current_function_decl);
330 build_gimple_cfg (body);
331 gimple_set_body (current_function_decl, NULL);
332 if (dump_file && (dump_flags & TDF_DETAILS))
334 fprintf (dump_file, "Scope blocks:\n");
335 dump_scope_blocks (dump_file, dump_flags);
338 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
339 replace_loop_annotate ();
345 const pass_data pass_data_build_cfg =
347 GIMPLE_PASS, /* type */
349 OPTGROUP_NONE, /* optinfo_flags */
350 false, /* has_gate */
351 true, /* has_execute */
352 TV_TREE_CFG, /* tv_id */
353 PROP_gimple_leh, /* properties_required */
354 ( PROP_cfg | PROP_loops ), /* properties_provided */
355 0, /* properties_destroyed */
356 0, /* todo_flags_start */
357 TODO_verify_stmts, /* todo_flags_finish */
360 class pass_build_cfg : public gimple_opt_pass
363 pass_build_cfg (gcc::context *ctxt)
364 : gimple_opt_pass (pass_data_build_cfg, ctxt)
367 /* opt_pass methods: */
368 unsigned int execute () { return execute_build_cfg (); }
370 }; // class pass_build_cfg
375 make_pass_build_cfg (gcc::context *ctxt)
377 return new pass_build_cfg (ctxt);
381 /* Return true if T is a computed goto. */
384 computed_goto_p (gimple t)
386 return (gimple_code (t) == GIMPLE_GOTO
387 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
390 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
391 the other edge points to a bb with just __builtin_unreachable ().
392 I.e. return true for C->M edge in:
400 __builtin_unreachable ();
404 assert_unreachable_fallthru_edge_p (edge e)
406 basic_block pred_bb = e->src;
407 gimple last = last_stmt (pred_bb);
408 if (last && gimple_code (last) == GIMPLE_COND)
410 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
411 if (other_bb == e->dest)
412 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
413 if (EDGE_COUNT (other_bb->succs) == 0)
415 gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
420 stmt = gsi_stmt (gsi);
421 while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
426 stmt = gsi_stmt (gsi);
428 return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
435 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
436 could alter control flow except via eh. We initialize the flag at
437 CFG build time and only ever clear it later. */
440 gimple_call_initialize_ctrl_altering (gimple stmt)
442 int flags = gimple_call_flags (stmt);
444 /* A call alters control flow if it can make an abnormal goto. */
445 if (call_can_make_abnormal_goto (stmt)
446 /* A call also alters control flow if it does not return. */
447 || flags & ECF_NORETURN
448 /* TM ending statements have backedges out of the transaction.
449 Return true so we split the basic block containing them.
450 Note that the TM_BUILTIN test is merely an optimization. */
451 || ((flags & ECF_TM_BUILTIN)
452 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
453 /* BUILT_IN_RETURN call is same as return statement. */
454 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
455 gimple_call_set_ctrl_altering (stmt, true);
457 gimple_call_set_ctrl_altering (stmt, false);
461 /* Build a flowgraph for the sequence of stmts SEQ. */
464 make_blocks (gimple_seq seq)
466 gimple_stmt_iterator i = gsi_start (seq);
468 bool start_new_block = true;
469 bool first_stmt_of_seq = true;
470 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
472 while (!gsi_end_p (i))
479 if (stmt && is_gimple_call (stmt))
480 gimple_call_initialize_ctrl_altering (stmt);
482 /* If the statement starts a new basic block or if we have determined
483 in a previous pass that we need to create a new block for STMT, do
485 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
487 if (!first_stmt_of_seq)
488 gsi_split_seq_before (&i, &seq);
489 bb = create_basic_block (seq, NULL, bb);
490 start_new_block = false;
493 /* Now add STMT to BB and create the subgraphs for special statement
495 gimple_set_bb (stmt, bb);
497 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
499 if (stmt_ends_bb_p (stmt))
501 /* If the stmt can make abnormal goto use a new temporary
502 for the assignment to the LHS. This makes sure the old value
503 of the LHS is available on the abnormal edge. Otherwise
504 we will end up with overlapping life-ranges for abnormal
506 if (gimple_has_lhs (stmt)
507 && stmt_can_make_abnormal_goto (stmt)
508 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
510 tree lhs = gimple_get_lhs (stmt);
511 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
512 gimple s = gimple_build_assign (lhs, tmp);
513 gimple_set_location (s, gimple_location (stmt));
514 gimple_set_block (s, gimple_block (stmt));
515 gimple_set_lhs (stmt, tmp);
516 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
517 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
518 DECL_GIMPLE_REG_P (tmp) = 1;
519 gsi_insert_after (&i, s, GSI_SAME_STMT);
521 start_new_block = true;
525 first_stmt_of_seq = false;
530 /* Create and return a new empty basic block after bb AFTER. */
533 create_bb (void *h, void *e, basic_block after)
539 /* Create and initialize a new basic block. Since alloc_block uses
540 GC allocation that clears memory to allocate a basic block, we do
541 not have to clear the newly allocated basic block here. */
544 bb->index = last_basic_block_for_fn (cfun);
546 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
548 /* Add the new block to the linked list of blocks. */
549 link_block (bb, after);
551 /* Grow the basic block array if needed. */
552 if ((size_t) last_basic_block_for_fn (cfun)
553 == basic_block_info_for_fn (cfun)->length ())
556 (last_basic_block_for_fn (cfun)
557 + (last_basic_block_for_fn (cfun) + 3) / 4);
558 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
561 /* Add the newly created block to the array. */
562 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
564 n_basic_blocks_for_fn (cfun)++;
565 last_basic_block_for_fn (cfun)++;
571 /*---------------------------------------------------------------------------
573 ---------------------------------------------------------------------------*/
575 /* Fold COND_EXPR_COND of each COND_EXPR. */
578 fold_cond_expr_cond (void)
582 FOR_EACH_BB_FN (bb, cfun)
584 gimple stmt = last_stmt (bb);
586 if (stmt && gimple_code (stmt) == GIMPLE_COND)
588 location_t loc = gimple_location (stmt);
592 fold_defer_overflow_warnings ();
593 cond = fold_binary_loc (loc, gimple_cond_code (stmt), boolean_type_node,
594 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
597 zerop = integer_zerop (cond);
598 onep = integer_onep (cond);
601 zerop = onep = false;
603 fold_undefer_overflow_warnings (zerop || onep,
605 WARN_STRICT_OVERFLOW_CONDITIONAL);
607 gimple_cond_make_false (stmt);
609 gimple_cond_make_true (stmt);
614 /* If basic block BB has an abnormal edge to a basic block
615 containing IFN_ABNORMAL_DISPATCHER internal call, return
616 that the dispatcher's basic block, otherwise return NULL. */
619 get_abnormal_succ_dispatcher (basic_block bb)
624 FOR_EACH_EDGE (e, ei, bb->succs)
625 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
627 gimple_stmt_iterator gsi
628 = gsi_start_nondebug_after_labels_bb (e->dest);
629 gimple g = gsi_stmt (gsi);
631 && is_gimple_call (g)
632 && gimple_call_internal_p (g)
633 && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
639 /* Helper function for make_edges. Create a basic block with
640 with ABNORMAL_DISPATCHER internal call in it if needed, and
641 create abnormal edges from BBS to it and from it to FOR_BB
642 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
645 handle_abnormal_edges (basic_block *dispatcher_bbs,
646 basic_block for_bb, int *bb_to_omp_idx,
647 auto_vec<basic_block> *bbs, bool computed_goto)
649 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
650 unsigned int idx = 0;
656 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
657 if (bb_to_omp_idx[for_bb->index] != 0)
661 /* If the dispatcher has been created already, then there are basic
662 blocks with abnormal edges to it, so just make a new edge to
664 if (*dispatcher == NULL)
666 /* Check if there are any basic blocks that need to have
667 abnormal edges to this dispatcher. If there are none, return
669 if (bb_to_omp_idx == NULL)
671 if (bbs->is_empty ())
676 FOR_EACH_VEC_ELT (*bbs, idx, bb)
677 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
683 /* Create the dispatcher bb. */
684 *dispatcher = create_basic_block (NULL, NULL, for_bb);
687 /* Factor computed gotos into a common computed goto site. Also
688 record the location of that site so that we can un-factor the
689 gotos after we have converted back to normal form. */
690 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
692 /* Create the destination of the factored goto. Each original
693 computed goto will put its desired destination into this
694 variable and jump to the label we create immediately below. */
695 tree var = create_tmp_var (ptr_type_node, "gotovar");
697 /* Build a label for the new block which will contain the
698 factored computed goto. */
699 tree factored_label_decl
700 = create_artificial_label (UNKNOWN_LOCATION);
701 gimple factored_computed_goto_label
702 = gimple_build_label (factored_label_decl);
703 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
705 /* Build our new computed goto. */
706 gimple factored_computed_goto = gimple_build_goto (var);
707 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
709 FOR_EACH_VEC_ELT (*bbs, idx, bb)
712 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
715 gsi = gsi_last_bb (bb);
716 gimple last = gsi_stmt (gsi);
718 gcc_assert (computed_goto_p (last));
720 /* Copy the original computed goto's destination into VAR. */
722 = gimple_build_assign (var, gimple_goto_dest (last));
723 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
725 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
726 e->goto_locus = gimple_location (last);
727 gsi_remove (&gsi, true);
732 tree arg = inner ? boolean_true_node : boolean_false_node;
733 gimple g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
735 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
736 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
738 /* Create predecessor edges of the dispatcher. */
739 FOR_EACH_VEC_ELT (*bbs, idx, bb)
742 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
744 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
749 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
752 /* Join all the blocks in the flowgraph. */
758 struct omp_region *cur_region = NULL;
759 auto_vec<basic_block> ab_edge_goto;
760 auto_vec<basic_block> ab_edge_call;
761 int *bb_to_omp_idx = NULL;
762 int cur_omp_region_idx = 0;
764 /* Create an edge from entry to the first block with executable
766 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
767 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
770 /* Traverse the basic block array placing edges. */
771 FOR_EACH_BB_FN (bb, cfun)
773 gimple last = last_stmt (bb);
777 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
781 enum gimple_code code = gimple_code (last);
785 if (make_goto_expr_edges (bb))
786 ab_edge_goto.safe_push (bb);
790 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
794 make_cond_expr_edges (bb);
798 make_gimple_switch_edges (bb);
802 make_eh_edges (last);
805 case GIMPLE_EH_DISPATCH:
806 fallthru = make_eh_dispatch_edges (last);
810 /* If this function receives a nonlocal goto, then we need to
811 make edges from this call site to all the nonlocal goto
813 if (stmt_can_make_abnormal_goto (last))
814 ab_edge_call.safe_push (bb);
816 /* If this statement has reachable exception handlers, then
817 create abnormal edges to them. */
818 make_eh_edges (last);
820 /* BUILTIN_RETURN is really a return statement. */
821 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
823 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
826 /* Some calls are known not to return. */
828 fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
832 /* A GIMPLE_ASSIGN may throw internally and thus be considered
834 if (is_ctrl_altering_stmt (last))
835 make_eh_edges (last);
840 make_gimple_asm_edges (bb);
845 fallthru = make_gimple_omp_edges (bb, &cur_region,
846 &cur_omp_region_idx);
847 if (cur_region && bb_to_omp_idx == NULL)
848 bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
851 case GIMPLE_TRANSACTION:
853 tree abort_label = gimple_transaction_label (last);
855 make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
861 gcc_assert (!stmt_ends_bb_p (last));
869 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
872 /* Computed gotos are hell to deal with, especially if there are
873 lots of them with a large number of destinations. So we factor
874 them to a common computed goto location before we build the
875 edge list. After we convert back to normal form, we will un-factor
876 the computed gotos since factoring introduces an unwanted jump.
877 For non-local gotos and abnormal edges from calls to calls that return
878 twice or forced labels, factor the abnormal edges too, by having all
879 abnormal edges from the calls go to a common artificial basic block
880 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
881 basic block to all forced labels and calls returning twice.
882 We do this per-OpenMP structured block, because those regions
883 are guaranteed to be single entry single exit by the standard,
884 so it is not allowed to enter or exit such regions abnormally this way,
885 thus all computed gotos, non-local gotos and setjmp/longjmp calls
886 must not transfer control across SESE region boundaries. */
887 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
889 gimple_stmt_iterator gsi;
890 basic_block dispatcher_bb_array[2] = { NULL, NULL };
891 basic_block *dispatcher_bbs = dispatcher_bb_array;
892 int count = n_basic_blocks_for_fn (cfun);
895 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
897 FOR_EACH_BB_FN (bb, cfun)
899 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
901 gimple label_stmt = gsi_stmt (gsi);
904 if (gimple_code (label_stmt) != GIMPLE_LABEL)
907 target = gimple_label_label (label_stmt);
909 /* Make an edge to every label block that has been marked as a
910 potential target for a computed goto or a non-local goto. */
911 if (FORCED_LABEL (target))
912 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
913 &ab_edge_goto, true);
914 if (DECL_NONLOCAL (target))
916 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
917 &ab_edge_call, false);
922 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
923 gsi_next_nondebug (&gsi);
924 if (!gsi_end_p (gsi))
926 /* Make an edge to every setjmp-like call. */
927 gimple call_stmt = gsi_stmt (gsi);
928 if (is_gimple_call (call_stmt)
929 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
930 || gimple_call_builtin_p (call_stmt,
931 BUILT_IN_SETJMP_RECEIVER)))
932 handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
933 &ab_edge_call, false);
938 XDELETE (dispatcher_bbs);
941 XDELETE (bb_to_omp_idx);
945 /* Fold COND_EXPR_COND of each COND_EXPR. */
946 fold_cond_expr_cond ();
949 /* Find the next available discriminator value for LOCUS. The
950 discriminator distinguishes among several basic blocks that
951 share a common locus, allowing for more accurate sample-based
952 profiling. If RETURN_NEXT is true, return the next discriminator
953 anyway. If RETURN_NEXT is not true, we may not increase the
954 discriminator if locus_discrim_map::needs_increment is false,
955 which is used when the stmt is the first call stmt in current
956 source line. locus_discrim_map::needs_increment will be set to
957 true after the first call is seen. */
960 next_discriminator_for_locus (location_t locus, bool return_next)
962 struct locus_discrim_map item;
963 struct locus_discrim_map **slot;
966 item.discriminator = 0;
967 slot = discriminator_per_locus.find_slot_with_hash (
968 &item, LOCATION_LINE (locus), INSERT);
970 if (*slot == HTAB_EMPTY_ENTRY)
972 *slot = XNEW (struct locus_discrim_map);
974 (*slot)->locus = locus;
975 (*slot)->needs_increment = false;
976 (*slot)->discriminator = 0;
978 if (return_next || (*slot)->needs_increment)
979 (*slot)->discriminator++;
981 (*slot)->needs_increment = true;
982 return (*slot)->discriminator;
985 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
988 same_line_p (location_t locus1, location_t locus2)
990 expanded_location from, to;
992 if (locus1 == locus2)
995 from = expand_location (locus1);
996 to = expand_location (locus2);
998 if (from.line != to.line)
1000 if (from.file == to.file)
1002 return (from.file != NULL
1004 && filename_cmp (from.file, to.file) == 0);
1007 /* Assign a unique discriminator value to instructions in block BB that
1008 have the same LOCUS as its predecessor block. */
1011 assign_discriminator (location_t locus, basic_block bb)
1013 gimple_stmt_iterator gsi;
1016 locus = map_discriminator_location (locus);
1018 if (locus == UNKNOWN_LOCATION)
1021 discriminator = next_discriminator_for_locus (locus, true);
1023 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1025 gimple stmt = gsi_stmt (gsi);
1026 location_t stmt_locus = gimple_location (stmt);
1027 if (same_line_p (locus, stmt_locus))
1028 gimple_set_location (stmt,
1029 location_with_discriminator (stmt_locus, discriminator));
1033 /* Assign discriminators to each basic block. */
1036 assign_discriminators (void)
1040 FOR_EACH_BB_FN (bb, cfun)
1044 gimple_stmt_iterator gsi;
1045 gimple last = last_stmt (bb);
1046 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1047 location_t curr_locus = UNKNOWN_LOCATION;
1050 /* Traverse the basic block, if two function calls within a basic block
1051 are mapped to a same line, assign a new discriminator because a call
1052 stmt could be a split point of a basic block. */
1053 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1055 gimple stmt = gsi_stmt (gsi);
1056 if (gimple_code (stmt) == GIMPLE_CALL)
1058 curr_locus = gimple_location (stmt);
1059 curr_discr = next_discriminator_for_locus (curr_locus, false);
1060 gimple_set_location (stmt, location_with_discriminator (
1061 curr_locus, curr_discr));
1065 if (locus == UNKNOWN_LOCATION)
1068 FOR_EACH_EDGE (e, ei, bb->succs)
1070 gimple first = first_non_label_stmt (e->dest);
1071 gimple last = last_stmt (e->dest);
1072 if ((first && same_line_p (locus, gimple_location (first)))
1073 || (last && same_line_p (locus, gimple_location (last))))
1075 if (((first && has_discriminator (gimple_location (first)))
1076 || (last && has_discriminator (gimple_location (last))))
1077 && !has_discriminator (locus))
1078 assign_discriminator (locus, bb);
1080 assign_discriminator (locus, e->dest);
1086 /* Create the edges for a GIMPLE_COND starting at block BB. */
1089 make_cond_expr_edges (basic_block bb)
1091 gimple entry = last_stmt (bb);
1092 gimple then_stmt, else_stmt;
1093 basic_block then_bb, else_bb;
1094 tree then_label, else_label;
1098 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1100 /* Entry basic blocks for each component. */
1101 then_label = gimple_cond_true_label (entry);
1102 else_label = gimple_cond_false_label (entry);
1103 then_bb = label_to_block (then_label);
1104 else_bb = label_to_block (else_label);
1105 then_stmt = first_stmt (then_bb);
1106 else_stmt = first_stmt (else_bb);
1108 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1109 e->goto_locus = gimple_location (then_stmt);
1110 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1112 e->goto_locus = gimple_location (else_stmt);
1114 /* We do not need the labels anymore. */
1115 gimple_cond_set_true_label (entry, NULL_TREE);
1116 gimple_cond_set_false_label (entry, NULL_TREE);
1120 /* Called for each element in the hash table (P) as we delete the
1121 edge to cases hash table.
1123 Clear all the TREE_CHAINs to prevent problems with copying of
1124 SWITCH_EXPRs and structure sharing rules, then free the hash table
1128 edge_to_cases_cleanup (const void *key ATTRIBUTE_UNUSED, void **value,
1129 void *data ATTRIBUTE_UNUSED)
1133 for (t = (tree) *value; t; t = next)
1135 next = CASE_CHAIN (t);
1136 CASE_CHAIN (t) = NULL;
1143 /* Start recording information mapping edges to case labels. */
1146 start_recording_case_labels (void)
1148 gcc_assert (edge_to_cases == NULL);
1149 edge_to_cases = pointer_map_create ();
1150 touched_switch_bbs = BITMAP_ALLOC (NULL);
1153 /* Return nonzero if we are recording information for case labels. */
1156 recording_case_labels_p (void)
1158 return (edge_to_cases != NULL);
1161 /* Stop recording information mapping edges to case labels and
1162 remove any information we have recorded. */
1164 end_recording_case_labels (void)
1168 pointer_map_traverse (edge_to_cases, edge_to_cases_cleanup, NULL);
1169 pointer_map_destroy (edge_to_cases);
1170 edge_to_cases = NULL;
1171 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1173 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1176 gimple stmt = last_stmt (bb);
1177 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1178 group_case_labels_stmt (stmt);
1181 BITMAP_FREE (touched_switch_bbs);
1184 /* If we are inside a {start,end}_recording_cases block, then return
1185 a chain of CASE_LABEL_EXPRs from T which reference E.
1187 Otherwise return NULL. */
1190 get_cases_for_edge (edge e, gimple t)
1195 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1196 chains available. Return NULL so the caller can detect this case. */
1197 if (!recording_case_labels_p ())
1200 slot = pointer_map_contains (edge_to_cases, e);
1202 return (tree) *slot;
1204 /* If we did not find E in the hash table, then this must be the first
1205 time we have been queried for information about E & T. Add all the
1206 elements from T to the hash table then perform the query again. */
1208 n = gimple_switch_num_labels (t);
1209 for (i = 0; i < n; i++)
1211 tree elt = gimple_switch_label (t, i);
1212 tree lab = CASE_LABEL (elt);
1213 basic_block label_bb = label_to_block (lab);
1214 edge this_edge = find_edge (e->src, label_bb);
1216 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1218 slot = pointer_map_insert (edge_to_cases, this_edge);
1219 CASE_CHAIN (elt) = (tree) *slot;
1223 return (tree) *pointer_map_contains (edge_to_cases, e);
1226 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1229 make_gimple_switch_edges (basic_block bb)
1231 gimple entry = last_stmt (bb);
1234 n = gimple_switch_num_labels (entry);
1236 for (i = 0; i < n; ++i)
1238 tree lab = CASE_LABEL (gimple_switch_label (entry, i));
1239 basic_block label_bb = label_to_block (lab);
1240 make_edge (bb, label_bb, 0);
1245 /* Return the basic block holding label DEST. */
1248 label_to_block_fn (struct function *ifun, tree dest)
1250 int uid = LABEL_DECL_UID (dest);
1252 /* We would die hard when faced by an undefined label. Emit a label to
1253 the very first basic block. This will hopefully make even the dataflow
1254 and undefined variable warnings quite right. */
1255 if (seen_error () && uid < 0)
1257 gimple_stmt_iterator gsi =
1258 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1261 stmt = gimple_build_label (dest);
1262 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1263 uid = LABEL_DECL_UID (dest);
1265 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1267 return (*ifun->cfg->x_label_to_block_map)[uid];
1270 /* Create edges for a goto statement at block BB. Returns true
1271 if abnormal edges should be created. */
1274 make_goto_expr_edges (basic_block bb)
1276 gimple_stmt_iterator last = gsi_last_bb (bb);
1277 gimple goto_t = gsi_stmt (last);
1279 /* A simple GOTO creates normal edges. */
1280 if (simple_goto_p (goto_t))
1282 tree dest = gimple_goto_dest (goto_t);
1283 basic_block label_bb = label_to_block (dest);
1284 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1285 e->goto_locus = gimple_location (goto_t);
1286 gsi_remove (&last, true);
1290 /* A computed GOTO creates abnormal edges. */
1294 /* Create edges for an asm statement with labels at block BB. */
1297 make_gimple_asm_edges (basic_block bb)
1299 gimple stmt = last_stmt (bb);
1300 int i, n = gimple_asm_nlabels (stmt);
1302 for (i = 0; i < n; ++i)
1304 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1305 basic_block label_bb = label_to_block (label);
1306 make_edge (bb, label_bb, 0);
1310 /*---------------------------------------------------------------------------
1312 ---------------------------------------------------------------------------*/
1314 /* Cleanup useless labels in basic blocks. This is something we wish
1315 to do early because it allows us to group case labels before creating
1316 the edges for the CFG, and it speeds up block statement iterators in
1317 all passes later on.
1318 We rerun this pass after CFG is created, to get rid of the labels that
1319 are no longer referenced. After then we do not run it any more, since
1320 (almost) no new labels should be created. */
1322 /* A map from basic block index to the leading label of that block. */
1323 static struct label_record
1328 /* True if the label is referenced from somewhere. */
1332 /* Given LABEL return the first label in the same basic block. */
1335 main_block_label (tree label)
1337 basic_block bb = label_to_block (label);
1338 tree main_label = label_for_bb[bb->index].label;
1340 /* label_to_block possibly inserted undefined label into the chain. */
1343 label_for_bb[bb->index].label = label;
1347 label_for_bb[bb->index].used = true;
1351 /* Clean up redundant labels within the exception tree. */
1354 cleanup_dead_labels_eh (void)
1361 if (cfun->eh == NULL)
1364 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1365 if (lp && lp->post_landing_pad)
1367 lab = main_block_label (lp->post_landing_pad);
1368 if (lab != lp->post_landing_pad)
1370 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1371 EH_LANDING_PAD_NR (lab) = lp->index;
1375 FOR_ALL_EH_REGION (r)
1379 case ERT_MUST_NOT_THROW:
1385 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1389 c->label = main_block_label (lab);
1394 case ERT_ALLOWED_EXCEPTIONS:
1395 lab = r->u.allowed.label;
1397 r->u.allowed.label = main_block_label (lab);
1403 /* Cleanup redundant labels. This is a three-step process:
1404 1) Find the leading label for each block.
1405 2) Redirect all references to labels to the leading labels.
1406 3) Cleanup all useless labels. */
1409 cleanup_dead_labels (void)
1412 label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));
1414 /* Find a suitable label for each block. We use the first user-defined
1415 label if there is one, or otherwise just the first label we see. */
1416 FOR_EACH_BB_FN (bb, cfun)
1418 gimple_stmt_iterator i;
1420 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1423 gimple stmt = gsi_stmt (i);
1425 if (gimple_code (stmt) != GIMPLE_LABEL)
1428 label = gimple_label_label (stmt);
1430 /* If we have not yet seen a label for the current block,
1431 remember this one and see if there are more labels. */
1432 if (!label_for_bb[bb->index].label)
1434 label_for_bb[bb->index].label = label;
1438 /* If we did see a label for the current block already, but it
1439 is an artificially created label, replace it if the current
1440 label is a user defined label. */
1441 if (!DECL_ARTIFICIAL (label)
1442 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1444 label_for_bb[bb->index].label = label;
1450 /* Now redirect all jumps/branches to the selected label.
1451 First do so for each block ending in a control statement. */
1452 FOR_EACH_BB_FN (bb, cfun)
1454 gimple stmt = last_stmt (bb);
1455 tree label, new_label;
1460 switch (gimple_code (stmt))
1463 label = gimple_cond_true_label (stmt);
1466 new_label = main_block_label (label);
1467 if (new_label != label)
1468 gimple_cond_set_true_label (stmt, new_label);
1471 label = gimple_cond_false_label (stmt);
1474 new_label = main_block_label (label);
1475 if (new_label != label)
1476 gimple_cond_set_false_label (stmt, new_label);
1482 size_t i, n = gimple_switch_num_labels (stmt);
1484 /* Replace all destination labels. */
1485 for (i = 0; i < n; ++i)
1487 tree case_label = gimple_switch_label (stmt, i);
1488 label = CASE_LABEL (case_label);
1489 new_label = main_block_label (label);
1490 if (new_label != label)
1491 CASE_LABEL (case_label) = new_label;
1498 int i, n = gimple_asm_nlabels (stmt);
1500 for (i = 0; i < n; ++i)
1502 tree cons = gimple_asm_label_op (stmt, i);
1503 tree label = main_block_label (TREE_VALUE (cons));
1504 TREE_VALUE (cons) = label;
1509 /* We have to handle gotos until they're removed, and we don't
1510 remove them until after we've created the CFG edges. */
1512 if (!computed_goto_p (stmt))
1514 label = gimple_goto_dest (stmt);
1515 new_label = main_block_label (label);
1516 if (new_label != label)
1517 gimple_goto_set_dest (stmt, new_label);
1521 case GIMPLE_TRANSACTION:
1523 tree label = gimple_transaction_label (stmt);
1526 tree new_label = main_block_label (label);
1527 if (new_label != label)
1528 gimple_transaction_set_label (stmt, new_label);
1538 /* Do the same for the exception region tree labels. */
1539 cleanup_dead_labels_eh ();
1541 /* Finally, purge dead labels. All user-defined labels and labels that
1542 can be the target of non-local gotos and labels which have their
1543 address taken are preserved. */
1544 FOR_EACH_BB_FN (bb, cfun)
1546 gimple_stmt_iterator i;
1547 tree label_for_this_bb = label_for_bb[bb->index].label;
1549 if (!label_for_this_bb)
1552 /* If the main label of the block is unused, we may still remove it. */
1553 if (!label_for_bb[bb->index].used)
1554 label_for_this_bb = NULL;
1556 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1559 gimple stmt = gsi_stmt (i);
1561 if (gimple_code (stmt) != GIMPLE_LABEL)
1564 label = gimple_label_label (stmt);
1566 if (label == label_for_this_bb
1567 || !DECL_ARTIFICIAL (label)
1568 || DECL_NONLOCAL (label)
1569 || FORCED_LABEL (label))
1572 gsi_remove (&i, true);
1576 free (label_for_bb);
1579 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1580 the ones jumping to the same label.
1581 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1584 group_case_labels_stmt (gimple stmt)
1586 int old_size = gimple_switch_num_labels (stmt);
1587 int i, j, new_size = old_size;
1588 basic_block default_bb = NULL;
1590 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));
1592 /* Look for possible opportunities to merge cases. */
1594 while (i < old_size)
1596 tree base_case, base_high;
1597 basic_block base_bb;
1599 base_case = gimple_switch_label (stmt, i);
1601 gcc_assert (base_case);
1602 base_bb = label_to_block (CASE_LABEL (base_case));
1604 /* Discard cases that have the same destination as the
1606 if (base_bb == default_bb)
1608 gimple_switch_set_label (stmt, i, NULL_TREE);
1614 base_high = CASE_HIGH (base_case)
1615 ? CASE_HIGH (base_case)
1616 : CASE_LOW (base_case);
1619 /* Try to merge case labels. Break out when we reach the end
1620 of the label vector or when we cannot merge the next case
1621 label with the current one. */
1622 while (i < old_size)
1624 tree merge_case = gimple_switch_label (stmt, i);
1625 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
1626 double_int bhp1 = tree_to_double_int (base_high) + double_int_one;
1628 /* Merge the cases if they jump to the same place,
1629 and their ranges are consecutive. */
1630 if (merge_bb == base_bb
1631 && tree_to_double_int (CASE_LOW (merge_case)) == bhp1)
1633 base_high = CASE_HIGH (merge_case) ?
1634 CASE_HIGH (merge_case) : CASE_LOW (merge_case);
1635 CASE_HIGH (base_case) = base_high;
1636 gimple_switch_set_label (stmt, i, NULL_TREE);
1645 /* Compress the case labels in the label vector, and adjust the
1646 length of the vector. */
1647 for (i = 0, j = 0; i < new_size; i++)
1649 while (! gimple_switch_label (stmt, j))
1651 gimple_switch_set_label (stmt, i,
1652 gimple_switch_label (stmt, j++));
1655 gcc_assert (new_size <= old_size);
1656 gimple_switch_set_num_labels (stmt, new_size);
1659 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1660 and scan the sorted vector of cases. Combine the ones jumping to the
1664 group_case_labels (void)
1668 FOR_EACH_BB_FN (bb, cfun)
1670 gimple stmt = last_stmt (bb);
1671 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1672 group_case_labels_stmt (stmt);
1676 /* Checks whether we can merge block B into block A. */
1679 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1682 gimple_stmt_iterator gsi;
1684 if (!single_succ_p (a))
1687 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1690 if (single_succ (a) != b)
1693 if (!single_pred_p (b))
1696 if (b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1699 /* If A ends by a statement causing exceptions or something similar, we
1700 cannot merge the blocks. */
1701 stmt = last_stmt (a);
1702 if (stmt && stmt_ends_bb_p (stmt))
1705 /* Do not allow a block with only a non-local label to be merged. */
1707 && gimple_code (stmt) == GIMPLE_LABEL
1708 && DECL_NONLOCAL (gimple_label_label (stmt)))
1711 /* Examine the labels at the beginning of B. */
1712 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
1715 stmt = gsi_stmt (gsi);
1716 if (gimple_code (stmt) != GIMPLE_LABEL)
1718 lab = gimple_label_label (stmt);
1720 /* Do not remove user forced labels or for -O0 any user labels. */
1721 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1725 /* Protect the loop latches. */
1726 if (current_loops && b->loop_father->latch == b)
1729 /* It must be possible to eliminate all phi nodes in B. If ssa form
1730 is not up-to-date and a name-mapping is registered, we cannot eliminate
1731 any phis. Symbols marked for renaming are never a problem though. */
1732 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi); gsi_next (&gsi))
1734 gimple phi = gsi_stmt (gsi);
1735 /* Technically only new names matter. */
1736 if (name_registered_for_update_p (PHI_RESULT (phi)))
1740 /* When not optimizing, don't merge if we'd lose goto_locus. */
1742 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
1744 location_t goto_locus = single_succ_edge (a)->goto_locus;
1745 gimple_stmt_iterator prev, next;
1746 prev = gsi_last_nondebug_bb (a);
1747 next = gsi_after_labels (b);
1748 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
1749 gsi_next_nondebug (&next);
1750 if ((gsi_end_p (prev)
1751 || gimple_location (gsi_stmt (prev)) != goto_locus)
1752 && (gsi_end_p (next)
1753 || gimple_location (gsi_stmt (next)) != goto_locus))
1760 /* Replaces all uses of NAME by VAL. */
1763 replace_uses_by (tree name, tree val)
1765 imm_use_iterator imm_iter;
1770 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
1772 /* Mark the block if we change the last stmt in it. */
1773 if (cfgcleanup_altered_bbs
1774 && stmt_ends_bb_p (stmt))
1775 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
1777 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
1779 replace_exp (use, val);
1781 if (gimple_code (stmt) == GIMPLE_PHI)
1783 e = gimple_phi_arg_edge (stmt, PHI_ARG_INDEX_FROM_USE (use));
1784 if (e->flags & EDGE_ABNORMAL)
1786 /* This can only occur for virtual operands, since
1787 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
1788 would prevent replacement. */
1789 gcc_checking_assert (virtual_operand_p (name));
1790 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
1795 if (gimple_code (stmt) != GIMPLE_PHI)
1797 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1798 gimple orig_stmt = stmt;
1801 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
1802 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
1803 only change sth from non-invariant to invariant, and only
1804 when propagating constants. */
1805 if (is_gimple_min_invariant (val))
1806 for (i = 0; i < gimple_num_ops (stmt); i++)
1808 tree op = gimple_op (stmt, i);
1809 /* Operands may be empty here. For example, the labels
1810 of a GIMPLE_COND are nulled out following the creation
1811 of the corresponding CFG edges. */
1812 if (op && TREE_CODE (op) == ADDR_EXPR)
1813 recompute_tree_invariant_for_addr_expr (op);
1816 if (fold_stmt (&gsi))
1817 stmt = gsi_stmt (gsi);
1819 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
1820 gimple_purge_dead_eh_edges (gimple_bb (stmt));
1826 gcc_checking_assert (has_zero_uses (name));
1828 /* Also update the trees stored in loop structures. */
1833 FOR_EACH_LOOP (loop, 0)
1835 substitute_in_loop_info (loop, name, val);
1840 /* Merge block B into block A. */
1843 gimple_merge_blocks (basic_block a, basic_block b)
1845 gimple_stmt_iterator last, gsi, psi;
1848 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
1850 /* Remove all single-valued PHI nodes from block B of the form
1851 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
1852 gsi = gsi_last_bb (a);
1853 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
1855 gimple phi = gsi_stmt (psi);
1856 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
1858 bool may_replace_uses = (virtual_operand_p (def)
1859 || may_propagate_copy (def, use));
1861 /* In case we maintain loop closed ssa form, do not propagate arguments
1862 of loop exit phi nodes. */
1864 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
1865 && !virtual_operand_p (def)
1866 && TREE_CODE (use) == SSA_NAME
1867 && a->loop_father != b->loop_father)
1868 may_replace_uses = false;
1870 if (!may_replace_uses)
1872 gcc_assert (!virtual_operand_p (def));
1874 /* Note that just emitting the copies is fine -- there is no problem
1875 with ordering of phi nodes. This is because A is the single
1876 predecessor of B, therefore results of the phi nodes cannot
1877 appear as arguments of the phi nodes. */
1878 copy = gimple_build_assign (def, use);
1879 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
1880 remove_phi_node (&psi, false);
1884 /* If we deal with a PHI for virtual operands, we can simply
1885 propagate these without fussing with folding or updating
1887 if (virtual_operand_p (def))
1889 imm_use_iterator iter;
1890 use_operand_p use_p;
1893 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
1894 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1895 SET_USE (use_p, use);
1897 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
1898 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
1901 replace_uses_by (def, use);
1903 remove_phi_node (&psi, true);
1907 /* Ensure that B follows A. */
1908 move_block_after (b, a);
1910 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
1911 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
1913 /* Remove labels from B and set gimple_bb to A for other statements. */
1914 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
1916 gimple stmt = gsi_stmt (gsi);
1917 if (gimple_code (stmt) == GIMPLE_LABEL)
1919 tree label = gimple_label_label (stmt);
1922 gsi_remove (&gsi, false);
1924 /* Now that we can thread computed gotos, we might have
1925 a situation where we have a forced label in block B
1926 However, the label at the start of block B might still be
1927 used in other ways (think about the runtime checking for
1928 Fortran assigned gotos). So we can not just delete the
1929 label. Instead we move the label to the start of block A. */
1930 if (FORCED_LABEL (label))
1932 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
1933 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
1935 /* Other user labels keep around in a form of a debug stmt. */
1936 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
1938 gimple dbg = gimple_build_debug_bind (label,
1941 gimple_debug_bind_reset_value (dbg);
1942 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
1945 lp_nr = EH_LANDING_PAD_NR (label);
1948 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
1949 lp->post_landing_pad = NULL;
1954 gimple_set_bb (stmt, a);
1959 /* Merge the sequences. */
1960 last = gsi_last_bb (a);
1961 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
1962 set_bb_seq (b, NULL);
1964 if (cfgcleanup_altered_bbs)
1965 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
1969 /* Return the one of two successors of BB that is not reachable by a
1970 complex edge, if there is one. Else, return BB. We use
1971 this in optimizations that use post-dominators for their heuristics,
1972 to catch the cases in C++ where function calls are involved. */
1975 single_noncomplex_succ (basic_block bb)
1978 if (EDGE_COUNT (bb->succs) != 2)
1981 e0 = EDGE_SUCC (bb, 0);
1982 e1 = EDGE_SUCC (bb, 1);
1983 if (e0->flags & EDGE_COMPLEX)
1985 if (e1->flags & EDGE_COMPLEX)
1991 /* T is CALL_EXPR. Set current_function_calls_* flags. */
1994 notice_special_calls (gimple call)
1996 int flags = gimple_call_flags (call);
1998 if (flags & ECF_MAY_BE_ALLOCA)
1999 cfun->calls_alloca = true;
2000 if (flags & ECF_RETURNS_TWICE)
2001 cfun->calls_setjmp = true;
2005 /* Clear flags set by notice_special_calls. Used by dead code removal
2006 to update the flags. */
2009 clear_special_calls (void)
2011 cfun->calls_alloca = false;
2012 cfun->calls_setjmp = false;
2015 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2018 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2020 /* Since this block is no longer reachable, we can just delete all
2021 of its PHI nodes. */
2022 remove_phi_nodes (bb);
2024 /* Remove edges to BB's successors. */
2025 while (EDGE_COUNT (bb->succs) > 0)
2026 remove_edge (EDGE_SUCC (bb, 0));
2030 /* Remove statements of basic block BB. */
2033 remove_bb (basic_block bb)
2035 gimple_stmt_iterator i;
2039 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2040 if (dump_flags & TDF_DETAILS)
2042 dump_bb (dump_file, bb, 0, dump_flags);
2043 fprintf (dump_file, "\n");
2049 struct loop *loop = bb->loop_father;
2051 /* If a loop gets removed, clean up the information associated
2053 if (loop->latch == bb
2054 || loop->header == bb)
2055 free_numbers_of_iterations_estimates_loop (loop);
2058 /* Remove all the instructions in the block. */
2059 if (bb_seq (bb) != NULL)
2061 /* Walk backwards so as to get a chance to substitute all
2062 released DEFs into debug stmts. See
2063 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
2065 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2067 gimple stmt = gsi_stmt (i);
2068 if (gimple_code (stmt) == GIMPLE_LABEL
2069 && (FORCED_LABEL (gimple_label_label (stmt))
2070 || DECL_NONLOCAL (gimple_label_label (stmt))))
2073 gimple_stmt_iterator new_gsi;
2075 /* A non-reachable non-local label may still be referenced.
2076 But it no longer needs to carry the extra semantics of
2078 if (DECL_NONLOCAL (gimple_label_label (stmt)))
2080 DECL_NONLOCAL (gimple_label_label (stmt)) = 0;
2081 FORCED_LABEL (gimple_label_label (stmt)) = 1;
2084 new_bb = bb->prev_bb;
2085 new_gsi = gsi_start_bb (new_bb);
2086 gsi_remove (&i, false);
2087 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2091 /* Release SSA definitions if we are in SSA. Note that we
2092 may be called when not in SSA. For example,
2093 final_cleanup calls this function via
2094 cleanup_tree_cfg. */
2095 if (gimple_in_ssa_p (cfun))
2096 release_defs (stmt);
2098 gsi_remove (&i, true);
2102 i = gsi_last_bb (bb);
2108 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2109 bb->il.gimple.seq = NULL;
2110 bb->il.gimple.phi_nodes = NULL;
2114 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
2115 predicate VAL, return the edge that will be taken out of the block.
2116 If VAL does not match a unique edge, NULL is returned. */
2119 find_taken_edge (basic_block bb, tree val)
2123 stmt = last_stmt (bb);
2126 gcc_assert (is_ctrl_stmt (stmt));
2131 if (!is_gimple_min_invariant (val))
2134 if (gimple_code (stmt) == GIMPLE_COND)
2135 return find_taken_edge_cond_expr (bb, val);
2137 if (gimple_code (stmt) == GIMPLE_SWITCH)
2138 return find_taken_edge_switch_expr (bb, val);
2140 if (computed_goto_p (stmt))
2142 /* Only optimize if the argument is a label, if the argument is
2143 not a label then we can not construct a proper CFG.
2145 It may be the case that we only need to allow the LABEL_REF to
2146 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2147 appear inside a LABEL_EXPR just to be safe. */
2148 if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2149 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2150 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2157 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2158 statement, determine which of the outgoing edges will be taken out of the
2159 block. Return NULL if either edge may be taken. */
2162 find_taken_edge_computed_goto (basic_block bb, tree val)
2167 dest = label_to_block (val);
2170 e = find_edge (bb, dest);
2171 gcc_assert (e != NULL);
2177 /* Given a constant value VAL and the entry block BB to a COND_EXPR
2178 statement, determine which of the two edges will be taken out of the
2179 block. Return NULL if either edge may be taken. */
2182 find_taken_edge_cond_expr (basic_block bb, tree val)
2184 edge true_edge, false_edge;
2186 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2188 gcc_assert (TREE_CODE (val) == INTEGER_CST);
2189 return (integer_zerop (val) ? false_edge : true_edge);
2192 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
2193 statement, determine which edge will be taken out of the block. Return
2194 NULL if any edge may be taken. */
2197 find_taken_edge_switch_expr (basic_block bb, tree val)
2199 basic_block dest_bb;
2204 switch_stmt = last_stmt (bb);
2205 taken_case = find_case_label_for_value (switch_stmt, val);
2206 dest_bb = label_to_block (CASE_LABEL (taken_case));
2208 e = find_edge (bb, dest_bb);
2214 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2215 We can make optimal use here of the fact that the case labels are
2216 sorted: We can do a binary search for a case matching VAL. */
2219 find_case_label_for_value (gimple switch_stmt, tree val)
2221 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2222 tree default_case = gimple_switch_default_label (switch_stmt);
2224 for (low = 0, high = n; high - low > 1; )
2226 size_t i = (high + low) / 2;
2227 tree t = gimple_switch_label (switch_stmt, i);
2230 /* Cache the result of comparing CASE_LOW and val. */
2231 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2238 if (CASE_HIGH (t) == NULL)
2240 /* A singe-valued case label. */
2246 /* A case range. We can only handle integer ranges. */
2247 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2252 return default_case;
2256 /* Dump a basic block on stderr. */
2259 gimple_debug_bb (basic_block bb)
2261 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2265 /* Dump basic block with index N on stderr. */
2268 gimple_debug_bb_n (int n)
2270 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2271 return BASIC_BLOCK_FOR_FN (cfun, n);
2275 /* Dump the CFG on stderr.
2277 FLAGS are the same used by the tree dumping functions
2278 (see TDF_* in dumpfile.h). */
2281 gimple_debug_cfg (int flags)
2283 gimple_dump_cfg (stderr, flags);
2287 /* Dump the program showing basic block boundaries on the given FILE.
2289 FLAGS are the same used by the tree dumping functions (see TDF_* in
2293 gimple_dump_cfg (FILE *file, int flags)
2295 if (flags & TDF_DETAILS)
2297 dump_function_header (file, current_function_decl, flags);
2298 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2299 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2300 last_basic_block_for_fn (cfun));
2302 brief_dump_cfg (file, flags | TDF_COMMENT);
2303 fprintf (file, "\n");
2306 if (flags & TDF_STATS)
2307 dump_cfg_stats (file);
2309 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2313 /* Dump CFG statistics on FILE. */
2316 dump_cfg_stats (FILE *file)
2318 static long max_num_merged_labels = 0;
2319 unsigned long size, total = 0;
2322 const char * const fmt_str = "%-30s%-13s%12s\n";
2323 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2324 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2325 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2326 const char *funcname = current_function_name ();
2328 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2330 fprintf (file, "---------------------------------------------------------\n");
2331 fprintf (file, fmt_str, "", " Number of ", "Memory");
2332 fprintf (file, fmt_str, "", " instances ", "used ");
2333 fprintf (file, "---------------------------------------------------------\n");
2335 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2337 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2338 SCALE (size), LABEL (size));
2341 FOR_EACH_BB_FN (bb, cfun)
2342 num_edges += EDGE_COUNT (bb->succs);
2343 size = num_edges * sizeof (struct edge_def);
2345 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2347 fprintf (file, "---------------------------------------------------------\n");
2348 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2350 fprintf (file, "---------------------------------------------------------\n");
2351 fprintf (file, "\n");
2353 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2354 max_num_merged_labels = cfg_stats.num_merged_labels;
2356 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2357 cfg_stats.num_merged_labels, max_num_merged_labels);
2359 fprintf (file, "\n");
2363 /* Dump CFG statistics on stderr. Keep extern so that it's always
2364 linked in the final executable. */
2367 debug_cfg_stats (void)
2369 dump_cfg_stats (stderr);
2372 /*---------------------------------------------------------------------------
2373 Miscellaneous helpers
2374 ---------------------------------------------------------------------------*/
2376 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2377 flow. Transfers of control flow associated with EH are excluded. */
2380 call_can_make_abnormal_goto (gimple t)
2382 /* If the function has no non-local labels, then a call cannot make an
2383 abnormal transfer of control. */
2384 if (!cfun->has_nonlocal_label
2385 && !cfun->calls_setjmp)
2388 /* Likewise if the call has no side effects. */
2389 if (!gimple_has_side_effects (t))
2392 /* Likewise if the called function is leaf. */
2393 if (gimple_call_flags (t) & ECF_LEAF)
2400 /* Return true if T can make an abnormal transfer of control flow.
2401 Transfers of control flow associated with EH are excluded. */
2404 stmt_can_make_abnormal_goto (gimple t)
2406 if (computed_goto_p (t))
2408 if (is_gimple_call (t))
2409 return call_can_make_abnormal_goto (t);
2414 /* Return true if T represents a stmt that always transfers control. */
2417 is_ctrl_stmt (gimple t)
2419 switch (gimple_code (t))
2433 /* Return true if T is a statement that may alter the flow of control
2434 (e.g., a call to a non-returning function). */
2437 is_ctrl_altering_stmt (gimple t)
2441 switch (gimple_code (t))
2444 /* Per stmt call flag indicates whether the call could alter
2446 if (gimple_call_ctrl_altering_p (t))
2450 case GIMPLE_EH_DISPATCH:
2451 /* EH_DISPATCH branches to the individual catch handlers at
2452 this level of a try or allowed-exceptions region. It can
2453 fallthru to the next statement as well. */
2457 if (gimple_asm_nlabels (t) > 0)
2462 /* OpenMP directives alter control flow. */
2465 case GIMPLE_TRANSACTION:
2466 /* A transaction start alters control flow. */
2473 /* If a statement can throw, it alters control flow. */
2474 return stmt_can_throw_internal (t);
2478 /* Return true if T is a simple local goto. */
2481 simple_goto_p (gimple t)
2483 return (gimple_code (t) == GIMPLE_GOTO
2484 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2488 /* Return true if STMT should start a new basic block. PREV_STMT is
2489 the statement preceding STMT. It is used when STMT is a label or a
2490 case label. Labels should only start a new basic block if their
2491 previous statement wasn't a label. Otherwise, sequence of labels
2492 would generate unnecessary basic blocks that only contain a single
2496 stmt_starts_bb_p (gimple stmt, gimple prev_stmt)
2501 /* Labels start a new basic block only if the preceding statement
2502 wasn't a label of the same type. This prevents the creation of
2503 consecutive blocks that have nothing but a single label. */
2504 if (gimple_code (stmt) == GIMPLE_LABEL)
2506 /* Nonlocal and computed GOTO targets always start a new block. */
2507 if (DECL_NONLOCAL (gimple_label_label (stmt))
2508 || FORCED_LABEL (gimple_label_label (stmt)))
2511 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2513 if (DECL_NONLOCAL (gimple_label_label (prev_stmt)))
2516 cfg_stats.num_merged_labels++;
2522 else if (gimple_code (stmt) == GIMPLE_CALL
2523 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2524 /* setjmp acts similar to a nonlocal GOTO target and thus should
2525 start a new block. */
2532 /* Return true if T should end a basic block. */
2535 stmt_ends_bb_p (gimple t)
2537 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2540 /* Remove block annotations and other data structures. */
2543 delete_tree_cfg_annotations (void)
2545 vec_free (label_to_block_map_for_fn (cfun));
2549 /* Return the first statement in basic block BB. */
2552 first_stmt (basic_block bb)
2554 gimple_stmt_iterator i = gsi_start_bb (bb);
2557 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2565 /* Return the first non-label statement in basic block BB. */
2568 first_non_label_stmt (basic_block bb)
2570 gimple_stmt_iterator i = gsi_start_bb (bb);
2571 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2573 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2576 /* Return the last statement in basic block BB. */
2579 last_stmt (basic_block bb)
2581 gimple_stmt_iterator i = gsi_last_bb (bb);
2584 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2592 /* Return the last statement of an otherwise empty block. Return NULL
2593 if the block is totally empty, or if it contains more than one
2597 last_and_only_stmt (basic_block bb)
2599 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2605 last = gsi_stmt (i);
2606 gsi_prev_nondebug (&i);
2610 /* Empty statements should no longer appear in the instruction stream.
2611 Everything that might have appeared before should be deleted by
2612 remove_useless_stmts, and the optimizers should just gsi_remove
2613 instead of smashing with build_empty_stmt.
2615 Thus the only thing that should appear here in a block containing
2616 one executable statement is a label. */
2617 prev = gsi_stmt (i);
2618 if (gimple_code (prev) == GIMPLE_LABEL)
2624 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2627 reinstall_phi_args (edge new_edge, edge old_edge)
2629 edge_var_map_vector *v;
2632 gimple_stmt_iterator phis;
2634 v = redirect_edge_var_map_vector (old_edge);
2638 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2639 v->iterate (i, &vm) && !gsi_end_p (phis);
2640 i++, gsi_next (&phis))
2642 gimple phi = gsi_stmt (phis);
2643 tree result = redirect_edge_var_map_result (vm);
2644 tree arg = redirect_edge_var_map_def (vm);
2646 gcc_assert (result == gimple_phi_result (phi));
2648 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2651 redirect_edge_var_map_clear (old_edge);
2654 /* Returns the basic block after which the new basic block created
2655 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2656 near its "logical" location. This is of most help to humans looking
2657 at debugging dumps. */
2660 split_edge_bb_loc (edge edge_in)
2662 basic_block dest = edge_in->dest;
2663 basic_block dest_prev = dest->prev_bb;
2667 edge e = find_edge (dest_prev, dest);
2668 if (e && !(e->flags & EDGE_COMPLEX))
2669 return edge_in->src;
2674 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2675 Abort on abnormal edges. */
2678 gimple_split_edge (edge edge_in)
2680 basic_block new_bb, after_bb, dest;
2683 /* Abnormal edges cannot be split. */
2684 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2686 dest = edge_in->dest;
2688 after_bb = split_edge_bb_loc (edge_in);
2690 new_bb = create_empty_bb (after_bb);
2691 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2692 new_bb->count = edge_in->count;
2693 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2694 new_edge->probability = REG_BR_PROB_BASE;
2695 new_edge->count = edge_in->count;
2697 e = redirect_edge_and_branch (edge_in, new_bb);
2698 gcc_assert (e == edge_in);
2699 reinstall_phi_args (new_edge, e);
2705 /* Verify properties of the address expression T with base object BASE. */
2708 verify_address (tree t, tree base)
2711 bool old_side_effects;
2713 bool new_side_effects;
2715 old_constant = TREE_CONSTANT (t);
2716 old_side_effects = TREE_SIDE_EFFECTS (t);
2718 recompute_tree_invariant_for_addr_expr (t);
2719 new_side_effects = TREE_SIDE_EFFECTS (t);
2720 new_constant = TREE_CONSTANT (t);
2722 if (old_constant != new_constant)
2724 error ("constant not recomputed when ADDR_EXPR changed");
2727 if (old_side_effects != new_side_effects)
2729 error ("side effects not recomputed when ADDR_EXPR changed");
2733 if (!(TREE_CODE (base) == VAR_DECL
2734 || TREE_CODE (base) == PARM_DECL
2735 || TREE_CODE (base) == RESULT_DECL))
2738 if (DECL_GIMPLE_REG_P (base))
2740 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2747 /* Callback for walk_tree, check that all elements with address taken are
2748 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2749 inside a PHI node. */
2752 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2759 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2760 #define CHECK_OP(N, MSG) \
2761 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2762 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2764 switch (TREE_CODE (t))
2767 if (SSA_NAME_IN_FREE_LIST (t))
2769 error ("SSA name in freelist but still referenced");
2775 error ("INDIRECT_REF in gimple IL");
2779 x = TREE_OPERAND (t, 0);
2780 if (!POINTER_TYPE_P (TREE_TYPE (x))
2781 || !is_gimple_mem_ref_addr (x))
2783 error ("invalid first operand of MEM_REF");
2786 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2787 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2789 error ("invalid offset operand of MEM_REF");
2790 return TREE_OPERAND (t, 1);
2792 if (TREE_CODE (x) == ADDR_EXPR
2793 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2799 x = fold (ASSERT_EXPR_COND (t));
2800 if (x == boolean_false_node)
2802 error ("ASSERT_EXPR with an always-false condition");
2808 error ("MODIFY_EXPR not expected while having tuples");
2815 gcc_assert (is_gimple_address (t));
2817 /* Skip any references (they will be checked when we recurse down the
2818 tree) and ensure that any variable used as a prefix is marked
2820 for (x = TREE_OPERAND (t, 0);
2821 handled_component_p (x);
2822 x = TREE_OPERAND (x, 0))
2825 if ((tem = verify_address (t, x)))
2828 if (!(TREE_CODE (x) == VAR_DECL
2829 || TREE_CODE (x) == PARM_DECL
2830 || TREE_CODE (x) == RESULT_DECL))
2833 if (!TREE_ADDRESSABLE (x))
2835 error ("address taken, but ADDRESSABLE bit not set");
2843 x = COND_EXPR_COND (t);
2844 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2846 error ("non-integral used in condition");
2849 if (!is_gimple_condexpr (x))
2851 error ("invalid conditional operand");
2856 case NON_LVALUE_EXPR:
2857 case TRUTH_NOT_EXPR:
2861 case FIX_TRUNC_EXPR:
2866 CHECK_OP (0, "invalid operand to unary operator");
2872 if (!is_gimple_reg_type (TREE_TYPE (t)))
2874 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2878 if (TREE_CODE (t) == BIT_FIELD_REF)
2880 tree t0 = TREE_OPERAND (t, 0);
2881 tree t1 = TREE_OPERAND (t, 1);
2882 tree t2 = TREE_OPERAND (t, 2);
2883 if (!tree_fits_uhwi_p (t1)
2884 || !tree_fits_uhwi_p (t2))
2886 error ("invalid position or size operand to BIT_FIELD_REF");
2889 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2890 && (TYPE_PRECISION (TREE_TYPE (t))
2891 != tree_to_uhwi (t1)))
2893 error ("integral result type precision does not match "
2894 "field size of BIT_FIELD_REF");
2897 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2898 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2899 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2900 != tree_to_uhwi (t1)))
2902 error ("mode precision of non-integral result does not "
2903 "match field size of BIT_FIELD_REF");
2906 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2907 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2908 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2910 error ("position plus size exceeds size of referenced object in "
2915 t = TREE_OPERAND (t, 0);
2920 case ARRAY_RANGE_REF:
2921 case VIEW_CONVERT_EXPR:
2922 /* We have a nest of references. Verify that each of the operands
2923 that determine where to reference is either a constant or a variable,
2924 verify that the base is valid, and then show we've already checked
2926 while (handled_component_p (t))
2928 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2929 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2930 else if (TREE_CODE (t) == ARRAY_REF
2931 || TREE_CODE (t) == ARRAY_RANGE_REF)
2933 CHECK_OP (1, "invalid array index");
2934 if (TREE_OPERAND (t, 2))
2935 CHECK_OP (2, "invalid array lower bound");
2936 if (TREE_OPERAND (t, 3))
2937 CHECK_OP (3, "invalid array stride");
2939 else if (TREE_CODE (t) == BIT_FIELD_REF
2940 || TREE_CODE (t) == REALPART_EXPR
2941 || TREE_CODE (t) == IMAGPART_EXPR)
2943 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2948 t = TREE_OPERAND (t, 0);
2951 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2953 error ("invalid reference prefix");
2960 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
2961 POINTER_PLUS_EXPR. */
2962 if (POINTER_TYPE_P (TREE_TYPE (t)))
2964 error ("invalid operand to plus/minus, type is a pointer");
2967 CHECK_OP (0, "invalid operand to binary operator");
2968 CHECK_OP (1, "invalid operand to binary operator");
2971 case POINTER_PLUS_EXPR:
2972 /* Check to make sure the first operand is a pointer or reference type. */
2973 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
2975 error ("invalid operand to pointer plus, first operand is not a pointer");
2978 /* Check to make sure the second operand is a ptrofftype. */
2979 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
2981 error ("invalid operand to pointer plus, second operand is not an "
2982 "integer type of appropriate width");
2992 case UNORDERED_EXPR:
3001 case TRUNC_DIV_EXPR:
3003 case FLOOR_DIV_EXPR:
3004 case ROUND_DIV_EXPR:
3005 case TRUNC_MOD_EXPR:
3007 case FLOOR_MOD_EXPR:
3008 case ROUND_MOD_EXPR:
3010 case EXACT_DIV_EXPR:
3020 CHECK_OP (0, "invalid operand to binary operator");
3021 CHECK_OP (1, "invalid operand to binary operator");
3025 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3029 case CASE_LABEL_EXPR:
3032 error ("invalid CASE_CHAIN");
3046 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3047 Returns true if there is an error, otherwise false. */
3050 verify_types_in_gimple_min_lval (tree expr)
3054 if (is_gimple_id (expr))
3057 if (TREE_CODE (expr) != TARGET_MEM_REF
3058 && TREE_CODE (expr) != MEM_REF)
3060 error ("invalid expression for min lvalue");
3064 /* TARGET_MEM_REFs are strange beasts. */
3065 if (TREE_CODE (expr) == TARGET_MEM_REF)
3068 op = TREE_OPERAND (expr, 0);
3069 if (!is_gimple_val (op))
3071 error ("invalid operand in indirect reference");
3072 debug_generic_stmt (op);
3075 /* Memory references now generally can involve a value conversion. */
3080 /* Verify if EXPR is a valid GIMPLE reference expression. If
3081 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3082 if there is an error, otherwise false. */
3085 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3087 while (handled_component_p (expr))
3089 tree op = TREE_OPERAND (expr, 0);
3091 if (TREE_CODE (expr) == ARRAY_REF
3092 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3094 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3095 || (TREE_OPERAND (expr, 2)
3096 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3097 || (TREE_OPERAND (expr, 3)
3098 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3100 error ("invalid operands to array reference");
3101 debug_generic_stmt (expr);
3106 /* Verify if the reference array element types are compatible. */
3107 if (TREE_CODE (expr) == ARRAY_REF
3108 && !useless_type_conversion_p (TREE_TYPE (expr),
3109 TREE_TYPE (TREE_TYPE (op))))
3111 error ("type mismatch in array reference");
3112 debug_generic_stmt (TREE_TYPE (expr));
3113 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3116 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3117 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3118 TREE_TYPE (TREE_TYPE (op))))
3120 error ("type mismatch in array range reference");
3121 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3122 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3126 if ((TREE_CODE (expr) == REALPART_EXPR
3127 || TREE_CODE (expr) == IMAGPART_EXPR)
3128 && !useless_type_conversion_p (TREE_TYPE (expr),
3129 TREE_TYPE (TREE_TYPE (op))))
3131 error ("type mismatch in real/imagpart reference");
3132 debug_generic_stmt (TREE_TYPE (expr));
3133 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3137 if (TREE_CODE (expr) == COMPONENT_REF
3138 && !useless_type_conversion_p (TREE_TYPE (expr),
3139 TREE_TYPE (TREE_OPERAND (expr, 1))))
3141 error ("type mismatch in component reference");
3142 debug_generic_stmt (TREE_TYPE (expr));
3143 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3147 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3149 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3150 that their operand is not an SSA name or an invariant when
3151 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3152 bug). Otherwise there is nothing to verify, gross mismatches at
3153 most invoke undefined behavior. */
3155 && (TREE_CODE (op) == SSA_NAME
3156 || is_gimple_min_invariant (op)))
3158 error ("conversion of an SSA_NAME on the left hand side");
3159 debug_generic_stmt (expr);
3162 else if (TREE_CODE (op) == SSA_NAME
3163 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3165 error ("conversion of register to a different size");
3166 debug_generic_stmt (expr);
3169 else if (!handled_component_p (op))
3176 if (TREE_CODE (expr) == MEM_REF)
3178 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3180 error ("invalid address operand in MEM_REF");
3181 debug_generic_stmt (expr);
3184 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3185 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3187 error ("invalid offset operand in MEM_REF");
3188 debug_generic_stmt (expr);
3192 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3194 if (!TMR_BASE (expr)
3195 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3197 error ("invalid address operand in TARGET_MEM_REF");
3200 if (!TMR_OFFSET (expr)
3201 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3202 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3204 error ("invalid offset operand in TARGET_MEM_REF");
3205 debug_generic_stmt (expr);
3210 return ((require_lvalue || !is_gimple_min_invariant (expr))
3211 && verify_types_in_gimple_min_lval (expr));
3214 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3215 list of pointer-to types that is trivially convertible to DEST. */
3218 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3222 if (!TYPE_POINTER_TO (src_obj))
3225 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3226 if (useless_type_conversion_p (dest, src))
3232 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3233 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3236 valid_fixed_convert_types_p (tree type1, tree type2)
3238 return (FIXED_POINT_TYPE_P (type1)
3239 && (INTEGRAL_TYPE_P (type2)
3240 || SCALAR_FLOAT_TYPE_P (type2)
3241 || FIXED_POINT_TYPE_P (type2)));
3244 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3245 is a problem, otherwise false. */
3248 verify_gimple_call (gimple stmt)
3250 tree fn = gimple_call_fn (stmt);
3251 tree fntype, fndecl;
3254 if (gimple_call_internal_p (stmt))
3258 error ("gimple call has two targets");
3259 debug_generic_stmt (fn);
3267 error ("gimple call has no target");
3272 if (fn && !is_gimple_call_addr (fn))
3274 error ("invalid function in gimple call");
3275 debug_generic_stmt (fn);
3280 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3281 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3282 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3284 error ("non-function in gimple call");
3288 fndecl = gimple_call_fndecl (stmt);
3290 && TREE_CODE (fndecl) == FUNCTION_DECL
3291 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3292 && !DECL_PURE_P (fndecl)
3293 && !TREE_READONLY (fndecl))
3295 error ("invalid pure const state for function");
3299 if (gimple_call_lhs (stmt)
3300 && (!is_gimple_lvalue (gimple_call_lhs (stmt))
3301 || verify_types_in_gimple_reference (gimple_call_lhs (stmt), true)))
3303 error ("invalid LHS in gimple call");
3307 if (gimple_call_lhs (stmt) && gimple_call_noreturn_p (stmt))
3309 error ("LHS in noreturn call");
3313 fntype = gimple_call_fntype (stmt);
3315 && gimple_call_lhs (stmt)
3316 && !useless_type_conversion_p (TREE_TYPE (gimple_call_lhs (stmt)),
3318 /* ??? At least C++ misses conversions at assignments from
3319 void * call results.
3320 ??? Java is completely off. Especially with functions
3321 returning java.lang.Object.
3322 For now simply allow arbitrary pointer type conversions. */
3323 && !(POINTER_TYPE_P (TREE_TYPE (gimple_call_lhs (stmt)))
3324 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3326 error ("invalid conversion in gimple call");
3327 debug_generic_stmt (TREE_TYPE (gimple_call_lhs (stmt)));
3328 debug_generic_stmt (TREE_TYPE (fntype));
3332 if (gimple_call_chain (stmt)
3333 && !is_gimple_val (gimple_call_chain (stmt)))
3335 error ("invalid static chain in gimple call");
3336 debug_generic_stmt (gimple_call_chain (stmt));
3340 /* If there is a static chain argument, this should not be an indirect
3341 call, and the decl should have DECL_STATIC_CHAIN set. */
3342 if (gimple_call_chain (stmt))
3344 if (!gimple_call_fndecl (stmt))
3346 error ("static chain in indirect gimple call");
3349 fn = TREE_OPERAND (fn, 0);
3351 if (!DECL_STATIC_CHAIN (fn))
3353 error ("static chain with function that doesn%'t use one");
3358 /* ??? The C frontend passes unpromoted arguments in case it
3359 didn't see a function declaration before the call. So for now
3360 leave the call arguments mostly unverified. Once we gimplify
3361 unit-at-a-time we have a chance to fix this. */
3363 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3365 tree arg = gimple_call_arg (stmt, i);
3366 if ((is_gimple_reg_type (TREE_TYPE (arg))
3367 && !is_gimple_val (arg))
3368 || (!is_gimple_reg_type (TREE_TYPE (arg))
3369 && !is_gimple_lvalue (arg)))
3371 error ("invalid argument to gimple call");
3372 debug_generic_expr (arg);
3380 /* Verifies the gimple comparison with the result type TYPE and
3381 the operands OP0 and OP1. */
3384 verify_gimple_comparison (tree type, tree op0, tree op1)
3386 tree op0_type = TREE_TYPE (op0);
3387 tree op1_type = TREE_TYPE (op1);
3389 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3391 error ("invalid operands in gimple comparison");
3395 /* For comparisons we do not have the operations type as the
3396 effective type the comparison is carried out in. Instead
3397 we require that either the first operand is trivially
3398 convertible into the second, or the other way around.
3399 Because we special-case pointers to void we allow
3400 comparisons of pointers with the same mode as well. */
3401 if (!useless_type_conversion_p (op0_type, op1_type)
3402 && !useless_type_conversion_p (op1_type, op0_type)
3403 && (!POINTER_TYPE_P (op0_type)
3404 || !POINTER_TYPE_P (op1_type)
3405 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3407 error ("mismatching comparison operand types");
3408 debug_generic_expr (op0_type);
3409 debug_generic_expr (op1_type);
3413 /* The resulting type of a comparison may be an effective boolean type. */
3414 if (INTEGRAL_TYPE_P (type)
3415 && (TREE_CODE (type) == BOOLEAN_TYPE
3416 || TYPE_PRECISION (type) == 1))
3418 if (TREE_CODE (op0_type) == VECTOR_TYPE
3419 || TREE_CODE (op1_type) == VECTOR_TYPE)
3421 error ("vector comparison returning a boolean");
3422 debug_generic_expr (op0_type);
3423 debug_generic_expr (op1_type);
3427 /* Or an integer vector type with the same size and element count
3428 as the comparison operand types. */
3429 else if (TREE_CODE (type) == VECTOR_TYPE
3430 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3432 if (TREE_CODE (op0_type) != VECTOR_TYPE
3433 || TREE_CODE (op1_type) != VECTOR_TYPE)
3435 error ("non-vector operands in vector comparison");
3436 debug_generic_expr (op0_type);
3437 debug_generic_expr (op1_type);
3441 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3442 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3443 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3444 /* The result of a vector comparison is of signed
3446 || TYPE_UNSIGNED (TREE_TYPE (type)))
3448 error ("invalid vector comparison resulting type");
3449 debug_generic_expr (type);
3455 error ("bogus comparison result type");
3456 debug_generic_expr (type);
3463 /* Verify a gimple assignment statement STMT with an unary rhs.
3464 Returns true if anything is wrong. */
3467 verify_gimple_assign_unary (gimple stmt)
3469 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3470 tree lhs = gimple_assign_lhs (stmt);
3471 tree lhs_type = TREE_TYPE (lhs);
3472 tree rhs1 = gimple_assign_rhs1 (stmt);
3473 tree rhs1_type = TREE_TYPE (rhs1);
3475 if (!is_gimple_reg (lhs))
3477 error ("non-register as LHS of unary operation");
3481 if (!is_gimple_val (rhs1))
3483 error ("invalid operand in unary operation");
3487 /* First handle conversions. */
3492 /* Allow conversions from pointer type to integral type only if
3493 there is no sign or zero extension involved.
3494 For targets were the precision of ptrofftype doesn't match that
3495 of pointers we need to allow arbitrary conversions to ptrofftype. */
3496 if ((POINTER_TYPE_P (lhs_type)
3497 && INTEGRAL_TYPE_P (rhs1_type))
3498 || (POINTER_TYPE_P (rhs1_type)
3499 && INTEGRAL_TYPE_P (lhs_type)
3500 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3501 || ptrofftype_p (sizetype))))
3504 /* Allow conversion from integral to offset type and vice versa. */
3505 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3506 && INTEGRAL_TYPE_P (rhs1_type))
3507 || (INTEGRAL_TYPE_P (lhs_type)
3508 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3511 /* Otherwise assert we are converting between types of the
3513 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3515 error ("invalid types in nop conversion");
3516 debug_generic_expr (lhs_type);
3517 debug_generic_expr (rhs1_type);
3524 case ADDR_SPACE_CONVERT_EXPR:
3526 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3527 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3528 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3530 error ("invalid types in address space conversion");
3531 debug_generic_expr (lhs_type);
3532 debug_generic_expr (rhs1_type);
3539 case FIXED_CONVERT_EXPR:
3541 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3542 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3544 error ("invalid types in fixed-point conversion");
3545 debug_generic_expr (lhs_type);
3546 debug_generic_expr (rhs1_type);
3555 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3556 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3557 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3559 error ("invalid types in conversion to floating point");
3560 debug_generic_expr (lhs_type);
3561 debug_generic_expr (rhs1_type);
3568 case FIX_TRUNC_EXPR:
3570 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3571 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3572 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3574 error ("invalid types in conversion to integer");
3575 debug_generic_expr (lhs_type);
3576 debug_generic_expr (rhs1_type);
3583 case VEC_UNPACK_HI_EXPR:
3584 case VEC_UNPACK_LO_EXPR:
3585 case REDUC_MAX_EXPR:
3586 case REDUC_MIN_EXPR:
3587 case REDUC_PLUS_EXPR:
3588 case VEC_UNPACK_FLOAT_HI_EXPR:
3589 case VEC_UNPACK_FLOAT_LO_EXPR:
3597 case NON_LVALUE_EXPR:
3605 /* For the remaining codes assert there is no conversion involved. */
3606 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3608 error ("non-trivial conversion in unary operation");
3609 debug_generic_expr (lhs_type);
3610 debug_generic_expr (rhs1_type);
3617 /* Verify a gimple assignment statement STMT with a binary rhs.
3618 Returns true if anything is wrong. */
3621 verify_gimple_assign_binary (gimple stmt)
3623 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3624 tree lhs = gimple_assign_lhs (stmt);
3625 tree lhs_type = TREE_TYPE (lhs);
3626 tree rhs1 = gimple_assign_rhs1 (stmt);
3627 tree rhs1_type = TREE_TYPE (rhs1);
3628 tree rhs2 = gimple_assign_rhs2 (stmt);
3629 tree rhs2_type = TREE_TYPE (rhs2);
3631 if (!is_gimple_reg (lhs))
3633 error ("non-register as LHS of binary operation");
3637 if (!is_gimple_val (rhs1)
3638 || !is_gimple_val (rhs2))
3640 error ("invalid operands in binary operation");
3644 /* First handle operations that involve different types. */
3649 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3650 || !(INTEGRAL_TYPE_P (rhs1_type)
3651 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3652 || !(INTEGRAL_TYPE_P (rhs2_type)
3653 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3655 error ("type mismatch in complex expression");
3656 debug_generic_expr (lhs_type);
3657 debug_generic_expr (rhs1_type);
3658 debug_generic_expr (rhs2_type);
3670 /* Shifts and rotates are ok on integral types, fixed point
3671 types and integer vector types. */
3672 if ((!INTEGRAL_TYPE_P (rhs1_type)
3673 && !FIXED_POINT_TYPE_P (rhs1_type)
3674 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3675 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3676 || (!INTEGRAL_TYPE_P (rhs2_type)
3677 /* Vector shifts of vectors are also ok. */
3678 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3679 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3680 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3681 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3682 || !useless_type_conversion_p (lhs_type, rhs1_type))
3684 error ("type mismatch in shift expression");
3685 debug_generic_expr (lhs_type);
3686 debug_generic_expr (rhs1_type);
3687 debug_generic_expr (rhs2_type);
3694 case VEC_LSHIFT_EXPR:
3695 case VEC_RSHIFT_EXPR:
3697 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3698 || !(INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3699 || POINTER_TYPE_P (TREE_TYPE (rhs1_type))
3700 || FIXED_POINT_TYPE_P (TREE_TYPE (rhs1_type))
3701 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3702 || (!INTEGRAL_TYPE_P (rhs2_type)
3703 && (TREE_CODE (rhs2_type) != VECTOR_TYPE
3704 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3705 || !useless_type_conversion_p (lhs_type, rhs1_type))
3707 error ("type mismatch in vector shift expression");
3708 debug_generic_expr (lhs_type);
3709 debug_generic_expr (rhs1_type);
3710 debug_generic_expr (rhs2_type);
3713 /* For shifting a vector of non-integral components we
3714 only allow shifting by a constant multiple of the element size. */
3715 if (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3716 && (TREE_CODE (rhs2) != INTEGER_CST
3717 || !div_if_zero_remainder (EXACT_DIV_EXPR, rhs2,
3718 TYPE_SIZE (TREE_TYPE (rhs1_type)))))
3720 error ("non-element sized vector shift of floating point vector");
3727 case WIDEN_LSHIFT_EXPR:
3729 if (!INTEGRAL_TYPE_P (lhs_type)
3730 || !INTEGRAL_TYPE_P (rhs1_type)
3731 || TREE_CODE (rhs2) != INTEGER_CST
3732 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3734 error ("type mismatch in widening vector shift expression");
3735 debug_generic_expr (lhs_type);
3736 debug_generic_expr (rhs1_type);
3737 debug_generic_expr (rhs2_type);
3744 case VEC_WIDEN_LSHIFT_HI_EXPR:
3745 case VEC_WIDEN_LSHIFT_LO_EXPR:
3747 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3748 || TREE_CODE (lhs_type) != VECTOR_TYPE
3749 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3750 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3751 || TREE_CODE (rhs2) != INTEGER_CST
3752 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3753 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3755 error ("type mismatch in widening vector shift expression");
3756 debug_generic_expr (lhs_type);
3757 debug_generic_expr (rhs1_type);
3758 debug_generic_expr (rhs2_type);
3768 tree lhs_etype = lhs_type;
3769 tree rhs1_etype = rhs1_type;
3770 tree rhs2_etype = rhs2_type;
3771 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3773 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3774 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3776 error ("invalid non-vector operands to vector valued plus");
3779 lhs_etype = TREE_TYPE (lhs_type);
3780 rhs1_etype = TREE_TYPE (rhs1_type);
3781 rhs2_etype = TREE_TYPE (rhs2_type);
3783 if (POINTER_TYPE_P (lhs_etype)
3784 || POINTER_TYPE_P (rhs1_etype)
3785 || POINTER_TYPE_P (rhs2_etype))
3787 error ("invalid (pointer) operands to plus/minus");
3791 /* Continue with generic binary expression handling. */
3795 case POINTER_PLUS_EXPR:
3797 if (!POINTER_TYPE_P (rhs1_type)
3798 || !useless_type_conversion_p (lhs_type, rhs1_type)
3799 || !ptrofftype_p (rhs2_type))
3801 error ("type mismatch in pointer plus expression");
3802 debug_generic_stmt (lhs_type);
3803 debug_generic_stmt (rhs1_type);
3804 debug_generic_stmt (rhs2_type);
3811 case TRUTH_ANDIF_EXPR:
3812 case TRUTH_ORIF_EXPR:
3813 case TRUTH_AND_EXPR:
3815 case TRUTH_XOR_EXPR:
3825 case UNORDERED_EXPR:
3833 /* Comparisons are also binary, but the result type is not
3834 connected to the operand types. */
3835 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3837 case WIDEN_MULT_EXPR:
3838 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3840 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3841 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
3843 case WIDEN_SUM_EXPR:
3844 case VEC_WIDEN_MULT_HI_EXPR:
3845 case VEC_WIDEN_MULT_LO_EXPR:
3846 case VEC_WIDEN_MULT_EVEN_EXPR:
3847 case VEC_WIDEN_MULT_ODD_EXPR:
3848 case VEC_PACK_TRUNC_EXPR:
3849 case VEC_PACK_SAT_EXPR:
3850 case VEC_PACK_FIX_TRUNC_EXPR:
3855 case MULT_HIGHPART_EXPR:
3856 case TRUNC_DIV_EXPR:
3858 case FLOOR_DIV_EXPR:
3859 case ROUND_DIV_EXPR:
3860 case TRUNC_MOD_EXPR:
3862 case FLOOR_MOD_EXPR:
3863 case ROUND_MOD_EXPR:
3865 case EXACT_DIV_EXPR:
3871 /* Continue with generic binary expression handling. */
3878 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3879 || !useless_type_conversion_p (lhs_type, rhs2_type))
3881 error ("type mismatch in binary expression");
3882 debug_generic_stmt (lhs_type);
3883 debug_generic_stmt (rhs1_type);
3884 debug_generic_stmt (rhs2_type);
3891 /* Verify a gimple assignment statement STMT with a ternary rhs.
3892 Returns true if anything is wrong. */
3895 verify_gimple_assign_ternary (gimple stmt)
3897 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3898 tree lhs = gimple_assign_lhs (stmt);
3899 tree lhs_type = TREE_TYPE (lhs);
3900 tree rhs1 = gimple_assign_rhs1 (stmt);
3901 tree rhs1_type = TREE_TYPE (rhs1);
3902 tree rhs2 = gimple_assign_rhs2 (stmt);
3903 tree rhs2_type = TREE_TYPE (rhs2);
3904 tree rhs3 = gimple_assign_rhs3 (stmt);
3905 tree rhs3_type = TREE_TYPE (rhs3);
3907 if (!is_gimple_reg (lhs))
3909 error ("non-register as LHS of ternary operation");
3913 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3914 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3915 || !is_gimple_val (rhs2)
3916 || !is_gimple_val (rhs3))
3918 error ("invalid operands in ternary operation");
3922 /* First handle operations that involve different types. */
3925 case WIDEN_MULT_PLUS_EXPR:
3926 case WIDEN_MULT_MINUS_EXPR:
3927 if ((!INTEGRAL_TYPE_P (rhs1_type)
3928 && !FIXED_POINT_TYPE_P (rhs1_type))
3929 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3930 || !useless_type_conversion_p (lhs_type, rhs3_type)
3931 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3932 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3934 error ("type mismatch in widening multiply-accumulate expression");
3935 debug_generic_expr (lhs_type);
3936 debug_generic_expr (rhs1_type);
3937 debug_generic_expr (rhs2_type);
3938 debug_generic_expr (rhs3_type);
3944 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3945 || !useless_type_conversion_p (lhs_type, rhs2_type)
3946 || !useless_type_conversion_p (lhs_type, rhs3_type))
3948 error ("type mismatch in fused multiply-add expression");
3949 debug_generic_expr (lhs_type);
3950 debug_generic_expr (rhs1_type);
3951 debug_generic_expr (rhs2_type);
3952 debug_generic_expr (rhs3_type);
3959 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3960 || !useless_type_conversion_p (lhs_type, rhs3_type))
3962 error ("type mismatch in conditional expression");
3963 debug_generic_expr (lhs_type);
3964 debug_generic_expr (rhs2_type);
3965 debug_generic_expr (rhs3_type);
3971 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3972 || !useless_type_conversion_p (lhs_type, rhs2_type))
3974 error ("type mismatch in vector permute expression");
3975 debug_generic_expr (lhs_type);
3976 debug_generic_expr (rhs1_type);
3977 debug_generic_expr (rhs2_type);
3978 debug_generic_expr (rhs3_type);
3982 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3983 || TREE_CODE (rhs2_type) != VECTOR_TYPE
3984 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
3986 error ("vector types expected in vector permute expression");
3987 debug_generic_expr (lhs_type);
3988 debug_generic_expr (rhs1_type);
3989 debug_generic_expr (rhs2_type);
3990 debug_generic_expr (rhs3_type);
3994 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
3995 || TYPE_VECTOR_SUBPARTS (rhs2_type)
3996 != TYPE_VECTOR_SUBPARTS (rhs3_type)
3997 || TYPE_VECTOR_SUBPARTS (rhs3_type)
3998 != TYPE_VECTOR_SUBPARTS (lhs_type))
4000 error ("vectors with different element number found "
4001 "in vector permute expression");
4002 debug_generic_expr (lhs_type);
4003 debug_generic_expr (rhs1_type);
4004 debug_generic_expr (rhs2_type);
4005 debug_generic_expr (rhs3_type);
4009 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4010 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4011 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4013 error ("invalid mask type in vector permute expression");
4014 debug_generic_expr (lhs_type);
4015 debug_generic_expr (rhs1_type);
4016 debug_generic_expr (rhs2_type);
4017 debug_generic_expr (rhs3_type);
4024 case REALIGN_LOAD_EXPR:
4034 /* Verify a gimple assignment statement STMT with a single rhs.
4035 Returns true if anything is wrong. */
4038 verify_gimple_assign_single (gimple stmt)
4040 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4041 tree lhs = gimple_assign_lhs (stmt);
4042 tree lhs_type = TREE_TYPE (lhs);
4043 tree rhs1 = gimple_assign_rhs1 (stmt);
4044 tree rhs1_type = TREE_TYPE (rhs1);
4047 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4049 error ("non-trivial conversion at assignment");
4050 debug_generic_expr (lhs_type);
4051 debug_generic_expr (rhs1_type);
4055 if (gimple_clobber_p (stmt)
4056 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4058 error ("non-decl/MEM_REF LHS in clobber statement");
4059 debug_generic_expr (lhs);
4063 if (handled_component_p (lhs)
4064 || TREE_CODE (lhs) == MEM_REF
4065 || TREE_CODE (lhs) == TARGET_MEM_REF)
4066 res |= verify_types_in_gimple_reference (lhs, true);
4068 /* Special codes we cannot handle via their class. */
4073 tree op = TREE_OPERAND (rhs1, 0);
4074 if (!is_gimple_addressable (op))
4076 error ("invalid operand in unary expression");
4080 /* Technically there is no longer a need for matching types, but
4081 gimple hygiene asks for this check. In LTO we can end up
4082 combining incompatible units and thus end up with addresses
4083 of globals that change their type to a common one. */
4085 && !types_compatible_p (TREE_TYPE (op),
4086 TREE_TYPE (TREE_TYPE (rhs1)))
4087 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4090 error ("type mismatch in address expression");
4091 debug_generic_stmt (TREE_TYPE (rhs1));
4092 debug_generic_stmt (TREE_TYPE (op));
4096 return verify_types_in_gimple_reference (op, true);
4101 error ("INDIRECT_REF in gimple IL");
4107 case ARRAY_RANGE_REF:
4108 case VIEW_CONVERT_EXPR:
4111 case TARGET_MEM_REF:
4113 if (!is_gimple_reg (lhs)
4114 && is_gimple_reg_type (TREE_TYPE (lhs)))
4116 error ("invalid rhs for gimple memory store");
4117 debug_generic_stmt (lhs);
4118 debug_generic_stmt (rhs1);
4121 return res || verify_types_in_gimple_reference (rhs1, false);
4133 /* tcc_declaration */
4138 if (!is_gimple_reg (lhs)
4139 && !is_gimple_reg (rhs1)
4140 && is_gimple_reg_type (TREE_TYPE (lhs)))
4142 error ("invalid rhs for gimple memory store");
4143 debug_generic_stmt (lhs);
4144 debug_generic_stmt (rhs1);
4150 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4153 tree elt_i, elt_v, elt_t = NULL_TREE;
4155 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4157 /* For vector CONSTRUCTORs we require that either it is empty
4158 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4159 (then the element count must be correct to cover the whole
4160 outer vector and index must be NULL on all elements, or it is
4161 a CONSTRUCTOR of scalar elements, where we as an exception allow
4162 smaller number of elements (assuming zero filling) and
4163 consecutive indexes as compared to NULL indexes (such
4164 CONSTRUCTORs can appear in the IL from FEs). */
4165 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4167 if (elt_t == NULL_TREE)
4169 elt_t = TREE_TYPE (elt_v);
4170 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4172 tree elt_t = TREE_TYPE (elt_v);
4173 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4176 error ("incorrect type of vector CONSTRUCTOR"
4178 debug_generic_stmt (rhs1);
4181 else if (CONSTRUCTOR_NELTS (rhs1)
4182 * TYPE_VECTOR_SUBPARTS (elt_t)
4183 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4185 error ("incorrect number of vector CONSTRUCTOR"
4187 debug_generic_stmt (rhs1);
4191 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4194 error ("incorrect type of vector CONSTRUCTOR elements");
4195 debug_generic_stmt (rhs1);
4198 else if (CONSTRUCTOR_NELTS (rhs1)
4199 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4201 error ("incorrect number of vector CONSTRUCTOR elements");
4202 debug_generic_stmt (rhs1);
4206 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4208 error ("incorrect type of vector CONSTRUCTOR elements");
4209 debug_generic_stmt (rhs1);
4212 if (elt_i != NULL_TREE
4213 && (TREE_CODE (elt_t) == VECTOR_TYPE
4214 || TREE_CODE (elt_i) != INTEGER_CST
4215 || compare_tree_int (elt_i, i) != 0))
4217 error ("vector CONSTRUCTOR with non-NULL element index");
4218 debug_generic_stmt (rhs1);
4226 case WITH_SIZE_EXPR:
4236 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4237 is a problem, otherwise false. */
4240 verify_gimple_assign (gimple stmt)
4242 switch (gimple_assign_rhs_class (stmt))
4244 case GIMPLE_SINGLE_RHS:
4245 return verify_gimple_assign_single (stmt);
4247 case GIMPLE_UNARY_RHS:
4248 return verify_gimple_assign_unary (stmt);
4250 case GIMPLE_BINARY_RHS:
4251 return verify_gimple_assign_binary (stmt);
4253 case GIMPLE_TERNARY_RHS:
4254 return verify_gimple_assign_ternary (stmt);
4261 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4262 is a problem, otherwise false. */
4265 verify_gimple_return (gimple stmt)
4267 tree op = gimple_return_retval (stmt);
4268 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4270 /* We cannot test for present return values as we do not fix up missing
4271 return values from the original source. */
4275 if (!is_gimple_val (op)
4276 && TREE_CODE (op) != RESULT_DECL)
4278 error ("invalid operand in return statement");
4279 debug_generic_stmt (op);
4283 if ((TREE_CODE (op) == RESULT_DECL
4284 && DECL_BY_REFERENCE (op))
4285 || (TREE_CODE (op) == SSA_NAME
4286 && SSA_NAME_VAR (op)
4287 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4288 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4289 op = TREE_TYPE (op);
4291 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4293 error ("invalid conversion in return statement");
4294 debug_generic_stmt (restype);
4295 debug_generic_stmt (TREE_TYPE (op));
4303 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4304 is a problem, otherwise false. */
4307 verify_gimple_goto (gimple stmt)
4309 tree dest = gimple_goto_dest (stmt);
4311 /* ??? We have two canonical forms of direct goto destinations, a
4312 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4313 if (TREE_CODE (dest) != LABEL_DECL
4314 && (!is_gimple_val (dest)
4315 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4317 error ("goto destination is neither a label nor a pointer");
4324 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4325 is a problem, otherwise false. */
4328 verify_gimple_switch (gimple stmt)
4331 tree elt, prev_upper_bound = NULL_TREE;
4332 tree index_type, elt_type = NULL_TREE;
4334 if (!is_gimple_val (gimple_switch_index (stmt)))
4336 error ("invalid operand to switch statement");
4337 debug_generic_stmt (gimple_switch_index (stmt));
4341 index_type = TREE_TYPE (gimple_switch_index (stmt));
4342 if (! INTEGRAL_TYPE_P (index_type))
4344 error ("non-integral type switch statement");
4345 debug_generic_expr (index_type);
4349 elt = gimple_switch_label (stmt, 0);
4350 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4352 error ("invalid default case label in switch statement");
4353 debug_generic_expr (elt);
4357 n = gimple_switch_num_labels (stmt);
4358 for (i = 1; i < n; i++)
4360 elt = gimple_switch_label (stmt, i);
4362 if (! CASE_LOW (elt))
4364 error ("invalid case label in switch statement");
4365 debug_generic_expr (elt);
4369 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4371 error ("invalid case range in switch statement");
4372 debug_generic_expr (elt);
4378 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4379 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4381 error ("type mismatch for case label in switch statement");
4382 debug_generic_expr (elt);
4388 elt_type = TREE_TYPE (CASE_LOW (elt));
4389 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4391 error ("type precision mismatch in switch statement");
4396 if (prev_upper_bound)
4398 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4400 error ("case labels not sorted in switch statement");
4405 prev_upper_bound = CASE_HIGH (elt);
4406 if (! prev_upper_bound)
4407 prev_upper_bound = CASE_LOW (elt);
4413 /* Verify a gimple debug statement STMT.
4414 Returns true if anything is wrong. */
4417 verify_gimple_debug (gimple stmt ATTRIBUTE_UNUSED)
4419 /* There isn't much that could be wrong in a gimple debug stmt. A
4420 gimple debug bind stmt, for example, maps a tree, that's usually
4421 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
4422 component or member of an aggregate type, to another tree, that
4423 can be an arbitrary expression. These stmts expand into debug
4424 insns, and are converted to debug notes by var-tracking.c. */
4428 /* Verify a gimple label statement STMT.
4429 Returns true if anything is wrong. */
4432 verify_gimple_label (gimple stmt)
4434 tree decl = gimple_label_label (stmt);
4438 if (TREE_CODE (decl) != LABEL_DECL)
4440 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4441 && DECL_CONTEXT (decl) != current_function_decl)
4443 error ("label's context is not the current function decl");
4447 uid = LABEL_DECL_UID (decl);
4450 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4452 error ("incorrect entry in label_to_block_map");
4456 uid = EH_LANDING_PAD_NR (decl);
4459 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4460 if (decl != lp->post_landing_pad)
4462 error ("incorrect setting of landing pad number");
4470 /* Verify the GIMPLE statement STMT. Returns true if there is an
4471 error, otherwise false. */
4474 verify_gimple_stmt (gimple stmt)
4476 switch (gimple_code (stmt))
4479 return verify_gimple_assign (stmt);
4482 return verify_gimple_label (stmt);
4485 return verify_gimple_call (stmt);
4488 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4490 error ("invalid comparison code in gimple cond");
4493 if (!(!gimple_cond_true_label (stmt)
4494 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4495 || !(!gimple_cond_false_label (stmt)
4496 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4498 error ("invalid labels in gimple cond");
4502 return verify_gimple_comparison (boolean_type_node,
4503 gimple_cond_lhs (stmt),
4504 gimple_cond_rhs (stmt));
4507 return verify_gimple_goto (stmt);
4510 return verify_gimple_switch (stmt);
4513 return verify_gimple_return (stmt);
4518 case GIMPLE_TRANSACTION:
4519 return verify_gimple_transaction (stmt);
4521 /* Tuples that do not have tree operands. */
4523 case GIMPLE_PREDICT:
4525 case GIMPLE_EH_DISPATCH:
4526 case GIMPLE_EH_MUST_NOT_THROW:
4530 /* OpenMP directives are validated by the FE and never operated
4531 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4532 non-gimple expressions when the main index variable has had
4533 its address taken. This does not affect the loop itself
4534 because the header of an GIMPLE_OMP_FOR is merely used to determine
4535 how to setup the parallel iteration. */
4539 return verify_gimple_debug (stmt);
4546 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4547 and false otherwise. */
4550 verify_gimple_phi (gimple phi)
4554 tree phi_result = gimple_phi_result (phi);
4559 error ("invalid PHI result");
4563 virtual_p = virtual_operand_p (phi_result);
4564 if (TREE_CODE (phi_result) != SSA_NAME
4566 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4568 error ("invalid PHI result");
4572 for (i = 0; i < gimple_phi_num_args (phi); i++)
4574 tree t = gimple_phi_arg_def (phi, i);
4578 error ("missing PHI def");
4582 /* Addressable variables do have SSA_NAMEs but they
4583 are not considered gimple values. */
4584 else if ((TREE_CODE (t) == SSA_NAME
4585 && virtual_p != virtual_operand_p (t))
4587 && (TREE_CODE (t) != SSA_NAME
4588 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4590 && !is_gimple_val (t)))
4592 error ("invalid PHI argument");
4593 debug_generic_expr (t);
4596 #ifdef ENABLE_TYPES_CHECKING
4597 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4599 error ("incompatible types in PHI argument %u", i);
4600 debug_generic_stmt (TREE_TYPE (phi_result));
4601 debug_generic_stmt (TREE_TYPE (t));
4610 /* Verify the GIMPLE statements inside the sequence STMTS. */
4613 verify_gimple_in_seq_2 (gimple_seq stmts)
4615 gimple_stmt_iterator ittr;
4618 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4620 gimple stmt = gsi_stmt (ittr);
4622 switch (gimple_code (stmt))
4625 err |= verify_gimple_in_seq_2 (gimple_bind_body (stmt));
4629 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4630 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4633 case GIMPLE_EH_FILTER:
4634 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4637 case GIMPLE_EH_ELSE:
4638 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (stmt));
4639 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (stmt));
4643 err |= verify_gimple_in_seq_2 (gimple_catch_handler (stmt));
4646 case GIMPLE_TRANSACTION:
4647 err |= verify_gimple_transaction (stmt);
4652 bool err2 = verify_gimple_stmt (stmt);
4654 debug_gimple_stmt (stmt);
4663 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4664 is a problem, otherwise false. */
4667 verify_gimple_transaction (gimple stmt)
4669 tree lab = gimple_transaction_label (stmt);
4670 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4672 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4676 /* Verify the GIMPLE statements inside the statement list STMTS. */
4679 verify_gimple_in_seq (gimple_seq stmts)
4681 timevar_push (TV_TREE_STMT_VERIFY);
4682 if (verify_gimple_in_seq_2 (stmts))
4683 internal_error ("verify_gimple failed");
4684 timevar_pop (TV_TREE_STMT_VERIFY);
4687 /* Return true when the T can be shared. */
4690 tree_node_can_be_shared (tree t)
4692 if (IS_TYPE_OR_DECL_P (t)
4693 || is_gimple_min_invariant (t)
4694 || TREE_CODE (t) == SSA_NAME
4695 || t == error_mark_node
4696 || TREE_CODE (t) == IDENTIFIER_NODE)
4699 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4708 /* Called via walk_tree. Verify tree sharing. */
4711 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4713 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4715 if (tree_node_can_be_shared (*tp))
4717 *walk_subtrees = false;
4721 if (pointer_set_insert (visited, *tp))
4727 /* Called via walk_gimple_stmt. Verify tree sharing. */
4730 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4732 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4733 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4736 static bool eh_error_found;
4738 verify_eh_throw_stmt_node (void **slot, void *data)
4740 struct throw_stmt_node *node = (struct throw_stmt_node *)*slot;
4741 struct pointer_set_t *visited = (struct pointer_set_t *) data;
4743 if (!pointer_set_contains (visited, node->stmt))
4745 error ("dead STMT in EH table");
4746 debug_gimple_stmt (node->stmt);
4747 eh_error_found = true;
4752 /* Verify if the location LOCs block is in BLOCKS. */
4755 verify_location (pointer_set_t *blocks, location_t loc)
4757 tree block = LOCATION_BLOCK (loc);
4758 if (block != NULL_TREE
4759 && !pointer_set_contains (blocks, block))
4761 error ("location references block not in block tree");
4764 if (block != NULL_TREE)
4765 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4769 /* Called via walk_tree. Verify that expressions have no blocks. */
4772 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4776 *walk_subtrees = false;
4780 location_t loc = EXPR_LOCATION (*tp);
4781 if (LOCATION_BLOCK (loc) != NULL)
4787 /* Called via walk_tree. Verify locations of expressions. */
4790 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
4792 struct pointer_set_t *blocks = (struct pointer_set_t *) data;
4794 if (TREE_CODE (*tp) == VAR_DECL
4795 && DECL_HAS_DEBUG_EXPR_P (*tp))
4797 tree t = DECL_DEBUG_EXPR (*tp);
4798 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4802 if ((TREE_CODE (*tp) == VAR_DECL
4803 || TREE_CODE (*tp) == PARM_DECL
4804 || TREE_CODE (*tp) == RESULT_DECL)
4805 && DECL_HAS_VALUE_EXPR_P (*tp))
4807 tree t = DECL_VALUE_EXPR (*tp);
4808 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
4815 *walk_subtrees = false;
4819 location_t loc = EXPR_LOCATION (*tp);
4820 if (verify_location (blocks, loc))
4826 /* Called via walk_gimple_op. Verify locations of expressions. */
4829 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
4831 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4832 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
4835 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
4838 collect_subblocks (pointer_set_t *blocks, tree block)
4841 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
4843 pointer_set_insert (blocks, t);
4844 collect_subblocks (blocks, t);
4848 /* Verify the GIMPLE statements in the CFG of FN. */
4851 verify_gimple_in_cfg (struct function *fn)
4855 struct pointer_set_t *visited, *visited_stmts, *blocks;
4857 timevar_push (TV_TREE_STMT_VERIFY);
4858 visited = pointer_set_create ();
4859 visited_stmts = pointer_set_create ();
4861 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
4862 blocks = pointer_set_create ();
4863 if (DECL_INITIAL (fn->decl))
4865 pointer_set_insert (blocks, DECL_INITIAL (fn->decl));
4866 collect_subblocks (blocks, DECL_INITIAL (fn->decl));
4869 FOR_EACH_BB_FN (bb, fn)
4871 gimple_stmt_iterator gsi;
4873 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4875 gimple phi = gsi_stmt (gsi);
4879 pointer_set_insert (visited_stmts, phi);
4881 if (gimple_bb (phi) != bb)
4883 error ("gimple_bb (phi) is set to a wrong basic block");
4887 err2 |= verify_gimple_phi (phi);
4889 /* Only PHI arguments have locations. */
4890 if (gimple_location (phi) != UNKNOWN_LOCATION)
4892 error ("PHI node with location");
4896 for (i = 0; i < gimple_phi_num_args (phi); i++)
4898 tree arg = gimple_phi_arg_def (phi, i);
4899 tree addr = walk_tree (&arg, verify_node_sharing_1,
4903 error ("incorrect sharing of tree nodes");
4904 debug_generic_expr (addr);
4907 location_t loc = gimple_phi_arg_location (phi, i);
4908 if (virtual_operand_p (gimple_phi_result (phi))
4909 && loc != UNKNOWN_LOCATION)
4911 error ("virtual PHI with argument locations");
4914 addr = walk_tree (&arg, verify_expr_location_1, blocks, NULL);
4917 debug_generic_expr (addr);
4920 err2 |= verify_location (blocks, loc);
4924 debug_gimple_stmt (phi);
4928 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4930 gimple stmt = gsi_stmt (gsi);
4932 struct walk_stmt_info wi;
4936 pointer_set_insert (visited_stmts, stmt);
4938 if (gimple_bb (stmt) != bb)
4940 error ("gimple_bb (stmt) is set to a wrong basic block");
4944 err2 |= verify_gimple_stmt (stmt);
4945 err2 |= verify_location (blocks, gimple_location (stmt));
4947 memset (&wi, 0, sizeof (wi));
4948 wi.info = (void *) visited;
4949 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
4952 error ("incorrect sharing of tree nodes");
4953 debug_generic_expr (addr);
4957 memset (&wi, 0, sizeof (wi));
4958 wi.info = (void *) blocks;
4959 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
4962 debug_generic_expr (addr);
4966 /* ??? Instead of not checking these stmts at all the walker
4967 should know its context via wi. */
4968 if (!is_gimple_debug (stmt)
4969 && !is_gimple_omp (stmt))
4971 memset (&wi, 0, sizeof (wi));
4972 addr = walk_gimple_op (stmt, verify_expr, &wi);
4975 debug_generic_expr (addr);
4976 inform (gimple_location (stmt), "in statement");
4981 /* If the statement is marked as part of an EH region, then it is
4982 expected that the statement could throw. Verify that when we
4983 have optimizations that simplify statements such that we prove
4984 that they cannot throw, that we update other data structures
4986 lp_nr = lookup_stmt_eh_lp (stmt);
4989 if (!stmt_could_throw_p (stmt))
4991 error ("statement marked for throw, but doesn%'t");
4995 && !gsi_one_before_end_p (gsi)
4996 && stmt_can_throw_internal (stmt))
4998 error ("statement marked for throw in middle of block");
5004 debug_gimple_stmt (stmt);
5009 eh_error_found = false;
5010 if (get_eh_throw_stmt_table (cfun))
5011 htab_traverse (get_eh_throw_stmt_table (cfun),
5012 verify_eh_throw_stmt_node,
5015 if (err || eh_error_found)
5016 internal_error ("verify_gimple failed");
5018 pointer_set_destroy (visited);
5019 pointer_set_destroy (visited_stmts);
5020 pointer_set_destroy (blocks);
5021 verify_histograms ();
5022 timevar_pop (TV_TREE_STMT_VERIFY);
5026 /* Verifies that the flow information is OK. */
5029 gimple_verify_flow_info (void)
5033 gimple_stmt_iterator gsi;
5038 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5039 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5041 error ("ENTRY_BLOCK has IL associated with it");
5045 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5046 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5048 error ("EXIT_BLOCK has IL associated with it");
5052 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5053 if (e->flags & EDGE_FALLTHRU)
5055 error ("fallthru to exit from bb %d", e->src->index);
5059 FOR_EACH_BB_FN (bb, cfun)
5061 bool found_ctrl_stmt = false;
5065 /* Skip labels on the start of basic block. */
5066 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5069 gimple prev_stmt = stmt;
5071 stmt = gsi_stmt (gsi);
5073 if (gimple_code (stmt) != GIMPLE_LABEL)
5076 label = gimple_label_label (stmt);
5077 if (prev_stmt && DECL_NONLOCAL (label))
5079 error ("nonlocal label ");
5080 print_generic_expr (stderr, label, 0);
5081 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5086 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5088 error ("EH landing pad label ");
5089 print_generic_expr (stderr, label, 0);
5090 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5095 if (label_to_block (label) != bb)
5098 print_generic_expr (stderr, label, 0);
5099 fprintf (stderr, " to block does not match in bb %d",
5104 if (decl_function_context (label) != current_function_decl)
5107 print_generic_expr (stderr, label, 0);
5108 fprintf (stderr, " has incorrect context in bb %d",
5114 /* Verify that body of basic block BB is free of control flow. */
5115 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5117 gimple stmt = gsi_stmt (gsi);
5119 if (found_ctrl_stmt)
5121 error ("control flow in the middle of basic block %d",
5126 if (stmt_ends_bb_p (stmt))
5127 found_ctrl_stmt = true;
5129 if (gimple_code (stmt) == GIMPLE_LABEL)
5132 print_generic_expr (stderr, gimple_label_label (stmt), 0);
5133 fprintf (stderr, " in the middle of basic block %d", bb->index);
5138 gsi = gsi_last_bb (bb);
5139 if (gsi_end_p (gsi))
5142 stmt = gsi_stmt (gsi);
5144 if (gimple_code (stmt) == GIMPLE_LABEL)
5147 err |= verify_eh_edges (stmt);
5149 if (is_ctrl_stmt (stmt))
5151 FOR_EACH_EDGE (e, ei, bb->succs)
5152 if (e->flags & EDGE_FALLTHRU)
5154 error ("fallthru edge after a control statement in bb %d",
5160 if (gimple_code (stmt) != GIMPLE_COND)
5162 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5163 after anything else but if statement. */
5164 FOR_EACH_EDGE (e, ei, bb->succs)
5165 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5167 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5173 switch (gimple_code (stmt))
5180 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5184 || !(true_edge->flags & EDGE_TRUE_VALUE)
5185 || !(false_edge->flags & EDGE_FALSE_VALUE)
5186 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5187 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5188 || EDGE_COUNT (bb->succs) >= 3)
5190 error ("wrong outgoing edge flags at end of bb %d",
5198 if (simple_goto_p (stmt))
5200 error ("explicit goto at end of bb %d", bb->index);
5205 /* FIXME. We should double check that the labels in the
5206 destination blocks have their address taken. */
5207 FOR_EACH_EDGE (e, ei, bb->succs)
5208 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5209 | EDGE_FALSE_VALUE))
5210 || !(e->flags & EDGE_ABNORMAL))
5212 error ("wrong outgoing edge flags at end of bb %d",
5220 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5222 /* ... fallthru ... */
5224 if (!single_succ_p (bb)
5225 || (single_succ_edge (bb)->flags
5226 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5227 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5229 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5232 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5234 error ("return edge does not point to exit in bb %d",
5246 n = gimple_switch_num_labels (stmt);
5248 /* Mark all the destination basic blocks. */
5249 for (i = 0; i < n; ++i)
5251 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5252 basic_block label_bb = label_to_block (lab);
5253 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5254 label_bb->aux = (void *)1;
5257 /* Verify that the case labels are sorted. */
5258 prev = gimple_switch_label (stmt, 0);
5259 for (i = 1; i < n; ++i)
5261 tree c = gimple_switch_label (stmt, i);
5264 error ("found default case not at the start of "
5270 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5272 error ("case labels not sorted: ");
5273 print_generic_expr (stderr, prev, 0);
5274 fprintf (stderr," is greater than ");
5275 print_generic_expr (stderr, c, 0);
5276 fprintf (stderr," but comes before it.\n");
5281 /* VRP will remove the default case if it can prove it will
5282 never be executed. So do not verify there always exists
5283 a default case here. */
5285 FOR_EACH_EDGE (e, ei, bb->succs)
5289 error ("extra outgoing edge %d->%d",
5290 bb->index, e->dest->index);
5294 e->dest->aux = (void *)2;
5295 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5296 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5298 error ("wrong outgoing edge flags at end of bb %d",
5304 /* Check that we have all of them. */
5305 for (i = 0; i < n; ++i)
5307 tree lab = CASE_LABEL (gimple_switch_label (stmt, i));
5308 basic_block label_bb = label_to_block (lab);
5310 if (label_bb->aux != (void *)2)
5312 error ("missing edge %i->%i", bb->index, label_bb->index);
5317 FOR_EACH_EDGE (e, ei, bb->succs)
5318 e->dest->aux = (void *)0;
5322 case GIMPLE_EH_DISPATCH:
5323 err |= verify_eh_dispatch_edge (stmt);
5331 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5332 verify_dominators (CDI_DOMINATORS);
5338 /* Updates phi nodes after creating a forwarder block joined
5339 by edge FALLTHRU. */
5342 gimple_make_forwarder_block (edge fallthru)
5346 basic_block dummy, bb;
5348 gimple_stmt_iterator gsi;
5350 dummy = fallthru->src;
5351 bb = fallthru->dest;
5353 if (single_pred_p (bb))
5356 /* If we redirected a branch we must create new PHI nodes at the
5358 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
5360 gimple phi, new_phi;
5362 phi = gsi_stmt (gsi);
5363 var = gimple_phi_result (phi);
5364 new_phi = create_phi_node (var, bb);
5365 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
5366 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
5370 /* Add the arguments we have stored on edges. */
5371 FOR_EACH_EDGE (e, ei, bb->preds)
5376 flush_pending_stmts (e);
5381 /* Return a non-special label in the head of basic block BLOCK.
5382 Create one if it doesn't exist. */
5385 gimple_block_label (basic_block bb)
5387 gimple_stmt_iterator i, s = gsi_start_bb (bb);
5392 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
5394 stmt = gsi_stmt (i);
5395 if (gimple_code (stmt) != GIMPLE_LABEL)
5397 label = gimple_label_label (stmt);
5398 if (!DECL_NONLOCAL (label))
5401 gsi_move_before (&i, &s);
5406 label = create_artificial_label (UNKNOWN_LOCATION);
5407 stmt = gimple_build_label (label);
5408 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
5413 /* Attempt to perform edge redirection by replacing a possibly complex
5414 jump instruction by a goto or by removing the jump completely.
5415 This can apply only if all edges now point to the same block. The
5416 parameters and return values are equivalent to
5417 redirect_edge_and_branch. */
5420 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
5422 basic_block src = e->src;
5423 gimple_stmt_iterator i;
5426 /* We can replace or remove a complex jump only when we have exactly
5428 if (EDGE_COUNT (src->succs) != 2
5429 /* Verify that all targets will be TARGET. Specifically, the
5430 edge that is not E must also go to TARGET. */
5431 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
5434 i = gsi_last_bb (src);
5438 stmt = gsi_stmt (i);
5440 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
5442 gsi_remove (&i, true);
5443 e = ssa_redirect_edge (e, target);
5444 e->flags = EDGE_FALLTHRU;
5452 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
5453 edge representing the redirected branch. */
5456 gimple_redirect_edge_and_branch (edge e, basic_block dest)
5458 basic_block bb = e->src;
5459 gimple_stmt_iterator gsi;
5463 if (e->flags & EDGE_ABNORMAL)
5466 if (e->dest == dest)
5469 if (e->flags & EDGE_EH)
5470 return redirect_eh_edge (e, dest);
5472 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
5474 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5479 gsi = gsi_last_bb (bb);
5480 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5482 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5485 /* For COND_EXPR, we only need to redirect the edge. */
5489 /* No non-abnormal edges should lead from a non-simple goto, and
5490 simple ones should be represented implicitly. */
5495 tree label = gimple_block_label (dest);
5496 tree cases = get_cases_for_edge (e, stmt);
5498 /* If we have a list of cases associated with E, then use it
5499 as it's a lot faster than walking the entire case vector. */
5502 edge e2 = find_edge (e->src, dest);
5509 CASE_LABEL (cases) = label;
5510 cases = CASE_CHAIN (cases);
5513 /* If there was already an edge in the CFG, then we need
5514 to move all the cases associated with E to E2. */
5517 tree cases2 = get_cases_for_edge (e2, stmt);
5519 CASE_CHAIN (last) = CASE_CHAIN (cases2);
5520 CASE_CHAIN (cases2) = first;
5522 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
5526 size_t i, n = gimple_switch_num_labels (stmt);
5528 for (i = 0; i < n; i++)
5530 tree elt = gimple_switch_label (stmt, i);
5531 if (label_to_block (CASE_LABEL (elt)) == e->dest)
5532 CASE_LABEL (elt) = label;
5540 int i, n = gimple_asm_nlabels (stmt);
5543 for (i = 0; i < n; ++i)
5545 tree cons = gimple_asm_label_op (stmt, i);
5546 if (label_to_block (TREE_VALUE (cons)) == e->dest)
5549 label = gimple_block_label (dest);
5550 TREE_VALUE (cons) = label;
5554 /* If we didn't find any label matching the former edge in the
5555 asm labels, we must be redirecting the fallthrough
5557 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
5562 gsi_remove (&gsi, true);
5563 e->flags |= EDGE_FALLTHRU;
5566 case GIMPLE_OMP_RETURN:
5567 case GIMPLE_OMP_CONTINUE:
5568 case GIMPLE_OMP_SECTIONS_SWITCH:
5569 case GIMPLE_OMP_FOR:
5570 /* The edges from OMP constructs can be simply redirected. */
5573 case GIMPLE_EH_DISPATCH:
5574 if (!(e->flags & EDGE_FALLTHRU))
5575 redirect_eh_dispatch_edge (stmt, e, dest);
5578 case GIMPLE_TRANSACTION:
5579 /* The ABORT edge has a stored label associated with it, otherwise
5580 the edges are simply redirectable. */
5582 gimple_transaction_set_label (stmt, gimple_block_label (dest));
5586 /* Otherwise it must be a fallthru edge, and we don't need to
5587 do anything besides redirecting it. */
5588 gcc_assert (e->flags & EDGE_FALLTHRU);
5592 /* Update/insert PHI nodes as necessary. */
5594 /* Now update the edges in the CFG. */
5595 e = ssa_redirect_edge (e, dest);
5600 /* Returns true if it is possible to remove edge E by redirecting
5601 it to the destination of the other edge from E->src. */
5604 gimple_can_remove_branch_p (const_edge e)
5606 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
5612 /* Simple wrapper, as we can always redirect fallthru edges. */
5615 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
5617 e = gimple_redirect_edge_and_branch (e, dest);
5624 /* Splits basic block BB after statement STMT (but at least after the
5625 labels). If STMT is NULL, BB is split just after the labels. */
5628 gimple_split_block (basic_block bb, void *stmt)
5630 gimple_stmt_iterator gsi;
5631 gimple_stmt_iterator gsi_tgt;
5638 new_bb = create_empty_bb (bb);
5640 /* Redirect the outgoing edges. */
5641 new_bb->succs = bb->succs;
5643 FOR_EACH_EDGE (e, ei, new_bb->succs)
5646 if (stmt && gimple_code ((gimple) stmt) == GIMPLE_LABEL)
5649 /* Move everything from GSI to the new basic block. */
5650 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5652 act = gsi_stmt (gsi);
5653 if (gimple_code (act) == GIMPLE_LABEL)
5666 if (gsi_end_p (gsi))
5669 /* Split the statement list - avoid re-creating new containers as this
5670 brings ugly quadratic memory consumption in the inliner.
5671 (We are still quadratic since we need to update stmt BB pointers,
5673 gsi_split_seq_before (&gsi, &list);
5674 set_bb_seq (new_bb, list);
5675 for (gsi_tgt = gsi_start (list);
5676 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
5677 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
5683 /* Moves basic block BB after block AFTER. */
5686 gimple_move_block_after (basic_block bb, basic_block after)
5688 if (bb->prev_bb == after)
5692 link_block (bb, after);
5698 /* Return TRUE if block BB has no executable statements, otherwise return
5702 gimple_empty_block_p (basic_block bb)
5704 /* BB must have no executable statements. */
5705 gimple_stmt_iterator gsi = gsi_after_labels (bb);
5708 if (gsi_end_p (gsi))
5710 if (is_gimple_debug (gsi_stmt (gsi)))
5711 gsi_next_nondebug (&gsi);
5712 return gsi_end_p (gsi);
5716 /* Split a basic block if it ends with a conditional branch and if the
5717 other part of the block is not empty. */
5720 gimple_split_block_before_cond_jump (basic_block bb)
5722 gimple last, split_point;
5723 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
5724 if (gsi_end_p (gsi))
5726 last = gsi_stmt (gsi);
5727 if (gimple_code (last) != GIMPLE_COND
5728 && gimple_code (last) != GIMPLE_SWITCH)
5730 gsi_prev_nondebug (&gsi);
5731 split_point = gsi_stmt (gsi);
5732 return split_block (bb, split_point)->dest;
5736 /* Return true if basic_block can be duplicated. */
5739 gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
5744 /* Create a duplicate of the basic block BB. NOTE: This does not
5745 preserve SSA form. */
5748 gimple_duplicate_bb (basic_block bb)
5751 gimple_stmt_iterator gsi, gsi_tgt;
5752 gimple_seq phis = phi_nodes (bb);
5753 gimple phi, stmt, copy;
5755 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
5757 /* Copy the PHI nodes. We ignore PHI node arguments here because
5758 the incoming edges have not been setup yet. */
5759 for (gsi = gsi_start (phis); !gsi_end_p (gsi); gsi_next (&gsi))
5761 phi = gsi_stmt (gsi);
5762 copy = create_phi_node (NULL_TREE, new_bb);
5763 create_new_def_for (gimple_phi_result (phi), copy,
5764 gimple_phi_result_ptr (copy));
5765 gimple_set_uid (copy, gimple_uid (phi));
5768 gsi_tgt = gsi_start_bb (new_bb);
5769 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5771 def_operand_p def_p;
5772 ssa_op_iter op_iter;
5775 stmt = gsi_stmt (gsi);
5776 if (gimple_code (stmt) == GIMPLE_LABEL)
5779 /* Don't duplicate label debug stmts. */
5780 if (gimple_debug_bind_p (stmt)
5781 && TREE_CODE (gimple_debug_bind_get_var (stmt))
5785 /* Create a new copy of STMT and duplicate STMT's virtual
5787 copy = gimple_copy (stmt);
5788 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
5790 maybe_duplicate_eh_stmt (copy, stmt);
5791 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
5793 /* When copying around a stmt writing into a local non-user
5794 aggregate, make sure it won't share stack slot with other
5796 lhs = gimple_get_lhs (stmt);
5797 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5799 tree base = get_base_address (lhs);
5801 && (TREE_CODE (base) == VAR_DECL
5802 || TREE_CODE (base) == RESULT_DECL)
5803 && DECL_IGNORED_P (base)
5804 && !TREE_STATIC (base)
5805 && !DECL_EXTERNAL (base)
5806 && (TREE_CODE (base) != VAR_DECL
5807 || !DECL_HAS_VALUE_EXPR_P (base)))
5808 DECL_NONSHAREABLE (base) = 1;
5811 /* Create new names for all the definitions created by COPY and
5812 add replacement mappings for each new name. */
5813 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
5814 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
5820 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
5823 add_phi_args_after_copy_edge (edge e_copy)
5825 basic_block bb, bb_copy = e_copy->src, dest;
5828 gimple phi, phi_copy;
5830 gimple_stmt_iterator psi, psi_copy;
5832 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
5835 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
5837 if (e_copy->dest->flags & BB_DUPLICATED)
5838 dest = get_bb_original (e_copy->dest);
5840 dest = e_copy->dest;
5842 e = find_edge (bb, dest);
5845 /* During loop unrolling the target of the latch edge is copied.
5846 In this case we are not looking for edge to dest, but to
5847 duplicated block whose original was dest. */
5848 FOR_EACH_EDGE (e, ei, bb->succs)
5850 if ((e->dest->flags & BB_DUPLICATED)
5851 && get_bb_original (e->dest) == dest)
5855 gcc_assert (e != NULL);
5858 for (psi = gsi_start_phis (e->dest),
5859 psi_copy = gsi_start_phis (e_copy->dest);
5861 gsi_next (&psi), gsi_next (&psi_copy))
5863 phi = gsi_stmt (psi);
5864 phi_copy = gsi_stmt (psi_copy);
5865 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5866 add_phi_arg (phi_copy, def, e_copy,
5867 gimple_phi_arg_location_from_edge (phi, e));
5872 /* Basic block BB_COPY was created by code duplication. Add phi node
5873 arguments for edges going out of BB_COPY. The blocks that were
5874 duplicated have BB_DUPLICATED set. */
5877 add_phi_args_after_copy_bb (basic_block bb_copy)
5882 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
5884 add_phi_args_after_copy_edge (e_copy);
5888 /* Blocks in REGION_COPY array of length N_REGION were created by
5889 duplication of basic blocks. Add phi node arguments for edges
5890 going from these blocks. If E_COPY is not NULL, also add
5891 phi node arguments for its destination.*/
5894 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
5899 for (i = 0; i < n_region; i++)
5900 region_copy[i]->flags |= BB_DUPLICATED;
5902 for (i = 0; i < n_region; i++)
5903 add_phi_args_after_copy_bb (region_copy[i]);
5905 add_phi_args_after_copy_edge (e_copy);
5907 for (i = 0; i < n_region; i++)
5908 region_copy[i]->flags &= ~BB_DUPLICATED;
5911 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
5912 important exit edge EXIT. By important we mean that no SSA name defined
5913 inside region is live over the other exit edges of the region. All entry
5914 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
5915 to the duplicate of the region. Dominance and loop information is
5916 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
5917 UPDATE_DOMINANCE is false then we assume that the caller will update the
5918 dominance information after calling this function. The new basic
5919 blocks are stored to REGION_COPY in the same order as they had in REGION,
5920 provided that REGION_COPY is not NULL.
5921 The function returns false if it is unable to copy the region,
5925 gimple_duplicate_sese_region (edge entry, edge exit,
5926 basic_block *region, unsigned n_region,
5927 basic_block *region_copy,
5928 bool update_dominance)
5931 bool free_region_copy = false, copying_header = false;
5932 struct loop *loop = entry->dest->loop_father;
5934 vec<basic_block> doms;
5936 int total_freq = 0, entry_freq = 0;
5937 gcov_type total_count = 0, entry_count = 0;
5939 if (!can_copy_bbs_p (region, n_region))
5942 /* Some sanity checking. Note that we do not check for all possible
5943 missuses of the functions. I.e. if you ask to copy something weird,
5944 it will work, but the state of structures probably will not be
5946 for (i = 0; i < n_region; i++)
5948 /* We do not handle subloops, i.e. all the blocks must belong to the
5950 if (region[i]->loop_father != loop)
5953 if (region[i] != entry->dest
5954 && region[i] == loop->header)
5958 /* In case the function is used for loop header copying (which is the primary
5959 use), ensure that EXIT and its copy will be new latch and entry edges. */
5960 if (loop->header == entry->dest)
5962 copying_header = true;
5964 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
5967 for (i = 0; i < n_region; i++)
5968 if (region[i] != exit->src
5969 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
5973 initialize_original_copy_tables ();
5976 set_loop_copy (loop, loop_outer (loop));
5978 set_loop_copy (loop, loop);
5982 region_copy = XNEWVEC (basic_block, n_region);
5983 free_region_copy = true;
5986 /* Record blocks outside the region that are dominated by something
5988 if (update_dominance)
5991 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
5994 if (entry->dest->count)
5996 total_count = entry->dest->count;
5997 entry_count = entry->count;
5998 /* Fix up corner cases, to avoid division by zero or creation of negative
6000 if (entry_count > total_count)
6001 entry_count = total_count;
6005 total_freq = entry->dest->frequency;
6006 entry_freq = EDGE_FREQUENCY (entry);
6007 /* Fix up corner cases, to avoid division by zero or creation of negative
6009 if (total_freq == 0)
6011 else if (entry_freq > total_freq)
6012 entry_freq = total_freq;
6015 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6016 split_edge_bb_loc (entry), update_dominance);
6019 scale_bbs_frequencies_gcov_type (region, n_region,
6020 total_count - entry_count,
6022 scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
6027 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6029 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6034 loop->header = exit->dest;
6035 loop->latch = exit->src;
6038 /* Redirect the entry and add the phi node arguments. */
6039 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6040 gcc_assert (redirected != NULL);
6041 flush_pending_stmts (entry);
6043 /* Concerning updating of dominators: We must recount dominators
6044 for entry block and its copy. Anything that is outside of the
6045 region, but was dominated by something inside needs recounting as
6047 if (update_dominance)
6049 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6050 doms.safe_push (get_bb_original (entry->dest));
6051 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6055 /* Add the other PHI node arguments. */
6056 add_phi_args_after_copy (region_copy, n_region, NULL);
6058 if (free_region_copy)
6061 free_original_copy_tables ();
6065 /* Checks if BB is part of the region defined by N_REGION BBS. */
6067 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6071 for (n = 0; n < n_region; n++)
6079 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6080 are stored to REGION_COPY in the same order in that they appear
6081 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6082 the region, EXIT an exit from it. The condition guarding EXIT
6083 is moved to ENTRY. Returns true if duplication succeeds, false
6109 gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
6110 basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
6111 basic_block *region_copy ATTRIBUTE_UNUSED)
6114 bool free_region_copy = false;
6115 struct loop *loop = exit->dest->loop_father;
6116 struct loop *orig_loop = entry->dest->loop_father;
6117 basic_block switch_bb, entry_bb, nentry_bb;
6118 vec<basic_block> doms;
6119 int total_freq = 0, exit_freq = 0;
6120 gcov_type total_count = 0, exit_count = 0;
6121 edge exits[2], nexits[2], e;
6122 gimple_stmt_iterator gsi;
6125 basic_block exit_bb;
6126 gimple_stmt_iterator psi;
6129 struct loop *target, *aloop, *cloop;
6131 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6133 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6135 if (!can_copy_bbs_p (region, n_region))
6138 initialize_original_copy_tables ();
6139 set_loop_copy (orig_loop, loop);
6142 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6144 if (bb_part_of_region_p (aloop->header, region, n_region))
6146 cloop = duplicate_loop (aloop, target);
6147 duplicate_subloops (aloop, cloop);
6153 region_copy = XNEWVEC (basic_block, n_region);
6154 free_region_copy = true;
6157 gcc_assert (!need_ssa_update_p (cfun));
6159 /* Record blocks outside the region that are dominated by something
6161 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6163 if (exit->src->count)
6165 total_count = exit->src->count;
6166 exit_count = exit->count;
6167 /* Fix up corner cases, to avoid division by zero or creation of negative
6169 if (exit_count > total_count)
6170 exit_count = total_count;
6174 total_freq = exit->src->frequency;
6175 exit_freq = EDGE_FREQUENCY (exit);
6176 /* Fix up corner cases, to avoid division by zero or creation of negative
6178 if (total_freq == 0)
6180 if (exit_freq > total_freq)
6181 exit_freq = total_freq;
6184 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6185 split_edge_bb_loc (exit), true);
6188 scale_bbs_frequencies_gcov_type (region, n_region,
6189 total_count - exit_count,
6191 scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
6196 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6198 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6201 /* Create the switch block, and put the exit condition to it. */
6202 entry_bb = entry->dest;
6203 nentry_bb = get_bb_copy (entry_bb);
6204 if (!last_stmt (entry->src)
6205 || !stmt_ends_bb_p (last_stmt (entry->src)))
6206 switch_bb = entry->src;
6208 switch_bb = split_edge (entry);
6209 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6211 gsi = gsi_last_bb (switch_bb);
6212 cond_stmt = last_stmt (exit->src);
6213 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6214 cond_stmt = gimple_copy (cond_stmt);
6216 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6218 sorig = single_succ_edge (switch_bb);
6219 sorig->flags = exits[1]->flags;
6220 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6222 /* Register the new edge from SWITCH_BB in loop exit lists. */
6223 rescan_loop_exit (snew, true, false);
6225 /* Add the PHI node arguments. */
6226 add_phi_args_after_copy (region_copy, n_region, snew);
6228 /* Get rid of now superfluous conditions and associated edges (and phi node
6230 exit_bb = exit->dest;
6232 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6233 PENDING_STMT (e) = NULL;
6235 /* The latch of ORIG_LOOP was copied, and so was the backedge
6236 to the original header. We redirect this backedge to EXIT_BB. */
6237 for (i = 0; i < n_region; i++)
6238 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6240 gcc_assert (single_succ_edge (region_copy[i]));
6241 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6242 PENDING_STMT (e) = NULL;
6243 for (psi = gsi_start_phis (exit_bb);
6247 phi = gsi_stmt (psi);
6248 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6249 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6252 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6253 PENDING_STMT (e) = NULL;
6255 /* Anything that is outside of the region, but was dominated by something
6256 inside needs to update dominance info. */
6257 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6259 /* Update the SSA web. */
6260 update_ssa (TODO_update_ssa);
6262 if (free_region_copy)
6265 free_original_copy_tables ();
6269 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6270 adding blocks when the dominator traversal reaches EXIT. This
6271 function silently assumes that ENTRY strictly dominates EXIT. */
6274 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6275 vec<basic_block> *bbs_p)
6279 for (son = first_dom_son (CDI_DOMINATORS, entry);
6281 son = next_dom_son (CDI_DOMINATORS, son))
6283 bbs_p->safe_push (son);
6285 gather_blocks_in_sese_region (son, exit, bbs_p);
6289 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
6290 The duplicates are recorded in VARS_MAP. */
6293 replace_by_duplicate_decl (tree *tp, struct pointer_map_t *vars_map,
6296 tree t = *tp, new_t;
6297 struct function *f = DECL_STRUCT_FUNCTION (to_context);
6300 if (DECL_CONTEXT (t) == to_context)
6303 loc = pointer_map_contains (vars_map, t);
6307 loc = pointer_map_insert (vars_map, t);
6311 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
6312 add_local_decl (f, new_t);
6316 gcc_assert (TREE_CODE (t) == CONST_DECL);
6317 new_t = copy_node (t);
6319 DECL_CONTEXT (new_t) = to_context;
6324 new_t = (tree) *loc;
6330 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
6331 VARS_MAP maps old ssa names and var_decls to the new ones. */
6334 replace_ssa_name (tree name, struct pointer_map_t *vars_map,
6340 gcc_assert (!virtual_operand_p (name));
6342 loc = pointer_map_contains (vars_map, name);
6346 tree decl = SSA_NAME_VAR (name);
6349 replace_by_duplicate_decl (&decl, vars_map, to_context);
6350 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6351 decl, SSA_NAME_DEF_STMT (name));
6352 if (SSA_NAME_IS_DEFAULT_DEF (name))
6353 set_ssa_default_def (DECL_STRUCT_FUNCTION (to_context),
6357 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
6358 name, SSA_NAME_DEF_STMT (name));
6360 loc = pointer_map_insert (vars_map, name);
6364 new_name = (tree) *loc;
6375 struct pointer_map_t *vars_map;
6376 htab_t new_label_map;
6377 struct pointer_map_t *eh_map;
6381 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
6382 contained in *TP if it has been ORIG_BLOCK previously and change the
6383 DECL_CONTEXT of every local variable referenced in *TP. */
6386 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
6388 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
6389 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6394 tree block = TREE_BLOCK (t);
6395 if (block == p->orig_block
6396 || (p->orig_block == NULL_TREE
6397 && block != NULL_TREE))
6398 TREE_SET_BLOCK (t, p->new_block);
6399 #ifdef ENABLE_CHECKING
6400 else if (block != NULL_TREE)
6402 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6403 block = BLOCK_SUPERCONTEXT (block);
6404 gcc_assert (block == p->orig_block);
6408 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
6410 if (TREE_CODE (t) == SSA_NAME)
6411 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
6412 else if (TREE_CODE (t) == LABEL_DECL)
6414 if (p->new_label_map)
6416 struct tree_map in, *out;
6418 out = (struct tree_map *)
6419 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
6424 DECL_CONTEXT (t) = p->to_context;
6426 else if (p->remap_decls_p)
6428 /* Replace T with its duplicate. T should no longer appear in the
6429 parent function, so this looks wasteful; however, it may appear
6430 in referenced_vars, and more importantly, as virtual operands of
6431 statements, and in alias lists of other variables. It would be
6432 quite difficult to expunge it from all those places. ??? It might
6433 suffice to do this for addressable variables. */
6434 if ((TREE_CODE (t) == VAR_DECL
6435 && !is_global_var (t))
6436 || TREE_CODE (t) == CONST_DECL)
6437 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
6441 else if (TYPE_P (t))
6447 /* Helper for move_stmt_r. Given an EH region number for the source
6448 function, map that to the duplicate EH regio number in the dest. */
6451 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
6453 eh_region old_r, new_r;
6456 old_r = get_eh_region_from_number (old_nr);
6457 slot = pointer_map_contains (p->eh_map, old_r);
6458 new_r = (eh_region) *slot;
6460 return new_r->index;
6463 /* Similar, but operate on INTEGER_CSTs. */
6466 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
6470 old_nr = tree_to_shwi (old_t_nr);
6471 new_nr = move_stmt_eh_region_nr (old_nr, p);
6473 return build_int_cst (integer_type_node, new_nr);
6476 /* Like move_stmt_op, but for gimple statements.
6478 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
6479 contained in the current statement in *GSI_P and change the
6480 DECL_CONTEXT of every local variable referenced in the current
6484 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
6485 struct walk_stmt_info *wi)
6487 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
6488 gimple stmt = gsi_stmt (*gsi_p);
6489 tree block = gimple_block (stmt);
6491 if (block == p->orig_block
6492 || (p->orig_block == NULL_TREE
6493 && block != NULL_TREE))
6494 gimple_set_block (stmt, p->new_block);
6496 switch (gimple_code (stmt))
6499 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6501 tree r, fndecl = gimple_call_fndecl (stmt);
6502 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
6503 switch (DECL_FUNCTION_CODE (fndecl))
6505 case BUILT_IN_EH_COPY_VALUES:
6506 r = gimple_call_arg (stmt, 1);
6507 r = move_stmt_eh_region_tree_nr (r, p);
6508 gimple_call_set_arg (stmt, 1, r);
6511 case BUILT_IN_EH_POINTER:
6512 case BUILT_IN_EH_FILTER:
6513 r = gimple_call_arg (stmt, 0);
6514 r = move_stmt_eh_region_tree_nr (r, p);
6515 gimple_call_set_arg (stmt, 0, r);
6526 int r = gimple_resx_region (stmt);
6527 r = move_stmt_eh_region_nr (r, p);
6528 gimple_resx_set_region (stmt, r);
6532 case GIMPLE_EH_DISPATCH:
6534 int r = gimple_eh_dispatch_region (stmt);
6535 r = move_stmt_eh_region_nr (r, p);
6536 gimple_eh_dispatch_set_region (stmt, r);
6540 case GIMPLE_OMP_RETURN:
6541 case GIMPLE_OMP_CONTINUE:
6544 if (is_gimple_omp (stmt))
6546 /* Do not remap variables inside OMP directives. Variables
6547 referenced in clauses and directive header belong to the
6548 parent function and should not be moved into the child
6550 bool save_remap_decls_p = p->remap_decls_p;
6551 p->remap_decls_p = false;
6552 *handled_ops_p = true;
6554 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
6557 p->remap_decls_p = save_remap_decls_p;
6565 /* Move basic block BB from function CFUN to function DEST_FN. The
6566 block is moved out of the original linked list and placed after
6567 block AFTER in the new list. Also, the block is removed from the
6568 original array of blocks and placed in DEST_FN's array of blocks.
6569 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
6570 updated to reflect the moved edges.
6572 The local variables are remapped to new instances, VARS_MAP is used
6573 to record the mapping. */
6576 move_block_to_fn (struct function *dest_cfun, basic_block bb,
6577 basic_block after, bool update_edge_count_p,
6578 struct move_stmt_d *d)
6580 struct control_flow_graph *cfg;
6583 gimple_stmt_iterator si;
6584 unsigned old_len, new_len;
6586 /* Remove BB from dominance structures. */
6587 delete_from_dominance_info (CDI_DOMINATORS, bb);
6589 /* Move BB from its current loop to the copy in the new function. */
6592 struct loop *new_loop = (struct loop *)bb->loop_father->aux;
6594 bb->loop_father = new_loop;
6597 /* Link BB to the new linked list. */
6598 move_block_after (bb, after);
6600 /* Update the edge count in the corresponding flowgraphs. */
6601 if (update_edge_count_p)
6602 FOR_EACH_EDGE (e, ei, bb->succs)
6604 cfun->cfg->x_n_edges--;
6605 dest_cfun->cfg->x_n_edges++;
6608 /* Remove BB from the original basic block array. */
6609 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
6610 cfun->cfg->x_n_basic_blocks--;
6612 /* Grow DEST_CFUN's basic block array if needed. */
6613 cfg = dest_cfun->cfg;
6614 cfg->x_n_basic_blocks++;
6615 if (bb->index >= cfg->x_last_basic_block)
6616 cfg->x_last_basic_block = bb->index + 1;
6618 old_len = vec_safe_length (cfg->x_basic_block_info);
6619 if ((unsigned) cfg->x_last_basic_block >= old_len)
6621 new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
6622 vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
6625 (*cfg->x_basic_block_info)[bb->index] = bb;
6627 /* Remap the variables in phi nodes. */
6628 for (si = gsi_start_phis (bb); !gsi_end_p (si); )
6630 gimple phi = gsi_stmt (si);
6632 tree op = PHI_RESULT (phi);
6636 if (virtual_operand_p (op))
6638 /* Remove the phi nodes for virtual operands (alias analysis will be
6639 run for the new function, anyway). */
6640 remove_phi_node (&si, true);
6644 SET_PHI_RESULT (phi,
6645 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6646 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
6648 op = USE_FROM_PTR (use);
6649 if (TREE_CODE (op) == SSA_NAME)
6650 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
6653 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
6655 location_t locus = gimple_phi_arg_location (phi, i);
6656 tree block = LOCATION_BLOCK (locus);
6658 if (locus == UNKNOWN_LOCATION)
6660 if (d->orig_block == NULL_TREE || block == d->orig_block)
6662 if (d->new_block == NULL_TREE)
6663 locus = LOCATION_LOCUS (locus);
6665 locus = COMBINE_LOCATION_DATA (line_table, locus, d->new_block);
6666 gimple_phi_arg_set_location (phi, i, locus);
6673 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6675 gimple stmt = gsi_stmt (si);
6676 struct walk_stmt_info wi;
6678 memset (&wi, 0, sizeof (wi));
6680 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
6682 if (gimple_code (stmt) == GIMPLE_LABEL)
6684 tree label = gimple_label_label (stmt);
6685 int uid = LABEL_DECL_UID (label);
6687 gcc_assert (uid > -1);
6689 old_len = vec_safe_length (cfg->x_label_to_block_map);
6690 if (old_len <= (unsigned) uid)
6692 new_len = 3 * uid / 2 + 1;
6693 vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
6696 (*cfg->x_label_to_block_map)[uid] = bb;
6697 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
6699 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
6701 if (uid >= dest_cfun->cfg->last_label_uid)
6702 dest_cfun->cfg->last_label_uid = uid + 1;
6705 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
6706 remove_stmt_from_eh_lp_fn (cfun, stmt);
6708 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
6709 gimple_remove_stmt_histograms (cfun, stmt);
6711 /* We cannot leave any operands allocated from the operand caches of
6712 the current function. */
6713 free_stmt_operands (cfun, stmt);
6714 push_cfun (dest_cfun);
6719 FOR_EACH_EDGE (e, ei, bb->succs)
6720 if (e->goto_locus != UNKNOWN_LOCATION)
6722 tree block = LOCATION_BLOCK (e->goto_locus);
6723 if (d->orig_block == NULL_TREE
6724 || block == d->orig_block)
6725 e->goto_locus = d->new_block ?
6726 COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
6727 LOCATION_LOCUS (e->goto_locus);
6731 /* Examine the statements in BB (which is in SRC_CFUN); find and return
6732 the outermost EH region. Use REGION as the incoming base EH region. */
6735 find_outermost_region_in_block (struct function *src_cfun,
6736 basic_block bb, eh_region region)
6738 gimple_stmt_iterator si;
6740 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
6742 gimple stmt = gsi_stmt (si);
6743 eh_region stmt_region;
6746 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
6747 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
6751 region = stmt_region;
6752 else if (stmt_region != region)
6754 region = eh_region_outermost (src_cfun, stmt_region, region);
6755 gcc_assert (region != NULL);
6764 new_label_mapper (tree decl, void *data)
6766 htab_t hash = (htab_t) data;
6770 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
6772 m = XNEW (struct tree_map);
6773 m->hash = DECL_UID (decl);
6774 m->base.from = decl;
6775 m->to = create_artificial_label (UNKNOWN_LOCATION);
6776 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
6777 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
6778 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
6780 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
6781 gcc_assert (*slot == NULL);
6788 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
6792 replace_block_vars_by_duplicates (tree block, struct pointer_map_t *vars_map,
6797 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
6800 if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
6802 replace_by_duplicate_decl (&t, vars_map, to_context);
6805 if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
6807 SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (*tp));
6808 DECL_HAS_VALUE_EXPR_P (t) = 1;
6810 DECL_CHAIN (t) = DECL_CHAIN (*tp);
6815 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
6816 replace_block_vars_by_duplicates (block, vars_map, to_context);
6819 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
6823 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
6826 /* Discard it from the old loop array. */
6827 (*get_loops (fn1))[loop->num] = NULL;
6829 /* Place it in the new loop array, assigning it a new number. */
6830 loop->num = number_of_loops (fn2);
6831 vec_safe_push (loops_for_fn (fn2)->larray, loop);
6833 /* Recurse to children. */
6834 for (loop = loop->inner; loop; loop = loop->next)
6835 fixup_loop_arrays_after_move (fn1, fn2, loop);
6838 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
6839 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
6840 single basic block in the original CFG and the new basic block is
6841 returned. DEST_CFUN must not have a CFG yet.
6843 Note that the region need not be a pure SESE region. Blocks inside
6844 the region may contain calls to abort/exit. The only restriction
6845 is that ENTRY_BB should be the only entry point and it must
6848 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
6849 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
6850 to the new function.
6852 All local variables referenced in the region are assumed to be in
6853 the corresponding BLOCK_VARS and unexpanded variable lists
6854 associated with DEST_CFUN. */
6857 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
6858 basic_block exit_bb, tree orig_block)
6860 vec<basic_block> bbs, dom_bbs;
6861 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
6862 basic_block after, bb, *entry_pred, *exit_succ, abb;
6863 struct function *saved_cfun = cfun;
6864 int *entry_flag, *exit_flag;
6865 unsigned *entry_prob, *exit_prob;
6866 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
6869 htab_t new_label_map;
6870 struct pointer_map_t *vars_map, *eh_map;
6871 struct loop *loop = entry_bb->loop_father;
6872 struct loop *loop0 = get_loop (saved_cfun, 0);
6873 struct move_stmt_d d;
6875 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
6877 gcc_assert (entry_bb != exit_bb
6879 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
6881 /* Collect all the blocks in the region. Manually add ENTRY_BB
6882 because it won't be added by dfs_enumerate_from. */
6884 bbs.safe_push (entry_bb);
6885 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
6887 /* The blocks that used to be dominated by something in BBS will now be
6888 dominated by the new block. */
6889 dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
6893 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
6894 the predecessor edges to ENTRY_BB and the successor edges to
6895 EXIT_BB so that we can re-attach them to the new basic block that
6896 will replace the region. */
6897 num_entry_edges = EDGE_COUNT (entry_bb->preds);
6898 entry_pred = XNEWVEC (basic_block, num_entry_edges);
6899 entry_flag = XNEWVEC (int, num_entry_edges);
6900 entry_prob = XNEWVEC (unsigned, num_entry_edges);
6902 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
6904 entry_prob[i] = e->probability;
6905 entry_flag[i] = e->flags;
6906 entry_pred[i++] = e->src;
6912 num_exit_edges = EDGE_COUNT (exit_bb->succs);
6913 exit_succ = XNEWVEC (basic_block, num_exit_edges);
6914 exit_flag = XNEWVEC (int, num_exit_edges);
6915 exit_prob = XNEWVEC (unsigned, num_exit_edges);
6917 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
6919 exit_prob[i] = e->probability;
6920 exit_flag[i] = e->flags;
6921 exit_succ[i++] = e->dest;
6933 /* Switch context to the child function to initialize DEST_FN's CFG. */
6934 gcc_assert (dest_cfun->cfg == NULL);
6935 push_cfun (dest_cfun);
6937 init_empty_tree_cfg ();
6939 /* Initialize EH information for the new function. */
6941 new_label_map = NULL;
6944 eh_region region = NULL;
6946 FOR_EACH_VEC_ELT (bbs, i, bb)
6947 region = find_outermost_region_in_block (saved_cfun, bb, region);
6949 init_eh_for_function ();
6952 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
6953 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
6954 new_label_mapper, new_label_map);
6958 /* Initialize an empty loop tree. */
6959 struct loops *loops = ggc_alloc_cleared_loops ();
6960 init_loops_structure (dest_cfun, loops, 1);
6961 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
6962 set_loops_for_fn (dest_cfun, loops);
6964 /* Move the outlined loop tree part. */
6965 num_nodes = bbs.length ();
6966 FOR_EACH_VEC_ELT (bbs, i, bb)
6968 if (bb->loop_father->header == bb)
6970 struct loop *this_loop = bb->loop_father;
6971 struct loop *outer = loop_outer (this_loop);
6973 /* If the SESE region contains some bbs ending with
6974 a noreturn call, those are considered to belong
6975 to the outermost loop in saved_cfun, rather than
6976 the entry_bb's loop_father. */
6980 num_nodes -= this_loop->num_nodes;
6981 flow_loop_tree_node_remove (bb->loop_father);
6982 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
6983 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
6986 else if (bb->loop_father == loop0 && loop0 != loop)
6989 /* Remove loop exits from the outlined region. */
6990 if (loops_for_fn (saved_cfun)->exits)
6991 FOR_EACH_EDGE (e, ei, bb->succs)
6993 void **slot = htab_find_slot_with_hash
6994 (loops_for_fn (saved_cfun)->exits, e,
6995 htab_hash_pointer (e), NO_INSERT);
6997 htab_clear_slot (loops_for_fn (saved_cfun)->exits, slot);
7002 /* Adjust the number of blocks in the tree root of the outlined part. */
7003 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7005 /* Setup a mapping to be used by move_block_to_fn. */
7006 loop->aux = current_loops->tree_root;
7007 loop0->aux = current_loops->tree_root;
7011 /* Move blocks from BBS into DEST_CFUN. */
7012 gcc_assert (bbs.length () >= 2);
7013 after = dest_cfun->cfg->x_entry_block_ptr;
7014 vars_map = pointer_map_create ();
7016 memset (&d, 0, sizeof (d));
7017 d.orig_block = orig_block;
7018 d.new_block = DECL_INITIAL (dest_cfun->decl);
7019 d.from_context = cfun->decl;
7020 d.to_context = dest_cfun->decl;
7021 d.vars_map = vars_map;
7022 d.new_label_map = new_label_map;
7024 d.remap_decls_p = true;
7026 FOR_EACH_VEC_ELT (bbs, i, bb)
7028 /* No need to update edge counts on the last block. It has
7029 already been updated earlier when we detached the region from
7030 the original CFG. */
7031 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
7037 /* Loop sizes are no longer correct, fix them up. */
7038 loop->num_nodes -= num_nodes;
7039 for (struct loop *outer = loop_outer (loop);
7040 outer; outer = loop_outer (outer))
7041 outer->num_nodes -= num_nodes;
7042 loop0->num_nodes -= bbs.length () - num_nodes;
7044 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vect_loops)
7047 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
7052 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
7054 dest_cfun->has_simduid_loops = true;
7056 if (aloop->force_vect)
7057 dest_cfun->has_force_vect_loops = true;
7061 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
7065 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7067 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
7068 = BLOCK_SUBBLOCKS (orig_block);
7069 for (block = BLOCK_SUBBLOCKS (orig_block);
7070 block; block = BLOCK_CHAIN (block))
7071 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
7072 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
7075 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
7076 vars_map, dest_cfun->decl);
7079 htab_delete (new_label_map);
7081 pointer_map_destroy (eh_map);
7082 pointer_map_destroy (vars_map);
7084 /* Rewire the entry and exit blocks. The successor to the entry
7085 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
7086 the child function. Similarly, the predecessor of DEST_FN's
7087 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
7088 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
7089 various CFG manipulation function get to the right CFG.
7091 FIXME, this is silly. The CFG ought to become a parameter to
7093 push_cfun (dest_cfun);
7094 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7096 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7099 /* Back in the original function, the SESE region has disappeared,
7100 create a new basic block in its place. */
7101 bb = create_empty_bb (entry_pred[0]);
7103 add_bb_to_loop (bb, loop);
7104 for (i = 0; i < num_entry_edges; i++)
7106 e = make_edge (entry_pred[i], bb, entry_flag[i]);
7107 e->probability = entry_prob[i];
7110 for (i = 0; i < num_exit_edges; i++)
7112 e = make_edge (bb, exit_succ[i], exit_flag[i]);
7113 e->probability = exit_prob[i];
7116 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
7117 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
7118 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
7136 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
7140 dump_function_to_file (tree fndecl, FILE *file, int flags)
7142 tree arg, var, old_current_fndecl = current_function_decl;
7143 struct function *dsf;
7144 bool ignore_topmost_bind = false, any_var = false;
7147 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7148 && decl_is_tm_clone (fndecl));
7149 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7151 current_function_decl = fndecl;
7152 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7154 arg = DECL_ARGUMENTS (fndecl);
7157 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7158 fprintf (file, " ");
7159 print_generic_expr (file, arg, dump_flags);
7160 if (flags & TDF_VERBOSE)
7161 print_node (file, "", arg, 4);
7162 if (DECL_CHAIN (arg))
7163 fprintf (file, ", ");
7164 arg = DECL_CHAIN (arg);
7166 fprintf (file, ")\n");
7168 if (flags & TDF_VERBOSE)
7169 print_node (file, "", fndecl, 2);
7171 dsf = DECL_STRUCT_FUNCTION (fndecl);
7172 if (dsf && (flags & TDF_EH))
7173 dump_eh_tree (file, dsf);
7175 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7177 dump_node (fndecl, TDF_SLIM | flags, file);
7178 current_function_decl = old_current_fndecl;
7182 /* When GIMPLE is lowered, the variables are no longer available in
7183 BIND_EXPRs, so display them separately. */
7184 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7187 ignore_topmost_bind = true;
7189 fprintf (file, "{\n");
7190 if (!vec_safe_is_empty (fun->local_decls))
7191 FOR_EACH_LOCAL_DECL (fun, ix, var)
7193 print_generic_decl (file, var, flags);
7194 if (flags & TDF_VERBOSE)
7195 print_node (file, "", var, 4);
7196 fprintf (file, "\n");
7200 if (gimple_in_ssa_p (cfun))
7201 for (ix = 1; ix < num_ssa_names; ++ix)
7203 tree name = ssa_name (ix);
7204 if (name && !SSA_NAME_VAR (name))
7206 fprintf (file, " ");
7207 print_generic_expr (file, TREE_TYPE (name), flags);
7208 fprintf (file, " ");
7209 print_generic_expr (file, name, flags);
7210 fprintf (file, ";\n");
7217 if (fun && fun->decl == fndecl
7219 && basic_block_info_for_fn (fun))
7221 /* If the CFG has been built, emit a CFG-based dump. */
7222 if (!ignore_topmost_bind)
7223 fprintf (file, "{\n");
7225 if (any_var && n_basic_blocks_for_fn (fun))
7226 fprintf (file, "\n");
7228 FOR_EACH_BB_FN (bb, fun)
7229 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7231 fprintf (file, "}\n");
7233 else if (DECL_SAVED_TREE (fndecl) == NULL)
7235 /* The function is now in GIMPLE form but the CFG has not been
7236 built yet. Emit the single sequence of GIMPLE statements
7237 that make up its body. */
7238 gimple_seq body = gimple_body (fndecl);
7240 if (gimple_seq_first_stmt (body)
7241 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7242 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7243 print_gimple_seq (file, body, 0, flags);
7246 if (!ignore_topmost_bind)
7247 fprintf (file, "{\n");
7250 fprintf (file, "\n");
7252 print_gimple_seq (file, body, 2, flags);
7253 fprintf (file, "}\n");
7260 /* Make a tree based dump. */
7261 chain = DECL_SAVED_TREE (fndecl);
7262 if (chain && TREE_CODE (chain) == BIND_EXPR)
7264 if (ignore_topmost_bind)
7266 chain = BIND_EXPR_BODY (chain);
7274 if (!ignore_topmost_bind)
7275 fprintf (file, "{\n");
7280 fprintf (file, "\n");
7282 print_generic_stmt_indented (file, chain, flags, indent);
7283 if (ignore_topmost_bind)
7284 fprintf (file, "}\n");
7287 if (flags & TDF_ENUMERATE_LOCALS)
7288 dump_enumerated_decls (file, flags);
7289 fprintf (file, "\n\n");
7291 current_function_decl = old_current_fndecl;
7294 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
7297 debug_function (tree fn, int flags)
7299 dump_function_to_file (fn, stderr, flags);
7303 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7306 print_pred_bbs (FILE *file, basic_block bb)
7311 FOR_EACH_EDGE (e, ei, bb->preds)
7312 fprintf (file, "bb_%d ", e->src->index);
7316 /* Print on FILE the indexes for the successors of basic_block BB. */
7319 print_succ_bbs (FILE *file, basic_block bb)
7324 FOR_EACH_EDGE (e, ei, bb->succs)
7325 fprintf (file, "bb_%d ", e->dest->index);
7328 /* Print to FILE the basic block BB following the VERBOSITY level. */
7331 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7333 char *s_indent = (char *) alloca ((size_t) indent + 1);
7334 memset ((void *) s_indent, ' ', (size_t) indent);
7335 s_indent[indent] = '\0';
7337 /* Print basic_block's header. */
7340 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7341 print_pred_bbs (file, bb);
7342 fprintf (file, "}, succs = {");
7343 print_succ_bbs (file, bb);
7344 fprintf (file, "})\n");
7347 /* Print basic_block's body. */
7350 fprintf (file, "%s {\n", s_indent);
7351 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7352 fprintf (file, "%s }\n", s_indent);
7356 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7358 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7359 VERBOSITY level this outputs the contents of the loop, or just its
7363 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7371 s_indent = (char *) alloca ((size_t) indent + 1);
7372 memset ((void *) s_indent, ' ', (size_t) indent);
7373 s_indent[indent] = '\0';
7375 /* Print loop's header. */
7376 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7378 fprintf (file, "header = %d", loop->header->index);
7381 fprintf (file, "deleted)\n");
7385 fprintf (file, ", latch = %d", loop->latch->index);
7387 fprintf (file, ", multiple latches");
7388 fprintf (file, ", niter = ");
7389 print_generic_expr (file, loop->nb_iterations, 0);
7391 if (loop->any_upper_bound)
7393 fprintf (file, ", upper_bound = ");
7394 dump_double_int (file, loop->nb_iterations_upper_bound, true);
7397 if (loop->any_estimate)
7399 fprintf (file, ", estimate = ");
7400 dump_double_int (file, loop->nb_iterations_estimate, true);
7402 fprintf (file, ")\n");
7404 /* Print loop's body. */
7407 fprintf (file, "%s{\n", s_indent);
7408 FOR_EACH_BB_FN (bb, cfun)
7409 if (bb->loop_father == loop)
7410 print_loops_bb (file, bb, indent, verbosity);
7412 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7413 fprintf (file, "%s}\n", s_indent);
7417 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7418 spaces. Following VERBOSITY level this outputs the contents of the
7419 loop, or just its structure. */
7422 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7428 print_loop (file, loop, indent, verbosity);
7429 print_loop_and_siblings (file, loop->next, indent, verbosity);
7432 /* Follow a CFG edge from the entry point of the program, and on entry
7433 of a loop, pretty print the loop structure on FILE. */
7436 print_loops (FILE *file, int verbosity)
7440 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7441 if (bb && bb->loop_father)
7442 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7448 debug (struct loop &ref)
7450 print_loop (stderr, &ref, 0, /*verbosity*/0);
7454 debug (struct loop *ptr)
7459 fprintf (stderr, "<nil>\n");
7462 /* Dump a loop verbosely. */
7465 debug_verbose (struct loop &ref)
7467 print_loop (stderr, &ref, 0, /*verbosity*/3);
7471 debug_verbose (struct loop *ptr)
7476 fprintf (stderr, "<nil>\n");
7480 /* Debugging loops structure at tree level, at some VERBOSITY level. */
7483 debug_loops (int verbosity)
7485 print_loops (stderr, verbosity);
7488 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7491 debug_loop (struct loop *loop, int verbosity)
7493 print_loop (stderr, loop, 0, verbosity);
7496 /* Print on stderr the code of loop number NUM, at some VERBOSITY
7500 debug_loop_num (unsigned num, int verbosity)
7502 debug_loop (get_loop (cfun, num), verbosity);
7505 /* Return true if BB ends with a call, possibly followed by some
7506 instructions that must stay with the call. Return false,
7510 gimple_block_ends_with_call_p (basic_block bb)
7512 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7513 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7517 /* Return true if BB ends with a conditional branch. Return false,
7521 gimple_block_ends_with_condjump_p (const_basic_block bb)
7523 gimple stmt = last_stmt (CONST_CAST_BB (bb));
7524 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7528 /* Return true if we need to add fake edge to exit at statement T.
7529 Helper function for gimple_flow_call_edges_add. */
7532 need_fake_edge_p (gimple t)
7534 tree fndecl = NULL_TREE;
7537 /* NORETURN and LONGJMP calls already have an edge to exit.
7538 CONST and PURE calls do not need one.
7539 We don't currently check for CONST and PURE here, although
7540 it would be a good idea, because those attributes are
7541 figured out from the RTL in mark_constant_function, and
7542 the counter incrementation code from -fprofile-arcs
7543 leads to different results from -fbranch-probabilities. */
7544 if (is_gimple_call (t))
7546 fndecl = gimple_call_fndecl (t);
7547 call_flags = gimple_call_flags (t);
7550 if (is_gimple_call (t)
7552 && DECL_BUILT_IN (fndecl)
7553 && (call_flags & ECF_NOTHROW)
7554 && !(call_flags & ECF_RETURNS_TWICE)
7555 /* fork() doesn't really return twice, but the effect of
7556 wrapping it in __gcov_fork() which calls __gcov_flush()
7557 and clears the counters before forking has the same
7558 effect as returning twice. Force a fake edge. */
7559 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7560 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7563 if (is_gimple_call (t))
7569 if (!(call_flags & ECF_NORETURN))
7573 FOR_EACH_EDGE (e, ei, bb->succs)
7574 if ((e->flags & EDGE_FAKE) == 0)
7578 if (gimple_code (t) == GIMPLE_ASM
7579 && (gimple_asm_volatile_p (t) || gimple_asm_input_p (t)))
7586 /* Add fake edges to the function exit for any non constant and non
7587 noreturn calls (or noreturn calls with EH/abnormal edges),
7588 volatile inline assembly in the bitmap of blocks specified by BLOCKS
7589 or to the whole CFG if BLOCKS is zero. Return the number of blocks
7592 The goal is to expose cases in which entering a basic block does
7593 not imply that all subsequent instructions must be executed. */
7596 gimple_flow_call_edges_add (sbitmap blocks)
7599 int blocks_split = 0;
7600 int last_bb = last_basic_block_for_fn (cfun);
7601 bool check_last_block = false;
7603 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7607 check_last_block = true;
7609 check_last_block = bitmap_bit_p (blocks,
7610 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7612 /* In the last basic block, before epilogue generation, there will be
7613 a fallthru edge to EXIT. Special care is required if the last insn
7614 of the last basic block is a call because make_edge folds duplicate
7615 edges, which would result in the fallthru edge also being marked
7616 fake, which would result in the fallthru edge being removed by
7617 remove_fake_edges, which would result in an invalid CFG.
7619 Moreover, we can't elide the outgoing fake edge, since the block
7620 profiler needs to take this into account in order to solve the minimal
7621 spanning tree in the case that the call doesn't return.
7623 Handle this by adding a dummy instruction in a new last basic block. */
7624 if (check_last_block)
7626 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7627 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7630 if (!gsi_end_p (gsi))
7633 if (t && need_fake_edge_p (t))
7637 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7640 gsi_insert_on_edge (e, gimple_build_nop ());
7641 gsi_commit_edge_inserts ();
7646 /* Now add fake edges to the function exit for any non constant
7647 calls since there is no way that we can determine if they will
7649 for (i = 0; i < last_bb; i++)
7651 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7652 gimple_stmt_iterator gsi;
7653 gimple stmt, last_stmt;
7658 if (blocks && !bitmap_bit_p (blocks, i))
7661 gsi = gsi_last_nondebug_bb (bb);
7662 if (!gsi_end_p (gsi))
7664 last_stmt = gsi_stmt (gsi);
7667 stmt = gsi_stmt (gsi);
7668 if (need_fake_edge_p (stmt))
7672 /* The handling above of the final block before the
7673 epilogue should be enough to verify that there is
7674 no edge to the exit block in CFG already.
7675 Calling make_edge in such case would cause us to
7676 mark that edge as fake and remove it later. */
7677 #ifdef ENABLE_CHECKING
7678 if (stmt == last_stmt)
7680 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7681 gcc_assert (e == NULL);
7685 /* Note that the following may create a new basic block
7686 and renumber the existing basic blocks. */
7687 if (stmt != last_stmt)
7689 e = split_block (bb, stmt);
7693 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7697 while (!gsi_end_p (gsi));
7702 verify_flow_info ();
7704 return blocks_split;
7707 /* Removes edge E and all the blocks dominated by it, and updates dominance
7708 information. The IL in E->src needs to be updated separately.
7709 If dominance info is not available, only the edge E is removed.*/
7712 remove_edge_and_dominated_blocks (edge e)
7714 vec<basic_block> bbs_to_remove = vNULL;
7715 vec<basic_block> bbs_to_fix_dom = vNULL;
7719 bool none_removed = false;
7721 basic_block bb, dbb;
7724 if (!dom_info_available_p (CDI_DOMINATORS))
7730 /* No updating is needed for edges to exit. */
7731 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7733 if (cfgcleanup_altered_bbs)
7734 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7739 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7740 that is not dominated by E->dest, then this set is empty. Otherwise,
7741 all the basic blocks dominated by E->dest are removed.
7743 Also, to DF_IDOM we store the immediate dominators of the blocks in
7744 the dominance frontier of E (i.e., of the successors of the
7745 removed blocks, if there are any, and of E->dest otherwise). */
7746 FOR_EACH_EDGE (f, ei, e->dest->preds)
7751 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7753 none_removed = true;
7758 df = BITMAP_ALLOC (NULL);
7759 df_idom = BITMAP_ALLOC (NULL);
7762 bitmap_set_bit (df_idom,
7763 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
7766 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
7767 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7769 FOR_EACH_EDGE (f, ei, bb->succs)
7771 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
7772 bitmap_set_bit (df, f->dest->index);
7775 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
7776 bitmap_clear_bit (df, bb->index);
7778 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
7780 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7781 bitmap_set_bit (df_idom,
7782 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
7786 if (cfgcleanup_altered_bbs)
7788 /* Record the set of the altered basic blocks. */
7789 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7790 bitmap_ior_into (cfgcleanup_altered_bbs, df);
7793 /* Remove E and the cancelled blocks. */
7798 /* Walk backwards so as to get a chance to substitute all
7799 released DEFs into debug stmts. See
7800 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
7802 for (i = bbs_to_remove.length (); i-- > 0; )
7803 delete_basic_block (bbs_to_remove[i]);
7806 /* Update the dominance information. The immediate dominator may change only
7807 for blocks whose immediate dominator belongs to DF_IDOM:
7809 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
7810 removal. Let Z the arbitrary block such that idom(Z) = Y and
7811 Z dominates X after the removal. Before removal, there exists a path P
7812 from Y to X that avoids Z. Let F be the last edge on P that is
7813 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
7814 dominates W, and because of P, Z does not dominate W), and W belongs to
7815 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
7816 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
7818 bb = BASIC_BLOCK_FOR_FN (cfun, i);
7819 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
7821 dbb = next_dom_son (CDI_DOMINATORS, dbb))
7822 bbs_to_fix_dom.safe_push (dbb);
7825 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
7828 BITMAP_FREE (df_idom);
7829 bbs_to_remove.release ();
7830 bbs_to_fix_dom.release ();
7833 /* Purge dead EH edges from basic block BB. */
7836 gimple_purge_dead_eh_edges (basic_block bb)
7838 bool changed = false;
7841 gimple stmt = last_stmt (bb);
7843 if (stmt && stmt_can_throw_internal (stmt))
7846 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7848 if (e->flags & EDGE_EH)
7850 remove_edge_and_dominated_blocks (e);
7860 /* Purge dead EH edges from basic block listed in BLOCKS. */
7863 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
7865 bool changed = false;
7869 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7871 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7873 /* Earlier gimple_purge_dead_eh_edges could have removed
7874 this basic block already. */
7875 gcc_assert (bb || changed);
7877 changed |= gimple_purge_dead_eh_edges (bb);
7883 /* Purge dead abnormal call edges from basic block BB. */
7886 gimple_purge_dead_abnormal_call_edges (basic_block bb)
7888 bool changed = false;
7891 gimple stmt = last_stmt (bb);
7893 if (!cfun->has_nonlocal_label
7894 && !cfun->calls_setjmp)
7897 if (stmt && stmt_can_make_abnormal_goto (stmt))
7900 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
7902 if (e->flags & EDGE_ABNORMAL)
7904 if (e->flags & EDGE_FALLTHRU)
7905 e->flags &= ~EDGE_ABNORMAL;
7907 remove_edge_and_dominated_blocks (e);
7917 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
7920 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
7922 bool changed = false;
7926 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
7928 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7930 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
7931 this basic block already. */
7932 gcc_assert (bb || changed);
7934 changed |= gimple_purge_dead_abnormal_call_edges (bb);
7940 /* This function is called whenever a new edge is created or
7944 gimple_execute_on_growing_pred (edge e)
7946 basic_block bb = e->dest;
7948 if (!gimple_seq_empty_p (phi_nodes (bb)))
7949 reserve_phi_args_for_new_edge (bb);
7952 /* This function is called immediately before edge E is removed from
7953 the edge vector E->dest->preds. */
7956 gimple_execute_on_shrinking_pred (edge e)
7958 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
7959 remove_phi_args (e);
7962 /*---------------------------------------------------------------------------
7963 Helper functions for Loop versioning
7964 ---------------------------------------------------------------------------*/
7966 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
7967 of 'first'. Both of them are dominated by 'new_head' basic block. When
7968 'new_head' was created by 'second's incoming edge it received phi arguments
7969 on the edge by split_edge(). Later, additional edge 'e' was created to
7970 connect 'new_head' and 'first'. Now this routine adds phi args on this
7971 additional edge 'e' that new_head to second edge received as part of edge
7975 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
7976 basic_block new_head, edge e)
7979 gimple_stmt_iterator psi1, psi2;
7981 edge e2 = find_edge (new_head, second);
7983 /* Because NEW_HEAD has been created by splitting SECOND's incoming
7984 edge, we should always have an edge from NEW_HEAD to SECOND. */
7985 gcc_assert (e2 != NULL);
7987 /* Browse all 'second' basic block phi nodes and add phi args to
7988 edge 'e' for 'first' head. PHI args are always in correct order. */
7990 for (psi2 = gsi_start_phis (second),
7991 psi1 = gsi_start_phis (first);
7992 !gsi_end_p (psi2) && !gsi_end_p (psi1);
7993 gsi_next (&psi2), gsi_next (&psi1))
7995 phi1 = gsi_stmt (psi1);
7996 phi2 = gsi_stmt (psi2);
7997 def = PHI_ARG_DEF (phi2, e2->dest_idx);
7998 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
8003 /* Adds a if else statement to COND_BB with condition COND_EXPR.
8004 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8005 the destination of the ELSE part. */
8008 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8009 basic_block second_head ATTRIBUTE_UNUSED,
8010 basic_block cond_bb, void *cond_e)
8012 gimple_stmt_iterator gsi;
8013 gimple new_cond_expr;
8014 tree cond_expr = (tree) cond_e;
8017 /* Build new conditional expr */
8018 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8019 NULL_TREE, NULL_TREE);
8021 /* Add new cond in cond_bb. */
8022 gsi = gsi_last_bb (cond_bb);
8023 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8025 /* Adjust edges appropriately to connect new head with first head
8026 as well as second head. */
8027 e0 = single_succ_edge (cond_bb);
8028 e0->flags &= ~EDGE_FALLTHRU;
8029 e0->flags |= EDGE_FALSE_VALUE;
8033 /* Do book-keeping of basic block BB for the profile consistency checker.
8034 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8035 then do post-pass accounting. Store the counting in RECORD. */
8037 gimple_account_profile_record (basic_block bb, int after_pass,
8038 struct profile_record *record)
8040 gimple_stmt_iterator i;
8041 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8043 record->size[after_pass]
8044 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8045 if (profile_status_for_fn (cfun) == PROFILE_READ)
8046 record->time[after_pass]
8047 += estimate_num_insns (gsi_stmt (i),
8048 &eni_time_weights) * bb->count;
8049 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8050 record->time[after_pass]
8051 += estimate_num_insns (gsi_stmt (i),
8052 &eni_time_weights) * bb->frequency;
8056 struct cfg_hooks gimple_cfg_hooks = {
8058 gimple_verify_flow_info,
8059 gimple_dump_bb, /* dump_bb */
8060 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8061 create_bb, /* create_basic_block */
8062 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8063 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8064 gimple_can_remove_branch_p, /* can_remove_branch_p */
8065 remove_bb, /* delete_basic_block */
8066 gimple_split_block, /* split_block */
8067 gimple_move_block_after, /* move_block_after */
8068 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8069 gimple_merge_blocks, /* merge_blocks */
8070 gimple_predict_edge, /* predict_edge */
8071 gimple_predicted_by_p, /* predicted_by_p */
8072 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8073 gimple_duplicate_bb, /* duplicate_block */
8074 gimple_split_edge, /* split_edge */
8075 gimple_make_forwarder_block, /* make_forward_block */
8076 NULL, /* tidy_fallthru_edge */
8077 NULL, /* force_nonfallthru */
8078 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8079 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8080 gimple_flow_call_edges_add, /* flow_call_edges_add */
8081 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8082 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8083 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8084 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8085 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8086 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8087 flush_pending_stmts, /* flush_pending_stmts */
8088 gimple_empty_block_p, /* block_empty_p */
8089 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8090 gimple_account_profile_record,
8094 /* Split all critical edges. */
8097 split_critical_edges (void)
8103 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8104 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8105 mappings around the calls to split_edge. */
8106 start_recording_case_labels ();
8107 FOR_ALL_BB_FN (bb, cfun)
8109 FOR_EACH_EDGE (e, ei, bb->succs)
8111 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8113 /* PRE inserts statements to edges and expects that
8114 since split_critical_edges was done beforehand, committing edge
8115 insertions will not split more edges. In addition to critical
8116 edges we must split edges that have multiple successors and
8117 end by control flow statements, such as RESX.
8118 Go ahead and split them too. This matches the logic in
8119 gimple_find_edge_insert_loc. */
8120 else if ((!single_pred_p (e->dest)
8121 || !gimple_seq_empty_p (phi_nodes (e->dest))
8122 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8123 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8124 && !(e->flags & EDGE_ABNORMAL))
8126 gimple_stmt_iterator gsi;
8128 gsi = gsi_last_bb (e->src);
8129 if (!gsi_end_p (gsi)
8130 && stmt_ends_bb_p (gsi_stmt (gsi))
8131 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8132 && !gimple_call_builtin_p (gsi_stmt (gsi),
8138 end_recording_case_labels ();
8144 const pass_data pass_data_split_crit_edges =
8146 GIMPLE_PASS, /* type */
8147 "crited", /* name */
8148 OPTGROUP_NONE, /* optinfo_flags */
8149 false, /* has_gate */
8150 true, /* has_execute */
8151 TV_TREE_SPLIT_EDGES, /* tv_id */
8152 PROP_cfg, /* properties_required */
8153 PROP_no_crit_edges, /* properties_provided */
8154 0, /* properties_destroyed */
8155 0, /* todo_flags_start */
8156 TODO_verify_flow, /* todo_flags_finish */
8159 class pass_split_crit_edges : public gimple_opt_pass
8162 pass_split_crit_edges (gcc::context *ctxt)
8163 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8166 /* opt_pass methods: */
8167 unsigned int execute () { return split_critical_edges (); }
8169 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8170 }; // class pass_split_crit_edges
8175 make_pass_split_crit_edges (gcc::context *ctxt)
8177 return new pass_split_crit_edges (ctxt);
8181 /* Build a ternary operation and gimplify it. Emit code before GSI.
8182 Return the gimple_val holding the result. */
8185 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8186 tree type, tree a, tree b, tree c)
8189 location_t loc = gimple_location (gsi_stmt (*gsi));
8191 ret = fold_build3_loc (loc, code, type, a, b, c);
8194 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8198 /* Build a binary operation and gimplify it. Emit code before GSI.
8199 Return the gimple_val holding the result. */
8202 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8203 tree type, tree a, tree b)
8207 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8210 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8214 /* Build a unary operation and gimplify it. Emit code before GSI.
8215 Return the gimple_val holding the result. */
8218 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8223 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8226 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8232 /* Emit return warnings. */
8235 execute_warn_function_return (void)
8237 source_location location;
8242 if (!targetm.warn_func_return (cfun->decl))
8245 /* If we have a path to EXIT, then we do return. */
8246 if (TREE_THIS_VOLATILE (cfun->decl)
8247 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0)
8249 location = UNKNOWN_LOCATION;
8250 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
8252 last = last_stmt (e->src);
8253 if ((gimple_code (last) == GIMPLE_RETURN
8254 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8255 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8258 if (location == UNKNOWN_LOCATION)
8259 location = cfun->function_end_locus;
8260 warning_at (location, 0, "%<noreturn%> function does return");
8263 /* If we see "return;" in some basic block, then we do reach the end
8264 without returning a value. */
8265 else if (warn_return_type
8266 && !TREE_NO_WARNING (cfun->decl)
8267 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) > 0
8268 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (cfun->decl))))
8270 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
8272 gimple last = last_stmt (e->src);
8273 if (gimple_code (last) == GIMPLE_RETURN
8274 && gimple_return_retval (last) == NULL
8275 && !gimple_no_warning_p (last))
8277 location = gimple_location (last);
8278 if (location == UNKNOWN_LOCATION)
8279 location = cfun->function_end_locus;
8280 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8281 TREE_NO_WARNING (cfun->decl) = 1;
8290 /* Given a basic block B which ends with a conditional and has
8291 precisely two successors, determine which of the edges is taken if
8292 the conditional is true and which is taken if the conditional is
8293 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8296 extract_true_false_edges_from_block (basic_block b,
8300 edge e = EDGE_SUCC (b, 0);
8302 if (e->flags & EDGE_TRUE_VALUE)
8305 *false_edge = EDGE_SUCC (b, 1);
8310 *true_edge = EDGE_SUCC (b, 1);
8316 const pass_data pass_data_warn_function_return =
8318 GIMPLE_PASS, /* type */
8319 "*warn_function_return", /* name */
8320 OPTGROUP_NONE, /* optinfo_flags */
8321 false, /* has_gate */
8322 true, /* has_execute */
8323 TV_NONE, /* tv_id */
8324 PROP_cfg, /* properties_required */
8325 0, /* properties_provided */
8326 0, /* properties_destroyed */
8327 0, /* todo_flags_start */
8328 0, /* todo_flags_finish */
8331 class pass_warn_function_return : public gimple_opt_pass
8334 pass_warn_function_return (gcc::context *ctxt)
8335 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8338 /* opt_pass methods: */
8339 unsigned int execute () { return execute_warn_function_return (); }
8341 }; // class pass_warn_function_return
8346 make_pass_warn_function_return (gcc::context *ctxt)
8348 return new pass_warn_function_return (ctxt);
8351 /* Walk a gimplified function and warn for functions whose return value is
8352 ignored and attribute((warn_unused_result)) is set. This is done before
8353 inlining, so we don't have to worry about that. */
8356 do_warn_unused_result (gimple_seq seq)
8359 gimple_stmt_iterator i;
8361 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8363 gimple g = gsi_stmt (i);
8365 switch (gimple_code (g))
8368 do_warn_unused_result (gimple_bind_body (g));
8371 do_warn_unused_result (gimple_try_eval (g));
8372 do_warn_unused_result (gimple_try_cleanup (g));
8375 do_warn_unused_result (gimple_catch_handler (g));
8377 case GIMPLE_EH_FILTER:
8378 do_warn_unused_result (gimple_eh_filter_failure (g));
8382 if (gimple_call_lhs (g))
8384 if (gimple_call_internal_p (g))
8387 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8388 LHS. All calls whose value is ignored should be
8389 represented like this. Look for the attribute. */
8390 fdecl = gimple_call_fndecl (g);
8391 ftype = gimple_call_fntype (g);
8393 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8395 location_t loc = gimple_location (g);
8398 warning_at (loc, OPT_Wunused_result,
8399 "ignoring return value of %qD, "
8400 "declared with attribute warn_unused_result",
8403 warning_at (loc, OPT_Wunused_result,
8404 "ignoring return value of function "
8405 "declared with attribute warn_unused_result");
8410 /* Not a container, not a call, or a call whose value is used. */
8417 run_warn_unused_result (void)
8419 do_warn_unused_result (gimple_body (current_function_decl));
8424 gate_warn_unused_result (void)
8426 return flag_warn_unused_result;
8431 const pass_data pass_data_warn_unused_result =
8433 GIMPLE_PASS, /* type */
8434 "*warn_unused_result", /* name */
8435 OPTGROUP_NONE, /* optinfo_flags */
8436 true, /* has_gate */
8437 true, /* has_execute */
8438 TV_NONE, /* tv_id */
8439 PROP_gimple_any, /* properties_required */
8440 0, /* properties_provided */
8441 0, /* properties_destroyed */
8442 0, /* todo_flags_start */
8443 0, /* todo_flags_finish */
8446 class pass_warn_unused_result : public gimple_opt_pass
8449 pass_warn_unused_result (gcc::context *ctxt)
8450 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8453 /* opt_pass methods: */
8454 bool gate () { return gate_warn_unused_result (); }
8455 unsigned int execute () { return run_warn_unused_result (); }
8457 }; // class pass_warn_unused_result
8462 make_pass_warn_unused_result (gcc::context *ctxt)
8464 return new pass_warn_unused_result (ctxt);
8467 /* IPA passes, compilation of earlier functions or inlining
8468 might have changed some properties, such as marked functions nothrow,
8469 pure, const or noreturn.
8470 Remove redundant edges and basic blocks, and create new ones if necessary.
8472 This pass can't be executed as stand alone pass from pass manager, because
8473 in between inlining and this fixup the verify_flow_info would fail. */
8476 execute_fixup_cfg (void)
8479 gimple_stmt_iterator gsi;
8480 int todo = gimple_in_ssa_p (cfun) ? TODO_verify_ssa : 0;
8481 gcov_type count_scale;
8486 = GCOV_COMPUTE_SCALE (cgraph_get_node (current_function_decl)->count,
8487 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8489 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8490 cgraph_get_node (current_function_decl)->count;
8491 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8492 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8495 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8496 e->count = apply_scale (e->count, count_scale);
8498 FOR_EACH_BB_FN (bb, cfun)
8500 bb->count = apply_scale (bb->count, count_scale);
8501 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
8503 gimple stmt = gsi_stmt (gsi);
8504 tree decl = is_gimple_call (stmt)
8505 ? gimple_call_fndecl (stmt)
8509 int flags = gimple_call_flags (stmt);
8510 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8512 if (gimple_purge_dead_abnormal_call_edges (bb))
8513 todo |= TODO_cleanup_cfg;
8515 if (gimple_in_ssa_p (cfun))
8517 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8522 if (flags & ECF_NORETURN
8523 && fixup_noreturn_call (stmt))
8524 todo |= TODO_cleanup_cfg;
8527 if (maybe_clean_eh_stmt (stmt)
8528 && gimple_purge_dead_eh_edges (bb))
8529 todo |= TODO_cleanup_cfg;
8532 FOR_EACH_EDGE (e, ei, bb->succs)
8533 e->count = apply_scale (e->count, count_scale);
8535 /* If we have a basic block with no successors that does not
8536 end with a control statement or a noreturn call end it with
8537 a call to __builtin_unreachable. This situation can occur
8538 when inlining a noreturn call that does in fact return. */
8539 if (EDGE_COUNT (bb->succs) == 0)
8541 gimple stmt = last_stmt (bb);
8543 || (!is_ctrl_stmt (stmt)
8544 && (!is_gimple_call (stmt)
8545 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8547 if (stmt && is_gimple_call (stmt))
8548 gimple_call_set_ctrl_altering (stmt, false);
8549 stmt = gimple_build_call
8550 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8551 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8552 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8556 if (count_scale != REG_BR_PROB_BASE)
8557 compute_function_frequency ();
8559 /* Dump a textual representation of the flowgraph. */
8561 gimple_dump_cfg (dump_file, dump_flags);
8564 && (todo & TODO_cleanup_cfg))
8565 loops_state_set (LOOPS_NEED_FIXUP);
8572 const pass_data pass_data_fixup_cfg =
8574 GIMPLE_PASS, /* type */
8575 "*free_cfg_annotations", /* name */
8576 OPTGROUP_NONE, /* optinfo_flags */
8577 false, /* has_gate */
8578 true, /* has_execute */
8579 TV_NONE, /* tv_id */
8580 PROP_cfg, /* properties_required */
8581 0, /* properties_provided */
8582 0, /* properties_destroyed */
8583 0, /* todo_flags_start */
8584 0, /* todo_flags_finish */
8587 class pass_fixup_cfg : public gimple_opt_pass
8590 pass_fixup_cfg (gcc::context *ctxt)
8591 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8594 /* opt_pass methods: */
8595 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8596 unsigned int execute () { return execute_fixup_cfg (); }
8598 }; // class pass_fixup_cfg
8603 make_pass_fixup_cfg (gcc::context *ctxt)
8605 return new pass_fixup_cfg (ctxt);
8608 /* Garbage collection support for edge_def. */
8610 extern void gt_ggc_mx (tree&);
8611 extern void gt_ggc_mx (gimple&);
8612 extern void gt_ggc_mx (rtx&);
8613 extern void gt_ggc_mx (basic_block&);
8616 gt_ggc_mx (edge_def *e)
8618 tree block = LOCATION_BLOCK (e->goto_locus);
8620 gt_ggc_mx (e->dest);
8621 if (current_ir_type () == IR_GIMPLE)
8622 gt_ggc_mx (e->insns.g);
8624 gt_ggc_mx (e->insns.r);
8628 /* PCH support for edge_def. */
8630 extern void gt_pch_nx (tree&);
8631 extern void gt_pch_nx (gimple&);
8632 extern void gt_pch_nx (rtx&);
8633 extern void gt_pch_nx (basic_block&);
8636 gt_pch_nx (edge_def *e)
8638 tree block = LOCATION_BLOCK (e->goto_locus);
8640 gt_pch_nx (e->dest);
8641 if (current_ir_type () == IR_GIMPLE)
8642 gt_pch_nx (e->insns.g);
8644 gt_pch_nx (e->insns.r);
8649 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
8651 tree block = LOCATION_BLOCK (e->goto_locus);
8652 op (&(e->src), cookie);
8653 op (&(e->dest), cookie);
8654 if (current_ir_type () == IR_GIMPLE)
8655 op (&(e->insns.g), cookie);
8657 op (&(e->insns.r), cookie);
8658 op (&(block), cookie);