/* Control flow functions for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "fold-const.h"
#include "trans-mem.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimple-pretty-print.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-into-ssa.h"
#include "insn-config.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "tree-inline.h"
#include "tree-ssa-live.h"
#include "tree-cfgcleanup.h"
#include "wide-int-print.h"
/* This file contains functions for building the Control Flow Graph (CFG)
   for a function tree.  */

/* Local declarations.  */

/* Initial capacity for the basic block array.  */
static const int initial_cfg_capacity = 20;
/* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
   which use a particular edge.  The CASE_LABEL_EXPRs are chained together
   via their CASE_CHAIN field, which we clear after we're done with the
   hash table to prevent problems with duplication of GIMPLE_SWITCHes.

   Access to this list of CASE_LABEL_EXPRs allows us to efficiently
   update the case vector in response to edge redirections.

   Right now this table is set up and torn down at key points in the
   compilation process.  It would be nice if we could make the table
   more persistent.  The key is getting notification of changes to
   the CFG (particularly edge removal, creation and redirection).  */

static hash_map<edge, tree> *edge_to_cases;
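
/* For example, after redirecting an edge E that used to reach the blocks
   of several case labels, the CASE_CHAIN list recorded for E here lets us
   update exactly those CASE_LABEL_EXPRs instead of rescanning the whole
   case vector of the GIMPLE_SWITCH.  */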
/* If we record edge_to_cases, this bitmap will hold indexes
   of basic blocks that end in a GIMPLE_SWITCH which we touched
   due to edge manipulations.  */

static bitmap touched_switch_bbs;
/* CFG statistics.  */
struct cfg_stats_d
{
  long num_merged_labels;
};

static struct cfg_stats_d cfg_stats;
/* Data to pass to replace_block_vars_by_duplicates_1.  */
struct replace_decls_d
{
  hash_map<tree, tree> *vars_map;
  tree to_context;
};
/* Hash table to store last discriminator assigned for each locus.  */
struct locus_discrim_map
{
  location_t locus;
  int discriminator;
};
/* Hashtable helpers.  */

struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
{
  static inline hashval_t hash (const locus_discrim_map *);
  static inline bool equal (const locus_discrim_map *,
			    const locus_discrim_map *);
};
/* Trivial hash function for a location_t.  ITEM is a pointer to
   a hash table entry that maps a location_t to a discriminator.  */

inline hashval_t
locus_discrim_hasher::hash (const locus_discrim_map *item)
{
  return LOCATION_LINE (item->locus);
}
/* Equality function for the locus-to-discriminator map.  A and B
   point to the two hash table entries to compare.  */

inline bool
locus_discrim_hasher::equal (const locus_discrim_map *a,
			     const locus_discrim_map *b)
{
  return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus);
}
static hash_table<locus_discrim_hasher> *discriminator_per_locus;
/* Basic blocks and flowgraphs.  */
static void make_blocks (gimple_seq);

/* Edges.  */
static void make_edges (void);
static void assign_discriminators (void);
static void make_cond_expr_edges (basic_block);
static void make_gimple_switch_edges (gswitch *, basic_block);
static bool make_goto_expr_edges (basic_block);
static void make_gimple_asm_edges (basic_block);
static edge gimple_redirect_edge_and_branch (edge, basic_block);
static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
/* Various helpers.  */
static inline bool stmt_starts_bb_p (gimple *, gimple *);
static int gimple_verify_flow_info (void);
static void gimple_make_forwarder_block (edge);
static gimple *first_non_label_stmt (basic_block);
static bool verify_gimple_transaction (gtransaction *);
static bool call_can_make_abnormal_goto (gimple *);
/* Flowgraph optimization and cleanup.  */
static void gimple_merge_blocks (basic_block, basic_block);
static bool gimple_can_merge_blocks_p (basic_block, basic_block);
static void remove_bb (basic_block);
static edge find_taken_edge_computed_goto (basic_block, tree);
static edge find_taken_edge_cond_expr (basic_block, tree);
static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
static tree find_case_label_for_value (gswitch *, tree);
void
init_empty_tree_cfg_for_function (struct function *fn)
{
  /* Initialize the basic block array.  */
  init_flow (fn);
  profile_status_for_fn (fn) = PROFILE_ABSENT;
  n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
  last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
  vec_alloc (basic_block_info_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (basic_block_info_for_fn (fn),
			 initial_cfg_capacity);

  /* Build a mapping of labels to their associated blocks.  */
  vec_alloc (label_to_block_map_for_fn (fn), initial_cfg_capacity);
  vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
			 initial_cfg_capacity);

  SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
  SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));

  ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
    = EXIT_BLOCK_PTR_FOR_FN (fn);
  EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
    = ENTRY_BLOCK_PTR_FOR_FN (fn);
}
void
init_empty_tree_cfg (void)
{
  init_empty_tree_cfg_for_function (cfun);
}
/*---------------------------------------------------------------------------
			      Create basic blocks
---------------------------------------------------------------------------*/
/* Entry point to the CFG builder for trees.  SEQ is the sequence of
   statements to be added to the flowgraph.  */

static void
build_gimple_cfg (gimple_seq seq)
{
  /* Register specific gimple functions.  */
  gimple_register_cfg_hooks ();

  memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));

  init_empty_tree_cfg ();

  make_blocks (seq);

  /* Make sure there is always at least one block, even if it's empty.  */
  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Adjust the size of the array.  */
  if (basic_block_info_for_fn (cfun)->length ()
      < (size_t) n_basic_blocks_for_fn (cfun))
    vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
			   n_basic_blocks_for_fn (cfun));

  /* To speed up statement iterator walks, we first purge dead labels.  */
  cleanup_dead_labels ();

  /* Group case nodes to reduce the number of edges.
     We do this after cleaning up dead labels because otherwise we miss
     a lot of obvious case merging opportunities.  */
  group_case_labels ();

  /* Create the edges of the flowgraph.  */
  discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
  make_edges ();
  assign_discriminators ();
  cleanup_dead_labels ();
  delete discriminator_per_locus;
  discriminator_per_locus = NULL;
}
/* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
   them and propagate the information to LOOP.  We assume that the annotations
   come immediately before the condition in BB, if any.  */

static void
replace_loop_annotate_in_block (basic_block bb, struct loop *loop)
{
  gimple_stmt_iterator gsi = gsi_last_bb (bb);
  gimple *stmt = gsi_stmt (gsi);

  if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
    return;

  for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_CALL)
	break;
      if (!gimple_call_internal_p (stmt)
	  || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	break;

      switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	{
	case annot_expr_ivdep_kind:
	  loop->safelen = INT_MAX;
	  break;
	case annot_expr_no_vector_kind:
	  loop->dont_vectorize = true;
	  break;
	case annot_expr_vector_kind:
	  loop->force_vectorize = true;
	  cfun->has_force_vectorize_loops = true;
	  break;
	default:
	  gcc_unreachable ();
	}

      stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  gimple_call_arg (stmt, 0));
      gsi_replace (&gsi, stmt, true);
    }
}
/* Look for ANNOTATE calls with loop annotation kind; if found, remove
   them and propagate the information to the loop.  We assume that the
   annotations come immediately before the condition of the loop.  */

static void
replace_loop_annotate (void)
{
  struct loop *loop;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;

  FOR_EACH_LOOP (loop, 0)
    {
      /* First look into the header.  */
      replace_loop_annotate_in_block (loop->header, loop);

      /* Then look into the latch, if any.  */
      if (loop->latch)
	replace_loop_annotate_in_block (loop->latch, loop);
    }

  /* Remove IFN_ANNOTATE.  Safeguard for the case loop->latch == NULL.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  if (gimple_code (stmt) != GIMPLE_CALL)
	    continue;
	  if (!gimple_call_internal_p (stmt)
	      || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
	    continue;

	  switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
	    {
	    case annot_expr_ivdep_kind:
	    case annot_expr_no_vector_kind:
	    case annot_expr_vector_kind:
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
	  stmt = gimple_build_assign (gimple_call_lhs (stmt),
				      gimple_call_arg (stmt, 0));
	  gsi_replace (&gsi, stmt, true);
	}
    }
}
static unsigned int
execute_build_cfg (void)
{
  gimple_seq body = gimple_body (current_function_decl);

  build_gimple_cfg (body);
  gimple_set_body (current_function_decl, NULL);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Scope blocks:\n");
      dump_scope_blocks (dump_file, dump_flags);
    }
  cleanup_tree_cfg ();
  loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
  replace_loop_annotate ();
  return 0;
}
namespace {

const pass_data pass_data_build_cfg =
{
  GIMPLE_PASS, /* type */
  "cfg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CFG, /* tv_id */
  PROP_gimple_leh, /* properties_required */
  ( PROP_cfg | PROP_loops ), /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_build_cfg : public gimple_opt_pass
{
public:
  pass_build_cfg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_build_cfg, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_build_cfg (); }

}; // class pass_build_cfg

} // anon namespace

gimple_opt_pass *
make_pass_build_cfg (gcc::context *ctxt)
{
  return new pass_build_cfg (ctxt);
}
/* Return true if T is a computed goto.  */

bool
computed_goto_p (gimple *t)
{
  return (gimple_code (t) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
}
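
/* For instance, GNU C's "goto *ptr;" lowers to a GIMPLE_GOTO whose
   destination is a pointer-valued expression rather than a LABEL_DECL,
   so this predicate holds for it, while a plain "goto L;" does not.  */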
/* Returns true for edge E where e->src ends with a GIMPLE_COND and
   the other edge points to a bb with just __builtin_unreachable ().
   I.e. return true for C->M edge in:
   <bb C>:
   ...
   if (something)
     goto <bb N>;
   else
     goto <bb M>;
   <bb N>:
   __builtin_unreachable ();
   <bb M>:  */

bool
assert_unreachable_fallthru_edge_p (edge e)
{
  basic_block pred_bb = e->src;
  gimple *last = last_stmt (pred_bb);

  if (last && gimple_code (last) == GIMPLE_COND)
    {
      basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
      if (other_bb == e->dest)
	other_bb = EDGE_SUCC (pred_bb, 1)->dest;
      if (EDGE_COUNT (other_bb->succs) == 0)
	{
	  gimple_stmt_iterator gsi = gsi_after_labels (other_bb);
	  gimple *stmt;

	  if (gsi_end_p (gsi))
	    return false;
	  stmt = gsi_stmt (gsi);
	  while (is_gimple_debug (stmt) || gimple_clobber_p (stmt))
	    {
	      gsi_next (&gsi);
	      if (gsi_end_p (gsi))
		return false;
	      stmt = gsi_stmt (gsi);
	    }
	  return gimple_call_builtin_p (stmt, BUILT_IN_UNREACHABLE);
	}
    }
  return false;
}
/* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
   could alter control flow except via eh.  We initialize the flag at
   CFG build time and only ever clear it later.  */

static void
gimple_call_initialize_ctrl_altering (gimple *stmt)
{
  int flags = gimple_call_flags (stmt);

  /* A call alters control flow if it can make an abnormal goto.  */
  if (call_can_make_abnormal_goto (stmt)
      /* A call also alters control flow if it does not return.  */
      || flags & ECF_NORETURN
      /* TM ending statements have backedges out of the transaction.
	 Return true so we split the basic block containing them.
	 Note that the TM_BUILTIN test is merely an optimization.  */
      || ((flags & ECF_TM_BUILTIN)
	  && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
      /* BUILT_IN_RETURN call is same as return statement.  */
      || gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    gimple_call_set_ctrl_altering (stmt, true);
  else
    gimple_call_set_ctrl_altering (stmt, false);
}
/* Insert SEQ after BB and build a flowgraph.  */

static basic_block
make_blocks_1 (gimple_seq seq, basic_block bb)
{
  gimple_stmt_iterator i = gsi_start (seq);
  gimple *stmt = NULL;
  bool start_new_block = true;
  bool first_stmt_of_seq = true;

  while (!gsi_end_p (i))
    {
      gimple *prev_stmt;

      prev_stmt = stmt;
      stmt = gsi_stmt (i);

      if (stmt && is_gimple_call (stmt))
	gimple_call_initialize_ctrl_altering (stmt);

      /* If the statement starts a new basic block or if we have determined
	 in a previous pass that we need to create a new block for STMT, do
	 so now.  */
      if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
	{
	  if (!first_stmt_of_seq)
	    gsi_split_seq_before (&i, &seq);
	  bb = create_basic_block (seq, bb);
	  start_new_block = false;
	}

      /* Now add STMT to BB and create the subgraphs for special statement
	 codes.  */
      gimple_set_bb (stmt, bb);

      /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
	 next iteration.  */
      if (stmt_ends_bb_p (stmt))
	{
	  /* If the stmt can make abnormal goto use a new temporary
	     for the assignment to the LHS.  This makes sure the old value
	     of the LHS is available on the abnormal edge.  Otherwise
	     we will end up with overlapping life-ranges for abnormal
	     SSA names.  */
	  if (gimple_has_lhs (stmt)
	      && stmt_can_make_abnormal_goto (stmt)
	      && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	    {
	      tree lhs = gimple_get_lhs (stmt);
	      tree tmp = create_tmp_var (TREE_TYPE (lhs));
	      gimple *s = gimple_build_assign (lhs, tmp);
	      gimple_set_location (s, gimple_location (stmt));
	      gimple_set_block (s, gimple_block (stmt));
	      gimple_set_lhs (stmt, tmp);
	      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
		  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
		DECL_GIMPLE_REG_P (tmp) = 1;
	      gsi_insert_after (&i, s, GSI_SAME_STMT);
	    }
	  start_new_block = true;
	}

      gsi_next (&i);
      first_stmt_of_seq = false;
    }
  return bb;
}
/* Build a flowgraph for the sequence of stmts SEQ.  */

static void
make_blocks (gimple_seq seq)
{
  make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
}
/* Create and return a new empty basic block after bb AFTER.  */

static basic_block
create_bb (void *h, void *e, basic_block after)
{
  basic_block bb;

  gcc_assert (!e);

  /* Create and initialize a new basic block.  Since alloc_block uses
     GC allocation that clears memory to allocate a basic block, we do
     not have to clear the newly allocated basic block here.  */
  bb = alloc_block ();

  bb->index = last_basic_block_for_fn (cfun);
  bb->flags = BB_NEW;
  set_bb_seq (bb, h ? (gimple_seq) h : NULL);

  /* Add the new block to the linked list of blocks.  */
  link_block (bb, after);

  /* Grow the basic block array if needed.  */
  if ((size_t) last_basic_block_for_fn (cfun)
      == basic_block_info_for_fn (cfun)->length ())
    {
      size_t new_size =
	(last_basic_block_for_fn (cfun)
	 + (last_basic_block_for_fn (cfun) + 3) / 4);
      vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
    }

  /* Add the newly created block to the array.  */
  SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);

  n_basic_blocks_for_fn (cfun)++;
  last_basic_block_for_fn (cfun)++;

  return bb;
}
/*---------------------------------------------------------------------------
				 Edge creation
---------------------------------------------------------------------------*/
/* If basic block BB has an abnormal edge to a basic block
   containing IFN_ABNORMAL_DISPATCHER internal call, return
   that basic block, otherwise return NULL.  */

basic_block
get_abnormal_succ_dispatcher (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
      {
	gimple_stmt_iterator gsi
	  = gsi_start_nondebug_after_labels_bb (e->dest);
	gimple *g = gsi_stmt (gsi);
	if (g
	    && is_gimple_call (g)
	    && gimple_call_internal_p (g)
	    && gimple_call_internal_fn (g) == IFN_ABNORMAL_DISPATCHER)
	  return e->dest;
      }
  return NULL;
}
/* Helper function for make_edges.  Create a basic block with
   ABNORMAL_DISPATCHER internal call in it if needed, and
   create abnormal edges from BBS to it and from it to FOR_BB
   if COMPUTED_GOTO is false, otherwise factor the computed gotos.  */

static void
handle_abnormal_edges (basic_block *dispatcher_bbs,
		       basic_block for_bb, int *bb_to_omp_idx,
		       auto_vec<basic_block> *bbs, bool computed_goto)
{
  basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
  unsigned int idx = 0;
  basic_block bb;
  bool inner = false;

  if (bb_to_omp_idx)
    {
      dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
      if (bb_to_omp_idx[for_bb->index] != 0)
	inner = true;
    }

  /* If the dispatcher has been created already, then there are basic
     blocks with abnormal edges to it, so just make a new edge to
     for_bb.  */
  if (*dispatcher == NULL)
    {
      /* Check if there are any basic blocks that need to have
	 abnormal edges to this dispatcher.  If there are none, return
	 early.  */
      if (bb_to_omp_idx == NULL)
	{
	  if (bbs->is_empty ())
	    return;
	}
      else
	{
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
	      break;
	  if (bb == NULL)
	    return;
	}

      /* Create the dispatcher bb.  */
      *dispatcher = create_basic_block (NULL, for_bb);
      if (computed_goto)
	{
	  /* Factor computed gotos into a common computed goto site.  Also
	     record the location of that site so that we can un-factor the
	     gotos after we have converted back to normal form.  */
	  gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);

	  /* Create the destination of the factored goto.  Each original
	     computed goto will put its desired destination into this
	     variable and jump to the label we create immediately below.  */
	  tree var = create_tmp_var (ptr_type_node, "gotovar");

	  /* Build a label for the new block which will contain the
	     factored computed goto.  */
	  tree factored_label_decl
	    = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *factored_computed_goto_label
	    = gimple_build_label (factored_label_decl);
	  gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);

	  /* Build our new computed goto.  */
	  gimple *factored_computed_goto = gimple_build_goto (var);
	  gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);

	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;

	      gsi = gsi_last_bb (bb);
	      gimple *last = gsi_stmt (gsi);

	      gcc_assert (computed_goto_p (last));

	      /* Copy the original computed goto's destination into VAR.  */
	      gimple *assignment
		= gimple_build_assign (var, gimple_goto_dest (last));
	      gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);

	      edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
	      e->goto_locus = gimple_location (last);
	      gsi_remove (&gsi, true);
	    }
	}
      else
	{
	  tree arg = inner ? boolean_true_node : boolean_false_node;
	  gimple *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
						  1, arg);
	  gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

	  /* Create predecessor edges of the dispatcher.  */
	  FOR_EACH_VEC_ELT (*bbs, idx, bb)
	    {
	      if (bb_to_omp_idx
		  && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
		continue;
	      make_edge (bb, *dispatcher, EDGE_ABNORMAL);
	    }
	}
    }

  make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
}
/* Creates outgoing edges for BB.  Returns 1 when it ends with a
   computed goto, returns 2 when it ends with a statement that
   might return to this function via a nonlocal goto, otherwise
   return 0.  Updates *PCUR_REGION with the OMP region this BB is in.  */

static int
make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
{
  gimple *last = last_stmt (bb);
  bool fallthru = false;
  int ret = 0;

  if (!last)
    return ret;

  switch (gimple_code (last))
    {
    case GIMPLE_GOTO:
      if (make_goto_expr_edges (bb))
	ret = 1;
      fallthru = false;
      break;
    case GIMPLE_RETURN:
      {
	edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	e->goto_locus = gimple_location (last);
	fallthru = false;
      }
      break;
    case GIMPLE_COND:
      make_cond_expr_edges (bb);
      fallthru = false;
      break;
    case GIMPLE_SWITCH:
      make_gimple_switch_edges (as_a <gswitch *> (last), bb);
      fallthru = false;
      break;
    case GIMPLE_RESX:
      make_eh_edges (last);
      fallthru = false;
      break;
    case GIMPLE_EH_DISPATCH:
      fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
      break;

    case GIMPLE_CALL:
      /* If this function receives a nonlocal goto, then we need to
	 make edges from this call site to all the nonlocal goto
	 handlers.  */
      if (stmt_can_make_abnormal_goto (last))
	ret = 2;

      /* If this statement has reachable exception handlers, then
	 create abnormal edges to them.  */
      make_eh_edges (last);

      /* BUILTIN_RETURN is really a return statement.  */
      if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
	{
	  make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
	  fallthru = false;
	}
      /* Some calls are known not to return.  */
      else
	fallthru = !(gimple_call_flags (last) & ECF_NORETURN);
      break;

    case GIMPLE_ASSIGN:
      /* A GIMPLE_ASSIGN may throw internally and thus be considered
	 control-altering.  */
      if (is_ctrl_altering_stmt (last))
	make_eh_edges (last);
      fallthru = true;
      break;

    case GIMPLE_ASM:
      make_gimple_asm_edges (bb);
      fallthru = true;
      break;

    CASE_GIMPLE_OMP:
      fallthru = make_gimple_omp_edges (bb, pcur_region, pomp_index);
      break;

    case GIMPLE_TRANSACTION:
      {
	tree abort_label
	  = gimple_transaction_label (as_a <gtransaction *> (last));
	if (abort_label)
	  make_edge (bb, label_to_block (abort_label), EDGE_TM_ABORT);
	fallthru = true;
      }
      break;

    default:
      gcc_assert (!stmt_ends_bb_p (last));
      fallthru = true;
      break;
    }

  if (fallthru)
    make_edge (bb, bb->next_bb, EDGE_FALLTHRU);

  return ret;
}
/* Join all the blocks in the flowgraph.  */

static void
make_edges (void)
{
  basic_block bb;
  struct omp_region *cur_region = NULL;
  auto_vec<basic_block> ab_edge_goto;
  auto_vec<basic_block> ab_edge_call;
  int *bb_to_omp_idx = NULL;
  int cur_omp_region_idx = 0;

  /* Create an edge from entry to the first block with executable
     statements in it.  */
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
	     BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
	     EDGE_FALLTHRU);

  /* Traverse the basic block array placing edges.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      int mer;

      if (bb_to_omp_idx)
	bb_to_omp_idx[bb->index] = cur_omp_region_idx;

      mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      if (mer == 1)
	ab_edge_goto.safe_push (bb);
      else if (mer == 2)
	ab_edge_call.safe_push (bb);

      if (cur_region && bb_to_omp_idx == NULL)
	bb_to_omp_idx = XCNEWVEC (int, n_basic_blocks_for_fn (cfun));
    }
  /* Computed gotos are hell to deal with, especially if there are
     lots of them with a large number of destinations.  So we factor
     them to a common computed goto location before we build the
     edge list.  After we convert back to normal form, we will un-factor
     the computed gotos since factoring introduces an unwanted jump.
     For non-local gotos and abnormal edges from calls to calls that return
     twice or forced labels, factor the abnormal edges too, by having all
     abnormal edges from the calls go to a common artificial basic block
     with ABNORMAL_DISPATCHER internal call and abnormal edges from that
     basic block to all forced labels and calls returning twice.
     We do this per-OpenMP structured block, because those regions
     are guaranteed to be single entry single exit by the standard,
     so it is not allowed to enter or exit such regions abnormally this way,
     thus all computed gotos, non-local gotos and setjmp/longjmp calls
     must not transfer control across SESE region boundaries.  */
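  /* A sketch of the factoring for computed gotos (an illustration, not
     part of the algorithm below): if blocks B1 and B2 each end in
     "goto *p;", each goto is rewritten as "gotovar = p;" with a fallthru
     edge to one new dispatcher block "L: goto *gotovar;", and only the
     dispatcher gets abnormal edges to the FORCED_LABEL blocks.  This
     turns O(gotos * labels) edges into O(gotos + labels).  */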
  if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
    {
      gimple_stmt_iterator gsi;
      basic_block dispatcher_bb_array[2] = { NULL, NULL };
      basic_block *dispatcher_bbs = dispatcher_bb_array;
      int count = n_basic_blocks_for_fn (cfun);

      if (bb_to_omp_idx)
	dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);

      FOR_EACH_BB_FN (bb, cfun)
	{
	  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	    {
	      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
	      tree target;

	      if (!label_stmt)
		break;

	      target = gimple_label_label (label_stmt);

	      /* Make an edge to every label block that has been marked as a
		 potential target for a computed goto or a non-local goto.  */
	      if (FORCED_LABEL (target))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_goto, true);
	      if (DECL_NONLOCAL (target))
		{
		  handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
					 &ab_edge_call, false);
		  break;
		}
	    }

	  if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
	    gsi_next_nondebug (&gsi);
	  if (!gsi_end_p (gsi))
	    {
	      /* Make an edge to every setjmp-like call.  */
	      gimple *call_stmt = gsi_stmt (gsi);
	      if (is_gimple_call (call_stmt)
		  && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
		      || gimple_call_builtin_p (call_stmt,
						BUILT_IN_SETJMP_RECEIVER)))
		handle_abnormal_edges (dispatcher_bbs, bb, bb_to_omp_idx,
				       &ab_edge_call, false);
	    }
	}

      if (bb_to_omp_idx)
	XDELETE (dispatcher_bbs);
    }

  XDELETE (bb_to_omp_idx);
}
/* Add SEQ after GSI.  Start new bb after GSI, and create further bbs as
   needed.  Returns true if new bbs were created.
   Note: This is transitional code, and should not be used for new code.  We
   should be able to get rid of this by rewriting all target va-arg
   gimplification hooks to use an interface gimple_build_cond_value as described
   in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html.  */

bool
gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  basic_block bb = gimple_bb (stmt);
  basic_block lastbb, afterbb;
  int old_num_bbs = n_basic_blocks_for_fn (cfun);
  edge e;

  lastbb = make_blocks_1 (seq, bb);
  if (old_num_bbs == n_basic_blocks_for_fn (cfun))
    return false;
  e = split_block (bb, stmt);
  /* Move e->dest to come after the new basic blocks.  */
  afterbb = e->dest;
  unlink_block (afterbb);
  link_block (afterbb, lastbb);
  redirect_edge_succ (e, bb->next_bb);
  bb = bb->next_bb;
  while (bb != afterbb)
    {
      struct omp_region *cur_region = NULL;
      int cur_omp_region_idx = 0;
      int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
      gcc_assert (!mer && !cur_region);
      add_bb_to_loop (bb, afterbb->loop_father);
      bb = bb->next_bb;
    }
  return true;
}
/* Find the next available discriminator value for LOCUS.  The
   discriminator distinguishes among several basic blocks that
   share a common locus, allowing for more accurate sample-based
   profiling.  */

static int
next_discriminator_for_locus (location_t locus)
{
  struct locus_discrim_map item;
  struct locus_discrim_map **slot;

  item.locus = locus;
  item.discriminator = 0;
  slot = discriminator_per_locus->find_slot_with_hash (
      &item, LOCATION_LINE (locus), INSERT);
  gcc_assert (slot);
  if (*slot == HTAB_EMPTY_ENTRY)
    {
      *slot = XNEW (struct locus_discrim_map);
      gcc_assert (*slot);
      (*slot)->locus = locus;
      (*slot)->discriminator = 0;
    }
  (*slot)->discriminator++;
  return (*slot)->discriminator;
}
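
/* For example, the first block recorded for a given source line receives
   discriminator 1, the next one 2, and so on; blocks on other lines get
   their own counters.  */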
/* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line.  */

static bool
same_line_p (location_t locus1, location_t locus2)
{
  expanded_location from, to;

  if (locus1 == locus2)
    return true;

  from = expand_location (locus1);
  to = expand_location (locus2);

  if (from.line != to.line)
    return false;
  if (from.file == to.file)
    return true;
  return (from.file != NULL
	  && to.file != NULL
	  && filename_cmp (from.file, to.file) == 0);
}
/* Assign discriminators to each basic block.  */

static void
assign_discriminators (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;
      gimple *last = last_stmt (bb);
      location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;

      if (locus == UNKNOWN_LOCATION)
	continue;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  gimple *first = first_non_label_stmt (e->dest);
	  gimple *last = last_stmt (e->dest);
	  if ((first && same_line_p (locus, gimple_location (first)))
	      || (last && same_line_p (locus, gimple_location (last))))
	    {
	      if (e->dest->discriminator != 0 && bb->discriminator == 0)
		bb->discriminator = next_discriminator_for_locus (locus);
	      else
		e->dest->discriminator = next_discriminator_for_locus (locus);
	    }
	}
    }
}
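
/* E.g. for "if (a) b = 1; else b = 2;" written on a single line, the two
   arms share the condition's line, so they receive distinct discriminators
   and a sample-based profile can attribute counts to each arm.  */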
/* Create the edges for a GIMPLE_COND starting at block BB.  */

static void
make_cond_expr_edges (basic_block bb)
{
  gcond *entry = as_a <gcond *> (last_stmt (bb));
  gimple *then_stmt, *else_stmt;
  basic_block then_bb, else_bb;
  tree then_label, else_label;
  edge e;

  gcc_assert (entry);
  gcc_assert (gimple_code (entry) == GIMPLE_COND);

  /* Entry basic blocks for each component.  */
  then_label = gimple_cond_true_label (entry);
  else_label = gimple_cond_false_label (entry);
  then_bb = label_to_block (then_label);
  else_bb = label_to_block (else_label);
  then_stmt = first_stmt (then_bb);
  else_stmt = first_stmt (else_bb);

  e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  e->goto_locus = gimple_location (then_stmt);
  e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  if (e)
    e->goto_locus = gimple_location (else_stmt);

  /* We do not need the labels anymore.  */
  gimple_cond_set_true_label (entry, NULL_TREE);
  gimple_cond_set_false_label (entry, NULL_TREE);
}
/* Called for each element in the hash table (P) as we delete the
   edge to cases hash table.

   Clear all the CASE_CHAINs to prevent problems with copying of
   SWITCH_EXPRs and structure sharing rules, then free the hash table
   element.  */

bool
edge_to_cases_cleanup (edge const &, tree const &value, void *)
{
  tree t, next;

  for (t = value; t; t = next)
    {
      next = CASE_CHAIN (t);
      CASE_CHAIN (t) = NULL;
    }

  return true;
}
/* Start recording information mapping edges to case labels.  */

void
start_recording_case_labels (void)
{
  gcc_assert (edge_to_cases == NULL);
  edge_to_cases = new hash_map<edge, tree>;
  touched_switch_bbs = BITMAP_ALLOC (NULL);
}
/* Return nonzero if we are recording information for case labels.  */

static bool
recording_case_labels_p (void)
{
  return (edge_to_cases != NULL);
}
/* Stop recording information mapping edges to case labels and
   remove any information we have recorded.  */
void
end_recording_case_labels (void)
{
  bitmap_iterator bi;
  unsigned i;
  edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
  delete edge_to_cases;
  edge_to_cases = NULL;
  EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
	{
	  gimple *stmt = last_stmt (bb);
	  if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	    group_case_labels_stmt (as_a <gswitch *> (stmt));
	}
    }
  BITMAP_FREE (touched_switch_bbs);
}
/* If we are inside a {start,end}_recording_cases block, then return
   a chain of CASE_LABEL_EXPRs from T which reference E.

   Otherwise return NULL.  */

static tree
get_cases_for_edge (edge e, gswitch *t)
{
  tree *slot;
  size_t i, n;

  /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
     chains available.  Return NULL so the caller can detect this case.  */
  if (!recording_case_labels_p ())
    return NULL;

  slot = edge_to_cases->get (e);
  if (slot)
    return *slot;

  /* If we did not find E in the hash table, then this must be the first
     time we have been queried for information about E & T.  Add all the
     elements from T to the hash table then perform the query again.  */

  n = gimple_switch_num_labels (t);
  for (i = 0; i < n; i++)
    {
      tree elt = gimple_switch_label (t, i);
      tree lab = CASE_LABEL (elt);
      basic_block label_bb = label_to_block (lab);
      edge this_edge = find_edge (e->src, label_bb);

      /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
	 a new chain.  */
      tree &s = edge_to_cases->get_or_insert (this_edge);
      CASE_CHAIN (elt) = s;
      s = elt;
    }

  return *edge_to_cases->get (e);
}
/* Create the edges for a GIMPLE_SWITCH starting at block BB.  */

static void
make_gimple_switch_edges (gswitch *entry, basic_block bb)
{
  size_t i, n;

  n = gimple_switch_num_labels (entry);

  for (i = 0; i < n; ++i)
    {
      tree lab = CASE_LABEL (gimple_switch_label (entry, i));
      basic_block label_bb = label_to_block (lab);
      make_edge (bb, label_bb, 0);
    }
}
/* Return the basic block holding label DEST.  */

basic_block
label_to_block_fn (struct function *ifun, tree dest)
{
  int uid = LABEL_DECL_UID (dest);

  /* We would die hard when faced by an undefined label.  Emit a label to
     the very first basic block.  This will hopefully make even the dataflow
     and undefined variable warnings quite right.  */
  if (seen_error () && uid < 0)
    {
      gimple_stmt_iterator gsi =
	gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
      gimple *stmt;

      stmt = gimple_build_label (dest);
      gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
      uid = LABEL_DECL_UID (dest);
    }
  if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
    return NULL;
  return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for a goto statement at block BB.  Returns true
   if abnormal edges should be created.  */

static bool
make_goto_expr_edges (basic_block bb)
{
  gimple_stmt_iterator last = gsi_last_bb (bb);
  gimple *goto_t = gsi_stmt (last);

  /* A simple GOTO creates normal edges.  */
  if (simple_goto_p (goto_t))
    {
      tree dest = gimple_goto_dest (goto_t);
      basic_block label_bb = label_to_block (dest);
      edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
      e->goto_locus = gimple_location (goto_t);
      gsi_remove (&last, true);
      return false;
    }

  /* A computed GOTO creates abnormal edges.  */
  return true;
}
/* Create edges for an asm statement with labels at block BB.  */

static void
make_gimple_asm_edges (basic_block bb)
{
  gasm *stmt = as_a <gasm *> (last_stmt (bb));
  int i, n = gimple_asm_nlabels (stmt);

  for (i = 0; i < n; ++i)
    {
      tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
      basic_block label_bb = label_to_block (label);
      make_edge (bb, label_bb, 0);
    }
}
/*---------------------------------------------------------------------------
			       Flowgraph analysis
---------------------------------------------------------------------------*/
/* Cleanup useless labels in basic blocks.  This is something we wish
   to do early because it allows us to group case labels before creating
   the edges for the CFG, and it speeds up block statement iterators in
   all passes later on.
   We rerun this pass after CFG is created, to get rid of the labels that
   are no longer referenced.  After then we do not run it any more, since
   (almost) no new labels should be created.  */

/* A map from basic block index to the leading label of that block.  */
static struct label_record
{
  /* The label.  */
  tree label;

  /* True if the label is referenced from somewhere.  */
  bool used;
} *label_for_bb;
/* Given LABEL return the first label in the same basic block.  */

static tree
main_block_label (tree label)
{
  basic_block bb = label_to_block (label);
  tree main_label = label_for_bb[bb->index].label;

  /* label_to_block possibly inserted undefined label into the chain.  */
  if (!main_label)
    {
      label_for_bb[bb->index].label = label;
      main_label = label;
    }

  label_for_bb[bb->index].used = true;
  return main_label;
}
/* Clean up redundant labels within the exception tree.  */

static void
cleanup_dead_labels_eh (void)
{
  eh_landing_pad lp;
  eh_region r;
  tree lab;
  int i;

  if (cfun->eh == NULL)
    return;

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	lab = main_block_label (lp->post_landing_pad);
	if (lab != lp->post_landing_pad)
	  {
	    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
	    EH_LANDING_PAD_NR (lab) = lp->index;
	  }
      }

  FOR_ALL_EH_REGION (r)
    switch (r->type)
      {
      case ERT_CLEANUP:
      case ERT_MUST_NOT_THROW:
	break;

      case ERT_TRY:
	{
	  eh_catch c;
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      lab = c->label;
	      if (lab)
		c->label = main_block_label (lab);
	    }
	}
	break;

      case ERT_ALLOWED_EXCEPTIONS:
	lab = r->u.allowed.label;
	if (lab)
	  r->u.allowed.label = main_block_label (lab);
	break;
      }
}
/* Cleanup redundant labels.  This is a three-step process:
     1) Find the leading label for each block.
     2) Redirect all references to labels to the leading labels.
     3) Cleanup all useless labels.  */

void
cleanup_dead_labels (void)
{
  basic_block bb;
  label_for_bb = XCNEWVEC (struct label_record, last_basic_block_for_fn (cfun));

  /* Find a suitable label for each block.  We use the first user-defined
     label if there is one, or otherwise just the first label we see.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  /* If we have not yet seen a label for the current block,
	     remember this one and see if there are more labels.  */
	  if (!label_for_bb[bb->index].label)
	    {
	      label_for_bb[bb->index].label = label;
	      continue;
	    }

	  /* If we did see a label for the current block already, but it
	     is an artificially created label, replace it if the current
	     label is a user defined label.  */
	  if (!DECL_ARTIFICIAL (label)
	      && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
	    {
	      label_for_bb[bb->index].label = label;
	      break;
	    }
	}
    }

  /* Now redirect all jumps/branches to the selected label.
     First do so for each block ending in a control statement.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      tree label, new_label;

      if (!stmt)
	continue;

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    gcond *cond_stmt = as_a <gcond *> (stmt);
	    label = gimple_cond_true_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_true_label (cond_stmt, new_label);
	      }

	    label = gimple_cond_false_label (cond_stmt);
	    if (label)
	      {
		new_label = main_block_label (label);
		if (new_label != label)
		  gimple_cond_set_false_label (cond_stmt, new_label);
	      }
	  }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    /* Replace all destination labels.  */
	    for (i = 0; i < n; ++i)
	      {
		tree case_label = gimple_switch_label (switch_stmt, i);
		label = CASE_LABEL (case_label);
		new_label = main_block_label (label);
		if (new_label != label)
		  CASE_LABEL (case_label) = new_label;
	      }
	    break;
	  }

	case GIMPLE_ASM:
	  {
	    gasm *asm_stmt = as_a <gasm *> (stmt);
	    int i, n = gimple_asm_nlabels (asm_stmt);

	    for (i = 0; i < n; ++i)
	      {
		tree cons = gimple_asm_label_op (asm_stmt, i);
		tree label = main_block_label (TREE_VALUE (cons));
		TREE_VALUE (cons) = label;
	      }
	    break;
	  }

	/* We have to handle gotos until they're removed, and we don't
	   remove them until after we've created the CFG edges.  */
	case GIMPLE_GOTO:
	  if (!computed_goto_p (stmt))
	    {
	      ggoto *goto_stmt = as_a <ggoto *> (stmt);
	      label = gimple_goto_dest (goto_stmt);
	      new_label = main_block_label (label);
	      if (new_label != label)
		gimple_goto_set_dest (goto_stmt, new_label);
	    }
	  break;

	case GIMPLE_TRANSACTION:
	  {
	    gtransaction *trans_stmt = as_a <gtransaction *> (stmt);
	    tree label = gimple_transaction_label (trans_stmt);
	    if (label)
	      {
		tree new_label = main_block_label (label);
		if (new_label != label)
		  gimple_transaction_set_label (trans_stmt, new_label);
	      }
	  }
	  break;

	default:
	  break;
	}
    }

  /* Do the same for the exception region tree labels.  */
  cleanup_dead_labels_eh ();

  /* Finally, purge dead labels.  All user-defined labels and labels that
     can be the target of non-local gotos and labels which have their
     address taken are preserved.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator i;
      tree label_for_this_bb = label_for_bb[bb->index].label;

      if (!label_for_this_bb)
	continue;

      /* If the main label of the block is unused, we may still remove it.  */
      if (!label_for_bb[bb->index].used)
	label_for_this_bb = NULL;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
	  tree label;
	  glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));

	  if (!label_stmt)
	    break;

	  label = gimple_label_label (label_stmt);

	  if (label == label_for_this_bb
	      || !DECL_ARTIFICIAL (label)
	      || DECL_NONLOCAL (label)
	      || FORCED_LABEL (label))
	    gsi_next (&i);
	  else
	    gsi_remove (&i, true);
	}
    }

  free (label_for_bb);
}
/* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
   the ones jumping to the same label.
   Eg. three separate entries 1: 2: 3: become one entry 1..3:  */

void
group_case_labels_stmt (gswitch *stmt)
{
  int old_size = gimple_switch_num_labels (stmt);
  int i, j, new_size = old_size;
  basic_block default_bb = NULL;

  default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt)));

  /* Look for possible opportunities to merge cases.  */
  i = 1;
  while (i < old_size)
    {
      tree base_case, base_high;
      basic_block base_bb;

      base_case = gimple_switch_label (stmt, i);

      gcc_assert (base_case);
      base_bb = label_to_block (CASE_LABEL (base_case));

      /* Discard cases that have the same destination as the
	 default case.  */
      if (base_bb == default_bb)
	{
	  gimple_switch_set_label (stmt, i, NULL_TREE);
	  i++;
	  new_size--;
	  continue;
	}

      base_high = CASE_HIGH (base_case)
	  ? CASE_HIGH (base_case)
	  : CASE_LOW (base_case);
      i++;

      /* Try to merge case labels.  Break out when we reach the end
	 of the label vector or when we cannot merge the next case
	 label with the current one.  */
      while (i < old_size)
	{
	  tree merge_case = gimple_switch_label (stmt, i);
	  basic_block merge_bb = label_to_block (CASE_LABEL (merge_case));
	  wide_int bhp1 = wi::add (base_high, 1);

	  /* Merge the cases if they jump to the same place,
	     and their ranges are consecutive.  */
	  if (merge_bb == base_bb
	      && wi::eq_p (CASE_LOW (merge_case), bhp1))
	    {
	      base_high = CASE_HIGH (merge_case) ?
		  CASE_HIGH (merge_case) : CASE_LOW (merge_case);
	      CASE_HIGH (base_case) = base_high;
	      gimple_switch_set_label (stmt, i, NULL_TREE);
	      new_size--;
	      i++;
	    }
	  else
	    break;
	}
    }

  /* Compress the case labels in the label vector, and adjust the
     length of the vector.  */
  for (i = 0, j = 0; i < new_size; i++)
    {
      while (! gimple_switch_label (stmt, j))
	j++;
      gimple_switch_set_label (stmt, i,
			       gimple_switch_label (stmt, j++));
    }

  gcc_assert (new_size <= old_size);
  gimple_switch_set_num_labels (stmt, new_size);
}
/* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
   and scan the sorted vector of cases.  Combine the ones jumping to the
   same label.  */

void
group_case_labels (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple *stmt = last_stmt (bb);
      if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
	group_case_labels_stmt (as_a <gswitch *> (stmt));
    }
}
/* Checks whether we can merge block B into block A.  */

static bool
gimple_can_merge_blocks_p (basic_block a, basic_block b)
{
  gimple *stmt;

  if (!single_succ_p (a))
    return false;

  if (single_succ_edge (a)->flags & EDGE_COMPLEX)
    return false;

  if (single_succ (a) != b)
    return false;

  if (!single_pred_p (b))
    return false;

  if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
      || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return false;

  /* If A ends by a statement causing exceptions or something similar, we
     cannot merge the blocks.  */
  stmt = last_stmt (a);
  if (stmt && stmt_ends_bb_p (stmt))
    return false;

  /* Do not allow a block with only a non-local label to be merged.  */
  if (stmt)
    if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
	return false;

  /* Examine the labels at the beginning of B.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      tree lab;
      glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!label_stmt)
	break;
      lab = gimple_label_label (label_stmt);

      /* Do not remove user forced labels or for -O0 any user labels.  */
      if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
	return false;
    }

  /* Protect simple loop latches.  We only want to avoid merging
     the latch with the loop header or with a block in another
     loop in this case.  */
  if (current_loops
      && b->loop_father->latch == b
      && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
      && (b->loop_father->header == a
	  || b->loop_father != a->loop_father))
    return false;

  /* It must be possible to eliminate all phi nodes in B.  If ssa form
     is not up-to-date and a name-mapping is registered, we cannot eliminate
     any phis.  Symbols marked for renaming are never a problem though.  */
  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      /* Technically only new names matter.  */
      if (name_registered_for_update_p (PHI_RESULT (phi)))
	return false;
    }

  /* When not optimizing, don't merge if we'd lose goto_locus.  */
  if (!optimize
      && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
    {
      location_t goto_locus = single_succ_edge (a)->goto_locus;
      gimple_stmt_iterator prev, next;
      prev = gsi_last_nondebug_bb (a);
      next = gsi_after_labels (b);
      if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
	gsi_next_nondebug (&next);
      if ((gsi_end_p (prev)
	   || gimple_location (gsi_stmt (prev)) != goto_locus)
	  && (gsi_end_p (next)
	      || gimple_location (gsi_stmt (next)) != goto_locus))
	return false;
    }

  return true;
}
/* Replaces all uses of NAME by VAL.  */

void
replace_uses_by (tree name, tree val)
{
  imm_use_iterator imm_iter;
  use_operand_p use;
  gimple *stmt;
  edge e;

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
    {
      /* Mark the block if we change the last stmt in it.  */
      if (cfgcleanup_altered_bbs
	  && stmt_ends_bb_p (stmt))
	bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);

      FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
	{
	  replace_exp (use, val);

	  if (gimple_code (stmt) == GIMPLE_PHI)
	    {
	      e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
				       PHI_ARG_INDEX_FROM_USE (use));
	      if (e->flags & EDGE_ABNORMAL
		  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
		{
		  /* This can only occur for virtual operands, since
		     for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
		     would prevent replacement.  */
		  gcc_checking_assert (virtual_operand_p (name));
		  SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
		}
	    }
	}

      if (gimple_code (stmt) != GIMPLE_PHI)
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	  gimple *orig_stmt = stmt;
	  size_t i;

	  /* FIXME.  It shouldn't be required to keep TREE_CONSTANT
	     on ADDR_EXPRs up-to-date on GIMPLE.  Propagation will
	     only change sth from non-invariant to invariant, and only
	     when propagating constants.  */
	  if (is_gimple_min_invariant (val))
	    for (i = 0; i < gimple_num_ops (stmt); i++)
	      {
		tree op = gimple_op (stmt, i);
		/* Operands may be empty here.  For example, the labels
		   of a GIMPLE_COND are nulled out following the creation
		   of the corresponding CFG edges.  */
		if (op && TREE_CODE (op) == ADDR_EXPR)
		  recompute_tree_invariant_for_addr_expr (op);
	      }

	  if (fold_stmt (&gsi))
	    stmt = gsi_stmt (gsi);

	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    gimple_purge_dead_eh_edges (gimple_bb (stmt));

	  update_stmt (stmt);
	}
    }

  gcc_checking_assert (has_zero_uses (name));

  /* Also update the trees stored in loop structures.  */
  if (current_loops)
    {
      struct loop *loop;

      FOR_EACH_LOOP (loop, 0)
	{
	  substitute_in_loop_info (loop, name, val);
	}
    }
}
/* Merge block B into block A.  */

static void
gimple_merge_blocks (basic_block a, basic_block b)
{
  gimple_stmt_iterator last, gsi;
  gphi_iterator psi;

  if (dump_file)
    fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);

  /* Remove all single-valued PHI nodes from block B of the form
     V_i = PHI <V_j> by propagating V_j to all the uses of V_i.  */
  gsi = gsi_last_bb (a);
  for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
    {
      gimple *phi = gsi_stmt (psi);
      tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
      gimple *copy;
      bool may_replace_uses = (virtual_operand_p (def)
			       || may_propagate_copy (def, use));

      /* In case we maintain loop closed ssa form, do not propagate arguments
	 of loop exit phi nodes.  */
      if (current_loops
	  && loops_state_satisfies_p (LOOP_CLOSED_SSA)
	  && !virtual_operand_p (def)
	  && TREE_CODE (use) == SSA_NAME
	  && a->loop_father != b->loop_father)
	may_replace_uses = false;

      if (!may_replace_uses)
	{
	  gcc_assert (!virtual_operand_p (def));

	  /* Note that just emitting the copies is fine -- there is no problem
	     with ordering of phi nodes.  This is because A is the single
	     predecessor of B, therefore results of the phi nodes cannot
	     appear as arguments of the phi nodes.  */
	  copy = gimple_build_assign (def, use);
	  gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
	  remove_phi_node (&psi, false);
	}
      else
	{
	  /* If we deal with a PHI for virtual operands, we can simply
	     propagate these without fussing with folding or updating
	     the stmt.  */
	  if (virtual_operand_p (def))
	    {
	      imm_use_iterator iter;
	      use_operand_p use_p;
	      gimple *stmt;

	      FOR_EACH_IMM_USE_STMT (stmt, iter, def)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, use);

	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
	    }
	  else
	    replace_uses_by (def, use);

	  remove_phi_node (&psi, true);
	}
    }

  /* Ensure that B follows A.  */
  move_block_after (b, a);

  gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
  gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));

  /* Remove labels from B and set gimple_bb to A for other statements.  */
  for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
    {
      gimple *stmt = gsi_stmt (gsi);
      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int lp_nr;

	  gsi_remove (&gsi, false);

	  /* Now that we can thread computed gotos, we might have
	     a situation where we have a forced label in block B.
	     However, the label at the start of block B might still be
	     used in other ways (think about the runtime checking for
	     Fortran assigned gotos).  So we can not just delete the
	     label.  Instead we move the label to the start of block A.  */
	  if (FORCED_LABEL (label))
	    {
	      gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
	      gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
	    }
	  /* Other user labels keep around in a form of a debug stmt.  */
	  else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS)
	    {
	      gimple *dbg = gimple_build_debug_bind (label,
						     integer_zero_node,
						     stmt);
	      gimple_debug_bind_reset_value (dbg);
	      gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
	    }

	  lp_nr = EH_LANDING_PAD_NR (label);
	  if (lp_nr)
	    {
	      eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
	      lp->post_landing_pad = NULL;
	    }
	}
      else
	{
	  gimple_set_bb (stmt, a);
	  gsi_next (&gsi);
	}
    }

  /* When merging two BBs, if their counts are different, the larger count
     is selected as the new bb count.  This is to handle inconsistent
     profiles.  */
  if (a->loop_father == b->loop_father)
    {
      a->count = MAX (a->count, b->count);
      a->frequency = MAX (a->frequency, b->frequency);
    }

  /* Merge the sequences.  */
  last = gsi_last_bb (a);
  gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
  set_bb_seq (b, NULL);

  if (cfgcleanup_altered_bbs)
    bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
}
/* Return the one of two successors of BB that is not reachable by a
   complex edge, if there is one.  Else, return BB.  We use
   this in optimizations that use post-dominators for their heuristics,
   to catch the cases in C++ where function calls are involved.  */

basic_block
single_noncomplex_succ (basic_block bb)
{
  edge e0, e1;
  if (EDGE_COUNT (bb->succs) != 2)
    return bb;

  e0 = EDGE_SUCC (bb, 0);
  e1 = EDGE_SUCC (bb, 1);
  if (e0->flags & EDGE_COMPLEX)
    return e1->dest;
  if (e1->flags & EDGE_COMPLEX)
    return e0->dest;

  return bb;
}
/* T is CALL_EXPR.  Set current_function_calls_* flags.  */

void
notice_special_calls (gcall *call)
{
  int flags = gimple_call_flags (call);

  if (flags & ECF_MAY_BE_ALLOCA)
    cfun->calls_alloca = true;
  if (flags & ECF_RETURNS_TWICE)
    cfun->calls_setjmp = true;
}
/* Clear flags set by notice_special_calls.  Used by dead code removal
   to update the flags.  */

void
clear_special_calls (void)
{
  cfun->calls_alloca = false;
  cfun->calls_setjmp = false;
}
/* Remove PHI nodes associated with basic block BB and all edges out of BB.  */

static void
remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
{
  /* Since this block is no longer reachable, we can just delete all
     of its PHI nodes.  */
  remove_phi_nodes (bb);

  /* Remove edges to BB's successors.  */
  while (EDGE_COUNT (bb->succs) > 0)
    remove_edge (EDGE_SUCC (bb, 0));
}
/* Remove statements of basic block BB.  */

static void
remove_bb (basic_block bb)
{
  gimple_stmt_iterator i;

  if (dump_file)
    {
      fprintf (dump_file, "Removing basic block %d\n", bb->index);
      if (dump_flags & TDF_DETAILS)
	{
	  dump_bb (dump_file, bb, 0, TDF_BLOCKS);
	  fprintf (dump_file, "\n");
	}
    }

  if (current_loops)
    {
      struct loop *loop = bb->loop_father;

      /* If a loop gets removed, clean up the information associated
	 with it.  */
      if (loop->latch == bb
	  || loop->header == bb)
	free_numbers_of_iterations_estimates_loop (loop);
    }

  /* Remove all the instructions in the block.  */
  if (bb_seq (bb) != NULL)
    {
      /* Walk backwards so as to get a chance to substitute all
	 released DEFs into debug stmts.  See
	 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
	 details.  */
      for (i = gsi_last_bb (bb); !gsi_end_p (i);)
	{
	  gimple *stmt = gsi_stmt (i);
	  glabel *label_stmt = dyn_cast <glabel *> (stmt);
	  if (label_stmt
	      && (FORCED_LABEL (gimple_label_label (label_stmt))
		  || DECL_NONLOCAL (gimple_label_label (label_stmt))))
	    {
	      basic_block new_bb;
	      gimple_stmt_iterator new_gsi;

	      /* A non-reachable non-local label may still be referenced.
		 But it no longer needs to carry the extra semantics of
		 non-local labels.  */
	      if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
		{
		  DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
		  FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
		}

	      new_bb = bb->prev_bb;
	      new_gsi = gsi_start_bb (new_bb);
	      gsi_remove (&i, false);
	      gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      /* Release SSA definitions if we are in SSA.  Note that we
		 may be called when not in SSA.  For example,
		 final_cleanup calls this function via
		 cleanup_tree_cfg.  */
	      if (gimple_in_ssa_p (cfun))
		release_defs (stmt);

	      gsi_remove (&i, true);
	    }

	  if (gsi_end_p (i))
	    i = gsi_last_bb (bb);
	  else
	    gsi_prev (&i);
	}
    }

  remove_phi_nodes_and_edges_for_unreachable_block (bb);
  bb->il.gimple.seq = NULL;
  bb->il.gimple.phi_nodes = NULL;
}
/* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a
   predicate VAL, return the edge that will be taken out of the block.
   If VAL does not match a unique edge, NULL is returned.  */

edge
find_taken_edge (basic_block bb, tree val)
{
  gimple *stmt;

  stmt = last_stmt (bb);

  gcc_assert (stmt);
  gcc_assert (is_ctrl_stmt (stmt));

  if (val == NULL)
    return NULL;

  if (!is_gimple_min_invariant (val))
    return NULL;

  if (gimple_code (stmt) == GIMPLE_COND)
    return find_taken_edge_cond_expr (bb, val);

  if (gimple_code (stmt) == GIMPLE_SWITCH)
    return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val);

  if (computed_goto_p (stmt))
    {
      /* Only optimize if the argument is a label, if the argument is
	 not a label then we can not construct a proper CFG.

	 It may be the case that we only need to allow the LABEL_REF to
	 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
	 appear inside a LABEL_EXPR just to be safe.  */
      if ((TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
	return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
      return NULL;
    }

  gcc_unreachable ();
}
/* Given a constant value VAL and the entry block BB to a GOTO_EXPR
   statement, determine which of the outgoing edges will be taken out of the
   block.  Return NULL if either edge may be taken.  */

static edge
find_taken_edge_computed_goto (basic_block bb, tree val)
{
  basic_block dest;
  edge e = NULL;

  dest = label_to_block (val);
  if (dest)
    {
      e = find_edge (bb, dest);
      gcc_assert (e != NULL);
    }

  return e;
}
/* Given a constant value VAL and the entry block BB to a COND_EXPR
   statement, determine which of the two edges will be taken out of the
   block.  Return NULL if either edge may be taken.  */

static edge
find_taken_edge_cond_expr (basic_block bb, tree val)
{
  edge true_edge, false_edge;

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

  gcc_assert (TREE_CODE (val) == INTEGER_CST);
  return (integer_zerop (val) ? false_edge : true_edge);
}
/* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR
   statement, determine which edge will be taken out of the block.  Return
   NULL if any edge may be taken.  */

static edge
find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb,
			     tree val)
{
  basic_block dest_bb;
  edge e;
  tree taken_case;

  taken_case = find_case_label_for_value (switch_stmt, val);
  dest_bb = label_to_block (CASE_LABEL (taken_case));

  e = find_edge (bb, dest_bb);
  gcc_assert (e);
  return e;
}
/* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
   We can make optimal use here of the fact that the case labels are
   sorted: We can do a binary search for a case matching VAL.  */

static tree
find_case_label_for_value (gswitch *switch_stmt, tree val)
{
  size_t low, high, n = gimple_switch_num_labels (switch_stmt);
  tree default_case = gimple_switch_default_label (switch_stmt);

  for (low = 0, high = n; high - low > 1; )
    {
      size_t i = (high + low) / 2;
      tree t = gimple_switch_label (switch_stmt, i);
      int cmp;

      /* Cache the result of comparing CASE_LOW and val.  */
      cmp = tree_int_cst_compare (CASE_LOW (t), val);

      if (cmp > 0)
	high = i;
      else
	low = i;

      if (CASE_HIGH (t) == NULL)
	{
	  /* A single-valued case label.  */
	  if (cmp == 0)
	    return t;
	}
      else
	{
	  /* A case range.  We can only handle integer ranges.  */
	  if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
	    return t;
	}
    }

  return default_case;
}
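
/* For instance, with sorted labels {default, 1..3, 7, 9..12} and VAL 8,
   the binary search narrows to the "7" label; neither its single value
   nor the following range covers 8, so the default case is returned.  */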
/* Dump a basic block on stderr.  */

void
gimple_debug_bb (basic_block bb)
{
  dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
}
/* Dump basic block with index N on stderr.  */

basic_block
gimple_debug_bb_n (int n)
{
  gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
  return BASIC_BLOCK_FOR_FN (cfun, n);
}
2304 /* Dump the CFG on stderr.
2306 FLAGS are the same as those used by the tree dumping functions
2307 (see TDF_* in dumpfile.h). */
2310 gimple_debug_cfg (int flags)
2312 gimple_dump_cfg (stderr, flags);
2316 /* Dump the program showing basic block boundaries on the given FILE.
2318 FLAGS are the same as those used by the tree dumping functions (see TDF_* in dumpfile.h). */
2322 gimple_dump_cfg (FILE *file, int flags)
2324 if (flags & TDF_DETAILS)
2326 dump_function_header (file, current_function_decl, flags);
2327 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2328 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2329 last_basic_block_for_fn (cfun));
2331 brief_dump_cfg (file, flags | TDF_COMMENT);
2332 fprintf (file, "\n");
2335 if (flags & TDF_STATS)
2336 dump_cfg_stats (file);
2338 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2342 /* Dump CFG statistics on FILE. */
2345 dump_cfg_stats (FILE *file)
2347 static long max_num_merged_labels = 0;
2348 unsigned long size, total = 0;
2351 const char * const fmt_str = "%-30s%-13s%12s\n";
2352 const char * const fmt_str_1 = "%-30s%13d%11lu%c\n";
2353 const char * const fmt_str_2 = "%-30s%13ld%11lu%c\n";
2354 const char * const fmt_str_3 = "%-43s%11lu%c\n";
2355 const char *funcname = current_function_name ();
2357 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2359 fprintf (file, "---------------------------------------------------------\n");
2360 fprintf (file, fmt_str, "", " Number of ", "Memory");
2361 fprintf (file, fmt_str, "", " instances ", "used ");
2362 fprintf (file, "---------------------------------------------------------\n");
2364 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2366 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2367 SCALE (size), LABEL (size));
2370 FOR_EACH_BB_FN (bb, cfun)
2371 num_edges += EDGE_COUNT (bb->succs);
2372 size = num_edges * sizeof (struct edge_def);
2374 fprintf (file, fmt_str_2, "Edges", num_edges, SCALE (size), LABEL (size));
2376 fprintf (file, "---------------------------------------------------------\n");
2377 fprintf (file, fmt_str_3, "Total memory used by CFG data", SCALE (total),
2379 fprintf (file, "---------------------------------------------------------\n");
2380 fprintf (file, "\n");
2382 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2383 max_num_merged_labels = cfg_stats.num_merged_labels;
2385 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2386 cfg_stats.num_merged_labels, max_num_merged_labels);
2388 fprintf (file, "\n");
2392 /* Dump CFG statistics on stderr. Keep extern so that it's always
2393 linked in the final executable. */
2396 debug_cfg_stats (void)
2398 dump_cfg_stats (stderr);
2401 /*---------------------------------------------------------------------------
2402 Miscellaneous helpers
2403 ---------------------------------------------------------------------------*/
2405 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2406 flow. Transfers of control flow associated with EH are excluded. */
2409 call_can_make_abnormal_goto (gimple *t)
2411 /* If the function has no non-local labels and does not call setjmp,
2412 then a call cannot make an abnormal transfer of control. */
2413 if (!cfun->has_nonlocal_label
2414 && !cfun->calls_setjmp)
2417 /* Likewise if the call has no side effects. */
2418 if (!gimple_has_side_effects (t))
2421 /* Likewise if the called function is leaf. */
2422 if (gimple_call_flags (t) & ECF_LEAF)
2429 /* Return true if T can make an abnormal transfer of control flow.
2430 Transfers of control flow associated with EH are excluded. */
2433 stmt_can_make_abnormal_goto (gimple *t)
2435 if (computed_goto_p (t))
2437 if (is_gimple_call (t))
2438 return call_can_make_abnormal_goto (t);
2443 /* Return true if T represents a stmt that always transfers control. */
2446 is_ctrl_stmt (gimple *t)
2448 switch (gimple_code (t))
2462 /* Return true if T is a statement that may alter the flow of control
2463 (e.g., a call to a non-returning function). */
2466 is_ctrl_altering_stmt (gimple *t)
2470 switch (gimple_code (t))
2473 /* The per-stmt call flag indicates whether the call could alter control flow. */
2475 if (gimple_call_ctrl_altering_p (t))
2479 case GIMPLE_EH_DISPATCH:
2480 /* EH_DISPATCH branches to the individual catch handlers at
2481 this level of a try or allowed-exceptions region. It can
2482 fallthru to the next statement as well. */
2486 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2491 /* OpenMP directives alter control flow. */
2494 case GIMPLE_TRANSACTION:
2495 /* A transaction start alters control flow. */
2502 /* If a statement can throw, it alters control flow. */
2503 return stmt_can_throw_internal (t);
2507 /* Return true if T is a simple local goto. */
2510 simple_goto_p (gimple *t)
2512 return (gimple_code (t) == GIMPLE_GOTO
2513 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2517 /* Return true if STMT should start a new basic block. PREV_STMT is
2518 the statement preceding STMT. It is used when STMT is a label or a
2519 case label. Labels should only start a new basic block if their
2520 previous statement wasn't a label. Otherwise, a sequence of labels
2521 would generate unnecessary basic blocks that only contain a single label. */
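/* For example, in

       a: b: c:
         x = 1;

   only "a" can open a new basic block; "b" and "c" are merged into the
   same block and counted in cfg_stats.num_merged_labels.  */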
2525 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2530 /* Labels start a new basic block only if the preceding statement
2531 wasn't a label of the same type. This prevents the creation of
2532 consecutive blocks that have nothing but a single label. */
2533 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2535 /* Nonlocal and computed GOTO targets always start a new block. */
2536 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2537 || FORCED_LABEL (gimple_label_label (label_stmt)))
2540 if (prev_stmt && gimple_code (prev_stmt) == GIMPLE_LABEL)
2542 if (DECL_NONLOCAL (gimple_label_label (
2543 as_a <glabel *> (prev_stmt))))
2546 cfg_stats.num_merged_labels++;
2552 else if (gimple_code (stmt) == GIMPLE_CALL
2553 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2554 /* setjmp acts similarly to a nonlocal GOTO target and thus should
2555 start a new block. */
2562 /* Return true if T should end a basic block. */
2565 stmt_ends_bb_p (gimple *t)
2567 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2570 /* Remove block annotations and other data structures. */
2573 delete_tree_cfg_annotations (void)
2575 vec_free (label_to_block_map_for_fn (cfun));
2578 /* Return the virtual phi in BB. */
2581 get_virtual_phi (basic_block bb)
2583 for (gphi_iterator gsi = gsi_start_phis (bb);
2587 gphi *phi = gsi.phi ();
2589 if (virtual_operand_p (PHI_RESULT (phi)))
2596 /* Return the first statement in basic block BB. */
2599 first_stmt (basic_block bb)
2601 gimple_stmt_iterator i = gsi_start_bb (bb);
2602 gimple *stmt = NULL;
2604 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2612 /* Return the first non-label statement in basic block BB. */
2615 first_non_label_stmt (basic_block bb)
2617 gimple_stmt_iterator i = gsi_start_bb (bb);
2618 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2620 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2623 /* Return the last statement in basic block BB. */
2626 last_stmt (basic_block bb)
2628 gimple_stmt_iterator i = gsi_last_bb (bb);
2629 gimple *stmt = NULL;
2631 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2639 /* Return the last statement of an otherwise empty block. Return NULL
2640 if the block is totally empty, or if it contains more than one statement. */
2644 last_and_only_stmt (basic_block bb)
2646 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2647 gimple *last, *prev;
2652 last = gsi_stmt (i);
2653 gsi_prev_nondebug (&i);
2657 /* Empty statements should no longer appear in the instruction stream.
2658 Everything that might have appeared before should be deleted by
2659 remove_useless_stmts, and the optimizers should just gsi_remove
2660 instead of smashing with build_empty_stmt.
2662 Thus the only thing that should appear here in a block containing
2663 one executable statement is a label. */
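/* E.g. for a block containing only "L1: x_1 = 0;" the assignment is
   returned; if another executable statement precedes it, NULL is
   returned instead.  */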
2664 prev = gsi_stmt (i);
2665 if (gimple_code (prev) == GIMPLE_LABEL)
2671 /* Reinstall those PHI arguments queued in OLD_EDGE to NEW_EDGE. */
2674 reinstall_phi_args (edge new_edge, edge old_edge)
2680 vec<edge_var_map> *v = redirect_edge_var_map_vector (old_edge);
2684 for (i = 0, phis = gsi_start_phis (new_edge->dest);
2685 v->iterate (i, &vm) && !gsi_end_p (phis);
2686 i++, gsi_next (&phis))
2688 gphi *phi = phis.phi ();
2689 tree result = redirect_edge_var_map_result (vm);
2690 tree arg = redirect_edge_var_map_def (vm);
2692 gcc_assert (result == gimple_phi_result (phi));
2694 add_phi_arg (phi, arg, new_edge, redirect_edge_var_map_location (vm));
2697 redirect_edge_var_map_clear (old_edge);
2700 /* Returns the basic block after which the new basic block created
2701 by splitting edge EDGE_IN should be placed. Tries to keep the new block
2702 near its "logical" location. This is of most help to humans looking
2703 at debugging dumps. */
2706 split_edge_bb_loc (edge edge_in)
2708 basic_block dest = edge_in->dest;
2709 basic_block dest_prev = dest->prev_bb;
2713 edge e = find_edge (dest_prev, dest);
2714 if (e && !(e->flags & EDGE_COMPLEX))
2715 return edge_in->src;
2720 /* Split a (typically critical) edge EDGE_IN. Return the new block.
2721 Abort on abnormal edges. */
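/* A sketch of the transformation, splitting the edge A->B:

       A --> B    becomes    A --> N --> B

   where N is the returned empty block; the original edge is redirected
   to A->N and the PHI arguments queued on A->B are reinstalled on the
   new fallthru edge N->B.  */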
2724 gimple_split_edge (edge edge_in)
2726 basic_block new_bb, after_bb, dest;
2729 /* Abnormal edges cannot be split. */
2730 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
2732 dest = edge_in->dest;
2734 after_bb = split_edge_bb_loc (edge_in);
2736 new_bb = create_empty_bb (after_bb);
2737 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2738 new_bb->count = edge_in->count;
2739 new_edge = make_edge (new_bb, dest, EDGE_FALLTHRU);
2740 new_edge->probability = REG_BR_PROB_BASE;
2741 new_edge->count = edge_in->count;
2743 e = redirect_edge_and_branch (edge_in, new_bb);
2744 gcc_assert (e == edge_in);
2745 reinstall_phi_args (new_edge, e);
2751 /* Verify properties of the address expression T with base object BASE. */
2754 verify_address (tree t, tree base)
2757 bool old_side_effects;
2759 bool new_side_effects;
2761 old_constant = TREE_CONSTANT (t);
2762 old_side_effects = TREE_SIDE_EFFECTS (t);
2764 recompute_tree_invariant_for_addr_expr (t);
2765 new_side_effects = TREE_SIDE_EFFECTS (t);
2766 new_constant = TREE_CONSTANT (t);
2768 if (old_constant != new_constant)
2770 error ("constant not recomputed when ADDR_EXPR changed");
2773 if (old_side_effects != new_side_effects)
2775 error ("side effects not recomputed when ADDR_EXPR changed");
2779 if (!(TREE_CODE (base) == VAR_DECL
2780 || TREE_CODE (base) == PARM_DECL
2781 || TREE_CODE (base) == RESULT_DECL))
2784 if (DECL_GIMPLE_REG_P (base))
2786 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2793 /* Callback for walk_tree; check that all elements with address taken are
2794 properly noticed as such. The DATA is an int* that is 1 if TP was seen
2795 inside a PHI node. */
2798 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
2805 /* Check operand N for being valid GIMPLE and give error MSG if not. */
2806 #define CHECK_OP(N, MSG) \
2807 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \
2808 { error (MSG); return TREE_OPERAND (t, N); }} while (0)
2810 switch (TREE_CODE (t))
2813 if (SSA_NAME_IN_FREE_LIST (t))
2815 error ("SSA name in freelist but still referenced");
2821 error ("INDIRECT_REF in gimple IL");
2825 x = TREE_OPERAND (t, 0);
2826 if (!POINTER_TYPE_P (TREE_TYPE (x))
2827 || !is_gimple_mem_ref_addr (x))
2829 error ("invalid first operand of MEM_REF");
2832 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2833 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2835 error ("invalid offset operand of MEM_REF");
2836 return TREE_OPERAND (t, 1);
2838 if (TREE_CODE (x) == ADDR_EXPR
2839 && (x = verify_address (x, TREE_OPERAND (x, 0))))
2845 x = fold (ASSERT_EXPR_COND (t));
2846 if (x == boolean_false_node)
2848 error ("ASSERT_EXPR with an always-false condition");
2854 error ("MODIFY_EXPR not expected while having tuples");
2861 gcc_assert (is_gimple_address (t));
2863 /* Skip any references (they will be checked when we recurse down the
2864 tree) and ensure that any variable used as a prefix is marked addressable. */
2866 for (x = TREE_OPERAND (t, 0);
2867 handled_component_p (x);
2868 x = TREE_OPERAND (x, 0))
2871 if ((tem = verify_address (t, x)))
2874 if (!(TREE_CODE (x) == VAR_DECL
2875 || TREE_CODE (x) == PARM_DECL
2876 || TREE_CODE (x) == RESULT_DECL))
2879 if (!TREE_ADDRESSABLE (x))
2881 error ("address taken, but ADDRESSABLE bit not set");
2889 x = COND_EXPR_COND (t);
2890 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
2892 error ("non-integral used in condition");
2895 if (!is_gimple_condexpr (x))
2897 error ("invalid conditional operand");
2902 case NON_LVALUE_EXPR:
2903 case TRUTH_NOT_EXPR:
2907 case FIX_TRUNC_EXPR:
2912 CHECK_OP (0, "invalid operand to unary operator");
2918 if (!is_gimple_reg_type (TREE_TYPE (t)))
2920 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
2924 if (TREE_CODE (t) == BIT_FIELD_REF)
2926 tree t0 = TREE_OPERAND (t, 0);
2927 tree t1 = TREE_OPERAND (t, 1);
2928 tree t2 = TREE_OPERAND (t, 2);
2929 if (!tree_fits_uhwi_p (t1)
2930 || !tree_fits_uhwi_p (t2))
2932 error ("invalid position or size operand to BIT_FIELD_REF");
2935 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
2936 && (TYPE_PRECISION (TREE_TYPE (t))
2937 != tree_to_uhwi (t1)))
2939 error ("integral result type precision does not match "
2940 "field size of BIT_FIELD_REF");
2943 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
2944 && TYPE_MODE (TREE_TYPE (t)) != BLKmode
2945 && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
2946 != tree_to_uhwi (t1)))
2948 error ("mode precision of non-integral result does not "
2949 "match field size of BIT_FIELD_REF");
2952 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0))
2953 && (tree_to_uhwi (t1) + tree_to_uhwi (t2)
2954 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0)))))
2956 error ("position plus size exceeds size of referenced object in "
2961 t = TREE_OPERAND (t, 0);
2966 case ARRAY_RANGE_REF:
2967 case VIEW_CONVERT_EXPR:
2968 /* We have a nest of references. Verify that each of the operands
2969 that determine where to reference is either a constant or a variable,
2970 verify that the base is valid, and then show we've already checked the subtrees. */
2972 while (handled_component_p (t))
2974 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2))
2975 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
2976 else if (TREE_CODE (t) == ARRAY_REF
2977 || TREE_CODE (t) == ARRAY_RANGE_REF)
2979 CHECK_OP (1, "invalid array index");
2980 if (TREE_OPERAND (t, 2))
2981 CHECK_OP (2, "invalid array lower bound");
2982 if (TREE_OPERAND (t, 3))
2983 CHECK_OP (3, "invalid array stride");
2985 else if (TREE_CODE (t) == BIT_FIELD_REF
2986 || TREE_CODE (t) == REALPART_EXPR
2987 || TREE_CODE (t) == IMAGPART_EXPR)
2989 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
2994 t = TREE_OPERAND (t, 0);
2997 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
2999 error ("invalid reference prefix");
3006 /* PLUS_EXPR and MINUS_EXPR don't work on pointers; they should be done using
3007 POINTER_PLUS_EXPR. */
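/* E.g. for "int *p" the C expression "p + 4" must reach GIMPLE as
   POINTER_PLUS_EXPR <p, 16> with a sizetype offset (assuming a 4-byte
   int), never as a PLUS_EXPR on the pointer type.  */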
3008 if (POINTER_TYPE_P (TREE_TYPE (t)))
3010 error ("invalid operand to plus/minus, type is a pointer");
3013 CHECK_OP (0, "invalid operand to binary operator");
3014 CHECK_OP (1, "invalid operand to binary operator");
3017 case POINTER_PLUS_EXPR:
3018 /* Check to make sure the first operand is a pointer or reference type. */
3019 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3021 error ("invalid operand to pointer plus, first operand is not a pointer");
3024 /* Check to make sure the second operand is a ptrofftype. */
3025 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3027 error ("invalid operand to pointer plus, second operand is not an "
3028 "integer type of appropriate width");
3038 case UNORDERED_EXPR:
3047 case TRUNC_DIV_EXPR:
3049 case FLOOR_DIV_EXPR:
3050 case ROUND_DIV_EXPR:
3051 case TRUNC_MOD_EXPR:
3053 case FLOOR_MOD_EXPR:
3054 case ROUND_MOD_EXPR:
3056 case EXACT_DIV_EXPR:
3066 CHECK_OP (0, "invalid operand to binary operator");
3067 CHECK_OP (1, "invalid operand to binary operator");
3071 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3075 case CASE_LABEL_EXPR:
3078 error ("invalid CASE_CHAIN");
3092 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3093 Returns true if there is an error, otherwise false. */
3096 verify_types_in_gimple_min_lval (tree expr)
3100 if (is_gimple_id (expr))
3103 if (TREE_CODE (expr) != TARGET_MEM_REF
3104 && TREE_CODE (expr) != MEM_REF)
3106 error ("invalid expression for min lvalue");
3110 /* TARGET_MEM_REFs are strange beasts. */
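/* A TARGET_MEM_REF encodes a memory access of roughly the form
   BASE + STEP * INDEX + INDEX2 + OFFSET, mirroring target addressing
   modes; only its base operand is checked here.  */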
3111 if (TREE_CODE (expr) == TARGET_MEM_REF)
3114 op = TREE_OPERAND (expr, 0);
3115 if (!is_gimple_val (op))
3117 error ("invalid operand in indirect reference");
3118 debug_generic_stmt (op);
3121 /* Memory references can now generally involve a value conversion. */
3126 /* Verify if EXPR is a valid GIMPLE reference expression. If
3127 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3128 if there is an error, otherwise false. */
3131 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3133 while (handled_component_p (expr))
3135 tree op = TREE_OPERAND (expr, 0);
3137 if (TREE_CODE (expr) == ARRAY_REF
3138 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3140 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3141 || (TREE_OPERAND (expr, 2)
3142 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3143 || (TREE_OPERAND (expr, 3)
3144 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3146 error ("invalid operands to array reference");
3147 debug_generic_stmt (expr);
3152 /* Verify if the reference array element types are compatible. */
3153 if (TREE_CODE (expr) == ARRAY_REF
3154 && !useless_type_conversion_p (TREE_TYPE (expr),
3155 TREE_TYPE (TREE_TYPE (op))))
3157 error ("type mismatch in array reference");
3158 debug_generic_stmt (TREE_TYPE (expr));
3159 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3162 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3163 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3164 TREE_TYPE (TREE_TYPE (op))))
3166 error ("type mismatch in array range reference");
3167 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3168 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3172 if ((TREE_CODE (expr) == REALPART_EXPR
3173 || TREE_CODE (expr) == IMAGPART_EXPR)
3174 && !useless_type_conversion_p (TREE_TYPE (expr),
3175 TREE_TYPE (TREE_TYPE (op))))
3177 error ("type mismatch in real/imagpart reference");
3178 debug_generic_stmt (TREE_TYPE (expr));
3179 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3183 if (TREE_CODE (expr) == COMPONENT_REF
3184 && !useless_type_conversion_p (TREE_TYPE (expr),
3185 TREE_TYPE (TREE_OPERAND (expr, 1))))
3187 error ("type mismatch in component reference");
3188 debug_generic_stmt (TREE_TYPE (expr));
3189 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3193 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3195 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3196 that their operand is not an SSA name or an invariant when
3197 requiring an lvalue (this usually means there is an SRA or IPA-SRA
3198 bug). Otherwise there is nothing to verify, gross mismatches at
3199 most invoke undefined behavior. */
3201 && (TREE_CODE (op) == SSA_NAME
3202 || is_gimple_min_invariant (op)))
3204 error ("conversion of an SSA_NAME on the left hand side");
3205 debug_generic_stmt (expr);
3208 else if (TREE_CODE (op) == SSA_NAME
3209 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3211 error ("conversion of register to a different size");
3212 debug_generic_stmt (expr);
3215 else if (!handled_component_p (op))
3222 if (TREE_CODE (expr) == MEM_REF)
3224 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
3226 error ("invalid address operand in MEM_REF");
3227 debug_generic_stmt (expr);
3230 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
3231 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3233 error ("invalid offset operand in MEM_REF");
3234 debug_generic_stmt (expr);
3238 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3240 if (!TMR_BASE (expr)
3241 || !is_gimple_mem_ref_addr (TMR_BASE (expr)))
3243 error ("invalid address operand in TARGET_MEM_REF");
3246 if (!TMR_OFFSET (expr)
3247 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST
3248 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3250 error ("invalid offset operand in TARGET_MEM_REF");
3251 debug_generic_stmt (expr);
3256 return ((require_lvalue || !is_gimple_min_invariant (expr))
3257 && verify_types_in_gimple_min_lval (expr));
3260 /* Returns true if there is one pointer type in the TYPE_POINTER_TO (SRC_OBJ)
3261 list of pointer-to types that is trivially convertible to DEST. */
3264 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3268 if (!TYPE_POINTER_TO (src_obj))
3271 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3272 if (useless_type_conversion_p (dest, src))
3278 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3279 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3282 valid_fixed_convert_types_p (tree type1, tree type2)
3284 return (FIXED_POINT_TYPE_P (type1)
3285 && (INTEGRAL_TYPE_P (type2)
3286 || SCALAR_FLOAT_TYPE_P (type2)
3287 || FIXED_POINT_TYPE_P (type2)));
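/* E.g. conversions between the GNU C fixed-point type "_Fract" and an
   integer, a float, or another fixed-point type are represented with
   FIXED_CONVERT_EXPR; a conversion with no fixed-point type on either
   side is not.  */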
3290 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3291 is a problem, otherwise false. */
3294 verify_gimple_call (gcall *stmt)
3296 tree fn = gimple_call_fn (stmt);
3297 tree fntype, fndecl;
3300 if (gimple_call_internal_p (stmt))
3304 error ("gimple call has two targets");
3305 debug_generic_stmt (fn);
3313 error ("gimple call has no target");
3318 if (fn && !is_gimple_call_addr (fn))
3320 error ("invalid function in gimple call");
3321 debug_generic_stmt (fn);
3326 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3327 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3328 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3330 error ("non-function in gimple call");
3334 fndecl = gimple_call_fndecl (stmt);
3336 && TREE_CODE (fndecl) == FUNCTION_DECL
3337 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3338 && !DECL_PURE_P (fndecl)
3339 && !TREE_READONLY (fndecl))
3341 error ("invalid pure const state for function");
3345 tree lhs = gimple_call_lhs (stmt);
3347 && (!is_gimple_lvalue (lhs)
3348 || verify_types_in_gimple_reference (lhs, true)))
3350 error ("invalid LHS in gimple call");
3355 && gimple_call_ctrl_altering_p (stmt)
3356 && gimple_call_noreturn_p (stmt)
3357 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
3359 error ("LHS in noreturn call");
3363 fntype = gimple_call_fntype (stmt);
3366 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3367 /* ??? At least C++ misses conversions at assignments from
3368 void * call results.
3369 ??? Java is completely off. Especially with functions
3370 returning java.lang.Object.
3371 For now simply allow arbitrary pointer type conversions. */
3372 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3373 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3375 error ("invalid conversion in gimple call");
3376 debug_generic_stmt (TREE_TYPE (lhs));
3377 debug_generic_stmt (TREE_TYPE (fntype));
3381 if (gimple_call_chain (stmt)
3382 && !is_gimple_val (gimple_call_chain (stmt)))
3384 error ("invalid static chain in gimple call");
3385 debug_generic_stmt (gimple_call_chain (stmt));
3389 /* If there is a static chain argument, the call should either be
3390 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3391 if (gimple_call_chain (stmt)
3393 && !DECL_STATIC_CHAIN (fndecl))
3395 error ("static chain with function that doesn%'t use one");
3399 /* ??? The C frontend passes unpromoted arguments in case it
3400 didn't see a function declaration before the call. So for now
3401 leave the call arguments mostly unverified. Once we gimplify
3402 unit-at-a-time we have a chance to fix this. */
3404 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3406 tree arg = gimple_call_arg (stmt, i);
3407 if ((is_gimple_reg_type (TREE_TYPE (arg))
3408 && !is_gimple_val (arg))
3409 || (!is_gimple_reg_type (TREE_TYPE (arg))
3410 && !is_gimple_lvalue (arg)))
3412 error ("invalid argument to gimple call");
3413 debug_generic_expr (arg);
3421 /* Verifies the gimple comparison with the result type TYPE and
3422 the operands OP0 and OP1. */
3425 verify_gimple_comparison (tree type, tree op0, tree op1)
3427 tree op0_type = TREE_TYPE (op0);
3428 tree op1_type = TREE_TYPE (op1);
3430 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3432 error ("invalid operands in gimple comparison");
3436 /* For comparisons we do not have the operation's type as the
3437 effective type the comparison is carried out in. Instead
3438 we require that either the first operand is trivially
3439 convertible into the second, or the other way around.
3440 Because we special-case pointers to void we allow
3441 comparisons of pointers with the same mode as well. */
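/* For instance "int == int" is fine, as is "char * == void *" since
   both pointer types share a mode; comparing an integer directly
   against a pointer fails all three alternatives and is rejected
   below.  */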
3442 if (!useless_type_conversion_p (op0_type, op1_type)
3443 && !useless_type_conversion_p (op1_type, op0_type)
3444 && (!POINTER_TYPE_P (op0_type)
3445 || !POINTER_TYPE_P (op1_type)
3446 || TYPE_MODE (op0_type) != TYPE_MODE (op1_type)))
3448 error ("mismatching comparison operand types");
3449 debug_generic_expr (op0_type);
3450 debug_generic_expr (op1_type);
3454 /* The resulting type of a comparison may be an effective boolean type. */
3455 if (INTEGRAL_TYPE_P (type)
3456 && (TREE_CODE (type) == BOOLEAN_TYPE
3457 || TYPE_PRECISION (type) == 1))
3459 if (TREE_CODE (op0_type) == VECTOR_TYPE
3460 || TREE_CODE (op1_type) == VECTOR_TYPE)
3462 error ("vector comparison returning a boolean");
3463 debug_generic_expr (op0_type);
3464 debug_generic_expr (op1_type);
3468 /* Or an integer vector type with the same size and element count
3469 as the comparison operand types. */
3470 else if (TREE_CODE (type) == VECTOR_TYPE
3471 && TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE)
3473 if (TREE_CODE (op0_type) != VECTOR_TYPE
3474 || TREE_CODE (op1_type) != VECTOR_TYPE)
3476 error ("non-vector operands in vector comparison");
3477 debug_generic_expr (op0_type);
3478 debug_generic_expr (op1_type);
3482 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)
3483 || (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (type)))
3484 != GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0_type))))
3485 /* The result of a vector comparison is of signed integral type. */
3487 || TYPE_UNSIGNED (TREE_TYPE (type)))
3489 error ("invalid vector comparison resulting type");
3490 debug_generic_expr (type);
3496 error ("bogus comparison result type");
3497 debug_generic_expr (type);
3504 /* Verify a gimple assignment statement STMT with a unary rhs.
3505 Returns true if anything is wrong. */
3508 verify_gimple_assign_unary (gassign *stmt)
3510 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3511 tree lhs = gimple_assign_lhs (stmt);
3512 tree lhs_type = TREE_TYPE (lhs);
3513 tree rhs1 = gimple_assign_rhs1 (stmt);
3514 tree rhs1_type = TREE_TYPE (rhs1);
3516 if (!is_gimple_reg (lhs))
3518 error ("non-register as LHS of unary operation");
3522 if (!is_gimple_val (rhs1))
3524 error ("invalid operand in unary operation");
3528 /* First handle conversions. */
3533 /* Allow conversions from pointer type to integral type only if
3534 there is no sign or zero extension involved.
3535 For targets where the precision of ptrofftype doesn't match that
3536 of pointers we need to allow arbitrary conversions to ptrofftype. */
3537 if ((POINTER_TYPE_P (lhs_type)
3538 && INTEGRAL_TYPE_P (rhs1_type))
3539 || (POINTER_TYPE_P (rhs1_type)
3540 && INTEGRAL_TYPE_P (lhs_type)
3541 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3542 || ptrofftype_p (sizetype))))
3545 /* Allow conversion from integral to offset type and vice versa. */
3546 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3547 && INTEGRAL_TYPE_P (rhs1_type))
3548 || (INTEGRAL_TYPE_P (lhs_type)
3549 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3552 /* Otherwise assert we are converting between types of the same kind. */
3554 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3556 error ("invalid types in nop conversion");
3557 debug_generic_expr (lhs_type);
3558 debug_generic_expr (rhs1_type);
3565 case ADDR_SPACE_CONVERT_EXPR:
3567 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3568 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3569 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3571 error ("invalid types in address space conversion");
3572 debug_generic_expr (lhs_type);
3573 debug_generic_expr (rhs1_type);
3580 case FIXED_CONVERT_EXPR:
3582 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3583 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3585 error ("invalid types in fixed-point conversion");
3586 debug_generic_expr (lhs_type);
3587 debug_generic_expr (rhs1_type);
3596 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3597 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3598 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3600 error ("invalid types in conversion to floating point");
3601 debug_generic_expr (lhs_type);
3602 debug_generic_expr (rhs1_type);
3609 case FIX_TRUNC_EXPR:
3611 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3612 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3613 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3615 error ("invalid types in conversion to integer");
3616 debug_generic_expr (lhs_type);
3617 debug_generic_expr (rhs1_type);
3623 case REDUC_MAX_EXPR:
3624 case REDUC_MIN_EXPR:
3625 case REDUC_PLUS_EXPR:
3626 if (!VECTOR_TYPE_P (rhs1_type)
3627 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3629 error ("reduction should convert from vector to element type");
3630 debug_generic_expr (lhs_type);
3631 debug_generic_expr (rhs1_type);
3636 case VEC_UNPACK_HI_EXPR:
3637 case VEC_UNPACK_LO_EXPR:
3638 case VEC_UNPACK_FLOAT_HI_EXPR:
3639 case VEC_UNPACK_FLOAT_LO_EXPR:
3654 /* For the remaining codes assert there is no conversion involved. */
3655 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3657 error ("non-trivial conversion in unary operation");
3658 debug_generic_expr (lhs_type);
3659 debug_generic_expr (rhs1_type);
3666 /* Verify a gimple assignment statement STMT with a binary rhs.
3667 Returns true if anything is wrong. */
3670 verify_gimple_assign_binary (gassign *stmt)
3672 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3673 tree lhs = gimple_assign_lhs (stmt);
3674 tree lhs_type = TREE_TYPE (lhs);
3675 tree rhs1 = gimple_assign_rhs1 (stmt);
3676 tree rhs1_type = TREE_TYPE (rhs1);
3677 tree rhs2 = gimple_assign_rhs2 (stmt);
3678 tree rhs2_type = TREE_TYPE (rhs2);
3680 if (!is_gimple_reg (lhs))
3682 error ("non-register as LHS of binary operation");
3686 if (!is_gimple_val (rhs1)
3687 || !is_gimple_val (rhs2))
3689 error ("invalid operands in binary operation");
3693 /* First handle operations that involve different types. */
3698 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3699 || !(INTEGRAL_TYPE_P (rhs1_type)
3700 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3701 || !(INTEGRAL_TYPE_P (rhs2_type)
3702 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3704 error ("type mismatch in complex expression");
3705 debug_generic_expr (lhs_type);
3706 debug_generic_expr (rhs1_type);
3707 debug_generic_expr (rhs2_type);
3719 /* Shifts and rotates are ok on integral types, fixed-point
3720 types and integer vector types. */
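/* E.g. "v4si << 3" (an integer vector shifted by a scalar count) and
   "v4si << v4si" (a vector shifted element-wise by a vector) are both
   accepted; shifting a floating-point operand fails the checks
   below.  */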
3721 if ((!INTEGRAL_TYPE_P (rhs1_type)
3722 && !FIXED_POINT_TYPE_P (rhs1_type)
3723 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3724 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3725 || (!INTEGRAL_TYPE_P (rhs2_type)
3726 /* Vector shifts of vectors are also ok. */
3727 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3728 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3729 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3730 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3731 || !useless_type_conversion_p (lhs_type, rhs1_type))
3733 error ("type mismatch in shift expression");
3734 debug_generic_expr (lhs_type);
3735 debug_generic_expr (rhs1_type);
3736 debug_generic_expr (rhs2_type);
3743 case WIDEN_LSHIFT_EXPR:
3745 if (!INTEGRAL_TYPE_P (lhs_type)
3746 || !INTEGRAL_TYPE_P (rhs1_type)
3747 || TREE_CODE (rhs2) != INTEGER_CST
3748 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3750 error ("type mismatch in widening vector shift expression");
3751 debug_generic_expr (lhs_type);
3752 debug_generic_expr (rhs1_type);
3753 debug_generic_expr (rhs2_type);
3760 case VEC_WIDEN_LSHIFT_HI_EXPR:
3761 case VEC_WIDEN_LSHIFT_LO_EXPR:
3763 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3764 || TREE_CODE (lhs_type) != VECTOR_TYPE
3765 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3766 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3767 || TREE_CODE (rhs2) != INTEGER_CST
3768 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
3769 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
3771 error ("type mismatch in widening vector shift expression");
3772 debug_generic_expr (lhs_type);
3773 debug_generic_expr (rhs1_type);
3774 debug_generic_expr (rhs2_type);
3784 tree lhs_etype = lhs_type;
3785 tree rhs1_etype = rhs1_type;
3786 tree rhs2_etype = rhs2_type;
3787 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
3789 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3790 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
3792 error ("invalid non-vector operands to vector valued plus");
3795 lhs_etype = TREE_TYPE (lhs_type);
3796 rhs1_etype = TREE_TYPE (rhs1_type);
3797 rhs2_etype = TREE_TYPE (rhs2_type);
3799 if (POINTER_TYPE_P (lhs_etype)
3800 || POINTER_TYPE_P (rhs1_etype)
3801 || POINTER_TYPE_P (rhs2_etype))
3803 error ("invalid (pointer) operands to plus/minus");
3807 /* Continue with generic binary expression handling. */
3811 case POINTER_PLUS_EXPR:
3813 if (!POINTER_TYPE_P (rhs1_type)
3814 || !useless_type_conversion_p (lhs_type, rhs1_type)
3815 || !ptrofftype_p (rhs2_type))
3817 error ("type mismatch in pointer plus expression");
3818 debug_generic_stmt (lhs_type);
3819 debug_generic_stmt (rhs1_type);
3820 debug_generic_stmt (rhs2_type);
3827 case TRUTH_ANDIF_EXPR:
3828 case TRUTH_ORIF_EXPR:
3829 case TRUTH_AND_EXPR:
3831 case TRUTH_XOR_EXPR:
3841 case UNORDERED_EXPR:
3849 /* Comparisons are also binary, but the result type is not
3850 connected to the operand types. */
3851 return verify_gimple_comparison (lhs_type, rhs1, rhs2);
3853 case WIDEN_MULT_EXPR:
3854 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
3856 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
3857 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
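/* A valid instance (a sketch): a WIDEN_MULT_EXPR multiplying two
   32-bit operands into a 64-bit result, as the widening-multiply
   optimization produces for "(long long) a * b" with 32-bit a and b;
   the operands must have equal precision, at most half that of the
   result.  */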
3859 case WIDEN_SUM_EXPR:
3860 case VEC_WIDEN_MULT_HI_EXPR:
3861 case VEC_WIDEN_MULT_LO_EXPR:
3862 case VEC_WIDEN_MULT_EVEN_EXPR:
3863 case VEC_WIDEN_MULT_ODD_EXPR:
3864 case VEC_PACK_TRUNC_EXPR:
3865 case VEC_PACK_SAT_EXPR:
3866 case VEC_PACK_FIX_TRUNC_EXPR:
3871 case MULT_HIGHPART_EXPR:
3872 case TRUNC_DIV_EXPR:
3874 case FLOOR_DIV_EXPR:
3875 case ROUND_DIV_EXPR:
3876 case TRUNC_MOD_EXPR:
3878 case FLOOR_MOD_EXPR:
3879 case ROUND_MOD_EXPR:
3881 case EXACT_DIV_EXPR:
3887 /* Continue with generic binary expression handling. */
3894 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3895 || !useless_type_conversion_p (lhs_type, rhs2_type))
3897 error ("type mismatch in binary expression");
3898 debug_generic_stmt (lhs_type);
3899 debug_generic_stmt (rhs1_type);
3900 debug_generic_stmt (rhs2_type);
3907 /* Verify a gimple assignment statement STMT with a ternary rhs.
3908 Returns true if anything is wrong. */
3911 verify_gimple_assign_ternary (gassign *stmt)
3913 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3914 tree lhs = gimple_assign_lhs (stmt);
3915 tree lhs_type = TREE_TYPE (lhs);
3916 tree rhs1 = gimple_assign_rhs1 (stmt);
3917 tree rhs1_type = TREE_TYPE (rhs1);
3918 tree rhs2 = gimple_assign_rhs2 (stmt);
3919 tree rhs2_type = TREE_TYPE (rhs2);
3920 tree rhs3 = gimple_assign_rhs3 (stmt);
3921 tree rhs3_type = TREE_TYPE (rhs3);
3923 if (!is_gimple_reg (lhs))
3925 error ("non-register as LHS of ternary operation");
3929 if (((rhs_code == VEC_COND_EXPR || rhs_code == COND_EXPR)
3930 ? !is_gimple_condexpr (rhs1) : !is_gimple_val (rhs1))
3931 || !is_gimple_val (rhs2)
3932 || !is_gimple_val (rhs3))
3934 error ("invalid operands in ternary operation");
3938 /* First handle operations that involve different types. */
3941 case WIDEN_MULT_PLUS_EXPR:
3942 case WIDEN_MULT_MINUS_EXPR:
3943 if ((!INTEGRAL_TYPE_P (rhs1_type)
3944 && !FIXED_POINT_TYPE_P (rhs1_type))
3945 || !useless_type_conversion_p (rhs1_type, rhs2_type)
3946 || !useless_type_conversion_p (lhs_type, rhs3_type)
3947 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
3948 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
3950 error ("type mismatch in widening multiply-accumulate expression");
3951 debug_generic_expr (lhs_type);
3952 debug_generic_expr (rhs1_type);
3953 debug_generic_expr (rhs2_type);
3954 debug_generic_expr (rhs3_type);
3960 if (!useless_type_conversion_p (lhs_type, rhs1_type)
3961 || !useless_type_conversion_p (lhs_type, rhs2_type)
3962 || !useless_type_conversion_p (lhs_type, rhs3_type))
3964 error ("type mismatch in fused multiply-add expression");
3965 debug_generic_expr (lhs_type);
3966 debug_generic_expr (rhs1_type);
3967 debug_generic_expr (rhs2_type);
3968 debug_generic_expr (rhs3_type);
3974 if (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3975 || TYPE_SIGN (rhs1_type) != SIGNED
3976 || TYPE_SIZE (rhs1_type) != TYPE_SIZE (lhs_type)
3977 || TYPE_VECTOR_SUBPARTS (rhs1_type)
3978 != TYPE_VECTOR_SUBPARTS (lhs_type))
3980 error ("the first argument of a VEC_COND_EXPR must be of a signed "
3981 "integral vector type of the same size and number of "
3982 "elements as the result");
3983 debug_generic_expr (lhs_type);
3984 debug_generic_expr (rhs1_type);
3989 if (!useless_type_conversion_p (lhs_type, rhs2_type)
3990 || !useless_type_conversion_p (lhs_type, rhs3_type))
3992 error ("type mismatch in conditional expression");
3993 debug_generic_expr (lhs_type);
3994 debug_generic_expr (rhs2_type);
3995 debug_generic_expr (rhs3_type);
4001 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4002 || !useless_type_conversion_p (lhs_type, rhs2_type))
4004 error ("type mismatch in vector permute expression");
4005 debug_generic_expr (lhs_type);
4006 debug_generic_expr (rhs1_type);
4007 debug_generic_expr (rhs2_type);
4008 debug_generic_expr (rhs3_type);
4012 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4013 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4014 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4016 error ("vector types expected in vector permute expression");
4017 debug_generic_expr (lhs_type);
4018 debug_generic_expr (rhs1_type);
4019 debug_generic_expr (rhs2_type);
4020 debug_generic_expr (rhs3_type);
4024 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type)
4025 || TYPE_VECTOR_SUBPARTS (rhs2_type)
4026 != TYPE_VECTOR_SUBPARTS (rhs3_type)
4027 || TYPE_VECTOR_SUBPARTS (rhs3_type)
4028 != TYPE_VECTOR_SUBPARTS (lhs_type))
4030 error ("vectors with different element number found "
4031 "in vector permute expression");
4032 debug_generic_expr (lhs_type);
4033 debug_generic_expr (rhs1_type);
4034 debug_generic_expr (rhs2_type);
4035 debug_generic_expr (rhs3_type);
4039 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4040 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs3_type)))
4041 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type))))
4043 error ("invalid mask type in vector permute expression");
4044 debug_generic_expr (lhs_type);
4045 debug_generic_expr (rhs1_type);
4046 debug_generic_expr (rhs2_type);
4047 debug_generic_expr (rhs3_type);
4054 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4055 || !useless_type_conversion_p (lhs_type, rhs3_type)
4056 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4057 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4059 error ("type mismatch in sad expression");
4060 debug_generic_expr (lhs_type);
4061 debug_generic_expr (rhs1_type);
4062 debug_generic_expr (rhs2_type);
4063 debug_generic_expr (rhs3_type);
4067 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4068 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4069 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4071 error ("vector types expected in sad expression");
4072 debug_generic_expr (lhs_type);
4073 debug_generic_expr (rhs1_type);
4074 debug_generic_expr (rhs2_type);
4075 debug_generic_expr (rhs3_type);
4082 case REALIGN_LOAD_EXPR:
4092 /* Verify a gimple assignment statement STMT with a single rhs.
4093 Returns true if anything is wrong. */
4096 verify_gimple_assign_single (gassign *stmt)
4098 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4099 tree lhs = gimple_assign_lhs (stmt);
4100 tree lhs_type = TREE_TYPE (lhs);
4101 tree rhs1 = gimple_assign_rhs1 (stmt);
4102 tree rhs1_type = TREE_TYPE (rhs1);
4105 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4107 error ("non-trivial conversion at assignment");
4108 debug_generic_expr (lhs_type);
4109 debug_generic_expr (rhs1_type);
4113 if (gimple_clobber_p (stmt)
4114 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4116 error ("non-decl/MEM_REF LHS in clobber statement");
4117 debug_generic_expr (lhs);
4121 if (handled_component_p (lhs)
4122 || TREE_CODE (lhs) == MEM_REF
4123 || TREE_CODE (lhs) == TARGET_MEM_REF)
4124 res |= verify_types_in_gimple_reference (lhs, true);
4126 /* Special codes we cannot handle via their class. */
4131 tree op = TREE_OPERAND (rhs1, 0);
4132 if (!is_gimple_addressable (op))
4134 error ("invalid operand in unary expression");
4138 /* Technically there is no longer a need for matching types, but
4139 gimple hygiene asks for this check. In LTO we can end up
4140 combining incompatible units and thus end up with addresses
4141 of globals that change their type to a common one. */
4143 && !types_compatible_p (TREE_TYPE (op),
4144 TREE_TYPE (TREE_TYPE (rhs1)))
4145 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4148 error ("type mismatch in address expression");
4149 debug_generic_stmt (TREE_TYPE (rhs1));
4150 debug_generic_stmt (TREE_TYPE (op));
4154 return verify_types_in_gimple_reference (op, true);
4159 error ("INDIRECT_REF in gimple IL");
4165 case ARRAY_RANGE_REF:
4166 case VIEW_CONVERT_EXPR:
4169 case TARGET_MEM_REF:
4171 if (!is_gimple_reg (lhs)
4172 && is_gimple_reg_type (TREE_TYPE (lhs)))
4174 error ("invalid rhs for gimple memory store");
4175 debug_generic_stmt (lhs);
4176 debug_generic_stmt (rhs1);
4179 return res || verify_types_in_gimple_reference (rhs1, false);
4191 /* tcc_declaration */
4196 if (!is_gimple_reg (lhs)
4197 && !is_gimple_reg (rhs1)
4198 && is_gimple_reg_type (TREE_TYPE (lhs)))
4200 error ("invalid rhs for gimple memory store");
4201 debug_generic_stmt (lhs);
4202 debug_generic_stmt (rhs1);
4208 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4211 tree elt_i, elt_v, elt_t = NULL_TREE;
4213 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4215 /* For vector CONSTRUCTORs we require that either it is an empty
4216 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4217 (then the element count must be correct to cover the whole
4218 outer vector and the index must be NULL on all elements), or it is
4219 a CONSTRUCTOR of scalar elements, where we as an exception allow
4220 a smaller number of elements (assuming zero filling) and
4221 consecutive indexes as compared to NULL indexes (such
4222 CONSTRUCTORs can appear in the IL from FEs). */
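/* E.g. with a four-lane integer vector LHS this accepts {} (a zero
   vector), { v2si_1, v2si_2 } (two NULL-indexed two-lane subvectors
   covering all four lanes) and { a_1, b_2 } (scalar elements, the
   trailing lanes implicitly zero).  */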
4223 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4225 if (elt_t == NULL_TREE)
4227 elt_t = TREE_TYPE (elt_v);
4228 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4230 tree elt_t = TREE_TYPE (elt_v);
4231 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4234 error ("incorrect type of vector CONSTRUCTOR"
4236 debug_generic_stmt (rhs1);
4239 else if (CONSTRUCTOR_NELTS (rhs1)
4240 * TYPE_VECTOR_SUBPARTS (elt_t)
4241 != TYPE_VECTOR_SUBPARTS (rhs1_type))
4243 error ("incorrect number of vector CONSTRUCTOR"
4245 debug_generic_stmt (rhs1);
4249 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4252 error ("incorrect type of vector CONSTRUCTOR elements");
4253 debug_generic_stmt (rhs1);
4256 else if (CONSTRUCTOR_NELTS (rhs1)
4257 > TYPE_VECTOR_SUBPARTS (rhs1_type))
4259 error ("incorrect number of vector CONSTRUCTOR elements");
4260 debug_generic_stmt (rhs1);
4264 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4266 error ("incorrect type of vector CONSTRUCTOR elements");
4267 debug_generic_stmt (rhs1);
4270 if (elt_i != NULL_TREE
4271 && (TREE_CODE (elt_t) == VECTOR_TYPE
4272 || TREE_CODE (elt_i) != INTEGER_CST
4273 || compare_tree_int (elt_i, i) != 0))
4275 error ("vector CONSTRUCTOR with non-NULL element index");
4276 debug_generic_stmt (rhs1);
4279 if (!is_gimple_val (elt_v))
4281 error ("vector CONSTRUCTOR element is not a GIMPLE value");
4282 debug_generic_stmt (rhs1);
4287 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4289 error ("non-vector CONSTRUCTOR with elements");
4290 debug_generic_stmt (rhs1);
4296 case WITH_SIZE_EXPR:
4306 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4307 is a problem, otherwise false. */
4310 verify_gimple_assign (gassign *stmt)
4312 switch (gimple_assign_rhs_class (stmt))
4314 case GIMPLE_SINGLE_RHS:
4315 return verify_gimple_assign_single (stmt);
4317 case GIMPLE_UNARY_RHS:
4318 return verify_gimple_assign_unary (stmt);
4320 case GIMPLE_BINARY_RHS:
4321 return verify_gimple_assign_binary (stmt);
4323 case GIMPLE_TERNARY_RHS:
4324 return verify_gimple_assign_ternary (stmt);
4331 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4332 is a problem, otherwise false. */
4335 verify_gimple_return (greturn *stmt)
4337 tree op = gimple_return_retval (stmt);
4338 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4340 /* We cannot test for present return values as we do not fix up missing
4341 return values from the original source. */
4345 if (!is_gimple_val (op)
4346 && TREE_CODE (op) != RESULT_DECL)
4348 error ("invalid operand in return statement");
4349 debug_generic_stmt (op);
4353 if ((TREE_CODE (op) == RESULT_DECL
4354 && DECL_BY_REFERENCE (op))
4355 || (TREE_CODE (op) == SSA_NAME
4356 && SSA_NAME_VAR (op)
4357 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4358 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4359 op = TREE_TYPE (op);
4361 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4363 error ("invalid conversion in return statement");
4364 debug_generic_stmt (restype);
4365 debug_generic_stmt (TREE_TYPE (op));
4373 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4374 is a problem, otherwise false. */
4377 verify_gimple_goto (ggoto *stmt)
4379 tree dest = gimple_goto_dest (stmt);
4381 /* ??? We have two canonical forms of direct goto destinations, a
4382 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4383 if (TREE_CODE (dest) != LABEL_DECL
4384 && (!is_gimple_val (dest)
4385 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4387 error ("goto destination is neither a label nor a pointer");
4394 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4395 is a problem, otherwise false. */
4398 verify_gimple_switch (gswitch *stmt)
4401 tree elt, prev_upper_bound = NULL_TREE;
4402 tree index_type, elt_type = NULL_TREE;
4404 if (!is_gimple_val (gimple_switch_index (stmt)))
4406 error ("invalid operand to switch statement");
4407 debug_generic_stmt (gimple_switch_index (stmt));
4411 index_type = TREE_TYPE (gimple_switch_index (stmt));
4412 if (! INTEGRAL_TYPE_P (index_type))
4414 error ("non-integral type switch statement");
4415 debug_generic_expr (index_type);
4419 elt = gimple_switch_label (stmt, 0);
4420 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE)
4422 error ("invalid default case label in switch statement");
4423 debug_generic_expr (elt);
4427 n = gimple_switch_num_labels (stmt);
4428 for (i = 1; i < n; i++)
4430 elt = gimple_switch_label (stmt, i);
4432 if (! CASE_LOW (elt))
4434 error ("invalid case label in switch statement");
4435 debug_generic_expr (elt);
4439 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4441 error ("invalid case range in switch statement");
4442 debug_generic_expr (elt);
4448 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4449 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4451 error ("type mismatch for case label in switch statement");
4452 debug_generic_expr (elt);
4458 elt_type = TREE_TYPE (CASE_LOW (elt));
4459 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4461 error ("type precision mismatch in switch statement");
4466 if (prev_upper_bound)
4468 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4470 error ("case labels not sorted in switch statement");
4475 prev_upper_bound = CASE_HIGH (elt);
4476 if (! prev_upper_bound)
4477 prev_upper_bound = CASE_LOW (elt);
4483 /* Verify a gimple debug statement STMT.
4484 Returns true if anything is wrong. */
4487 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
4489 /* There isn't much that could be wrong in a gimple debug stmt. A
4490 gimple debug bind stmt, for example, maps a tree (usually a
4491 VAR_DECL or a PARM_DECL, but possibly some scalarized component
4492 or member of an aggregate type) to another tree that can be an
4493 arbitrary expression. These stmts expand into debug insns, and
4494 are converted to debug notes by var-tracking.c. */
4498 /* Verify a gimple label statement STMT.
4499 Returns true if anything is wrong. */
4502 verify_gimple_label (glabel *stmt)
4504 tree decl = gimple_label_label (stmt);
4508 if (TREE_CODE (decl) != LABEL_DECL)
4510 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
4511 && DECL_CONTEXT (decl) != current_function_decl)
4513 error ("label's context is not the current function decl");
4517 uid = LABEL_DECL_UID (decl);
4520 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
4522 error ("incorrect entry in label_to_block_map");
4526 uid = EH_LANDING_PAD_NR (decl);
4529 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
4530 if (decl != lp->post_landing_pad)
4532 error ("incorrect setting of landing pad number");
4540 /* Verify a gimple cond statement STMT.
4541 Returns true if anything is wrong. */
4544 verify_gimple_cond (gcond *stmt)
4546 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
4548 error ("invalid comparison code in gimple cond");
4551 if (!(!gimple_cond_true_label (stmt)
4552 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
4553 || !(!gimple_cond_false_label (stmt)
4554 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
4556 error ("invalid labels in gimple cond");
4560 return verify_gimple_comparison (boolean_type_node,
4561 gimple_cond_lhs (stmt),
4562 gimple_cond_rhs (stmt));
4565 /* Verify the GIMPLE statement STMT. Returns true if there is an
4566 error, otherwise false. */
4569 verify_gimple_stmt (gimple *stmt)
4571 switch (gimple_code (stmt))
4574 return verify_gimple_assign (as_a <gassign *> (stmt));
4577 return verify_gimple_label (as_a <glabel *> (stmt));
4580 return verify_gimple_call (as_a <gcall *> (stmt));
4583 return verify_gimple_cond (as_a <gcond *> (stmt));
4586 return verify_gimple_goto (as_a <ggoto *> (stmt));
4589 return verify_gimple_switch (as_a <gswitch *> (stmt));
4592 return verify_gimple_return (as_a <greturn *> (stmt));
4597 case GIMPLE_TRANSACTION:
4598 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
4600 /* Tuples that do not have tree operands. */
4602 case GIMPLE_PREDICT:
4604 case GIMPLE_EH_DISPATCH:
4605 case GIMPLE_EH_MUST_NOT_THROW:
4609 /* OpenMP directives are validated by the FE and never operated
4610 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
4611 non-gimple expressions when the main index variable has had
4612 its address taken. This does not affect the loop itself
4613 because the header of a GIMPLE_OMP_FOR is merely used to determine
4614 how to set up the parallel iteration. */
4618 return verify_gimple_debug (stmt);
4625 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4626 and false otherwise. */
4629 verify_gimple_phi (gimple *phi)
4633 tree phi_result = gimple_phi_result (phi);
4638 error ("invalid PHI result");
4642 virtual_p = virtual_operand_p (phi_result);
4643 if (TREE_CODE (phi_result) != SSA_NAME
4645 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
4647 error ("invalid PHI result");
4651 for (i = 0; i < gimple_phi_num_args (phi); i++)
4653 tree t = gimple_phi_arg_def (phi, i);
4657 error ("missing PHI def");
4661 /* Addressable variables do have SSA_NAMEs but they
4662 are not considered gimple values. */
4663 else if ((TREE_CODE (t) == SSA_NAME
4664 && virtual_p != virtual_operand_p (t))
4666 && (TREE_CODE (t) != SSA_NAME
4667 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
4669 && !is_gimple_val (t)))
4671 error ("invalid PHI argument");
4672 debug_generic_expr (t);
4675 #ifdef ENABLE_TYPES_CHECKING
4676 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
4678 error ("incompatible types in PHI argument %u", i);
4679 debug_generic_stmt (TREE_TYPE (phi_result));
4680 debug_generic_stmt (TREE_TYPE (t));
4689 /* Verify the GIMPLE statements inside the sequence STMTS. */
4692 verify_gimple_in_seq_2 (gimple_seq stmts)
4694 gimple_stmt_iterator ittr;
4697 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
4699 gimple *stmt = gsi_stmt (ittr);
4701 switch (gimple_code (stmt))
4704 err |= verify_gimple_in_seq_2 (
4705 gimple_bind_body (as_a <gbind *> (stmt)));
4709 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
4710 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
4713 case GIMPLE_EH_FILTER:
4714 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
4717 case GIMPLE_EH_ELSE:
4719 geh_else *eh_else = as_a <geh_else *> (stmt);
4720 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
4721 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
4726 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
4727 as_a <gcatch *> (stmt)));
4730 case GIMPLE_TRANSACTION:
4731 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
4736 bool err2 = verify_gimple_stmt (stmt);
4738 debug_gimple_stmt (stmt);
4747 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
4748 is a problem, otherwise false. */
4751 verify_gimple_transaction (gtransaction *stmt)
4753 tree lab = gimple_transaction_label (stmt);
4754 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
4756 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
4760 /* Verify the GIMPLE statements inside the statement list STMTS. */
4763 verify_gimple_in_seq (gimple_seq stmts)
4765 timevar_push (TV_TREE_STMT_VERIFY);
4766 if (verify_gimple_in_seq_2 (stmts))
4767 internal_error ("verify_gimple failed");
4768 timevar_pop (TV_TREE_STMT_VERIFY);
4771 /* Return true when T can be shared. */
4774 tree_node_can_be_shared (tree t)
4776 if (IS_TYPE_OR_DECL_P (t)
4777 || is_gimple_min_invariant (t)
4778 || TREE_CODE (t) == SSA_NAME
4779 || t == error_mark_node
4780 || TREE_CODE (t) == IDENTIFIER_NODE)
4783 if (TREE_CODE (t) == CASE_LABEL_EXPR)
4792 /* Called via walk_tree. Verify tree sharing. */
4795 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
4797 hash_set<void *> *visited = (hash_set<void *> *) data;
4799 if (tree_node_can_be_shared (*tp))
4801 *walk_subtrees = false;
4805 if (visited->add (*tp))
4811 /* Called via walk_gimple_stmt. Verify tree sharing. */
4814 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
4816 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
4817 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
4820 static bool eh_error_found;
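/* Report a statement that is recorded in the EH throw table but was
   never visited when walking the IL, i.e. a dead entry in the
   table.  */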
4822 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
4823 hash_set<gimple *> *visited)
4825 if (!visited->contains (stmt))
4827 error ("dead STMT in EH table");
4828 debug_gimple_stmt (stmt);
4829 eh_error_found = true;
4834 /* Verify if the location LOC's block is in BLOCKS. */
4837 verify_location (hash_set<tree> *blocks, location_t loc)
4839 tree block = LOCATION_BLOCK (loc);
4840 if (block != NULL_TREE
4841 && !blocks->contains (block))
4843 error ("location references block not in block tree");
4846 if (block != NULL_TREE)
4847 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
4851 /* Called via walk_tree. Verify that expressions have no blocks. */
4854 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
4858 *walk_subtrees = false;
4862 location_t loc = EXPR_LOCATION (*tp);
4863 if (LOCATION_BLOCK (loc) != NULL)
/* Called via walk_tree.  Verify locations of expressions.  */

static tree
verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
{
  hash_set<tree> *blocks = (hash_set<tree> *) data;

  if (TREE_CODE (*tp) == VAR_DECL
      && DECL_HAS_DEBUG_EXPR_P (*tp))
    {
      tree t = DECL_DEBUG_EXPR (*tp);
      tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
      if (addr)
	return addr;
    }
  if ((TREE_CODE (*tp) == VAR_DECL
       || TREE_CODE (*tp) == PARM_DECL
       || TREE_CODE (*tp) == RESULT_DECL)
      && DECL_HAS_VALUE_EXPR_P (*tp))
    {
      tree t = DECL_VALUE_EXPR (*tp);
      tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL);
      if (addr)
	return addr;
    }

  if (!EXPR_P (*tp))
    {
      *walk_subtrees = false;
      return NULL;
    }

  location_t loc = EXPR_LOCATION (*tp);
  if (verify_location (blocks, loc))
    return *tp;

  return NULL;
}
/* Called via walk_gimple_op.  Verify locations of expressions.  */

static tree
verify_expr_location (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  return verify_expr_location_1 (tp, walk_subtrees, wi->info);
}
/* Insert all subblocks of BLOCK into BLOCKS and recurse.  */

static void
collect_subblocks (hash_set<tree> *blocks, tree block)
{
  tree t;
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    {
      blocks->add (t);
      collect_subblocks (blocks, t);
    }
}
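
/* Example (illustrative sketch, not part of GCC): collecting the BLOCK
   tree of the current function the same way verify_gimple_in_cfg does,
   then checking one location against it.  The function name is
   hypothetical.  */
#if 0
static bool
location_in_block_tree_p (location_t loc)
{
  hash_set<tree> blocks;
  if (DECL_INITIAL (cfun->decl))
    {
      blocks.add (DECL_INITIAL (cfun->decl));
      collect_subblocks (&blocks, DECL_INITIAL (cfun->decl));
    }
  /* verify_location returns true on a problem, so invert it.  */
  return !verify_location (&blocks, loc);
}
#endif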
/* Verify the GIMPLE statements in the CFG of FN.  */

DEBUG_FUNCTION void
verify_gimple_in_cfg (struct function *fn, bool verify_nothrow)
{
  basic_block bb;
  bool err = false;

  timevar_push (TV_TREE_STMT_VERIFY);
  hash_set<void *> visited;
  hash_set<gimple *> visited_stmts;

  /* Collect all BLOCKs referenced by the BLOCK tree of FN.  */
  hash_set<tree> blocks;
  if (DECL_INITIAL (fn->decl))
    {
      blocks.add (DECL_INITIAL (fn->decl));
      collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
    }

  FOR_EACH_BB_FN (bb, fn)
    {
      gimple_stmt_iterator gsi;

      for (gphi_iterator gpi = gsi_start_phis (bb);
	   !gsi_end_p (gpi);
	   gsi_next (&gpi))
	{
	  gphi *phi = gpi.phi ();
	  bool err2 = false;
	  unsigned i;

	  visited_stmts.add (phi);

	  if (gimple_bb (phi) != bb)
	    {
	      error ("gimple_bb (phi) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_phi (phi);

	  /* Only PHI arguments have locations.  */
	  if (gimple_location (phi) != UNKNOWN_LOCATION)
	    {
	      error ("PHI node with location");
	      err2 = true;
	    }

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree arg = gimple_phi_arg_def (phi, i);
	      tree addr = walk_tree (&arg, verify_node_sharing_1,
				     &visited, NULL);
	      if (addr)
		{
		  error ("incorrect sharing of tree nodes");
		  debug_generic_expr (addr);
		  err2 |= true;
		}
	      location_t loc = gimple_phi_arg_location (phi, i);
	      if (virtual_operand_p (gimple_phi_result (phi))
		  && loc != UNKNOWN_LOCATION)
		{
		  error ("virtual PHI with argument locations");
		  err2 = true;
		}
	      addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  err2 = true;
		}
	      err2 |= verify_location (&blocks, loc);
	    }

	  if (err2)
	    debug_gimple_stmt (phi);
	  err |= err2;
	}

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  bool err2 = false;
	  struct walk_stmt_info wi;
	  tree addr;
	  int lp_nr;

	  visited_stmts.add (stmt);

	  if (gimple_bb (stmt) != bb)
	    {
	      error ("gimple_bb (stmt) is set to a wrong basic block");
	      err2 = true;
	    }

	  err2 |= verify_gimple_stmt (stmt);
	  err2 |= verify_location (&blocks, gimple_location (stmt));

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &visited;
	  addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
	  if (addr)
	    {
	      error ("incorrect sharing of tree nodes");
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  memset (&wi, 0, sizeof (wi));
	  wi.info = (void *) &blocks;
	  addr = walk_gimple_op (stmt, verify_expr_location, &wi);
	  if (addr)
	    {
	      debug_generic_expr (addr);
	      err2 |= true;
	    }

	  /* ??? Instead of not checking these stmts at all the walker
	     should know its context via wi.  */
	  if (!is_gimple_debug (stmt)
	      && !is_gimple_omp (stmt))
	    {
	      memset (&wi, 0, sizeof (wi));
	      addr = walk_gimple_op (stmt, verify_expr, &wi);
	      if (addr)
		{
		  debug_generic_expr (addr);
		  inform (gimple_location (stmt), "in statement");
		  err2 |= true;
		}
	    }

	  /* If the statement is marked as part of an EH region, then it is
	     expected that the statement could throw.  Verify that when we
	     have optimizations that simplify statements such that we prove
	     that they cannot throw, that we update other data structures
	     to match.  */
	  lp_nr = lookup_stmt_eh_lp (stmt);
	  if (lp_nr > 0)
	    {
	      if (!stmt_could_throw_p (stmt))
		{
		  if (verify_nothrow)
		    {
		      error ("statement marked for throw, but doesn%'t");
		      err2 |= true;
		    }
		}
	      else if (!gsi_one_before_end_p (gsi))
		{
		  error ("statement marked for throw in middle of block");
		  err2 |= true;
		}
	    }

	  if (err2)
	    debug_gimple_stmt (stmt);
	  err |= err2;
	}
    }

  eh_error_found = false;
  hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
  if (eh_table)
    eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
      (&visited_stmts);

  if (err || eh_error_found)
    internal_error ("verify_gimple failed");

  verify_histograms ();
  timevar_pop (TV_TREE_STMT_VERIFY);
}
/* Verifies that the flow information is OK.  */

static int
gimple_verify_flow_info (void)
{
  int err = 0;
  basic_block bb;
  gimple_stmt_iterator gsi;
  gimple *stmt;
  edge e;
  edge_iterator ei;

  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("ENTRY_BLOCK has IL associated with it");
      err = 1;
    }

  if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
      || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
    {
      error ("EXIT_BLOCK has IL associated with it");
      err = 1;
    }

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	error ("fallthru to exit from bb %d", e->src->index);
	err = 1;
      }

  FOR_EACH_BB_FN (bb, cfun)
    {
      bool found_ctrl_stmt = false;

      stmt = NULL;

      /* Skip labels on the start of basic block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  tree label;
	  gimple *prev_stmt = stmt;

	  stmt = gsi_stmt (gsi);

	  if (gimple_code (stmt) != GIMPLE_LABEL)
	    break;

	  label = gimple_label_label (as_a <glabel *> (stmt));
	  if (prev_stmt && DECL_NONLOCAL (label))
	    {
	      error ("nonlocal label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
	    {
	      error ("EH landing pad label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " is not first in a sequence of labels in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (label_to_block (label) != bb)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " to block does not match in bb %d",
		       bb->index);
	      err = 1;
	    }

	  if (decl_function_context (label) != current_function_decl)
	    {
	      error ("label ");
	      print_generic_expr (stderr, label, 0);
	      fprintf (stderr, " has incorrect context in bb %d",
		       bb->index);
	      err = 1;
	    }
	}

      /* Verify that body of basic block BB is free of control flow.  */
      for (; !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);

	  if (found_ctrl_stmt)
	    {
	      error ("control flow in the middle of basic block %d",
		     bb->index);
	      err = 1;
	    }

	  if (stmt_ends_bb_p (stmt))
	    found_ctrl_stmt = true;

	  if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	    {
	      error ("label ");
	      print_generic_expr (stderr, gimple_label_label (label_stmt), 0);
	      fprintf (stderr, " in the middle of basic block %d", bb->index);
	      err = 1;
	    }
	}

      gsi = gsi_last_bb (bb);
      if (gsi_end_p (gsi))
	continue;

      stmt = gsi_stmt (gsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      err |= verify_eh_edges (stmt);

      if (is_ctrl_stmt (stmt))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & EDGE_FALLTHRU)
	      {
		error ("fallthru edge after a control statement in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      if (gimple_code (stmt) != GIMPLE_COND)
	{
	  /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
	     after anything else but if statement.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
	      {
		error ("true/false edge after a non-GIMPLE_COND in bb %d",
		       bb->index);
		err = 1;
	      }
	}

      switch (gimple_code (stmt))
	{
	case GIMPLE_COND:
	  {
	    edge true_edge;
	    edge false_edge;

	    extract_true_false_edges_from_block (bb, &true_edge, &false_edge);

	    if (!true_edge
		|| !false_edge
		|| !(true_edge->flags & EDGE_TRUE_VALUE)
		|| !(false_edge->flags & EDGE_FALSE_VALUE)
		|| (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
		|| EDGE_COUNT (bb->succs) >= 3)
	      {
		error ("wrong outgoing edge flags at end of bb %d",
		       bb->index);
		err = 1;
	      }
	  }
	  break;

	case GIMPLE_GOTO:
	  if (simple_goto_p (stmt))
	    {
	      error ("explicit goto at end of bb %d", bb->index);
	      err = 1;
	    }
	  else
	    {
	      /* FIXME.  We should double check that the labels in the
		 destination blocks have their address taken.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
				 | EDGE_FALSE_VALUE))
		    || !(e->flags & EDGE_ABNORMAL))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	    }
	  break;

	case GIMPLE_CALL:
	  if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
	    break;
	  /* ... fallthru ... */
	case GIMPLE_RETURN:
	  if (!single_succ_p (bb)
	      || (single_succ_edge (bb)->flags
		  & (EDGE_FALLTHRU | EDGE_ABNORMAL
		     | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
	    {
	      error ("wrong outgoing edge flags at end of bb %d", bb->index);
	      err = 1;
	    }
	  if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    {
	      error ("return edge does not point to exit in bb %d",
		     bb->index);
	      err = 1;
	    }
	  break;

	case GIMPLE_SWITCH:
	  {
	    gswitch *switch_stmt = as_a <gswitch *> (stmt);
	    tree prev;
	    edge e;
	    size_t i, n;

	    n = gimple_switch_num_labels (switch_stmt);

	    /* Mark all the destination basic blocks.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
		basic_block label_bb = label_to_block (lab);
		gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
		label_bb->aux = (void *)1;
	      }

	    /* Verify that the case labels are sorted.  */
	    prev = gimple_switch_label (switch_stmt, 0);
	    for (i = 1; i < n; ++i)
	      {
		tree c = gimple_switch_label (switch_stmt, i);
		if (!CASE_LOW (c))
		  {
		    error ("found default case not at the start of "
			   "case vector");
		    err = 1;
		    continue;
		  }
		if (CASE_LOW (prev)
		    && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
		  {
		    error ("case labels not sorted: ");
		    print_generic_expr (stderr, prev, 0);
		    fprintf (stderr," is greater than ");
		    print_generic_expr (stderr, c, 0);
		    fprintf (stderr," but comes before it.\n");
		    err = 1;
		  }
		prev = c;
	      }
	    /* VRP will remove the default case if it can prove it will
	       never be executed.  So do not verify there always exists
	       a default case here.  */

	    FOR_EACH_EDGE (e, ei, bb->succs)
	      {
		if (!e->dest->aux)
		  {
		    error ("extra outgoing edge %d->%d",
			   bb->index, e->dest->index);
		    err = 1;
		  }

		e->dest->aux = (void *)2;
		if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
				 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
		  {
		    error ("wrong outgoing edge flags at end of bb %d",
			   bb->index);
		    err = 1;
		  }
	      }

	    /* Check that we have all of them.  */
	    for (i = 0; i < n; ++i)
	      {
		tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
		basic_block label_bb = label_to_block (lab);

		if (label_bb->aux != (void *)2)
		  {
		    error ("missing edge %i->%i", bb->index, label_bb->index);
		    err = 1;
		  }
	      }

	    FOR_EACH_EDGE (e, ei, bb->succs)
	      e->dest->aux = (void *)0;
	  }
	  break;

	case GIMPLE_EH_DISPATCH:
	  err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
	  break;

	default:
	  break;
	}
    }

  if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
    verify_dominators (CDI_DOMINATORS);

  return err;
}
/* Updates phi nodes after creating a forwarder block joined
   by edge FALLTHRU.  */

static void
gimple_make_forwarder_block (edge fallthru)
{
  edge e;
  edge_iterator ei;
  basic_block dummy, bb;
  tree var;
  gphi_iterator gsi;

  dummy = fallthru->src;
  bb = fallthru->dest;

  if (single_pred_p (bb))
    return;

  /* If we redirected a branch we must create new PHI nodes at the
     start of BB.  */
  for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi, *new_phi;

      phi = gsi.phi ();
      var = gimple_phi_result (phi);
      new_phi = create_phi_node (var, bb);
      gimple_phi_set_result (phi, copy_ssa_name (var, phi));
      add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
		   UNKNOWN_LOCATION);
    }

  /* Add the arguments we have stored on edges.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e == fallthru)
	continue;

      flush_pending_stmts (e);
    }
}
/* Return a non-special label in the head of basic block BB.
   Create one if it doesn't exist.  */

tree
gimple_block_label (basic_block bb)
{
  gimple_stmt_iterator i, s = gsi_start_bb (bb);
  bool first = true;
  tree label;
  glabel *stmt;

  for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
    {
      stmt = dyn_cast <glabel *> (gsi_stmt (i));
      if (!stmt)
	break;
      label = gimple_label_label (stmt);
      if (!DECL_NONLOCAL (label))
	{
	  if (!first)
	    gsi_move_before (&i, &s);
	  return label;
	}
    }

  label = create_artificial_label (UNKNOWN_LOCATION);
  stmt = gimple_build_label (label);
  gsi_insert_before (&s, stmt, GSI_NEW_STMT);
  return label;
}
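
/* Example (illustrative sketch, not part of GCC): retargeting one case of
   a GIMPLE_SWITCH uses gimple_block_label to guarantee the destination
   block has a label to jump to.  SWITCH_STMT, I and NEW_DEST are
   hypothetical parameters.  */
#if 0
static void
retarget_switch_case (gswitch *switch_stmt, unsigned i, basic_block new_dest)
{
  tree elt = gimple_switch_label (switch_stmt, i);
  /* gimple_block_label creates an artificial label at the head of
     NEW_DEST if no usable label exists there yet.  */
  CASE_LABEL (elt) = gimple_block_label (new_dest);
}
#endif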
/* Attempt to perform edge redirection by replacing a possibly complex
   jump instruction by a goto or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to
   redirect_edge_and_branch.  */

static edge
gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
{
  basic_block src = e->src;
  gimple_stmt_iterator i;
  gimple *stmt;

  /* We can replace or remove a complex jump only when we have exactly
     two edges.  */
  if (EDGE_COUNT (src->succs) != 2
      /* Verify that all targets will be TARGET.  Specifically, the
	 edge that is not E must also go to TARGET.  */
      || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
    return NULL;

  i = gsi_last_bb (src);
  if (gsi_end_p (i))
    return NULL;

  stmt = gsi_stmt (i);

  if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      gsi_remove (&i, true);
      e = ssa_redirect_edge (e, target);
      e->flags = EDGE_FALLTHRU;
      return e;
    }

  return NULL;
}
/* Redirect E to DEST.  Return NULL on failure.  Otherwise, return the
   edge representing the redirected branch.  */

static edge
gimple_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block bb = e->src;
  gimple_stmt_iterator gsi;
  edge ret;
  gimple *stmt;

  if (e->flags & EDGE_ABNORMAL)
    return NULL;

  if (e->dest == dest)
    return NULL;

  if (e->flags & EDGE_EH)
    return redirect_eh_edge (e, dest);

  if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
    {
      ret = gimple_try_redirect_by_replacing_jump (e, dest);
      if (ret)
	return ret;
    }

  gsi = gsi_last_bb (bb);
  stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);

  switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
    {
    case GIMPLE_COND:
      /* For COND_EXPR, we only need to redirect the edge.  */
      break;

    case GIMPLE_GOTO:
      /* No non-abnormal edges should lead from a non-simple goto, and
	 simple ones should be represented implicitly.  */
      gcc_unreachable ();

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	tree label = gimple_block_label (dest);
	tree cases = get_cases_for_edge (e, switch_stmt);

	/* If we have a list of cases associated with E, then use it
	   as it's a lot faster than walking the entire case vector.  */
	if (cases)
	  {
	    edge e2 = find_edge (e->src, dest);
	    tree last, first;

	    first = cases;
	    while (cases)
	      {
		last = cases;
		CASE_LABEL (cases) = label;
		cases = CASE_CHAIN (cases);
	      }

	    /* If there was already an edge in the CFG, then we need
	       to move all the cases associated with E to E2.  */
	    if (e2)
	      {
		tree cases2 = get_cases_for_edge (e2, switch_stmt);

		CASE_CHAIN (last) = CASE_CHAIN (cases2);
		CASE_CHAIN (cases2) = first;
	      }
	    bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
	  }
	else
	  {
	    size_t i, n = gimple_switch_num_labels (switch_stmt);

	    for (i = 0; i < n; i++)
	      {
		tree elt = gimple_switch_label (switch_stmt, i);
		if (label_to_block (CASE_LABEL (elt)) == e->dest)
		  CASE_LABEL (elt) = label;
	      }
	  }
      }
      break;

    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	int i, n = gimple_asm_nlabels (asm_stmt);
	tree label = NULL;

	for (i = 0; i < n; ++i)
	  {
	    tree cons = gimple_asm_label_op (asm_stmt, i);
	    if (label_to_block (TREE_VALUE (cons)) == e->dest)
	      {
		if (!label)
		  label = gimple_block_label (dest);
		TREE_VALUE (cons) = label;
	      }
	  }

	/* If we didn't find any label matching the former edge in the
	   asm labels, we must be redirecting the fallthrough
	   edge.  */
	gcc_assert (label || (e->flags & EDGE_FALLTHRU));
      }
      break;

    case GIMPLE_RETURN:
      gsi_remove (&gsi, true);
      e->flags |= EDGE_FALLTHRU;
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_FOR:
      /* The edges from OMP constructs can be simply redirected.  */
      break;

    case GIMPLE_EH_DISPATCH:
      if (!(e->flags & EDGE_FALLTHRU))
	redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
      break;

    case GIMPLE_TRANSACTION:
      /* The ABORT edge has a stored label associated with it, otherwise
	 the edges are simply redirectable.  */
      if (e->flags == 0)
	gimple_transaction_set_label (as_a <gtransaction *> (stmt),
				      gimple_block_label (dest));
      break;

    default:
      /* Otherwise it must be a fallthru edge, and we don't need to
	 do anything besides redirecting it.  */
      gcc_assert (e->flags & EDGE_FALLTHRU);
      break;
    }

  /* Update/insert PHI nodes as necessary.  */

  /* Now update the edges in the CFG.  */
  e = ssa_redirect_edge (e, dest);

  return e;
}
/* Returns true if it is possible to remove edge E by redirecting
   it to the destination of the other edge from E->src.  */

static bool
gimple_can_remove_branch_p (const_edge e)
{
  if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
    return false;

  return true;
}

/* Simple wrapper, as we can always redirect fallthru edges.  */

static basic_block
gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
{
  e = gimple_redirect_edge_and_branch (e, dest);
  gcc_assert (e);

  return NULL;
}
/* Splits basic block BB after statement STMT (but at least after the
   labels).  If STMT is NULL, BB is split just after the labels.  */

static basic_block
gimple_split_block (basic_block bb, void *stmt)
{
  gimple_stmt_iterator gsi;
  gimple_stmt_iterator gsi_tgt;
  gimple_seq list;
  basic_block new_bb;
  edge e;
  edge_iterator ei;

  new_bb = create_empty_bb (bb);

  /* Redirect the outgoing edges.  */
  new_bb->succs = bb->succs;
  bb->succs = NULL;
  FOR_EACH_EDGE (e, ei, new_bb->succs)
    e->src = new_bb;

  /* Get a stmt iterator pointing to the first stmt to move.  */
  if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
    gsi = gsi_after_labels (bb);
  else
    {
      gsi = gsi_for_stmt ((gimple *) stmt);
      gsi_next (&gsi);
    }

  /* Move everything from GSI to the new basic block.  */
  if (gsi_end_p (gsi))
    return new_bb;

  /* Split the statement list - avoid re-creating new containers as this
     brings ugly quadratic memory consumption in the inliner.
     (We are still quadratic since we need to update stmt BB pointers,
     sadly.)  */
  gsi_split_seq_before (&gsi, &list);
  set_bb_seq (new_bb, list);
  for (gsi_tgt = gsi_start (list);
       !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
    gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);

  return new_bb;
}
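
/* Example (illustrative sketch, not part of GCC): callers normally do not
   invoke this hook directly; they go through the IR-independent
   split_block from cfghooks, which dispatches here for GIMPLE.  STMT is
   a hypothetical statement in BB after which the split should happen.  */
#if 0
static basic_block
split_after_stmt (basic_block bb, gimple *stmt)
{
  /* split_block returns the single edge from BB to the new block.  */
  edge e = split_block (bb, stmt);
  return e->dest;
}
#endif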
/* Moves basic block BB after block AFTER.  */

static void
gimple_move_block_after (basic_block bb, basic_block after)
{
  if (bb->prev_bb == after)
    return;

  unlink_block (bb);
  link_block (bb, after);
}
/* Return TRUE if block BB has no executable statements, otherwise return
   FALSE.  */

static bool
gimple_empty_block_p (basic_block bb)
{
  /* BB must have no executable statements.  */
  gimple_stmt_iterator gsi = gsi_after_labels (bb);
  if (phi_nodes (bb))
    return false;
  if (gsi_end_p (gsi))
    return true;
  if (is_gimple_debug (gsi_stmt (gsi)))
    gsi_next_nondebug (&gsi);
  return gsi_end_p (gsi);
}
/* Split a basic block if it ends with a conditional branch and if the
   other part of the block is not empty.  */

static basic_block
gimple_split_block_before_cond_jump (basic_block bb)
{
  gimple *last, *split_point;
  gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
  if (gsi_end_p (gsi))
    return NULL;
  last = gsi_stmt (gsi);
  if (gimple_code (last) != GIMPLE_COND
      && gimple_code (last) != GIMPLE_SWITCH)
    return NULL;
  gsi_prev_nondebug (&gsi);
  split_point = gsi_stmt (gsi);
  return split_block (bb, split_point)->dest;
}

/* Return true if basic_block can be duplicated.  */

static bool
gimple_can_duplicate_bb_p (const_basic_block bb ATTRIBUTE_UNUSED)
{
  return true;
}
/* Create a duplicate of the basic block BB.  NOTE: This does not
   preserve SSA form.  */

static basic_block
gimple_duplicate_bb (basic_block bb)
{
  basic_block new_bb;
  gimple_stmt_iterator gsi_tgt;

  new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);

  /* Copy the PHI nodes.  We ignore PHI node arguments here because
     the incoming edges have not been setup yet.  */
  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi);
       gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();
      gphi *copy = create_phi_node (NULL_TREE, new_bb);
      create_new_def_for (gimple_phi_result (phi), copy,
			  gimple_phi_result_ptr (copy));
      gimple_set_uid (copy, gimple_uid (phi));
    }

  gsi_tgt = gsi_start_bb (new_bb);
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      def_operand_p def_p;
      ssa_op_iter op_iter;
      tree lhs;
      gimple *stmt, *copy;

      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) == GIMPLE_LABEL)
	continue;

      /* Don't duplicate label debug stmts.  */
      if (gimple_debug_bind_p (stmt)
	  && TREE_CODE (gimple_debug_bind_get_var (stmt))
	     == LABEL_DECL)
	continue;

      /* Create a new copy of STMT and duplicate STMT's virtual
	 operands.  */
      copy = gimple_copy (stmt);
      gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);

      maybe_duplicate_eh_stmt (copy, stmt);
      gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);

      /* When copying around a stmt writing into a local non-user
	 aggregate, make sure it won't share stack slot with other
	 vars.  */
      lhs = gimple_get_lhs (stmt);
      if (lhs && TREE_CODE (lhs) != SSA_NAME)
	{
	  tree base = get_base_address (lhs);
	  if (base
	      && (TREE_CODE (base) == VAR_DECL
		  || TREE_CODE (base) == RESULT_DECL)
	      && DECL_IGNORED_P (base)
	      && !TREE_STATIC (base)
	      && !DECL_EXTERNAL (base)
	      && (TREE_CODE (base) != VAR_DECL
		  || !DECL_HAS_VALUE_EXPR_P (base)))
	    DECL_NONSHAREABLE (base) = 1;
	}

      /* Create new names for all the definitions created by COPY and
	 add replacement mappings for each new name.  */
      FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
	create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
    }

  return new_bb;
}
/* Adds phi node arguments for edge E_COPY after basic block duplication.  */

static void
add_phi_args_after_copy_edge (edge e_copy)
{
  basic_block bb, bb_copy = e_copy->src, dest;
  edge e;
  edge_iterator ei;
  gphi *phi, *phi_copy;
  tree def;
  gphi_iterator psi, psi_copy;

  if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
    return;

  bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;

  if (e_copy->dest->flags & BB_DUPLICATED)
    dest = get_bb_original (e_copy->dest);
  else
    dest = e_copy->dest;

  e = find_edge (bb, dest);
  if (!e)
    {
      /* During loop unrolling the target of the latch edge is copied.
	 In this case we are not looking for edge to dest, but to
	 duplicated block whose original was dest.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->dest->flags & BB_DUPLICATED)
	      && get_bb_original (e->dest) == dest)
	    break;
	}

      gcc_assert (e != NULL);
    }

  for (psi = gsi_start_phis (e->dest),
       psi_copy = gsi_start_phis (e_copy->dest);
       !gsi_end_p (psi);
       gsi_next (&psi), gsi_next (&psi_copy))
    {
      phi = psi.phi ();
      phi_copy = psi_copy.phi ();
      def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      add_phi_arg (phi_copy, def, e_copy,
		   gimple_phi_arg_location_from_edge (phi, e));
    }
}
/* Basic block BB_COPY was created by code duplication.  Add phi node
   arguments for edges going out of BB_COPY.  The blocks that were
   duplicated have BB_DUPLICATED set.  */

void
add_phi_args_after_copy_bb (basic_block bb_copy)
{
  edge e_copy;
  edge_iterator ei;

  FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
    {
      add_phi_args_after_copy_edge (e_copy);
    }
}
/* Blocks in REGION_COPY array of length N_REGION were created by
   duplication of basic blocks.  Add phi node arguments for edges
   going from these blocks.  If E_COPY is not NULL, also add
   phi node arguments for its destination.  */

static void
add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
			 edge e_copy)
{
  unsigned i;

  for (i = 0; i < n_region; i++)
    region_copy[i]->flags |= BB_DUPLICATED;

  for (i = 0; i < n_region; i++)
    add_phi_args_after_copy_bb (region_copy[i]);
  if (e_copy)
    add_phi_args_after_copy_edge (e_copy);

  for (i = 0; i < n_region; i++)
    region_copy[i]->flags &= ~BB_DUPLICATED;
}
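
/* Example (illustrative sketch, not part of GCC): the usual pairing of
   duplicate_block from cfghooks with add_phi_args_after_copy_bb when
   copying a single block.  COPY_ONE_BLOCK, BB and E are hypothetical;
   E must be an edge that can be redirected to the copy for the PHI
   arguments to make sense.  */
#if 0
static basic_block
copy_one_block (basic_block bb, edge e)
{
  initialize_original_copy_tables ();
  /* duplicate_block wires the copy into the CFG and records the
     original/copy pair queried by get_bb_original/get_bb_copy.  */
  basic_block copy = duplicate_block (bb, e, bb);
  copy->flags |= BB_DUPLICATED;
  add_phi_args_after_copy_bb (copy);
  copy->flags &= ~BB_DUPLICATED;
  free_original_copy_tables ();
  return copy;
}
#endif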
/* Duplicates a REGION (set of N_REGION basic blocks) with just a single
   important exit edge EXIT.  By important we mean that no SSA name defined
   inside region is live over the other exit edges of the region.  All entry
   edges to the region must go to ENTRY->dest.  The edge ENTRY is redirected
   to the duplicate of the region.  Dominance and loop information is
   updated if UPDATE_DOMINANCE is true, but not the SSA web.  If
   UPDATE_DOMINANCE is false then we assume that the caller will update the
   dominance information after calling this function.  The new basic
   blocks are stored to REGION_COPY in the same order as they had in REGION,
   provided that REGION_COPY is not NULL.
   The function returns false if it is unable to copy the region,
   true otherwise.  */

bool
gimple_duplicate_sese_region (edge entry, edge exit,
			      basic_block *region, unsigned n_region,
			      basic_block *region_copy,
			      bool update_dominance)
{
  unsigned i;
  bool free_region_copy = false, copying_header = false;
  struct loop *loop = entry->dest->loop_father;
  edge exit_copy;
  vec<basic_block> doms;
  edge redirected;
  int total_freq = 0, entry_freq = 0;
  gcov_type total_count = 0, entry_count = 0;

  if (!can_copy_bbs_p (region, n_region))
    return false;

  /* Some sanity checking.  Note that we do not check for all possible
     misuses of the functions.  I.e. if you ask to copy something weird,
     it will work, but the state of structures probably will not be
     correct.  */
  for (i = 0; i < n_region; i++)
    {
      /* We do not handle subloops, i.e. all the blocks must belong to the
	 same loop.  */
      if (region[i]->loop_father != loop)
	return false;

      if (region[i] != entry->dest
	  && region[i] == loop->header)
	return false;
    }

  /* In case the function is used for loop header copying (which is the primary
     use), ensure that EXIT and its copy will be new latch and entry edges.  */
  if (loop->header == entry->dest)
    {
      copying_header = true;

      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
	return false;

      for (i = 0; i < n_region; i++)
	if (region[i] != exit->src
	    && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
	  return false;
    }

  initialize_original_copy_tables ();

  if (copying_header)
    set_loop_copy (loop, loop_outer (loop));
  else
    set_loop_copy (loop, loop);

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  /* Record blocks outside the region that are dominated by something
     inside.  */
  if (update_dominance)
    {
      doms.create (0);
      doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
    }

  if (entry->dest->count)
    {
      total_count = entry->dest->count;
      entry_count = entry->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (entry_count > total_count)
	entry_count = total_count;
    }
  else
    {
      total_freq = entry->dest->frequency;
      entry_freq = EDGE_FREQUENCY (entry);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      else if (entry_freq > total_freq)
	entry_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
	    split_edge_bb_loc (entry), update_dominance);
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - entry_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, entry_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
    }

  if (copying_header)
    {
      loop->header = exit->dest;
      loop->latch = exit->src;
    }

  /* Redirect the entry and add the phi node arguments.  */
  redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
  gcc_assert (redirected != NULL);
  flush_pending_stmts (entry);

  /* Concerning updating of dominators:  We must recount dominators
     for entry block and its copy.  Anything that is outside of the
     region, but was dominated by something inside needs recounting as
     well.  */
  if (update_dominance)
    {
      set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
      doms.safe_push (get_bb_original (entry->dest));
      iterate_fix_dominators (CDI_DOMINATORS, doms, false);
      doms.release ();
    }

  /* Add the other PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, NULL);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
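
/* Example (illustrative sketch, not part of GCC): loop header copying in
   the spirit of the ch pass boils down to a single call, where ENTRY is
   the preheader edge and EXIT the edge that will become the new latch.
   COPY_LOOP_HEADER_ONCE, BBS and NBBS are hypothetical.  */
#if 0
static bool
copy_loop_header_once (edge entry, edge exit,
		       basic_block *bbs, unsigned nbbs)
{
  if (!gimple_duplicate_sese_region (entry, exit, bbs, nbbs, NULL, true))
    return false;
  /* The SSA web is deliberately not updated above; the caller must
     trigger the update afterwards.  */
  update_ssa (TODO_update_ssa);
  return true;
}
#endif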
/* Checks if BB is part of the region defined by N_REGION BBS.  */

static bool
bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
{
  unsigned int n;

  for (n = 0; n < n_region; n++)
    {
      if (bb == bbs[n])
	return true;
    }
  return false;
}
/* Duplicates REGION consisting of N_REGION blocks.  The new blocks
   are stored to REGION_COPY in the same order in which they appear
   in REGION, if REGION_COPY is not NULL.  ENTRY is the entry to
   the region, EXIT an exit from it.  The condition guarding EXIT
   is moved to ENTRY.  Returns true if duplication succeeds, false
   otherwise.

   For example,

   some_code;
   if (cond)
     A;
   else
     B;

   is transformed to

   if (cond)
     {
       some_code;
       A;
     }
   else
     {
       some_code;
       B;
     }
*/

bool
gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNUSED,
			    basic_block *region ATTRIBUTE_UNUSED, unsigned n_region ATTRIBUTE_UNUSED,
			    basic_block *region_copy ATTRIBUTE_UNUSED)
{
  unsigned i;
  bool free_region_copy = false;
  struct loop *loop = exit->dest->loop_father;
  struct loop *orig_loop = entry->dest->loop_father;
  basic_block switch_bb, entry_bb, nentry_bb;
  vec<basic_block> doms;
  int total_freq = 0, exit_freq = 0;
  gcov_type total_count = 0, exit_count = 0;
  edge exits[2], nexits[2], e;
  gimple_stmt_iterator gsi;
  gimple *cond_stmt;
  edge sorig, snew;
  basic_block exit_bb;
  gphi_iterator psi;
  gphi *phi;
  tree def;
  struct loop *target, *aloop, *cloop;

  gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
  exits[0] = exit;
  exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);

  if (!can_copy_bbs_p (region, n_region))
    return false;

  initialize_original_copy_tables ();
  set_loop_copy (orig_loop, loop);

  target = loop;
  for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
    {
      if (bb_part_of_region_p (aloop->header, region, n_region))
	{
	  cloop = duplicate_loop (aloop, target);
	  duplicate_subloops (aloop, cloop);
	}
    }

  if (!region_copy)
    {
      region_copy = XNEWVEC (basic_block, n_region);
      free_region_copy = true;
    }

  gcc_assert (!need_ssa_update_p (cfun));

  /* Record blocks outside the region that are dominated by something
     inside.  */
  doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);

  if (exit->src->count)
    {
      total_count = exit->src->count;
      exit_count = exit->count;
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (exit_count > total_count)
	exit_count = total_count;
    }
  else
    {
      total_freq = exit->src->frequency;
      exit_freq = EDGE_FREQUENCY (exit);
      /* Fix up corner cases, to avoid division by zero or creation of negative
	 frequencies.  */
      if (total_freq == 0)
	total_freq = 1;
      if (exit_freq > total_freq)
	exit_freq = total_freq;
    }

  copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
	    split_edge_bb_loc (exit), true);
  if (total_count)
    {
      scale_bbs_frequencies_gcov_type (region, n_region,
				       total_count - exit_count,
				       total_count);
      scale_bbs_frequencies_gcov_type (region_copy, n_region, exit_count,
				       total_count);
    }
  else
    {
      scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
				 total_freq);
      scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
    }

  /* Create the switch block, and put the exit condition to it.  */
  entry_bb = entry->dest;
  nentry_bb = get_bb_copy (entry_bb);
  if (!last_stmt (entry->src)
      || !stmt_ends_bb_p (last_stmt (entry->src)))
    switch_bb = entry->src;
  else
    switch_bb = split_edge (entry);
  set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);

  gsi = gsi_last_bb (switch_bb);
  cond_stmt = last_stmt (exit->src);
  gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
  cond_stmt = gimple_copy (cond_stmt);

  gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);

  sorig = single_succ_edge (switch_bb);
  sorig->flags = exits[1]->flags;
  snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);

  /* Register the new edge from SWITCH_BB in loop exit lists.  */
  rescan_loop_exit (snew, true, false);

  /* Add the PHI node arguments.  */
  add_phi_args_after_copy (region_copy, n_region, snew);

  /* Get rid of now superfluous conditions and associated edges (and phi node
     arguments).  */
  exit_bb = exit->dest;

  e = redirect_edge_and_branch (exits[0], exits[1]->dest);
  PENDING_STMT (e) = NULL;

  /* The latch of ORIG_LOOP was copied, and so was the backedge
     to the original header.  We redirect this backedge to EXIT_BB.  */
  for (i = 0; i < n_region; i++)
    if (get_bb_original (region_copy[i]) == orig_loop->latch)
      {
	gcc_assert (single_succ_edge (region_copy[i]));
	e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
	PENDING_STMT (e) = NULL;
	for (psi = gsi_start_phis (exit_bb);
	     !gsi_end_p (psi);
	     gsi_next (&psi))
	  {
	    phi = psi.phi ();
	    def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
	    add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
	  }
      }
  e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
  PENDING_STMT (e) = NULL;

  /* Anything that is outside of the region, but was dominated by something
     inside needs to update dominance info.  */
  iterate_fix_dominators (CDI_DOMINATORS, doms, false);
  doms.release ();
  /* Update the SSA web.  */
  update_ssa (TODO_update_ssa);

  if (free_region_copy)
    free (region_copy);

  free_original_copy_tables ();
  return true;
}
/* Add all the blocks dominated by ENTRY to the array BBS_P.  Stop
   adding blocks when the dominator traversal reaches EXIT.  This
   function silently assumes that ENTRY strictly dominates EXIT.  */

static void
gather_blocks_in_sese_region (basic_block entry, basic_block exit,
			      vec<basic_block> *bbs_p)
{
  basic_block son;

  for (son = first_dom_son (CDI_DOMINATORS, entry);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    {
      bbs_p->safe_push (son);
      if (son != exit)
	gather_blocks_in_sese_region (son, exit, bbs_p);
    }
}
/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
   The duplicates are recorded in VARS_MAP.  */

static void
replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
			   tree to_context)
{
  tree t = *tp, new_t;
  struct function *f = DECL_STRUCT_FUNCTION (to_context);

  if (DECL_CONTEXT (t) == to_context)
    return;

  bool existed;
  tree &loc = vars_map->get_or_insert (t, &existed);

  if (!existed)
    {
      if (SSA_VAR_P (t))
	{
	  new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
	  add_local_decl (f, new_t);
	}
      else
	{
	  gcc_assert (TREE_CODE (t) == CONST_DECL);
	  new_t = copy_node (t);
	}
      DECL_CONTEXT (new_t) = to_context;

      loc = new_t;
    }
  else
    new_t = loc;

  *tp = new_t;
}
/* Creates an ssa name in TO_CONTEXT equivalent to NAME.
   VARS_MAP maps old ssa names and var_decls to the new ones.  */

static tree
replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
		  tree to_context)
{
  tree new_name;

  gcc_assert (!virtual_operand_p (name));

  tree *loc = vars_map->get (name);

  if (!loc)
    {
      tree decl = SSA_NAME_VAR (name);
      if (decl)
	{
	  gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
	  replace_by_duplicate_decl (&decl, vars_map, to_context);
	  new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				       decl, SSA_NAME_DEF_STMT (name));
	}
      else
	new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
				     name, SSA_NAME_DEF_STMT (name));

      /* Now that we've used the def stmt to define new_name, make sure it
	 doesn't define name anymore.  */
      SSA_NAME_DEF_STMT (name) = NULL;

      vars_map->put (name, new_name);
    }
  else
    new_name = *loc;

  return new_name;
}
/* Data used by move_block_to_fn and its statement/operand walkers.  */

struct move_stmt_d
{
  tree orig_block;
  tree new_block;
  tree from_context;
  tree to_context;
  hash_map<tree, tree> *vars_map;
  htab_t new_label_map;
  hash_map<void *, void *> *eh_map;
  bool remap_decls_p;
};
/* Helper for move_block_to_fn.  Set TREE_BLOCK in every expression
   contained in *TP whose block was ORIG_BLOCK, and change the
   DECL_CONTEXT of every local variable referenced in *TP.  */

static tree
move_stmt_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  tree t = *tp;

  if (EXPR_P (t))
    {
      tree block = TREE_BLOCK (t);
      if (block == p->orig_block
	  || (p->orig_block == NULL_TREE
	      && block != NULL_TREE))
	TREE_SET_BLOCK (t, p->new_block);
#ifdef ENABLE_CHECKING
      else if (block != NULL_TREE)
	{
	  while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
	    block = BLOCK_SUPERCONTEXT (block);
	  gcc_assert (block == p->orig_block);
	}
#endif
    }
  else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
    {
      if (TREE_CODE (t) == SSA_NAME)
	*tp = replace_ssa_name (t, p->vars_map, p->to_context);
      else if (TREE_CODE (t) == PARM_DECL
	       && gimple_in_ssa_p (cfun))
	*tp = *(p->vars_map->get (t));
      else if (TREE_CODE (t) == LABEL_DECL)
	{
	  if (p->new_label_map)
	    {
	      struct tree_map in, *out;
	      in.base.from = t;
	      out = (struct tree_map *)
		htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
	      if (out)
		*tp = t = out->to;
	    }

	  DECL_CONTEXT (t) = p->to_context;
	}
      else if (p->remap_decls_p)
	{
	  /* Replace T with its duplicate.  T should no longer appear in the
	     parent function, so this looks wasteful; however, it may appear
	     in referenced_vars, and more importantly, as virtual operands of
	     statements, and in alias lists of other variables.  It would be
	     quite difficult to expunge it from all those places.  ??? It might
	     suffice to do this for addressable variables.  */
	  if ((TREE_CODE (t) == VAR_DECL
	       && !is_global_var (t))
	      || TREE_CODE (t) == CONST_DECL)
	    replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
	}
      *walk_subtrees = 0;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL_TREE;
}
/* Helper for move_stmt_r.  Given an EH region number for the source
   function, map that to the duplicate EH region number in the dest.  */

static int
move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
{
  eh_region old_r, new_r;

  old_r = get_eh_region_from_number (old_nr);
  new_r = static_cast<eh_region> (*p->eh_map->get (old_r));

  return new_r->index;
}
/* Similar, but operate on INTEGER_CSTs.  */

static tree
move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
{
  int old_nr, new_nr;

  old_nr = tree_to_shwi (old_t_nr);
  new_nr = move_stmt_eh_region_nr (old_nr, p);

  return build_int_cst (integer_type_node, new_nr);
}
/* Like move_stmt_op, but for gimple statements.

   Helper for move_block_to_fn.  Set GIMPLE_BLOCK in every expression
   contained in the current statement in *GSI_P and change the
   DECL_CONTEXT of every local variable referenced in the current
   statement.  */

static tree
move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	     struct walk_stmt_info *wi)
{
  struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
  gimple *stmt = gsi_stmt (*gsi_p);
  tree block = gimple_block (stmt);

  if (block == p->orig_block
      || (p->orig_block == NULL_TREE
	  && block != NULL_TREE))
    gimple_set_block (stmt, p->new_block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      /* Remap the region numbers for __builtin_eh_{pointer,filter}.  */
      {
	tree r, fndecl = gimple_call_fndecl (stmt);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_COPY_VALUES:
	      r = gimple_call_arg (stmt, 1);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 1, r);
	      /* FALLTHRU */

	    case BUILT_IN_EH_POINTER:
	    case BUILT_IN_EH_FILTER:
	      r = gimple_call_arg (stmt, 0);
	      r = move_stmt_eh_region_tree_nr (r, p);
	      gimple_call_set_arg (stmt, 0, r);
	      break;

	    default:
	      break;
	    }
      }
      break;

    case GIMPLE_RESX:
      {
	gresx *resx_stmt = as_a <gresx *> (stmt);
	int r = gimple_resx_region (resx_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_resx_set_region (resx_stmt, r);
      }
      break;

    case GIMPLE_EH_DISPATCH:
      {
	geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
	int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
	r = move_stmt_eh_region_nr (r, p);
	gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
      }
      break;

    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_CONTINUE:
      break;
    default:
      if (is_gimple_omp (stmt))
	{
	  /* Do not remap variables inside OMP directives.  Variables
	     referenced in clauses and directive header belong to the
	     parent function and should not be moved into the child
	     function.  */
	  bool save_remap_decls_p = p->remap_decls_p;
	  p->remap_decls_p = false;
	  *handled_ops_p = true;

	  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
			       move_stmt_op, wi);

	  p->remap_decls_p = save_remap_decls_p;
	}
      break;
    }

  return NULL_TREE;
}
/* Move basic block BB from function CFUN to function DEST_FN.  The
   block is moved out of the original linked list and placed after
   block AFTER in the new list.  Also, the block is removed from the
   original array of blocks and placed in DEST_FN's array of blocks.
   If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs are
   updated to reflect the moved edges.

   The local variables are remapped to new instances, VARS_MAP is used
   to record the mapping.  */

static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
		  basic_block after, bool update_edge_count_p,
		  struct move_stmt_d *d)
{
  struct control_flow_graph *cfg;
  edge_iterator ei;
  edge e;
  gimple_stmt_iterator si;
  unsigned old_len, new_len;

  /* Remove BB from dominance structures.  */
  delete_from_dominance_info (CDI_DOMINATORS, bb);

  /* Move BB from its current loop to the copy in the new function.  */
  if (current_loops)
    {
      struct loop *new_loop = (struct loop *)bb->loop_father->aux;
      if (new_loop)
	bb->loop_father = new_loop;
    }

  /* Link BB to the new linked list.  */
  move_block_after (bb, after);

  /* Update the edge count in the corresponding flowgraphs.  */
  if (update_edge_count_p)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	cfun->cfg->x_n_edges--;
	dest_cfun->cfg->x_n_edges++;
      }

  /* Remove BB from the original basic block array.  */
  (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
  cfun->cfg->x_n_basic_blocks--;

  /* Grow DEST_CFUN's basic block array if needed.  */
  cfg = dest_cfun->cfg;
  cfg->x_n_basic_blocks++;
  if (bb->index >= cfg->x_last_basic_block)
    cfg->x_last_basic_block = bb->index + 1;

  old_len = vec_safe_length (cfg->x_basic_block_info);
  if ((unsigned) cfg->x_last_basic_block >= old_len)
    {
      new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
      vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
    }

  (*cfg->x_basic_block_info)[bb->index] = bb;

  /* Remap the variables in phi nodes.  */
  for (gphi_iterator psi = gsi_start_phis (bb);
       !gsi_end_p (psi); )
    {
      gphi *phi = psi.phi ();
      use_operand_p use;
      tree op = PHI_RESULT (phi);
      ssa_op_iter oi;
      unsigned i;

      if (virtual_operand_p (op))
	{
	  /* Remove the phi nodes for virtual operands (alias analysis will be
	     run for the new function, anyway).  */
	  remove_phi_node (&psi, true);
	  continue;
	}

      SET_PHI_RESULT (phi,
		      replace_ssa_name (op, d->vars_map, dest_cfun->decl));
      FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
	{
	  op = USE_FROM_PTR (use);
	  if (TREE_CODE (op) == SSA_NAME)
	    SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
	}

      for (i = 0; i < EDGE_COUNT (bb->preds); i++)
	{
	  location_t locus = gimple_phi_arg_location (phi, i);
	  tree block = LOCATION_BLOCK (locus);

	  if (locus == UNKNOWN_LOCATION)
	    continue;
	  if (d->orig_block == NULL_TREE || block == d->orig_block)
	    {
	      if (d->new_block == NULL_TREE)
		locus = LOCATION_LOCUS (locus);
	      else
		locus = COMBINE_LOCATION_DATA (line_table, locus,
					       d->new_block);
	      gimple_phi_arg_set_location (phi, i, locus);
	    }
	}

      gsi_next (&psi);
    }

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      struct walk_stmt_info wi;

      memset (&wi, 0, sizeof (wi));
      wi.info = d;
      walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);

      if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
	{
	  tree label = gimple_label_label (label_stmt);
	  int uid = LABEL_DECL_UID (label);

	  gcc_assert (uid > -1);

	  old_len = vec_safe_length (cfg->x_label_to_block_map);
	  if (old_len <= (unsigned) uid)
	    {
	      new_len = 3 * uid / 2 + 1;
	      vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
	    }

	  (*cfg->x_label_to_block_map)[uid] = bb;
	  (*cfun->cfg->x_label_to_block_map)[uid] = NULL;

	  gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);

	  if (uid >= dest_cfun->cfg->last_label_uid)
	    dest_cfun->cfg->last_label_uid = uid + 1;
	}

      maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
      remove_stmt_from_eh_lp_fn (cfun, stmt);

      gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
      gimple_remove_stmt_histograms (cfun, stmt);

      /* We cannot leave any operands allocated from the operand caches of
	 the current function.  */
      free_stmt_operands (cfun, stmt);
      push_cfun (dest_cfun);
      update_stmt (stmt);
      pop_cfun ();
    }

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (e->goto_locus != UNKNOWN_LOCATION)
      {
	tree block = LOCATION_BLOCK (e->goto_locus);
	if (d->orig_block == NULL_TREE
	    || block == d->orig_block)
	  e->goto_locus = d->new_block ?
	      COMBINE_LOCATION_DATA (line_table, e->goto_locus, d->new_block) :
	      LOCATION_LOCUS (e->goto_locus);
      }
}
/* Examine the statements in BB (which is in SRC_CFUN); find and return
   the outermost EH region.  Use REGION as the incoming base EH region.  */

static eh_region
find_outermost_region_in_block (struct function *src_cfun,
				basic_block bb, eh_region region)
{
  gimple_stmt_iterator si;

  for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
    {
      gimple *stmt = gsi_stmt (si);
      eh_region stmt_region;
      int lp_nr;

      lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
      stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
      if (stmt_region)
	{
	  if (region == NULL)
	    region = stmt_region;
	  else if (stmt_region != region)
	    {
	      region = eh_region_outermost (src_cfun, stmt_region, region);
	      gcc_assert (region != NULL);
	    }
	}
    }

  return region;
}
/* Callback passed to duplicate_eh_regions: map label DECL to a fresh
   artificial label and record the pair in the hash table DATA.  */

static tree
new_label_mapper (tree decl, void *data)
{
  htab_t hash = (htab_t) data;
  struct tree_map *m;
  void **slot;

  gcc_assert (TREE_CODE (decl) == LABEL_DECL);

  m = XNEW (struct tree_map);
  m->hash = DECL_UID (decl);
  m->base.from = decl;
  m->to = create_artificial_label (UNKNOWN_LOCATION);
  LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
  if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
    cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;

  slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
  gcc_assert (*slot == NULL);

  *slot = m;

  return m->to;
}
/* Tree walker to replace the decls used inside value expressions by
   duplicates.  */

static tree
replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
{
  struct replace_decls_d *rd = (struct replace_decls_d *)data;

  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
      break;
    default:
      break;
    }

  if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = false;

  return NULL;
}
/* Change DECL_CONTEXT of all BLOCK_VARS in block, including
   subblocks.  */

static void
replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
				  tree to_context)
{
  tree *tp, t;

  for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
    {
      t = *tp;
      if (TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != CONST_DECL)
	continue;
      replace_by_duplicate_decl (&t, vars_map, to_context);
      if (t != *tp)
	{
	  if (TREE_CODE (*tp) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (*tp))
	    {
	      tree x = DECL_VALUE_EXPR (*tp);
	      struct replace_decls_d rd = { vars_map, to_context };
	      walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
	      SET_DECL_VALUE_EXPR (t, x);
	      DECL_HAS_VALUE_EXPR_P (t) = 1;
	    }
	  DECL_CHAIN (t) = DECL_CHAIN (*tp);
	  *tp = t;
	}
    }

  for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
    replace_block_vars_by_duplicates (block, vars_map, to_context);
}
/* Fixup the loop arrays and numbers after moving LOOP and its subloops
   from FN1 to FN2.  */

static void
fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
			      struct loop *loop)
{
  /* Discard it from the old loop array.  */
  (*get_loops (fn1))[loop->num] = NULL;

  /* Place it in the new loop array, assigning it a new number.  */
  loop->num = number_of_loops (fn2);
  vec_safe_push (loops_for_fn (fn2)->larray, loop);

  /* Recurse to children.  */
  for (loop = loop->inner; loop; loop = loop->next)
    fixup_loop_arrays_after_move (fn1, fn2, loop);
}
/* Verify that the blocks in BBS_P are a single-entry, single-exit region
   delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks.  */

DEBUG_FUNCTION void
verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
{
  basic_block bb;
  edge_iterator ei;
  edge e;
  unsigned i;
  bitmap bbs = BITMAP_ALLOC (NULL);

  gcc_assert (entry != NULL);
  gcc_assert (entry != exit);
  gcc_assert (bbs_p != NULL);

  gcc_assert (bbs_p->length () > 0);

  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    bitmap_set_bit (bbs, bb->index);

  gcc_assert (bitmap_bit_p (bbs, entry->index));
  gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));

  FOR_EACH_VEC_ELT (*bbs_p, i, bb)
    {
      if (bb == entry)
	{
	  gcc_assert (single_pred_p (entry));
	  gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
	}
      else
	for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->src->index));
	  }

      if (bb == exit)
	{
	  gcc_assert (single_succ_p (exit));
	  gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
	}
      else
	for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
	  {
	    e = ei_edge (ei);
	    gcc_assert (bitmap_bit_p (bbs, e->dest->index));
	  }
    }

  BITMAP_FREE (bbs);
}
/* If FROM is an SSA_NAME, mark the version in bitmap DATA.  */

bool
gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
{
  bitmap release_names = (bitmap)data;

  if (TREE_CODE (from) != SSA_NAME)
    return true;

  bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
  return true;
}
/* Move a single-entry, single-exit region delimited by ENTRY_BB and
   EXIT_BB to function DEST_CFUN.  The whole region is replaced by a
   single basic block in the original CFG and the new basic block is
   returned.  DEST_CFUN must not have a CFG yet.

   Note that the region need not be a pure SESE region.  Blocks inside
   the region may contain calls to abort/exit.  The only restriction
   is that ENTRY_BB should be the only entry point and it must
   dominate EXIT_BB.

   Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
   function's outermost BLOCK, move all subblocks of ORIG_BLOCK
   to the new function.

   All local variables referenced in the region are assumed to be in
   the corresponding BLOCK_VARS and unexpanded variable lists
   associated with DEST_CFUN.

   TODO: investigate whether we can reuse gimple_duplicate_sese_region to
   reimplement move_sese_region_to_fn by duplicating the region rather than
   moving it.  */

basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
			basic_block exit_bb, tree orig_block)
{
  vec<basic_block> bbs, dom_bbs;
  basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
  basic_block after, bb, *entry_pred, *exit_succ, abb;
  struct function *saved_cfun = cfun;
  int *entry_flag, *exit_flag;
  unsigned *entry_prob, *exit_prob;
  unsigned i, num_entry_edges, num_exit_edges, num_nodes;
  edge e;
  edge_iterator ei;
  htab_t new_label_map;
  hash_map<void *, void *> *eh_map;
  struct loop *loop = entry_bb->loop_father;
  struct loop *loop0 = get_loop (saved_cfun, 0);
  struct move_stmt_d d;

  /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
     region.  */
  gcc_assert (entry_bb != exit_bb
	      && (!exit_bb
		  || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));

  /* Collect all the blocks in the region.  Manually add ENTRY_BB
     because it won't be added by dfs_enumerate_from.  */
  bbs.create (0);
  bbs.safe_push (entry_bb);
  gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
#ifdef ENABLE_CHECKING
  verify_sese (entry_bb, exit_bb, &bbs);
#endif

  /* The blocks that used to be dominated by something in BBS will now be
     dominated by the new block.  */
  dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
				     bbs.address (),
				     bbs.length ());

  /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG.  We need to remember
     the predecessor edges to ENTRY_BB and the successor edges to
     EXIT_BB so that we can re-attach them to the new basic block that
     will replace the region.  */
  num_entry_edges = EDGE_COUNT (entry_bb->preds);
  entry_pred = XNEWVEC (basic_block, num_entry_edges);
  entry_flag = XNEWVEC (int, num_entry_edges);
  entry_prob = XNEWVEC (unsigned, num_entry_edges);
  i = 0;
  for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
    {
      entry_prob[i] = e->probability;
      entry_flag[i] = e->flags;
      entry_pred[i++] = e->src;
      remove_edge (e);
    }

  if (exit_bb)
    {
      num_exit_edges = EDGE_COUNT (exit_bb->succs);
      exit_succ = XNEWVEC (basic_block, num_exit_edges);
      exit_flag = XNEWVEC (int, num_exit_edges);
      exit_prob = XNEWVEC (unsigned, num_exit_edges);
      i = 0;
      for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
	{
	  exit_prob[i] = e->probability;
	  exit_flag[i] = e->flags;
	  exit_succ[i++] = e->dest;
	  remove_edge (e);
	}
    }
  else
    {
      num_exit_edges = 0;
      exit_succ = NULL;
      exit_flag = NULL;
      exit_prob = NULL;
    }

  /* Switch context to the child function to initialize DEST_FN's CFG.  */
  gcc_assert (dest_cfun->cfg == NULL);
  push_cfun (dest_cfun);

  init_empty_tree_cfg ();

  /* Initialize EH information for the new function.  */
  eh_map = NULL;
  new_label_map = NULL;
  if (saved_cfun->eh)
    {
      eh_region region = NULL;

      FOR_EACH_VEC_ELT (bbs, i, bb)
	region = find_outermost_region_in_block (saved_cfun, bb, region);

      init_eh_for_function ();
      if (region != NULL)
	{
	  new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
	  eh_map = duplicate_eh_regions (saved_cfun, region, 0,
					 new_label_mapper, new_label_map);
	}
    }

  /* Initialize an empty loop tree.  */
  struct loops *loops = ggc_cleared_alloc<struct loops> ();
  init_loops_structure (dest_cfun, loops, 1);
  loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
  set_loops_for_fn (dest_cfun, loops);

  /* Move the outlined loop tree part.  */
  num_nodes = bbs.length ();
  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      if (bb->loop_father->header == bb)
	{
	  struct loop *this_loop = bb->loop_father;
	  struct loop *outer = loop_outer (this_loop);
	  if (outer == loop
	      /* If the SESE region contains some bbs ending with
		 a noreturn call, those are considered to belong
		 to the outermost loop in saved_cfun, rather than
		 the entry_bb's loop_father.  */
	      || outer == loop0)
	    {
	      if (outer != loop)
		num_nodes -= this_loop->num_nodes;
	      flow_loop_tree_node_remove (bb->loop_father);
	      flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
	      fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
	    }
	}
      else if (bb->loop_father == loop0 && loop0 != loop)
	num_nodes--;

      /* Remove loop exits from the outlined region.  */
      if (loops_for_fn (saved_cfun)->exits)
	FOR_EACH_EDGE (e, ei, bb->succs)
	  {
	    struct loops *l = loops_for_fn (saved_cfun);
	    loop_exit **slot
	      = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
					       NO_INSERT);
	    if (slot)
	      l->exits->clear_slot (slot);
	  }
    }

  /* Adjust the number of blocks in the tree root of the outlined part.  */
  get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;

  /* Set up a mapping to be used by move_block_to_fn.  */
  loop->aux = current_loops->tree_root;
  loop0->aux = current_loops->tree_root;

  pop_cfun ();

  /* Move blocks from BBS into DEST_CFUN.  */
  gcc_assert (bbs.length () >= 2);
  after = dest_cfun->cfg->x_entry_block_ptr;
  hash_map<tree, tree> vars_map;

  memset (&d, 0, sizeof (d));
  d.orig_block = orig_block;
  d.new_block = DECL_INITIAL (dest_cfun->decl);
  d.from_context = cfun->decl;
  d.to_context = dest_cfun->decl;
  d.vars_map = &vars_map;
  d.new_label_map = new_label_map;
  d.eh_map = eh_map;
  d.remap_decls_p = true;

  if (gimple_in_ssa_p (cfun))
    for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
      {
	tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
	set_ssa_default_def (dest_cfun, arg, narg);
	vars_map.put (arg, narg);
      }

  FOR_EACH_VEC_ELT (bbs, i, bb)
    {
      /* No need to update edge counts on the last block.  It has
	 already been updated earlier when we detached the region from
	 the original CFG.  */
      move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
      after = bb;
    }

  loop->aux = NULL;
  loop0->aux = NULL;
  /* Loop sizes are no longer correct, fix them up.  */
  loop->num_nodes -= num_nodes;
  for (struct loop *outer = loop_outer (loop);
       outer; outer = loop_outer (outer))
    outer->num_nodes -= num_nodes;
  loop0->num_nodes -= bbs.length () - num_nodes;

  if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
    {
      struct loop *aloop;
      for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
	if (aloop != NULL)
	  {
	    if (aloop->simduid)
	      {
		replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
					   d.to_context);
		dest_cfun->has_simduid_loops = true;
	      }
	    if (aloop->force_vectorize)
	      dest_cfun->has_force_vectorize_loops = true;
	  }
    }

  /* Rewire BLOCK_SUBBLOCKS of orig_block.  */
  if (orig_block)
    {
      tree block;
      gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
		  == NULL_TREE);
      BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
	= BLOCK_SUBBLOCKS (orig_block);
      for (block = BLOCK_SUBBLOCKS (orig_block);
	   block; block = BLOCK_CHAIN (block))
	BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
      BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
    }

  replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
				    &vars_map, dest_cfun->decl);

  if (new_label_map)
    htab_delete (new_label_map);
  if (eh_map)
    delete eh_map;

  if (gimple_in_ssa_p (cfun))
    {
      /* We need to release ssa-names in a defined order, so first find them,
	 and then iterate in ascending version order.  */
      bitmap release_names = BITMAP_ALLOC (NULL);
      vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
      bitmap_iterator bi;
      EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
	release_ssa_name (ssa_name (i));
      BITMAP_FREE (release_names);
    }

  /* Rewire the entry and exit blocks.  The successor to the entry
     block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
     the child function.  Similarly, the predecessor of DEST_FN's
     EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR.  We
     need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
     various CFG manipulation functions get to the right CFG.

     FIXME, this is silly.  The CFG ought to become a parameter to
     these helpers.  */
  push_cfun (dest_cfun);
  make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
  if (exit_bb)
    make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  pop_cfun ();

  /* Back in the original function, the SESE region has disappeared,
     create a new basic block in its place.  */
  bb = create_empty_bb (entry_pred[0]);
  if (current_loops)
    add_bb_to_loop (bb, loop);
  for (i = 0; i < num_entry_edges; i++)
    {
      e = make_edge (entry_pred[i], bb, entry_flag[i]);
      e->probability = entry_prob[i];
    }

  for (i = 0; i < num_exit_edges; i++)
    {
      e = make_edge (bb, exit_succ[i], exit_flag[i]);
      e->probability = exit_prob[i];
    }

  set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
  FOR_EACH_VEC_ELT (dom_bbs, i, abb)
    set_immediate_dominator (CDI_DOMINATORS, abb, bb);
  dom_bbs.release ();

  if (exit_bb)
    {
      free (exit_prob);
      free (exit_flag);
      free (exit_succ);
    }
  free (entry_prob);
  free (entry_flag);
  free (entry_pred);
  bbs.release ();

  return bb;
}
7341 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h). */
7345 dump_function_to_file (tree fndecl, FILE *file, int flags)
7347 tree arg, var, old_current_fndecl = current_function_decl;
7348 struct function *dsf;
7349 bool ignore_topmost_bind = false, any_var = false;
7352 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
7353 && decl_is_tm_clone (fndecl));
7354 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
7356 if (DECL_ATTRIBUTES (fndecl) != NULL_TREE)
7358 fprintf (file, "__attribute__((");
7362 for (chain = DECL_ATTRIBUTES (fndecl); chain;
7363 first = false, chain = TREE_CHAIN (chain))
7366 fprintf (file, ", ");
7368 print_generic_expr (file, get_attribute_name (chain), dump_flags);
7369 if (TREE_VALUE (chain) != NULL_TREE)
7371 fprintf (file, " (");
7372 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
7373 fprintf (file, ")");
7377 fprintf (file, "))\n");
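/* For instance (illustrative output, not from the sources), a decl
   carrying attributes would be dumped roughly as

     __attribute__((noinline, section ("hot")))
     foo (int x)
     ...
*/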
7380 current_function_decl = fndecl;
7381 fprintf (file, "%s %s(", function_name (fun), tmclone ? "[tm-clone] " : "");
7383 arg = DECL_ARGUMENTS (fndecl);
7386 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
7387 fprintf (file, " ");
7388 print_generic_expr (file, arg, dump_flags);
7389 if (flags & TDF_VERBOSE)
7390 print_node (file, "", arg, 4);
7391 if (DECL_CHAIN (arg))
7392 fprintf (file, ", ");
7393 arg = DECL_CHAIN (arg);
7395 fprintf (file, ")\n");
7397 if (flags & TDF_VERBOSE)
7398 print_node (file, "", fndecl, 2);
7400 dsf = DECL_STRUCT_FUNCTION (fndecl);
7401 if (dsf && (flags & TDF_EH))
7402 dump_eh_tree (file, dsf);
7404 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
7406 dump_node (fndecl, TDF_SLIM | flags, file);
7407 current_function_decl = old_current_fndecl;
7411 /* When GIMPLE is lowered, the variables are no longer available in
7412 BIND_EXPRs, so display them separately. */
7413 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
7416 ignore_topmost_bind = true;
7418 fprintf (file, "{\n");
7419 if (!vec_safe_is_empty (fun->local_decls))
7420 FOR_EACH_LOCAL_DECL (fun, ix, var)
7422 print_generic_decl (file, var, flags);
7423 if (flags & TDF_VERBOSE)
7424 print_node (file, "", var, 4);
7425 fprintf (file, "\n");
7429 if (gimple_in_ssa_p (cfun))
7430 for (ix = 1; ix < num_ssa_names; ++ix)
7432 tree name = ssa_name (ix);
7433 if (name && !SSA_NAME_VAR (name))
7435 fprintf (file, " ");
7436 print_generic_expr (file, TREE_TYPE (name), flags);
7437 fprintf (file, " ");
7438 print_generic_expr (file, name, flags);
7439 fprintf (file, ";\n");
7446 if (fun && fun->decl == fndecl
7448 && basic_block_info_for_fn (fun))
7450 /* If the CFG has been built, emit a CFG-based dump. */
7451 if (!ignore_topmost_bind)
7452 fprintf (file, "{\n");
7454 if (any_var && n_basic_blocks_for_fn (fun))
7455 fprintf (file, "\n");
7457 FOR_EACH_BB_FN (bb, fun)
7458 dump_bb (file, bb, 2, flags | TDF_COMMENT);
7460 fprintf (file, "}\n");
7462 else if (DECL_SAVED_TREE (fndecl) == NULL)
7464 /* The function is now in GIMPLE form but the CFG has not been
7465 built yet. Emit the single sequence of GIMPLE statements
7466 that make up its body. */
7467 gimple_seq body = gimple_body (fndecl);
7469 if (gimple_seq_first_stmt (body)
7470 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
7471 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
7472 print_gimple_seq (file, body, 0, flags);
7475 if (!ignore_topmost_bind)
7476 fprintf (file, "{\n");
7479 fprintf (file, "\n");
7481 print_gimple_seq (file, body, 2, flags);
7482 fprintf (file, "}\n");
7489 /* Make a tree-based dump. */
7490 chain = DECL_SAVED_TREE (fndecl);
7491 if (chain && TREE_CODE (chain) == BIND_EXPR)
7493 if (ignore_topmost_bind)
7495 chain = BIND_EXPR_BODY (chain);
7503 if (!ignore_topmost_bind)
7505 fprintf (file, "{\n");
7506 /* No topmost bind, pretend it's ignored for later. */
7507 ignore_topmost_bind = true;
7513 fprintf (file, "\n");
7515 print_generic_stmt_indented (file, chain, flags, indent);
7516 if (ignore_topmost_bind)
7517 fprintf (file, "}\n");
7520 if (flags & TDF_ENUMERATE_LOCALS)
7521 dump_enumerated_decls (file, flags);
7522 fprintf (file, "\n\n");
7524 current_function_decl = old_current_fndecl;
7527 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in dumpfile.h). */
7530 debug_function (tree fn, int flags)
7532 dump_function_to_file (fn, stderr, flags);
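/* Usage sketch (illustrative): from a debugger one can dump the body of
   the current function in GIMPLE form with, e.g.

     (gdb) call debug_function (cfun->decl, TDF_VOPS | TDF_DETAILS)

   where the TDF_* dump flags are those defined in dumpfile.h.  */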
7536 /* Print on FILE the indexes for the predecessors of basic_block BB. */
7539 print_pred_bbs (FILE *file, basic_block bb)
7544 FOR_EACH_EDGE (e, ei, bb->preds)
7545 fprintf (file, "bb_%d ", e->src->index);
7549 /* Print on FILE the indexes for the successors of basic_block BB. */
7552 print_succ_bbs (FILE *file, basic_block bb)
7557 FOR_EACH_EDGE (e, ei, bb->succs)
7558 fprintf (file, "bb_%d ", e->dest->index);
7561 /* Print to FILE the basic block BB following the VERBOSITY level. */
7564 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
7566 char *s_indent = (char *) alloca ((size_t) indent + 1);
7567 memset ((void *) s_indent, ' ', (size_t) indent);
7568 s_indent[indent] = '\0';
7570 /* Print basic_block's header. */
7573 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
7574 print_pred_bbs (file, bb);
7575 fprintf (file, "}, succs = {");
7576 print_succ_bbs (file, bb);
7577 fprintf (file, "})\n");
7580 /* Print basic_block's body. */
7583 fprintf (file, "%s {\n", s_indent);
7584 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
7585 fprintf (file, "%s }\n", s_indent);
7589 static void print_loop_and_siblings (FILE *, struct loop *, int, int);
7591 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
7592 VERBOSITY level this outputs the contents of the loop, or just its structure. */
7596 print_loop (FILE *file, struct loop *loop, int indent, int verbosity)
7604 s_indent = (char *) alloca ((size_t) indent + 1);
7605 memset ((void *) s_indent, ' ', (size_t) indent);
7606 s_indent[indent] = '\0';
7608 /* Print loop's header. */
7609 fprintf (file, "%sloop_%d (", s_indent, loop->num);
7611 fprintf (file, "header = %d", loop->header->index);
7614 fprintf (file, "deleted)\n");
7618 fprintf (file, ", latch = %d", loop->latch->index);
7620 fprintf (file, ", multiple latches");
7621 fprintf (file, ", niter = ");
7622 print_generic_expr (file, loop->nb_iterations, 0);
7624 if (loop->any_upper_bound)
7626 fprintf (file, ", upper_bound = ");
7627 print_decu (loop->nb_iterations_upper_bound, file);
7630 if (loop->any_estimate)
7632 fprintf (file, ", estimate = ");
7633 print_decu (loop->nb_iterations_estimate, file);
7635 fprintf (file, ")\n");
7637 /* Print loop's body. */
7640 fprintf (file, "%s{\n", s_indent);
7641 FOR_EACH_BB_FN (bb, cfun)
7642 if (bb->loop_father == loop)
7643 print_loops_bb (file, bb, indent, verbosity);
7645 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
7646 fprintf (file, "%s}\n", s_indent);
7650 /* Print the LOOP and its sibling loops on FILE, indented INDENT
7651 spaces. Following VERBOSITY level this outputs the contents of the
7652 loop, or just its structure. */
7655 print_loop_and_siblings (FILE *file, struct loop *loop, int indent,
7661 print_loop (file, loop, indent, verbosity);
7662 print_loop_and_siblings (file, loop->next, indent, verbosity);
7665 /* Follow a CFG edge from the entry point of the program, and on entry
7666 of a loop, pretty print the loop structure on FILE. */
7669 print_loops (FILE *file, int verbosity)
7673 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
7674 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
7675 if (bb && bb->loop_father)
7676 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
7682 debug (struct loop &ref)
7684 print_loop (stderr, &ref, 0, /*verbosity*/0);
7688 debug (struct loop *ptr)
7693 fprintf (stderr, "<nil>\n");
7696 /* Dump a loop verbosely. */
7699 debug_verbose (struct loop &ref)
7701 print_loop (stderr, &ref, 0, /*verbosity*/3);
7705 debug_verbose (struct loop *ptr)
7710 fprintf (stderr, "<nil>\n");
7714 /* Debug the loop structure at tree level, at some VERBOSITY level. */
7717 debug_loops (int verbosity)
7719 print_loops (stderr, verbosity);
7722 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
7725 debug_loop (struct loop *loop, int verbosity)
7727 print_loop (stderr, loop, 0, verbosity);
7730 /* Print on stderr the code of loop number NUM, at some VERBOSITY level. */
7734 debug_loop_num (unsigned num, int verbosity)
7736 debug_loop (get_loop (cfun, num), verbosity);
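/* Usage sketch (illustrative): to print loop number 2 of the current
   function from a debugger:

     (gdb) call debug_loop_num (2, 3)

   A high verbosity such as 3 (the value debug_verbose uses) also prints
   the loop bodies.  */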
7739 /* Return true if BB ends with a call, possibly followed by some
7740 instructions that must stay with the call. Return false, otherwise. */
7744 gimple_block_ends_with_call_p (basic_block bb)
7746 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7747 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
7751 /* Return true if BB ends with a conditional branch. Return false, otherwise. */
7755 gimple_block_ends_with_condjump_p (const_basic_block bb)
7757 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
7758 return (stmt && gimple_code (stmt) == GIMPLE_COND);
7762 /* Return true if we need to add fake edge to exit at statement T.
7763 Helper function for gimple_flow_call_edges_add. */
7766 need_fake_edge_p (gimple *t)
7768 tree fndecl = NULL_TREE;
7771 /* NORETURN and LONGJMP calls already have an edge to exit.
7772 CONST and PURE calls do not need one.
7773 We don't currently check for CONST and PURE here, although
7774 it would be a good idea, because those attributes are
7775 figured out from the RTL in mark_constant_function, and
7776 the counter incrementation code from -fprofile-arcs
7777 leads to different results from -fbranch-probabilities. */
7778 if (is_gimple_call (t))
7780 fndecl = gimple_call_fndecl (t);
7781 call_flags = gimple_call_flags (t);
7784 if (is_gimple_call (t)
7786 && DECL_BUILT_IN (fndecl)
7787 && (call_flags & ECF_NOTHROW)
7788 && !(call_flags & ECF_RETURNS_TWICE)
7789 /* fork() doesn't really return twice, but the effect of
7790 wrapping it in __gcov_fork() which calls __gcov_flush()
7791 and clears the counters before forking has the same
7792 effect as returning twice. Force a fake edge. */
7793 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7794 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
7797 if (is_gimple_call (t))
7803 if (!(call_flags & ECF_NORETURN))
7807 FOR_EACH_EDGE (e, ei, bb->succs)
7808 if ((e->flags & EDGE_FAKE) == 0)
7812 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
7813 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
7820 /* Add fake edges to the function exit for any non-constant and
7821 non-noreturn calls (or noreturn calls with EH/abnormal edges), or for
7822 volatile inline assembly, in the bitmap of blocks specified by BLOCKS
7823 or to the whole CFG if BLOCKS is zero. Return the number of blocks that were split.
7826 The goal is to expose cases in which entering a basic block does
7827 not imply that all subsequent instructions must be executed. */
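/* For instance (illustrative source, not from GCC):

     void f (void) { g (); x = 1; }

   if g() aborts or longjmps, "x = 1" never runs even though its block
   was entered, so the call's block needs a fake edge to EXIT for the
   profiling spanning tree to stay consistent.  */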
7830 gimple_flow_call_edges_add (sbitmap blocks)
7833 int blocks_split = 0;
7834 int last_bb = last_basic_block_for_fn (cfun);
7835 bool check_last_block = false;
7837 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
7841 check_last_block = true;
7843 check_last_block = bitmap_bit_p (blocks,
7844 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
7846 /* In the last basic block, before epilogue generation, there will be
7847 a fallthru edge to EXIT. Special care is required if the last insn
7848 of the last basic block is a call because make_edge folds duplicate
7849 edges, which would result in the fallthru edge also being marked
7850 fake, which would result in the fallthru edge being removed by
7851 remove_fake_edges, which would result in an invalid CFG.
7853 Moreover, we can't elide the outgoing fake edge, since the block
7854 profiler needs to take this into account in order to solve the minimal
7855 spanning tree in the case that the call doesn't return.
7857 Handle this by adding a dummy instruction in a new last basic block. */
7858 if (check_last_block)
7860 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
7861 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
7864 if (!gsi_end_p (gsi))
7867 if (t && need_fake_edge_p (t))
7871 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7874 gsi_insert_on_edge (e, gimple_build_nop ());
7875 gsi_commit_edge_inserts ();
7880 /* Now add fake edges to the function exit for any non-constant
7881 calls since there is no way that we can determine if they will return or not... */
7883 for (i = 0; i < last_bb; i++)
7885 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
7886 gimple_stmt_iterator gsi;
7887 gimple *stmt, *last_stmt;
7892 if (blocks && !bitmap_bit_p (blocks, i))
7895 gsi = gsi_last_nondebug_bb (bb);
7896 if (!gsi_end_p (gsi))
7898 last_stmt = gsi_stmt (gsi);
7901 stmt = gsi_stmt (gsi);
7902 if (need_fake_edge_p (stmt))
7906 /* The handling above of the final block before the
7907 epilogue should be enough to verify that there is
7908 no edge to the exit block in the CFG already.
7909 Calling make_edge in such case would cause us to
7910 mark that edge as fake and remove it later. */
7911 #ifdef ENABLE_CHECKING
7912 if (stmt == last_stmt)
7914 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
7915 gcc_assert (e == NULL);
7919 /* Note that the following may create a new basic block
7920 and renumber the existing basic blocks. */
7921 if (stmt != last_stmt)
7923 e = split_block (bb, stmt);
7927 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
7931 while (!gsi_end_p (gsi));
7936 verify_flow_info ();
7938 return blocks_split;
7941 /* Removes edge E and all the blocks dominated by it, and updates dominance
7942 information. The IL in E->src needs to be updated separately.
7943 If dominance info is not available, only the edge E is removed. */
7946 remove_edge_and_dominated_blocks (edge e)
7948 vec<basic_block> bbs_to_remove = vNULL;
7949 vec<basic_block> bbs_to_fix_dom = vNULL;
7953 bool none_removed = false;
7955 basic_block bb, dbb;
7958 /* If we are removing a path inside a non-root loop, that may change
7959 loop ownership of blocks or remove loops. Mark loops for fixup. */
7961 && loop_outer (e->src->loop_father) != NULL
7962 && e->src->loop_father == e->dest->loop_father)
7963 loops_state_set (LOOPS_NEED_FIXUP);
7965 if (!dom_info_available_p (CDI_DOMINATORS))
7971 /* No updating is needed for edges to exit. */
7972 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
7974 if (cfgcleanup_altered_bbs)
7975 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
7980 /* First, we find the basic blocks to remove. If E->dest has a predecessor
7981 that is not dominated by E->dest, then this set is empty. Otherwise,
7982 all the basic blocks dominated by E->dest are removed.
7984 Also, to DF_IDOM we store the immediate dominators of the blocks in
7985 the dominance frontier of E (i.e., of the successors of the
7986 removed blocks, if there are any, and of E->dest otherwise). */
7987 FOR_EACH_EDGE (f, ei, e->dest->preds)
7992 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
7994 none_removed = true;
7999 df = BITMAP_ALLOC (NULL);
8000 df_idom = BITMAP_ALLOC (NULL);
8003 bitmap_set_bit (df_idom,
8004 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8007 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8008 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8010 FOR_EACH_EDGE (f, ei, bb->succs)
8012 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8013 bitmap_set_bit (df, f->dest->index);
8016 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8017 bitmap_clear_bit (df, bb->index);
8019 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8021 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8022 bitmap_set_bit (df_idom,
8023 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8027 if (cfgcleanup_altered_bbs)
8029 /* Record the set of the altered basic blocks. */
8030 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8031 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8034 /* Remove E and the cancelled blocks. */
8039 /* Walk backwards so as to get a chance to substitute all
8040 released DEFs into debug stmts. See
8041 eliminate_unnecessary_stmts() in tree-ssa-dce.c for more details. */
8043 for (i = bbs_to_remove.length (); i-- > 0; )
8044 delete_basic_block (bbs_to_remove[i]);
8047 /* Update the dominance information. The immediate dominator may change only
8048 for blocks whose immediate dominator belongs to DF_IDOM:
8050 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8051 removal. Let Z be an arbitrary block such that idom(Z) = Y and
8052 Z dominates X after the removal. Before removal, there exists a path P
8053 from Y to X that avoids Z. Let F be the last edge on P that is
8054 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8055 dominates W, and because of P, Z does not dominate W), and W belongs to
8056 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
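/* A minimal picture (assumed shape, for illustration only): removing the
   edge Y->W below removes W, puts its successor X into the dominance
   frontier of the removed region, and records Y = idom(X) in DF_IDOM:

          Y
         / \
        Z   W      after removing Y->W (and thus W),
         \ /       idom(X) changes from Y to Z.
          X
*/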
8057 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8059 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8060 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8062 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8063 bbs_to_fix_dom.safe_push (dbb);
8066 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8069 BITMAP_FREE (df_idom);
8070 bbs_to_remove.release ();
8071 bbs_to_fix_dom.release ();
8074 /* Purge dead EH edges from basic block BB. */
8077 gimple_purge_dead_eh_edges (basic_block bb)
8079 bool changed = false;
8082 gimple *stmt = last_stmt (bb);
8084 if (stmt && stmt_can_throw_internal (stmt))
8087 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8089 if (e->flags & EDGE_EH)
8091 remove_edge_and_dominated_blocks (e);
8101 /* Purge dead EH edges from the basic blocks listed in BLOCKS. */
8104 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
8106 bool changed = false;
8110 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8112 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8114 /* Earlier gimple_purge_dead_eh_edges could have removed
8115 this basic block already. */
8116 gcc_assert (bb || changed);
8118 changed |= gimple_purge_dead_eh_edges (bb);
8124 /* Purge dead abnormal call edges from basic block BB. */
8127 gimple_purge_dead_abnormal_call_edges (basic_block bb)
8129 bool changed = false;
8132 gimple *stmt = last_stmt (bb);
8134 if (!cfun->has_nonlocal_label
8135 && !cfun->calls_setjmp)
8138 if (stmt && stmt_can_make_abnormal_goto (stmt))
8141 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8143 if (e->flags & EDGE_ABNORMAL)
8145 if (e->flags & EDGE_FALLTHRU)
8146 e->flags &= ~EDGE_ABNORMAL;
8148 remove_edge_and_dominated_blocks (e);
8158 /* Purge dead abnormal call edges from the basic blocks listed in BLOCKS. */
8161 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
8163 bool changed = false;
8167 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
8169 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8171 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
8172 this basic block already. */
8173 gcc_assert (bb || changed);
8175 changed |= gimple_purge_dead_abnormal_call_edges (bb);
8181 /* This function is called whenever a new edge is created or redirected. */
8185 gimple_execute_on_growing_pred (edge e)
8187 basic_block bb = e->dest;
8189 if (!gimple_seq_empty_p (phi_nodes (bb)))
8190 reserve_phi_args_for_new_edge (bb);
8193 /* This function is called immediately before edge E is removed from
8194 the edge vector E->dest->preds. */
8197 gimple_execute_on_shrinking_pred (edge e)
8199 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
8200 remove_phi_args (e);
8203 /*---------------------------------------------------------------------------
8204 Helper functions for Loop versioning
8205 ---------------------------------------------------------------------------*/
8207 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
8208 of 'first'. Both of them are dominated by 'new_head' basic block. When
8209 'new_head' was created by splitting 'second's incoming edge, it received phi arguments
8210 on the edge by split_edge(). Later, additional edge 'e' was created to
8211 connect 'new_head' and 'first'. Now this routine adds phi args on this
8212 additional edge 'e' that the new_head-to-second edge received as part of edge splitting. */
8216 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
8217 basic_block new_head, edge e)
8220 gphi_iterator psi1, psi2;
8222 edge e2 = find_edge (new_head, second);
8224 /* Because NEW_HEAD has been created by splitting SECOND's incoming
8225 edge, we should always have an edge from NEW_HEAD to SECOND. */
8226 gcc_assert (e2 != NULL);
8228 /* Browse all 'second' basic block phi nodes and add phi args to
8229 edge 'e' for 'first' head. PHI args are always in correct order. */
8231 for (psi2 = gsi_start_phis (second),
8232 psi1 = gsi_start_phis (first);
8233 !gsi_end_p (psi2) && !gsi_end_p (psi1);
8234 gsi_next (&psi2), gsi_next (&psi1))
8238 def = PHI_ARG_DEF (phi2, e2->dest_idx);
8239 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
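/* Shape for illustration (assumed):

        new_head
        /      \
     e /        \ e2
      /          \
   first        second

   E2 got its phi arguments when split_edge () created NEW_HEAD; this
   routine copies them onto E so that FIRST's phi nodes see the same
   incoming values.  */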
8244 /* Adds an if-else statement to COND_BB with condition COND_EXPR.
8245 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
8246 the destination of the ELSE part. */
8249 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
8250 basic_block second_head ATTRIBUTE_UNUSED,
8251 basic_block cond_bb, void *cond_e)
8253 gimple_stmt_iterator gsi;
8254 gimple *new_cond_expr;
8255 tree cond_expr = (tree) cond_e;
8258 /* Build new conditional expr */
8259 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
8260 NULL_TREE, NULL_TREE);
8262 /* Add new cond in cond_bb. */
8263 gsi = gsi_last_bb (cond_bb);
8264 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
8266 /* Adjust edges appropriately to connect new head with first head
8267 as well as second head. */
8268 e0 = single_succ_edge (cond_bb);
8269 e0->flags &= ~EDGE_FALLTHRU;
8270 e0->flags |= EDGE_FALSE_VALUE;
8274 /* Do book-keeping of basic block BB for the profile consistency checker.
8275 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
8276 then do post-pass accounting. Store the counting in RECORD. */
8278 gimple_account_profile_record (basic_block bb, int after_pass,
8279 struct profile_record *record)
8281 gimple_stmt_iterator i;
8282 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
8284 record->size[after_pass]
8285 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
8286 if (profile_status_for_fn (cfun) == PROFILE_READ)
8287 record->time[after_pass]
8288 += estimate_num_insns (gsi_stmt (i),
8289 &eni_time_weights) * bb->count;
8290 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8291 record->time[after_pass]
8292 += estimate_num_insns (gsi_stmt (i),
8293 &eni_time_weights) * bb->frequency;
8297 struct cfg_hooks gimple_cfg_hooks = {
8299 gimple_verify_flow_info,
8300 gimple_dump_bb, /* dump_bb */
8301 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
8302 create_bb, /* create_basic_block */
8303 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
8304 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
8305 gimple_can_remove_branch_p, /* can_remove_branch_p */
8306 remove_bb, /* delete_basic_block */
8307 gimple_split_block, /* split_block */
8308 gimple_move_block_after, /* move_block_after */
8309 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
8310 gimple_merge_blocks, /* merge_blocks */
8311 gimple_predict_edge, /* predict_edge */
8312 gimple_predicted_by_p, /* predicted_by_p */
8313 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
8314 gimple_duplicate_bb, /* duplicate_block */
8315 gimple_split_edge, /* split_edge */
8316 gimple_make_forwarder_block, /* make_forward_block */
8317 NULL, /* tidy_fallthru_edge */
8318 NULL, /* force_nonfallthru */
8319 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
8320 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
8321 gimple_flow_call_edges_add, /* flow_call_edges_add */
8322 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
8323 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
8324 gimple_duplicate_loop_to_header_edge, /* duplicate loop for trees */
8325 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
8326 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
8327 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
8328 flush_pending_stmts, /* flush_pending_stmts */
8329 gimple_empty_block_p, /* block_empty_p */
8330 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
8331 gimple_account_profile_record,
8335 /* Split all critical edges. */
8338 split_critical_edges (void)
8344 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
8345 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
8346 mappings around the calls to split_edge. */
8347 start_recording_case_labels ();
8348 FOR_ALL_BB_FN (bb, cfun)
8350 FOR_EACH_EDGE (e, ei, bb->succs)
8352 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
8354 /* PRE inserts statements to edges and expects that
8355 since split_critical_edges was done beforehand, committing edge
8356 insertions will not split more edges. In addition to critical
8357 edges we must split edges that have multiple successors and
8358 end by control flow statements, such as RESX.
8359 Go ahead and split them too. This matches the logic in
8360 gimple_find_edge_insert_loc. */
8361 else if ((!single_pred_p (e->dest)
8362 || !gimple_seq_empty_p (phi_nodes (e->dest))
8363 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8364 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
8365 && !(e->flags & EDGE_ABNORMAL))
8367 gimple_stmt_iterator gsi;
8369 gsi = gsi_last_bb (e->src);
8370 if (!gsi_end_p (gsi)
8371 && stmt_ends_bb_p (gsi_stmt (gsi))
8372 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
8373 && !gimple_call_builtin_p (gsi_stmt (gsi),
8379 end_recording_case_labels ();
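/* Reminder (illustrative): an edge is critical when its source has more
   than one successor and its destination more than one predecessor (see
   EDGE_CRITICAL_P); code cannot be inserted on such an edge without
   disturbing one of the other paths, hence the splitting above.  */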
8385 const pass_data pass_data_split_crit_edges =
8387 GIMPLE_PASS, /* type */
8388 "crited", /* name */
8389 OPTGROUP_NONE, /* optinfo_flags */
8390 TV_TREE_SPLIT_EDGES, /* tv_id */
8391 PROP_cfg, /* properties_required */
8392 PROP_no_crit_edges, /* properties_provided */
8393 0, /* properties_destroyed */
8394 0, /* todo_flags_start */
8395 0, /* todo_flags_finish */
8398 class pass_split_crit_edges : public gimple_opt_pass
8401 pass_split_crit_edges (gcc::context *ctxt)
8402 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
8405 /* opt_pass methods: */
8406 virtual unsigned int execute (function *) { return split_critical_edges (); }
8408 opt_pass * clone () { return new pass_split_crit_edges (m_ctxt); }
8409 }; // class pass_split_crit_edges
8414 make_pass_split_crit_edges (gcc::context *ctxt)
8416 return new pass_split_crit_edges (ctxt);
8420 /* Insert COND expression, which is a GIMPLE_COND, after STMT
8421 in basic block BB, with the appropriate basic block split
8422 and creation of a new conditionally executed basic block.
8423 Return the created basic block. */
8425 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond)
8427 edge fall = split_block (bb, stmt);
8428 gimple_stmt_iterator iter = gsi_last_bb (bb);
8431 /* Insert cond statement. */
8432 gcc_assert (gimple_code (cond) == GIMPLE_COND);
8433 if (gsi_end_p (iter))
8434 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
8436 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
8438 /* Create conditionally executed block. */
8439 new_bb = create_empty_bb (bb);
8440 make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8441 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8443 /* Fix edge for split bb. */
8444 fall->flags = EDGE_FALSE_VALUE;
8446 /* Update dominance info. */
8447 if (dom_info_available_p (CDI_DOMINATORS))
8449 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
8450 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
8453 /* Update loop info. */
8455 add_bb_to_loop (new_bb, bb->loop_father);
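/* Usage sketch (illustrative; VAR and BOUND stand for trees the caller
   already has): guard newly inserted code on VAR < BOUND right after STMT:

     gcond *cond = gimple_build_cond (LT_EXPR, var, bound,
                                      NULL_TREE, NULL_TREE);
     basic_block then_bb = insert_cond_bb (bb, stmt, cond);

   Statements added to THEN_BB execute only when the condition holds and
   fall through to the code originally following STMT.  */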
8460 /* Build a ternary operation and gimplify it. Emit code before GSI.
8461 Return the gimple_val holding the result. */
8464 gimplify_build3 (gimple_stmt_iterator *gsi, enum tree_code code,
8465 tree type, tree a, tree b, tree c)
8468 location_t loc = gimple_location (gsi_stmt (*gsi));
8470 ret = fold_build3_loc (loc, code, type, a, b, c);
8473 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8477 /* Build a binary operation and gimplify it. Emit code before GSI.
8478 Return the gimple_val holding the result. */
8481 gimplify_build2 (gimple_stmt_iterator *gsi, enum tree_code code,
8482 tree type, tree a, tree b)
8486 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8489 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8493 /* Build a unary operation and gimplify it. Emit code before GSI.
8494 Return the gimple_val holding the result. */
8497 gimplify_build1 (gimple_stmt_iterator *gsi, enum tree_code code, tree type,
8502 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8505 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
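/* Usage sketch (illustrative): the gimplify_build* helpers compose, each
   returning a gimple value usable as an operand, e.g. to compute
   a*b + c before GSI:

     tree t = gimplify_build2 (gsi, MULT_EXPR, type, a, b);
     tree r = gimplify_build2 (gsi, PLUS_EXPR, type, t, c);
*/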
8511 /* Given a basic block B which ends with a conditional and has
8512 precisely two successors, determine which of the edges is taken if
8513 the conditional is true and which is taken if the conditional is
8514 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
8517 extract_true_false_edges_from_block (basic_block b,
8521 edge e = EDGE_SUCC (b, 0);
8523 if (e->flags & EDGE_TRUE_VALUE)
8526 *false_edge = EDGE_SUCC (b, 1);
8531 *true_edge = EDGE_SUCC (b, 1);
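/* Usage sketch (illustrative): for a block ending in a GIMPLE_COND,

     edge true_e, false_e;
     extract_true_false_edges_from_block (bb, &true_e, &false_e);

   true_e->dest is reached when the predicate holds, false_e->dest
   otherwise.  */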
8536 /* From a controlling predicate in the immediate dominator DOM of
8537 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
8538 predicate evaluates to true and false and store them to
8539 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
8540 they are non-NULL. Returns true if the edges can be determined,
8541 else returns false. */
8544 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
8545 edge *true_controlled_edge,
8546 edge *false_controlled_edge)
8548 basic_block bb = phiblock;
8549 edge true_edge, false_edge, tem;
8550 edge e0 = NULL, e1 = NULL;
8552 /* We have to verify that one edge into the PHI node is dominated
8553 by the true edge of the predicate block and the other edge
8554 dominated by the false edge. This ensures that the PHI argument
8555 we are going to take is completely determined by the path we
8556 take from the predicate block.
8557 We can only use BB dominance checks below if the destination of
8558 the true/false edges are dominated by their edge, thus only
8559 have a single predecessor. */
8560 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
8561 tem = EDGE_PRED (bb, 0);
8562 if (tem == true_edge
8563 || (single_pred_p (true_edge->dest)
8564 && (tem->src == true_edge->dest
8565 || dominated_by_p (CDI_DOMINATORS,
8566 tem->src, true_edge->dest))))
8568 else if (tem == false_edge
8569 || (single_pred_p (false_edge->dest)
8570 && (tem->src == false_edge->dest
8571 || dominated_by_p (CDI_DOMINATORS,
8572 tem->src, false_edge->dest))))
8576 tem = EDGE_PRED (bb, 1);
8577 if (tem == true_edge
8578 || (single_pred_p (true_edge->dest)
8579 && (tem->src == true_edge->dest
8580 || dominated_by_p (CDI_DOMINATORS,
8581 tem->src, true_edge->dest))))
8583 else if (tem == false_edge
8584 || (single_pred_p (false_edge->dest)
8585 && (tem->src == false_edge->dest
8586 || dominated_by_p (CDI_DOMINATORS,
8587 tem->src, false_edge->dest))))
8594 if (true_controlled_edge)
8595 *true_controlled_edge = e0;
8596 if (false_controlled_edge)
8597 *false_controlled_edge = e1;
8604 /* Emit return warnings. */
8608 const pass_data pass_data_warn_function_return =
8610 GIMPLE_PASS, /* type */
8611 "*warn_function_return", /* name */
8612 OPTGROUP_NONE, /* optinfo_flags */
8613 TV_NONE, /* tv_id */
8614 PROP_cfg, /* properties_required */
8615 0, /* properties_provided */
8616 0, /* properties_destroyed */
8617 0, /* todo_flags_start */
8618 0, /* todo_flags_finish */
8621 class pass_warn_function_return : public gimple_opt_pass
8624 pass_warn_function_return (gcc::context *ctxt)
8625 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
8628 /* opt_pass methods: */
8629 virtual unsigned int execute (function *);
8631 }; // class pass_warn_function_return
8634 pass_warn_function_return::execute (function *fun)
8636 source_location location;
8641 if (!targetm.warn_func_return (fun->decl))
8644 /* If we have a path to EXIT, then we do return. */
8645 if (TREE_THIS_VOLATILE (fun->decl)
8646 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
8648 location = UNKNOWN_LOCATION;
8649 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8651 last = last_stmt (e->src);
8652 if ((gimple_code (last) == GIMPLE_RETURN
8653 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
8654 && (location = gimple_location (last)) != UNKNOWN_LOCATION)
8657 if (location == UNKNOWN_LOCATION)
8658 location = cfun->function_end_locus;
8659 warning_at (location, 0, "%<noreturn%> function does return");
8662 /* If we see "return;" in some basic block, then we do reach the end
8663 without returning a value. */
8664 else if (warn_return_type
8665 && !TREE_NO_WARNING (fun->decl)
8666 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
8667 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
8669 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
8671 gimple *last = last_stmt (e->src);
8672 greturn *return_stmt = dyn_cast <greturn *> (last);
8674 && gimple_return_retval (return_stmt) == NULL
8675 && !gimple_no_warning_p (last))
8677 location = gimple_location (last);
8678 if (location == UNKNOWN_LOCATION)
8679 location = fun->function_end_locus;
8680 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function");
8681 TREE_NO_WARNING (fun->decl) = 1;
8692 make_pass_warn_function_return (gcc::context *ctxt)
8694 return new pass_warn_function_return (ctxt);
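/* For example (illustrative sources), the two diagnostics above fire on:

     int f (int x) { if (x) return 1; }
       -> warning: control reaches end of non-void function

     __attribute__ ((noreturn)) void g (void) { }
       -> warning: 'noreturn' function does return
*/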
8697 /* Walk a gimplified function and warn for functions whose return value is
8698 ignored and attribute((warn_unused_result)) is set. This is done before
8699 inlining, so we don't have to worry about that. */
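/* For example (illustrative source):

     __attribute__ ((warn_unused_result)) extern int f (void);
     void g (void) { f (); }
       -> warning: ignoring return value of 'f', declared with
          attribute warn_unused_result
*/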
8702 do_warn_unused_result (gimple_seq seq)
8705 gimple_stmt_iterator i;
8707 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
8709 gimple *g = gsi_stmt (i);
8711 switch (gimple_code (g))
8714 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
8717 do_warn_unused_result (gimple_try_eval (g));
8718 do_warn_unused_result (gimple_try_cleanup (g));
8721 do_warn_unused_result (gimple_catch_handler (
8722 as_a <gcatch *> (g)));
8724 case GIMPLE_EH_FILTER:
8725 do_warn_unused_result (gimple_eh_filter_failure (g));
8729 if (gimple_call_lhs (g))
8731 if (gimple_call_internal_p (g))
8734 /* This is a naked call, as opposed to a GIMPLE_CALL with an
8735 LHS. All calls whose value is ignored should be
8736 represented like this. Look for the attribute. */
8737 fdecl = gimple_call_fndecl (g);
8738 ftype = gimple_call_fntype (g);
8740 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
8742 location_t loc = gimple_location (g);
8745 warning_at (loc, OPT_Wunused_result,
8746 "ignoring return value of %qD, "
8747 "declared with attribute warn_unused_result",
8750 warning_at (loc, OPT_Wunused_result,
8751 "ignoring return value of function "
8752 "declared with attribute warn_unused_result");
8757 /* Not a container, not a call, or a call whose value is used. */
8765 const pass_data pass_data_warn_unused_result =
8767 GIMPLE_PASS, /* type */
8768 "*warn_unused_result", /* name */
8769 OPTGROUP_NONE, /* optinfo_flags */
8770 TV_NONE, /* tv_id */
8771 PROP_gimple_any, /* properties_required */
8772 0, /* properties_provided */
8773 0, /* properties_destroyed */
8774 0, /* todo_flags_start */
8775 0, /* todo_flags_finish */
8778 class pass_warn_unused_result : public gimple_opt_pass
8781 pass_warn_unused_result (gcc::context *ctxt)
8782 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
8785 /* opt_pass methods: */
8786 virtual bool gate (function *) { return flag_warn_unused_result; }
8787 virtual unsigned int execute (function *)
8789 do_warn_unused_result (gimple_body (current_function_decl));
8793 }; // class pass_warn_unused_result
8798 make_pass_warn_unused_result (gcc::context *ctxt)
8800 return new pass_warn_unused_result (ctxt);
8803 /* IPA passes, compilation of earlier functions or inlining
8804 might have changed some properties, such as marking functions nothrow,
8805 pure, const or noreturn.
8806 Remove redundant edges and basic blocks, and create new ones if necessary.
8808 This pass can't be executed as a standalone pass from the pass manager, because
8809 in between inlining and this fixup, verify_flow_info would fail. */
8812 execute_fixup_cfg (void)
8815 gimple_stmt_iterator gsi;
8817 gcov_type count_scale;
8822 = GCOV_COMPUTE_SCALE (cgraph_node::get (current_function_decl)->count,
8823 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count);
8825 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
8826 cgraph_node::get (current_function_decl)->count;
8827 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
8828 apply_scale (EXIT_BLOCK_PTR_FOR_FN (cfun)->count,
8831 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
8832 e->count = apply_scale (e->count, count_scale);
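/* Worked example (illustrative; REG_BR_PROB_BASE is 10000): a cgraph
   count of 600 against a stale entry-block count of 200 yields
   count_scale == 30000, and apply_scale (n, 30000) == 3 * n, so every
   count below is tripled to match the new entry count.  */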
8834 FOR_EACH_BB_FN (bb, cfun)
8836 bb->count = apply_scale (bb->count, count_scale);
8837 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
8839 gimple *stmt = gsi_stmt (gsi);
8840 tree decl = is_gimple_call (stmt)
8841 ? gimple_call_fndecl (stmt)
8845 int flags = gimple_call_flags (stmt);
8846 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
8848 if (gimple_purge_dead_abnormal_call_edges (bb))
8849 todo |= TODO_cleanup_cfg;
8851 if (gimple_in_ssa_p (cfun))
8853 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8858 if (flags & ECF_NORETURN
8859 && fixup_noreturn_call (stmt))
8860 todo |= TODO_cleanup_cfg;
8863 /* Remove stores to variables we marked write-only.
8864 Keep access when store has side effect, i.e. in case when source of the access may be volatile. */
8866 if (gimple_store_p (stmt)
8867 && !gimple_has_side_effects (stmt))
8869 tree lhs = get_base_address (gimple_get_lhs (stmt));
8871 if (TREE_CODE (lhs) == VAR_DECL
8872 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8873 && varpool_node::get (lhs)->writeonly)
8875 unlink_stmt_vdef (stmt);
8876 gsi_remove (&gsi, true);
8877 release_defs (stmt);
8878 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8882 /* For calls we can simply remove LHS when it is known
8883 to be write-only. */
8884 if (is_gimple_call (stmt)
8885 && gimple_get_lhs (stmt))
8887 tree lhs = get_base_address (gimple_get_lhs (stmt));
8889 if (TREE_CODE (lhs) == VAR_DECL
8890 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
8891 && varpool_node::get (lhs)->writeonly)
8893 gimple_call_set_lhs (stmt, NULL);
8895 todo |= TODO_update_ssa | TODO_cleanup_cfg;
8899 if (maybe_clean_eh_stmt (stmt)
8900 && gimple_purge_dead_eh_edges (bb))
8901 todo |= TODO_cleanup_cfg;
8905 FOR_EACH_EDGE (e, ei, bb->succs)
8906 e->count = apply_scale (e->count, count_scale);
8908 /* If we have a basic block with no successors that does not
8909 end with a control statement or a noreturn call end it with
8910 a call to __builtin_unreachable. This situation can occur
8911 when inlining a noreturn call that does in fact return. */
8912 if (EDGE_COUNT (bb->succs) == 0)
8914 gimple *stmt = last_stmt (bb);
8916 || (!is_ctrl_stmt (stmt)
8917 && (!is_gimple_call (stmt)
8918 || (gimple_call_flags (stmt) & ECF_NORETURN) == 0)))
8920 if (stmt && is_gimple_call (stmt))
8921 gimple_call_set_ctrl_altering (stmt, false);
8922 stmt = gimple_build_call
8923 (builtin_decl_implicit (BUILT_IN_UNREACHABLE), 0);
8924 gimple_stmt_iterator gsi = gsi_last_bb (bb);
8925 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
8929 if (count_scale != REG_BR_PROB_BASE)
8930 compute_function_frequency ();
8933 && (todo & TODO_cleanup_cfg))
8934 loops_state_set (LOOPS_NEED_FIXUP);
8941 const pass_data pass_data_fixup_cfg =
8943 GIMPLE_PASS, /* type */
8944 "fixup_cfg", /* name */
8945 OPTGROUP_NONE, /* optinfo_flags */
8946 TV_NONE, /* tv_id */
8947 PROP_cfg, /* properties_required */
8948 0, /* properties_provided */
8949 0, /* properties_destroyed */
8950 0, /* todo_flags_start */
8951 0, /* todo_flags_finish */
8954 class pass_fixup_cfg : public gimple_opt_pass
8957 pass_fixup_cfg (gcc::context *ctxt)
8958 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
8961 /* opt_pass methods: */
8962 opt_pass * clone () { return new pass_fixup_cfg (m_ctxt); }
8963 virtual unsigned int execute (function *) { return execute_fixup_cfg (); }
8965 }; // class pass_fixup_cfg
8970 make_pass_fixup_cfg (gcc::context *ctxt)
8972 return new pass_fixup_cfg (ctxt);
8975 /* Garbage collection support for edge_def. */
8977 extern void gt_ggc_mx (tree&);
8978 extern void gt_ggc_mx (gimple *&);
8979 extern void gt_ggc_mx (rtx&);
8980 extern void gt_ggc_mx (basic_block&);
8983 gt_ggc_mx (rtx_insn *& x)
8986 gt_ggc_mx_rtx_def ((void *) x);
8990 gt_ggc_mx (edge_def *e)
8992 tree block = LOCATION_BLOCK (e->goto_locus);
8994 gt_ggc_mx (e->dest);
8995 if (current_ir_type () == IR_GIMPLE)
8996 gt_ggc_mx (e->insns.g);
8998 gt_ggc_mx (e->insns.r);
9002 /* PCH support for edge_def. */
9004 extern void gt_pch_nx (tree&);
9005 extern void gt_pch_nx (gimple *&);
9006 extern void gt_pch_nx (rtx&);
9007 extern void gt_pch_nx (basic_block&);
9010 gt_pch_nx (rtx_insn *& x)
9013 gt_pch_nx_rtx_def ((void *) x);
9017 gt_pch_nx (edge_def *e)
9019 tree block = LOCATION_BLOCK (e->goto_locus);
9021 gt_pch_nx (e->dest);
9022 if (current_ir_type () == IR_GIMPLE)
9023 gt_pch_nx (e->insns.g);
9025 gt_pch_nx (e->insns.r);
9030 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
9032 tree block = LOCATION_BLOCK (e->goto_locus);
9033 op (&(e->src), cookie);
9034 op (&(e->dest), cookie);
9035 if (current_ir_type () == IR_GIMPLE)
9036 op (&(e->insns.g), cookie);
9038 op (&(e->insns.r), cookie);
9039 op (&(block), cookie);