1 /* Control flow functions for trees.
2 Copyright (C) 2001-2022 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "tree-pass.h"
33 #include "gimple-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "fold-const.h"
36 #include "trans-mem.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
46 #include "tree-ssa-loop-manip.h"
47 #include "tree-ssa-loop-niter.h"
48 #include "tree-into-ssa.h"
53 #include "tree-ssa-propagate.h"
54 #include "value-prof.h"
55 #include "tree-inline.h"
56 #include "tree-ssa-live.h"
57 #include "tree-ssa-dce.h"
58 #include "omp-general.h"
59 #include "omp-expand.h"
60 #include "tree-cfgcleanup.h"
69 /* This file contains functions for building the Control Flow Graph (CFG)
70 for a function tree. */
72 /* Local declarations. */
74 /* Initial capacity for the basic block array. */
75 static const int initial_cfg_capacity = 20;
77 /* This hash table allows us to efficiently lookup all CASE_LABEL_EXPRs
78 which use a particular edge. The CASE_LABEL_EXPRs are chained together
79 via their CASE_CHAIN field, which we clear after we're done with the
80 hash table to prevent problems with duplication of GIMPLE_SWITCHes.
82 Access to this list of CASE_LABEL_EXPRs allows us to efficiently
83 update the case vector in response to edge redirections.
85 Right now this table is set up and torn down at key points in the
86 compilation process. It would be nice if we could make the table
87 more persistent. The key is getting notification of changes to
88 the CFG (particularly edge removal, creation and redirection). */
90 static hash_map<edge, tree> *edge_to_cases;
92 /* If we record edge_to_cases, this bitmap will hold indexes
93 of basic blocks that end in a GIMPLE_SWITCH which we touched
94 due to edge manipulations. */
96 static bitmap touched_switch_bbs;
98 /* OpenMP region idxs for blocks during cfg pass. */
99 static vec<int> bb_to_omp_idx;
101 /* CFG statistics. */
104 long num_merged_labels;
107 static struct cfg_stats_d cfg_stats;
109 /* Data to pass to replace_block_vars_by_duplicates_1. */
110 struct replace_decls_d
112 hash_map<tree, tree> *vars_map;
116 /* Hash table to store last discriminator assigned for each locus. */
117 struct locus_discrim_map
123 /* Hashtable helpers. */
125 struct locus_discrim_hasher : free_ptr_hash <locus_discrim_map>
127 static inline hashval_t hash (const locus_discrim_map *);
128 static inline bool equal (const locus_discrim_map *,
129 const locus_discrim_map *);
132 /* Trivial hash function for a location_t. ITEM is a pointer to
133 a hash table entry that maps a location_t to a discriminator. */
136 locus_discrim_hasher::hash (const locus_discrim_map *item)
138 return item->location_line;
141 /* Equality function for the locus-to-discriminator map. A and B
142 point to the two hash table entries to compare. */
145 locus_discrim_hasher::equal (const locus_discrim_map *a,
146 const locus_discrim_map *b)
148 return a->location_line == b->location_line;
151 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
153 /* Basic blocks and flowgraphs. */
154 static void make_blocks (gimple_seq);
157 static void make_edges (void);
158 static void assign_discriminators (void);
159 static void make_cond_expr_edges (basic_block);
160 static void make_gimple_switch_edges (gswitch *, basic_block);
161 static bool make_goto_expr_edges (basic_block);
162 static void make_gimple_asm_edges (basic_block);
163 static edge gimple_redirect_edge_and_branch (edge, basic_block);
164 static edge gimple_try_redirect_by_replacing_jump (edge, basic_block);
166 /* Various helpers. */
167 static inline bool stmt_starts_bb_p (gimple *, gimple *);
168 static int gimple_verify_flow_info (void);
169 static void gimple_make_forwarder_block (edge);
170 static gimple *first_non_label_stmt (basic_block);
171 static bool verify_gimple_transaction (gtransaction *);
172 static bool call_can_make_abnormal_goto (gimple *);
174 /* Flowgraph optimization and cleanup. */
175 static void gimple_merge_blocks (basic_block, basic_block);
176 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
177 static void remove_bb (basic_block);
178 static edge find_taken_edge_computed_goto (basic_block, tree);
179 static edge find_taken_edge_cond_expr (const gcond *, tree);
182 init_empty_tree_cfg_for_function (struct function *fn)
184 /* Initialize the basic block array. */
186 profile_status_for_fn (fn) = PROFILE_ABSENT;
187 n_basic_blocks_for_fn (fn) = NUM_FIXED_BLOCKS;
188 last_basic_block_for_fn (fn) = NUM_FIXED_BLOCKS;
189 vec_safe_grow_cleared (basic_block_info_for_fn (fn),
190 initial_cfg_capacity, true);
192 /* Build a mapping of labels to their associated blocks. */
193 vec_safe_grow_cleared (label_to_block_map_for_fn (fn),
194 initial_cfg_capacity, true);
196 SET_BASIC_BLOCK_FOR_FN (fn, ENTRY_BLOCK, ENTRY_BLOCK_PTR_FOR_FN (fn));
197 SET_BASIC_BLOCK_FOR_FN (fn, EXIT_BLOCK, EXIT_BLOCK_PTR_FOR_FN (fn));
199 ENTRY_BLOCK_PTR_FOR_FN (fn)->next_bb
200 = EXIT_BLOCK_PTR_FOR_FN (fn);
201 EXIT_BLOCK_PTR_FOR_FN (fn)->prev_bb
202 = ENTRY_BLOCK_PTR_FOR_FN (fn);
206 init_empty_tree_cfg (void)
208 init_empty_tree_cfg_for_function (cfun);
211 /*---------------------------------------------------------------------------
213 ---------------------------------------------------------------------------*/
215 /* Entry point to the CFG builder for trees. SEQ is the sequence of
216 statements to be added to the flowgraph. */
219 build_gimple_cfg (gimple_seq seq)
221 /* Register specific gimple functions. */
222 gimple_register_cfg_hooks ();
224 memset ((void *) &cfg_stats, 0, sizeof (cfg_stats));
226 init_empty_tree_cfg ();
230 /* Make sure there is always at least one block, even if it's empty. */
231 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
232 create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));
234 /* Adjust the size of the array. */
235 if (basic_block_info_for_fn (cfun)->length ()
236 < (size_t) n_basic_blocks_for_fn (cfun))
237 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
238 n_basic_blocks_for_fn (cfun));
240 /* To speed up statement iterator walks, we first purge dead labels. */
241 cleanup_dead_labels ();
243 /* Group case nodes to reduce the number of edges.
244 We do this after cleaning up dead labels because otherwise we miss
245 a lot of obvious case merging opportunities. */
246 group_case_labels ();
248 /* Create the edges of the flowgraph. */
249 discriminator_per_locus = new hash_table<locus_discrim_hasher> (13);
251 assign_discriminators ();
252 cleanup_dead_labels ();
253 delete discriminator_per_locus;
254 discriminator_per_locus = NULL;
257 /* Look for ANNOTATE calls with loop annotation kind in BB; if found, remove
258 them and propagate the information to LOOP. We assume that the annotations
259 come immediately before the condition in BB, if any. */
262 replace_loop_annotate_in_block (basic_block bb, class loop *loop)
264 gimple_stmt_iterator gsi = gsi_last_bb (bb);
265 gimple *stmt = gsi_stmt (gsi);
267 if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
270 for (gsi_prev_nondebug (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
272 stmt = gsi_stmt (gsi);
273 if (gimple_code (stmt) != GIMPLE_CALL)
275 if (!gimple_call_internal_p (stmt)
276 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
279 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
281 case annot_expr_ivdep_kind:
282 loop->safelen = INT_MAX;
284 case annot_expr_unroll_kind:
286 = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
287 cfun->has_unroll = true;
289 case annot_expr_no_vector_kind:
290 loop->dont_vectorize = true;
292 case annot_expr_vector_kind:
293 loop->force_vectorize = true;
294 cfun->has_force_vectorize_loops = true;
296 case annot_expr_parallel_kind:
297 loop->can_be_parallel = true;
298 loop->safelen = INT_MAX;
304 stmt = gimple_build_assign (gimple_call_lhs (stmt),
305 gimple_call_arg (stmt, 0));
306 gsi_replace (&gsi, stmt, true);
310 /* Look for ANNOTATE calls with loop annotation kind; if found, remove
311 them and propagate the information to the loop. We assume that the
312 annotations come immediately before the condition of the loop. */
315 replace_loop_annotate (void)
318 gimple_stmt_iterator gsi;
321 for (auto loop : loops_list (cfun, 0))
323 /* First look into the header. */
324 replace_loop_annotate_in_block (loop->header, loop);
326 /* Then look into the latch, if any. */
328 replace_loop_annotate_in_block (loop->latch, loop);
330 /* Push the global flag_finite_loops state down to individual loops. */
331 loop->finite_p = flag_finite_loops;
334 /* Remove IFN_ANNOTATE. Safeguard for the case loop->latch == NULL. */
335 FOR_EACH_BB_FN (bb, cfun)
337 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
339 stmt = gsi_stmt (gsi);
340 if (gimple_code (stmt) != GIMPLE_CALL)
342 if (!gimple_call_internal_p (stmt)
343 || gimple_call_internal_fn (stmt) != IFN_ANNOTATE)
346 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
348 case annot_expr_ivdep_kind:
349 case annot_expr_unroll_kind:
350 case annot_expr_no_vector_kind:
351 case annot_expr_vector_kind:
352 case annot_expr_parallel_kind:
358 warning_at (gimple_location (stmt), 0, "ignoring loop annotation");
359 stmt = gimple_build_assign (gimple_call_lhs (stmt),
360 gimple_call_arg (stmt, 0));
361 gsi_replace (&gsi, stmt, true);
367 execute_build_cfg (void)
369 gimple_seq body = gimple_body (current_function_decl);
371 build_gimple_cfg (body);
372 gimple_set_body (current_function_decl, NULL);
373 if (dump_file && (dump_flags & TDF_DETAILS))
375 fprintf (dump_file, "Scope blocks:\n");
376 dump_scope_blocks (dump_file, dump_flags);
380 bb_to_omp_idx.release ();
382 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
383 replace_loop_annotate ();
389 const pass_data pass_data_build_cfg =
391 GIMPLE_PASS, /* type */
393 OPTGROUP_NONE, /* optinfo_flags */
394 TV_TREE_CFG, /* tv_id */
395 PROP_gimple_leh, /* properties_required */
396 ( PROP_cfg | PROP_loops ), /* properties_provided */
397 0, /* properties_destroyed */
398 0, /* todo_flags_start */
399 0, /* todo_flags_finish */
402 class pass_build_cfg : public gimple_opt_pass
405 pass_build_cfg (gcc::context *ctxt)
406 : gimple_opt_pass (pass_data_build_cfg, ctxt)
409 /* opt_pass methods: */
410 unsigned int execute (function *) final override
412 return execute_build_cfg ();
415 }; // class pass_build_cfg
420 make_pass_build_cfg (gcc::context *ctxt)
422 return new pass_build_cfg (ctxt);
426 /* Return true if T is a computed goto. */
429 computed_goto_p (gimple *t)
431 return (gimple_code (t) == GIMPLE_GOTO
432 && TREE_CODE (gimple_goto_dest (t)) != LABEL_DECL);
435 /* Returns true if the sequence of statements STMTS only contains
436 a call to __builtin_unreachable (). */
439 gimple_seq_unreachable_p (gimple_seq stmts)
442 /* Return false if -fsanitize=unreachable, we don't want to
443 optimize away those calls, but rather turn them into
444 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
446 || sanitize_flags_p (SANITIZE_UNREACHABLE))
449 gimple_stmt_iterator gsi = gsi_last (stmts);
451 if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
454 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
456 gimple *stmt = gsi_stmt (gsi);
457 if (gimple_code (stmt) != GIMPLE_LABEL
458 && !is_gimple_debug (stmt)
459 && !gimple_clobber_p (stmt))
465 /* Returns true for edge E where e->src ends with a GIMPLE_COND and
466 the other edge points to a bb with just __builtin_unreachable ().
467 I.e. return true for C->M edge in:
475 __builtin_unreachable ();
479 assert_unreachable_fallthru_edge_p (edge e)
481 basic_block pred_bb = e->src;
482 gimple *last = last_stmt (pred_bb);
483 if (last && gimple_code (last) == GIMPLE_COND)
485 basic_block other_bb = EDGE_SUCC (pred_bb, 0)->dest;
486 if (other_bb == e->dest)
487 other_bb = EDGE_SUCC (pred_bb, 1)->dest;
488 if (EDGE_COUNT (other_bb->succs) == 0)
489 return gimple_seq_unreachable_p (bb_seq (other_bb));
495 /* Initialize GF_CALL_CTRL_ALTERING flag, which indicates the call
496 could alter control flow except via eh. We initialize the flag at
497 CFG build time and only ever clear it later. */
500 gimple_call_initialize_ctrl_altering (gimple *stmt)
502 int flags = gimple_call_flags (stmt);
504 /* A call alters control flow if it can make an abnormal goto. */
505 if (call_can_make_abnormal_goto (stmt)
506 /* A call also alters control flow if it does not return. */
507 || flags & ECF_NORETURN
508 /* TM ending statements have backedges out of the transaction.
509 Return true so we split the basic block containing them.
510 Note that the TM_BUILTIN test is merely an optimization. */
511 || ((flags & ECF_TM_BUILTIN)
512 && is_tm_ending_fndecl (gimple_call_fndecl (stmt)))
513 /* BUILT_IN_RETURN call is same as return statement. */
514 || gimple_call_builtin_p (stmt, BUILT_IN_RETURN)
515 /* IFN_UNIQUE should be the last insn, to make checking for it
516 as cheap as possible. */
517 || (gimple_call_internal_p (stmt)
518 && gimple_call_internal_unique_p (stmt)))
519 gimple_call_set_ctrl_altering (stmt, true);
521 gimple_call_set_ctrl_altering (stmt, false);
525 /* Insert SEQ after BB and build a flowgraph. */
528 make_blocks_1 (gimple_seq seq, basic_block bb)
530 gimple_stmt_iterator i = gsi_start (seq);
532 gimple *prev_stmt = NULL;
533 bool start_new_block = true;
534 bool first_stmt_of_seq = true;
536 while (!gsi_end_p (i))
538 /* PREV_STMT should only be set to a debug stmt if the debug
539 stmt is before nondebug stmts. Once stmt reaches a nondebug
540 nonlabel, prev_stmt will be set to it, so that
541 stmt_starts_bb_p will know to start a new block if a label is
542 found. However, if stmt was a label after debug stmts only,
543 keep the label in prev_stmt even if we find further debug
544 stmts, for there may be other labels after them, and they
545 should land in the same block. */
546 if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
550 if (stmt && is_gimple_call (stmt))
551 gimple_call_initialize_ctrl_altering (stmt);
553 /* If the statement starts a new basic block or if we have determined
554 in a previous pass that we need to create a new block for STMT, do
556 if (start_new_block || stmt_starts_bb_p (stmt, prev_stmt))
558 if (!first_stmt_of_seq)
559 gsi_split_seq_before (&i, &seq);
560 bb = create_basic_block (seq, bb);
561 start_new_block = false;
565 /* Now add STMT to BB and create the subgraphs for special statement
567 gimple_set_bb (stmt, bb);
569 /* If STMT is a basic block terminator, set START_NEW_BLOCK for the
571 if (stmt_ends_bb_p (stmt))
573 /* If the stmt can make abnormal goto use a new temporary
574 for the assignment to the LHS. This makes sure the old value
575 of the LHS is available on the abnormal edge. Otherwise
576 we will end up with overlapping life-ranges for abnormal
578 if (gimple_has_lhs (stmt)
579 && stmt_can_make_abnormal_goto (stmt)
580 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
582 tree lhs = gimple_get_lhs (stmt);
583 tree tmp = create_tmp_var (TREE_TYPE (lhs));
584 gimple *s = gimple_build_assign (lhs, tmp);
585 gimple_set_location (s, gimple_location (stmt));
586 gimple_set_block (s, gimple_block (stmt));
587 gimple_set_lhs (stmt, tmp);
588 gsi_insert_after (&i, s, GSI_SAME_STMT);
590 start_new_block = true;
594 first_stmt_of_seq = false;
599 /* Build a flowgraph for the sequence of stmts SEQ. */
602 make_blocks (gimple_seq seq)
604 /* Look for debug markers right before labels, and move the debug
605 stmts after the labels. Accepting labels among debug markers
606 adds no value, just complexity; if we wanted to annotate labels
607 with view numbers (so sequencing among markers would matter) or
608 somesuch, we're probably better off still moving the labels, but
609 adding other debug annotations in their original positions or
610 emitting nonbind or bind markers associated with the labels in
611 the original position of the labels.
613 Moving labels would probably be simpler, but we can't do that:
614 moving labels assigns label ids to them, and doing so because of
615 debug markers makes for -fcompare-debug and possibly even codegen
616 differences. So, we have to move the debug stmts instead. To
617 that end, we scan SEQ backwards, marking the position of the
618 latest (earliest we find) label, and moving debug stmts that are
619 not separated from it by nondebug nonlabel stmts after the
621 if (MAY_HAVE_DEBUG_MARKER_STMTS)
623 gimple_stmt_iterator label = gsi_none ();
625 for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
627 gimple *stmt = gsi_stmt (i);
629 /* If this is the first label we encounter (latest in SEQ)
630 before nondebug stmts, record its position. */
631 if (is_a <glabel *> (stmt))
633 if (gsi_end_p (label))
638 /* Without a recorded label position to move debug stmts to,
639 there's nothing to do. */
640 if (gsi_end_p (label))
643 /* Move the debug stmt at I after LABEL. */
644 if (is_gimple_debug (stmt))
646 gcc_assert (gimple_debug_nonbind_marker_p (stmt));
647 /* As STMT is removed, I advances to the stmt after
648 STMT, so the gsi_prev in the for "increment"
649 expression gets us to the stmt we're to visit after
650 STMT. LABEL, however, would advance to the moved
651 stmt if we passed it to gsi_move_after, so pass it a
652 copy instead, so as to keep LABEL pointing to the
654 gimple_stmt_iterator copy = label;
655 gsi_move_after (&i, ©);
659 /* There aren't any (more?) debug stmts before label, so
660 there isn't anything else to move after it. */
665 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
668 /* Create and return a new empty basic block after bb AFTER. */
671 create_bb (void *h, void *e, basic_block after)
677 /* Create and initialize a new basic block. Since alloc_block uses
678 GC allocation that clears memory to allocate a basic block, we do
679 not have to clear the newly allocated basic block here. */
682 bb->index = last_basic_block_for_fn (cfun);
684 set_bb_seq (bb, h ? (gimple_seq) h : NULL);
686 /* Add the new block to the linked list of blocks. */
687 link_block (bb, after);
689 /* Grow the basic block array if needed. */
690 if ((size_t) last_basic_block_for_fn (cfun)
691 == basic_block_info_for_fn (cfun)->length ())
692 vec_safe_grow_cleared (basic_block_info_for_fn (cfun),
693 last_basic_block_for_fn (cfun) + 1);
695 /* Add the newly created block to the array. */
696 SET_BASIC_BLOCK_FOR_FN (cfun, last_basic_block_for_fn (cfun), bb);
698 n_basic_blocks_for_fn (cfun)++;
699 last_basic_block_for_fn (cfun)++;
705 /*---------------------------------------------------------------------------
707 ---------------------------------------------------------------------------*/
709 /* If basic block BB has an abnormal edge to a basic block
710 containing IFN_ABNORMAL_DISPATCHER internal call, return
711 that the dispatcher's basic block, otherwise return NULL. */
714 get_abnormal_succ_dispatcher (basic_block bb)
719 FOR_EACH_EDGE (e, ei, bb->succs)
720 if ((e->flags & (EDGE_ABNORMAL | EDGE_EH)) == EDGE_ABNORMAL)
722 gimple_stmt_iterator gsi
723 = gsi_start_nondebug_after_labels_bb (e->dest);
724 gimple *g = gsi_stmt (gsi);
725 if (g && gimple_call_internal_p (g, IFN_ABNORMAL_DISPATCHER))
731 /* Helper function for make_edges. Create a basic block with
732 with ABNORMAL_DISPATCHER internal call in it if needed, and
733 create abnormal edges from BBS to it and from it to FOR_BB
734 if COMPUTED_GOTO is false, otherwise factor the computed gotos. */
737 handle_abnormal_edges (basic_block *dispatcher_bbs, basic_block for_bb,
738 auto_vec<basic_block> *bbs, bool computed_goto)
740 basic_block *dispatcher = dispatcher_bbs + (computed_goto ? 1 : 0);
741 unsigned int idx = 0;
745 if (!bb_to_omp_idx.is_empty ())
747 dispatcher = dispatcher_bbs + 2 * bb_to_omp_idx[for_bb->index];
748 if (bb_to_omp_idx[for_bb->index] != 0)
752 /* If the dispatcher has been created already, then there are basic
753 blocks with abnormal edges to it, so just make a new edge to
755 if (*dispatcher == NULL)
757 /* Check if there are any basic blocks that need to have
758 abnormal edges to this dispatcher. If there are none, return
760 if (bb_to_omp_idx.is_empty ())
762 if (bbs->is_empty ())
767 FOR_EACH_VEC_ELT (*bbs, idx, bb)
768 if (bb_to_omp_idx[bb->index] == bb_to_omp_idx[for_bb->index])
774 /* Create the dispatcher bb. */
775 *dispatcher = create_basic_block (NULL, for_bb);
778 /* Factor computed gotos into a common computed goto site. Also
779 record the location of that site so that we can un-factor the
780 gotos after we have converted back to normal form. */
781 gimple_stmt_iterator gsi = gsi_start_bb (*dispatcher);
783 /* Create the destination of the factored goto. Each original
784 computed goto will put its desired destination into this
785 variable and jump to the label we create immediately below. */
786 tree var = create_tmp_var (ptr_type_node, "gotovar");
788 /* Build a label for the new block which will contain the
789 factored computed goto. */
790 tree factored_label_decl
791 = create_artificial_label (UNKNOWN_LOCATION);
792 gimple *factored_computed_goto_label
793 = gimple_build_label (factored_label_decl);
794 gsi_insert_after (&gsi, factored_computed_goto_label, GSI_NEW_STMT);
796 /* Build our new computed goto. */
797 gimple *factored_computed_goto = gimple_build_goto (var);
798 gsi_insert_after (&gsi, factored_computed_goto, GSI_NEW_STMT);
800 FOR_EACH_VEC_ELT (*bbs, idx, bb)
802 if (!bb_to_omp_idx.is_empty ()
803 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
806 gsi = gsi_last_bb (bb);
807 gimple *last = gsi_stmt (gsi);
809 gcc_assert (computed_goto_p (last));
811 /* Copy the original computed goto's destination into VAR. */
813 = gimple_build_assign (var, gimple_goto_dest (last));
814 gsi_insert_before (&gsi, assignment, GSI_SAME_STMT);
816 edge e = make_edge (bb, *dispatcher, EDGE_FALLTHRU);
817 e->goto_locus = gimple_location (last);
818 gsi_remove (&gsi, true);
823 tree arg = inner ? boolean_true_node : boolean_false_node;
824 gcall *g = gimple_build_call_internal (IFN_ABNORMAL_DISPATCHER,
826 gimple_call_set_ctrl_altering (g, true);
827 gimple_stmt_iterator gsi = gsi_after_labels (*dispatcher);
828 gsi_insert_after (&gsi, g, GSI_NEW_STMT);
830 /* Create predecessor edges of the dispatcher. */
831 FOR_EACH_VEC_ELT (*bbs, idx, bb)
833 if (!bb_to_omp_idx.is_empty ()
834 && bb_to_omp_idx[bb->index] != bb_to_omp_idx[for_bb->index])
836 make_edge (bb, *dispatcher, EDGE_ABNORMAL);
841 make_edge (*dispatcher, for_bb, EDGE_ABNORMAL);
844 /* Creates outgoing edges for BB. Returns 1 when it ends with an
845 computed goto, returns 2 when it ends with a statement that
846 might return to this function via an nonlocal goto, otherwise
847 return 0. Updates *PCUR_REGION with the OMP region this BB is in. */
850 make_edges_bb (basic_block bb, struct omp_region **pcur_region, int *pomp_index)
852 gimple *last = last_stmt (bb);
853 bool fallthru = false;
859 switch (gimple_code (last))
862 if (make_goto_expr_edges (bb))
868 edge e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
869 e->goto_locus = gimple_location (last);
874 make_cond_expr_edges (bb);
878 make_gimple_switch_edges (as_a <gswitch *> (last), bb);
882 make_eh_edges (last);
885 case GIMPLE_EH_DISPATCH:
886 fallthru = make_eh_dispatch_edges (as_a <geh_dispatch *> (last));
890 /* If this function receives a nonlocal goto, then we need to
891 make edges from this call site to all the nonlocal goto
893 if (stmt_can_make_abnormal_goto (last))
896 /* If this statement has reachable exception handlers, then
897 create abnormal edges to them. */
898 make_eh_edges (last);
900 /* BUILTIN_RETURN is really a return statement. */
901 if (gimple_call_builtin_p (last, BUILT_IN_RETURN))
903 make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
906 /* Some calls are known not to return. */
908 fallthru = !gimple_call_noreturn_p (last);
912 /* A GIMPLE_ASSIGN may throw internally and thus be considered
914 if (is_ctrl_altering_stmt (last))
915 make_eh_edges (last);
920 make_gimple_asm_edges (bb);
925 fallthru = omp_make_gimple_edges (bb, pcur_region, pomp_index);
928 case GIMPLE_TRANSACTION:
930 gtransaction *txn = as_a <gtransaction *> (last);
931 tree label1 = gimple_transaction_label_norm (txn);
932 tree label2 = gimple_transaction_label_uninst (txn);
935 make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
937 make_edge (bb, label_to_block (cfun, label2),
938 EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));
940 tree label3 = gimple_transaction_label_over (txn);
941 if (gimple_transaction_subcode (txn)
942 & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
943 make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);
950 gcc_assert (!stmt_ends_bb_p (last));
956 make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
961 /* Join all the blocks in the flowgraph. */
967 struct omp_region *cur_region = NULL;
968 auto_vec<basic_block> ab_edge_goto;
969 auto_vec<basic_block> ab_edge_call;
970 int cur_omp_region_idx = 0;
972 /* Create an edge from entry to the first block with executable
974 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun),
975 BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS),
978 /* Traverse the basic block array placing edges. */
979 FOR_EACH_BB_FN (bb, cfun)
983 if (!bb_to_omp_idx.is_empty ())
984 bb_to_omp_idx[bb->index] = cur_omp_region_idx;
986 mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
988 ab_edge_goto.safe_push (bb);
990 ab_edge_call.safe_push (bb);
992 if (cur_region && bb_to_omp_idx.is_empty ())
993 bb_to_omp_idx.safe_grow_cleared (n_basic_blocks_for_fn (cfun), true);
996 /* Computed gotos are hell to deal with, especially if there are
997 lots of them with a large number of destinations. So we factor
998 them to a common computed goto location before we build the
999 edge list. After we convert back to normal form, we will un-factor
1000 the computed gotos since factoring introduces an unwanted jump.
1001 For non-local gotos and abnormal edges from calls to calls that return
1002 twice or forced labels, factor the abnormal edges too, by having all
1003 abnormal edges from the calls go to a common artificial basic block
1004 with ABNORMAL_DISPATCHER internal call and abnormal edges from that
1005 basic block to all forced labels and calls returning twice.
1006 We do this per-OpenMP structured block, because those regions
1007 are guaranteed to be single entry single exit by the standard,
1008 so it is not allowed to enter or exit such regions abnormally this way,
1009 thus all computed gotos, non-local gotos and setjmp/longjmp calls
1010 must not transfer control across SESE region boundaries. */
1011 if (!ab_edge_goto.is_empty () || !ab_edge_call.is_empty ())
1013 gimple_stmt_iterator gsi;
1014 basic_block dispatcher_bb_array[2] = { NULL, NULL };
1015 basic_block *dispatcher_bbs = dispatcher_bb_array;
1016 int count = n_basic_blocks_for_fn (cfun);
1018 if (!bb_to_omp_idx.is_empty ())
1019 dispatcher_bbs = XCNEWVEC (basic_block, 2 * count);
1021 FOR_EACH_BB_FN (bb, cfun)
1023 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1025 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1031 target = gimple_label_label (label_stmt);
1033 /* Make an edge to every label block that has been marked as a
1034 potential target for a computed goto or a non-local goto. */
1035 if (FORCED_LABEL (target))
1036 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_goto,
1038 if (DECL_NONLOCAL (target))
1040 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
1046 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
1047 gsi_next_nondebug (&gsi);
1048 if (!gsi_end_p (gsi))
1050 /* Make an edge to every setjmp-like call. */
1051 gimple *call_stmt = gsi_stmt (gsi);
1052 if (is_gimple_call (call_stmt)
1053 && ((gimple_call_flags (call_stmt) & ECF_RETURNS_TWICE)
1054 || gimple_call_builtin_p (call_stmt,
1055 BUILT_IN_SETJMP_RECEIVER)))
1056 handle_abnormal_edges (dispatcher_bbs, bb, &ab_edge_call,
1061 if (!bb_to_omp_idx.is_empty ())
1062 XDELETE (dispatcher_bbs);
1065 omp_free_regions ();
1068 /* Add SEQ after GSI. Start new bb after GSI, and created further bbs as
1069 needed. Returns true if new bbs were created.
1070 Note: This is transitional code, and should not be used for new code. We
1071 should be able to get rid of this by rewriting all target va-arg
1072 gimplification hooks to use an interface gimple_build_cond_value as described
1073 in https://gcc.gnu.org/ml/gcc-patches/2015-02/msg01194.html. */
1076 gimple_find_sub_bbs (gimple_seq seq, gimple_stmt_iterator *gsi)
1078 gimple *stmt = gsi_stmt (*gsi);
1079 basic_block bb = gimple_bb (stmt);
1080 basic_block lastbb, afterbb;
1081 int old_num_bbs = n_basic_blocks_for_fn (cfun);
1083 lastbb = make_blocks_1 (seq, bb);
1084 if (old_num_bbs == n_basic_blocks_for_fn (cfun))
1086 e = split_block (bb, stmt);
1087 /* Move e->dest to come after the new basic blocks. */
1089 unlink_block (afterbb);
1090 link_block (afterbb, lastbb);
1091 redirect_edge_succ (e, bb->next_bb);
1093 while (bb != afterbb)
1095 struct omp_region *cur_region = NULL;
1096 profile_count cnt = profile_count::zero ();
1099 int cur_omp_region_idx = 0;
1100 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1101 gcc_assert (!mer && !cur_region);
1102 add_bb_to_loop (bb, afterbb->loop_father);
1106 FOR_EACH_EDGE (e, ei, bb->preds)
1108 if (e->count ().initialized_p ())
1113 tree_guess_outgoing_edge_probabilities (bb);
1114 if (all || profile_status_for_fn (cfun) == PROFILE_READ)
1122 /* Find the next available discriminator value for LOCUS. The
1123 discriminator distinguishes among several basic blocks that
1124 share a common locus, allowing for more accurate sample-based
1128 next_discriminator_for_locus (int line)
1130 struct locus_discrim_map item;
1131 struct locus_discrim_map **slot;
1133 item.location_line = line;
1134 item.discriminator = 0;
1135 slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
1137 if (*slot == HTAB_EMPTY_ENTRY)
1139 *slot = XNEW (struct locus_discrim_map);
1141 (*slot)->location_line = line;
1142 (*slot)->discriminator = 0;
1144 (*slot)->discriminator++;
1145 return (*slot)->discriminator;
1148 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1151 same_line_p (location_t locus1, expanded_location *from, location_t locus2)
1153 expanded_location to;
1155 if (locus1 == locus2)
1158 to = expand_location (locus2);
1160 if (from->line != to.line)
1162 if (from->file == to.file)
1164 return (from->file != NULL
1166 && filename_cmp (from->file, to.file) == 0);
1169 /* Assign a unique discriminator value to all statements in block bb that
1170 have the same line number as locus. */
1173 assign_discriminator (location_t locus, basic_block bb)
1175 gimple_stmt_iterator gsi;
1178 if (locus == UNKNOWN_LOCATION)
1181 expanded_location locus_e = expand_location (locus);
1183 discriminator = next_discriminator_for_locus (locus_e.line);
1185 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1187 gimple *stmt = gsi_stmt (gsi);
1188 location_t stmt_locus = gimple_location (stmt);
1189 if (same_line_p (locus, &locus_e, stmt_locus))
1190 gimple_set_location (stmt,
1191 location_with_discriminator (stmt_locus, discriminator));
1195 /* Assign discriminators to statement locations. */
1198 assign_discriminators (void)
1202 FOR_EACH_BB_FN (bb, cfun)
1206 gimple_stmt_iterator gsi;
1207 location_t curr_locus = UNKNOWN_LOCATION;
1208 expanded_location curr_locus_e = {};
1211 /* Traverse the basic block, if two function calls within a basic block
1212 are mapped to the same line, assign a new discriminator because a call
1213 stmt could be a split point of a basic block. */
1214 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1216 gimple *stmt = gsi_stmt (gsi);
1218 if (curr_locus == UNKNOWN_LOCATION)
1220 curr_locus = gimple_location (stmt);
1221 curr_locus_e = expand_location (curr_locus);
1223 else if (!same_line_p (curr_locus, &curr_locus_e, gimple_location (stmt)))
1225 curr_locus = gimple_location (stmt);
1226 curr_locus_e = expand_location (curr_locus);
1229 else if (curr_discr != 0)
1231 location_t loc = gimple_location (stmt);
1232 location_t dloc = location_with_discriminator (loc, curr_discr);
1233 gimple_set_location (stmt, dloc);
1235 /* Allocate a new discriminator for CALL stmt. */
1236 if (gimple_code (stmt) == GIMPLE_CALL)
1237 curr_discr = next_discriminator_for_locus (curr_locus);
1240 gimple *last = last_stmt (bb);
1241 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1242 if (locus == UNKNOWN_LOCATION)
1245 expanded_location locus_e = expand_location (locus);
1247 FOR_EACH_EDGE (e, ei, bb->succs)
1249 gimple *first = first_non_label_stmt (e->dest);
1250 gimple *last = last_stmt (e->dest);
1252 gimple *stmt_on_same_line = NULL;
1253 if (first && same_line_p (locus, &locus_e,
1254 gimple_location (first)))
1255 stmt_on_same_line = first;
1256 else if (last && same_line_p (locus, &locus_e,
1257 gimple_location (last)))
1258 stmt_on_same_line = last;
1260 if (stmt_on_same_line)
1262 if (has_discriminator (gimple_location (stmt_on_same_line))
1263 && !has_discriminator (locus))
1264 assign_discriminator (locus, bb);
1266 assign_discriminator (locus, e->dest);
1272 /* Create the edges for a GIMPLE_COND starting at block BB. */
1275 make_cond_expr_edges (basic_block bb)
1277 gcond *entry = as_a <gcond *> (last_stmt (bb));
1278 gimple *then_stmt, *else_stmt;
1279 basic_block then_bb, else_bb;
1280 tree then_label, else_label;
1284 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1286 /* Entry basic blocks for each component. */
1287 then_label = gimple_cond_true_label (entry);
1288 else_label = gimple_cond_false_label (entry);
1289 then_bb = label_to_block (cfun, then_label);
1290 else_bb = label_to_block (cfun, else_label);
1291 then_stmt = first_stmt (then_bb);
1292 else_stmt = first_stmt (else_bb);
1294 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1295 e->goto_locus = gimple_location (then_stmt);
1296 e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
1298 e->goto_locus = gimple_location (else_stmt);
1300 /* We do not need the labels anymore. */
1301 gimple_cond_set_true_label (entry, NULL_TREE);
1302 gimple_cond_set_false_label (entry, NULL_TREE);
1306 /* Called for each element in the hash table (P) as we delete the
1307 edge to cases hash table.
1309 Clear all the CASE_CHAINs to prevent problems with copying of
1310 SWITCH_EXPRs and structure sharing rules, then free the hash table
1314 edge_to_cases_cleanup (edge const &, tree const &value, void *)
1318 for (t = value; t; t = next)
1320 next = CASE_CHAIN (t);
1321 CASE_CHAIN (t) = NULL;
1327 /* Start recording information mapping edges to case labels. */
1330 start_recording_case_labels (void)
1332 gcc_assert (edge_to_cases == NULL);
1333 edge_to_cases = new hash_map<edge, tree>;
1334 touched_switch_bbs = BITMAP_ALLOC (NULL);
1337 /* Return nonzero if we are recording information for case labels. */
1340 recording_case_labels_p (void)
1342 return (edge_to_cases != NULL);
1345 /* Stop recording information mapping edges to case labels and
1346 remove any information we have recorded. */
1348 end_recording_case_labels (void)
1352 edge_to_cases->traverse<void *, edge_to_cases_cleanup> (NULL);
1353 delete edge_to_cases;
1354 edge_to_cases = NULL;
1355 EXECUTE_IF_SET_IN_BITMAP (touched_switch_bbs, 0, i, bi)
1357 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
1360 gimple *stmt = last_stmt (bb);
1361 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1362 group_case_labels_stmt (as_a <gswitch *> (stmt));
1365 BITMAP_FREE (touched_switch_bbs);
1368 /* If we are inside a {start,end}_recording_cases block, then return
1369 a chain of CASE_LABEL_EXPRs from T which reference E.
1371 Otherwise return NULL. */
1374 get_cases_for_edge (edge e, gswitch *t)
1379 /* If we are not recording cases, then we do not have CASE_LABEL_EXPR
1380 chains available. Return NULL so the caller can detect this case. */
1381 if (!recording_case_labels_p ())
1384 slot = edge_to_cases->get (e);
1388 /* If we did not find E in the hash table, then this must be the first
1389 time we have been queried for information about E & T. Add all the
1390 elements from T to the hash table then perform the query again. */
1392 n = gimple_switch_num_labels (t);
1393 for (i = 0; i < n; i++)
1395 tree elt = gimple_switch_label (t, i);
1396 tree lab = CASE_LABEL (elt);
1397 basic_block label_bb = label_to_block (cfun, lab);
1398 edge this_edge = find_edge (e->src, label_bb);
1400 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1402 tree &s = edge_to_cases->get_or_insert (this_edge);
1403 CASE_CHAIN (elt) = s;
1407 return *edge_to_cases->get (e);
1410 /* Create the edges for a GIMPLE_SWITCH starting at block BB. */
1413 make_gimple_switch_edges (gswitch *entry, basic_block bb)
1417 n = gimple_switch_num_labels (entry);
1419 for (i = 0; i < n; ++i)
1421 basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
1422 make_edge (bb, label_bb, 0);
1427 /* Return the basic block holding label DEST. */
1430 label_to_block (struct function *ifun, tree dest)
1432 int uid = LABEL_DECL_UID (dest);
1434 /* We would die hard when faced by an undefined label. Emit a label to
1435 the very first basic block. This will hopefully make even the dataflow
1436 and undefined variable warnings quite right. */
1437 if (seen_error () && uid < 0)
1439 gimple_stmt_iterator gsi =
1440 gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, NUM_FIXED_BLOCKS));
1443 stmt = gimple_build_label (dest);
1444 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
1445 uid = LABEL_DECL_UID (dest);
1447 if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
1449 return (*ifun->cfg->x_label_to_block_map)[uid];
1452 /* Create edges for a goto statement at block BB. Returns true
1453 if abnormal edges should be created. */
1456 make_goto_expr_edges (basic_block bb)
1458 gimple_stmt_iterator last = gsi_last_bb (bb);
1459 gimple *goto_t = gsi_stmt (last);
1461 /* A simple GOTO creates normal edges. */
1462 if (simple_goto_p (goto_t))
1464 tree dest = gimple_goto_dest (goto_t);
1465 basic_block label_bb = label_to_block (cfun, dest);
1466 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1467 e->goto_locus = gimple_location (goto_t);
1468 gsi_remove (&last, true);
1472 /* A computed GOTO creates abnormal edges. */
1476 /* Create edges for an asm statement with labels at block BB. */
1479 make_gimple_asm_edges (basic_block bb)
1481 gasm *stmt = as_a <gasm *> (last_stmt (bb));
1482 int i, n = gimple_asm_nlabels (stmt);
1484 for (i = 0; i < n; ++i)
1486 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1487 basic_block label_bb = label_to_block (cfun, label);
1488 make_edge (bb, label_bb, 0);
1492 /*---------------------------------------------------------------------------
1494 ---------------------------------------------------------------------------*/
1496 /* Cleanup useless labels in basic blocks. This is something we wish
1497 to do early because it allows us to group case labels before creating
1498 the edges for the CFG, and it speeds up block statement iterators in
1499 all passes later on.
1500 We rerun this pass after CFG is created, to get rid of the labels that
1501 are no longer referenced. After then we do not run it any more, since
1502 (almost) no new labels should be created. */
1504 /* A map from basic block index to the leading label of that block. */
1510 /* True if the label is referenced from somewhere. */
1514 /* Given LABEL return the first label in the same basic block. */
1517 main_block_label (tree label, label_record *label_for_bb)
1519 basic_block bb = label_to_block (cfun, label);
1520 tree main_label = label_for_bb[bb->index].label;
1522 /* label_to_block possibly inserted undefined label into the chain. */
1525 label_for_bb[bb->index].label = label;
1529 label_for_bb[bb->index].used = true;
1533 /* Clean up redundant labels within the exception tree. */
1536 cleanup_dead_labels_eh (label_record *label_for_bb)
1543 if (cfun->eh == NULL)
1546 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1547 if (lp && lp->post_landing_pad)
1549 lab = main_block_label (lp->post_landing_pad, label_for_bb);
1550 if (lab != lp->post_landing_pad)
1552 EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1553 lp->post_landing_pad = lab;
1554 EH_LANDING_PAD_NR (lab) = lp->index;
1558 FOR_ALL_EH_REGION (r)
1562 case ERT_MUST_NOT_THROW:
1568 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
1572 c->label = main_block_label (lab, label_for_bb);
1577 case ERT_ALLOWED_EXCEPTIONS:
1578 lab = r->u.allowed.label;
1580 r->u.allowed.label = main_block_label (lab, label_for_bb);
1586 /* Cleanup redundant labels. This is a three-step process:
1587 1) Find the leading label for each block.
1588 2) Redirect all references to labels to the leading labels.
1589 3) Cleanup all useless labels. */
1592 cleanup_dead_labels (void)
1595 label_record *label_for_bb = XCNEWVEC (struct label_record,
1596 last_basic_block_for_fn (cfun));
1598 /* Find a suitable label for each block. We use the first user-defined
1599 label if there is one, or otherwise just the first label we see. */
1600 FOR_EACH_BB_FN (bb, cfun)
1602 gimple_stmt_iterator i;
1604 for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
1607 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1612 label = gimple_label_label (label_stmt);
1614 /* If we have not yet seen a label for the current block,
1615 remember this one and see if there are more labels. */
1616 if (!label_for_bb[bb->index].label)
1618 label_for_bb[bb->index].label = label;
1622 /* If we did see a label for the current block already, but it
1623 is an artificially created label, replace it if the current
1624 label is a user defined label. */
1625 if (!DECL_ARTIFICIAL (label)
1626 && DECL_ARTIFICIAL (label_for_bb[bb->index].label))
1628 label_for_bb[bb->index].label = label;
1634 /* Now redirect all jumps/branches to the selected label.
1635 First do so for each block ending in a control statement. */
1636 FOR_EACH_BB_FN (bb, cfun)
1638 gimple *stmt = last_stmt (bb);
1639 tree label, new_label;
1644 switch (gimple_code (stmt))
1648 gcond *cond_stmt = as_a <gcond *> (stmt);
1649 label = gimple_cond_true_label (cond_stmt);
1652 new_label = main_block_label (label, label_for_bb);
1653 if (new_label != label)
1654 gimple_cond_set_true_label (cond_stmt, new_label);
1657 label = gimple_cond_false_label (cond_stmt);
1660 new_label = main_block_label (label, label_for_bb);
1661 if (new_label != label)
1662 gimple_cond_set_false_label (cond_stmt, new_label);
1669 gswitch *switch_stmt = as_a <gswitch *> (stmt);
1670 size_t i, n = gimple_switch_num_labels (switch_stmt);
1672 /* Replace all destination labels. */
1673 for (i = 0; i < n; ++i)
1675 tree case_label = gimple_switch_label (switch_stmt, i);
1676 label = CASE_LABEL (case_label);
1677 new_label = main_block_label (label, label_for_bb);
1678 if (new_label != label)
1679 CASE_LABEL (case_label) = new_label;
1686 gasm *asm_stmt = as_a <gasm *> (stmt);
1687 int i, n = gimple_asm_nlabels (asm_stmt);
1689 for (i = 0; i < n; ++i)
1691 tree cons = gimple_asm_label_op (asm_stmt, i);
1692 tree label = main_block_label (TREE_VALUE (cons), label_for_bb);
1693 TREE_VALUE (cons) = label;
1698 /* We have to handle gotos until they're removed, and we don't
1699 remove them until after we've created the CFG edges. */
1701 if (!computed_goto_p (stmt))
1703 ggoto *goto_stmt = as_a <ggoto *> (stmt);
1704 label = gimple_goto_dest (goto_stmt);
1705 new_label = main_block_label (label, label_for_bb);
1706 if (new_label != label)
1707 gimple_goto_set_dest (goto_stmt, new_label);
1711 case GIMPLE_TRANSACTION:
1713 gtransaction *txn = as_a <gtransaction *> (stmt);
1715 label = gimple_transaction_label_norm (txn);
1718 new_label = main_block_label (label, label_for_bb);
1719 if (new_label != label)
1720 gimple_transaction_set_label_norm (txn, new_label);
1723 label = gimple_transaction_label_uninst (txn);
1726 new_label = main_block_label (label, label_for_bb);
1727 if (new_label != label)
1728 gimple_transaction_set_label_uninst (txn, new_label);
1731 label = gimple_transaction_label_over (txn);
1734 new_label = main_block_label (label, label_for_bb);
1735 if (new_label != label)
1736 gimple_transaction_set_label_over (txn, new_label);
1746 /* Do the same for the exception region tree labels. */
1747 cleanup_dead_labels_eh (label_for_bb);
1749 /* Finally, purge dead labels. All user-defined labels and labels that
1750 can be the target of non-local gotos and labels which have their
1751 address taken are preserved. */
1752 FOR_EACH_BB_FN (bb, cfun)
1754 gimple_stmt_iterator i;
1755 tree label_for_this_bb = label_for_bb[bb->index].label;
1757 if (!label_for_this_bb)
1760 /* If the main label of the block is unused, we may still remove it. */
1761 if (!label_for_bb[bb->index].used)
1762 label_for_this_bb = NULL;
1764 for (i = gsi_start_bb (bb); !gsi_end_p (i); )
1767 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (i));
1772 label = gimple_label_label (label_stmt);
1774 if (label == label_for_this_bb
1775 || !DECL_ARTIFICIAL (label)
1776 || DECL_NONLOCAL (label)
1777 || FORCED_LABEL (label))
1781 gcc_checking_assert (EH_LANDING_PAD_NR (label) == 0);
1782 gsi_remove (&i, true);
1787 free (label_for_bb);
1790 /* Scan the sorted vector of cases in STMT (a GIMPLE_SWITCH) and combine
1791 the ones jumping to the same label.
1792 Eg. three separate entries 1: 2: 3: become one entry 1..3: */
1795 group_case_labels_stmt (gswitch *stmt)
1797 int old_size = gimple_switch_num_labels (stmt);
1798 int i, next_index, new_size;
1799 basic_block default_bb = NULL;
1800 hash_set<tree> *removed_labels = NULL;
1802 default_bb = gimple_switch_default_bb (cfun, stmt);
1804 /* Look for possible opportunities to merge cases. */
1806 while (i < old_size)
1808 tree base_case, base_high;
1809 basic_block base_bb;
1811 base_case = gimple_switch_label (stmt, i);
1813 gcc_assert (base_case);
1814 base_bb = label_to_block (cfun, CASE_LABEL (base_case));
1816 /* Discard cases that have the same destination as the default case or
1817 whose destination blocks have already been removed as unreachable. */
1819 || base_bb == default_bb
1821 && removed_labels->contains (CASE_LABEL (base_case))))
1827 base_high = CASE_HIGH (base_case)
1828 ? CASE_HIGH (base_case)
1829 : CASE_LOW (base_case);
1832 /* Try to merge case labels. Break out when we reach the end
1833 of the label vector or when we cannot merge the next case
1834 label with the current one. */
1835 while (next_index < old_size)
1837 tree merge_case = gimple_switch_label (stmt, next_index);
1838 basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1839 wide_int bhp1 = wi::to_wide (base_high) + 1;
1841 /* Merge the cases if they jump to the same place,
1842 and their ranges are consecutive. */
1843 if (merge_bb == base_bb
1844 && (removed_labels == NULL
1845 || !removed_labels->contains (CASE_LABEL (merge_case)))
1846 && wi::to_wide (CASE_LOW (merge_case)) == bhp1)
1849 = (CASE_HIGH (merge_case)
1850 ? CASE_HIGH (merge_case) : CASE_LOW (merge_case));
1851 CASE_HIGH (base_case) = base_high;
1858 /* Discard cases that have an unreachable destination block. */
1859 if (EDGE_COUNT (base_bb->succs) == 0
1860 && gimple_seq_unreachable_p (bb_seq (base_bb))
1861 /* Don't optimize this if __builtin_unreachable () is the
1862 implicitly added one by the C++ FE too early, before
1863 -Wreturn-type can be diagnosed. We'll optimize it later
1864 during switchconv pass or any other cfg cleanup. */
1865 && (gimple_in_ssa_p (cfun)
1866 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1867 != BUILTINS_LOCATION)))
1869 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1870 if (base_edge != NULL)
1872 for (gimple_stmt_iterator gsi = gsi_start_bb (base_bb);
1873 !gsi_end_p (gsi); gsi_next (&gsi))
1874 if (glabel *stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
1876 if (FORCED_LABEL (gimple_label_label (stmt))
1877 || DECL_NONLOCAL (gimple_label_label (stmt)))
1879 /* Forced/non-local labels aren't going to be removed,
1880 but they will be moved to some neighbouring basic
1881 block. If some later case label refers to one of
1882 those labels, we should throw that case away rather
1883 than keeping it around and refering to some random
1884 other basic block without an edge to it. */
1885 if (removed_labels == NULL)
1886 removed_labels = new hash_set<tree>;
1887 removed_labels->add (gimple_label_label (stmt));
1892 remove_edge_and_dominated_blocks (base_edge);
1899 gimple_switch_set_label (stmt, new_size,
1900 gimple_switch_label (stmt, i));
1905 gcc_assert (new_size <= old_size);
1907 if (new_size < old_size)
1908 gimple_switch_set_num_labels (stmt, new_size);
1910 delete removed_labels;
1911 return new_size < old_size;
1914 /* Look for blocks ending in a multiway branch (a GIMPLE_SWITCH),
1915 and scan the sorted vector of cases. Combine the ones jumping to the
1919 group_case_labels (void)
1922 bool changed = false;
1924 FOR_EACH_BB_FN (bb, cfun)
1926 gimple *stmt = last_stmt (bb);
1927 if (stmt && gimple_code (stmt) == GIMPLE_SWITCH)
1928 changed |= group_case_labels_stmt (as_a <gswitch *> (stmt));
1934 /* Checks whether we can merge block B into block A. */
1937 gimple_can_merge_blocks_p (basic_block a, basic_block b)
1941 if (!single_succ_p (a))
1944 if (single_succ_edge (a)->flags & EDGE_COMPLEX)
1947 if (single_succ (a) != b)
1950 if (!single_pred_p (b))
1953 if (a == ENTRY_BLOCK_PTR_FOR_FN (cfun)
1954 || b == EXIT_BLOCK_PTR_FOR_FN (cfun))
1957 /* If A ends by a statement causing exceptions or something similar, we
1958 cannot merge the blocks. */
1959 stmt = last_stmt (a);
1960 if (stmt && stmt_ends_bb_p (stmt))
1963 /* Do not allow a block with only a non-local label to be merged. */
1965 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
1966 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
1969 /* Examine the labels at the beginning of B. */
1970 for (gimple_stmt_iterator gsi = gsi_start_bb (b); !gsi_end_p (gsi);
1974 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
1977 lab = gimple_label_label (label_stmt);
1979 /* Do not remove user forced labels or for -O0 any user labels. */
1980 if (!DECL_ARTIFICIAL (lab) && (!optimize || FORCED_LABEL (lab)))
1984 /* Protect simple loop latches. We only want to avoid merging
1985 the latch with the loop header or with a block in another
1986 loop in this case. */
1988 && b->loop_father->latch == b
1989 && loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES)
1990 && (b->loop_father->header == a
1991 || b->loop_father != a->loop_father))
1994 /* It must be possible to eliminate all phi nodes in B. If ssa form
1995 is not up-to-date and a name-mapping is registered, we cannot eliminate
1996 any phis. Symbols marked for renaming are never a problem though. */
1997 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);
2000 gphi *phi = gsi.phi ();
2001 /* Technically only new names matter. */
2002 if (name_registered_for_update_p (PHI_RESULT (phi)))
2006 /* When not optimizing, don't merge if we'd lose goto_locus. */
2008 && single_succ_edge (a)->goto_locus != UNKNOWN_LOCATION)
2010 location_t goto_locus = single_succ_edge (a)->goto_locus;
2011 gimple_stmt_iterator prev, next;
2012 prev = gsi_last_nondebug_bb (a);
2013 next = gsi_after_labels (b);
2014 if (!gsi_end_p (next) && is_gimple_debug (gsi_stmt (next)))
2015 gsi_next_nondebug (&next);
2016 if ((gsi_end_p (prev)
2017 || gimple_location (gsi_stmt (prev)) != goto_locus)
2018 && (gsi_end_p (next)
2019 || gimple_location (gsi_stmt (next)) != goto_locus))
2026 /* Replaces all uses of NAME by VAL. */
2029 replace_uses_by (tree name, tree val)
2031 imm_use_iterator imm_iter;
2036 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
2038 /* Mark the block if we change the last stmt in it. */
2039 if (cfgcleanup_altered_bbs
2040 && stmt_ends_bb_p (stmt))
2041 bitmap_set_bit (cfgcleanup_altered_bbs, gimple_bb (stmt)->index);
2043 FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
2045 replace_exp (use, val);
2047 if (gimple_code (stmt) == GIMPLE_PHI)
2049 e = gimple_phi_arg_edge (as_a <gphi *> (stmt),
2050 PHI_ARG_INDEX_FROM_USE (use));
2051 if (e->flags & EDGE_ABNORMAL
2052 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val))
2054 /* This can only occur for virtual operands, since
2055 for the real ones SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name))
2056 would prevent replacement. */
2057 gcc_checking_assert (virtual_operand_p (name));
2058 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val) = 1;
2063 if (gimple_code (stmt) != GIMPLE_PHI)
2065 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
2066 gimple *orig_stmt = stmt;
2069 /* FIXME. It shouldn't be required to keep TREE_CONSTANT
2070 on ADDR_EXPRs up-to-date on GIMPLE. Propagation will
2071 only change sth from non-invariant to invariant, and only
2072 when propagating constants. */
2073 if (is_gimple_min_invariant (val))
2074 for (i = 0; i < gimple_num_ops (stmt); i++)
2076 tree op = gimple_op (stmt, i);
2077 /* Operands may be empty here. For example, the labels
2078 of a GIMPLE_COND are nulled out following the creation
2079 of the corresponding CFG edges. */
2080 if (op && TREE_CODE (op) == ADDR_EXPR)
2081 recompute_tree_invariant_for_addr_expr (op);
2084 if (fold_stmt (&gsi))
2085 stmt = gsi_stmt (gsi);
2087 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
2088 gimple_purge_dead_eh_edges (gimple_bb (stmt));
2094 gcc_checking_assert (has_zero_uses (name));
2096 /* Also update the trees stored in loop structures. */
2099 for (auto loop : loops_list (cfun, 0))
2100 substitute_in_loop_info (loop, name, val);
2104 /* Merge block B into block A. */
2107 gimple_merge_blocks (basic_block a, basic_block b)
2109 gimple_stmt_iterator last, gsi;
2113 fprintf (dump_file, "Merging blocks %d and %d\n", a->index, b->index);
2115 /* Remove all single-valued PHI nodes from block B of the form
2116 V_i = PHI <V_j> by propagating V_j to all the uses of V_i. */
2117 gsi = gsi_last_bb (a);
2118 for (psi = gsi_start_phis (b); !gsi_end_p (psi); )
2120 gimple *phi = gsi_stmt (psi);
2121 tree def = gimple_phi_result (phi), use = gimple_phi_arg_def (phi, 0);
2123 bool may_replace_uses = (virtual_operand_p (def)
2124 || may_propagate_copy (def, use));
2126 /* In case we maintain loop closed ssa form, do not propagate arguments
2127 of loop exit phi nodes. */
2129 && loops_state_satisfies_p (LOOP_CLOSED_SSA)
2130 && !virtual_operand_p (def)
2131 && TREE_CODE (use) == SSA_NAME
2132 && a->loop_father != b->loop_father)
2133 may_replace_uses = false;
2135 if (!may_replace_uses)
2137 gcc_assert (!virtual_operand_p (def));
2139 /* Note that just emitting the copies is fine -- there is no problem
2140 with ordering of phi nodes. This is because A is the single
2141 predecessor of B, therefore results of the phi nodes cannot
2142 appear as arguments of the phi nodes. */
2143 copy = gimple_build_assign (def, use);
2144 gsi_insert_after (&gsi, copy, GSI_NEW_STMT);
2145 remove_phi_node (&psi, false);
2149 /* If we deal with a PHI for virtual operands, we can simply
2150 propagate these without fussing with folding or updating
2152 if (virtual_operand_p (def))
2154 imm_use_iterator iter;
2155 use_operand_p use_p;
2158 FOR_EACH_IMM_USE_STMT (stmt, iter, def)
2159 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
2160 SET_USE (use_p, use);
2162 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
2163 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (use) = 1;
2166 replace_uses_by (def, use);
2168 remove_phi_node (&psi, true);
2172 /* Ensure that B follows A. */
2173 move_block_after (b, a);
2175 gcc_assert (single_succ_edge (a)->flags & EDGE_FALLTHRU);
2176 gcc_assert (!last_stmt (a) || !stmt_ends_bb_p (last_stmt (a)));
2178 /* Remove labels from B and set gimple_bb to A for other statements. */
2179 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi);)
2181 gimple *stmt = gsi_stmt (gsi);
2182 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2184 tree label = gimple_label_label (label_stmt);
2187 gsi_remove (&gsi, false);
2189 /* Now that we can thread computed gotos, we might have
2190 a situation where we have a forced label in block B
2191 However, the label at the start of block B might still be
2192 used in other ways (think about the runtime checking for
2193 Fortran assigned gotos). So we cannot just delete the
2194 label. Instead we move the label to the start of block A. */
2195 if (FORCED_LABEL (label))
2197 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2198 tree first_label = NULL_TREE;
2199 if (!gsi_end_p (dest_gsi))
2200 if (glabel *first_label_stmt
2201 = dyn_cast <glabel *> (gsi_stmt (dest_gsi)))
2202 first_label = gimple_label_label (first_label_stmt);
2204 && (DECL_NONLOCAL (first_label)
2205 || EH_LANDING_PAD_NR (first_label) != 0))
2206 gsi_insert_after (&dest_gsi, stmt, GSI_NEW_STMT);
2208 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2210 /* Other user labels keep around in a form of a debug stmt. */
2211 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2213 gimple *dbg = gimple_build_debug_bind (label,
2216 gimple_debug_bind_reset_value (dbg);
2217 gsi_insert_before (&gsi, dbg, GSI_SAME_STMT);
2220 lp_nr = EH_LANDING_PAD_NR (label);
2223 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
2224 lp->post_landing_pad = NULL;
2229 gimple_set_bb (stmt, a);
2234 /* When merging two BBs, if their counts are different, the larger count
2235 is selected as the new bb count. This is to handle inconsistent
2237 if (a->loop_father == b->loop_father)
2239 a->count = a->count.merge (b->count);
2242 /* Merge the sequences. */
2243 last = gsi_last_bb (a);
2244 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2245 set_bb_seq (b, NULL);
2247 if (cfgcleanup_altered_bbs)
2248 bitmap_set_bit (cfgcleanup_altered_bbs, a->index);
2252 /* Return the one of two successors of BB that is not reachable by a
2253 complex edge, if there is one. Else, return BB. We use
2254 this in optimizations that use post-dominators for their heuristics,
2255 to catch the cases in C++ where function calls are involved. */
2258 single_noncomplex_succ (basic_block bb)
2261 if (EDGE_COUNT (bb->succs) != 2)
2264 e0 = EDGE_SUCC (bb, 0);
2265 e1 = EDGE_SUCC (bb, 1);
2266 if (e0->flags & EDGE_COMPLEX)
2268 if (e1->flags & EDGE_COMPLEX)
2274 /* T is CALL_EXPR. Set current_function_calls_* flags. */
2277 notice_special_calls (gcall *call)
2279 int flags = gimple_call_flags (call);
2281 if (flags & ECF_MAY_BE_ALLOCA)
2282 cfun->calls_alloca = true;
2283 if (flags & ECF_RETURNS_TWICE)
2284 cfun->calls_setjmp = true;
2288 /* Clear flags set by notice_special_calls. Used by dead code removal
2289 to update the flags. */
2292 clear_special_calls (void)
2294 cfun->calls_alloca = false;
2295 cfun->calls_setjmp = false;
2298 /* Remove PHI nodes associated with basic block BB and all edges out of BB. */
2301 remove_phi_nodes_and_edges_for_unreachable_block (basic_block bb)
2303 /* Since this block is no longer reachable, we can just delete all
2304 of its PHI nodes. */
2305 remove_phi_nodes (bb);
2307 /* Remove edges to BB's successors. */
2308 while (EDGE_COUNT (bb->succs) > 0)
2309 remove_edge (EDGE_SUCC (bb, 0));
2313 /* Remove statements of basic block BB. */
2316 remove_bb (basic_block bb)
2318 gimple_stmt_iterator i;
2322 fprintf (dump_file, "Removing basic block %d\n", bb->index);
2323 if (dump_flags & TDF_DETAILS)
2325 dump_bb (dump_file, bb, 0, TDF_BLOCKS);
2326 fprintf (dump_file, "\n");
2332 class loop *loop = bb->loop_father;
2334 /* If a loop gets removed, clean up the information associated
2336 if (loop->latch == bb
2337 || loop->header == bb)
2338 free_numbers_of_iterations_estimates (loop);
2341 /* Remove all the instructions in the block. */
2342 if (bb_seq (bb) != NULL)
2344 /* Walk backwards so as to get a chance to substitute all
2345 released DEFs into debug stmts. See
2346 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
2348 for (i = gsi_last_bb (bb); !gsi_end_p (i);)
2350 gimple *stmt = gsi_stmt (i);
2351 glabel *label_stmt = dyn_cast <glabel *> (stmt);
2353 && (FORCED_LABEL (gimple_label_label (label_stmt))
2354 || DECL_NONLOCAL (gimple_label_label (label_stmt))))
2357 gimple_stmt_iterator new_gsi;
2359 /* A non-reachable non-local label may still be referenced.
2360 But it no longer needs to carry the extra semantics of
2362 if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
2364 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2365 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2368 new_bb = bb->prev_bb;
2369 /* Don't move any labels into ENTRY block. */
2370 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2372 new_bb = single_succ (new_bb);
2373 gcc_assert (new_bb != bb);
2375 if ((unsigned) bb->index < bb_to_omp_idx.length ()
2376 && ((unsigned) new_bb->index >= bb_to_omp_idx.length ()
2377 || (bb_to_omp_idx[bb->index]
2378 != bb_to_omp_idx[new_bb->index])))
2380 /* During cfg pass make sure to put orphaned labels
2381 into the right OMP region. */
2385 FOR_EACH_VEC_ELT (bb_to_omp_idx, i, idx)
2386 if (i >= NUM_FIXED_BLOCKS
2387 && idx == bb_to_omp_idx[bb->index]
2388 && i != (unsigned) bb->index)
2390 new_bb = BASIC_BLOCK_FOR_FN (cfun, i);
2395 new_bb = single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2396 gcc_assert (new_bb != bb);
2399 new_gsi = gsi_after_labels (new_bb);
2400 gsi_remove (&i, false);
2401 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2405 /* Release SSA definitions. */
2406 release_defs (stmt);
2407 gsi_remove (&i, true);
2411 i = gsi_last_bb (bb);
2417 if ((unsigned) bb->index < bb_to_omp_idx.length ())
2418 bb_to_omp_idx[bb->index] = -1;
2419 remove_phi_nodes_and_edges_for_unreachable_block (bb);
2420 bb->il.gimple.seq = NULL;
2421 bb->il.gimple.phi_nodes = NULL;
2425 /* Given a basic block BB and a value VAL for use in the final statement
2426 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2427 the edge that will be taken out of the block.
2428 If VAL is NULL_TREE, then the current value of the final statement's
2429 predicate or index is used.
2430 If the value does not match a unique edge, NULL is returned. */
2433 find_taken_edge (basic_block bb, tree val)
2437 stmt = last_stmt (bb);
2439 /* Handle ENTRY and EXIT. */
2443 if (gimple_code (stmt) == GIMPLE_COND)
2444 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2446 if (gimple_code (stmt) == GIMPLE_SWITCH)
2447 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2449 if (computed_goto_p (stmt))
2451 /* Only optimize if the argument is a label, if the argument is
2452 not a label then we cannot construct a proper CFG.
2454 It may be the case that we only need to allow the LABEL_REF to
2455 appear inside an ADDR_EXPR, but we also allow the LABEL_REF to
2456 appear inside a LABEL_EXPR just to be safe. */
2458 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2459 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2460 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2463 /* Otherwise we only know the taken successor edge if it's unique. */
2464 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2467 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2468 statement, determine which of the outgoing edges will be taken out of the
2469 block. Return NULL if either edge may be taken. */
2472 find_taken_edge_computed_goto (basic_block bb, tree val)
2477 dest = label_to_block (cfun, val);
2479 e = find_edge (bb, dest);
2481 /* It's possible for find_edge to return NULL here on invalid code
2482 that abuses the labels-as-values extension (e.g. code that attempts to
2483 jump *between* functions via stored labels-as-values; PR 84136).
2484 If so, then we simply return that NULL for the edge.
2485 We don't currently have a way of detecting such invalid code, so we
2486 can't assert that it was the case when a NULL edge occurs here. */
2491 /* Given COND_STMT and a constant value VAL for use as the predicate,
2492 determine which of the two edges will be taken out of
2493 the statement's block. Return NULL if either edge may be taken.
2494 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2498 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2500 edge true_edge, false_edge;
2502 if (val == NULL_TREE)
2504 /* Use the current value of the predicate. */
2505 if (gimple_cond_true_p (cond_stmt))
2506 val = integer_one_node;
2507 else if (gimple_cond_false_p (cond_stmt))
2508 val = integer_zero_node;
2512 else if (TREE_CODE (val) != INTEGER_CST)
2515 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2516 &true_edge, &false_edge);
2518 return (integer_zerop (val) ? false_edge : true_edge);
2521 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2522 which edge will be taken out of the statement's block. Return NULL if any
2524 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2528 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2530 basic_block dest_bb;
2534 if (gimple_switch_num_labels (switch_stmt) == 1)
2535 taken_case = gimple_switch_default_label (switch_stmt);
2538 if (val == NULL_TREE)
2539 val = gimple_switch_index (switch_stmt);
2540 if (TREE_CODE (val) != INTEGER_CST)
2543 taken_case = find_case_label_for_value (switch_stmt, val);
2545 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2547 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2553 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2554 We can make optimal use here of the fact that the case labels are
2555 sorted: We can do a binary search for a case matching VAL. */
2558 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2560 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2561 tree default_case = gimple_switch_default_label (switch_stmt);
2563 for (low = 0, high = n; high - low > 1; )
2565 size_t i = (high + low) / 2;
2566 tree t = gimple_switch_label (switch_stmt, i);
2569 /* Cache the result of comparing CASE_LOW and val. */
2570 cmp = tree_int_cst_compare (CASE_LOW (t), val);
2577 if (CASE_HIGH (t) == NULL)
2579 /* A singe-valued case label. */
2585 /* A case range. We can only handle integer ranges. */
2586 if (cmp <= 0 && tree_int_cst_compare (CASE_HIGH (t), val) >= 0)
2591 return default_case;
2595 /* Dump a basic block on stderr. */
2598 gimple_debug_bb (basic_block bb)
2600 dump_bb (stderr, bb, 0, TDF_VOPS|TDF_MEMSYMS|TDF_BLOCKS);
2604 /* Dump basic block with index N on stderr. */
2607 gimple_debug_bb_n (int n)
2609 gimple_debug_bb (BASIC_BLOCK_FOR_FN (cfun, n));
2610 return BASIC_BLOCK_FOR_FN (cfun, n);
2614 /* Dump the CFG on stderr.
2616 FLAGS are the same used by the tree dumping functions
2617 (see TDF_* in dumpfile.h). */
2620 gimple_debug_cfg (dump_flags_t flags)
2622 gimple_dump_cfg (stderr, flags);
2626 /* Dump the program showing basic block boundaries on the given FILE.
2628 FLAGS are the same used by the tree dumping functions (see TDF_* in
2632 gimple_dump_cfg (FILE *file, dump_flags_t flags)
2634 if (flags & TDF_DETAILS)
2636 dump_function_header (file, current_function_decl, flags);
2637 fprintf (file, ";; \n%d basic blocks, %d edges, last basic block %d.\n\n",
2638 n_basic_blocks_for_fn (cfun), n_edges_for_fn (cfun),
2639 last_basic_block_for_fn (cfun));
2641 brief_dump_cfg (file, flags);
2642 fprintf (file, "\n");
2645 if (flags & TDF_STATS)
2646 dump_cfg_stats (file);
2648 dump_function_to_file (current_function_decl, file, flags | TDF_BLOCKS);
2652 /* Dump CFG statistics on FILE. */
2655 dump_cfg_stats (FILE *file)
2657 static long max_num_merged_labels = 0;
2658 unsigned long size, total = 0;
2661 const char * const fmt_str = "%-30s%-13s%12s\n";
2662 const char * const fmt_str_1 = "%-30s%13d" PRsa (11) "\n";
2663 const char * const fmt_str_2 = "%-30s%13ld" PRsa (11) "\n";
2664 const char * const fmt_str_3 = "%-43s" PRsa (11) "\n";
2665 const char *funcname = current_function_name ();
2667 fprintf (file, "\nCFG Statistics for %s\n\n", funcname);
2669 fprintf (file, "---------------------------------------------------------\n");
2670 fprintf (file, fmt_str, "", " Number of ", "Memory");
2671 fprintf (file, fmt_str, "", " instances ", "used ");
2672 fprintf (file, "---------------------------------------------------------\n");
2674 size = n_basic_blocks_for_fn (cfun) * sizeof (struct basic_block_def);
2676 fprintf (file, fmt_str_1, "Basic blocks", n_basic_blocks_for_fn (cfun),
2677 SIZE_AMOUNT (size));
2680 FOR_EACH_BB_FN (bb, cfun)
2681 num_edges += EDGE_COUNT (bb->succs);
2682 size = num_edges * sizeof (class edge_def);
2684 fprintf (file, fmt_str_2, "Edges", num_edges, SIZE_AMOUNT (size));
2686 fprintf (file, "---------------------------------------------------------\n");
2687 fprintf (file, fmt_str_3, "Total memory used by CFG data",
2688 SIZE_AMOUNT (total));
2689 fprintf (file, "---------------------------------------------------------\n");
2690 fprintf (file, "\n");
2692 if (cfg_stats.num_merged_labels > max_num_merged_labels)
2693 max_num_merged_labels = cfg_stats.num_merged_labels;
2695 fprintf (file, "Coalesced label blocks: %ld (Max so far: %ld)\n",
2696 cfg_stats.num_merged_labels, max_num_merged_labels);
2698 fprintf (file, "\n");
2702 /* Dump CFG statistics on stderr. Keep extern so that it's always
2703 linked in the final executable. */
2706 debug_cfg_stats (void)
2708 dump_cfg_stats (stderr);
2711 /*---------------------------------------------------------------------------
2712 Miscellaneous helpers
2713 ---------------------------------------------------------------------------*/
2715 /* Return true if T, a GIMPLE_CALL, can make an abnormal transfer of control
2716 flow. Transfers of control flow associated with EH are excluded. */
2719 call_can_make_abnormal_goto (gimple *t)
2721 /* If the function has no non-local labels, then a call cannot make an
2722 abnormal transfer of control. */
2723 if (!cfun->has_nonlocal_label
2724 && !cfun->calls_setjmp)
2727 /* Likewise if the call has no side effects. */
2728 if (!gimple_has_side_effects (t))
2731 /* Likewise if the called function is leaf. */
2732 if (gimple_call_flags (t) & ECF_LEAF)
2739 /* Return true if T can make an abnormal transfer of control flow.
2740 Transfers of control flow associated with EH are excluded. */
2743 stmt_can_make_abnormal_goto (gimple *t)
2745 if (computed_goto_p (t))
2747 if (is_gimple_call (t))
2748 return call_can_make_abnormal_goto (t);
2753 /* Return true if T represents a stmt that always transfers control. */
2756 is_ctrl_stmt (gimple *t)
2758 switch (gimple_code (t))
2772 /* Return true if T is a statement that may alter the flow of control
2773 (e.g., a call to a non-returning function). */
2776 is_ctrl_altering_stmt (gimple *t)
2780 switch (gimple_code (t))
2783 /* Per stmt call flag indicates whether the call could alter
2785 if (gimple_call_ctrl_altering_p (t))
2789 case GIMPLE_EH_DISPATCH:
2790 /* EH_DISPATCH branches to the individual catch handlers at
2791 this level of a try or allowed-exceptions region. It can
2792 fallthru to the next statement as well. */
2796 if (gimple_asm_nlabels (as_a <gasm *> (t)) > 0)
2801 /* OpenMP directives alter control flow. */
2804 case GIMPLE_TRANSACTION:
2805 /* A transaction start alters control flow. */
2812 /* If a statement can throw, it alters control flow. */
2813 return stmt_can_throw_internal (cfun, t);
2817 /* Return true if T is a simple local goto. */
2820 simple_goto_p (gimple *t)
2822 return (gimple_code (t) == GIMPLE_GOTO
2823 && TREE_CODE (gimple_goto_dest (t)) == LABEL_DECL);
2827 /* Return true if STMT should start a new basic block. PREV_STMT is
2828 the statement preceding STMT. It is used when STMT is a label or a
2829 case label. Labels should only start a new basic block if their
2830 previous statement wasn't a label. Otherwise, sequence of labels
2831 would generate unnecessary basic blocks that only contain a single
2835 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2840 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2841 any nondebug stmts in the block. We don't want to start another
2842 block in this case: the debug stmt will already have started the
2843 one STMT would start if we weren't outputting debug stmts. */
2844 if (prev_stmt && is_gimple_debug (prev_stmt))
2847 /* Labels start a new basic block only if the preceding statement
2848 wasn't a label of the same type. This prevents the creation of
2849 consecutive blocks that have nothing but a single label. */
2850 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
2852 /* Nonlocal and computed GOTO targets always start a new block. */
2853 if (DECL_NONLOCAL (gimple_label_label (label_stmt))
2854 || FORCED_LABEL (gimple_label_label (label_stmt)))
2857 if (glabel *plabel = safe_dyn_cast <glabel *> (prev_stmt))
2859 if (DECL_NONLOCAL (gimple_label_label (plabel))
2860 || !DECL_ARTIFICIAL (gimple_label_label (plabel)))
2863 cfg_stats.num_merged_labels++;
2869 else if (gimple_code (stmt) == GIMPLE_CALL)
2871 if (gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
2872 /* setjmp acts similar to a nonlocal GOTO target and thus should
2873 start a new block. */
2875 if (gimple_call_internal_p (stmt, IFN_PHI)
2877 && gimple_code (prev_stmt) != GIMPLE_LABEL
2878 && (gimple_code (prev_stmt) != GIMPLE_CALL
2879 || ! gimple_call_internal_p (prev_stmt, IFN_PHI)))
2880 /* PHI nodes start a new block unless preceeded by a label
2889 /* Return true if T should end a basic block. */
2892 stmt_ends_bb_p (gimple *t)
2894 return is_ctrl_stmt (t) || is_ctrl_altering_stmt (t);
2897 /* Remove block annotations and other data structures. */
2900 delete_tree_cfg_annotations (struct function *fn)
2902 vec_free (label_to_block_map_for_fn (fn));
2905 /* Return the virtual phi in BB. */
2908 get_virtual_phi (basic_block bb)
2910 for (gphi_iterator gsi = gsi_start_phis (bb);
2914 gphi *phi = gsi.phi ();
2916 if (virtual_operand_p (PHI_RESULT (phi)))
2923 /* Return the first statement in basic block BB. */
2926 first_stmt (basic_block bb)
2928 gimple_stmt_iterator i = gsi_start_bb (bb);
2929 gimple *stmt = NULL;
2931 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2939 /* Return the first non-label statement in basic block BB. */
2942 first_non_label_stmt (basic_block bb)
2944 gimple_stmt_iterator i = gsi_start_bb (bb);
2945 while (!gsi_end_p (i) && gimple_code (gsi_stmt (i)) == GIMPLE_LABEL)
2947 return !gsi_end_p (i) ? gsi_stmt (i) : NULL;
2950 /* Return the last statement in basic block BB. */
2953 last_stmt (basic_block bb)
2955 gimple_stmt_iterator i = gsi_last_bb (bb);
2956 gimple *stmt = NULL;
2958 while (!gsi_end_p (i) && is_gimple_debug ((stmt = gsi_stmt (i))))
2966 /* Return the last statement of an otherwise empty block. Return NULL
2967 if the block is totally empty, or if it contains more than one
2971 last_and_only_stmt (basic_block bb)
2973 gimple_stmt_iterator i = gsi_last_nondebug_bb (bb);
2974 gimple *last, *prev;
2979 last = gsi_stmt (i);
2980 gsi_prev_nondebug (&i);
2984 /* Empty statements should no longer appear in the instruction stream.
2985 Everything that might have appeared before should be deleted by
2986 remove_useless_stmts, and the optimizers should just gsi_remove
2987 instead of smashing with build_empty_stmt.
2989 Thus the only thing that should appear here in a block containing
2990 one executable statement is a label. */
2991 prev = gsi_stmt (i);
2992 if (gimple_code (prev) == GIMPLE_LABEL)
2998 /* Returns the basic block after which the new basic block created
2999 by splitting edge EDGE_IN should be placed. Tries to keep the new block
3000 near its "logical" location. This is of most help to humans looking
3001 at debugging dumps. */
3004 split_edge_bb_loc (edge edge_in)
3006 basic_block dest = edge_in->dest;
3007 basic_block dest_prev = dest->prev_bb;
3011 edge e = find_edge (dest_prev, dest);
3012 if (e && !(e->flags & EDGE_COMPLEX))
3013 return edge_in->src;
3018 /* Split a (typically critical) edge EDGE_IN. Return the new block.
3019 Abort on abnormal edges. */
3022 gimple_split_edge (edge edge_in)
3024 basic_block new_bb, after_bb, dest;
3027 /* Abnormal edges cannot be split. */
3028 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
3030 dest = edge_in->dest;
3032 after_bb = split_edge_bb_loc (edge_in);
3034 new_bb = create_empty_bb (after_bb);
3035 new_bb->count = edge_in->count ();
3037 /* We want to avoid re-allocating PHIs when we first
3038 add the fallthru edge from new_bb to dest but we also
3039 want to avoid changing PHI argument order when
3040 first redirecting edge_in away from dest. The former
3041 avoids changing PHI argument order by adding them
3042 last and then the redirection swapping it back into
3043 place by means of unordered remove.
3044 So hack around things by temporarily removing all PHIs
3045 from the destination during the edge redirection and then
3046 making sure the edges stay in order. */
3047 gimple_seq saved_phis = phi_nodes (dest);
3048 unsigned old_dest_idx = edge_in->dest_idx;
3049 set_phi_nodes (dest, NULL);
3050 new_edge = make_single_succ_edge (new_bb, dest, EDGE_FALLTHRU);
3051 e = redirect_edge_and_branch (edge_in, new_bb);
3052 gcc_assert (e == edge_in && new_edge->dest_idx == old_dest_idx);
3053 /* set_phi_nodes sets the BB of the PHI nodes, so do it manually here. */
3054 dest->il.gimple.phi_nodes = saved_phis;
3060 /* Verify properties of the address expression T whose base should be
3061 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
3064 verify_address (tree t, bool verify_addressable)
3067 bool old_side_effects;
3069 bool new_side_effects;
3071 old_constant = TREE_CONSTANT (t);
3072 old_side_effects = TREE_SIDE_EFFECTS (t);
3074 recompute_tree_invariant_for_addr_expr (t);
3075 new_side_effects = TREE_SIDE_EFFECTS (t);
3076 new_constant = TREE_CONSTANT (t);
3078 if (old_constant != new_constant)
3080 error ("constant not recomputed when %<ADDR_EXPR%> changed");
3083 if (old_side_effects != new_side_effects)
3085 error ("side effects not recomputed when %<ADDR_EXPR%> changed");
3089 tree base = TREE_OPERAND (t, 0);
3090 while (handled_component_p (base))
3091 base = TREE_OPERAND (base, 0);
3094 || TREE_CODE (base) == PARM_DECL
3095 || TREE_CODE (base) == RESULT_DECL))
3098 if (verify_addressable && !TREE_ADDRESSABLE (base))
3100 error ("address taken but %<TREE_ADDRESSABLE%> bit not set");
3108 /* Verify if EXPR is a valid GIMPLE reference expression. If
3109 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3110 if there is an error, otherwise false. */
3113 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3115 const char *code_name = get_tree_code_name (TREE_CODE (expr));
3117 if (TREE_CODE (expr) == REALPART_EXPR
3118 || TREE_CODE (expr) == IMAGPART_EXPR
3119 || TREE_CODE (expr) == BIT_FIELD_REF)
3121 tree op = TREE_OPERAND (expr, 0);
3122 if (!is_gimple_reg_type (TREE_TYPE (expr)))
3124 error ("non-scalar %qs", code_name);
3128 if (TREE_CODE (expr) == BIT_FIELD_REF)
3130 tree t1 = TREE_OPERAND (expr, 1);
3131 tree t2 = TREE_OPERAND (expr, 2);
3132 poly_uint64 size, bitpos;
3133 if (!poly_int_tree_p (t1, &size)
3134 || !poly_int_tree_p (t2, &bitpos)
3135 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3136 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3138 error ("invalid position or size operand to %qs", code_name);
3141 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3142 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3144 error ("integral result type precision does not match "
3145 "field size of %qs", code_name);
3148 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3149 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3150 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3153 error ("mode size of non-integral result does not "
3154 "match field size of %qs",
3158 if (INTEGRAL_TYPE_P (TREE_TYPE (op))
3159 && !type_has_mode_precision_p (TREE_TYPE (op)))
3161 error ("%qs of non-mode-precision operand", code_name);
3164 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3165 && maybe_gt (size + bitpos,
3166 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3168 error ("position plus size exceeds size of referenced object in "
3174 if ((TREE_CODE (expr) == REALPART_EXPR
3175 || TREE_CODE (expr) == IMAGPART_EXPR)
3176 && !useless_type_conversion_p (TREE_TYPE (expr),
3177 TREE_TYPE (TREE_TYPE (op))))
3179 error ("type mismatch in %qs reference", code_name);
3180 debug_generic_stmt (TREE_TYPE (expr));
3181 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3187 while (handled_component_p (expr))
3189 code_name = get_tree_code_name (TREE_CODE (expr));
3191 if (TREE_CODE (expr) == REALPART_EXPR
3192 || TREE_CODE (expr) == IMAGPART_EXPR
3193 || TREE_CODE (expr) == BIT_FIELD_REF)
3195 error ("non-top-level %qs", code_name);
3199 tree op = TREE_OPERAND (expr, 0);
3201 if (TREE_CODE (expr) == ARRAY_REF
3202 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3204 if (!is_gimple_val (TREE_OPERAND (expr, 1))
3205 || (TREE_OPERAND (expr, 2)
3206 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3207 || (TREE_OPERAND (expr, 3)
3208 && !is_gimple_val (TREE_OPERAND (expr, 3))))
3210 error ("invalid operands to %qs", code_name);
3211 debug_generic_stmt (expr);
3216 /* Verify if the reference array element types are compatible. */
3217 if (TREE_CODE (expr) == ARRAY_REF
3218 && !useless_type_conversion_p (TREE_TYPE (expr),
3219 TREE_TYPE (TREE_TYPE (op))))
3221 error ("type mismatch in %qs", code_name);
3222 debug_generic_stmt (TREE_TYPE (expr));
3223 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3226 if (TREE_CODE (expr) == ARRAY_RANGE_REF
3227 && !useless_type_conversion_p (TREE_TYPE (TREE_TYPE (expr)),
3228 TREE_TYPE (TREE_TYPE (op))))
3230 error ("type mismatch in %qs", code_name);
3231 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3232 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3236 if (TREE_CODE (expr) == COMPONENT_REF)
3238 if (TREE_OPERAND (expr, 2)
3239 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3241 error ("invalid %qs offset operator", code_name);
3244 if (!useless_type_conversion_p (TREE_TYPE (expr),
3245 TREE_TYPE (TREE_OPERAND (expr, 1))))
3247 error ("type mismatch in %qs", code_name);
3248 debug_generic_stmt (TREE_TYPE (expr));
3249 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3254 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3256 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3257 that their operand is not an SSA name or an invariant when
3258 requiring an lvalue (this usually means there is a SRA or IPA-SRA
3259 bug). Otherwise there is nothing to verify, gross mismatches at
3260 most invoke undefined behavior. */
3262 && (TREE_CODE (op) == SSA_NAME
3263 || is_gimple_min_invariant (op)))
3265 error ("conversion of %qs on the left hand side of %qs",
3266 get_tree_code_name (TREE_CODE (op)), code_name);
3267 debug_generic_stmt (expr);
3270 else if (TREE_CODE (op) == SSA_NAME
3271 && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
3273 error ("conversion of register to a different size in %qs",
3275 debug_generic_stmt (expr);
3278 else if (!handled_component_p (op))
3285 code_name = get_tree_code_name (TREE_CODE (expr));
3287 if (TREE_CODE (expr) == MEM_REF)
3289 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3290 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3291 && verify_address (TREE_OPERAND (expr, 0), false)))
3293 error ("invalid address operand in %qs", code_name);
3294 debug_generic_stmt (expr);
3297 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3298 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3300 error ("invalid offset operand in %qs", code_name);
3301 debug_generic_stmt (expr);
3304 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3305 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3307 error ("invalid clique in %qs", code_name);
3308 debug_generic_stmt (expr);
3312 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3314 if (!TMR_BASE (expr)
3315 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3316 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3317 && verify_address (TMR_BASE (expr), false)))
3319 error ("invalid address operand in %qs", code_name);
3322 if (!TMR_OFFSET (expr)
3323 || !poly_int_tree_p (TMR_OFFSET (expr))
3324 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3326 error ("invalid offset operand in %qs", code_name);
3327 debug_generic_stmt (expr);
3330 if (MR_DEPENDENCE_CLIQUE (expr) != 0
3331 && MR_DEPENDENCE_CLIQUE (expr) > cfun->last_clique)
3333 error ("invalid clique in %qs", code_name);
3334 debug_generic_stmt (expr);
3338 else if (TREE_CODE (expr) == INDIRECT_REF)
3340 error ("%qs in gimple IL", code_name);
3341 debug_generic_stmt (expr);
3346 && (TREE_CODE (expr) == SSA_NAME || is_gimple_min_invariant (expr)))
3349 if (TREE_CODE (expr) != SSA_NAME && is_gimple_id (expr))
3352 if (TREE_CODE (expr) != TARGET_MEM_REF
3353 && TREE_CODE (expr) != MEM_REF)
3355 error ("invalid expression for min lvalue");
3362 /* Returns true if there is one pointer type in TYPE_POINTER_TO (SRC_OBJ)
3363 list of pointer-to types that is trivially convertible to DEST. */
3366 one_pointer_to_useless_type_conversion_p (tree dest, tree src_obj)
3370 if (!TYPE_POINTER_TO (src_obj))
3373 for (src = TYPE_POINTER_TO (src_obj); src; src = TYPE_NEXT_PTR_TO (src))
3374 if (useless_type_conversion_p (dest, src))
3380 /* Return true if TYPE1 is a fixed-point type and if conversions to and
3381 from TYPE2 can be handled by FIXED_CONVERT_EXPR. */
3384 valid_fixed_convert_types_p (tree type1, tree type2)
3386 return (FIXED_POINT_TYPE_P (type1)
3387 && (INTEGRAL_TYPE_P (type2)
3388 || SCALAR_FLOAT_TYPE_P (type2)
3389 || FIXED_POINT_TYPE_P (type2)));
3392 /* Verify the contents of a GIMPLE_CALL STMT. Returns true when there
3393 is a problem, otherwise false. */
3396 verify_gimple_call (gcall *stmt)
3398 tree fn = gimple_call_fn (stmt);
3399 tree fntype, fndecl;
3402 if (gimple_call_internal_p (stmt))
3406 error ("gimple call has two targets");
3407 debug_generic_stmt (fn);
3415 error ("gimple call has no target");
3420 if (fn && !is_gimple_call_addr (fn))
3422 error ("invalid function in gimple call");
3423 debug_generic_stmt (fn);
3428 && (!POINTER_TYPE_P (TREE_TYPE (fn))
3429 || (TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != FUNCTION_TYPE
3430 && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) != METHOD_TYPE)))
3432 error ("non-function in gimple call");
3436 fndecl = gimple_call_fndecl (stmt);
3438 && TREE_CODE (fndecl) == FUNCTION_DECL
3439 && DECL_LOOPING_CONST_OR_PURE_P (fndecl)
3440 && !DECL_PURE_P (fndecl)
3441 && !TREE_READONLY (fndecl))
3443 error ("invalid pure const state for function");
3447 tree lhs = gimple_call_lhs (stmt);
3449 && (!is_gimple_reg (lhs)
3450 && (!is_gimple_lvalue (lhs)
3451 || verify_types_in_gimple_reference
3452 (TREE_CODE (lhs) == WITH_SIZE_EXPR
3453 ? TREE_OPERAND (lhs, 0) : lhs, true))))
3455 error ("invalid LHS in gimple call");
3459 if (gimple_call_ctrl_altering_p (stmt)
3460 && gimple_call_noreturn_p (stmt)
3461 && should_remove_lhs_p (lhs))
3463 error ("LHS in %<noreturn%> call");
3467 fntype = gimple_call_fntype (stmt);
3470 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (fntype))
3471 /* ??? At least C++ misses conversions at assignments from
3472 void * call results.
3473 For now simply allow arbitrary pointer type conversions. */
3474 && !(POINTER_TYPE_P (TREE_TYPE (lhs))
3475 && POINTER_TYPE_P (TREE_TYPE (fntype))))
3477 error ("invalid conversion in gimple call");
3478 debug_generic_stmt (TREE_TYPE (lhs));
3479 debug_generic_stmt (TREE_TYPE (fntype));
3483 if (gimple_call_chain (stmt)
3484 && !is_gimple_val (gimple_call_chain (stmt)))
3486 error ("invalid static chain in gimple call");
3487 debug_generic_stmt (gimple_call_chain (stmt));
3491 /* If there is a static chain argument, the call should either be
3492 indirect, or the decl should have DECL_STATIC_CHAIN set. */
3493 if (gimple_call_chain (stmt)
3495 && !DECL_STATIC_CHAIN (fndecl))
3497 error ("static chain with function that doesn%'t use one");
3501 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3503 switch (DECL_FUNCTION_CODE (fndecl))
3505 case BUILT_IN_UNREACHABLE:
3507 if (gimple_call_num_args (stmt) > 0)
3509 /* Built-in unreachable with parameters might not be caught by
3510 undefined behavior sanitizer. Front-ends do check users do not
3511 call them that way but we also produce calls to
3512 __builtin_unreachable internally, for example when IPA figures
3513 out a call cannot happen in a legal program. In such cases,
3514 we must make sure arguments are stripped off. */
3515 error ("%<__builtin_unreachable%> or %<__builtin_trap%> call "
3525 /* For a call to .DEFERRED_INIT,
3526 LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL)
3527 we should guarantee that when the 1st argument is a constant, it should
3528 be the same as the size of the LHS. */
3530 if (gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
3532 tree size_of_arg0 = gimple_call_arg (stmt, 0);
3533 tree size_of_lhs = TYPE_SIZE_UNIT (TREE_TYPE (lhs));
3535 if (TREE_CODE (lhs) == SSA_NAME)
3536 lhs = SSA_NAME_VAR (lhs);
3538 poly_uint64 size_from_arg0, size_from_lhs;
3539 bool is_constant_size_arg0 = poly_int_tree_p (size_of_arg0,
3541 bool is_constant_size_lhs = poly_int_tree_p (size_of_lhs,
3543 if (is_constant_size_arg0 && is_constant_size_lhs)
3544 if (maybe_ne (size_from_arg0, size_from_lhs))
3546 error ("%<DEFERRED_INIT%> calls should have same "
3547 "constant size for the first argument and LHS");
3552 /* ??? The C frontend passes unpromoted arguments in case it
3553 didn't see a function declaration before the call. So for now
3554 leave the call arguments mostly unverified. Once we gimplify
3555 unit-at-a-time we have a chance to fix this. */
3556 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3558 tree arg = gimple_call_arg (stmt, i);
3559 if ((is_gimple_reg_type (TREE_TYPE (arg))
3560 && !is_gimple_val (arg))
3561 || (!is_gimple_reg_type (TREE_TYPE (arg))
3562 && !is_gimple_lvalue (arg)))
3564 error ("invalid argument to gimple call");
3565 debug_generic_expr (arg);
3568 if (!is_gimple_reg (arg))
3570 if (TREE_CODE (arg) == WITH_SIZE_EXPR)
3571 arg = TREE_OPERAND (arg, 0);
3572 if (verify_types_in_gimple_reference (arg, false))
3580 /* Verifies the gimple comparison with the result type TYPE and
3581 the operands OP0 and OP1, comparison code is CODE. */
3584 verify_gimple_comparison (tree type, tree op0, tree op1, enum tree_code code)
3586 tree op0_type = TREE_TYPE (op0);
3587 tree op1_type = TREE_TYPE (op1);
3589 if (!is_gimple_val (op0) || !is_gimple_val (op1))
3591 error ("invalid operands in gimple comparison");
3595 /* For comparisons we do not have the operations type as the
3596 effective type the comparison is carried out in. Instead
3597 we require that either the first operand is trivially
3598 convertible into the second, or the other way around. */
3599 if (!useless_type_conversion_p (op0_type, op1_type)
3600 && !useless_type_conversion_p (op1_type, op0_type))
3602 error ("mismatching comparison operand types");
3603 debug_generic_expr (op0_type);
3604 debug_generic_expr (op1_type);
3608 /* The resulting type of a comparison may be an effective boolean type. */
3609 if (INTEGRAL_TYPE_P (type)
3610 && (TREE_CODE (type) == BOOLEAN_TYPE
3611 || TYPE_PRECISION (type) == 1))
3613 if ((TREE_CODE (op0_type) == VECTOR_TYPE
3614 || TREE_CODE (op1_type) == VECTOR_TYPE)
3615 && code != EQ_EXPR && code != NE_EXPR
3616 && !VECTOR_BOOLEAN_TYPE_P (op0_type)
3617 && !VECTOR_INTEGER_TYPE_P (op0_type))
3619 error ("unsupported operation or type for vector comparison"
3620 " returning a boolean");
3621 debug_generic_expr (op0_type);
3622 debug_generic_expr (op1_type);
3626 /* Or a boolean vector type with the same element count
3627 as the comparison operand types. */
3628 else if (TREE_CODE (type) == VECTOR_TYPE
3629 && TREE_CODE (TREE_TYPE (type)) == BOOLEAN_TYPE)
3631 if (TREE_CODE (op0_type) != VECTOR_TYPE
3632 || TREE_CODE (op1_type) != VECTOR_TYPE)
3634 error ("non-vector operands in vector comparison");
3635 debug_generic_expr (op0_type);
3636 debug_generic_expr (op1_type);
3640 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3641 TYPE_VECTOR_SUBPARTS (op0_type)))
3643 error ("invalid vector comparison resulting type");
3644 debug_generic_expr (type);
3650 error ("bogus comparison result type");
3651 debug_generic_expr (type);
3658 /* Verify a gimple assignment statement STMT with an unary rhs.
3659 Returns true if anything is wrong. */
3662 verify_gimple_assign_unary (gassign *stmt)
3664 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3665 tree lhs = gimple_assign_lhs (stmt);
3666 tree lhs_type = TREE_TYPE (lhs);
3667 tree rhs1 = gimple_assign_rhs1 (stmt);
3668 tree rhs1_type = TREE_TYPE (rhs1);
3670 if (!is_gimple_reg (lhs))
3672 error ("non-register as LHS of unary operation");
3676 if (!is_gimple_val (rhs1))
3678 error ("invalid operand in unary operation");
3682 const char* const code_name = get_tree_code_name (rhs_code);
3684 /* First handle conversions. */
3689 /* Allow conversions between vectors with the same number of elements,
3690 provided that the conversion is OK for the element types too. */
3691 if (VECTOR_TYPE_P (lhs_type)
3692 && VECTOR_TYPE_P (rhs1_type)
3693 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
3694 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3696 lhs_type = TREE_TYPE (lhs_type);
3697 rhs1_type = TREE_TYPE (rhs1_type);
3699 else if (VECTOR_TYPE_P (lhs_type) || VECTOR_TYPE_P (rhs1_type))
3701 error ("invalid vector types in nop conversion");
3702 debug_generic_expr (lhs_type);
3703 debug_generic_expr (rhs1_type);
3707 /* Allow conversions from pointer type to integral type only if
3708 there is no sign or zero extension involved.
3709 For targets were the precision of ptrofftype doesn't match that
3710 of pointers we allow conversions to types where
3711 POINTERS_EXTEND_UNSIGNED specifies how that works. */
3712 if ((POINTER_TYPE_P (lhs_type)
3713 && INTEGRAL_TYPE_P (rhs1_type))
3714 || (POINTER_TYPE_P (rhs1_type)
3715 && INTEGRAL_TYPE_P (lhs_type)
3716 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3717 #if defined(POINTERS_EXTEND_UNSIGNED)
3718 || (TYPE_MODE (rhs1_type) == ptr_mode
3719 && (TYPE_PRECISION (lhs_type)
3720 == BITS_PER_WORD /* word_mode */
3721 || (TYPE_PRECISION (lhs_type)
3722 == GET_MODE_PRECISION (Pmode))))
3727 /* Allow conversion from integral to offset type and vice versa. */
3728 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3729 && INTEGRAL_TYPE_P (rhs1_type))
3730 || (INTEGRAL_TYPE_P (lhs_type)
3731 && TREE_CODE (rhs1_type) == OFFSET_TYPE))
3734 /* Otherwise assert we are converting between types of the
3736 if (INTEGRAL_TYPE_P (lhs_type) != INTEGRAL_TYPE_P (rhs1_type))
3738 error ("invalid types in nop conversion");
3739 debug_generic_expr (lhs_type);
3740 debug_generic_expr (rhs1_type);
3747 case ADDR_SPACE_CONVERT_EXPR:
3749 if (!POINTER_TYPE_P (rhs1_type) || !POINTER_TYPE_P (lhs_type)
3750 || (TYPE_ADDR_SPACE (TREE_TYPE (rhs1_type))
3751 == TYPE_ADDR_SPACE (TREE_TYPE (lhs_type))))
3753 error ("invalid types in address space conversion");
3754 debug_generic_expr (lhs_type);
3755 debug_generic_expr (rhs1_type);
3762 case FIXED_CONVERT_EXPR:
3764 if (!valid_fixed_convert_types_p (lhs_type, rhs1_type)
3765 && !valid_fixed_convert_types_p (rhs1_type, lhs_type))
3767 error ("invalid types in fixed-point conversion");
3768 debug_generic_expr (lhs_type);
3769 debug_generic_expr (rhs1_type);
3778 if ((!INTEGRAL_TYPE_P (rhs1_type) || !SCALAR_FLOAT_TYPE_P (lhs_type))
3779 && (!VECTOR_INTEGER_TYPE_P (rhs1_type)
3780 || !VECTOR_FLOAT_TYPE_P (lhs_type)))
3782 error ("invalid types in conversion to floating-point");
3783 debug_generic_expr (lhs_type);
3784 debug_generic_expr (rhs1_type);
3791 case FIX_TRUNC_EXPR:
3793 if ((!INTEGRAL_TYPE_P (lhs_type) || !SCALAR_FLOAT_TYPE_P (rhs1_type))
3794 && (!VECTOR_INTEGER_TYPE_P (lhs_type)
3795 || !VECTOR_FLOAT_TYPE_P (rhs1_type)))
3797 error ("invalid types in conversion to integer");
3798 debug_generic_expr (lhs_type);
3799 debug_generic_expr (rhs1_type);
3806 case VEC_UNPACK_HI_EXPR:
3807 case VEC_UNPACK_LO_EXPR:
3808 case VEC_UNPACK_FLOAT_HI_EXPR:
3809 case VEC_UNPACK_FLOAT_LO_EXPR:
3810 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3811 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3812 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3813 || TREE_CODE (lhs_type) != VECTOR_TYPE
3814 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3815 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3816 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3817 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3818 || ((rhs_code == VEC_UNPACK_HI_EXPR
3819 || rhs_code == VEC_UNPACK_LO_EXPR)
3820 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3821 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3822 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3823 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3824 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3825 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3826 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3827 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3828 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3829 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3830 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3831 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3832 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3833 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3834 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3835 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3837 error ("type mismatch in %qs expression", code_name);
3838 debug_generic_expr (lhs_type);
3839 debug_generic_expr (rhs1_type);
3850 /* Disallow pointer and offset types for many of the unary gimple. */
3851 if (POINTER_TYPE_P (lhs_type)
3852 || TREE_CODE (lhs_type) == OFFSET_TYPE)
3854 error ("invalid types for %qs", code_name);
3855 debug_generic_expr (lhs_type);
3856 debug_generic_expr (rhs1_type);
3862 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3863 || !TYPE_UNSIGNED (lhs_type)
3864 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3865 || TYPE_UNSIGNED (rhs1_type)
3866 || element_precision (lhs_type) != element_precision (rhs1_type))
3868 error ("invalid types for %qs", code_name);
3869 debug_generic_expr (lhs_type);
3870 debug_generic_expr (rhs1_type);
3875 case VEC_DUPLICATE_EXPR:
3876 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3877 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3879 error ("%qs should be from a scalar to a like vector", code_name);
3880 debug_generic_expr (lhs_type);
3881 debug_generic_expr (rhs1_type);
3890 /* For the remaining codes assert there is no conversion involved. */
3891 if (!useless_type_conversion_p (lhs_type, rhs1_type))
3893 error ("non-trivial conversion in unary operation");
3894 debug_generic_expr (lhs_type);
3895 debug_generic_expr (rhs1_type);
3902 /* Verify a gimple assignment statement STMT with a binary rhs.
3903 Returns true if anything is wrong. */
3906 verify_gimple_assign_binary (gassign *stmt)
3908 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
3909 tree lhs = gimple_assign_lhs (stmt);
3910 tree lhs_type = TREE_TYPE (lhs);
3911 tree rhs1 = gimple_assign_rhs1 (stmt);
3912 tree rhs1_type = TREE_TYPE (rhs1);
3913 tree rhs2 = gimple_assign_rhs2 (stmt);
3914 tree rhs2_type = TREE_TYPE (rhs2);
3916 if (!is_gimple_reg (lhs))
3918 error ("non-register as LHS of binary operation");
3922 if (!is_gimple_val (rhs1)
3923 || !is_gimple_val (rhs2))
3925 error ("invalid operands in binary operation");
3929 const char* const code_name = get_tree_code_name (rhs_code);
3931 /* First handle operations that involve different types. */
3936 if (TREE_CODE (lhs_type) != COMPLEX_TYPE
3937 || !(INTEGRAL_TYPE_P (rhs1_type)
3938 || SCALAR_FLOAT_TYPE_P (rhs1_type))
3939 || !(INTEGRAL_TYPE_P (rhs2_type)
3940 || SCALAR_FLOAT_TYPE_P (rhs2_type)))
3942 error ("type mismatch in %qs", code_name);
3943 debug_generic_expr (lhs_type);
3944 debug_generic_expr (rhs1_type);
3945 debug_generic_expr (rhs2_type);
3957 /* Shifts and rotates are ok on integral types, fixed point
3958 types and integer vector types. */
3959 if ((!INTEGRAL_TYPE_P (rhs1_type)
3960 && !FIXED_POINT_TYPE_P (rhs1_type)
3961 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3962 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3963 || (!INTEGRAL_TYPE_P (rhs2_type)
3964 /* Vector shifts of vectors are also ok. */
3965 && !(TREE_CODE (rhs1_type) == VECTOR_TYPE
3966 && INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3967 && TREE_CODE (rhs2_type) == VECTOR_TYPE
3968 && INTEGRAL_TYPE_P (TREE_TYPE (rhs2_type))))
3969 || !useless_type_conversion_p (lhs_type, rhs1_type))
3971 error ("type mismatch in %qs", code_name);
3972 debug_generic_expr (lhs_type);
3973 debug_generic_expr (rhs1_type);
3974 debug_generic_expr (rhs2_type);
3981 case WIDEN_LSHIFT_EXPR:
3983 if (!INTEGRAL_TYPE_P (lhs_type)
3984 || !INTEGRAL_TYPE_P (rhs1_type)
3985 || TREE_CODE (rhs2) != INTEGER_CST
3986 || (2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)))
3988 error ("type mismatch in %qs", code_name);
3989 debug_generic_expr (lhs_type);
3990 debug_generic_expr (rhs1_type);
3991 debug_generic_expr (rhs2_type);
3998 case VEC_WIDEN_LSHIFT_HI_EXPR:
3999 case VEC_WIDEN_LSHIFT_LO_EXPR:
4001 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4002 || TREE_CODE (lhs_type) != VECTOR_TYPE
4003 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4004 || !INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
4005 || TREE_CODE (rhs2) != INTEGER_CST
4006 || (2 * TYPE_PRECISION (TREE_TYPE (rhs1_type))
4007 > TYPE_PRECISION (TREE_TYPE (lhs_type))))
4009 error ("type mismatch in %qs", code_name);
4010 debug_generic_expr (lhs_type);
4011 debug_generic_expr (rhs1_type);
4012 debug_generic_expr (rhs2_type);
4019 case WIDEN_PLUS_EXPR:
4020 case WIDEN_MINUS_EXPR:
4024 tree lhs_etype = lhs_type;
4025 tree rhs1_etype = rhs1_type;
4026 tree rhs2_etype = rhs2_type;
4027 if (TREE_CODE (lhs_type) == VECTOR_TYPE)
4029 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4030 || TREE_CODE (rhs2_type) != VECTOR_TYPE)
4032 error ("invalid non-vector operands to %qs", code_name);
4035 lhs_etype = TREE_TYPE (lhs_type);
4036 rhs1_etype = TREE_TYPE (rhs1_type);
4037 rhs2_etype = TREE_TYPE (rhs2_type);
4039 if (POINTER_TYPE_P (lhs_etype)
4040 || POINTER_TYPE_P (rhs1_etype)
4041 || POINTER_TYPE_P (rhs2_etype))
4043 error ("invalid (pointer) operands %qs", code_name);
4047 /* Continue with generic binary expression handling. */
4051 case POINTER_PLUS_EXPR:
4053 if (!POINTER_TYPE_P (rhs1_type)
4054 || !useless_type_conversion_p (lhs_type, rhs1_type)
4055 || !ptrofftype_p (rhs2_type))
4057 error ("type mismatch in %qs", code_name);
4058 debug_generic_stmt (lhs_type);
4059 debug_generic_stmt (rhs1_type);
4060 debug_generic_stmt (rhs2_type);
4067 case POINTER_DIFF_EXPR:
4069 if (!POINTER_TYPE_P (rhs1_type)
4070 || !POINTER_TYPE_P (rhs2_type)
4071 /* Because we special-case pointers to void we allow difference
4072 of arbitrary pointers with the same mode. */
4073 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
4074 || !INTEGRAL_TYPE_P (lhs_type)
4075 || TYPE_UNSIGNED (lhs_type)
4076 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
4078 error ("type mismatch in %qs", code_name);
4079 debug_generic_stmt (lhs_type);
4080 debug_generic_stmt (rhs1_type);
4081 debug_generic_stmt (rhs2_type);
4088 case TRUTH_ANDIF_EXPR:
4089 case TRUTH_ORIF_EXPR:
4090 case TRUTH_AND_EXPR:
4092 case TRUTH_XOR_EXPR:
4102 case UNORDERED_EXPR:
4110 /* Comparisons are also binary, but the result type is not
4111 connected to the operand types. */
4112 return verify_gimple_comparison (lhs_type, rhs1, rhs2, rhs_code);
4114 case WIDEN_MULT_EXPR:
4115 if (TREE_CODE (lhs_type) != INTEGER_TYPE)
4117 return ((2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type))
4118 || (TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type)));
4120 case WIDEN_SUM_EXPR:
4122 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4123 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4124 && ((!INTEGRAL_TYPE_P (rhs1_type)
4125 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4126 || (!INTEGRAL_TYPE_P (lhs_type)
4127 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4128 || !useless_type_conversion_p (lhs_type, rhs2_type)
4129 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
4130 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4132 error ("type mismatch in %qs", code_name);
4133 debug_generic_expr (lhs_type);
4134 debug_generic_expr (rhs1_type);
4135 debug_generic_expr (rhs2_type);
4141 case VEC_WIDEN_MINUS_HI_EXPR:
4142 case VEC_WIDEN_MINUS_LO_EXPR:
4143 case VEC_WIDEN_PLUS_HI_EXPR:
4144 case VEC_WIDEN_PLUS_LO_EXPR:
4145 case VEC_WIDEN_MULT_HI_EXPR:
4146 case VEC_WIDEN_MULT_LO_EXPR:
4147 case VEC_WIDEN_MULT_EVEN_EXPR:
4148 case VEC_WIDEN_MULT_ODD_EXPR:
4150 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4151 || TREE_CODE (lhs_type) != VECTOR_TYPE
4152 || !types_compatible_p (rhs1_type, rhs2_type)
4153 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4154 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4156 error ("type mismatch in %qs", code_name);
4157 debug_generic_expr (lhs_type);
4158 debug_generic_expr (rhs1_type);
4159 debug_generic_expr (rhs2_type);
4165 case VEC_PACK_TRUNC_EXPR:
4166 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4167 vector boolean types. */
4168 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4169 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4170 && types_compatible_p (rhs1_type, rhs2_type)
4171 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4172 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4176 case VEC_PACK_SAT_EXPR:
4177 case VEC_PACK_FIX_TRUNC_EXPR:
4179 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4180 || TREE_CODE (lhs_type) != VECTOR_TYPE
4181 || !((rhs_code == VEC_PACK_FIX_TRUNC_EXPR
4182 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4183 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4184 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4185 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4186 || !types_compatible_p (rhs1_type, rhs2_type)
4187 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4188 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4189 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4190 TYPE_VECTOR_SUBPARTS (lhs_type)))
4192 error ("type mismatch in %qs", code_name);
4193 debug_generic_expr (lhs_type);
4194 debug_generic_expr (rhs1_type);
4195 debug_generic_expr (rhs2_type);
4202 case VEC_PACK_FLOAT_EXPR:
4203 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4204 || TREE_CODE (lhs_type) != VECTOR_TYPE
4205 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4206 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4207 || !types_compatible_p (rhs1_type, rhs2_type)
4208 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4209 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4210 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4211 TYPE_VECTOR_SUBPARTS (lhs_type)))
4213 error ("type mismatch in %qs", code_name);
4214 debug_generic_expr (lhs_type);
4215 debug_generic_expr (rhs1_type);
4216 debug_generic_expr (rhs2_type);
4223 case MULT_HIGHPART_EXPR:
4224 case TRUNC_DIV_EXPR:
4226 case FLOOR_DIV_EXPR:
4227 case ROUND_DIV_EXPR:
4228 case TRUNC_MOD_EXPR:
4230 case FLOOR_MOD_EXPR:
4231 case ROUND_MOD_EXPR:
4233 case EXACT_DIV_EXPR:
4236 /* Disallow pointer and offset types for many of the binary gimple. */
4237 if (POINTER_TYPE_P (lhs_type)
4238 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4240 error ("invalid types for %qs", code_name);
4241 debug_generic_expr (lhs_type);
4242 debug_generic_expr (rhs1_type);
4243 debug_generic_expr (rhs2_type);
4246 /* Continue with generic binary expression handling. */
4251 /* Continue with generic binary expression handling. */
4255 if (POINTER_TYPE_P (lhs_type)
4256 && TREE_CODE (rhs2) == INTEGER_CST)
4258 /* Disallow pointer and offset types for many of the binary gimple. */
4259 if (POINTER_TYPE_P (lhs_type)
4260 || TREE_CODE (lhs_type) == OFFSET_TYPE)
4262 error ("invalid types for %qs", code_name);
4263 debug_generic_expr (lhs_type);
4264 debug_generic_expr (rhs1_type);
4265 debug_generic_expr (rhs2_type);
4268 /* Continue with generic binary expression handling. */
4271 case VEC_SERIES_EXPR:
4272 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4274 error ("type mismatch in %qs", code_name);
4275 debug_generic_expr (rhs1_type);
4276 debug_generic_expr (rhs2_type);
4279 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4280 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4282 error ("vector type expected in %qs", code_name);
4283 debug_generic_expr (lhs_type);
4292 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4293 || !useless_type_conversion_p (lhs_type, rhs2_type))
4295 error ("type mismatch in binary expression");
4296 debug_generic_stmt (lhs_type);
4297 debug_generic_stmt (rhs1_type);
4298 debug_generic_stmt (rhs2_type);
4305 /* Verify a gimple assignment statement STMT with a ternary rhs.
4306 Returns true if anything is wrong. */
4309 verify_gimple_assign_ternary (gassign *stmt)
4311 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4312 tree lhs = gimple_assign_lhs (stmt);
4313 tree lhs_type = TREE_TYPE (lhs);
4314 tree rhs1 = gimple_assign_rhs1 (stmt);
4315 tree rhs1_type = TREE_TYPE (rhs1);
4316 tree rhs2 = gimple_assign_rhs2 (stmt);
4317 tree rhs2_type = TREE_TYPE (rhs2);
4318 tree rhs3 = gimple_assign_rhs3 (stmt);
4319 tree rhs3_type = TREE_TYPE (rhs3);
4321 if (!is_gimple_reg (lhs))
4323 error ("non-register as LHS of ternary operation");
4327 if (!is_gimple_val (rhs1)
4328 || !is_gimple_val (rhs2)
4329 || !is_gimple_val (rhs3))
4331 error ("invalid operands in ternary operation");
4335 const char* const code_name = get_tree_code_name (rhs_code);
4337 /* First handle operations that involve different types. */
4340 case WIDEN_MULT_PLUS_EXPR:
4341 case WIDEN_MULT_MINUS_EXPR:
4342 if ((!INTEGRAL_TYPE_P (rhs1_type)
4343 && !FIXED_POINT_TYPE_P (rhs1_type))
4344 || !useless_type_conversion_p (rhs1_type, rhs2_type)
4345 || !useless_type_conversion_p (lhs_type, rhs3_type)
4346 || 2 * TYPE_PRECISION (rhs1_type) > TYPE_PRECISION (lhs_type)
4347 || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
4349 error ("type mismatch in %qs", code_name);
4350 debug_generic_expr (lhs_type);
4351 debug_generic_expr (rhs1_type);
4352 debug_generic_expr (rhs2_type);
4353 debug_generic_expr (rhs3_type);
4359 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4360 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4361 TYPE_VECTOR_SUBPARTS (lhs_type)))
4363 error ("the first argument of a %qs must be of a "
4364 "boolean vector type of the same number of elements "
4365 "as the result", code_name);
4366 debug_generic_expr (lhs_type);
4367 debug_generic_expr (rhs1_type);
4372 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4373 || !useless_type_conversion_p (lhs_type, rhs3_type))
4375 error ("type mismatch in %qs", code_name);
4376 debug_generic_expr (lhs_type);
4377 debug_generic_expr (rhs2_type);
4378 debug_generic_expr (rhs3_type);
4384 /* If permute is constant, then we allow for lhs and rhs
4385 to have different vector types, provided:
4386 (1) lhs, rhs1, rhs2 have same element type.
4387 (2) rhs3 vector is constant and has integer element type.
4388 (3) len(lhs) == len(rhs3) && len(rhs1) == len(rhs2). */
4390 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4391 || TREE_CODE (rhs1_type) != VECTOR_TYPE
4392 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4393 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4395 error ("vector types expected in %qs", code_name);
4396 debug_generic_expr (lhs_type);
4397 debug_generic_expr (rhs1_type);
4398 debug_generic_expr (rhs2_type);
4399 debug_generic_expr (rhs3_type);
4403 /* If rhs3 is constant, we allow lhs, rhs1 and rhs2 to be different vector types,
4404 as long as lhs, rhs1 and rhs2 have same element type. */
4405 if (TREE_CONSTANT (rhs3)
4406 ? (!useless_type_conversion_p (TREE_TYPE (lhs_type), TREE_TYPE (rhs1_type))
4407 || !useless_type_conversion_p (TREE_TYPE (lhs_type), TREE_TYPE (rhs2_type)))
4408 : (!useless_type_conversion_p (lhs_type, rhs1_type)
4409 || !useless_type_conversion_p (lhs_type, rhs2_type)))
4411 error ("type mismatch in %qs", code_name);
4412 debug_generic_expr (lhs_type);
4413 debug_generic_expr (rhs1_type);
4414 debug_generic_expr (rhs2_type);
4415 debug_generic_expr (rhs3_type);
4419 /* If rhs3 is constant, relax the check len(rhs2) == len(rhs3). */
4420 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4421 TYPE_VECTOR_SUBPARTS (rhs2_type))
4422 || (!TREE_CONSTANT(rhs3)
4423 && maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4424 TYPE_VECTOR_SUBPARTS (rhs3_type)))
4425 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4426 TYPE_VECTOR_SUBPARTS (lhs_type)))
4428 error ("vectors with different element number found in %qs",
4430 debug_generic_expr (lhs_type);
4431 debug_generic_expr (rhs1_type);
4432 debug_generic_expr (rhs2_type);
4433 debug_generic_expr (rhs3_type);
4437 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4438 || (TREE_CODE (rhs3) != VECTOR_CST
4439 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4440 (TREE_TYPE (rhs3_type)))
4441 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4442 (TREE_TYPE (rhs1_type))))))
4444 error ("invalid mask type in %qs", code_name);
4445 debug_generic_expr (lhs_type);
4446 debug_generic_expr (rhs1_type);
4447 debug_generic_expr (rhs2_type);
4448 debug_generic_expr (rhs3_type);
4455 if (!useless_type_conversion_p (rhs1_type, rhs2_type)
4456 || !useless_type_conversion_p (lhs_type, rhs3_type)
4457 || 2 * GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (rhs1_type)))
4458 > GET_MODE_UNIT_BITSIZE (TYPE_MODE (TREE_TYPE (lhs_type))))
4460 error ("type mismatch in %qs", code_name);
4461 debug_generic_expr (lhs_type);
4462 debug_generic_expr (rhs1_type);
4463 debug_generic_expr (rhs2_type);
4464 debug_generic_expr (rhs3_type);
4468 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4469 || TREE_CODE (rhs2_type) != VECTOR_TYPE
4470 || TREE_CODE (rhs3_type) != VECTOR_TYPE)
4472 error ("vector types expected in %qs", code_name);
4473 debug_generic_expr (lhs_type);
4474 debug_generic_expr (rhs1_type);
4475 debug_generic_expr (rhs2_type);
4476 debug_generic_expr (rhs3_type);
4482 case BIT_INSERT_EXPR:
4483 if (! useless_type_conversion_p (lhs_type, rhs1_type))
4485 error ("type mismatch in %qs", code_name);
4486 debug_generic_expr (lhs_type);
4487 debug_generic_expr (rhs1_type);
4490 if (! ((INTEGRAL_TYPE_P (rhs1_type)
4491 && INTEGRAL_TYPE_P (rhs2_type))
4492 /* Vector element insert. */
4493 || (VECTOR_TYPE_P (rhs1_type)
4494 && types_compatible_p (TREE_TYPE (rhs1_type), rhs2_type))
4495 /* Aligned sub-vector insert. */
4496 || (VECTOR_TYPE_P (rhs1_type)
4497 && VECTOR_TYPE_P (rhs2_type)
4498 && types_compatible_p (TREE_TYPE (rhs1_type),
4499 TREE_TYPE (rhs2_type))
4500 && multiple_p (TYPE_VECTOR_SUBPARTS (rhs1_type),
4501 TYPE_VECTOR_SUBPARTS (rhs2_type))
4502 && multiple_p (wi::to_poly_offset (rhs3),
4503 wi::to_poly_offset (TYPE_SIZE (rhs2_type))))))
4505 error ("not allowed type combination in %qs", code_name);
4506 debug_generic_expr (rhs1_type);
4507 debug_generic_expr (rhs2_type);
4510 if (! tree_fits_uhwi_p (rhs3)
4511 || ! types_compatible_p (bitsizetype, TREE_TYPE (rhs3))
4512 || ! tree_fits_uhwi_p (TYPE_SIZE (rhs2_type)))
4514 error ("invalid position or size in %qs", code_name);
4517 if (INTEGRAL_TYPE_P (rhs1_type)
4518 && !type_has_mode_precision_p (rhs1_type))
4520 error ("%qs into non-mode-precision operand", code_name);
4523 if (INTEGRAL_TYPE_P (rhs1_type))
4525 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4526 if (bitpos >= TYPE_PRECISION (rhs1_type)
4527 || (bitpos + TYPE_PRECISION (rhs2_type)
4528 > TYPE_PRECISION (rhs1_type)))
4530 error ("insertion out of range in %qs", code_name);
4534 else if (VECTOR_TYPE_P (rhs1_type))
4536 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (rhs3);
4537 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (TYPE_SIZE (rhs2_type));
4538 if (bitpos % bitsize != 0)
4540 error ("%qs not at element boundary", code_name);
4548 if (((TREE_CODE (rhs1_type) != VECTOR_TYPE
4549 || TREE_CODE (lhs_type) != VECTOR_TYPE)
4550 && ((!INTEGRAL_TYPE_P (rhs1_type)
4551 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4552 || (!INTEGRAL_TYPE_P (lhs_type)
4553 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4554 /* rhs1_type and rhs2_type may differ in sign. */
4555 || !tree_nop_conversion_p (rhs1_type, rhs2_type)
4556 || !useless_type_conversion_p (lhs_type, rhs3_type)
4557 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4558 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4560 error ("type mismatch in %qs", code_name);
4561 debug_generic_expr (lhs_type);
4562 debug_generic_expr (rhs1_type);
4563 debug_generic_expr (rhs2_type);
4569 case REALIGN_LOAD_EXPR:
4579 /* Verify a gimple assignment statement STMT with a single rhs.
4580 Returns true if anything is wrong. */
4583 verify_gimple_assign_single (gassign *stmt)
4585 enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
4586 tree lhs = gimple_assign_lhs (stmt);
4587 tree lhs_type = TREE_TYPE (lhs);
4588 tree rhs1 = gimple_assign_rhs1 (stmt);
4589 tree rhs1_type = TREE_TYPE (rhs1);
4592 const char* const code_name = get_tree_code_name (rhs_code);
4594 if (!useless_type_conversion_p (lhs_type, rhs1_type))
4596 error ("non-trivial conversion in %qs", code_name);
4597 debug_generic_expr (lhs_type);
4598 debug_generic_expr (rhs1_type);
4602 if (gimple_clobber_p (stmt)
4603 && !(DECL_P (lhs) || TREE_CODE (lhs) == MEM_REF))
4605 error ("%qs LHS in clobber statement",
4606 get_tree_code_name (TREE_CODE (lhs)));
4607 debug_generic_expr (lhs);
4611 if (TREE_CODE (lhs) == WITH_SIZE_EXPR)
4613 error ("%qs LHS in assignment statement",
4614 get_tree_code_name (TREE_CODE (lhs)));
4615 debug_generic_expr (lhs);
4619 if (handled_component_p (lhs)
4620 || TREE_CODE (lhs) == MEM_REF
4621 || TREE_CODE (lhs) == TARGET_MEM_REF)
4622 res |= verify_types_in_gimple_reference (lhs, true);
4624 /* Special codes we cannot handle via their class. */
4629 tree op = TREE_OPERAND (rhs1, 0);
4630 if (!is_gimple_addressable (op))
4632 error ("invalid operand in %qs", code_name);
4636 /* Technically there is no longer a need for matching types, but
4637 gimple hygiene asks for this check. In LTO we can end up
4638 combining incompatible units and thus end up with addresses
4639 of globals that change their type to a common one. */
4641 && !types_compatible_p (TREE_TYPE (op),
4642 TREE_TYPE (TREE_TYPE (rhs1)))
4643 && !one_pointer_to_useless_type_conversion_p (TREE_TYPE (rhs1),
4646 error ("type mismatch in %qs", code_name);
4647 debug_generic_stmt (TREE_TYPE (rhs1));
4648 debug_generic_stmt (TREE_TYPE (op));
4652 return (verify_address (rhs1, true)
4653 || verify_types_in_gimple_reference (op, true));
4658 error ("%qs in gimple IL", code_name);
4664 case ARRAY_RANGE_REF:
4665 case VIEW_CONVERT_EXPR:
4668 case TARGET_MEM_REF:
4670 if (!is_gimple_reg (lhs)
4671 && is_gimple_reg_type (TREE_TYPE (lhs)))
4673 error ("invalid RHS for gimple memory store: %qs", code_name);
4674 debug_generic_stmt (lhs);
4675 debug_generic_stmt (rhs1);
4678 return res || verify_types_in_gimple_reference (rhs1, false);
4690 /* tcc_declaration */
4695 if (!is_gimple_reg (lhs)
4696 && !is_gimple_reg (rhs1)
4697 && is_gimple_reg_type (TREE_TYPE (lhs)))
4699 error ("invalid RHS for gimple memory store: %qs", code_name);
4700 debug_generic_stmt (lhs);
4701 debug_generic_stmt (rhs1);
4707 if (TREE_CODE (rhs1_type) == VECTOR_TYPE)
4710 tree elt_i, elt_v, elt_t = NULL_TREE;
4712 if (CONSTRUCTOR_NELTS (rhs1) == 0)
4714 /* For vector CONSTRUCTORs we require that either it is empty
4715 CONSTRUCTOR, or it is a CONSTRUCTOR of smaller vector elements
4716 (then the element count must be correct to cover the whole
4717 outer vector and index must be NULL on all elements, or it is
4718 a CONSTRUCTOR of scalar elements, where we as an exception allow
4719 smaller number of elements (assuming zero filling) and
4720 consecutive indexes as compared to NULL indexes (such
4721 CONSTRUCTORs can appear in the IL from FEs). */
4722 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs1), i, elt_i, elt_v)
4724 if (elt_t == NULL_TREE)
4726 elt_t = TREE_TYPE (elt_v);
4727 if (TREE_CODE (elt_t) == VECTOR_TYPE)
4729 tree elt_t = TREE_TYPE (elt_v);
4730 if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4733 error ("incorrect type of vector %qs elements",
4735 debug_generic_stmt (rhs1);
4738 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4739 * TYPE_VECTOR_SUBPARTS (elt_t),
4740 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4742 error ("incorrect number of vector %qs elements",
4744 debug_generic_stmt (rhs1);
4748 else if (!useless_type_conversion_p (TREE_TYPE (rhs1_type),
4751 error ("incorrect type of vector %qs elements",
4753 debug_generic_stmt (rhs1);
4756 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4757 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4759 error ("incorrect number of vector %qs elements",
4761 debug_generic_stmt (rhs1);
4765 else if (!useless_type_conversion_p (elt_t, TREE_TYPE (elt_v)))
4767 error ("incorrect type of vector CONSTRUCTOR elements");
4768 debug_generic_stmt (rhs1);
4771 if (elt_i != NULL_TREE
4772 && (TREE_CODE (elt_t) == VECTOR_TYPE
4773 || TREE_CODE (elt_i) != INTEGER_CST
4774 || compare_tree_int (elt_i, i) != 0))
4776 error ("vector %qs with non-NULL element index",
4778 debug_generic_stmt (rhs1);
4781 if (!is_gimple_val (elt_v))
4783 error ("vector %qs element is not a GIMPLE value",
4785 debug_generic_stmt (rhs1);
4790 else if (CONSTRUCTOR_NELTS (rhs1) != 0)
4792 error ("non-vector %qs with elements", code_name);
4793 debug_generic_stmt (rhs1);
4800 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4801 if (rhs1 == boolean_false_node)
4803 error ("%qs with an always-false condition", code_name);
4804 debug_generic_stmt (rhs1);
4809 case WITH_SIZE_EXPR:
4810 error ("%qs RHS in assignment statement",
4811 get_tree_code_name (rhs_code));
4812 debug_generic_expr (rhs1);
4825 /* Verify the contents of a GIMPLE_ASSIGN STMT. Returns true when there
4826 is a problem, otherwise false. */
4829 verify_gimple_assign (gassign *stmt)
4831 switch (gimple_assign_rhs_class (stmt))
4833 case GIMPLE_SINGLE_RHS:
4834 return verify_gimple_assign_single (stmt);
4836 case GIMPLE_UNARY_RHS:
4837 return verify_gimple_assign_unary (stmt);
4839 case GIMPLE_BINARY_RHS:
4840 return verify_gimple_assign_binary (stmt);
4842 case GIMPLE_TERNARY_RHS:
4843 return verify_gimple_assign_ternary (stmt);
4850 /* Verify the contents of a GIMPLE_RETURN STMT. Returns true when there
4851 is a problem, otherwise false. */
4854 verify_gimple_return (greturn *stmt)
4856 tree op = gimple_return_retval (stmt);
4857 tree restype = TREE_TYPE (TREE_TYPE (cfun->decl));
4859 /* We cannot test for present return values as we do not fix up missing
4860 return values from the original source. */
4864 if (!is_gimple_val (op)
4865 && TREE_CODE (op) != RESULT_DECL)
4867 error ("invalid operand in return statement");
4868 debug_generic_stmt (op);
4872 if ((TREE_CODE (op) == RESULT_DECL
4873 && DECL_BY_REFERENCE (op))
4874 || (TREE_CODE (op) == SSA_NAME
4875 && SSA_NAME_VAR (op)
4876 && TREE_CODE (SSA_NAME_VAR (op)) == RESULT_DECL
4877 && DECL_BY_REFERENCE (SSA_NAME_VAR (op))))
4878 op = TREE_TYPE (op);
4880 if (!useless_type_conversion_p (restype, TREE_TYPE (op)))
4882 error ("invalid conversion in return statement");
4883 debug_generic_stmt (restype);
4884 debug_generic_stmt (TREE_TYPE (op));
4892 /* Verify the contents of a GIMPLE_GOTO STMT. Returns true when there
4893 is a problem, otherwise false. */
4896 verify_gimple_goto (ggoto *stmt)
4898 tree dest = gimple_goto_dest (stmt);
4900 /* ??? We have two canonical forms of direct goto destinations, a
4901 bare LABEL_DECL and an ADDR_EXPR of a LABEL_DECL. */
4902 if (TREE_CODE (dest) != LABEL_DECL
4903 && (!is_gimple_val (dest)
4904 || !POINTER_TYPE_P (TREE_TYPE (dest))))
4906 error ("goto destination is neither a label nor a pointer");
4913 /* Verify the contents of a GIMPLE_SWITCH STMT. Returns true when there
4914 is a problem, otherwise false. */
4917 verify_gimple_switch (gswitch *stmt)
4920 tree elt, prev_upper_bound = NULL_TREE;
4921 tree index_type, elt_type = NULL_TREE;
4923 if (!is_gimple_val (gimple_switch_index (stmt)))
4925 error ("invalid operand to switch statement");
4926 debug_generic_stmt (gimple_switch_index (stmt));
4930 index_type = TREE_TYPE (gimple_switch_index (stmt));
4931 if (! INTEGRAL_TYPE_P (index_type))
4933 error ("non-integral type switch statement");
4934 debug_generic_expr (index_type);
4938 elt = gimple_switch_label (stmt, 0);
4939 if (CASE_LOW (elt) != NULL_TREE
4940 || CASE_HIGH (elt) != NULL_TREE
4941 || CASE_CHAIN (elt) != NULL_TREE)
4943 error ("invalid default case label in switch statement");
4944 debug_generic_expr (elt);
4948 n = gimple_switch_num_labels (stmt);
4949 for (i = 1; i < n; i++)
4951 elt = gimple_switch_label (stmt, i);
4953 if (CASE_CHAIN (elt))
4955 error ("invalid %<CASE_CHAIN%>");
4956 debug_generic_expr (elt);
4959 if (! CASE_LOW (elt))
4961 error ("invalid case label in switch statement");
4962 debug_generic_expr (elt);
4966 && ! tree_int_cst_lt (CASE_LOW (elt), CASE_HIGH (elt)))
4968 error ("invalid case range in switch statement");
4969 debug_generic_expr (elt);
4975 elt_type = TREE_TYPE (CASE_LOW (elt));
4976 if (TYPE_PRECISION (index_type) < TYPE_PRECISION (elt_type))
4978 error ("type precision mismatch in switch statement");
4982 if (TREE_TYPE (CASE_LOW (elt)) != elt_type
4983 || (CASE_HIGH (elt) && TREE_TYPE (CASE_HIGH (elt)) != elt_type))
4985 error ("type mismatch for case label in switch statement");
4986 debug_generic_expr (elt);
4990 if (prev_upper_bound)
4992 if (! tree_int_cst_lt (prev_upper_bound, CASE_LOW (elt)))
4994 error ("case labels not sorted in switch statement");
4999 prev_upper_bound = CASE_HIGH (elt);
5000 if (! prev_upper_bound)
5001 prev_upper_bound = CASE_LOW (elt);
5007 /* Verify a gimple debug statement STMT.
5008 Returns true if anything is wrong. */
5011 verify_gimple_debug (gimple *stmt ATTRIBUTE_UNUSED)
5013 /* There isn't much that could be wrong in a gimple debug stmt. A
5014 gimple debug bind stmt, for example, maps a tree, that's usually
5015 a VAR_DECL or a PARM_DECL, but that could also be some scalarized
5016 component or member of an aggregate type, to another tree, that
5017 can be an arbitrary expression. These stmts expand into debug
5018 insns, and are converted to debug notes by var-tracking.cc. */
5022 /* Verify a gimple label statement STMT.
5023 Returns true if anything is wrong. */
5026 verify_gimple_label (glabel *stmt)
5028 tree decl = gimple_label_label (stmt);
5032 if (TREE_CODE (decl) != LABEL_DECL)
5034 if (!DECL_NONLOCAL (decl) && !FORCED_LABEL (decl)
5035 && DECL_CONTEXT (decl) != current_function_decl)
5037 error ("label context is not the current function declaration");
5041 uid = LABEL_DECL_UID (decl);
5044 || (*label_to_block_map_for_fn (cfun))[uid] != gimple_bb (stmt)))
5046 error ("incorrect entry in %<label_to_block_map%>");
5050 uid = EH_LANDING_PAD_NR (decl);
5053 eh_landing_pad lp = get_eh_landing_pad_from_number (uid);
5054 if (decl != lp->post_landing_pad)
5056 error ("incorrect setting of landing pad number");
5064 /* Verify a gimple cond statement STMT.
5065 Returns true if anything is wrong. */
5068 verify_gimple_cond (gcond *stmt)
5070 if (TREE_CODE_CLASS (gimple_cond_code (stmt)) != tcc_comparison)
5072 error ("invalid comparison code in gimple cond");
5075 if (!(!gimple_cond_true_label (stmt)
5076 || TREE_CODE (gimple_cond_true_label (stmt)) == LABEL_DECL)
5077 || !(!gimple_cond_false_label (stmt)
5078 || TREE_CODE (gimple_cond_false_label (stmt)) == LABEL_DECL))
5080 error ("invalid labels in gimple cond");
5084 return verify_gimple_comparison (boolean_type_node,
5085 gimple_cond_lhs (stmt),
5086 gimple_cond_rhs (stmt),
5087 gimple_cond_code (stmt));
5090 /* Verify the GIMPLE statement STMT. Returns true if there is an
5091 error, otherwise false. */
5094 verify_gimple_stmt (gimple *stmt)
5096 switch (gimple_code (stmt))
5099 return verify_gimple_assign (as_a <gassign *> (stmt));
5102 return verify_gimple_label (as_a <glabel *> (stmt));
5105 return verify_gimple_call (as_a <gcall *> (stmt));
5108 return verify_gimple_cond (as_a <gcond *> (stmt));
5111 return verify_gimple_goto (as_a <ggoto *> (stmt));
5114 return verify_gimple_switch (as_a <gswitch *> (stmt));
5117 return verify_gimple_return (as_a <greturn *> (stmt));
5122 case GIMPLE_TRANSACTION:
5123 return verify_gimple_transaction (as_a <gtransaction *> (stmt));
5125 /* Tuples that do not have tree operands. */
5127 case GIMPLE_PREDICT:
5129 case GIMPLE_EH_DISPATCH:
5130 case GIMPLE_EH_MUST_NOT_THROW:
5134 /* OpenMP directives are validated by the FE and never operated
5135 on by the optimizers. Furthermore, GIMPLE_OMP_FOR may contain
5136 non-gimple expressions when the main index variable has had
5137 its address taken. This does not affect the loop itself
5138 because the header of an GIMPLE_OMP_FOR is merely used to determine
5139 how to setup the parallel iteration. */
5146 return verify_gimple_debug (stmt);
5153 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
5154 and false otherwise. */
5157 verify_gimple_phi (gphi *phi)
5161 tree phi_result = gimple_phi_result (phi);
5166 error ("invalid %<PHI%> result");
5170 virtual_p = virtual_operand_p (phi_result);
5171 if (TREE_CODE (phi_result) != SSA_NAME
5173 && SSA_NAME_VAR (phi_result) != gimple_vop (cfun)))
5175 error ("invalid %<PHI%> result");
5179 for (i = 0; i < gimple_phi_num_args (phi); i++)
5181 tree t = gimple_phi_arg_def (phi, i);
5185 error ("missing %<PHI%> def");
5189 /* Addressable variables do have SSA_NAMEs but they
5190 are not considered gimple values. */
5191 else if ((TREE_CODE (t) == SSA_NAME
5192 && virtual_p != virtual_operand_p (t))
5194 && (TREE_CODE (t) != SSA_NAME
5195 || SSA_NAME_VAR (t) != gimple_vop (cfun)))
5197 && !is_gimple_val (t)))
5199 error ("invalid %<PHI%> argument");
5200 debug_generic_expr (t);
5203 #ifdef ENABLE_TYPES_CHECKING
5204 if (!useless_type_conversion_p (TREE_TYPE (phi_result), TREE_TYPE (t)))
5206 error ("incompatible types in %<PHI%> argument %u", i);
5207 debug_generic_stmt (TREE_TYPE (phi_result));
5208 debug_generic_stmt (TREE_TYPE (t));
5217 /* Verify the GIMPLE statements inside the sequence STMTS. */
5220 verify_gimple_in_seq_2 (gimple_seq stmts)
5222 gimple_stmt_iterator ittr;
5225 for (ittr = gsi_start (stmts); !gsi_end_p (ittr); gsi_next (&ittr))
5227 gimple *stmt = gsi_stmt (ittr);
5229 switch (gimple_code (stmt))
5232 err |= verify_gimple_in_seq_2 (
5233 gimple_bind_body (as_a <gbind *> (stmt)));
5237 err |= verify_gimple_in_seq_2 (gimple_try_eval (stmt));
5238 err |= verify_gimple_in_seq_2 (gimple_try_cleanup (stmt));
5241 case GIMPLE_EH_FILTER:
5242 err |= verify_gimple_in_seq_2 (gimple_eh_filter_failure (stmt));
5245 case GIMPLE_EH_ELSE:
5247 geh_else *eh_else = as_a <geh_else *> (stmt);
5248 err |= verify_gimple_in_seq_2 (gimple_eh_else_n_body (eh_else));
5249 err |= verify_gimple_in_seq_2 (gimple_eh_else_e_body (eh_else));
5254 err |= verify_gimple_in_seq_2 (gimple_catch_handler (
5255 as_a <gcatch *> (stmt)));
5259 err |= verify_gimple_in_seq_2 (gimple_assume_body (stmt));
5262 case GIMPLE_TRANSACTION:
5263 err |= verify_gimple_transaction (as_a <gtransaction *> (stmt));
5268 bool err2 = verify_gimple_stmt (stmt);
5270 debug_gimple_stmt (stmt);
5279 /* Verify the contents of a GIMPLE_TRANSACTION. Returns true if there
5280 is a problem, otherwise false. */
5283 verify_gimple_transaction (gtransaction *stmt)
5287 lab = gimple_transaction_label_norm (stmt);
5288 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5290 lab = gimple_transaction_label_uninst (stmt);
5291 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5293 lab = gimple_transaction_label_over (stmt);
5294 if (lab != NULL && TREE_CODE (lab) != LABEL_DECL)
5297 return verify_gimple_in_seq_2 (gimple_transaction_body (stmt));
5301 /* Verify the GIMPLE statements inside the statement list STMTS. */
5304 verify_gimple_in_seq (gimple_seq stmts, bool ice)
5306 timevar_push (TV_TREE_STMT_VERIFY);
5307 bool res = verify_gimple_in_seq_2 (stmts);
5309 internal_error ("%<verify_gimple%> failed");
5310 timevar_pop (TV_TREE_STMT_VERIFY);
5314 /* Return true when the T can be shared. */
5317 tree_node_can_be_shared (tree t)
5319 if (IS_TYPE_OR_DECL_P (t)
5320 || TREE_CODE (t) == SSA_NAME
5321 || TREE_CODE (t) == IDENTIFIER_NODE
5322 || TREE_CODE (t) == CASE_LABEL_EXPR
5323 || is_gimple_min_invariant (t))
5326 if (t == error_mark_node)
5332 /* Called via walk_tree. Verify tree sharing. */
5335 verify_node_sharing_1 (tree *tp, int *walk_subtrees, void *data)
5337 hash_set<void *> *visited = (hash_set<void *> *) data;
5339 if (tree_node_can_be_shared (*tp))
5341 *walk_subtrees = false;
5345 if (visited->add (*tp))
5351 /* Called via walk_gimple_stmt. Verify tree sharing. */
5354 verify_node_sharing (tree *tp, int *walk_subtrees, void *data)
5356 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5357 return verify_node_sharing_1 (tp, walk_subtrees, wi->info);
5360 static bool eh_error_found;
5362 verify_eh_throw_stmt_node (gimple *const &stmt, const int &,
5363 hash_set<gimple *> *visited)
5365 if (!visited->contains (stmt))
5367 error ("dead statement in EH table");
5368 debug_gimple_stmt (stmt);
5369 eh_error_found = true;
5374 /* Verify if the location LOCs block is in BLOCKS. */
5377 verify_location (hash_set<tree> *blocks, location_t loc)
5379 tree block = LOCATION_BLOCK (loc);
5380 if (block != NULL_TREE
5381 && !blocks->contains (block))
5383 error ("location references block not in block tree");
5386 if (block != NULL_TREE)
5387 return verify_location (blocks, BLOCK_SOURCE_LOCATION (block));
5391 /* Called via walk_tree. Verify that expressions have no blocks. */
5394 verify_expr_no_block (tree *tp, int *walk_subtrees, void *)
5398 *walk_subtrees = false;
5402 location_t loc = EXPR_LOCATION (*tp);
5403 if (LOCATION_BLOCK (loc) != NULL)
5409 /* Called via walk_tree. Verify locations of expressions. */
5412 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5414 hash_set<tree> *blocks = (hash_set<tree> *) data;
5417 /* ??? This doesn't really belong here but there's no good place to
5418 stick this remainder of old verify_expr. */
5419 /* ??? This barfs on debug stmts which contain binds to vars with
5420 different function context. */
5423 || TREE_CODE (t) == PARM_DECL
5424 || TREE_CODE (t) == RESULT_DECL)
5426 tree context = decl_function_context (t);
5427 if (context != cfun->decl
5428 && !SCOPE_FILE_SCOPE_P (context)
5430 && !DECL_EXTERNAL (t))
5432 error ("local declaration from a different function");
5438 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5440 tree x = DECL_DEBUG_EXPR (t);
5441 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5446 || TREE_CODE (t) == PARM_DECL
5447 || TREE_CODE (t) == RESULT_DECL)
5448 && DECL_HAS_VALUE_EXPR_P (t))
5450 tree x = DECL_VALUE_EXPR (t);
5451 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5458 *walk_subtrees = false;
5462 location_t loc = EXPR_LOCATION (t);
5463 if (verify_location (blocks, loc))
5469 /* Called via walk_gimple_op. Verify locations of expressions. */
5472 verify_expr_location (tree *tp, int *walk_subtrees, void *data)
5474 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
5475 return verify_expr_location_1 (tp, walk_subtrees, wi->info);
5478 /* Insert all subblocks of BLOCK into BLOCKS and recurse. */
5481 collect_subblocks (hash_set<tree> *blocks, tree block)
5484 for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
5487 collect_subblocks (blocks, t);
5491 /* Disable warnings about missing quoting in GCC diagnostics for
5492 the verification errors. Their format strings don't follow
5493 GCC diagnostic conventions and trigger an ICE in the end. */
5495 # pragma GCC diagnostic push
5496 # pragma GCC diagnostic ignored "-Wformat-diag"
5499 /* Verify the GIMPLE statements in the CFG of FN. */
5502 verify_gimple_in_cfg (struct function *fn, bool verify_nothrow, bool ice)
5507 timevar_push (TV_TREE_STMT_VERIFY);
5508 hash_set<void *> visited;
5509 hash_set<gimple *> visited_throwing_stmts;
5511 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5512 hash_set<tree> blocks;
5513 if (DECL_INITIAL (fn->decl))
5515 blocks.add (DECL_INITIAL (fn->decl));
5516 collect_subblocks (&blocks, DECL_INITIAL (fn->decl));
5519 FOR_EACH_BB_FN (bb, fn)
5521 gimple_stmt_iterator gsi;
5525 for (gphi_iterator gpi = gsi_start_phis (bb);
5529 gphi *phi = gpi.phi ();
5533 if (gimple_bb (phi) != bb)
5535 error ("gimple_bb (phi) is set to a wrong basic block");
5539 err2 |= verify_gimple_phi (phi);
5541 /* Only PHI arguments have locations. */
5542 if (gimple_location (phi) != UNKNOWN_LOCATION)
5544 error ("PHI node with location");
5548 for (i = 0; i < gimple_phi_num_args (phi); i++)
5550 tree arg = gimple_phi_arg_def (phi, i);
5551 tree addr = walk_tree (&arg, verify_node_sharing_1,
5555 error ("incorrect sharing of tree nodes");
5556 debug_generic_expr (addr);
5559 location_t loc = gimple_phi_arg_location (phi, i);
5560 if (virtual_operand_p (gimple_phi_result (phi))
5561 && loc != UNKNOWN_LOCATION)
5563 error ("virtual PHI with argument locations");
5566 addr = walk_tree (&arg, verify_expr_location_1, &blocks, NULL);
5569 debug_generic_expr (addr);
5572 err2 |= verify_location (&blocks, loc);
5576 debug_gimple_stmt (phi);
5580 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5582 gimple *stmt = gsi_stmt (gsi);
5584 struct walk_stmt_info wi;
5588 if (gimple_bb (stmt) != bb)
5590 error ("gimple_bb (stmt) is set to a wrong basic block");
5594 err2 |= verify_gimple_stmt (stmt);
5595 err2 |= verify_location (&blocks, gimple_location (stmt));
5597 memset (&wi, 0, sizeof (wi));
5598 wi.info = (void *) &visited;
5599 addr = walk_gimple_op (stmt, verify_node_sharing, &wi);
5602 error ("incorrect sharing of tree nodes");
5603 debug_generic_expr (addr);
5607 memset (&wi, 0, sizeof (wi));
5608 wi.info = (void *) &blocks;
5609 addr = walk_gimple_op (stmt, verify_expr_location, &wi);
5612 debug_generic_expr (addr);
5616 /* If the statement is marked as part of an EH region, then it is
5617 expected that the statement could throw. Verify that when we
5618 have optimizations that simplify statements such that we prove
5619 that they cannot throw, that we update other data structures
5621 lp_nr = lookup_stmt_eh_lp (stmt);
5623 visited_throwing_stmts.add (stmt);
5626 if (!stmt_could_throw_p (cfun, stmt))
5630 error ("statement marked for throw, but doesn%'t");
5634 else if (!gsi_one_before_end_p (gsi))
5636 error ("statement marked for throw in middle of block");
5642 debug_gimple_stmt (stmt);
5646 FOR_EACH_EDGE (e, ei, bb->succs)
5647 if (e->goto_locus != UNKNOWN_LOCATION)
5648 err |= verify_location (&blocks, e->goto_locus);
5651 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5652 eh_error_found = false;
5654 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5655 (&visited_throwing_stmts);
5657 if (ice && (err || eh_error_found))
5658 internal_error ("verify_gimple failed");
5660 verify_histograms ();
5661 timevar_pop (TV_TREE_STMT_VERIFY);
5663 return (err || eh_error_found);
5667 /* Verifies that the flow information is OK. */
5670 gimple_verify_flow_info (void)
5674 gimple_stmt_iterator gsi;
5679 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5680 || ENTRY_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5682 error ("ENTRY_BLOCK has IL associated with it");
5686 if (EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.seq
5687 || EXIT_BLOCK_PTR_FOR_FN (cfun)->il.gimple.phi_nodes)
5689 error ("EXIT_BLOCK has IL associated with it");
5693 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5694 if (e->flags & EDGE_FALLTHRU)
5696 error ("fallthru to exit from bb %d", e->src->index);
5700 FOR_EACH_BB_FN (bb, cfun)
5702 bool found_ctrl_stmt = false;
5706 /* Skip labels on the start of basic block. */
5707 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5710 gimple *prev_stmt = stmt;
5712 stmt = gsi_stmt (gsi);
5714 if (gimple_code (stmt) != GIMPLE_LABEL)
5717 label = gimple_label_label (as_a <glabel *> (stmt));
5718 if (prev_stmt && DECL_NONLOCAL (label))
5720 error ("nonlocal label %qD is not first in a sequence "
5721 "of labels in bb %d", label, bb->index);
5725 if (prev_stmt && EH_LANDING_PAD_NR (label) != 0)
5727 error ("EH landing pad label %qD is not first in a sequence "
5728 "of labels in bb %d", label, bb->index);
5732 if (label_to_block (cfun, label) != bb)
5734 error ("label %qD to block does not match in bb %d",
5739 if (decl_function_context (label) != current_function_decl)
5741 error ("label %qD has incorrect context in bb %d",
5747 /* Verify that body of basic block BB is free of control flow. */
5748 bool seen_nondebug_stmt = false;
5749 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5751 gimple *stmt = gsi_stmt (gsi);
5753 if (found_ctrl_stmt)
5755 error ("control flow in the middle of basic block %d",
5760 if (stmt_ends_bb_p (stmt))
5761 found_ctrl_stmt = true;
5763 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
5765 error ("label %qD in the middle of basic block %d",
5766 gimple_label_label (label_stmt), bb->index);
5770 /* Check that no statements appear between a returns_twice call
5771 and its associated abnormal edge. */
5772 if (gimple_code (stmt) == GIMPLE_CALL
5773 && gimple_call_flags (stmt) & ECF_RETURNS_TWICE)
5775 const char *misplaced = NULL;
5776 /* TM is an exception: it points abnormal edges just after the
5777 call that starts a transaction, i.e. it must end the BB. */
5778 if (gimple_call_builtin_p (stmt, BUILT_IN_TM_START))
5780 if (single_succ_p (bb)
5781 && bb_has_abnormal_pred (single_succ (bb))
5782 && !gsi_one_nondebug_before_end_p (gsi))
5783 misplaced = "not last";
5787 if (seen_nondebug_stmt
5788 && bb_has_abnormal_pred (bb))
5789 misplaced = "not first";
5793 error ("returns_twice call is %s in basic block %d",
5794 misplaced, bb->index);
5795 print_gimple_stmt (stderr, stmt, 0, TDF_SLIM);
5799 if (!is_gimple_debug (stmt))
5800 seen_nondebug_stmt = true;
5803 gsi = gsi_last_nondebug_bb (bb);
5804 if (gsi_end_p (gsi))
5807 stmt = gsi_stmt (gsi);
5809 if (gimple_code (stmt) == GIMPLE_LABEL)
5812 err |= verify_eh_edges (stmt);
5814 if (is_ctrl_stmt (stmt))
5816 FOR_EACH_EDGE (e, ei, bb->succs)
5817 if (e->flags & EDGE_FALLTHRU)
5819 error ("fallthru edge after a control statement in bb %d",
5825 if (gimple_code (stmt) != GIMPLE_COND)
5827 /* Verify that there are no edges with EDGE_TRUE/FALSE_FLAG set
5828 after anything else but if statement. */
5829 FOR_EACH_EDGE (e, ei, bb->succs)
5830 if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE))
5832 error ("true/false edge after a non-GIMPLE_COND in bb %d",
5838 switch (gimple_code (stmt))
5845 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
5849 || !(true_edge->flags & EDGE_TRUE_VALUE)
5850 || !(false_edge->flags & EDGE_FALSE_VALUE)
5851 || (true_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5852 || (false_edge->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL))
5853 || EDGE_COUNT (bb->succs) >= 3)
5855 error ("wrong outgoing edge flags at end of bb %d",
5863 if (simple_goto_p (stmt))
5865 error ("explicit goto at end of bb %d", bb->index);
5870 /* FIXME. We should double check that the labels in the
5871 destination blocks have their address taken. */
5872 FOR_EACH_EDGE (e, ei, bb->succs)
5873 if ((e->flags & (EDGE_FALLTHRU | EDGE_TRUE_VALUE
5874 | EDGE_FALSE_VALUE))
5875 || !(e->flags & EDGE_ABNORMAL))
5877 error ("wrong outgoing edge flags at end of bb %d",
5885 if (!gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
5889 if (!single_succ_p (bb)
5890 || (single_succ_edge (bb)->flags
5891 & (EDGE_FALLTHRU | EDGE_ABNORMAL
5892 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5894 error ("wrong outgoing edge flags at end of bb %d", bb->index);
5897 if (single_succ (bb) != EXIT_BLOCK_PTR_FOR_FN (cfun))
5899 error ("return edge does not point to exit in bb %d",
5907 gswitch *switch_stmt = as_a <gswitch *> (stmt);
5912 n = gimple_switch_num_labels (switch_stmt);
5914 /* Mark all the destination basic blocks. */
5915 for (i = 0; i < n; ++i)
5917 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5918 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5919 label_bb->aux = (void *)1;
5922 /* Verify that the case labels are sorted. */
5923 prev = gimple_switch_label (switch_stmt, 0);
5924 for (i = 1; i < n; ++i)
5926 tree c = gimple_switch_label (switch_stmt, i);
5929 error ("found default case not at the start of "
5935 && !tree_int_cst_lt (CASE_LOW (prev), CASE_LOW (c)))
5937 error ("case labels not sorted: ");
5938 print_generic_expr (stderr, prev);
5939 fprintf (stderr," is greater than ");
5940 print_generic_expr (stderr, c);
5941 fprintf (stderr," but comes before it.\n");
5946 /* VRP will remove the default case if it can prove it will
5947 never be executed. So do not verify there always exists
5948 a default case here. */
5950 FOR_EACH_EDGE (e, ei, bb->succs)
5954 error ("extra outgoing edge %d->%d",
5955 bb->index, e->dest->index);
5959 e->dest->aux = (void *)2;
5960 if ((e->flags & (EDGE_FALLTHRU | EDGE_ABNORMAL
5961 | EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)))
5963 error ("wrong outgoing edge flags at end of bb %d",
5969 /* Check that we have all of them. */
5970 for (i = 0; i < n; ++i)
5972 basic_block label_bb = gimple_switch_label_bb (cfun,
5975 if (label_bb->aux != (void *)2)
5977 error ("missing edge %i->%i", bb->index, label_bb->index);
5982 FOR_EACH_EDGE (e, ei, bb->succs)
5983 e->dest->aux = (void *)0;
5987 case GIMPLE_EH_DISPATCH:
5988 err |= verify_eh_dispatch_edge (as_a <geh_dispatch *> (stmt));
5996 if (dom_info_state (CDI_DOMINATORS) >= DOM_NO_FAST_QUERY)
5997 verify_dominators (CDI_DOMINATORS);
6003 # pragma GCC diagnostic pop
6006 /* Updates phi nodes after creating a forwarder block joined
6007 by edge FALLTHRU. */
6010 gimple_make_forwarder_block (edge fallthru)
6014 basic_block dummy, bb;
6017 bool forward_location_p;
6019 dummy = fallthru->src;
6020 bb = fallthru->dest;
6022 if (single_pred_p (bb))
6025 /* We can forward location info if we have only one predecessor. */
6026 forward_location_p = single_pred_p (dummy);
6028 /* If we redirected a branch we must create new PHI nodes at the
6030 for (gsi = gsi_start_phis (dummy); !gsi_end_p (gsi); gsi_next (&gsi))
6032 gphi *phi, *new_phi;
6035 var = gimple_phi_result (phi);
6036 new_phi = create_phi_node (var, bb);
6037 gimple_phi_set_result (phi, copy_ssa_name (var, phi));
6038 add_phi_arg (new_phi, gimple_phi_result (phi), fallthru,
6040 ? gimple_phi_arg_location (phi, 0) : UNKNOWN_LOCATION);
6043 /* Add the arguments we have stored on edges. */
6044 FOR_EACH_EDGE (e, ei, bb->preds)
6049 flush_pending_stmts (e);
6054 /* Return a non-special label in the head of basic block BLOCK.
6055 Create one if it doesn't exist. */
6058 gimple_block_label (basic_block bb)
6060 gimple_stmt_iterator i, s = gsi_start_bb (bb);
6065 for (i = s; !gsi_end_p (i); first = false, gsi_next (&i))
6067 stmt = dyn_cast <glabel *> (gsi_stmt (i));
6070 label = gimple_label_label (stmt);
6071 if (!DECL_NONLOCAL (label))
6074 gsi_move_before (&i, &s);
6079 label = create_artificial_label (UNKNOWN_LOCATION);
6080 stmt = gimple_build_label (label);
6081 gsi_insert_before (&s, stmt, GSI_NEW_STMT);
6086 /* Attempt to perform edge redirection by replacing a possibly complex
6087 jump instruction by a goto or by removing the jump completely.
6088 This can apply only if all edges now point to the same block. The
6089 parameters and return values are equivalent to
6090 redirect_edge_and_branch. */
6093 gimple_try_redirect_by_replacing_jump (edge e, basic_block target)
6095 basic_block src = e->src;
6096 gimple_stmt_iterator i;
6099 /* We can replace or remove a complex jump only when we have exactly
6101 if (EDGE_COUNT (src->succs) != 2
6102 /* Verify that all targets will be TARGET. Specifically, the
6103 edge that is not E must also go to TARGET. */
6104 || EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target)
6107 i = gsi_last_bb (src);
6111 stmt = gsi_stmt (i);
6113 if (gimple_code (stmt) == GIMPLE_COND || gimple_code (stmt) == GIMPLE_SWITCH)
6115 gsi_remove (&i, true);
6116 e = ssa_redirect_edge (e, target);
6117 e->flags = EDGE_FALLTHRU;
6125 /* Redirect E to DEST. Return NULL on failure. Otherwise, return the
6126 edge representing the redirected branch. */
6129 gimple_redirect_edge_and_branch (edge e, basic_block dest)
6131 basic_block bb = e->src;
6132 gimple_stmt_iterator gsi;
6136 if (e->flags & EDGE_ABNORMAL)
6139 if (e->dest == dest)
6142 if (e->flags & EDGE_EH)
6143 return redirect_eh_edge (e, dest);
6145 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
6147 ret = gimple_try_redirect_by_replacing_jump (e, dest);
6152 gsi = gsi_last_nondebug_bb (bb);
6153 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
6155 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
6158 /* For COND_EXPR, we only need to redirect the edge. */
6162 /* No non-abnormal edges should lead from a non-simple goto, and
6163 simple ones should be represented implicitly. */
6168 gswitch *switch_stmt = as_a <gswitch *> (stmt);
6169 tree label = gimple_block_label (dest);
6170 tree cases = get_cases_for_edge (e, switch_stmt);
6172 /* If we have a list of cases associated with E, then use it
6173 as it's a lot faster than walking the entire case vector. */
6176 edge e2 = find_edge (e->src, dest);
6183 CASE_LABEL (cases) = label;
6184 cases = CASE_CHAIN (cases);
6187 /* If there was already an edge in the CFG, then we need
6188 to move all the cases associated with E to E2. */
6191 tree cases2 = get_cases_for_edge (e2, switch_stmt);
6193 CASE_CHAIN (last) = CASE_CHAIN (cases2);
6194 CASE_CHAIN (cases2) = first;
6196 bitmap_set_bit (touched_switch_bbs, gimple_bb (stmt)->index);
6200 size_t i, n = gimple_switch_num_labels (switch_stmt);
6202 for (i = 0; i < n; i++)
6204 tree elt = gimple_switch_label (switch_stmt, i);
6205 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
6206 CASE_LABEL (elt) = label;
6214 gasm *asm_stmt = as_a <gasm *> (stmt);
6215 int i, n = gimple_asm_nlabels (asm_stmt);
6218 for (i = 0; i < n; ++i)
6220 tree cons = gimple_asm_label_op (asm_stmt, i);
6221 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
6224 label = gimple_block_label (dest);
6225 TREE_VALUE (cons) = label;
6229 /* If we didn't find any label matching the former edge in the
6230 asm labels, we must be redirecting the fallthrough
6232 gcc_assert (label || (e->flags & EDGE_FALLTHRU));
6237 gsi_remove (&gsi, true);
6238 e->flags |= EDGE_FALLTHRU;
6241 case GIMPLE_OMP_RETURN:
6242 case GIMPLE_OMP_CONTINUE:
6243 case GIMPLE_OMP_SECTIONS_SWITCH:
6244 case GIMPLE_OMP_FOR:
6245 /* The edges from OMP constructs can be simply redirected. */
6248 case GIMPLE_EH_DISPATCH:
6249 if (!(e->flags & EDGE_FALLTHRU))
6250 redirect_eh_dispatch_edge (as_a <geh_dispatch *> (stmt), e, dest);
6253 case GIMPLE_TRANSACTION:
6254 if (e->flags & EDGE_TM_ABORT)
6255 gimple_transaction_set_label_over (as_a <gtransaction *> (stmt),
6256 gimple_block_label (dest));
6257 else if (e->flags & EDGE_TM_UNINSTRUMENTED)
6258 gimple_transaction_set_label_uninst (as_a <gtransaction *> (stmt),
6259 gimple_block_label (dest));
6261 gimple_transaction_set_label_norm (as_a <gtransaction *> (stmt),
6262 gimple_block_label (dest));
6266 /* Otherwise it must be a fallthru edge, and we don't need to
6267 do anything besides redirecting it. */
6268 gcc_assert (e->flags & EDGE_FALLTHRU);
6272 /* Update/insert PHI nodes as necessary. */
6274 /* Now update the edges in the CFG. */
6275 e = ssa_redirect_edge (e, dest);
6280 /* Returns true if it is possible to remove edge E by redirecting
6281 it to the destination of the other edge from E->src. */
6284 gimple_can_remove_branch_p (const_edge e)
6286 if (e->flags & (EDGE_ABNORMAL | EDGE_EH))
6292 /* Simple wrapper, as we can always redirect fallthru edges. */
6295 gimple_redirect_edge_and_branch_force (edge e, basic_block dest)
6297 e = gimple_redirect_edge_and_branch (e, dest);
6304 /* Splits basic block BB after statement STMT (but at least after the
6305 labels). If STMT is NULL, BB is split just after the labels. */
6308 gimple_split_block (basic_block bb, void *stmt)
6310 gimple_stmt_iterator gsi;
6311 gimple_stmt_iterator gsi_tgt;
6317 new_bb = create_empty_bb (bb);
6319 /* Redirect the outgoing edges. */
6320 new_bb->succs = bb->succs;
6322 FOR_EACH_EDGE (e, ei, new_bb->succs)
6325 /* Get a stmt iterator pointing to the first stmt to move. */
6326 if (!stmt || gimple_code ((gimple *) stmt) == GIMPLE_LABEL)
6327 gsi = gsi_after_labels (bb);
6330 gsi = gsi_for_stmt ((gimple *) stmt);
6334 /* Move everything from GSI to the new basic block. */
6335 if (gsi_end_p (gsi))
6338 /* Split the statement list - avoid re-creating new containers as this
6339 brings ugly quadratic memory consumption in the inliner.
6340 (We are still quadratic since we need to update stmt BB pointers,
6342 gsi_split_seq_before (&gsi, &list);
6343 set_bb_seq (new_bb, list);
6344 for (gsi_tgt = gsi_start (list);
6345 !gsi_end_p (gsi_tgt); gsi_next (&gsi_tgt))
6346 gimple_set_bb (gsi_stmt (gsi_tgt), new_bb);
6352 /* Moves basic block BB after block AFTER. */
6355 gimple_move_block_after (basic_block bb, basic_block after)
6357 if (bb->prev_bb == after)
6361 link_block (bb, after);
6367 /* Return TRUE if block BB has no executable statements, otherwise return
6371 gimple_empty_block_p (basic_block bb)
6373 /* BB must have no executable statements. */
6374 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6377 while (!gsi_end_p (gsi))
6379 gimple *stmt = gsi_stmt (gsi);
6380 if (is_gimple_debug (stmt))
6382 else if (gimple_code (stmt) == GIMPLE_NOP
6383 || gimple_code (stmt) == GIMPLE_PREDICT)
6393 /* Split a basic block if it ends with a conditional branch and if the
6394 other part of the block is not empty. */
6397 gimple_split_block_before_cond_jump (basic_block bb)
6399 gimple *last, *split_point;
6400 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
6401 if (gsi_end_p (gsi))
6403 last = gsi_stmt (gsi);
6404 if (gimple_code (last) != GIMPLE_COND
6405 && gimple_code (last) != GIMPLE_SWITCH)
6408 split_point = gsi_stmt (gsi);
6409 return split_block (bb, split_point)->dest;
6413 /* Return true if basic_block can be duplicated. */
6416 gimple_can_duplicate_bb_p (const_basic_block bb)
6418 gimple *last = last_stmt (CONST_CAST_BB (bb));
6420 /* Do checks that can only fail for the last stmt, to minimize the work in the
6423 /* A transaction is a single entry multiple exit region. It
6424 must be duplicated in its entirety or not at all. */
6425 if (gimple_code (last) == GIMPLE_TRANSACTION)
6428 /* An IFN_UNIQUE call must be duplicated as part of its group,
6430 if (is_gimple_call (last)
6431 && gimple_call_internal_p (last)
6432 && gimple_call_internal_unique_p (last))
6436 for (gimple_stmt_iterator gsi = gsi_start_bb (CONST_CAST_BB (bb));
6437 !gsi_end_p (gsi); gsi_next (&gsi))
6439 gimple *g = gsi_stmt (gsi);
6441 /* Prohibit duplication of returns_twice calls, otherwise associated
6442 abnormal edges also need to be duplicated properly.
6443 An IFN_GOMP_SIMT_ENTER_ALLOC/IFN_GOMP_SIMT_EXIT call must be
6444 duplicated as part of its group, or not at all.
6445 The IFN_GOMP_SIMT_VOTE_ANY and IFN_GOMP_SIMT_XCHG_* are part of such a
6446 group, so the same holds there. */
6447 if (is_gimple_call (g)
6448 && (gimple_call_flags (g) & ECF_RETURNS_TWICE
6449 || gimple_call_internal_p (g, IFN_GOMP_SIMT_ENTER_ALLOC)
6450 || gimple_call_internal_p (g, IFN_GOMP_SIMT_EXIT)
6451 || gimple_call_internal_p (g, IFN_GOMP_SIMT_VOTE_ANY)
6452 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_BFLY)
6453 || gimple_call_internal_p (g, IFN_GOMP_SIMT_XCHG_IDX)))
6460 /* Create a duplicate of the basic block BB. NOTE: This does not
6461 preserve SSA form. */
6464 gimple_duplicate_bb (basic_block bb, copy_bb_data *id)
6467 gimple_stmt_iterator gsi_tgt;
6469 new_bb = create_empty_bb (EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
6471 /* Copy the PHI nodes. We ignore PHI node arguments here because
6472 the incoming edges have not been setup yet. */
6473 for (gphi_iterator gpi = gsi_start_phis (bb);
6479 copy = create_phi_node (NULL_TREE, new_bb);
6480 create_new_def_for (gimple_phi_result (phi), copy,
6481 gimple_phi_result_ptr (copy));
6482 gimple_set_uid (copy, gimple_uid (phi));
6485 gsi_tgt = gsi_start_bb (new_bb);
6486 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6490 def_operand_p def_p;
6491 ssa_op_iter op_iter;
6493 gimple *stmt, *copy;
6495 stmt = gsi_stmt (gsi);
6496 if (gimple_code (stmt) == GIMPLE_LABEL)
6499 /* Don't duplicate label debug stmts. */
6500 if (gimple_debug_bind_p (stmt)
6501 && TREE_CODE (gimple_debug_bind_get_var (stmt))
6505 /* Create a new copy of STMT and duplicate STMT's virtual
6507 copy = gimple_copy (stmt);
6508 gsi_insert_after (&gsi_tgt, copy, GSI_NEW_STMT);
6510 maybe_duplicate_eh_stmt (copy, stmt);
6511 gimple_duplicate_stmt_histograms (cfun, copy, cfun, stmt);
6513 /* When copying around a stmt writing into a local non-user
6514 aggregate, make sure it won't share stack slot with other
6516 lhs = gimple_get_lhs (stmt);
6517 if (lhs && TREE_CODE (lhs) != SSA_NAME)
6519 tree base = get_base_address (lhs);
6521 && (VAR_P (base) || TREE_CODE (base) == RESULT_DECL)
6522 && DECL_IGNORED_P (base)
6523 && !TREE_STATIC (base)
6524 && !DECL_EXTERNAL (base)
6525 && (!VAR_P (base) || !DECL_HAS_VALUE_EXPR_P (base)))
6526 DECL_NONSHAREABLE (base) = 1;
6529 /* If requested remap dependence info of cliques brought in
6532 for (unsigned i = 0; i < gimple_num_ops (copy); ++i)
6534 tree op = gimple_op (copy, i);
6537 if (TREE_CODE (op) == ADDR_EXPR
6538 || TREE_CODE (op) == WITH_SIZE_EXPR)
6539 op = TREE_OPERAND (op, 0);
6540 while (handled_component_p (op))
6541 op = TREE_OPERAND (op, 0);
6542 if ((TREE_CODE (op) == MEM_REF
6543 || TREE_CODE (op) == TARGET_MEM_REF)
6544 && MR_DEPENDENCE_CLIQUE (op) > 1
6545 && MR_DEPENDENCE_CLIQUE (op) != bb->loop_father->owned_clique)
6547 if (!id->dependence_map)
6548 id->dependence_map = new hash_map<dependence_hash,
6551 unsigned short &newc = id->dependence_map->get_or_insert
6552 (MR_DEPENDENCE_CLIQUE (op), &existed);
6555 gcc_assert (MR_DEPENDENCE_CLIQUE (op) <= cfun->last_clique);
6556 newc = ++cfun->last_clique;
6558 MR_DEPENDENCE_CLIQUE (op) = newc;
6562 /* Create new names for all the definitions created by COPY and
6563 add replacement mappings for each new name. */
6564 FOR_EACH_SSA_DEF_OPERAND (def_p, copy, op_iter, SSA_OP_ALL_DEFS)
6565 create_new_def_for (DEF_FROM_PTR (def_p), copy, def_p);
6571 /* Adds phi node arguments for edge E_COPY after basic block duplication. */
6574 add_phi_args_after_copy_edge (edge e_copy)
6576 basic_block bb, bb_copy = e_copy->src, dest;
6579 gphi *phi, *phi_copy;
6581 gphi_iterator psi, psi_copy;
6583 if (gimple_seq_empty_p (phi_nodes (e_copy->dest)))
6586 bb = bb_copy->flags & BB_DUPLICATED ? get_bb_original (bb_copy) : bb_copy;
6588 if (e_copy->dest->flags & BB_DUPLICATED)
6589 dest = get_bb_original (e_copy->dest);
6591 dest = e_copy->dest;
6593 e = find_edge (bb, dest);
6596 /* During loop unrolling the target of the latch edge is copied.
6597 In this case we are not looking for edge to dest, but to
6598 duplicated block whose original was dest. */
6599 FOR_EACH_EDGE (e, ei, bb->succs)
6601 if ((e->dest->flags & BB_DUPLICATED)
6602 && get_bb_original (e->dest) == dest)
6606 gcc_assert (e != NULL);
6609 for (psi = gsi_start_phis (e->dest),
6610 psi_copy = gsi_start_phis (e_copy->dest);
6612 gsi_next (&psi), gsi_next (&psi_copy))
6615 phi_copy = psi_copy.phi ();
6616 def = PHI_ARG_DEF_FROM_EDGE (phi, e);
6617 add_phi_arg (phi_copy, def, e_copy,
6618 gimple_phi_arg_location_from_edge (phi, e));
6623 /* Basic block BB_COPY was created by code duplication. Add phi node
6624 arguments for edges going out of BB_COPY. The blocks that were
6625 duplicated have BB_DUPLICATED set. */
6628 add_phi_args_after_copy_bb (basic_block bb_copy)
6633 FOR_EACH_EDGE (e_copy, ei, bb_copy->succs)
6635 add_phi_args_after_copy_edge (e_copy);
6639 /* Blocks in REGION_COPY array of length N_REGION were created by
6640 duplication of basic blocks. Add phi node arguments for edges
6641 going from these blocks. If E_COPY is not NULL, also add
6642 phi node arguments for its destination.*/
6645 add_phi_args_after_copy (basic_block *region_copy, unsigned n_region,
6650 for (i = 0; i < n_region; i++)
6651 region_copy[i]->flags |= BB_DUPLICATED;
6653 for (i = 0; i < n_region; i++)
6654 add_phi_args_after_copy_bb (region_copy[i]);
6656 add_phi_args_after_copy_edge (e_copy);
6658 for (i = 0; i < n_region; i++)
6659 region_copy[i]->flags &= ~BB_DUPLICATED;
6662 /* Duplicates a REGION (set of N_REGION basic blocks) with just a single
6663 important exit edge EXIT. By important we mean that no SSA name defined
6664 inside region is live over the other exit edges of the region. All entry
6665 edges to the region must go to ENTRY->dest. The edge ENTRY is redirected
6666 to the duplicate of the region. Dominance and loop information is
6667 updated if UPDATE_DOMINANCE is true, but not the SSA web. If
6668 UPDATE_DOMINANCE is false then we assume that the caller will update the
6669 dominance information after calling this function. The new basic
6670 blocks are stored to REGION_COPY in the same order as they had in REGION,
6671 provided that REGION_COPY is not NULL.
6672 The function returns false if it is unable to copy the region,
6676 gimple_duplicate_sese_region (edge entry, edge exit,
6677 basic_block *region, unsigned n_region,
6678 basic_block *region_copy,
6679 bool update_dominance)
6682 bool free_region_copy = false, copying_header = false;
6683 class loop *loop = entry->dest->loop_father;
6686 profile_count total_count = profile_count::uninitialized ();
6687 profile_count entry_count = profile_count::uninitialized ();
6689 if (!can_copy_bbs_p (region, n_region))
6692 /* Some sanity checking. Note that we do not check for all possible
6693 missuses of the functions. I.e. if you ask to copy something weird,
6694 it will work, but the state of structures probably will not be
6696 for (i = 0; i < n_region; i++)
6698 /* We do not handle subloops, i.e. all the blocks must belong to the
6700 if (region[i]->loop_father != loop)
6703 if (region[i] != entry->dest
6704 && region[i] == loop->header)
6708 /* In case the function is used for loop header copying (which is the primary
6709 use), ensure that EXIT and its copy will be new latch and entry edges. */
6710 if (loop->header == entry->dest)
6712 copying_header = true;
6714 if (!dominated_by_p (CDI_DOMINATORS, loop->latch, exit->src))
6717 for (i = 0; i < n_region; i++)
6718 if (region[i] != exit->src
6719 && dominated_by_p (CDI_DOMINATORS, region[i], exit->src))
6723 initialize_original_copy_tables ();
6726 set_loop_copy (loop, loop_outer (loop));
6728 set_loop_copy (loop, loop);
6732 region_copy = XNEWVEC (basic_block, n_region);
6733 free_region_copy = true;
6736 /* Record blocks outside the region that are dominated by something
6738 auto_vec<basic_block> doms;
6739 if (update_dominance)
6741 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6744 if (entry->dest->count.initialized_p ())
6746 total_count = entry->dest->count;
6747 entry_count = entry->count ();
6748 /* Fix up corner cases, to avoid division by zero or creation of negative
6750 if (entry_count > total_count)
6751 entry_count = total_count;
6754 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6755 split_edge_bb_loc (entry), update_dominance);
6756 if (total_count.initialized_p () && entry_count.initialized_p ())
6758 scale_bbs_frequencies_profile_count (region, n_region,
6759 total_count - entry_count,
6761 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6767 loop->header = exit->dest;
6768 loop->latch = exit->src;
6771 /* Redirect the entry and add the phi node arguments. */
6772 redirected = redirect_edge_and_branch (entry, get_bb_copy (entry->dest));
6773 gcc_assert (redirected != NULL);
6774 flush_pending_stmts (entry);
6776 /* Concerning updating of dominators: We must recount dominators
6777 for entry block and its copy. Anything that is outside of the
6778 region, but was dominated by something inside needs recounting as
6780 if (update_dominance)
6782 set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
6783 doms.safe_push (get_bb_original (entry->dest));
6784 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6787 /* Add the other PHI node arguments. */
6788 add_phi_args_after_copy (region_copy, n_region, NULL);
6790 if (free_region_copy)
6793 free_original_copy_tables ();
6797 /* Checks if BB is part of the region defined by N_REGION BBS. */
6799 bb_part_of_region_p (basic_block bb, basic_block* bbs, unsigned n_region)
6803 for (n = 0; n < n_region; n++)
6811 /* Duplicates REGION consisting of N_REGION blocks. The new blocks
6812 are stored to REGION_COPY in the same order in that they appear
6813 in REGION, if REGION_COPY is not NULL. ENTRY is the entry to
6814 the region, EXIT an exit from it. The condition guarding EXIT
6815 is moved to ENTRY. Returns true if duplication succeeds, false
6841 gimple_duplicate_sese_tail (edge entry, edge exit,
6842 basic_block *region, unsigned n_region,
6843 basic_block *region_copy)
6846 bool free_region_copy = false;
6847 class loop *loop = exit->dest->loop_father;
6848 class loop *orig_loop = entry->dest->loop_father;
6849 basic_block switch_bb, entry_bb, nentry_bb;
6850 profile_count total_count = profile_count::uninitialized (),
6851 exit_count = profile_count::uninitialized ();
6852 edge exits[2], nexits[2], e;
6853 gimple_stmt_iterator gsi;
6856 basic_block exit_bb;
6860 class loop *target, *aloop, *cloop;
6862 gcc_assert (EDGE_COUNT (exit->src->succs) == 2);
6864 exits[1] = EDGE_SUCC (exit->src, EDGE_SUCC (exit->src, 0) == exit);
6866 if (!can_copy_bbs_p (region, n_region))
6869 initialize_original_copy_tables ();
6870 set_loop_copy (orig_loop, loop);
6873 for (aloop = orig_loop->inner; aloop; aloop = aloop->next)
6875 if (bb_part_of_region_p (aloop->header, region, n_region))
6877 cloop = duplicate_loop (aloop, target);
6878 duplicate_subloops (aloop, cloop);
6884 region_copy = XNEWVEC (basic_block, n_region);
6885 free_region_copy = true;
6888 gcc_assert (!need_ssa_update_p (cfun));
6890 /* Record blocks outside the region that are dominated by something
6892 auto_vec<basic_block> doms = get_dominated_by_region (CDI_DOMINATORS, region,
6895 total_count = exit->src->count;
6896 exit_count = exit->count ();
6897 /* Fix up corner cases, to avoid division by zero or creation of negative
6899 if (exit_count > total_count)
6900 exit_count = total_count;
6902 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6903 split_edge_bb_loc (exit), true);
6904 if (total_count.initialized_p () && exit_count.initialized_p ())
6906 scale_bbs_frequencies_profile_count (region, n_region,
6907 total_count - exit_count,
6909 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6913 /* Create the switch block, and put the exit condition to it. */
6914 entry_bb = entry->dest;
6915 nentry_bb = get_bb_copy (entry_bb);
6916 if (!last_stmt (entry->src)
6917 || !stmt_ends_bb_p (last_stmt (entry->src)))
6918 switch_bb = entry->src;
6920 switch_bb = split_edge (entry);
6921 set_immediate_dominator (CDI_DOMINATORS, nentry_bb, switch_bb);
6923 gsi = gsi_last_bb (switch_bb);
6924 cond_stmt = last_stmt (exit->src);
6925 gcc_assert (gimple_code (cond_stmt) == GIMPLE_COND);
6926 cond_stmt = gimple_copy (cond_stmt);
6928 gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
6930 sorig = single_succ_edge (switch_bb);
6931 sorig->flags = exits[1]->flags;
6932 sorig->probability = exits[1]->probability;
6933 snew = make_edge (switch_bb, nentry_bb, exits[0]->flags);
6934 snew->probability = exits[0]->probability;
6937 /* Register the new edge from SWITCH_BB in loop exit lists. */
6938 rescan_loop_exit (snew, true, false);
6940 /* Add the PHI node arguments. */
6941 add_phi_args_after_copy (region_copy, n_region, snew);
6943 /* Get rid of now superfluous conditions and associated edges (and phi node
6945 exit_bb = exit->dest;
6947 e = redirect_edge_and_branch (exits[0], exits[1]->dest);
6948 PENDING_STMT (e) = NULL;
6950 /* The latch of ORIG_LOOP was copied, and so was the backedge
6951 to the original header. We redirect this backedge to EXIT_BB. */
6952 for (i = 0; i < n_region; i++)
6953 if (get_bb_original (region_copy[i]) == orig_loop->latch)
6955 gcc_assert (single_succ_edge (region_copy[i]));
6956 e = redirect_edge_and_branch (single_succ_edge (region_copy[i]), exit_bb);
6957 PENDING_STMT (e) = NULL;
6958 for (psi = gsi_start_phis (exit_bb);
6963 def = PHI_ARG_DEF (phi, nexits[0]->dest_idx);
6964 add_phi_arg (phi, def, e, gimple_phi_arg_location_from_edge (phi, e));
6967 e = redirect_edge_and_branch (nexits[1], nexits[0]->dest);
6968 PENDING_STMT (e) = NULL;
6970 /* Anything that is outside of the region, but was dominated by something
6971 inside needs to update dominance info. */
6972 iterate_fix_dominators (CDI_DOMINATORS, doms, false);
6974 if (free_region_copy)
6977 free_original_copy_tables ();
6981 /* Add all the blocks dominated by ENTRY to the array BBS_P. Stop
6982 adding blocks when the dominator traversal reaches EXIT. This
6983 function silently assumes that ENTRY strictly dominates EXIT. */
6986 gather_blocks_in_sese_region (basic_block entry, basic_block exit,
6987 vec<basic_block> *bbs_p)
6991 for (son = first_dom_son (CDI_DOMINATORS, entry);
6993 son = next_dom_son (CDI_DOMINATORS, son))
6995 bbs_p->safe_push (son);
6997 gather_blocks_in_sese_region (son, exit, bbs_p);
7001 /* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
7002 The duplicates are recorded in VARS_MAP. */
7005 replace_by_duplicate_decl (tree *tp, hash_map<tree, tree> *vars_map,
7008 tree t = *tp, new_t;
7009 struct function *f = DECL_STRUCT_FUNCTION (to_context);
7011 if (DECL_CONTEXT (t) == to_context)
7015 tree &loc = vars_map->get_or_insert (t, &existed);
7021 new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
7022 add_local_decl (f, new_t);
7026 gcc_assert (TREE_CODE (t) == CONST_DECL);
7027 new_t = copy_node (t);
7029 DECL_CONTEXT (new_t) = to_context;
7040 /* Creates an ssa name in TO_CONTEXT equivalent to NAME.
7041 VARS_MAP maps old ssa names and var_decls to the new ones. */
7044 replace_ssa_name (tree name, hash_map<tree, tree> *vars_map,
7049 gcc_assert (!virtual_operand_p (name));
7051 tree *loc = vars_map->get (name);
7055 tree decl = SSA_NAME_VAR (name);
7058 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (name));
7059 replace_by_duplicate_decl (&decl, vars_map, to_context);
7060 new_name = make_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
7061 decl, SSA_NAME_DEF_STMT (name));
7064 new_name = copy_ssa_name_fn (DECL_STRUCT_FUNCTION (to_context),
7065 name, SSA_NAME_DEF_STMT (name));
7067 /* Now that we've used the def stmt to define new_name, make sure it
7068 doesn't define name anymore. */
7069 SSA_NAME_DEF_STMT (name) = NULL;
7071 vars_map->put (name, new_name);
7085 hash_map<tree, tree> *vars_map;
7086 htab_t new_label_map;
7087 hash_map<void *, void *> *eh_map;
7091 /* Helper for move_block_to_fn. Set TREE_BLOCK in every expression
7092 contained in *TP if it has been ORIG_BLOCK previously and change the
7093 DECL_CONTEXT of every local variable referenced in *TP. */
7096 move_stmt_op (tree *tp, int *walk_subtrees, void *data)
7098 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
7099 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
7104 tree block = TREE_BLOCK (t);
7105 if (block == NULL_TREE)
7107 else if (block == p->orig_block
7108 || p->orig_block == NULL_TREE)
7110 /* tree_node_can_be_shared says we can share invariant
7111 addresses but unshare_expr copies them anyways. Make sure
7112 to unshare before adjusting the block in place - we do not
7113 always see a copy here. */
7114 if (TREE_CODE (t) == ADDR_EXPR
7115 && is_gimple_min_invariant (t))
7116 *tp = t = unshare_expr (t);
7117 TREE_SET_BLOCK (t, p->new_block);
7119 else if (flag_checking)
7121 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
7122 block = BLOCK_SUPERCONTEXT (block);
7123 gcc_assert (block == p->orig_block);
7126 else if (DECL_P (t) || TREE_CODE (t) == SSA_NAME)
7128 if (TREE_CODE (t) == SSA_NAME)
7129 *tp = replace_ssa_name (t, p->vars_map, p->to_context);
7130 else if (TREE_CODE (t) == PARM_DECL
7131 && gimple_in_ssa_p (cfun))
7132 *tp = *(p->vars_map->get (t));
7133 else if (TREE_CODE (t) == LABEL_DECL)
7135 if (p->new_label_map)
7137 struct tree_map in, *out;
7139 out = (struct tree_map *)
7140 htab_find_with_hash (p->new_label_map, &in, DECL_UID (t));
7145 /* For FORCED_LABELs we can end up with references from other
7146 functions if some SESE regions are outlined. It is UB to
7147 jump in between them, but they could be used just for printing
7148 addresses etc. In that case, DECL_CONTEXT on the label should
7149 be the function containing the glabel stmt with that LABEL_DECL,
7150 rather than whatever function a reference to the label was seen
7152 if (!FORCED_LABEL (t) && !DECL_NONLOCAL (t))
7153 DECL_CONTEXT (t) = p->to_context;
7155 else if (p->remap_decls_p)
7157 /* Replace T with its duplicate. T should no longer appear in the
7158 parent function, so this looks wasteful; however, it may appear
7159 in referenced_vars, and more importantly, as virtual operands of
7160 statements, and in alias lists of other variables. It would be
7161 quite difficult to expunge it from all those places. ??? It might
7162 suffice to do this for addressable variables. */
7163 if ((VAR_P (t) && !is_global_var (t))
7164 || TREE_CODE (t) == CONST_DECL)
7165 replace_by_duplicate_decl (tp, p->vars_map, p->to_context);
7169 else if (TYPE_P (t))
7175 /* Helper for move_stmt_r. Given an EH region number for the source
7176 function, map that to the duplicate EH regio number in the dest. */
7179 move_stmt_eh_region_nr (int old_nr, struct move_stmt_d *p)
7181 eh_region old_r, new_r;
7183 old_r = get_eh_region_from_number (old_nr);
7184 new_r = static_cast<eh_region> (*p->eh_map->get (old_r));
7186 return new_r->index;
7189 /* Similar, but operate on INTEGER_CSTs. */
7192 move_stmt_eh_region_tree_nr (tree old_t_nr, struct move_stmt_d *p)
7196 old_nr = tree_to_shwi (old_t_nr);
7197 new_nr = move_stmt_eh_region_nr (old_nr, p);
7199 return build_int_cst (integer_type_node, new_nr);
7202 /* Like move_stmt_op, but for gimple statements.
7204 Helper for move_block_to_fn. Set GIMPLE_BLOCK in every expression
7205 contained in the current statement in *GSI_P and change the
7206 DECL_CONTEXT of every local variable referenced in the current
7210 move_stmt_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
7211 struct walk_stmt_info *wi)
7213 struct move_stmt_d *p = (struct move_stmt_d *) wi->info;
7214 gimple *stmt = gsi_stmt (*gsi_p);
7215 tree block = gimple_block (stmt);
7217 if (block == p->orig_block
7218 || (p->orig_block == NULL_TREE
7219 && block != NULL_TREE))
7220 gimple_set_block (stmt, p->new_block);
7222 switch (gimple_code (stmt))
7225 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
7227 tree r, fndecl = gimple_call_fndecl (stmt);
7228 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7229 switch (DECL_FUNCTION_CODE (fndecl))
7231 case BUILT_IN_EH_COPY_VALUES:
7232 r = gimple_call_arg (stmt, 1);
7233 r = move_stmt_eh_region_tree_nr (r, p);
7234 gimple_call_set_arg (stmt, 1, r);
7237 case BUILT_IN_EH_POINTER:
7238 case BUILT_IN_EH_FILTER:
7239 r = gimple_call_arg (stmt, 0);
7240 r = move_stmt_eh_region_tree_nr (r, p);
7241 gimple_call_set_arg (stmt, 0, r);
7252 gresx *resx_stmt = as_a <gresx *> (stmt);
7253 int r = gimple_resx_region (resx_stmt);
7254 r = move_stmt_eh_region_nr (r, p);
7255 gimple_resx_set_region (resx_stmt, r);
7259 case GIMPLE_EH_DISPATCH:
7261 geh_dispatch *eh_dispatch_stmt = as_a <geh_dispatch *> (stmt);
7262 int r = gimple_eh_dispatch_region (eh_dispatch_stmt);
7263 r = move_stmt_eh_region_nr (r, p);
7264 gimple_eh_dispatch_set_region (eh_dispatch_stmt, r);
7268 case GIMPLE_OMP_RETURN:
7269 case GIMPLE_OMP_CONTINUE:
7274 /* For FORCED_LABEL, move_stmt_op doesn't adjust DECL_CONTEXT,
7275 so that such labels can be referenced from other regions.
7276 Make sure to update it when seeing a GIMPLE_LABEL though,
7277 that is the owner of the label. */
7278 walk_gimple_op (stmt, move_stmt_op, wi);
7279 *handled_ops_p = true;
7280 tree label = gimple_label_label (as_a <glabel *> (stmt));
7281 if (FORCED_LABEL (label) || DECL_NONLOCAL (label))
7282 DECL_CONTEXT (label) = p->to_context;
7287 if (is_gimple_omp (stmt))
7289 /* Do not remap variables inside OMP directives. Variables
7290 referenced in clauses and directive header belong to the
7291 parent function and should not be moved into the child
7293 bool save_remap_decls_p = p->remap_decls_p;
7294 p->remap_decls_p = false;
7295 *handled_ops_p = true;
7297 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), move_stmt_r,
7300 p->remap_decls_p = save_remap_decls_p;
7308 /* Move basic block BB from function CFUN to function DEST_FN. The
7309 block is moved out of the original linked list and placed after
7310 block AFTER in the new list. Also, the block is removed from the
7311 original array of blocks and placed in DEST_FN's array of blocks.
7312 If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
7313 updated to reflect the moved edges.
7315 The local variables are remapped to new instances, VARS_MAP is used
7316 to record the mapping. */
7319 move_block_to_fn (struct function *dest_cfun, basic_block bb,
7320 basic_block after, bool update_edge_count_p,
7321 struct move_stmt_d *d)
7323 struct control_flow_graph *cfg;
7326 gimple_stmt_iterator si;
7329 /* Remove BB from dominance structures. */
7330 delete_from_dominance_info (CDI_DOMINATORS, bb);
7332 /* Move BB from its current loop to the copy in the new function. */
7335 class loop *new_loop = (class loop *)bb->loop_father->aux;
7337 bb->loop_father = new_loop;
7340 /* Link BB to the new linked list. */
7341 move_block_after (bb, after);
7343 /* Update the edge count in the corresponding flowgraphs. */
7344 if (update_edge_count_p)
7345 FOR_EACH_EDGE (e, ei, bb->succs)
7347 cfun->cfg->x_n_edges--;
7348 dest_cfun->cfg->x_n_edges++;
7351 /* Remove BB from the original basic block array. */
7352 (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
7353 cfun->cfg->x_n_basic_blocks--;
7355 /* Grow DEST_CFUN's basic block array if needed. */
7356 cfg = dest_cfun->cfg;
7357 cfg->x_n_basic_blocks++;
7358 if (bb->index >= cfg->x_last_basic_block)
7359 cfg->x_last_basic_block = bb->index + 1;
7361 old_len = vec_safe_length (cfg->x_basic_block_info);
7362 if ((unsigned) cfg->x_last_basic_block >= old_len)
7363 vec_safe_grow_cleared (cfg->x_basic_block_info,
7364 cfg->x_last_basic_block + 1);
7366 (*cfg->x_basic_block_info)[bb->index] = bb;
7368 /* Remap the variables in phi nodes. */
7369 for (gphi_iterator psi = gsi_start_phis (bb);
7372 gphi *phi = psi.phi ();
7374 tree op = PHI_RESULT (phi);
7378 if (virtual_operand_p (op))
7380 /* Remove the phi nodes for virtual operands (alias analysis will be
7381 run for the new function, anyway). But replace all uses that
7382 might be outside of the region we move. */
7383 use_operand_p use_p;
7384 imm_use_iterator iter;
7386 FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
7387 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7388 SET_USE (use_p, SSA_NAME_VAR (op));
7389 remove_phi_node (&psi, true);
7393 SET_PHI_RESULT (phi,
7394 replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7395 FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
7397 op = USE_FROM_PTR (use);
7398 if (TREE_CODE (op) == SSA_NAME)
7399 SET_USE (use, replace_ssa_name (op, d->vars_map, dest_cfun->decl));
7402 for (i = 0; i < EDGE_COUNT (bb->preds); i++)
7404 location_t locus = gimple_phi_arg_location (phi, i);
7405 tree block = LOCATION_BLOCK (locus);
7407 if (locus == UNKNOWN_LOCATION)
7409 if (d->orig_block == NULL_TREE || block == d->orig_block)
7411 locus = set_block (locus, d->new_block);
7412 gimple_phi_arg_set_location (phi, i, locus);
7419 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7421 gimple *stmt = gsi_stmt (si);
7422 struct walk_stmt_info wi;
7424 memset (&wi, 0, sizeof (wi));
7426 walk_gimple_stmt (&si, move_stmt_r, move_stmt_op, &wi);
7428 if (glabel *label_stmt = dyn_cast <glabel *> (stmt))
7430 tree label = gimple_label_label (label_stmt);
7431 int uid = LABEL_DECL_UID (label);
7433 gcc_assert (uid > -1);
7435 old_len = vec_safe_length (cfg->x_label_to_block_map);
7436 if (old_len <= (unsigned) uid)
7437 vec_safe_grow_cleared (cfg->x_label_to_block_map, uid + 1);
7439 (*cfg->x_label_to_block_map)[uid] = bb;
7440 (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
7442 gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
7444 if (uid >= dest_cfun->cfg->last_label_uid)
7445 dest_cfun->cfg->last_label_uid = uid + 1;
7448 maybe_duplicate_eh_stmt_fn (dest_cfun, stmt, cfun, stmt, d->eh_map, 0);
7449 remove_stmt_from_eh_lp_fn (cfun, stmt);
7451 gimple_duplicate_stmt_histograms (dest_cfun, stmt, cfun, stmt);
7452 gimple_remove_stmt_histograms (cfun, stmt);
7454 /* We cannot leave any operands allocated from the operand caches of
7455 the current function. */
7456 free_stmt_operands (cfun, stmt);
7457 push_cfun (dest_cfun);
7459 if (is_gimple_call (stmt))
7460 notice_special_calls (as_a <gcall *> (stmt));
7464 FOR_EACH_EDGE (e, ei, bb->succs)
7465 if (e->goto_locus != UNKNOWN_LOCATION)
7467 tree block = LOCATION_BLOCK (e->goto_locus);
7468 if (d->orig_block == NULL_TREE
7469 || block == d->orig_block)
7470 e->goto_locus = set_block (e->goto_locus, d->new_block);
7474 /* Examine the statements in BB (which is in SRC_CFUN); find and return
7475 the outermost EH region. Use REGION as the incoming base EH region.
7476 If there is no single outermost region, return NULL and set *ALL to
7480 find_outermost_region_in_block (struct function *src_cfun,
7481 basic_block bb, eh_region region,
7484 gimple_stmt_iterator si;
7486 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
7488 gimple *stmt = gsi_stmt (si);
7489 eh_region stmt_region;
7492 lp_nr = lookup_stmt_eh_lp_fn (src_cfun, stmt);
7493 stmt_region = get_eh_region_from_lp_number_fn (src_cfun, lp_nr);
7497 region = stmt_region;
7498 else if (stmt_region != region)
7500 region = eh_region_outermost (src_cfun, stmt_region, region);
7514 new_label_mapper (tree decl, void *data)
7516 htab_t hash = (htab_t) data;
7520 gcc_assert (TREE_CODE (decl) == LABEL_DECL);
7522 m = XNEW (struct tree_map);
7523 m->hash = DECL_UID (decl);
7524 m->base.from = decl;
7525 m->to = create_artificial_label (UNKNOWN_LOCATION);
7526 LABEL_DECL_UID (m->to) = LABEL_DECL_UID (decl);
7527 if (LABEL_DECL_UID (m->to) >= cfun->cfg->last_label_uid)
7528 cfun->cfg->last_label_uid = LABEL_DECL_UID (m->to) + 1;
7530 slot = htab_find_slot_with_hash (hash, m, m->hash, INSERT);
7531 gcc_assert (*slot == NULL);
7538 /* Tree walker to replace the decls used inside value expressions by
7542 replace_block_vars_by_duplicates_1 (tree *tp, int *walk_subtrees, void *data)
7544 struct replace_decls_d *rd = (struct replace_decls_d *)data;
7546 switch (TREE_CODE (*tp))
7551 replace_by_duplicate_decl (tp, rd->vars_map, rd->to_context);
7557 if (IS_TYPE_OR_DECL_P (*tp))
7558 *walk_subtrees = false;
7563 /* Change DECL_CONTEXT of all BLOCK_VARS in block, including
7567 replace_block_vars_by_duplicates (tree block, hash_map<tree, tree> *vars_map,
7572 for (tp = &BLOCK_VARS (block); *tp; tp = &DECL_CHAIN (*tp))
7575 if (!VAR_P (t) && TREE_CODE (t) != CONST_DECL)
7577 replace_by_duplicate_decl (&t, vars_map, to_context);
7580 if (VAR_P (*tp) && DECL_HAS_VALUE_EXPR_P (*tp))
7582 tree x = DECL_VALUE_EXPR (*tp);
7583 struct replace_decls_d rd = { vars_map, to_context };
7585 walk_tree (&x, replace_block_vars_by_duplicates_1, &rd, NULL);
7586 SET_DECL_VALUE_EXPR (t, x);
7587 DECL_HAS_VALUE_EXPR_P (t) = 1;
7589 DECL_CHAIN (t) = DECL_CHAIN (*tp);
7594 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
7595 replace_block_vars_by_duplicates (block, vars_map, to_context);
7598 /* Fixup the loop arrays and numbers after moving LOOP and its subloops
7602 fixup_loop_arrays_after_move (struct function *fn1, struct function *fn2,
7605 /* Discard it from the old loop array. */
7606 (*get_loops (fn1))[loop->num] = NULL;
7608 /* Place it in the new loop array, assigning it a new number. */
7609 loop->num = number_of_loops (fn2);
7610 vec_safe_push (loops_for_fn (fn2)->larray, loop);
7612 /* Recurse to children. */
7613 for (loop = loop->inner; loop; loop = loop->next)
7614 fixup_loop_arrays_after_move (fn1, fn2, loop);
7617 /* Verify that the blocks in BBS_P are a single-entry, single-exit region
7618 delimited by ENTRY_BB and EXIT_BB, possibly containing noreturn blocks. */
7621 verify_sese (basic_block entry, basic_block exit, vec<basic_block> *bbs_p)
7626 bitmap bbs = BITMAP_ALLOC (NULL);
7629 gcc_assert (entry != NULL);
7630 gcc_assert (entry != exit);
7631 gcc_assert (bbs_p != NULL);
7633 gcc_assert (bbs_p->length () > 0);
7635 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7636 bitmap_set_bit (bbs, bb->index);
7638 gcc_assert (bitmap_bit_p (bbs, entry->index));
7639 gcc_assert (exit == NULL || bitmap_bit_p (bbs, exit->index));
7641 FOR_EACH_VEC_ELT (*bbs_p, i, bb)
7645 gcc_assert (single_pred_p (entry));
7646 gcc_assert (!bitmap_bit_p (bbs, single_pred (entry)->index));
7649 for (ei = ei_start (bb->preds); !ei_end_p (ei); ei_next (&ei))
7652 gcc_assert (bitmap_bit_p (bbs, e->src->index));
7657 gcc_assert (single_succ_p (exit));
7658 gcc_assert (!bitmap_bit_p (bbs, single_succ (exit)->index));
7661 for (ei = ei_start (bb->succs); !ei_end_p (ei); ei_next (&ei))
7664 gcc_assert (bitmap_bit_p (bbs, e->dest->index));
7671 /* If FROM is an SSA_NAME, mark the version in bitmap DATA. */
7674 gather_ssa_name_hash_map_from (tree const &from, tree const &, void *data)
7676 bitmap release_names = (bitmap)data;
7678 if (TREE_CODE (from) != SSA_NAME)
7681 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7685 /* Return LOOP_DIST_ALIAS call if present in BB. */
7688 find_loop_dist_alias (basic_block bb)
7690 gimple *g = last_stmt (bb);
7691 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7694 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7696 if (gsi_end_p (gsi))
7700 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7705 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7706 to VALUE and update any immediate uses of it's LHS. */
7709 fold_loop_internal_call (gimple *g, tree value)
7711 tree lhs = gimple_call_lhs (g);
7712 use_operand_p use_p;
7713 imm_use_iterator iter;
7715 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7717 replace_call_with_value (&gsi, value);
7718 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7720 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7721 SET_USE (use_p, value);
7722 update_stmt (use_stmt);
7726 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7727 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7728 single basic block in the original CFG and the new basic block is
7729 returned. DEST_CFUN must not have a CFG yet.
7731 Note that the region need not be a pure SESE region. Blocks inside
7732 the region may contain calls to abort/exit. The only restriction
7733 is that ENTRY_BB should be the only entry point and it must
7736 Change TREE_BLOCK of all statements in ORIG_BLOCK to the new
7737 functions outermost BLOCK, move all subblocks of ORIG_BLOCK
7738 to the new function.
7740 All local variables referenced in the region are assumed to be in
7741 the corresponding BLOCK_VARS and unexpanded variable lists
7742 associated with DEST_CFUN.
7744 TODO: investigate whether we can reuse gimple_duplicate_sese_region to
7745 reimplement move_sese_region_to_fn by duplicating the region rather than
7749 move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
7750 basic_block exit_bb, tree orig_block)
7752 vec<basic_block> bbs;
7753 basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
7754 basic_block after, bb, *entry_pred, *exit_succ, abb;
7755 struct function *saved_cfun = cfun;
7756 int *entry_flag, *exit_flag;
7757 profile_probability *entry_prob, *exit_prob;
7758 unsigned i, num_entry_edges, num_exit_edges, num_nodes;
7761 htab_t new_label_map;
7762 hash_map<void *, void *> *eh_map;
7763 class loop *loop = entry_bb->loop_father;
7764 class loop *loop0 = get_loop (saved_cfun, 0);
7765 struct move_stmt_d d;
7767 /* If ENTRY does not strictly dominate EXIT, this cannot be an SESE
7769 gcc_assert (entry_bb != exit_bb
7771 || dominated_by_p (CDI_DOMINATORS, exit_bb, entry_bb)));
7773 /* Collect all the blocks in the region. Manually add ENTRY_BB
7774 because it won't be added by dfs_enumerate_from. */
7776 bbs.safe_push (entry_bb);
7777 gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
7780 verify_sese (entry_bb, exit_bb, &bbs);
7782 /* The blocks that used to be dominated by something in BBS will now be
7783 dominated by the new block. */
7784 auto_vec<basic_block> dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
7788 /* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
7789 the predecessor edges to ENTRY_BB and the successor edges to
7790 EXIT_BB so that we can re-attach them to the new basic block that
7791 will replace the region. */
7792 num_entry_edges = EDGE_COUNT (entry_bb->preds);
7793 entry_pred = XNEWVEC (basic_block, num_entry_edges);
7794 entry_flag = XNEWVEC (int, num_entry_edges);
7795 entry_prob = XNEWVEC (profile_probability, num_entry_edges);
7797 for (ei = ei_start (entry_bb->preds); (e = ei_safe_edge (ei)) != NULL;)
7799 entry_prob[i] = e->probability;
7800 entry_flag[i] = e->flags;
7801 entry_pred[i++] = e->src;
7807 num_exit_edges = EDGE_COUNT (exit_bb->succs);
7808 exit_succ = XNEWVEC (basic_block, num_exit_edges);
7809 exit_flag = XNEWVEC (int, num_exit_edges);
7810 exit_prob = XNEWVEC (profile_probability, num_exit_edges);
7812 for (ei = ei_start (exit_bb->succs); (e = ei_safe_edge (ei)) != NULL;)
7814 exit_prob[i] = e->probability;
7815 exit_flag[i] = e->flags;
7816 exit_succ[i++] = e->dest;
7828 /* Switch context to the child function to initialize DEST_FN's CFG. */
7829 gcc_assert (dest_cfun->cfg == NULL);
7830 push_cfun (dest_cfun);
7832 init_empty_tree_cfg ();
7834 /* Initialize EH information for the new function. */
7836 new_label_map = NULL;
7839 eh_region region = NULL;
7842 FOR_EACH_VEC_ELT (bbs, i, bb)
7844 region = find_outermost_region_in_block (saved_cfun, bb, region, &all);
7849 init_eh_for_function ();
7850 if (region != NULL || all)
7852 new_label_map = htab_create (17, tree_map_hash, tree_map_eq, free);
7853 eh_map = duplicate_eh_regions (saved_cfun, region, 0,
7854 new_label_mapper, new_label_map);
7858 /* Initialize an empty loop tree. */
7859 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7860 init_loops_structure (dest_cfun, loops, 1);
7861 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7862 set_loops_for_fn (dest_cfun, loops);
7864 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7866 /* Move the outlined loop tree part. */
7867 num_nodes = bbs.length ();
7868 FOR_EACH_VEC_ELT (bbs, i, bb)
7870 if (bb->loop_father->header == bb)
7872 class loop *this_loop = bb->loop_father;
7873 class loop *outer = loop_outer (this_loop);
7875 /* If the SESE region contains some bbs ending with
7876 a noreturn call, those are considered to belong
7877 to the outermost loop in saved_cfun, rather than
7878 the entry_bb's loop_father. */
7882 num_nodes -= this_loop->num_nodes;
7883 flow_loop_tree_node_remove (bb->loop_father);
7884 flow_loop_tree_node_add (get_loop (dest_cfun, 0), this_loop);
7885 fixup_loop_arrays_after_move (saved_cfun, cfun, this_loop);
7888 else if (bb->loop_father == loop0 && loop0 != loop)
7891 /* Remove loop exits from the outlined region. */
7892 if (loops_for_fn (saved_cfun)->exits)
7893 FOR_EACH_EDGE (e, ei, bb->succs)
7895 struct loops *l = loops_for_fn (saved_cfun);
7897 = l->exits->find_slot_with_hash (e, htab_hash_pointer (e),
7900 l->exits->clear_slot (slot);
7904 /* Adjust the number of blocks in the tree root of the outlined part. */
7905 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7907 /* Setup a mapping to be used by move_block_to_fn. */
7908 loop->aux = current_loops->tree_root;
7909 loop0->aux = current_loops->tree_root;
7911 /* Fix up orig_loop_num. If the block referenced in it has been moved
7912 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7913 signed char *moved_orig_loop_num = NULL;
7914 for (auto dloop : loops_list (dest_cfun, 0))
7915 if (dloop->orig_loop_num)
7917 if (moved_orig_loop_num == NULL)
7919 = XCNEWVEC (signed char, vec_safe_length (larray));
7920 if ((*larray)[dloop->orig_loop_num] != NULL
7921 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7923 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7924 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7925 moved_orig_loop_num[dloop->orig_loop_num]++;
7926 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7930 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7931 dloop->orig_loop_num = 0;
7936 if (moved_orig_loop_num)
7938 FOR_EACH_VEC_ELT (bbs, i, bb)
7940 gimple *g = find_loop_dist_alias (bb);
7944 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7945 gcc_assert (orig_loop_num
7946 && (unsigned) orig_loop_num < vec_safe_length (larray));
7947 if (moved_orig_loop_num[orig_loop_num] == 2)
7949 /* If we have moved both loops with this orig_loop_num into
7950 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7951 too, update the first argument. */
7952 gcc_assert ((*larray)[orig_loop_num] != NULL
7953 && (get_loop (saved_cfun, orig_loop_num) == NULL));
7954 tree t = build_int_cst (integer_type_node,
7955 (*larray)[orig_loop_num]->num);
7956 gimple_call_set_arg (g, 0, t);
7958 /* Make sure the following loop will not update it. */
7959 moved_orig_loop_num[orig_loop_num] = 0;
7962 /* Otherwise at least one of the loops stayed in saved_cfun.
7963 Remove the LOOP_DIST_ALIAS call. */
7964 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7966 FOR_EACH_BB_FN (bb, saved_cfun)
7968 gimple *g = find_loop_dist_alias (bb);
7971 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7972 gcc_assert (orig_loop_num
7973 && (unsigned) orig_loop_num < vec_safe_length (larray));
7974 if (moved_orig_loop_num[orig_loop_num])
7975 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7976 of the corresponding loops was moved, remove it. */
7977 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7979 XDELETEVEC (moved_orig_loop_num);
7983 /* Move blocks from BBS into DEST_CFUN. */
7984 gcc_assert (bbs.length () >= 2);
7985 after = dest_cfun->cfg->x_entry_block_ptr;
7986 hash_map<tree, tree> vars_map;
7988 memset (&d, 0, sizeof (d));
7989 d.orig_block = orig_block;
7990 d.new_block = DECL_INITIAL (dest_cfun->decl);
7991 d.from_context = cfun->decl;
7992 d.to_context = dest_cfun->decl;
7993 d.vars_map = &vars_map;
7994 d.new_label_map = new_label_map;
7996 d.remap_decls_p = true;
7998 if (gimple_in_ssa_p (cfun))
7999 for (tree arg = DECL_ARGUMENTS (d.to_context); arg; arg = DECL_CHAIN (arg))
8001 tree narg = make_ssa_name_fn (dest_cfun, arg, gimple_build_nop ());
8002 set_ssa_default_def (dest_cfun, arg, narg);
8003 vars_map.put (arg, narg);
8006 FOR_EACH_VEC_ELT (bbs, i, bb)
8008 /* No need to update edge counts on the last block. It has
8009 already been updated earlier when we detached the region from
8010 the original CFG. */
8011 move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, &d);
8015 /* Adjust the maximum clique used. */
8016 dest_cfun->last_clique = saved_cfun->last_clique;
8020 /* Loop sizes are no longer correct, fix them up. */
8021 loop->num_nodes -= num_nodes;
8022 for (class loop *outer = loop_outer (loop);
8023 outer; outer = loop_outer (outer))
8024 outer->num_nodes -= num_nodes;
8025 loop0->num_nodes -= bbs.length () - num_nodes;
8027 if (saved_cfun->has_simduid_loops || saved_cfun->has_force_vectorize_loops)
8030 for (i = 0; vec_safe_iterate (loops->larray, i, &aloop); i++)
8035 replace_by_duplicate_decl (&aloop->simduid, d.vars_map,
8037 dest_cfun->has_simduid_loops = true;
8039 if (aloop->force_vectorize)
8040 dest_cfun->has_force_vectorize_loops = true;
8044 /* Rewire BLOCK_SUBBLOCKS of orig_block. */
8048 gcc_assert (BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
8050 BLOCK_SUBBLOCKS (DECL_INITIAL (dest_cfun->decl))
8051 = BLOCK_SUBBLOCKS (orig_block);
8052 for (block = BLOCK_SUBBLOCKS (orig_block);
8053 block; block = BLOCK_CHAIN (block))
8054 BLOCK_SUPERCONTEXT (block) = DECL_INITIAL (dest_cfun->decl);
8055 BLOCK_SUBBLOCKS (orig_block) = NULL_TREE;
8058 replace_block_vars_by_duplicates (DECL_INITIAL (dest_cfun->decl),
8059 &vars_map, dest_cfun->decl);
8062 htab_delete (new_label_map);
8066 /* We need to release ssa-names in a defined order, so first find them,
8067 and then iterate in ascending version order. */
8068 bitmap release_names = BITMAP_ALLOC (NULL);
8069 vars_map.traverse<void *, gather_ssa_name_hash_map_from> (release_names);
8071 EXECUTE_IF_SET_IN_BITMAP (release_names, 0, i, bi)
8072 release_ssa_name (ssa_name (i));
8073 BITMAP_FREE (release_names);
8075 /* Rewire the entry and exit blocks. The successor to the entry
8076 block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
8077 the child function. Similarly, the predecessor of DEST_FN's
8078 EXIT_BLOCK_PTR turns into the predecessor of EXIT_BLOCK_PTR. We
8079 need to switch CFUN between DEST_CFUN and SAVED_CFUN so that the
8080 various CFG manipulation function get to the right CFG.
8082 FIXME, this is silly. The CFG ought to become a parameter to
8084 push_cfun (dest_cfun);
8085 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
8086 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
8089 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
8090 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
8093 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
8096 /* Back in the original function, the SESE region has disappeared,
8097 create a new basic block in its place. */
8098 bb = create_empty_bb (entry_pred[0]);
8100 add_bb_to_loop (bb, loop);
8101 for (i = 0; i < num_entry_edges; i++)
8103 e = make_edge (entry_pred[i], bb, entry_flag[i]);
8104 e->probability = entry_prob[i];
8107 for (i = 0; i < num_exit_edges; i++)
8109 e = make_edge (bb, exit_succ[i], exit_flag[i]);
8110 e->probability = exit_prob[i];
8113 set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
8114 FOR_EACH_VEC_ELT (dom_bbs, i, abb)
8115 set_immediate_dominator (CDI_DOMINATORS, abb, bb);
8131 /* Dump default def DEF to file FILE using FLAGS and indentation
8135 dump_default_def (FILE *file, tree def, int spc, dump_flags_t flags)
8137 for (int i = 0; i < spc; ++i)
8138 fprintf (file, " ");
8139 dump_ssaname_info_to_file (file, def, spc);
8141 print_generic_expr (file, TREE_TYPE (def), flags);
8142 fprintf (file, " ");
8143 print_generic_expr (file, def, flags);
8144 fprintf (file, " = ");
8145 print_generic_expr (file, SSA_NAME_VAR (def), flags);
8146 fprintf (file, ";\n");
8149 /* Print no_sanitize attribute to FILE for a given attribute VALUE. */
8152 print_no_sanitize_attr_value (FILE *file, tree value)
8154 unsigned int flags = tree_to_uhwi (value);
8156 for (int i = 0; sanitizer_opts[i].name != NULL; ++i)
8158 if ((sanitizer_opts[i].flag & flags) == sanitizer_opts[i].flag)
8161 fprintf (file, " | ");
8162 fprintf (file, "%s", sanitizer_opts[i].name);
8168 /* Dump FUNCTION_DECL FN to file FILE using FLAGS (see TDF_* in dumpfile.h)
8172 dump_function_to_file (tree fndecl, FILE *file, dump_flags_t flags)
8174 tree arg, var, old_current_fndecl = current_function_decl;
8175 struct function *dsf;
8176 bool ignore_topmost_bind = false, any_var = false;
8179 bool tmclone = (TREE_CODE (fndecl) == FUNCTION_DECL
8180 && decl_is_tm_clone (fndecl));
8181 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
8183 tree fntype = TREE_TYPE (fndecl);
8184 tree attrs[] = { DECL_ATTRIBUTES (fndecl), TYPE_ATTRIBUTES (fntype) };
8186 for (int i = 0; i != 2; ++i)
8191 fprintf (file, "__attribute__((");
8195 for (chain = attrs[i]; chain; first = false, chain = TREE_CHAIN (chain))
8198 fprintf (file, ", ");
8200 tree name = get_attribute_name (chain);
8201 print_generic_expr (file, name, dump_flags);
8202 if (TREE_VALUE (chain) != NULL_TREE)
8204 fprintf (file, " (");
8206 if (strstr (IDENTIFIER_POINTER (name), "no_sanitize"))
8207 print_no_sanitize_attr_value (file, TREE_VALUE (chain));
8209 print_generic_expr (file, TREE_VALUE (chain), dump_flags);
8210 fprintf (file, ")");
8214 fprintf (file, "))\n");
8217 current_function_decl = fndecl;
8218 if (flags & TDF_GIMPLE)
8220 static bool hotness_bb_param_printed = false;
8221 if (profile_info != NULL
8222 && !hotness_bb_param_printed)
8224 hotness_bb_param_printed = true;
8226 "/* --param=gimple-fe-computed-hot-bb-threshold=%" PRId64
8227 " */\n", get_hot_bb_threshold ());
8230 print_generic_expr (file, TREE_TYPE (TREE_TYPE (fndecl)),
8231 dump_flags | TDF_SLIM);
8232 fprintf (file, " __GIMPLE (%s",
8233 (fun->curr_properties & PROP_ssa) ? "ssa"
8234 : (fun->curr_properties & PROP_cfg) ? "cfg"
8237 if (fun && fun->cfg)
8239 basic_block bb = ENTRY_BLOCK_PTR_FOR_FN (fun);
8240 if (bb->count.initialized_p ())
8241 fprintf (file, ",%s(%" PRIu64 ")",
8242 profile_quality_as_string (bb->count.quality ()),
8243 bb->count.value ());
8244 if (dump_flags & TDF_UID)
8245 fprintf (file, ")\n%sD_%u (", function_name (fun),
8248 fprintf (file, ")\n%s (", function_name (fun));
8253 print_generic_expr (file, TREE_TYPE (fntype), dump_flags);
8254 if (dump_flags & TDF_UID)
8255 fprintf (file, " %sD.%u %s(", function_name (fun), DECL_UID (fndecl),
8256 tmclone ? "[tm-clone] " : "");
8258 fprintf (file, " %s %s(", function_name (fun),
8259 tmclone ? "[tm-clone] " : "");
8262 arg = DECL_ARGUMENTS (fndecl);
8265 print_generic_expr (file, TREE_TYPE (arg), dump_flags);
8266 fprintf (file, " ");
8267 print_generic_expr (file, arg, dump_flags);
8268 if (DECL_CHAIN (arg))
8269 fprintf (file, ", ");
8270 arg = DECL_CHAIN (arg);
8272 fprintf (file, ")\n");
8274 dsf = DECL_STRUCT_FUNCTION (fndecl);
8275 if (dsf && (flags & TDF_EH))
8276 dump_eh_tree (file, dsf);
8278 if (flags & TDF_RAW && !gimple_has_body_p (fndecl))
8280 dump_node (fndecl, TDF_SLIM | flags, file);
8281 current_function_decl = old_current_fndecl;
8285 /* When GIMPLE is lowered, the variables are no longer available in
8286 BIND_EXPRs, so display them separately. */
8287 if (fun && fun->decl == fndecl && (fun->curr_properties & PROP_gimple_lcf))
8290 ignore_topmost_bind = true;
8292 fprintf (file, "{\n");
8293 if (gimple_in_ssa_p (fun)
8294 && (flags & TDF_ALIAS))
8296 for (arg = DECL_ARGUMENTS (fndecl); arg != NULL;
8297 arg = DECL_CHAIN (arg))
8299 tree def = ssa_default_def (fun, arg);
8301 dump_default_def (file, def, 2, flags);
8304 tree res = DECL_RESULT (fun->decl);
8305 if (res != NULL_TREE
8306 && DECL_BY_REFERENCE (res))
8308 tree def = ssa_default_def (fun, res);
8310 dump_default_def (file, def, 2, flags);
8313 tree static_chain = fun->static_chain_decl;
8314 if (static_chain != NULL_TREE)
8316 tree def = ssa_default_def (fun, static_chain);
8318 dump_default_def (file, def, 2, flags);
8322 if (!vec_safe_is_empty (fun->local_decls))
8323 FOR_EACH_LOCAL_DECL (fun, ix, var)
8325 print_generic_decl (file, var, flags);
8326 fprintf (file, "\n");
8333 if (gimple_in_ssa_p (fun))
8334 FOR_EACH_SSA_NAME (ix, name, fun)
8336 if (!SSA_NAME_VAR (name)
8337 /* SSA name with decls without a name still get
8338 dumped as _N, list those explicitely as well even
8339 though we've dumped the decl declaration as D.xxx
8341 || !SSA_NAME_IDENTIFIER (name))
8343 fprintf (file, " ");
8344 print_generic_expr (file, TREE_TYPE (name), flags);
8345 fprintf (file, " ");
8346 print_generic_expr (file, name, flags);
8347 fprintf (file, ";\n");
8354 if (fun && fun->decl == fndecl
8356 && basic_block_info_for_fn (fun))
8358 /* If the CFG has been built, emit a CFG-based dump. */
8359 if (!ignore_topmost_bind)
8360 fprintf (file, "{\n");
8362 if (any_var && n_basic_blocks_for_fn (fun))
8363 fprintf (file, "\n");
8365 FOR_EACH_BB_FN (bb, fun)
8366 dump_bb (file, bb, 2, flags);
8368 fprintf (file, "}\n");
8370 else if (fun && (fun->curr_properties & PROP_gimple_any))
8372 /* The function is now in GIMPLE form but the CFG has not been
8373 built yet. Emit the single sequence of GIMPLE statements
8374 that make up its body. */
8375 gimple_seq body = gimple_body (fndecl);
8377 if (gimple_seq_first_stmt (body)
8378 && gimple_seq_first_stmt (body) == gimple_seq_last_stmt (body)
8379 && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND)
8380 print_gimple_seq (file, body, 0, flags);
8383 if (!ignore_topmost_bind)
8384 fprintf (file, "{\n");
8387 fprintf (file, "\n");
8389 print_gimple_seq (file, body, 2, flags);
8390 fprintf (file, "}\n");
8397 /* Make a tree based dump. */
8398 chain = DECL_SAVED_TREE (fndecl);
8399 if (chain && TREE_CODE (chain) == BIND_EXPR)
8401 if (ignore_topmost_bind)
8403 chain = BIND_EXPR_BODY (chain);
8411 if (!ignore_topmost_bind)
8413 fprintf (file, "{\n");
8414 /* No topmost bind, pretend it's ignored for later. */
8415 ignore_topmost_bind = true;
8421 fprintf (file, "\n");
8423 print_generic_stmt_indented (file, chain, flags, indent);
8424 if (ignore_topmost_bind)
8425 fprintf (file, "}\n");
8428 if (flags & TDF_ENUMERATE_LOCALS)
8429 dump_enumerated_decls (file, flags);
8430 fprintf (file, "\n\n");
8432 current_function_decl = old_current_fndecl;
8435 /* Dump FUNCTION_DECL FN to stderr using FLAGS (see TDF_* in tree.h) */
8438 debug_function (tree fn, dump_flags_t flags)
8440 dump_function_to_file (fn, stderr, flags);
8444 /* Print on FILE the indexes for the predecessors of basic_block BB. */
8447 print_pred_bbs (FILE *file, basic_block bb)
8452 FOR_EACH_EDGE (e, ei, bb->preds)
8453 fprintf (file, "bb_%d ", e->src->index);
8457 /* Print on FILE the indexes for the successors of basic_block BB. */
8460 print_succ_bbs (FILE *file, basic_block bb)
8465 FOR_EACH_EDGE (e, ei, bb->succs)
8466 fprintf (file, "bb_%d ", e->dest->index);
8469 /* Print to FILE the basic block BB following the VERBOSITY level. */
8472 print_loops_bb (FILE *file, basic_block bb, int indent, int verbosity)
8474 char *s_indent = (char *) alloca ((size_t) indent + 1);
8475 memset ((void *) s_indent, ' ', (size_t) indent);
8476 s_indent[indent] = '\0';
8478 /* Print basic_block's header. */
8481 fprintf (file, "%s bb_%d (preds = {", s_indent, bb->index);
8482 print_pred_bbs (file, bb);
8483 fprintf (file, "}, succs = {");
8484 print_succ_bbs (file, bb);
8485 fprintf (file, "})\n");
8488 /* Print basic_block's body. */
8491 fprintf (file, "%s {\n", s_indent);
8492 dump_bb (file, bb, indent + 4, TDF_VOPS|TDF_MEMSYMS);
8493 fprintf (file, "%s }\n", s_indent);
8497 static void print_loop_and_siblings (FILE *, class loop *, int, int);
8499 /* Pretty print LOOP on FILE, indented INDENT spaces. Following
8500 VERBOSITY level this outputs the contents of the loop, or just its
8504 print_loop (FILE *file, class loop *loop, int indent, int verbosity)
8512 s_indent = (char *) alloca ((size_t) indent + 1);
8513 memset ((void *) s_indent, ' ', (size_t) indent);
8514 s_indent[indent] = '\0';
8516 /* Print loop's header. */
8517 fprintf (file, "%sloop_%d (", s_indent, loop->num);
8519 fprintf (file, "header = %d", loop->header->index);
8522 fprintf (file, "deleted)\n");
8526 fprintf (file, ", latch = %d", loop->latch->index);
8528 fprintf (file, ", multiple latches");
8529 fprintf (file, ", niter = ");
8530 print_generic_expr (file, loop->nb_iterations);
8532 if (loop->any_upper_bound)
8534 fprintf (file, ", upper_bound = ");
8535 print_decu (loop->nb_iterations_upper_bound, file);
8537 if (loop->any_likely_upper_bound)
8539 fprintf (file, ", likely_upper_bound = ");
8540 print_decu (loop->nb_iterations_likely_upper_bound, file);
8543 if (loop->any_estimate)
8545 fprintf (file, ", estimate = ");
8546 print_decu (loop->nb_iterations_estimate, file);
8549 fprintf (file, ", unroll = %d", loop->unroll);
8550 fprintf (file, ")\n");
8552 /* Print loop's body. */
8555 fprintf (file, "%s{\n", s_indent);
8556 FOR_EACH_BB_FN (bb, cfun)
8557 if (bb->loop_father == loop)
8558 print_loops_bb (file, bb, indent, verbosity);
8560 print_loop_and_siblings (file, loop->inner, indent + 2, verbosity);
8561 fprintf (file, "%s}\n", s_indent);
8565 /* Print the LOOP and its sibling loops on FILE, indented INDENT
8566 spaces. Following VERBOSITY level this outputs the contents of the
8567 loop, or just its structure. */
8570 print_loop_and_siblings (FILE *file, class loop *loop, int indent,
8576 print_loop (file, loop, indent, verbosity);
8577 print_loop_and_siblings (file, loop->next, indent, verbosity);
8580 /* Follow a CFG edge from the entry point of the program, and on entry
8581 of a loop, pretty print the loop structure on FILE. */
8584 print_loops (FILE *file, int verbosity)
8588 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
8589 fprintf (file, "\nLoops in function: %s\n", current_function_name ());
8590 if (bb && bb->loop_father)
8591 print_loop_and_siblings (file, bb->loop_father, 0, verbosity);
8597 debug (class loop &ref)
8599 print_loop (stderr, &ref, 0, /*verbosity*/0);
8603 debug (class loop *ptr)
8608 fprintf (stderr, "<nil>\n");
8611 /* Dump a loop verbosely. */
8614 debug_verbose (class loop &ref)
8616 print_loop (stderr, &ref, 0, /*verbosity*/3);
8620 debug_verbose (class loop *ptr)
8625 fprintf (stderr, "<nil>\n");
8629 /* Debugging loops structure at tree level, at some VERBOSITY level. */
8632 debug_loops (int verbosity)
8634 print_loops (stderr, verbosity);
8637 /* Print on stderr the code of LOOP, at some VERBOSITY level. */
8640 debug_loop (class loop *loop, int verbosity)
8642 print_loop (stderr, loop, 0, verbosity);
8645 /* Print on stderr the code of loop number NUM, at some VERBOSITY
8649 debug_loop_num (unsigned num, int verbosity)
8651 debug_loop (get_loop (cfun, num), verbosity);
8654 /* Return true if BB ends with a call, possibly followed by some
8655 instructions that must stay with the call. Return false,
8659 gimple_block_ends_with_call_p (basic_block bb)
8661 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8662 return !gsi_end_p (gsi) && is_gimple_call (gsi_stmt (gsi));
8666 /* Return true if BB ends with a conditional branch. Return false,
8670 gimple_block_ends_with_condjump_p (const_basic_block bb)
8672 gimple *stmt = last_stmt (CONST_CAST_BB (bb));
8673 return (stmt && gimple_code (stmt) == GIMPLE_COND);
8677 /* Return true if statement T may terminate execution of BB in ways not
8678 explicitly represtented in the CFG. */
8681 stmt_can_terminate_bb_p (gimple *t)
8683 tree fndecl = NULL_TREE;
8686 /* Eh exception not handled internally terminates execution of the whole
8688 if (stmt_can_throw_external (cfun, t))
8691 /* NORETURN and LONGJMP calls already have an edge to exit.
8692 CONST and PURE calls do not need one.
8693 We don't currently check for CONST and PURE here, although
8694 it would be a good idea, because those attributes are
8695 figured out from the RTL in mark_constant_function, and
8696 the counter incrementation code from -fprofile-arcs
8697 leads to different results from -fbranch-probabilities. */
8698 if (is_gimple_call (t))
8700 fndecl = gimple_call_fndecl (t);
8701 call_flags = gimple_call_flags (t);
8704 if (is_gimple_call (t)
8706 && fndecl_built_in_p (fndecl)
8707 && (call_flags & ECF_NOTHROW)
8708 && !(call_flags & ECF_RETURNS_TWICE)
8709 /* fork() doesn't really return twice, but the effect of
8710 wrapping it in __gcov_fork() which calls __gcov_dump() and
8711 __gcov_reset() and clears the counters before forking has the same
8712 effect as returning twice. Force a fake edge. */
8713 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8716 if (is_gimple_call (t))
8722 if (call_flags & (ECF_PURE | ECF_CONST)
8723 && !(call_flags & ECF_LOOPING_CONST_OR_PURE))
8726 /* Function call may do longjmp, terminate program or do other things.
8727 Special case noreturn that have non-abnormal edges out as in this case
8728 the fact is sufficiently represented by lack of edges out of T. */
8729 if (!(call_flags & ECF_NORETURN))
8733 FOR_EACH_EDGE (e, ei, bb->succs)
8734 if ((e->flags & EDGE_FAKE) == 0)
8738 if (gasm *asm_stmt = dyn_cast <gasm *> (t))
8739 if (gimple_asm_volatile_p (asm_stmt) || gimple_asm_input_p (asm_stmt))
8746 /* Add fake edges to the function exit for any non constant and non
8747 noreturn calls (or noreturn calls with EH/abnormal edges),
8748 volatile inline assembly in the bitmap of blocks specified by BLOCKS
8749 or to the whole CFG if BLOCKS is zero. Return the number of blocks
8752 The goal is to expose cases in which entering a basic block does
8753 not imply that all subsequent instructions must be executed. */
8756 gimple_flow_call_edges_add (sbitmap blocks)
8759 int blocks_split = 0;
8760 int last_bb = last_basic_block_for_fn (cfun);
8761 bool check_last_block = false;
8763 if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
8767 check_last_block = true;
8769 check_last_block = bitmap_bit_p (blocks,
8770 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);
8772 /* In the last basic block, before epilogue generation, there will be
8773 a fallthru edge to EXIT. Special care is required if the last insn
8774 of the last basic block is a call because make_edge folds duplicate
8775 edges, which would result in the fallthru edge also being marked
8776 fake, which would result in the fallthru edge being removed by
8777 remove_fake_edges, which would result in an invalid CFG.
8779 Moreover, we can't elide the outgoing fake edge, since the block
8780 profiler needs to take this into account in order to solve the minimal
8781 spanning tree in the case that the call doesn't return.
8783 Handle this by adding a dummy instruction in a new last basic block. */
8784 if (check_last_block)
8786 basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
8787 gimple_stmt_iterator gsi = gsi_last_nondebug_bb (bb);
8790 if (!gsi_end_p (gsi))
8793 if (t && stmt_can_terminate_bb_p (t))
8797 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8800 gsi_insert_on_edge (e, gimple_build_nop ());
8801 gsi_commit_edge_inserts ();
8806 /* Now add fake edges to the function exit for any non constant
8807 calls since there is no way that we can determine if they will
8809 for (i = 0; i < last_bb; i++)
8811 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
8812 gimple_stmt_iterator gsi;
8813 gimple *stmt, *last_stmt;
8818 if (blocks && !bitmap_bit_p (blocks, i))
8821 gsi = gsi_last_nondebug_bb (bb);
8822 if (!gsi_end_p (gsi))
8824 last_stmt = gsi_stmt (gsi);
8827 stmt = gsi_stmt (gsi);
8828 if (stmt_can_terminate_bb_p (stmt))
8832 /* The handling above of the final block before the
8833 epilogue should be enough to verify that there is
8834 no edge to the exit block in CFG already.
8835 Calling make_edge in such case would cause us to
8836 mark that edge as fake and remove it later. */
8837 if (flag_checking && stmt == last_stmt)
8839 e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
8840 gcc_assert (e == NULL);
8843 /* Note that the following may create a new basic block
8844 and renumber the existing basic blocks. */
8845 if (stmt != last_stmt)
8847 e = split_block (bb, stmt);
8851 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
8852 e->probability = profile_probability::guessed_never ();
8856 while (!gsi_end_p (gsi));
8861 checking_verify_flow_info ();
8863 return blocks_split;
8866 /* Removes edge E and all the blocks dominated by it, and updates dominance
8867 information. The IL in E->src needs to be updated separately.
8868 If dominance info is not available, only the edge E is removed.*/
8871 remove_edge_and_dominated_blocks (edge e)
8873 vec<basic_block> bbs_to_fix_dom = vNULL;
8876 bool none_removed = false;
8878 basic_block bb, dbb;
8881 /* If we are removing a path inside a non-root loop that may change
8882 loop ownership of blocks or remove loops. Mark loops for fixup. */
8884 && loop_outer (e->src->loop_father) != NULL
8885 && e->src->loop_father == e->dest->loop_father)
8886 loops_state_set (LOOPS_NEED_FIXUP);
8888 if (!dom_info_available_p (CDI_DOMINATORS))
8894 /* No updating is needed for edges to exit. */
8895 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
8897 if (cfgcleanup_altered_bbs)
8898 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8903 /* First, we find the basic blocks to remove. If E->dest has a predecessor
8904 that is not dominated by E->dest, then this set is empty. Otherwise,
8905 all the basic blocks dominated by E->dest are removed.
8907 Also, to DF_IDOM we store the immediate dominators of the blocks in
8908 the dominance frontier of E (i.e., of the successors of the
8909 removed blocks, if there are any, and of E->dest otherwise). */
8910 FOR_EACH_EDGE (f, ei, e->dest->preds)
8915 if (!dominated_by_p (CDI_DOMINATORS, f->src, e->dest))
8917 none_removed = true;
8922 auto_bitmap df, df_idom;
8923 auto_vec<basic_block> bbs_to_remove;
8925 bitmap_set_bit (df_idom,
8926 get_immediate_dominator (CDI_DOMINATORS, e->dest)->index);
8929 bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
8930 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8932 FOR_EACH_EDGE (f, ei, bb->succs)
8934 if (f->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
8935 bitmap_set_bit (df, f->dest->index);
8938 FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
8939 bitmap_clear_bit (df, bb->index);
8941 EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
8943 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8944 bitmap_set_bit (df_idom,
8945 get_immediate_dominator (CDI_DOMINATORS, bb)->index);
8949 if (cfgcleanup_altered_bbs)
8951 /* Record the set of the altered basic blocks. */
8952 bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);
8953 bitmap_ior_into (cfgcleanup_altered_bbs, df);
8956 /* Remove E and the cancelled blocks. */
8961 /* Walk backwards so as to get a chance to substitute all
8962 released DEFs into debug stmts. See
8963 eliminate_unnecessary_stmts() in tree-ssa-dce.cc for more
8965 for (i = bbs_to_remove.length (); i-- > 0; )
8966 delete_basic_block (bbs_to_remove[i]);
8969 /* Update the dominance information. The immediate dominator may change only
8970 for blocks whose immediate dominator belongs to DF_IDOM:
8972 Suppose that idom(X) = Y before removal of E and idom(X) != Y after the
8973 removal. Let Z the arbitrary block such that idom(Z) = Y and
8974 Z dominates X after the removal. Before removal, there exists a path P
8975 from Y to X that avoids Z. Let F be the last edge on P that is
8976 removed, and let W = F->dest. Before removal, idom(W) = Y (since Y
8977 dominates W, and because of P, Z does not dominate W), and W belongs to
8978 the dominance frontier of E. Therefore, Y belongs to DF_IDOM. */
8979 EXECUTE_IF_SET_IN_BITMAP (df_idom, 0, i, bi)
8981 bb = BASIC_BLOCK_FOR_FN (cfun, i);
8982 for (dbb = first_dom_son (CDI_DOMINATORS, bb);
8984 dbb = next_dom_son (CDI_DOMINATORS, dbb))
8985 bbs_to_fix_dom.safe_push (dbb);
8988 iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
8990 bbs_to_fix_dom.release ();
8993 /* Purge dead EH edges from basic block BB. */
8996 gimple_purge_dead_eh_edges (basic_block bb)
8998 bool changed = false;
9001 gimple *stmt = last_stmt (bb);
9003 if (stmt && stmt_can_throw_internal (cfun, stmt))
9006 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
9008 if (e->flags & EDGE_EH)
9010 remove_edge_and_dominated_blocks (e);
9020 /* Purge dead EH edges from basic block listed in BLOCKS. */
9023 gimple_purge_all_dead_eh_edges (const_bitmap blocks)
9025 bool changed = false;
9029 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
9031 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
9033 /* Earlier gimple_purge_dead_eh_edges could have removed
9034 this basic block already. */
9035 gcc_assert (bb || changed);
9037 changed |= gimple_purge_dead_eh_edges (bb);
9043 /* Purge dead abnormal call edges from basic block BB. */
9046 gimple_purge_dead_abnormal_call_edges (basic_block bb)
9048 bool changed = false;
9051 gimple *stmt = last_stmt (bb);
9053 if (stmt && stmt_can_make_abnormal_goto (stmt))
9056 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
9058 if (e->flags & EDGE_ABNORMAL)
9060 if (e->flags & EDGE_FALLTHRU)
9061 e->flags &= ~EDGE_ABNORMAL;
9063 remove_edge_and_dominated_blocks (e);
9073 /* Purge dead abnormal call edges from basic block listed in BLOCKS. */
9076 gimple_purge_all_dead_abnormal_call_edges (const_bitmap blocks)
9078 bool changed = false;
9082 EXECUTE_IF_SET_IN_BITMAP (blocks, 0, i, bi)
9084 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
9086 /* Earlier gimple_purge_dead_abnormal_call_edges could have removed
9087 this basic block already. */
9088 gcc_assert (bb || changed);
9090 changed |= gimple_purge_dead_abnormal_call_edges (bb);
9096 /* This function is called whenever a new edge is created or
9100 gimple_execute_on_growing_pred (edge e)
9102 basic_block bb = e->dest;
9104 if (!gimple_seq_empty_p (phi_nodes (bb)))
9105 reserve_phi_args_for_new_edge (bb);
9108 /* This function is called immediately before edge E is removed from
9109 the edge vector E->dest->preds. */
9112 gimple_execute_on_shrinking_pred (edge e)
9114 if (!gimple_seq_empty_p (phi_nodes (e->dest)))
9115 remove_phi_args (e);
9118 /*---------------------------------------------------------------------------
9119 Helper functions for Loop versioning
9120 ---------------------------------------------------------------------------*/
9122 /* Adjust phi nodes for 'first' basic block. 'second' basic block is a copy
9123 of 'first'. Both of them are dominated by 'new_head' basic block. When
9124 'new_head' was created by 'second's incoming edge it received phi arguments
9125 on the edge by split_edge(). Later, additional edge 'e' was created to
9126 connect 'new_head' and 'first'. Now this routine adds phi args on this
9127 additional edge 'e' that new_head to second edge received as part of edge
9131 gimple_lv_adjust_loop_header_phi (basic_block first, basic_block second,
9132 basic_block new_head, edge e)
9135 gphi_iterator psi1, psi2;
9137 edge e2 = find_edge (new_head, second);
9139 /* Because NEW_HEAD has been created by splitting SECOND's incoming
9140 edge, we should always have an edge from NEW_HEAD to SECOND. */
9141 gcc_assert (e2 != NULL);
9143 /* Browse all 'second' basic block phi nodes and add phi args to
9144 edge 'e' for 'first' head. PHI args are always in correct order. */
9146 for (psi2 = gsi_start_phis (second),
9147 psi1 = gsi_start_phis (first);
9148 !gsi_end_p (psi2) && !gsi_end_p (psi1);
9149 gsi_next (&psi2), gsi_next (&psi1))
9153 def = PHI_ARG_DEF (phi2, e2->dest_idx);
9154 add_phi_arg (phi1, def, e, gimple_phi_arg_location_from_edge (phi2, e2));
9159 /* Adds a if else statement to COND_BB with condition COND_EXPR.
9160 SECOND_HEAD is the destination of the THEN and FIRST_HEAD is
9161 the destination of the ELSE part. */
9164 gimple_lv_add_condition_to_bb (basic_block first_head ATTRIBUTE_UNUSED,
9165 basic_block second_head ATTRIBUTE_UNUSED,
9166 basic_block cond_bb, void *cond_e)
9168 gimple_stmt_iterator gsi;
9169 gimple *new_cond_expr;
9170 tree cond_expr = (tree) cond_e;
9173 /* Build new conditional expr */
9174 gsi = gsi_last_bb (cond_bb);
9176 cond_expr = force_gimple_operand_gsi_1 (&gsi, cond_expr,
9177 is_gimple_condexpr_for_cond,
9179 GSI_CONTINUE_LINKING);
9180 new_cond_expr = gimple_build_cond_from_tree (cond_expr,
9181 NULL_TREE, NULL_TREE);
9183 /* Add new cond in cond_bb. */
9184 gsi_insert_after (&gsi, new_cond_expr, GSI_NEW_STMT);
9186 /* Adjust edges appropriately to connect new head with first head
9187 as well as second head. */
9188 e0 = single_succ_edge (cond_bb);
9189 e0->flags &= ~EDGE_FALLTHRU;
9190 e0->flags |= EDGE_FALSE_VALUE;
9194 /* Do book-keeping of basic block BB for the profile consistency checker.
9195 Store the counting in RECORD. */
9197 gimple_account_profile_record (basic_block bb,
9198 struct profile_record *record)
9200 gimple_stmt_iterator i;
9201 for (i = gsi_start_nondebug_after_labels_bb (bb); !gsi_end_p (i);
9202 gsi_next_nondebug (&i))
9205 += estimate_num_insns (gsi_stmt (i), &eni_size_weights);
9208 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().initialized_p ()
9209 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.ipa ().nonzero_p ()
9210 && bb->count.ipa ().initialized_p ())
9212 += estimate_num_insns (gsi_stmt (i),
9214 * bb->count.ipa ().to_gcov_type ();
9216 else if (bb->count.initialized_p ()
9217 && ENTRY_BLOCK_PTR_FOR_FN (cfun)->count.initialized_p ())
9219 += estimate_num_insns
9222 * bb->count.to_sreal_scale
9223 (ENTRY_BLOCK_PTR_FOR_FN (cfun)->count).to_double ();
9226 += estimate_num_insns (gsi_stmt (i), &eni_time_weights);
9230 struct cfg_hooks gimple_cfg_hooks = {
9232 gimple_verify_flow_info,
9233 gimple_dump_bb, /* dump_bb */
9234 gimple_dump_bb_for_graph, /* dump_bb_for_graph */
9235 create_bb, /* create_basic_block */
9236 gimple_redirect_edge_and_branch, /* redirect_edge_and_branch */
9237 gimple_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
9238 gimple_can_remove_branch_p, /* can_remove_branch_p */
9239 remove_bb, /* delete_basic_block */
9240 gimple_split_block, /* split_block */
9241 gimple_move_block_after, /* move_block_after */
9242 gimple_can_merge_blocks_p, /* can_merge_blocks_p */
9243 gimple_merge_blocks, /* merge_blocks */
9244 gimple_predict_edge, /* predict_edge */
9245 gimple_predicted_by_p, /* predicted_by_p */
9246 gimple_can_duplicate_bb_p, /* can_duplicate_block_p */
9247 gimple_duplicate_bb, /* duplicate_block */
9248 gimple_split_edge, /* split_edge */
9249 gimple_make_forwarder_block, /* make_forward_block */
9250 NULL, /* tidy_fallthru_edge */
9251 NULL, /* force_nonfallthru */
9252 gimple_block_ends_with_call_p,/* block_ends_with_call_p */
9253 gimple_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
9254 gimple_flow_call_edges_add, /* flow_call_edges_add */
9255 gimple_execute_on_growing_pred, /* execute_on_growing_pred */
9256 gimple_execute_on_shrinking_pred, /* execute_on_shrinking_pred */
9257 gimple_duplicate_loop_body_to_header_edge, /* duplicate loop for trees */
9258 gimple_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
9259 gimple_lv_adjust_loop_header_phi, /* lv_adjust_loop_header_phi*/
9260 extract_true_false_edges_from_block, /* extract_cond_bb_edges */
9261 flush_pending_stmts, /* flush_pending_stmts */
9262 gimple_empty_block_p, /* block_empty_p */
9263 gimple_split_block_before_cond_jump, /* split_block_before_cond_jump */
9264 gimple_account_profile_record,
9268 /* Split all critical edges. Split some extra (not necessarily critical) edges
9269 if FOR_EDGE_INSERTION_P is true. */
9272 split_critical_edges (bool for_edge_insertion_p /* = false */)
9278 /* split_edge can redirect edges out of SWITCH_EXPRs, which can get
9279 expensive. So we want to enable recording of edge to CASE_LABEL_EXPR
9280 mappings around the calls to split_edge. */
9281 start_recording_case_labels ();
9282 FOR_ALL_BB_FN (bb, cfun)
9284 FOR_EACH_EDGE (e, ei, bb->succs)
9286 if (EDGE_CRITICAL_P (e) && !(e->flags & EDGE_ABNORMAL))
9288 /* PRE inserts statements to edges and expects that
9289 since split_critical_edges was done beforehand, committing edge
9290 insertions will not split more edges. In addition to critical
9291 edges we must split edges that have multiple successors and
9292 end by control flow statements, such as RESX.
9293 Go ahead and split them too. This matches the logic in
9294 gimple_find_edge_insert_loc. */
9295 else if (for_edge_insertion_p
9296 && (!single_pred_p (e->dest)
9297 || !gimple_seq_empty_p (phi_nodes (e->dest))
9298 || e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
9299 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
9300 && !(e->flags & EDGE_ABNORMAL))
9302 gimple_stmt_iterator gsi;
9304 gsi = gsi_last_bb (e->src);
9305 if (!gsi_end_p (gsi)
9306 && stmt_ends_bb_p (gsi_stmt (gsi))
9307 && (gimple_code (gsi_stmt (gsi)) != GIMPLE_RETURN
9308 && !gimple_call_builtin_p (gsi_stmt (gsi),
9314 end_recording_case_labels ();
9320 const pass_data pass_data_split_crit_edges =
9322 GIMPLE_PASS, /* type */
9323 "crited", /* name */
9324 OPTGROUP_NONE, /* optinfo_flags */
9325 TV_TREE_SPLIT_EDGES, /* tv_id */
9326 PROP_cfg, /* properties_required */
9327 PROP_no_crit_edges, /* properties_provided */
9328 0, /* properties_destroyed */
9329 0, /* todo_flags_start */
9330 0, /* todo_flags_finish */
9333 class pass_split_crit_edges : public gimple_opt_pass
9336 pass_split_crit_edges (gcc::context *ctxt)
9337 : gimple_opt_pass (pass_data_split_crit_edges, ctxt)
9340 /* opt_pass methods: */
9341 unsigned int execute (function *) final override
9343 return split_critical_edges ();
9346 opt_pass * clone () final override
9348 return new pass_split_crit_edges (m_ctxt);
9350 }; // class pass_split_crit_edges
9355 make_pass_split_crit_edges (gcc::context *ctxt)
9357 return new pass_split_crit_edges (ctxt);
9361 /* Insert COND expression which is GIMPLE_COND after STMT
9362 in basic block BB with appropriate basic block split
9363 and creation of a new conditionally executed basic block.
9364 Update profile so the new bb is visited with probability PROB.
9365 Return created basic block. */
9367 insert_cond_bb (basic_block bb, gimple *stmt, gimple *cond,
9368 profile_probability prob)
9370 edge fall = split_block (bb, stmt);
9371 gimple_stmt_iterator iter = gsi_last_bb (bb);
9374 /* Insert cond statement. */
9375 gcc_assert (gimple_code (cond) == GIMPLE_COND);
9376 if (gsi_end_p (iter))
9377 gsi_insert_before (&iter, cond, GSI_CONTINUE_LINKING);
9379 gsi_insert_after (&iter, cond, GSI_CONTINUE_LINKING);
9381 /* Create conditionally executed block. */
9382 new_bb = create_empty_bb (bb);
9383 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
9384 e->probability = prob;
9385 new_bb->count = e->count ();
9386 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
9388 /* Fix edge for split bb. */
9389 fall->flags = EDGE_FALSE_VALUE;
9390 fall->probability -= e->probability;
9392 /* Update dominance info. */
9393 if (dom_info_available_p (CDI_DOMINATORS))
9395 set_immediate_dominator (CDI_DOMINATORS, new_bb, bb);
9396 set_immediate_dominator (CDI_DOMINATORS, fall->dest, bb);
9399 /* Update loop info. */
9401 add_bb_to_loop (new_bb, bb->loop_father);
9408 /* Given a basic block B which ends with a conditional and has
9409 precisely two successors, determine which of the edges is taken if
9410 the conditional is true and which is taken if the conditional is
9411 false. Set TRUE_EDGE and FALSE_EDGE appropriately. */
9414 extract_true_false_edges_from_block (basic_block b,
9418 edge e = EDGE_SUCC (b, 0);
9420 if (e->flags & EDGE_TRUE_VALUE)
9423 *false_edge = EDGE_SUCC (b, 1);
9428 *true_edge = EDGE_SUCC (b, 1);
9433 /* From a controlling predicate in the immediate dominator DOM of
9434 PHIBLOCK determine the edges into PHIBLOCK that are chosen if the
9435 predicate evaluates to true and false and store them to
9436 *TRUE_CONTROLLED_EDGE and *FALSE_CONTROLLED_EDGE if
9437 they are non-NULL. Returns true if the edges can be determined,
9438 else return false. */
9441 extract_true_false_controlled_edges (basic_block dom, basic_block phiblock,
9442 edge *true_controlled_edge,
9443 edge *false_controlled_edge)
9445 basic_block bb = phiblock;
9446 edge true_edge, false_edge, tem;
9447 edge e0 = NULL, e1 = NULL;
9449 /* We have to verify that one edge into the PHI node is dominated
9450 by the true edge of the predicate block and the other edge
9451 dominated by the false edge. This ensures that the PHI argument
9452 we are going to take is completely determined by the path we
9453 take from the predicate block.
9454 We can only use BB dominance checks below if the destination of
9455 the true/false edges are dominated by their edge, thus only
9456 have a single predecessor. */
9457 extract_true_false_edges_from_block (dom, &true_edge, &false_edge);
9458 tem = EDGE_PRED (bb, 0);
9459 if (tem == true_edge
9460 || (single_pred_p (true_edge->dest)
9461 && (tem->src == true_edge->dest
9462 || dominated_by_p (CDI_DOMINATORS,
9463 tem->src, true_edge->dest))))
9465 else if (tem == false_edge
9466 || (single_pred_p (false_edge->dest)
9467 && (tem->src == false_edge->dest
9468 || dominated_by_p (CDI_DOMINATORS,
9469 tem->src, false_edge->dest))))
9473 tem = EDGE_PRED (bb, 1);
9474 if (tem == true_edge
9475 || (single_pred_p (true_edge->dest)
9476 && (tem->src == true_edge->dest
9477 || dominated_by_p (CDI_DOMINATORS,
9478 tem->src, true_edge->dest))))
9480 else if (tem == false_edge
9481 || (single_pred_p (false_edge->dest)
9482 && (tem->src == false_edge->dest
9483 || dominated_by_p (CDI_DOMINATORS,
9484 tem->src, false_edge->dest))))
9491 if (true_controlled_edge)
9492 *true_controlled_edge = e0;
9493 if (false_controlled_edge)
9494 *false_controlled_edge = e1;
9499 /* Generate a range test LHS CODE RHS that determines whether INDEX is in the
9500 range [low, high]. Place associated stmts before *GSI. */
9503 generate_range_test (basic_block bb, tree index, tree low, tree high,
9504 tree *lhs, tree *rhs)
9506 tree type = TREE_TYPE (index);
9507 tree utype = range_check_type (type);
9509 low = fold_convert (utype, low);
9510 high = fold_convert (utype, high);
9512 gimple_seq seq = NULL;
9513 index = gimple_convert (&seq, utype, index);
9514 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9515 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9517 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9518 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9521 /* Return the basic block that belongs to label numbered INDEX
9522 of a switch statement. */
9525 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9527 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9530 /* Return the default basic block of a switch statement. */
9533 gimple_switch_default_bb (function *ifun, gswitch *gs)
9535 return gimple_switch_label_bb (ifun, gs, 0);
9538 /* Return the edge that belongs to label numbered INDEX
9539 of a switch statement. */
9542 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9544 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9547 /* Return the default edge of a switch statement. */
9550 gimple_switch_default_edge (function *ifun, gswitch *gs)
9552 return gimple_switch_edge (ifun, gs, 0);
9555 /* Return true if the only executable statement in BB is a GIMPLE_COND. */
9558 cond_only_block_p (basic_block bb)
9560 /* BB must have no executable statements. */
9561 gimple_stmt_iterator gsi = gsi_after_labels (bb);
9564 while (!gsi_end_p (gsi))
9566 gimple *stmt = gsi_stmt (gsi);
9567 if (is_gimple_debug (stmt))
9569 else if (gimple_code (stmt) == GIMPLE_NOP
9570 || gimple_code (stmt) == GIMPLE_PREDICT
9571 || gimple_code (stmt) == GIMPLE_COND)
9581 /* Emit return warnings. */
9585 const pass_data pass_data_warn_function_return =
9587 GIMPLE_PASS, /* type */
9588 "*warn_function_return", /* name */
9589 OPTGROUP_NONE, /* optinfo_flags */
9590 TV_NONE, /* tv_id */
9591 PROP_cfg, /* properties_required */
9592 0, /* properties_provided */
9593 0, /* properties_destroyed */
9594 0, /* todo_flags_start */
9595 0, /* todo_flags_finish */
9598 class pass_warn_function_return : public gimple_opt_pass
9601 pass_warn_function_return (gcc::context *ctxt)
9602 : gimple_opt_pass (pass_data_warn_function_return, ctxt)
9605 /* opt_pass methods: */
9606 unsigned int execute (function *) final override;
9608 }; // class pass_warn_function_return
9611 pass_warn_function_return::execute (function *fun)
9613 location_t location;
9618 if (!targetm.warn_func_return (fun->decl))
9621 /* If we have a path to EXIT, then we do return. */
9622 if (TREE_THIS_VOLATILE (fun->decl)
9623 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0)
9625 location = UNKNOWN_LOCATION;
9626 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (fun)->preds);
9627 (e = ei_safe_edge (ei)); )
9629 last = last_stmt (e->src);
9630 if ((gimple_code (last) == GIMPLE_RETURN
9631 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9632 && location == UNKNOWN_LOCATION
9633 && ((location = LOCATION_LOCUS (gimple_location (last)))
9634 != UNKNOWN_LOCATION)
9637 /* When optimizing, replace return stmts in noreturn functions
9638 with __builtin_unreachable () call. */
9639 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9641 location_t loc = gimple_location (last);
9642 gimple *new_stmt = gimple_build_builtin_unreachable (loc);
9643 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9644 gsi_replace (&gsi, new_stmt, true);
9650 if (location == UNKNOWN_LOCATION)
9651 location = cfun->function_end_locus;
9652 warning_at (location, 0, "%<noreturn%> function does return");
9655 /* If we see "return;" in some basic block, then we do reach the end
9656 without returning a value. */
9657 else if (warn_return_type > 0
9658 && !warning_suppressed_p (fun->decl, OPT_Wreturn_type)
9659 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9661 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9663 gimple *last = last_stmt (e->src);
9664 greturn *return_stmt = dyn_cast <greturn *> (last);
9666 && gimple_return_retval (return_stmt) == NULL
9667 && !warning_suppressed_p (last, OPT_Wreturn_type))
9669 location = gimple_location (last);
9670 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9671 location = fun->function_end_locus;
9672 if (warning_at (location, OPT_Wreturn_type,
9673 "control reaches end of non-void function"))
9674 suppress_warning (fun->decl, OPT_Wreturn_type);
9678 /* The C++ FE turns fallthrough from the end of non-void function
9679 into __builtin_unreachable () call with BUILTINS_LOCATION.
9680 Recognize those as well as calls from ubsan_instrument_return. */
9682 if (!warning_suppressed_p (fun->decl, OPT_Wreturn_type))
9683 FOR_EACH_BB_FN (bb, fun)
9684 if (EDGE_COUNT (bb->succs) == 0)
9686 gimple *last = last_stmt (bb);
9687 const enum built_in_function ubsan_missing_ret
9688 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9690 && ((LOCATION_LOCUS (gimple_location (last))
9691 == BUILTINS_LOCATION
9692 && (gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE)
9693 || gimple_call_builtin_p (last, BUILT_IN_TRAP)))
9694 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9696 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9697 gsi_prev_nondebug (&gsi);
9698 gimple *prev = gsi_stmt (gsi);
9700 location = UNKNOWN_LOCATION;
9702 location = gimple_location (prev);
9703 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9704 location = fun->function_end_locus;
9705 if (warning_at (location, OPT_Wreturn_type,
9706 "control reaches end of non-void function"))
9707 suppress_warning (fun->decl, OPT_Wreturn_type);
9718 make_pass_warn_function_return (gcc::context *ctxt)
9720 return new pass_warn_function_return (ctxt);
9723 /* Walk a gimplified function and warn for functions whose return value is
9724 ignored and attribute((warn_unused_result)) is set. This is done before
9725 inlining, so we don't have to worry about that. */
9728 do_warn_unused_result (gimple_seq seq)
9731 gimple_stmt_iterator i;
9733 for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
9735 gimple *g = gsi_stmt (i);
9737 switch (gimple_code (g))
9740 do_warn_unused_result (gimple_bind_body (as_a <gbind *>(g)));
9743 do_warn_unused_result (gimple_try_eval (g));
9744 do_warn_unused_result (gimple_try_cleanup (g));
9747 do_warn_unused_result (gimple_catch_handler (
9748 as_a <gcatch *> (g)));
9750 case GIMPLE_EH_FILTER:
9751 do_warn_unused_result (gimple_eh_filter_failure (g));
9755 if (gimple_call_lhs (g))
9757 if (gimple_call_internal_p (g))
9760 /* This is a naked call, as opposed to a GIMPLE_CALL with an
9761 LHS. All calls whose value is ignored should be
9762 represented like this. Look for the attribute. */
9763 fdecl = gimple_call_fndecl (g);
9764 ftype = gimple_call_fntype (g);
9766 if (lookup_attribute ("warn_unused_result", TYPE_ATTRIBUTES (ftype)))
9768 location_t loc = gimple_location (g);
9771 warning_at (loc, OPT_Wunused_result,
9772 "ignoring return value of %qD "
9773 "declared with attribute %<warn_unused_result%>",
9776 warning_at (loc, OPT_Wunused_result,
9777 "ignoring return value of function "
9778 "declared with attribute %<warn_unused_result%>");
9783 /* Not a container, not a call, or a call whose value is used. */
9791 const pass_data pass_data_warn_unused_result =
9793 GIMPLE_PASS, /* type */
9794 "*warn_unused_result", /* name */
9795 OPTGROUP_NONE, /* optinfo_flags */
9796 TV_NONE, /* tv_id */
9797 PROP_gimple_any, /* properties_required */
9798 0, /* properties_provided */
9799 0, /* properties_destroyed */
9800 0, /* todo_flags_start */
9801 0, /* todo_flags_finish */
9804 class pass_warn_unused_result : public gimple_opt_pass
9807 pass_warn_unused_result (gcc::context *ctxt)
9808 : gimple_opt_pass (pass_data_warn_unused_result, ctxt)
9811 /* opt_pass methods: */
9812 bool gate (function *) final override { return flag_warn_unused_result; }
9813 unsigned int execute (function *) final override
9815 do_warn_unused_result (gimple_body (current_function_decl));
9819 }; // class pass_warn_unused_result
9824 make_pass_warn_unused_result (gcc::context *ctxt)
9826 return new pass_warn_unused_result (ctxt);
9829 /* Maybe Remove stores to variables we marked write-only.
9830 Return true if a store was removed. */
9832 maybe_remove_writeonly_store (gimple_stmt_iterator &gsi, gimple *stmt,
9833 bitmap dce_ssa_names)
9835 /* Keep access when store has side effect, i.e. in case when source
9837 if (!gimple_store_p (stmt)
9838 || gimple_has_side_effects (stmt)
9842 tree lhs = get_base_address (gimple_get_lhs (stmt));
9845 || (!TREE_STATIC (lhs) && !DECL_EXTERNAL (lhs))
9846 || !varpool_node::get (lhs)->writeonly)
9849 if (dump_file && (dump_flags & TDF_DETAILS))
9851 fprintf (dump_file, "Removing statement, writes"
9852 " to write only var:\n");
9853 print_gimple_stmt (dump_file, stmt, 0,
9854 TDF_VOPS|TDF_MEMSYMS);
9857 /* Mark ssa name defining to be checked for simple dce. */
9858 if (gimple_assign_single_p (stmt))
9860 tree rhs = gimple_assign_rhs1 (stmt);
9861 if (TREE_CODE (rhs) == SSA_NAME
9862 && !SSA_NAME_IS_DEFAULT_DEF (rhs))
9863 bitmap_set_bit (dce_ssa_names, SSA_NAME_VERSION (rhs));
9865 unlink_stmt_vdef (stmt);
9866 gsi_remove (&gsi, true);
9867 release_defs (stmt);
9871 /* IPA passes, compilation of earlier functions or inlining
9872 might have changed some properties, such as marked functions nothrow,
9873 pure, const or noreturn.
9874 Remove redundant edges and basic blocks, and create new ones if necessary. */
9877 execute_fixup_cfg (void)
9880 gimple_stmt_iterator gsi;
9882 cgraph_node *node = cgraph_node::get (current_function_decl);
9883 /* Same scaling is also done by ipa_merge_profiles. */
9884 profile_count num = node->count;
9885 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9886 bool scale = num.initialized_p () && !(num == den);
9887 auto_bitmap dce_ssa_names;
9891 profile_count::adjust_for_ipa_scaling (&num, &den);
9892 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9893 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9894 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9897 FOR_EACH_BB_FN (bb, cfun)
9900 bb->count = bb->count.apply_scale (num, den);
9901 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
9903 gimple *stmt = gsi_stmt (gsi);
9904 tree decl = is_gimple_call (stmt)
9905 ? gimple_call_fndecl (stmt)
9909 int flags = gimple_call_flags (stmt);
9910 if (flags & (ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE))
9912 if (gimple_in_ssa_p (cfun))
9914 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9918 if (flags & ECF_NORETURN
9919 && fixup_noreturn_call (stmt))
9920 todo |= TODO_cleanup_cfg;
9923 /* Remove stores to variables we marked write-only. */
9924 if (maybe_remove_writeonly_store (gsi, stmt, dce_ssa_names))
9926 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9930 /* For calls we can simply remove LHS when it is known
9931 to be write-only. */
9932 if (is_gimple_call (stmt)
9933 && gimple_get_lhs (stmt))
9935 tree lhs = get_base_address (gimple_get_lhs (stmt));
9938 && (TREE_STATIC (lhs) || DECL_EXTERNAL (lhs))
9939 && varpool_node::get (lhs)->writeonly)
9941 gimple_call_set_lhs (stmt, NULL);
9943 todo |= TODO_update_ssa | TODO_cleanup_cfg;
9949 if (gimple *last = last_stmt (bb))
9951 if (maybe_clean_eh_stmt (last)
9952 && gimple_purge_dead_eh_edges (bb))
9953 todo |= TODO_cleanup_cfg;
9954 if (gimple_purge_dead_abnormal_call_edges (bb))
9955 todo |= TODO_cleanup_cfg;
9958 /* If we have a basic block with no successors that does not
9959 end with a control statement or a noreturn call end it with
9960 a call to __builtin_unreachable. This situation can occur
9961 when inlining a noreturn call that does in fact return. */
9962 if (EDGE_COUNT (bb->succs) == 0)
9964 gimple *stmt = last_stmt (bb);
9966 || (!is_ctrl_stmt (stmt)
9967 && (!is_gimple_call (stmt)
9968 || !gimple_call_noreturn_p (stmt))))
9970 if (stmt && is_gimple_call (stmt))
9971 gimple_call_set_ctrl_altering (stmt, false);
9972 stmt = gimple_build_builtin_unreachable (UNKNOWN_LOCATION);
9973 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9974 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9975 if (!cfun->after_inlining)
9976 if (tree fndecl = gimple_call_fndecl (stmt))
9978 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9979 node->create_edge (cgraph_node::get_create (fndecl),
9980 call_stmt, bb->count);
9987 update_max_bb_count ();
9988 compute_function_frequency ();
9992 && (todo & TODO_cleanup_cfg))
9993 loops_state_set (LOOPS_NEED_FIXUP);
9995 simple_dce_from_worklist (dce_ssa_names);
10002 const pass_data pass_data_fixup_cfg =
10004 GIMPLE_PASS, /* type */
10005 "fixup_cfg", /* name */
10006 OPTGROUP_NONE, /* optinfo_flags */
10007 TV_NONE, /* tv_id */
10008 PROP_cfg, /* properties_required */
10009 0, /* properties_provided */
10010 0, /* properties_destroyed */
10011 0, /* todo_flags_start */
10012 0, /* todo_flags_finish */
10015 class pass_fixup_cfg : public gimple_opt_pass
10018 pass_fixup_cfg (gcc::context *ctxt)
10019 : gimple_opt_pass (pass_data_fixup_cfg, ctxt)
10022 /* opt_pass methods: */
10023 opt_pass * clone () final override { return new pass_fixup_cfg (m_ctxt); }
10024 unsigned int execute (function *) final override
10026 return execute_fixup_cfg ();
10029 }; // class pass_fixup_cfg
10031 } // anon namespace
10034 make_pass_fixup_cfg (gcc::context *ctxt)
10036 return new pass_fixup_cfg (ctxt);
10039 /* Garbage collection support for edge_def. */
10041 extern void gt_ggc_mx (tree&);
10042 extern void gt_ggc_mx (gimple *&);
10043 extern void gt_ggc_mx (rtx&);
10044 extern void gt_ggc_mx (basic_block&);
10047 gt_ggc_mx (rtx_insn *& x)
10050 gt_ggc_mx_rtx_def ((void *) x);
10054 gt_ggc_mx (edge_def *e)
10056 tree block = LOCATION_BLOCK (e->goto_locus);
10057 gt_ggc_mx (e->src);
10058 gt_ggc_mx (e->dest);
10059 if (current_ir_type () == IR_GIMPLE)
10060 gt_ggc_mx (e->insns.g);
10062 gt_ggc_mx (e->insns.r);
10066 /* PCH support for edge_def. */
10068 extern void gt_pch_nx (tree&);
10069 extern void gt_pch_nx (gimple *&);
10070 extern void gt_pch_nx (rtx&);
10071 extern void gt_pch_nx (basic_block&);
10074 gt_pch_nx (rtx_insn *& x)
10077 gt_pch_nx_rtx_def ((void *) x);
10081 gt_pch_nx (edge_def *e)
10083 tree block = LOCATION_BLOCK (e->goto_locus);
10084 gt_pch_nx (e->src);
10085 gt_pch_nx (e->dest);
10086 if (current_ir_type () == IR_GIMPLE)
10087 gt_pch_nx (e->insns.g);
10089 gt_pch_nx (e->insns.r);
10094 gt_pch_nx (edge_def *e, gt_pointer_operator op, void *cookie)
10096 tree block = LOCATION_BLOCK (e->goto_locus);
10097 op (&(e->src), NULL, cookie);
10098 op (&(e->dest), NULL, cookie);
10099 if (current_ir_type () == IR_GIMPLE)
10100 op (&(e->insns.g), NULL, cookie);
10102 op (&(e->insns.r), NULL, cookie);
10103 op (&(block), &(block), cookie);
10108 namespace selftest {
10110 /* Helper function for CFG selftests: create a dummy function decl
10111 and push it as cfun. */
10114 push_fndecl (const char *name)
10116 tree fn_type = build_function_type_array (integer_type_node, 0, NULL);
10117 /* FIXME: this uses input_location: */
10118 tree fndecl = build_fn_decl (name, fn_type);
10119 tree retval = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
10120 NULL_TREE, integer_type_node);
10121 DECL_RESULT (fndecl) = retval;
10122 push_struct_function (fndecl);
10123 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10124 ASSERT_TRUE (fun != NULL);
10125 init_empty_tree_cfg_for_function (fun);
10126 ASSERT_EQ (2, n_basic_blocks_for_fn (fun));
10127 ASSERT_EQ (0, n_edges_for_fn (fun));
10131 /* These tests directly create CFGs.
10132 Compare with the static fns within tree-cfg.cc:
10134 - make_blocks: calls create_basic_block (seq, bb);
10137 /* Verify a simple cfg of the form:
10138 ENTRY -> A -> B -> C -> EXIT. */
10141 test_linear_chain ()
10143 gimple_register_cfg_hooks ();
10145 tree fndecl = push_fndecl ("cfg_test_linear_chain");
10146 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10148 /* Create some empty blocks. */
10149 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10150 basic_block bb_b = create_empty_bb (bb_a);
10151 basic_block bb_c = create_empty_bb (bb_b);
10153 ASSERT_EQ (5, n_basic_blocks_for_fn (fun));
10154 ASSERT_EQ (0, n_edges_for_fn (fun));
10156 /* Create some edges: a simple linear chain of BBs. */
10157 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10158 make_edge (bb_a, bb_b, 0);
10159 make_edge (bb_b, bb_c, 0);
10160 make_edge (bb_c, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10162 /* Verify the edges. */
10163 ASSERT_EQ (4, n_edges_for_fn (fun));
10164 ASSERT_EQ (NULL, ENTRY_BLOCK_PTR_FOR_FN (fun)->preds);
10165 ASSERT_EQ (1, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs->length ());
10166 ASSERT_EQ (1, bb_a->preds->length ());
10167 ASSERT_EQ (1, bb_a->succs->length ());
10168 ASSERT_EQ (1, bb_b->preds->length ());
10169 ASSERT_EQ (1, bb_b->succs->length ());
10170 ASSERT_EQ (1, bb_c->preds->length ());
10171 ASSERT_EQ (1, bb_c->succs->length ());
10172 ASSERT_EQ (1, EXIT_BLOCK_PTR_FOR_FN (fun)->preds->length ());
10173 ASSERT_EQ (NULL, EXIT_BLOCK_PTR_FOR_FN (fun)->succs);
10175 /* Verify the dominance information
10176 Each BB in our simple chain should be dominated by the one before
10178 calculate_dominance_info (CDI_DOMINATORS);
10179 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10180 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10181 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10182 ASSERT_EQ (1, dom_by_b.length ());
10183 ASSERT_EQ (bb_c, dom_by_b[0]);
10184 free_dominance_info (CDI_DOMINATORS);
10186 /* Similarly for post-dominance: each BB in our chain is post-dominated
10187 by the one after it. */
10188 calculate_dominance_info (CDI_POST_DOMINATORS);
10189 ASSERT_EQ (bb_b, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10190 ASSERT_EQ (bb_c, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10191 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10192 ASSERT_EQ (1, postdom_by_b.length ());
10193 ASSERT_EQ (bb_a, postdom_by_b[0]);
10194 free_dominance_info (CDI_POST_DOMINATORS);
10199 /* Verify a simple CFG of the form:
10215 gimple_register_cfg_hooks ();
10217 tree fndecl = push_fndecl ("cfg_test_diamond");
10218 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10220 /* Create some empty blocks. */
10221 basic_block bb_a = create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
10222 basic_block bb_b = create_empty_bb (bb_a);
10223 basic_block bb_c = create_empty_bb (bb_a);
10224 basic_block bb_d = create_empty_bb (bb_b);
10226 ASSERT_EQ (6, n_basic_blocks_for_fn (fun));
10227 ASSERT_EQ (0, n_edges_for_fn (fun));
10229 /* Create the edges. */
10230 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), bb_a, EDGE_FALLTHRU);
10231 make_edge (bb_a, bb_b, EDGE_TRUE_VALUE);
10232 make_edge (bb_a, bb_c, EDGE_FALSE_VALUE);
10233 make_edge (bb_b, bb_d, 0);
10234 make_edge (bb_c, bb_d, 0);
10235 make_edge (bb_d, EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10237 /* Verify the edges. */
10238 ASSERT_EQ (6, n_edges_for_fn (fun));
10239 ASSERT_EQ (1, bb_a->preds->length ());
10240 ASSERT_EQ (2, bb_a->succs->length ());
10241 ASSERT_EQ (1, bb_b->preds->length ());
10242 ASSERT_EQ (1, bb_b->succs->length ());
10243 ASSERT_EQ (1, bb_c->preds->length ());
10244 ASSERT_EQ (1, bb_c->succs->length ());
10245 ASSERT_EQ (2, bb_d->preds->length ());
10246 ASSERT_EQ (1, bb_d->succs->length ());
10248 /* Verify the dominance information. */
10249 calculate_dominance_info (CDI_DOMINATORS);
10250 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_b));
10251 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_c));
10252 ASSERT_EQ (bb_a, get_immediate_dominator (CDI_DOMINATORS, bb_d));
10253 auto_vec<basic_block> dom_by_a = get_dominated_by (CDI_DOMINATORS, bb_a);
10254 ASSERT_EQ (3, dom_by_a.length ()); /* B, C, D, in some order. */
10255 dom_by_a.release ();
10256 auto_vec<basic_block> dom_by_b = get_dominated_by (CDI_DOMINATORS, bb_b);
10257 ASSERT_EQ (0, dom_by_b.length ());
10258 dom_by_b.release ();
10259 free_dominance_info (CDI_DOMINATORS);
10261 /* Similarly for post-dominance. */
10262 calculate_dominance_info (CDI_POST_DOMINATORS);
10263 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_a));
10264 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_b));
10265 ASSERT_EQ (bb_d, get_immediate_dominator (CDI_POST_DOMINATORS, bb_c));
10266 auto_vec<basic_block> postdom_by_d = get_dominated_by (CDI_POST_DOMINATORS, bb_d);
10267 ASSERT_EQ (3, postdom_by_d.length ()); /* A, B, C in some order. */
10268 postdom_by_d.release ();
10269 auto_vec<basic_block> postdom_by_b = get_dominated_by (CDI_POST_DOMINATORS, bb_b);
10270 ASSERT_EQ (0, postdom_by_b.length ());
10271 postdom_by_b.release ();
10272 free_dominance_info (CDI_POST_DOMINATORS);
10277 /* Verify that we can handle a CFG containing a "complete" aka
10278 fully-connected subgraph (where A B C D below all have edges
10279 pointing to each other node, also to themselves).
10297 test_fully_connected ()
10299 gimple_register_cfg_hooks ();
10301 tree fndecl = push_fndecl ("cfg_fully_connected");
10302 function *fun = DECL_STRUCT_FUNCTION (fndecl);
10306 /* Create some empty blocks. */
10307 auto_vec <basic_block> subgraph_nodes;
10308 for (int i = 0; i < n; i++)
10309 subgraph_nodes.safe_push (create_empty_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)));
10311 ASSERT_EQ (n + 2, n_basic_blocks_for_fn (fun));
10312 ASSERT_EQ (0, n_edges_for_fn (fun));
10314 /* Create the edges. */
10315 make_edge (ENTRY_BLOCK_PTR_FOR_FN (fun), subgraph_nodes[0], EDGE_FALLTHRU);
10316 make_edge (subgraph_nodes[0], EXIT_BLOCK_PTR_FOR_FN (fun), 0);
10317 for (int i = 0; i < n; i++)
10318 for (int j = 0; j < n; j++)
10319 make_edge (subgraph_nodes[i], subgraph_nodes[j], 0);
10321 /* Verify the edges. */
10322 ASSERT_EQ (2 + (n * n), n_edges_for_fn (fun));
10323 /* The first one is linked to ENTRY/EXIT as well as itself and
10324 everything else. */
10325 ASSERT_EQ (n + 1, subgraph_nodes[0]->preds->length ());
10326 ASSERT_EQ (n + 1, subgraph_nodes[0]->succs->length ());
10327 /* The other ones in the subgraph are linked to everything in
10328 the subgraph (including themselves). */
10329 for (int i = 1; i < n; i++)
10331 ASSERT_EQ (n, subgraph_nodes[i]->preds->length ());
10332 ASSERT_EQ (n, subgraph_nodes[i]->succs->length ());
10335 /* Verify the dominance information. */
10336 calculate_dominance_info (CDI_DOMINATORS);
10337 /* The initial block in the subgraph should be dominated by ENTRY. */
10338 ASSERT_EQ (ENTRY_BLOCK_PTR_FOR_FN (fun),
10339 get_immediate_dominator (CDI_DOMINATORS,
10340 subgraph_nodes[0]));
10341 /* Every other block in the subgraph should be dominated by the
10343 for (int i = 1; i < n; i++)
10344 ASSERT_EQ (subgraph_nodes[0],
10345 get_immediate_dominator (CDI_DOMINATORS,
10346 subgraph_nodes[i]));
10347 free_dominance_info (CDI_DOMINATORS);
10349 /* Similarly for post-dominance. */
10350 calculate_dominance_info (CDI_POST_DOMINATORS);
10351 /* The initial block in the subgraph should be postdominated by EXIT. */
10352 ASSERT_EQ (EXIT_BLOCK_PTR_FOR_FN (fun),
10353 get_immediate_dominator (CDI_POST_DOMINATORS,
10354 subgraph_nodes[0]));
10355 /* Every other block in the subgraph should be postdominated by the
10356 initial block, since that leads to EXIT. */
10357 for (int i = 1; i < n; i++)
10358 ASSERT_EQ (subgraph_nodes[0],
10359 get_immediate_dominator (CDI_POST_DOMINATORS,
10360 subgraph_nodes[i]));
10361 free_dominance_info (CDI_POST_DOMINATORS);
10366 /* Run all of the selftests within this file. */
10369 tree_cfg_cc_tests ()
10371 test_linear_chain ();
10373 test_fully_connected ();
10376 } // namespace selftest
10378 /* TODO: test the dominator/postdominator logic with various graphs/nodes:
10381 - switch statement (a block with many out-edges)
10382 - something that jumps to itself
10385 #endif /* CHECKING_P */