/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "pointer-set.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "timevar.h"
#include "langhooks.h"
#include "ggc.h"
#include "diagnostic-core.h"
#include "gimple.h"
#include "target.h"

/* In some instances a tree and a gimple need to be stored in the same
   table, i.e. in hash tables.  This is a structure to do this.  */
typedef union {tree *tp; tree t; gimple g;} treemple;
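/* For illustration (not from the original sources): a treemple lets the
   same hash-table code key off either a tree or a gimple.  A sketch,
   where SOME_LABEL is a LABEL_DECL and SOME_GOTO a GIMPLE_GOTO:

	treemple key;
	key.t = some_label;		key by the tree, or
	key.g = some_goto;		key by the gimple

   As with any union, the caller must track which member is active.  */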
/* Nonzero if we are using EH to handle cleanups.  */
static int using_eh_for_cleanups_p = 0;

void
using_eh_for_cleanups (void)
{
  using_eh_for_cleanups_p = 1;
}

/* Misc functions used in this file.  */

/* Remember and lookup EH landing pad data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */

/* Add statement T in function IFUN to landing pad NUM.  */

void
add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num != 0);

  n = ggc_alloc_throw_stmt_node ();
  n->stmt = t;
  n->lp_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
						    struct_ptr_eq,
						    NULL));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

/* Add statement T in the current function (cfun) to EH landing pad NUM.  */

void
add_stmt_to_eh_lp (gimple t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}

/* Add statement T to the single EH landing pad in REGION.  */

static void
record_stmt_eh_region (eh_region region, gimple t)
{
  if (region == NULL)
    return;
  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
  else
    {
      eh_landing_pad lp = region->landing_pads;
      if (lp == NULL)
	lp = gen_eh_landing_pad (region);
      else
	gcc_assert (lp->next_lp == NULL);
      add_stmt_to_eh_lp_fn (cfun, t, lp->index);
    }
}

/* Remove statement T in function IFUN from its EH landing pad.  */

bool
remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
			 NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      return true;
    }
  else
    return false;
}

/* Remove statement T in the current function (cfun) from its
   EH landing pad.  */

bool
remove_stmt_from_eh_lp (gimple t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}

/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */

int
lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node *p, n;

  if (ifun->eh->throw_stmt_table == NULL)
    return 0;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n);
  return p ? p->lp_nr : 0;
}
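/* For illustration (not from the original sources): a sketch of how the
   return value is typically dispatched on, assuming STMT was recorded
   earlier by one of the add_stmt_to_eh_lp* routines:

	int lp_nr = lookup_stmt_eh_lp (stmt);
	if (lp_nr > 0)
	  ...	stmt can throw; lp_nr is its landing pad index
	else if (lp_nr < 0)
	  ...	stmt lies within MUST_NOT_THROW region -lp_nr
	else
	  ...	stmt is not in the region table at all
*/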
/* Likewise, but always use the current function.  */

int
lookup_stmt_eh_lp (gimple t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (cfun == NULL)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}

/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */

struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gimple parent;
};

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static htab_t finally_tree;
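/* For illustration (not from the original sources): given nested
   try-finally statements

	try {			T1
	  L1:
	  try {			T2
	    L2:
	  } finally { ... }
	} finally { ... }

   the pairs recorded below are roughly L1->T1, T2->T1, and L2->T2, so
   that outside_finally_tree can walk from a goto's destination label up
   through the enclosing GIMPLE_TRY_FINALLY nodes.  */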
static void
record_in_finally_tree (treemple child, gimple parent)
{
  struct finally_tree_node *n;
  void **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = htab_find_slot (finally_tree, n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (gimple stmt, gimple region);

/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gimple region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}

static void
collect_finally_tree (gimple stmt, gimple region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (stmt);
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	{
	  temp.g = stmt;
	  record_in_finally_tree (temp, region);
	  collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	{
	  collect_finally_tree_1 (gimple_try_eval (stmt), region);
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (stmt), region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    case GIMPLE_EH_ELSE:
      collect_finally_tree_1 (gimple_eh_else_n_body (stmt), region);
      collect_finally_tree_1 (gimple_eh_else_e_body (stmt), region);
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}

/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (treemple start, gimple target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = (struct finally_tree_node *) htab_find (finally_tree, &n);
      if (!p)
	return true;
      start.g = p->parent;
    }
  while (start.g != target);

  return false;
}

/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;

/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */
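/* For illustration (not from the original sources): in

	try {
	  if (cond) goto out;
	  return;
	} finally { ... }
	out:;

   both jumps escape the try, so each gets a goto_queue_node: the goto
   records the label "out" (an index >= 0 into DEST_ARRAY below), while
   the return records the GIMPLE_RETURN itself with index -1.  */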
struct goto_queue_node
{
  treemple stmt;
  gimple_seq repl_stmt;
  gimple cont_stmt;
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  bool is_label;
};

/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gimple try_finally_expr;
  gimple top_p;

  /* While lowering, top_p is usually expanded into multiple statements,
     so we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  VEC(tree,heap) *dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};

static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20
static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);

static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;
  void **slot;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
	if (tf->goto_queue[i].stmt.g == stmt.g)
	  return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)
	{
	  slot = pointer_map_insert (tf->goto_queue_map,
				     tf->goto_queue[i].stmt.g);
	  gcc_assert (*slot == NULL);
	  *slot = &tf->goto_queue[i];
	}
    }

  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
  if (slot != NULL)
    return (((struct goto_queue_node *) *slot)->repl_stmt);

  return NULL;
}

/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND.  */
  *tp = label;

  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}

/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
		      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
	{
	  gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
	  gsi_remove (gsi, false);
	  return;
	}
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler_ptr (stmt), tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
      break;
    case GIMPLE_EH_ELSE:
      replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (stmt), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}

/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (*seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}

/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (&eh_seq, tf);
}
/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple statement.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
		      treemple new_stmt,
		      int index,
		      bool is_label)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
	= XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->is_label = is_label;
}

/* Record the LABEL label in the goto queue contained in TF.
   STMT is the corresponding goto or cond statement.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array)
    {
      tf->dest_array = VEC_alloc (tree, heap, 10);
      VEC_quick_push (tree, tf->dest_array, label);
      index = 0;
    }
  else
    {
      int n = VEC_length (tree, tf->dest_array);
      for (index = 0; index < n; ++index)
	if (VEC_index (tree, tf->dest_array, index) == label)
	  break;
      if (index == n)
	VEC_safe_push (tree, heap, tf->dest_array, label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have an easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true);
}

/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a
   try_finally node, and if so record that fact in the goto queue
   associated with that try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      new_stmt.tp = gimple_op_ptr (stmt, 2);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt));
      new_stmt.tp = gimple_op_ptr (stmt, 3);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt));
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false);
      break;

    default:
      gcc_unreachable ();
    }
}

#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  If MOD is
   non-null, insert it before the new branch.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
  gimple x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  /* Note that the return value may have already been computed, e.g.,

	int x;
	int foo (void)
	{
	  x = 0;
	  try {
	    return x;
	  } finally {
	    x++;
	  }
	}

     should return 0, not 1.  We don't have to do anything to make
     this happen because the return value has been placed in the
     RESULT_DECL already.  */

  q->cont_stmt = q->stmt.g;

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}

/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
		     struct leh_tf_state *tf)
{
  gimple x;

  gcc_assert (q->is_label);

  q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Emit a standard landing pad sequence into SEQ for REGION.  */

static void
emit_post_landing_pad (gimple_seq *seq, eh_region region)
{
  eh_landing_pad lp = region->landing_pads;
  gimple x;

  if (lp == NULL)
    lp = gen_eh_landing_pad (region);

  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;

  x = gimple_build_label (lp->post_landing_pad);
  gimple_seq_add_stmt (seq, x);
}

/* Emit a RESX statement into SEQ for REGION.  */

static void
emit_resx (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_resx (region->index);
  gimple_seq_add_stmt (seq, x);
  if (region->outer)
    record_stmt_eh_region (region->outer, x);
}

/* Emit an EH_DISPATCH statement into SEQ for REGION.  */

static void
emit_eh_dispatch (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_eh_dispatch (region->index);
  gimple_seq_add_stmt (seq, x);
}

/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  */

static void
note_eh_region_may_contain_throw (eh_region region)
{
  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
    {
      if (region->type == ERT_MUST_NOT_THROW)
	break;
      region = region->outer;
      if (region == NULL)
	break;
    }
}

/* Check if REGION has been marked as containing a throw.  If REGION is
   NULL, this predicate is false.  */

static bool
eh_region_may_contain_throw (eh_region r)
{
  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
}

/* We want to transform
	try { body; } catch { stuff; }
   into
	normal_sequence:
	  body;
	  over:
	eh_sequence:
	  landing_pad:
	  stuff;
	  goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */
static gimple_seq
frob_into_branch_around (gimple tp, eh_region region, tree over)
{
  gimple x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (region)
    emit_post_landing_pad (&eh_seq, region);

  if (gimple_seq_may_fallthru (cleanup))
    {
      if (!over)
	over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_seq_add_stmt (&cleanup, x);
    }
  gimple_seq_add_seq (&eh_seq, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}

/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state)
{
  gimple region = NULL;
  gimple_seq new_seq;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}

/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
      if (tf->outer->tf)
	{
	  temp.t = label;
	  record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
	}
    }
  return label;
}

/* A subroutine of lower_try_finally.  If FINALLY consists of a
   GIMPLE_EH_ELSE node, return it.  */

static gimple
get_eh_else (gimple_seq finally)
{
  gimple x = gimple_seq_first_stmt (finally);
  if (gimple_code (x) == GIMPLE_EH_ELSE)
    {
      gcc_assert (gimple_seq_singleton_p (finally));
      return x;
    }
  return NULL;
}

/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */
static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;
  gimple_seq finally;
  gimple x, eh_else;

  /* First check for nothing to do.  */
  if (lang_hooks.eh_protect_cleanup_actions == NULL)
    return;
  protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
  if (protect_cleanup_actions == NULL)
    return;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  If we've got an EH_ELSE, extract it now.  */
  if (eh_else)
    {
      finally = gimple_eh_else_e_body (eh_else);
      gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
    }
  else if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state);
  finally_may_fallthru = gimple_seq_may_fallthru (finally);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  x = gsi_stmt (gsi);
  if (gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
    {
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
  x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
			GIMPLE_TRY_CATCH);
  finally = lower_eh_must_not_throw (outer_state, x);

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (finally_may_fallthru)
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_nofallthru (struct leh_state *state,
			      struct leh_tf_state *tf)
{
  tree lab;
  gimple x, eh_else;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  /* Emit the finally block into the stream.  Lower EH_ELSE at this time.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      finally = gimple_eh_else_n_body (eh_else);
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
	{
	  finally = gimple_eh_else_e_body (eh_else);
	  lower_eh_constructs_1 (state, &finally);

	  emit_post_landing_pad (&eh_seq, tf->region);
	  gimple_seq_add_seq (&eh_seq, finally);
	}
    }
  else
    {
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
	{
	  emit_post_landing_pad (&eh_seq, tf->region);

	  x = gimple_build_goto (lab);
	  gimple_seq_add_stmt (&eh_seq, x);
	}
    }
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */

static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  gimple x;
  gimple_seq finally;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Since there's only one destination, and the destination edge can only
     either be EH or non-EH, that implies that all of our incoming edges
     are of the same type.  Therefore we can lower EH_ELSE immediately.  */
  x = get_eh_else (finally);
  if (x)
    {
      if (tf->may_throw)
	finally = gimple_eh_else_e_body (x);
      else
	finally = gimple_eh_else_n_body (x);
    }

  lower_eh_constructs_1 (state, &finally);

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
         the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label (loc);
  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */

static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple x, eh_else;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);

  /* Notice EH_ELSE, and simplify some of the remaining code
     by considering FINALLY to be the normal return path only.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    finally = gimple_eh_else_n_body (eh_else);

  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      /* We don't need to copy the EH path of EH_ELSE,
	 since it is only emitted once.  */
      if (eh_else)
	seq = gimple_eh_else_e_body (eh_else);
      else
	seq = lower_try_finally_dup_block (finally, state);
      lower_eh_constructs_1 (state, &seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      int return_index, index;
      struct labels_s
      {
	struct goto_queue_node *q;
	tree label;
      } *labels;

      return_index = VEC_length (tree, tf->dest_array);
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  index = q->index < 0 ? return_index : q->index;

	  if (!labels[index].q)
	    labels[index].q = q;
	}

      for (index = 0; index < return_index + 1; index++)
	{
	  tree lab;

	  q = labels[index].q;
	  if (! q)
	    continue;

	  lab = labels[index].label
	    = create_artificial_label (tf_loc);

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL);
	  else
	    do_goto_redirection (q, lab, NULL, tf);

	  x = gimple_build_label (lab);
	  gimple_seq_add_stmt (&new_stmt, x);

	  seq = lower_try_finally_dup_block (finally, state);
	  lower_eh_constructs_1 (state, &seq);
	  gimple_seq_add_seq (&new_stmt, seq);

	  gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}

      for (q = tf->goto_queue; q < qe; q++)
	{
	  tree lab;

	  index = q->index < 0 ? return_index : q->index;

	  if (labels[index].q == q)
	    continue;

	  lab = labels[index].label;

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL);
	  else
	    do_goto_redirection (q, lab, NULL, tf);
	}

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
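/* For illustration (not from the original sources): with two goto
   destinations L1 and L2 plus a possible fallthru, the lowered shape is
   roughly

	<try body, with each escaping edge replaced by
	 "finally_tmp = <index>; goto finally_label;">
	finally_label:
	<finally body, emitted exactly once>
	switch (finally_tmp)
	  {
	  case fallthru_index: goto fallthru_label;
	  case 0: goto L1;
	  case 1: goto L2;
	  ...
	  }
*/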
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  VEC (tree,heap) *case_label_vec;
  gimple_seq switch_body = NULL;
  gimple x, eh_else;
  tree tmp;
  gimple switch_stmt;
  gimple_seq finally;
  struct pointer_map_t *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Mash the TRY block to the head of the chain.  */
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  finally_loc = gimple_seq_last_stmt (tf->top_p_seq) != NULL ?
    gimple_location (gimple_seq_last_stmt (tf->top_p_seq))
    : tf_loc;

  /* Lower the finally block itself.  */
  lower_eh_constructs_1 (state, &finally);

  /* Prepare for switch statement generation.  */
  nlabels = VEC_length (tree, tf->dest_array);
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + (tf->may_throw && !eh_else);
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use VEC_quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec = VEC_alloc (tree, heap, ndests);
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = gimple_build_assign (finally_tmp,
			       build_int_cst (integer_type_node,
					      fallthru_index));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      tmp = build_int_cst (integer_type_node, fallthru_index);
      last_case = build_case_label (tmp, NULL,
				    create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_seq_add_stmt (&switch_body, x);
    }

  /* For EH_ELSE, emit the exception path (plus resx) now, then
     subsequently we only need consider the normal path.  */
  if (eh_else)
    {
      if (tf->may_throw)
	{
	  finally = gimple_eh_else_e_body (eh_else);
	  lower_eh_constructs_1 (state, &finally);

	  emit_post_landing_pad (&eh_seq, tf->region);
	  gimple_seq_add_seq (&eh_seq, finally);
	  emit_resx (&eh_seq, tf->region);
	}

      finally = gimple_eh_else_n_body (eh_else);
    }
  else if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
			       build_int_cst (integer_type_node, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_seq_add_stmt (&eh_seq, x);

      tmp = build_int_cst (integer_type_node, eh_index);
      last_case = build_case_label (tmp, NULL,
				    create_artificial_label (tf_loc));
      VEC_quick_push (tree, case_label_vec, last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);
  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
  for (; q < qe; ++q)
    {
      gimple_seq mod = NULL;
      int switch_id;
      unsigned int case_index;

      if (q->index < 0)
	{
	  x = gimple_build_assign (finally_tmp,
				   build_int_cst (integer_type_node,
						  return_index));
	  gimple_seq_add_stmt (&mod, x);
	  do_return_redirection (q, finally_label, mod);
	  switch_id = return_index;
	}
      else
	{
	  x = gimple_build_assign (finally_tmp,
				   build_int_cst (integer_type_node, q->index));
	  gimple_seq_add_stmt (&mod, x);
	  do_goto_redirection (q, finally_label, mod, tf);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (VEC_length (tree, case_label_vec) <= case_index
	  || !VEC_index (tree, case_label_vec, case_index))
	{
	  tree case_lab;
	  void **slot;

	  tmp = build_int_cst (integer_type_node, switch_id);
	  case_lab = build_case_label (tmp, NULL,
				       create_artificial_label (tf_loc));
	  /* We store the cont_stmt in the pointer map, so that we can recover
	     it in the loop below.  */
	  if (!cont_map)
	    cont_map = pointer_map_create ();
	  slot = pointer_map_insert (cont_map, case_lab);
	  *slot = q->cont_stmt;
	  VEC_quick_push (tree, case_label_vec, case_lab);
	}
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      gimple cont_stmt;
      void **slot;

      last_case = VEC_index (tree, case_label_vec, j);

      gcc_assert (last_case);
      gcc_assert (cont_map);

      slot = pointer_map_contains (cont_map, last_case);
      gcc_assert (slot);
      cont_stmt = *(gimple *) slot;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    pointer_map_destroy (cont_map);

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch_vec (finally_tmp, last_case,
					 case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */

static bool
decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
{
  int f_estimate, sw_estimate;
  gimple eh_else;

  /* If there's an EH_ELSE involved, the exception path is separate
     and really doesn't come into play for this computation.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      ndests -= may_throw;
      finally = gimple_eh_else_n_body (eh_else);
    }

  if (!optimize)
    {
      gimple_stmt_iterator gsi;

      if (ndests == 1)
	return true;

      for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
	    return false;
	}
      return true;
    }

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
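/* For illustration (not from the original sources): with ndests == 3 and
   a finally block estimated at 5 insns, f_estimate = (5 + 1) * 3 = 18 and
   sw_estimate = 10 + 2 * 3 = 16.  Optimizing for size, 18 < 16 fails and
   the switch form is used; at -O2, 18 < 100 holds and the finally block
   is duplicated instead.  */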
/* REG is the enclosing region for a possible cleanup region, or the region
   itself.  Returns TRUE if such a region would be unreachable.

   Cleanup regions within a must-not-throw region aren't actually reachable
   even if there are throwing stmts within them, because the personality
   routine will call terminate before unwinding.  */

static bool
cleanup_is_dead_in (eh_region reg)
{
  while (reg && reg->type == ERT_CLEANUP)
    reg = reg->outer;
  return (reg && reg->type == ERT_MUST_NOT_THROW);
}

/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static gimple_seq
lower_try_finally (struct leh_state *state, gimple tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;
  gimple_seq old_eh_seq;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p && !cleanup_is_dead_in (state->cur_region))
    {
      this_tf.region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_tf.region;
    }
  else
    {
      this_tf.region = NULL;
      this_state.cur_region = state->cur_region;
    }

  this_state.ehp_region = state->ehp_region;
  this_state.tf = &this_tf;

  old_eh_seq = eh_seq;
  eh_seq = NULL;

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (this_tf.region)
    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = VEC_length (tree, this_tf.dest_array);
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    {
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
    }
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, this_tf.may_throw,
				    gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     sequence, do so.  */
  if (this_tf.fallthru_label)
    {
      /* This must be reached only if ndests == 0.  */
      gimple x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
    }

  VEC_free (tree, heap, this_tf.dest_array);
  free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    pointer_map_destroy (this_tf.goto_queue_map);

  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
     If there was no old eh_seq, then the append is trivially already done.  */
  if (old_eh_seq)
    {
      if (eh_seq == NULL)
	eh_seq = old_eh_seq;
      else
	{
	  gimple_seq new_eh_seq = eh_seq;
	  eh_seq = old_eh_seq;
	  gimple_seq_add_seq (&eh_seq, new_eh_seq);
	}
    }

  return this_tf.top_p_seq;
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static gimple_seq
lower_catch (struct leh_state *state, gimple tp)
{
  eh_region try_region = NULL;
  struct leh_state this_state = *state;
  gimple_stmt_iterator gsi;
  tree out_label;
  gimple_seq new_seq, cleanup;
  gimple x;
  location_t try_catch_loc = gimple_location (tp);

  if (flag_exceptions)
    {
      try_region = gen_eh_region_try (state->cur_region);
      this_state.cur_region = try_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (!eh_region_may_contain_throw (try_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  emit_eh_dispatch (&new_seq, try_region);
  emit_resx (&new_seq, try_region);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = try_region;

  out_label = NULL;
  cleanup = gimple_try_cleanup (tp);
  for (gsi = gsi_start (cleanup);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      eh_catch c;
      gimple gcatch;
      gimple_seq handler;

      gcatch = gsi_stmt (gsi);
      c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));

      handler = gimple_catch_handler (gcatch);
      lower_eh_constructs_1 (&this_state, &handler);

      c->label = create_artificial_label (UNKNOWN_LOCATION);
      x = gimple_build_label (c->label);
      gimple_seq_add_stmt (&new_seq, x);

      gimple_seq_add_seq (&new_seq, handler);

      if (gimple_seq_may_fallthru (new_seq))
	{
	  if (out_label == NULL)
	    out_label = create_artificial_label (try_catch_loc);

	  x = gimple_build_goto (out_label);
	  gimple_seq_add_stmt (&new_seq, x);
	}
      if (!c->type_list)
	break;
    }

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, try_region, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static gimple_seq
lower_eh_filter (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  gimple inner, x;
  gimple_seq new_seq;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (flag_exceptions)
    {
      this_region = gen_eh_region_allowed (state->cur_region,
					   gimple_eh_filter_types (inner));
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  this_state.cur_region = state->cur_region;
  this_state.ehp_region = this_region;

  emit_eh_dispatch (&new_seq, this_region);
  emit_resx (&new_seq, this_region);

  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
  x = gimple_build_label (this_region->u.allowed.label);
  gimple_seq_add_stmt (&new_seq, x);

  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, this_region, NULL);
}

/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */

static gimple_seq
lower_eh_must_not_throw (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;

  if (flag_exceptions)
    {
      gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
      eh_region this_region;

      this_region = gen_eh_region_must_not_throw (state->cur_region);
      this_region->u.must_not_throw.failure_decl
	= gimple_eh_must_not_throw_fndecl (inner);
      this_region->u.must_not_throw.failure_loc = gimple_location (tp);

      /* In order to get mangling applied to this decl, we must mark it
	 used now.  Otherwise, pass_ipa_free_lang_data won't think it
	 needs to be set.  */
      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;

      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  return gimple_try_eval (tp);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */
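/* For illustration (not from the original sources): the C++ front end
   produces such exception-only cleanups e.g. during construction of an
   aggregate.  If a later member's constructor throws, already-constructed
   members must be destroyed on the exception path only; on the normal
   path the complete object's destructor will handle them later.  */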
static gimple_seq
lower_cleanup (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  struct leh_tf_state fake_tf;
  gimple_seq result;
  bool cleanup_dead = cleanup_is_dead_in (state->cur_region);

  if (flag_exceptions && !cleanup_dead)
    {
      this_region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (cleanup_dead || !eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = fake_tf.try_finally_expr = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
      result = frob_into_branch_around (tp, this_region,
					fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */
      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
	{
	  gimple x = gimple_build_label (fake_tf.fallthru_label);
	  gimple_seq_add_stmt (&result, x);
	}
    }
  return result;
}
/* Main loop for lowering eh constructs.  Also moves gsi to the next
   statement.  */

static void
lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
{
  gimple_seq replace;
  gimple x;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	tree fndecl = gimple_call_fndecl (stmt);
	tree rhs, lhs;

	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_POINTER:
	      /* The front end may have generated a call to
		 __builtin_eh_pointer (0) within a catch region.  Replace
		 this zero argument with the current catch region number.  */
	      if (state->ehp_region)
		{
		  tree nr = build_int_cst (integer_type_node,
					   state->ehp_region->index);
		  gimple_call_set_arg (stmt, 0, nr);
		}
	      else
		{
		  /* The user has done something silly.  Remove it.  */
		  rhs = null_pointer_node;
		  goto do_replace;
		}
	      break;

	    case BUILT_IN_EH_FILTER:
	      /* ??? This should never appear, but since it's a builtin it
		 is accessible to abuse by users.  Just remove it and
		 replace the use with the arbitrary value zero.  */
	      rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	    do_replace:
	      lhs = gimple_call_lhs (stmt);
	      x = gimple_build_assign (lhs, rhs);
	      gsi_insert_before (gsi, x, GSI_SAME_STMT);
	      gsi_remove (gsi, true);
	      return;

	    case BUILT_IN_EH_COPY_VALUES:
	      /* Likewise this should not appear.  Remove it.  */
	      gsi_remove (gsi, true);
	      return;

	    default:
	      break;
	    }
      }
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* If the stmt can throw use a new temporary for the assignment
	 to a LHS.  This makes sure the old value of the LHS is
	 available on the EH edge.  Only do so for statements that
	 potentially fall through (no noreturn calls e.g.), otherwise
	 this new assignment might create fake fallthru regions.  */
      if (stmt_could_throw_p (stmt)
	  && gimple_has_lhs (stmt)
	  && gimple_stmt_may_fallthru (stmt)
	  && !tree_could_throw_p (gimple_get_lhs (stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	  gimple s = gimple_build_assign (lhs, tmp);
	  gimple_set_location (s, gimple_location (stmt));
	  gimple_set_block (s, gimple_block (stmt));
	  gimple_set_lhs (stmt, tmp);
	  if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
	      || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
	    DECL_GIMPLE_REG_P (tmp) = 1;
	  gsi_insert_after (gsi, s, GSI_SAME_STMT);
	}
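      /* For illustration (not from the original sources): given
	 "x = foo ();" where foo may throw, the statements after this
	 transformation are

		tmp = foo ();
		x = tmp;

	 so that on the EH edge out of the call the previous value of x
	 is still available.  */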
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && stmt_could_throw_p (stmt))
	{
	  record_stmt_eh_region (state->cur_region, stmt);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      maybe_record_in_goto_queue (state, stmt);
      break;

    case GIMPLE_SWITCH:
      verify_norecord_switch_expr (state, stmt);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	replace = lower_try_finally (state, stmt);
      else
	{
	  x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
	  if (!x)
	    {
	      replace = gimple_try_eval (stmt);
	      lower_eh_constructs_1 (state, &replace);
	    }
	  else
	    switch (gimple_code (x))
	      {
	      case GIMPLE_CATCH:
		replace = lower_catch (state, stmt);
		break;
	      case GIMPLE_EH_FILTER:
		replace = lower_eh_filter (state, stmt);
		break;
	      case GIMPLE_EH_MUST_NOT_THROW:
		replace = lower_eh_must_not_throw (state, stmt);
		break;
	      case GIMPLE_EH_ELSE:
		/* This code is only valid with GIMPLE_TRY_FINALLY.  */
		gcc_unreachable ();
	      default:
		replace = lower_cleanup (state, stmt);
		break;
	      }
	}

      /* Remove the old stmt and insert the transformed sequence
	 instead.  */
      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
      gsi_remove (gsi, true);

      /* Return since we don't want gsi_next ().  */
      return;

    case GIMPLE_EH_ELSE:
      /* We should be eliminating this in lower_try_finally et al.  */
      gcc_unreachable ();

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }

  gsi_next (gsi);
}

/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2.  */

static void
lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
    lower_eh_constructs_2 (state, &gsi);
}

static unsigned int
lower_eh_constructs (void)
{
  struct leh_state null_state;
  gimple_seq bodyp;

  bodyp = gimple_body (current_function_decl);
  if (bodyp == NULL)
    return 0;

  finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
  eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
  memset (&null_state, 0, sizeof (null_state));

  collect_finally_tree_1 (bodyp, NULL);
  lower_eh_constructs_1 (&null_state, &bodyp);
  gimple_set_body (current_function_decl, bodyp);

  /* We assume there's a return statement, or something, at the end of
     the function, and thus plopping the EH sequence afterward won't
     change anything.  */
  gcc_assert (!gimple_seq_may_fallthru (bodyp));
  gimple_seq_add_seq (&bodyp, eh_seq);

  /* We assume that since BODYP already existed, adding EH_SEQ to it
     didn't change its value, and we don't have to re-set the function.  */
  gcc_assert (bodyp == gimple_body (current_function_decl));

  htab_delete (finally_tree);
  BITMAP_FREE (eh_region_may_contain_throw_map);
  eh_seq = NULL;

  /* If this function needs a language specific EH personality routine
     and the frontend didn't already set one do so now.  */
  if (function_needs_eh_personality (cfun) == eh_personality_lang
      && !DECL_FUNCTION_PERSONALITY (current_function_decl))
    DECL_FUNCTION_PERSONALITY (current_function_decl)
      = lang_hooks.eh_personality ();

  return 0;
}
struct gimple_opt_pass pass_lower_eh =
{
 {
  GIMPLE_PASS,
  "eh",					/* name */
  NULL,					/* gate */
  lower_eh_constructs,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_EH,				/* tv_id */
  PROP_gimple_lcf,			/* properties_required */
  PROP_gimple_leh,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};

/* Create the multiple edges from an EH_DISPATCH statement to all of
   the possible handlers for its EH region.  Return true if there's
   no fallthru edge; false if there is.  */
2126 make_eh_dispatch_edges (gimple stmt)
2130 basic_block src, dst;
2132 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2133 src = gimple_bb (stmt);
2138 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2140 dst = label_to_block (c->label);
2141 make_edge (src, dst, 0);
2143 /* A catch-all handler doesn't have a fallthru. */
2144 if (c->type_list == NULL)
2149 case ERT_ALLOWED_EXCEPTIONS:
2150 dst = label_to_block (r->u.allowed.label);
2151 make_edge (src, dst, 0);
2161 /* Create the single EH edge from STMT to its nearest landing pad,
2162 if there is such a landing pad within the current function. */
2165 make_eh_edges (gimple stmt)
2167 basic_block src, dst;
2171 lp_nr = lookup_stmt_eh_lp (stmt);
2175 lp = get_eh_landing_pad_from_number (lp_nr);
2176 gcc_assert (lp != NULL);
2178 src = gimple_bb (stmt);
2179 dst = label_to_block (lp->post_landing_pad);
2180 make_edge (src, dst, EDGE_EH);
2183 /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2184 do not actually perform the final edge redirection.
2186 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2187 we intend to change the destination EH region as well; this means
2188 EH_LANDING_PAD_NR must already be set on the destination block label.
2189 If false, we're being called from generic cfg manipulation code and we
2190 should preserve our place within the region tree. */
2193 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
2195 eh_landing_pad old_lp, new_lp;
2198 int old_lp_nr, new_lp_nr;
2199 tree old_label, new_label;
2203 old_bb = edge_in->dest;
2204 old_label = gimple_block_label (old_bb);
2205 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2206 gcc_assert (old_lp_nr > 0);
2207 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2209 throw_stmt = last_stmt (edge_in->src);
2210 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2212 new_label = gimple_block_label (new_bb);
2214 /* Look for an existing region that might be using NEW_BB already. */
2215 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2218 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2219 gcc_assert (new_lp);
2221 /* Unless CHANGE_REGION is true, the new and old landing pad
2222 had better be associated with the same EH region. */
2223 gcc_assert (change_region || new_lp->region == old_lp->region);
2228 gcc_assert (!change_region);
2231 /* Notice when we redirect the last EH edge away from OLD_BB. */
2232 FOR_EACH_EDGE (e, ei, old_bb->preds)
2233 if (e != edge_in && (e->flags & EDGE_EH))
2238 /* NEW_LP already exists. If there are still edges into OLD_LP,
2239 there's nothing to do with the EH tree. If there are no more
2240 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2241 If CHANGE_REGION is true, then our caller is expecting to remove
2242 the landing pad. */
2243 if (e == NULL && !change_region)
2244 remove_eh_landing_pad (old_lp);
2248 /* No correct landing pad exists. If there are no more edges
2249 into OLD_LP, then we can simply re-use the existing landing pad.
2250 Otherwise, we have to create a new landing pad. */
2253 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2257 new_lp = gen_eh_landing_pad (old_lp->region);
2258 new_lp->post_landing_pad = new_label;
2259 EH_LANDING_PAD_NR (new_label) = new_lp->index;
2262 /* Maybe move the throwing statement to the new region. */
2263 if (old_lp != new_lp)
2265 remove_stmt_from_eh_lp (throw_stmt);
2266 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2270 /* Redirect EH edge E to NEW_BB. */
2273 redirect_eh_edge (edge edge_in, basic_block new_bb)
2275 redirect_eh_edge_1 (edge_in, new_bb, false);
2276 return ssa_redirect_edge (edge_in, new_bb);
2279 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2280 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2281 The actual edge update will happen in the caller. */
2284 redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
2286 tree new_lab = gimple_block_label (new_bb);
2287 bool any_changed = false;
2292 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2296 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2298 old_bb = label_to_block (c->label);
2299 if (old_bb == e->dest)
2307 case ERT_ALLOWED_EXCEPTIONS:
2308 old_bb = label_to_block (r->u.allowed.label);
2309 gcc_assert (old_bb == e->dest);
2310 r->u.allowed.label = new_lab;
2318 gcc_assert (any_changed);
2321 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2324 operation_could_trap_helper_p (enum tree_code op,
2335 case TRUNC_DIV_EXPR:
2337 case FLOOR_DIV_EXPR:
2338 case ROUND_DIV_EXPR:
2339 case EXACT_DIV_EXPR:
2341 case FLOOR_MOD_EXPR:
2342 case ROUND_MOD_EXPR:
2343 case TRUNC_MOD_EXPR:
2345 if (honor_snans || honor_trapv)
2348 return flag_trapping_math;
2349 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2358 /* Some floating point comparisons may trap. */
2363 case UNORDERED_EXPR:
2373 case FIX_TRUNC_EXPR:
2374 /* Conversion of floating point might trap. */
2380 /* These operations don't trap with floating point. */
2388 /* Any floating arithmetic may trap. */
2389 if (fp_operation && flag_trapping_math)
2397 /* Constructing an object cannot trap. */
2401 /* Any floating arithmetic may trap. */
2402 if (fp_operation && flag_trapping_math)
2410 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2411 to floating-point values. HONOR_TRAPV is true if OP is applied to integer
2412 type operands that may trap. If OP is a division operator, DIVISOR contains
2413 the value of the divisor. */
2416 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2419 bool honor_nans = (fp_operation && flag_trapping_math
2420 && !flag_finite_math_only);
2421 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2424 if (TREE_CODE_CLASS (op) != tcc_comparison
2425 && TREE_CODE_CLASS (op) != tcc_unary
2426 && TREE_CODE_CLASS (op) != tcc_binary)
2429 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2430 honor_nans, honor_snans, divisor,
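/* Illustrative answers (hypothetical arguments, assuming default flags;
   some_ssa_name stands for any non-constant operand):

     operation_could_trap_p (TRUNC_DIV_EXPR, false, false, integer_one_node)
       => false; the divisor is a known nonzero constant.
     operation_could_trap_p (TRUNC_DIV_EXPR, false, false, some_ssa_name)
       => true; a non-constant divisor may be zero.
     operation_could_trap_p (PLUS_EXPR, true, false, NULL_TREE)
       => flag_trapping_math; any FP arithmetic may trap. */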
2434 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2435 location or floating-point arithmetic. Cf. the RTL version, may_trap_p.
2436 This routine expects only GIMPLE lhs or rhs input. */
2439 tree_could_trap_p (tree expr)
2441 enum tree_code code;
2442 bool fp_operation = false;
2443 bool honor_trapv = false;
2444 tree t, base, div = NULL_TREE;
2449 code = TREE_CODE (expr);
2450 t = TREE_TYPE (expr);
2454 if (COMPARISON_CLASS_P (expr))
2455 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2457 fp_operation = FLOAT_TYPE_P (t);
2458 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2461 if (TREE_CODE_CLASS (code) == tcc_binary)
2462 div = TREE_OPERAND (expr, 1);
2463 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2469 case TARGET_MEM_REF:
2470 if (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
2471 && !TMR_INDEX (expr) && !TMR_INDEX2 (expr))
2473 return !TREE_THIS_NOTRAP (expr);
2479 case VIEW_CONVERT_EXPR:
2480 case WITH_SIZE_EXPR:
2481 expr = TREE_OPERAND (expr, 0);
2482 code = TREE_CODE (expr);
2485 case ARRAY_RANGE_REF:
2486 base = TREE_OPERAND (expr, 0);
2487 if (tree_could_trap_p (base))
2489 if (TREE_THIS_NOTRAP (expr))
2491 return !range_in_array_bounds_p (expr);
2494 base = TREE_OPERAND (expr, 0);
2495 if (tree_could_trap_p (base))
2497 if (TREE_THIS_NOTRAP (expr))
2499 return !in_array_bounds_p (expr);
2502 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2506 return !TREE_THIS_NOTRAP (expr);
2509 return TREE_THIS_VOLATILE (expr);
2512 t = get_callee_fndecl (expr);
2513 /* Assume that calls to weak functions may trap. */
2514 if (!t || !DECL_P (t))
2517 return tree_could_trap_p (t);
2521 /* Assume that accesses to weak functions may trap, unless we know
2522 they are certainly defined in the current TU or in some other
2523 LTO partition. */
2524 if (DECL_WEAK (expr))
2526 struct cgraph_node *node;
2527 if (!DECL_EXTERNAL (expr))
2529 node = cgraph_function_node (cgraph_get_node (expr), NULL);
2530 if (node && node->symbol.in_other_partition)
2537 /* Assume that accesses to weak vars may trap, unless we know
2538 they are certainly defined in the current TU or in some other
2539 LTO partition. */
2540 if (DECL_WEAK (expr))
2542 struct varpool_node *node;
2543 if (!DECL_EXTERNAL (expr))
2545 node = varpool_variable_node (varpool_get_node (expr), NULL);
2546 if (node && node->symbol.in_other_partition)
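/* Illustration (a hedged sketch of typical answers):

     a[i_1]     traps unless TREE_THIS_NOTRAP is set or the index is
                provably within bounds (in_array_bounds_p);
     *p_2       traps unless TREE_THIS_NOTRAP was established for it;
     x_3 / y_4  traps, since the divisor is not a nonzero constant;
     weak_fn    conservatively traps, as the symbol may never be
                resolved in this TU or its LTO partition. */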
2558 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
2559 an assignment or a conditional) may throw. */
2562 stmt_could_throw_1_p (gimple stmt)
2564 enum tree_code code = gimple_expr_code (stmt);
2565 bool honor_nans = false;
2566 bool honor_snans = false;
2567 bool fp_operation = false;
2568 bool honor_trapv = false;
2573 if (TREE_CODE_CLASS (code) == tcc_comparison
2574 || TREE_CODE_CLASS (code) == tcc_unary
2575 || TREE_CODE_CLASS (code) == tcc_binary)
2577 if (is_gimple_assign (stmt)
2578 && TREE_CODE_CLASS (code) == tcc_comparison)
2579 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
2580 else if (gimple_code (stmt) == GIMPLE_COND)
2581 t = TREE_TYPE (gimple_cond_lhs (stmt));
2583 t = gimple_expr_type (stmt);
2584 fp_operation = FLOAT_TYPE_P (t);
2587 honor_nans = flag_trapping_math && !flag_finite_math_only;
2588 honor_snans = flag_signaling_nans != 0;
2590 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2594 /* Check if the main expression may trap. */
2595 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2596 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2597 honor_nans, honor_snans, t,
2602 /* If the expression does not trap, see if any of the individual operands may
2604 for (i = 0; i < gimple_num_ops (stmt); i++)
2605 if (tree_could_trap_p (gimple_op (stmt, i)))
2612 /* Return true if statement STMT could throw an exception. */
2615 stmt_could_throw_p (gimple stmt)
2617 if (!flag_exceptions)
2620 /* The only statements that can throw an exception are assignments,
2621 conditionals, calls, resx, and asms. */
2622 switch (gimple_code (stmt))
2628 return !gimple_call_nothrow_p (stmt);
2632 if (!cfun->can_throw_non_call_exceptions)
2634 return stmt_could_throw_1_p (stmt);
2637 if (!cfun->can_throw_non_call_exceptions)
2639 return gimple_asm_volatile_p (stmt);
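/* Illustration: with plain -fexceptions only calls and resx can throw,
   so an assignment such as "t_1 = *p_2;" is considered throwing only
   when -fnon-call-exceptions is enabled and the load itself could
   trap; a call throws unless gimple_call_nothrow_p holds for it. */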
2647 /* Return true if expression T could throw an exception. */
2650 tree_could_throw_p (tree t)
2652 if (!flag_exceptions)
2654 if (TREE_CODE (t) == MODIFY_EXPR)
2656 if (cfun->can_throw_non_call_exceptions
2657 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2659 t = TREE_OPERAND (t, 1);
2662 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2663 t = TREE_OPERAND (t, 0);
2664 if (TREE_CODE (t) == CALL_EXPR)
2665 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2666 if (cfun->can_throw_non_call_exceptions)
2667 return tree_could_trap_p (t);
2671 /* Return true if STMT can throw an exception that is not caught within
2672 the current function (CFUN). */
2675 stmt_can_throw_external (gimple stmt)
2679 if (!stmt_could_throw_p (stmt))
2682 lp_nr = lookup_stmt_eh_lp (stmt);
2686 /* Return true if STMT can throw an exception that is caught within
2687 the current function (CFUN). */
2690 stmt_can_throw_internal (gimple stmt)
2694 if (!stmt_could_throw_p (stmt))
2697 lp_nr = lookup_stmt_eh_lp (stmt);
2701 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2702 remove any entry it might have from the EH table. Return true if
2703 any change was made. */
2706 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2708 if (stmt_could_throw_p (stmt))
2710 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2713 /* Likewise, but always use the current function. */
2716 maybe_clean_eh_stmt (gimple stmt)
2718 return maybe_clean_eh_stmt_fn (cfun, stmt);
2721 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2722 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2723 in the table if it should be in there. Return TRUE if a replacement was
2724 done that may require an EH edge purge. */
2727 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2729 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2733 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2735 if (new_stmt == old_stmt && new_stmt_could_throw)
2738 remove_stmt_from_eh_lp (old_stmt);
2739 if (new_stmt_could_throw)
2741 add_stmt_to_eh_lp (new_stmt, lp_nr);
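/* Typical caller sketch (hedged; this mirrors the statement-folding
   pattern used elsewhere in GCC, but the surrounding code here is an
   assumption, not part of this file):

     if (fold_stmt (&gsi))
       {
         gimple new_stmt = gsi_stmt (gsi);
         if (maybe_clean_or_replace_eh_stmt (stmt, new_stmt)
             && gimple_purge_dead_eh_edges (bb))
           cfg_changed = true;
       }
*/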
2751 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2752 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2753 operand is the return value of duplicate_eh_regions. */
2756 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2757 struct function *old_fun, gimple old_stmt,
2758 struct pointer_map_t *map, int default_lp_nr)
2760 int old_lp_nr, new_lp_nr;
2763 if (!stmt_could_throw_p (new_stmt))
2766 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2769 if (default_lp_nr == 0)
2771 new_lp_nr = default_lp_nr;
2773 else if (old_lp_nr > 0)
2775 eh_landing_pad old_lp, new_lp;
2777 old_lp = VEC_index (eh_landing_pad, old_fun->eh->lp_array, old_lp_nr);
2778 slot = pointer_map_contains (map, old_lp);
2779 new_lp = (eh_landing_pad) *slot;
2780 new_lp_nr = new_lp->index;
2784 eh_region old_r, new_r;
2786 old_r = VEC_index (eh_region, old_fun->eh->region_array, -old_lp_nr);
2787 slot = pointer_map_contains (map, old_r);
2788 new_r = (eh_region) *slot;
2789 new_lp_nr = -new_r->index;
2792 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2796 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2797 and thus no remapping is required. */
2800 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2804 if (!stmt_could_throw_p (new_stmt))
2807 lp_nr = lookup_stmt_eh_lp (old_stmt);
2811 add_stmt_to_eh_lp (new_stmt, lp_nr);
2815 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2816 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2817 this only handles handlers consisting of a single call, as that's the
2818 important case for C++: a destructor call for a particular object showing
2819 up in multiple handlers. */
2822 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2824 gimple_stmt_iterator gsi;
2828 gsi = gsi_start (oneh);
2829 if (!gsi_one_before_end_p (gsi))
2831 ones = gsi_stmt (gsi);
2833 gsi = gsi_start (twoh);
2834 if (!gsi_one_before_end_p (gsi))
2836 twos = gsi_stmt (gsi);
2838 if (!is_gimple_call (ones)
2839 || !is_gimple_call (twos)
2840 || gimple_call_lhs (ones)
2841 || gimple_call_lhs (twos)
2842 || gimple_call_chain (ones)
2843 || gimple_call_chain (twos)
2844 || !gimple_call_same_target_p (ones, twos)
2845 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
2848 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2849 if (!operand_equal_p (gimple_call_arg (ones, ai),
2850 gimple_call_arg (twos, ai), 0))
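/* Illustration: two single-statement cleanups both reading
   "A::~A (&a);" compare equal here; a present lhs, a static chain,
   a different callee, or any differing argument makes them
   distinct. */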
2856 /* Optimize
2857 try { A() } finally { try { ~B() } catch { ~A() } }
2858 try { ... } finally { ~A() }
2859 into
2860 try { A() } catch { ~B() }
2861 try { ~B() ... } finally { ~A() }
2863 This occurs frequently in C++, where A is a local variable and B is a
2864 temporary used in the initializer for A. */
2867 optimize_double_finally (gimple one, gimple two)
2870 gimple_stmt_iterator gsi;
2873 cleanup = gimple_try_cleanup (one);
2874 gsi = gsi_start (cleanup);
2875 if (!gsi_one_before_end_p (gsi))
2878 oneh = gsi_stmt (gsi);
2879 if (gimple_code (oneh) != GIMPLE_TRY
2880 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
2883 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
2885 gimple_seq seq = gimple_try_eval (oneh);
2887 gimple_try_set_cleanup (one, seq);
2888 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
2889 seq = copy_gimple_seq_and_replace_locals (seq);
2890 gimple_seq_add_seq (&seq, gimple_try_eval (two));
2891 gimple_try_set_eval (two, seq);
2895 /* Perform EH refactoring optimizations that are simpler to do when code
2896 flow has been lowered but EH structures haven't. */
2899 refactor_eh_r (gimple_seq seq)
2901 gimple_stmt_iterator gsi;
2906 gsi = gsi_start (seq);
2910 if (gsi_end_p (gsi))
2913 two = gsi_stmt (gsi);
2916 && gimple_code (one) == GIMPLE_TRY
2917 && gimple_code (two) == GIMPLE_TRY
2918 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
2919 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
2920 optimize_double_finally (one, two);
2922 switch (gimple_code (one))
2925 refactor_eh_r (gimple_try_eval (one));
2926 refactor_eh_r (gimple_try_cleanup (one));
2929 refactor_eh_r (gimple_catch_handler (one));
2931 case GIMPLE_EH_FILTER:
2932 refactor_eh_r (gimple_eh_filter_failure (one));
2934 case GIMPLE_EH_ELSE:
2935 refactor_eh_r (gimple_eh_else_n_body (one));
2936 refactor_eh_r (gimple_eh_else_e_body (one));
2951 refactor_eh_r (gimple_body (current_function_decl));
2956 gate_refactor_eh (void)
2958 return flag_exceptions != 0;
2961 struct gimple_opt_pass pass_refactor_eh =
2966 gate_refactor_eh, /* gate */
2967 refactor_eh, /* execute */
2970 0, /* static_pass_number */
2971 TV_TREE_EH, /* tv_id */
2972 PROP_gimple_lcf, /* properties_required */
2973 0, /* properties_provided */
2974 0, /* properties_destroyed */
2975 0, /* todo_flags_start */
2976 0 /* todo_flags_finish */
2980 /* At the end of gimple optimization, we can lower RESX. */
2983 lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map)
2986 eh_region src_r, dst_r;
2987 gimple_stmt_iterator gsi;
2992 lp_nr = lookup_stmt_eh_lp (stmt);
2994 dst_r = get_eh_region_from_lp_number (lp_nr);
2998 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
2999 gsi = gsi_last_bb (bb);
3003 /* We can wind up with no source region when pass_cleanup_eh shows
3004 that there are no entries into an eh region and deletes it, but
3005 then the block that contains the resx isn't removed. This can
3006 happen without optimization when the switch statement created by
3007 lower_try_finally_switch isn't simplified to remove the eh case.
3009 Resolve this by expanding the resx node into a trap. */
3011 fn = builtin_decl_implicit (BUILT_IN_TRAP);
3012 x = gimple_build_call (fn, 0);
3013 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3015 while (EDGE_COUNT (bb->succs) > 0)
3016 remove_edge (EDGE_SUCC (bb, 0));
3020 /* When we have a destination region, we resolve this by copying
3021 the excptr and filter values into place, and changing the edge
3022 to immediately after the landing pad. */
3031 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
3032 the failure decl into a new block, if needed. */
3033 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
3035 slot = pointer_map_contains (mnt_map, dst_r);
3038 gimple_stmt_iterator gsi2;
3040 new_bb = create_empty_bb (bb);
3042 add_bb_to_loop (new_bb, bb->loop_father);
3043 lab = gimple_block_label (new_bb);
3044 gsi2 = gsi_start_bb (new_bb);
3046 fn = dst_r->u.must_not_throw.failure_decl;
3047 x = gimple_build_call (fn, 0);
3048 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3049 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
3051 slot = pointer_map_insert (mnt_map, dst_r);
3057 new_bb = label_to_block (lab);
3060 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3061 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
3062 e->count = bb->count;
3063 e->probability = REG_BR_PROB_BASE;
3068 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
3070 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
3071 src_nr = build_int_cst (integer_type_node, src_r->index);
3072 x = gimple_build_call (fn, 2, dst_nr, src_nr);
3073 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3075 /* Update the flags for the outgoing edge. */
3076 e = single_succ_edge (bb);
3077 gcc_assert (e->flags & EDGE_EH);
3078 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3080 /* If there are no more EH users of the landing pad, delete it. */
3081 FOR_EACH_EDGE (e, ei, e->dest->preds)
3082 if (e->flags & EDGE_EH)
3086 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3087 remove_eh_landing_pad (lp);
3097 /* When we don't have a destination region, this exception escapes
3098 up the call chain. We resolve this by generating a call to the
3099 _Unwind_Resume library function. */
3101 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
3102 with no arguments for C++ and Java. Check for that. */
3103 if (src_r->use_cxa_end_cleanup)
3105 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
3106 x = gimple_build_call (fn, 0);
3107 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3111 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
3112 src_nr = build_int_cst (integer_type_node, src_r->index);
3113 x = gimple_build_call (fn, 1, src_nr);
3114 var = create_tmp_var (ptr_type_node, NULL);
3115 var = make_ssa_name (var, x);
3116 gimple_call_set_lhs (x, var);
3117 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3119 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
3120 x = gimple_build_call (fn, 1, var);
3121 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3124 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3127 gsi_remove (&gsi, true);
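/* Illustration (hedged sketch of the two lowered shapes; SSA names
   are hypothetical):

     resx 1;                            <- no destination in this function
   becomes
     ptr_5 = __builtin_eh_pointer (1);
     _Unwind_Resume (ptr_5);            <- or __cxa_end_cleanup () on ARM EABI

   while a resx whose destination region, say 4, is local becomes
     __builtin_eh_copy_values (4, 1);
   followed by a fallthru edge to just past landing pad 4. */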
3133 execute_lower_resx (void)
3136 struct pointer_map_t *mnt_map;
3137 bool dominance_invalidated = false;
3138 bool any_rewritten = false;
3140 mnt_map = pointer_map_create ();
3144 gimple last = last_stmt (bb);
3145 if (last && is_gimple_resx (last))
3147 dominance_invalidated |= lower_resx (bb, last, mnt_map);
3148 any_rewritten = true;
3152 pointer_map_destroy (mnt_map);
3154 if (dominance_invalidated)
3156 free_dominance_info (CDI_DOMINATORS);
3157 free_dominance_info (CDI_POST_DOMINATORS);
3160 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3164 gate_lower_resx (void)
3166 return flag_exceptions != 0;
3169 struct gimple_opt_pass pass_lower_resx =
3174 gate_lower_resx, /* gate */
3175 execute_lower_resx, /* execute */
3178 0, /* static_pass_number */
3179 TV_TREE_EH, /* tv_id */
3180 PROP_gimple_lcf, /* properties_required */
3181 0, /* properties_provided */
3182 0, /* properties_destroyed */
3183 0, /* todo_flags_start */
3184 TODO_verify_flow /* todo_flags_finish */
3188 /* Try to optimize var = {v} {CLOBBER} stmts followed just by
3189 an external throw. */
3192 optimize_clobbers (basic_block bb)
3194 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3195 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3197 gimple stmt = gsi_stmt (gsi);
3198 if (is_gimple_debug (stmt))
3200 if (!gimple_clobber_p (stmt)
3201 || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
3203 unlink_stmt_vdef (stmt);
3204 gsi_remove (&gsi, true);
3205 release_defs (stmt);
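/* Illustration (hedged GIMPLE sketch):

     <bb 9>:
     a ={v} {CLOBBER};
     b ={v} {CLOBBER};
     resx 2;      <- throws out of the function

   On a path that only leaves via the unwinder the clobbers serve no
   purpose, so they are unlinked and removed here. */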
3209 /* Try to sink var = {v} {CLOBBER} stmts followed just by
3210 internal throw to successor BB. */
3213 sink_clobbers (basic_block bb)
3217 gimple_stmt_iterator gsi, dgsi;
3219 bool any_clobbers = false;
3221 /* Only optimize if BB has a single EH successor and
3222 all predecessor edges are EH too. */
3223 if (!single_succ_p (bb)
3224 || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3227 FOR_EACH_EDGE (e, ei, bb->preds)
3229 if ((e->flags & EDGE_EH) == 0)
3233 /* And BB contains only CLOBBER stmts before the final
3234 RESX. */
3235 gsi = gsi_last_bb (bb);
3236 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3238 gimple stmt = gsi_stmt (gsi);
3239 if (is_gimple_debug (stmt))
3241 if (gimple_code (stmt) == GIMPLE_LABEL)
3243 if (!gimple_clobber_p (stmt)
3244 || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
3246 any_clobbers = true;
3251 succbb = single_succ (bb);
3252 dgsi = gsi_after_labels (succbb);
3253 gsi = gsi_last_bb (bb);
3254 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3256 gimple stmt = gsi_stmt (gsi);
3258 if (is_gimple_debug (stmt))
3260 if (gimple_code (stmt) == GIMPLE_LABEL)
3262 unlink_stmt_vdef (stmt);
3263 gsi_remove (&gsi, false);
3264 vdef = gimple_vdef (stmt);
3265 if (vdef && TREE_CODE (vdef) == SSA_NAME)
3267 release_ssa_name (vdef);
3268 vdef = SSA_NAME_VAR (vdef);
3269 mark_sym_for_renaming (vdef);
3270 gimple_set_vdef (stmt, vdef);
3271 gimple_set_vuse (stmt, vdef);
3273 gsi_insert_before (&dgsi, stmt, GSI_SAME_STMT);
3276 return TODO_update_ssa_only_virtuals;
3279 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3280 we have found some duplicate labels and removed some edges. */
3283 lower_eh_dispatch (basic_block src, gimple stmt)
3285 gimple_stmt_iterator gsi;
3290 bool redirected = false;
3292 region_nr = gimple_eh_dispatch_region (stmt);
3293 r = get_eh_region_from_number (region_nr);
3295 gsi = gsi_last_bb (src);
3301 VEC (tree, heap) *labels = NULL;
3302 tree default_label = NULL;
3306 struct pointer_set_t *seen_values = pointer_set_create ();
3308 /* Collect the labels for a switch. Zero the post_landing_pad
3309 field because we'll no longer have anything keeping these labels
3310 in existence and the optimizer will be free to merge these
3311 blocks at will. */
3312 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3314 tree tp_node, flt_node, lab = c->label;
3315 bool have_label = false;
3318 tp_node = c->type_list;
3319 flt_node = c->filter_list;
3321 if (tp_node == NULL)
3323 default_label = lab;
3328 /* Filter out duplicate labels that arise when this handler
3329 is shadowed by an earlier one. When no labels are
3330 attached to the handler anymore, we remove
3331 the corresponding edge and then we delete unreachable
3332 blocks at the end of this pass. */
3333 if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
3335 tree t = build_case_label (TREE_VALUE (flt_node),
3337 VEC_safe_push (tree, heap, labels, t);
3338 pointer_set_insert (seen_values, TREE_VALUE (flt_node));
3342 tp_node = TREE_CHAIN (tp_node);
3343 flt_node = TREE_CHAIN (flt_node);
3348 remove_edge (find_edge (src, label_to_block (lab)));
3353 /* Clean up the edge flags. */
3354 FOR_EACH_EDGE (e, ei, src->succs)
3356 if (e->flags & EDGE_FALLTHRU)
3358 /* If there was no catch-all, use the fallthru edge. */
3359 if (default_label == NULL)
3360 default_label = gimple_block_label (e->dest);
3361 e->flags &= ~EDGE_FALLTHRU;
3364 gcc_assert (default_label != NULL);
3366 /* Don't generate a switch if there's only a default case.
3367 This is common in the form of try { A; } catch (...) { B; }. */
3370 e = single_succ_edge (src);
3371 e->flags |= EDGE_FALLTHRU;
3375 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3376 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3378 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3379 filter = make_ssa_name (filter, x);
3380 gimple_call_set_lhs (x, filter);
3381 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3383 /* Turn the default label into a default case. */
3384 default_label = build_case_label (NULL, NULL, default_label);
3385 sort_case_labels (labels);
3387 x = gimple_build_switch_vec (filter, default_label, labels);
3388 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3390 VEC_free (tree, heap, labels);
3392 pointer_set_destroy (seen_values);
3396 case ERT_ALLOWED_EXCEPTIONS:
3398 edge b_e = BRANCH_EDGE (src);
3399 edge f_e = FALLTHRU_EDGE (src);
3401 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3402 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3404 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3405 filter = make_ssa_name (filter, x);
3406 gimple_call_set_lhs (x, filter);
3407 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3409 r->u.allowed.label = NULL;
3410 x = gimple_build_cond (EQ_EXPR, filter,
3411 build_int_cst (TREE_TYPE (filter),
3412 r->u.allowed.filter),
3413 NULL_TREE, NULL_TREE);
3414 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3416 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3417 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3425 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3426 gsi_remove (&gsi, true);
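/* Illustration (hedged sketch of the lowered dispatch for a region
   with one typed handler and a catch-all; filter values and labels
   are examples):

     filter_6 = __builtin_eh_filter (1);
     switch (filter_6) <default: <catch_all_lab>, case 1: <int_lab>>

   For ERT_ALLOWED_EXCEPTIONS a single "filter_6 == <allowed filter>"
   comparison is emitted instead of a switch. */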
3431 execute_lower_eh_dispatch (void)
3435 bool redirected = false;
3437 assign_filter_values ();
3441 gimple last = last_stmt (bb);
3444 if (gimple_code (last) == GIMPLE_EH_DISPATCH)
3446 redirected |= lower_eh_dispatch (bb, last);
3447 flags |= TODO_update_ssa_only_virtuals;
3449 else if (gimple_code (last) == GIMPLE_RESX)
3451 if (stmt_can_throw_external (last))
3452 optimize_clobbers (bb);
3454 flags |= sink_clobbers (bb);
3459 delete_unreachable_blocks ();
3464 gate_lower_eh_dispatch (void)
3466 return cfun->eh->region_tree != NULL;
3469 struct gimple_opt_pass pass_lower_eh_dispatch =
3473 "ehdisp", /* name */
3474 gate_lower_eh_dispatch, /* gate */
3475 execute_lower_eh_dispatch, /* execute */
3478 0, /* static_pass_number */
3479 TV_TREE_EH, /* tv_id */
3480 PROP_gimple_lcf, /* properties_required */
3481 0, /* properties_provided */
3482 0, /* properties_destroyed */
3483 0, /* todo_flags_start */
3484 TODO_verify_flow /* todo_flags_finish */
3488 /* Walk statements, see what regions are really referenced and remove
3489 those that are unused. */
3492 remove_unreachable_handlers (void)
3494 sbitmap r_reachable, lp_reachable;
3500 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3502 = sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array));
3503 sbitmap_zero (r_reachable);
3504 sbitmap_zero (lp_reachable);
3508 gimple_stmt_iterator gsi;
3510 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3512 gimple stmt = gsi_stmt (gsi);
3513 lp_nr = lookup_stmt_eh_lp (stmt);
3515 /* Negative LP numbers are MUST_NOT_THROW regions which
3516 are not considered BB enders. */
3518 SET_BIT (r_reachable, -lp_nr);
3520 /* Positive LP numbers are real landing pads, and are BB enders. */
3523 gcc_assert (gsi_one_before_end_p (gsi));
3524 region = get_eh_region_from_lp_number (lp_nr);
3525 SET_BIT (r_reachable, region->index);
3526 SET_BIT (lp_reachable, lp_nr);
3529 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3530 switch (gimple_code (stmt))
3533 SET_BIT (r_reachable, gimple_resx_region (stmt));
3535 case GIMPLE_EH_DISPATCH:
3536 SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
3546 fprintf (dump_file, "Before removal of unreachable regions:\n");
3547 dump_eh_tree (dump_file, cfun);
3548 fprintf (dump_file, "Reachable regions: ");
3549 dump_sbitmap_file (dump_file, r_reachable);
3550 fprintf (dump_file, "Reachable landing pads: ");
3551 dump_sbitmap_file (dump_file, lp_reachable);
3555 VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr)
3556 if (region && !TEST_BIT (r_reachable, r_nr))
3559 fprintf (dump_file, "Removing unreachable region %d\n", r_nr);
3560 remove_eh_handler (region);
3564 VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr)
3565 if (lp && !TEST_BIT (lp_reachable, lp_nr))
3568 fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr);
3569 remove_eh_landing_pad (lp);
3574 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3575 dump_eh_tree (dump_file, cfun);
3576 fprintf (dump_file, "\n\n");
3579 sbitmap_free (r_reachable);
3580 sbitmap_free (lp_reachable);
3582 #ifdef ENABLE_CHECKING
3583 verify_eh_tree (cfun);
3587 /* Remove unreachable handlers if any landing pads have been removed after
3588 the last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
3591 maybe_remove_unreachable_handlers (void)
3596 if (cfun->eh == NULL)
3599 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3600 if (lp && lp->post_landing_pad)
3602 if (label_to_block (lp->post_landing_pad) == NULL)
3604 remove_unreachable_handlers ();
3610 /* Remove regions that do not have landing pads. This assumes
3611 that remove_unreachable_handlers has already been run, and
3612 that we've just manipulated the landing pads since then. */
3615 remove_unreachable_handlers_no_lp (void)
3619 sbitmap r_reachable;
3622 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3623 sbitmap_zero (r_reachable);
3627 gimple stmt = last_stmt (bb);
3629 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3630 switch (gimple_code (stmt))
3633 SET_BIT (r_reachable, gimple_resx_region (stmt));
3635 case GIMPLE_EH_DISPATCH:
3636 SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
3643 for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
3644 if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW
3645 && !TEST_BIT (r_reachable, i))
3648 fprintf (dump_file, "Removing unreachable region %d\n", i);
3649 remove_eh_handler (r);
3652 sbitmap_free (r_reachable);
3655 /* Undo critical edge splitting on an EH landing pad. Earlier, we
3656 optimistically split all sorts of edges, including EH edges. The
3657 optimization passes in between may not have needed them; if not,
3658 we should undo the split.
3660 Recognize this case by having one EH edge incoming to the BB and
3661 one normal edge outgoing; BB should be empty apart from the
3662 post_landing_pad label.
3664 Note that this is slightly different from the empty handler case
3665 handled by cleanup_empty_eh, in that the actual handler may yet
3666 have actual code but the landing pad has been separated from the
3667 handler. As such, cleanup_empty_eh relies on this transformation
3668 having been done first. */
3671 unsplit_eh (eh_landing_pad lp)
3673 basic_block bb = label_to_block (lp->post_landing_pad);
3674 gimple_stmt_iterator gsi;
3677 /* Quickly check the edge counts on BB for singularity. */
3678 if (EDGE_COUNT (bb->preds) != 1 || EDGE_COUNT (bb->succs) != 1)
3680 e_in = EDGE_PRED (bb, 0);
3681 e_out = EDGE_SUCC (bb, 0);
3683 /* Input edge must be EH and output edge must be normal. */
3684 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
3687 /* The block must be empty except for the labels and debug insns. */
3688 gsi = gsi_after_labels (bb);
3689 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3690 gsi_next_nondebug (&gsi);
3691 if (!gsi_end_p (gsi))
3694 /* The destination block must not already have a landing pad
3695 for a different region. */
3696 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3698 gimple stmt = gsi_stmt (gsi);
3702 if (gimple_code (stmt) != GIMPLE_LABEL)
3704 lab = gimple_label_label (stmt);
3705 lp_nr = EH_LANDING_PAD_NR (lab);
3706 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3710 /* The new destination block must not already be a destination of
3711 the source block, lest we merge fallthru and EH edges and end up
3712 with all sorts of confusion. */
3713 if (find_edge (e_in->src, e_out->dest))
3716 /* ??? We can get degenerate phis due to cfg cleanups. I would have
3717 thought this should have been cleaned up by a phicprop pass, but
3718 that doesn't appear to handle virtuals. Propagate by hand. */
3719 if (!gimple_seq_empty_p (phi_nodes (bb)))
3721 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
3723 gimple use_stmt, phi = gsi_stmt (gsi);
3724 tree lhs = gimple_phi_result (phi);
3725 tree rhs = gimple_phi_arg_def (phi, 0);
3726 use_operand_p use_p;
3727 imm_use_iterator iter;
3729 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
3731 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3732 SET_USE (use_p, rhs);
3735 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3736 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
3738 remove_phi_node (&gsi, true);
3742 if (dump_file && (dump_flags & TDF_DETAILS))
3743 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
3744 lp->index, e_out->dest->index);
3746 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
3747 a successor edge, humor it. But do the real CFG change with the
3748 predecessor of E_OUT in order to preserve the ordering of arguments
3749 to the PHI nodes in E_OUT->DEST. */
3750 redirect_eh_edge_1 (e_in, e_out->dest, false);
3751 redirect_edge_pred (e_out, e_in->src);
3752 e_out->flags = e_in->flags;
3753 e_out->probability = e_in->probability;
3754 e_out->count = e_in->count;
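/* Illustration (hedged CFG sketch): before,

     throw_bb --EH--> lp_bb (just the post_landing_pad label) --> dest

   after unsplitting, the EH edge reaches DEST directly:

     throw_bb --EH--> dest

   and lp_bb goes unreachable, to be removed by later CFG cleanup. */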
3760 /* Examine each landing pad block and see if it matches unsplit_eh. */
3763 unsplit_all_eh (void)
3765 bool changed = false;
3769 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3771 changed |= unsplit_eh (lp);
3776 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
3777 to OLD_BB to NEW_BB; return true on success, false on failure.
3779 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
3780 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
3781 Virtual PHIs may be deleted and marked for renaming. */
3784 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
3785 edge old_bb_out, bool change_region)
3787 gimple_stmt_iterator ngsi, ogsi;
3790 bitmap rename_virts;
3791 bitmap ophi_handled;
3793 /* The destination block must not be a regular successor for any
3794 of the preds of the landing pad. Thus, avoid turning an EH edge and
3795 a normal edge into parallel edges between the same pair of blocks,
3804 which CFG verification would choke on. See PR45172 and PR51089. */
3805 FOR_EACH_EDGE (e, ei, old_bb->preds)
3806 if (find_edge (e->src, new_bb))
3809 FOR_EACH_EDGE (e, ei, old_bb->preds)
3810 redirect_edge_var_map_clear (e);
3812 ophi_handled = BITMAP_ALLOC (NULL);
3813 rename_virts = BITMAP_ALLOC (NULL);
3815 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
3816 for the edges we're going to move. */
3817 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
3819 gimple ophi, nphi = gsi_stmt (ngsi);
3822 nresult = gimple_phi_result (nphi);
3823 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
3825 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
3826 the source ssa_name. */
3828 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3830 ophi = gsi_stmt (ogsi);
3831 if (gimple_phi_result (ophi) == nop)
3836 /* If we did find the corresponding PHI, copy those inputs. */
3839 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
3840 if (!has_single_use (nop))
3842 imm_use_iterator imm_iter;
3843 use_operand_p use_p;
3845 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
3847 if (!gimple_debug_bind_p (USE_STMT (use_p))
3848 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
3849 || gimple_bb (USE_STMT (use_p)) != new_bb))
3853 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
3854 FOR_EACH_EDGE (e, ei, old_bb->preds)
3859 if ((e->flags & EDGE_EH) == 0)
3861 oop = gimple_phi_arg_def (ophi, e->dest_idx);
3862 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
3863 redirect_edge_var_map_add (e, nresult, oop, oloc);
3866 /* If we didn't find the PHI, but it's a VOP, remember to rename
3867 it later, assuming all other tests succeed. */
3868 else if (!is_gimple_reg (nresult))
3869 bitmap_set_bit (rename_virts, SSA_NAME_VERSION (nresult));
3870 /* If we didn't find the PHI, and it's a real variable, we know
3871 from the fact that OLD_BB is tree_empty_eh_handler_p that the
3872 variable is unchanged from input to the block and we can simply
3873 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
3877 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
3878 FOR_EACH_EDGE (e, ei, old_bb->preds)
3879 redirect_edge_var_map_add (e, nresult, nop, nloc);
3883 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
3884 we don't know what values from the other edges into NEW_BB to use. */
3885 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3887 gimple ophi = gsi_stmt (ogsi);
3888 tree oresult = gimple_phi_result (ophi);
3889 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
3893 /* At this point we know that the merge will succeed. Remove the PHI
3894 nodes for the virtuals that we want to rename. */
3895 if (!bitmap_empty_p (rename_virts))
3897 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); )
3899 gimple nphi = gsi_stmt (ngsi);
3900 tree nresult = gimple_phi_result (nphi);
3901 if (bitmap_bit_p (rename_virts, SSA_NAME_VERSION (nresult)))
3903 mark_virtual_phi_result_for_renaming (nphi);
3904 remove_phi_node (&ngsi, true);
3911 /* Finally, move the edges and update the PHIs. */
3912 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
3913 if (e->flags & EDGE_EH)
3915 /* ??? CFG manipulation routines do not try to update loop
3916 form on edge redirection. Do so manually here for now. */
3917 /* Redirecting a loop entry or latch edge will either create
3918 a multiple-entry loop or rotate the loop; if the loops merge,
3919 we may have created a loop with multiple latches.
3920 None of this is easily fixed, so cancel the affected loop
3921 and mark the other loop as possibly having multiple latches. */
3923 && e->dest == e->dest->loop_father->header)
3925 e->dest->loop_father->header = NULL;
3926 e->dest->loop_father->latch = NULL;
3927 new_bb->loop_father->latch = NULL;
3928 loops_state_set (LOOPS_NEED_FIXUP|LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
3930 redirect_eh_edge_1 (e, new_bb, change_region);
3931 redirect_edge_succ (e, new_bb);
3932 flush_pending_stmts (e);
3937 BITMAP_FREE (ophi_handled);
3938 BITMAP_FREE (rename_virts);
3942 FOR_EACH_EDGE (e, ei, old_bb->preds)
3943 redirect_edge_var_map_clear (e);
3944 BITMAP_FREE (ophi_handled);
3945 BITMAP_FREE (rename_virts);
3949 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
3950 old region to NEW_REGION at BB. */
3953 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
3954 eh_landing_pad lp, eh_region new_region)
3956 gimple_stmt_iterator gsi;
3959 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
3963 lp->region = new_region;
3964 lp->next_lp = new_region->landing_pads;
3965 new_region->landing_pads = lp;
3967 /* Delete the RESX that was matched within the empty handler block. */
3968 gsi = gsi_last_bb (bb);
3969 unlink_stmt_vdef (gsi_stmt (gsi));
3970 gsi_remove (&gsi, true);
3972 /* Clean up E_OUT for the fallthru. */
3973 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3974 e_out->probability = REG_BR_PROB_BASE;
3977 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
3978 unsplitting than unsplit_eh was prepared to handle, e.g. when
3979 multiple incoming edges and phis are involved. */
3982 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
3984 gimple_stmt_iterator gsi;
3987 /* We really ought not have totally lost everything following
3988 a landing pad label. Given that BB is empty, there had better
3989 be a successor. */
3990 gcc_assert (e_out != NULL);
3992 /* The destination block must not already have a landing pad
3993 for a different region. */
3995 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3997 gimple stmt = gsi_stmt (gsi);
4000 if (gimple_code (stmt) != GIMPLE_LABEL)
4002 lab = gimple_label_label (stmt);
4003 lp_nr = EH_LANDING_PAD_NR (lab);
4004 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4008 /* Attempt to move the PHIs into the successor block. */
4009 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
4011 if (dump_file && (dump_flags & TDF_DETAILS))
4013 "Unsplit EH landing pad %d to block %i "
4014 "(via cleanup_empty_eh).\n",
4015 lp->index, e_out->dest->index);
4022 /* Return true if edge E_FIRST is part of an empty infinite loop
4023 or leads to such a loop through a series of single-successor
4024 blocks. */
4027 infinite_empty_loop_p (edge e_first)
4029 bool inf_loop = false;
4032 if (e_first->dest == e_first->src)
4035 e_first->src->aux = (void *) 1;
4036 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4038 gimple_stmt_iterator gsi;
4044 e->dest->aux = (void *) 1;
4045 gsi = gsi_after_labels (e->dest);
4046 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4047 gsi_next_nondebug (&gsi);
4048 if (!gsi_end_p (gsi))
4051 e_first->src->aux = NULL;
4052 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4053 e->dest->aux = NULL;
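/* Illustration: a self-edge is the minimal case; a cycle of
   label-only, single-successor blocks, e.g. bb5 -> bb6 -> bb5, is
   detected by the AUX-marking walk above. */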
4058 /* Examine the block associated with LP to determine if it's an empty
4059 handler for its EH region. If so, attempt to redirect EH edges to
4060 an outer region. Return true if the CFG was updated in any way. This
4061 is similar to jump forwarding, just across EH edges. */
4064 cleanup_empty_eh (eh_landing_pad lp)
4066 basic_block bb = label_to_block (lp->post_landing_pad);
4067 gimple_stmt_iterator gsi;
4069 eh_region new_region;
4072 bool has_non_eh_pred;
4076 /* There can be zero or one edge out of BB. This is the quickest test. */
4077 switch (EDGE_COUNT (bb->succs))
4083 e_out = EDGE_SUCC (bb, 0);
4089 resx = last_stmt (bb);
4090 if (resx && is_gimple_resx (resx))
4092 if (stmt_can_throw_external (resx))
4093 optimize_clobbers (bb);
4094 else if (sink_clobbers (bb))
4098 gsi = gsi_after_labels (bb);
4100 /* Make sure to skip debug statements. */
4101 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4102 gsi_next_nondebug (&gsi);
4104 /* If the block is totally empty, look for more unsplitting cases. */
4105 if (gsi_end_p (gsi))
4107 /* For the degenerate case of an infinite loop bail out. */
4108 if (infinite_empty_loop_p (e_out))
4111 return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
4114 /* The block should consist only of a single RESX statement, modulo a
4115 preceding call to __builtin_stack_restore if there is no outgoing
4116 edge, since the call can be eliminated in this case. */
4117 resx = gsi_stmt (gsi);
4118 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4121 resx = gsi_stmt (gsi);
4123 if (!is_gimple_resx (resx))
4125 gcc_assert (gsi_one_before_end_p (gsi));
4127 /* Determine if there are non-EH edges, or resx edges into the handler. */
4128 has_non_eh_pred = false;
4129 FOR_EACH_EDGE (e, ei, bb->preds)
4130 if (!(e->flags & EDGE_EH))
4131 has_non_eh_pred = true;
4133 /* Find the handler enclosing the empty handler by looking at
4134 where the RESX instruction was vectored. */
4135 new_lp_nr = lookup_stmt_eh_lp (resx);
4136 new_region = get_eh_region_from_lp_number (new_lp_nr);
4138 /* If there's no destination region within the current function,
4139 redirection is trivial via removing the throwing statements from
4140 the EH region, removing the EH edges, and allowing the block
4141 to go unreachable. */
4142 if (new_region == NULL)
4144 gcc_assert (e_out == NULL);
4145 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4146 if (e->flags & EDGE_EH)
4148 gimple stmt = last_stmt (e->src);
4149 remove_stmt_from_eh_lp (stmt);
4157 /* If the destination region is a MUST_NOT_THROW, allow the runtime
4158 to handle the abort and allow the blocks to go unreachable. */
4159 if (new_region->type == ERT_MUST_NOT_THROW)
4161 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4162 if (e->flags & EDGE_EH)
4164 gimple stmt = last_stmt (e->src);
4165 remove_stmt_from_eh_lp (stmt);
4166 add_stmt_to_eh_lp (stmt, new_lp_nr);
4174 /* Try to redirect the EH edges and merge the PHIs into the destination
4175 landing pad block. If the merge succeeds, we'll already have redirected
4176 all the EH edges. The handler itself will go unreachable if there were
4177 no normal edges. */
4178 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
4181 /* Finally, if all input edges are EH edges, then we can (potentially)
4182 reduce the number of transfers from the runtime by moving the landing
4183 pad from the original region to the new region. This is a win when
4184 we remove the last CLEANUP region along a particular exception
4185 propagation path. Since nothing changes except for the region with
4186 which the landing pad is associated, the PHI nodes do not need to be
4187 updated. */
4188 if (!has_non_eh_pred)
4190 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4191 if (dump_file && (dump_flags & TDF_DETAILS))
4192 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4193 lp->index, new_region->index);
4195 /* ??? The CFG didn't change, but we may have rendered the
4196 old EH region unreachable. Trigger a cleanup there. */
4203 if (dump_file && (dump_flags & TDF_DETAILS))
4204 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4205 remove_eh_landing_pad (lp);
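/* Illustration (hedged): an empty cleanup whose handler is only

     <L12>: resx 1;     <- immediately rethrows to landing pad 1

   is eliminated by re-vectoring the throwing statements at the outer
   landing pad (or at the MUST_NOT_THROW or external destination),
   much like jump forwarding across EH edges. */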
4209 /* Do a post-order traversal of the EH region tree. Examine each
4210 post_landing_pad block and see if we can eliminate it as empty. */
4213 cleanup_all_empty_eh (void)
4215 bool changed = false;
4219 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
4221 changed |= cleanup_empty_eh (lp);
4226 /* Perform cleanups and lowering of exception handling
4227 1) cleanup regions with handlers doing nothing are optimized out
4228 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
4229 3) info about regions that contain instructions, and regions
4230 reachable via local EH edges, is collected
4231 4) the EH tree is pruned of regions that are no longer necessary.
4233 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
4234 Unify those that have the same failure decl and locus.
4238 execute_cleanup_eh_1 (void)
4240 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4241 looking up unreachable landing pads. */
4242 remove_unreachable_handlers ();
4244 /* Watch out for the region tree vanishing because all of it was unreachable. */
4245 if (cfun->eh->region_tree && optimize)
4247 bool changed = false;
4249 changed |= unsplit_all_eh ();
4250 changed |= cleanup_all_empty_eh ();
4254 free_dominance_info (CDI_DOMINATORS);
4255 free_dominance_info (CDI_POST_DOMINATORS);
4257 /* We delayed all basic block deletion, as we may have performed
4258 cleanups on EH edges while non-EH edges were still present. */
4259 delete_unreachable_blocks ();
4261 /* We manipulated the landing pads. Remove any region that no
4262 longer has a landing pad. */
4263 remove_unreachable_handlers_no_lp ();
4265 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4273 execute_cleanup_eh (void)
4275 int ret = execute_cleanup_eh_1 ();
4277 /* If the function no longer needs an EH personality routine
4278 clear it. This exposes cross-language inlining opportunities
4279 and avoids references to a never-defined personality routine. */
4280 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4281 && function_needs_eh_personality (cfun) != eh_personality_lang)
4282 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4288 gate_cleanup_eh (void)
4290 return cfun->eh != NULL && cfun->eh->region_tree != NULL;
4293 struct gimple_opt_pass pass_cleanup_eh = {
4296 "ehcleanup", /* name */
4297 gate_cleanup_eh, /* gate */
4298 execute_cleanup_eh, /* execute */
4301 0, /* static_pass_number */
4302 TV_TREE_EH, /* tv_id */
4303 PROP_gimple_lcf, /* properties_required */
4304 0, /* properties_provided */
4305 0, /* properties_destroyed */
4306 0, /* todo_flags_start */
4307 0 /* todo_flags_finish */
4311 /* Verify that BB, which contains STMT as its last statement, has precisely the
4312 edge that make_eh_edges would create. */
4315 verify_eh_edges (gimple stmt)
4317 basic_block bb = gimple_bb (stmt);
4318 eh_landing_pad lp = NULL;
4323 lp_nr = lookup_stmt_eh_lp (stmt);
4325 lp = get_eh_landing_pad_from_number (lp_nr);
4328 FOR_EACH_EDGE (e, ei, bb->succs)
4330 if (e->flags & EDGE_EH)
4334 error ("BB %i has multiple EH edges", bb->index);
4346 error ("BB %i can not throw but has an EH edge", bb->index);
4352 if (!stmt_could_throw_p (stmt))
4354 error ("BB %i last statement has incorrectly set lp", bb->index);
4358 if (eh_edge == NULL)
4360 error ("BB %i is missing an EH edge", bb->index);
4364 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
4366 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4373 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
4376 verify_eh_dispatch_edge (gimple stmt)
4380 basic_block src, dst;
4381 bool want_fallthru = true;
4385 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4386 src = gimple_bb (stmt);
4388 FOR_EACH_EDGE (e, ei, src->succs)
4389 gcc_assert (e->aux == NULL);
4394 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4396 dst = label_to_block (c->label);
4397 e = find_edge (src, dst);
4400 error ("BB %i is missing an edge", src->index);
4405 /* A catch-all handler doesn't have a fallthru. */
4406 if (c->type_list == NULL)
4408 want_fallthru = false;
4414 case ERT_ALLOWED_EXCEPTIONS:
4415 dst = label_to_block (r->u.allowed.label);
4416 e = find_edge (src, dst);
4419 error ("BB %i is missing an edge", src->index);
4430 FOR_EACH_EDGE (e, ei, src->succs)
4432 if (e->flags & EDGE_FALLTHRU)
4434 if (fall_edge != NULL)
4436 error ("BB %i too many fallthru edges", src->index);
4445 error ("BB %i has incorrect edge", src->index);
4449 if ((fall_edge != NULL) ^ want_fallthru)
4451 error ("BB %i has incorrect fallthru edge", src->index);