1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "pointer-set.h"
30 #include "tree-flow.h"
31 #include "tree-inline.h"
32 #include "tree-pass.h"
33 #include "langhooks.h"
35 #include "diagnostic-core.h"
40 /* In some instances a tree and a gimple need to be stored in the same
41 table, i.e. in hash tables. This is a structure to do that. */
42 typedef union {tree *tp; tree t; gimple g;} treemple;
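/* For example (a sketch based on collect_finally_tree below): a
   GIMPLE_LABEL is keyed by its LABEL_DECL through the tree member,

	treemple child;
	child.t = gimple_label_label (label_stmt);

   while a GIMPLE_TRY is keyed by the statement itself,

	child.g = try_stmt;

   so both kinds of keys can live in the same finally_tree hash table.  */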
44 /* Nonzero if we are using EH to handle cleanups. */
45 static int using_eh_for_cleanups_p = 0;
48 using_eh_for_cleanups (void)
50 using_eh_for_cleanups_p = 1;
53 /* Misc functions used in this file. */
55 /* Remember and lookup EH landing pad data for arbitrary statements.
56 Really this means any statement that could_throw_p. We could
57 stuff this information into the stmt_ann data structure, but:
59 (1) We absolutely rely on this information being kept until
60 we get to rtl. Once we're done with lowering here, if we lose
61 the information there's no way to recover it!
63 (2) There are many more statements that *cannot* throw as
64 compared to those that can. We should be saving some amount
65 of space by only allocating memory for those that can throw. */
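/* Each table entry pairs a statement with its landing pad number.
   Roughly (a sketch; see except.h for the authoritative definition):

	struct throw_stmt_node
	{
	  gimple stmt;
	  int lp_nr;
	};  */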
67 /* Add statement T in function IFUN to landing pad NUM. */
70 add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
72 struct throw_stmt_node *n;
75 gcc_assert (num != 0);
77 n = ggc_alloc_throw_stmt_node ();
81 if (!get_eh_throw_stmt_table (ifun))
82 set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
86 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
91 /* Add statement T in the current function (cfun) to EH landing pad NUM. */
94 add_stmt_to_eh_lp (gimple t, int num)
96 add_stmt_to_eh_lp_fn (cfun, t, num);
99 /* Add statement T to the single EH landing pad in REGION. */
102 record_stmt_eh_region (eh_region region, gimple t)
106 if (region->type == ERT_MUST_NOT_THROW)
107 add_stmt_to_eh_lp_fn (cfun, t, -region->index);
110 eh_landing_pad lp = region->landing_pads;
112 lp = gen_eh_landing_pad (region);
114 gcc_assert (lp->next_lp == NULL);
115 add_stmt_to_eh_lp_fn (cfun, t, lp->index);
120 /* Remove statement T in function IFUN from its EH landing pad. */
123 remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
125 struct throw_stmt_node dummy;
128 if (!get_eh_throw_stmt_table (ifun))
132 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
136 htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
144 /* Remove statement T in the current function (cfun) from its
148 remove_stmt_from_eh_lp (gimple t)
150 return remove_stmt_from_eh_lp_fn (cfun, t);
153 /* Determine if statement T is inside an EH region in function IFUN.
154 Positive numbers indicate a landing pad index; negative numbers
155 indicate a MUST_NOT_THROW region index; zero indicates that the
156 statement is not recorded in the region table. */
159 lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
161 struct throw_stmt_node *p, n;
163 if (ifun->eh->throw_stmt_table == NULL)
167 p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n);
168 return p ? p->lp_nr : 0;
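/* A usage sketch for a hypothetical caller, interpreting the return
   value as documented above:

	int lp_nr = lookup_stmt_eh_lp_fn (ifun, stmt);
	if (lp_nr > 0)
	  ... stmt transfers to landing pad lp_nr if it throws ...
	else if (lp_nr < 0)
	  ... stmt lies within MUST_NOT_THROW region -lp_nr ...
	else
	  ... stmt is not recorded in the region table ...  */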
171 /* Likewise, but always use the current function. */
174 lookup_stmt_eh_lp (gimple t)
176 /* We can get called from initialized data when -fnon-call-exceptions
177 is on; prevent crash. */
180 return lookup_stmt_eh_lp_fn (cfun, t);
183 /* First pass of EH node decomposition. Build up a tree of GIMPLE_TRY_FINALLY
184 nodes and LABEL_DECL nodes. We will use this during the second phase to
185 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
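/* For intuition, consider a hypothetical source fragment such as

	try { ... goto out; ... }
	finally { cleanup (); }
	out:;

   The goto leaves the TRY_FINALLY body, so the second phase must route
   it through the finally block; knowing which labels live under which
   GIMPLE_TRY nodes is what makes that test possible.  */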
187 struct finally_tree_node
189 /* When storing a GIMPLE_TRY, we have to record a gimple. However,
190 when deciding whether a GOTO to a certain LABEL_DECL (which is a
191 tree) leaves the TRY block, it's necessary to record a tree in
192 this field. Thus a treemple is used. */
197 /* Note that this table is *not* marked GTY. It is short-lived. */
198 static htab_t finally_tree;
201 record_in_finally_tree (treemple child, gimple parent)
203 struct finally_tree_node *n;
206 n = XNEW (struct finally_tree_node);
210 slot = htab_find_slot (finally_tree, n, INSERT);
216 collect_finally_tree (gimple stmt, gimple region);
218 /* Go through the gimple sequence. Works with collect_finally_tree to
219 record all GIMPLE_LABEL and GIMPLE_TRY statements. */
222 collect_finally_tree_1 (gimple_seq seq, gimple region)
224 gimple_stmt_iterator gsi;
226 for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
227 collect_finally_tree (gsi_stmt (gsi), region);
231 collect_finally_tree (gimple stmt, gimple region)
235 switch (gimple_code (stmt))
238 temp.t = gimple_label_label (stmt);
239 record_in_finally_tree (temp, region);
243 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
246 record_in_finally_tree (temp, region);
247 collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
248 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
250 else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
252 collect_finally_tree_1 (gimple_try_eval (stmt), region);
253 collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
258 collect_finally_tree_1 (gimple_catch_handler (stmt), region);
261 case GIMPLE_EH_FILTER:
262 collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
266 collect_finally_tree_1 (gimple_eh_else_n_body (stmt), region);
267 collect_finally_tree_1 (gimple_eh_else_e_body (stmt), region);
271 /* A type, a decl, or some kind of statement that we're not
272 interested in. Don't walk them. */
278 /* Use the finally tree to determine if a jump from START to TARGET
279 would leave the try_finally node that START lives in. */
282 outside_finally_tree (treemple start, gimple target)
284 struct finally_tree_node n, *p;
289 p = (struct finally_tree_node *) htab_find (finally_tree, &n);
294 while (start.g != target);
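/* In other words: starting from START, repeatedly climb to the parent
   recorded by collect_finally_tree.  Falling off the root of the tree
   before reaching TARGET means the jump escapes TARGET's try body.  */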
299 /* Second pass of EH node decomposition. Actually transform the GIMPLE_TRY
300 nodes into a set of gotos, magic labels, and eh regions.
301 The eh region creation is straightforward, but frobbing all the gotos
302 and such into shape isn't. */
304 /* The sequence into which we record all EH stuff. This will be
305 placed at the end of the function when we're all done. */
306 static gimple_seq eh_seq;
308 /* Record whether an EH region contains something that can throw,
309 indexed by EH region number. */
310 static bitmap eh_region_may_contain_throw_map;
312 /* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
313 statements that are seen to escape this GIMPLE_TRY_FINALLY node.
314 The idea is to record a gimple statement for everything except for
315 the conditionals, which get their labels recorded. Since labels are
316 of type 'tree', we need this node to store both gimple and tree
317 objects. REPL_STMT is the sequence used to replace the goto/return
318 statement. CONT_STMT is used to store the statement that allows
319 the return/goto to jump to the original destination. */
321 struct goto_queue_node
325 gimple_seq repl_stmt;
328 /* This is used when index >= 0 to indicate that stmt is a label (as
329 opposed to a goto stmt). */
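/* For example (sketch): for a "return x;" that escapes a try/finally,
   REPL_STMT becomes "goto <finally_label>;" while CONT_STMT holds the
   original GIMPLE_RETURN, to be re-emitted where the finally block
   finishes (see do_return_redirection below).  */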
333 /* State of the world while lowering. */
337 /* What's "current" while constructing the eh region tree. These
338 correspond to variables of the same name in cfun->eh, which we
339 don't have easy access to. */
340 eh_region cur_region;
342 /* What's "current" for the purposes of __builtin_eh_pointer. For
343 a CATCH, this is the associated TRY. For an EH_FILTER, this is
344 the associated ALLOWED_EXCEPTIONS, etc. */
345 eh_region ehp_region;
347 /* Processing of TRY_FINALLY requires a bit more state. This is
348 split out into a separate structure so that we don't have to
349 copy so much when processing other nodes. */
350 struct leh_tf_state *tf;
355 /* Pointer to the GIMPLE_TRY_FINALLY node under discussion. The
356 try_finally_expr is the original GIMPLE_TRY_FINALLY. We need to retain
357 this so that outside_finally_tree can reliably reference the tree used
358 in the collect_finally_tree data structures. */
359 gimple try_finally_expr;
362 /* While lowering, a top_p is usually expanded into multiple statements,
363 so we need the following field to store them. */
364 gimple_seq top_p_seq;
366 /* The state outside this try_finally node. */
367 struct leh_state *outer;
369 /* The exception region created for it. */
372 /* The goto queue. */
373 struct goto_queue_node *goto_queue;
374 size_t goto_queue_size;
375 size_t goto_queue_active;
377 /* Pointer map to help in searching goto_queue when it is large. */
378 struct pointer_map_t *goto_queue_map;
380 /* The set of unique labels seen as entries in the goto queue. */
381 VEC(tree,heap) *dest_array;
383 /* A label to be added at the end of the completed transformed
384 sequence. It will be set if may_fallthru was true *at one time*,
385 though subsequent transformations may have cleared that flag. */
388 /* True if it is possible to fall out the bottom of the try block.
389 Cleared if the fallthru is converted to a goto. */
392 /* True if any entry in goto_queue is a GIMPLE_RETURN. */
395 /* True if the finally block can receive an exception edge.
396 Cleared if the exception case is handled by code duplication. */
400 static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);
402 /* Search for STMT in the goto queue. Return the replacement,
403 or null if the statement isn't in the queue. */
405 #define LARGE_GOTO_QUEUE 20
407 static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);
410 find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
415 if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
417 for (i = 0; i < tf->goto_queue_active; i++)
418 if (tf->goto_queue[i].stmt.g == stmt.g)
419 return tf->goto_queue[i].repl_stmt;
423 /* If we have a large number of entries in the goto_queue, create a
424 pointer map and use that for searching. */
426 if (!tf->goto_queue_map)
428 tf->goto_queue_map = pointer_map_create ();
429 for (i = 0; i < tf->goto_queue_active; i++)
431 slot = pointer_map_insert (tf->goto_queue_map,
432 tf->goto_queue[i].stmt.g);
433 gcc_assert (*slot == NULL);
434 *slot = &tf->goto_queue[i];
438 slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
440 return (((struct goto_queue_node *) *slot)->repl_stmt);
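/* Design note: the linear scan is cheapest for the common small queue;
   the pointer map is built lazily, only once a lookup actually sees
   goto_queue_active >= LARGE_GOTO_QUEUE, after which lookups are O(1).  */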
445 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
446 lowered GIMPLE_COND. If, by chance, the replacement is a simple goto,
447 then we can just splat it in, otherwise we add the new stmts immediately
448 after the GIMPLE_COND and redirect. */
451 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
452 gimple_stmt_iterator *gsi)
457 location_t loc = gimple_location (gsi_stmt (*gsi));
460 new_seq = find_goto_replacement (tf, temp);
464 if (gimple_seq_singleton_p (new_seq)
465 && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
467 *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
471 label = create_artificial_label (loc);
472 /* Set the new label for the GIMPLE_COND. */
475 gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
476 gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
479 /* The real work of replace_goto_queue. Returns with GSI updated to
480 point to the next statement. */
482 static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);
485 replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
486 gimple_stmt_iterator *gsi)
492 switch (gimple_code (stmt))
497 seq = find_goto_replacement (tf, temp);
500 gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
501 gsi_remove (gsi, false);
507 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
508 replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
512 replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
513 replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
516 replace_goto_queue_stmt_list (gimple_catch_handler_ptr (stmt), tf);
518 case GIMPLE_EH_FILTER:
519 replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
522 replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (stmt), tf);
523 replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (stmt), tf);
527 /* These won't have gotos in them. */
534 /* A subroutine of replace_goto_queue. Handles GIMPLE_SEQ. */
537 replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
539 gimple_stmt_iterator gsi = gsi_start (*seq);
541 while (!gsi_end_p (gsi))
542 replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
545 /* Replace all goto queue members. */
548 replace_goto_queue (struct leh_tf_state *tf)
550 if (tf->goto_queue_active == 0)
552 replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
553 replace_goto_queue_stmt_list (&eh_seq, tf);
556 /* Add a new record to the goto queue contained in TF. NEW_STMT is the
557 data to be added, IS_LABEL indicates whether NEW_STMT is a label or
561 record_in_goto_queue (struct leh_tf_state *tf,
568 struct goto_queue_node *q;
570 gcc_assert (!tf->goto_queue_map);
572 active = tf->goto_queue_active;
573 size = tf->goto_queue_size;
576 size = (size ? size * 2 : 32);
577 tf->goto_queue_size = size;
579 = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
582 q = &tf->goto_queue[active];
583 tf->goto_queue_active = active + 1;
585 memset (q, 0, sizeof (*q));
588 q->location = location;
589 q->is_label = is_label;
592 /* Record the LABEL label in the goto queue contained in TF.
596 record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
600 treemple temp, new_stmt;
605 /* Computed and non-local gotos do not get processed. Given
606 their nature we can neither tell whether we've escaped the
607 finally block nor redirect them even if we could tell. */
608 if (TREE_CODE (label) != LABEL_DECL)
611 /* No need to record gotos that don't leave the try block. */
613 if (!outside_finally_tree (temp, tf->try_finally_expr))
616 if (!tf->dest_array)
618 tf->dest_array = VEC_alloc (tree, heap, 10);
619 VEC_quick_push (tree, tf->dest_array, label);
624 int n = VEC_length (tree, tf->dest_array);
625 for (index = 0; index < n; ++index)
626 if (VEC_index (tree, tf->dest_array, index) == label)
629 VEC_safe_push (tree, heap, tf->dest_array, label);
632 /* In the case of a GOTO we want to record the destination label,
633 since with a GIMPLE_COND we have easy access to the then/else
636 record_in_goto_queue (tf, new_stmt, index, true, location);
639 /* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
640 node, and if so record that fact in the goto queue associated with that
644 maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
646 struct leh_tf_state *tf = state->tf;
652 switch (gimple_code (stmt))
655 new_stmt.tp = gimple_op_ptr (stmt, 2);
656 record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt),
657 EXPR_LOCATION (*new_stmt.tp));
658 new_stmt.tp = gimple_op_ptr (stmt, 3);
659 record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt),
660 EXPR_LOCATION (*new_stmt.tp));
664 record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
665 gimple_location (stmt));
669 tf->may_return = true;
671 record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
680 #ifdef ENABLE_CHECKING
681 /* We do not process GIMPLE_SWITCHes for now. As long as the original source
682 was in fact structured, and we've not yet done jump threading, none
683 of the labels will leave outer GIMPLE_TRY_FINALLY nodes. Verify this. */
686 verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
688 struct leh_tf_state *tf = state->tf;
694 n = gimple_switch_num_labels (switch_expr);
696 for (i = 0; i < n; ++i)
699 tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
701 gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
705 #define verify_norecord_switch_expr(state, switch_expr)
708 /* Redirect a RETURN_EXPR pointed to by Q to FINLAB. If MOD is
709 non-null, insert it before the new branch. */
712 do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
716 /* In the case of a return, the queue node must be a gimple statement. */
717 gcc_assert (!q->is_label);
719 /* Note that the return value may have already been computed, e.g.,
732 should return 0, not 1. We don't have to do anything to make
733 this happen because the return value has been placed in the
734 RESULT_DECL already. */
736 q->cont_stmt = q->stmt.g;
739 gimple_seq_add_seq (&q->repl_stmt, mod);
741 x = gimple_build_goto (finlab);
742 gimple_set_location (x, q->location);
743 gimple_seq_add_stmt (&q->repl_stmt, x);
746 /* Similar, but easier, for GIMPLE_GOTO. */
749 do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
750 struct leh_tf_state *tf)
754 gcc_assert (q->is_label);
756 q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));
759 gimple_seq_add_seq (&q->repl_stmt, mod);
761 x = gimple_build_goto (finlab);
762 gimple_set_location (x, q->location);
763 gimple_seq_add_stmt (&q->repl_stmt, x);
766 /* Emit a standard landing pad sequence into SEQ for REGION. */
769 emit_post_landing_pad (gimple_seq *seq, eh_region region)
771 eh_landing_pad lp = region->landing_pads;
775 lp = gen_eh_landing_pad (region);
777 lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
778 EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;
780 x = gimple_build_label (lp->post_landing_pad);
781 gimple_seq_add_stmt (seq, x);
784 /* Emit a RESX statement into SEQ for REGION. */
787 emit_resx (gimple_seq *seq, eh_region region)
789 gimple x = gimple_build_resx (region->index);
790 gimple_seq_add_stmt (seq, x);
792 record_stmt_eh_region (region->outer, x);
795 /* Emit an EH_DISPATCH statement into SEQ for REGION. */
798 emit_eh_dispatch (gimple_seq *seq, eh_region region)
800 gimple x = gimple_build_eh_dispatch (region->index);
801 gimple_seq_add_stmt (seq, x);
804 /* Note that the current EH region may contain a throw, or a
805 call to a function which itself may contain a throw. */
808 note_eh_region_may_contain_throw (eh_region region)
810 while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
812 if (region->type == ERT_MUST_NOT_THROW)
814 region = region->outer;
820 /* Check if REGION has been marked as containing a throw. If REGION is
821 NULL, this predicate is false. */
824 eh_region_may_contain_throw (eh_region r)
826 return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
829 /* We want to transform
830 try { body; } catch { stuff; }
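   into, roughly:

	normal sequence:
	  body;
	  over:
	eh sequence:
	  post_landing_pad:
	  stuff;
	  goto over;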
840 TP is a GIMPLE_TRY node. REGION is the region whose post_landing_pad
841 should be placed before the second operand, or NULL. OVER is
842 an existing label that should be put at the exit, or NULL. */
845 frob_into_branch_around (gimple tp, eh_region region, tree over)
848 gimple_seq cleanup, result;
849 location_t loc = gimple_location (tp);
851 cleanup = gimple_try_cleanup (tp);
852 result = gimple_try_eval (tp);
855 emit_post_landing_pad (&eh_seq, region);
857 if (gimple_seq_may_fallthru (cleanup))
860 over = create_artificial_label (loc);
861 x = gimple_build_goto (over);
862 gimple_set_location (x, loc);
863 gimple_seq_add_stmt (&cleanup, x);
865 gimple_seq_add_seq (&eh_seq, cleanup);
869 x = gimple_build_label (over);
870 gimple_seq_add_stmt (&result, x);
875 /* A subroutine of lower_try_finally. Duplicate the sequence SEQ.
876 Make sure to record all new labels found. */
879 lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
882 gimple region = NULL;
884 gimple_stmt_iterator gsi;
886 new_seq = copy_gimple_seq_and_replace_locals (seq);
888 for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
890 gimple stmt = gsi_stmt (gsi);
891 if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
893 tree block = gimple_block (stmt);
894 gimple_set_location (stmt, loc);
895 gimple_set_block (stmt, block);
900 region = outer_state->tf->try_finally_expr;
901 collect_finally_tree_1 (new_seq, region);
906 /* A subroutine of lower_try_finally. Create a fallthru label for
907 the given try_finally state. The only tricky bit here is that
908 we have to make sure to record the label in our outer context. */
911 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
913 tree label = tf->fallthru_label;
918 label = create_artificial_label (gimple_location (tf->try_finally_expr));
919 tf->fallthru_label = label;
923 record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
929 /* A subroutine of lower_try_finally. If FINALLY consists of a
930 GIMPLE_EH_ELSE node, return it. */
933 get_eh_else (gimple_seq finally)
935 gimple x = gimple_seq_first_stmt (finally);
936 if (gimple_code (x) == GIMPLE_EH_ELSE)
938 gcc_assert (gimple_seq_singleton_p (finally));
944 /* A subroutine of lower_try_finally. If the eh_protect_cleanup_actions
945 langhook returns non-null, then the language requires that the exception
946 path out of a try_finally be treated specially. To wit: the code within
947 the finally block may not itself throw an exception. We have two choices
948 here. First we can duplicate the finally block and wrap it in a
949 must_not_throw region. Second, we can generate code like
954 if (fintmp == eh_edge)
955 protect_cleanup_actions;
958 where "fintmp" is the temporary used in the switch statement generation
959 alternative considered below. For the nonce, we always choose the first
962 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
965 honor_protect_cleanup_actions (struct leh_state *outer_state,
966 struct leh_state *this_state,
967 struct leh_tf_state *tf)
969 tree protect_cleanup_actions;
970 gimple_stmt_iterator gsi;
971 bool finally_may_fallthru;
975 /* First check for nothing to do. */
976 if (lang_hooks.eh_protect_cleanup_actions == NULL)
978 protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
979 if (protect_cleanup_actions == NULL)
982 finally = gimple_try_cleanup (tf->top_p);
983 eh_else = get_eh_else (finally);
985 /* Duplicate the FINALLY block. Only need to do this for try-finally,
986 and not for cleanups. If we've got an EH_ELSE, extract it now. */
989 finally = gimple_eh_else_e_body (eh_else);
990 gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
993 finally = lower_try_finally_dup_block (finally, outer_state,
994 gimple_location (tf->try_finally_expr));
995 finally_may_fallthru = gimple_seq_may_fallthru (finally);
997 /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
998 set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
999 to be in an enclosing scope, but needs to be implemented at this level
1000 to avoid a nesting violation (see wrap_temporary_cleanups in
1001 cp/decl.c). Since it's logically at an outer level, we should call
1002 terminate before we get to it, so strip it away before adding the
1003 MUST_NOT_THROW filter. */
1004 gsi = gsi_start (finally);
1006 if (gimple_code (x) == GIMPLE_TRY
1007 && gimple_try_kind (x) == GIMPLE_TRY_CATCH
1008 && gimple_try_catch_is_cleanup (x))
1010 gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
1011 gsi_remove (&gsi, false);
1014 /* Wrap the block with protect_cleanup_actions as the action. */
1015 x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
1016 x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
1018 finally = lower_eh_must_not_throw (outer_state, x);
1020 /* Drop all of this into the exception sequence. */
1021 emit_post_landing_pad (&eh_seq, tf->region);
1022 gimple_seq_add_seq (&eh_seq, finally);
1023 if (finally_may_fallthru)
1024 emit_resx (&eh_seq, tf->region);
1026 /* Having now been handled, EH isn't to be considered with
1027 the rest of the outgoing edges. */
1028 tf->may_throw = false;
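/* In outline, the EH path constructed above looks like (a sketch):

	post_landing_pad:
	  try { copy of FINALLY }
	  catch { MUST_NOT_THROW: protect_cleanup_actions }
	  resx;   only if the finally may fall through

   which is the first alternative described in the comment above.  */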
1031 /* A subroutine of lower_try_finally. We have determined that there is
1032 no fallthru edge out of the finally block. This means that there is
1033 no outgoing edge corresponding to any incoming edge. Restructure the
1034 try_finally node for this special case. */
1037 lower_try_finally_nofallthru (struct leh_state *state,
1038 struct leh_tf_state *tf)
1043 struct goto_queue_node *q, *qe;
1045 lab = create_artificial_label (gimple_location (tf->try_finally_expr));
1047 /* We expect that tf->top_p is a GIMPLE_TRY. */
1048 finally = gimple_try_cleanup (tf->top_p);
1049 tf->top_p_seq = gimple_try_eval (tf->top_p);
1051 x = gimple_build_label (lab);
1052 gimple_seq_add_stmt (&tf->top_p_seq, x);
1055 qe = q + tf->goto_queue_active;
1058 do_return_redirection (q, lab, NULL);
1060 do_goto_redirection (q, lab, NULL, tf);
1062 replace_goto_queue (tf);
1064 /* Emit the finally block into the stream. Lower EH_ELSE at this time. */
1065 eh_else = get_eh_else (finally);
1068 finally = gimple_eh_else_n_body (eh_else);
1069 lower_eh_constructs_1 (state, &finally);
1070 gimple_seq_add_seq (&tf->top_p_seq, finally);
1074 finally = gimple_eh_else_e_body (eh_else);
1075 lower_eh_constructs_1 (state, &finally);
1077 emit_post_landing_pad (&eh_seq, tf->region);
1078 gimple_seq_add_seq (&eh_seq, finally);
1083 lower_eh_constructs_1 (state, &finally);
1084 gimple_seq_add_seq (&tf->top_p_seq, finally);
1088 emit_post_landing_pad (&eh_seq, tf->region);
1090 x = gimple_build_goto (lab);
1091 gimple_set_location (x, gimple_location (tf->try_finally_expr));
1092 gimple_seq_add_stmt (&eh_seq, x);
1097 /* A subroutine of lower_try_finally. We have determined that there is
1098 exactly one destination of the finally block. Restructure the
1099 try_finally node for this special case. */
1102 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
1104 struct goto_queue_node *q, *qe;
1107 gimple_stmt_iterator gsi;
1109 location_t loc = gimple_location (tf->try_finally_expr);
1111 finally = gimple_try_cleanup (tf->top_p);
1112 tf->top_p_seq = gimple_try_eval (tf->top_p);
1114 /* Since there's only one destination, and the destination edge can only
1115 either be EH or non-EH, that implies that all of our incoming edges
1116 are of the same type. Therefore we can lower EH_ELSE immediately. */
1117 x = get_eh_else (finally);
1121 finally = gimple_eh_else_e_body (x);
1123 finally = gimple_eh_else_n_body (x);
1126 lower_eh_constructs_1 (state, &finally);
1128 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1130 gimple stmt = gsi_stmt (gsi);
1131 if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
1133 tree block = gimple_block (stmt);
1134 gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
1135 gimple_set_block (stmt, block);
1141 /* Only reachable via the exception edge. Add the given label to
1142 the head of the FINALLY block. Append a RESX at the end. */
1143 emit_post_landing_pad (&eh_seq, tf->region);
1144 gimple_seq_add_seq (&eh_seq, finally);
1145 emit_resx (&eh_seq, tf->region);
1149 if (tf->may_fallthru)
1151 /* Only reachable via the fallthru edge. Do nothing but let
1152 the two blocks run together; we'll fall out the bottom. */
1153 gimple_seq_add_seq (&tf->top_p_seq, finally);
1157 finally_label = create_artificial_label (loc);
1158 x = gimple_build_label (finally_label);
1159 gimple_seq_add_stmt (&tf->top_p_seq, x);
1161 gimple_seq_add_seq (&tf->top_p_seq, finally);
1164 qe = q + tf->goto_queue_active;
1168 /* Reachable by return expressions only. Redirect them. */
1170 do_return_redirection (q, finally_label, NULL);
1171 replace_goto_queue (tf);
1175 /* Reachable by goto expressions only. Redirect them. */
1177 do_goto_redirection (q, finally_label, NULL, tf);
1178 replace_goto_queue (tf);
1180 if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
1182 /* Reachable by goto to fallthru label only. Redirect it
1183 to the new label (already created, sadly), and do not
1184 emit the final branch out, or the fallthru label. */
1185 tf->fallthru_label = NULL;
1190 /* Place the original return/goto to the original destination
1191 immediately after the finally block. */
1192 x = tf->goto_queue[0].cont_stmt;
1193 gimple_seq_add_stmt (&tf->top_p_seq, x);
1194 maybe_record_in_goto_queue (state, x);
1197 /* A subroutine of lower_try_finally. There are multiple edges incoming
1198 and outgoing from the finally block. Implement this by duplicating the
1199 finally block for every destination. */
1202 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1205 gimple_seq new_stmt;
1209 location_t tf_loc = gimple_location (tf->try_finally_expr);
1211 finally = gimple_try_cleanup (tf->top_p);
1213 /* Notice EH_ELSE, and simplify some of the remaining code
1214 by considering FINALLY to be the normal return path only. */
1215 eh_else = get_eh_else (finally);
1217 finally = gimple_eh_else_n_body (eh_else);
1219 tf->top_p_seq = gimple_try_eval (tf->top_p);
1222 if (tf->may_fallthru)
1224 seq = lower_try_finally_dup_block (finally, state, tf_loc);
1225 lower_eh_constructs_1 (state, &seq);
1226 gimple_seq_add_seq (&new_stmt, seq);
1228 tmp = lower_try_finally_fallthru_label (tf);
1229 x = gimple_build_goto (tmp);
1230 gimple_set_location (x, tf_loc);
1231 gimple_seq_add_stmt (&new_stmt, x);
1236 /* We don't need to copy the EH path of EH_ELSE,
1237 since it is only emitted once. */
1239 seq = gimple_eh_else_e_body (eh_else);
1241 seq = lower_try_finally_dup_block (finally, state, tf_loc);
1242 lower_eh_constructs_1 (state, &seq);
1244 emit_post_landing_pad (&eh_seq, tf->region);
1245 gimple_seq_add_seq (&eh_seq, seq);
1246 emit_resx (&eh_seq, tf->region);
1251 struct goto_queue_node *q, *qe;
1252 int return_index, index;
1255 struct goto_queue_node *q;
1259 return_index = VEC_length (tree, tf->dest_array);
1260 labels = XCNEWVEC (struct labels_s, return_index + 1);
1263 qe = q + tf->goto_queue_active;
1266 index = q->index < 0 ? return_index : q->index;
1268 if (!labels[index].q)
1269 labels[index].q = q;
1272 for (index = 0; index < return_index + 1; index++)
1276 q = labels[index].q;
1280 lab = labels[index].label
1281 = create_artificial_label (tf_loc);
1283 if (index == return_index)
1284 do_return_redirection (q, lab, NULL);
1286 do_goto_redirection (q, lab, NULL, tf);
1288 x = gimple_build_label (lab);
1289 gimple_seq_add_stmt (&new_stmt, x);
1291 seq = lower_try_finally_dup_block (finally, state, q->location);
1292 lower_eh_constructs_1 (state, &seq);
1293 gimple_seq_add_seq (&new_stmt, seq);
1295 gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
1296 maybe_record_in_goto_queue (state, q->cont_stmt);
1299 for (q = tf->goto_queue; q < qe; q++)
1303 index = q->index < 0 ? return_index : q->index;
1305 if (labels[index].q == q)
1308 lab = labels[index].label;
1310 if (index == return_index)
1311 do_return_redirection (q, lab, NULL);
1313 do_goto_redirection (q, lab, NULL, tf);
1316 replace_goto_queue (tf);
1320 /* Link the new stmts in only after running replace_goto_queue,
1321 so that we do not process the same goto stmts twice. */
1322 gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
1325 /* A subroutine of lower_try_finally. There are multiple edges incoming
1326 and outgoing from the finally block. Implement this by instrumenting
1327 each incoming edge and creating a switch statement at the end of the
1328 finally block that branches to the appropriate destination. */
1331 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1333 struct goto_queue_node *q, *qe;
1334 tree finally_tmp, finally_label;
1335 int return_index, eh_index, fallthru_index;
1336 int nlabels, ndests, j, last_case_index;
1338 VEC (tree,heap) *case_label_vec;
1339 gimple_seq switch_body = NULL;
1344 struct pointer_map_t *cont_map = NULL;
1345 /* The location of the TRY_FINALLY stmt. */
1346 location_t tf_loc = gimple_location (tf->try_finally_expr);
1347 /* The location of the finally block. */
1348 location_t finally_loc;
1350 finally = gimple_try_cleanup (tf->top_p);
1351 eh_else = get_eh_else (finally);
1353 /* Mash the TRY block to the head of the chain. */
1354 tf->top_p_seq = gimple_try_eval (tf->top_p);
1356 /* The location of the finally is either the last stmt in the finally
1357 block or the location of the TRY_FINALLY itself. */
1358 x = gimple_seq_last_stmt (finally);
1359 finally_loc = x ? gimple_location (x) : tf_loc;
1361 /* Lower the finally block itself. */
1362 lower_eh_constructs_1 (state, &finally);
1364 /* Prepare for switch statement generation. */
1365 nlabels = VEC_length (tree, tf->dest_array);
1366 return_index = nlabels;
1367 eh_index = return_index + tf->may_return;
1368 fallthru_index = eh_index + (tf->may_throw && !eh_else);
1369 ndests = fallthru_index + tf->may_fallthru;
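/* For example, with hypothetical counts: two goto destinations plus a
   return, a throw and a fallthru give nlabels == 2, return_index == 2,
   eh_index == 3, fallthru_index == 4, and ndests == 5 cases in all.  */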
1371 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1372 finally_label = create_artificial_label (finally_loc);
1374 /* We use VEC_quick_push on case_label_vec throughout this function,
1375 since we know the size in advance and allocate precisely as much
1377 case_label_vec = VEC_alloc (tree, heap, ndests);
1379 last_case_index = 0;
1381 /* Begin inserting code for getting to the finally block. Things
1382 are done in this order to correspond to the sequence the code is
1385 if (tf->may_fallthru)
1387 x = gimple_build_assign (finally_tmp,
1388 build_int_cst (integer_type_node,
1390 gimple_seq_add_stmt (&tf->top_p_seq, x);
1392 tmp = build_int_cst (integer_type_node, fallthru_index);
1393 last_case = build_case_label (tmp, NULL,
1394 create_artificial_label (tf_loc));
1395 VEC_quick_push (tree, case_label_vec, last_case);
1398 x = gimple_build_label (CASE_LABEL (last_case));
1399 gimple_seq_add_stmt (&switch_body, x);
1401 tmp = lower_try_finally_fallthru_label (tf);
1402 x = gimple_build_goto (tmp);
1403 gimple_set_location (x, tf_loc);
1404 gimple_seq_add_stmt (&switch_body, x);
1407 /* For EH_ELSE, emit the exception path (plus resx) now, then
1408 subsequently we only need consider the normal path. */
1413 finally = gimple_eh_else_e_body (eh_else);
1414 lower_eh_constructs_1 (state, &finally);
1416 emit_post_landing_pad (&eh_seq, tf->region);
1417 gimple_seq_add_seq (&eh_seq, finally);
1418 emit_resx (&eh_seq, tf->region);
1421 finally = gimple_eh_else_n_body (eh_else);
1423 else if (tf->may_throw)
1425 emit_post_landing_pad (&eh_seq, tf->region);
1427 x = gimple_build_assign (finally_tmp,
1428 build_int_cst (integer_type_node, eh_index));
1429 gimple_seq_add_stmt (&eh_seq, x);
1431 x = gimple_build_goto (finally_label);
1432 gimple_set_location (x, tf_loc);
1433 gimple_seq_add_stmt (&eh_seq, x);
1435 tmp = build_int_cst (integer_type_node, eh_index);
1436 last_case = build_case_label (tmp, NULL,
1437 create_artificial_label (tf_loc));
1438 VEC_quick_push (tree, case_label_vec, last_case);
1441 x = gimple_build_label (CASE_LABEL (last_case));
1442 gimple_seq_add_stmt (&eh_seq, x);
1443 emit_resx (&eh_seq, tf->region);
1446 x = gimple_build_label (finally_label);
1447 gimple_seq_add_stmt (&tf->top_p_seq, x);
1449 gimple_seq_add_seq (&tf->top_p_seq, finally);
1451 /* Redirect each incoming goto edge. */
1453 qe = q + tf->goto_queue_active;
1454 j = last_case_index + tf->may_return;
1455 /* Prepare the assignments to finally_tmp that are executed on
1456 entry through a particular edge. */
1459 gimple_seq mod = NULL;
1461 unsigned int case_index;
1465 x = gimple_build_assign (finally_tmp,
1466 build_int_cst (integer_type_node,
1468 gimple_seq_add_stmt (&mod, x);
1469 do_return_redirection (q, finally_label, mod);
1470 switch_id = return_index;
1474 x = gimple_build_assign (finally_tmp,
1475 build_int_cst (integer_type_node, q->index));
1476 gimple_seq_add_stmt (&mod, x);
1477 do_goto_redirection (q, finally_label, mod, tf);
1478 switch_id = q->index;
1481 case_index = j + q->index;
1482 if (VEC_length (tree, case_label_vec) <= case_index
1483 || !VEC_index (tree, case_label_vec, case_index))
1487 tmp = build_int_cst (integer_type_node, switch_id);
1488 case_lab = build_case_label (tmp, NULL,
1489 create_artificial_label (tf_loc));
1490 /* We store the cont_stmt in the pointer map, so that we can recover
1491 it in the loop below. */
1493 cont_map = pointer_map_create ();
1494 slot = pointer_map_insert (cont_map, case_lab);
1495 *slot = q->cont_stmt;
1496 VEC_quick_push (tree, case_label_vec, case_lab);
1499 for (j = last_case_index; j < last_case_index + nlabels; j++)
1504 last_case = VEC_index (tree, case_label_vec, j);
1506 gcc_assert (last_case);
1507 gcc_assert (cont_map);
1509 slot = pointer_map_contains (cont_map, last_case);
1511 cont_stmt = *(gimple *) slot;
1513 x = gimple_build_label (CASE_LABEL (last_case));
1514 gimple_seq_add_stmt (&switch_body, x);
1515 gimple_seq_add_stmt (&switch_body, cont_stmt);
1516 maybe_record_in_goto_queue (state, cont_stmt);
1519 pointer_map_destroy (cont_map);
1521 replace_goto_queue (tf);
1523 /* Make sure that the last case is the default label, as one is required.
1524 Then sort the labels, which is also required in GIMPLE. */
1525 CASE_LOW (last_case) = NULL;
1526 sort_case_labels (case_label_vec);
1528 /* Build the switch statement, setting last_case to be the default
1530 switch_stmt = gimple_build_switch (finally_tmp, last_case,
1532 gimple_set_location (switch_stmt, finally_loc);
1534 /* Link SWITCH_STMT in only after running replace_goto_queue,
1535 so that we do not process the same goto stmts twice. */
1536 gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
1537 gimple_seq_add_seq (&tf->top_p_seq, switch_body);
1540 /* Decide whether or not we are going to duplicate the finally block.
1541 There are several considerations.
1543 First, if this is Java, then the finally block contains code
1544 written by the user. It has line numbers associated with it,
1545 so duplicating the block means it's difficult to set a breakpoint.
1546 Since controlling code generation via -g is verboten, we simply
1547 never duplicate code without optimization.
1549 Second, we'd like to prevent egregious code growth. One way to
1550 do this is to estimate the size of the finally block, multiply
1551 that by the number of copies we'd need to make, and compare against
1552 the estimate of the size of the switch machinery we'd have to add. */
1555 decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
1557 int f_estimate, sw_estimate;
1560 /* If there's an EH_ELSE involved, the exception path is separate
1561 and really doesn't come into play for this computation. */
1562 eh_else = get_eh_else (finally);
1565 ndests -= may_throw;
1566 finally = gimple_eh_else_n_body (eh_else);
1571 gimple_stmt_iterator gsi;
1576 for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
1578 gimple stmt = gsi_stmt (gsi);
1579 if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
1585 /* Finally estimate N times, plus N gotos. */
1586 f_estimate = count_insns_seq (finally, &eni_size_weights);
1587 f_estimate = (f_estimate + 1) * ndests;
1589 /* Switch statement (cost 10), N variable assignments, N gotos. */
1590 sw_estimate = 10 + 2 * ndests;
1592 /* Optimize for size clearly wants our best guess. */
1593 if (optimize_function_for_size_p (cfun))
1594 return f_estimate < sw_estimate;
1596 /* ??? These numbers are completely made up so far. */
1598 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1600 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
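/* A worked example with made-up numbers: a 12-insn finally block and
   ndests == 3 give f_estimate == (12 + 1) * 3 == 39 and
   sw_estimate == 10 + 2 * 3 == 16.  Optimizing for size, 39 < 16 fails
   and we build the switch; otherwise 39 < 100 holds and we duplicate
   the finally block.  */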
1603 /* REG is the enclosing region for a possible cleanup region, or the region
1604 itself. Returns TRUE if such a region would be unreachable.
1606 Cleanup regions within a must-not-throw region aren't actually reachable
1607 even if there are throwing stmts within them, because the personality
1608 routine will call terminate before unwinding. */
1611 cleanup_is_dead_in (eh_region reg)
1613 while (reg && reg->type == ERT_CLEANUP)
1615 return (reg && reg->type == ERT_MUST_NOT_THROW);
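/* E.g. for a nesting MUST_NOT_THROW -> CLEANUP -> CLEANUP, the walk
   skips both CLEANUP regions, lands on the MUST_NOT_THROW, and reports
   the cleanup dead: the personality routine calls terminate before any
   unwinding could reach it.  */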
1618 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY node
1619 to a sequence of labels and blocks, plus the exception region trees
1620 that record all the magic. This is complicated by the need to
1621 arrange for the FINALLY block to be executed on all exits. */
1624 lower_try_finally (struct leh_state *state, gimple tp)
1626 struct leh_tf_state this_tf;
1627 struct leh_state this_state;
1629 gimple_seq old_eh_seq;
1631 /* Process the try block. */
1633 memset (&this_tf, 0, sizeof (this_tf));
1634 this_tf.try_finally_expr = tp;
1636 this_tf.outer = state;
1637 if (using_eh_for_cleanups_p && !cleanup_is_dead_in (state->cur_region))
1639 this_tf.region = gen_eh_region_cleanup (state->cur_region);
1640 this_state.cur_region = this_tf.region;
1644 this_tf.region = NULL;
1645 this_state.cur_region = state->cur_region;
1648 this_state.ehp_region = state->ehp_region;
1649 this_state.tf = &this_tf;
1651 old_eh_seq = eh_seq;
1654 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1656 /* Determine whether control can fall out the bottom of the try block. */
1657 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1659 /* Determine if any exceptions are possible within the try block. */
1661 this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
1662 if (this_tf.may_throw)
1663 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1665 /* Determine how many edges (still) reach the finally block. Or rather,
1666 how many destinations are reached by the finally block. Use this to
1667 determine how we process the finally block itself. */
1669 ndests = VEC_length (tree, this_tf.dest_array);
1670 ndests += this_tf.may_fallthru;
1671 ndests += this_tf.may_return;
1672 ndests += this_tf.may_throw;
1674 /* If the FINALLY block is not reachable, dike it out. */
1677 gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
1678 gimple_try_set_cleanup (tp, NULL);
1680 /* If the finally block doesn't fall through, then any destination
1681 we might try to impose there isn't reached either. There may be
1682 some minor amount of cleanup and redirection still needed. */
1683 else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
1684 lower_try_finally_nofallthru (state, &this_tf);
1686 /* We can easily special-case redirection to a single destination. */
1687 else if (ndests == 1)
1688 lower_try_finally_onedest (state, &this_tf);
1689 else if (decide_copy_try_finally (ndests, this_tf.may_throw,
1690 gimple_try_cleanup (tp)))
1691 lower_try_finally_copy (state, &this_tf);
1693 lower_try_finally_switch (state, &this_tf);
1695 /* If someone requested we add a label at the end of the transformed
1697 if (this_tf.fallthru_label)
1699 /* This must be reached only if ndests == 0. */
1700 gimple x = gimple_build_label (this_tf.fallthru_label);
1701 gimple_seq_add_stmt (&this_tf.top_p_seq, x);
1704 VEC_free (tree, heap, this_tf.dest_array);
1705 free (this_tf.goto_queue);
1706 if (this_tf.goto_queue_map)
1707 pointer_map_destroy (this_tf.goto_queue_map);
1709 /* If there was an old (aka outer) eh_seq, append the current eh_seq.
1710 If there was no old eh_seq, then the append is trivially already done. */
1714 eh_seq = old_eh_seq;
1717 gimple_seq new_eh_seq = eh_seq;
1718 eh_seq = old_eh_seq;
1719 gimple_seq_add_seq(&eh_seq, new_eh_seq);
1723 return this_tf.top_p_seq;
1726 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a
1727 list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
1728 exception region trees that record all the magic. */
1731 lower_catch (struct leh_state *state, gimple tp)
1733 eh_region try_region = NULL;
1734 struct leh_state this_state = *state;
1735 gimple_stmt_iterator gsi;
1737 gimple_seq new_seq, cleanup;
1739 location_t try_catch_loc = gimple_location (tp);
1741 if (flag_exceptions)
1743 try_region = gen_eh_region_try (state->cur_region);
1744 this_state.cur_region = try_region;
1747 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1749 if (!eh_region_may_contain_throw (try_region))
1750 return gimple_try_eval (tp);
1753 emit_eh_dispatch (&new_seq, try_region);
1754 emit_resx (&new_seq, try_region);
1756 this_state.cur_region = state->cur_region;
1757 this_state.ehp_region = try_region;
1760 cleanup = gimple_try_cleanup (tp);
1761 for (gsi = gsi_start (cleanup);
1769 gcatch = gsi_stmt (gsi);
1770 c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));
1772 handler = gimple_catch_handler (gcatch);
1773 lower_eh_constructs_1 (&this_state, &handler);
1775 c->label = create_artificial_label (UNKNOWN_LOCATION);
1776 x = gimple_build_label (c->label);
1777 gimple_seq_add_stmt (&new_seq, x);
1779 gimple_seq_add_seq (&new_seq, handler);
1781 if (gimple_seq_may_fallthru (new_seq))
1784 out_label = create_artificial_label (try_catch_loc);
1786 x = gimple_build_goto (out_label);
1787 gimple_seq_add_stmt (&new_seq, x);
1793 gimple_try_set_cleanup (tp, new_seq);
1795 return frob_into_branch_around (tp, try_region, out_label);
1798 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with a
1799 GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
1800 region trees that record all the magic. */
1803 lower_eh_filter (struct leh_state *state, gimple tp)
1805 struct leh_state this_state = *state;
1806 eh_region this_region = NULL;
1810 inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1812 if (flag_exceptions)
1814 this_region = gen_eh_region_allowed (state->cur_region,
1815 gimple_eh_filter_types (inner));
1816 this_state.cur_region = this_region;
1819 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1821 if (!eh_region_may_contain_throw (this_region))
1822 return gimple_try_eval (tp);
1825 this_state.cur_region = state->cur_region;
1826 this_state.ehp_region = this_region;
1828 emit_eh_dispatch (&new_seq, this_region);
1829 emit_resx (&new_seq, this_region);
1831 this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
1832 x = gimple_build_label (this_region->u.allowed.label);
1833 gimple_seq_add_stmt (&new_seq, x);
1835 lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
1836 gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));
1838 gimple_try_set_cleanup (tp, new_seq);
1840 return frob_into_branch_around (tp, this_region, NULL);
1843 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY with
1844 a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
1845 plus the exception region trees that record all the magic. */
1848 lower_eh_must_not_throw (struct leh_state *state, gimple tp)
1850 struct leh_state this_state = *state;
1852 if (flag_exceptions)
1854 gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
1855 eh_region this_region;
1857 this_region = gen_eh_region_must_not_throw (state->cur_region);
1858 this_region->u.must_not_throw.failure_decl
1859 = gimple_eh_must_not_throw_fndecl (inner);
1860 this_region->u.must_not_throw.failure_loc = gimple_location (tp);
1862 /* In order to get mangling applied to this decl, we must mark it
1863 used now. Otherwise, pass_ipa_free_lang_data won't think it
1865 TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
1867 this_state.cur_region = this_region;
1870 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1872 return gimple_try_eval (tp);
1875 /* Implement a cleanup expression. This is similar to try-finally,
1876 except that we only execute the cleanup block for exception edges. */
1879 lower_cleanup (struct leh_state *state, gimple tp)
1881 struct leh_state this_state = *state;
1882 eh_region this_region = NULL;
1883 struct leh_tf_state fake_tf;
1885 bool cleanup_dead = cleanup_is_dead_in (state->cur_region);
1887 if (flag_exceptions && !cleanup_dead)
1889 this_region = gen_eh_region_cleanup (state->cur_region);
1890 this_state.cur_region = this_region;
1893 lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
1895 if (cleanup_dead || !eh_region_may_contain_throw (this_region))
1896 return gimple_try_eval (tp);
1898 /* Build enough of a try-finally state so that we can reuse
1899 honor_protect_cleanup_actions. */
1900 memset (&fake_tf, 0, sizeof (fake_tf));
1901 fake_tf.top_p = fake_tf.try_finally_expr = tp;
1902 fake_tf.outer = state;
1903 fake_tf.region = this_region;
1904 fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
1905 fake_tf.may_throw = true;
1907 honor_protect_cleanup_actions (state, NULL, &fake_tf);
1909 if (fake_tf.may_throw)
1911 /* In this case honor_protect_cleanup_actions had nothing to do,
1912 and we should process this normally. */
1913 lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
1914 result = frob_into_branch_around (tp, this_region,
1915 fake_tf.fallthru_label);
1919 /* In this case honor_protect_cleanup_actions did nearly all of
1920 the work. All we have left is to append the fallthru_label. */
1922 result = gimple_try_eval (tp);
1923 if (fake_tf.fallthru_label)
1925 gimple x = gimple_build_label (fake_tf.fallthru_label);
1926 gimple_seq_add_stmt (&result, x);
1932 /* Main loop for lowering eh constructs. Also moves gsi to the next
1936 lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
1940 gimple stmt = gsi_stmt (*gsi);
1942 switch (gimple_code (stmt))
1946 tree fndecl = gimple_call_fndecl (stmt);
1949 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1950 switch (DECL_FUNCTION_CODE (fndecl))
1952 case BUILT_IN_EH_POINTER:
1953 /* The front end may have generated a call to
1954 __builtin_eh_pointer (0) within a catch region. Replace
1955 this zero argument with the current catch region number. */
1956 if (state->ehp_region)
1958 tree nr = build_int_cst (integer_type_node,
1959 state->ehp_region->index);
1960 gimple_call_set_arg (stmt, 0, nr);
1964 /* The user has done something silly. Remove it. */
1965 rhs = null_pointer_node;
1970 case BUILT_IN_EH_FILTER:
1971 /* ??? This should never appear, but since it's a builtin it
1972 is accessible to abuse by users. Just remove it and
1973 replace the use with the arbitrary value zero. */
1974 rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
1976 lhs = gimple_call_lhs (stmt);
1977 x = gimple_build_assign (lhs, rhs);
1978 gsi_insert_before (gsi, x, GSI_SAME_STMT);
1981 case BUILT_IN_EH_COPY_VALUES:
1982 /* Likewise this should not appear. Remove it. */
1983 gsi_remove (gsi, true);
1993 /* If the stmt can throw use a new temporary for the assignment
1994 to a LHS. This makes sure the old value of the LHS is
1995 available on the EH edge. Only do so for statements that
1996 potentially fall through (no noreturn calls e.g.), otherwise
1997 this new assignment might create fake fallthru regions. */
1998 if (stmt_could_throw_p (stmt)
1999 && gimple_has_lhs (stmt)
2000 && gimple_stmt_may_fallthru (stmt)
2001 && !tree_could_throw_p (gimple_get_lhs (stmt))
2002 && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
2004 tree lhs = gimple_get_lhs (stmt);
2005 tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
2006 gimple s = gimple_build_assign (lhs, tmp);
2007 gimple_set_location (s, gimple_location (stmt));
2008 gimple_set_block (s, gimple_block (stmt));
2009 gimple_set_lhs (stmt, tmp);
2010 if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
2011 || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
2012 DECL_GIMPLE_REG_P (tmp) = 1;
2013 gsi_insert_after (gsi, s, GSI_SAME_STMT);
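/* In source terms, a hypothetical "x = foo ();" where foo may throw
   becomes

	tmp = foo ();
	x = tmp;

   so the EH edge leaving the call still sees the old value of x.  */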
2015 /* Look for things that can throw exceptions, and record them. */
2016 if (state->cur_region && stmt_could_throw_p (stmt))
2018 record_stmt_eh_region (state->cur_region, stmt);
2019 note_eh_region_may_contain_throw (state->cur_region);
2026 maybe_record_in_goto_queue (state, stmt);
2030 verify_norecord_switch_expr (state, stmt);
2034 if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
2035 replace = lower_try_finally (state, stmt);
2038 x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
2041 replace = gimple_try_eval (stmt);
2042 lower_eh_constructs_1 (state, &replace);
2045 switch (gimple_code (x))
2048 replace = lower_catch (state, stmt);
2050 case GIMPLE_EH_FILTER:
2051 replace = lower_eh_filter (state, stmt);
2053 case GIMPLE_EH_MUST_NOT_THROW:
2054 replace = lower_eh_must_not_throw (state, stmt);
2056 case GIMPLE_EH_ELSE:
2057 /* This code is only valid with GIMPLE_TRY_FINALLY. */
2060 replace = lower_cleanup (state, stmt);
2065 /* Remove the old stmt and insert the transformed sequence
2067 gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
2068 gsi_remove (gsi, true);
2070 /* Return, since we don't want gsi_next (). */
2073 case GIMPLE_EH_ELSE:
2074 /* We should be eliminating this in lower_try_finally et al. */
2078 /* A type, a decl, or some kind of statement that we're not
2079 interested in. Don't walk them. */
2086 /* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2. */
2089 lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
2091 gimple_stmt_iterator gsi;
2092 for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
2093 lower_eh_constructs_2 (state, &gsi);
2097 lower_eh_constructs (void)
2099 struct leh_state null_state;
2102 bodyp = gimple_body (current_function_decl);
2106 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
2107 eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
2108 memset (&null_state, 0, sizeof (null_state));
2110 collect_finally_tree_1 (bodyp, NULL);
2111 lower_eh_constructs_1 (&null_state, &bodyp);
2112 gimple_set_body (current_function_decl, bodyp);
2114 /* We assume there's a return statement, or something, at the end of
2115 the function, and thus plopping the EH sequence afterward won't
2117 gcc_assert (!gimple_seq_may_fallthru (bodyp));
2118 gimple_seq_add_seq (&bodyp, eh_seq);
2120 /* We assume that since BODYP already existed, adding EH_SEQ to it
2121 didn't change its value, and we don't have to re-set the function. */
2122 gcc_assert (bodyp == gimple_body (current_function_decl));
2124 htab_delete (finally_tree);
2125 BITMAP_FREE (eh_region_may_contain_throw_map);
2128 /* If this function needs a language specific EH personality routine
2129 and the frontend didn't already set one do so now. */
2130 if (function_needs_eh_personality (cfun) == eh_personality_lang
2131 && !DECL_FUNCTION_PERSONALITY (current_function_decl))
2132 DECL_FUNCTION_PERSONALITY (current_function_decl)
2133 = lang_hooks.eh_personality ();
2138 struct gimple_opt_pass pass_lower_eh =
2144 lower_eh_constructs, /* execute */
2147 0, /* static_pass_number */
2148 TV_TREE_EH, /* tv_id */
2149 PROP_gimple_lcf, /* properties_required */
2150 PROP_gimple_leh, /* properties_provided */
2151 0, /* properties_destroyed */
2152 0, /* todo_flags_start */
2153 0 /* todo_flags_finish */
2157 /* Create the multiple edges from an EH_DISPATCH statement to all of
2158 the possible handlers for its EH region. Return true if there's
2159 no fallthru edge; false if there is. */
2162 make_eh_dispatch_edges (gimple stmt)
2166 basic_block src, dst;
2168 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2169 src = gimple_bb (stmt);
2174 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2176 dst = label_to_block (c->label);
2177 make_edge (src, dst, 0);
2179 /* A catch-all handler doesn't have a fallthru. */
2180 if (c->type_list == NULL)
2185 case ERT_ALLOWED_EXCEPTIONS:
2186 dst = label_to_block (r->u.allowed.label);
2187 make_edge (src, dst, 0);
2197 /* Create the single EH edge from STMT to its nearest landing pad,
2198 if there is such a landing pad within the current function. */
2201 make_eh_edges (gimple stmt)
2203 basic_block src, dst;
2207 lp_nr = lookup_stmt_eh_lp (stmt);
2211 lp = get_eh_landing_pad_from_number (lp_nr);
2212 gcc_assert (lp != NULL);
2214 src = gimple_bb (stmt);
2215 dst = label_to_block (lp->post_landing_pad);
2216 make_edge (src, dst, EDGE_EH);
2219 /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2220 do not actually perform the final edge redirection.
2222 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2223 we intend to change the destination EH region as well; this means
2224 EH_LANDING_PAD_NR must already be set on the destination block label.
2225 If false, we're being called from generic cfg manipulation code and we
2226 should preserve our place within the region tree. */
2229 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
2231 eh_landing_pad old_lp, new_lp;
2234 int old_lp_nr, new_lp_nr;
2235 tree old_label, new_label;
2239 old_bb = edge_in->dest;
2240 old_label = gimple_block_label (old_bb);
2241 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2242 gcc_assert (old_lp_nr > 0);
2243 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2245 throw_stmt = last_stmt (edge_in->src);
2246 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2248 new_label = gimple_block_label (new_bb);
2250 /* Look for an existing region that might be using NEW_BB already. */
2251 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2254 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2255 gcc_assert (new_lp);
2257 /* Unless CHANGE_REGION is true, the new and old landing pad
2258 had better be associated with the same EH region. */
2259 gcc_assert (change_region || new_lp->region == old_lp->region);
2264 gcc_assert (!change_region);
2267 /* Notice when we redirect the last EH edge away from OLD_BB. */
2268 FOR_EACH_EDGE (e, ei, old_bb->preds)
2269 if (e != edge_in && (e->flags & EDGE_EH))
2274 /* NEW_LP already exists. If there are still edges into OLD_LP,
2275 there's nothing to do with the EH tree. If there are no more
2276 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2277 If CHANGE_REGION is true, then our caller is expecting to remove
2278 the landing pad. */
2279 if (e == NULL && !change_region)
2280 remove_eh_landing_pad (old_lp);
2284 /* No correct landing pad exists. If there are no more edges
2285 into OLD_LP, then we can simply re-use the existing landing pad.
2286 Otherwise, we have to create a new landing pad. */
2289 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2293 new_lp = gen_eh_landing_pad (old_lp->region);
2294 new_lp->post_landing_pad = new_label;
2295 EH_LANDING_PAD_NR (new_label) = new_lp->index;
2298 /* Maybe move the throwing statement to the new region. */
2299 if (old_lp != new_lp)
2301 remove_stmt_from_eh_lp (throw_stmt);
2302 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2306 /* Redirect EH edge E to NEW_BB. */
2309 redirect_eh_edge (edge edge_in, basic_block new_bb)
2311 redirect_eh_edge_1 (edge_in, new_bb, false);
2312 return ssa_redirect_edge (edge_in, new_bb);
2315 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2316 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2317 The actual edge update will happen in the caller. */
2320 redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
2322 tree new_lab = gimple_block_label (new_bb);
2323 bool any_changed = false;
2328 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2332 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2334 old_bb = label_to_block (c->label);
2335 if (old_bb == e->dest)
2343 case ERT_ALLOWED_EXCEPTIONS:
2344 old_bb = label_to_block (r->u.allowed.label);
2345 gcc_assert (old_bb == e->dest);
2346 r->u.allowed.label = new_lab;
2354 gcc_assert (any_changed);
2357 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2360 operation_could_trap_helper_p (enum tree_code op,
2371 case TRUNC_DIV_EXPR:
2373 case FLOOR_DIV_EXPR:
2374 case ROUND_DIV_EXPR:
2375 case EXACT_DIV_EXPR:
2377 case FLOOR_MOD_EXPR:
2378 case ROUND_MOD_EXPR:
2379 case TRUNC_MOD_EXPR:
2381 if (honor_snans || honor_trapv)
2384 return flag_trapping_math;
2385 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2394 /* Some floating point comparisons may trap. */
2399 case UNORDERED_EXPR:
2409 case FIX_TRUNC_EXPR:
2410 /* Conversion of floating point might trap. */
2416 /* These operations don't trap with floating point. */
2424 /* Any floating arithmetic may trap. */
2425 if (fp_operation && flag_trapping_math)
2433 /* Constructing an object cannot trap. */
2437 /* Any floating arithmetic may trap. */
2438 if (fp_operation && flag_trapping_math)
2446 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2447 on floating-point values. HONOR_TRAPV is true if OP is applied on integer
2448 type operands that may trap. If OP is a division operator, DIVISOR contains
2449 the value of the divisor. */
2452 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2455 bool honor_nans = (fp_operation && flag_trapping_math
2456 && !flag_finite_math_only);
2457 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2460 if (TREE_CODE_CLASS (op) != tcc_comparison
2461 && TREE_CODE_CLASS (op) != tcc_unary
2462 && TREE_CODE_CLASS (op) != tcc_binary)
2465 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2466 honor_nans, honor_snans, divisor,
2467 &handled);
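/* Two hedged examples of the above, assuming default flags:

     operation_could_trap_p (TRUNC_DIV_EXPR, false, false, divisor)

   returns true unless DIVISOR is known to be a nonzero constant, while

     operation_could_trap_p (PLUS_EXPR, true, false, NULL_TREE)

   returns true exactly when -ftrapping-math is in effect.  */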
2470 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
2471 location or floating point arithmetic. Cf. the RTL version, may_trap_p.
2472 This routine expects only GIMPLE lhs or rhs input. */
2475 tree_could_trap_p (tree expr)
2477 enum tree_code code;
2478 bool fp_operation = false;
2479 bool honor_trapv = false;
2480 tree t, base, div = NULL_TREE;
2485 code = TREE_CODE (expr);
2486 t = TREE_TYPE (expr);
2490 if (COMPARISON_CLASS_P (expr))
2491 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2493 fp_operation = FLOAT_TYPE_P (t);
2494 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2497 if (TREE_CODE_CLASS (code) == tcc_binary)
2498 div = TREE_OPERAND (expr, 1);
2499 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2505 case TARGET_MEM_REF:
2506 if (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
2507 && !TMR_INDEX (expr) && !TMR_INDEX2 (expr))
2509 return !TREE_THIS_NOTRAP (expr);
2515 case VIEW_CONVERT_EXPR:
2516 case WITH_SIZE_EXPR:
2517 expr = TREE_OPERAND (expr, 0);
2518 code = TREE_CODE (expr);
2521 case ARRAY_RANGE_REF:
2522 base = TREE_OPERAND (expr, 0);
2523 if (tree_could_trap_p (base))
2525 if (TREE_THIS_NOTRAP (expr))
2527 return !range_in_array_bounds_p (expr);
2530 base = TREE_OPERAND (expr, 0);
2531 if (tree_could_trap_p (base))
2533 if (TREE_THIS_NOTRAP (expr))
2535 return !in_array_bounds_p (expr);
2538 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2542 return !TREE_THIS_NOTRAP (expr);
2545 return TREE_THIS_VOLATILE (expr);
2548 t = get_callee_fndecl (expr);
2549 /* Assume that calls to weak functions may trap. */
2550 if (!t || !DECL_P (t))
2553 return tree_could_trap_p (t);
2557 /* Assume that accesses to weak functions may trap, unless we know
2558 they are certainly defined in the current TU or in some other
2559 LTO partition. */
2560 if (DECL_WEAK (expr))
2562 struct cgraph_node *node;
2563 if (!DECL_EXTERNAL (expr))
2565 node = cgraph_function_node (cgraph_get_node (expr), NULL);
2566 if (node && node->symbol.in_other_partition)
2573 /* Assume that accesses to weak vars may trap, unless we know
2574 they are certainly defined in the current TU or in some other
2575 LTO partition. */
2576 if (DECL_WEAK (expr))
2578 struct varpool_node *node;
2579 if (!DECL_EXTERNAL (expr))
2581 node = varpool_variable_node (varpool_get_node (expr), NULL);
2582 if (node && node->symbol.in_other_partition)
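/* Some illustrative source forms for the above (a sketch, not an
   exhaustive list):

     *p          could trap: the pointer may be invalid
     a[i]        could trap: the index may be out of bounds
     x / y       could trap: an integer divisor may be zero
     x + y       could trap on FP types with -ftrapping-math
     &a[i]       cannot trap: no memory is accessed

   with TREE_THIS_NOTRAP overriding the conservative answer for
   references known to be safe.  */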
2594 /* Helper for stmt_could_throw_p. Return true if STMT (assumed to be
2595 an assignment or a conditional) may throw. */
2598 stmt_could_throw_1_p (gimple stmt)
2600 enum tree_code code = gimple_expr_code (stmt);
2601 bool honor_nans = false;
2602 bool honor_snans = false;
2603 bool fp_operation = false;
2604 bool honor_trapv = false;
2609 if (TREE_CODE_CLASS (code) == tcc_comparison
2610 || TREE_CODE_CLASS (code) == tcc_unary
2611 || TREE_CODE_CLASS (code) == tcc_binary)
2613 if (is_gimple_assign (stmt)
2614 && TREE_CODE_CLASS (code) == tcc_comparison)
2615 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
2616 else if (gimple_code (stmt) == GIMPLE_COND)
2617 t = TREE_TYPE (gimple_cond_lhs (stmt));
2619 t = gimple_expr_type (stmt);
2620 fp_operation = FLOAT_TYPE_P (t);
2623 honor_nans = flag_trapping_math && !flag_finite_math_only;
2624 honor_snans = flag_signaling_nans != 0;
2626 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2630 /* Check if the main expression may trap. */
2631 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2632 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2633 honor_nans, honor_snans, t,
2638 /* If the expression does not trap, see if any of the individual operands may
2639 trap. */
2640 for (i = 0; i < gimple_num_ops (stmt); i++)
2641 if (tree_could_trap_p (gimple_op (stmt, i)))
2648 /* Return true if statement STMT could throw an exception. */
2651 stmt_could_throw_p (gimple stmt)
2653 if (!flag_exceptions)
2656 /* The only statements that can throw an exception are assignments,
2657 conditionals, calls, resx, and asms. */
2658 switch (gimple_code (stmt))
2664 return !gimple_call_nothrow_p (stmt);
2668 if (!cfun->can_throw_non_call_exceptions)
2670 return stmt_could_throw_1_p (stmt);
2673 if (!cfun->can_throw_non_call_exceptions)
2675 return gimple_asm_volatile_p (stmt);
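/* For instance (a hedged sketch): with -fexceptions alone only calls
   and resx statements can throw, e.g.

     foo ();        may throw, unless the call is marked nothrow
     tmp_1 = *p_2;  cannot throw

   whereas with -fnon-call-exceptions the trapping load above may throw
   as well, which is why assignments and conditionals defer to
   stmt_could_throw_1_p.  */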
2683 /* Return true if expression T could throw an exception. */
2686 tree_could_throw_p (tree t)
2688 if (!flag_exceptions)
2690 if (TREE_CODE (t) == MODIFY_EXPR)
2692 if (cfun->can_throw_non_call_exceptions
2693 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2695 t = TREE_OPERAND (t, 1);
2698 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2699 t = TREE_OPERAND (t, 0);
2700 if (TREE_CODE (t) == CALL_EXPR)
2701 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2702 if (cfun->can_throw_non_call_exceptions)
2703 return tree_could_trap_p (t);
2707 /* Return true if STMT can throw an exception that is not caught within
2708 the current function (CFUN). */
2711 stmt_can_throw_external (gimple stmt)
2715 if (!stmt_could_throw_p (stmt))
2718 lp_nr = lookup_stmt_eh_lp (stmt);
2722 /* Return true if STMT can throw an exception that is caught within
2723 the current function (CFUN). */
2726 stmt_can_throw_internal (gimple stmt)
2730 if (!stmt_could_throw_p (stmt))
2733 lp_nr = lookup_stmt_eh_lp (stmt);
2737 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2738 remove any entry it might have from the EH table. Return true if
2739 any change was made. */
2742 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2744 if (stmt_could_throw_p (stmt))
2746 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2749 /* Likewise, but always use the current function. */
2752 maybe_clean_eh_stmt (gimple stmt)
2754 return maybe_clean_eh_stmt_fn (cfun, stmt);
2757 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2758 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2759 in the table if it should be in there. Return TRUE if a replacement was
2760 done that may require an EH edge purge. */
2763 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2765 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2769 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2771 if (new_stmt == old_stmt && new_stmt_could_throw)
2774 remove_stmt_from_eh_lp (old_stmt);
2775 if (new_stmt_could_throw)
2777 add_stmt_to_eh_lp (new_stmt, lp_nr);
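/* A typical caller (an illustrative sketch, not code from this file)
   pairs this with gimple_purge_dead_eh_edges after substituting a
   statement in place:

     gsi_replace (&gsi, new_stmt, true);
     if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
       gimple_purge_dead_eh_edges (gimple_bb (new_stmt));

   so that EH edges out of the block are removed once the new statement
   can no longer throw.  */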
2787 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2788 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2789 operand is the return value of duplicate_eh_regions. */
2792 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2793 struct function *old_fun, gimple old_stmt,
2794 struct pointer_map_t *map, int default_lp_nr)
2796 int old_lp_nr, new_lp_nr;
2799 if (!stmt_could_throw_p (new_stmt))
2802 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2805 if (default_lp_nr == 0)
2807 new_lp_nr = default_lp_nr;
2809 else if (old_lp_nr > 0)
2811 eh_landing_pad old_lp, new_lp;
2813 old_lp = VEC_index (eh_landing_pad, old_fun->eh->lp_array, old_lp_nr);
2814 slot = pointer_map_contains (map, old_lp);
2815 new_lp = (eh_landing_pad) *slot;
2816 new_lp_nr = new_lp->index;
2820 eh_region old_r, new_r;
2822 old_r = VEC_index (eh_region, old_fun->eh->region_array, -old_lp_nr);
2823 slot = pointer_map_contains (map, old_r);
2824 new_r = (eh_region) *slot;
2825 new_lp_nr = -new_r->index;
2828 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2832 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2833 and thus no remapping is required. */
2836 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2840 if (!stmt_could_throw_p (new_stmt))
2843 lp_nr = lookup_stmt_eh_lp (old_stmt);
2847 add_stmt_to_eh_lp (new_stmt, lp_nr);
2851 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2852 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2853 this only handles handlers consisting of a single call, as that's the
2854 important case for C++: a destructor call for a particular object showing
2855 up in multiple handlers. */
2858 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2860 gimple_stmt_iterator gsi;
2864 gsi = gsi_start (oneh);
2865 if (!gsi_one_before_end_p (gsi))
2867 ones = gsi_stmt (gsi);
2869 gsi = gsi_start (twoh);
2870 if (!gsi_one_before_end_p (gsi))
2872 twos = gsi_stmt (gsi);
2874 if (!is_gimple_call (ones)
2875 || !is_gimple_call (twos)
2876 || gimple_call_lhs (ones)
2877 || gimple_call_lhs (twos)
2878 || gimple_call_chain (ones)
2879 || gimple_call_chain (twos)
2880 || !gimple_call_same_target_p (ones, twos)
2881 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
2884 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
2885 if (!operand_equal_p (gimple_call_arg (ones, ai),
2886 gimple_call_arg (twos, ai), 0))
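/* For example (hypothetical C++): two cleanup sequences that each
   consist of the single destructor call

     A::~A (&a);

   compare equal here; this is the pattern optimize_double_finally
   below relies on when merging nested try constructs.  */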
2892 /* Optimize
2893 try { A() } finally { try { ~B() } catch { ~A() } }
2894 try { ... } finally { ~A() }
2895 to
2896 try { A() } catch { ~B() }
2897 try { ~B() ... } finally { ~A() }
2899 This occurs frequently in C++, where A is a local variable and B is a
2900 temporary used in the initializer for A. */
2903 optimize_double_finally (gimple one, gimple two)
2906 gimple_stmt_iterator gsi;
2909 cleanup = gimple_try_cleanup (one);
2910 gsi = gsi_start (cleanup);
2911 if (!gsi_one_before_end_p (gsi))
2914 oneh = gsi_stmt (gsi);
2915 if (gimple_code (oneh) != GIMPLE_TRY
2916 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
2919 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
2921 gimple_seq seq = gimple_try_eval (oneh);
2923 gimple_try_set_cleanup (one, seq);
2924 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
2925 seq = copy_gimple_seq_and_replace_locals (seq);
2926 gimple_seq_add_seq (&seq, gimple_try_eval (two));
2927 gimple_try_set_eval (two, seq);
2931 /* Perform EH refactoring optimizations that are simpler to do when code
2932 flow has been lowered but EH structures haven't. */
2935 refactor_eh_r (gimple_seq seq)
2937 gimple_stmt_iterator gsi;
2942 gsi = gsi_start (seq);
2946 if (gsi_end_p (gsi))
2949 two = gsi_stmt (gsi);
2952 && gimple_code (one) == GIMPLE_TRY
2953 && gimple_code (two) == GIMPLE_TRY
2954 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
2955 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
2956 optimize_double_finally (one, two);
2958 switch (gimple_code (one))
2961 refactor_eh_r (gimple_try_eval (one));
2962 refactor_eh_r (gimple_try_cleanup (one));
2965 refactor_eh_r (gimple_catch_handler (one));
2967 case GIMPLE_EH_FILTER:
2968 refactor_eh_r (gimple_eh_filter_failure (one));
2970 case GIMPLE_EH_ELSE:
2971 refactor_eh_r (gimple_eh_else_n_body (one));
2972 refactor_eh_r (gimple_eh_else_e_body (one));
2987 refactor_eh_r (gimple_body (current_function_decl));
2992 gate_refactor_eh (void)
2994 return flag_exceptions != 0;
2997 struct gimple_opt_pass pass_refactor_eh =
3002 gate_refactor_eh, /* gate */
3003 refactor_eh, /* execute */
3006 0, /* static_pass_number */
3007 TV_TREE_EH, /* tv_id */
3008 PROP_gimple_lcf, /* properties_required */
3009 0, /* properties_provided */
3010 0, /* properties_destroyed */
3011 0, /* todo_flags_start */
3012 0 /* todo_flags_finish */
3016 /* At the end of gimple optimization, we can lower RESX. */
3019 lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map)
3022 eh_region src_r, dst_r;
3023 gimple_stmt_iterator gsi;
3028 lp_nr = lookup_stmt_eh_lp (stmt);
3030 dst_r = get_eh_region_from_lp_number (lp_nr);
3034 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
3035 gsi = gsi_last_bb (bb);
3039 /* We can wind up with no source region when pass_cleanup_eh shows
3040 that there are no entries into an eh region and deletes it, but
3041 then the block that contains the resx isn't removed. This can
3042 happen without optimization when the switch statement created by
3043 lower_try_finally_switch isn't simplified to remove the eh case.
3045 Resolve this by expanding the resx node to an abort. */
3047 fn = builtin_decl_implicit (BUILT_IN_TRAP);
3048 x = gimple_build_call (fn, 0);
3049 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3051 while (EDGE_COUNT (bb->succs) > 0)
3052 remove_edge (EDGE_SUCC (bb, 0));
3056 /* When we have a destination region, we resolve this by copying
3057 the exception pointer and filter values into place, and changing the edge
3058 to immediately after the landing pad. */
3067 /* We are resuming into a MUST_NOT_THROW region. Expand a call to
3068 the failure decl into a new block, if needed. */
3069 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
3071 slot = pointer_map_contains (mnt_map, dst_r);
3074 gimple_stmt_iterator gsi2;
3076 new_bb = create_empty_bb (bb);
3078 add_bb_to_loop (new_bb, bb->loop_father);
3079 lab = gimple_block_label (new_bb);
3080 gsi2 = gsi_start_bb (new_bb);
3082 fn = dst_r->u.must_not_throw.failure_decl;
3083 x = gimple_build_call (fn, 0);
3084 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3085 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
3087 slot = pointer_map_insert (mnt_map, dst_r);
3093 new_bb = label_to_block (lab);
3096 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3097 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
3098 e->count = bb->count;
3099 e->probability = REG_BR_PROB_BASE;
3104 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
3106 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
3107 src_nr = build_int_cst (integer_type_node, src_r->index);
3108 x = gimple_build_call (fn, 2, dst_nr, src_nr);
3109 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3111 /* Update the flags for the outgoing edge. */
3112 e = single_succ_edge (bb);
3113 gcc_assert (e->flags & EDGE_EH);
3114 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3116 /* If there are no more EH users of the landing pad, delete it. */
3117 FOR_EACH_EDGE (e, ei, e->dest->preds)
3118 if (e->flags & EDGE_EH)
3122 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3123 remove_eh_landing_pad (lp);
3133 /* When we don't have a destination region, this exception escapes
3134 up the call chain. We resolve this by generating a call to the
3135 _Unwind_Resume library function. */
3137 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
3138 with no arguments for C++ and Java. Check for that. */
3139 if (src_r->use_cxa_end_cleanup)
3141 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
3142 x = gimple_build_call (fn, 0);
3143 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3147 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
3148 src_nr = build_int_cst (integer_type_node, src_r->index);
3149 x = gimple_build_call (fn, 1, src_nr);
3150 var = create_tmp_var (ptr_type_node, NULL);
3151 var = make_ssa_name (var, x);
3152 gimple_call_set_lhs (x, var);
3153 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3155 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
3156 x = gimple_build_call (fn, 1, var);
3157 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3160 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3163 gsi_remove (&gsi, true);
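/* A sketch of the generated forms (illustrative, not literal dump
   output): a resx in region R that escapes the function becomes

     tmp_1 = __builtin_eh_pointer (R);
     _Unwind_Resume (tmp_1);

   (or a call to __cxa_end_cleanup on ARM EABI targets), while a resx
   with a local destination region D becomes

     __builtin_eh_copy_values (D, R);

   followed by a fallthru edge to just past D's landing pad.  */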
3169 execute_lower_resx (void)
3172 struct pointer_map_t *mnt_map;
3173 bool dominance_invalidated = false;
3174 bool any_rewritten = false;
3176 mnt_map = pointer_map_create ();
3180 gimple last = last_stmt (bb);
3181 if (last && is_gimple_resx (last))
3183 dominance_invalidated |= lower_resx (bb, last, mnt_map);
3184 any_rewritten = true;
3188 pointer_map_destroy (mnt_map);
3190 if (dominance_invalidated)
3192 free_dominance_info (CDI_DOMINATORS);
3193 free_dominance_info (CDI_POST_DOMINATORS);
3196 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3200 gate_lower_resx (void)
3202 return flag_exceptions != 0;
3205 struct gimple_opt_pass pass_lower_resx =
3210 gate_lower_resx, /* gate */
3211 execute_lower_resx, /* execute */
3214 0, /* static_pass_number */
3215 TV_TREE_EH, /* tv_id */
3216 PROP_gimple_lcf, /* properties_required */
3217 0, /* properties_provided */
3218 0, /* properties_destroyed */
3219 0, /* todo_flags_start */
3220 TODO_verify_flow /* todo_flags_finish */
3224 /* Try to optimize var = {v} {CLOBBER} stmts followed just by
3225 external throw. */
3228 optimize_clobbers (basic_block bb)
3230 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3231 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3233 gimple stmt = gsi_stmt (gsi);
3234 if (is_gimple_debug (stmt))
3236 if (!gimple_clobber_p (stmt)
3237 || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
3239 unlink_stmt_vdef (stmt);
3240 gsi_remove (&gsi, true);
3241 release_defs (stmt);
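/* For example (a sketch): in a block that ends in an externally
   throwing resx, a trailing sequence such as

     var = {v} {CLOBBER};
     resx 1;

   only marks the end of VAR's lifetime; since control is about to
   leave the function anyway, the clobber can be deleted outright.  */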
3245 /* Try to sink var = {v} {CLOBBER} stmts followed just by
3246 internal throw to successor BB. */
3249 sink_clobbers (basic_block bb)
3253 gimple_stmt_iterator gsi, dgsi;
3255 bool any_clobbers = false;
3257 /* Only optimize if BB has a single EH successor and
3258 all predecessor edges are EH too. */
3259 if (!single_succ_p (bb)
3260 || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3263 FOR_EACH_EDGE (e, ei, bb->preds)
3265 if ((e->flags & EDGE_EH) == 0)
3269 /* And BB contains only CLOBBER stmts before the final
3270 RESX. */
3271 gsi = gsi_last_bb (bb);
3272 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3274 gimple stmt = gsi_stmt (gsi);
3275 if (is_gimple_debug (stmt))
3277 if (gimple_code (stmt) == GIMPLE_LABEL)
3279 if (!gimple_clobber_p (stmt)
3280 || TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
3282 any_clobbers = true;
3287 succbb = single_succ (bb);
3288 dgsi = gsi_after_labels (succbb);
3289 gsi = gsi_last_bb (bb);
3290 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3292 gimple stmt = gsi_stmt (gsi);
3293 if (is_gimple_debug (stmt))
3295 if (gimple_code (stmt) == GIMPLE_LABEL)
3297 unlink_stmt_vdef (stmt);
3298 gsi_remove (&gsi, false);
3299 /* Trigger the operand scanner to cause renaming for virtual
3300 operands for this statement.
3301 ??? Given the simple structure of this code manually
3302 figuring out the reaching definition should not be too hard. */
3303 if (gimple_vuse (stmt))
3304 gimple_set_vuse (stmt, NULL_TREE);
3305 gsi_insert_before (&dgsi, stmt, GSI_SAME_STMT);
3308 return TODO_update_ssa_only_virtuals;
3311 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3312 we have found some duplicate labels and removed some edges. */
3315 lower_eh_dispatch (basic_block src, gimple stmt)
3317 gimple_stmt_iterator gsi;
3322 bool redirected = false;
3324 region_nr = gimple_eh_dispatch_region (stmt);
3325 r = get_eh_region_from_number (region_nr);
3327 gsi = gsi_last_bb (src);
3333 VEC (tree, heap) *labels = NULL;
3334 tree default_label = NULL;
3338 struct pointer_set_t *seen_values = pointer_set_create ();
3340 /* Collect the labels for a switch. Zero the post_landing_pad
3341 field because we'll no longer have anything keeping these labels
3342 in existence and the optimizer will be free to merge these
3343 blocks. */
3344 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3346 tree tp_node, flt_node, lab = c->label;
3347 bool have_label = false;
3350 tp_node = c->type_list;
3351 flt_node = c->filter_list;
3353 if (tp_node == NULL)
3355 default_label = lab;
3360 /* Filter out duplicate labels that arise when this handler
3361 is shadowed by an earlier one. When no labels are
3362 attached to the handler anymore, we remove
3363 the corresponding edge and then we delete unreachable
3364 blocks at the end of this pass. */
3365 if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
3367 tree t = build_case_label (TREE_VALUE (flt_node),
3369 VEC_safe_push (tree, heap, labels, t);
3370 pointer_set_insert (seen_values, TREE_VALUE (flt_node));
3374 tp_node = TREE_CHAIN (tp_node);
3375 flt_node = TREE_CHAIN (flt_node);
3380 remove_edge (find_edge (src, label_to_block (lab)));
3385 /* Clean up the edge flags. */
3386 FOR_EACH_EDGE (e, ei, src->succs)
3388 if (e->flags & EDGE_FALLTHRU)
3390 /* If there was no catch-all, use the fallthru edge. */
3391 if (default_label == NULL)
3392 default_label = gimple_block_label (e->dest);
3393 e->flags &= ~EDGE_FALLTHRU;
3396 gcc_assert (default_label != NULL);
3398 /* Don't generate a switch if there's only a default case.
3399 This is common in the form of try { A; } catch (...) { B; }. */
3402 e = single_succ_edge (src);
3403 e->flags |= EDGE_FALLTHRU;
3407 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3408 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3410 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3411 filter = make_ssa_name (filter, x);
3412 gimple_call_set_lhs (x, filter);
3413 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3415 /* Turn the default label into a default case. */
3416 default_label = build_case_label (NULL, NULL, default_label);
3417 sort_case_labels (labels);
3419 x = gimple_build_switch (filter, default_label, labels);
3420 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3422 VEC_free (tree, heap, labels);
3424 pointer_set_destroy (seen_values);
3428 case ERT_ALLOWED_EXCEPTIONS:
3430 edge b_e = BRANCH_EDGE (src);
3431 edge f_e = FALLTHRU_EDGE (src);
3433 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3434 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3436 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3437 filter = make_ssa_name (filter, x);
3438 gimple_call_set_lhs (x, filter);
3439 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3441 r->u.allowed.label = NULL;
3442 x = gimple_build_cond (EQ_EXPR, filter,
3443 build_int_cst (TREE_TYPE (filter),
3444 r->u.allowed.filter),
3445 NULL_TREE, NULL_TREE);
3446 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3448 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3449 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3457 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3458 gsi_remove (&gsi, true);
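/* A sketch of the replacement (illustrative, not actual dump output):
   an eh_dispatch for a try region R with two typed handlers becomes

     filter_1 = __builtin_eh_filter (R);
     switch (filter_1) <default: LD, case 1: LA, case 2: LB>

   while an ERT_ALLOWED_EXCEPTIONS region becomes a conditional
   comparing FILTER_1 with the region's allowed filter value.  */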
3463 execute_lower_eh_dispatch (void)
3467 bool redirected = false;
3469 assign_filter_values ();
3473 gimple last = last_stmt (bb);
3476 if (gimple_code (last) == GIMPLE_EH_DISPATCH)
3478 redirected |= lower_eh_dispatch (bb, last);
3479 flags |= TODO_update_ssa_only_virtuals;
3481 else if (gimple_code (last) == GIMPLE_RESX)
3483 if (stmt_can_throw_external (last))
3484 optimize_clobbers (bb);
3486 flags |= sink_clobbers (bb);
3491 delete_unreachable_blocks ();
3496 gate_lower_eh_dispatch (void)
3498 return cfun->eh->region_tree != NULL;
3501 struct gimple_opt_pass pass_lower_eh_dispatch =
3505 "ehdisp", /* name */
3506 gate_lower_eh_dispatch, /* gate */
3507 execute_lower_eh_dispatch, /* execute */
3510 0, /* static_pass_number */
3511 TV_TREE_EH, /* tv_id */
3512 PROP_gimple_lcf, /* properties_required */
3513 0, /* properties_provided */
3514 0, /* properties_destroyed */
3515 0, /* todo_flags_start */
3516 TODO_verify_flow /* todo_flags_finish */
3520 /* Walk statements, see what regions are really referenced and remove
3521 those that are unused. */
3524 remove_unreachable_handlers (void)
3526 sbitmap r_reachable, lp_reachable;
3532 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3534 = sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array));
3535 bitmap_clear (r_reachable);
3536 bitmap_clear (lp_reachable);
3540 gimple_stmt_iterator gsi;
3542 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3544 gimple stmt = gsi_stmt (gsi);
3545 lp_nr = lookup_stmt_eh_lp (stmt);
3547 /* Negative LP numbers are MUST_NOT_THROW regions which
3548 are not considered BB enders. */
3550 SET_BIT (r_reachable, -lp_nr);
3552 /* Positive LP numbers are real landing pads, and are BB enders. */
3555 gcc_assert (gsi_one_before_end_p (gsi));
3556 region = get_eh_region_from_lp_number (lp_nr);
3557 SET_BIT (r_reachable, region->index);
3558 SET_BIT (lp_reachable, lp_nr);
3561 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3562 switch (gimple_code (stmt))
3565 SET_BIT (r_reachable, gimple_resx_region (stmt));
3567 case GIMPLE_EH_DISPATCH:
3568 SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
3578 fprintf (dump_file, "Before removal of unreachable regions:\n");
3579 dump_eh_tree (dump_file, cfun);
3580 fprintf (dump_file, "Reachable regions: ");
3581 dump_bitmap_file (dump_file, r_reachable);
3582 fprintf (dump_file, "Reachable landing pads: ");
3583 dump_bitmap_file (dump_file, lp_reachable);
3587 VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr)
3588 if (region && !TEST_BIT (r_reachable, r_nr))
3591 fprintf (dump_file, "Removing unreachable region %d\n", r_nr);
3592 remove_eh_handler (region);
3596 VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr)
3597 if (lp && !TEST_BIT (lp_reachable, lp_nr))
3600 fprintf (dump_file, "Removing unreachable landing pad %d\n", lp_nr);
3601 remove_eh_landing_pad (lp);
3606 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3607 dump_eh_tree (dump_file, cfun);
3608 fprintf (dump_file, "\n\n");
3611 sbitmap_free (r_reachable);
3612 sbitmap_free (lp_reachable);
3614 #ifdef ENABLE_CHECKING
3615 verify_eh_tree (cfun);
3619 /* Remove unreachable handlers if any landing pads have been removed after
3620 the last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
3623 maybe_remove_unreachable_handlers (void)
3628 if (cfun->eh == NULL)
3631 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3632 if (lp && lp->post_landing_pad)
3634 if (label_to_block (lp->post_landing_pad) == NULL)
3636 remove_unreachable_handlers ();
3642 /* Remove regions that do not have landing pads. This assumes
3643 that remove_unreachable_handlers has already been run, and
3644 that we've just manipulated the landing pads since then. */
3647 remove_unreachable_handlers_no_lp (void)
3651 sbitmap r_reachable;
3654 r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
3655 bitmap_clear (r_reachable);
3659 gimple stmt = last_stmt (bb);
3661 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3662 switch (gimple_code (stmt))
3665 SET_BIT (r_reachable, gimple_resx_region (stmt));
3667 case GIMPLE_EH_DISPATCH:
3668 SET_BIT (r_reachable, gimple_eh_dispatch_region (stmt));
3675 for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
3676 if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW
3677 && !TEST_BIT (r_reachable, i))
3680 fprintf (dump_file, "Removing unreachable region %d\n", i);
3681 remove_eh_handler (r);
3684 sbitmap_free (r_reachable);
3687 /* Undo critical edge splitting on an EH landing pad. Earlier, we
3688 optimistically split all sorts of edges, including EH edges. The
3689 optimization passes in between may not have needed them; if not,
3690 we should undo the split.
3692 Recognize this case by having one EH edge incoming to the BB and
3693 one normal edge outgoing; BB should be empty apart from the
3694 post_landing_pad label.
3696 Note that this is slightly different from the empty handler case
3697 handled by cleanup_empty_eh, in that the actual handler may yet
3698 have actual code but the landing pad has been separated from the
3699 handler. As such, cleanup_empty_eh relies on this transformation
3700 having been done first. */
3703 unsplit_eh (eh_landing_pad lp)
3705 basic_block bb = label_to_block (lp->post_landing_pad);
3706 gimple_stmt_iterator gsi;
3709 /* Quickly check the edge counts on BB for singularity. */
3710 if (EDGE_COUNT (bb->preds) != 1 || EDGE_COUNT (bb->succs) != 1)
3712 e_in = EDGE_PRED (bb, 0);
3713 e_out = EDGE_SUCC (bb, 0);
3715 /* Input edge must be EH and output edge must be normal. */
3716 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
3719 /* The block must be empty except for the labels and debug insns. */
3720 gsi = gsi_after_labels (bb);
3721 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
3722 gsi_next_nondebug (&gsi);
3723 if (!gsi_end_p (gsi))
3726 /* The destination block must not already have a landing pad
3727 for a different region. */
3728 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
3730 gimple stmt = gsi_stmt (gsi);
3734 if (gimple_code (stmt) != GIMPLE_LABEL)
3736 lab = gimple_label_label (stmt);
3737 lp_nr = EH_LANDING_PAD_NR (lab);
3738 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
3742 /* The new destination block must not already be a destination of
3743 the source block, lest we merge fallthru and EH edges and end up
3744 with all sorts of confusion. */
3745 if (find_edge (e_in->src, e_out->dest))
3748 /* ??? We can get degenerate phis due to cfg cleanups. I would have
3749 thought this should have been cleaned up by a phicprop pass, but
3750 that doesn't appear to handle virtuals. Propagate by hand. */
3751 if (!gimple_seq_empty_p (phi_nodes (bb)))
3753 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
3755 gimple use_stmt, phi = gsi_stmt (gsi);
3756 tree lhs = gimple_phi_result (phi);
3757 tree rhs = gimple_phi_arg_def (phi, 0);
3758 use_operand_p use_p;
3759 imm_use_iterator iter;
3761 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
3763 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3764 SET_USE (use_p, rhs);
3767 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3768 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
3770 remove_phi_node (&gsi, true);
3774 if (dump_file && (dump_flags & TDF_DETAILS))
3775 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
3776 lp->index, e_out->dest->index);
3778 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
3779 a successor edge, humor it. But do the real CFG change with the
3780 predecessor of E_OUT in order to preserve the ordering of arguments
3781 to the PHI nodes in E_OUT->DEST. */
3782 redirect_eh_edge_1 (e_in, e_out->dest, false);
3783 redirect_edge_pred (e_out, e_in->src);
3784 e_out->flags = e_in->flags;
3785 e_out->probability = e_in->probability;
3786 e_out->count = e_in->count;
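/* Schematically (a hedged sketch): unsplitting turns

     throw stmt --EH--> [empty landing pad block] --fallthru--> handler

   back into

     throw stmt --EH--> handler

   undoing the critical edge split described above.  */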
3792 /* Examine each landing pad block and see if it matches unsplit_eh. */
3795 unsplit_all_eh (void)
3797 bool changed = false;
3801 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
3803 changed |= unsplit_eh (lp);
3808 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
3809 to OLD_BB to NEW_BB; return true on success, false on failure.
3811 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
3812 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
3813 Virtual PHIs may be deleted and marked for renaming. */
3816 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
3817 edge old_bb_out, bool change_region)
3819 gimple_stmt_iterator ngsi, ogsi;
3822 bitmap rename_virts;
3823 bitmap ophi_handled;
3825 /* The destination block must not be a regular successor for any
3826 of the preds of the landing pad; that would create parallel edges,
3836 which CFG verification would choke on. See PR45172 and PR51089. */
3837 FOR_EACH_EDGE (e, ei, old_bb->preds)
3838 if (find_edge (e->src, new_bb))
3841 FOR_EACH_EDGE (e, ei, old_bb->preds)
3842 redirect_edge_var_map_clear (e);
3844 ophi_handled = BITMAP_ALLOC (NULL);
3845 rename_virts = BITMAP_ALLOC (NULL);
3847 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
3848 for the edges we're going to move. */
3849 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
3851 gimple ophi, nphi = gsi_stmt (ngsi);
3854 nresult = gimple_phi_result (nphi);
3855 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
3857 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
3858 the source ssa_name. */
3860 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3862 ophi = gsi_stmt (ogsi);
3863 if (gimple_phi_result (ophi) == nop)
3868 /* If we did find the corresponding PHI, copy those inputs. */
3871 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
3872 if (!has_single_use (nop))
3874 imm_use_iterator imm_iter;
3875 use_operand_p use_p;
3877 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
3879 if (!gimple_debug_bind_p (USE_STMT (use_p))
3880 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
3881 || gimple_bb (USE_STMT (use_p)) != new_bb))
3885 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
3886 FOR_EACH_EDGE (e, ei, old_bb->preds)
3891 if ((e->flags & EDGE_EH) == 0)
3893 oop = gimple_phi_arg_def (ophi, e->dest_idx);
3894 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
3895 redirect_edge_var_map_add (e, nresult, oop, oloc);
3898 /* If we didn't find the PHI, but it's a VOP, remember to rename
3899 it later, assuming all other tests succeed. */
3900 else if (virtual_operand_p (nresult))
3901 bitmap_set_bit (rename_virts, SSA_NAME_VERSION (nresult));
3902 /* If we didn't find the PHI, and it's a real variable, we know
3903 from the fact that OLD_BB is tree_empty_eh_handler_p that the
3904 variable is unchanged from input to the block and we can simply
3905 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
3909 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
3910 FOR_EACH_EDGE (e, ei, old_bb->preds)
3911 redirect_edge_var_map_add (e, nresult, nop, nloc);
3915 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
3916 we don't know what values from the other edges into NEW_BB to use. */
3917 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
3919 gimple ophi = gsi_stmt (ogsi);
3920 tree oresult = gimple_phi_result (ophi);
3921 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
3925 /* At this point we know that the merge will succeed. Remove the PHI
3926 nodes for the virtuals that we want to rename. */
3927 if (!bitmap_empty_p (rename_virts))
3929 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); )
3931 gimple nphi = gsi_stmt (ngsi);
3932 tree nresult = gimple_phi_result (nphi);
3933 if (bitmap_bit_p (rename_virts, SSA_NAME_VERSION (nresult)))
3935 mark_virtual_phi_result_for_renaming (nphi);
3936 remove_phi_node (&ngsi, true);
3943 /* Finally, move the edges and update the PHIs. */
3944 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
3945 if (e->flags & EDGE_EH)
3947 /* ??? CFG manipulation routines do not try to update loop
3948 form on edge redirection. Do so manually here for now. */
3949 /* Redirecting a loop entry or latch edge will either create a
3950 multiple-entry loop or rotate the loop. If the loops merge,
3951 we may have created a loop with multiple latches.
3952 None of this is easily fixed, so cancel the affected loop
3953 and mark the other loop as possibly having multiple latches. */
3955 && e->dest == e->dest->loop_father->header)
3957 e->dest->loop_father->header = NULL;
3958 e->dest->loop_father->latch = NULL;
3959 new_bb->loop_father->latch = NULL;
3960 loops_state_set (LOOPS_NEED_FIXUP|LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
3962 redirect_eh_edge_1 (e, new_bb, change_region);
3963 redirect_edge_succ (e, new_bb);
3964 flush_pending_stmts (e);
3969 BITMAP_FREE (ophi_handled);
3970 BITMAP_FREE (rename_virts);
3974 FOR_EACH_EDGE (e, ei, old_bb->preds)
3975 redirect_edge_var_map_clear (e);
3976 BITMAP_FREE (ophi_handled);
3977 BITMAP_FREE (rename_virts);
3981 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
3982 old region to NEW_REGION at BB. */
3985 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
3986 eh_landing_pad lp, eh_region new_region)
3988 gimple_stmt_iterator gsi;
3991 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
3995 lp->region = new_region;
3996 lp->next_lp = new_region->landing_pads;
3997 new_region->landing_pads = lp;
3999 /* Delete the RESX that was matched within the empty handler block. */
4000 gsi = gsi_last_bb (bb);
4001 unlink_stmt_vdef (gsi_stmt (gsi));
4002 gsi_remove (&gsi, true);
4004 /* Clean up E_OUT for the fallthru. */
4005 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
4006 e_out->probability = REG_BR_PROB_BASE;
4009 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
4010 unsplitting than unsplit_eh was prepared to handle, e.g. when
4011 multiple incoming edges and phis are involved. */
4014 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
4016 gimple_stmt_iterator gsi;
4019 /* We really ought not have totally lost everything following
4020 a landing pad label. Given that BB is empty, there had better
4021 be a successor. */
4022 gcc_assert (e_out != NULL);
4024 /* The destination block must not already have a landing pad
4025 for a different region. */
4027 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4029 gimple stmt = gsi_stmt (gsi);
4032 if (gimple_code (stmt) != GIMPLE_LABEL)
4034 lab = gimple_label_label (stmt);
4035 lp_nr = EH_LANDING_PAD_NR (lab);
4036 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4040 /* Attempt to move the PHIs into the successor block. */
4041 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
4043 if (dump_file && (dump_flags & TDF_DETAILS))
4045 "Unsplit EH landing pad %d to block %i "
4046 "(via cleanup_empty_eh).\n",
4047 lp->index, e_out->dest->index);
4054 /* Return true if edge E_FIRST is part of an empty infinite loop
4055 or leads to such a loop through a series of single-successor
4056 empty blocks. */
4059 infinite_empty_loop_p (edge e_first)
4061 bool inf_loop = false;
4064 if (e_first->dest == e_first->src)
4067 e_first->src->aux = (void *) 1;
4068 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4070 gimple_stmt_iterator gsi;
4076 e->dest->aux = (void *) 1;
4077 gsi = gsi_after_labels (e->dest);
4078 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4079 gsi_next_nondebug (&gsi);
4080 if (!gsi_end_p (gsi))
4083 e_first->src->aux = NULL;
4084 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4085 e->dest->aux = NULL;
4090 /* Examine the block associated with LP to determine if it's an empty
4091 handler for its EH region. If so, attempt to redirect EH edges to
4092 an outer region. Return true if the CFG was updated in any way. This
4093 is similar to jump forwarding, just across EH edges. */
4096 cleanup_empty_eh (eh_landing_pad lp)
4098 basic_block bb = label_to_block (lp->post_landing_pad);
4099 gimple_stmt_iterator gsi;
4101 eh_region new_region;
4104 bool has_non_eh_pred;
4108 /* There can be zero or one edges out of BB. This is the quickest test. */
4109 switch (EDGE_COUNT (bb->succs))
4115 e_out = EDGE_SUCC (bb, 0);
4121 resx = last_stmt (bb);
4122 if (resx && is_gimple_resx (resx))
4124 if (stmt_can_throw_external (resx))
4125 optimize_clobbers (bb);
4126 else if (sink_clobbers (bb))
4130 gsi = gsi_after_labels (bb);
4132 /* Make sure to skip debug statements. */
4133 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4134 gsi_next_nondebug (&gsi);
4136 /* If the block is totally empty, look for more unsplitting cases. */
4137 if (gsi_end_p (gsi))
4139 /* For the degenerate case of an infinite loop, bail out. */
4140 if (infinite_empty_loop_p (e_out))
4143 return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
4146 /* The block should consist only of a single RESX statement, modulo a
4147 preceding call to __builtin_stack_restore if there is no outgoing
4148 edge, since the call can be eliminated in this case. */
4149 resx = gsi_stmt (gsi);
4150 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4153 resx = gsi_stmt (gsi);
4155 if (!is_gimple_resx (resx))
4157 gcc_assert (gsi_one_before_end_p (gsi));
4159 /* Determine if there are non-EH edges, or resx edges into the handler. */
4160 has_non_eh_pred = false;
4161 FOR_EACH_EDGE (e, ei, bb->preds)
4162 if (!(e->flags & EDGE_EH))
4163 has_non_eh_pred = true;
4165 /* Find the handler that's outer of the empty handler by looking at
4166 where the RESX instruction was vectored. */
4167 new_lp_nr = lookup_stmt_eh_lp (resx);
4168 new_region = get_eh_region_from_lp_number (new_lp_nr);
4170 /* If there's no destination region within the current function,
4171 redirection is trivial via removing the throwing statements from
4172 the EH region, removing the EH edges, and allowing the block
4173 to go unreachable. */
4174 if (new_region == NULL)
4176 gcc_assert (e_out == NULL);
4177 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4178 if (e->flags & EDGE_EH)
4180 gimple stmt = last_stmt (e->src);
4181 remove_stmt_from_eh_lp (stmt);
4189 /* If the destination region is a MUST_NOT_THROW, allow the runtime
4190 to handle the abort and allow the blocks to go unreachable. */
4191 if (new_region->type == ERT_MUST_NOT_THROW)
4193 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4194 if (e->flags & EDGE_EH)
4196 gimple stmt = last_stmt (e->src);
4197 remove_stmt_from_eh_lp (stmt);
4198 add_stmt_to_eh_lp (stmt, new_lp_nr);
4206 /* Try to redirect the EH edges and merge the PHIs into the destination
4207 landing pad block. If the merge succeeds, we'll already have redirected
4208 all the EH edges. The handler itself will go unreachable if there were
4209 no normal edges. */
4210 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
4213 /* Finally, if all input edges are EH edges, then we can (potentially)
4214 reduce the number of transfers from the runtime by moving the landing
4215 pad from the original region to the new region. This is a win when
4216 we remove the last CLEANUP region along a particular exception
4217 propagation path. Since nothing changes except for the region with
4218 which the landing pad is associated, the PHI nodes do not need to be
4219 updated. */
4220 if (!has_non_eh_pred)
4222 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4223 if (dump_file && (dump_flags & TDF_DETAILS))
4224 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4225 lp->index, new_region->index);
4227 /* ??? The CFG didn't change, but we may have rendered the
4228 old EH region unreachable. Trigger a cleanup there. */
4235 if (dump_file && (dump_flags & TDF_DETAILS))
4236 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4237 remove_eh_landing_pad (lp);
4241 /* Do a post-order traversal of the EH region tree. Examine each
4242 post_landing_pad block and see if we can eliminate it as empty. */
4245 cleanup_all_empty_eh (void)
4247 bool changed = false;
4251 for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
4253 changed |= cleanup_empty_eh (lp);
4258 /* Perform cleanups and lowering of exception handling:
4259 1) cleanup regions with handlers doing nothing are optimized out
4260 2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
4261 3) info about regions that contain instructions, and regions
4262 reachable via local EH edges, is collected
4263 4) the EH tree is pruned for regions that are no longer necessary.
4265 TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
4266 Unify those that have the same failure decl and locus.
4270 execute_cleanup_eh_1 (void)
4272 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4273 looking up unreachable landing pads. */
4274 remove_unreachable_handlers ();
4276 /* Watch out for the region tree vanishing because all regions are unreachable. */
4277 if (cfun->eh->region_tree && optimize)
4279 bool changed = false;
4281 changed |= unsplit_all_eh ();
4282 changed |= cleanup_all_empty_eh ();
4286 free_dominance_info (CDI_DOMINATORS);
4287 free_dominance_info (CDI_POST_DOMINATORS);
4289 /* We delayed all basic block deletion, as we may have performed
4290 cleanups on EH edges while non-EH edges were still present. */
4291 delete_unreachable_blocks ();
4293 /* We manipulated the landing pads. Remove any region that no
4294 longer has a landing pad. */
4295 remove_unreachable_handlers_no_lp ();
4297 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4305 execute_cleanup_eh (void)
4307 int ret = execute_cleanup_eh_1 ();
4309 /* If the function no longer needs an EH personality routine
4310 clear it. This exposes cross-language inlining opportunities
4311 and avoids references to a never defined personality routine. */
4312 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4313 && function_needs_eh_personality (cfun) != eh_personality_lang)
4314 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4320 gate_cleanup_eh (void)
4322 return cfun->eh != NULL && cfun->eh->region_tree != NULL;
4325 struct gimple_opt_pass pass_cleanup_eh = {
4328 "ehcleanup", /* name */
4329 gate_cleanup_eh, /* gate */
4330 execute_cleanup_eh, /* execute */
4333 0, /* static_pass_number */
4334 TV_TREE_EH, /* tv_id */
4335 PROP_gimple_lcf, /* properties_required */
4336 0, /* properties_provided */
4337 0, /* properties_destroyed */
4338 0, /* todo_flags_start */
4339 0 /* todo_flags_finish */
4343 /* Verify that BB, which contains STMT as its last statement, has precisely
4344 the edge that make_eh_edges would create. */
4347 verify_eh_edges (gimple stmt)
4349 basic_block bb = gimple_bb (stmt);
4350 eh_landing_pad lp = NULL;
4355 lp_nr = lookup_stmt_eh_lp (stmt);
4357 lp = get_eh_landing_pad_from_number (lp_nr);
4360 FOR_EACH_EDGE (e, ei, bb->succs)
4362 if (e->flags & EDGE_EH)
4366 error ("BB %i has multiple EH edges", bb->index);
4378 error ("BB %i can not throw but has an EH edge", bb->index);
4384 if (!stmt_could_throw_p (stmt))
4386 error ("BB %i last statement has incorrectly set lp", bb->index);
4390 if (eh_edge == NULL)
4392 error ("BB %i is missing an EH edge", bb->index);
4396 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
4398 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4405 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
4408 verify_eh_dispatch_edge (gimple stmt)
4412 basic_block src, dst;
4413 bool want_fallthru = true;
4417 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4418 src = gimple_bb (stmt);
4420 FOR_EACH_EDGE (e, ei, src->succs)
4421 gcc_assert (e->aux == NULL);
4426 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4428 dst = label_to_block (c->label);
4429 e = find_edge (src, dst);
4432 error ("BB %i is missing an edge", src->index);
4437 /* A catch-all handler doesn't have a fallthru. */
4438 if (c->type_list == NULL)
4440 want_fallthru = false;
4446 case ERT_ALLOWED_EXCEPTIONS:
4447 dst = label_to_block (r->u.allowed.label);
4448 e = find_edge (src, dst);
4451 error ("BB %i is missing an edge", src->index);
4462 FOR_EACH_EDGE (e, ei, src->succs)
4464 if (e->flags & EDGE_FALLTHRU)
4466 if (fall_edge != NULL)
4468 error ("BB %i too many fallthru edges", src->index);
4477 error ("BB %i has incorrect edge", src->index);
4481 if ((fall_edge != NULL) ^ want_fallthru)
4483 error ("BB %i has incorrect fallthru edge", src->index);