/* Exception handling semantics and decomposition for trees.
   Copyright (C) 2003-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "tree.h"
#include "expr.h"
#include "calls.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "pointer-set.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-ssa.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "diagnostic-core.h"
#include "target.h"
#include "cfgloop.h"
#include "gimple-low.h"
/* In some instances a tree and a gimple statement need to be stored in
   the same table, i.e. in hash tables.  This is a structure to do
   this.  */
typedef union {tree *tp; tree t; gimple g;} treemple;
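
/* For example (illustrative only): collect_finally_tree below keys one
   hash table both by GIMPLE_TRY statements, stored via the G member,
   and by the LABEL_DECLs of goto destinations, stored via the T
   member.  */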
/* Misc functions used in this file.  */

/* Remember and lookup EH landing pad data for arbitrary statements.
   Really this means any statement that could_throw_p.  We could
   stuff this information into the stmt_ann data structure, but:

   (1) We absolutely rely on this information being kept until
   we get to rtl.  Once we're done with lowering here, if we lose
   the information there's no way to recover it!

   (2) There are many more statements that *cannot* throw as
   compared to those that can.  We should be saving some amount
   of space by only allocating memory for those that can throw.  */
/* Add statement T in function IFUN to landing pad NUM.  */

void
add_stmt_to_eh_lp_fn (struct function *ifun, gimple t, int num)
{
  struct throw_stmt_node *n;
  void **slot;

  gcc_assert (num != 0);

  n = ggc_alloc_throw_stmt_node ();
  n->stmt = t;
  n->lp_nr = num;

  if (!get_eh_throw_stmt_table (ifun))
    set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
						    struct_ptr_eq,
						    ggc_free));

  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

/* Add statement T in the current function (cfun) to EH landing pad NUM.  */

void
add_stmt_to_eh_lp (gimple t, int num)
{
  add_stmt_to_eh_lp_fn (cfun, t, num);
}
/* Add statement T to the single EH landing pad in REGION.  */

static void
record_stmt_eh_region (eh_region region, gimple t)
{
  if (region->type == ERT_MUST_NOT_THROW)
    add_stmt_to_eh_lp_fn (cfun, t, -region->index);
  else
    {
      eh_landing_pad lp = region->landing_pads;
      if (lp == NULL)
	lp = gen_eh_landing_pad (region);
      else
	gcc_assert (lp->next_lp == NULL);
      add_stmt_to_eh_lp_fn (cfun, t, lp->index);
    }
}
/* Remove statement T in function IFUN from its EH landing pad.  */

bool
remove_stmt_from_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node dummy;
  void **slot;

  if (!get_eh_throw_stmt_table (ifun))
    return false;

  dummy.stmt = t;
  slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
			 NO_INSERT);
  if (slot)
    {
      htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
      return true;
    }
  else
    return false;
}
/* Remove statement T in the current function (cfun) from its
   EH landing pad.  */

bool
remove_stmt_from_eh_lp (gimple t)
{
  return remove_stmt_from_eh_lp_fn (cfun, t);
}

/* Determine if statement T is inside an EH region in function IFUN.
   Positive numbers indicate a landing pad index; negative numbers
   indicate a MUST_NOT_THROW region index; zero indicates that the
   statement is not recorded in the region table.  */
int
lookup_stmt_eh_lp_fn (struct function *ifun, gimple t)
{
  struct throw_stmt_node *p, n;

  if (ifun->eh->throw_stmt_table == NULL)
    return 0;

  n.stmt = t;
  p = (struct throw_stmt_node *) htab_find (ifun->eh->throw_stmt_table, &n);
  return p ? p->lp_nr : 0;
}
/* Likewise, but always use the current function.  */

int
lookup_stmt_eh_lp (gimple t)
{
  /* We can get called from initialized data when -fnon-call-exceptions
     is on; prevent crash.  */
  if (cfun == NULL)
    return 0;
  return lookup_stmt_eh_lp_fn (cfun, t);
}
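
/* For example (illustrative only): a statement recorded against landing
   pad 3 makes lookup_stmt_eh_lp return 3; one recorded inside the
   MUST_NOT_THROW region with index 2 returns -2; and a statement that
   was never recorded returns 0.  */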
/* First pass of EH node decomposition.  Build up a tree of GIMPLE_TRY_FINALLY
   nodes and LABEL_DECL nodes.  We will use this during the second phase to
   determine if a goto leaves the body of a TRY_FINALLY_EXPR node.  */
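
/* For instance (illustrative only): given

	try { goto done; } finally { cleanup (); }
	done:

   the label DONE is recorded against the region enclosing the
   GIMPLE_TRY_FINALLY, so the second phase can tell that the goto escapes
   the try body and must be routed through the finally code.  */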
struct finally_tree_node
{
  /* When storing a GIMPLE_TRY, we have to record a gimple.  However
     when deciding whether a GOTO to a certain LABEL_DECL (which is a
     tree) leaves the TRY block, it's necessary to record a tree in
     this field.  Thus a treemple is used.  */
  treemple child;
  gimple parent;
};
/* Hashtable helpers.  */

struct finally_tree_hasher : typed_free_remove <finally_tree_node>
{
  typedef finally_tree_node value_type;
  typedef finally_tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

inline hashval_t
finally_tree_hasher::hash (const value_type *v)
{
  return (intptr_t)v->child.t >> 4;
}

inline bool
finally_tree_hasher::equal (const value_type *v, const compare_type *c)
{
  return v->child.t == c->child.t;
}

/* Note that this table is *not* marked GTY.  It is short-lived.  */
static hash_table <finally_tree_hasher> finally_tree;
static void
record_in_finally_tree (treemple child, gimple parent)
{
  struct finally_tree_node *n;
  finally_tree_node **slot;

  n = XNEW (struct finally_tree_node);
  n->child = child;
  n->parent = parent;

  slot = finally_tree.find_slot (n, INSERT);
  gcc_assert (!*slot);
  *slot = n;
}

static void
collect_finally_tree (gimple stmt, gimple region);
/* Go through the gimple sequence.  Works with collect_finally_tree to
   record all GIMPLE_LABEL and GIMPLE_TRY statements.  */

static void
collect_finally_tree_1 (gimple_seq seq, gimple region)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    collect_finally_tree (gsi_stmt (gsi), region);
}
static void
collect_finally_tree (gimple stmt, gimple region)
{
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_LABEL:
      temp.t = gimple_label_label (stmt);
      record_in_finally_tree (temp, region);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	{
	  temp.g = stmt;
	  record_in_finally_tree (temp, region);
	  collect_finally_tree_1 (gimple_try_eval (stmt), stmt);
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      else if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	{
	  collect_finally_tree_1 (gimple_try_eval (stmt), region);
	  collect_finally_tree_1 (gimple_try_cleanup (stmt), region);
	}
      break;

    case GIMPLE_CATCH:
      collect_finally_tree_1 (gimple_catch_handler (stmt), region);
      break;

    case GIMPLE_EH_FILTER:
      collect_finally_tree_1 (gimple_eh_filter_failure (stmt), region);
      break;

    case GIMPLE_EH_ELSE:
      collect_finally_tree_1 (gimple_eh_else_n_body (stmt), region);
      collect_finally_tree_1 (gimple_eh_else_e_body (stmt), region);
      break;

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }
}
/* Use the finally tree to determine if a jump from START to TARGET
   would leave the try_finally node that START lives in.  */

static bool
outside_finally_tree (treemple start, gimple target)
{
  struct finally_tree_node n, *p;

  do
    {
      n.child = start;
      p = finally_tree.find (&n);
      if (!p)
	return true;
      start.g = p->parent;
    }
  while (start.g != target);

  return false;
}
/* Second pass of EH node decomposition.  Actually transform the GIMPLE_TRY
   nodes into a set of gotos, magic labels, and eh regions.
   The eh region creation is straightforward, but frobbing all the gotos
   and such into shape isn't.  */

/* The sequence into which we record all EH stuff.  This will be
   placed at the end of the function when we're all done.  */
static gimple_seq eh_seq;

/* Record whether an EH region contains something that can throw,
   indexed by EH region number.  */
static bitmap eh_region_may_contain_throw_map;

/* The GOTO_QUEUE is an array of GIMPLE_GOTO and GIMPLE_RETURN
   statements that are seen to escape this GIMPLE_TRY_FINALLY node.
   The idea is to record a gimple statement for everything except for
   the conditionals, which get their labels recorded.  Since labels are
   of type 'tree', we need this node to store both gimple and tree
   objects.  REPL_STMT is the sequence used to replace the goto/return
   statement.  CONT_STMT is used to store the statement that allows
   the return/goto to jump to the original destination.  */

struct goto_queue_node
{
  treemple stmt;
  location_t location;
  gimple_seq repl_stmt;
  gimple cont_stmt;
  int index;
  /* This is used when index >= 0 to indicate that stmt is a label (as
     opposed to a goto stmt).  */
  bool is_label;
};
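
/* For example (illustrative only): for "try { return; } finally { f (); }"
   the GIMPLE_RETURN is queued; its REPL_STMT becomes a goto to the label
   heading the lowered finally code, and its CONT_STMT is the original
   return, re-emitted after the finally code so control still leaves the
   function by the intended path.  */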
/* State of the world while lowering.  */

struct leh_state
{
  /* What's "current" while constructing the eh region tree.  These
     correspond to variables of the same name in cfun->eh, which we
     don't have easy access to.  */
  eh_region cur_region;

  /* What's "current" for the purposes of __builtin_eh_pointer.  For
     a CATCH, this is the associated TRY.  For an EH_FILTER, this is
     the associated ALLOWED_EXCEPTIONS, etc.  */
  eh_region ehp_region;

  /* Processing of TRY_FINALLY requires a bit more state.  This is
     split out into a separate structure so that we don't have to
     copy so much when processing other nodes.  */
  struct leh_tf_state *tf;
};

struct leh_tf_state
{
  /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
     try_finally_expr is the original GIMPLE_TRY_FINALLY.  We need to retain
     this so that outside_finally_tree can reliably reference the tree used
     in the collect_finally_tree data structures.  */
  gimple try_finally_expr;
  gimple top_p;
  /* While lowering a top_p, it is usually expanded into multiple
     statements; thus we need the following field to store them.  */
  gimple_seq top_p_seq;

  /* The state outside this try_finally node.  */
  struct leh_state *outer;

  /* The exception region created for it.  */
  eh_region region;

  /* The goto queue.  */
  struct goto_queue_node *goto_queue;
  size_t goto_queue_size;
  size_t goto_queue_active;

  /* Pointer map to help in searching goto_queue when it is large.  */
  struct pointer_map_t *goto_queue_map;

  /* The set of unique labels seen as entries in the goto queue.  */
  vec<tree> dest_array;

  /* A label to be added at the end of the completed transformed
     sequence.  It will be set if may_fallthru was true *at one time*,
     though subsequent transformations may have cleared that flag.  */
  tree fallthru_label;

  /* True if it is possible to fall out the bottom of the try block.
     Cleared if the fallthru is converted to a goto.  */
  bool may_fallthru;

  /* True if any entry in goto_queue is a GIMPLE_RETURN.  */
  bool may_return;

  /* True if the finally block can receive an exception edge.
     Cleared if the exception case is handled by code duplication.  */
  bool may_throw;
};
static gimple_seq lower_eh_must_not_throw (struct leh_state *, gimple);

/* Search for STMT in the goto queue.  Return the replacement,
   or null if the statement isn't in the queue.  */

#define LARGE_GOTO_QUEUE 20

static void lower_eh_constructs_1 (struct leh_state *state, gimple_seq *seq);
static gimple_seq
find_goto_replacement (struct leh_tf_state *tf, treemple stmt)
{
  unsigned int i;
  void **slot;

  if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
    {
      for (i = 0; i < tf->goto_queue_active; i++)
	if (tf->goto_queue[i].stmt.g == stmt.g)
	  return tf->goto_queue[i].repl_stmt;
      return NULL;
    }

  /* If we have a large number of entries in the goto_queue, create a
     pointer map and use that for searching.  */

  if (!tf->goto_queue_map)
    {
      tf->goto_queue_map = pointer_map_create ();
      for (i = 0; i < tf->goto_queue_active; i++)
	{
	  slot = pointer_map_insert (tf->goto_queue_map,
				     tf->goto_queue[i].stmt.g);
	  gcc_assert (*slot == NULL);
	  *slot = &tf->goto_queue[i];
	}
    }

  slot = pointer_map_contains (tf->goto_queue_map, stmt.g);
  if (slot != NULL)
    return ((struct goto_queue_node *) *slot)->repl_stmt;

  return NULL;
}
/* A subroutine of replace_goto_queue_1.  Handles the sub-clauses of a
   lowered GIMPLE_COND.  If, by chance, the replacement is a simple goto,
   then we can just splat it in, otherwise we add the new stmts immediately
   after the GIMPLE_COND and redirect.  */

static void
replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
				gimple_stmt_iterator *gsi)
{
  tree label;
  gimple_seq new_seq;
  treemple temp;
  location_t loc = gimple_location (gsi_stmt (*gsi));

  temp.tp = tp;
  new_seq = find_goto_replacement (tf, temp);
  if (!new_seq)
    return;

  if (gimple_seq_singleton_p (new_seq)
      && gimple_code (gimple_seq_first_stmt (new_seq)) == GIMPLE_GOTO)
    {
      *tp = gimple_goto_dest (gimple_seq_first_stmt (new_seq));
      return;
    }

  label = create_artificial_label (loc);
  /* Set the new label for the GIMPLE_COND */
  *tp = label;

  /* And move the new branch destination after the GIMPLE_COND */
  gsi_insert_after (gsi, gimple_build_label (label), GSI_CONTINUE_LINKING);
  gsi_insert_seq_after (gsi, gimple_seq_copy (new_seq), GSI_CONTINUE_LINKING);
}
/* The real work of replace_goto_queue.  Returns with TSI updated to
   point to the next statement.  */

static void replace_goto_queue_stmt_list (gimple_seq *, struct leh_tf_state *);

static void
replace_goto_queue_1 (gimple stmt, struct leh_tf_state *tf,
		      gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  treemple temp;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      temp.g = stmt;
      seq = find_goto_replacement (tf, temp);
      if (seq)
	{
	  gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
	  gsi_remove (gsi, false);
	  return;
	}
      break;

    case GIMPLE_COND:
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 2), tf, gsi);
      replace_goto_queue_cond_clause (gimple_op_ptr (stmt, 3), tf, gsi);
      break;

    case GIMPLE_TRY:
      replace_goto_queue_stmt_list (gimple_try_eval_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_try_cleanup_ptr (stmt), tf);
      break;
    case GIMPLE_CATCH:
      replace_goto_queue_stmt_list (gimple_catch_handler_ptr (stmt), tf);
      break;
    case GIMPLE_EH_FILTER:
      replace_goto_queue_stmt_list (gimple_eh_filter_failure_ptr (stmt), tf);
      break;
    case GIMPLE_EH_ELSE:
      replace_goto_queue_stmt_list (gimple_eh_else_n_body_ptr (stmt), tf);
      replace_goto_queue_stmt_list (gimple_eh_else_e_body_ptr (stmt), tf);
      break;

    default:
      /* These won't have gotos in them.  */
      break;
    }

  gsi_next (gsi);
}
/* A subroutine of replace_goto_queue.  Handles GIMPLE_SEQ.  */

static void
replace_goto_queue_stmt_list (gimple_seq *seq, struct leh_tf_state *tf)
{
  gimple_stmt_iterator gsi = gsi_start (*seq);

  while (!gsi_end_p (gsi))
    replace_goto_queue_1 (gsi_stmt (gsi), tf, &gsi);
}
/* Replace all goto queue members.  */

static void
replace_goto_queue (struct leh_tf_state *tf)
{
  if (tf->goto_queue_active == 0)
    return;
  replace_goto_queue_stmt_list (&tf->top_p_seq, tf);
  replace_goto_queue_stmt_list (&eh_seq, tf);
}
/* Add a new record to the goto queue contained in TF.  NEW_STMT is the
   data to be added, IS_LABEL indicates whether NEW_STMT is a label or
   a gimple statement.  */

static void
record_in_goto_queue (struct leh_tf_state *tf,
		      treemple new_stmt,
		      int index,
		      bool is_label,
		      location_t location)
{
  size_t active, size;
  struct goto_queue_node *q;

  gcc_assert (!tf->goto_queue_map);

  active = tf->goto_queue_active;
  size = tf->goto_queue_size;
  if (active >= size)
    {
      size = (size ? size * 2 : 32);
      tf->goto_queue_size = size;
      tf->goto_queue
	= XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
    }

  q = &tf->goto_queue[active];
  tf->goto_queue_active = active + 1;

  memset (q, 0, sizeof (*q));
  q->stmt = new_stmt;
  q->index = index;
  q->location = location;
  q->is_label = is_label;
}
/* Record the LABEL label in the goto queue contained in TF.
   TF is not null.  */

static void
record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
			    location_t location)
{
  int index;
  treemple temp, new_stmt;

  if (!label)
    return;

  /* Computed and non-local gotos do not get processed.  Given
     their nature we can neither tell whether we've escaped the
     finally block nor redirect them if we knew.  */
  if (TREE_CODE (label) != LABEL_DECL)
    return;

  /* No need to record gotos that don't leave the try block.  */
  temp.t = label;
  if (!outside_finally_tree (temp, tf->try_finally_expr))
    return;

  if (! tf->dest_array.exists ())
    {
      tf->dest_array.create (10);
      tf->dest_array.quick_push (label);
      index = 0;
    }
  else
    {
      int n = tf->dest_array.length ();
      for (index = 0; index < n; ++index)
	if (tf->dest_array[index] == label)
	  break;
      if (index == n)
	tf->dest_array.safe_push (label);
    }

  /* In the case of a GOTO we want to record the destination label,
     since with a GIMPLE_COND we have an easy access to the then/else
     labels.  */
  new_stmt = stmt;
  record_in_goto_queue (tf, new_stmt, index, true, location);
}
/* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally
   node, and if so record that fact in the goto queue associated with that
   try_finally node.  */

static void
maybe_record_in_goto_queue (struct leh_state *state, gimple stmt)
{
  struct leh_tf_state *tf = state->tf;
  treemple new_stmt;

  if (!tf)
    return;

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      new_stmt.tp = gimple_op_ptr (stmt, 2);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_true_label (stmt),
				  EXPR_LOCATION (*new_stmt.tp));
      new_stmt.tp = gimple_op_ptr (stmt, 3);
      record_in_goto_queue_label (tf, new_stmt, gimple_cond_false_label (stmt),
				  EXPR_LOCATION (*new_stmt.tp));
      break;
    case GIMPLE_GOTO:
      new_stmt.g = stmt;
      record_in_goto_queue_label (tf, new_stmt, gimple_goto_dest (stmt),
				  gimple_location (stmt));
      break;

    case GIMPLE_RETURN:
      tf->may_return = true;
      new_stmt.g = stmt;
      record_in_goto_queue (tf, new_stmt, -1, false, gimple_location (stmt));
      break;

    default:
      gcc_unreachable ();
    }
}
#ifdef ENABLE_CHECKING
/* We do not process GIMPLE_SWITCHes for now.  As long as the original source
   was in fact structured, and we've not yet done jump threading, then none
   of the labels will leave outer GIMPLE_TRY_FINALLY nodes.  Verify this.  */

static void
verify_norecord_switch_expr (struct leh_state *state, gimple switch_expr)
{
  struct leh_tf_state *tf = state->tf;
  size_t i, n;

  if (!tf)
    return;

  n = gimple_switch_num_labels (switch_expr);

  for (i = 0; i < n; ++i)
    {
      treemple temp;
      tree lab = CASE_LABEL (gimple_switch_label (switch_expr, i));
      temp.t = lab;
      gcc_assert (!outside_finally_tree (temp, tf->try_finally_expr));
    }
}
#else
#define verify_norecord_switch_expr(state, switch_expr)
#endif
/* Redirect a RETURN_EXPR pointed to by Q to FINLAB.  If MOD is
   non-null, insert it before the new branch.  */

static void
do_return_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod)
{
  gimple x;

  /* In the case of a return, the queue node must be a gimple statement.  */
  gcc_assert (!q->is_label);

  /* Note that the return value may have already been computed, e.g.,

	int x;
	int foo (void)
	{
	  x = 0;
	  try {
	    return x;
	  } finally {
	    x++;
	  }
	}

     should return 0, not 1.  We don't have to do anything to make
     this happen because the return value has been placed in the
     RESULT_DECL already.  */

  q->cont_stmt = q->stmt.g;

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Similar, but easier, for GIMPLE_GOTO.  */

static void
do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
		     struct leh_tf_state *tf)
{
  gimple x;

  gcc_assert (q->is_label);

  q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);

  if (mod)
    gimple_seq_add_seq (&q->repl_stmt, mod);

  x = gimple_build_goto (finlab);
  gimple_set_location (x, q->location);
  gimple_seq_add_stmt (&q->repl_stmt, x);
}
/* Emit a standard landing pad sequence into SEQ for REGION.  */

static void
emit_post_landing_pad (gimple_seq *seq, eh_region region)
{
  eh_landing_pad lp = region->landing_pads;
  gimple x;

  if (lp == NULL)
    lp = gen_eh_landing_pad (region);

  lp->post_landing_pad = create_artificial_label (UNKNOWN_LOCATION);
  EH_LANDING_PAD_NR (lp->post_landing_pad) = lp->index;

  x = gimple_build_label (lp->post_landing_pad);
  gimple_seq_add_stmt (seq, x);
}
/* Emit a RESX statement into SEQ for REGION.  */

static void
emit_resx (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_resx (region->index);
  gimple_seq_add_stmt (seq, x);
  if (region->outer)
    record_stmt_eh_region (region->outer, x);
}

/* Emit an EH_DISPATCH statement into SEQ for REGION.  */

static void
emit_eh_dispatch (gimple_seq *seq, eh_region region)
{
  gimple x = gimple_build_eh_dispatch (region->index);
  gimple_seq_add_stmt (seq, x);
}
/* Note that the current EH region may contain a throw, or a
   call to a function which itself may contain a throw.  */

static void
note_eh_region_may_contain_throw (eh_region region)
{
  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
    {
      if (region->type == ERT_MUST_NOT_THROW)
	break;
      region = region->outer;
      if (region == NULL)
	break;
    }
}

/* Check if REGION has been marked as containing a throw.  If REGION is
   NULL, this predicate is false.  */

static inline bool
eh_region_may_contain_throw (eh_region r)
{
  return r && bitmap_bit_p (eh_region_may_contain_throw_map, r->index);
}
/* We want to transform
	try { body; } catch { stuff; }
   to
	normal_sequence:
	  body;
	  over:
	eh_sequence:
	  landing_pad:
	  stuff;
	  goto over;

   TP is a GIMPLE_TRY node.  REGION is the region whose post_landing_pad
   should be placed before the second operand, or NULL.  OVER is
   an existing label that should be put at the exit, or NULL.  */
static gimple_seq
frob_into_branch_around (gimple tp, eh_region region, tree over)
{
  gimple x;
  gimple_seq cleanup, result;
  location_t loc = gimple_location (tp);

  cleanup = gimple_try_cleanup (tp);
  result = gimple_try_eval (tp);

  if (region)
    emit_post_landing_pad (&eh_seq, region);

  if (gimple_seq_may_fallthru (cleanup))
    {
      if (!over)
	over = create_artificial_label (loc);
      x = gimple_build_goto (over);
      gimple_set_location (x, loc);
      gimple_seq_add_stmt (&cleanup, x);
    }
  gimple_seq_add_seq (&eh_seq, cleanup);

  if (over)
    {
      x = gimple_build_label (over);
      gimple_seq_add_stmt (&result, x);
    }
  return result;
}
/* A subroutine of lower_try_finally.  Duplicate the tree rooted at T.
   Make sure to record all new labels found.  */

static gimple_seq
lower_try_finally_dup_block (gimple_seq seq, struct leh_state *outer_state,
			     location_t loc)
{
  gimple region = NULL;
  gimple_seq new_seq;
  gimple_stmt_iterator gsi;

  new_seq = copy_gimple_seq_and_replace_locals (seq);

  for (gsi = gsi_start (new_seq); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
	{
	  tree block = gimple_block (stmt);
	  gimple_set_location (stmt, loc);
	  gimple_set_block (stmt, block);
	}
    }

  if (outer_state->tf)
    region = outer_state->tf->try_finally_expr;
  collect_finally_tree_1 (new_seq, region);

  return new_seq;
}
/* A subroutine of lower_try_finally.  Create a fallthru label for
   the given try_finally state.  The only tricky bit here is that
   we have to make sure to record the label in our outer context.  */

static tree
lower_try_finally_fallthru_label (struct leh_tf_state *tf)
{
  tree label = tf->fallthru_label;
  treemple temp;

  if (!label)
    {
      label = create_artificial_label (gimple_location (tf->try_finally_expr));
      tf->fallthru_label = label;
      if (tf->outer->tf)
	{
	  temp.t = label;
	  record_in_finally_tree (temp, tf->outer->tf->try_finally_expr);
	}
    }
  return label;
}
/* A subroutine of lower_try_finally.  If FINALLY consists of a
   GIMPLE_EH_ELSE node, return it.  */

static gimple
get_eh_else (gimple_seq finally)
{
  gimple x = gimple_seq_first_stmt (finally);
  if (gimple_code (x) == GIMPLE_EH_ELSE)
    {
      gcc_assert (gimple_seq_singleton_p (finally));
      return x;
    }
  return NULL;
}
/* A subroutine of lower_try_finally.  If the eh_protect_cleanup_actions
   langhook returns non-null, then the language requires that the exception
   path out of a try_finally be treated specially.  To wit: the code within
   the finally block may not itself throw an exception.  We have two choices
   here.  First we can duplicate the finally block and wrap it in a
   must_not_throw region.  Second, we can generate code like

	try {
	  finally_block;
	} catch {
	  if (fintmp == eh_edge)
	    protect_cleanup_actions;
	}

   where "fintmp" is the temporary used in the switch statement generation
   alternative considered below.  For the nonce, we always choose the first
   option.

   THIS_STATE may be null if this is a try-cleanup, not a try-finally.  */
static void
honor_protect_cleanup_actions (struct leh_state *outer_state,
			       struct leh_state *this_state,
			       struct leh_tf_state *tf)
{
  tree protect_cleanup_actions;
  gimple_stmt_iterator gsi;
  bool finally_may_fallthru;
  gimple_seq finally;
  gimple x, eh_else;

  /* First check for nothing to do.  */
  if (lang_hooks.eh_protect_cleanup_actions == NULL)
    return;
  protect_cleanup_actions = lang_hooks.eh_protect_cleanup_actions ();
  if (protect_cleanup_actions == NULL)
    return;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Duplicate the FINALLY block.  Only need to do this for try-finally,
     and not for cleanups.  If we've got an EH_ELSE, extract it now.  */
  if (eh_else)
    {
      finally = gimple_eh_else_e_body (eh_else);
      gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
    }
  else if (this_state)
    finally = lower_try_finally_dup_block (finally, outer_state,
	gimple_location (tf->try_finally_expr));
  finally_may_fallthru = gimple_seq_may_fallthru (finally);

  /* If this cleanup consists of a TRY_CATCH_EXPR with TRY_CATCH_IS_CLEANUP
     set, the handler of the TRY_CATCH_EXPR is another cleanup which ought
     to be in an enclosing scope, but needs to be implemented at this level
     to avoid a nesting violation (see wrap_temporary_cleanups in
     cp/decl.c).  Since it's logically at an outer level, we should call
     terminate before we get to it, so strip it away before adding the
     MUST_NOT_THROW filter.  */
  gsi = gsi_start (finally);
  x = gsi_stmt (gsi);
  if (gimple_code (x) == GIMPLE_TRY
      && gimple_try_kind (x) == GIMPLE_TRY_CATCH
      && gimple_try_catch_is_cleanup (x))
    {
      gsi_insert_seq_before (&gsi, gimple_try_eval (x), GSI_SAME_STMT);
      gsi_remove (&gsi, false);
    }

  /* Wrap the block with protect_cleanup_actions as the action.  */
  x = gimple_build_eh_must_not_throw (protect_cleanup_actions);
  x = gimple_build_try (finally, gimple_seq_alloc_with_stmt (x),
			GIMPLE_TRY_CATCH);
  finally = lower_eh_must_not_throw (outer_state, x);

  /* Drop all of this into the exception sequence.  */
  emit_post_landing_pad (&eh_seq, tf->region);
  gimple_seq_add_seq (&eh_seq, finally);
  if (finally_may_fallthru)
    emit_resx (&eh_seq, tf->region);

  /* Having now been handled, EH isn't to be considered with
     the rest of the outgoing edges.  */
  tf->may_throw = false;
}
/* A subroutine of lower_try_finally.  We have determined that there is
   no fallthru edge out of the finally block.  This means that there is
   no outgoing edge corresponding to any incoming edge.  Restructure the
   try_finally node for this special case.  */
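
/* For instance (illustrative only): a finally block that ends in a call
   to a noreturn function never falls through, so every incoming edge
   (fallthru, goto, return, or exception) can simply be redirected to a
   single label at the head of the lowered finally code.  */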
static void
lower_try_finally_nofallthru (struct leh_state *state,
			      struct leh_tf_state *tf)
{
  tree lab;
  gimple x, eh_else;
  gimple_seq finally;
  struct goto_queue_node *q, *qe;

  lab = create_artificial_label (gimple_location (tf->try_finally_expr));

  /* We expect that tf->top_p is a GIMPLE_TRY.  */
  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  x = gimple_build_label (lab);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  for (; q < qe; ++q)
    if (q->index < 0)
      do_return_redirection (q, lab, NULL);
    else
      do_goto_redirection (q, lab, NULL, tf);

  replace_goto_queue (tf);

  /* Emit the finally block into the stream.  Lower EH_ELSE at this time.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      finally = gimple_eh_else_n_body (eh_else);
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
	{
	  finally = gimple_eh_else_e_body (eh_else);
	  lower_eh_constructs_1 (state, &finally);

	  emit_post_landing_pad (&eh_seq, tf->region);
	  gimple_seq_add_seq (&eh_seq, finally);
	}
    }
  else
    {
      lower_eh_constructs_1 (state, &finally);
      gimple_seq_add_seq (&tf->top_p_seq, finally);

      if (tf->may_throw)
	{
	  emit_post_landing_pad (&eh_seq, tf->region);

	  x = gimple_build_goto (lab);
	  gimple_set_location (x, gimple_location (tf->try_finally_expr));
	  gimple_seq_add_stmt (&eh_seq, x);
	}
    }
}
/* A subroutine of lower_try_finally.  We have determined that there is
   exactly one destination of the finally block.  Restructure the
   try_finally node for this special case.  */
static void
lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  gimple x;
  gimple_seq finally;
  gimple_stmt_iterator gsi;
  tree finally_label;
  location_t loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* Since there's only one destination, and the destination edge can only
     either be EH or non-EH, that implies that all of our incoming edges
     are of the same type.  Therefore we can lower EH_ELSE immediately.  */
  x = get_eh_else (finally);
  if (x)
    {
      if (tf->may_throw)
	finally = gimple_eh_else_e_body (x);
      else
	finally = gimple_eh_else_n_body (x);
    }

  lower_eh_constructs_1 (state, &finally);

  for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      if (LOCATION_LOCUS (gimple_location (stmt)) == UNKNOWN_LOCATION)
	{
	  tree block = gimple_block (stmt);
	  gimple_set_location (stmt, gimple_location (tf->try_finally_expr));
	  gimple_set_block (stmt, block);
	}
    }

  if (tf->may_throw)
    {
      /* Only reachable via the exception edge.  Add the given label to
	 the head of the FINALLY block.  Append a RESX at the end.  */
      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, finally);
      emit_resx (&eh_seq, tf->region);
      return;
    }

  if (tf->may_fallthru)
    {
      /* Only reachable via the fallthru edge.  Do nothing but let
	 the two blocks run together; we'll fall out the bottom.  */
      gimple_seq_add_seq (&tf->top_p_seq, finally);
      return;
    }

  finally_label = create_artificial_label (loc);
  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  gimple_seq_add_seq (&tf->top_p_seq, finally);

  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;

  if (tf->may_return)
    {
      /* Reachable by return expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_return_redirection (q, finally_label, NULL);
      replace_goto_queue (tf);
    }
  else
    {
      /* Reachable by goto expressions only.  Redirect them.  */
      for (; q < qe; ++q)
	do_goto_redirection (q, finally_label, NULL, tf);
      replace_goto_queue (tf);

      if (tf->dest_array[0] == tf->fallthru_label)
	{
	  /* Reachable by goto to fallthru label only.  Redirect it
	     to the new label (already created, sadly), and do not
	     emit the final branch out, or the fallthru label.  */
	  tf->fallthru_label = NULL;
	  return;
	}
    }

  /* Place the original return/goto to the original destination
     immediately after the finally block.  */
  x = tf->goto_queue[0].cont_stmt;
  gimple_seq_add_stmt (&tf->top_p_seq, x);
  maybe_record_in_goto_queue (state, x);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by duplicating the
   finally block for every destination.  */
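
/* Schematically (illustrative only): with two destinations D1 and D2,
   the finally code F is emitted twice, once ending in "goto D1;" and
   once ending in "goto D2;", and each escaping edge is redirected to
   the copy that ends at its original destination.  */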
static void
lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
{
  gimple_seq finally;
  gimple_seq new_stmt;
  gimple_seq seq;
  gimple x, eh_else;
  tree tmp;
  location_t tf_loc = gimple_location (tf->try_finally_expr);

  finally = gimple_try_cleanup (tf->top_p);

  /* Notice EH_ELSE, and simplify some of the remaining code
     by considering FINALLY to be the normal return path only.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    finally = gimple_eh_else_n_body (eh_else);

  tf->top_p_seq = gimple_try_eval (tf->top_p);
  new_stmt = NULL;

  if (tf->may_fallthru)
    {
      seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);
      gimple_seq_add_seq (&new_stmt, seq);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&new_stmt, x);
    }

  if (tf->may_throw)
    {
      /* We don't need to copy the EH path of EH_ELSE,
	 since it is only emitted once.  */
      if (eh_else)
	seq = gimple_eh_else_e_body (eh_else);
      else
	seq = lower_try_finally_dup_block (finally, state, tf_loc);
      lower_eh_constructs_1 (state, &seq);

      emit_post_landing_pad (&eh_seq, tf->region);
      gimple_seq_add_seq (&eh_seq, seq);
      emit_resx (&eh_seq, tf->region);
    }

  if (tf->goto_queue)
    {
      struct goto_queue_node *q, *qe;
      int return_index, index;
      struct labels_s
      {
	struct goto_queue_node *q;
	tree label;
      } *labels;

      return_index = tf->dest_array.length ();
      labels = XCNEWVEC (struct labels_s, return_index + 1);

      q = tf->goto_queue;
      qe = q + tf->goto_queue_active;
      for (; q < qe; q++)
	{
	  index = q->index < 0 ? return_index : q->index;

	  if (!labels[index].q)
	    labels[index].q = q;
	}

      for (index = 0; index < return_index + 1; index++)
	{
	  tree lab;

	  q = labels[index].q;
	  if (! q)
	    continue;

	  lab = labels[index].label
	    = create_artificial_label (tf_loc);

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL);
	  else
	    do_goto_redirection (q, lab, NULL, tf);

	  x = gimple_build_label (lab);
	  gimple_seq_add_stmt (&new_stmt, x);

	  seq = lower_try_finally_dup_block (finally, state, q->location);
	  lower_eh_constructs_1 (state, &seq);
	  gimple_seq_add_seq (&new_stmt, seq);

	  gimple_seq_add_stmt (&new_stmt, q->cont_stmt);
	  maybe_record_in_goto_queue (state, q->cont_stmt);
	}

      for (q = tf->goto_queue; q < qe; q++)
	{
	  tree lab;

	  index = q->index < 0 ? return_index : q->index;

	  if (labels[index].q == q)
	    continue;

	  lab = labels[index].label;

	  if (index == return_index)
	    do_return_redirection (q, lab, NULL);
	  else
	    do_goto_redirection (q, lab, NULL, tf);
	}

      replace_goto_queue (tf);
      free (labels);
    }

  /* Need to link new stmts after running replace_goto_queue due
     to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_seq (&tf->top_p_seq, new_stmt);
}
/* A subroutine of lower_try_finally.  There are multiple edges incoming
   and outgoing from the finally block.  Implement this by instrumenting
   each incoming edge and creating a switch statement at the end of the
   finally block that branches to the appropriate destination.  */
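
/* Schematically (illustrative only): each incoming edge assigns a
   distinct constant to a temporary before entering the single copy of
   the finally code, which ends in

	switch (finally_tmp)
	  {
	  case 0: goto fallthru_label;
	  case 1: return;
	  ...
	  }

   so the finally body is emitted once, at the cost of the switch.  */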
static void
lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
{
  struct goto_queue_node *q, *qe;
  tree finally_tmp, finally_label;
  int return_index, eh_index, fallthru_index;
  int nlabels, ndests, j, last_case_index;
  tree last_case;
  vec<tree> case_label_vec;
  gimple_seq switch_body = NULL;
  gimple x, eh_else;
  tree tmp;
  gimple switch_stmt;
  gimple_seq finally;
  struct pointer_map_t *cont_map = NULL;
  /* The location of the TRY_FINALLY stmt.  */
  location_t tf_loc = gimple_location (tf->try_finally_expr);
  /* The location of the finally block.  */
  location_t finally_loc;

  finally = gimple_try_cleanup (tf->top_p);
  eh_else = get_eh_else (finally);

  /* Mash the TRY block to the head of the chain.  */
  tf->top_p_seq = gimple_try_eval (tf->top_p);

  /* The location of the finally is either the last stmt in the finally
     block or the location of the TRY_FINALLY itself.  */
  x = gimple_seq_last_stmt (finally);
  finally_loc = x ? gimple_location (x) : tf_loc;

  /* Prepare for switch statement generation.  */
  nlabels = tf->dest_array.length ();
  return_index = nlabels;
  eh_index = return_index + tf->may_return;
  fallthru_index = eh_index + (tf->may_throw && !eh_else);
  ndests = fallthru_index + tf->may_fallthru;

  finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
  finally_label = create_artificial_label (finally_loc);

  /* We use vec::quick_push on case_label_vec throughout this function,
     since we know the size in advance and allocate precisely as much
     space as needed.  */
  case_label_vec.create (ndests);
  last_case_index = 0;

  /* Begin inserting code for getting to the finally block.  Things
     are done in this order to correspond to the sequence the code is
     laid out.  */

  if (tf->may_fallthru)
    {
      x = gimple_build_assign (finally_tmp,
			       build_int_cst (integer_type_node,
					      fallthru_index));
      gimple_seq_add_stmt (&tf->top_p_seq, x);

      tmp = build_int_cst (integer_type_node, fallthru_index);
      last_case = build_case_label (tmp, NULL,
				    create_artificial_label (tf_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);

      tmp = lower_try_finally_fallthru_label (tf);
      x = gimple_build_goto (tmp);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&switch_body, x);
    }

  /* For EH_ELSE, emit the exception path (plus resx) now, then
     subsequently we only need consider the normal path.  */
  if (eh_else)
    {
      if (tf->may_throw)
	{
	  finally = gimple_eh_else_e_body (eh_else);
	  lower_eh_constructs_1 (state, &finally);

	  emit_post_landing_pad (&eh_seq, tf->region);
	  gimple_seq_add_seq (&eh_seq, finally);
	  emit_resx (&eh_seq, tf->region);
	}

      finally = gimple_eh_else_n_body (eh_else);
    }
  else if (tf->may_throw)
    {
      emit_post_landing_pad (&eh_seq, tf->region);

      x = gimple_build_assign (finally_tmp,
			       build_int_cst (integer_type_node, eh_index));
      gimple_seq_add_stmt (&eh_seq, x);

      x = gimple_build_goto (finally_label);
      gimple_set_location (x, tf_loc);
      gimple_seq_add_stmt (&eh_seq, x);

      tmp = build_int_cst (integer_type_node, eh_index);
      last_case = build_case_label (tmp, NULL,
				    create_artificial_label (tf_loc));
      case_label_vec.quick_push (last_case);
      last_case_index++;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&eh_seq, x);
      emit_resx (&eh_seq, tf->region);
    }

  x = gimple_build_label (finally_label);
  gimple_seq_add_stmt (&tf->top_p_seq, x);

  lower_eh_constructs_1 (state, &finally);
  gimple_seq_add_seq (&tf->top_p_seq, finally);
  /* Redirect each incoming goto edge.  */
  q = tf->goto_queue;
  qe = q + tf->goto_queue_active;
  j = last_case_index + tf->may_return;
  /* Prepare the assignments to finally_tmp that are executed upon the
     entrance through a particular edge.  */
  for (; q < qe; ++q)
    {
      gimple_seq mod = NULL;
      int switch_id;
      unsigned int case_index;

      if (q->index < 0)
	{
	  x = gimple_build_assign (finally_tmp,
				   build_int_cst (integer_type_node,
						  return_index));
	  gimple_seq_add_stmt (&mod, x);
	  do_return_redirection (q, finally_label, mod);
	  switch_id = return_index;
	}
      else
	{
	  x = gimple_build_assign (finally_tmp,
				   build_int_cst (integer_type_node, q->index));
	  gimple_seq_add_stmt (&mod, x);
	  do_goto_redirection (q, finally_label, mod, tf);
	  switch_id = q->index;
	}

      case_index = j + q->index;
      if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
	{
	  tree case_lab;
	  void **slot;
	  tmp = build_int_cst (integer_type_node, switch_id);
	  case_lab = build_case_label (tmp, NULL,
				       create_artificial_label (tf_loc));
	  /* We store the cont_stmt in the pointer map, so that we can recover
	     it in the loop below.  */
	  if (!cont_map)
	    cont_map = pointer_map_create ();
	  slot = pointer_map_insert (cont_map, case_lab);
	  *slot = q->cont_stmt;
	  case_label_vec.quick_push (case_lab);
	}
    }
  for (j = last_case_index; j < last_case_index + nlabels; j++)
    {
      gimple cont_stmt;
      void **slot;

      last_case = case_label_vec[j];

      gcc_assert (last_case);
      gcc_assert (cont_map);

      slot = pointer_map_contains (cont_map, last_case);
      gcc_assert (slot);
      cont_stmt = *(gimple *) slot;

      x = gimple_build_label (CASE_LABEL (last_case));
      gimple_seq_add_stmt (&switch_body, x);
      gimple_seq_add_stmt (&switch_body, cont_stmt);
      maybe_record_in_goto_queue (state, cont_stmt);
    }
  if (cont_map)
    pointer_map_destroy (cont_map);

  replace_goto_queue (tf);

  /* Make sure that the last case is the default label, as one is required.
     Then sort the labels, which is also required in GIMPLE.  */
  CASE_LOW (last_case) = NULL;
  sort_case_labels (case_label_vec);

  /* Build the switch statement, setting last_case to be the default
     label.  */
  switch_stmt = gimple_build_switch (finally_tmp, last_case,
				     case_label_vec);
  gimple_set_location (switch_stmt, finally_loc);

  /* Need to link SWITCH_STMT after running replace_goto_queue
     due to not wanting to process the same goto stmts twice.  */
  gimple_seq_add_stmt (&tf->top_p_seq, switch_stmt);
  gimple_seq_add_seq (&tf->top_p_seq, switch_body);
}
/* Decide whether or not we are going to duplicate the finally block.
   There are several considerations.

   First, if this is Java, then the finally block contains code
   written by the user.  It has line numbers associated with it,
   so duplicating the block means it's difficult to set a breakpoint.
   Since controlling code generation via -g is verboten, we simply
   never duplicate code without optimization.

   Second, we'd like to prevent egregious code growth.  One way to
   do this is to estimate the size of the finally block, multiply
   that by the number of copies we'd need to make, and compare against
   the estimate of the size of the switch machinery we'd have to add.  */
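
/* For example (illustrative numbers only): a finally block of 10
   instructions with 3 destinations gives f_estimate = (10 + 1) * 3 = 33
   against sw_estimate = 10 + 2 * 3 = 16, so when optimizing for size
   the switch form wins (33 >= 16), while at -O2 the copies are still
   taken since 33 < 100.  */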
static bool
decide_copy_try_finally (int ndests, bool may_throw, gimple_seq finally)
{
  int f_estimate, sw_estimate;
  gimple eh_else;

  /* If there's an EH_ELSE involved, the exception path is separate
     and really doesn't come into play for this computation.  */
  eh_else = get_eh_else (finally);
  if (eh_else)
    {
      ndests -= may_throw;
      finally = gimple_eh_else_n_body (eh_else);
    }

  if (!optimize)
    {
      gimple_stmt_iterator gsi;

      if (ndests == 1)
	return true;

      for (gsi = gsi_start (finally); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  if (!is_gimple_debug (stmt) && !gimple_clobber_p (stmt))
	    return false;
	}
      return true;
    }

  /* Finally estimate N times, plus N gotos.  */
  f_estimate = count_insns_seq (finally, &eni_size_weights);
  f_estimate = (f_estimate + 1) * ndests;

  /* Switch statement (cost 10), N variable assignments, N gotos.  */
  sw_estimate = 10 + 2 * ndests;

  /* Optimize for size clearly wants our best guess.  */
  if (optimize_function_for_size_p (cfun))
    return f_estimate < sw_estimate;

  /* ??? These numbers are completely made up so far.  */
  if (optimize > 1)
    return f_estimate < 100 || f_estimate < sw_estimate * 2;
  else
    return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
}
/* REG is the enclosing region for a possible cleanup region, or the region
   itself.  Returns TRUE if such a region would be unreachable.

   Cleanup regions within a must-not-throw region aren't actually reachable
   even if there are throwing stmts within them, because the personality
   routine will call terminate before unwinding.  */

static bool
cleanup_is_dead_in (eh_region reg)
{
  while (reg && reg->type == ERT_CLEANUP)
    reg = reg->outer;
  return (reg && reg->type == ERT_MUST_NOT_THROW);
}
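
/* E.g. (illustrative only): a cleanup nested, through any chain of
   enclosing cleanups, directly inside a MUST_NOT_THROW region is dead:
   unwinding would invoke terminate before the cleanup could ever run.  */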
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
   to a sequence of labels and blocks, plus the exception region trees
   that record all the magic.  This is complicated by the need to
   arrange for the FINALLY block to be executed on all exits.  */

static gimple_seq
lower_try_finally (struct leh_state *state, gimple tp)
{
  struct leh_tf_state this_tf;
  struct leh_state this_state;
  int ndests;
  gimple_seq old_eh_seq;

  /* Process the try block.  */

  memset (&this_tf, 0, sizeof (this_tf));
  this_tf.try_finally_expr = tp;
  this_tf.top_p = tp;
  this_tf.outer = state;
  if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region))
    {
      this_tf.region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_tf.region;
    }
  else
    {
      this_tf.region = NULL;
      this_state.cur_region = state->cur_region;
    }

  this_state.ehp_region = state->ehp_region;
  this_state.tf = &this_tf;

  old_eh_seq = eh_seq;
  eh_seq = NULL;

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  /* Determine if the try block is escaped through the bottom.  */
  this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));

  /* Determine if any exceptions are possible within the try block.  */
  if (this_tf.region)
    this_tf.may_throw = eh_region_may_contain_throw (this_tf.region);
  if (this_tf.may_throw)
    honor_protect_cleanup_actions (state, &this_state, &this_tf);

  /* Determine how many edges (still) reach the finally block.  Or rather,
     how many destinations are reached by the finally block.  Use this to
     determine how we process the finally block itself.  */

  ndests = this_tf.dest_array.length ();
  ndests += this_tf.may_fallthru;
  ndests += this_tf.may_return;
  ndests += this_tf.may_throw;

  /* If the FINALLY block is not reachable, dike it out.  */
  if (ndests == 0)
    {
      gimple_seq_add_seq (&this_tf.top_p_seq, gimple_try_eval (tp));
      gimple_try_set_cleanup (tp, NULL);
    }
  /* If the finally block doesn't fall through, then any destination
     we might try to impose there isn't reached either.  There may be
     some minor amount of cleanup and redirection still needed.  */
  else if (!gimple_seq_may_fallthru (gimple_try_cleanup (tp)))
    lower_try_finally_nofallthru (state, &this_tf);

  /* We can easily special-case redirection to a single destination.  */
  else if (ndests == 1)
    lower_try_finally_onedest (state, &this_tf);
  else if (decide_copy_try_finally (ndests, this_tf.may_throw,
				    gimple_try_cleanup (tp)))
    lower_try_finally_copy (state, &this_tf);
  else
    lower_try_finally_switch (state, &this_tf);

  /* If someone requested we add a label at the end of the transformed
     sequence, do so.  */
  if (this_tf.fallthru_label)
    {
      /* This must be reached only if ndests == 0.  */
      gimple x = gimple_build_label (this_tf.fallthru_label);
      gimple_seq_add_stmt (&this_tf.top_p_seq, x);
    }

  this_tf.dest_array.release ();
  free (this_tf.goto_queue);
  if (this_tf.goto_queue_map)
    pointer_map_destroy (this_tf.goto_queue_map);

  /* If there was an old (aka outer) eh_seq, append the current eh_seq.
     If there was no old eh_seq, then the append is trivially already done.  */
  if (old_eh_seq)
    {
      if (eh_seq == NULL)
	eh_seq = old_eh_seq;
      else
	{
	  gimple_seq new_eh_seq = eh_seq;
	  eh_seq = old_eh_seq;
	  gimple_seq_add_seq (&eh_seq, new_eh_seq);
	}
    }

  return this_tf.top_p_seq;
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_CATCH with a
   list of GIMPLE_CATCH to a sequence of labels and blocks, plus the
   exception region trees that record all the magic.  */

static gimple_seq
lower_catch (struct leh_state *state, gimple tp)
{
  eh_region try_region = NULL;
  struct leh_state this_state = *state;
  gimple_stmt_iterator gsi;
  tree out_label;
  gimple_seq new_seq, cleanup;
  gimple x;
  location_t try_catch_loc = gimple_location (tp);

  if (flag_exceptions)
    {
      try_region = gen_eh_region_try (state->cur_region);
      this_state.cur_region = try_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (!eh_region_may_contain_throw (try_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  emit_eh_dispatch (&new_seq, try_region);
  emit_resx (&new_seq, try_region);

  this_state.cur_region = state->cur_region;
  this_state.ehp_region = try_region;

  out_label = NULL;
  cleanup = gimple_try_cleanup (tp);
  for (gsi = gsi_start (cleanup);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      eh_catch c;
      gimple gcatch;
      gimple_seq handler;

      gcatch = gsi_stmt (gsi);
      c = gen_eh_region_catch (try_region, gimple_catch_types (gcatch));

      handler = gimple_catch_handler (gcatch);
      lower_eh_constructs_1 (&this_state, &handler);

      c->label = create_artificial_label (UNKNOWN_LOCATION);
      x = gimple_build_label (c->label);
      gimple_seq_add_stmt (&new_seq, x);

      gimple_seq_add_seq (&new_seq, handler);

      if (gimple_seq_may_fallthru (new_seq))
	{
	  if (!out_label)
	    out_label = create_artificial_label (try_catch_loc);

	  x = gimple_build_goto (out_label);
	  gimple_seq_add_stmt (&new_seq, x);
	}
      else
	break;
    }

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, try_region, out_label);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with a
   GIMPLE_EH_FILTER to a sequence of labels and blocks, plus the exception
   region trees that record all the magic.  */

static gimple_seq
lower_eh_filter (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  gimple inner, x;
  gimple_seq new_seq;

  inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));

  if (flag_exceptions)
    {
      this_region = gen_eh_region_allowed (state->cur_region,
					   gimple_eh_filter_types (inner));
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (!eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  new_seq = NULL;
  this_state.cur_region = state->cur_region;
  this_state.ehp_region = this_region;

  emit_eh_dispatch (&new_seq, this_region);
  emit_resx (&new_seq, this_region);

  this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
  x = gimple_build_label (this_region->u.allowed.label);
  gimple_seq_add_stmt (&new_seq, x);

  lower_eh_constructs_1 (&this_state, gimple_eh_filter_failure_ptr (inner));
  gimple_seq_add_seq (&new_seq, gimple_eh_filter_failure (inner));

  gimple_try_set_cleanup (tp, new_seq);

  return frob_into_branch_around (tp, this_region, NULL);
}
/* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY with
   a GIMPLE_EH_MUST_NOT_THROW to a sequence of labels and blocks,
   plus the exception region trees that record all the magic.  */

static gimple_seq
lower_eh_must_not_throw (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;

  if (flag_exceptions)
    {
      gimple inner = gimple_seq_first_stmt (gimple_try_cleanup (tp));
      eh_region this_region;

      this_region = gen_eh_region_must_not_throw (state->cur_region);
      this_region->u.must_not_throw.failure_decl
	= gimple_eh_must_not_throw_fndecl (inner);
      this_region->u.must_not_throw.failure_loc
	= LOCATION_LOCUS (gimple_location (tp));

      /* In order to get mangling applied to this decl, we must mark it
	 used now.  Otherwise, pass_ipa_free_lang_data won't think it
	 needs to be output.  */
      TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;

      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  return gimple_try_eval (tp);
}
/* Implement a cleanup expression.  This is similar to try-finally,
   except that we only execute the cleanup block for exception edges.  */

static gimple_seq
lower_cleanup (struct leh_state *state, gimple tp)
{
  struct leh_state this_state = *state;
  eh_region this_region = NULL;
  struct leh_tf_state fake_tf;
  gimple_seq result;
  bool cleanup_dead = cleanup_is_dead_in (state->cur_region);

  if (flag_exceptions && !cleanup_dead)
    {
      this_region = gen_eh_region_cleanup (state->cur_region);
      this_state.cur_region = this_region;
    }

  lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));

  if (cleanup_dead || !eh_region_may_contain_throw (this_region))
    return gimple_try_eval (tp);

  /* Build enough of a try-finally state so that we can reuse
     honor_protect_cleanup_actions.  */
  memset (&fake_tf, 0, sizeof (fake_tf));
  fake_tf.top_p = fake_tf.try_finally_expr = tp;
  fake_tf.outer = state;
  fake_tf.region = this_region;
  fake_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp));
  fake_tf.may_throw = true;

  honor_protect_cleanup_actions (state, NULL, &fake_tf);

  if (fake_tf.may_throw)
    {
      /* In this case honor_protect_cleanup_actions had nothing to do,
	 and we should process this normally.  */
      lower_eh_constructs_1 (state, gimple_try_cleanup_ptr (tp));
      result = frob_into_branch_around (tp, this_region,
					fake_tf.fallthru_label);
    }
  else
    {
      /* In this case honor_protect_cleanup_actions did nearly all of
	 the work.  All we have left is to append the fallthru_label.  */

      result = gimple_try_eval (tp);
      if (fake_tf.fallthru_label)
	{
	  gimple x = gimple_build_label (fake_tf.fallthru_label);
	  gimple_seq_add_stmt (&result, x);
	}
    }
  return result;
}
/* Main loop for lowering eh constructs.  Also moves gsi to the next
   statement.  */

static void
lower_eh_constructs_2 (struct leh_state *state, gimple_stmt_iterator *gsi)
{
  gimple_seq replace;
  gimple x;
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      {
	tree fndecl = gimple_call_fndecl (stmt);
	tree rhs, lhs;

	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_EH_POINTER:
	      /* The front end may have generated a call to
		 __builtin_eh_pointer (0) within a catch region.  Replace
		 this zero argument with the current catch region number.  */
	      if (state->ehp_region)
		{
		  tree nr = build_int_cst (integer_type_node,
					   state->ehp_region->index);
		  gimple_call_set_arg (stmt, 0, nr);
		}
	      else
		{
		  /* The user has done something silly.  Remove it.  */
		  rhs = null_pointer_node;
		  goto do_replace;
		}
	      break;

	    case BUILT_IN_EH_FILTER:
	      /* ??? This should never appear, but since it's a builtin it
		 is accessible to abuse by users.  Just remove it and
		 replace the use with the arbitrary value zero.  */
	      rhs = build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
	    do_replace:
	      lhs = gimple_call_lhs (stmt);
	      x = gimple_build_assign (lhs, rhs);
	      gsi_insert_before (gsi, x, GSI_SAME_STMT);
	      /* fallthrough */

	    case BUILT_IN_EH_COPY_VALUES:
	      /* Likewise this should not appear.  Remove it.  */
	      gsi_remove (gsi, true);
	      return;

	    default:
	      break;
	    }
      }
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* If the stmt can throw use a new temporary for the assignment
	 to a LHS.  This makes sure the old value of the LHS is
	 available on the EH edge.  Only do so for statements that
	 potentially fall through (no noreturn calls e.g.), otherwise
	 this new assignment might create fake fallthru regions.  */
      if (stmt_could_throw_p (stmt)
	  && gimple_has_lhs (stmt)
	  && gimple_stmt_may_fallthru (stmt)
	  && !tree_could_throw_p (gimple_get_lhs (stmt))
	  && is_gimple_reg_type (TREE_TYPE (gimple_get_lhs (stmt))))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  tree tmp = create_tmp_var (TREE_TYPE (lhs), NULL);
	  gimple s = gimple_build_assign (lhs, tmp);
	  gimple_set_location (s, gimple_location (stmt));
	  gimple_set_block (s, gimple_block (stmt));
	  gimple_set_lhs (stmt, tmp);
	  if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
	      || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
	    DECL_GIMPLE_REG_P (tmp) = 1;
	  gsi_insert_after (gsi, s, GSI_SAME_STMT);
	}
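      /* E.g. (illustrative only): "x = y / z;" where the division may
	 trap becomes "tmp = y / z; x = tmp;", so that X keeps its old
	 value along the exception edge.  */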
      /* Look for things that can throw exceptions, and record them.  */
      if (state->cur_region && stmt_could_throw_p (stmt))
	{
	  record_stmt_eh_region (state->cur_region, stmt);
	  note_eh_region_may_contain_throw (state->cur_region);
	}
      break;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
      maybe_record_in_goto_queue (state, stmt);
      break;

    case GIMPLE_SWITCH:
      verify_norecord_switch_expr (state, stmt);
      break;

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	replace = lower_try_finally (state, stmt);
      else
	{
	  x = gimple_seq_first_stmt (gimple_try_cleanup (stmt));
	  if (!x)
	    {
	      replace = gimple_try_eval (stmt);
	      lower_eh_constructs_1 (state, &replace);
	    }
	  else
	    switch (gimple_code (x))
	      {
	      case GIMPLE_CATCH:
		replace = lower_catch (state, stmt);
		break;
	      case GIMPLE_EH_FILTER:
		replace = lower_eh_filter (state, stmt);
		break;
	      case GIMPLE_EH_MUST_NOT_THROW:
		replace = lower_eh_must_not_throw (state, stmt);
		break;
	      case GIMPLE_EH_ELSE:
		/* This code is only valid with GIMPLE_TRY_FINALLY.  */
		gcc_unreachable ();
	      default:
		replace = lower_cleanup (state, stmt);
		break;
	      }
	}
      /* Remove the old stmt and insert the transformed sequence
	 instead.  */
      gsi_insert_seq_before (gsi, replace, GSI_SAME_STMT);
      gsi_remove (gsi, true);

      /* Return since we don't want gsi_next ()  */
      return;

    case GIMPLE_EH_ELSE:
      /* We should be eliminating this in lower_try_finally et al.  */
      gcc_unreachable ();

    default:
      /* A type, a decl, or some kind of statement that we're not
	 interested in.  Don't walk them.  */
      break;
    }

  gsi_next (gsi);
}
/* A helper to unwrap a gimple_seq and feed stmts to lower_eh_constructs_2.  */

static void
lower_eh_constructs_1 (struct leh_state *state, gimple_seq *pseq)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*pseq); !gsi_end_p (gsi);)
    lower_eh_constructs_2 (state, &gsi);
}
static unsigned int
lower_eh_constructs (void)
{
  struct leh_state null_state;
  gimple_seq bodyp;

  bodyp = gimple_body (current_function_decl);
  if (bodyp == NULL)
    return 0;

  finally_tree.create (31);
  eh_region_may_contain_throw_map = BITMAP_ALLOC (NULL);
  memset (&null_state, 0, sizeof (null_state));

  collect_finally_tree_1 (bodyp, NULL);
  lower_eh_constructs_1 (&null_state, &bodyp);
  gimple_set_body (current_function_decl, bodyp);

  /* We assume there's a return statement, or something, at the end of
     the function, and thus plopping the EH sequence afterward won't
     change anything.  */
  gcc_assert (!gimple_seq_may_fallthru (bodyp));
  gimple_seq_add_seq (&bodyp, eh_seq);

  /* We assume that since BODYP already existed, adding EH_SEQ to it
     didn't change its value, and we don't have to re-set the function.  */
  gcc_assert (bodyp == gimple_body (current_function_decl));

  finally_tree.dispose ();
  BITMAP_FREE (eh_region_may_contain_throw_map);
  eh_seq = NULL;

  /* If this function needs a language specific EH personality routine
     and the frontend didn't already set one do so now.  */
  if (function_needs_eh_personality (cfun) == eh_personality_lang
      && !DECL_FUNCTION_PERSONALITY (current_function_decl))
    DECL_FUNCTION_PERSONALITY (current_function_decl)
      = lang_hooks.eh_personality ();

  return 0;
}
namespace {

const pass_data pass_data_lower_eh =
{
  GIMPLE_PASS, /* type */
  "eh", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_execute */
  TV_TREE_EH, /* tv_id */
  PROP_gimple_lcf, /* properties_required */
  PROP_gimple_leh, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_eh : public gimple_opt_pass
{
public:
  pass_lower_eh (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_eh, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () { return lower_eh_constructs (); }

}; // class pass_lower_eh

} // anon namespace

gimple_opt_pass *
make_pass_lower_eh (gcc::context *ctxt)
{
  return new pass_lower_eh (ctxt);
}
/* Create the multiple edges from an EH_DISPATCH statement to all of
   the possible handlers for its EH region.  Return true if there's
   no fallthru edge; false if there is.  */

bool
make_eh_dispatch_edges (gimple stmt)
{
  eh_region r;
  eh_catch c;
  basic_block src, dst;

  r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
  src = gimple_bb (stmt);

  switch (r->type)
    {
    case ERT_TRY:
      for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	{
	  dst = label_to_block (c->label);
	  make_edge (src, dst, 0);

	  /* A catch-all handler doesn't have a fallthru.  */
	  if (c->type_list == NULL)
	    return false;
	}
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      dst = label_to_block (r->u.allowed.label);
      make_edge (src, dst, 0);
      break;

    default:
      gcc_unreachable ();
    }

  return true;
}
/* Create the single EH edge from STMT to its nearest landing pad,
   if there is such a landing pad within the current function.  */

void
make_eh_edges (gimple stmt)
{
  basic_block src, dst;
  eh_landing_pad lp;
  int lp_nr;

  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr <= 0)
    return;

  lp = get_eh_landing_pad_from_number (lp_nr);
  gcc_assert (lp != NULL);

  src = gimple_bb (stmt);
  dst = label_to_block (lp->post_landing_pad);
  make_edge (src, dst, EDGE_EH);
}
2264 /* Do the work in redirecting EDGE_IN to NEW_BB within the EH region tree;
2265 do not actually perform the final edge redirection.
2267 CHANGE_REGION is true when we're being called from cleanup_empty_eh and
2268 we intend to change the destination EH region as well; this means
2269 EH_LANDING_PAD_NR must already be set on the destination block label.
2270 If false, we're being called from generic cfg manipulation code and we
2271 should preserve our place within the region tree. */
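/* A sketch of the two modes: generic CFG code reaches this (via
   redirect_eh_edge) with CHANGE_REGION false, so the edge keeps its EH
   region and NEW_BB gains a landing pad in that region if it lacks one;
   cleanup_empty_eh passes true only after tagging NEW_BB's label with
   the destination region's EH_LANDING_PAD_NR.  */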
2274 redirect_eh_edge_1 (edge edge_in, basic_block new_bb, bool change_region)
2276 eh_landing_pad old_lp, new_lp;
2279 int old_lp_nr, new_lp_nr;
2280 tree old_label, new_label;
2284 old_bb = edge_in->dest;
2285 old_label = gimple_block_label (old_bb);
2286 old_lp_nr = EH_LANDING_PAD_NR (old_label);
2287 gcc_assert (old_lp_nr > 0);
2288 old_lp = get_eh_landing_pad_from_number (old_lp_nr);
2290 throw_stmt = last_stmt (edge_in->src);
2291 gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
2293 new_label = gimple_block_label (new_bb);
2295 /* Look for an existing region that might be using NEW_BB already. */
2296 new_lp_nr = EH_LANDING_PAD_NR (new_label);
2299 new_lp = get_eh_landing_pad_from_number (new_lp_nr);
2300 gcc_assert (new_lp);
2302 /* Unless CHANGE_REGION is true, the new and old landing pad
2303 had better be associated with the same EH region. */
2304 gcc_assert (change_region || new_lp->region == old_lp->region);
2309 gcc_assert (!change_region);
2312 /* Notice when we redirect the last EH edge away from OLD_BB. */
2313 FOR_EACH_EDGE (e, ei, old_bb->preds)
2314 if (e != edge_in && (e->flags & EDGE_EH))
2319 /* NEW_LP already exists. If there are still edges into OLD_LP,
2320 there's nothing to do with the EH tree. If there are no more
2321 edges into OLD_LP, then we want to remove OLD_LP as it is unused.
2322 If CHANGE_REGION is true, then our caller is expecting to remove
2324 if (e == NULL && !change_region)
2325 remove_eh_landing_pad (old_lp);
2329 /* No correct landing pad exists. If there are no more edges
2330 into OLD_LP, then we can simply re-use the existing landing pad.
2331 Otherwise, we have to create a new landing pad. */
2334 EH_LANDING_PAD_NR (old_lp->post_landing_pad) = 0;
2338 new_lp = gen_eh_landing_pad (old_lp->region);
2339 new_lp->post_landing_pad = new_label;
2340 EH_LANDING_PAD_NR (new_label) = new_lp->index;
2343 /* Maybe move the throwing statement to the new region. */
2344 if (old_lp != new_lp)
2346 remove_stmt_from_eh_lp (throw_stmt);
2347 add_stmt_to_eh_lp (throw_stmt, new_lp->index);
2351 /* Redirect EH edge E to NEW_BB. */
2354 redirect_eh_edge (edge edge_in, basic_block new_bb)
2356 redirect_eh_edge_1 (edge_in, new_bb, false);
2357 return ssa_redirect_edge (edge_in, new_bb);
2360 /* This is a subroutine of gimple_redirect_edge_and_branch. Update the
2361 labels for redirecting a non-fallthru EH_DISPATCH edge E to NEW_BB.
2362 The actual edge update will happen in the caller. */
2365 redirect_eh_dispatch_edge (gimple stmt, edge e, basic_block new_bb)
2367 tree new_lab = gimple_block_label (new_bb);
2368 bool any_changed = false;
2373 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
2377 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
2379 old_bb = label_to_block (c->label);
2380 if (old_bb == e->dest)
2388 case ERT_ALLOWED_EXCEPTIONS:
2389 old_bb = label_to_block (r->u.allowed.label);
2390 gcc_assert (old_bb == e->dest);
2391 r->u.allowed.label = new_lab;
2399 gcc_assert (any_changed);
2402 /* Helper function for operation_could_trap_p and stmt_could_throw_p. */
2405 operation_could_trap_helper_p (enum tree_code op,
2416 case TRUNC_DIV_EXPR:
2418 case FLOOR_DIV_EXPR:
2419 case ROUND_DIV_EXPR:
2420 case EXACT_DIV_EXPR:
2422 case FLOOR_MOD_EXPR:
2423 case ROUND_MOD_EXPR:
2424 case TRUNC_MOD_EXPR:
if (honor_snans || honor_trapv)
  return true;
if (fp_operation)
  return flag_trapping_math;
2430 if (!TREE_CONSTANT (divisor) || integer_zerop (divisor))
2439 /* Some floating point comparisons may trap. */
2444 case UNORDERED_EXPR:
2454 case FIX_TRUNC_EXPR:
2455 /* Conversion of floating point might trap. */
2461 /* These operations don't trap with floating point. */
2469 /* Any floating arithmetic may trap. */
2470 if (fp_operation && flag_trapping_math)
2478 /* Constructing an object cannot trap. */
2482 /* Any floating arithmetic may trap. */
2483 if (fp_operation && flag_trapping_math)
2491 /* Return true if operation OP may trap. FP_OPERATION is true if OP is applied
2492 on floating-point values. HONOR_TRAPV is true if OP is applied on integer
2493 type operands that may trap. If OP is a division operator, DIVISOR contains
2494 the value of the divisor. */
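/* For instance, a hypothetical caller checking an integer division
   (a sketch; DIVISOR stands for the second operand):

     if (operation_could_trap_p (TRUNC_DIV_EXPR, false, false, divisor))
       ...

   gets true unless DIVISOR is a constant known to be nonzero, per the
   TREE_CONSTANT / integer_zerop test in operation_could_trap_helper_p.  */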
2497 operation_could_trap_p (enum tree_code op, bool fp_operation, bool honor_trapv,
2500 bool honor_nans = (fp_operation && flag_trapping_math
2501 && !flag_finite_math_only);
2502 bool honor_snans = fp_operation && flag_signaling_nans != 0;
2505 if (TREE_CODE_CLASS (op) != tcc_comparison
2506 && TREE_CODE_CLASS (op) != tcc_unary
2507 && TREE_CODE_CLASS (op) != tcc_binary)
2510 return operation_could_trap_helper_p (op, fp_operation, honor_trapv,
2511 honor_nans, honor_snans, divisor,
2516 /* Returns true if it is possible to prove that the index of
an array access REF (an ARRAY_REF expression) falls into the
array bounds.  */
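/* For example (a sketch): given int a[10], the constant access a[3]
   is provably in bounds, a[12] is not, and a[i] with a non-constant
   index fails the INTEGER_CST check and is conservatively rejected.  */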
2521 in_array_bounds_p (tree ref)
2523 tree idx = TREE_OPERAND (ref, 1);
2526 if (TREE_CODE (idx) != INTEGER_CST)
2529 min = array_ref_low_bound (ref);
2530 max = array_ref_up_bound (ref);
2533 || TREE_CODE (min) != INTEGER_CST
2534 || TREE_CODE (max) != INTEGER_CST)
2537 if (tree_int_cst_lt (idx, min)
2538 || tree_int_cst_lt (max, idx))
2544 /* Returns true if it is possible to prove that the range of
2545 an array access REF (an ARRAY_RANGE_REF expression) falls
2546 into the array bounds. */
2549 range_in_array_bounds_p (tree ref)
2551 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ref));
2552 tree range_min, range_max, min, max;
2554 range_min = TYPE_MIN_VALUE (domain_type);
2555 range_max = TYPE_MAX_VALUE (domain_type);
2558 || TREE_CODE (range_min) != INTEGER_CST
2559 || TREE_CODE (range_max) != INTEGER_CST)
2562 min = array_ref_low_bound (ref);
2563 max = array_ref_up_bound (ref);
2566 || TREE_CODE (min) != INTEGER_CST
2567 || TREE_CODE (max) != INTEGER_CST)
2570 if (tree_int_cst_lt (range_min, min)
2571 || tree_int_cst_lt (max, range_max))
2577 /* Return true if EXPR can trap, as in dereferencing an invalid pointer
location or floating point arithmetic.  Cf. the rtl version, may_trap_p.
2579 This routine expects only GIMPLE lhs or rhs input. */
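/* For example (a sketch): a dereference *p of an arbitrary pointer may
   trap, while an access a[3] into int a[10] provably cannot; accesses
   to weak decls are conservatively assumed to trap unless the decl is
   known to be defined.  */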
2582 tree_could_trap_p (tree expr)
2584 enum tree_code code;
2585 bool fp_operation = false;
2586 bool honor_trapv = false;
2587 tree t, base, div = NULL_TREE;
2592 code = TREE_CODE (expr);
2593 t = TREE_TYPE (expr);
2597 if (COMPARISON_CLASS_P (expr))
2598 fp_operation = FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)));
2600 fp_operation = FLOAT_TYPE_P (t);
2601 honor_trapv = INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t);
2604 if (TREE_CODE_CLASS (code) == tcc_binary)
2605 div = TREE_OPERAND (expr, 1);
2606 if (operation_could_trap_p (code, fp_operation, honor_trapv, div))
2616 case VIEW_CONVERT_EXPR:
2617 case WITH_SIZE_EXPR:
2618 expr = TREE_OPERAND (expr, 0);
2619 code = TREE_CODE (expr);
2622 case ARRAY_RANGE_REF:
2623 base = TREE_OPERAND (expr, 0);
2624 if (tree_could_trap_p (base))
2626 if (TREE_THIS_NOTRAP (expr))
2628 return !range_in_array_bounds_p (expr);
2631 base = TREE_OPERAND (expr, 0);
2632 if (tree_could_trap_p (base))
2634 if (TREE_THIS_NOTRAP (expr))
2636 return !in_array_bounds_p (expr);
2638 case TARGET_MEM_REF:
2640 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
2641 && tree_could_trap_p (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
2643 if (TREE_THIS_NOTRAP (expr))
2645 /* We cannot prove that the access is in-bounds when we have
2646 variable-index TARGET_MEM_REFs. */
2647 if (code == TARGET_MEM_REF
2648 && (TMR_INDEX (expr) || TMR_INDEX2 (expr)))
2650 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
2652 tree base = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
2653 double_int off = mem_ref_offset (expr);
2654 if (off.is_negative ())
2656 if (TREE_CODE (base) == STRING_CST)
2657 return double_int::from_uhwi (TREE_STRING_LENGTH (base)).ule (off);
2658 else if (DECL_SIZE_UNIT (base) == NULL_TREE
2659 || TREE_CODE (DECL_SIZE_UNIT (base)) != INTEGER_CST
2660 || tree_to_double_int (DECL_SIZE_UNIT (base)).ule (off))
/* Now we are sure the first byte of the access is inside
   the object.  */
2669 return !TREE_THIS_NOTRAP (expr);
2672 return TREE_THIS_VOLATILE (expr);
2675 t = get_callee_fndecl (expr);
2676 /* Assume that calls to weak functions may trap. */
2677 if (!t || !DECL_P (t))
2680 return tree_could_trap_p (t);
2684 /* Assume that accesses to weak functions may trap, unless we know
they are certainly defined in current TU or in some other
LTO partition.  */
2687 if (DECL_WEAK (expr) && !DECL_COMDAT (expr))
2689 struct cgraph_node *node;
2690 if (!DECL_EXTERNAL (expr))
2692 node = cgraph_function_node (cgraph_get_node (expr), NULL);
2693 if (node && node->in_other_partition)
2700 /* Assume that accesses to weak vars may trap, unless we know
they are certainly defined in current TU or in some other
LTO partition.  */
2703 if (DECL_WEAK (expr) && !DECL_COMDAT (expr))
2706 if (!DECL_EXTERNAL (expr))
2708 node = varpool_variable_node (varpool_get_node (expr), NULL);
2709 if (node && node->in_other_partition)
/* Helper for stmt_could_throw_p.  Return true if STMT (assumed to be
2722 an assignment or a conditional) may throw. */
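/* For example (a sketch): with -fnon-call-exceptions, a floating-point
   division may throw when trapping math is honored, and an integer
   division may throw unless the divisor is a constant known to be
   nonzero.  */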
2725 stmt_could_throw_1_p (gimple stmt)
2727 enum tree_code code = gimple_expr_code (stmt);
2728 bool honor_nans = false;
2729 bool honor_snans = false;
2730 bool fp_operation = false;
2731 bool honor_trapv = false;
2736 if (TREE_CODE_CLASS (code) == tcc_comparison
2737 || TREE_CODE_CLASS (code) == tcc_unary
2738 || TREE_CODE_CLASS (code) == tcc_binary)
2740 if (is_gimple_assign (stmt)
2741 && TREE_CODE_CLASS (code) == tcc_comparison)
2742 t = TREE_TYPE (gimple_assign_rhs1 (stmt));
2743 else if (gimple_code (stmt) == GIMPLE_COND)
2744 t = TREE_TYPE (gimple_cond_lhs (stmt));
2746 t = gimple_expr_type (stmt);
2747 fp_operation = FLOAT_TYPE_P (t);
2750 honor_nans = flag_trapping_math && !flag_finite_math_only;
2751 honor_snans = flag_signaling_nans != 0;
2753 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
2757 /* Check if the main expression may trap. */
2758 t = is_gimple_assign (stmt) ? gimple_assign_rhs2 (stmt) : NULL;
2759 ret = operation_could_trap_helper_p (code, fp_operation, honor_trapv,
2760 honor_nans, honor_snans, t,
/* If the expression does not trap, see if any of the individual operands may
   trap.  */
2767 for (i = 0; i < gimple_num_ops (stmt); i++)
2768 if (tree_could_trap_p (gimple_op (stmt, i)))
2775 /* Return true if statement STMT could throw an exception. */
2778 stmt_could_throw_p (gimple stmt)
2780 if (!flag_exceptions)
2783 /* The only statements that can throw an exception are assignments,
2784 conditionals, calls, resx, and asms. */
2785 switch (gimple_code (stmt))
2791 return !gimple_call_nothrow_p (stmt);
2795 if (!cfun->can_throw_non_call_exceptions)
2797 return stmt_could_throw_1_p (stmt);
2800 if (!cfun->can_throw_non_call_exceptions)
2802 return gimple_asm_volatile_p (stmt);
2810 /* Return true if expression T could throw an exception. */
2813 tree_could_throw_p (tree t)
2815 if (!flag_exceptions)
2817 if (TREE_CODE (t) == MODIFY_EXPR)
2819 if (cfun->can_throw_non_call_exceptions
2820 && tree_could_trap_p (TREE_OPERAND (t, 0)))
2822 t = TREE_OPERAND (t, 1);
2825 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2826 t = TREE_OPERAND (t, 0);
2827 if (TREE_CODE (t) == CALL_EXPR)
2828 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2829 if (cfun->can_throw_non_call_exceptions)
2830 return tree_could_trap_p (t);
2834 /* Return true if STMT can throw an exception that is not caught within
2835 the current function (CFUN). */
2838 stmt_can_throw_external (gimple stmt)
2842 if (!stmt_could_throw_p (stmt))
2845 lp_nr = lookup_stmt_eh_lp (stmt);
2849 /* Return true if STMT can throw an exception that is caught within
2850 the current function (CFUN). */
2853 stmt_can_throw_internal (gimple stmt)
2857 if (!stmt_could_throw_p (stmt))
2860 lp_nr = lookup_stmt_eh_lp (stmt);
2864 /* Given a statement STMT in IFUN, if STMT can no longer throw, then
2865 remove any entry it might have from the EH table. Return true if
2866 any change was made. */
2869 maybe_clean_eh_stmt_fn (struct function *ifun, gimple stmt)
2871 if (stmt_could_throw_p (stmt))
2873 return remove_stmt_from_eh_lp_fn (ifun, stmt);
2876 /* Likewise, but always use the current function. */
2879 maybe_clean_eh_stmt (gimple stmt)
2881 return maybe_clean_eh_stmt_fn (cfun, stmt);
2884 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2885 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2886 in the table if it should be in there. Return TRUE if a replacement was
done that may require an EH edge purge.  */
2890 maybe_clean_or_replace_eh_stmt (gimple old_stmt, gimple new_stmt)
2892 int lp_nr = lookup_stmt_eh_lp (old_stmt);
2896 bool new_stmt_could_throw = stmt_could_throw_p (new_stmt);
2898 if (new_stmt == old_stmt && new_stmt_could_throw)
2901 remove_stmt_from_eh_lp (old_stmt);
2902 if (new_stmt_could_throw)
2904 add_stmt_to_eh_lp (new_stmt, lp_nr);
2914 /* Given a statement OLD_STMT in OLD_FUN and a duplicate statement NEW_STMT
2915 in NEW_FUN, copy the EH table data from OLD_STMT to NEW_STMT. The MAP
2916 operand is the return value of duplicate_eh_regions. */
2919 maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
2920 struct function *old_fun, gimple old_stmt,
2921 struct pointer_map_t *map, int default_lp_nr)
2923 int old_lp_nr, new_lp_nr;
2926 if (!stmt_could_throw_p (new_stmt))
2929 old_lp_nr = lookup_stmt_eh_lp_fn (old_fun, old_stmt);
2932 if (default_lp_nr == 0)
2934 new_lp_nr = default_lp_nr;
2936 else if (old_lp_nr > 0)
2938 eh_landing_pad old_lp, new_lp;
2940 old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
2941 slot = pointer_map_contains (map, old_lp);
2942 new_lp = (eh_landing_pad) *slot;
2943 new_lp_nr = new_lp->index;
2947 eh_region old_r, new_r;
2949 old_r = (*old_fun->eh->region_array)[-old_lp_nr];
2950 slot = pointer_map_contains (map, old_r);
2951 new_r = (eh_region) *slot;
2952 new_lp_nr = -new_r->index;
2955 add_stmt_to_eh_lp_fn (new_fun, new_stmt, new_lp_nr);
2959 /* Similar, but both OLD_STMT and NEW_STMT are within the current function,
2960 and thus no remapping is required. */
2963 maybe_duplicate_eh_stmt (gimple new_stmt, gimple old_stmt)
2967 if (!stmt_could_throw_p (new_stmt))
2970 lp_nr = lookup_stmt_eh_lp (old_stmt);
2974 add_stmt_to_eh_lp (new_stmt, lp_nr);
2978 /* Returns TRUE if oneh and twoh are exception handlers (gimple_try_cleanup of
2979 GIMPLE_TRY) that are similar enough to be considered the same. Currently
2980 this only handles handlers consisting of a single call, as that's the
2981 important case for C++: a destructor call for a particular object showing
2982 up in multiple handlers. */
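/* For instance (a sketch): two cleanup sequences each consisting of the
   single call  A::~A (&a)  with the same target and arguments compare
   equal here, which is what enables the transformation done by
   optimize_double_finally below.  */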
2985 same_handler_p (gimple_seq oneh, gimple_seq twoh)
2987 gimple_stmt_iterator gsi;
2991 gsi = gsi_start (oneh);
2992 if (!gsi_one_before_end_p (gsi))
2994 ones = gsi_stmt (gsi);
2996 gsi = gsi_start (twoh);
2997 if (!gsi_one_before_end_p (gsi))
2999 twos = gsi_stmt (gsi);
3001 if (!is_gimple_call (ones)
3002 || !is_gimple_call (twos)
3003 || gimple_call_lhs (ones)
3004 || gimple_call_lhs (twos)
3005 || gimple_call_chain (ones)
3006 || gimple_call_chain (twos)
3007 || !gimple_call_same_target_p (ones, twos)
3008 || gimple_call_num_args (ones) != gimple_call_num_args (twos))
3011 for (ai = 0; ai < gimple_call_num_args (ones); ++ai)
3012 if (!operand_equal_p (gimple_call_arg (ones, ai),
3013 gimple_call_arg (twos, ai), 0))
/* Optimize
    try { A() } finally { try { ~B() } catch { ~A() } }
    try { ... } finally { ~A() }
   into
    try { A() } catch { ~B() }
    try { ~B() ... } finally { ~A() }
3026 This occurs frequently in C++, where A is a local variable and B is a
3027 temporary used in the initializer for A. */
3030 optimize_double_finally (gimple one, gimple two)
3033 gimple_stmt_iterator gsi;
3036 cleanup = gimple_try_cleanup (one);
3037 gsi = gsi_start (cleanup);
3038 if (!gsi_one_before_end_p (gsi))
3041 oneh = gsi_stmt (gsi);
3042 if (gimple_code (oneh) != GIMPLE_TRY
3043 || gimple_try_kind (oneh) != GIMPLE_TRY_CATCH)
3046 if (same_handler_p (gimple_try_cleanup (oneh), gimple_try_cleanup (two)))
3048 gimple_seq seq = gimple_try_eval (oneh);
3050 gimple_try_set_cleanup (one, seq);
3051 gimple_try_set_kind (one, GIMPLE_TRY_CATCH);
3052 seq = copy_gimple_seq_and_replace_locals (seq);
3053 gimple_seq_add_seq (&seq, gimple_try_eval (two));
3054 gimple_try_set_eval (two, seq);
3058 /* Perform EH refactoring optimizations that are simpler to do when code
3059 flow has been lowered but EH structures haven't. */
3062 refactor_eh_r (gimple_seq seq)
3064 gimple_stmt_iterator gsi;
3069 gsi = gsi_start (seq);
3073 if (gsi_end_p (gsi))
3076 two = gsi_stmt (gsi);
3079 && gimple_code (one) == GIMPLE_TRY
3080 && gimple_code (two) == GIMPLE_TRY
3081 && gimple_try_kind (one) == GIMPLE_TRY_FINALLY
3082 && gimple_try_kind (two) == GIMPLE_TRY_FINALLY)
3083 optimize_double_finally (one, two);
3085 switch (gimple_code (one))
3088 refactor_eh_r (gimple_try_eval (one));
3089 refactor_eh_r (gimple_try_cleanup (one));
3092 refactor_eh_r (gimple_catch_handler (one));
3094 case GIMPLE_EH_FILTER:
3095 refactor_eh_r (gimple_eh_filter_failure (one));
3097 case GIMPLE_EH_ELSE:
3098 refactor_eh_r (gimple_eh_else_n_body (one));
3099 refactor_eh_r (gimple_eh_else_e_body (one));
3114 refactor_eh_r (gimple_body (current_function_decl));
3120 const pass_data pass_data_refactor_eh =
GIMPLE_PASS, /* type */
"ehopt", /* name */
3124 OPTGROUP_NONE, /* optinfo_flags */
3125 true, /* has_execute */
3126 TV_TREE_EH, /* tv_id */
3127 PROP_gimple_lcf, /* properties_required */
3128 0, /* properties_provided */
3129 0, /* properties_destroyed */
3130 0, /* todo_flags_start */
3131 0, /* todo_flags_finish */
3134 class pass_refactor_eh : public gimple_opt_pass
3137 pass_refactor_eh (gcc::context *ctxt)
3138 : gimple_opt_pass (pass_data_refactor_eh, ctxt)
3141 /* opt_pass methods: */
3142 virtual bool gate (function *) { return flag_exceptions != 0; }
3143 unsigned int execute () { return refactor_eh (); }
3145 }; // class pass_refactor_eh
3150 make_pass_refactor_eh (gcc::context *ctxt)
3152 return new pass_refactor_eh (ctxt);
3155 /* At the end of gimple optimization, we can lower RESX. */
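/* A sketch of the generated GIMPLE: a resx that transfers to another
   region within the current function typically becomes

       __builtin_eh_copy_values (dst_region_nr, src_region_nr);

   followed by a fallthru edge to the post-landing-pad block, while a
   resx that escapes the function becomes

       ptr_tmp = __builtin_eh_pointer (src_region_nr);
       _Unwind_Resume (ptr_tmp);

   (or __cxa_end_cleanup on ARM EABI targets).  PTR_TMP is just an
   illustrative name for the temporary created below.  */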
3158 lower_resx (basic_block bb, gimple stmt, struct pointer_map_t *mnt_map)
3161 eh_region src_r, dst_r;
3162 gimple_stmt_iterator gsi;
3167 lp_nr = lookup_stmt_eh_lp (stmt);
3169 dst_r = get_eh_region_from_lp_number (lp_nr);
3173 src_r = get_eh_region_from_number (gimple_resx_region (stmt));
3174 gsi = gsi_last_bb (bb);
3178 /* We can wind up with no source region when pass_cleanup_eh shows
3179 that there are no entries into an eh region and deletes it, but
3180 then the block that contains the resx isn't removed. This can
3181 happen without optimization when the switch statement created by
3182 lower_try_finally_switch isn't simplified to remove the eh case.
3184 Resolve this by expanding the resx node to an abort. */
3186 fn = builtin_decl_implicit (BUILT_IN_TRAP);
3187 x = gimple_build_call (fn, 0);
3188 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3190 while (EDGE_COUNT (bb->succs) > 0)
3191 remove_edge (EDGE_SUCC (bb, 0));
3195 /* When we have a destination region, we resolve this by copying
3196 the excptr and filter values into place, and changing the edge
3197 to immediately after the landing pad. */
/* We are resuming into a MUST_NOT_THROW region.  Expand a call to
   the failure decl into a new block, if needed.  */
3208 gcc_assert (dst_r->type == ERT_MUST_NOT_THROW);
3210 slot = pointer_map_contains (mnt_map, dst_r);
3213 gimple_stmt_iterator gsi2;
3215 new_bb = create_empty_bb (bb);
3217 add_bb_to_loop (new_bb, bb->loop_father);
3218 lab = gimple_block_label (new_bb);
3219 gsi2 = gsi_start_bb (new_bb);
3221 fn = dst_r->u.must_not_throw.failure_decl;
3222 x = gimple_build_call (fn, 0);
3223 gimple_set_location (x, dst_r->u.must_not_throw.failure_loc);
3224 gsi_insert_after (&gsi2, x, GSI_CONTINUE_LINKING);
3226 slot = pointer_map_insert (mnt_map, dst_r);
3232 new_bb = label_to_block (lab);
3235 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3236 e = make_edge (bb, new_bb, EDGE_FALLTHRU);
3237 e->count = bb->count;
3238 e->probability = REG_BR_PROB_BASE;
3243 tree dst_nr = build_int_cst (integer_type_node, dst_r->index);
3245 fn = builtin_decl_implicit (BUILT_IN_EH_COPY_VALUES);
3246 src_nr = build_int_cst (integer_type_node, src_r->index);
3247 x = gimple_build_call (fn, 2, dst_nr, src_nr);
3248 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3250 /* Update the flags for the outgoing edge. */
3251 e = single_succ_edge (bb);
3252 gcc_assert (e->flags & EDGE_EH);
3253 e->flags = (e->flags & ~EDGE_EH) | EDGE_FALLTHRU;
3255 /* If there are no more EH users of the landing pad, delete it. */
3256 FOR_EACH_EDGE (e, ei, e->dest->preds)
3257 if (e->flags & EDGE_EH)
3261 eh_landing_pad lp = get_eh_landing_pad_from_number (lp_nr);
3262 remove_eh_landing_pad (lp);
3272 /* When we don't have a destination region, this exception escapes
3273 up the call chain. We resolve this by generating a call to the
3274 _Unwind_Resume library function. */
3276 /* The ARM EABI redefines _Unwind_Resume as __cxa_end_cleanup
3277 with no arguments for C++ and Java. Check for that. */
3278 if (src_r->use_cxa_end_cleanup)
3280 fn = builtin_decl_implicit (BUILT_IN_CXA_END_CLEANUP);
3281 x = gimple_build_call (fn, 0);
3282 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3286 fn = builtin_decl_implicit (BUILT_IN_EH_POINTER);
3287 src_nr = build_int_cst (integer_type_node, src_r->index);
3288 x = gimple_build_call (fn, 1, src_nr);
3289 var = create_tmp_var (ptr_type_node, NULL);
3290 var = make_ssa_name (var, x);
3291 gimple_call_set_lhs (x, var);
3292 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3294 fn = builtin_decl_implicit (BUILT_IN_UNWIND_RESUME);
3295 x = gimple_build_call (fn, 1, var);
3296 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3299 gcc_assert (EDGE_COUNT (bb->succs) == 0);
3302 gsi_remove (&gsi, true);
3308 execute_lower_resx (void)
3311 struct pointer_map_t *mnt_map;
3312 bool dominance_invalidated = false;
3313 bool any_rewritten = false;
3315 mnt_map = pointer_map_create ();
3317 FOR_EACH_BB_FN (bb, cfun)
3319 gimple last = last_stmt (bb);
3320 if (last && is_gimple_resx (last))
3322 dominance_invalidated |= lower_resx (bb, last, mnt_map);
3323 any_rewritten = true;
3327 pointer_map_destroy (mnt_map);
3329 if (dominance_invalidated)
3331 free_dominance_info (CDI_DOMINATORS);
3332 free_dominance_info (CDI_POST_DOMINATORS);
3335 return any_rewritten ? TODO_update_ssa_only_virtuals : 0;
3340 const pass_data pass_data_lower_resx =
GIMPLE_PASS, /* type */
"resx", /* name */
3344 OPTGROUP_NONE, /* optinfo_flags */
3345 true, /* has_execute */
3346 TV_TREE_EH, /* tv_id */
3347 PROP_gimple_lcf, /* properties_required */
3348 0, /* properties_provided */
3349 0, /* properties_destroyed */
3350 0, /* todo_flags_start */
3351 TODO_verify_flow, /* todo_flags_finish */
3354 class pass_lower_resx : public gimple_opt_pass
3357 pass_lower_resx (gcc::context *ctxt)
3358 : gimple_opt_pass (pass_data_lower_resx, ctxt)
3361 /* opt_pass methods: */
3362 virtual bool gate (function *) { return flag_exceptions != 0; }
3363 unsigned int execute () { return execute_lower_resx (); }
3365 }; // class pass_lower_resx
3370 make_pass_lower_resx (gcc::context *ctxt)
3372 return new pass_lower_resx (ctxt);
/* Try to optimize var = {v} {CLOBBER} stmts followed just by
   external throw.  */
3379 optimize_clobbers (basic_block bb)
3381 gimple_stmt_iterator gsi = gsi_last_bb (bb);
3382 bool any_clobbers = false;
3383 bool seen_stack_restore = false;
/* Only optimize anything if the bb contains at least one clobber,
   ends with resx (checked by caller), optionally contains some
   debug stmts or labels and at most one __builtin_stack_restore
   call, and has an incoming EH edge.  */
3391 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3393 gimple stmt = gsi_stmt (gsi);
3394 if (is_gimple_debug (stmt))
3396 if (gimple_clobber_p (stmt))
3398 any_clobbers = true;
3401 if (!seen_stack_restore
3402 && gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
3404 seen_stack_restore = true;
3407 if (gimple_code (stmt) == GIMPLE_LABEL)
3413 FOR_EACH_EDGE (e, ei, bb->preds)
3414 if (e->flags & EDGE_EH)
3418 gsi = gsi_last_bb (bb);
3419 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3421 gimple stmt = gsi_stmt (gsi);
3422 if (!gimple_clobber_p (stmt))
3424 unlink_stmt_vdef (stmt);
3425 gsi_remove (&gsi, true);
3426 release_defs (stmt);
3430 /* Try to sink var = {v} {CLOBBER} stmts followed just by
3431 internal throw to successor BB. */
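/* For example (a sketch): clobbers such as  a = {v} {CLOBBER};  that
   immediately precede the final resx in BB are moved after the labels
   of BB's single EH successor, with virtual operands adjusted as
   described below.  */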
3434 sink_clobbers (basic_block bb)
3438 gimple_stmt_iterator gsi, dgsi;
3440 bool any_clobbers = false;
3443 /* Only optimize if BB has a single EH successor and
3444 all predecessor edges are EH too. */
3445 if (!single_succ_p (bb)
3446 || (single_succ_edge (bb)->flags & EDGE_EH) == 0)
3449 FOR_EACH_EDGE (e, ei, bb->preds)
3451 if ((e->flags & EDGE_EH) == 0)
/* And BB contains only CLOBBER stmts before the final
   RESX.  */
3457 gsi = gsi_last_bb (bb);
3458 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3460 gimple stmt = gsi_stmt (gsi);
3461 if (is_gimple_debug (stmt))
3463 if (gimple_code (stmt) == GIMPLE_LABEL)
3465 if (!gimple_clobber_p (stmt))
3467 any_clobbers = true;
3472 edge succe = single_succ_edge (bb);
3473 succbb = succe->dest;
/* See if there is a virtual PHI node to take an updated virtual
   operand from.  */
3478 tree vuse = NULL_TREE;
3479 for (gsi = gsi_start_phis (succbb); !gsi_end_p (gsi); gsi_next (&gsi))
3481 tree res = gimple_phi_result (gsi_stmt (gsi));
3482 if (virtual_operand_p (res))
3484 vphi = gsi_stmt (gsi);
3490 dgsi = gsi_after_labels (succbb);
3491 gsi = gsi_last_bb (bb);
3492 for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
3494 gimple stmt = gsi_stmt (gsi);
3496 if (is_gimple_debug (stmt))
3498 if (gimple_code (stmt) == GIMPLE_LABEL)
3500 lhs = gimple_assign_lhs (stmt);
3501 /* Unfortunately we don't have dominance info updated at this
3502 point, so checking if
3503 dominated_by_p (CDI_DOMINATORS, succbb,
3504 gimple_bb (SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0)))
3505 would be too costly. Thus, avoid sinking any clobbers that
3506 refer to non-(D) SSA_NAMEs. */
3507 if (TREE_CODE (lhs) == MEM_REF
3508 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME
3509 && !SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (lhs, 0)))
3511 unlink_stmt_vdef (stmt);
3512 gsi_remove (&gsi, true);
3513 release_defs (stmt);
3517 /* As we do not change stmt order when sinking across a
3518 forwarder edge we can keep virtual operands in place. */
3519 gsi_remove (&gsi, false);
3520 gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);
3522 /* But adjust virtual operands if we sunk across a PHI node. */
3526 imm_use_iterator iter;
3527 use_operand_p use_p;
3528 FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
3529 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
3530 SET_USE (use_p, gimple_vdef (stmt));
3531 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse))
3533 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1;
3534 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0;
3536 /* Adjust the incoming virtual operand. */
3537 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt));
3538 SET_USE (gimple_vuse_op (stmt), vuse);
/* If the successor block has multiple predecessors but no virtual
   PHI node, arrange for the virtual operands to be renamed.  */
3542 else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P
3543 && !single_pred_p (succbb))
3545 /* In this case there will be no use of the VDEF of this stmt.
3546 ??? Unless this is a secondary opportunity and we have not
3547 removed unreachable blocks yet, so we cannot assert this.
3548 Which also means we will end up renaming too many times. */
3549 SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun));
3550 mark_virtual_operands_for_renaming (cfun);
3551 todo |= TODO_update_ssa_only_virtuals;
3558 /* At the end of inlining, we can lower EH_DISPATCH. Return true when
3559 we have found some duplicate labels and removed some edges. */
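/* A sketch of the generated GIMPLE: for a try/catch region the
   dispatch becomes

       filter_tmp = __builtin_eh_filter (region_nr);
       switch (filter_tmp) { case <flt1>: goto <handler1>; ... }

   while for an allowed-exceptions region it becomes a conditional
   comparing the filter against the region's allowed value.  FILTER_TMP
   is an illustrative name for the temporary built below.  */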
3562 lower_eh_dispatch (basic_block src, gimple stmt)
3564 gimple_stmt_iterator gsi;
3569 bool redirected = false;
3571 region_nr = gimple_eh_dispatch_region (stmt);
3572 r = get_eh_region_from_number (region_nr);
3574 gsi = gsi_last_bb (src);
3580 auto_vec<tree> labels;
3581 tree default_label = NULL;
3585 struct pointer_set_t *seen_values = pointer_set_create ();
/* Collect the labels for a switch.  Zero the post_landing_pad
   field because we'll no longer have anything keeping these labels
   in existence and the optimizer will be free to merge these
   blocks to some other block.  */
3591 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
3593 tree tp_node, flt_node, lab = c->label;
3594 bool have_label = false;
3597 tp_node = c->type_list;
3598 flt_node = c->filter_list;
3600 if (tp_node == NULL)
3602 default_label = lab;
3607 /* Filter out duplicate labels that arise when this handler
3608 is shadowed by an earlier one. When no labels are
3609 attached to the handler anymore, we remove
3610 the corresponding edge and then we delete unreachable
3611 blocks at the end of this pass. */
3612 if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node)))
3614 tree t = build_case_label (TREE_VALUE (flt_node),
3616 labels.safe_push (t);
3617 pointer_set_insert (seen_values, TREE_VALUE (flt_node));
3621 tp_node = TREE_CHAIN (tp_node);
3622 flt_node = TREE_CHAIN (flt_node);
3627 remove_edge (find_edge (src, label_to_block (lab)));
3632 /* Clean up the edge flags. */
3633 FOR_EACH_EDGE (e, ei, src->succs)
3635 if (e->flags & EDGE_FALLTHRU)
3637 /* If there was no catch-all, use the fallthru edge. */
3638 if (default_label == NULL)
3639 default_label = gimple_block_label (e->dest);
3640 e->flags &= ~EDGE_FALLTHRU;
3643 gcc_assert (default_label != NULL);
3645 /* Don't generate a switch if there's only a default case.
3646 This is common in the form of try { A; } catch (...) { B; }. */
3647 if (!labels.exists ())
3649 e = single_succ_edge (src);
3650 e->flags |= EDGE_FALLTHRU;
3654 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3655 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3657 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3658 filter = make_ssa_name (filter, x);
3659 gimple_call_set_lhs (x, filter);
3660 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3662 /* Turn the default label into a default case. */
3663 default_label = build_case_label (NULL, NULL, default_label);
3664 sort_case_labels (labels);
3666 x = gimple_build_switch (filter, default_label, labels);
3667 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3669 pointer_set_destroy (seen_values);
3673 case ERT_ALLOWED_EXCEPTIONS:
3675 edge b_e = BRANCH_EDGE (src);
3676 edge f_e = FALLTHRU_EDGE (src);
3678 fn = builtin_decl_implicit (BUILT_IN_EH_FILTER);
3679 x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
3681 filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)), NULL);
3682 filter = make_ssa_name (filter, x);
3683 gimple_call_set_lhs (x, filter);
3684 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3686 r->u.allowed.label = NULL;
3687 x = gimple_build_cond (EQ_EXPR, filter,
3688 build_int_cst (TREE_TYPE (filter),
3689 r->u.allowed.filter),
3690 NULL_TREE, NULL_TREE);
3691 gsi_insert_before (&gsi, x, GSI_SAME_STMT);
3693 b_e->flags = b_e->flags | EDGE_TRUE_VALUE;
3694 f_e->flags = (f_e->flags & ~EDGE_FALLTHRU) | EDGE_FALSE_VALUE;
3702 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */
3703 gsi_remove (&gsi, true);
3708 execute_lower_eh_dispatch (void)
3712 bool redirected = false;
3714 assign_filter_values ();
3716 FOR_EACH_BB_FN (bb, cfun)
3718 gimple last = last_stmt (bb);
3721 if (gimple_code (last) == GIMPLE_EH_DISPATCH)
3723 redirected |= lower_eh_dispatch (bb, last);
3724 flags |= TODO_update_ssa_only_virtuals;
3726 else if (gimple_code (last) == GIMPLE_RESX)
3728 if (stmt_can_throw_external (last))
3729 optimize_clobbers (bb);
3731 flags |= sink_clobbers (bb);
3736 delete_unreachable_blocks ();
3742 const pass_data pass_data_lower_eh_dispatch =
3744 GIMPLE_PASS, /* type */
3745 "ehdisp", /* name */
3746 OPTGROUP_NONE, /* optinfo_flags */
3747 true, /* has_execute */
3748 TV_TREE_EH, /* tv_id */
3749 PROP_gimple_lcf, /* properties_required */
3750 0, /* properties_provided */
3751 0, /* properties_destroyed */
3752 0, /* todo_flags_start */
3753 TODO_verify_flow, /* todo_flags_finish */
3756 class pass_lower_eh_dispatch : public gimple_opt_pass
3759 pass_lower_eh_dispatch (gcc::context *ctxt)
3760 : gimple_opt_pass (pass_data_lower_eh_dispatch, ctxt)
3763 /* opt_pass methods: */
3764 virtual bool gate (function *fun) { return fun->eh->region_tree != NULL; }
3766 unsigned int execute () { return execute_lower_eh_dispatch (); }
3768 }; // class pass_lower_eh_dispatch
3773 make_pass_lower_eh_dispatch (gcc::context *ctxt)
3775 return new pass_lower_eh_dispatch (ctxt);
3778 /* Walk statements, see what regions and, optionally, landing pads
3779 are really referenced.
3781 Returns in R_REACHABLEP an sbitmap with bits set for reachable regions,
3782 and in LP_REACHABLE an sbitmap with bits set for reachable landing pads.
Passing NULL for LP_REACHABLE is valid; in that case only reachable
regions are marked.
3787 The caller is responsible for freeing the returned sbitmaps. */
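/* Typical use, as in remove_unreachable_handlers below (a sketch):

     sbitmap r_reachable, lp_reachable;
     mark_reachable_handlers (&r_reachable, &lp_reachable);
     ... test bits with bitmap_bit_p ...
     sbitmap_free (r_reachable);
     sbitmap_free (lp_reachable);  */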
3790 mark_reachable_handlers (sbitmap *r_reachablep, sbitmap *lp_reachablep)
3792 sbitmap r_reachable, lp_reachable;
3794 bool mark_landing_pads = (lp_reachablep != NULL);
3795 gcc_checking_assert (r_reachablep != NULL);
3797 r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
3798 bitmap_clear (r_reachable);
3799 *r_reachablep = r_reachable;
3801 if (mark_landing_pads)
3803 lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ());
3804 bitmap_clear (lp_reachable);
3805 *lp_reachablep = lp_reachable;
3808 lp_reachable = NULL;
3810 FOR_EACH_BB_FN (bb, cfun)
3812 gimple_stmt_iterator gsi;
3814 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3816 gimple stmt = gsi_stmt (gsi);
3818 if (mark_landing_pads)
3820 int lp_nr = lookup_stmt_eh_lp (stmt);
3822 /* Negative LP numbers are MUST_NOT_THROW regions which
3823 are not considered BB enders. */
3825 bitmap_set_bit (r_reachable, -lp_nr);
3827 /* Positive LP numbers are real landing pads, and BB enders. */
3830 gcc_assert (gsi_one_before_end_p (gsi));
3831 eh_region region = get_eh_region_from_lp_number (lp_nr);
3832 bitmap_set_bit (r_reachable, region->index);
3833 bitmap_set_bit (lp_reachable, lp_nr);
3837 /* Avoid removing regions referenced from RESX/EH_DISPATCH. */
3838 switch (gimple_code (stmt))
3841 bitmap_set_bit (r_reachable, gimple_resx_region (stmt));
3843 case GIMPLE_EH_DISPATCH:
3844 bitmap_set_bit (r_reachable, gimple_eh_dispatch_region (stmt));
3853 /* Remove unreachable handlers and unreachable landing pads. */
3856 remove_unreachable_handlers (void)
3858 sbitmap r_reachable, lp_reachable;
3863 mark_reachable_handlers (&r_reachable, &lp_reachable);
3867 fprintf (dump_file, "Before removal of unreachable regions:\n");
3868 dump_eh_tree (dump_file, cfun);
3869 fprintf (dump_file, "Reachable regions: ");
3870 dump_bitmap_file (dump_file, r_reachable);
3871 fprintf (dump_file, "Reachable landing pads: ");
3872 dump_bitmap_file (dump_file, lp_reachable);
3877 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
3878 if (region && !bitmap_bit_p (r_reachable, region->index))
3880 "Removing unreachable region %d\n",
3884 remove_unreachable_eh_regions (r_reachable);
3886 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
3887 if (lp && !bitmap_bit_p (lp_reachable, lp->index))
3891 "Removing unreachable landing pad %d\n",
3893 remove_eh_landing_pad (lp);
3898 fprintf (dump_file, "\n\nAfter removal of unreachable regions:\n");
3899 dump_eh_tree (dump_file, cfun);
3900 fprintf (dump_file, "\n\n");
3903 sbitmap_free (r_reachable);
3904 sbitmap_free (lp_reachable);
3906 #ifdef ENABLE_CHECKING
3907 verify_eh_tree (cfun);
3911 /* Remove unreachable handlers if any landing pads have been removed after
3912 last ehcleanup pass (due to gimple_purge_dead_eh_edges). */
3915 maybe_remove_unreachable_handlers (void)
3920 if (cfun->eh == NULL)
3923 FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
3924 if (lp && lp->post_landing_pad)
3926 if (label_to_block (lp->post_landing_pad) == NULL)
3928 remove_unreachable_handlers ();
3934 /* Remove regions that do not have landing pads. This assumes
3935 that remove_unreachable_handlers has already been run, and
3936 that we've just manipulated the landing pads since then.
3938 Preserve regions with landing pads and regions that prevent
3939 exceptions from propagating further, even if these regions
3940 are not reachable. */
3943 remove_unreachable_handlers_no_lp (void)
3946 sbitmap r_reachable;
3949 mark_reachable_handlers (&r_reachable, /*lp_reachablep=*/NULL);
3951 FOR_EACH_VEC_SAFE_ELT (cfun->eh->region_array, i, region)
3956 if (region->landing_pads != NULL
3957 || region->type == ERT_MUST_NOT_THROW)
3958 bitmap_set_bit (r_reachable, region->index);
3961 && !bitmap_bit_p (r_reachable, region->index))
3963 "Removing unreachable region %d\n",
3967 remove_unreachable_eh_regions (r_reachable);
3969 sbitmap_free (r_reachable);
3972 /* Undo critical edge splitting on an EH landing pad. Earlier, we
optimistically split all sorts of edges, including EH edges.  The
3974 optimization passes in between may not have needed them; if not,
3975 we should undo the split.
3977 Recognize this case by having one EH edge incoming to the BB and
3978 one normal edge outgoing; BB should be empty apart from the
3979 post_landing_pad label.
3981 Note that this is slightly different from the empty handler case
3982 handled by cleanup_empty_eh, in that the actual handler may yet
3983 have actual code but the landing pad has been separated from the
3984 handler. As such, cleanup_empty_eh relies on this transformation
3985 having been done first. */
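/* In the simplest case (a sketch):

     throw-stmt --EH--> [post_landing_pad label only] --normal--> handler

   becomes

     throw-stmt --EH--> handler

   with the landing pad's label moved onto the handler block.  */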
3988 unsplit_eh (eh_landing_pad lp)
3990 basic_block bb = label_to_block (lp->post_landing_pad);
3991 gimple_stmt_iterator gsi;
3994 /* Quickly check the edge counts on BB for singularity. */
3995 if (!single_pred_p (bb) || !single_succ_p (bb))
3997 e_in = single_pred_edge (bb);
3998 e_out = single_succ_edge (bb);
4000 /* Input edge must be EH and output edge must be normal. */
4001 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0)
4004 /* The block must be empty except for the labels and debug insns. */
4005 gsi = gsi_after_labels (bb);
4006 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4007 gsi_next_nondebug (&gsi);
4008 if (!gsi_end_p (gsi))
4011 /* The destination block must not already have a landing pad
4012 for a different region. */
4013 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4015 gimple stmt = gsi_stmt (gsi);
4019 if (gimple_code (stmt) != GIMPLE_LABEL)
4021 lab = gimple_label_label (stmt);
4022 lp_nr = EH_LANDING_PAD_NR (lab);
4023 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
/* The new destination block must not already be a destination of
   the source block, lest we merge fallthru and EH edges and get
   all sorts of confusion.  */
4030 if (find_edge (e_in->src, e_out->dest))
4033 /* ??? We can get degenerate phis due to cfg cleanups. I would have
4034 thought this should have been cleaned up by a phicprop pass, but
4035 that doesn't appear to handle virtuals. Propagate by hand. */
4036 if (!gimple_seq_empty_p (phi_nodes (bb)))
4038 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); )
4040 gimple use_stmt, phi = gsi_stmt (gsi);
4041 tree lhs = gimple_phi_result (phi);
4042 tree rhs = gimple_phi_arg_def (phi, 0);
4043 use_operand_p use_p;
4044 imm_use_iterator iter;
4046 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
4048 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
4049 SET_USE (use_p, rhs);
4052 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4053 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs) = 1;
4055 remove_phi_node (&gsi, true);
4059 if (dump_file && (dump_flags & TDF_DETAILS))
4060 fprintf (dump_file, "Unsplit EH landing pad %d to block %i.\n",
4061 lp->index, e_out->dest->index);
4063 /* Redirect the edge. Since redirect_eh_edge_1 expects to be moving
4064 a successor edge, humor it. But do the real CFG change with the
4065 predecessor of E_OUT in order to preserve the ordering of arguments
4066 to the PHI nodes in E_OUT->DEST. */
4067 redirect_eh_edge_1 (e_in, e_out->dest, false);
4068 redirect_edge_pred (e_out, e_in->src);
4069 e_out->flags = e_in->flags;
4070 e_out->probability = e_in->probability;
4071 e_out->count = e_in->count;
4077 /* Examine each landing pad block and see if it matches unsplit_eh. */
4080 unsplit_all_eh (void)
4082 bool changed = false;
4086 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
4088 changed |= unsplit_eh (lp);
4093 /* A subroutine of cleanup_empty_eh. Redirect all EH edges incoming
4094 to OLD_BB to NEW_BB; return true on success, false on failure.
4096 OLD_BB_OUT is the edge into NEW_BB from OLD_BB, so if we miss any
4097 PHI variables from OLD_BB we can pick them up from OLD_BB_OUT.
4098 Virtual PHIs may be deleted and marked for renaming. */
4101 cleanup_empty_eh_merge_phis (basic_block new_bb, basic_block old_bb,
4102 edge old_bb_out, bool change_region)
4104 gimple_stmt_iterator ngsi, ogsi;
4107 bitmap ophi_handled;
4109 /* The destination block must not be a regular successor for any
of the preds of the landing pad.  Thus, avoid turning an EH edge
into a second, parallel regular edge between the same pair of blocks,
which CFG verification would choke on.  See PR45172 and PR51089.  */
4121 FOR_EACH_EDGE (e, ei, old_bb->preds)
4122 if (find_edge (e->src, new_bb))
4125 FOR_EACH_EDGE (e, ei, old_bb->preds)
4126 redirect_edge_var_map_clear (e);
4128 ophi_handled = BITMAP_ALLOC (NULL);
4130 /* First, iterate through the PHIs on NEW_BB and set up the edge_var_map
4131 for the edges we're going to move. */
4132 for (ngsi = gsi_start_phis (new_bb); !gsi_end_p (ngsi); gsi_next (&ngsi))
4134 gimple ophi, nphi = gsi_stmt (ngsi);
4137 nresult = gimple_phi_result (nphi);
4138 nop = gimple_phi_arg_def (nphi, old_bb_out->dest_idx);
4140 /* Find the corresponding PHI in OLD_BB so we can forward-propagate
4141 the source ssa_name. */
4143 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4145 ophi = gsi_stmt (ogsi);
4146 if (gimple_phi_result (ophi) == nop)
4151 /* If we did find the corresponding PHI, copy those inputs. */
4154 /* If NOP is used somewhere else beyond phis in new_bb, give up. */
4155 if (!has_single_use (nop))
4157 imm_use_iterator imm_iter;
4158 use_operand_p use_p;
4160 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, nop)
4162 if (!gimple_debug_bind_p (USE_STMT (use_p))
4163 && (gimple_code (USE_STMT (use_p)) != GIMPLE_PHI
4164 || gimple_bb (USE_STMT (use_p)) != new_bb))
4168 bitmap_set_bit (ophi_handled, SSA_NAME_VERSION (nop));
4169 FOR_EACH_EDGE (e, ei, old_bb->preds)
4174 if ((e->flags & EDGE_EH) == 0)
4176 oop = gimple_phi_arg_def (ophi, e->dest_idx);
4177 oloc = gimple_phi_arg_location (ophi, e->dest_idx);
4178 redirect_edge_var_map_add (e, nresult, oop, oloc);
/* If we didn't find the PHI, then whether it's a real variable or a VOP, we know
4182 from the fact that OLD_BB is tree_empty_eh_handler_p that the
4183 variable is unchanged from input to the block and we can simply
4184 re-use the input to NEW_BB from the OLD_BB_OUT edge. */
4188 = gimple_phi_arg_location (nphi, old_bb_out->dest_idx);
4189 FOR_EACH_EDGE (e, ei, old_bb->preds)
4190 redirect_edge_var_map_add (e, nresult, nop, nloc);
4194 /* Second, verify that all PHIs from OLD_BB have been handled. If not,
4195 we don't know what values from the other edges into NEW_BB to use. */
4196 for (ogsi = gsi_start_phis (old_bb); !gsi_end_p (ogsi); gsi_next (&ogsi))
4198 gimple ophi = gsi_stmt (ogsi);
4199 tree oresult = gimple_phi_result (ophi);
4200 if (!bitmap_bit_p (ophi_handled, SSA_NAME_VERSION (oresult)))
4204 /* Finally, move the edges and update the PHIs. */
4205 for (ei = ei_start (old_bb->preds); (e = ei_safe_edge (ei)); )
4206 if (e->flags & EDGE_EH)
/* ??? CFG manipulation routines do not try to update loop
   form on edge redirection.  Do so manually here for now.  */
/* If we redirect a loop entry or latch edge, that will either create
   a multiple-entry loop or rotate the loop.  If the loops merge
   we may have created a loop with multiple latches.
   None of this is easily fixed, thus cancel the affected loop
   and mark the other loop as possibly having multiple latches.  */
4216 && e->dest == e->dest->loop_father->header)
4218 e->dest->loop_father->header = NULL;
4219 e->dest->loop_father->latch = NULL;
4220 new_bb->loop_father->latch = NULL;
4221 loops_state_set (LOOPS_NEED_FIXUP|LOOPS_MAY_HAVE_MULTIPLE_LATCHES);
4223 redirect_eh_edge_1 (e, new_bb, change_region);
4224 redirect_edge_succ (e, new_bb);
4225 flush_pending_stmts (e);
4230 BITMAP_FREE (ophi_handled);
4234 FOR_EACH_EDGE (e, ei, old_bb->preds)
4235 redirect_edge_var_map_clear (e);
4236 BITMAP_FREE (ophi_handled);
4240 /* A subroutine of cleanup_empty_eh. Move a landing pad LP from its
4241 old region to NEW_REGION at BB. */
4244 cleanup_empty_eh_move_lp (basic_block bb, edge e_out,
4245 eh_landing_pad lp, eh_region new_region)
4247 gimple_stmt_iterator gsi;
4250 for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
4254 lp->region = new_region;
4255 lp->next_lp = new_region->landing_pads;
4256 new_region->landing_pads = lp;
4258 /* Delete the RESX that was matched within the empty handler block. */
4259 gsi = gsi_last_bb (bb);
4260 unlink_stmt_vdef (gsi_stmt (gsi));
4261 gsi_remove (&gsi, true);
4263 /* Clean up E_OUT for the fallthru. */
4264 e_out->flags = (e_out->flags & ~EDGE_EH) | EDGE_FALLTHRU;
4265 e_out->probability = REG_BR_PROB_BASE;
4268 /* A subroutine of cleanup_empty_eh. Handle more complex cases of
4269 unsplitting than unsplit_eh was prepared to handle, e.g. when
4270 multiple incoming edges and phis are involved. */
4273 cleanup_empty_eh_unsplit (basic_block bb, edge e_out, eh_landing_pad lp)
4275 gimple_stmt_iterator gsi;
/* We really ought not have totally lost everything following
   a landing pad label.  Given that BB is empty, there had better
   be a successor.  */
4281 gcc_assert (e_out != NULL);
4283 /* The destination block must not already have a landing pad
4284 for a different region. */
4286 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi))
4288 gimple stmt = gsi_stmt (gsi);
4291 if (gimple_code (stmt) != GIMPLE_LABEL)
4293 lab = gimple_label_label (stmt);
4294 lp_nr = EH_LANDING_PAD_NR (lab);
4295 if (lp_nr && get_eh_region_from_lp_number (lp_nr) != lp->region)
4299 /* Attempt to move the PHIs into the successor block. */
4300 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, false))
4302 if (dump_file && (dump_flags & TDF_DETAILS))
4304 "Unsplit EH landing pad %d to block %i "
4305 "(via cleanup_empty_eh).\n",
4306 lp->index, e_out->dest->index);
4313 /* Return true if edge E_FIRST is part of an empty infinite loop
or leads to such a loop through a series of single-successor
blocks.  */
4318 infinite_empty_loop_p (edge e_first)
4320 bool inf_loop = false;
4323 if (e_first->dest == e_first->src)
4326 e_first->src->aux = (void *) 1;
4327 for (e = e_first; single_succ_p (e->dest); e = single_succ_edge (e->dest))
4329 gimple_stmt_iterator gsi;
4335 e->dest->aux = (void *) 1;
4336 gsi = gsi_after_labels (e->dest);
4337 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4338 gsi_next_nondebug (&gsi);
4339 if (!gsi_end_p (gsi))
4342 e_first->src->aux = NULL;
4343 for (e = e_first; e->dest->aux; e = single_succ_edge (e->dest))
4344 e->dest->aux = NULL;
4349 /* Examine the block associated with LP to determine if it's an empty
4350 handler for its EH region. If so, attempt to redirect EH edges to
an outer region.  Return true if the CFG was updated in any way.  This
4352 is similar to jump forwarding, just across EH edges. */
4355 cleanup_empty_eh (eh_landing_pad lp)
4357 basic_block bb = label_to_block (lp->post_landing_pad);
4358 gimple_stmt_iterator gsi;
4360 eh_region new_region;
4363 bool has_non_eh_pred;
4367 /* There can be zero or one edges out of BB. This is the quickest test. */
4368 switch (EDGE_COUNT (bb->succs))
4374 e_out = single_succ_edge (bb);
4380 resx = last_stmt (bb);
4381 if (resx && is_gimple_resx (resx))
4383 if (stmt_can_throw_external (resx))
4384 optimize_clobbers (bb);
4385 else if (sink_clobbers (bb))
4389 gsi = gsi_after_labels (bb);
4391 /* Make sure to skip debug statements. */
4392 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi)))
4393 gsi_next_nondebug (&gsi);
4395 /* If the block is totally empty, look for more unsplitting cases. */
4396 if (gsi_end_p (gsi))
4398 /* For the degenerate case of an infinite loop bail out.
4399 If bb has no successors and is totally empty, which can happen e.g.
4400 because of incorrect noreturn attribute, bail out too. */
if (e_out == NULL
    || infinite_empty_loop_p (e_out))
  return ret;
4405 return ret | cleanup_empty_eh_unsplit (bb, e_out, lp);
4408 /* The block should consist only of a single RESX statement, modulo a
4409 preceding call to __builtin_stack_restore if there is no outgoing
4410 edge, since the call can be eliminated in this case. */
4411 resx = gsi_stmt (gsi);
4412 if (!e_out && gimple_call_builtin_p (resx, BUILT_IN_STACK_RESTORE))
4415 resx = gsi_stmt (gsi);
4417 if (!is_gimple_resx (resx))
4419 gcc_assert (gsi_one_before_end_p (gsi));
4421 /* Determine if there are non-EH edges, or resx edges into the handler. */
4422 has_non_eh_pred = false;
4423 FOR_EACH_EDGE (e, ei, bb->preds)
4424 if (!(e->flags & EDGE_EH))
4425 has_non_eh_pred = true;
4427 /* Find the handler that's outer of the empty handler by looking at
4428 where the RESX instruction was vectored. */
4429 new_lp_nr = lookup_stmt_eh_lp (resx);
4430 new_region = get_eh_region_from_lp_number (new_lp_nr);
4432 /* If there's no destination region within the current function,
4433 redirection is trivial via removing the throwing statements from
4434 the EH region, removing the EH edges, and allowing the block
4435 to go unreachable. */
4436 if (new_region == NULL)
4438 gcc_assert (e_out == NULL);
4439 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4440 if (e->flags & EDGE_EH)
4442 gimple stmt = last_stmt (e->src);
4443 remove_stmt_from_eh_lp (stmt);
4451 /* If the destination region is a MUST_NOT_THROW, allow the runtime
4452 to handle the abort and allow the blocks to go unreachable. */
4453 if (new_region->type == ERT_MUST_NOT_THROW)
4455 for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
4456 if (e->flags & EDGE_EH)
4458 gimple stmt = last_stmt (e->src);
4459 remove_stmt_from_eh_lp (stmt);
4460 add_stmt_to_eh_lp (stmt, new_lp_nr);
4468 /* Try to redirect the EH edges and merge the PHIs into the destination
4469 landing pad block. If the merge succeeds, we'll already have redirected
all the EH edges.  The handler itself will go unreachable if there were
no normal edges.  */
4472 if (cleanup_empty_eh_merge_phis (e_out->dest, bb, e_out, true))
4475 /* Finally, if all input edges are EH edges, then we can (potentially)
4476 reduce the number of transfers from the runtime by moving the landing
4477 pad from the original region to the new region. This is a win when
4478 we remove the last CLEANUP region along a particular exception
4479 propagation path. Since nothing changes except for the region with
which the landing pad is associated, the PHI nodes do not need to be
updated.  */
4482 if (!has_non_eh_pred)
4484 cleanup_empty_eh_move_lp (bb, e_out, lp, new_region);
4485 if (dump_file && (dump_flags & TDF_DETAILS))
4486 fprintf (dump_file, "Empty EH handler %i moved to EH region %i.\n",
4487 lp->index, new_region->index);
4489 /* ??? The CFG didn't change, but we may have rendered the
4490 old EH region unreachable. Trigger a cleanup there. */
4497 if (dump_file && (dump_flags & TDF_DETAILS))
4498 fprintf (dump_file, "Empty EH handler %i removed.\n", lp->index);
4499 remove_eh_landing_pad (lp);
4503 /* Do a post-order traversal of the EH region tree. Examine each
4504 post_landing_pad block and see if we can eliminate it as empty. */
4507 cleanup_all_empty_eh (void)
4509 bool changed = false;
4513 for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
4515 changed |= cleanup_empty_eh (lp);
/* Perform cleanups and lowering of exception handling:
    1) cleanup regions with handlers doing nothing are optimized out
    2) MUST_NOT_THROW regions that became dead because of 1) are optimized out
    3) info about regions that contain instructions, and regions
       reachable via local EH edges, is collected
    4) the EH tree is pruned for regions no longer necessary.

   TODO: Push MUST_NOT_THROW regions to the root of the EH tree.
	 Unify those that have the same failure decl and locus.  */
4532 execute_cleanup_eh_1 (void)
4534 /* Do this first: unsplit_all_eh and cleanup_all_empty_eh can die
4535 looking up unreachable landing pads. */
4536 remove_unreachable_handlers ();
4538 /* Watch out for the region tree vanishing due to all unreachable. */
4539 if (cfun->eh->region_tree)
4541 bool changed = false;
4544 changed |= unsplit_all_eh ();
4545 changed |= cleanup_all_empty_eh ();
4549 free_dominance_info (CDI_DOMINATORS);
4550 free_dominance_info (CDI_POST_DOMINATORS);
4552 /* We delayed all basic block deletion, as we may have performed
4553 cleanups on EH edges while non-EH edges were still present. */
4554 delete_unreachable_blocks ();
4556 /* We manipulated the landing pads. Remove any region that no
4557 longer has a landing pad. */
4558 remove_unreachable_handlers_no_lp ();
4560 return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
4568 execute_cleanup_eh (void)
4570 int ret = execute_cleanup_eh_1 ();
/* If the function no longer needs an EH personality routine,
4573 clear it. This exposes cross-language inlining opportunities
4574 and avoids references to a never defined personality routine. */
4575 if (DECL_FUNCTION_PERSONALITY (current_function_decl)
4576 && function_needs_eh_personality (cfun) != eh_personality_lang)
4577 DECL_FUNCTION_PERSONALITY (current_function_decl) = NULL_TREE;
4584 const pass_data pass_data_cleanup_eh =
4586 GIMPLE_PASS, /* type */
4587 "ehcleanup", /* name */
4588 OPTGROUP_NONE, /* optinfo_flags */
4589 true, /* has_execute */
4590 TV_TREE_EH, /* tv_id */
4591 PROP_gimple_lcf, /* properties_required */
4592 0, /* properties_provided */
4593 0, /* properties_destroyed */
4594 0, /* todo_flags_start */
4595 TODO_verify_ssa, /* todo_flags_finish */
4598 class pass_cleanup_eh : public gimple_opt_pass
4601 pass_cleanup_eh (gcc::context *ctxt)
4602 : gimple_opt_pass (pass_data_cleanup_eh, ctxt)
4605 /* opt_pass methods: */
4606 opt_pass * clone () { return new pass_cleanup_eh (m_ctxt); }
4607 virtual bool gate (function *fun)
4609 return fun->eh != NULL && fun->eh->region_tree != NULL;
4612 unsigned int execute () { return execute_cleanup_eh (); }
4614 }; // class pass_cleanup_eh
4619 make_pass_cleanup_eh (gcc::context *ctxt)
4621 return new pass_cleanup_eh (ctxt);
/* Verify that BB, containing STMT as its last statement, has precisely
   the edge that make_eh_edges would create.  */
4628 verify_eh_edges (gimple stmt)
4630 basic_block bb = gimple_bb (stmt);
4631 eh_landing_pad lp = NULL;
4636 lp_nr = lookup_stmt_eh_lp (stmt);
4638 lp = get_eh_landing_pad_from_number (lp_nr);
4641 FOR_EACH_EDGE (e, ei, bb->succs)
4643 if (e->flags & EDGE_EH)
4647 error ("BB %i has multiple EH edges", bb->index);
4659 error ("BB %i can not throw but has an EH edge", bb->index);
4665 if (!stmt_could_throw_p (stmt))
4667 error ("BB %i last statement has incorrectly set lp", bb->index);
4671 if (eh_edge == NULL)
4673 error ("BB %i is missing an EH edge", bb->index);
4677 if (eh_edge->dest != label_to_block (lp->post_landing_pad))
4679 error ("Incorrect EH edge %i->%i", bb->index, eh_edge->dest->index);
4686 /* Similarly, but handle GIMPLE_EH_DISPATCH specifically. */
4689 verify_eh_dispatch_edge (gimple stmt)
4693 basic_block src, dst;
4694 bool want_fallthru = true;
4698 r = get_eh_region_from_number (gimple_eh_dispatch_region (stmt));
4699 src = gimple_bb (stmt);
4701 FOR_EACH_EDGE (e, ei, src->succs)
4702 gcc_assert (e->aux == NULL);
4707 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
4709 dst = label_to_block (c->label);
4710 e = find_edge (src, dst);
4713 error ("BB %i is missing an edge", src->index);
4718 /* A catch-all handler doesn't have a fallthru. */
4719 if (c->type_list == NULL)
4721 want_fallthru = false;
4727 case ERT_ALLOWED_EXCEPTIONS:
4728 dst = label_to_block (r->u.allowed.label);
4729 e = find_edge (src, dst);
4732 error ("BB %i is missing an edge", src->index);
4743 FOR_EACH_EDGE (e, ei, src->succs)
4745 if (e->flags & EDGE_FALLTHRU)
4747 if (fall_edge != NULL)
4749 error ("BB %i too many fallthru edges", src->index);
4758 error ("BB %i has incorrect edge", src->index);
4762 if ((fall_edge != NULL) ^ want_fallthru)
4764 error ("BB %i has incorrect fallthru edge", src->index);