1 /* Exception handling semantics and decomposition for trees.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
30 #include "tree-flow.h"
31 #include "tree-dump.h"
32 #include "tree-inline.h"
33 #include "tree-iterator.h"
34 #include "tree-pass.h"
36 #include "langhooks.h"
39 #include "pointer-set.h"
42 /* Nonzero if we are using EH to handle cleanups.  Set by
   using_eh_for_cleanups below; consulted when lowering TRY_FINALLY.  */
43 static int using_eh_for_cleanups_p = 0;
/* Enable the use of EH regions for cleanups (sets the flag above).
   Intended to be called once by a front end that wants this behavior.  */
46 using_eh_for_cleanups (void)
48 using_eh_for_cleanups_p = 1;
51 /* Misc functions used in this file. */
53 /* Compare and hash for any structure which begins with a canonical
54 pointer. Assumes all pointers are interchangeable, which is sort
55 of already assumed by gcc elsewhere IIRC. */
/* Equality callback for hash tables whose entries begin with a
   canonical pointer: compares the two leading pointers.  */
58 struct_ptr_eq (const void *a, const void *b)
60 const void * const * x = (const void * const *) a;
61 const void * const * y = (const void * const *) b;
/* Hash callback companion to struct_ptr_eq: hashes the leading
   pointer, shifting off low-order bits that are zero for aligned
   allocations.  */
66 struct_ptr_hash (const void *a)
68 const void * const * x = (const void * const *) a;
69 return (size_t)*x >> 4;
73 /* Remember and lookup EH region data for arbitrary statements.
74 Really this means any statement that could_throw_p. We could
75 stuff this information into the stmt_ann data structure, but:
77 (1) We absolutely rely on this information being kept until
78 we get to rtl. Once we're done with lowering here, if we lose
79 the information there's no way to recover it!
81 (2) There are many more statements that *cannot* throw as
82 compared to those that can. We should be saving some amount
83 of space by only allocating memory for those that can throw. */
/* Record that statement T belongs to the EH region REGION, by number.  */
86 record_stmt_eh_region (struct eh_region *region, tree t)
91 add_stmt_to_eh_region (t, get_eh_region_number (region));
/* Record in IFUN's throw-statement table that statement T lives in
   EH region number NUM.  NUM must be non-negative, and T must not be
   a RESX_EXPR (those carry their region in the node itself).  */
95 add_stmt_to_eh_region_fn (struct function *ifun, tree t, int num)
97 struct throw_stmt_node *n;
100 gcc_assert (num >= 0);
101 gcc_assert (TREE_CODE (t) != RESX_EXPR);
103 n = GGC_NEW (struct throw_stmt_node);
/* Create the per-function table lazily on first insertion.  */
107 if (!get_eh_throw_stmt_table (ifun))
108 set_eh_throw_stmt_table (ifun, htab_create_ggc (31, struct_ptr_hash,
112 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), n, INSERT);
/* Convenience wrapper: record T in the current function's (cfun's)
   throw-statement table.  */
118 add_stmt_to_eh_region (tree t, int num)
120 add_stmt_to_eh_region_fn (cfun, t, num);
/* Remove statement T from IFUN's throw-statement table, if present.  */
124 remove_stmt_from_eh_region_fn (struct function *ifun, tree t)
126 struct throw_stmt_node dummy;
/* Nothing to do if the table was never created.  */
129 if (!get_eh_throw_stmt_table (ifun))
133 slot = htab_find_slot (get_eh_throw_stmt_table (ifun), &dummy,
137 htab_clear_slot (get_eh_throw_stmt_table (ifun), slot);
/* As above, for the current function (cfun).  */
145 remove_stmt_from_eh_region (tree t)
147 return remove_stmt_from_eh_region_fn (cfun, t);
/* Return the EH region number recorded for statement T in IFUN,
   or -1 if T has no recorded region (or the table doesn't exist).  */
151 lookup_stmt_eh_region_fn (struct function *ifun, tree t)
153 struct throw_stmt_node *p, n;
155 if (!get_eh_throw_stmt_table (ifun))
159 p = (struct throw_stmt_node *) htab_find (get_eh_throw_stmt_table (ifun),
162 return (p ? p->region_nr : -1);
/* As above, for the current function (cfun).  */
166 lookup_stmt_eh_region (tree t)
168 /* We can get called from initialized data when -fnon-call-exceptions
169 is on; prevent crash. */
172 return lookup_stmt_eh_region_fn (cfun, t);
176 /* First pass of EH node decomposition. Build up a tree of TRY_FINALLY_EXPR
177 nodes and LABEL_DECL nodes. We will use this during the second phase to
178 determine if a goto leaves the body of a TRY_FINALLY_EXPR node. */
/* An entry mapping a child node (presumably a LABEL_DECL or nested
   TRY_FINALLY_EXPR, per record_in_finally_tree's callers) to its
   enclosing TRY_FINALLY_EXPR -- fields not visible in this excerpt.  */
180 struct finally_tree_node
185 /* Note that this table is *not* marked GTY. It is short-lived. */
186 static htab_t finally_tree;
/* Enter the pair (CHILD, PARENT) into the finally_tree hash table.  */
189 record_in_finally_tree (tree child, tree parent)
191 struct finally_tree_node *n;
194 n = XNEW (struct finally_tree_node)
198 slot = htab_find_slot (finally_tree, n, INSERT);
/* Walk statement T, recording each label and nested TRY_FINALLY_EXPR
   as belonging to REGION (the innermost enclosing TRY_FINALLY_EXPR,
   or NULL at the outermost level -- TODO confirm against callers).  */
204 collect_finally_tree (tree t, tree region)
207 switch (TREE_CODE (t))
210 record_in_finally_tree (LABEL_EXPR_LABEL (t), region);
213 case TRY_FINALLY_EXPR:
/* The try body is walked with this node as its region; processing
   then continues with the cleanup expression.  */
214 record_in_finally_tree (t, region);
215 collect_finally_tree (TREE_OPERAND (t, 0), t);
216 t = TREE_OPERAND (t, 1);
220 collect_finally_tree (TREE_OPERAND (t, 0), region);
221 t = TREE_OPERAND (t, 1);
229 t = EH_FILTER_FAILURE (t);
234 tree_stmt_iterator i;
235 for (i = tsi_start (t); !tsi_end_p (i); tsi_next (&i))
236 collect_finally_tree (tsi_stmt (i), region);
241 /* A type, a decl, or some kind of statement that we're not
242 interested in. Don't walk them. */
247 /* Use the finally tree to determine if a jump from START to TARGET
248 would leave the try_finally node that START lives in. */
/* Return whether a jump from START to TARGET would leave the
   try_finally node that START lives in, by walking up the chain of
   enclosing nodes recorded in finally_tree.  */
251 outside_finally_tree (tree start, tree target)
253 struct finally_tree_node n, *p;
258 p = (struct finally_tree_node *) htab_find (finally_tree, &n);
/* Loop terminates when we reach TARGET (or, presumably, run out of
   enclosing nodes -- the loop body is not fully visible here).  */
263 while (start != target);
268 /* Second pass of EH node decomposition. Actually transform the TRY_FINALLY
269 and TRY_CATCH nodes into a set of gotos, magic labels, and eh regions.
270 The eh region creation is straight-forward, but frobbing all the gotos
271 and such into shape isn't. */
273 /* State of the world while lowering. */
277 /* What's "current" while constructing the eh region tree. These
278 correspond to variables of the same name in cfun->eh, which we
279 don't have easy access to. */
280 struct eh_region *cur_region;
281 struct eh_region *prev_try;
283 /* Processing of TRY_FINALLY requires a bit more state. This is
284 split out into a separate structure so that we don't have to
285 copy so much when processing other nodes. */
/* NOTE(review): appears to be NULL when not inside a TRY_FINALLY --
   confirm against maybe_record_in_goto_queue's callers.  */
286 struct leh_tf_state *tf;
291 /* Pointer to the TRY_FINALLY node under discussion. The try_finally_expr
292 is the original TRY_FINALLY_EXPR. We need to retain this so that
293 outside_finally_tree can reliably reference the tree used in the
294 collect_finally_tree data structures. */
295 tree try_finally_expr;
298 /* The state outside this try_finally node. */
299 struct leh_state *outer;
301 /* The exception region created for it. */
302 struct eh_region *region;
304 /* The GOTO_QUEUE is an array of GOTO_EXPR and RETURN_EXPR statements
305 that are seen to escape this TRY_FINALLY_EXPR node. */
306 struct goto_queue_node {
/* Allocated capacity and number of live entries in goto_queue.  */
312 size_t goto_queue_size;
313 size_t goto_queue_active;
315 /* Pointer map to help in searching goto_queue when it is large. */
316 struct pointer_map_t *goto_queue_map;
318 /* The set of unique labels seen as entries in the goto queue. */
319 VEC(tree,heap) *dest_array;
321 /* A label to be added at the end of the completed transformed
322 sequence. It will be set if may_fallthru was true *at one time*,
323 though subsequent transformations may have cleared that flag. */
326 /* A label that has been registered with except.c to be the
327 landing pad for this try block. */
330 /* True if it is possible to fall out the bottom of the try block.
331 Cleared if the fallthru is converted to a goto. */
334 /* True if any entry in goto_queue is a RETURN_EXPR. */
337 /* True if the finally block can receive an exception edge.
338 Cleared if the exception case is handled by code duplication. */
342 static void lower_eh_filter (struct leh_state *, tree *);
343 static void lower_eh_constructs_1 (struct leh_state *, tree *);
345 /* Search for STMT in the goto queue. Return the replacement,
346 or null if the statement isn't in the queue. */
/* Queue length at or above which we switch from linear search to a
   pointer map.  */
348 #define LARGE_GOTO_QUEUE 20
/* Return the replacement statement list recorded for STMT in TF's
   goto queue, or NULL if STMT is not queued.  Small queues are
   scanned linearly; larger ones are indexed via a pointer map built
   lazily on first lookup.  */
351 find_goto_replacement (struct leh_tf_state *tf, tree stmt)
356 if (tf->goto_queue_active < LARGE_GOTO_QUEUE)
358 for (i = 0; i < tf->goto_queue_active; i++)
359 if (tf->goto_queue[i].stmt == stmt)
360 return tf->goto_queue[i].repl_stmt;
364 /* If we have a large number of entries in the goto_queue, create a
365 pointer map and use that for searching. */
367 if (!tf->goto_queue_map)
369 tf->goto_queue_map = pointer_map_create ();
370 for (i = 0; i < tf->goto_queue_active; i++)
372 slot = pointer_map_insert (tf->goto_queue_map, tf->goto_queue[i].stmt);
/* Queue entries are expected to be unique per statement.  */
373 gcc_assert (*slot == NULL);
374 *slot = (void *) &tf->goto_queue[i];
378 slot = pointer_map_contains (tf->goto_queue_map, stmt);
380 return (((struct goto_queue_node *) *slot)->repl_stmt);
385 /* A subroutine of replace_goto_queue_1. Handles the sub-clauses of a
386 lowered COND_EXPR. If, by chance, the replacement is a simple goto,
387 then we can just splat it in, otherwise we add the new stmts immediately
388 after the COND_EXPR and redirect. */
/* Replace the goto at *TP (a COND_EXPR arm) with its queued
   replacement; TSI iterates the statement list containing the
   COND_EXPR.  */
391 replace_goto_queue_cond_clause (tree *tp, struct leh_tf_state *tf,
392 tree_stmt_iterator *tsi)
394 tree new, one, label;
396 new = find_goto_replacement (tf, *tp);
/* If the replacement is a single GOTO_EXPR, splat it in directly.  */
400 one = expr_only (new);
401 if (one && TREE_CODE (one) == GOTO_EXPR)
/* Otherwise jump to a fresh label and link the label plus the
   replacement statements after the COND_EXPR.  */
407 label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
408 *tp = build_and_jump (&LABEL_EXPR_LABEL (label));
410 tsi_link_after (tsi, label, TSI_CONTINUE_LINKING);
411 tsi_link_after (tsi, new, TSI_CONTINUE_LINKING);
414 /* The real work of replace_goto_queue. Returns with TSI updated to
415 point to the next statement. */
417 static void replace_goto_queue_stmt_list (tree, struct leh_tf_state *);
/* Dispatch on T's code: substitute queued replacements for gotos and
   returns, and recurse into compound statements that can contain
   them.  */
420 replace_goto_queue_1 (tree t, struct leh_tf_state *tf, tree_stmt_iterator *tsi)
422 switch (TREE_CODE (t))
426 t = find_goto_replacement (tf, t);
429 tsi_link_before (tsi, t, TSI_SAME_STMT);
436 replace_goto_queue_cond_clause (&COND_EXPR_THEN (t), tf, tsi);
437 replace_goto_queue_cond_clause (&COND_EXPR_ELSE (t), tf, tsi);
440 case TRY_FINALLY_EXPR:
442 replace_goto_queue_stmt_list (TREE_OPERAND (t, 0), tf);
443 replace_goto_queue_stmt_list (TREE_OPERAND (t, 1), tf);
446 replace_goto_queue_stmt_list (CATCH_BODY (t), tf);
449 replace_goto_queue_stmt_list (EH_FILTER_FAILURE (t), tf);
456 /* These won't have gotos in them. */
463 /* A subroutine of replace_goto_queue. Handles STATEMENT_LISTs. */
/* Apply replace_goto_queue_1 to every statement in list T.  */
466 replace_goto_queue_stmt_list (tree t, struct leh_tf_state *tf)
468 tree_stmt_iterator i = tsi_start (t);
469 while (!tsi_end_p (i))
/* replace_goto_queue_1 leaves I pointing at the next statement.  */
470 replace_goto_queue_1 (tsi_stmt (i), tf, &i);
473 /* Replace all goto queue members. */
/* Replace all queued gotos/returns throughout the statements at
   TF->top_p.  No-op when the queue is empty.  */
476 replace_goto_queue (struct leh_tf_state *tf)
478 if (tf->goto_queue_active == 0)
480 replace_goto_queue_stmt_list (*tf->top_p, tf);
483 /* For any GOTO_EXPR or RETURN_EXPR, decide whether it leaves a try_finally
484 node, and if so record that fact in the goto queue associated with that
/* If STMT is a GOTO_EXPR or RETURN_EXPR that escapes the current
   try_finally node, record it in STATE->tf's goto queue.  */
488 maybe_record_in_goto_queue (struct leh_state *state, tree stmt)
490 struct leh_tf_state *tf = state->tf;
491 struct goto_queue_node *q;
498 switch (TREE_CODE (stmt))
502 tree lab = GOTO_DESTINATION (stmt);
504 /* Computed and non-local gotos do not get processed. Given
505 their nature we can neither tell whether we've escaped the
506 finally block nor redirect them if we knew. */
507 if (TREE_CODE (lab) != LABEL_DECL)
510 /* No need to record gotos that don't leave the try block. */
511 if (! outside_finally_tree (lab, tf->try_finally_expr))
/* Find (or add) LAB in the per-try array of unique destinations.  */
514 if (! tf->dest_array)
516 tf->dest_array = VEC_alloc (tree, heap, 10);
517 VEC_quick_push (tree, tf->dest_array, lab);
522 int n = VEC_length (tree, tf->dest_array);
523 for (index = 0; index < n; ++index)
524 if (VEC_index (tree, tf->dest_array, index) == lab)
527 VEC_safe_push (tree, heap, tf->dest_array, lab);
533 tf->may_return = true;
/* The queue must not yet have been indexed into a pointer map
   (find_goto_replacement builds that only after queuing is done).  */
541 gcc_assert (!tf->goto_queue_map);
/* Grow the queue geometrically, starting at 32 entries.  */
543 active = tf->goto_queue_active;
544 size = tf->goto_queue_size;
547 size = (size ? size * 2 : 32);
548 tf->goto_queue_size = size;
550 = XRESIZEVEC (struct goto_queue_node, tf->goto_queue, size);
553 q = &tf->goto_queue[active];
554 tf->goto_queue_active = active + 1;
556 memset (q, 0, sizeof (*q));
561 #ifdef ENABLE_CHECKING
562 /* We do not process SWITCH_EXPRs for now. As long as the original source
563 was in fact structured, and we've not yet done jump threading, then none
564 of the labels will leave outer TRY_FINALLY_EXPRs. Verify this. */
567 verify_norecord_switch_expr (struct leh_state *state, tree switch_expr)
569 struct leh_tf_state *tf = state->tf;
576 vec = SWITCH_LABELS (switch_expr);
577 n = TREE_VEC_LENGTH (vec);
/* Assert that no case label escapes the enclosing try_finally.  */
579 for (i = 0; i < n; ++i)
581 tree lab = CASE_LABEL (TREE_VEC_ELT (vec, i));
582 gcc_assert (!outside_finally_tree (lab, tf->try_finally_expr));
/* Without checking enabled, verification is a no-op.  */
586 #define verify_norecord_switch_expr(state, switch_expr)
589 /* Redirect a RETURN_EXPR pointed to by STMT_P to FINLAB. Place in CONT_P
590 whatever is needed to finish the return. If MOD is non-null, insert it
591 before the new branch. RETURN_VALUE_P is a cache containing a temporary
592 variable to be used in manipulating the value returned from the function. */
/* Redirect the RETURN_EXPR in Q to FINLAB, capturing the return value
   into *RETURN_VALUE_P so the same finally block can serve multiple
   returns; MOD (if non-null) is emitted before the branch.  */
595 do_return_redirection (struct goto_queue_node *q, tree finlab, tree mod,
596 tree *return_value_p)
598 tree ret_expr = TREE_OPERAND (q->stmt, 0);
603 /* The nasty part about redirecting the return value is that the
604 return value itself is to be computed before the FINALLY block
618 should return 0, not 1. Arrange for this to happen by copying
619 computed the return value into a local temporary. This also
620 allows us to redirect multiple return statements through the
621 same destination block; whether this is a net win or not really
622 depends, I guess, but it does make generation of the switch in
623 lower_try_finally_switch easier. */
625 switch (TREE_CODE (ret_expr))
/* All returns share a single cached return-value expression.  */
628 if (!*return_value_p)
629 *return_value_p = ret_expr;
631 gcc_assert (*return_value_p == ret_expr);
632 q->cont_stmt = q->stmt;
635 case GIMPLE_MODIFY_STMT:
637 tree result = GIMPLE_STMT_OPERAND (ret_expr, 0);
638 tree new, old = GIMPLE_STMT_OPERAND (ret_expr, 1);
640 if (!*return_value_p)
642 if (aggregate_value_p (TREE_TYPE (result),
643 TREE_TYPE (current_function_decl)))
644 /* If this function returns in memory, copy the argument
645 into the return slot now. Otherwise, we might need to
646 worry about magic return semantics, so we need to use a
647 temporary to hold the value until we're actually ready
651 new = create_tmp_var (TREE_TYPE (old), "rettmp");
652 *return_value_p = new;
655 new = *return_value_p;
/* Save the computed value into the temporary before branching.  */
657 x = build_gimple_modify_stmt (new, old);
658 append_to_statement_list (x, &q->repl_stmt);
/* The continuation copies the temporary back and returns.  */
663 x = build_gimple_modify_stmt (result, new);
664 q->cont_stmt = build1 (RETURN_EXPR, void_type_node, x);
673 /* If we don't return a value, all return statements are the same. */
674 q->cont_stmt = q->stmt;
678 append_to_statement_list (mod, &q->repl_stmt);
680 x = build1 (GOTO_EXPR, void_type_node, finlab);
681 append_to_statement_list (x, &q->repl_stmt);
684 /* Similar, but easier, for GOTO_EXPR. */
/* Redirect the GOTO_EXPR in Q to FINLAB: remember the original goto
   as the continuation, emit MOD (if any), then jump to FINLAB.  */
687 do_goto_redirection (struct goto_queue_node *q, tree finlab, tree mod)
691 q->cont_stmt = q->stmt;
693 append_to_statement_list (mod, &q->repl_stmt);
695 x = build1 (GOTO_EXPR, void_type_node, finlab);
696 append_to_statement_list (x, &q->repl_stmt);
699 /* We want to transform
700 try { body; } catch { stuff; }
702 body; goto over; lab: stuff; over:
704 T is a TRY_FINALLY or TRY_CATCH node. LAB is the label that
705 should be placed before the second operand, or NULL. OVER is
706 an existing label that should be put at the exit, or NULL. */
/* Transform "try { body } handler { stuff }" at *TP into straight-line
   code: body; goto OVER; LAB: stuff; OVER:.  */
709 frob_into_branch_around (tree *tp, tree lab, tree over)
/* Detach the handler (operand 1); *TP becomes just the body.  */
713 op1 = TREE_OPERAND (*tp, 1);
714 *tp = TREE_OPERAND (*tp, 0);
/* If the body can fall off the end, branch around the handler.  */
716 if (block_may_fallthru (*tp))
719 over = create_artificial_label ();
720 x = build1 (GOTO_EXPR, void_type_node, over);
721 append_to_statement_list (x, tp);
726 x = build1 (LABEL_EXPR, void_type_node, lab);
727 append_to_statement_list (x, tp);
730 append_to_statement_list (op1, tp);
734 x = build1 (LABEL_EXPR, void_type_node, over);
735 append_to_statement_list (x, tp);
739 /* A subroutine of lower_try_finally. Duplicate the tree rooted at T.
740 Make sure to record all new labels found. */
/* Return a deep copy of T suitable for reuse, recording any labels in
   the copy against the enclosing try_finally in OUTER_STATE.  */
743 lower_try_finally_dup_block (tree t, struct leh_state *outer_state)
747 t = unsave_expr_now (t);
750 region = outer_state->tf->try_finally_expr;
751 collect_finally_tree (t, region);
756 /* A subroutine of lower_try_finally. Create a fallthru label for
757 the given try_finally state. The only tricky bit here is that
758 we have to make sure to record the label in our outer context. */
/* Return TF's fallthru label, creating and caching it on first use,
   and registering it with the enclosing try_finally so that
   outside_finally_tree can see it.  */
761 lower_try_finally_fallthru_label (struct leh_tf_state *tf)
763 tree label = tf->fallthru_label;
766 label = create_artificial_label ();
767 tf->fallthru_label = label;
769 record_in_finally_tree (label, tf->outer->tf->try_finally_expr);
774 /* A subroutine of lower_try_finally. If lang_protect_cleanup_actions
775 returns non-null, then the language requires that the exception path out
776 of a try_finally be treated specially. To wit: the code within the
777 finally block may not itself throw an exception. We have two choices here.
778 First we can duplicate the finally block and wrap it in a must_not_throw
779 region. Second, we can generate code like
784 if (fintmp == eh_edge)
785 protect_cleanup_actions;
788 where "fintmp" is the temporary used in the switch statement generation
789 alternative considered below. For the nonce, we always choose the first
792 THIS_STATE may be null if this is a try-cleanup, not a try-finally. */
/* Handle the exception edge out of a try_finally by duplicating the
   finally block into a (possibly must-not-throw wrapped) handler; see
   the block comment above for the rationale.  On return, TF's EH case
   is fully handled (may_throw is cleared).  */
795 honor_protect_cleanup_actions (struct leh_state *outer_state,
796 struct leh_state *this_state,
797 struct leh_tf_state *tf)
799 tree protect_cleanup_actions, finally, x;
800 tree_stmt_iterator i;
801 bool finally_may_fallthru;
803 /* First check for nothing to do. */
804 if (lang_protect_cleanup_actions)
805 protect_cleanup_actions = lang_protect_cleanup_actions ();
807 protect_cleanup_actions = NULL;
809 finally = TREE_OPERAND (*tf->top_p, 1);
811 /* If the EH case of the finally block can fall through, this may be a
812 structure of the form
825 E.g. with an inline destructor with an embedded try block. In this
826 case we must save the runtime EH data around the nested exception.
828 This complication means that any time the previous runtime data might
829 be used (via fallthru from the finally) we handle the eh case here,
830 whether or not protect_cleanup_actions is active. */
832 finally_may_fallthru = block_may_fallthru (finally);
833 if (!finally_may_fallthru && !protect_cleanup_actions)
836 /* Duplicate the FINALLY block. Only need to do this for try-finally,
837 and not for cleanups. */
839 finally = lower_try_finally_dup_block (finally, outer_state);
841 /* Resume execution after the exception. Adding this now lets
842 lower_eh_filter not add unnecessary gotos, as it is clear that
843 we never fallthru from this copy of the finally block. */
844 if (finally_may_fallthru)
846 tree save_eptr, save_filt;
848 save_eptr = create_tmp_var (ptr_type_node, "save_eptr");
849 save_filt = create_tmp_var (integer_type_node, "save_filt");
/* Save the live exception pointer and filter value on entry to the
   duplicated finally block...  */
851 i = tsi_start (finally);
852 x = build0 (EXC_PTR_EXPR, ptr_type_node);
853 x = build_gimple_modify_stmt (save_eptr, x);
854 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
856 x = build0 (FILTER_EXPR, integer_type_node);
857 x = build_gimple_modify_stmt (save_filt, x);
858 tsi_link_before (&i, x, TSI_CONTINUE_LINKING);
/* ...restore them at the end, then rethrow via RESX.  */
860 i = tsi_last (finally);
861 x = build0 (EXC_PTR_EXPR, ptr_type_node);
862 x = build_gimple_modify_stmt (x, save_eptr);
863 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
865 x = build0 (FILTER_EXPR, integer_type_node);
866 x = build_gimple_modify_stmt (x, save_filt);
867 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
869 x = build_resx (get_eh_region_number (tf->region));
870 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
873 /* Wrap the block with protect_cleanup_actions as the action. */
874 if (protect_cleanup_actions)
876 x = build2 (EH_FILTER_EXPR, void_type_node, NULL, NULL);
877 append_to_statement_list (protect_cleanup_actions, &EH_FILTER_FAILURE (x));
878 EH_FILTER_MUST_NOT_THROW (x) = 1;
879 finally = build2 (TRY_CATCH_EXPR, void_type_node, finally, x);
880 lower_eh_filter (outer_state, &finally);
883 lower_eh_constructs_1 (outer_state, &finally);
885 /* Hook this up to the end of the existing try block. If we
886 previously fell through the end, we'll have to branch around.
887 This means adding a new goto, and adding it to the queue. */
889 i = tsi_last (TREE_OPERAND (*tf->top_p, 0));
891 if (tf->may_fallthru)
893 x = lower_try_finally_fallthru_label (tf);
894 x = build1 (GOTO_EXPR, void_type_node, x);
895 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
898 maybe_record_in_goto_queue (this_state, x);
900 tf->may_fallthru = false;
/* Land the EH edge here, followed by the duplicated finally.  */
903 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
904 tsi_link_after (&i, x, TSI_CONTINUE_LINKING);
905 tsi_link_after (&i, finally, TSI_CONTINUE_LINKING);
907 /* Having now been handled, EH isn't to be considered with
908 the rest of the outgoing edges. */
909 tf->may_throw = false;
912 /* A subroutine of lower_try_finally. We have determined that there is
913 no fallthru edge out of the finally block. This means that there is
914 no outgoing edge corresponding to any incoming edge. Restructure the
915 try_finally node for this special case. */
/* Lower a try_finally with no fallthru edge out of the finally block:
   the try body runs into a label to which every queued goto/return is
   redirected, followed by the (lowered) finally block.  */
918 lower_try_finally_nofallthru (struct leh_state *state, struct leh_tf_state *tf)
920 tree x, finally, lab, return_val;
921 struct goto_queue_node *q, *qe;
926 lab = create_artificial_label ();
/* Mash the TRY block to the head of the chain, then the label.  */
928 finally = TREE_OPERAND (*tf->top_p, 1);
929 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
931 x = build1 (LABEL_EXPR, void_type_node, lab);
932 append_to_statement_list (x, tf->top_p);
/* Redirect every queued return/goto to LAB.  */
936 qe = q + tf->goto_queue_active;
939 do_return_redirection (q, lab, NULL, &return_val);
941 do_goto_redirection (q, lab, NULL);
943 replace_goto_queue (tf);
945 lower_eh_constructs_1 (state, &finally);
946 append_to_statement_list (finally, tf->top_p);
949 /* A subroutine of lower_try_finally. We have determined that there is
950 exactly one destination of the finally block. Restructure the
951 try_finally node for this special case. */
/* Lower a try_finally with exactly one destination out of the finally
   block: exception-only, fallthru-only, return-only, or a single goto.  */
954 lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
956 struct goto_queue_node *q, *qe;
957 tree x, finally, finally_label;
959 finally = TREE_OPERAND (*tf->top_p, 1);
960 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
962 lower_eh_constructs_1 (state, &finally);
966 /* Only reachable via the exception edge. Add the given label to
967 the head of the FINALLY block. Append a RESX at the end. */
969 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
970 append_to_statement_list (x, tf->top_p);
972 append_to_statement_list (finally, tf->top_p);
974 x = build_resx (get_eh_region_number (tf->region));
976 append_to_statement_list (x, tf->top_p);
981 if (tf->may_fallthru)
983 /* Only reachable via the fallthru edge. Do nothing but let
984 the two blocks run together; we'll fall out the bottom. */
985 append_to_statement_list (finally, tf->top_p);
/* One goto or return destination: place the finally block under a
   fresh label and redirect the queued edges to it.  */
989 finally_label = create_artificial_label ();
990 x = build1 (LABEL_EXPR, void_type_node, finally_label);
991 append_to_statement_list (x, tf->top_p);
993 append_to_statement_list (finally, tf->top_p);
996 qe = q + tf->goto_queue_active;
1000 /* Reachable by return expressions only. Redirect them. */
1001 tree return_val = NULL;
1003 do_return_redirection (q, finally_label, NULL, &return_val);
1004 replace_goto_queue (tf);
1008 /* Reachable by goto expressions only. Redirect them. */
1010 do_goto_redirection (q, finally_label, NULL);
1011 replace_goto_queue (tf);
1013 if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
1015 /* Reachable by goto to fallthru label only. Redirect it
1016 to the new label (already created, sadly), and do not
1017 emit the final branch out, or the fallthru label. */
1018 tf->fallthru_label = NULL;
1023 /* Reset the locus of the goto since we're moving
1024 goto to a different block which might be on a different line. */
1025 SET_EXPR_LOCUS (tf->goto_queue[0].cont_stmt, NULL);
1026 append_to_statement_list (tf->goto_queue[0].cont_stmt, tf->top_p);
1027 maybe_record_in_goto_queue (state, tf->goto_queue[0].cont_stmt);
1030 /* A subroutine of lower_try_finally. There are multiple edges incoming
1031 and outgoing from the finally block. Implement this by duplicating the
1032 finally block for every destination. */
/* Lower a try_finally with multiple edges by emitting one duplicated
   copy of the finally block per destination (fallthru, EH, each goto
   target, and return).  */
1035 lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
1037 tree finally, new_stmt;
1040 finally = TREE_OPERAND (*tf->top_p, 1);
1041 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1043 new_stmt = NULL_TREE;
/* Fallthru copy: finally, then jump to the fallthru label.  */
1045 if (tf->may_fallthru)
1047 x = lower_try_finally_dup_block (finally, state);
1048 lower_eh_constructs_1 (state, &x);
1049 append_to_statement_list (x, &new_stmt);
1051 x = lower_try_finally_fallthru_label (tf);
1052 x = build1 (GOTO_EXPR, void_type_node, x);
1053 append_to_statement_list (x, &new_stmt);
/* EH copy: land the exception edge, run finally, then rethrow.  */
1058 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1059 append_to_statement_list (x, &new_stmt);
1061 x = lower_try_finally_dup_block (finally, state);
1062 lower_eh_constructs_1 (state, &x);
1063 append_to_statement_list (x, &new_stmt);
1065 x = build_resx (get_eh_region_number (tf->region));
1066 append_to_statement_list (x, &new_stmt);
1071 struct goto_queue_node *q, *qe;
1072 tree return_val = NULL;
1073 int return_index, index;
1076 struct goto_queue_node *q;
/* One labels[] slot per goto destination, plus one for returns
   at index return_index.  */
1080 return_index = VEC_length (tree, tf->dest_array);
1081 labels = XCNEWVEC (struct labels_s, return_index + 1);
1084 qe = q + tf->goto_queue_active;
/* First queue entry per destination becomes its representative.  */
1087 index = q->index < 0 ? return_index : q->index;
1089 if (!labels[index].q)
1090 labels[index].q = q;
/* Emit one labeled finally copy per used destination, redirecting
   the representative entry and appending its continuation.  */
1093 for (index = 0; index < return_index + 1; index++)
1097 q = labels[index].q;
1101 lab = labels[index].label = create_artificial_label ();
1103 if (index == return_index)
1104 do_return_redirection (q, lab, NULL, &return_val);
1106 do_goto_redirection (q, lab, NULL);
1108 x = build1 (LABEL_EXPR, void_type_node, lab);
1109 append_to_statement_list (x, &new_stmt);
1111 x = lower_try_finally_dup_block (finally, state);
1112 lower_eh_constructs_1 (state, &x);
1113 append_to_statement_list (x, &new_stmt);
1115 append_to_statement_list (q->cont_stmt, &new_stmt);
1116 maybe_record_in_goto_queue (state, q->cont_stmt);
/* Redirect the remaining (non-representative) queue entries to the
   labels already created.  */
1119 for (q = tf->goto_queue; q < qe; q++)
1123 index = q->index < 0 ? return_index : q->index;
1125 if (labels[index].q == q)
1128 lab = labels[index].label;
1130 if (index == return_index)
1131 do_return_redirection (q, lab, NULL, &return_val);
1133 do_goto_redirection (q, lab, NULL);
1136 replace_goto_queue (tf);
1140 /* Need to link new stmts after running replace_goto_queue due
1141 to not wanting to process the same goto stmts twice. */
1142 append_to_statement_list (new_stmt, tf->top_p);
1145 /* A subroutine of lower_try_finally. There are multiple edges incoming
1146 and outgoing from the finally block. Implement this by instrumenting
1147 each incoming edge and creating a switch statement at the end of the
1148 finally block that branches to the appropriate destination. */
/* Lower a try_finally with multiple edges by emitting one shared copy
   of the finally block: each incoming edge sets finally_tmp to a
   distinct index, and a switch after the finally dispatches to the
   proper destination.  */
1151 lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
1153 struct goto_queue_node *q, *qe;
1154 tree return_val = NULL;
1155 tree finally, finally_tmp, finally_label;
1156 int return_index, eh_index, fallthru_index;
1157 int nlabels, ndests, j, last_case_index;
1158 tree case_label_vec, switch_stmt, last_case, switch_body;
1161 /* Mash the TRY block to the head of the chain. */
1162 finally = TREE_OPERAND (*tf->top_p, 1);
1163 *tf->top_p = TREE_OPERAND (*tf->top_p, 0);
1165 /* Lower the finally block itself. */
1166 lower_eh_constructs_1 (state, &finally);
1168 /* Prepare for switch statement generation. */
/* Dispatch indices: goto destinations first, then return, EH and
   fallthru, each present only if the corresponding flag is set.  */
1169 nlabels = VEC_length (tree, tf->dest_array);
1170 return_index = nlabels;
1171 eh_index = return_index + tf->may_return;
1172 fallthru_index = eh_index + tf->may_throw;
1173 ndests = fallthru_index + tf->may_fallthru;
1175 finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
1176 finally_label = create_artificial_label ();
1178 case_label_vec = make_tree_vec (ndests);
1179 switch_stmt = build3 (SWITCH_EXPR, integer_type_node, finally_tmp,
1180 NULL_TREE, case_label_vec);
1183 last_case_index = 0;
1185 /* Begin inserting code for getting to the finally block. Things
1186 are done in this order to correspond to the sequence the code is
/* Fallthru edge: set finally_tmp and jump to the finally block.  */
1189 if (tf->may_fallthru)
1191 x = build_gimple_modify_stmt (finally_tmp,
1192 build_int_cst (integer_type_node,
1194 append_to_statement_list (x, tf->top_p);
1198 x = build1 (GOTO_EXPR, void_type_node, finally_label);
1199 append_to_statement_list (x, tf->top_p);
1203 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1204 build_int_cst (NULL_TREE, fallthru_index), NULL,
1205 create_artificial_label ());
1206 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1209 x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1210 append_to_statement_list (x, &switch_body);
1212 x = lower_try_finally_fallthru_label (tf);
1213 x = build1 (GOTO_EXPR, void_type_node, x);
1214 append_to_statement_list (x, &switch_body);
/* EH edge: land at eh_label, set finally_tmp, and add a case that
   rethrows via RESX.  */
1219 x = build1 (LABEL_EXPR, void_type_node, tf->eh_label);
1220 append_to_statement_list (x, tf->top_p);
1222 x = build_gimple_modify_stmt (finally_tmp,
1223 build_int_cst (integer_type_node,
1225 append_to_statement_list (x, tf->top_p);
1227 last_case = build3 (CASE_LABEL_EXPR, void_type_node,
1228 build_int_cst (NULL_TREE, eh_index), NULL,
1229 create_artificial_label ());
1230 TREE_VEC_ELT (case_label_vec, last_case_index) = last_case;
1233 x = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (last_case));
1234 append_to_statement_list (x, &switch_body);
1235 x = build_resx (get_eh_region_number (tf->region));
1236 append_to_statement_list (x, &switch_body);
1239 x = build1 (LABEL_EXPR, void_type_node, finally_label);
1240 append_to_statement_list (x, tf->top_p);
1242 append_to_statement_list (finally, tf->top_p);
1244 /* Redirect each incoming goto edge. */
1246 qe = q + tf->goto_queue_active;
1247 j = last_case_index + tf->may_return;
1251 int switch_id, case_index;
1255 mod = build_gimple_modify_stmt (finally_tmp,
1256 build_int_cst (integer_type_node,
1258 do_return_redirection (q, finally_label, mod, &return_val);
1259 switch_id = return_index;
1263 mod = build_gimple_modify_stmt (finally_tmp,
1264 build_int_cst (integer_type_node,
1266 do_goto_redirection (q, finally_label, mod);
1267 switch_id = q->index;
1270 case_index = j + q->index;
1271 if (!TREE_VEC_ELT (case_label_vec, case_index))
1272 TREE_VEC_ELT (case_label_vec, case_index)
1273 = build3 (CASE_LABEL_EXPR, void_type_node,
1274 build_int_cst (NULL_TREE, switch_id), NULL,
1275 /* We store the cont_stmt in the
1276 CASE_LABEL, so that we can recover it
1277 in the loop below. We don't create
1278 the new label while walking the
1279 goto_queue because pointers don't
1280 offer a stable order. */
/* Now replace each stashed cont_stmt with a real label, emitting the
   label and the continuation into the switch body.  */
1283 for (j = last_case_index; j < last_case_index + nlabels; j++)
1288 last_case = TREE_VEC_ELT (case_label_vec, j);
1290 gcc_assert (last_case);
1292 cont_stmt = CASE_LABEL (last_case);
1294 label = create_artificial_label ();
1295 CASE_LABEL (last_case) = label;
1297 x = build1 (LABEL_EXPR, void_type_node, label);
1298 append_to_statement_list (x, &switch_body);
1299 append_to_statement_list (cont_stmt, &switch_body);
1300 maybe_record_in_goto_queue (state, cont_stmt);
1302 replace_goto_queue (tf);
1304 /* Make sure that the last case is the default label, as one is required.
1305 Then sort the labels, which is also required in GIMPLE. */
1306 CASE_LOW (last_case) = NULL;
1307 sort_case_labels (case_label_vec);
1309 /* Need to link switch_stmt after running replace_goto_queue due
1310 to not wanting to process the same goto stmts twice. */
1311 append_to_statement_list (switch_stmt, tf->top_p);
1312 append_to_statement_list (switch_body, tf->top_p);
1315 /* Decide whether or not we are going to duplicate the finally block.
1316 There are several considerations.
1318 First, if this is Java, then the finally block contains code
1319 written by the user. It has line numbers associated with it,
1320 so duplicating the block means it's difficult to set a breakpoint.
1321 Since controlling code generation via -g is verboten, we simply
1322 never duplicate code without optimization.
1324 Second, we'd like to prevent egregious code growth. One way to
1325 do this is to estimate the size of the finally block, multiply
1326 that by the number of copies we'd need to make, and compare against
1327 the estimate of the size of the switch machinery we'd have to add. */
/* NDESTS is the number of distinct destinations the finally block must
   reach; FINALLY is the finally block itself.  Returns nonzero when
   copying the finally block once per destination is preferred over
   routing all exits through a single switch.  */
1330 decide_copy_try_finally (int ndests, tree finally)
1332 int f_estimate, sw_estimate;
1337 /* Finally estimate N times, plus N gotos. */
1338 f_estimate = estimate_num_insns (finally, &eni_size_weights);
1339 f_estimate = (f_estimate + 1) * ndests;
1341 /* Switch statement (cost 10), N variable assignments, N gotos. */
1342 sw_estimate = 10 + 2 * ndests;
/* NOTE(review): the guards selecting among the three returns below
   (presumably optimization level / -Os checks) are elided in this
   extract -- confirm against the full source.  */
1344 /* Optimize for size clearly wants our best guess. */
1346 return f_estimate < sw_estimate;
1348 /* ??? These numbers are completely made up so far. */
1350 return f_estimate < 100 || f_estimate < sw_estimate * 2;
1352 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
1355 /* A subroutine of lower_eh_constructs_1. Lower a TRY_FINALLY_EXPR nodes
1356 to a sequence of labels and blocks, plus the exception region trees
1357 that record all the magic. This is complicated by the need to
1358 arrange for the FINALLY block to be executed on all exits. */
/* STATE is the enclosing lowering state; TP points at the
   TRY_FINALLY_EXPR to rewrite in place.  */
1361 lower_try_finally (struct leh_state *state, tree *tp)
1363 struct leh_tf_state this_tf;
1364 struct leh_state this_state;
1367 /* Process the try block. */
1369 memset (&this_tf, 0, sizeof (this_tf));
1370 this_tf.try_finally_expr = *tp;
1372 this_tf.outer = state;
/* A cleanup EH region is created only when EH is being used for
   cleanups; otherwise the finally block gets no region of its own.  */
1373 if (using_eh_for_cleanups_p)
1375 = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1377 this_tf.region = NULL;
1379 this_state.cur_region = this_tf.region;
1380 this_state.prev_try = state->prev_try;
1381 this_state.tf = &this_tf;
1383 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
1385 /* Determine if the try block is escaped through the bottom. */
1386 this_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1388 /* Determine if any exceptions are possible within the try block. */
1389 if (using_eh_for_cleanups_p)
1390 this_tf.may_throw = get_eh_region_may_contain_throw (this_tf.region);
/* If the try block can throw, the EH edge needs a landing label and
   the MUST_NOT_THROW machinery must be honored.  */
1391 if (this_tf.may_throw)
1393 this_tf.eh_label = create_artificial_label ();
1394 set_eh_region_tree_label (this_tf.region, this_tf.eh_label);
1395 honor_protect_cleanup_actions (state, &this_state, &this_tf);
1398 /* Determine how many edges (still) reach the finally block. Or rather,
1399 how many destinations are reached by the finally block. Use this to
1400 determine how we process the finally block itself. */
1402 ndests = VEC_length (tree, this_tf.dest_array);
1403 ndests += this_tf.may_fallthru;
1404 ndests += this_tf.may_return;
1405 ndests += this_tf.may_throw;
1407 /* If the FINALLY block is not reachable, dike it out. */
1409 *tp = TREE_OPERAND (*tp, 0);
1411 /* If the finally block doesn't fall through, then any destination
1412 we might try to impose there isn't reached either. There may be
1413 some minor amount of cleanup and redirection still needed. */
1414 else if (!block_may_fallthru (TREE_OPERAND (*tp, 1)))
1415 lower_try_finally_nofallthru (state, &this_tf);
1417 /* We can easily special-case redirection to a single destination. */
1418 else if (ndests == 1)
1419 lower_try_finally_onedest (state, &this_tf);
/* Otherwise pick between duplicating the finally block per
   destination and routing through a switch, by size estimate.  */
1421 else if (decide_copy_try_finally (ndests, TREE_OPERAND (*tp, 1)))
1422 lower_try_finally_copy (state, &this_tf);
1424 lower_try_finally_switch (state, &this_tf);
1426 /* If someone requested we add a label at the end of the transformed
1428 if (this_tf.fallthru_label)
1430 tree x = build1 (LABEL_EXPR, void_type_node, this_tf.fallthru_label);
1431 append_to_statement_list (x, tp);
/* Release the scratch state accumulated while lowering this
   try/finally.  */
1434 VEC_free (tree, heap, this_tf.dest_array);
1435 if (this_tf.goto_queue)
1436 free (this_tf.goto_queue);
1437 if (this_tf.goto_queue_map)
1438 pointer_map_destroy (this_tf.goto_queue_map);
1441 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1442 list of CATCH_EXPR nodes to a sequence of labels and blocks, plus the
1443 exception region trees that record all the magic. */
/* STATE is the enclosing lowering state; TP points at the
   TRY_CATCH_EXPR to rewrite in place.  */
1446 lower_catch (struct leh_state *state, tree *tp)
1448 struct eh_region *try_region;
1449 struct leh_state this_state;
1450 tree_stmt_iterator i;
1453 try_region = gen_eh_region_try (state->cur_region);
1454 this_state.cur_region = try_region;
1455 this_state.prev_try = try_region;
1456 this_state.tf = state->tf;
1458 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
/* If nothing inside the try body can throw, the whole construct
   collapses to the body alone.  */
1460 if (!get_eh_region_may_contain_throw (try_region))
1462 *tp = TREE_OPERAND (*tp, 0);
/* Walk each CATCH_EXPR: give it its own catch region and landing
   label, and lower its body.  */
1467 for (i = tsi_start (TREE_OPERAND (*tp, 1)); !tsi_end_p (i); )
1469 struct eh_region *catch_region;
1470 tree catch, x, eh_label;
1472 catch = tsi_stmt (i);
1473 catch_region = gen_eh_region_catch (try_region, CATCH_TYPES (catch));
1475 this_state.cur_region = catch_region;
1476 this_state.prev_try = state->prev_try;
1477 lower_eh_constructs_1 (&this_state, &CATCH_BODY (catch));
1479 eh_label = create_artificial_label ();
1480 set_eh_region_tree_label (catch_region, eh_label);
1482 x = build1 (LABEL_EXPR, void_type_node, eh_label);
1483 tsi_link_before (&i, x, TSI_SAME_STMT);
/* A handler that can fall off its end must branch to the shared
   out_label past all the handlers.  */
1485 if (block_may_fallthru (CATCH_BODY (catch)))
1488 out_label = create_artificial_label ();
1490 x = build1 (GOTO_EXPR, void_type_node, out_label);
1491 append_to_statement_list (x, &CATCH_BODY (catch));
1494 tsi_link_before (&i, CATCH_BODY (catch), TSI_SAME_STMT);
1498 frob_into_branch_around (tp, NULL, out_label);
1501 /* A subroutine of lower_eh_constructs_1. Lower a TRY_CATCH_EXPR with a
1502 EH_FILTER_EXPR to a sequence of labels and blocks, plus the exception
1503 region trees that record all the magic. */
/* STATE is the enclosing lowering state; TP points at the
   TRY_CATCH_EXPR whose handler is an EH_FILTER_EXPR.  */
1506 lower_eh_filter (struct leh_state *state, tree *tp)
1508 struct leh_state this_state;
1509 struct eh_region *this_region;
1510 tree inner = expr_first (TREE_OPERAND (*tp, 1));
/* MUST_NOT_THROW filters get a dedicated region kind; otherwise the
   filter's allowed-types list defines the region.  */
1513 if (EH_FILTER_MUST_NOT_THROW (inner))
1514 this_region = gen_eh_region_must_not_throw (state->cur_region);
1516 this_region = gen_eh_region_allowed (state->cur_region,
1517 EH_FILTER_TYPES (inner));
1518 this_state = *state;
1519 this_state.cur_region = this_region;
1520 /* For must not throw regions any cleanup regions inside it
1521 can't reach outer catch regions. */
1522 if (EH_FILTER_MUST_NOT_THROW (inner))
1523 this_state.prev_try = NULL;
1525 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
/* If the body cannot throw, the filter is dead: keep just the body.  */
1527 if (!get_eh_region_may_contain_throw (this_region))
1529 *tp = TREE_OPERAND (*tp, 0);
/* Lower the failure action in the OUTER state and make it the
   construct's handler.  */
1533 lower_eh_constructs_1 (state, &EH_FILTER_FAILURE (inner));
1534 TREE_OPERAND (*tp, 1) = EH_FILTER_FAILURE (inner);
1536 eh_label = create_artificial_label ();
1537 set_eh_region_tree_label (this_region, eh_label);
1539 frob_into_branch_around (tp, eh_label, NULL);
1542 /* Implement a cleanup expression. This is similar to try-finally,
1543 except that we only execute the cleanup block for exception edges. */
/* STATE is the enclosing lowering state; TP points at the construct
   to rewrite in place.  */
1546 lower_cleanup (struct leh_state *state, tree *tp)
1548 struct leh_state this_state;
1549 struct eh_region *this_region;
1550 struct leh_tf_state fake_tf;
1552 /* If not using eh, then exception-only cleanups are no-ops. */
1553 if (!flag_exceptions)
1555 *tp = TREE_OPERAND (*tp, 0);
1556 lower_eh_constructs_1 (state, tp);
1560 this_region = gen_eh_region_cleanup (state->cur_region, state->prev_try);
1561 this_state = *state;
1562 this_state.cur_region = this_region;
1564 lower_eh_constructs_1 (&this_state, &TREE_OPERAND (*tp, 0));
/* If the protected body cannot throw, the cleanup is unreachable.  */
1566 if (!get_eh_region_may_contain_throw (this_region))
1568 *tp = TREE_OPERAND (*tp, 0);
1572 /* Build enough of a try-finally state so that we can reuse
1573 honor_protect_cleanup_actions. */
1574 memset (&fake_tf, 0, sizeof (fake_tf));
1576 fake_tf.outer = state;
1577 fake_tf.region = this_region;
1578 fake_tf.may_fallthru = block_may_fallthru (TREE_OPERAND (*tp, 0));
1579 fake_tf.may_throw = true;
1581 fake_tf.eh_label = create_artificial_label ();
1582 set_eh_region_tree_label (this_region, fake_tf.eh_label);
1584 honor_protect_cleanup_actions (state, NULL, &fake_tf);
/* honor_protect_cleanup_actions clears may_throw when it has fully
   consumed the cleanup; branch on that outcome.  */
1586 if (fake_tf.may_throw)
1588 /* In this case honor_protect_cleanup_actions had nothing to do,
1589 and we should process this normally. */
1590 lower_eh_constructs_1 (state, &TREE_OPERAND (*tp, 1));
1591 frob_into_branch_around (tp, fake_tf.eh_label, fake_tf.fallthru_label);
1595 /* In this case honor_protect_cleanup_actions did nearly all of
1596 the work. All we have left is to append the fallthru_label. */
1598 *tp = TREE_OPERAND (*tp, 0);
1599 if (fake_tf.fallthru_label)
1601 tree x = build1 (LABEL_EXPR, void_type_node, fake_tf.fallthru_label);
1602 append_to_statement_list (x, tp);
1607 /* Main loop for lowering eh constructs. */
/* Recursively walk the statement at *TP under STATE, recording which
   statements can throw and dispatching TRY_FINALLY_EXPR /
   TRY_CATCH_EXPR constructs to their specific lowering routines.  */
1610 lower_eh_constructs_1 (struct leh_state *state, tree *tp)
1612 tree_stmt_iterator i;
1615 switch (TREE_CODE (t))
1618 lower_eh_constructs_1 (state, &COND_EXPR_THEN (t));
1619 lower_eh_constructs_1 (state, &COND_EXPR_ELSE (t));
1623 /* Look for things that can throw exceptions, and record them. */
1624 if (state->cur_region && tree_could_throw_p (t))
1626 record_stmt_eh_region (state->cur_region, t);
1627 note_eh_region_may_contain_throw (state->cur_region);
1631 case GIMPLE_MODIFY_STMT:
1632 /* Look for things that can throw exceptions, and record them. */
1633 if (state->cur_region && tree_could_throw_p (t))
1635 record_stmt_eh_region (state->cur_region, t);
1636 note_eh_region_may_contain_throw (state->cur_region);
/* Gotos/returns may need redirection through a finally block.  */
1642 maybe_record_in_goto_queue (state, t);
1645 verify_norecord_switch_expr (state, t);
1648 case TRY_FINALLY_EXPR:
1649 lower_try_finally (state, tp);
/* The handler operand's code distinguishes catch-list, EH filter,
   and cleanup forms of TRY_CATCH_EXPR.  */
1652 case TRY_CATCH_EXPR:
1653 i = tsi_start (TREE_OPERAND (t, 1));
1654 switch (TREE_CODE (tsi_stmt (i)))
1657 lower_catch (state, tp);
1659 case EH_FILTER_EXPR:
1660 lower_eh_filter (state, tp);
1663 lower_cleanup (state, tp);
1668 case STATEMENT_LIST:
1669 for (i = tsi_start (t); !tsi_end_p (i); )
1671 lower_eh_constructs_1 (state, tsi_stmt_ptr (i));
/* Lowering may have replaced one statement with a list; splice
   the list back in so iteration continues inside it.  */
1673 if (TREE_CODE (t) == STATEMENT_LIST)
1675 tsi_link_before (&i, t, TSI_SAME_STMT);
1684 /* A type, a decl, or some kind of statement that we're not
1685 interested in. Don't walk them. */
/* Entry point of the EH lowering pass: lower all EH constructs in the
   current function's saved tree, then collect the EH region array.  */
1691 lower_eh_constructs (void)
1693 struct leh_state null_state;
1694 tree *tp = &DECL_SAVED_TREE (current_function_decl);
/* finally_tree maps statements to their enclosing try/finally; built
   up front so goto redirection can consult it during lowering.  */
1696 finally_tree = htab_create (31, struct_ptr_hash, struct_ptr_eq, free);
1698 collect_finally_tree (*tp, NULL);
1700 memset (&null_state, 0, sizeof (null_state));
1701 lower_eh_constructs_1 (&null_state, tp);
1703 htab_delete (finally_tree);
1705 collect_eh_region_array ();
/* Pass descriptor for the EH lowering pass: requires lowered control
   flow (PROP_gimple_lcf) and provides lowered EH (PROP_gimple_leh).  */
1709 struct tree_opt_pass pass_lower_eh =
1713 lower_eh_constructs, /* execute */
1716 0, /* static_pass_number */
1717 TV_TREE_EH, /* tv_id */
1718 PROP_gimple_lcf, /* properties_required */
1719 PROP_gimple_leh, /* properties_provided */
1720 0, /* properties_destroyed */
1721 0, /* todo_flags_start */
1722 TODO_dump_func, /* todo_flags_finish */
1727 /* Construct EH edges for STMT. */
/* Callback for foreach_reachable_handler: add an abnormal EH edge from
   the block containing the statement (passed via DATA) to the block
   holding REGION's landing label.  */
1730 make_eh_edge (struct eh_region *region, void *data)
1733 basic_block src, dst;
1736 lab = get_eh_region_tree_label (region);
1738 src = bb_for_stmt (stmt);
1739 dst = label_to_block (lab);
1741 make_edge (src, dst, EDGE_ABNORMAL | EDGE_EH);
/* Create EH edges for every handler reachable from STMT.  For a
   RESX_EXPR the region number is the statement's operand; otherwise it
   is looked up in the statement-to-region table.  */
1745 make_eh_edges (tree stmt)
1750 if (TREE_CODE (stmt) == RESX_EXPR)
1752 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1757 region_nr = lookup_stmt_eh_region (stmt);
1763 foreach_reachable_handler (region_nr, is_resx, make_eh_edge, stmt);
/* Set by mark_eh_edge when verification finds a problem; read back by
   verify_eh_edges.  */
1766 static bool mark_eh_edge_found_error;
1768 /* Mark edge make_eh_edge would create for given region by setting it aux
1769 field, output error if something goes wrong. */
/* Callback for foreach_reachable_handler (DATA is the statement);
   errors are reported, not fatal, so verification can continue.  */
1771 mark_eh_edge (struct eh_region *region, void *data)
1774 basic_block src, dst;
1778 lab = get_eh_region_tree_label (region);
1780 src = bb_for_stmt (stmt);
1781 dst = label_to_block (lab);
1783 e = find_edge (src, dst);
1786 error ("EH edge %i->%i is missing", src->index, dst->index);
1787 mark_eh_edge_found_error = true;
1789 else if (!(e->flags & EDGE_EH))
1791 error ("EH edge %i->%i miss EH flag", src->index, dst->index);
1792 mark_eh_edge_found_error = true;
/* An already-marked edge means two regions map to the same edge.  */
1796 /* ??? might not be mistake. */
1797 error ("EH edge %i->%i has duplicated regions", src->index, dst->index);
1798 mark_eh_edge_found_error = true;
1804 /* Verify that BB containing stmt as last stmt has precisely the edges
1805 make_eh_edges would create. */
/* Returns true if any discrepancy was found.  Uses each edge's aux
   field as a scratch mark; asserts it starts clear.  */
1807 verify_eh_edges (tree stmt)
1811 basic_block bb = bb_for_stmt (stmt);
1815 FOR_EACH_EDGE (e, ei, bb->succs)
1816 gcc_assert (!e->aux);
1817 mark_eh_edge_found_error = false;
1818 if (TREE_CODE (stmt) == RESX_EXPR)
1820 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0));
1825 region_nr = lookup_stmt_eh_region (stmt);
/* No region: the block must have no EH successors at all.  */
1828 FOR_EACH_EDGE (e, ei, bb->succs)
1829 if (e->flags & EDGE_EH)
1831 error ("BB %i can not throw but has EH edges", bb->index);
1836 if (!tree_could_throw_p (stmt))
1838 error ("BB %i last statement has incorrectly set region", bb->index);
/* Mark every edge make_eh_edges would create, then any unmarked EH
   edge that remains is superfluous.  */
1844 foreach_reachable_handler (region_nr, is_resx, mark_eh_edge, stmt);
1845 FOR_EACH_EDGE (e, ei, bb->succs)
1847 if ((e->flags & EDGE_EH) && !e->aux)
1849 error ("unnecessary EH edge %i->%i", bb->index, e->dest->index);
1850 mark_eh_edge_found_error = true;
1855 return mark_eh_edge_found_error;
1859 /* Return true if the expr can trap, as in dereferencing an invalid pointer
1860 location or floating point arithmetic. C.f. the rtl version, may_trap_p.
1861 This routine expects only GIMPLE lhs or rhs input. */
1864 tree_could_trap_p (tree expr)
1866 enum tree_code code = TREE_CODE (expr);
1867 bool honor_nans = false;
1868 bool honor_snans = false;
1869 bool fp_operation = false;
1870 bool honor_trapv = false;
/* For arithmetic/comparison nodes, derive the trap-relevant flags
   from the operand type: FP ops care about NaNs/sNaNs, trapping
   integer types about overflow.  */
1873 if (TREE_CODE_CLASS (code) == tcc_comparison
1874 || TREE_CODE_CLASS (code) == tcc_unary
1875 || TREE_CODE_CLASS (code) == tcc_binary)
1877 t = TREE_TYPE (expr);
1878 fp_operation = FLOAT_TYPE_P (t);
1881 honor_nans = flag_trapping_math && !flag_finite_math_only;
1882 honor_snans = flag_signaling_nans != 0;
1884 else if (INTEGRAL_TYPE_P (t) && TYPE_OVERFLOW_TRAPS (t))
1891 case TARGET_MEM_REF:
1892 /* For TARGET_MEM_REFs use the information based on the original
1894 expr = TMR_ORIGINAL (expr);
1895 code = TREE_CODE (expr);
/* Wrappers: look through to the underlying reference.  */
1902 case VIEW_CONVERT_EXPR:
1903 case WITH_SIZE_EXPR:
1904 expr = TREE_OPERAND (expr, 0);
1905 code = TREE_CODE (expr);
/* Array references trap if the base traps or the index is (possibly)
   out of bounds, unless annotated TREE_THIS_NOTRAP.  */
1908 case ARRAY_RANGE_REF:
1909 base = TREE_OPERAND (expr, 0);
1910 if (tree_could_trap_p (base))
1913 if (TREE_THIS_NOTRAP (expr))
1916 return !range_in_array_bounds_p (expr);
1919 base = TREE_OPERAND (expr, 0);
1920 if (tree_could_trap_p (base))
1923 if (TREE_THIS_NOTRAP (expr))
1926 return !in_array_bounds_p (expr);
1929 case ALIGN_INDIRECT_REF:
1930 case MISALIGNED_INDIRECT_REF:
1931 return !TREE_THIS_NOTRAP (expr);
1934 return TREE_THIS_VOLATILE (expr);
/* Division and modulus: trap on sNaN/trapv operands, on a
   non-constant or zero divisor, or per flag_trapping_math for FP.  */
1936 case TRUNC_DIV_EXPR:
1938 case FLOOR_DIV_EXPR:
1939 case ROUND_DIV_EXPR:
1940 case EXACT_DIV_EXPR:
1942 case FLOOR_MOD_EXPR:
1943 case ROUND_MOD_EXPR:
1944 case TRUNC_MOD_EXPR:
1946 if (honor_snans || honor_trapv)
1949 return flag_trapping_math;
1950 t = TREE_OPERAND (expr, 1);
1951 if (!TREE_CONSTANT (t) || integer_zerop (t))
1960 /* Some floating point comparisons may trap. */
1965 case UNORDERED_EXPR:
1975 case FIX_TRUNC_EXPR:
1976 /* Conversion of floating point might trap. */
1982 /* These operations don't trap with floating point. */
1990 /* Any floating arithmetic may trap. */
1991 if (fp_operation && flag_trapping_math)
1998 t = get_callee_fndecl (expr);
1999 /* Assume that calls to weak functions may trap. */
2000 if (!t || !DECL_P (t) || DECL_WEAK (t))
2005 /* Any floating arithmetic may trap. */
2006 if (fp_operation && flag_trapping_math)
/* Return true if statement T could throw an exception.  With
   -fexceptions off, nothing throws; otherwise calls may throw unless
   ECF_NOTHROW, and with -fnon-call-exceptions trapping expressions
   (either side of an assignment) may throw too.  */
2013 tree_could_throw_p (tree t)
2015 if (!flag_exceptions)
2017 if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
/* The store itself (lhs) can trap under non-call exceptions.  */
2019 if (flag_non_call_exceptions
2020 && tree_could_trap_p (GIMPLE_STMT_OPERAND (t, 0)))
2022 t = GIMPLE_STMT_OPERAND (t, 1);
2025 if (TREE_CODE (t) == WITH_SIZE_EXPR)
2026 t = TREE_OPERAND (t, 0);
2027 if (TREE_CODE (t) == CALL_EXPR)
2028 return (call_expr_flags (t) & ECF_NOTHROW) == 0;
2029 if (flag_non_call_exceptions)
2030 return tree_could_trap_p (t);
/* Return true if STMT can throw to a handler within the current
   function.  RESX_EXPR carries its region number as an operand;
   other statements use the EH region table.  */
2035 tree_can_throw_internal (tree stmt)
2038 bool is_resx = false;
2040 if (TREE_CODE (stmt) == RESX_EXPR)
2041 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
2043 region_nr = lookup_stmt_eh_region (stmt);
2046 return can_throw_internal_1 (region_nr, is_resx);
/* Return true if STMT can throw past the current function.  When the
   statement has no recorded EH region, fall back to whether it could
   throw at all.  */
2050 tree_can_throw_external (tree stmt)
2053 bool is_resx = false;
2055 if (TREE_CODE (stmt) == RESX_EXPR)
2056 region_nr = TREE_INT_CST_LOW (TREE_OPERAND (stmt, 0)), is_resx = true;
2058 region_nr = lookup_stmt_eh_region (stmt);
2060 return tree_could_throw_p (stmt);
2062 return can_throw_external_1 (region_nr, is_resx);
2065 /* Given a statement OLD_STMT and a new statement NEW_STMT that has replaced
2066 OLD_STMT in the function, remove OLD_STMT from the EH table and put NEW_STMT
2067 in the table if it should be in there. Return TRUE if a replacement was
2068 done that my require an EH edge purge. */
2071 maybe_clean_or_replace_eh_stmt (tree old_stmt, tree new_stmt)
2073 int region_nr = lookup_stmt_eh_region (old_stmt);
2077 bool new_stmt_could_throw = tree_could_throw_p (new_stmt);
2079 if (new_stmt == old_stmt && new_stmt_could_throw)
2082 remove_stmt_from_eh_region (old_stmt);
2083 if (new_stmt_could_throw)
2085 add_stmt_to_eh_region (new_stmt, region_nr);