1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
6 This file is part of GNU CC.
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to any arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error occurs without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
57 #include "insn-config.h"
59 #include "integrate.h"
60 #include "hard-reg-set.h"
61 #include "basic-block.h"
63 #include "dwarf2asm.h"
64 #include "dwarf2out.h"
73 /* Provide defaults for stuff that may not be defined when using
75 #ifndef EH_RETURN_STACKADJ_RTX
76 #define EH_RETURN_STACKADJ_RTX 0
78 #ifndef EH_RETURN_HANDLER_RTX
79 #define EH_RETURN_HANDLER_RTX 0
81 #ifndef EH_RETURN_DATA_REGNO
82 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
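/* For illustration only (not part of this file): a DWARF-2 capable
   target would normally override these defaults in its target header.
   The register numbers below are hypothetical and not taken from any
   real port:

     #define EH_RETURN_DATA_REGNO(N)  ((N) < 2 ? (N) : INVALID_REGNUM)
     #define EH_RETURN_STACKADJ_RTX   gen_rtx_REG (Pmode, 10)
     #define EH_RETURN_HANDLER_RTX    gen_rtx_REG (Pmode, 11)

   EH_RETURN_DATA_REGNO maps an index onto a hard register used to pass
   exception data from the unwinder to the landing pad.  */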
86 /* Nonzero means enable synchronous exceptions for non-call instructions. */
87 int flag_non_call_exceptions;
89 /* Protect cleanup actions with must-not-throw regions, with a call
90 to the given failure handler. */
91 tree (*lang_protect_cleanup_actions) PARAMS ((void));
93 /* Return true if type A catches type B. */
94 int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
96 /* Map a type to a runtime object to match type. */
97 tree (*lang_eh_runtime_type) PARAMS ((tree));
99 /* A list of labels used for exception handlers. */
100 rtx exception_handler_labels;
102 static int call_site_base;
103 static unsigned int sjlj_funcdef_number;
104 static htab_t type_to_runtime_map;
106 /* Describe the SjLj_Function_Context structure. */
107 static tree sjlj_fc_type_node;
108 static int sjlj_fc_call_site_ofs;
109 static int sjlj_fc_data_ofs;
110 static int sjlj_fc_personality_ofs;
111 static int sjlj_fc_lsda_ofs;
112 static int sjlj_fc_jbuf_ofs;
114 /* Describes one exception region. */
117 /* The immediately surrounding region. */
118 struct eh_region *outer;
120 /* The list of immediately contained regions. */
121 struct eh_region *inner;
122 struct eh_region *next_peer;
124 /* An identifier for this region. */
127 /* Each region does exactly one thing. */
133 ERT_ALLOWED_EXCEPTIONS,
139 /* Holds the action to perform based on the preceding type. */
141 /* A list of catch blocks, a surrounding try block,
142 and the label for continuing after a catch. */
144 struct eh_region *catch;
145 struct eh_region *last_catch;
146 struct eh_region *prev_try;
150 /* The list through the catch handlers, the type object
151 matched, and a pointer to the generated code. */
153 struct eh_region *next_catch;
154 struct eh_region *prev_catch;
159 /* A tree_list of allowed types. */
165 /* The type given by a call to "throw foo();", or discovered
171 /* Retain the cleanup expression even after expansion so that
172 we can match up fixup regions. */
177 /* The real region (by expression and by pointer) that fixup code
181 struct eh_region *real_region;
185 /* Entry point for this region's handler before landing pads are built. */
188 /* Entry point for this region's handler from the runtime eh library. */
191 /* Entry point for this region's handler from an inner region. */
192 rtx post_landing_pad;
194 /* The RESX insn for handing off control to the next outermost handler,
199 /* Used to save exception status for each function. */
202 /* The tree of all regions for this function. */
203 struct eh_region *region_tree;
205 /* The same information as an indexable array. */
206 struct eh_region **region_array;
208 /* The most recently open region. */
209 struct eh_region *cur_region;
211 /* This is the region for which we are processing catch blocks. */
212 struct eh_region *try_region;
214 /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
215 node is itself a TREE_CHAINed list of handlers for regions that
216 are not yet closed. The TREE_VALUE of each entry contains the
217 handler for the corresponding entry on the ehstack. */
223 int built_landing_pads;
224 int last_region_number;
226 varray_type ttype_data;
227 varray_type ehspec_data;
228 varray_type action_record_data;
230 struct call_site_record
235 int call_site_data_used;
236 int call_site_data_size;
247 static void mark_eh_region PARAMS ((struct eh_region *));
249 static int t2r_eq PARAMS ((const PTR,
251 static hashval_t t2r_hash PARAMS ((const PTR));
252 static int t2r_mark_1 PARAMS ((PTR *, PTR));
253 static void t2r_mark PARAMS ((PTR));
254 static void add_type_for_runtime PARAMS ((tree));
255 static tree lookup_type_for_runtime PARAMS ((tree));
257 static struct eh_region *expand_eh_region_end PARAMS ((void));
259 static rtx get_exception_filter PARAMS ((struct function *));
261 static void collect_eh_region_array PARAMS ((void));
262 static void resolve_fixup_regions PARAMS ((void));
263 static void remove_fixup_regions PARAMS ((void));
264 static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
266 static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
267 struct inline_remap *));
268 static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
269 struct eh_region **));
270 static int ttypes_filter_eq PARAMS ((const PTR,
272 static hashval_t ttypes_filter_hash PARAMS ((const PTR));
273 static int ehspec_filter_eq PARAMS ((const PTR,
275 static hashval_t ehspec_filter_hash PARAMS ((const PTR));
276 static int add_ttypes_entry PARAMS ((htab_t, tree));
277 static int add_ehspec_entry PARAMS ((htab_t, htab_t,
279 static void assign_filter_values PARAMS ((void));
280 static void build_post_landing_pads PARAMS ((void));
281 static void connect_post_landing_pads PARAMS ((void));
282 static void dw2_build_landing_pads PARAMS ((void));
285 static bool sjlj_find_directly_reachable_regions
286 PARAMS ((struct sjlj_lp_info *));
287 static void sjlj_assign_call_site_values
288 PARAMS ((rtx, struct sjlj_lp_info *));
289 static void sjlj_mark_call_sites
290 PARAMS ((struct sjlj_lp_info *));
291 static void sjlj_emit_function_enter PARAMS ((rtx));
292 static void sjlj_emit_function_exit PARAMS ((void));
293 static void sjlj_emit_dispatch_table
294 PARAMS ((rtx, struct sjlj_lp_info *));
295 static void sjlj_build_landing_pads PARAMS ((void));
297 static void remove_exception_handler_label PARAMS ((rtx));
298 static void remove_eh_handler PARAMS ((struct eh_region *));
300 struct reachable_info;
302 /* The return value of reachable_next_level. */
305 /* The given exception is not processed by the given region. */
307 /* The given exception may need processing by the given region. */
309 /* The given exception is completely processed by the given region. */
311 /* The given exception is completely processed by the runtime. */
315 static int check_handled PARAMS ((tree, tree));
316 static void add_reachable_handler
317 PARAMS ((struct reachable_info *, struct eh_region *,
318 struct eh_region *));
319 static enum reachable_code reachable_next_level
320 PARAMS ((struct eh_region *, tree, struct reachable_info *));
322 static int action_record_eq PARAMS ((const PTR,
324 static hashval_t action_record_hash PARAMS ((const PTR));
325 static int add_action_record PARAMS ((htab_t, int, int));
326 static int collect_one_action_chain PARAMS ((htab_t,
327 struct eh_region *));
328 static int add_call_site PARAMS ((rtx, int));
330 static void push_uleb128 PARAMS ((varray_type *,
332 static void push_sleb128 PARAMS ((varray_type *, int));
333 #ifndef HAVE_AS_LEB128
334 static int dw2_size_of_call_site_table PARAMS ((void));
335 static int sjlj_size_of_call_site_table PARAMS ((void));
337 static void dw2_output_call_site_table PARAMS ((void));
338 static void sjlj_output_call_site_table PARAMS ((void));
341 /* Routine to see if exception handling is turned on.
342 DO_WARN is non-zero if we want to inform the user that exception
343 handling is turned off.
345 This is used to ensure that -fexceptions has been specified if the
346 compiler tries to use any exception-specific functions. */
352 if (! flag_exceptions)
354 static int warned = 0;
355 if (! warned && do_warn)
357 error ("exception handling disabled, use -fexceptions to enable");
369 ggc_add_rtx_root (&exception_handler_labels, 1);
371 if (! flag_exceptions)
374 type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
375 ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
377 /* Create the SjLj_Function_Context structure. This should match
378 the definition in unwind-sjlj.c. */
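/* A rough sketch of the layout built below, assuming the runtime
   definition in unwind-sjlj.c has the usual shape (field names and
   the jbuf size here are illustrative; the runtime source is
   authoritative):

     struct SjLj_Function_Context
     {
       struct SjLj_Function_Context *prev;
       int call_site;
       _Unwind_Word data[4];
       _Unwind_Personality_Fn personality;
       void *lsda;
       void *jbuf[JBUF_WORDS];     size computed below
     };
*/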
379 if (USING_SJLJ_EXCEPTIONS)
381 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
383 sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
384 ggc_add_tree_root (&sjlj_fc_type_node, 1);
386 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
387 build_pointer_type (sjlj_fc_type_node));
388 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
390 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
392 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
394 tmp = build_index_type (build_int_2 (4 - 1, 0));
395 tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
396 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
397 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
399 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
401 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
403 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
405 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
407 #ifdef DONT_USE_BUILTIN_SETJMP
409 tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
411 /* This should be large enough for most systems; if it is not,
412 JMP_BUF_SIZE should be defined with the proper value. It will
413 also tend to be larger than necessary for most systems; a more
414 optimal port will define JMP_BUF_SIZE. */
415 tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
418 /* This is 2 for builtin_setjmp, plus whatever the target requires
419 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
420 tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
421 / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
423 tmp = build_index_type (tmp);
424 tmp = build_array_type (ptr_type_node, tmp);
425 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
426 #ifdef DONT_USE_BUILTIN_SETJMP
427 /* We don't know what the alignment requirements of the
428 runtime's jmp_buf are. Overestimate. */
429 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
430 DECL_USER_ALIGN (f_jbuf) = 1;
432 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
434 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
435 TREE_CHAIN (f_prev) = f_cs;
436 TREE_CHAIN (f_cs) = f_data;
437 TREE_CHAIN (f_data) = f_per;
438 TREE_CHAIN (f_per) = f_lsda;
439 TREE_CHAIN (f_lsda) = f_jbuf;
441 layout_type (sjlj_fc_type_node);
443 /* Cache the interesting field offsets so that we have
444 easy access from rtl. */
445 sjlj_fc_call_site_ofs
446 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
447 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
449 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
450 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
451 sjlj_fc_personality_ofs
452 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
453 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
455 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
456 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
458 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
459 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
464 init_eh_for_function ()
466 cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
469 /* Mark EH for GC. */
472 mark_eh_region (region)
473 struct eh_region *region;
478 switch (region->type)
481 ggc_mark_tree (region->u.cleanup.exp);
484 ggc_mark_rtx (region->u.try.continue_label);
487 ggc_mark_tree (region->u.catch.type);
489 case ERT_ALLOWED_EXCEPTIONS:
490 ggc_mark_tree (region->u.allowed.type_list);
492 case ERT_MUST_NOT_THROW:
495 ggc_mark_tree (region->u.throw.type);
498 ggc_mark_tree (region->u.fixup.cleanup_exp);
504 ggc_mark_rtx (region->label);
505 ggc_mark_rtx (region->resume);
506 ggc_mark_rtx (region->landing_pad);
507 ggc_mark_rtx (region->post_landing_pad);
512 struct eh_status *eh;
519 /* If we've called collect_eh_region_array, use it. Otherwise walk
520 the tree non-recursively. */
521 if (eh->region_array)
523 for (i = eh->last_region_number; i > 0; --i)
525 struct eh_region *r = eh->region_array[i];
526 if (r && r->region_number == i)
530 else if (eh->region_tree)
532 struct eh_region *r = eh->region_tree;
538 else if (r->next_peer)
546 } while (r->next_peer == NULL);
553 ggc_mark_tree (eh->protect_list);
554 ggc_mark_rtx (eh->filter);
555 ggc_mark_rtx (eh->exc_ptr);
556 ggc_mark_tree_varray (eh->ttype_data);
558 if (eh->call_site_data)
560 for (i = eh->call_site_data_used - 1; i >= 0; --i)
561 ggc_mark_rtx (eh->call_site_data[i].landing_pad);
564 ggc_mark_rtx (eh->ehr_stackadj);
565 ggc_mark_rtx (eh->ehr_handler);
566 ggc_mark_rtx (eh->ehr_label);
568 ggc_mark_rtx (eh->sjlj_fc);
569 ggc_mark_rtx (eh->sjlj_exit_after);
576 struct eh_status *eh = f->eh;
578 if (eh->region_array)
581 for (i = eh->last_region_number; i > 0; --i)
583 struct eh_region *r = eh->region_array[i];
584 /* Mind we don't free a region struct more than once. */
585 if (r && r->region_number == i)
588 free (eh->region_array);
590 else if (eh->region_tree)
592 struct eh_region *next, *r = eh->region_tree;
597 else if (r->next_peer)
611 } while (r->next_peer == NULL);
620 VARRAY_FREE (eh->ttype_data);
621 VARRAY_FREE (eh->ehspec_data);
622 VARRAY_FREE (eh->action_record_data);
623 if (eh->call_site_data)
624 free (eh->call_site_data);
631 /* Start an exception handling region. All instructions emitted
632 after this point are considered to be part of the region until
633 expand_eh_region_end is invoked. */
636 expand_eh_region_start ()
638 struct eh_region *new_region;
639 struct eh_region *cur_region;
645 /* Insert a new blank region as a leaf in the tree. */
646 new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
647 cur_region = cfun->eh->cur_region;
648 new_region->outer = cur_region;
651 new_region->next_peer = cur_region->inner;
652 cur_region->inner = new_region;
656 new_region->next_peer = cfun->eh->region_tree;
657 cfun->eh->region_tree = new_region;
659 cfun->eh->cur_region = new_region;
661 /* Create a note marking the start of this region. */
662 new_region->region_number = ++cfun->eh->last_region_number;
663 note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
664 NOTE_EH_HANDLER (note) = new_region->region_number;
667 /* Common code to end a region. Returns the region just ended. */
669 static struct eh_region *
670 expand_eh_region_end ()
672 struct eh_region *cur_region = cfun->eh->cur_region;
675 /* Create a note marking the end of this region. */
676 note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
677 NOTE_EH_HANDLER (note) = cur_region->region_number;
680 cfun->eh->cur_region = cur_region->outer;
685 /* End an exception handling region for a cleanup. HANDLER is an
686 expression to expand for the cleanup. */
689 expand_eh_region_end_cleanup (handler)
692 struct eh_region *region;
693 tree protect_cleanup_actions;
700 region = expand_eh_region_end ();
701 region->type = ERT_CLEANUP;
702 region->label = gen_label_rtx ();
703 region->u.cleanup.exp = handler;
705 around_label = gen_label_rtx ();
706 emit_jump (around_label);
708 emit_label (region->label);
710 /* Give the language a chance to specify an action to be taken if an
711 exception is thrown that would propagate out of the HANDLER. */
712 protect_cleanup_actions
713 = (lang_protect_cleanup_actions
714 ? (*lang_protect_cleanup_actions) ()
717 if (protect_cleanup_actions)
718 expand_eh_region_start ();
720 /* In case this cleanup involves an inline destructor with a try block in
721 it, we need to save the EH return data registers around it. */
722 data_save[0] = gen_reg_rtx (Pmode);
723 emit_move_insn (data_save[0], get_exception_pointer (cfun));
724 data_save[1] = gen_reg_rtx (word_mode);
725 emit_move_insn (data_save[1], get_exception_filter (cfun));
727 expand_expr (handler, const0_rtx, VOIDmode, 0);
729 emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
730 emit_move_insn (cfun->eh->filter, data_save[1]);
732 if (protect_cleanup_actions)
733 expand_eh_region_end_must_not_throw (protect_cleanup_actions);
735 /* We need any stack adjustment complete before the around_label. */
736 do_pending_stack_adjust ();
738 /* We delay the generation of the _Unwind_Resume until we generate
739 landing pads. We emit a marker here so as to get good control
740 flow data in the meantime. */
742 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
745 emit_label (around_label);
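/* Illustrative usage from a front end (a sketch only; STMT and
   CLEANUP_EXPR are assumed trees supplied by the caller, and the
   expand_expr_stmt call is one plausible way to emit the body):

     expand_eh_region_start ();
     expand_expr_stmt (stmt);                        protected code
     expand_eh_region_end_cleanup (cleanup_expr);    run on unwind

   Everything emitted between the two calls belongs to the cleanup
   region created here.  */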
748 /* End an exception handling region for a try block, and prepare
749 for subsequent calls to expand_start_catch. */
752 expand_start_all_catch ()
754 struct eh_region *region;
759 region = expand_eh_region_end ();
760 region->type = ERT_TRY;
761 region->u.try.prev_try = cfun->eh->try_region;
762 region->u.try.continue_label = gen_label_rtx ();
764 cfun->eh->try_region = region;
766 emit_jump (region->u.try.continue_label);
769 /* Begin a catch clause. TYPE is the type caught, or null if this is
770 a catch-all clause. */
773 expand_start_catch (type)
776 struct eh_region *t, *c, *l;
782 add_type_for_runtime (type);
783 expand_eh_region_start ();
785 t = cfun->eh->try_region;
786 c = cfun->eh->cur_region;
788 c->u.catch.type = type;
789 c->label = gen_label_rtx ();
791 l = t->u.try.last_catch;
792 c->u.catch.prev_catch = l;
794 l->u.catch.next_catch = c;
797 t->u.try.last_catch = c;
799 emit_label (c->label);
802 /* End a catch clause. Control will resume after the try/catch block. */
807 struct eh_region *try_region, *catch_region;
812 catch_region = expand_eh_region_end ();
813 try_region = cfun->eh->try_region;
815 emit_jump (try_region->u.try.continue_label);
818 /* End a sequence of catch handlers for a try block. */
821 expand_end_all_catch ()
823 struct eh_region *try_region;
828 try_region = cfun->eh->try_region;
829 cfun->eh->try_region = try_region->u.try.prev_try;
831 emit_label (try_region->u.try.continue_label);
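/* Illustrative call sequence for a try/catch statement (a front-end
   sketch; the expand_expr_stmt calls and the TYPE tree are
   assumptions, not code from any particular front end):

     expand_eh_region_start ();
     expand_expr_stmt (try_body);
     expand_start_all_catch ();
       expand_start_catch (type);        null TYPE means catch-all
       expand_expr_stmt (handler_body);
       expand_end_catch ();
     expand_end_all_catch ();

   Control resumes after expand_end_all_catch via the try region's
   continue_label.  */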
834 /* End an exception region for an exception type filter. ALLOWED is a
835 TREE_LIST of types to be matched by the runtime. FAILURE is an
836 expression to invoke if a mismatch occurs. */
839 expand_eh_region_end_allowed (allowed, failure)
840 tree allowed, failure;
842 struct eh_region *region;
848 region = expand_eh_region_end ();
849 region->type = ERT_ALLOWED_EXCEPTIONS;
850 region->u.allowed.type_list = allowed;
851 region->label = gen_label_rtx ();
853 for (; allowed ; allowed = TREE_CHAIN (allowed))
854 add_type_for_runtime (TREE_VALUE (allowed));
856 /* We must emit the call to FAILURE here, so that if this function
857 throws a different exception, it will be processed by the
860 around_label = gen_label_rtx ();
861 emit_jump (around_label);
863 emit_label (region->label);
864 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
865 /* We must adjust the stack before we reach the AROUND_LABEL because
866 the call to FAILURE does not occur on all paths to the
868 do_pending_stack_adjust ();
870 emit_label (around_label);
873 /* End an exception region for a must-not-throw filter. FAILURE is an
874 expression to invoke if an uncaught exception propagates this far.
876 This is conceptually identical to expand_eh_region_end_allowed with
877 an empty allowed list (if you passed "std::terminate" instead of
878 "__cxa_call_unexpected"), but they are represented differently in
882 expand_eh_region_end_must_not_throw (failure)
885 struct eh_region *region;
891 region = expand_eh_region_end ();
892 region->type = ERT_MUST_NOT_THROW;
893 region->label = gen_label_rtx ();
895 /* We must emit the call to FAILURE here, so that if this function
896 throws a different exception, it will be processed by the
899 around_label = gen_label_rtx ();
900 emit_jump (around_label);
902 emit_label (region->label);
903 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
905 emit_label (around_label);
908 /* End an exception region for a throw. No handling goes on here,
909 but it's the easiest way for the front-end to indicate what type
913 expand_eh_region_end_throw (type)
916 struct eh_region *region;
921 region = expand_eh_region_end ();
922 region->type = ERT_THROW;
923 region->u.throw.type = type;
926 /* End a fixup region. Within this region the cleanups for the immediately
927 enclosing region are _not_ run. This is used for goto cleanup to avoid
928 destroying an object twice.
930 This would be an extraordinarily simple prospect, were it not for the
931 fact that we don't actually know what the immediately enclosing region
932 is. This surprising fact is because expand_cleanups is currently
933 generating a sequence that it will insert somewhere else. We collect
934 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
937 expand_eh_region_end_fixup (handler)
940 struct eh_region *fixup;
945 fixup = expand_eh_region_end ();
946 fixup->type = ERT_FIXUP;
947 fixup->u.fixup.cleanup_exp = handler;
950 /* Return an rtl expression for a pointer to the exception object
954 get_exception_pointer (fun)
955 struct function *fun;
957 rtx exc_ptr = fun->eh->exc_ptr;
958 if (fun == cfun && ! exc_ptr)
960 exc_ptr = gen_reg_rtx (Pmode);
961 fun->eh->exc_ptr = exc_ptr;
966 /* Return an rtl expression for the exception dispatch filter
970 get_exception_filter (fun)
971 struct function *fun;
973 rtx filter = fun->eh->filter;
974 if (fun == cfun && ! filter)
976 filter = gen_reg_rtx (word_mode);
977 fun->eh->filter = filter;
982 /* Begin a region that will contain entries created with
983 add_partial_entry. */
986 begin_protect_partials ()
988 /* Push room for a new list. */
989 cfun->eh->protect_list
990 = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
993 /* Start a new exception region for a region of code that has a
994 cleanup action and push the HANDLER for the region onto
995 protect_list. All of the regions created with add_partial_entry
996 will be ended when end_protect_partials is invoked. */
999 add_partial_entry (handler)
1002 expand_eh_region_start ();
1004 /* ??? This comment was old before the most recent rewrite. We
1005 really ought to fix the callers at some point. */
1006 /* For backwards compatibility, we allow callers to omit calls to
1007 begin_protect_partials for the outermost region. So, we must
1008 explicitly do so here. */
1009 if (!cfun->eh->protect_list)
1010 begin_protect_partials ();
1012 /* Add this entry to the front of the list. */
1013 TREE_VALUE (cfun->eh->protect_list)
1014 = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
1017 /* End all the pending exception regions on protect_list. */
1020 end_protect_partials ()
1024 /* ??? This comment was old before the most recent rewrite. We
1025 really ought to fix the callers at some point. */
1026 /* For backwards compatibility, we allow callers to omit the call to
1027 begin_protect_partials for the outermost region. So,
1028 PROTECT_LIST may be NULL. */
1029 if (!cfun->eh->protect_list)
1032 /* Pop the topmost entry. */
1033 t = TREE_VALUE (cfun->eh->protect_list);
1034 cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
1036 /* End all the exception regions. */
1037 for (; t; t = TREE_CHAIN (t))
1038 expand_eh_region_end_cleanup (TREE_VALUE (t));
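/* Illustrative pairing (a sketch; CLEANUP1 and CLEANUP2 are
   hypothetical cleanup expressions provided by a front end):

     begin_protect_partials ();
     add_partial_entry (cleanup1);
     ... emit code protected by cleanup1 ...
     add_partial_entry (cleanup2);
     ... emit code protected by both cleanups ...
     end_protect_partials ();

   Each add_partial_entry opens a new region; end_protect_partials
   closes them all with expand_eh_region_end_cleanup, innermost
   first.  */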
1042 /* This section is for the exception handling specific optimization pass. */
1044 /* Random access the exception region tree. It's just as simple to
1045 collect the regions this way as in expand_eh_region_start, but
1046 without having to realloc memory. */
1049 collect_eh_region_array ()
1051 struct eh_region **array, *i;
1053 i = cfun->eh->region_tree;
1057 array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
1058 cfun->eh->region_array = array;
1062 array[i->region_number] = i;
1064 /* If there are sub-regions, process them. */
1067 /* If there are peers, process them. */
1068 else if (i->next_peer)
1070 /* Otherwise, step back up the tree to the next peer. */
1077 } while (i->next_peer == NULL);
1084 resolve_fixup_regions ()
1086 int i, j, n = cfun->eh->last_region_number;
1088 for (i = 1; i <= n; ++i)
1090 struct eh_region *fixup = cfun->eh->region_array[i];
1091 struct eh_region *cleanup = 0;
1093 if (! fixup || fixup->type != ERT_FIXUP)
1096 for (j = 1; j <= n; ++j)
1098 cleanup = cfun->eh->region_array[j];
1099 if (cleanup->type == ERT_CLEANUP
1100 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
1106 fixup->u.fixup.real_region = cleanup->outer;
1110 /* Now that we've discovered what region actually encloses a fixup,
1111 we can shuffle pointers and remove them from the tree. */
1114 remove_fixup_regions ()
1118 struct eh_region *fixup;
1120 /* Walk the insn chain and adjust the REG_EH_REGION numbers
1121 for instructions referencing fixup regions. This is only
1122 strictly necessary for fixup regions with no parent, but
1123 doesn't hurt to do it for all regions. */
1124 for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
1126 && (note = find_reg_note (insn, REG_EH_REGION, NULL))
1127 && INTVAL (XEXP (note, 0)) > 0
1128 && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
1129 && fixup->type == ERT_FIXUP)
1131 if (fixup->u.fixup.real_region)
1132 XEXP (note, 1) = GEN_INT (fixup->u.fixup.real_region->region_number);
1134 remove_note (insn, note);
1137 /* Remove the fixup regions from the tree. */
1138 for (i = cfun->eh->last_region_number; i > 0; --i)
1140 fixup = cfun->eh->region_array[i];
1144 /* Allow GC to maybe free some memory. */
1145 if (fixup->type == ERT_CLEANUP)
1146 fixup->u.cleanup.exp = NULL_TREE;
1148 if (fixup->type != ERT_FIXUP)
1153 struct eh_region *parent, *p, **pp;
1155 parent = fixup->u.fixup.real_region;
1157 /* Fix up the children's parent pointers; find the end of
1159 for (p = fixup->inner; ; p = p->next_peer)
1166 /* In the tree of cleanups, only outer-inner ordering matters.
1167 So link the children back in anywhere at the correct level. */
1169 pp = &parent->inner;
1171 pp = &cfun->eh->region_tree;
1174 fixup->inner = NULL;
1177 remove_eh_handler (fixup);
1181 /* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1182 can_throw instruction in the region. */
1185 convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
1193 for (insn = *pinsns; insn ; insn = next)
1195 next = NEXT_INSN (insn);
1196 if (GET_CODE (insn) == NOTE)
1198 int kind = NOTE_LINE_NUMBER (insn);
1199 if (kind == NOTE_INSN_EH_REGION_BEG
1200 || kind == NOTE_INSN_EH_REGION_END)
1202 if (kind == NOTE_INSN_EH_REGION_BEG)
1204 struct eh_region *r;
1207 cur = NOTE_EH_HANDLER (insn);
1209 r = cfun->eh->region_array[cur];
1210 if (r->type == ERT_FIXUP)
1212 r = r->u.fixup.real_region;
1213 cur = r ? r->region_number : 0;
1215 else if (r->type == ERT_CATCH)
1218 cur = r ? r->region_number : 0;
1224 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1225 requires extra care to adjust sequence start. */
1226 if (insn == *pinsns)
1232 else if (INSN_P (insn))
1235 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1236 /* Calls can always potentially throw exceptions, unless
1237 they have a REG_EH_REGION note with a value of 0 or less,
1238 which should be the only possible kind so far. */
1239 && (GET_CODE (insn) == CALL_INSN
1240 /* If we wanted exceptions for non-call insns, then
1241 any may_trap_p instruction could throw. */
1242 || (flag_non_call_exceptions
1243 && may_trap_p (PATTERN (insn)))))
1245 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
1249 if (GET_CODE (insn) == CALL_INSN
1250 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1252 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
1254 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
1256 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
1267 convert_from_eh_region_ranges ()
1272 collect_eh_region_array ();
1273 resolve_fixup_regions ();
1275 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1276 insns = get_insns ();
1277 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1280 remove_fixup_regions ();
1284 find_exception_handler_labels ()
1286 rtx list = NULL_RTX;
1289 free_EXPR_LIST_list (&exception_handler_labels);
1291 if (cfun->eh->region_tree == NULL)
1294 for (i = cfun->eh->last_region_number; i > 0; --i)
1296 struct eh_region *region = cfun->eh->region_array[i];
1301 if (cfun->eh->built_landing_pads)
1302 lab = region->landing_pad;
1304 lab = region->label;
1307 list = alloc_EXPR_LIST (0, lab, list);
1310 /* For sjlj exceptions, we need the return label to remain live until
1311 after landing pad generation. */
1312 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
1313 list = alloc_EXPR_LIST (0, return_label, list);
1315 exception_handler_labels = list;
1319 static struct eh_region *
1320 duplicate_eh_region_1 (o, map)
1321 struct eh_region *o;
1322 struct inline_remap *map;
1325 = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
1327 n->region_number = o->region_number + cfun->eh->last_region_number;
1333 case ERT_MUST_NOT_THROW:
1337 if (o->u.try.continue_label)
1338 n->u.try.continue_label
1339 = get_label_from_map (map,
1340 CODE_LABEL_NUMBER (o->u.try.continue_label));
1344 n->u.catch.type = o->u.catch.type;
1347 case ERT_ALLOWED_EXCEPTIONS:
1348 n->u.allowed.type_list = o->u.allowed.type_list;
1352 n->u.throw.type = o->u.throw.type;
1359 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
1362 n->resume = map->insn_map[INSN_UID (o->resume)];
1363 if (n->resume == NULL)
1371 duplicate_eh_region_2 (o, n_array)
1372 struct eh_region *o;
1373 struct eh_region **n_array;
1375 struct eh_region *n = n_array[o->region_number];
1380 n->u.try.catch = n_array[o->u.try.catch->region_number];
1381 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1385 if (o->u.catch.next_catch)
1386 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1387 if (o->u.catch.prev_catch)
1388 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1396 n->outer = n_array[o->outer->region_number];
1398 n->inner = n_array[o->inner->region_number];
1400 n->next_peer = n_array[o->next_peer->region_number];
1404 duplicate_eh_regions (ifun, map)
1405 struct function *ifun;
1406 struct inline_remap *map;
1408 int ifun_last_region_number = ifun->eh->last_region_number;
1409 struct eh_region **n_array, *root, *cur;
1412 if (ifun_last_region_number == 0)
1415 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
1417 for (i = 1; i <= ifun_last_region_number; ++i)
1419 cur = ifun->eh->region_array[i];
1420 if (!cur || cur->region_number != i)
1422 n_array[i] = duplicate_eh_region_1 (cur, map);
1424 for (i = 1; i <= ifun_last_region_number; ++i)
1426 cur = ifun->eh->region_array[i];
1427 if (!cur || cur->region_number != i)
1429 duplicate_eh_region_2 (cur, n_array);
1432 root = n_array[ifun->eh->region_tree->region_number];
1433 cur = cfun->eh->cur_region;
1436 struct eh_region *p = cur->inner;
1439 while (p->next_peer)
1441 p->next_peer = root;
1446 for (i = 1; i <= ifun_last_region_number; ++i)
1447 if (n_array[i]->outer == NULL)
1448 n_array[i]->outer = cur;
1452 struct eh_region *p = cfun->eh->region_tree;
1455 while (p->next_peer)
1457 p->next_peer = root;
1460 cfun->eh->region_tree = root;
1465 i = cfun->eh->last_region_number;
1466 cfun->eh->last_region_number = i + ifun_last_region_number;
1471 /* ??? Move from tree.c to tree.h. */
1472 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
1475 t2r_eq (pentry, pdata)
1479 tree entry = (tree) pentry;
1480 tree data = (tree) pdata;
1482 return TREE_PURPOSE (entry) == data;
1489 tree entry = (tree) pentry;
1490 return TYPE_HASH (TREE_PURPOSE (entry));
1494 t2r_mark_1 (slot, data)
1496 PTR data ATTRIBUTE_UNUSED;
1498 tree contents = (tree) *slot;
1499 ggc_mark_tree (contents);
1507 htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
1511 add_type_for_runtime (type)
1516 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1517 TYPE_HASH (type), INSERT);
1520 tree runtime = (*lang_eh_runtime_type) (type);
1521 *slot = tree_cons (type, runtime, NULL_TREE);
1526 lookup_type_for_runtime (type)
1531 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1532 TYPE_HASH (type), NO_INSERT);
1534 /* We should always have inserted the data earlier. */
1535 return TREE_VALUE (*slot);
1539 /* Represent an entry in @TTypes for either catch actions
1540 or exception filter actions. */
1541 struct ttypes_filter
1547 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1548 (a tree) for a @TTypes type node we are thinking about adding. */
1551 ttypes_filter_eq (pentry, pdata)
1555 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1556 tree data = (tree) pdata;
1558 return entry->t == data;
1562 ttypes_filter_hash (pentry)
1565 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1566 return TYPE_HASH (entry->t);
1569 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1570 exception specification list we are thinking about adding. */
1571 /* ??? Currently we use the type lists in the order given. Someone
1572 should put these in some canonical order. */
1575 ehspec_filter_eq (pentry, pdata)
1579 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1580 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1582 return type_list_equal (entry->t, data->t);
1585 /* Hash function for exception specification lists. */
1588 ehspec_filter_hash (pentry)
1591 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1595 for (list = entry->t; list ; list = TREE_CHAIN (list))
1596 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1600 /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1601 up the search. Return the filter value to be used. */
1604 add_ttypes_entry (ttypes_hash, type)
1608 struct ttypes_filter **slot, *n;
1610 slot = (struct ttypes_filter **)
1611 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1613 if ((n = *slot) == NULL)
1615 /* Filter value is a 1 based table index. */
1617 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1619 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1622 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
1628 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1629 to speed up the search. Return the filter value to be used. */
1632 add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1637 struct ttypes_filter **slot, *n;
1638 struct ttypes_filter dummy;
1641 slot = (struct ttypes_filter **)
1642 htab_find_slot (ehspec_hash, &dummy, INSERT);
1644 if ((n = *slot) == NULL)
1646 /* Filter value is a -1 based byte index into a uleb128 buffer. */
1648 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1650 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1653 /* Look up each type in the list and encode its filter
1654 value as a uleb128. Terminate the list with 0. */
1655 for (; list ; list = TREE_CHAIN (list))
1656 push_uleb128 (&cfun->eh->ehspec_data,
1657 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1658 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
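/* For reference, a uleb128 value is emitted seven bits at a time,
   low-order group first, with bit 7 set on every byte except the
   last.  A simplified sketch of what push_uleb128 is assumed to do
   (varray bookkeeping omitted):

     do
       {
         unsigned char byte = value & 0x7f;
         value >>= 7;
         if (value)
           byte |= 0x80;
         push the byte onto the buffer;
       }
     while (value);
*/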
1664 /* Generate the action filter values to be used for CATCH and
1665 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1666 we use lots of landing pads, and so every type or list can share
1667 the same filter value, which saves table space. */
1670 assign_filter_values ()
1673 htab_t ttypes, ehspec;
1675 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1676 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1678 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1679 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1681 for (i = cfun->eh->last_region_number; i > 0; --i)
1683 struct eh_region *r = cfun->eh->region_array[i];
1685 /* Mind we don't process a region more than once. */
1686 if (!r || r->region_number != i)
1692 r->u.catch.filter = add_ttypes_entry (ttypes, r->u.catch.type);
1695 case ERT_ALLOWED_EXCEPTIONS:
1697 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1705 htab_delete (ttypes);
1706 htab_delete (ehspec);
1710 build_post_landing_pads ()
1714 for (i = cfun->eh->last_region_number; i > 0; --i)
1716 struct eh_region *region = cfun->eh->region_array[i];
1719 /* Mind we don't process a region more than once. */
1720 if (!region || region->region_number != i)
1723 switch (region->type)
1726 /* ??? Collect the set of all non-overlapping catch handlers
1727 all the way up the chain until blocked by a cleanup. */
1728 /* ??? Outer try regions can share landing pads with inner
1729 try regions if the types are completely non-overlapping,
1730 and there are no intervening cleanups. */
1732 region->post_landing_pad = gen_label_rtx ();
1736 emit_label (region->post_landing_pad);
1738 /* ??? It is mighty inconvenient to call back into the
1739 switch statement generation code in expand_end_case.
1740 Rapid prototyping sez a sequence of ifs. */
1742 struct eh_region *c;
1743 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1745 /* ??? _Unwind_ForcedUnwind wants no match here. */
1746 if (c->u.catch.type == NULL)
1747 emit_jump (c->label);
1749 emit_cmp_and_jump_insns (cfun->eh->filter,
1750 GEN_INT (c->u.catch.filter),
1751 EQ, NULL_RTX, word_mode,
1756 /* We delay the generation of the _Unwind_Resume until we generate
1757 landing pads. We emit a marker here so as to get good control
1758 flow data in the meantime. */
1760 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1766 emit_insns_before (seq, region->u.try.catch->label);
1769 case ERT_ALLOWED_EXCEPTIONS:
1770 region->post_landing_pad = gen_label_rtx ();
1774 emit_label (region->post_landing_pad);
1776 emit_cmp_and_jump_insns (cfun->eh->filter,
1777 GEN_INT (region->u.allowed.filter),
1778 EQ, NULL_RTX, word_mode, 0, 0,
1781 /* We delay the generation of the _Unwind_Resume until we generate
1782 landing pads. We emit a marker here so as to get good control
1783 flow data in the meantime. */
1785 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1791 emit_insns_before (seq, region->label);
1795 case ERT_MUST_NOT_THROW:
1796 region->post_landing_pad = region->label;
1801 /* Nothing to do. */
1810 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1811 _Unwind_Resume otherwise. */
1814 connect_post_landing_pads ()
1818 for (i = cfun->eh->last_region_number; i > 0; --i)
1820 struct eh_region *region = cfun->eh->region_array[i];
1821 struct eh_region *outer;
1824 /* Mind we don't process a region more than once. */
1825 if (!region || region->region_number != i)
1828 /* If there is no RESX, or it has been deleted by flow, there's
1829 nothing to fix up. */
1830 if (! region->resume || INSN_DELETED_P (region->resume))
1833 /* Search for another landing pad in this function. */
1834 for (outer = region->outer; outer ; outer = outer->outer)
1835 if (outer->post_landing_pad)
1841 emit_jump (outer->post_landing_pad);
1843 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1844 VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
1848 emit_insns_before (seq, region->resume);
1850 /* Leave the RESX to be deleted by flow. */
1856 dw2_build_landing_pads ()
1861 for (i = cfun->eh->last_region_number; i > 0; --i)
1863 struct eh_region *region = cfun->eh->region_array[i];
1866 /* Mind we don't process a region more than once. */
1867 if (!region || region->region_number != i)
1870 if (region->type != ERT_CLEANUP
1871 && region->type != ERT_TRY
1872 && region->type != ERT_ALLOWED_EXCEPTIONS)
1877 region->landing_pad = gen_label_rtx ();
1878 emit_label (region->landing_pad);
1880 #ifdef HAVE_exception_receiver
1881 if (HAVE_exception_receiver)
1882 emit_insn (gen_exception_receiver ());
1885 #ifdef HAVE_nonlocal_goto_receiver
1886 if (HAVE_nonlocal_goto_receiver)
1887 emit_insn (gen_nonlocal_goto_receiver ());
1892 /* If the eh_return data registers are call-saved, then we
1893 won't have considered them clobbered from the call that
1894 threw. Kill them now. */
1897 unsigned r = EH_RETURN_DATA_REGNO (j);
1898 if (r == INVALID_REGNUM)
1900 if (! call_used_regs[r])
1901 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
1904 emit_move_insn (cfun->eh->exc_ptr,
1905 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
1906 emit_move_insn (cfun->eh->filter,
1907 gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
1912 emit_insns_before (seq, region->post_landing_pad);
1919 int directly_reachable;
1922 int call_site_index;
1926 sjlj_find_directly_reachable_regions (lp_info)
1927 struct sjlj_lp_info *lp_info;
1930 bool found_one = false;
1932 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
1934 struct eh_region *region;
1938 if (! INSN_P (insn))
1941 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1942 if (!note || INTVAL (XEXP (note, 0)) <= 0)
1945 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
1947 type_thrown = NULL_TREE;
1948 if (region->type == ERT_THROW)
1950 type_thrown = region->u.throw.type;
1951 region = region->outer;
1954 /* Find the first containing region that might handle the exception.
1955 That's the landing pad to which we will transfer control. */
1956 for (; region; region = region->outer)
1957 if (reachable_next_level (region, type_thrown, 0) != RNL_NOT_CAUGHT)
1962 lp_info[region->region_number].directly_reachable = 1;
1971 sjlj_assign_call_site_values (dispatch_label, lp_info)
1973 struct sjlj_lp_info *lp_info;
1978 /* First task: build the action table. */
1980 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
1981 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
1983 for (i = cfun->eh->last_region_number; i > 0; --i)
1984 if (lp_info[i].directly_reachable)
1986 struct eh_region *r = cfun->eh->region_array[i];
1987 r->landing_pad = dispatch_label;
1988 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
1989 if (lp_info[i].action_index != -1)
1990 cfun->uses_eh_lsda = 1;
1993 htab_delete (ar_hash);
1995 /* Next: assign dispatch values. In dwarf2 terms, this would be the
1996 landing pad label for the region. For sjlj though, there is one
1997 common landing pad from which we dispatch to the post-landing pads.
1999 A region receives a dispatch index if it is directly reachable
2000 and requires in-function processing. Regions that share post-landing
2001 pads may share dispatch indices. */
2002 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2003 (see build_post_landing_pads) so we don't bother checking for it. */
2006 for (i = cfun->eh->last_region_number; i > 0; --i)
2007 if (lp_info[i].directly_reachable
2008 && lp_info[i].action_index >= 0)
2009 lp_info[i].dispatch_index = index++;
2011 /* Finally: assign call-site values. In dwarf2 terms, this would be
2012 the region number assigned by convert_to_eh_region_ranges, but
2013 handles no-action and must-not-throw differently. */
2016 for (i = cfun->eh->last_region_number; i > 0; --i)
2017 if (lp_info[i].directly_reachable)
2019 int action = lp_info[i].action_index;
2021 /* Map must-not-throw to otherwise unused call-site index 0. */
2024 /* Map no-action to otherwise unused call-site index -1. */
2025 else if (action == -1)
2027 /* Otherwise, look it up in the table. */
2029 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2031 lp_info[i].call_site_index = index;
2036 sjlj_mark_call_sites (lp_info)
2037 struct sjlj_lp_info *lp_info;
2039 int last_call_site = -2;
2042 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2043 sjlj_fc_call_site_ofs);
2045 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2047 struct eh_region *region;
2049 rtx note, before, p;
2051 /* Reset value tracking at extended basic block boundaries. */
2052 if (GET_CODE (insn) == CODE_LABEL)
2053 last_call_site = -2;
2055 if (! INSN_P (insn))
2058 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2061 /* Calls (and trapping insns) without notes are outside any
2062 exception handling region in this function. Mark them as
2064 if (GET_CODE (insn) == CALL_INSN
2065 || (flag_non_call_exceptions
2066 && may_trap_p (PATTERN (insn))))
2067 this_call_site = -1;
2073 /* Calls that are known to not throw need not be marked. */
2074 if (INTVAL (XEXP (note, 0)) <= 0)
2077 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2078 this_call_site = lp_info[region->region_number].call_site_index;
2081 if (this_call_site == last_call_site)
2084 /* Don't separate a call from its argument loads. */
2086 if (GET_CODE (insn) == CALL_INSN)
2088 HARD_REG_SET parm_regs;
2091 /* Since different machines initialize their parameter registers
2092 in different orders, assume nothing. Collect the set of all
2093 parameter registers. */
2094 CLEAR_HARD_REG_SET (parm_regs);
2096 for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
2097 if (GET_CODE (XEXP (p, 0)) == USE
2098 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
2100 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
2103 /* We only care about registers which can hold function
2105 if (! FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
2108 SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
2112 /* Search backward for the first set of a register in this set. */
2115 before = PREV_INSN (before);
2117 /* Given that we've done no other optimizations yet,
2118 the arguments should be immediately available. */
2119 if (GET_CODE (before) == CODE_LABEL)
2122 p = single_set (before);
2123 if (p && GET_CODE (SET_DEST (p)) == REG
2124 && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
2125 && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
2127 CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
2134 emit_move_insn (mem, GEN_INT (this_call_site));
2138 emit_insns_before (p, before);
2139 last_call_site = this_call_site;
2143 /* Construct the SjLj_Function_Context. */
2146 sjlj_emit_function_enter (dispatch_label)
2149 rtx fn_begin, fc, mem, seq;
2151 fc = cfun->eh->sjlj_fc;
2155 /* We're storing this libcall's address into memory instead of
2156 calling it directly. Thus, we must call assemble_external_libcall
2157 here, as we can not depend on emit_library_call to do it for us. */
2158 assemble_external_libcall (eh_personality_libfunc);
2159 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2160 emit_move_insn (mem, eh_personality_libfunc);
2162 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2163 if (cfun->uses_eh_lsda)
2166 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2167 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2170 emit_move_insn (mem, const0_rtx);
2172 #ifdef DONT_USE_BUILTIN_SETJMP
2175 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_NORMAL,
2176 TYPE_MODE (integer_type_node), 1,
2177 plus_constant (XEXP (fc, 0),
2178 sjlj_fc_jbuf_ofs), Pmode);
2180 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2181 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2183 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2184 TYPE_MODE (integer_type_node), 0, 0,
2188 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2192 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2193 1, XEXP (fc, 0), Pmode);
2198 /* ??? Instead of doing this at the beginning of the function,
2199 do this in a block that is at loop level 0 and dominates all
2200 can_throw_internal instructions. */
2202 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2203 if (GET_CODE (fn_begin) == NOTE
2204 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2206 emit_insns_after (seq, fn_begin);
2209 /* Call back from expand_function_end to know where we should put
2210 the call to unwind_sjlj_unregister_libfunc if needed. */
2213 sjlj_emit_function_exit_after (after)
2216 cfun->eh->sjlj_exit_after = after;
2220 sjlj_emit_function_exit ()
2226 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2227 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2232 /* ??? Really this can be done in any block at loop level 0 that
2233 post-dominates all can_throw_internal instructions. This is
2234 the last possible moment. */
2236 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2240 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2242 struct sjlj_lp_info *lp_info;
2244 int i, first_reachable;
2245 rtx mem, dispatch, seq, fc;
2247 fc = cfun->eh->sjlj_fc;
2251 emit_label (dispatch_label);
2253 #ifndef DONT_USE_BUILTIN_SETJMP
2254 expand_builtin_setjmp_receiver (dispatch_label);
2257 /* Load up dispatch index, exc_ptr and filter values from the
2258 function context. */
2259 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2260 sjlj_fc_call_site_ofs);
2261 dispatch = copy_to_reg (mem);
2263 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2264 if (word_mode != Pmode)
2266 #ifdef POINTERS_EXTEND_UNSIGNED
2267 mem = convert_memory_address (Pmode, mem);
2269 mem = convert_to_mode (Pmode, mem, 0);
2272 emit_move_insn (cfun->eh->exc_ptr, mem);
2274 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2275 emit_move_insn (cfun->eh->filter, mem);
2277 /* Jump to one of the directly reachable regions. */
2278 /* ??? This really ought to be using a switch statement. */
2280 first_reachable = 0;
2281 for (i = cfun->eh->last_region_number; i > 0; --i)
2283 if (! lp_info[i].directly_reachable
2284 || lp_info[i].action_index < 0)
2287 if (! first_reachable)
2289 first_reachable = i;
2293 emit_cmp_and_jump_insns (dispatch,
2294 GEN_INT (lp_info[i].dispatch_index), EQ,
2295 NULL_RTX, TYPE_MODE (integer_type_node), 0, 0,
2296 cfun->eh->region_array[i]->post_landing_pad);
2302 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2303 ->post_landing_pad));
2307 sjlj_build_landing_pads ()
2309 struct sjlj_lp_info *lp_info;
2311 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2312 sizeof (struct sjlj_lp_info));
2314 if (sjlj_find_directly_reachable_regions (lp_info))
2316 rtx dispatch_label = gen_label_rtx ();
2319 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2320 int_size_in_bytes (sjlj_fc_type_node),
2321 TYPE_ALIGN (sjlj_fc_type_node));
2323 sjlj_assign_call_site_values (dispatch_label, lp_info);
2324 sjlj_mark_call_sites (lp_info);
2326 sjlj_emit_function_enter (dispatch_label);
2327 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2328 sjlj_emit_function_exit ();
2335 finish_eh_generation ()
2337 /* Nothing to do if no regions created. */
2338 if (cfun->eh->region_tree == NULL)
2341 /* The object here is to provide find_basic_blocks with detailed
2342 information (via reachable_handlers) on how exception control
2343 flows within the function. In this first pass, we can include
2344 type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
2345 regions, and hope that it will be useful in deleting unreachable
2346 handlers. Subsequently, we will generate landing pads which will
2347 connect many of the handlers, and then type information will not
2348 be effective. Still, this is a win over previous implementations. */
2350 jump_optimize_minimal (get_insns ());
2351 find_basic_blocks (get_insns (), max_reg_num (), 0);
2354 /* These registers are used by the landing pads. Make sure they
2355 have been generated. */
2356 get_exception_pointer (cfun);
2357 get_exception_filter (cfun);
2359 /* Construct the landing pads. */
2361 assign_filter_values ();
2362 build_post_landing_pads ();
2363 connect_post_landing_pads ();
2364 if (USING_SJLJ_EXCEPTIONS)
2365 sjlj_build_landing_pads ();
2367 dw2_build_landing_pads ();
2369 cfun->eh->built_landing_pads = 1;
2371 /* We've totally changed the CFG. Start over. */
2372 find_exception_handler_labels ();
2373 jump_optimize_minimal (get_insns ());
2374 find_basic_blocks (get_insns (), max_reg_num (), 0);
2378 /* This section handles removing dead code for flow. */
2380 /* Remove LABEL from the exception_handler_labels list. */
2383 remove_exception_handler_label (label)
2388 for (pl = &exception_handler_labels, l = *pl;
2389 XEXP (l, 0) != label;
2390 pl = &XEXP (l, 1), l = *pl)
2394 free_EXPR_LIST_node (l);
2397 /* Splice REGION from the region tree etc. */
2400 remove_eh_handler (region)
2401 struct eh_region *region;
2403 struct eh_region **pp, *p;
2407 /* For the benefit of efficiently handling REG_EH_REGION notes,
2408 replace this region in the region array with its containing
2409 region. Note that previous region deletions may result in
2410 multiple copies of this region in the array, so we have to
2411 search the whole thing. */
2412 for (i = cfun->eh->last_region_number; i > 0; --i)
2413 if (cfun->eh->region_array[i] == region)
2414 cfun->eh->region_array[i] = region->outer;
2416 if (cfun->eh->built_landing_pads)
2417 lab = region->landing_pad;
2419 lab = region->label;
2421 remove_exception_handler_label (lab);
2424 pp = &region->outer->inner;
2426 pp = &cfun->eh->region_tree;
2427 for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
2432 for (p = region->inner; p->next_peer ; p = p->next_peer)
2433 p->outer = region->outer;
2434 p->next_peer = region->next_peer;
2435 p->outer = region->outer;
2436 *pp = region->inner;
2439 *pp = region->next_peer;
2441 if (region->type == ERT_CATCH)
2443 struct eh_region *try, *next, *prev;
2445 for (try = region->next_peer;
2446 try->type == ERT_CATCH;
2447 try = try->next_peer)
2449 if (try->type != ERT_TRY)
2452 next = region->u.catch.next_catch;
2453 prev = region->u.catch.prev_catch;
2456 next->u.catch.prev_catch = prev;
2458 try->u.try.last_catch = prev;
2460 prev->u.catch.next_catch = next;
2463 try->u.try.catch = next;
2465 remove_eh_handler (try);
2472 /* LABEL heads a basic block that is about to be deleted. If this
2473 label corresponds to an exception region, we may be able to
2474 delete the region. */
2477 maybe_remove_eh_handler (label)
2482 /* ??? After generating landing pads, it's not so simple to determine
2483 if the region data is completely unused. One must examine the
2484 landing pad and the post landing pad, and whether an inner try block
2485 is referencing the catch handlers directly. */
2486 if (cfun->eh->built_landing_pads)
2489 for (i = cfun->eh->last_region_number; i > 0; --i)
2491 struct eh_region *region = cfun->eh->region_array[i];
2492 if (region && region->label == label)
2494 /* Flow will want to remove MUST_NOT_THROW regions as unreachable
2495 because there is no path to the fallback call to terminate.
2496 But the region continues to affect call-site data until there
2497 are no more contained calls, which we don't see here. */
2498 if (region->type == ERT_MUST_NOT_THROW)
2500 remove_exception_handler_label (region->label);
2501 region->label = NULL_RTX;
2504 remove_eh_handler (region);
2511 /* This section describes CFG exception edges for flow. */
2513 /* For communicating between calls to reachable_next_level. */
2514 struct reachable_info
2521 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2522 base class of TYPE, is in HANDLED. */
2525 check_handled (handled, type)
2530 /* We can check for exact matches without front-end help. */
2531 if (! lang_eh_type_covers)
2533 for (t = handled; t ; t = TREE_CHAIN (t))
2534 if (TREE_VALUE (t) == type)
2539 for (t = handled; t ; t = TREE_CHAIN (t))
2540 if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
2547 /* A subroutine of reachable_next_level. If we are collecting a list
2548 of handlers, add one. After landing pad generation, reference
2549 it instead of the handlers themselves. Further, the handlers are
2550 all wired together, so by referencing one, we've got them all.
2551 Before landing pad generation we reference each handler individually.
2553 LP_REGION contains the landing pad; REGION is the handler. */
2556 add_reachable_handler (info, lp_region, region)
2557 struct reachable_info *info;
2558 struct eh_region *lp_region;
2559 struct eh_region *region;
2564 if (cfun->eh->built_landing_pads)
2566 if (! info->handlers)
2567 info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
2570 info->handlers = alloc_INSN_LIST (region->label, info->handlers);
2573 /* Process one level of exception regions for reachability.
2574 If TYPE_THROWN is non-null, then it is the *exact* type being
2575 propagated. If INFO is non-null, then collect handler labels
2576 and caught/allowed type information between invocations. */
2578 static enum reachable_code
2579 reachable_next_level (region, type_thrown, info)
2580 struct eh_region *region;
2582 struct reachable_info *info;
2584 switch (region->type)
2587 /* Before landing-pad generation, we model control flow
2588 directly to the individual handlers. In this way we can
2589 see that catch handler types may shadow one another. */
2590 add_reachable_handler (info, region, region);
2591 return RNL_MAYBE_CAUGHT;
2595 struct eh_region *c;
2596 enum reachable_code ret = RNL_NOT_CAUGHT;
2598 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
2600 /* A catch-all handler ends the search. */
2601 /* ??? _Unwind_ForcedUnwind will want outer cleanups
2602 to be run as well. */
2603 if (c->u.catch.type == NULL)
2605 add_reachable_handler (info, region, c);
2611 /* If we have a type match, end the search. */
2612 if (c->u.catch.type == type_thrown
2613 || (lang_eh_type_covers
2614 && (*lang_eh_type_covers) (c->u.catch.type,
2617 add_reachable_handler (info, region, c);
2621 /* If we have definitive information of a match failure,
2622 the catch won't trigger. */
2623 if (lang_eh_type_covers)
2624 return RNL_NOT_CAUGHT;
2628 ret = RNL_MAYBE_CAUGHT;
2630 /* A type must not have been previously caught. */
2631 else if (! check_handled (info->types_caught, c->u.catch.type))
2633 add_reachable_handler (info, region, c);
2634 info->types_caught = tree_cons (NULL, c->u.catch.type,
2635 info->types_caught);
2637 /* ??? If the catch type is a base class of every allowed
2638 type, then we know we can stop the search. */
2639 ret = RNL_MAYBE_CAUGHT;
2646 case ERT_ALLOWED_EXCEPTIONS:
2647 /* An empty list of types definitely ends the search. */
2648 if (region->u.allowed.type_list == NULL_TREE)
2650 add_reachable_handler (info, region, region);
2654 /* Collect a list of lists of allowed types for use in detecting
2655 when a catch may be transformed into a catch-all. */
2657 info->types_allowed = tree_cons (NULL_TREE,
2658 region->u.allowed.type_list,
2659 info->types_allowed);
2661 /* If we have definitive information about the type hierarchy,
2662 then we can tell if the thrown type will pass through the filter. */
2664 if (type_thrown && lang_eh_type_covers)
2666 if (check_handled (region->u.allowed.type_list, type_thrown))
2667 return RNL_NOT_CAUGHT;
2670 add_reachable_handler (info, region, region);
2675 add_reachable_handler (info, region, region);
2676 return RNL_MAYBE_CAUGHT;
2679 /* Catch regions are handled by their controlling try region. */
2680 return RNL_NOT_CAUGHT;
2682 case ERT_MUST_NOT_THROW:
2683 /* Here we end our search, since no exceptions may propagate.
2684 If we've touched down at some landing pad previously, then the
2685 explicit function call we generated may be used. Otherwise
2686 the call is made by the runtime. */
2687 if (info && info->handlers)
2689 add_reachable_handler (info, region, region);
2697 /* Shouldn't see these here. */
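/* As used by the callers below, the reachable_code values returned above
   mean roughly the following (a summary inferred from those uses, not a
   normative definition):
     RNL_NOT_CAUGHT   - this region does not handle the throw; keep
                        searching outer regions.
     RNL_MAYBE_CAUGHT - this region might handle the throw; record the
                        handler and keep searching.
     RNL_CAUGHT       - the throw is definitely handled here; stop.
     RNL_BLOCKED      - a must-not-throw region stops propagation.  */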
2704 /* Retrieve a list of labels of exception handlers which can be
2705 reached by a given insn. */
2708 reachable_handlers (insn)
2711 struct reachable_info info;
2712 struct eh_region *region;
2716 if (GET_CODE (insn) == JUMP_INSN
2717 && GET_CODE (PATTERN (insn)) == RESX)
2718 region_number = XINT (PATTERN (insn), 0);
2721 rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2722 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2724 region_number = INTVAL (XEXP (note, 0));
2727 memset (&info, 0, sizeof (info));
2729 region = cfun->eh->region_array[region_number];
2731 type_thrown = NULL_TREE;
2732 if (region->type == ERT_THROW)
2734 type_thrown = region->u.throw.type;
2735 region = region->outer;
2737 else if (GET_CODE (insn) == JUMP_INSN
2738 && GET_CODE (PATTERN (insn)) == RESX)
2739 region = region->outer;
2741 for (; region; region = region->outer)
2742 if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
2745 return info.handlers;
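/* The label list built above is what flow uses to add exception edges
   from a throwing insn's basic block to each reachable handler block,
   per the "CFG exception edges" section comment earlier.  */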
2748 /* Determine if the given INSN can throw an exception that is caught
2749 within the function. */
2752 can_throw_internal (insn)
2755 struct eh_region *region;
2759 if (! INSN_P (insn))
2762 if (GET_CODE (insn) == INSN
2763 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2764 insn = XVECEXP (PATTERN (insn), 0, 0);
2766 if (GET_CODE (insn) == CALL_INSN
2767 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2770 for (i = 0; i < 3; ++i)
2772 rtx sub = XEXP (PATTERN (insn), i);
2773 for (; sub ; sub = NEXT_INSN (sub))
2774 if (can_throw_internal (sub))
2780 /* Every insn that might throw has an EH_REGION note. */
2781 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2782 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2785 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2787 type_thrown = NULL_TREE;
2788 if (region->type == ERT_THROW)
2790 type_thrown = region->u.throw.type;
2791 region = region->outer;
2794 /* If this exception is ignored by each and every containing region,
2795 then control passes straight out. The runtime may handle some
2796 regions, which also do not require processing internally. */
2797 for (; region; region = region->outer)
2799 enum reachable_code how = reachable_next_level (region, type_thrown, 0);
2800 if (how == RNL_BLOCKED)
2802 if (how != RNL_NOT_CAUGHT)
2809 /* Determine if the given INSN can throw an exception that is
2810 visible outside the function. */
2813 can_throw_external (insn)
2816 struct eh_region *region;
2820 if (! INSN_P (insn))
2823 if (GET_CODE (insn) == INSN
2824 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2825 insn = XVECEXP (PATTERN (insn), 0, 0);
2827 if (GET_CODE (insn) == CALL_INSN
2828 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
2831 for (i = 0; i < 3; ++i)
2833 rtx sub = XEXP (PATTERN (insn), i);
2834 for (; sub ; sub = NEXT_INSN (sub))
2835 if (can_throw_external (sub))
2841 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2844 /* Calls (and trapping insns) without notes are outside any
2845 exception handling region in this function. We have to
2846 assume it might throw. Given that the front end and middle
2847 ends mark known NOTHROW functions, this isn't so wildly pessimistic. */
2849 return (GET_CODE (insn) == CALL_INSN
2850 || (flag_non_call_exceptions
2851 && may_trap_p (PATTERN (insn))));
2853 if (INTVAL (XEXP (note, 0)) <= 0)
2856 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2858 type_thrown = NULL_TREE;
2859 if (region->type == ERT_THROW)
2861 type_thrown = region->u.throw.type;
2862 region = region->outer;
2865 /* If the exception is caught or blocked by any containing region,
2866 then it is not seen by any calling function. */
2867 for (; region ; region = region->outer)
2868 if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
2874 /* True if nothing in this function can throw outside this function. */
2877 nothrow_function_p ()
2881 if (! flag_exceptions)
2884 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2885 if (can_throw_external (insn))
2887 for (insn = current_function_epilogue_delay_list; insn;
2888 insn = XEXP (insn, 1))
2889 if (can_throw_external (insn))
2896 /* Various hooks for unwind library. */
2898 /* Do any necessary initialization to access arbitrary stack frames.
2899 On the SPARC, this means flushing the register windows. */
2902 expand_builtin_unwind_init ()
2904 /* Set this so all the registers get saved in our frame; we need to be
2905 able to copy the saved values for any registers from frames we unwind. */
2906 current_function_has_nonlocal_label = 1;
2908 #ifdef SETUP_FRAME_ADDRESSES
2909 SETUP_FRAME_ADDRESSES ();
2914 expand_builtin_eh_return_data_regno (arglist)
2917 tree which = TREE_VALUE (arglist);
2918 unsigned HOST_WIDE_INT iwhich;
2920 if (TREE_CODE (which) != INTEGER_CST)
2922 error ("argument of `__builtin_eh_return_regno' must be constant");
2926 iwhich = tree_low_cst (which, 1);
2927 iwhich = EH_RETURN_DATA_REGNO (iwhich);
2928 if (iwhich == INVALID_REGNUM)
2931 #ifdef DWARF_FRAME_REGNUM
2932 iwhich = DWARF_FRAME_REGNUM (iwhich);
2934 iwhich = DBX_REGISTER_NUMBER (iwhich);
2937 return GEN_INT (iwhich);
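/* The value returned above is already converted to the debugger /
   unwinder register numbering (DWARF_FRAME_REGNUM or DBX_REGISTER_NUMBER).
   Presumably the unwind runtime in libgcc is the main user of this
   builtin, using it to find the hard registers through which the
   exception pointer and filter value are handed to a landing pad.  */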
2940 /* Given a value extracted from the return address register or stack slot,
2941 return the actual address encoded in that value. */
2944 expand_builtin_extract_return_addr (addr_tree)
2947 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2949 /* First mask out any unwanted bits. */
2950 #ifdef MASK_RETURN_ADDR
2951 expand_and (addr, MASK_RETURN_ADDR, addr);
2954 /* Then adjust to find the real return address. */
2955 #if defined (RETURN_ADDR_OFFSET)
2956 addr = plus_constant (addr, RETURN_ADDR_OFFSET);
2962 /* Given an actual address in addr_tree, do any necessary encoding
2963 and return the value to be stored in the return address register or
2964 stack slot so the epilogue will return to that address. */
2967 expand_builtin_frob_return_addr (addr_tree)
2970 rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);
2972 #ifdef POINTERS_EXTEND_UNSIGNED
2973 addr = convert_memory_address (Pmode, addr);
2976 #ifdef RETURN_ADDR_OFFSET
2977 addr = force_reg (Pmode, addr);
2978 addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
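/* Purely for illustration (a hypothetical target, not one named here):
   a machine that keeps status bits in the low bits of a saved return
   address would define MASK_RETURN_ADDR to a constant that clears those
   bits, and a machine whose saved value differs from the real return
   location by a fixed amount would define RETURN_ADDR_OFFSET as that
   signed adjustment.  Note that the frob routine above applies the
   opposite offset from the extract routine, so the two are inverses.  */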
2984 /* Set up the epilogue with the magic bits we'll need to return to the
2985 exception handler. */
2988 expand_builtin_eh_return (stackadj_tree, handler_tree)
2989 tree stackadj_tree, handler_tree;
2991 rtx stackadj, handler;
2993 stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
2994 handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);
2996 #ifdef POINTERS_EXTEND_UNSIGNED
2997 stackadj = convert_memory_address (Pmode, stackadj);
2998 handler = convert_memory_address (Pmode, handler);
3001 if (! cfun->eh->ehr_label)
3003 cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
3004 cfun->eh->ehr_handler = copy_to_reg (handler);
3005 cfun->eh->ehr_label = gen_label_rtx ();
3009 if (stackadj != cfun->eh->ehr_stackadj)
3010 emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
3011 if (handler != cfun->eh->ehr_handler)
3012 emit_move_insn (cfun->eh->ehr_handler, handler);
3015 emit_jump (cfun->eh->ehr_label);
3021 rtx sa, ra, around_label;
3023 if (! cfun->eh->ehr_label)
3026 sa = EH_RETURN_STACKADJ_RTX;
3029 error ("__builtin_eh_return not supported on this target");
3033 current_function_calls_eh_return = 1;
3035 around_label = gen_label_rtx ();
3036 emit_move_insn (sa, const0_rtx);
3037 emit_jump (around_label);
3039 emit_label (cfun->eh->ehr_label);
3040 clobber_return_register ();
3042 #ifdef HAVE_eh_return
3044 emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
3048 ra = EH_RETURN_HANDLER_RTX;
3051 error ("__builtin_eh_return not supported on this target");
3052 ra = gen_reg_rtx (Pmode);
3055 emit_move_insn (sa, cfun->eh->ehr_stackadj);
3056 emit_move_insn (ra, cfun->eh->ehr_handler);
3059 emit_label (around_label);
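/* Taken together: __builtin_eh_return stashes the handler address and
   stack adjustment and jumps to ehr_label, while the code above, emitted
   around the epilogue, either zeroes the stack adjustment on the normal
   return path or, at ehr_label, hands both values to the target's
   eh_return pattern (or to the EH_RETURN_STACKADJ_RTX and
   EH_RETURN_HANDLER_RTX registers) so that the epilogue returns into the
   exception handler instead of the caller.  */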
3062 struct action_record
3070 action_record_eq (pentry, pdata)
3074 const struct action_record *entry = (const struct action_record *) pentry;
3075 const struct action_record *data = (const struct action_record *) pdata;
3076 return entry->filter == data->filter && entry->next == data->next;
3080 action_record_hash (pentry)
3083 const struct action_record *entry = (const struct action_record *) pentry;
3084 return entry->next * 1009 + entry->filter;
3088 add_action_record (ar_hash, filter, next)
3092 struct action_record **slot, *new, tmp;
3094 tmp.filter = filter;
3096 slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);
3098 if ((new = *slot) == NULL)
3100 new = (struct action_record *) xmalloc (sizeof (*new));
3101 new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3102 new->filter = filter;
3106 /* The filter value goes in untouched. The link to the next
3107 record is a "self-relative" byte offset, or zero to indicate
3108 that there is no next record. So convert the absolute 1-based
3109 indices we've been carrying around into a displacement. */
3111 push_sleb128 (&cfun->eh->action_record_data, filter);
3113 next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
3114 push_sleb128 (&cfun->eh->action_record_data, next);
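/* A worked example of the encoding above, assuming each value fits in a
   single sleb128 byte: the first record added, say with filter 3 and no
   successor, is assigned offset 1 and pushes the bytes {3, 0}; zero is
   stored unchanged to mean "no next record".  A second record with
   filter 5 chaining to the first is added when the varray already holds
   2 bytes, so its offset is 3; after its filter byte the varray holds 3
   bytes, and the link stored is 1 - (3 + 1) = -3.  The function's return
   value is this 1-based offset, which is what callers chain through.  */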
3121 collect_one_action_chain (ar_hash, region)
3123 struct eh_region *region;
3125 struct eh_region *c;
3128 /* If we've reached the top of the region chain, then we have
3129 no actions, and require no landing pad. */
3133 switch (region->type)
3136 /* A cleanup adds a zero filter to the beginning of the chain, but
3137 there are special cases to look out for. If there are *only*
3138 cleanups along a path, then it compresses to a zero action.
3139 Further, if there are multiple cleanups along a path, we only
3140 need to represent one of them, as that is enough to trigger
3141 entry to the landing pad at runtime. */
3142 next = collect_one_action_chain (ar_hash, region->outer);
3145 for (c = region->outer; c ; c = c->outer)
3146 if (c->type == ERT_CLEANUP)
3148 return add_action_record (ar_hash, 0, next);
3151 /* Process the associated catch regions in reverse order.
3152 If there's a catch-all handler, then we don't need to
3153 search outer regions. Use a magic -3 value to record
3154 that we haven't done the outer search. */
3156 for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
3158 if (c->u.catch.type == NULL)
3159 next = add_action_record (ar_hash, c->u.catch.filter, 0);
3164 next = collect_one_action_chain (ar_hash, region->outer);
3168 next = add_action_record (ar_hash, c->u.catch.filter, next);
3173 case ERT_ALLOWED_EXCEPTIONS:
3174 /* An exception specification adds its filter to the
3175 beginning of the chain. */
3176 next = collect_one_action_chain (ar_hash, region->outer);
3177 return add_action_record (ar_hash, region->u.allowed.filter,
3178 next < 0 ? 0 : next);
3180 case ERT_MUST_NOT_THROW:
3181 /* A must-not-throw region with no inner handlers or cleanups
3182 requires no call-site entry. Note that this differs from
3183 the no handler or cleanup case in that we do require an lsda
3184 to be generated. Return a magic -2 value to record this. */
3189 /* CATCH regions are handled in TRY above. THROW regions are
3190 for optimization information only and produce no output. */
3191 return collect_one_action_chain (ar_hash, region->outer);
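/* Summarizing the special values returned here and consumed by
   convert_to_eh_region_ranges below (a reading of the cases above, not a
   separate specification): -1 appears to mean no actions and no landing
   pad are needed; -2 marks a must-not-throw region that requires an lsda
   but no call-site entry; -3 is transient only (above as "outer search
   not yet done", below as "no previous action seen").  Nonnegative
   values are 1-based offsets into action_record_data.  */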
3199 add_call_site (landing_pad, action)
3203 struct call_site_record *data = cfun->eh->call_site_data;
3204 int used = cfun->eh->call_site_data_used;
3205 int size = cfun->eh->call_site_data_size;
3209 size = (size ? size * 2 : 64);
3210 data = (struct call_site_record *)
3211 xrealloc (data, sizeof (*data) * size);
3212 cfun->eh->call_site_data = data;
3213 cfun->eh->call_site_data_size = size;
3216 data[used].landing_pad = landing_pad;
3217 data[used].action = action;
3219 cfun->eh->call_site_data_used = used + 1;
3221 return used + call_site_base;
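/* The index returned above is biased by the static call_site_base, which
   the output routines below advance by N after emitting each table (see
   the call_site_base += n updates); among other things this keeps the
   generated LEHB/LEHE label numbers distinct across functions emitted
   into the same file.  */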
3224 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3225 The new note numbers will not refer to region numbers, but
3226 instead to call site entries. */
3229 convert_to_eh_region_ranges ()
3231 rtx insn, iter, note;
3233 int last_action = -3;
3234 rtx last_action_insn = NULL_RTX;
3235 rtx last_landing_pad = NULL_RTX;
3236 rtx first_no_action_insn = NULL_RTX;
3239 if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
3242 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
3244 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3246 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
3249 struct eh_region *region;
3251 rtx this_landing_pad;
3254 if (GET_CODE (insn) == INSN
3255 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3256 insn = XVECEXP (PATTERN (insn), 0, 0);
3258 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
3261 if (! (GET_CODE (insn) == CALL_INSN
3262 || (flag_non_call_exceptions
3263 && may_trap_p (PATTERN (insn)))))
3270 if (INTVAL (XEXP (note, 0)) <= 0)
3272 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
3273 this_action = collect_one_action_chain (ar_hash, region);
3276 /* The existence of catch handlers or must-not-throw regions
3277 implies that an lsda is needed (even if empty). */
3278 if (this_action != -1)
3279 cfun->uses_eh_lsda = 1;
3281 /* Delay creation of region notes for no-action regions
3282 until we're sure that an lsda will be required. */
3283 else if (last_action == -3)
3285 first_no_action_insn = iter;
3289 /* Cleanups and handlers may share action chains but not
3290 landing pads. Collect the landing pad for this region. */
3291 if (this_action >= 0)
3293 struct eh_region *o;
3294 for (o = region; ! o->landing_pad ; o = o->outer)
3296 this_landing_pad = o->landing_pad;
3299 this_landing_pad = NULL_RTX;
3301 /* Differing actions or landing pads implies a change in call-site
3302 info, which implies some EH_REGION note should be emitted. */
3303 if (last_action != this_action
3304 || last_landing_pad != this_landing_pad)
3306 /* If we'd not seen a previous action (-3) or the previous
3307 action was must-not-throw (-2), then we do not need an end note. */
3309 if (last_action >= -1)
3311 /* If we delayed the creation of the begin, do it now. */
3312 if (first_no_action_insn)
3314 call_site = add_call_site (NULL_RTX, 0);
3315 note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
3316 first_no_action_insn);
3317 NOTE_EH_HANDLER (note) = call_site;
3318 first_no_action_insn = NULL_RTX;
3321 note = emit_note_after (NOTE_INSN_EH_REGION_END,
3323 NOTE_EH_HANDLER (note) = call_site;
3326 /* If the new action is must-not-throw, then no region notes are created. */
3328 if (this_action >= -1)
3330 call_site = add_call_site (this_landing_pad,
3331 this_action < 0 ? 0 : this_action);
3332 note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
3333 NOTE_EH_HANDLER (note) = call_site;
3336 last_action = this_action;
3337 last_landing_pad = this_landing_pad;
3339 last_action_insn = iter;
3342 if (last_action >= -1 && ! first_no_action_insn)
3344 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3345 NOTE_EH_HANDLER (note) = call_site;
3348 htab_delete (ar_hash);
3353 push_uleb128 (data_area, value)
3354 varray_type *data_area;
3359 unsigned char byte = value & 0x7f;
3363 VARRAY_PUSH_UCHAR (*data_area, byte);
3369 push_sleb128 (data_area, value)
3370 varray_type *data_area;
3378 byte = value & 0x7f;
3380 more = ! ((value == 0 && (byte & 0x40) == 0)
3381 || (value == -1 && (byte & 0x40) != 0));
3384 VARRAY_PUSH_UCHAR (*data_area, byte);
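/* For reference, a self-contained sketch of the LEB128 encodings used by
   the two routines above.  This is illustrative only, not part of the
   compiler proper: the real emitters push bytes onto the function's
   varrays, and, like them, the signed variant assumes an arithmetic
   right shift on VALUE.  */
#if 0
static int
uleb128_encode (unsigned int value, unsigned char *buf)
{
  int len = 0;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;		/* More bytes follow.  */
      buf[len++] = byte;
    }
  while (value);
  return len;
}

static int
sleb128_encode (int value, unsigned char *buf)
{
  int len = 0, more;
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      /* Done once the remaining bits are all copies of the sign bit
	 already captured in bit 6 of BYTE.  */
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      buf[len++] = byte;
    }
  while (more);
  return len;
}
#endif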
3390 #ifndef HAVE_AS_LEB128
3392 dw2_size_of_call_site_table ()
3394 int n = cfun->eh->call_site_data_used;
3395 int size = n * (4 + 4 + 4);
3398 for (i = 0; i < n; ++i)
3400 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3401 size += size_of_uleb128 (cs->action);
3408 sjlj_size_of_call_site_table ()
3410 int n = cfun->eh->call_site_data_used;
3414 for (i = 0; i < n; ++i)
3416 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3417 size += size_of_uleb128 (INTVAL (cs->landing_pad));
3418 size += size_of_uleb128 (cs->action);
3426 dw2_output_call_site_table ()
3428 const char *function_start_lab
3429 = IDENTIFIER_POINTER (current_function_func_begin_label);
3430 int n = cfun->eh->call_site_data_used;
3433 for (i = 0; i < n; ++i)
3435 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3436 char reg_start_lab[32];
3437 char reg_end_lab[32];
3438 char landing_pad_lab[32];
3440 ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
3441 ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);
3443 if (cs->landing_pad)
3444 ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
3445 CODE_LABEL_NUMBER (cs->landing_pad));
3447 /* ??? Perhaps use insn length scaling if the assembler supports
3448 generic arithmetic. */
3449 /* ??? Perhaps use attr_length to choose data1 or data2 instead of
3450 data4 if the function is small enough. */
3451 #ifdef HAVE_AS_LEB128
3452 dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
3453 "region %d start", i);
3454 dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
3456 if (cs->landing_pad)
3457 dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
3460 dw2_asm_output_data_uleb128 (0, "landing pad");
3462 dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
3463 "region %d start", i);
3464 dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
3465 if (cs->landing_pad)
3466 dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
3469 dw2_asm_output_data (4, 0, "landing pad");
3471 dw2_asm_output_data_uleb128 (cs->action, "action");
3474 call_site_base += n;
3478 sjlj_output_call_site_table ()
3480 int n = cfun->eh->call_site_data_used;
3483 for (i = 0; i < n; ++i)
3485 struct call_site_record *cs = &cfun->eh->call_site_data[i];
3487 dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
3488 "region %d landing pad", i);
3489 dw2_asm_output_data_uleb128 (cs->action, "action");
3492 call_site_base += n;
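/* For orientation, the table emitted below for a function whose
   uses_eh_lsda flag is set consists of, in order (a summary of the
   routine that follows, not a normative description of the LSDA format):
     @LPStart format byte (DW_EH_PE_omit here, so no @LPStart field),
     @TType format byte,
     @TType base offset (uleb128, only when there is @TType data),
     call-site format byte,
     call-site table length,
     the call-site table (dw2 or sjlj flavour, above),
     the action record table built by add_action_record,
     the aligned @TType table,
     and the exception specification table.  */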
3496 output_function_exception_table ()
3498 int tt_format, cs_format, lp_format, i, n;
3499 #ifdef HAVE_AS_LEB128
3500 char ttype_label[32];
3501 char cs_after_size_label[32];
3502 char cs_end_label[32];
3508 int tt_format_size = 0;
3510 /* Not all functions need anything. */
3511 if (! cfun->uses_eh_lsda)
3514 funcdef_number = (USING_SJLJ_EXCEPTIONS
3515 ? sjlj_funcdef_number
3516 : current_funcdef_number);
3518 #ifdef IA64_UNWIND_INFO
3519 fputs ("\t.personality\t", asm_out_file);
3520 output_addr_const (asm_out_file, eh_personality_libfunc);
3521 fputs ("\n\t.handlerdata\n", asm_out_file);
3522 /* Note that varasm still thinks we're in the function's code section.
3523 The ".endp" directive that will immediately follow will take us back. */
3525 exception_section ();
3528 have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
3529 || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);
3531 /* Indicate the format of the @TType entries. */
3533 tt_format = DW_EH_PE_omit;
3536 tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
3537 #ifdef HAVE_AS_LEB128
3538 ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
3540 tt_format_size = size_of_encoded_value (tt_format);
3542 assemble_eh_align (tt_format_size * BITS_PER_UNIT);
3545 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);
3547 /* The LSDA header. */
3549 /* Indicate the format of the landing pad start pointer. An omitted
3550 field implies @LPStart == @Start. */
3551 /* Currently we always put @LPStart == @Start. This field would
3552 be most useful in moving the landing pads completely out of
3553 line to another section, but it could also be used to minimize
3554 the size of uleb128 landing pad offsets. */
3555 lp_format = DW_EH_PE_omit;
3556 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3557 eh_data_format_name (lp_format));
3559 /* @LPStart pointer would go here. */
3561 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3562 eh_data_format_name (tt_format));
3564 #ifndef HAVE_AS_LEB128
3565 if (USING_SJLJ_EXCEPTIONS)
3566 call_site_len = sjlj_size_of_call_site_table ();
3568 call_site_len = dw2_size_of_call_site_table ();
3571 /* A pc-relative 4-byte displacement to the @TType data. */
3574 #ifdef HAVE_AS_LEB128
3575 char ttype_after_disp_label[32];
3576 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3578 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3579 "@TType base offset");
3580 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3582 /* Ugh. Alignment complicates things. */
3583 unsigned int before_disp, after_disp, last_disp, disp;
3585 before_disp = 1 + 1;
3586 after_disp = (1 + size_of_uleb128 (call_site_len)
3588 + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
3589 + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
3595 unsigned int disp_size, pad;
3598 disp_size = size_of_uleb128 (disp);
3599 pad = before_disp + disp_size + after_disp;
3600 if (pad % tt_format_size)
3601 pad = tt_format_size - (pad % tt_format_size);
3604 disp = after_disp + pad;
3606 while (disp != last_disp);
3608 dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3612 /* Indicate the format of the call-site offsets. */
3613 #ifdef HAVE_AS_LEB128
3614 cs_format = DW_EH_PE_uleb128;
3616 cs_format = DW_EH_PE_udata4;
3618 dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3619 eh_data_format_name (cs_format));
3621 #ifdef HAVE_AS_LEB128
3622 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3624 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3626 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3627 "Call-site table length");
3628 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3629 if (USING_SJLJ_EXCEPTIONS)
3630 sjlj_output_call_site_table ();
3632 dw2_output_call_site_table ();
3633 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3635 dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
3636 if (USING_SJLJ_EXCEPTIONS)
3637 sjlj_output_call_site_table ();
3639 dw2_output_call_site_table ();
3642 /* ??? Decode and interpret the data for flag_debug_asm. */
3643 n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
3644 for (i = 0; i < n; ++i)
3645 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
3646 (i ? NULL : "Action record table"));
3649 assemble_eh_align (tt_format_size * BITS_PER_UNIT);
3651 i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
3654 tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
3656 if (type == NULL_TREE)
3657 type = integer_zero_node;
3659 type = lookup_type_for_runtime (type);
3661 dw2_asm_output_encoded_addr_rtx (tt_format,
3662 expand_expr (type, NULL_RTX, VOIDmode,
3663 EXPAND_INITIALIZER),
3667 #ifdef HAVE_AS_LEB128
3669 ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3672 /* ??? Decode and interpret the data for flag_debug_asm. */
3673 n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
3674 for (i = 0; i < n; ++i)
3675 dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
3676 (i ? NULL : "Exception specification table"));
3678 function_section (current_function_decl);
3680 if (USING_SJLJ_EXCEPTIONS)
3681 sjlj_funcdef_number += 1;