1 /* Implements exception handling.
2 Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4 Contributed by Mike Stump <mrs@cygnus.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
24 /* An exception is an event that can be signaled from within a
25 function. This event can then be "caught" or "trapped" by the
26 callers of this function. This potentially allows program flow to
27 be transferred to arbitrary code associated with a function call
28 several levels up the stack.
30 The intended use for this mechanism is for signaling "exceptional
31 events" in an out-of-band fashion, hence its name. The C++ language
32 (and many other OO-styled or functional languages) practically
33 requires such a mechanism, as otherwise it becomes very difficult
34 or even impossible to signal failure conditions in complex
35 situations. The traditional C++ example is when an error occurs in
36 the process of constructing an object; without such a mechanism, it
37 is impossible to signal that the error has occurred without adding global
38 state variables and error checks around every object construction.
40 The act of causing this event to occur is referred to as "throwing
41 an exception". (Alternate terms include "raising an exception" or
42 "signaling an exception".) The term "throw" is used because control
43 is returned to the callers of the function that is signaling the
44 exception, and thus there is the concept of "throwing" the
45 exception up the call stack.
47 [ Add updated documentation on how to use this. ] */
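/* As a purely illustrative sketch (not part of this file), the classic
   C++ case mentioned above looks roughly like this, where acquire(),
   release(), use() and ResourceError are hypothetical:

	struct Resource
	{
	  Resource () { if (! acquire ()) throw ResourceError (); }
	  ~Resource () { release (); }
	};

	void caller ()
	{
	  try
	    {
	      Resource r;	// the constructor may throw
	      use (r);
	    }
	  catch (const ResourceError &)
	    {
	      // control arrives here, possibly from several frames down
	    }
	}

   The point is only that the constructor's failure is reported without
   any global error flag or per-call checks.  */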
58 #include "insn-config.h"
60 #include "integrate.h"
61 #include "hard-reg-set.h"
62 #include "basic-block.h"
64 #include "dwarf2asm.h"
65 #include "dwarf2out.h"
74 /* Provide defaults for stuff that may not be defined when using
76 #ifndef EH_RETURN_STACKADJ_RTX
77 #define EH_RETURN_STACKADJ_RTX 0
79 #ifndef EH_RETURN_HANDLER_RTX
80 #define EH_RETURN_HANDLER_RTX 0
82 #ifndef EH_RETURN_DATA_REGNO
83 #define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
87 /* Nonzero means enable synchronous exceptions for non-call instructions. */
88 int flag_non_call_exceptions;
90 /* Protect cleanup actions with must-not-throw regions, with a call
91 to the given failure handler. */
92 tree (*lang_protect_cleanup_actions) PARAMS ((void));
94 /* Return true if type A catches type B. */
95 int (*lang_eh_type_covers) PARAMS ((tree a, tree b));
97 /* Map a type to a runtime object to match type. */
98 tree (*lang_eh_runtime_type) PARAMS ((tree));
100 /* A list of labels used for exception handlers. */
101 rtx exception_handler_labels;
103 static int call_site_base;
104 static unsigned int sjlj_funcdef_number;
105 static htab_t type_to_runtime_map;
107 /* Describe the SjLj_Function_Context structure. */
108 static tree sjlj_fc_type_node;
109 static int sjlj_fc_call_site_ofs;
110 static int sjlj_fc_data_ofs;
111 static int sjlj_fc_personality_ofs;
112 static int sjlj_fc_lsda_ofs;
113 static int sjlj_fc_jbuf_ofs;
115 /* Describes one exception region. */
118 /* The immediately surrounding region. */
119 struct eh_region *outer;
121 /* The list of immediately contained regions. */
122 struct eh_region *inner;
123 struct eh_region *next_peer;
125 /* An identifier for this region. */
128 /* Each region does exactly one thing. */
135 ERT_ALLOWED_EXCEPTIONS,
141 /* Holds the action to perform based on the preceding type. */
143 /* A list of catch blocks, a surrounding try block,
144 and the label for continuing after a catch. */
146 struct eh_region *catch;
147 struct eh_region *last_catch;
148 struct eh_region *prev_try;
152 /* The list through the catch handlers, the list of type objects
153 matched, and the list of associated filters. */
155 struct eh_region *next_catch;
156 struct eh_region *prev_catch;
161 /* A tree_list of allowed types. */
167 /* The type given by a call to "throw foo();", or discovered
173 /* Retain the cleanup expression even after expansion so that
174 we can match up fixup regions. */
179 /* The real region (by expression and by pointer) that fixup code
183 struct eh_region *real_region;
187 /* Entry point for this region's handler before landing pads are built. */
190 /* Entry point for this region's handler from the runtime eh library. */
193 /* Entry point for this region's handler from an inner region. */
194 rtx post_landing_pad;
196 /* The RESX insn for handing off control to the next outermost handler,
201 /* Used to save exception status for each function. */
204 /* The tree of all regions for this function. */
205 struct eh_region *region_tree;
207 /* The same information as an indexable array. */
208 struct eh_region **region_array;
210 /* The most recently open region. */
211 struct eh_region *cur_region;
213 /* This is the region for which we are processing catch blocks. */
214 struct eh_region *try_region;
216 /* A stack (TREE_LIST) of lists of handlers. The TREE_VALUE of each
217 node is itself a TREE_CHAINed list of handlers for regions that
218 are not yet closed. The TREE_VALUE of each entry contains the
219 handler for the corresponding entry on the ehstack. */
225 int built_landing_pads;
226 int last_region_number;
228 varray_type ttype_data;
229 varray_type ehspec_data;
230 varray_type action_record_data;
232 struct call_site_record
237 int call_site_data_used;
238 int call_site_data_size;
249 static void mark_eh_region PARAMS ((struct eh_region *));
251 static int t2r_eq PARAMS ((const PTR,
253 static hashval_t t2r_hash PARAMS ((const PTR));
254 static int t2r_mark_1 PARAMS ((PTR *, PTR));
255 static void t2r_mark PARAMS ((PTR));
256 static void add_type_for_runtime PARAMS ((tree));
257 static tree lookup_type_for_runtime PARAMS ((tree));
259 static struct eh_region *expand_eh_region_end PARAMS ((void));
261 static rtx get_exception_filter PARAMS ((struct function *));
263 static void collect_eh_region_array PARAMS ((void));
264 static void resolve_fixup_regions PARAMS ((void));
265 static void remove_fixup_regions PARAMS ((void));
266 static void remove_unreachable_regions PARAMS ((rtx));
267 static void convert_from_eh_region_ranges_1 PARAMS ((rtx *, int *, int));
269 static struct eh_region *duplicate_eh_region_1 PARAMS ((struct eh_region *,
270 struct inline_remap *));
271 static void duplicate_eh_region_2 PARAMS ((struct eh_region *,
272 struct eh_region **));
273 static int ttypes_filter_eq PARAMS ((const PTR,
275 static hashval_t ttypes_filter_hash PARAMS ((const PTR));
276 static int ehspec_filter_eq PARAMS ((const PTR,
278 static hashval_t ehspec_filter_hash PARAMS ((const PTR));
279 static int add_ttypes_entry PARAMS ((htab_t, tree));
280 static int add_ehspec_entry PARAMS ((htab_t, htab_t,
282 static void assign_filter_values PARAMS ((void));
283 static void build_post_landing_pads PARAMS ((void));
284 static void connect_post_landing_pads PARAMS ((void));
285 static void dw2_build_landing_pads PARAMS ((void));
288 static bool sjlj_find_directly_reachable_regions
289 PARAMS ((struct sjlj_lp_info *));
290 static void sjlj_assign_call_site_values
291 PARAMS ((rtx, struct sjlj_lp_info *));
292 static void sjlj_mark_call_sites
293 PARAMS ((struct sjlj_lp_info *));
294 static void sjlj_emit_function_enter PARAMS ((rtx));
295 static void sjlj_emit_function_exit PARAMS ((void));
296 static void sjlj_emit_dispatch_table
297 PARAMS ((rtx, struct sjlj_lp_info *));
298 static void sjlj_build_landing_pads PARAMS ((void));
300 static void remove_exception_handler_label PARAMS ((rtx));
301 static void remove_eh_handler PARAMS ((struct eh_region *));
303 struct reachable_info;
305 /* The return value of reachable_next_level. */
308 /* The given exception is not processed by the given region. */
310 /* The given exception may need processing by the given region. */
312 /* The given exception is completely processed by the given region. */
314 /* The given exception is completely processed by the runtime. */
318 static int check_handled PARAMS ((tree, tree));
319 static void add_reachable_handler
320 PARAMS ((struct reachable_info *, struct eh_region *,
321 struct eh_region *));
322 static enum reachable_code reachable_next_level
323 PARAMS ((struct eh_region *, tree, struct reachable_info *));
325 static int action_record_eq PARAMS ((const PTR,
327 static hashval_t action_record_hash PARAMS ((const PTR));
328 static int add_action_record PARAMS ((htab_t, int, int));
329 static int collect_one_action_chain PARAMS ((htab_t,
330 struct eh_region *));
331 static int add_call_site PARAMS ((rtx, int));
333 static void push_uleb128 PARAMS ((varray_type *,
335 static void push_sleb128 PARAMS ((varray_type *, int));
336 #ifndef HAVE_AS_LEB128
337 static int dw2_size_of_call_site_table PARAMS ((void));
338 static int sjlj_size_of_call_site_table PARAMS ((void));
340 static void dw2_output_call_site_table PARAMS ((void));
341 static void sjlj_output_call_site_table PARAMS ((void));
344 /* Routine to see if exception handling is turned on.
345 DO_WARN is non-zero if we want to inform the user that exception
346 handling is turned off.
348 This is used to ensure that -fexceptions has been specified if the
349 compiler tries to use any exception-specific functions. */
355 if (! flag_exceptions)
357 static int warned = 0;
358 if (! warned && do_warn)
360 error ("exception handling disabled, use -fexceptions to enable");
372 ggc_add_rtx_root (&exception_handler_labels, 1);
374 if (! flag_exceptions)
377 type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
378 ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);
380 /* Create the SjLj_Function_Context structure. This should match
381 the definition in unwind-sjlj.c. */
382 if (USING_SJLJ_EXCEPTIONS)
384 tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
386 sjlj_fc_type_node = make_lang_type (RECORD_TYPE);
387 ggc_add_tree_root (&sjlj_fc_type_node, 1);
389 f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
390 build_pointer_type (sjlj_fc_type_node));
391 DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
393 f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
395 DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
397 tmp = build_index_type (build_int_2 (4 - 1, 0));
398 tmp = build_array_type (type_for_mode (word_mode, 1), tmp);
399 f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
400 DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
402 f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
404 DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
406 f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
408 DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
410 #ifdef DONT_USE_BUILTIN_SETJMP
412 tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
414 /* This should be large enough for most systems; if it is not,
415 JMP_BUF_SIZE should be defined with the proper value. It will
416 also tend to be larger than necessary for most systems; a more
417 optimal port will define JMP_BUF_SIZE. */
418 tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
421 /* This is 2 for builtin_setjmp, plus whatever the target requires
422 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL). */
423 tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
424 / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
426 tmp = build_index_type (tmp);
427 tmp = build_array_type (ptr_type_node, tmp);
428 f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
429 #ifdef DONT_USE_BUILTIN_SETJMP
430 /* We don't know what the alignment requirements of the
431 runtime's jmp_buf are. Overestimate. */
432 DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
433 DECL_USER_ALIGN (f_jbuf) = 1;
435 DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;
437 TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
438 TREE_CHAIN (f_prev) = f_cs;
439 TREE_CHAIN (f_cs) = f_data;
440 TREE_CHAIN (f_data) = f_per;
441 TREE_CHAIN (f_per) = f_lsda;
442 TREE_CHAIN (f_lsda) = f_jbuf;
444 layout_type (sjlj_fc_type_node);
446 /* Cache the interesting field offsets so that we have
447 easy access from rtl. */
448 sjlj_fc_call_site_ofs
449 = (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
450 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
452 = (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
453 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
454 sjlj_fc_personality_ofs
455 = (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
456 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
458 = (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
459 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
461 = (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
462 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
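/* For reference, the structure laid out above corresponds roughly to the
   following C declaration from unwind-sjlj.c (a hedged sketch; exact field
   types and the jbuf size are target-dependent, as computed above):

	struct SjLj_Function_Context
	{
	  struct SjLj_Function_Context *prev;
	  int call_site;
	  _Unwind_Word data[4];
	  _Unwind_Personality_Fn personality;
	  void *lsda;
	  void *jbuf[];		/* or a jmp_buf with DONT_USE_BUILTIN_SETJMP */
	};
*/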
467 init_eh_for_function ()
469 cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
472 /* Mark EH for GC. */
475 mark_eh_region (region)
476 struct eh_region *region;
481 switch (region->type)
484 /* This can happen if a nested function is inside the body of a region
485 and we do a GC as part of processing it. */
488 ggc_mark_tree (region->u.cleanup.exp);
491 ggc_mark_rtx (region->u.try.continue_label);
494 ggc_mark_tree (region->u.catch.type_list);
495 ggc_mark_tree (region->u.catch.filter_list);
497 case ERT_ALLOWED_EXCEPTIONS:
498 ggc_mark_tree (region->u.allowed.type_list);
500 case ERT_MUST_NOT_THROW:
503 ggc_mark_tree (region->u.throw.type);
506 ggc_mark_tree (region->u.fixup.cleanup_exp);
512 ggc_mark_rtx (region->label);
513 ggc_mark_rtx (region->resume);
514 ggc_mark_rtx (region->landing_pad);
515 ggc_mark_rtx (region->post_landing_pad);
520 struct eh_status *eh;
527 /* If we've called collect_eh_region_array, use it. Otherwise walk
528 the tree non-recursively. */
529 if (eh->region_array)
531 for (i = eh->last_region_number; i > 0; --i)
533 struct eh_region *r = eh->region_array[i];
534 if (r && r->region_number == i)
538 else if (eh->region_tree)
540 struct eh_region *r = eh->region_tree;
546 else if (r->next_peer)
554 } while (r->next_peer == NULL);
561 ggc_mark_tree (eh->protect_list);
562 ggc_mark_rtx (eh->filter);
563 ggc_mark_rtx (eh->exc_ptr);
564 ggc_mark_tree_varray (eh->ttype_data);
566 if (eh->call_site_data)
568 for (i = eh->call_site_data_used - 1; i >= 0; --i)
569 ggc_mark_rtx (eh->call_site_data[i].landing_pad);
572 ggc_mark_rtx (eh->ehr_stackadj);
573 ggc_mark_rtx (eh->ehr_handler);
574 ggc_mark_rtx (eh->ehr_label);
576 ggc_mark_rtx (eh->sjlj_fc);
577 ggc_mark_rtx (eh->sjlj_exit_after);
584 struct eh_status *eh = f->eh;
586 if (eh->region_array)
589 for (i = eh->last_region_number; i > 0; --i)
591 struct eh_region *r = eh->region_array[i];
592 /* Mind we don't free a region struct more than once. */
593 if (r && r->region_number == i)
596 free (eh->region_array);
598 else if (eh->region_tree)
600 struct eh_region *next, *r = eh->region_tree;
605 else if (r->next_peer)
619 } while (r->next_peer == NULL);
628 VARRAY_FREE (eh->ttype_data);
629 VARRAY_FREE (eh->ehspec_data);
630 VARRAY_FREE (eh->action_record_data);
631 if (eh->call_site_data)
632 free (eh->call_site_data);
636 exception_handler_labels = NULL;
640 /* Start an exception handling region. All instructions emitted
641 after this point are considered to be part of the region until
642 expand_eh_region_end is invoked. */
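/* A hedged sketch of how a front end is expected to drive this pair of
   entry points for a cleanup (the real callers live in the language
   front ends; cleanup_expr stands for whatever cleanup tree they build):

	expand_eh_region_start ();
	... expand the statements being protected ...
	expand_eh_region_end_cleanup (cleanup_expr);

   Nesting such start/end pairs is what grows the region tree rooted at
   cfun->eh->region_tree.  */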
645 expand_eh_region_start ()
647 struct eh_region *new_region;
648 struct eh_region *cur_region;
654 /* Insert a new blank region as a leaf in the tree. */
655 new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
656 cur_region = cfun->eh->cur_region;
657 new_region->outer = cur_region;
660 new_region->next_peer = cur_region->inner;
661 cur_region->inner = new_region;
665 new_region->next_peer = cfun->eh->region_tree;
666 cfun->eh->region_tree = new_region;
668 cfun->eh->cur_region = new_region;
670 /* Create a note marking the start of this region. */
671 new_region->region_number = ++cfun->eh->last_region_number;
672 note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
673 NOTE_EH_HANDLER (note) = new_region->region_number;
676 /* Common code to end a region. Returns the region just ended. */
678 static struct eh_region *
679 expand_eh_region_end ()
681 struct eh_region *cur_region = cfun->eh->cur_region;
684 /* Create a note marking the end of this region. */
685 note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
686 NOTE_EH_HANDLER (note) = cur_region->region_number;
689 cfun->eh->cur_region = cur_region->outer;
694 /* End an exception handling region for a cleanup. HANDLER is an
695 expression to expand for the cleanup. */
698 expand_eh_region_end_cleanup (handler)
701 struct eh_region *region;
702 tree protect_cleanup_actions;
709 region = expand_eh_region_end ();
710 region->type = ERT_CLEANUP;
711 region->label = gen_label_rtx ();
712 region->u.cleanup.exp = handler;
714 around_label = gen_label_rtx ();
715 emit_jump (around_label);
717 emit_label (region->label);
719 /* Give the language a chance to specify an action to be taken if an
720 exception is thrown that would propagate out of the HANDLER. */
721 protect_cleanup_actions
722 = (lang_protect_cleanup_actions
723 ? (*lang_protect_cleanup_actions) ()
726 if (protect_cleanup_actions)
727 expand_eh_region_start ();
729 /* In case this cleanup involves an inline destructor with a try block in
730 it, we need to save the EH return data registers around it. */
731 data_save[0] = gen_reg_rtx (Pmode);
732 emit_move_insn (data_save[0], get_exception_pointer (cfun));
733 data_save[1] = gen_reg_rtx (word_mode);
734 emit_move_insn (data_save[1], get_exception_filter (cfun));
736 expand_expr (handler, const0_rtx, VOIDmode, 0);
738 emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
739 emit_move_insn (cfun->eh->filter, data_save[1]);
741 if (protect_cleanup_actions)
742 expand_eh_region_end_must_not_throw (protect_cleanup_actions);
744 /* We need any stack adjustment complete before the around_label. */
745 do_pending_stack_adjust ();
747 /* We delay the generation of the _Unwind_Resume until we generate
748 landing pads. We emit a marker here so as to get good control
749 flow data in the meantime. */
751 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
754 emit_label (around_label);
757 /* End an exception handling region for a try block, and prepare
758 for subsequent calls to expand_start_catch. */
761 expand_start_all_catch ()
763 struct eh_region *region;
768 region = expand_eh_region_end ();
769 region->type = ERT_TRY;
770 region->u.try.prev_try = cfun->eh->try_region;
771 region->u.try.continue_label = gen_label_rtx ();
773 cfun->eh->try_region = region;
775 emit_jump (region->u.try.continue_label);
778 /* Begin a catch clause. TYPE is the type caught, a list of such types, or
779 null if this is a catch-all clause. Providing a type list makes it
780 possible to associate the catch region with several exception types, which
781 is useful e.g. for Ada. */
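/* A hedged sketch of the expected emission sequence for a try block with
   two handlers (not a verbatim transcript of any front end; type_A is a
   placeholder for a type tree):

	expand_eh_region_start ();
	... expand the try body ...
	expand_start_all_catch ();

	expand_start_catch (type_A);
	... expand the handler for A ...
	expand_end_catch ();

	expand_start_catch (NULL_TREE);		// catch (...)
	... expand the catch-all handler ...
	expand_end_catch ();

	expand_end_all_catch ();
*/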
784 expand_start_catch (type_or_list)
787 struct eh_region *t, *c, *l;
793 type_list = type_or_list;
797 /* Make sure we always end up with a type list to normalize further
798 processing, then register each type against the runtime types
802 if (TREE_CODE (type_or_list) != TREE_LIST)
803 type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);
805 type_node = type_list;
806 for (; type_node; type_node = TREE_CHAIN (type_node))
807 add_type_for_runtime (TREE_VALUE (type_node));
810 expand_eh_region_start ();
812 t = cfun->eh->try_region;
813 c = cfun->eh->cur_region;
815 c->u.catch.type_list = type_list;
816 c->label = gen_label_rtx ();
818 l = t->u.try.last_catch;
819 c->u.catch.prev_catch = l;
821 l->u.catch.next_catch = c;
824 t->u.try.last_catch = c;
826 emit_label (c->label);
829 /* End a catch clause. Control will resume after the try/catch block. */
834 struct eh_region *try_region, *catch_region;
839 catch_region = expand_eh_region_end ();
840 try_region = cfun->eh->try_region;
842 emit_jump (try_region->u.try.continue_label);
845 /* End a sequence of catch handlers for a try block. */
848 expand_end_all_catch ()
850 struct eh_region *try_region;
855 try_region = cfun->eh->try_region;
856 cfun->eh->try_region = try_region->u.try.prev_try;
858 emit_label (try_region->u.try.continue_label);
861 /* End an exception region for an exception type filter. ALLOWED is a
862 TREE_LIST of types to be matched by the runtime. FAILURE is an
863 expression to invoke if a mismatch occurs.
865 ??? We could use these semantics for calls to rethrow, too; if we can
866 see the surrounding catch clause, we know that the exception we're
867 rethrowing satisfies the "filter" of the catch type. */
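/* For instance (a hedged illustration, not quoted from the C++ front end):
   a declaration such as

	void f () throw (A, B);

   is expected to end the region for f's body with ALLOWED being the list
   (A, B) and FAILURE being a call to the unexpected-exception handler,
   e.g. __cxa_call_unexpected.  */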
870 expand_eh_region_end_allowed (allowed, failure)
871 tree allowed, failure;
873 struct eh_region *region;
879 region = expand_eh_region_end ();
880 region->type = ERT_ALLOWED_EXCEPTIONS;
881 region->u.allowed.type_list = allowed;
882 region->label = gen_label_rtx ();
884 for (; allowed ; allowed = TREE_CHAIN (allowed))
885 add_type_for_runtime (TREE_VALUE (allowed));
887 /* We must emit the call to FAILURE here, so that if this function
888 throws a different exception, it will be processed by the
891 around_label = gen_label_rtx ();
892 emit_jump (around_label);
894 emit_label (region->label);
895 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
896 /* We must adjust the stack before we reach the AROUND_LABEL because
897 the call to FAILURE does not occur on all paths to the
899 do_pending_stack_adjust ();
901 emit_label (around_label);
904 /* End an exception region for a must-not-throw filter. FAILURE is an
905 expression to invoke if an uncaught exception propagates this far.
907 This is conceptually identical to expand_eh_region_end_allowed with
908 an empty allowed list (if you passed "std::terminate" instead of
909 "__cxa_call_unexpected"), but they are represented differently in
913 expand_eh_region_end_must_not_throw (failure)
916 struct eh_region *region;
922 region = expand_eh_region_end ();
923 region->type = ERT_MUST_NOT_THROW;
924 region->label = gen_label_rtx ();
926 /* We must emit the call to FAILURE here, so that if this function
927 throws a different exception, it will be processed by the
930 around_label = gen_label_rtx ();
931 emit_jump (around_label);
933 emit_label (region->label);
934 expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
936 emit_label (around_label);
939 /* End an exception region for a throw. No handling goes on here,
940 but it's the easiest way for the front-end to indicate what type
944 expand_eh_region_end_throw (type)
947 struct eh_region *region;
952 region = expand_eh_region_end ();
953 region->type = ERT_THROW;
954 region->u.throw.type = type;
957 /* End a fixup region. Within this region the cleanups for the immediately
958 enclosing region are _not_ run. This is used for goto cleanup to avoid
959 destroying an object twice.
961 This would be an extraordinarily simple prospect, were it not for the
962 fact that we don't actually know what the immediately enclosing region
963 is. This surprising fact is because expand_cleanups is currently
964 generating a sequence that it will insert somewhere else. We collect
965 the proper notion of "enclosing" in convert_from_eh_region_ranges. */
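/* A hedged illustration of why fixup regions are needed:

	{
	  C obj;		// C has a destructor, i.e. a cleanup region
	  if (cond)
	    goto out;		// leaving the scope runs obj's destructor
	  ...
	}
      out:;

   The destructor call expanded for the goto must not be protected by
   obj's own cleanup region, or the object would appear to be destroyed
   twice; the fixup region marks that stretch of code until the real
   enclosing region is discovered in convert_from_eh_region_ranges.  */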
968 expand_eh_region_end_fixup (handler)
971 struct eh_region *fixup;
976 fixup = expand_eh_region_end ();
977 fixup->type = ERT_FIXUP;
978 fixup->u.fixup.cleanup_exp = handler;
981 /* Return an rtl expression for a pointer to the exception object
985 get_exception_pointer (fun)
986 struct function *fun;
988 rtx exc_ptr = fun->eh->exc_ptr;
989 if (fun == cfun && ! exc_ptr)
991 exc_ptr = gen_reg_rtx (Pmode);
992 fun->eh->exc_ptr = exc_ptr;
997 /* Return an rtl expression for the exception dispatch filter
1001 get_exception_filter (fun)
1002 struct function *fun;
1004 rtx filter = fun->eh->filter;
1005 if (fun == cfun && ! filter)
1007 filter = gen_reg_rtx (word_mode);
1008 fun->eh->filter = filter;
1013 /* Begin a region that will contain entries created with
1014 add_partial_entry. */
1017 begin_protect_partials ()
1019 /* Push room for a new list. */
1020 cfun->eh->protect_list
1021 = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
1024 /* Start a new exception region for a region of code that has a
1025 cleanup action and push the HANDLER for the region onto
1026 protect_list. All of the regions created with add_partial_entry
1027 will be ended when end_protect_partials is invoked.
1029 ??? The only difference between this purpose and that of
1030 expand_decl_cleanup is that in this case, we only want the cleanup to
1031 run if an exception is thrown. This should also be handled using
1035 add_partial_entry (handler)
1038 expand_eh_region_start ();
1040 /* ??? This comment was old before the most recent rewrite. We
1041 really ought to fix the callers at some point. */
1042 /* For backwards compatibility, we allow callers to omit calls to
1043 begin_protect_partials for the outermost region. So, we must
1044 explicitly do so here. */
1045 if (!cfun->eh->protect_list)
1046 begin_protect_partials ();
1048 /* Add this entry to the front of the list. */
1049 TREE_VALUE (cfun->eh->protect_list)
1050 = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
1053 /* End all the pending exception regions on protect_list. */
1056 end_protect_partials ()
1060 /* ??? This comment was old before the most recent rewrite. We
1061 really ought to fix the callers at some point. */
1062 /* For backwards compatibility, we allow callers to omit the call to
1063 begin_protect_partials for the outermost region. So,
1064 PROTECT_LIST may be NULL. */
1065 if (!cfun->eh->protect_list)
1068 /* Pop the topmost entry. */
1069 t = TREE_VALUE (cfun->eh->protect_list);
1070 cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);
1072 /* End all the exception regions. */
1073 for (; t; t = TREE_CHAIN (t))
1074 expand_eh_region_end_cleanup (TREE_VALUE (t));
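/* A hedged sketch of the intended calling pattern for the partial-entry
   machinery above, e.g. while constructing an aggregate whose completed
   subobjects must still be cleaned up if a later constructor throws
   (cleanup_1 and cleanup_2 are placeholders for front-end cleanup trees):

	begin_protect_partials ();
	... construct the first subobject ...
	add_partial_entry (cleanup_1);
	... construct the second subobject ...
	add_partial_entry (cleanup_2);
	...
	end_protect_partials ();	// ends one cleanup region per entry
*/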
1078 /* This section is for the exception handling specific optimization pass. */
1080 /* Build an array for random access into the exception region tree. It's just as simple to
1081 collect the regions this way as in expand_eh_region_start, but
1082 without having to realloc memory. */
1085 collect_eh_region_array ()
1087 struct eh_region **array, *i;
1089 i = cfun->eh->region_tree;
1093 array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
1094 cfun->eh->region_array = array;
1098 array[i->region_number] = i;
1100 /* If there are sub-regions, process them. */
1103 /* If there are peers, process them. */
1104 else if (i->next_peer)
1106 /* Otherwise, step back up the tree to the next peer. */
1113 } while (i->next_peer == NULL);
1120 resolve_fixup_regions ()
1122 int i, j, n = cfun->eh->last_region_number;
1124 for (i = 1; i <= n; ++i)
1126 struct eh_region *fixup = cfun->eh->region_array[i];
1127 struct eh_region *cleanup = 0;
1129 if (! fixup || fixup->type != ERT_FIXUP)
1132 for (j = 1; j <= n; ++j)
1134 cleanup = cfun->eh->region_array[j];
1135 if (cleanup->type == ERT_CLEANUP
1136 && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
1142 fixup->u.fixup.real_region = cleanup->outer;
1146 /* Now that we've discovered what region actually encloses a fixup,
1147 we can shuffle pointers and remove them from the tree. */
1150 remove_fixup_regions ()
1154 struct eh_region *fixup;
1156 /* Walk the insn chain and adjust the REG_EH_REGION numbers
1157 for instructions referencing fixup regions. This is only
1158 strictly necessary for fixup regions with no parent, but
1159 doesn't hurt to do it for all regions. */
1160 for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
1162 && (note = find_reg_note (insn, REG_EH_REGION, NULL))
1163 && INTVAL (XEXP (note, 0)) > 0
1164 && (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
1165 && fixup->type == ERT_FIXUP)
1167 if (fixup->u.fixup.real_region)
1168 XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
1170 remove_note (insn, note);
1173 /* Remove the fixup regions from the tree. */
1174 for (i = cfun->eh->last_region_number; i > 0; --i)
1176 fixup = cfun->eh->region_array[i];
1180 /* Allow GC to maybe free some memory. */
1181 if (fixup->type == ERT_CLEANUP)
1182 fixup->u.cleanup.exp = NULL_TREE;
1184 if (fixup->type != ERT_FIXUP)
1189 struct eh_region *parent, *p, **pp;
1191 parent = fixup->u.fixup.real_region;
1193 /* Fix up the children's parent pointers; find the end of
1195 for (p = fixup->inner; ; p = p->next_peer)
1202 /* In the tree of cleanups, only outer-inner ordering matters.
1203 So link the children back in anywhere at the correct level. */
1205 pp = &parent->inner;
1207 pp = &cfun->eh->region_tree;
1210 fixup->inner = NULL;
1213 remove_eh_handler (fixup);
1217 /* Remove all regions whose labels are not reachable from insns. */
1220 remove_unreachable_regions (insns)
1223 int i, *uid_region_num;
1225 struct eh_region *r;
1228 uid_region_num = xcalloc (get_max_uid (), sizeof(int));
1229 reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));
1231 for (i = cfun->eh->last_region_number; i > 0; --i)
1233 r = cfun->eh->region_array[i];
1234 if (!r || r->region_number != i)
1239 if (uid_region_num[INSN_UID (r->resume)])
1241 uid_region_num[INSN_UID (r->resume)] = i;
1245 if (uid_region_num[INSN_UID (r->label)])
1247 uid_region_num[INSN_UID (r->label)] = i;
1249 if (r->type == ERT_TRY && r->u.try.continue_label)
1251 if (uid_region_num[INSN_UID (r->u.try.continue_label)])
1253 uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
1257 for (insn = insns; insn; insn = NEXT_INSN (insn))
1258 reachable[uid_region_num[INSN_UID (insn)]] = true;
1260 for (i = cfun->eh->last_region_number; i > 0; --i)
1262 r = cfun->eh->region_array[i];
1263 if (r && r->region_number == i && !reachable[i])
1265 /* Don't remove ERT_THROW regions if their outer region
1267 if (r->type == ERT_THROW
1269 && reachable[r->outer->region_number])
1272 remove_eh_handler (r);
1277 free (uid_region_num);
1280 /* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
1281 can_throw instruction in the region. */
1284 convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
1292 for (insn = *pinsns; insn ; insn = next)
1294 next = NEXT_INSN (insn);
1295 if (GET_CODE (insn) == NOTE)
1297 int kind = NOTE_LINE_NUMBER (insn);
1298 if (kind == NOTE_INSN_EH_REGION_BEG
1299 || kind == NOTE_INSN_EH_REGION_END)
1301 if (kind == NOTE_INSN_EH_REGION_BEG)
1303 struct eh_region *r;
1306 cur = NOTE_EH_HANDLER (insn);
1308 r = cfun->eh->region_array[cur];
1309 if (r->type == ERT_FIXUP)
1311 r = r->u.fixup.real_region;
1312 cur = r ? r->region_number : 0;
1314 else if (r->type == ERT_CATCH)
1317 cur = r ? r->region_number : 0;
1323 /* Removing the first insn of a CALL_PLACEHOLDER sequence
1324 requires extra care to adjust sequence start. */
1325 if (insn == *pinsns)
1331 else if (INSN_P (insn))
1334 && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1335 /* Calls can always potentially throw exceptions, unless
1336 they have a REG_EH_REGION note with a value of 0 or less,
1337 which should be the only possible kind so far. */
1338 && (GET_CODE (insn) == CALL_INSN
1339 /* If we wanted exceptions for non-call insns, then
1340 any may_trap_p instruction could throw. */
1341 || (flag_non_call_exceptions
1342 && GET_CODE (PATTERN (insn)) != CLOBBER
1343 && GET_CODE (PATTERN (insn)) != USE
1344 && may_trap_p (PATTERN (insn)))))
1346 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
1350 if (GET_CODE (insn) == CALL_INSN
1351 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1353 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
1355 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
1357 convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
1368 convert_from_eh_region_ranges ()
1373 collect_eh_region_array ();
1374 resolve_fixup_regions ();
1376 stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
1377 insns = get_insns ();
1378 convert_from_eh_region_ranges_1 (&insns, stack, 0);
1381 remove_fixup_regions ();
1382 remove_unreachable_regions (insns);
1386 find_exception_handler_labels ()
1388 rtx list = NULL_RTX;
1391 free_EXPR_LIST_list (&exception_handler_labels);
1393 if (cfun->eh->region_tree == NULL)
1396 for (i = cfun->eh->last_region_number; i > 0; --i)
1398 struct eh_region *region = cfun->eh->region_array[i];
1401 if (! region || region->region_number != i)
1403 if (cfun->eh->built_landing_pads)
1404 lab = region->landing_pad;
1406 lab = region->label;
1409 list = alloc_EXPR_LIST (0, lab, list);
1412 /* For sjlj exceptions, we need the return label to remain live until
1413 after landing pad generation. */
1414 if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
1415 list = alloc_EXPR_LIST (0, return_label, list);
1417 exception_handler_labels = list;
1421 static struct eh_region *
1422 duplicate_eh_region_1 (o, map)
1423 struct eh_region *o;
1424 struct inline_remap *map;
1427 = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));
1429 n->region_number = o->region_number + cfun->eh->last_region_number;
1435 case ERT_MUST_NOT_THROW:
1439 if (o->u.try.continue_label)
1440 n->u.try.continue_label
1441 = get_label_from_map (map,
1442 CODE_LABEL_NUMBER (o->u.try.continue_label));
1446 n->u.catch.type_list = o->u.catch.type_list;
1449 case ERT_ALLOWED_EXCEPTIONS:
1450 n->u.allowed.type_list = o->u.allowed.type_list;
1454 n->u.throw.type = o->u.throw.type;
1461 n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
1464 n->resume = map->insn_map[INSN_UID (o->resume)];
1465 if (n->resume == NULL)
1473 duplicate_eh_region_2 (o, n_array)
1474 struct eh_region *o;
1475 struct eh_region **n_array;
1477 struct eh_region *n = n_array[o->region_number];
1482 n->u.try.catch = n_array[o->u.try.catch->region_number];
1483 n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
1487 if (o->u.catch.next_catch)
1488 n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
1489 if (o->u.catch.prev_catch)
1490 n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
1498 n->outer = n_array[o->outer->region_number];
1500 n->inner = n_array[o->inner->region_number];
1502 n->next_peer = n_array[o->next_peer->region_number];
1506 duplicate_eh_regions (ifun, map)
1507 struct function *ifun;
1508 struct inline_remap *map;
1510 int ifun_last_region_number = ifun->eh->last_region_number;
1511 struct eh_region **n_array, *root, *cur;
1514 if (ifun_last_region_number == 0)
1517 n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));
1519 for (i = 1; i <= ifun_last_region_number; ++i)
1521 cur = ifun->eh->region_array[i];
1522 if (!cur || cur->region_number != i)
1524 n_array[i] = duplicate_eh_region_1 (cur, map);
1526 for (i = 1; i <= ifun_last_region_number; ++i)
1528 cur = ifun->eh->region_array[i];
1529 if (!cur || cur->region_number != i)
1531 duplicate_eh_region_2 (cur, n_array);
1534 root = n_array[ifun->eh->region_tree->region_number];
1535 cur = cfun->eh->cur_region;
1538 struct eh_region *p = cur->inner;
1541 while (p->next_peer)
1543 p->next_peer = root;
1548 for (i = 1; i <= ifun_last_region_number; ++i)
1549 if (n_array[i] && n_array[i]->outer == NULL)
1550 n_array[i]->outer = cur;
1554 struct eh_region *p = cfun->eh->region_tree;
1557 while (p->next_peer)
1559 p->next_peer = root;
1562 cfun->eh->region_tree = root;
1567 i = cfun->eh->last_region_number;
1568 cfun->eh->last_region_number = i + ifun_last_region_number;
1574 t2r_eq (pentry, pdata)
1578 tree entry = (tree) pentry;
1579 tree data = (tree) pdata;
1581 return TREE_PURPOSE (entry) == data;
1588 tree entry = (tree) pentry;
1589 return TYPE_HASH (TREE_PURPOSE (entry));
1593 t2r_mark_1 (slot, data)
1595 PTR data ATTRIBUTE_UNUSED;
1597 tree contents = (tree) *slot;
1598 ggc_mark_tree (contents);
1606 htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
1610 add_type_for_runtime (type)
1615 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1616 TYPE_HASH (type), INSERT);
1619 tree runtime = (*lang_eh_runtime_type) (type);
1620 *slot = tree_cons (type, runtime, NULL_TREE);
1625 lookup_type_for_runtime (type)
1630 slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
1631 TYPE_HASH (type), NO_INSERT);
1633 /* We should always have inserted the data earlier. */
1634 return TREE_VALUE (*slot);
1638 /* Represent an entry in @TTypes for either catch actions
1639 or exception filter actions. */
1640 struct ttypes_filter
1646 /* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
1647 (a tree) for a @TTypes type node we are thinking about adding. */
1650 ttypes_filter_eq (pentry, pdata)
1654 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1655 tree data = (tree) pdata;
1657 return entry->t == data;
1661 ttypes_filter_hash (pentry)
1664 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1665 return TYPE_HASH (entry->t);
1668 /* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
1669 exception specification list we are thinking about adding. */
1670 /* ??? Currently we use the type lists in the order given. Someone
1671 should put these in some canonical order. */
1674 ehspec_filter_eq (pentry, pdata)
1678 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1679 const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;
1681 return type_list_equal (entry->t, data->t);
1684 /* Hash function for exception specification lists. */
1687 ehspec_filter_hash (pentry)
1690 const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
1694 for (list = entry->t; list ; list = TREE_CHAIN (list))
1695 h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
1699 /* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
1700 up the search. Return the filter value to be used. */
1703 add_ttypes_entry (ttypes_hash, type)
1707 struct ttypes_filter **slot, *n;
1709 slot = (struct ttypes_filter **)
1710 htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);
1712 if ((n = *slot) == NULL)
1714 /* Filter value is a 1-based table index. */
1716 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1718 n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
1721 VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
1727 /* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
1728 to speed up the search. Return the filter value to be used. */
1731 add_ehspec_entry (ehspec_hash, ttypes_hash, list)
1736 struct ttypes_filter **slot, *n;
1737 struct ttypes_filter dummy;
1740 slot = (struct ttypes_filter **)
1741 htab_find_slot (ehspec_hash, &dummy, INSERT);
1743 if ((n = *slot) == NULL)
1745 /* Filter value is a -1-based byte index into a uleb128 buffer. */
1747 n = (struct ttypes_filter *) xmalloc (sizeof (*n));
1749 n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
1752 /* Look up each type in the list and encode its filter
1753 value as a uleb128. Terminate the list with 0. */
1754 for (; list ; list = TREE_CHAIN (list))
1755 push_uleb128 (&cfun->eh->ehspec_data,
1756 add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
1757 VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
1763 /* Generate the action filter values to be used for CATCH and
1764 ALLOWED_EXCEPTIONS regions. When using dwarf2 exception regions,
1765 we use lots of landing pads, and so every type or list can share
1766 the same filter value, which saves table space. */
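/* A small worked example (hedged; the numbers are only illustrative):
   for a function containing catch (A), catch (B) and an exception
   specification throw (A, B), the loop below might produce

	ttype_data:	A, B		so A -> filter 1, B -> filter 2
	ehspec_data:	1, 2, 0		uleb128-encoded, zero-terminated

   with each catch clause receiving a positive, 1-based ttype filter and
   the allowed-exceptions region a negative filter that is a -1-based
   byte offset into ehspec_data (here -1), as add_ttypes_entry and
   add_ehspec_entry describe above.  */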
1769 assign_filter_values ()
1772 htab_t ttypes, ehspec;
1774 VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
1775 VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");
1777 ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
1778 ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
1780 for (i = cfun->eh->last_region_number; i > 0; --i)
1782 struct eh_region *r = cfun->eh->region_array[i];
1784 /* Mind we don't process a region more than once. */
1785 if (!r || r->region_number != i)
1791 /* Whatever type_list is (NULL or true list), we build a list
1792 of filters for the region. */
1793 r->u.catch.filter_list = NULL_TREE;
1795 if (r->u.catch.type_list != NULL)
1797 /* Get a filter value for each of the types caught and store
1798 them in the region's dedicated list. */
1799 tree tp_node = r->u.catch.type_list;
1801 for (;tp_node; tp_node = TREE_CHAIN (tp_node))
1803 int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
1804 tree flt_node = build_int_2 (flt, 0);
1806 r->u.catch.filter_list
1807 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1812 /* Get a filter value for the NULL list also since it will need
1813 an action record anyway. */
1814 int flt = add_ttypes_entry (ttypes, NULL);
1815 tree flt_node = build_int_2 (flt, 0);
1817 r->u.catch.filter_list
1818 = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
1823 case ERT_ALLOWED_EXCEPTIONS:
1825 = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
1833 htab_delete (ttypes);
1834 htab_delete (ehspec);
1838 build_post_landing_pads ()
1842 for (i = cfun->eh->last_region_number; i > 0; --i)
1844 struct eh_region *region = cfun->eh->region_array[i];
1847 /* Mind we don't process a region more than once. */
1848 if (!region || region->region_number != i)
1851 switch (region->type)
1854 /* ??? Collect the set of all non-overlapping catch handlers
1855 all the way up the chain until blocked by a cleanup. */
1856 /* ??? Outer try regions can share landing pads with inner
1857 try regions if the types are completely non-overlapping,
1858 and there are no intervening cleanups. */
1860 region->post_landing_pad = gen_label_rtx ();
1864 emit_label (region->post_landing_pad);
1866 /* ??? It is mighty inconvenient to call back into the
1867 switch statement generation code in expand_end_case.
1868 Rapid prototyping sez a sequence of ifs. */
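	  /* In rough pseudo-code, the dispatch emitted just below amounts to
	     (a hedged sketch of the generated insns, not additional code):

		post_landing_pad:
		  if (filter == filter_of_catch_1) goto catch_1;
		  if (filter == filter_of_catch_2) goto catch_2;
		  ...
		  resx;	// hand off to the enclosing handler later on

	     with an unconditional jump instead of a comparison for a
	     catch-all handler.  */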
1870 struct eh_region *c;
1871 for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
1873 /* ??? _Unwind_ForcedUnwind wants no match here. */
1874 if (c->u.catch.type_list == NULL)
1875 emit_jump (c->label);
1878 /* We need one cmp/jump per type caught. Each type
1879 list entry has a matching entry in the filter list
1880 (see assign_filter_values). */
1881 tree tp_node = c->u.catch.type_list;
1882 tree flt_node = c->u.catch.filter_list;
1886 emit_cmp_and_jump_insns
1888 GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
1889 EQ, NULL_RTX, word_mode, 0, c->label);
1891 tp_node = TREE_CHAIN (tp_node);
1892 flt_node = TREE_CHAIN (flt_node);
1898 /* We delay the generation of the _Unwind_Resume until we generate
1899 landing pads. We emit a marker here so as to get good control
1900 flow data in the meantime. */
1902 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1908 emit_insns_before (seq, region->u.try.catch->label);
1911 case ERT_ALLOWED_EXCEPTIONS:
1912 region->post_landing_pad = gen_label_rtx ();
1916 emit_label (region->post_landing_pad);
1918 emit_cmp_and_jump_insns (cfun->eh->filter,
1919 GEN_INT (region->u.allowed.filter),
1920 EQ, NULL_RTX, word_mode, 0, region->label);
1922 /* We delay the generation of the _Unwind_Resume until we generate
1923 landing pads. We emit a marker here so as to get good control
1924 flow data in the meantime. */
1926 = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
1932 emit_insns_before (seq, region->label);
1936 case ERT_MUST_NOT_THROW:
1937 region->post_landing_pad = region->label;
1942 /* Nothing to do. */
1951 /* Replace RESX patterns with jumps to the next handler if any, or calls to
1952 _Unwind_Resume otherwise. */
1955 connect_post_landing_pads ()
1959 for (i = cfun->eh->last_region_number; i > 0; --i)
1961 struct eh_region *region = cfun->eh->region_array[i];
1962 struct eh_region *outer;
1965 /* Mind we don't process a region more than once. */
1966 if (!region || region->region_number != i)
1969 /* If there is no RESX, or it has been deleted by flow, there's
1970 nothing to fix up. */
1971 if (! region->resume || INSN_DELETED_P (region->resume))
1974 /* Search for another landing pad in this function. */
1975 for (outer = region->outer; outer ; outer = outer->outer)
1976 if (outer->post_landing_pad)
1982 emit_jump (outer->post_landing_pad);
1984 emit_library_call (unwind_resume_libfunc, LCT_THROW,
1985 VOIDmode, 1, cfun->eh->exc_ptr, Pmode);
1989 emit_insns_before (seq, region->resume);
1990 delete_insn (region->resume);
1996 dw2_build_landing_pads ()
2001 for (i = cfun->eh->last_region_number; i > 0; --i)
2003 struct eh_region *region = cfun->eh->region_array[i];
2005 bool clobbers_hard_regs = false;
2007 /* Mind we don't process a region more than once. */
2008 if (!region || region->region_number != i)
2011 if (region->type != ERT_CLEANUP
2012 && region->type != ERT_TRY
2013 && region->type != ERT_ALLOWED_EXCEPTIONS)
2018 region->landing_pad = gen_label_rtx ();
2019 emit_label (region->landing_pad);
2021 #ifdef HAVE_exception_receiver
2022 if (HAVE_exception_receiver)
2023 emit_insn (gen_exception_receiver ());
2026 #ifdef HAVE_nonlocal_goto_receiver
2027 if (HAVE_nonlocal_goto_receiver)
2028 emit_insn (gen_nonlocal_goto_receiver ());
2033 /* If the eh_return data registers are call-saved, then we
2034 won't have considered them clobbered from the call that
2035 threw. Kill them now. */
2038 unsigned r = EH_RETURN_DATA_REGNO (j);
2039 if (r == INVALID_REGNUM)
2041 if (! call_used_regs[r])
2043 emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
2044 clobbers_hard_regs = true;
2048 if (clobbers_hard_regs)
2050 /* @@@ This is a kludge. Not all machine descriptions define a
2051 blockage insn, but we must not allow the code we just generated
2052 to be reordered by scheduling. So emit an ASM_INPUT to act as
2054 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
2057 emit_move_insn (cfun->eh->exc_ptr,
2058 gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
2059 emit_move_insn (cfun->eh->filter,
2060 gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));
2065 emit_insns_before (seq, region->post_landing_pad);
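	  /* In rough pseudo-code, each landing pad built above is
	     (a hedged sketch of the generated insns):

		landing_pad:
		  <exception_receiver / nonlocal_goto_receiver, if the
		   target provides one>
		  <clobbers of call-saved EH data registers, if needed>
		  exc_ptr = reg EH_RETURN_DATA_REGNO (0);
		  filter  = reg EH_RETURN_DATA_REGNO (1);
		  ... falls through to the post landing pad ...  */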
2072 int directly_reachable;
2075 int call_site_index;
2079 sjlj_find_directly_reachable_regions (lp_info)
2080 struct sjlj_lp_info *lp_info;
2083 bool found_one = false;
2085 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2087 struct eh_region *region;
2088 enum reachable_code rc;
2092 if (! INSN_P (insn))
2095 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2096 if (!note || INTVAL (XEXP (note, 0)) <= 0)
2099 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2101 type_thrown = NULL_TREE;
2102 if (region->type == ERT_THROW)
2104 type_thrown = region->u.throw.type;
2105 region = region->outer;
2108 /* Find the first containing region that might handle the exception.
2109 That's the landing pad to which we will transfer control. */
2110 rc = RNL_NOT_CAUGHT;
2111 for (; region; region = region->outer)
2113 rc = reachable_next_level (region, type_thrown, 0);
2114 if (rc != RNL_NOT_CAUGHT)
2117 if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
2119 lp_info[region->region_number].directly_reachable = 1;
2128 sjlj_assign_call_site_values (dispatch_label, lp_info)
2130 struct sjlj_lp_info *lp_info;
2135 /* First task: build the action table. */
2137 VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
2138 ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
2140 for (i = cfun->eh->last_region_number; i > 0; --i)
2141 if (lp_info[i].directly_reachable)
2143 struct eh_region *r = cfun->eh->region_array[i];
2144 r->landing_pad = dispatch_label;
2145 lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
2146 if (lp_info[i].action_index != -1)
2147 cfun->uses_eh_lsda = 1;
2150 htab_delete (ar_hash);
2152 /* Next: assign dispatch values. In dwarf2 terms, this would be the
2153 landing pad label for the region. For sjlj though, there is one
2154 common landing pad from which we dispatch to the post-landing pads.
2156 A region receives a dispatch index if it is directly reachable
2157 and requires in-function processing. Regions that share post-landing
2158 pads may share dispatch indices. */
2159 /* ??? Post-landing pad sharing doesn't actually happen at the moment
2160 (see build_post_landing_pads) so we don't bother checking for it. */
2163 for (i = cfun->eh->last_region_number; i > 0; --i)
2164 if (lp_info[i].directly_reachable)
2165 lp_info[i].dispatch_index = index++;
2167 /* Finally: assign call-site values. In dwarf2 terms, this would be
2168 the region number assigned by convert_to_eh_region_ranges, but
2169 handles no-action and must-not-throw differently. */
2172 for (i = cfun->eh->last_region_number; i > 0; --i)
2173 if (lp_info[i].directly_reachable)
2175 int action = lp_info[i].action_index;
2177 /* Map must-not-throw to otherwise unused call-site index 0. */
2180 /* Map no-action to otherwise unused call-site index -1. */
2181 else if (action == -1)
2183 /* Otherwise, look it up in the table. */
2185 index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
2187 lp_info[i].call_site_index = index;
2192 sjlj_mark_call_sites (lp_info)
2193 struct sjlj_lp_info *lp_info;
2195 int last_call_site = -2;
2198 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
2200 struct eh_region *region;
2202 rtx note, before, p;
2204 /* Reset value tracking at extended basic block boundaries. */
2205 if (GET_CODE (insn) == CODE_LABEL)
2206 last_call_site = -2;
2208 if (! INSN_P (insn))
2211 note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
2214 /* Calls (and trapping insns) without notes are outside any
2215 exception handling region in this function. Mark them as
2217 if (GET_CODE (insn) == CALL_INSN
2218 || (flag_non_call_exceptions
2219 && may_trap_p (PATTERN (insn))))
2220 this_call_site = -1;
2226 /* Calls that are known to not throw need not be marked. */
2227 if (INTVAL (XEXP (note, 0)) <= 0)
2230 region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
2231 this_call_site = lp_info[region->region_number].call_site_index;
2234 if (this_call_site == last_call_site)
2237 /* Don't separate a call from its argument loads. */
2239 if (GET_CODE (insn) == CALL_INSN)
2240 before = find_first_parameter_load (insn, NULL_RTX);
2243 mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
2244 sjlj_fc_call_site_ofs);
2245 emit_move_insn (mem, GEN_INT (this_call_site));
2249 emit_insns_before (p, before);
2250 last_call_site = this_call_site;
2254 /* Construct the SjLj_Function_Context. */
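/* In rough pseudo-code, the entry code emitted below does (a hedged
   sketch of the generated insns, not additional source):

	fc.personality = eh_personality_libfunc;
	fc.lsda = &LLSDAnnn;			// or 0 without an LSDA
	if (setjmp (fc.jbuf))			// or __builtin_setjmp
	  goto dispatch_label;
	_Unwind_SjLj_Register (&fc);
*/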
2257 sjlj_emit_function_enter (dispatch_label)
2260 rtx fn_begin, fc, mem, seq;
2262 fc = cfun->eh->sjlj_fc;
2266 /* We're storing this libcall's address into memory instead of
2267 calling it directly. Thus, we must call assemble_external_libcall
2268 here, as we cannot depend on emit_library_call to do it for us. */
2269 assemble_external_libcall (eh_personality_libfunc);
2270 mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
2271 emit_move_insn (mem, eh_personality_libfunc);
2273 mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
2274 if (cfun->uses_eh_lsda)
2277 ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
2278 emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
2281 emit_move_insn (mem, const0_rtx);
2283 #ifdef DONT_USE_BUILTIN_SETJMP
2286 x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
2287 TYPE_MODE (integer_type_node), 1,
2288 plus_constant (XEXP (fc, 0),
2289 sjlj_fc_jbuf_ofs), Pmode);
2291 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
2292 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);
2294 emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
2295 TYPE_MODE (integer_type_node), 0, dispatch_label);
2298 expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
2302 emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
2303 1, XEXP (fc, 0), Pmode);
2308 /* ??? Instead of doing this at the beginning of the function,
2309 do this in a block that is at loop level 0 and dominates all
2310 can_throw_internal instructions. */
2312 for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
2313 if (GET_CODE (fn_begin) == NOTE
2314 && NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
2316 emit_insns_after (seq, fn_begin);
2319 /* Call back from expand_function_end to know where we should put
2320 the call to unwind_sjlj_unregister_libfunc if needed. */
2323 sjlj_emit_function_exit_after (after)
2326 cfun->eh->sjlj_exit_after = after;
2330 sjlj_emit_function_exit ()
2336 emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
2337 1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);
2342 /* ??? Really this can be done in any block at loop level 0 that
2343 post-dominates all can_throw_internal instructions. This is
2344 the last possible moment. */
2346 emit_insns_after (seq, cfun->eh->sjlj_exit_after);
2350 sjlj_emit_dispatch_table (dispatch_label, lp_info)
2352 struct sjlj_lp_info *lp_info;
2354 int i, first_reachable;
2355 rtx mem, dispatch, seq, fc;
2357 fc = cfun->eh->sjlj_fc;
2361 emit_label (dispatch_label);
2363 #ifndef DONT_USE_BUILTIN_SETJMP
2364 expand_builtin_setjmp_receiver (dispatch_label);
2367 /* Load up dispatch index, exc_ptr and filter values from the
2368 function context. */
2369 mem = adjust_address (fc, TYPE_MODE (integer_type_node),
2370 sjlj_fc_call_site_ofs);
2371 dispatch = copy_to_reg (mem);
2373 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
2374 if (word_mode != Pmode)
2376 #ifdef POINTERS_EXTEND_UNSIGNED
2377 mem = convert_memory_address (Pmode, mem);
2379 mem = convert_to_mode (Pmode, mem, 0);
2382 emit_move_insn (cfun->eh->exc_ptr, mem);
2384 mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
2385 emit_move_insn (cfun->eh->filter, mem);
2387 /* Jump to one of the directly reachable regions. */
2388 /* ??? This really ought to be using a switch statement. */
2390 first_reachable = 0;
2391 for (i = cfun->eh->last_region_number; i > 0; --i)
2393 if (! lp_info[i].directly_reachable)
2396 if (! first_reachable)
2398 first_reachable = i;
2402 emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
2403 EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
2404 cfun->eh->region_array[i]->post_landing_pad);
2410 emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
2411 ->post_landing_pad));
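  /* Taken together, the common sjlj landing pad emitted above behaves
     roughly like (a hedged pseudo-code sketch of the generated insns):

	dispatch_label:
	  dispatch = fc.call_site;
	  exc_ptr  = fc.data[0];
	  filter   = fc.data[1];
	  if (dispatch == dispatch_index_1) goto post_landing_pad_1;
	  if (dispatch == dispatch_index_2) goto post_landing_pad_2;
	  ...
	  fall through to the first reachable region's post landing pad  */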
2415 sjlj_build_landing_pads ()
2417 struct sjlj_lp_info *lp_info;
2419 lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
2420 sizeof (struct sjlj_lp_info));
2422 if (sjlj_find_directly_reachable_regions (lp_info))
2424 rtx dispatch_label = gen_label_rtx ();
2427 = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
2428 int_size_in_bytes (sjlj_fc_type_node),
2429 TYPE_ALIGN (sjlj_fc_type_node));
2431 sjlj_assign_call_site_values (dispatch_label, lp_info);
2432 sjlj_mark_call_sites (lp_info);
2434 sjlj_emit_function_enter (dispatch_label);
2435 sjlj_emit_dispatch_table (dispatch_label, lp_info);
2436 sjlj_emit_function_exit ();
void
finish_eh_generation ()
{
  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  rebuild_jump_labels (get_insns ());
  find_basic_blocks (get_insns (), max_reg_num (), 0);
  cleanup_cfg (CLEANUP_PRE_LOOP);

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  rebuild_jump_labels (get_insns ());
  find_basic_blocks (get_insns (), max_reg_num (), 0);
  cleanup_cfg (CLEANUP_PRE_LOOP);
}
/* This section handles removing dead code for flow.  */

/* Remove LABEL from the exception_handler_labels list.  */

static void
remove_exception_handler_label (label)
     rtx label;
{
  rtx *pl, l;

  /* If exception_handler_labels was not built yet,
     there is nothing to do.  */
  if (exception_handler_labels == NULL)
    return;

  for (pl = &exception_handler_labels, l = *pl;
       XEXP (l, 0) != label;
       pl = &XEXP (l, 1), l = *pl)
    continue;

  *pl = XEXP (l, 1);
  free_EXPR_LIST_node (l);
}
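/* The loop above is the classic pointer-to-pointer unlink idiom: PL
   always holds the address of the link that points at the current
   node, so the matching node can be spliced out without special
   casing the list head.  A minimal standalone sketch of the same
   idiom (illustrative only; the node type is hypothetical):  */
#if 0
struct node { int key; struct node *next; };

static void
unlink_node (struct node **head, int key)
{
  struct node **pl, *l;

  for (pl = head, l = *pl; l->key != key; pl = &l->next, l = *pl)
    continue;

  *pl = l->next;	/* Splice L out; works even when L is *HEAD.  */
}
#endif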
/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (region)
     struct eh_region *region;
{
  struct eh_region **pp, *p;
  rtx lab;
  int i;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have to
     search the whole thing.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (cfun->eh->region_array[i] == region)
      cfun->eh->region_array[i] = region->outer;

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (region->outer)
    pp = &region->outer->inner;
  else
    pp = &cfun->eh->region_tree;
  for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  if (region->inner)
    {
      for (p = region->inner; p->next_peer ; p = p->next_peer)
	p->outer = region->outer;
      p->outer = region->outer;
      p->next_peer = region->next_peer;
      *pp = region->inner;
    }
  else
    *pp = region->next_peer;

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
	   try->type == ERT_CATCH;
	   try = try->next_peer)
	continue;
      if (try->type != ERT_TRY)
	abort ();

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
	next->u.catch.prev_catch = prev;
      else
	try->u.try.last_catch = prev;
      if (prev)
	prev->u.catch.next_catch = next;
      else
	{
	  try->u.try.catch = next;
	  if (! next)
	    remove_eh_handler (try);
	}
    }

  free (region);
}
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (label)
     rtx label;
{
  int i;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      if (region && region->label == label)
	{
	  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
	     because there is no path to the fallback call to terminate.
	     But the region continues to affect call-site data until there
	     are no more contained calls, which we don't see here.  */
	  if (region->type == ERT_MUST_NOT_THROW)
	    {
	      remove_exception_handler_label (region->label);
	      region->label = NULL_RTX;
	    }
	  else
	    remove_eh_handler (region);
	  break;
	}
    }
}
/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  tree types_caught;
  tree types_allowed;
  rtx handlers;
};

/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

static int
check_handled (handled, type)
     tree handled, type;
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) == type)
	  return 1;
    }
  else
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
	  return 1;
    }

  return 0;
}
/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (info, lp_region, region)
     struct reachable_info *info;
     struct eh_region *lp_region;
     struct eh_region *region;
{
  if (! info)
    return;

  if (cfun->eh->built_landing_pads)
    {
      if (! info->handlers)
	info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
    }
  else
    info->handlers = alloc_INSN_LIST (region->label, info->handlers);
}
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (region, type_thrown, info)
     struct eh_region *region;
     tree type_thrown;
     struct reachable_info *info;
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
	 directly to the individual handlers.  In this way we can
	 see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
	struct eh_region *c;
	enum reachable_code ret = RNL_NOT_CAUGHT;

	for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	  {
	    /* A catch-all handler ends the search.  */
	    /* ??? _Unwind_ForcedUnwind will want outer cleanups
	       to be run as well.  */
	    if (c->u.catch.type_list == NULL)
	      {
		add_reachable_handler (info, region, c);
		return RNL_CAUGHT;
	      }

	    if (type_thrown)
	      {
		/* If we have at least one type match, end the search.  */
		tree tp_node = c->u.catch.type_list;

		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (type == type_thrown
			|| (lang_eh_type_covers
			    && (*lang_eh_type_covers) (type, type_thrown)))
		      {
			add_reachable_handler (info, region, c);
			return RNL_CAUGHT;
		      }
		  }

		/* If we have definitive information of a match failure,
		   the catch won't trigger.  */
		if (lang_eh_type_covers)
		  return RNL_NOT_CAUGHT;
	      }

	    /* At this point, we either don't know what type is thrown or
	       don't have front-end assistance to help deciding if it is
	       covered by one of the types in the list for this region.

	       We'd then like to add this region to the list of reachable
	       handlers since it is indeed potentially reachable based on the
	       information we have.

	       Actually, this handler is for sure not reachable if all the
	       types it matches have already been caught.  That is, it is only
	       potentially reachable if at least one of the types it catches
	       has not been previously caught.  */

	    if (! info)
	      ret = RNL_MAYBE_CAUGHT;
	    else
	      {
		tree tp_node = c->u.catch.type_list;
		bool maybe_reachable = false;

		/* Compute the potential reachability of this handler and
		   update the list of types caught at the same time.  */
		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (! check_handled (info->types_caught, type))
		      {
			info->types_caught
			  = tree_cons (NULL, type, info->types_caught);

			maybe_reachable = true;
		      }
		  }

		if (maybe_reachable)
		  {
		    add_reachable_handler (info, region, c);

		    /* ??? If the catch type is a base class of every allowed
		       type, then we know we can stop the search.  */
		    ret = RNL_MAYBE_CAUGHT;
		  }
	      }
	  }

	return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}

      /* Collect a list of lists of allowed types for use in detecting
	 when a catch may be transformed into a catch-all.  */
      if (info)
	info->types_allowed = tree_cons (NULL_TREE,
					 region->u.allowed.type_list,
					 info->types_allowed);

      /* If we have definitive information about the type hierarchy,
	 then we can tell if the thrown type will pass through the
	 filter.  */
      if (type_thrown && lang_eh_type_covers)
	{
	  if (check_handled (region->u.allowed.type_list, type_thrown))
	    return RNL_NOT_CAUGHT;
	  else
	    {
	      add_reachable_handler (info, region, region);
	      return RNL_CAUGHT;
	    }
	}

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
	 If we've touched down at some landing pad previous, then the
	 explicit function call we generated may be used.  Otherwise
	 the call is made by the runtime.  */
      if (info && info->handlers)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}
      else
	return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_FIXUP:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      break;
    }

  abort ();
}
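/* An illustrative consequence of the modeling above: for C++ source

	try { ... } catch (Base &) { ... } catch (Derived &) { ... }

   where Derived inherits from Base, check_handled via
   lang_eh_type_covers reports every type the second handler matches
   as already caught by the first, so the Derived handler is never
   added to the reachable set and can later be removed as dead code.  */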
/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  */

rtx
reachable_handlers (insn)
     rtx insn;
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;
  int region_number;

  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    region_number = XINT (PATTERN (insn), 0);
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  memset (&info, 0, sizeof (info));

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
	 region itself may have been deleted out from under us.  */
      if (region == NULL)
	return NULL;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  for (; region; region = region->outer)
    if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
      break;

  return info.handlers;
}
/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;

      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub ; sub = NEXT_INSN (sub))
	    if (can_throw_internal (sub))
	      return true;
	}
      return false;
    }

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
	return false;
      if (how != RNL_NOT_CAUGHT)
	return true;
    }

  return false;
}
/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;

      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub ; sub = NEXT_INSN (sub))
	    if (can_throw_external (sub))
	      return true;
	}
      return false;
    }

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
	 exception handling region in this function.  We have to
	 assume it might throw.  Given that the front end and middle
	 ends mark known NOTHROW functions, this isn't so wildly
	 inaccurate.  */
      return (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region ; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}
/* True if nothing in this function can throw outside this function.  */

bool
nothrow_function_p ()
{
  rtx insn;

  if (! flag_exceptions)
    return true;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      return false;
  for (insn = current_function_epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      return false;

  return true;
}
/* Various hooks for unwind library.  */

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init ()
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
rtx
expand_builtin_eh_return_data_regno (arglist)
     tree arglist;
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of `__builtin_eh_return_regno' must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
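/* Typical use, for reference: a language personality routine hands
   the unwinder the exception object and filter value with something
   like

	_Unwind_SetGR (context, __builtin_eh_return_data_regno (0),
		       (_Unwind_Ptr) exception_header);
	_Unwind_SetGR (context, __builtin_eh_return_data_regno (1),
		       handler_switch_value);

   (names illustrative), relying on the expansion above to fold the
   builtin to a constant, or to -1 if the target has no such
   register.  */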
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (addr) != Pmode)
    addr = convert_memory_address (Pmode, addr);
#endif

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (stackadj_tree, handler_tree)
     tree stackadj_tree, handler_tree;
{
  rtx stackadj, handler;

  stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (stackadj) != Pmode)
    stackadj = convert_memory_address (Pmode, stackadj);

  if (GET_MODE (handler) != Pmode)
    handler = convert_memory_address (Pmode, handler);
#endif

  if (! cfun->eh->ehr_label)
    {
      cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
      cfun->eh->ehr_handler = copy_to_reg (handler);
      cfun->eh->ehr_label = gen_label_rtx ();
    }
  else
    {
      if (stackadj != cfun->eh->ehr_stackadj)
	emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
      if (handler != cfun->eh->ehr_handler)
	emit_move_insn (cfun->eh->ehr_handler, handler);
    }

  emit_jump (cfun->eh->ehr_label);
}
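/* Typical use, for reference: the unwind library's install-context
   step ends with something like

	__builtin_eh_return (offset, handler);

   where OFFSET is the stack adjustment and HANDLER the landing pad
   address (names illustrative); the expansion above stashes both
   values and jumps to the common stub emitted by expand_eh_return
   below.  */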
void
expand_eh_return ()
{
  rtx sa, ra, around_label;

  if (! cfun->eh->ehr_label)
    return;

  sa = EH_RETURN_STACKADJ_RTX;
  if (! sa)
    {
      error ("__builtin_eh_return not supported on this target");
      return;
    }

  current_function_calls_eh_return = 1;

  around_label = gen_label_rtx ();
  emit_move_insn (sa, const0_rtx);
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
  else
#endif
    {
      ra = EH_RETURN_HANDLER_RTX;
      if (! ra)
	{
	  error ("__builtin_eh_return not supported on this target");
	  ra = gen_reg_rtx (Pmode);
	}

      emit_move_insn (sa, cfun->eh->ehr_stackadj);
      emit_move_insn (ra, cfun->eh->ehr_handler);
    }

  emit_label (around_label);
}
/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */
struct action_record
{
  int offset;
  int filter;
  int next;
};

static int
action_record_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}

static hashval_t
action_record_hash (pentry)
     const PTR pentry;
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}
static int
add_action_record (ar_hash, filter, next)
     htab_t ar_hash;
     int filter, next;
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = (struct action_record *) xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
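/* A worked example of the conversion above, with illustrative sizes:
   suppose three bytes of action data already exist.  A record with
   filter 1 and no next record is assigned 1-based offset 4; sleb128(1)
   lands at byte position 4 and sleb128(0) at position 5.  A second
   record with filter 2 chaining to the first is assigned offset 6; its
   next field sits at position 7 and stores the displacement 4 - 7 = -3,
   i.e. a self-relative reference back to the first record.  */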
static int
collect_one_action_chain (ar_hash, region)
     htab_t ar_hash;
     struct eh_region *region;
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c ; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
	{
	  if (c->u.catch.type_list == NULL)
	    {
	      /* Retrieve the filter from the head of the filter list
		 where we have stored it (see assign_filter_values).  */
	      int filter
		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

	      next = add_action_record (ar_hash, filter, 0);
	    }
	  else
	    {
	      /* Once the outer search is done, trigger an action record for
		 each filter we have.  */
	      tree flt_node;

	      if (next == -3)
		{
		  next = collect_one_action_chain (ar_hash, region->outer);

		  /* If there is no next action, terminate the chain.  */
		  if (next == -1)
		    next = 0;
		  /* If all outer actions are cleanups or must_not_throw,
		     we'll have no action record for it, since we had wanted
		     to encode these states in the call-site record directly.
		     Add a cleanup action to the chain to catch these.  */
		  else if (next <= 0)
		    next = add_action_record (ar_hash, 0, 0);
		}

	      flt_node = c->u.catch.filter_list;
	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		{
		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		  next = add_action_record (ar_hash, filter, next);
		}
	    }
	}
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      return add_action_record (ar_hash, region->u.allowed.filter,
				next < 0 ? 0 : next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      abort ();
    }
}
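/* An illustrative example: consider a try block with two handlers,

	try { ... } catch (A &) { ... } catch (B &) { ... }

   nested inside an outer cleanup (e.g. a local with a destructor).
   The catches are visited last-to-first above, so the chain comes out
   as record(filter_A) -> record(filter_B) -> record(0), where the
   trailing zero-filter record stands for the outer cleanup; at
   runtime the unwinder therefore tests A's filter first.  */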
static int
add_call_site (landing_pad, action)
     rtx landing_pad;
     int action;
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      size = (size ? size * 2 : 64);
      data = (struct call_site_record *)
	xrealloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

void
convert_to_eh_region_ranges ()
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	struct eh_region *region;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
	if (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	  {
	    if (! (GET_CODE (insn) == CALL_INSN
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	  }
	else
	  {
	    if (INTVAL (XEXP (note, 0)) <= 0)
	      continue;
	    region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	    this_action = collect_one_action_chain (ar_hash, region);
	  }

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  cfun->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	  {
	    struct eh_region *o;
	    for (o = region; ! o->landing_pad ; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	  }
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
static void
push_uleb128 (data_area, value)
     varray_type *data_area;
     unsigned int value;
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

static void
push_sleb128 (data_area, value)
     varray_type *data_area;
     int value;
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
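/* Worked examples of the encodings above.  uleb128: 624485 (0x98765)
   is emitted low-order seven bits first as 0xE5 0x8E 0x26, every byte
   but the last carrying the 0x80 continuation bit.  sleb128: -2 is the
   single byte 0x7E, since after one arithmetic shift VALUE is -1 and
   bit 0x40 (the sign bit of the emitted byte) is already set, so MORE
   goes false immediately.  */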
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static int
sjlj_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
static void
dw2_output_call_site_table ()
{
  const char *const function_start_lab
    = IDENTIFIER_POINTER (current_function_func_begin_label);
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
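/* For reference, on a target whose assembler supports leb128, each
   entry emitted above comes out roughly as

	.uleb128 .LEHB0-.LFB1		# region 0 start
	.uleb128 .LEHE0-.LEHB0		# length
	.uleb128 .L5-.LFB1		# landing pad
	.uleb128 0x1			# action

   (label names and comment syntax are illustrative and vary by
   target); without leb128 support the first three fields are emitted
   as 4-byte data instead.  */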
static void
sjlj_output_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
				   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
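/* For orientation, the LSDA emitted by output_function_exception_table
   below is laid out approximately as:

	@LPStart format byte		(DW_EH_PE_omit here; @LPStart == @Start)
	@TType format byte
	@TType base offset (uleb128)	(only when type data exists)
	call-site format byte
	call-site table length (uleb128)
	call-site table
	action record table
	@TType (runtime type) table	(aligned; indexed backwards)
	exception specification table

   This mirrors the emission order in the code below; consult the
   exception handling ABI documents for the normative layout.  */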
void
output_function_exception_table ()
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int funcdef_number;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

  funcdef_number = (USING_SJLJ_EXCEPTIONS
		    ? sjlj_funcdef_number
		    : current_funcdef_number);

#ifdef IA64_UNWIND_INFO
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  (*targetm.asm_out.exception_section) ();
#endif

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   funcdef_number);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);
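      /* A worked example of the iteration above: with tt_format_size 4,
	 before_disp 2 and after_disp 100, the first pass computes
	 disp_size 1 and 2 + 1 + 100 = 103 bytes, which is 3 bytes past
	 a 4-byte boundary, so pad becomes 1 and disp 101; the second
	 pass recomputes disp 101, the fixpoint, and the loop exits.  */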
      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       funcdef_number);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       funcdef_number);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
	type = integer_zero_node;
      else
	type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
	assemble_integer (value, tt_format_size,
			  tt_format_size * BITS_PER_UNIT, 1);
      else
	dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			 (i ? NULL : "Exception specification table"));

  function_section (current_function_decl);

  if (USING_SJLJ_EXCEPTIONS)
    sjlj_funcdef_number += 1;
}