/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.

   [ Add updated documentation on how to use this.  ]  */
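
/* As an illustration (not part of the original commentary), the C++
   source-level view of the mechanism described above:

     struct error {};
     void f () { throw error (); }      // signal the event
     void g ()
     {
       try { f (); }
       catch (error) { }                // control lands here, in the caller
     }
*/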
#include "insn-config.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "langhooks.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */

#ifndef EH_RETURN_STACKADJ_RTX
#define EH_RETURN_STACKADJ_RTX 0
#endif

#ifndef EH_RETURN_HANDLER_RTX
#define EH_RETURN_HANDLER_RTX 0
#endif

#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
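
/* For illustration only (a hypothetical port, not taken from any real
   target header): a target that passes the two exception data values
   in hard registers 0 and 1 and reserves register 2 for the stack
   adjustment would override the defaults above with something like:

     #define EH_RETURN_DATA_REGNO(N)  ((N) < 2 ? (N) : INVALID_REGNUM)
     #define EH_RETURN_STACKADJ_RTX   gen_rtx_REG (Pmode, 2)
*/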
/* Nonzero means enable synchronous exceptions for non-call instructions.  */
int flag_non_call_exceptions;

/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
tree (*lang_protect_cleanup_actions) PARAMS ((void));

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) PARAMS ((tree a, tree b));

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) PARAMS ((tree));

/* A list of labels used for exception handlers.  */
rtx exception_handler_labels;

static int call_site_base;
static unsigned int sjlj_funcdef_number;
static htab_t type_to_runtime_map;

/* Describe the SjLj_Function_Context structure.  */
static tree sjlj_fc_type_node;
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW,
    ERT_FIXUP
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *catch;
      struct eh_region *last_catch;
      struct eh_region *prev_try;
      rtx continue_label;
    } try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      tree exp;
    } cleanup;

    /* The real region (by expression and by pointer) that fixup code
       should live in.  */
    struct eh_region_u_fixup {
      tree cleanup_exp;
      struct eh_region *real_region;
    } fixup;
  } u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;
};

/* Used to save exception status for each function.  */
struct eh_status
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  struct eh_region **region_array;

  /* The most recently open region.  */
  struct eh_region *cur_region;

  /* This is the region for which we are processing catch blocks.  */
  struct eh_region *try_region;

  /* A stack (TREE_LIST) of lists of handlers.  The TREE_VALUE of each
     node is itself a TREE_CHAINed list of handlers for regions that
     are not yet closed.  The TREE_VALUE of each entry contains the
     handler for the corresponding entry on the ehstack.  */
  tree protect_list;

  rtx filter;
  rtx exc_ptr;

  int built_landing_pads;
  int last_region_number;

  varray_type ttype_data;
  varray_type ehspec_data;
  varray_type action_record_data;

  struct call_site_record
  {
    rtx landing_pad;
    int action;
  } *call_site_data;
  int call_site_data_used;
  int call_site_data_size;

  rtx ehr_stackadj;
  rtx ehr_handler;
  rtx ehr_label;

  rtx sjlj_fc;
  rtx sjlj_exit_after;
};
static void mark_eh_region			PARAMS ((struct eh_region *));

static int t2r_eq				PARAMS ((const PTR,
							 const PTR));
static hashval_t t2r_hash			PARAMS ((const PTR));
static int t2r_mark_1				PARAMS ((PTR *, PTR));
static void t2r_mark				PARAMS ((PTR));
static void add_type_for_runtime		PARAMS ((tree));
static tree lookup_type_for_runtime		PARAMS ((tree));

static struct eh_region *expand_eh_region_end	PARAMS ((void));

static rtx get_exception_filter		PARAMS ((struct function *));

static void collect_eh_region_array		PARAMS ((void));
static void resolve_fixup_regions		PARAMS ((void));
static void remove_fixup_regions		PARAMS ((void));
static void remove_unreachable_regions		PARAMS ((rtx));
static void convert_from_eh_region_ranges_1	PARAMS ((rtx *, int *, int));

static struct eh_region *duplicate_eh_region_1	PARAMS ((struct eh_region *,
						     struct inline_remap *));
static void duplicate_eh_region_2		PARAMS ((struct eh_region *,
						     struct eh_region **));
static int ttypes_filter_eq			PARAMS ((const PTR,
							 const PTR));
static hashval_t ttypes_filter_hash		PARAMS ((const PTR));
static int ehspec_filter_eq			PARAMS ((const PTR,
							 const PTR));
static hashval_t ehspec_filter_hash		PARAMS ((const PTR));
static int add_ttypes_entry			PARAMS ((htab_t, tree));
static int add_ehspec_entry			PARAMS ((htab_t, htab_t,
							 tree));
static void assign_filter_values		PARAMS ((void));
static void build_post_landing_pads		PARAMS ((void));
static void connect_post_landing_pads		PARAMS ((void));
static void dw2_build_landing_pads		PARAMS ((void));

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_assign_call_site_values
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_mark_call_sites
     PARAMS ((struct sjlj_lp_info *));
static void sjlj_emit_function_enter		PARAMS ((rtx));
static void sjlj_emit_function_exit		PARAMS ((void));
static void sjlj_emit_dispatch_table
     PARAMS ((rtx, struct sjlj_lp_info *));
static void sjlj_build_landing_pads		PARAMS ((void));

static void remove_exception_handler_label	PARAMS ((rtx));
static void remove_eh_handler			PARAMS ((struct eh_region *));
struct reachable_info;

/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};
static int check_handled			PARAMS ((tree, tree));
static void add_reachable_handler
     PARAMS ((struct reachable_info *, struct eh_region *,
	      struct eh_region *));
static enum reachable_code reachable_next_level
     PARAMS ((struct eh_region *, tree, struct reachable_info *));

static int action_record_eq			PARAMS ((const PTR,
							 const PTR));
static hashval_t action_record_hash		PARAMS ((const PTR));
static int add_action_record			PARAMS ((htab_t, int, int));
static int collect_one_action_chain		PARAMS ((htab_t,
							 struct eh_region *));
static int add_call_site			PARAMS ((rtx, int));

static void push_uleb128			PARAMS ((varray_type *,
							 unsigned int));
static void push_sleb128			PARAMS ((varray_type *, int));
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table		PARAMS ((void));
static int sjlj_size_of_call_site_table	PARAMS ((void));
#endif
static void dw2_output_call_site_table		PARAMS ((void));
static void sjlj_output_call_site_table	PARAMS ((void));
/* Routine to see if exception handling is turned on.
   DO_WARN is non-zero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (do_warn)
     int do_warn;
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
void
init_eh ()
{
  ggc_add_rtx_root (&exception_handler_labels, 1);

  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create (31, t2r_hash, t2r_eq, NULL);
  ggc_add_root (&type_to_runtime_map, 1, sizeof (htab_t), t2r_mark);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = (*lang_hooks.types.make_type) (RECORD_TYPE);
      ggc_add_tree_root (&sjlj_fc_type_node, 1);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_2 (4 - 1, 0));
      tmp = build_array_type ((*lang_hooks.types.type_for_mode) (word_mode, 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_2 (JMP_BUF_SIZE - 1, 0);
#else
      /* Should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_2 (FIRST_PSEUDO_REGISTER + 2 - 1, 0);
#endif
#else
      /* This is 2 for builtin_setjmp, plus whatever the target requires
	 via STACK_SAVEAREA_MODE (SAVE_NONLOCAL).  */
      tmp = build_int_2 ((GET_MODE_SIZE (STACK_SAVEAREA_MODE (SAVE_NONLOCAL))
			  / GET_MODE_SIZE (Pmode)) + 2 - 1, 0);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
	 jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
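
/* For reference (a sketch only, not taken verbatim from unwind-sjlj.c):
   the record built above must agree with the runtime's declaration,
   which looks roughly like the following.  The element types and the
   jbuf size here are assumptions for illustration; they are
   target-dependent in reality:

     struct SjLj_Function_Context
     {
       struct SjLj_Function_Context *prev;   // __prev
       int call_site;                        // __call_site
       unsigned long data[4];                // __data
       void *personality;                    // __personality
       void *lsda;                           // __lsda
       void *jbuf[];                         // __jbuf, target-sized
     };
*/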
void
init_eh_for_function ()
{
  cfun->eh = (struct eh_status *) xcalloc (1, sizeof (struct eh_status));
}
/* Mark EH for GC.  */

static void
mark_eh_region (region)
     struct eh_region *region;
{
  if (! region)
    return;

  switch (region->type)
    {
    case ERT_UNKNOWN:
      /* This can happen if a nested function is inside the body of a region
	 and we do a GC as part of processing it.  */
      break;
    case ERT_CLEANUP:
      ggc_mark_tree (region->u.cleanup.exp);
      break;
    case ERT_TRY:
      ggc_mark_rtx (region->u.try.continue_label);
      break;
    case ERT_CATCH:
      ggc_mark_tree (region->u.catch.type_list);
      ggc_mark_tree (region->u.catch.filter_list);
      break;
    case ERT_ALLOWED_EXCEPTIONS:
      ggc_mark_tree (region->u.allowed.type_list);
      break;
    case ERT_MUST_NOT_THROW:
      break;
    case ERT_THROW:
      ggc_mark_tree (region->u.throw.type);
      break;
    case ERT_FIXUP:
      ggc_mark_tree (region->u.fixup.cleanup_exp);
      break;
    default:
      abort ();
    }

  ggc_mark_rtx (region->label);
  ggc_mark_rtx (region->resume);
  ggc_mark_rtx (region->landing_pad);
  ggc_mark_rtx (region->post_landing_pad);
}
void
mark_eh_status (eh)
     struct eh_status *eh;
{
  int i;

  if (eh == 0)
    return;

  /* If we've called collect_eh_region_array, use it.  Otherwise walk
     the tree non-recursively.  */
  if (eh->region_array)
    {
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  if (r && r->region_number == i)
	    mark_eh_region (r);
	}
    }
  else if (eh->region_tree)
    {
      struct eh_region *r = eh->region_tree;
      while (1)
	{
	  mark_eh_region (r);
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    r = r->next_peer;
	  else
	    {
	      do {
		r = r->outer;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      r = r->next_peer;
	    }
	}
    tree_done:;
    }

  ggc_mark_tree (eh->protect_list);
  ggc_mark_rtx (eh->filter);
  ggc_mark_rtx (eh->exc_ptr);
  ggc_mark_tree_varray (eh->ttype_data);

  if (eh->call_site_data)
    {
      for (i = eh->call_site_data_used - 1; i >= 0; --i)
	ggc_mark_rtx (eh->call_site_data[i].landing_pad);
    }

  ggc_mark_rtx (eh->ehr_stackadj);
  ggc_mark_rtx (eh->ehr_handler);
  ggc_mark_rtx (eh->ehr_label);

  ggc_mark_rtx (eh->sjlj_fc);
  ggc_mark_rtx (eh->sjlj_exit_after);
}
void
free_eh_status (f)
     struct function *f;
{
  struct eh_status *eh = f->eh;

  if (eh->region_array)
    {
      int i;
      for (i = eh->last_region_number; i > 0; --i)
	{
	  struct eh_region *r = eh->region_array[i];
	  /* Mind we don't free a region struct more than once.  */
	  if (r && r->region_number == i)
	    free (r);
	}
      free (eh->region_array);
    }
  else if (eh->region_tree)
    {
      struct eh_region *next, *r = eh->region_tree;
      while (1)
	{
	  if (r->inner)
	    r = r->inner;
	  else if (r->next_peer)
	    {
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	  else
	    {
	      do {
		next = r->outer;
		free (r);
		r = next;
		if (r == NULL)
		  goto tree_done;
	      } while (r->next_peer == NULL);
	      next = r->next_peer;
	      free (r);
	      r = next;
	    }
	}
    tree_done:;
    }

  VARRAY_FREE (eh->ttype_data);
  VARRAY_FREE (eh->ehspec_data);
  VARRAY_FREE (eh->action_record_data);
  if (eh->call_site_data)
    free (eh->call_site_data);

  free (eh);
  f->eh = NULL;
  exception_handler_labels = NULL;
}
/* Start an exception handling region.  All instructions emitted
   after this point are considered to be part of the region until
   expand_eh_region_end is invoked.  */

void
expand_eh_region_start ()
{
  struct eh_region *new_region;
  struct eh_region *cur_region;
  rtx note;

  if (! doing_eh (0))
    return;

  /* Insert a new blank region as a leaf in the tree.  */
  new_region = (struct eh_region *) xcalloc (1, sizeof (*new_region));
  cur_region = cfun->eh->cur_region;
  new_region->outer = cur_region;
  if (cur_region)
    {
      new_region->next_peer = cur_region->inner;
      cur_region->inner = new_region;
    }
  else
    {
      new_region->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_region;
    }
  cfun->eh->cur_region = new_region;

  /* Create a note marking the start of this region.  */
  new_region->region_number = ++cfun->eh->last_region_number;
  note = emit_note (NULL, NOTE_INSN_EH_REGION_BEG);
  NOTE_EH_HANDLER (note) = new_region->region_number;
}
/* Common code to end a region.  Returns the region just ended.  */

static struct eh_region *
expand_eh_region_end ()
{
  struct eh_region *cur_region = cfun->eh->cur_region;
  rtx note;

  /* Create a note marking the end of this region.  */
  note = emit_note (NULL, NOTE_INSN_EH_REGION_END);
  NOTE_EH_HANDLER (note) = cur_region->region_number;

  /* Pop.  */
  cfun->eh->cur_region = cur_region->outer;

  return cur_region;
}
/* End an exception handling region for a cleanup.  HANDLER is an
   expression to expand for the cleanup.  */

void
expand_eh_region_end_cleanup (handler)
     tree handler;
{
  struct eh_region *region;
  tree protect_cleanup_actions;
  rtx around_label;
  rtx data_save[2];

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_CLEANUP;
  region->label = gen_label_rtx ();
  region->u.cleanup.exp = handler;

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);

  /* Give the language a chance to specify an action to be taken if an
     exception is thrown that would propagate out of the HANDLER.  */
  protect_cleanup_actions
    = (lang_protect_cleanup_actions
       ? (*lang_protect_cleanup_actions) ()
       : NULL_TREE);

  if (protect_cleanup_actions)
    expand_eh_region_start ();

  /* In case this cleanup involves an inline destructor with a try block in
     it, we need to save the EH return data registers around it.  */
  data_save[0] = gen_reg_rtx (Pmode);
  emit_move_insn (data_save[0], get_exception_pointer (cfun));
  data_save[1] = gen_reg_rtx (word_mode);
  emit_move_insn (data_save[1], get_exception_filter (cfun));

  expand_expr (handler, const0_rtx, VOIDmode, 0);

  emit_move_insn (cfun->eh->exc_ptr, data_save[0]);
  emit_move_insn (cfun->eh->filter, data_save[1]);

  if (protect_cleanup_actions)
    expand_eh_region_end_must_not_throw (protect_cleanup_actions);

  /* We need any stack adjustment complete before the around_label.  */
  do_pending_stack_adjust ();

  /* We delay the generation of the _Unwind_Resume until we generate
     landing pads.  We emit a marker here so as to get good control
     flow data in the meantime.  */
  region->resume
    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
  emit_barrier ();

  emit_label (around_label);
}
/* End an exception handling region for a try block, and prepare
   for subsequent calls to expand_start_catch.  */

void
expand_start_all_catch ()
{
  struct eh_region *region;

  if (! doing_eh (1))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_TRY;
  region->u.try.prev_try = cfun->eh->try_region;
  region->u.try.continue_label = gen_label_rtx ();

  cfun->eh->try_region = region;

  emit_jump (region->u.try.continue_label);
}
/* Begin a catch clause.  TYPE is the type caught, a list of such types, or
   null if this is a catch-all clause.  Providing a type list makes it
   possible to associate the catch region with several exception types,
   which is useful e.g. for Ada.  */

void
expand_start_catch (type_or_list)
     tree type_or_list;
{
  struct eh_region *t, *c, *l;
  tree type_list;

  if (! doing_eh (0))
    return;

  type_list = type_or_list;

  if (type_or_list)
    {
      /* Ensure to always end up with a type list to normalize further
	 processing, then register each type against the runtime types
	 map.  */
      tree type_node;

      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  expand_eh_region_start ();

  t = cfun->eh->try_region;
  c = cfun->eh->cur_region;
  c->type = ERT_CATCH;
  c->u.catch.type_list = type_list;
  c->label = gen_label_rtx ();

  l = t->u.try.last_catch;
  c->u.catch.prev_catch = l;
  if (l)
    l->u.catch.next_catch = c;
  else
    t->u.try.catch = c;
  t->u.try.last_catch = c;

  emit_label (c->label);
}
/* End a catch clause.  Control will resume after the try/catch block.  */

void
expand_end_catch ()
{
  struct eh_region *try_region, *catch_region;

  if (! doing_eh (0))
    return;

  catch_region = expand_eh_region_end ();
  try_region = cfun->eh->try_region;

  emit_jump (try_region->u.try.continue_label);
}
/* End a sequence of catch handlers for a try block.  */

void
expand_end_all_catch ()
{
  struct eh_region *try_region;

  if (! doing_eh (0))
    return;

  try_region = cfun->eh->try_region;
  cfun->eh->try_region = try_region->u.try.prev_try;

  emit_label (try_region->u.try.continue_label);
}
/* End an exception region for an exception type filter.  ALLOWED is a
   TREE_LIST of types to be matched by the runtime.  FAILURE is an
   expression to invoke if a mismatch occurs.

   ??? We could use these semantics for calls to rethrow, too; if we can
   see the surrounding catch clause, we know that the exception we're
   rethrowing satisfies the "filter" of the catch type.  */

void
expand_eh_region_end_allowed (allowed, failure)
     tree allowed, failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_ALLOWED_EXCEPTIONS;
  region->u.allowed.type_list = allowed;
  region->label = gen_label_rtx ();

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);
  /* We must adjust the stack before we reach the AROUND_LABEL because
     the call to FAILURE does not occur on all paths to the
     AROUND_LABEL.  */
  do_pending_stack_adjust ();

  emit_label (around_label);
}
/* End an exception region for a must-not-throw filter.  FAILURE is an
   expression to invoke if an uncaught exception propagates this far.

   This is conceptually identical to expand_eh_region_end_allowed with
   an empty allowed list (if you passed "std::terminate" instead of
   "__cxa_call_unexpected"), but they are represented differently in
   the C++ LSDA.  */

void
expand_eh_region_end_must_not_throw (failure)
     tree failure;
{
  struct eh_region *region;
  rtx around_label;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_MUST_NOT_THROW;
  region->label = gen_label_rtx ();

  /* We must emit the call to FAILURE here, so that if this function
     throws a different exception, it will be processed by the
     correct region.  */

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (region->label);
  expand_expr (failure, const0_rtx, VOIDmode, EXPAND_NORMAL);

  emit_label (around_label);
}
/* End an exception region for a throw.  No handling goes on here,
   but it's the easiest way for the front-end to indicate what type
   is being thrown.  */

void
expand_eh_region_end_throw (type)
     tree type;
{
  struct eh_region *region;

  if (! doing_eh (0))
    return;

  region = expand_eh_region_end ();
  region->type = ERT_THROW;
  region->u.throw.type = type;
}
/* End a fixup region.  Within this region the cleanups for the immediately
   enclosing region are _not_ run.  This is used for goto cleanup to avoid
   destroying an object twice.

   This would be an extraordinarily simple prospect, were it not for the
   fact that we don't actually know what the immediately enclosing region
   is.  This surprising fact is because expand_cleanups is currently
   generating a sequence that it will insert somewhere else.  We collect
   the proper notion of "enclosing" in convert_from_eh_region_ranges.  */

void
expand_eh_region_end_fixup (handler)
     tree handler;
{
  struct eh_region *fixup;

  if (! doing_eh (0))
    return;

  fixup = expand_eh_region_end ();
  fixup->type = ERT_FIXUP;
  fixup->u.fixup.cleanup_exp = handler;
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (fun)
     struct function *fun;
{
  rtx exc_ptr = fun->eh->exc_ptr;
  if (fun == cfun && ! exc_ptr)
    {
      exc_ptr = gen_reg_rtx (Pmode);
      fun->eh->exc_ptr = exc_ptr;
    }
  return exc_ptr;
}
/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

static rtx
get_exception_filter (fun)
     struct function *fun;
{
  rtx filter = fun->eh->filter;
  if (fun == cfun && ! filter)
    {
      filter = gen_reg_rtx (word_mode);
      fun->eh->filter = filter;
    }
  return filter;
}
/* Begin a region that will contain entries created with
   add_partial_entry.  */

void
begin_protect_partials ()
{
  /* Push room for a new list.  */
  cfun->eh->protect_list
    = tree_cons (NULL_TREE, NULL_TREE, cfun->eh->protect_list);
}
/* Start a new exception region for a region of code that has a
   cleanup action and push the HANDLER for the region onto
   protect_list.  All of the regions created with add_partial_entry
   will be ended when end_protect_partials is invoked.

   ??? The only difference between this purpose and that of
   expand_decl_cleanup is that in this case, we only want the cleanup to
   run if an exception is thrown.  This should also be handled using
   binding levels.  */

void
add_partial_entry (handler)
     tree handler;
{
  expand_eh_region_start ();

  /* Add this entry to the front of the list.  */
  TREE_VALUE (cfun->eh->protect_list)
    = tree_cons (NULL_TREE, handler, TREE_VALUE (cfun->eh->protect_list));
}
/* End all the pending exception regions on protect_list.  */

void
end_protect_partials ()
{
  tree t;

  /* Pop the topmost entry.  */
  t = TREE_VALUE (cfun->eh->protect_list);
  cfun->eh->protect_list = TREE_CHAIN (cfun->eh->protect_list);

  /* End all the exception regions.  */
  for (; t; t = TREE_CHAIN (t))
    expand_eh_region_end_cleanup (TREE_VALUE (t));
}
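
/* A sketch of how a front end is expected to drive the region-building
   entry points above for a source-level try/catch (a hand-written
   illustration based on the functions in this file, not actual
   front-end code):

     expand_eh_region_start ();
     ... expand the body of the try block ...
     expand_start_all_catch ();
       expand_start_catch (type);
       ... expand the body of the handler ...
       expand_end_catch ();
     expand_end_all_catch ();

   Each start/end pair nests a region in the tree rooted at
   cfun->eh->region_tree.  */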
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  It's just as simple to
   collect the regions this way as in expand_eh_region_start, but
   without having to realloc memory.  */

static void
collect_eh_region_array ()
{
  struct eh_region **array, *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  array = xcalloc (cfun->eh->last_region_number + 1, sizeof (*array));
  cfun->eh->region_array = array;

  while (1)
    {
      array[i->region_number] = i;

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
static void
resolve_fixup_regions ()
{
  int i, j, n = cfun->eh->last_region_number;

  for (i = 1; i <= n; ++i)
    {
      struct eh_region *fixup = cfun->eh->region_array[i];
      struct eh_region *cleanup = 0;

      if (! fixup || fixup->type != ERT_FIXUP)
	continue;

      for (j = 1; j <= n; ++j)
	{
	  cleanup = cfun->eh->region_array[j];
	  if (cleanup->type == ERT_CLEANUP
	      && cleanup->u.cleanup.exp == fixup->u.fixup.cleanup_exp)
	    break;
	}
      if (j > n)
	abort ();

      fixup->u.fixup.real_region = cleanup->outer;
    }
}
/* Now that we've discovered what region actually encloses a fixup,
   we can shuffle pointers and remove them from the tree.  */

static void
remove_fixup_regions ()
{
  int i;
  rtx insn, note;
  struct eh_region *fixup;

  /* Walk the insn chain and adjust the REG_EH_REGION numbers
     for instructions referencing fixup regions.  This is only
     strictly necessary for fixup regions with no parent, but
     doesn't hurt to do it for all regions.  */
  for (insn = get_insns(); insn ; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (note = find_reg_note (insn, REG_EH_REGION, NULL))
	&& INTVAL (XEXP (note, 0)) > 0
	&& (fixup = cfun->eh->region_array[INTVAL (XEXP (note, 0))])
	&& fixup->type == ERT_FIXUP)
      {
	if (fixup->u.fixup.real_region)
	  XEXP (note, 0) = GEN_INT (fixup->u.fixup.real_region->region_number);
	else
	  remove_note (insn, note);
      }

  /* Remove the fixup regions from the tree.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      fixup = cfun->eh->region_array[i];
      if (! fixup)
	continue;

      /* Allow GC to maybe free some memory.  */
      if (fixup->type == ERT_CLEANUP)
	fixup->u.cleanup.exp = NULL_TREE;

      if (fixup->type != ERT_FIXUP)
	continue;

      if (fixup->inner)
	{
	  struct eh_region *parent, *p, **pp;

	  parent = fixup->u.fixup.real_region;

	  /* Fix up the children's parent pointers; find the end of
	     the list.  */
	  for (p = fixup->inner; ; p = p->next_peer)
	    {
	      p->outer = parent;
	      if (! p->next_peer)
		break;
	    }

	  /* In the tree of cleanups, only outer-inner ordering matters.
	     So link the children back in anywhere at the correct level.  */
	  if (parent)
	    pp = &parent->inner;
	  else
	    pp = &cfun->eh->region_tree;
	  p->next_peer = *pp;
	  *pp = fixup->inner;
	  fixup->inner = NULL;
	}

      remove_eh_handler (fixup);
    }
}
/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (insns)
     rtx insns;
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = xcalloc (get_max_uid (), sizeof(int));
  reachable = xcalloc (cfun->eh->last_region_number + 1, sizeof(bool));

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  if (uid_region_num[INSN_UID (r->resume)])
	    abort ();
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  if (uid_region_num[INSN_UID (r->label)])
	    abort ();
	  uid_region_num[INSN_UID (r->label)] = i;
	}
      if (r->type == ERT_TRY && r->u.try.continue_label)
	{
	  if (uid_region_num[INSN_UID (r->u.try.continue_label)])
	    abort ();
	  uid_region_num[INSN_UID (r->u.try.continue_label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = cfun->eh->region_array[i];
      if (r && r->region_number == i && !reachable[i])
	{
	  /* Don't remove ERT_THROW regions if their outer region
	     is reachable.  */
	  if (r->type == ERT_THROW
	      && r->outer
	      && reachable[r->outer->region_number])
	    continue;

	  remove_eh_handler (r);
	}
    }

  free (reachable);
  free (uid_region_num);
}
/* Turn NOTE_INSN_EH_REGION notes into REG_EH_REGION notes for each
   can_throw instruction in the region.  */

static void
convert_from_eh_region_ranges_1 (pinsns, orig_sp, cur)
     rtx *pinsns;
     int *orig_sp;
     int cur;
{
  int *sp = orig_sp;
  rtx insn, next;

  for (insn = *pinsns; insn ; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	{
	  int kind = NOTE_LINE_NUMBER (insn);
	  if (kind == NOTE_INSN_EH_REGION_BEG
	      || kind == NOTE_INSN_EH_REGION_END)
	    {
	      if (kind == NOTE_INSN_EH_REGION_BEG)
		{
		  struct eh_region *r;

		  *sp++ = cur;
		  cur = NOTE_EH_HANDLER (insn);

		  r = cfun->eh->region_array[cur];
		  if (r->type == ERT_FIXUP)
		    {
		      r = r->u.fixup.real_region;
		      cur = r ? r->region_number : 0;
		    }
		  else if (r->type == ERT_CATCH)
		    {
		      r = r->outer;
		      cur = r ? r->region_number : 0;
		    }
		}
	      else
		cur = *--sp;

	      /* Removing the first insn of a CALL_PLACEHOLDER sequence
		 requires extra care to adjust sequence start.  */
	      if (insn == *pinsns)
		*pinsns = next;
	      remove_insn (insn);
	      continue;
	    }
	}
      else if (INSN_P (insn))
	{
	  if (cur > 0
	      && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* Calls can always potentially throw exceptions, unless
		 they have a REG_EH_REGION note with a value of 0 or less.
		 Which should be the only possible kind so far.  */
	      && (GET_CODE (insn) == CALL_INSN
		  /* If we wanted exceptions for non-call insns, then
		     any may_trap_p instruction could throw.  */
		  || (flag_non_call_exceptions
		      && GET_CODE (PATTERN (insn)) != CLOBBER
		      && GET_CODE (PATTERN (insn)) != USE
		      && may_trap_p (PATTERN (insn)))))
	    {
	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (cur),
						  REG_NOTES (insn));
	    }

	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	    {
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 0),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 1),
					       sp, cur);
	      convert_from_eh_region_ranges_1 (&XEXP (PATTERN (insn), 2),
					       sp, cur);
	    }
	}
    }

  if (sp != orig_sp)
    abort ();
}
void
convert_from_eh_region_ranges ()
{
  int *stack;
  rtx insns;

  collect_eh_region_array ();
  resolve_fixup_regions ();

  stack = xmalloc (sizeof (int) * (cfun->eh->last_region_number + 1));
  insns = get_insns ();
  convert_from_eh_region_ranges_1 (&insns, stack, 0);
  free (stack);

  remove_fixup_regions ();
  remove_unreachable_regions (insns);
}
void
find_exception_handler_labels ()
{
  rtx list = NULL_RTX;
  int i;

  free_EXPR_LIST_list (&exception_handler_labels);

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx lab;

      if (! region || region->region_number != i)
	continue;
      if (cfun->eh->built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	list = alloc_EXPR_LIST (0, lab, list);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! cfun->eh->built_landing_pads)
    list = alloc_EXPR_LIST (0, return_label, list);

  exception_handler_labels = list;
}
bool
current_function_has_exception_handlers ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];

      if (! region || region->region_number != i)
	continue;
      if (region->type != ERT_THROW)
	return true;
    }

  return false;
}
static struct eh_region *
duplicate_eh_region_1 (o, map)
     struct eh_region *o;
     struct inline_remap *map;
{
  struct eh_region *n
    = (struct eh_region *) xcalloc (1, sizeof (struct eh_region));

  n->region_number = o->region_number + cfun->eh->last_region_number;
  n->type = o->type;

  switch (n->type)
    {
    case ERT_CLEANUP:
    case ERT_MUST_NOT_THROW:
      break;

    case ERT_TRY:
      if (o->u.try.continue_label)
	n->u.try.continue_label
	  = get_label_from_map (map,
				CODE_LABEL_NUMBER (o->u.try.continue_label));
      break;

    case ERT_CATCH:
      n->u.catch.type_list = o->u.catch.type_list;
      break;

    case ERT_ALLOWED_EXCEPTIONS:
      n->u.allowed.type_list = o->u.allowed.type_list;
      break;

    case ERT_THROW:
      n->u.throw.type = o->u.throw.type;
      break;

    default:
      abort ();
    }

  if (o->label)
    n->label = get_label_from_map (map, CODE_LABEL_NUMBER (o->label));
  if (o->resume)
    {
      n->resume = map->insn_map[INSN_UID (o->resume)];
      if (n->resume == NULL)
	abort ();
    }

  return n;
}
static void
duplicate_eh_region_2 (o, n_array)
     struct eh_region *o;
     struct eh_region **n_array;
{
  struct eh_region *n = n_array[o->region_number];

  switch (n->type)
    {
    case ERT_TRY:
      n->u.try.catch = n_array[o->u.try.catch->region_number];
      n->u.try.last_catch = n_array[o->u.try.last_catch->region_number];
      break;

    case ERT_CATCH:
      if (o->u.catch.next_catch)
	n->u.catch.next_catch = n_array[o->u.catch.next_catch->region_number];
      if (o->u.catch.prev_catch)
	n->u.catch.prev_catch = n_array[o->u.catch.prev_catch->region_number];
      break;

    default:
      break;
    }

  if (o->outer)
    n->outer = n_array[o->outer->region_number];
  if (o->inner)
    n->inner = n_array[o->inner->region_number];
  if (o->next_peer)
    n->next_peer = n_array[o->next_peer->region_number];
}
int
duplicate_eh_regions (ifun, map)
     struct function *ifun;
     struct inline_remap *map;
{
  int ifun_last_region_number = ifun->eh->last_region_number;
  struct eh_region **n_array, *root, *cur;
  int i;

  if (ifun_last_region_number == 0)
    return 0;

  n_array = xcalloc (ifun_last_region_number + 1, sizeof (*n_array));

  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      n_array[i] = duplicate_eh_region_1 (cur, map);
    }
  for (i = 1; i <= ifun_last_region_number; ++i)
    {
      cur = ifun->eh->region_array[i];
      if (!cur || cur->region_number != i)
	continue;
      duplicate_eh_region_2 (cur, n_array);
    }

  root = n_array[ifun->eh->region_tree->region_number];
  cur = cfun->eh->cur_region;
  if (cur)
    {
      struct eh_region *p = cur->inner;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cur->inner = root;

      for (i = 1; i <= ifun_last_region_number; ++i)
	if (n_array[i] && n_array[i]->outer == NULL)
	  n_array[i]->outer = cur;
    }
  else
    {
      struct eh_region *p = cfun->eh->region_tree;
      if (p)
	{
	  while (p->next_peer)
	    p = p->next_peer;
	  p->next_peer = root;
	}
      else
	cfun->eh->region_tree = root;
    }

  free (n_array);

  i = cfun->eh->last_region_number;
  cfun->eh->last_region_number = i + ifun_last_region_number;
  return i;
}
static int
t2r_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  tree entry = (tree) pentry;
  tree data = (tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (pentry)
     const PTR pentry;
{
  tree entry = (tree) pentry;
  return TYPE_HASH (TREE_PURPOSE (entry));
}

static int
t2r_mark_1 (slot, data)
     PTR *slot;
     PTR data ATTRIBUTE_UNUSED;
{
  tree contents = (tree) *slot;
  ggc_mark_tree (contents);
  return 1;
}

static void
t2r_mark (addr)
     PTR addr;
{
  htab_traverse (*(htab_t *)addr, t2r_mark_1, NULL);
}
static void
add_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (type)
     tree type;
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TYPE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  tree data = (tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TYPE_HASH (entry->t);
}
/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}
/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (pentry)
     const PTR pentry;
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TYPE_HASH (TREE_VALUE (list));
  return h;
}
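
/* Note that (h << 5) + (h >> 27) is a 5-bit left rotate when hashval_t
   is 32 bits wide, so every entry in the list keeps influencing the
   final hash value rather than being shifted out.  */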
/* Add TYPE to cfun->eh->ttype_data, using TYPES_HASH to speed
   up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (ttypes_hash, type)
     htab_t ttypes_hash;
     tree type;
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TYPE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = type;
      n->filter = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) + 1;
      *slot = n;

      VARRAY_PUSH_TREE (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (ehspec_hash, ttypes_hash, list)
     htab_t ehspec_hash;
     htab_t ttypes_hash;
     tree list;
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = (struct ttypes_filter *) xmalloc (sizeof (*n));
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) + 1);
      *slot = n;

      /* Look up each type in the list and encode its filter
	 value as a uleb128.  Terminate the list with 0.  */
      for (; list ; list = TREE_CHAIN (list))
	push_uleb128 (&cfun->eh->ehspec_data,
		      add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
      VARRAY_PUSH_UCHAR (cfun->eh->ehspec_data, 0);
    }

  return n->filter;
}
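
/* For reference: ULEB128 encodes an unsigned value in little-endian
   base-128 groups, with the high bit set on every byte except the
   last.  push_uleb128 (defined later in this file) is expected to
   behave roughly like this sketch:

     do {
       unsigned char byte = value & 0x7f;
       value >>= 7;
       if (value)
	 byte |= 0x80;
       VARRAY_PUSH_UCHAR (*data_area, byte);
     } while (value);
*/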
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values ()
{
  int i;
  htab_t ttypes, ehspec;

  VARRAY_TREE_INIT (cfun->eh->ttype_data, 16, "ttype_data");
  VARRAY_UCHAR_INIT (cfun->eh->ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r = cfun->eh->region_array[i];

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.catch.filter_list = NULL_TREE;

	  if (r->u.catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_2 (flt, 0);

		  r->u.catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_2 (flt, 0);

	      r->u.catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.catch.filter_list);
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
static void
build_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	      {
		/* ??? _Unwind_ForcedUnwind wants no match here.  */
		if (c->u.catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* Need for one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.catch.type_list;
		    tree flt_node = c->u.catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (cfun->eh->filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX, word_mode, 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->u.try.catch->label);
	  break;
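
	  /* The sequence spliced in above gives each try region a
	     post-landing pad of roughly this shape (illustration only):

	       post_landing_pad:
		 if (filter == FLT1) goto catch1;
		 if (filter == FLT2) goto catch2;
		 resx;   // no local match: resume unwinding outward
	  */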
	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (cfun->eh->filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX, word_mode, 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_insns_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  abort ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads ()
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      struct eh_region *outer;
      rtx seq;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	emit_jump (outer->post_landing_pad);
      else
	emit_library_call (unwind_resume_libfunc, LCT_THROW,
			   VOIDmode, 1, cfun->eh->exc_ptr, Pmode);

      seq = get_insns ();
      end_sequence ();
      emit_insns_before (seq, region->resume);
      delete_insn (region->resume);
    }
}
static void
dw2_build_landing_pads ()
{
  int i;
  unsigned int j;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      rtx seq;
      bool clobbers_hard_regs = false;

      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      /* If the eh_return data registers are call-saved, then we
	 won't have considered them clobbered from the call that
	 threw.  Kill them now.  */
      for (j = 0; ; ++j)
	{
	  unsigned r = EH_RETURN_DATA_REGNO (j);
	  if (r == INVALID_REGNUM)
	    break;
	  if (! call_used_regs[r])
	    {
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, r)));
	      clobbers_hard_regs = true;
	    }
	}

      if (clobbers_hard_regs)
	{
	  /* @@@ This is a kludge.  Not all machine descriptions define a
	     blockage insn, but we must not allow the code we just generated
	     to be reordered by scheduling.  So emit an ASM_INPUT to act as
	     a blockage insn.  */
	  emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
	}

      emit_move_insn (cfun->eh->exc_ptr,
		      gen_rtx_REG (Pmode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (cfun->eh->filter,
		      gen_rtx_REG (word_mode, EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      emit_insns_before (seq, region->post_landing_pad);
    }
}
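
/* Thus under dwarf2 unwinding the runtime resumes execution at
   region->landing_pad with the exception pointer in
   EH_RETURN_DATA_REGNO (0) and the filter value in
   EH_RETURN_DATA_REGNO (1); the code above copies both into pseudos
   before falling through to the post-landing pad dispatch built by
   build_post_landing_pads.  */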
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};
static bool
sjlj_find_directly_reachable_regions (lp_info)
     struct sjlj_lp_info *lp_info;
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, 0);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (dispatch_label, lp_info)
     rtx dispatch_label;
     struct sjlj_lp_info *lp_info;
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = cfun->eh->region_array[i];
	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  cfun->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
static void
sjlj_mark_call_sites (lp_info)
     struct sjlj_lp_info *lp_info;
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (GET_CODE (insn) == CODE_LABEL)
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (GET_CODE (insn) == CALL_INSN)
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (cfun->eh->sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insns_before (p, before);
      last_call_site = this_call_site;
    }
}
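
/* The net effect is that each potentially-throwing insn is preceded
   (with redundant stores within an extended basic block elided) by
   the equivalent of

     fc.call_site = this_call_site;

   so that at unwind time the dispatcher can tell which post-landing
   pad to select.  */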
/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (dispatch_label)
     rtx dispatch_label;
{
  rtx fn_begin, fc, mem, seq;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (cfun->uses_eh_lsda)
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", sjlj_funcdef_number);
      emit_move_insn (mem, gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)));
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x, note;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
    NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, x, const0_rtx);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (GET_CODE (fn_begin) == NOTE
	&& NOTE_LINE_NUMBER (fn_begin) == NOTE_INSN_FUNCTION_BEG)
      break;
  emit_insns_after (seq, fn_begin);
}
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (after)
     rtx after;
{
  cfun->eh->sjlj_exit_after = after;
}
static void
sjlj_emit_function_exit ()
{
  rtx seq;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (cfun->eh->sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  emit_insns_after (seq, cfun->eh->sjlj_exit_after);
}
static void
sjlj_emit_dispatch_table (dispatch_label, lp_info)
     rtx dispatch_label;
     struct sjlj_lp_info *lp_info;
{
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;

  fc = cfun->eh->sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs);
  if (word_mode != Pmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (Pmode, mem);
#else
      mem = convert_to_mode (Pmode, mem, 0);
#endif
    }
  emit_move_insn (cfun->eh->exc_ptr, mem);

  mem = adjust_address (fc, word_mode, sjlj_fc_data_ofs + UNITS_PER_WORD);
  emit_move_insn (cfun->eh->filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
			       cfun->eh->region_array[i]->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  emit_insns_before (seq, (cfun->eh->region_array[first_reachable]
			   ->post_landing_pad));
}
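
/* The emitted dispatcher therefore looks roughly like this
   (illustration only):

     dispatch_label:
       d = fc.call_site;
       exc_ptr = fc.data[0];
       filter = fc.data[1];
       if (d == D2) goto post_landing_pad_2;
       if (d == D3) goto post_landing_pad_3;
       ...                  // falls through to the first reachable
                            // region's post-landing pad
*/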
static void
sjlj_build_landing_pads ()
{
  struct sjlj_lp_info *lp_info;

  lp_info = (struct sjlj_lp_info *) xcalloc (cfun->eh->last_region_number + 1,
					     sizeof (struct sjlj_lp_info));

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();

      cfun->eh->sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      TYPE_ALIGN (sjlj_fc_type_node));

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}
void
finish_eh_generation ()
{
  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  rebuild_jump_labels (get_insns ());
  find_basic_blocks (get_insns (), max_reg_num (), 0);
  cleanup_cfg (CLEANUP_PRE_LOOP);

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer (cfun);
  get_exception_filter (cfun);

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  cfun->eh->built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  rebuild_jump_labels (get_insns ());
  find_basic_blocks (get_insns (), max_reg_num (), 0);
  cleanup_cfg (CLEANUP_PRE_LOOP);
}
/* This section handles removing dead code for flow.  */

/* Remove LABEL from the exception_handler_labels list.  */

static void
remove_exception_handler_label (label)
     rtx label;
{
  rtx *pl, l;

  /* If exception_handler_labels was not built yet,
     there is nothing to do.  */
  if (exception_handler_labels == NULL)
    return;

  for (pl = &exception_handler_labels, l = *pl;
       XEXP (l, 0) != label;
       pl = &XEXP (l, 1), l = *pl)
    continue;

  *pl = XEXP (l, 1);
  free_EXPR_LIST_node (l);
}
/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (region)
     struct eh_region *region;
{
  struct eh_region **pp, *p;
  rtx lab;
  int i;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have to
     search the whole thing.  */
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (cfun->eh->region_array[i] == region)
      cfun->eh->region_array[i] = region->outer;

  if (cfun->eh->built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (region->outer)
    pp = &region->outer->inner;
  else
    pp = &cfun->eh->region_tree;
  for (p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;

  if (region->inner)
    {
      for (p = region->inner; p->next_peer ; p = p->next_peer)
	p->outer = region->outer;
      p->next_peer = region->next_peer;
      p->outer = region->outer;
      *pp = region->inner;
    }
  else
    *pp = region->next_peer;

  if (region->type == ERT_CATCH)
    {
      struct eh_region *try, *next, *prev;

      for (try = region->next_peer;
	   try->type == ERT_CATCH;
	   try = try->next_peer)
	continue;
      if (try->type != ERT_TRY)
	abort ();

      next = region->u.catch.next_catch;
      prev = region->u.catch.prev_catch;

      if (next)
	next->u.catch.prev_catch = prev;
      else
	try->u.try.last_catch = prev;
      if (prev)
	prev->u.catch.next_catch = next;
      else
	{
	  try->u.try.catch = next;
	  if (! next)
	    remove_eh_handler (try);
	}
    }

  free (region);
}
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (label)
     rtx label;
{
  int i;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (cfun->eh->built_landing_pads)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region = cfun->eh->region_array[i];
      if (region && region->label == label)
	{
	  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
	     because there is no path to the fallback call to terminate.
	     But the region continues to affect call-site data until there
	     are no more contained calls, which we don't see here.  */
	  if (region->type == ERT_MUST_NOT_THROW)
	    {
	      remove_exception_handler_label (region->label);
	      region->label = NULL_RTX;
	    }
	  else
	    remove_eh_handler (region);
	  break;
	}
    }
}
/* This section describes CFG exception edges for flow.  */

/* For communicating between calls to reachable_next_level.  */
struct reachable_info
{
  tree types_caught;
  tree types_allowed;
  rtx handlers;
};

/* A subroutine of reachable_next_level.  Return true if TYPE, or a
   base class of TYPE, is in HANDLED.  */

static int
check_handled (handled, type)
     tree handled, type;
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) == type)
	  return 1;
    }
  else
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
	  return 1;
    }

  return 0;
}
/* A subroutine of reachable_next_level.  If we are collecting a list
   of handlers, add one.  After landing pad generation, reference
   it instead of the handlers themselves.  Further, the handlers are
   all wired together, so by referencing one, we've got them all.
   Before landing pad generation we reference each handler individually.

   LP_REGION contains the landing pad; REGION is the handler.  */

static void
add_reachable_handler (info, lp_region, region)
     struct reachable_info *info;
     struct eh_region *lp_region;
     struct eh_region *region;
{
  if (! info)
    return;

  if (cfun->eh->built_landing_pads)
    {
      if (! info->handlers)
	info->handlers = alloc_INSN_LIST (lp_region->landing_pad, NULL_RTX);
    }
  else
    info->handlers = alloc_INSN_LIST (region->label, info->handlers);
}
/* Process one level of exception regions for reachability.
   If TYPE_THROWN is non-null, then it is the *exact* type being
   propagated.  If INFO is non-null, then collect handler labels
   and caught/allowed type information between invocations.  */

static enum reachable_code
reachable_next_level (region, type_thrown, info)
     struct eh_region *region;
     tree type_thrown;
     struct reachable_info *info;
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
	 directly to the individual handlers.  In this way we can
	 see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
	struct eh_region *c;
	enum reachable_code ret = RNL_NOT_CAUGHT;

	for (c = region->u.try.catch; c ; c = c->u.catch.next_catch)
	  {
	    /* A catch-all handler ends the search.  */
	    /* ??? _Unwind_ForcedUnwind will want outer cleanups
	       to be run as well.  */
	    if (c->u.catch.type_list == NULL)
	      {
		add_reachable_handler (info, region, c);
		return RNL_CAUGHT;
	      }

	    if (type_thrown)
	      {
		/* If we have at least one type match, end the search.  */
		tree tp_node = c->u.catch.type_list;

		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (type == type_thrown
			|| (lang_eh_type_covers
			    && (*lang_eh_type_covers) (type, type_thrown)))
		      {
			add_reachable_handler (info, region, c);
			return RNL_CAUGHT;
		      }
		  }

		/* If we have definitive information of a match failure,
		   the catch won't trigger.  */
		if (lang_eh_type_covers)
		  return RNL_NOT_CAUGHT;
	      }

	    /* At this point, we either don't know what type is thrown or
	       don't have front-end assistance to help deciding if it is
	       covered by one of the types in the list for this region.

	       We'd then like to add this region to the list of reachable
	       handlers since it is indeed potentially reachable based on the
	       information we have.

	       Actually, this handler is for sure not reachable if all the
	       types it matches have already been caught.  That is, it is only
	       potentially reachable if at least one of the types it catches
	       has not been previously caught.  */

	    if (! info)
	      ret = RNL_MAYBE_CAUGHT;
	    else
	      {
		tree tp_node = c->u.catch.type_list;
		bool maybe_reachable = false;

		/* Compute the potential reachability of this handler and
		   update the list of types caught at the same time.  */
		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (! check_handled (info->types_caught, type))
		      {
			info->types_caught
			  = tree_cons (NULL, type, info->types_caught);

			maybe_reachable = true;
		      }
		  }

		if (maybe_reachable)
		  {
		    add_reachable_handler (info, region, c);

		    /* ??? If the catch type is a base class of every allowed
		       type, then we know we can stop the search.  */
		    ret = RNL_MAYBE_CAUGHT;
		  }
	      }
	  }

	return ret;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}

      /* Collect a list of lists of allowed types for use in detecting
	 when a catch may be transformed into a catch-all.  */
      if (info)
	info->types_allowed = tree_cons (NULL_TREE,
					 region->u.allowed.type_list,
					 info->types_allowed);

      /* If we have definitive information about the type hierarchy,
	 then we can tell if the thrown type will pass through the
	 filter.  */
      if (type_thrown && lang_eh_type_covers)
	{
	  if (check_handled (region->u.allowed.type_list, type_thrown))
	    return RNL_NOT_CAUGHT;
	  else
	    {
	      add_reachable_handler (info, region, region);
	      return RNL_CAUGHT;
	    }
	}

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;

    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.
	 If we've touched down at some landing pad previously, then the
	 explicit function call we generated may be used.  Otherwise
	 the call is made by the runtime.  */
      if (info && info->handlers)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}
      else
	return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_FIXUP:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      break;
    }

  abort ();
}
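/* An illustrative example of the shadowing noted above (not from any
   particular testcase): given a C++ fragment such as

	try { ... }
	catch (Base &)    { ... }
	catch (Derived &) { ... }

   where Derived inherits from Base, and the exact thrown type is not
   known, the first pass through the ERT_TRY case records Base in
   types_caught.  When the Derived handler is considered,
   check_handled reports that an already-caught type covers it (via
   lang_eh_type_covers), so maybe_reachable stays false and the
   Derived handler is never added to the reachable list: the earlier
   catch shadows it.  */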
/* Retrieve a list of labels of exception handlers which can be
   reached by a given insn.  */

rtx
reachable_handlers (insn)
     rtx insn;
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;
  int region_number;

  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    region_number = XINT (PATTERN (insn), 0);
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  memset (&info, 0, sizeof (info));

  region = cfun->eh->region_array[region_number];

  type_thrown = NULL_TREE;
  if (GET_CODE (insn) == JUMP_INSN
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
	 region itself may have been deleted out from under us.  */
      if (region == NULL)
	return NULL;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  for (; region; region = region->outer)
    if (reachable_next_level (region, type_thrown, &info) >= RNL_CAUGHT)
      break;

  return info.handlers;
}
/* Determine if the given INSN can throw an exception that is caught
   within the function.  */

bool
can_throw_internal (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub ; sub = NEXT_INSN (sub))
	    if (can_throw_internal (sub))
	      return true;
	}
      return false;
    }

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0);
      if (how == RNL_BLOCKED)
	return false;
      if (how != RNL_NOT_CAUGHT)
	return true;
    }

  return false;
}
/* Determine if the given INSN can throw an exception that is
   visible outside the function.  */

bool
can_throw_external (insn)
     rtx insn;
{
  struct eh_region *region;
  tree type_thrown;
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (GET_CODE (insn) == INSN
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (GET_CODE (insn) == CALL_INSN
      && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
    {
      int i;
      for (i = 0; i < 3; ++i)
	{
	  rtx sub = XEXP (PATTERN (insn), i);
	  for (; sub ; sub = NEXT_INSN (sub))
	    if (can_throw_external (sub))
	      return true;
	}
      return false;
    }

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
	 exception handling region in this function.  We have to
	 assume it might throw.  Given that the front end and middle
	 ends mark known NOTHROW functions, this isn't so wildly
	 inaccurate.  */
      return (GET_CODE (insn) == CALL_INSN
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];

  type_thrown = NULL_TREE;
  if (region->type == ERT_THROW)
    {
      type_thrown = region->u.throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region ; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL) >= RNL_CAUGHT)
      return false;

  return true;
}
/* True if nothing in this function can throw outside this function.  */

bool
nothrow_function_p ()
{
  rtx insn;

  if (! flag_exceptions)
    return true;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      return false;
  for (insn = current_function_epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      return false;

  return true;
}
/* Various hooks for unwind library.  */

/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init ()
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  current_function_has_nonlocal_label = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
rtx
expand_builtin_eh_return_data_regno (arglist)
     tree arglist;
{
  tree which = TREE_VALUE (arglist);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of `__builtin_eh_return_regno' must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  */

rtx
expand_builtin_extract_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, 0);

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  */

rtx
expand_builtin_frob_return_addr (addr_tree)
     tree addr_tree;
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (addr) != Pmode)
    addr = convert_memory_address (Pmode, addr);
#endif

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  */

void
expand_builtin_eh_return (stackadj_tree, handler_tree)
     tree stackadj_tree, handler_tree;
{
  rtx stackadj, handler;

  stackadj = expand_expr (stackadj_tree, cfun->eh->ehr_stackadj, VOIDmode, 0);
  handler = expand_expr (handler_tree, cfun->eh->ehr_handler, VOIDmode, 0);

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (stackadj) != Pmode)
    stackadj = convert_memory_address (Pmode, stackadj);

  if (GET_MODE (handler) != Pmode)
    handler = convert_memory_address (Pmode, handler);
#endif

  if (! cfun->eh->ehr_label)
    {
      cfun->eh->ehr_stackadj = copy_to_reg (stackadj);
      cfun->eh->ehr_handler = copy_to_reg (handler);
      cfun->eh->ehr_label = gen_label_rtx ();
    }
  else
    {
      if (stackadj != cfun->eh->ehr_stackadj)
	emit_move_insn (cfun->eh->ehr_stackadj, stackadj);
      if (handler != cfun->eh->ehr_handler)
	emit_move_insn (cfun->eh->ehr_handler, handler);
    }

  emit_jump (cfun->eh->ehr_label);
}
void
expand_eh_return ()
{
  rtx sa, ra, around_label;

  if (! cfun->eh->ehr_label)
    return;

  sa = EH_RETURN_STACKADJ_RTX;
  if (! sa)
    {
      error ("__builtin_eh_return not supported on this target");
      return;
    }

  current_function_calls_eh_return = 1;

  around_label = gen_label_rtx ();
  emit_move_insn (sa, const0_rtx);
  emit_jump (around_label);

  emit_label (cfun->eh->ehr_label);
  clobber_return_register ();

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (cfun->eh->ehr_stackadj, cfun->eh->ehr_handler));
  else
#endif
    {
      ra = EH_RETURN_HANDLER_RTX;
      if (! ra)
	{
	  error ("__builtin_eh_return not supported on this target");
	  ra = gen_reg_rtx (Pmode);
	}

      emit_move_insn (sa, cfun->eh->ehr_stackadj);
      emit_move_insn (ra, cfun->eh->ehr_handler);
    }

  emit_label (around_label);
}
/* In the following functions, we represent entries in the action table
   as 1-based indices.  Special cases are:

	 0:	null action record, non-null landing pad; implies cleanups
	-1:	null action record, null landing pad; implies no action
	-2:	no call-site entry; implies must_not_throw
	-3:	we have yet to process outer regions

   Further, no special cases apply to the "next" field of the record.
   For next, 0 means end of list.  */
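/* A worked example of the byte encoding (illustrative values only;
   see add_action_record below for the conversion): suppose the first
   record added is {filter = 0, next = 0} and the second is
   {filter = 1, next = 1}, i.e. the second record chains to the first.
   The first record lands at offset 1 and pushes the bytes 00 00.  The
   second lands at offset 3; its filter pushes 01, and its next field
   is converted from the absolute index 1 into the self-relative
   displacement 1 - 4 = -3, which sleb128-encodes as the single byte
   7d.  The action table is thus 00 00 01 7d.  */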
struct action_record
{
  int offset;
  int filter;
  int next;
};

static int
action_record_eq (pentry, pdata)
     const PTR pentry;
     const PTR pdata;
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}

static hashval_t
action_record_hash (pentry)
     const PTR pentry;
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}
static int
add_action_record (ar_hash, filter, next)
     htab_t ar_hash;
     int filter, next;
{
  struct action_record **slot, *new, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new = *slot) == NULL)
    {
      new = (struct action_record *) xmalloc (sizeof (*new));
      new->offset = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      new->filter = filter;
      new->next = next;
      *slot = new;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&cfun->eh->action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data) + 1;
      push_sleb128 (&cfun->eh->action_record_data, next);
    }

  return new->offset;
}
static int
collect_one_action_chain (ar_hash, region)
     htab_t ar_hash;
     struct eh_region *region;
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c ; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.try.last_catch; c ; c = c->u.catch.prev_catch)
	{
	  if (c->u.catch.type_list == NULL)
	    {
	      /* Retrieve the filter from the head of the filter list
		 where we have stored it (see assign_filter_values).  */
	      int filter
		= TREE_INT_CST_LOW (TREE_VALUE (c->u.catch.filter_list));

	      next = add_action_record (ar_hash, filter, 0);
	    }
	  else
	    {
	      /* Once the outer search is done, trigger an action record for
		 each filter we have.  */
	      tree flt_node;

	      if (next == -3)
		{
		  next = collect_one_action_chain (ar_hash, region->outer);

		  /* If there is no next action, terminate the chain.  */
		  if (next == -1)
		    next = 0;
		  /* If all outer actions are cleanups or must_not_throw,
		     we'll have no action record for it, since we had wanted
		     to encode these states in the call-site record directly.
		     Add a cleanup action to the chain to catch these.  */
		  else if (next <= 0)
		    next = add_action_record (ar_hash, 0, 0);
		}

	      flt_node = c->u.catch.filter_list;
	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		{
		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		  next = add_action_record (ar_hash, filter, next);
		}
	    }
	}
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      return add_action_record (ar_hash, region->u.allowed.filter,
				next < 0 ? 0 : next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      abort ();
    }
}
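/* A worked trace through the above (hypothetical region nesting):
   for a call nested as

	ERT_CLEANUP
	  ERT_TRY  (one catch with filter 1)
	    <the call>

   the ERT_TRY case runs first.  Its outer search invokes the
   ERT_CLEANUP case, which finds no outer actions (-1) and so
   compresses to the zero action, returning 0.  Back in the try, 0 is
   not -1, so a cleanup record {0, 0} is materialized via
   add_action_record, and the catch then chains a {1, <cleanup>}
   record in front of it.  With the byte encoding illustrated earlier
   this yields the action table 00 00 01 7d, and the call's action
   index is 3.  */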
static int
add_call_site (landing_pad, action)
     rtx landing_pad;
     int action;
{
  struct call_site_record *data = cfun->eh->call_site_data;
  int used = cfun->eh->call_site_data_used;
  int size = cfun->eh->call_site_data_size;

  if (used >= size)
    {
      size = (size ? size * 2 : 64);
      data = (struct call_site_record *)
	xrealloc (data, sizeof (*data) * size);
      cfun->eh->call_site_data = data;
      cfun->eh->call_site_data_size = size;
    }

  data[used].landing_pad = landing_pad;
  data[used].action = action;

  cfun->eh->call_site_data_used = used + 1;

  return used + call_site_base;
}
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.  */

void
convert_to_eh_region_ranges ()
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return;

  VARRAY_UCHAR_INIT (cfun->eh->action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	struct eh_region *region;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
	if (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	  {
	    if (! (GET_CODE (insn) == CALL_INSN
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	  }
	else
	  {
	    if (INTVAL (XEXP (note, 0)) <= 0)
	      continue;
	    region = cfun->eh->region_array[INTVAL (XEXP (note, 0))];
	    this_action = collect_one_action_chain (ar_hash, region);
	  }

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  cfun->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	  {
	    struct eh_region *o;
	    for (o = region; ! o->landing_pad ; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	  }
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);
}
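/* Schematically, and assuming the two regions end up with differing
   actions or landing pads, the pass above turns an insn stream like

	insn_1   (REG_EH_REGION 2)
	insn_2   (REG_EH_REGION 2)
	insn_3   (REG_EH_REGION 5)

   into

	NOTE_INSN_EH_REGION_BEG  (handler = call site A)
	insn_1
	insn_2
	NOTE_INSN_EH_REGION_END  (handler = call site A)
	NOTE_INSN_EH_REGION_BEG  (handler = call site B)
	insn_3
	NOTE_INSN_EH_REGION_END  (handler = call site B)

   where A and B are the indices returned by add_call_site.  The
   region numbers 2 and 5 and the grouping are illustrative only.  */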
static void
push_uleb128 (data_area, value)
     varray_type *data_area;
     unsigned int value;
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

static void
push_sleb128 (data_area, value)
     varray_type *data_area;
     int value;
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
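/* Some worked encodings for the routines above (values chosen purely
   for illustration):

	uleb128 (624485)  =>  e5 8e 26
	sleb128 (-2)      =>  7e
	sleb128 (64)      =>  c0 00

   Note the last case: although 64 fits in one group of seven bits,
   bit 6 of a final byte is the sign bit, so a second byte is needed
   to keep the value positive.

   For reference, a matching uleb128 reader might look like the sketch
   below.  It is compiled out and illustrative only; the name and the
   raw-buffer convention are hypothetical, not part of the compiler.  */
#if 0
static unsigned int
example_read_uleb128 (p)
     const unsigned char *p;
{
  unsigned int result = 0;
  int shift = 0;
  unsigned char byte;

  /* Accumulate 7 bits per byte, low-order group first, until a byte
     without the continuation bit (0x80) is seen.  */
  do
    {
      byte = *p++;
      result |= (unsigned int) (byte & 0x7f) << shift;
      shift += 7;
    }
  while (byte & 0x80);

  return result;
}
#endif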
#ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static int
sjlj_size_of_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
static void
dw2_output_call_site_table ()
{
  const char *const function_start_lab
    = IDENTIFIER_POINTER (current_function_func_begin_label);
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab, function_start_lab,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab, function_start_lab,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab, function_start_lab,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab, function_start_lab,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}

static void
sjlj_output_call_site_table ()
{
  int n = cfun->eh->call_site_data_used;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = &cfun->eh->call_site_data[i];

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
				   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
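/* Schematically, and assuming an assembler with .uleb128 support
   (HAVE_AS_LEB128), one dw2 call-site record comes out roughly as

	.uleb128 .LEHB0-.LFB1		(region 0 start)
	.uleb128 .LEHE0-.LEHB0		(length)
	.uleb128 .L5-.LFB1		(landing pad, or 0 if none)
	.uleb128 0x1			(action)

   while the sjlj variant needs only the dispatch index and the
   action.  The label names above are illustrative, not what any
   particular target actually generates.  */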
void
output_function_exception_table ()
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int funcdef_number;
  int tt_format_size = 0;

  /* Not all functions need anything.  */
  if (! cfun->uses_eh_lsda)
    return;

  funcdef_number = (USING_SJLJ_EXCEPTIONS
		    ? sjlj_funcdef_number
		    : current_funcdef_number);

#ifdef IA64_UNWIND_INFO
  fputs ("\t.personality\t", asm_out_file);
  output_addr_const (asm_out_file, eh_personality_libfunc);
  fputs ("\n\t.handlerdata\n", asm_out_file);
  /* Note that varasm still thinks we're in the function's code section.
     The ".endp" directive that will immediately follow will take us back.  */
#else
  (*targetm.asm_out.exception_section) ();
#endif

  have_tt_data = (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data) > 0
		  || VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data) > 0);

  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT", funcdef_number);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }

  ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, "LLSDA", funcdef_number);

  /* The LSDA header.  */

  /* Indicate the format of the landing pad start pointer.  An omitted
     field implies @LPStart == @Start.  */
  /* Currently we always put @LPStart == @Start.  This field would
     be most useful in moving the landing pads completely out of
     line to another section, but it could also be used to minimize
     the size of uleb128 landing pad offsets.  */
  lp_format = DW_EH_PE_omit;
  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
		       eh_data_format_name (lp_format));

  /* @LPStart pointer would go here.  */

  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
		       eh_data_format_name (tt_format));

#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif

  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
      char ttype_after_disp_label[32];
      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
				   funcdef_number);
      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
				    "@TType base offset");
      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ug.  Alignment queers things.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data)
		    + (VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }

  /* Indicate the format of the call-site offsets.  */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));

#ifdef HAVE_AS_LEB128
  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
			       funcdef_number);
  ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
			       funcdef_number);
  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
				"Call-site table length");
  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->action_record_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->action_record_data, i),
			 (i ? NULL : "Action record table"));

  if (have_tt_data)
    assemble_align (tt_format_size * BITS_PER_UNIT);

  i = VARRAY_ACTIVE_SIZE (cfun->eh->ttype_data);
  while (i-- > 0)
    {
      tree type = VARRAY_TREE (cfun->eh->ttype_data, i);
      rtx value;

      if (type == NULL_TREE)
	type = integer_zero_node;
      else
	type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
      if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
	assemble_integer (value, tt_format_size,
			  tt_format_size * BITS_PER_UNIT, 1);
      else
	dw2_asm_output_encoded_addr_rtx (tt_format, value, NULL);
    }

#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif

  /* ??? Decode and interpret the data for flag_debug_asm.  */
  n = VARRAY_ACTIVE_SIZE (cfun->eh->ehspec_data);
  for (i = 0; i < n; ++i)
    dw2_asm_output_data (1, VARRAY_UCHAR (cfun->eh->ehspec_data, i),
			 (i ? NULL : "Exception specification table"));

  function_section (current_function_decl);

  if (USING_SJLJ_EXCEPTIONS)
    sjlj_funcdef_number += 1;
}
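/* For reference, the LSDA emitted above has the following overall
   shape.  This is a summary of the code above, not an independent
   specification; exact offsets and sizes vary with the chosen
   encodings and target:

	byte:     @LPStart format  (DW_EH_PE_omit here; @LPStart == @Start)
	byte:     @TType format
	uleb128:  @TType base offset       (only if there is @TType data)
	byte:     call-site format
	uleb128:  call-site table length
	          call-site table
	          action record table
	          (alignment padding)
	          @TType table, emitted in reverse order
	          exception specification table  */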