/* Implements exception handling.
   Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Mike Stump <mrs@cygnus.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* An exception is an event that can be signaled from within a
   function.  This event can then be "caught" or "trapped" by the
   callers of this function.  This potentially allows program flow to
   be transferred to any arbitrary code associated with a function call
   several levels up the stack.

   The intended use for this mechanism is for signaling "exceptional
   events" in an out-of-band fashion, hence its name.  The C++ language
   (and many other OO-styled or functional languages) practically
   requires such a mechanism, as otherwise it becomes very difficult
   or even impossible to signal failure conditions in complex
   situations.  The traditional C++ example is when an error occurs in
   the process of constructing an object; without such a mechanism, it
   is impossible to signal that the error occurred without adding global
   state variables and error checks around every object construction.

   The act of causing this event to occur is referred to as "throwing
   an exception".  (Alternate terms include "raising an exception" or
   "signaling an exception".)  The term "throw" is used because control
   is returned to the callers of the function that is signaling the
   exception, and thus there is the concept of "throwing" the
   exception up the call stack.
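   As an illustration (not part of the original commentary), consider
   this C++ fragment:

       struct error { };

       void build (void)
       {
         if (something_failed)
           throw error ();	/* signal the exceptional event */
       }

       void caller (void)
       {
         try { build (); }
         catch (error &) { }	/* control arrives here */
       }

   The throw in build transfers control directly to the catch clause
   in caller, unwinding the intervening frame.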
   [ Add updated documentation on how to use this.  ]  */
#include "coretypes.h"
#include "insn-config.h"
#include "integrate.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "dwarf2asm.h"
#include "dwarf2out.h"
#include "langhooks.h"
#include "diagnostic.h"
#include "tree-pass.h"
/* Provide defaults for stuff that may not be defined when using
   sjlj exceptions.  */
#ifndef EH_RETURN_DATA_REGNO
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
/* Protect cleanup actions with must-not-throw regions, with a call
   to the given failure handler.  */
gimple (*lang_protect_cleanup_actions) (void);

/* Return true if type A catches type B.  */
int (*lang_eh_type_covers) (tree a, tree b);

/* Map a type to a runtime object to match type.  */
tree (*lang_eh_runtime_type) (tree);
/* A hash table of label to region number.  */

struct ehl_map_entry GTY(())
{
  rtx label;
  struct eh_region *region;
};
static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
  htab_t type_to_runtime_map;
/* Describe the SjLj_Function_Context structure.  */
static GTY(()) tree sjlj_fc_type_node;
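
/* Byte offsets of the interesting fields within that structure; they
   are cached by init_eh below, once the type has been laid out, so
   the rtl expanders can address the fields directly.  */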
static int sjlj_fc_call_site_ofs;
static int sjlj_fc_data_ofs;
static int sjlj_fc_personality_ofs;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
/* Describes one exception region.  */
struct eh_region GTY(())
{
  /* The immediately surrounding region.  */
  struct eh_region *outer;

  /* The list of immediately contained regions.  */
  struct eh_region *inner;
  struct eh_region *next_peer;

  /* An identifier for this region.  */
  int region_number;

  /* When a region is deleted, its parents inherit the REG_EH_REGION
     numbers already assigned.  */
  bitmap aka;

  /* Each region does exactly one thing.  */
  enum eh_region_type
  {
    ERT_UNKNOWN = 0,
    ERT_CLEANUP,
    ERT_TRY,
    ERT_CATCH,
    ERT_ALLOWED_EXCEPTIONS,
    ERT_MUST_NOT_THROW,
    ERT_THROW
  } type;

  /* Holds the action to perform based on the preceding type.  */
  union eh_region_u {
    /* A list of catch blocks, a surrounding try block,
       and the label for continuing after a catch.  */
    struct eh_region_u_try {
      struct eh_region *eh_catch;
      struct eh_region *last_catch;
    } GTY ((tag ("ERT_TRY"))) eh_try;

    /* The list through the catch handlers, the list of type objects
       matched, and the list of associated filters.  */
    struct eh_region_u_catch {
      struct eh_region *next_catch;
      struct eh_region *prev_catch;
      tree type_list;
      tree filter_list;
    } GTY ((tag ("ERT_CATCH"))) eh_catch;

    /* A tree_list of allowed types.  */
    struct eh_region_u_allowed {
      tree type_list;
      int filter;
    } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;

    /* The type given by a call to "throw foo();", or discovered
       for a throw.  */
    struct eh_region_u_throw {
      tree type;
    } GTY ((tag ("ERT_THROW"))) eh_throw;

    /* Retain the cleanup expression even after expansion so that
       we can match up fixup regions.  */
    struct eh_region_u_cleanup {
      struct eh_region *prev_try;
    } GTY ((tag ("ERT_CLEANUP"))) cleanup;
  } GTY ((desc ("%0.type"))) u;

  /* Entry point for this region's handler before landing pads are built.  */
  rtx label;
  tree tree_label;

  /* Entry point for this region's handler from the runtime eh library.  */
  rtx landing_pad;

  /* Entry point for this region's handler from an inner region.  */
  rtx post_landing_pad;

  /* The RESX insn for handing off control to the next outermost handler,
     if appropriate.  */
  rtx resume;

  /* True if something in this region may throw.  */
  unsigned may_contain_throw : 1;
};

typedef struct eh_region *eh_region;
struct call_site_record GTY(())
{
  rtx landing_pad;
  int action;
};

DEF_VEC_P(eh_region);
DEF_VEC_ALLOC_P(eh_region, gc);
/* Used to save exception status for each function.  */
struct eh_status GTY(())
{
  /* The tree of all regions for this function.  */
  struct eh_region *region_tree;

  /* The same information as an indexable array.  */
  VEC(eh_region,gc) *region_array;

  int last_region_number;

  htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
};
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
static void add_type_for_runtime (tree);
static tree lookup_type_for_runtime (tree);

static void remove_unreachable_regions (rtx);

static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static int ehspec_filter_eq (const void *, const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
static void assign_filter_values (void);
static void build_post_landing_pads (void);
static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);

struct sjlj_lp_info;
static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
static void sjlj_mark_call_sites (struct sjlj_lp_info *);
static void sjlj_emit_function_enter (rtx);
static void sjlj_emit_function_exit (void);
static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
static void sjlj_build_landing_pads (void);

static hashval_t ehl_hash (const void *);
static int ehl_eq (const void *, const void *);
static void add_ehl_entry (rtx, struct eh_region *);
static void remove_exception_handler_label (rtx);
static void remove_eh_handler (struct eh_region *);
static int for_each_eh_label_1 (void **, void *);
/* The return value of reachable_next_level.  */
enum reachable_code
{
  /* The given exception is not processed by the given region.  */
  RNL_NOT_CAUGHT,
  /* The given exception may need processing by the given region.  */
  RNL_MAYBE_CAUGHT,
  /* The given exception is completely processed by the given region.  */
  RNL_CAUGHT,
  /* The given exception is completely processed by the runtime.  */
  RNL_BLOCKED
};

struct reachable_info;
static enum reachable_code reachable_next_level (struct eh_region *, tree,
						 struct reachable_info *, bool);
static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, struct eh_region *);
static int add_call_site (rtx, int);

static void push_uleb128 (varray_type *, unsigned int);
static void push_sleb128 (varray_type *, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (void);
static int sjlj_size_of_call_site_table (void);
#endif
static void dw2_output_call_site_table (void);
static void sjlj_output_call_site_table (void);
/* Routine to see if exception handling is turned on.
   DO_WARN is nonzero if we want to inform the user that exception
   handling is turned off.

   This is used to ensure that -fexceptions has been specified if the
   compiler tries to use any exception-specific functions.  */

int
doing_eh (int do_warn)
{
  if (! flag_exceptions)
    {
      static int warned = 0;
      if (! warned && do_warn)
	{
	  error ("exception handling disabled, use -fexceptions to enable");
	  warned = 1;
	}
      return 0;
    }
  return 1;
}
void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map = htab_create_ggc (31, t2r_hash, t2r_eq, NULL);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
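  /* Roughly, the structure being described is (a sketch for reference,
     not the authoritative declaration, which lives in unwind-sjlj.c;
     the jbuf length is target-dependent):

	 struct SjLj_Function_Context
	 {
	   struct SjLj_Function_Context *__prev;
	   int __call_site;
	   _Unwind_Word __data[4];
	   void *__personality;
	   void *__lsda;
	   void *__jbuf[];
	 };  */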
  if (USING_SJLJ_EXCEPTIONS)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
			      (targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
#else
      /* This should be large enough for most systems; if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems; a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* builtin_setjmp takes a pointer to 5 words.  */
      tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what alignment requirements the runtime's
	 jmp_buf has.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_cs), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_cs), 1) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_data), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_data), 1) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_per), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_per), 1) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_lsda), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_lsda), 1) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_low_cst (DECL_FIELD_OFFSET (f_jbuf), 1)
	   + tree_low_cst (DECL_FIELD_BIT_OFFSET (f_jbuf), 1) / BITS_PER_UNIT);
    }
}
void
init_eh_for_function (void)
{
  cfun->eh = GGC_CNEW (struct eh_status);
}
/* Routines to generate the exception tree somewhat directly.
   These are used from tree-eh.c when processing exception related
   nodes during tree optimization.  */

static struct eh_region *
gen_eh_region (enum eh_region_type type, struct eh_region *outer)
{
  struct eh_region *new_eh;

#ifdef ENABLE_CHECKING
  gcc_assert (doing_eh (0));
#endif

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = GGC_CNEW (struct eh_region);
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->region_number = ++cfun->eh->last_region_number;

  return new_eh;
}
struct eh_region *
gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
{
  struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
  cleanup->u.cleanup.prev_try = prev_try;
  return cleanup;
}
struct eh_region *
gen_eh_region_try (struct eh_region *outer)
{
  return gen_eh_region (ERT_TRY, outer);
}
struct eh_region *
gen_eh_region_catch (struct eh_region *t, tree type_or_list)
{
  struct eh_region *c, *l;
  tree type_list, type_node;

  /* Ensure to always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  c = gen_eh_region (ERT_CATCH, t->outer);
  c->u.eh_catch.type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->u.eh_catch.prev_catch = l;
  if (l)
    l->u.eh_catch.next_catch = c;
  else
    t->u.eh_try.eh_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}
struct eh_region *
gen_eh_region_allowed (struct eh_region *outer, tree allowed)
{
  struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}
struct eh_region *
gen_eh_region_must_not_throw (struct eh_region *outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
int
get_eh_region_number (struct eh_region *region)
{
  return region->region_number;
}

bool
get_eh_region_may_contain_throw (struct eh_region *region)
{
  return region->may_contain_throw;
}

tree
get_eh_region_tree_label (struct eh_region *region)
{
  return region->tree_label;
}

void
set_eh_region_tree_label (struct eh_region *region, tree lab)
{
  region->tree_label = lab;
}
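
/* Expand a RESX_EXPR from the tree level: emit, and record in the
   region, the RESX insn that hands the in-flight exception off to the
   handler of the next outermost region.  */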
void
expand_resx_expr (tree exp)
{
  int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
  struct eh_region *reg = VEC_index (eh_region,
				     cfun->eh->region_array, region_nr);

  gcc_assert (!reg->resume);
  do_pending_stack_adjust ();
  reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
  emit_barrier ();
}
/* Note that the current EH region (if any) may contain a throw, or a
   call to a function which itself may contain a throw.  */

void
note_eh_region_may_contain_throw (struct eh_region *region)
{
  while (region && !region->may_contain_throw)
    {
      region->may_contain_throw = 1;
      region = region->outer;
    }
}
/* Return an rtl expression for a pointer to the exception object
   within a handler.  */

rtx
get_exception_pointer (void)
{
  if (! crtl->eh.exc_ptr)
    crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
  return crtl->eh.exc_ptr;
}
/* Return an rtl expression for the exception dispatch filter
   within a handler.  */

rtx
get_exception_filter (void)
{
  if (! crtl->eh.filter)
    crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
  return crtl->eh.filter;
}
/* This section is for the exception handling specific optimization pass.  */

/* Random access the exception region tree.  */

void
collect_eh_region_array (void)
{
  struct eh_region *i;

  i = cfun->eh->region_tree;
  if (! i)
    return;

  VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
		 cfun->eh->last_region_number + 1);
  VEC_replace (eh_region, cfun->eh->region_array, 0, 0);

  while (1)
    {
      VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do {
	    i = i->outer;
	    if (i == NULL)
	      return;
	  } while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
/* Remove all regions whose labels are not reachable from insns.  */

static void
remove_unreachable_regions (rtx insns)
{
  int i, *uid_region_num;
  bool *reachable;
  struct eh_region *r;
  rtx insn;

  uid_region_num = XCNEWVEC (int, get_max_uid ());
  reachable = XCNEWVEC (bool, cfun->eh->last_region_number + 1);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (!r || r->region_number != i)
	continue;

      if (r->resume)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
	  uid_region_num[INSN_UID (r->resume)] = i;
	}
      if (r->label)
	{
	  gcc_assert (!uid_region_num[INSN_UID (r->label)]);
	  uid_region_num[INSN_UID (r->label)] = i;
	}
    }

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    reachable[uid_region_num[INSN_UID (insn)]] = true;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      r = VEC_index (eh_region, cfun->eh->region_array, i);
      if (r && r->region_number == i && !reachable[i])
	{
	  bool kill_it = true;

	  switch (r->type)
	    {
	    case ERT_THROW:
	      /* Don't remove ERT_THROW regions if their outer region
		 is reachable.  */
	      if (r->outer && reachable[r->outer->region_number])
		kill_it = false;
	      break;

	    case ERT_MUST_NOT_THROW:
	      /* MUST_NOT_THROW regions are implementable solely in the
		 runtime, but their existence continues to affect calls
		 within that region.  Never delete them here.  */
	      kill_it = false;
	      break;

	    case ERT_TRY:
	      {
		/* A TRY region is reachable if any of its CATCH regions
		   is reachable.  */
		struct eh_region *c;
		for (c = r->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
		  if (reachable[c->region_number])
		    {
		      kill_it = false;
		      break;
		    }
		break;
	      }

	    default:
	      break;
	    }

	  if (kill_it)
	    remove_eh_handler (r);
	}
    }

  free (uid_region_num);
  free (reachable);
}
/* Set up EH labels for RTL.  */

void
convert_from_eh_region_ranges (void)
{
  rtx insns = get_insns ();
  int i, n = cfun->eh->last_region_number;

  /* Most of the work is already done at the tree level.  All we need to
     do is collect the rtl labels that correspond to the tree labels we
     allocated earlier.  */
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region && region->tree_label)
	region->label = DECL_RTL_IF_SET (region->tree_label);
    }

  remove_unreachable_regions (insns);
}
static void
add_ehl_entry (rtx label, struct eh_region *region)
{
  struct ehl_map_entry **slot, *entry;

  LABEL_PRESERVE_P (label) = 1;

  entry = GGC_NEW (struct ehl_map_entry);
  entry->label = label;
  entry->region = region;

  slot = (struct ehl_map_entry **)
    htab_find_slot (crtl->eh.exception_handler_label_map, entry, INSERT);

  /* Before landing pad creation, each exception handler has its own
     label.  After landing pad creation, the exception handlers may
     share landing pads.  This is ok, since maybe_remove_eh_handler
     only requires the 1-1 mapping before landing pad creation.  */
  gcc_assert (!*slot || crtl->eh.built_landing_pads);

  *slot = entry;
}
static void
find_exception_handler_labels (void)
{
  int i;

  if (crtl->eh.exception_handler_label_map)
    htab_empty (crtl->eh.exception_handler_label_map);
  else
    {
      /* ??? The expansion factor here (3/2) must be greater than the htab
	 occupancy factor (4/3) to avoid unnecessary resizing.  */
      crtl->eh.exception_handler_label_map
	= htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
			   ehl_hash, ehl_eq, NULL);
    }

  if (cfun->eh->region_tree == NULL)
    return;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx lab;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (! region || region->region_number != i)
	continue;
      if (crtl->eh.built_landing_pads)
	lab = region->landing_pad;
      else
	lab = region->label;

      if (lab)
	add_ehl_entry (lab, region);
    }

  /* For sjlj exceptions, need the return label to remain live until
     after landing pad generation.  */
  if (USING_SJLJ_EXCEPTIONS && ! crtl->eh.built_landing_pads)
    add_ehl_entry (return_label, NULL);
}
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region
	  && region->region_number == i
	  && region->type != ERT_THROW)
	return true;
    }

  return false;
}
/* A subroutine of duplicate_eh_regions.  Search the region tree under O
   for the minimum and maximum region numbers.  Update *MIN and *MAX.  */

static void
duplicate_eh_regions_0 (eh_region o, int *min, int *max)
{
  if (o->aka)
    {
      int i;

      i = bitmap_first_set_bit (o->aka);
      if (i < *min)
	*min = i;
      i = bitmap_last_set_bit (o->aka);
      if (i > *max)
	*max = i;
    }
  if (o->region_number < *min)
    *min = o->region_number;
  if (o->region_number > *max)
    *max = o->region_number;

  if (o->inner)
    {
      o = o->inner;
      duplicate_eh_regions_0 (o, min, max);
      while (o->next_peer)
	{
	  o = o->next_peer;
	  duplicate_eh_regions_0 (o, min, max);
	}
    }
}
/* A subroutine of duplicate_eh_regions.  Copy the region tree under OLD.
   Root it at OUTER, and apply EH_OFFSET to the region number.  Don't worry
   about the other internal pointers just yet, just the tree-like pointers.  */

static eh_region
duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
{
  eh_region ret, n;

  ret = n = GGC_NEW (struct eh_region);

  *n = *old;
  n->outer = outer;
  n->next_peer = NULL;
  if (old->aka)
    {
      unsigned i;
      bitmap_iterator bi;
      n->aka = BITMAP_GGC_ALLOC ();

      EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
	{
	  bitmap_set_bit (n->aka, i + eh_offset);
	  VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
	}
    }

  n->region_number += eh_offset;
  VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);

  if (old->inner)
    {
      old = old->inner;
      n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
      while (old->next_peer)
	{
	  old = old->next_peer;
	  n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
	}
    }

  return ret;
}
/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current
   function and root the tree below OUTER_REGION.  Remap labels using MAP
   callback.  The special case of COPY_REGION of 0 means all regions.  */

int
duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
		      void *data, int copy_region, int outer_region)
{
  eh_region cur, prev_try, outer, *splice;
  int i, min_region, max_region, eh_offset, cfun_last_region_number;
  int num_regions;

#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  /* Find the range of region numbers to be copied.  The interface we
     provide here mandates a single offset to find new number from old,
     which means we must look at the numbers present, instead of the
     count or something else.  */
  if (copy_region > 0)
    {
      min_region = INT_MAX;
      max_region = 0;

      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      duplicate_eh_regions_0 (cur, &min_region, &max_region);
    }
  else
    min_region = 1, max_region = ifun->eh->last_region_number;
  num_regions = max_region - min_region + 1;
  cfun_last_region_number = cfun->eh->last_region_number;
  eh_offset = cfun_last_region_number + 1 - min_region;

  /* If we've not yet created a region array, do so now.  */
  cfun->eh->last_region_number = cfun_last_region_number + num_regions;
  VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
			 cfun->eh->last_region_number + 1);

  /* Locate the spot at which to insert the new tree.  */
  if (outer_region > 0)
    {
      outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
      if (outer)
	splice = &outer->inner;
      else
	splice = &cfun->eh->region_tree;
    }
  else
    {
      outer = NULL;
      splice = &cfun->eh->region_tree;
    }
  while (*splice)
    splice = &(*splice)->next_peer;

  if (!ifun->eh->region_tree)
    {
      if (outer)
	for (i = cfun_last_region_number + 1;
	     i <= cfun->eh->last_region_number; i++)
	  {
	    VEC_replace (eh_region, cfun->eh->region_array, i, outer);
	    if (outer->aka == NULL)
	      outer->aka = BITMAP_GGC_ALLOC ();
	    bitmap_set_bit (outer->aka, i);
	  }
      return eh_offset;
    }

  /* Copy all the regions in the subtree.  */
  if (copy_region > 0)
    {
      cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
      *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
    }
  else
    {
      eh_region n;

      cur = ifun->eh->region_tree;
      *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
      while (cur->next_peer)
	{
	  cur = cur->next_peer;
	  n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
	}
    }

  /* Remap all the labels in the new regions.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    if (cur && cur->tree_label)
      cur->tree_label = map (cur->tree_label, data);

  /* Search for the containing ERT_TRY region to fix up
     the prev_try short-cuts for ERT_CLEANUP regions.  */
  prev_try = NULL;
  if (outer_region > 0)
    for (prev_try =
	   VEC_index (eh_region, cfun->eh->region_array, outer_region);
	 prev_try && prev_try->type != ERT_TRY; prev_try = prev_try->outer)
      if (prev_try->type == ERT_MUST_NOT_THROW
	  || (prev_try->type == ERT_ALLOWED_EXCEPTIONS
	      && !prev_try->u.allowed.type_list))
	{
	  prev_try = NULL;
	  break;
	}

  /* Remap all of the internal catch and cleanup linkages.  Since we
     duplicate entire subtrees, all of the referenced regions will have
     been copied too.  And since we renumbered them as a block, a simple
     bit of arithmetic finds us the index for the replacement region.  */
  for (i = cfun_last_region_number + 1;
       VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
    {
      /* All removed EH that is toplevel in input function is now
	 in outer EH of output function.  */
      if (cur == NULL)
	{
	  gcc_assert (VEC_index
		      (eh_region, ifun->eh->region_array,
		       i - eh_offset) == NULL);
	  if (outer)
	    {
	      VEC_replace (eh_region, cfun->eh->region_array, i, outer);
	      if (outer->aka == NULL)
		outer->aka = BITMAP_GGC_ALLOC ();
	      bitmap_set_bit (outer->aka, i);
	    }
	  continue;
	}
      if (i != cur->region_number)
	continue;

#define REMAP(REG) \
	(REG) = VEC_index (eh_region, cfun->eh->region_array, \
			   (REG)->region_number + eh_offset)

      switch (cur->type)
	{
	case ERT_TRY:
	  if (cur->u.eh_try.eh_catch)
	    REMAP (cur->u.eh_try.eh_catch);
	  if (cur->u.eh_try.last_catch)
	    REMAP (cur->u.eh_try.last_catch);
	  break;

	case ERT_CATCH:
	  if (cur->u.eh_catch.next_catch)
	    REMAP (cur->u.eh_catch.next_catch);
	  if (cur->u.eh_catch.prev_catch)
	    REMAP (cur->u.eh_catch.prev_catch);
	  break;

	case ERT_CLEANUP:
	  if (cur->u.cleanup.prev_try)
	    REMAP (cur->u.cleanup.prev_try);
	  else
	    cur->u.cleanup.prev_try = prev_try;
	  break;

	default:
	  break;
	}

#undef REMAP
    }

#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return eh_offset;
}
/* Return true if REGION_A is outer to REGION_B in IFUN.  */

bool
eh_region_outer_p (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  do
    {
      if (rp_a == rp_b)
	return true;
      rp_b = rp_b->outer;
    }
  while (rp_b);

  return false;
}
/* Return region number of region that is outer to both REGION_A and
   REGION_B in IFUN.  */

int
eh_region_outermost (struct function *ifun, int region_a, int region_b)
{
  struct eh_region *rp_a, *rp_b;
  sbitmap b_outer;

  gcc_assert (ifun->eh->last_region_number > 0);
  gcc_assert (ifun->eh->region_tree);

  rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
  rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
  gcc_assert (rp_a != NULL);
  gcc_assert (rp_b != NULL);

  b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
  sbitmap_zero (b_outer);

  do
    {
      SET_BIT (b_outer, rp_b->region_number);
      rp_b = rp_b->outer;
    }
  while (rp_b);

  do
    {
      if (TEST_BIT (b_outer, rp_a->region_number))
	{
	  sbitmap_free (b_outer);
	  return rp_a->region_number;
	}
      rp_a = rp_a->outer;
    }
  while (rp_a);

  sbitmap_free (b_outer);
  return -1;
}
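
/* Hashtable callbacks for type_to_runtime_map.  Each entry is a
   TREE_LIST whose TREE_PURPOSE is the source type and whose TREE_VALUE
   is the matching runtime object (see add_type_for_runtime below).  */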
static int
t2r_eq (const void *pentry, const void *pdata)
{
  const_tree const entry = (const_tree) pentry;
  const_tree const data = (const_tree) pdata;

  return TREE_PURPOSE (entry) == data;
}

static hashval_t
t2r_hash (const void *pentry)
{
  const_tree const entry = (const_tree) pentry;
  return TREE_HASH (TREE_PURPOSE (entry));
}
static void
add_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), INSERT);
  if (*slot == NULL)
    {
      tree runtime = (*lang_eh_runtime_type) (type);
      *slot = tree_cons (type, runtime, NULL_TREE);
    }
}

static tree
lookup_type_for_runtime (tree type)
{
  tree *slot;

  slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
					    TREE_HASH (type), NO_INSERT);

  /* We should have always inserted the data earlier.  */
  return TREE_VALUE (*slot);
}
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter GTY(())
{
  tree t;
  int filter;
};

/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.  */

static int
ttypes_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *const entry
    = (const struct ttypes_filter *) pentry;
  const_tree const data = (const_tree) pdata;

  return entry->t == data;
}

static hashval_t
ttypes_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  return TREE_HASH (entry->t);
}

/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

static int
ehspec_filter_eq (const void *pentry, const void *pdata)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  const struct ttypes_filter *data = (const struct ttypes_filter *) pdata;

  return type_list_equal (entry->t, data->t);
}

/* Hash function for exception specification lists.  */

static hashval_t
ehspec_filter_hash (const void *pentry)
{
  const struct ttypes_filter *entry = (const struct ttypes_filter *) pentry;
  hashval_t h = 0;
  tree list;

  for (list = entry->t; list ; list = TREE_CHAIN (list))
    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
  return h;
}
/* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ttypes_entry (htab_t ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = (struct ttypes_filter **)
    htab_find_slot_with_hash (ttypes_hash, type, TREE_HASH (type), INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
      *slot = n;

      VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
    }

  return n->filter;
}
/* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used.  */

static int
add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  dummy.t = list;
  slot = (struct ttypes_filter **)
    htab_find_slot (ehspec_hash, &dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&crtl->eh.ehspec_data,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
      else
	VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
    }

  return n->filter;
}
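
/* A worked example (illustrative only): if add_ttypes_entry assigned
   filter 1 to type A and filter 2 to type B, then the specification
   list "throw (A, B)" is encoded into ehspec_data as the bytes
   { 1, 2, 0 } and is referred to by the negative filter value naming
   its first byte, per the -1 based indexing described above.  */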
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

static void
assign_filter_values (void)
{
  int i;
  htab_t ttypes, ehspec;

  crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
  if (targetm.arm_eabi_unwinder)
    VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
  else
    VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");

  ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
  ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *r;

      r = VEC_index (eh_region, cfun->eh->region_array, i);

      /* Mind we don't process a region more than once.  */
      if (!r || r->region_number != i)
	continue;

      switch (r->type)
	{
	case ERT_CATCH:
	  /* Whatever type_list is (NULL or true list), we build a list
	     of filters for the region.  */
	  r->u.eh_catch.filter_list = NULL_TREE;

	  if (r->u.eh_catch.type_list != NULL)
	    {
	      /* Get a filter value for each of the types caught and store
		 them in the region's dedicated list.  */
	      tree tp_node = r->u.eh_catch.type_list;

	      for (;tp_node; tp_node = TREE_CHAIN (tp_node))
		{
		  int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
		  tree flt_node = build_int_cst (NULL_TREE, flt);

		  r->u.eh_catch.filter_list
		    = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
		}
	    }
	  else
	    {
	      /* Get a filter value for the NULL list also since it will need
		 an action record anyway.  */
	      int flt = add_ttypes_entry (ttypes, NULL);
	      tree flt_node = build_int_cst (NULL_TREE, flt);

	      r->u.eh_catch.filter_list
		= tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (ehspec, ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }

  htab_delete (ttypes);
  htab_delete (ehspec);
}
/* Emit SEQ into the basic block just before INSN (which is assumed to
   be the first instruction of some existing BB) and return the newly
   created basic block.  */

static basic_block
emit_to_new_bb_before (rtx seq, rtx insn)
{
  rtx last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
/* Generate the code to actually handle exceptions, which will follow the
   landing pads.  */

static void
build_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      switch (region->type)
	{
	case ERT_TRY:
	  /* ??? Collect the set of all non-overlapping catch handlers
	       all the way up the chain until blocked by a cleanup.  */
	  /* ??? Outer try regions can share landing pads with inner
	     try regions if the types are completely non-overlapping,
	     and there are no intervening cleanups.  */

	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  /* ??? It is mighty inconvenient to call back into the
	     switch statement generation code in expand_end_case.
	     Rapid prototyping sez a sequence of ifs.  */
	  {
	    struct eh_region *c;
	    for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
	      {
		if (c->u.eh_catch.type_list == NULL)
		  emit_jump (c->label);
		else
		  {
		    /* Need for one cmp/jump per type caught.  Each type
		       list entry has a matching entry in the filter list
		       (see assign_filter_values).  */
		    tree tp_node = c->u.eh_catch.type_list;
		    tree flt_node = c->u.eh_catch.filter_list;

		    for (; tp_node; )
		      {
			emit_cmp_and_jump_insns
			  (crtl->eh.filter,
			   GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
			   EQ, NULL_RTX,
			   targetm.eh_return_filter_mode (), 0, c->label);

			tp_node = TREE_CHAIN (tp_node);
			flt_node = TREE_CHAIN (flt_node);
		      }
		  }
	      }
	  }

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);

	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  region->post_landing_pad = gen_label_rtx ();

	  start_sequence ();

	  emit_label (region->post_landing_pad);

	  emit_cmp_and_jump_insns (crtl->eh.filter,
				   GEN_INT (region->u.allowed.filter),
				   EQ, NULL_RTX,
				   targetm.eh_return_filter_mode (), 0, region->label);

	  /* We delay the generation of the _Unwind_Resume until we generate
	     landing pads.  We emit a marker here so as to get good control
	     flow data in the meantime.  */
	  region->resume
	    = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
	  emit_barrier ();

	  seq = get_insns ();
	  end_sequence ();

	  emit_to_new_bb_before (seq, region->label);
	  break;

	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  region->post_landing_pad = region->label;
	  break;

	case ERT_CATCH:
	case ERT_THROW:
	  /* Nothing to do.  */
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
/* Replace RESX patterns with jumps to the next handler if any, or calls to
   _Unwind_Resume otherwise.  */

static void
connect_post_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      struct eh_region *outer;
      rtx seq;
      rtx barrier;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      /* If there is no RESX, or it has been deleted by flow, there's
	 nothing to fix up.  */
      if (! region->resume || INSN_DELETED_P (region->resume))
	continue;

      /* Search for another landing pad in this function.  */
      for (outer = region->outer; outer ; outer = outer->outer)
	if (outer->post_landing_pad)
	  break;

      start_sequence ();

      if (outer)
	{
	  edge e;
	  basic_block src, dest;

	  emit_jump (outer->post_landing_pad);
	  src = BLOCK_FOR_INSN (region->resume);
	  dest = BLOCK_FOR_INSN (outer->post_landing_pad);
	  while (EDGE_COUNT (src->succs) > 0)
	    remove_edge (EDGE_SUCC (src, 0));
	  e = make_edge (src, dest, 0);
	  e->probability = REG_BR_PROB_BASE;
	  e->count = src->count;
	}
      else
	{
	  emit_library_call (unwind_resume_libfunc, LCT_THROW,
			     VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);

	  /* What we just emitted was a throwing libcall, so it got a
	     barrier automatically added after it.  If the last insn in
	     the libcall sequence isn't the barrier, it's because the
	     target emits multiple insns for a call, and there are insns
	     after the actual call insn (which are redundant and would be
	     optimized away).  The barrier is inserted exactly after the
	     call insn, so let's go get that and delete the insns after
	     it, because below we need the barrier to be the last insn in
	     the sequence.  */
	  delete_insns_since (NEXT_INSN (last_call_insn ()));
	}

      seq = get_insns ();
      end_sequence ();
      barrier = emit_insn_before (seq, region->resume);
      /* Avoid duplicate barrier.  */
      gcc_assert (BARRIER_P (barrier));
      delete_insn (barrier);
      delete_insn (region->resume);

      /* ??? From tree-ssa we can wind up with catch regions whose
	 label is not instantiated, but whose resx is present.  Now
	 that we've dealt with the resx, kill the region.  */
      if (region->label == NULL && region->type == ERT_CLEANUP)
	remove_eh_handler (region);
    }
}
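
/* Build one landing pad per region for use with the dwarf2 unwinder.
   Each pad receives the exception pointer and filter value from the
   runtime in EH_RETURN_DATA_REGNO (0) and (1) and copies them into
   the registers the post-landing pad code expects.  */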
static void
dw2_build_landing_pads (void)
{
  int i;

  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      struct eh_region *region;
      rtx seq;
      basic_block bb;
      edge e;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      /* Mind we don't process a region more than once.  */
      if (!region || region->region_number != i)
	continue;

      if (region->type != ERT_CLEANUP
	  && region->type != ERT_TRY
	  && region->type != ERT_ALLOWED_EXCEPTIONS)
	continue;

      start_sequence ();

      region->landing_pad = gen_label_rtx ();
      emit_label (region->landing_pad);

#ifdef HAVE_exception_receiver
      if (HAVE_exception_receiver)
	emit_insn (gen_exception_receiver ());
      else
#endif
#ifdef HAVE_nonlocal_goto_receiver
	if (HAVE_nonlocal_goto_receiver)
	  emit_insn (gen_nonlocal_goto_receiver ());
	else
#endif
	  { /* Nothing */ }

      emit_move_insn (crtl->eh.exc_ptr,
		      gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
      emit_move_insn (crtl->eh.filter,
		      gen_rtx_REG (targetm.eh_return_filter_mode (),
				   EH_RETURN_DATA_REGNO (1)));

      seq = get_insns ();
      end_sequence ();

      bb = emit_to_new_bb_before (seq, region->post_landing_pad);
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
    }
}
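
/* Per-region bookkeeping for the setjmp/longjmp scheme, in which a
   single dispatch point (built below) switches to the post-landing
   pads; filled in by sjlj_find_directly_reachable_regions and
   sjlj_assign_call_site_values.  */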
struct sjlj_lp_info
{
  int directly_reachable;
  int action_index;
  int dispatch_index;
  int call_site_index;
};
static bool
sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
{
  rtx insn;
  bool found_one = false;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      enum reachable_code rc;
      tree type_thrown;
      rtx note;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	continue;

      region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));

      type_thrown = NULL_TREE;
      if (region->type == ERT_THROW)
	{
	  type_thrown = region->u.eh_throw.type;
	  region = region->outer;
	}

      /* Find the first containing region that might handle the exception.
	 That's the landing pad to which we will transfer control.  */
      rc = RNL_NOT_CAUGHT;
      for (; region; region = region->outer)
	{
	  rc = reachable_next_level (region, type_thrown, NULL, false);
	  if (rc != RNL_NOT_CAUGHT)
	    break;
	}
      if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
	{
	  lp_info[region->region_number].directly_reachable = 1;
	  found_one = true;
	}
    }

  return found_one;
}
static void
sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  htab_t ar_hash;
  int i, index;

  /* First task: build the action table.  */

  VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);

  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);

	r->landing_pad = dispatch_label;
	lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
	if (lp_info[i].action_index != -1)
	  crtl->uses_eh_lsda = 1;
      }

  htab_delete (ar_hash);

  /* Next: assign dispatch values.  In dwarf2 terms, this would be the
     landing pad label for the region.  For sjlj though, there is one
     common landing pad from which we dispatch to the post-landing pads.

     A region receives a dispatch index if it is directly reachable
     and requires in-function processing.  Regions that share post-landing
     pads may share dispatch indices.  */
  /* ??? Post-landing pad sharing doesn't actually happen at the moment
     (see build_post_landing_pads) so we don't bother checking for it.  */

  index = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      lp_info[i].dispatch_index = index++;

  /* Finally: assign call-site values.  In dwarf2 terms, this would be
     the region number assigned by convert_to_eh_region_ranges, but
     handles no-action and must-not-throw differently.  */

  call_site_base = 1;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    if (lp_info[i].directly_reachable)
      {
	int action = lp_info[i].action_index;

	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  index = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  index = -1;
	/* Otherwise, look it up in the table.  */
	else
	  index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);

	lp_info[i].call_site_index = index;
      }
}
static void
sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
{
  int last_call_site = -2;
  rtx insn, mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      struct eh_region *region;
      int this_call_site;
      rtx note, before, p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  if (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))))
	    this_call_site = -1;
	  else
	    continue;
	}
      else
	{
	  /* Calls that are known to not throw need not be marked.  */
	  if (INTVAL (XEXP (note, 0)) <= 0)
	    continue;

	  region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
	  this_call_site = lp_info[region->region_number].call_site_index;
	}

      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL_RTX);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, GEN_INT (this_call_site));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
/* Construct the SjLj_Function_Context.  */

static void
sjlj_emit_function_enter (rtx dispatch_label)
{
  rtx fn_begin, fc, mem, seq;
  bool fn_begin_outside_block;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (eh_personality_libfunc);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, eh_personality_libfunc);

  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

#ifdef DONT_USE_BUILTIN_SETJMP
  {
    rtx x;
    x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				 TYPE_MODE (integer_type_node), 1,
				 plus_constant (XEXP (fc, 0),
						sjlj_fc_jbuf_ofs), Pmode);

    emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			     TYPE_MODE (integer_type_node), 0, dispatch_label);
    add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
  }
#else
  expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
			       dispatch_label);
#endif

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR));
  else
    emit_insn_after (seq, fn_begin);
}
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx after)
{
  crtl->eh.sjlj_exit_after = after;
}
static void
sjlj_emit_function_exit (void)
{
  rtx seq, insn;
  edge e;
  edge_iterator ei;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      break;
  if (e)
    {
      /* Figure out whether the place where we are supposed to insert
	 the libcall is inside the last basic block or after it.  In
	 the latter case we need to emit the call on the edge.  */
      gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
      for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
	{
	  if (insn == crtl->eh.sjlj_exit_after)
	    {
	      if (LABEL_P (insn))
		insn = NEXT_INSN (insn);
	      emit_insn_after (seq, insn);
	      return;
	    }
	  if (insn == BB_END (e->src))
	    break;
	}
      insert_insn_on_edge (seq, e);
    }
}
static void
sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
{
  enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
  int i, first_reachable;
  rtx mem, dispatch, seq, fc;
  rtx before;
  basic_block bb;
  edge e;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);
#endif

  /* Load up dispatch index, exc_ptr and filter values from the
     function context.  */
  mem = adjust_address (fc, TYPE_MODE (integer_type_node),
			sjlj_fc_call_site_ofs);
  dispatch = copy_to_reg (mem);

  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  emit_move_insn (crtl->eh.exc_ptr, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  emit_move_insn (crtl->eh.filter, mem);

  /* Jump to one of the directly reachable regions.  */
  /* ??? This really ought to be using a switch statement.  */

  first_reachable = 0;
  for (i = cfun->eh->last_region_number; i > 0; --i)
    {
      if (! lp_info[i].directly_reachable)
	continue;

      if (! first_reachable)
	{
	  first_reachable = i;
	  continue;
	}

      emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
			       EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
			       ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
			       ->post_landing_pad);
    }

  seq = get_insns ();
  end_sequence ();

  before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
	    ->post_landing_pad);

  bb = emit_to_new_bb_before (seq, before);
  e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
  e->count = bb->count;
  e->probability = REG_BR_PROB_BASE;
}
static void
sjlj_build_landing_pads (void)
{
  struct sjlj_lp_info *lp_info;

  lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);

  if (sjlj_find_directly_reachable_regions (lp_info))
    {
      rtx dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_assign_call_site_values (dispatch_label, lp_info);
      sjlj_mark_call_sites (lp_info);

      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, lp_info);
      sjlj_emit_function_exit ();
    }

  free (lp_info);
}
void
finish_eh_generation (void)
{
  basic_block bb;

  /* Nothing to do if no regions created.  */
  if (cfun->eh->region_tree == NULL)
    return;

  /* The object here is to provide find_basic_blocks with detailed
     information (via reachable_handlers) on how exception control
     flows within the function.  In this first pass, we can include
     type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
     regions, and hope that it will be useful in deleting unreachable
     handlers.  Subsequently, we will generate landing pads which will
     connect many of the handlers, and then type information will not
     be effective.  Still, this is a win over previous implementations.  */

  /* These registers are used by the landing pads.  Make sure they
     have been generated.  */
  get_exception_pointer ();
  get_exception_filter ();

  /* Construct the landing pads.  */

  assign_filter_values ();
  build_post_landing_pads ();
  connect_post_landing_pads ();
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();

  crtl->eh.built_landing_pads = 1;

  /* We've totally changed the CFG.  Start over.  */
  find_exception_handler_labels ();
  break_superblocks ();
  if (USING_SJLJ_EXCEPTIONS
      /* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
    commit_edge_insertions ();

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;
      bool eh = false;

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->flags & EDGE_EH)
	    {
	      remove_edge (e);
	      eh = true;
	    }
	  else
	    ei_next (&ei);
	}
      if (eh)
	rtl_make_eh_edge (NULL, bb, BB_END (bb));
    }
}
static hashval_t
ehl_hash (const void *pentry)
{
  const struct ehl_map_entry *const entry
    = (const struct ehl_map_entry *) pentry;

  /* 2^32 * ((sqrt(5) - 1) / 2) */
  const hashval_t scaled_golden_ratio = 0x9e3779b9;
  return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
}

static int
ehl_eq (const void *pentry, const void *pdata)
{
  const struct ehl_map_entry *const entry
    = (const struct ehl_map_entry *) pentry;
  const struct ehl_map_entry *const data
    = (const struct ehl_map_entry *) pdata;

  return entry->label == data->label;
}
/* This section handles removing dead code for flow.  */

/* Remove LABEL from exception_handler_label_map.  */

static void
remove_exception_handler_label (rtx label)
{
  struct ehl_map_entry **slot, tmp;

  /* If exception_handler_label_map was not built yet,
     there is nothing to do.  */
  if (crtl->eh.exception_handler_label_map == NULL)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (crtl->eh.exception_handler_label_map, &tmp, NO_INSERT);
  gcc_assert (slot);

  htab_clear_slot (crtl->eh.exception_handler_label_map, (void **) slot);
}
/* Splice REGION from the region tree etc.  */

static void
remove_eh_handler (struct eh_region *region)
{
  struct eh_region **pp, **pp_start, *p, *outer, *inner;
  rtx lab;

  /* For the benefit of efficiently handling REG_EH_REGION notes,
     replace this region in the region array with its containing
     region.  Note that previous region deletions may result in
     multiple copies of this region in the array, so we have a
     list of alternate numbers by which we are known.  */

  outer = region->outer;
  VEC_replace (eh_region, cfun->eh->region_array, region->region_number, outer);
  if (region->aka)
    {
      unsigned i;
      bitmap_iterator bi;

      EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
	{
	  VEC_replace (eh_region, cfun->eh->region_array, i, outer);
	}
    }

  if (outer)
    {
      if (!outer->aka)
	outer->aka = BITMAP_GGC_ALLOC ();
      if (region->aka)
	bitmap_ior_into (outer->aka, region->aka);
      bitmap_set_bit (outer->aka, region->region_number);
    }

  if (crtl->eh.built_landing_pads)
    lab = region->landing_pad;
  else
    lab = region->label;
  if (lab)
    remove_exception_handler_label (lab);

  if (outer)
    pp_start = &outer->inner;
  else
    pp_start = &cfun->eh->region_tree;
  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
    continue;
  *pp = region->next_peer;

  inner = region->inner;
  if (inner)
    {
      for (p = inner; p->next_peer ; p = p->next_peer)
	p->outer = outer;
      p->outer = outer;

      p->next_peer = *pp_start;
      *pp_start = inner;
    }

  if (region->type == ERT_CATCH)
    {
      struct eh_region *eh_try, *next, *prev;

      for (eh_try = region->next_peer;
	   eh_try->type == ERT_CATCH;
	   eh_try = eh_try->next_peer)
	continue;
      gcc_assert (eh_try->type == ERT_TRY);

      next = region->u.eh_catch.next_catch;
      prev = region->u.eh_catch.prev_catch;

      if (next)
	next->u.eh_catch.prev_catch = prev;
      else
	eh_try->u.eh_try.last_catch = prev;
      if (prev)
	prev->u.eh_catch.next_catch = next;
      else
	{
	  eh_try->u.eh_try.eh_catch = next;
	  if (! next)
	    remove_eh_handler (eh_try);
	}
    }
}
/* LABEL heads a basic block that is about to be deleted.  If this
   label corresponds to an exception region, we may be able to
   delete the region.  */

void
maybe_remove_eh_handler (rtx label)
{
  struct ehl_map_entry **slot, tmp;
  struct eh_region *region;

  /* ??? After generating landing pads, it's not so simple to determine
     if the region data is completely unused.  One must examine the
     landing pad and the post landing pad, and whether an inner try block
     is referencing the catch handlers directly.  */
  if (crtl->eh.built_landing_pads)
    return;

  tmp.label = label;
  slot = (struct ehl_map_entry **)
    htab_find_slot (crtl->eh.exception_handler_label_map, &tmp, NO_INSERT);
  if (! slot)
    return;
  region = (*slot)->region;
  if (! region)
    return;

  /* Flow will want to remove MUST_NOT_THROW regions as unreachable
     because there is no path to the fallback call to terminate.
     But the region continues to affect call-site data until there
     are no more contained calls, which we don't see here.  */
  if (region->type == ERT_MUST_NOT_THROW)
    {
      htab_clear_slot (crtl->eh.exception_handler_label_map, (void **) slot);
      region->label = NULL_RTX;
    }
  else
    remove_eh_handler (region);
}
/* Invokes CALLBACK for every exception handler label.  Only used by old
   loop hackery; should not be used by new code.  */

void
for_each_eh_label (void (*callback) (rtx))
{
  htab_traverse (crtl->eh.exception_handler_label_map, for_each_eh_label_1,
		 (void *) &callback);
}

static int
for_each_eh_label_1 (void **pentry, void *data)
{
  struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
  void (*callback) (rtx) = *(void (**) (rtx)) data;

  (*callback) (entry->label);
  return 1;
}
/* Invoke CALLBACK for every exception region in the current function.  */

void
for_each_eh_region (void (*callback) (struct eh_region *))
{
  int i, n = cfun->eh->last_region_number;
  for (i = 1; i <= n; ++i)
    {
      struct eh_region *region;

      region = VEC_index (eh_region, cfun->eh->region_array, i);
      if (region)
	(*callback) (region);
    }
}
2363 /* This section describes CFG exception edges for flow. */
2365 /* For communicating between calls to reachable_next_level. */
struct reachable_info
{
  tree types_caught;
  tree types_allowed;
  void (*callback) (struct eh_region *, void *);
  void *callback_data;
};
2374 /* A subroutine of reachable_next_level. Return true if TYPE, or a
2375 base class of TYPE, is in HANDLED. */
static int
check_handled (tree handled, tree type)
{
  tree t;

  /* We can check for exact matches without front-end help.  */
  if (! lang_eh_type_covers)
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
	if (TREE_VALUE (t) == type)
	  return 1;
    }
  else
    {
      for (t = handled; t ; t = TREE_CHAIN (t))
	if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
	  return 1;
    }

  return 0;
}
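/* For illustration only: a trivial lang_eh_type_covers hook for a
   language with no type hierarchy, where a handler catches exactly one
   type.  (Hypothetical; the C++ front end instead tests for base
   classes.)  */
#if 0
static int
flat_eh_type_covers (tree a, tree b)
{
  return a == b;	/* No base classes: A catches B only if identical.  */
}
#endif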
2399 /* A subroutine of reachable_next_level. If we are collecting a list
2400 of handlers, add one. After landing pad generation, reference
2401 it instead of the handlers themselves. Further, the handlers are
2402 all wired together, so by referencing one, we've got them all.
2403 Before landing pad generation we reference each handler individually.
2405 LP_REGION contains the landing pad; REGION is the handler. */
static void
add_reachable_handler (struct reachable_info *info,
		       struct eh_region *lp_region, struct eh_region *region)
{
  if (! info)
    return;

  if (crtl->eh.built_landing_pads)
    info->callback (lp_region, info->callback_data);
  else
    info->callback (region, info->callback_data);
}
2420 /* Process one level of exception regions for reachability.
2421 If TYPE_THROWN is non-null, then it is the *exact* type being
2422 propagated. If INFO is non-null, then collect handler labels
2423 and caught/allowed type information between invocations. */
2425 static enum reachable_code
2426 reachable_next_level (struct eh_region *region, tree type_thrown,
		      struct reachable_info *info,
		      bool maybe_resx)
{
  switch (region->type)
    {
    case ERT_CLEANUP:
      /* Before landing-pad generation, we model control flow
	 directly to the individual handlers.  In this way we can
	 see that catch handler types may shadow one another.  */
      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;

    case ERT_TRY:
      {
	struct eh_region *c;
	enum reachable_code ret = RNL_NOT_CAUGHT;

	for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
	  {
	    /* A catch-all handler ends the search.  */
	    if (c->u.eh_catch.type_list == NULL)
	      {
		add_reachable_handler (info, region, c);
		return RNL_CAUGHT;
	      }
	    if (type_thrown)
	      {
		/* If we have at least one type match, end the search.  */
		tree tp_node = c->u.eh_catch.type_list;

		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (type == type_thrown
			|| (lang_eh_type_covers
			    && (*lang_eh_type_covers) (type, type_thrown)))
		      {
			add_reachable_handler (info, region, c);
			return RNL_CAUGHT;
		      }
		  }

		/* If we have definitive information of a match failure,
		   the catch won't trigger.  */
		if (lang_eh_type_covers)
		  return RNL_NOT_CAUGHT;
	      }
2477 /* At this point, we either don't know what type is thrown or
2478 don't have front-end assistance to help deciding if it is
2479 covered by one of the types in the list for this region.
2481 We'd then like to add this region to the list of reachable
2482 handlers since it is indeed potentially reachable based on the
2483 information we have.
	       In fact, this handler is certainly not reachable if all
	       the types it matches have already been caught; it is only
	       potentially reachable if at least one of the types it
	       catches has not been previously caught.  */
	    if (! info)
	      ret = RNL_MAYBE_CAUGHT;
	    else
	      {
		tree tp_node = c->u.eh_catch.type_list;
		bool maybe_reachable = false;

		/* Compute the potential reachability of this handler and
		   update the list of types caught at the same time.  */
		for (; tp_node; tp_node = TREE_CHAIN (tp_node))
		  {
		    tree type = TREE_VALUE (tp_node);

		    if (! check_handled (info->types_caught, type))
		      {
			info->types_caught
			  = tree_cons (NULL, type, info->types_caught);

			maybe_reachable = true;
		      }
		  }

		if (maybe_reachable)
		  {
		    add_reachable_handler (info, region, c);

		    /* ??? If the catch type is a base class of every allowed
		       type, then we know we can stop the search.  */
		    ret = RNL_MAYBE_CAUGHT;
		  }
	      }
	  }

	return ret;
      }
    case ERT_ALLOWED_EXCEPTIONS:
      /* An empty list of types definitely ends the search.  */
      if (region->u.allowed.type_list == NULL_TREE)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}

      /* Collect a list of lists of allowed types for use in detecting
	 when a catch may be transformed into a catch-all.  */
      if (info)
	info->types_allowed = tree_cons (NULL_TREE,
					 region->u.allowed.type_list,
					 info->types_allowed);

      /* If we have definitive information about the type hierarchy,
	 then we can tell if the thrown type will pass through the
	 filter.  */
      if (type_thrown && lang_eh_type_covers)
	{
	  if (check_handled (region->u.allowed.type_list, type_thrown))
	    return RNL_NOT_CAUGHT;
	  else
	    {
	      add_reachable_handler (info, region, region);
	      return RNL_CAUGHT;
	    }
	}

      add_reachable_handler (info, region, region);
      return RNL_MAYBE_CAUGHT;
    case ERT_CATCH:
      /* Catch regions are handled by their controlling try region.  */
      return RNL_NOT_CAUGHT;
    case ERT_MUST_NOT_THROW:
      /* Here we end our search, since no exceptions may propagate.

	 Local landing pads of ERT_MUST_NOT_THROW instructions are reachable
	 only via locally handled RESX instructions.

	 When we inline a function call, we can bring in new handlers.  To
	 avoid ERT_MUST_NOT_THROW landing pads being deleted as unreachable,
	 assume that such handlers exist for any inlinable call until
	 inlining decisions are fixed.  */
      if (maybe_resx)
	{
	  add_reachable_handler (info, region, region);
	  return RNL_CAUGHT;
	}
      else
	return RNL_BLOCKED;

    case ERT_THROW:
    case ERT_UNKNOWN:
      /* Shouldn't see these here.  */
      gcc_unreachable ();
    }

  gcc_unreachable ();
}
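/* For illustration, the catch shadowing this models (C++):

     try { f (); }
     catch (Derived &) ...	reachable
     catch (Base &) ...		reachable
     catch (Derived &) ...	dead; already covered above

   With front-end help (lang_eh_type_covers), check_handled sees that
   every type the third handler matches was caught by an earlier
   handler, so it is never added to the reachable set.  */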
2591 /* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
void
foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
			   void (*callback) (struct eh_region *, void *),
			   void *callback_data)
{
  struct reachable_info info;
  struct eh_region *region;
  tree type_thrown;

  memset (&info, 0, sizeof (info));
  info.callback = callback;
  info.callback_data = callback_data;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);
  if (!region)
    return;

  type_thrown = NULL_TREE;
  if (is_resx)
    {
      /* A RESX leaves a region instead of entering it.  Thus the
	 region itself may have been deleted out from under us.  */
      if (region == NULL)
	return;
      region = region->outer;
    }
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.eh_throw.type;
      region = region->outer;
    }

  while (region)
    {
      if (reachable_next_level (region, type_thrown, &info,
				inlinable_call || is_resx) >= RNL_CAUGHT)
	break;
      /* If we have processed one cleanup, there is no point in
	 processing any more of them.  Each cleanup will have an edge
	 to the next outer cleanup region, so the flow graph will be
	 accurate.  */
      if (region->type == ERT_CLEANUP)
	region = region->u.cleanup.prev_try;
      else
	region = region->outer;
    }
}
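/* For illustration only, counting the handlers reachable from a region
   with the iterator above; names are hypothetical.  */
#if 0
static void
count_handler (struct eh_region *region ATTRIBUTE_UNUSED, void *data)
{
  ++*(int *) data;	/* One more reachable handler.  */
}

/* int n = 0;
   foreach_reachable_handler (region_number, false, false,
			      count_handler, &n);  */
#endif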
2641 /* Retrieve a list of labels of exception handlers which can be
2642 reached by a given insn. */
static void
arh_to_landing_pad (struct eh_region *region, void *data)
{
  rtx *p_handlers = (rtx *) data;
  if (! *p_handlers)
    *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
}

static void
arh_to_label (struct eh_region *region, void *data)
{
  rtx *p_handlers = (rtx *) data;
  *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
}

rtx
reachable_handlers (rtx insn)
{
  bool is_resx = false;
  rtx handlers = NULL;
  int region_number;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX)
    {
      region_number = XINT (PATTERN (insn), 0);
      is_resx = true;
    }
  else
    {
      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
      if (!note || INTVAL (XEXP (note, 0)) <= 0)
	return NULL;
      region_number = INTVAL (XEXP (note, 0));
    }

  foreach_reachable_handler (region_number, is_resx, false,
			     (crtl->eh.built_landing_pads
			      ? arh_to_landing_pad
			      : arh_to_label),
			     &handlers);

  return handlers;
}
2689 /* Determine if the given INSN can throw an exception that is caught
2690 within the function. */
bool
can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
{
  struct eh_region *region;
  tree type_thrown;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);
  if (!region)
    return false;

  type_thrown = NULL_TREE;
  if (is_resx)
    region = region->outer;
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.eh_throw.type;
      region = region->outer;
    }

  /* If this exception is ignored by each and every containing region,
     then control passes straight out.  The runtime may handle some
     regions, which also do not require processing internally.  */
  for (; region; region = region->outer)
    {
      enum reachable_code how = reachable_next_level (region, type_thrown, 0,
						      inlinable_call || is_resx);
      if (how == RNL_BLOCKED)
	return false;
      if (how != RNL_NOT_CAUGHT)
	return true;
    }

  return false;
}
bool
can_throw_internal (const_rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  /* Every insn that might throw has an EH_REGION note.  */
  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note || INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
}
2752 /* Determine if the given INSN can throw an exception that is
2753 visible outside the function. */
bool
can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
{
  struct eh_region *region;
  tree type_thrown;

  region = VEC_index (eh_region, cfun->eh->region_array, region_number);
  if (!region)
    return true;

  type_thrown = NULL_TREE;
  if (is_resx)
    region = region->outer;
  else if (region->type == ERT_THROW)
    {
      type_thrown = region->u.eh_throw.type;
      region = region->outer;
    }

  /* If the exception is caught or blocked by any containing region,
     then it is not seen by any calling function.  */
  for (; region ; region = region->outer)
    if (reachable_next_level (region, type_thrown, NULL,
			      inlinable_call || is_resx) >= RNL_CAUGHT)
      return false;

  return true;
}
bool
can_throw_external (const_rtx insn)
{
  rtx note;

  if (! INSN_P (insn))
    return false;

  if (JUMP_P (insn)
      && GET_CODE (PATTERN (insn)) == RESX
      && XINT (PATTERN (insn), 0) > 0)
    return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);

  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
  if (!note)
    {
      /* Calls (and trapping insns) without notes are outside any
	 exception handling region in this function.  We have to
	 assume it might throw.  Given that the front end and middle
	 ends mark known NOTHROW functions, this isn't so wildly
	 inaccurate.  */
      return (CALL_P (insn)
	      || (flag_non_call_exceptions
		  && may_trap_p (PATTERN (insn))));
    }
  if (INTVAL (XEXP (note, 0)) <= 0)
    return false;

  return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
}
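/* For illustration of the internal/external split: given

     try { may_throw (); } catch (...) { }

   the call insn satisfies can_throw_internal (the catch-all is
   reachable within the function) but not can_throw_external; with no
   enclosing handler at all, the reverse holds.  */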
2819 /* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
static unsigned int
set_nothrow_function_flags (void)
{
  rtx insn;

  crtl->nothrow = 1;

  /* Assume crtl->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  crtl->all_throwers_are_sibcalls = 1;

  /* If we don't know that this implementation of the function will
     actually be used, then we must not set TREE_NOTHROW, since
     callers must not assume that this function does not throw.  */
  if (TREE_NOTHROW (current_function_decl))
    return 0;

  if (! flag_exceptions)
    return 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
	crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }

  for (insn = crtl->epilogue_delay_list; insn;
       insn = XEXP (insn, 1))
    if (can_throw_external (insn))
      {
	crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }

  if (crtl->nothrow
      && (cgraph_function_body_availability (cgraph_node (current_function_decl))
	  >= AVAIL_AVAILABLE))
    TREE_NOTHROW (current_function_decl) = 1;
  return 0;
}
struct rtl_opt_pass pass_set_nothrow_function_flags =
{
 {
  RTL_PASS,
  NULL,                                 /* name */
  NULL,                                 /* gate */
  set_nothrow_function_flags,           /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0,                                    /* todo_flags_finish */
 }
};
2896 /* Various hooks for unwind library. */
2898 /* Do any necessary initialization to access arbitrary stack frames.
2899 On the SPARC, this means flushing the register windows. */
void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  crtl->saves_all_registers = 1;

#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
rtx
expand_builtin_eh_return_data_regno (tree exp)
{
  tree which = CALL_EXPR_ARG (exp, 0);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_low_cst (which, 1);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
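/* For illustration, personality routines use the corresponding builtin
   to locate the unwinder's data registers, e.g.

     _Unwind_SetGR (context, __builtin_eh_return_data_regno (0),
		    (_Unwind_Ptr) ue_header);

   which the code above folds to the target's first EH data register
   number.  */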
2939 /* Given a value extracted from the return address register or stack slot,
2940 return the actual address encoded in that value. */
rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);

  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2970 /* Given an actual address in addr_tree, do any necessary encoding
2971 and return the value to be stored in the return address register or
2972 stack slot so the epilogue will return to that address. */
rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);

  addr = convert_memory_address (Pmode, addr);

#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2989 /* Set up the epilogue with the magic bits we'll need to return to the
2990 exception handler. */
void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_stackadj)
    crtl->eh.ehr_stackadj = copy_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_stackadj)
    emit_move_insn (crtl->eh.ehr_stackadj, tmp);
#endif

  tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  if (!crtl->eh.ehr_handler)
    crtl->eh.ehr_handler = copy_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_handler)
    emit_move_insn (crtl->eh.ehr_handler, tmp);

  if (!crtl->eh.ehr_label)
    crtl->eh.ehr_label = gen_label_rtx ();
  emit_jump (crtl->eh.ehr_label);
}
void
expand_eh_return (void)
{
  rtx around_label;

  if (! crtl->eh.ehr_label)
    return;

  crtl->calls_eh_return = 1;

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (crtl->eh.ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
#endif

#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (crtl->eh.ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
3061 /* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
3062 POINTERS_EXTEND_UNSIGNED and return it. */
rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do
     this also.  */
  extend = 1;
#endif

  return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
}
3081 /* In the following functions, we represent entries in the action table
3082 as 1-based indices. Special cases are:
3084 0: null action record, non-null landing pad; implies cleanups
3085 -1: null action record, null landing pad; implies no action
3086 -2: no call-site entry; implies must_not_throw
3087 -3: we have yet to process outer regions
3089 Further, no special cases apply to the "next" field of the record.
3090 For next, 0 means end of list. */
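/* For illustration, a try with two catches (filters 1 and 2) nested in
   a cleanup would be chained roughly as:

     i1 = add_action_record (h, 0, 0);	   cleanup, end of chain
     i2 = add_action_record (h, 2, i1);	   second catch
     i3 = add_action_record (h, 1, i2);	   first catch; the call-site
					   entry records i3

   Identical chains requested later come back out of the hash table
   with the same offsets.  */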
struct action_record
{
  int offset;
  int filter;
  int next;
};

static int
action_record_eq (const void *pentry, const void *pdata)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  const struct action_record *data = (const struct action_record *) pdata;
  return entry->filter == data->filter && entry->next == data->next;
}

static hashval_t
action_record_hash (const void *pentry)
{
  const struct action_record *entry = (const struct action_record *) pentry;
  return entry->next * 1009 + entry->filter;
}

static int
add_action_record (htab_t ar_hash, int filter, int next)
{
  struct action_record **slot, *new_ar, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = (struct action_record **) htab_find_slot (ar_hash, &tmp, INSERT);

  if ((new_ar = *slot) == NULL)
    {
      new_ar = XNEW (struct action_record);
      new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
      new_ar->filter = filter;
      new_ar->next = next;
      *slot = new_ar;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&crtl->eh.action_record_data, filter);
      if (next)
	next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
      push_sleb128 (&crtl->eh.action_record_data, next);
    }

  return new_ar->offset;
}
static int
collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
{
  struct eh_region *c;
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      /* A cleanup adds a zero filter to the beginning of the chain, but
	 there are special cases to look out for.  If there are *only*
	 cleanups along a path, then it compresses to a zero action.
	 Further, if there are multiple cleanups along a path, we only
	 need to represent one of them, as that is enough to trigger
	 entry to the landing pad at runtime.  */
      next = collect_one_action_chain (ar_hash, region->outer);
      if (next <= 0)
	return 0;
      for (c = region->outer; c ; c = c->outer)
	if (c->type == ERT_CLEANUP)
	  return next;
      return add_action_record (ar_hash, 0, next);

    case ERT_TRY:
      /* Process the associated catch regions in reverse order.
	 If there's a catch-all handler, then we don't need to
	 search outer regions.  Use a magic -3 value to record
	 that we haven't done the outer search.  */
      next = -3;
      for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
	{
	  if (c->u.eh_catch.type_list == NULL)
	    {
	      /* Retrieve the filter from the head of the filter list
		 where we have stored it (see assign_filter_values).  */
	      int filter
		= TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));

	      next = add_action_record (ar_hash, filter, 0);
	    }
	  else
	    {
	      /* Once the outer search is done, trigger an action record for
		 each filter we have.  */
	      tree flt_node;

	      if (next == -3)
		{
		  next = collect_one_action_chain (ar_hash, region->outer);

		  /* If there is no next action, terminate the chain.  */
		  if (next == -1)
		    next = 0;
		  /* If all outer actions are cleanups or must_not_throw,
		     we'll have no action record for it, since we had wanted
		     to encode these states in the call-site record directly.
		     Add a cleanup action to the chain to catch these.  */
		  else if (next <= 0)
		    next = add_action_record (ar_hash, 0, 0);
		}

	      flt_node = c->u.eh_catch.filter_list;
	      for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		{
		  int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		  next = add_action_record (ar_hash, filter, next);
		}
	    }
	}
      return next;

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we had wanted
	 to encode these states in the call-site record directly.
	 Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;

    case ERT_CATCH:
    case ERT_THROW:
      /* CATCH regions are handled in TRY above.  THROW regions are
	 for optimization information only and produce no output.  */
      return collect_one_action_chain (ar_hash, region->outer);

    default:
      gcc_unreachable ();
    }
}
static int
add_call_site (rtx landing_pad, int action)
{
  call_site_record record;

  record = GGC_NEW (struct call_site_record);
  record->landing_pad = landing_pad;
  record->action = action;

  VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);

  return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
}
3270 /* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
3271 The new note numbers will not refer to region numbers, but
3272 instead to call site entries. */
static unsigned int
convert_to_eh_region_ranges (void)
{
  rtx insn, iter, note;
  htab_t ar_hash;
  int last_action = -3;
  rtx last_action_insn = NULL_RTX;
  rtx last_landing_pad = NULL_RTX;
  rtx first_no_action_insn = NULL_RTX;
  int call_site = 0;

  if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
    return 0;

  VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");

  ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
3292 for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	struct eh_region *region;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
3300 if (NONJUMP_INSN_P (insn)
3301 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3302 insn = XVECEXP (PATTERN (insn), 0, 0);
	note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
	if (!note)
	  {
	    if (! (CALL_P (insn)
		   || (flag_non_call_exceptions
		       && may_trap_p (PATTERN (insn)))))
	      continue;
	    this_action = -1;
	    region = NULL;
	  }
	else
	  {
	    if (INTVAL (XEXP (note, 0)) <= 0)
	      this_action = -1;
	    else
	      {
		region = VEC_index (eh_region, cfun->eh->region_array,
				    INTVAL (XEXP (note, 0)));
		this_action = collect_one_action_chain (ar_hash, region);
	      }
	  }
	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  crtl->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }
	/* Cleanups and handlers may share action chains but not
	   landing pads.  Collect the landing pad for this region.  */
	if (this_action >= 0)
	  {
	    struct eh_region *o;
	    for (o = region; ! o->landing_pad ; o = o->outer)
	      continue;
	    this_landing_pad = o->landing_pad;
	  }
	else
	  this_landing_pad = NULL_RTX;
3347 /* Differing actions or landing pads implies a change in call-site
3348 info, which implies some EH_REGION note should be emitted. */
3349 if (last_action != this_action
3350 || last_landing_pad != this_landing_pad)
	  {
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL_RTX;
		  }

		note = emit_note_after (NOTE_INSN_EH_REGION_END,
					last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }
	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	if (this_action >= 0)
	  last_action_insn = iter;
      }

  if (last_action >= -1 && ! first_no_action_insn)
    {
3390 note = emit_note_after (NOTE_INSN_EH_REGION_END, last_action_insn);
3391 NOTE_EH_HANDLER (note) = call_site;
    }

  htab_delete (ar_hash);

  return 0;
}
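/* For illustration: after this pass runs, each NOTE_INSN_EH_REGION_BEG/
   NOTE_INSN_EH_REGION_END pair brackets a maximal run of insns sharing
   one (action, landing pad) pair, and NOTE_EH_HANDLER carries the
   call-site index rather than the original region number.  */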
struct rtl_opt_pass pass_convert_to_eh_region_ranges =
{
 {
  RTL_PASS,
  "eh_ranges",                          /* name */
  NULL,                                 /* gate */
  convert_to_eh_region_ranges,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
static void
push_uleb128 (varray_type *data_area, unsigned int value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (value);
}

static void
push_sleb128 (varray_type *data_area, int value)
{
  unsigned char byte;
  int more;

  do
    {
      byte = value & 0x7f;
      value >>= 7;
      more = ! ((value == 0 && (byte & 0x40) == 0)
		|| (value == -1 && (byte & 0x40) != 0));
      if (more)
	byte |= 0x80;
      VARRAY_PUSH_UCHAR (*data_area, byte);
    }
  while (more);
}
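/* For illustration, the byte sequences the two routines above emit:

     push_uleb128 (&v, 624485)	appends  0xe5 0x8e 0x26
     push_sleb128 (&v, -128)	appends  0x80 0x7f

   Seven value bits per byte, least significant group first; the high
   bit marks continuation, and the signed form stops once only sign
   bits remain.  */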
3452 #ifndef HAVE_AS_LEB128
static int
dw2_size_of_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
      size += size_of_uleb128 (cs->action);
    }

  return size;
}

static int
sjlj_size_of_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
#endif
static void
dw2_output_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
#ifdef HAVE_AS_LEB128
      dw2_asm_output_delta_uleb128 (reg_start_lab,
				    current_function_func_begin_label,
				    "region %d start", i);
      dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
				    "length");
      if (cs->landing_pad)
	dw2_asm_output_delta_uleb128 (landing_pad_lab,
				      current_function_func_begin_label,
				      "landing pad");
      else
	dw2_asm_output_data_uleb128 (0, "landing pad");
#else
      dw2_asm_output_delta (4, reg_start_lab,
			    current_function_func_begin_label,
			    "region %d start", i);
      dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
      if (cs->landing_pad)
	dw2_asm_output_delta (4, landing_pad_lab,
			      current_function_func_begin_label,
			      "landing pad");
      else
	dw2_asm_output_data (4, 0, "landing pad");
#endif
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
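/* For illustration, one call-site entry as emitted above with a
   leb128-capable assembler (label numbers hypothetical):

     .uleb128 .LEHB7-func_begin		region 7 start
     .uleb128 .LEHE7-.LEHB7		length
     .uleb128 .L42-func_begin		landing pad (0 if none)
     .uleb128 0x3			action (1-based record offset)  */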
static void
sjlj_output_call_site_table (void)
{
  int n = VEC_length (call_site_record, crtl->eh.call_site_record);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);

      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
				   "region %d landing pad", i);
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
3559 #ifndef TARGET_UNWIND_INFO
3560 /* Switch to the section that should be used for exception tables. */
static void
switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
  section *s;

  if (exception_section)
    s = exception_section;
  else
    {
      /* Compute the section and cache it into exception_section,
	 unless it depends on the function name.  */
      if (targetm.have_named_sections)
	{
	  int flags;

	  if (EH_TABLES_CAN_BE_READ_ONLY)
	    {
	      int tt_format =
		ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
	      flags = ((! flag_pic
			|| ((tt_format & 0x70) != DW_EH_PE_absptr
			    && (tt_format & 0x70) != DW_EH_PE_aligned))
		       ? 0 : SECTION_WRITE);
	    }
	  else
	    flags = SECTION_WRITE;

#ifdef HAVE_LD_EH_GC_SECTIONS
	  if (flag_function_sections)
	    {
	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
	      sprintf (section_name, ".gcc_except_table.%s", fnname);
	      s = get_section (section_name, flags, NULL);
	      free (section_name);
	    }
	  else
#endif
	    exception_section
	      = s = get_section (".gcc_except_table", flags, NULL);
	}
      else
	exception_section
	  = s = flag_pic ? data_section : readonly_data_section;
    }

  switch_to_section (s);
}
#endif
/* Output a reference from an exception table to the type_info object TYPE.
   TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used
   for the value.  */

static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool is_public = true;

  if (type == NULL_TREE)
    value = const0_rtx;
  else
    {
      struct varpool_node *node;

      type = lookup_type_for_runtime (type);
      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
	 paths below go through assemble_integer, which would take
	 care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (TREE_CODE (type) == VAR_DECL)
	    {
	      node = varpool_node (type);
	      if (node)
		varpool_mark_needed_node (node);
	      is_public = TREE_PUBLIC (type);
	    }
	}
      else
	gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
		      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}
void
output_function_exception_table (const char * ARG_UNUSED (fnname))
{
  int tt_format, cs_format, lp_format, i, n;
#ifdef HAVE_AS_LEB128
  char ttype_label[32];
  char cs_after_size_label[32];
  char cs_end_label[32];
#else
  int call_site_len;
#endif
  int have_tt_data;
  int tt_format_size = 0;
  /* Not all functions need anything.  */
  if (! crtl->uses_eh_lsda)
    return;
3679 if (eh_personality_libfunc)
3680 assemble_external_libcall (eh_personality_libfunc);
3682 #ifdef TARGET_UNWIND_INFO
3683 /* TODO: Move this into target file. */
3684 fputs ("\t.personality\t", asm_out_file);
3685 output_addr_const (asm_out_file, eh_personality_libfunc);
3686 fputs ("\n\t.handlerdata\n", asm_out_file);
3687 /* Note that varasm still thinks we're in the function's code section.
3688 The ".endp" directive that will immediately follow will take us back. */
#else
  switch_to_exception_section (fnname);
#endif
3693 /* If the target wants a label to begin the table, emit it here. */
3694 targetm.asm_out.except_table_label (asm_out_file);
3696 have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
3697 || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
  /* Indicate the format of the @TType entries.  */
  if (! have_tt_data)
    tt_format = DW_EH_PE_omit;
  else
    {
      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
      ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
				   current_function_funcdef_no);
#endif
      tt_format_size = size_of_encoded_value (tt_format);

      assemble_align (tt_format_size * BITS_PER_UNIT);
    }
3714 targetm.asm_out.internal_label (asm_out_file, "LLSDA",
3715 current_function_funcdef_no);
3717 /* The LSDA header. */
3719 /* Indicate the format of the landing pad start pointer. An omitted
3720 field implies @LPStart == @Start. */
3721 /* Currently we always put @LPStart == @Start. This field would
3722 be most useful in moving the landing pads completely out of
3723 line to another section, but it could also be used to minimize
3724 the size of uleb128 landing pad offsets. */
3725 lp_format = DW_EH_PE_omit;
3726 dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3727 eh_data_format_name (lp_format));
3729 /* @LPStart pointer would go here. */
3731 dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3732 eh_data_format_name (tt_format));
#ifndef HAVE_AS_LEB128
  if (USING_SJLJ_EXCEPTIONS)
    call_site_len = sjlj_size_of_call_site_table ();
  else
    call_site_len = dw2_size_of_call_site_table ();
#endif
  /* A pc-relative 4-byte displacement to the @TType data.  */
  if (have_tt_data)
    {
#ifdef HAVE_AS_LEB128
3745 char ttype_after_disp_label[32];
3746 ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
3747 current_function_funcdef_no);
3748 dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3749 "@TType base offset");
3750 ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
#else
      /* Ugh.  Alignment complicates the computation.  */
      unsigned int before_disp, after_disp, last_disp, disp;

      before_disp = 1 + 1;
      after_disp = (1 + size_of_uleb128 (call_site_len)
		    + call_site_len
		    + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
		    + (VEC_length (tree, crtl->eh.ttype_data)
		       * tt_format_size));

      disp = after_disp;
      do
	{
	  unsigned int disp_size, pad;

	  last_disp = disp;
	  disp_size = size_of_uleb128 (disp);
	  pad = before_disp + disp_size + after_disp;
	  if (pad % tt_format_size)
	    pad = tt_format_size - (pad % tt_format_size);
	  else
	    pad = 0;
	  disp = after_disp + pad;
	}
      while (disp != last_disp);

      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
#endif
    }
3782 /* Indicate the format of the call-site offsets. */
#ifdef HAVE_AS_LEB128
  cs_format = DW_EH_PE_uleb128;
#else
  cs_format = DW_EH_PE_udata4;
#endif
  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
		       eh_data_format_name (cs_format));
3791 #ifdef HAVE_AS_LEB128
3792 ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
3793 current_function_funcdef_no);
3794 ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
3795 current_function_funcdef_no);
3796 dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3797 "Call-site table length");
3798 ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3799 if (USING_SJLJ_EXCEPTIONS)
3800 sjlj_output_call_site_table ();
3802 dw2_output_call_site_table ();
3803 ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
  if (USING_SJLJ_EXCEPTIONS)
    sjlj_output_call_site_table ();
  else
    dw2_output_call_site_table ();
#endif
3812 /* ??? Decode and interpret the data for flag_debug_asm. */
3813 n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
3814 for (i = 0; i < n; ++i)
3815 dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
3816 (i ? NULL : "Action record table"));
  if (have_tt_data)
    {
      assemble_align (tt_format_size * BITS_PER_UNIT);
      i = VEC_length (tree, crtl->eh.ttype_data);
      while (i-- > 0)
	{
	  tree type = VEC_index (tree, crtl->eh.ttype_data, i);
	  output_ttype (type, tt_format, tt_format_size);
	}
    }
#ifdef HAVE_AS_LEB128
  if (have_tt_data)
    ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
#endif
3833 /* ??? Decode and interpret the data for flag_debug_asm. */
3834 n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
  for (i = 0; i < n; ++i)
    {
      if (targetm.arm_eabi_unwinder)
	{
	  tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
	  output_ttype (type, tt_format, tt_format_size);
	}
      else
	dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
			     (i ? NULL : "Exception specification table"));
    }

  switch_to_section (current_function_section ());
}
void
set_eh_throw_stmt_table (struct function *fun, struct htab *table)
{
  fun->eh->throw_stmt_table = table;
}

htab_t
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}
3862 /* Dump EH information to OUT. */
void
dump_eh_tree (FILE * out, struct function *fun)
{
3867 struct eh_region *i;
  static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
					   "allowed_exceptions", "must_not_throw",
					   "throw" };
3874 i = fun->eh->region_tree;
3878 fprintf (out, "Eh tree:\n");
3881 fprintf (out, " %*s %i %s", depth * 2, "",
3882 i->region_number, type_name[(int) i->type]);
3885 fprintf (out, " tree_label:");
3886 print_generic_expr (out, i->tree_label, 0);
3891 if (i->u.cleanup.prev_try)
3892 fprintf (out, " prev try:%i",
3893 i->u.cleanup.prev_try->region_number);
3898 struct eh_region *c;
3899 fprintf (out, " catch regions:");
3900 for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
3901 fprintf (out, " %i", c->region_number);
3906 if (i->u.eh_catch.prev_catch)
3907 fprintf (out, " prev: %i",
3908 i->u.eh_catch.prev_catch->region_number);
3909 if (i->u.eh_catch.next_catch)
3910 fprintf (out, " next %i",
3911 i->u.eh_catch.next_catch->region_number);
3914 case ERT_ALLOWED_EXCEPTIONS:
3915 fprintf (out, "filter :%i types:", i->u.allowed.filter);
3916 print_generic_expr (out, i->u.allowed.type_list, 0);
3920 fprintf (out, "type:");
3921 print_generic_expr (out, i->u.eh_throw.type, 0);
3924 case ERT_MUST_NOT_THROW:
3932 fprintf (out, " also known as:");
3933 dump_bitmap (out, i->aka);
3936 fprintf (out, "\n");
3937 /* If there are sub-regions, process them. */
3939 i = i->inner, depth++;
3940 /* If there are peers, process them. */
3941 else if (i->next_peer)
3943 /* Otherwise, step back up the tree to the next peer. */
3953 while (i->next_peer == NULL);
/* Verify some basic invariants on EH datastructures.  Could be extended to
   catch more.  */

void
verify_eh_tree (struct function *fun)
{
3964 struct eh_region *i, *outer = NULL;
3971 if (!fun->eh->region_tree)
3973 for (j = fun->eh->last_region_number; j > 0; --j)
3974 if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
3976 if (i->region_number == j)
3978 if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
3980 error ("region_array is corrupted for region %i",
3985 i = fun->eh->region_tree;
3989 if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
3991 error ("region_array is corrupted for region %i", i->region_number);
3994 if (i->outer != outer)
3996 error ("outer block of region %i is wrong", i->region_number);
3999 if (i->may_contain_throw && outer && !outer->may_contain_throw)
4002 ("region %i may contain throw and is contained in region that may not",
4008 error ("negative nesting depth of region %i", i->region_number);
4012 /* If there are sub-regions, process them. */
4014 outer = i, i = i->inner, depth++;
4015 /* If there are peers, process them. */
4016 else if (i->next_peer)
4018 /* Otherwise, step back up the tree to the next peer. */
4029 error ("tree list ends on depth %i", depth + 1);
4032 if (count != nvisited)
4034 error ("array does not match the region tree");
4039 dump_eh_tree (stderr, fun);
4040 internal_error ("verify_eh_tree failed");
4046 while (i->next_peer == NULL);
4052 /* Initialize unwind_resume_libfunc. */
void
default_init_unwind_resume_libfunc (void)
{
  /* The default C++ routines aren't actually C++ specific, so use them.  */
  unwind_resume_libfunc =
    init_one_libfunc (USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
					    : "_Unwind_Resume");
}
static bool
gate_handle_eh (void)
{
  return doing_eh (0);
}

/* Complete generation of exception handling code.  */
static unsigned int
rest_of_handle_eh (void)
{
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  finish_eh_generation ();
  cleanup_cfg (CLEANUP_NO_INSN_DEL);
  return 0;
}
struct rtl_opt_pass pass_rtl_eh =
{
 {
  RTL_PASS,
  "rtl eh",                             /* name */
  gate_handle_eh,                       /* gate */
  rest_of_handle_eh,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_JUMP,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func                        /* todo_flags_finish */
 }
};
4099 #include "gt-except.h"