/* Implements exception handling.
Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
Free Software Foundation, Inc.
Contributed by Mike Stump <mrs@cygnus.com>.
<http://www.gnu.org/licenses/>. */
-/* An exception is an event that can be signaled from within a
- function. This event can then be "caught" or "trapped" by the
- callers of this function. This potentially allows program flow to
- be transferred to any arbitrary code associated with a function call
- several levels up the stack.
-
- The intended use for this mechanism is for signaling "exceptional
- events" in an out-of-band fashion, hence its name. The C++ language
- (and many other OO-styled or functional languages) practically
- requires such a mechanism, as otherwise it becomes very difficult
- or even impossible to signal failure conditions in complex
- situations. The traditional C++ example is when an error occurs in
- the process of constructing an object; without such a mechanism, it
- is impossible to signal that the error occurs without adding global
- state variables and error checks around every object construction.
-
- The act of causing this event to occur is referred to as "throwing
- an exception". (Alternate terms include "raising an exception" or
- "signaling an exception".) The term "throw" is used because control
- is returned to the callers of the function that is signaling the
- exception, and thus there is the concept of "throwing" the
- exception up the call stack.
-
- [ Add updated documentation on how to use this. ] */
+/* An exception is an event that can be "thrown" from within a
+ function. This event can then be "caught" by the callers of
+ the function.
+
+ The representation of exceptions changes several times during
+ the compilation process:
+
+ In the beginning, in the front end, we have the GENERIC trees
+ TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
+ CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.
+
+ During initial gimplification (gimplify.c) these are lowered
+ to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
+ The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
+ into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
+ conversion.
+
+ During pass_lower_eh (tree-eh.c) we record the nested structure
+ of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
+ We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
+ regions at this time. We can then flatten the statements within
+ the TRY nodes to straight-line code. Statements that had been within
+ TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
+ so that we may remember what action is supposed to be taken if
+ a given statement does throw. During this lowering process,
+ we create an EH_LANDING_PAD node for each EH_REGION that has
+ some code within the function that needs to be executed if a
+ throw does happen. We also create RESX statements that are
+ used to transfer control from an inner EH_REGION to an outer
+ EH_REGION. We also create EH_DISPATCH statements as placeholders
+ for a runtime type comparison that should be made in order to
+ select the action to perform among different CATCH and EH_FILTER
+ regions.
+
+ During pass_lower_eh_dispatch (tree-eh.c), which is run after
+ all inlining is complete, we are able to run assign_filter_values,
+ which allows us to map the set of types manipulated by all of the
+ CATCH and EH_FILTER regions to a set of integers. This set of integers
+ will be how the exception runtime communicates with the code generated
+ within the function. We then expand the GIMPLE_EH_DISPATCH statements
+ to a switch or conditional branches that use the argument provided by
+ the runtime (__builtin_eh_filter) and the set of integers we computed
+ in assign_filter_values.
+
+ During pass_lower_resx (tree-eh.c), which is run near the end
+ of optimization, we expand RESX statements. If the eh region
+ that is outer to the RESX statement is a MUST_NOT_THROW, then
+ the RESX expands to some form of abort statement. If the eh
+ region that is outer to the RESX statement is within the current
+ function, then the RESX expands to a bookkeeping call
+ (__builtin_eh_copy_values) and a goto. Otherwise, the next
+ handler for the exception must be within a function somewhere
+ up the call chain, so we call back into the exception runtime
+ (__builtin_unwind_resume).
+
+ During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
+ that create an rtl to eh_region mapping that corresponds to the
+ gimple to eh_region mapping that had been recorded in the
+ THROW_STMT_TABLE.
+
+ During pass_rtl_eh (except.c), we generate the real landing pads
+ to which the runtime will actually transfer control. These new
+ landing pads perform whatever bookkeeping is needed by the target
+ backend in order to resume execution within the current function.
+ Each of these new landing pads falls through into the post_landing_pad
+ label which had been used within the CFG up to this point. All
+ exception edges within the CFG are redirected to the new landing pads.
+ If the target uses setjmp to implement exceptions, the various extra
+ calls into the runtime to register and unregister the current stack
+ frame are emitted at this time.
+
+ During pass_convert_to_eh_region_ranges (except.c), we transform
+ the REG_EH_REGION notes attached to individual insns into
+ non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
+ and NOTE_INSN_EH_REGION_END. Each insn within such ranges has the
+ same associated action within the exception region tree, meaning
+ that (1) the exception is caught by the same landing pad within the
+ current function, (2) the exception is blocked by the runtime with
+ a MUST_NOT_THROW region, or (3) the exception is not handled at all
+ within the current function.
+
+ Finally, during assembly generation, we call
+ output_function_exception_table (except.c) to emit the tables with
+ which the exception runtime can determine if a given stack frame
+ handles a given exception, and if so what filter value to provide
+ to the function when the non-local control transfer is effected.
+ If the target uses dwarf2 unwinding to implement exceptions, then
+ output_call_frame_info (dwarf2out.c) emits the required unwind data. */
#include "config.h"
#include "ggc.h"
#include "tm_p.h"
#include "target.h"
+#include "common/common-target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
+#include "tree-pretty-print.h"
#include "tree-pass.h"
#include "timevar.h"
+#include "tree-flow.h"
/* Provide defaults for stuff that may not be defined when using
sjlj exceptions. */
#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
#endif
-/* Protect cleanup actions with must-not-throw regions, with a call
- to the given failure handler. */
-gimple (*lang_protect_cleanup_actions) (void);
-
-/* Return true if type A catches type B. */
-int (*lang_eh_type_covers) (tree a, tree b);
-
-/* Map a type to a runtime object to match type. */
-tree (*lang_eh_runtime_type) (tree);
-
-/* A hash table of label to region number. */
-
-struct ehl_map_entry GTY(())
-{
- rtx label;
- struct eh_region *region;
-};
-
static GTY(()) int call_site_base;
static GTY ((param_is (union tree_node)))
htab_t type_to_runtime_map;
static int sjlj_fc_lsda_ofs;
static int sjlj_fc_jbuf_ofs;
\f
-/* Describes one exception region. */
-struct eh_region GTY(())
-{
- /* The immediately surrounding region. */
- struct eh_region *outer;
-
- /* The list of immediately contained regions. */
- struct eh_region *inner;
- struct eh_region *next_peer;
-
- /* An identifier for this region. */
- int region_number;
-
- /* When a region is deleted, its parents inherit the REG_EH_REGION
- numbers already assigned. */
- bitmap aka;
-
- /* Each region does exactly one thing. */
- enum eh_region_type
- {
- ERT_UNKNOWN = 0,
- ERT_CLEANUP,
- ERT_TRY,
- ERT_CATCH,
- ERT_ALLOWED_EXCEPTIONS,
- ERT_MUST_NOT_THROW,
- ERT_THROW
- } type;
-
- /* Holds the action to perform based on the preceding type. */
- union eh_region_u {
- /* A list of catch blocks, a surrounding try block,
- and the label for continuing after a catch. */
- struct eh_region_u_try {
- struct eh_region *eh_catch;
- struct eh_region *last_catch;
- } GTY ((tag ("ERT_TRY"))) eh_try;
-
- /* The list through the catch handlers, the list of type objects
- matched, and the list of associated filters. */
- struct eh_region_u_catch {
- struct eh_region *next_catch;
- struct eh_region *prev_catch;
- tree type_list;
- tree filter_list;
- } GTY ((tag ("ERT_CATCH"))) eh_catch;
-
- /* A tree_list of allowed types. */
- struct eh_region_u_allowed {
- tree type_list;
- int filter;
- } GTY ((tag ("ERT_ALLOWED_EXCEPTIONS"))) allowed;
-
- /* The type given by a call to "throw foo();", or discovered
- for a throw. */
- struct eh_region_u_throw {
- tree type;
- } GTY ((tag ("ERT_THROW"))) eh_throw;
-
- /* Retain the cleanup expression even after expansion so that
- we can match up fixup regions. */
- struct eh_region_u_cleanup {
- struct eh_region *prev_try;
- } GTY ((tag ("ERT_CLEANUP"))) cleanup;
- } GTY ((desc ("%0.type"))) u;
-
- /* Entry point for this region's handler before landing pads are built. */
- rtx label;
- tree tree_label;
-
- /* Entry point for this region's handler from the runtime eh library. */
- rtx landing_pad;
-
- /* Entry point for this region's handler from an inner region. */
- rtx post_landing_pad;
-
- /* The RESX insn for handing off control to the next outermost handler,
- if appropriate. */
- rtx resume;
-
- /* True if something in this region may throw. */
- unsigned may_contain_throw : 1;
-};
-
-typedef struct eh_region *eh_region;
-struct call_site_record GTY(())
+struct GTY(()) call_site_record_d
{
rtx landing_pad;
int action;
};
-
-DEF_VEC_P(eh_region);
-DEF_VEC_ALLOC_P(eh_region, gc);
-DEF_VEC_ALLOC_P(eh_region, heap);
-
-/* Used to save exception status for each function. */
-struct eh_status GTY(())
-{
- /* The tree of all regions for this function. */
- struct eh_region *region_tree;
-
- /* The same information as an indexable array. */
- VEC(eh_region,gc) *region_array;
- int last_region_number;
-
- htab_t GTY((param_is (struct throw_stmt_node))) throw_stmt_table;
-};
\f
+static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
+ eh_landing_pad *);
+
static int t2r_eq (const void *, const void *);
static hashval_t t2r_hash (const void *);
-static void add_type_for_runtime (tree);
-static tree lookup_type_for_runtime (tree);
static int ttypes_filter_eq (const void *, const void *);
static hashval_t ttypes_filter_hash (const void *);
static hashval_t ehspec_filter_hash (const void *);
static int add_ttypes_entry (htab_t, tree);
static int add_ehspec_entry (htab_t, htab_t, tree);
-static void assign_filter_values (void);
-static void build_post_landing_pads (void);
-static void connect_post_landing_pads (void);
static void dw2_build_landing_pads (void);
-struct sjlj_lp_info;
-static bool sjlj_find_directly_reachable_regions (struct sjlj_lp_info *);
-static void sjlj_assign_call_site_values (rtx, struct sjlj_lp_info *);
-static void sjlj_mark_call_sites (struct sjlj_lp_info *);
-static void sjlj_emit_function_enter (rtx);
-static void sjlj_emit_function_exit (void);
-static void sjlj_emit_dispatch_table (rtx, struct sjlj_lp_info *);
-static void sjlj_build_landing_pads (void);
-
-static hashval_t ehl_hash (const void *);
-static int ehl_eq (const void *, const void *);
-static void add_ehl_entry (rtx, struct eh_region *);
-static void remove_exception_handler_label (rtx);
-static void remove_eh_handler (struct eh_region *);
-static void remove_eh_handler_and_replace (struct eh_region *,
- struct eh_region *);
-static int for_each_eh_label_1 (void **, void *);
-
-/* The return value of reachable_next_level. */
-enum reachable_code
-{
- /* The given exception is not processed by the given region. */
- RNL_NOT_CAUGHT,
- /* The given exception may need processing by the given region. */
- RNL_MAYBE_CAUGHT,
- /* The given exception is completely processed by the given region. */
- RNL_CAUGHT,
- /* The given exception is completely processed by the runtime. */
- RNL_BLOCKED
-};
-
-struct reachable_info;
-static enum reachable_code reachable_next_level (struct eh_region *, tree,
- struct reachable_info *, bool);
-
static int action_record_eq (const void *, const void *);
static hashval_t action_record_hash (const void *);
static int add_action_record (htab_t, int, int);
-static int collect_one_action_chain (htab_t, struct eh_region *);
-static int add_call_site (rtx, int);
+static int collect_one_action_chain (htab_t, eh_region);
+static int add_call_site (rtx, int, int);
-static void push_uleb128 (varray_type *, unsigned int);
-static void push_sleb128 (varray_type *, int);
+static void push_uleb128 (VEC (uchar, gc) **, unsigned int);
+static void push_sleb128 (VEC (uchar, gc) **, int);
#ifndef HAVE_AS_LEB128
-static int dw2_size_of_call_site_table (void);
+static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
#endif
-static void dw2_output_call_site_table (void);
+static void dw2_output_call_site_table (int, int);
static void sjlj_output_call_site_table (void);
\f
-/* Routine to see if exception handling is turned on.
- DO_WARN is nonzero if we want to inform the user that exception
- handling is turned off.
-
- This is used to ensure that -fexceptions has been specified if the
- compiler tries to use any exception-specific functions. */
-
-int
-doing_eh (int do_warn)
-{
- if (! flag_exceptions)
- {
- static int warned = 0;
- if (! warned && do_warn)
- {
- error ("exception handling disabled, use -fexceptions to enable");
- warned = 1;
- }
- return 0;
- }
- return 1;
-}
-
-\f
void
init_eh (void)
{
/* Create the SjLj_Function_Context structure. This should match
the definition in unwind-sjlj.c. */
- if (USING_SJLJ_EXCEPTIONS)
+ if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
{
tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;
sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);
- f_prev = build_decl (FIELD_DECL, get_identifier ("__prev"),
+ f_prev = build_decl (BUILTINS_LOCATION,
+ FIELD_DECL, get_identifier ("__prev"),
build_pointer_type (sjlj_fc_type_node));
DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;
- f_cs = build_decl (FIELD_DECL, get_identifier ("__call_site"),
+ f_cs = build_decl (BUILTINS_LOCATION,
+ FIELD_DECL, get_identifier ("__call_site"),
integer_type_node);
DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;
- tmp = build_index_type (build_int_cst (NULL_TREE, 4 - 1));
+ tmp = build_index_type (size_int (4 - 1));
tmp = build_array_type (lang_hooks.types.type_for_mode
(targetm.unwind_word_mode (), 1),
tmp);
- f_data = build_decl (FIELD_DECL, get_identifier ("__data"), tmp);
+ f_data = build_decl (BUILTINS_LOCATION,
+ FIELD_DECL, get_identifier ("__data"), tmp);
DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;
- f_per = build_decl (FIELD_DECL, get_identifier ("__personality"),
+ f_per = build_decl (BUILTINS_LOCATION,
+ FIELD_DECL, get_identifier ("__personality"),
ptr_type_node);
DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;
- f_lsda = build_decl (FIELD_DECL, get_identifier ("__lsda"),
+ f_lsda = build_decl (BUILTINS_LOCATION,
+ FIELD_DECL, get_identifier ("__lsda"),
ptr_type_node);
DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;
#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
- tmp = build_int_cst (NULL_TREE, JMP_BUF_SIZE - 1);
+ tmp = size_int (JMP_BUF_SIZE - 1);
#else
/* Should be large enough for most systems, if it is not,
JMP_BUF_SIZE should be defined with the proper value. It will
also tend to be larger than necessary for most systems, a more
optimal port will define JMP_BUF_SIZE. */
- tmp = build_int_cst (NULL_TREE, FIRST_PSEUDO_REGISTER + 2 - 1);
+ tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
/* builtin_setjmp takes a pointer to 5 words. */
- tmp = build_int_cst (NULL_TREE, 5 * BITS_PER_WORD / POINTER_SIZE - 1);
+ tmp = size_int (5 * BITS_PER_WORD / POINTER_SIZE - 1);
#endif
tmp = build_index_type (tmp);
tmp = build_array_type (ptr_type_node, tmp);
- f_jbuf = build_decl (FIELD_DECL, get_identifier ("__jbuf"), tmp);
+ f_jbuf = build_decl (BUILTINS_LOCATION,
+ FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
/* We don't know what the alignment requirements of the
runtime's jmp_buf has. Overestimate. */
void
init_eh_for_function (void)
{
- cfun->eh = GGC_CNEW (struct eh_status);
+ cfun->eh = ggc_alloc_cleared_eh_status ();
+
+ /* Make sure zero'th entries are used. */
+ VEC_safe_push (eh_region, gc, cfun->eh->region_array, NULL);
+ VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, NULL);
}
\f
/* Routines to generate the exception tree somewhat directly.
These are used from tree-eh.c when processing exception related
nodes during tree optimization. */
-static struct eh_region *
-gen_eh_region (enum eh_region_type type, struct eh_region *outer)
+static eh_region
+gen_eh_region (enum eh_region_type type, eh_region outer)
{
- struct eh_region *new_eh;
-
-#ifdef ENABLE_CHECKING
- gcc_assert (doing_eh (0));
-#endif
+ eh_region new_eh;
/* Insert a new blank region as a leaf in the tree. */
- new_eh = GGC_CNEW (struct eh_region);
+ new_eh = ggc_alloc_cleared_eh_region_d ();
new_eh->type = type;
new_eh->outer = outer;
if (outer)
cfun->eh->region_tree = new_eh;
}
- new_eh->region_number = ++cfun->eh->last_region_number;
+ new_eh->index = VEC_length (eh_region, cfun->eh->region_array);
+ VEC_safe_push (eh_region, gc, cfun->eh->region_array, new_eh);
+
+ /* Copy the language's notion of whether to use __cxa_end_cleanup. */
+ if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
+ new_eh->use_cxa_end_cleanup = true;
return new_eh;
}
-struct eh_region *
-gen_eh_region_cleanup (struct eh_region *outer, struct eh_region *prev_try)
+eh_region
+gen_eh_region_cleanup (eh_region outer)
{
- struct eh_region *cleanup = gen_eh_region (ERT_CLEANUP, outer);
- cleanup->u.cleanup.prev_try = prev_try;
- return cleanup;
+ return gen_eh_region (ERT_CLEANUP, outer);
}
-struct eh_region *
-gen_eh_region_try (struct eh_region *outer)
+eh_region
+gen_eh_region_try (eh_region outer)
{
return gen_eh_region (ERT_TRY, outer);
}
-struct eh_region *
-gen_eh_region_catch (struct eh_region *t, tree type_or_list)
+eh_catch
+gen_eh_region_catch (eh_region t, tree type_or_list)
{
- struct eh_region *c, *l;
+ eh_catch c, l;
tree type_list, type_node;
+ gcc_assert (t->type == ERT_TRY);
+
/* Ensure to always end up with a type list to normalize further
processing, then register each type against the runtime types map. */
type_list = type_or_list;
add_type_for_runtime (TREE_VALUE (type_node));
}
- c = gen_eh_region (ERT_CATCH, t->outer);
- c->u.eh_catch.type_list = type_list;
+ c = ggc_alloc_cleared_eh_catch_d ();
+ c->type_list = type_list;
l = t->u.eh_try.last_catch;
- c->u.eh_catch.prev_catch = l;
+ c->prev_catch = l;
if (l)
- l->u.eh_catch.next_catch = c;
+ l->next_catch = c;
else
- t->u.eh_try.eh_catch = c;
+ t->u.eh_try.first_catch = c;
t->u.eh_try.last_catch = c;
return c;
}
-struct eh_region *
-gen_eh_region_allowed (struct eh_region *outer, tree allowed)
+eh_region
+gen_eh_region_allowed (eh_region outer, tree allowed)
{
- struct eh_region *region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
+ eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
region->u.allowed.type_list = allowed;
for (; allowed ; allowed = TREE_CHAIN (allowed))
return region;
}
-struct eh_region *
-gen_eh_region_must_not_throw (struct eh_region *outer)
+eh_region
+gen_eh_region_must_not_throw (eh_region outer)
{
return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
-int
-get_eh_region_number (struct eh_region *region)
+eh_landing_pad
+gen_eh_landing_pad (eh_region region)
{
- return region->region_number;
-}
+ eh_landing_pad lp = ggc_alloc_cleared_eh_landing_pad_d ();
-bool
-get_eh_region_may_contain_throw (struct eh_region *region)
-{
- return region->may_contain_throw;
+ lp->next_lp = region->landing_pads;
+ lp->region = region;
+ lp->index = VEC_length (eh_landing_pad, cfun->eh->lp_array);
+ region->landing_pads = lp;
+
+ VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, lp);
+
+ return lp;
}
-tree
-get_eh_region_tree_label (struct eh_region *region)
+eh_region
+get_eh_region_from_number_fn (struct function *ifun, int i)
{
- return region->tree_label;
+ return VEC_index (eh_region, ifun->eh->region_array, i);
}
-tree
-get_eh_region_no_tree_label (int region)
+eh_region
+get_eh_region_from_number (int i)
{
- return VEC_index (eh_region, cfun->eh->region_array, region)->tree_label;
+ return get_eh_region_from_number_fn (cfun, i);
}
-void
-set_eh_region_tree_label (struct eh_region *region, tree lab)
+eh_landing_pad
+get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
- region->tree_label = lab;
+ return VEC_index (eh_landing_pad, ifun->eh->lp_array, i);
}
-\f
-void
-expand_resx_expr (tree exp)
+
+eh_landing_pad
+get_eh_landing_pad_from_number (int i)
{
- int region_nr = TREE_INT_CST_LOW (TREE_OPERAND (exp, 0));
- struct eh_region *reg = VEC_index (eh_region,
- cfun->eh->region_array, region_nr);
-
- gcc_assert (!reg->resume);
- do_pending_stack_adjust ();
- reg->resume = emit_jump_insn (gen_rtx_RESX (VOIDmode, region_nr));
- emit_barrier ();
+ return get_eh_landing_pad_from_number_fn (cfun, i);
}
-/* Note that the current EH region (if any) may contain a throw, or a
- call to a function which itself may contain a throw. */
-
-void
-note_eh_region_may_contain_throw (struct eh_region *region)
+eh_region
+get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
- while (region && !region->may_contain_throw)
+ if (i < 0)
+ return VEC_index (eh_region, ifun->eh->region_array, -i);
+ else if (i == 0)
+ return NULL;
+ else
{
- region->may_contain_throw = 1;
- region = region->outer;
+ eh_landing_pad lp;
+ lp = VEC_index (eh_landing_pad, ifun->eh->lp_array, i);
+ return lp->region;
}
}
-
-/* Return an rtl expression for a pointer to the exception object
- within a handler. */
-
-rtx
-get_exception_pointer (void)
+eh_region
+get_eh_region_from_lp_number (int i)
{
- if (! crtl->eh.exc_ptr)
- crtl->eh.exc_ptr = gen_reg_rtx (ptr_mode);
- return crtl->eh.exc_ptr;
+ return get_eh_region_from_lp_number_fn (cfun, i);
}
+\f
+/* Returns true if the current function has exception handling regions. */
-/* Return an rtl expression for the exception dispatch filter
- within a handler. */
-
-rtx
-get_exception_filter (void)
+bool
+current_function_has_exception_handlers (void)
{
- if (! crtl->eh.filter)
- crtl->eh.filter = gen_reg_rtx (targetm.eh_return_filter_mode ());
- return crtl->eh.filter;
+ return cfun->eh->region_tree != NULL;
}
\f
-/* This section is for the exception handling specific optimization pass. */
+/* A subroutine of duplicate_eh_regions. Copy the eh_region tree at OLD.
+ Root it at OUTER, and apply LP_OFFSET to the lp numbers. */
-/* Random access the exception region tree. */
-
-void
-collect_eh_region_array (void)
+struct duplicate_eh_regions_data
{
- struct eh_region *i;
+ duplicate_eh_regions_map label_map;
+ void *label_map_data;
+ struct pointer_map_t *eh_map;
+};
- i = cfun->eh->region_tree;
- if (! i)
- return;
+static void
+duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
+ eh_region old_r, eh_region outer)
+{
+ eh_landing_pad old_lp, new_lp;
+ eh_region new_r;
+ void **slot;
- VEC_safe_grow (eh_region, gc, cfun->eh->region_array,
- cfun->eh->last_region_number + 1);
- VEC_replace (eh_region, cfun->eh->region_array, 0, 0);
+ new_r = gen_eh_region (old_r->type, outer);
+ slot = pointer_map_insert (data->eh_map, (void *)old_r);
+ gcc_assert (*slot == NULL);
+ *slot = (void *)new_r;
- while (1)
+ switch (old_r->type)
{
- VEC_replace (eh_region, cfun->eh->region_array, i->region_number, i);
-
- /* If there are sub-regions, process them. */
- if (i->inner)
- i = i->inner;
- /* If there are peers, process them. */
- else if (i->next_peer)
- i = i->next_peer;
- /* Otherwise, step back up the tree to the next peer. */
- else
- {
- do {
- i = i->outer;
- if (i == NULL)
- return;
- } while (i->next_peer == NULL);
- i = i->next_peer;
- }
- }
-}
-
-/* R is MUST_NOT_THROW region that is not reachable via local
- RESX instructions. It still must be kept in the tree in case runtime
- can unwind through it, or we will eliminate out terminate call
- runtime would do otherwise. Return TRUE if R contains throwing statements
- or some of the exceptions in inner regions can be unwound up to R.
-
- CONTAINS_STMT is bitmap of all regions that contains some throwing
- statements.
-
- Function looks O(^3) at first sight. In fact the function is called at most
- once for every MUST_NOT_THROW in EH tree from remove_unreachable_regions
- Because the outer loop walking subregions does not dive in MUST_NOT_THROW,
- the outer loop examines every region at most once. The inner loop
- is doing unwinding from the throwing statement same way as we do during
- CFG construction, so it is O(^2) in size of EH tree, but O(n) in size
- of CFG. In practice Eh trees are wide, not deep, so this is not
- a problem. */
-
-static bool
-can_be_reached_by_runtime (sbitmap contains_stmt, struct eh_region *r)
-{
- struct eh_region *i = r->inner;
- unsigned n;
- bitmap_iterator bi;
+ case ERT_CLEANUP:
+ break;
- if (TEST_BIT (contains_stmt, r->region_number))
- return true;
- if (r->aka)
- EXECUTE_IF_SET_IN_BITMAP (r->aka, 0, n, bi)
- if (TEST_BIT (contains_stmt, n))
- return true;
- if (!i)
- return false;
- while (1)
- {
- /* It is pointless to look into MUST_NOT_THROW
- or dive into subregions. They never unwind up. */
- if (i->type != ERT_MUST_NOT_THROW)
- {
- bool found = TEST_BIT (contains_stmt, i->region_number);
- if (!found)
- EXECUTE_IF_SET_IN_BITMAP (i->aka, 0, n, bi)
- if (TEST_BIT (contains_stmt, n))
- {
- found = true;
- break;
- }
- /* We have nested region that contains throwing statement.
- See if resuming might lead up to the resx or we get locally
- caught sooner. If we get locally caught sooner, we either
- know region R is not reachable or it would have direct edge
- from the EH resx and thus consider region reachable at
- firest place. */
- if (found)
- {
- struct eh_region *i1 = i;
- tree type_thrown = NULL_TREE;
+ case ERT_TRY:
+ {
+ eh_catch oc, nc;
+ for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
+ {
+ /* We should be doing all our region duplication before and
+ during inlining, which is before filter lists are created. */
+ gcc_assert (oc->filter_list == NULL);
+ nc = gen_eh_region_catch (new_r, oc->type_list);
+ nc->label = data->label_map (oc->label, data->label_map_data);
+ }
+ }
+ break;
- if (i1->type == ERT_THROW)
- {
- type_thrown = i1->u.eh_throw.type;
- i1 = i1->outer;
- }
- for (; i1 != r; i1 = i1->outer)
- if (reachable_next_level (i1, type_thrown, NULL,
- false) >= RNL_CAUGHT)
- break;
- if (i1 == r)
- return true;
- }
- }
- /* If there are sub-regions, process them. */
- if (i->type != ERT_MUST_NOT_THROW && i->inner)
- i = i->inner;
- /* If there are peers, process them. */
- else if (i->next_peer)
- i = i->next_peer;
- /* Otherwise, step back up the tree to the next peer. */
+ case ERT_ALLOWED_EXCEPTIONS:
+ new_r->u.allowed.type_list = old_r->u.allowed.type_list;
+ if (old_r->u.allowed.label)
+ new_r->u.allowed.label
+ = data->label_map (old_r->u.allowed.label, data->label_map_data);
else
- {
- do
- {
- i = i->outer;
- if (i == r)
- return false;
- }
- while (i->next_peer == NULL);
- i = i->next_peer;
- }
- }
-}
-
-/* Bring region R to the root of tree. */
-
-static void
-bring_to_root (struct eh_region *r)
-{
- struct eh_region **pp;
- struct eh_region *outer = r->outer;
- if (!r->outer)
- return;
- for (pp = &outer->inner; *pp != r; pp = &(*pp)->next_peer)
- continue;
- *pp = r->next_peer;
- r->outer = NULL;
- r->next_peer = cfun->eh->region_tree;
- cfun->eh->region_tree = r;
-}
-
-/* Remove all regions whose labels are not reachable.
- REACHABLE is bitmap of all regions that are used by the function
- CONTAINS_STMT is bitmap of all regions that contains stmt (or NULL). */
+ new_r->u.allowed.label = NULL_TREE;
+ break;
-void
-remove_unreachable_regions (sbitmap reachable, sbitmap contains_stmt)
-{
- int i;
- struct eh_region *r;
- VEC(eh_region,heap) *must_not_throws = VEC_alloc (eh_region, heap, 16);
- struct eh_region *local_must_not_throw = NULL;
- struct eh_region *first_must_not_throw = NULL;
+ case ERT_MUST_NOT_THROW:
+ new_r->u.must_not_throw = old_r->u.must_not_throw;
+ break;
+ }
- for (i = cfun->eh->last_region_number; i > 0; --i)
+ for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
{
- r = VEC_index (eh_region, cfun->eh->region_array, i);
- if (!r || r->region_number != i)
+ /* Don't bother copying unused landing pads. */
+ if (old_lp->post_landing_pad == NULL)
continue;
- if (!TEST_BIT (reachable, i) && !r->resume)
- {
- bool kill_it = true;
-
- r->tree_label = NULL;
- switch (r->type)
- {
- case ERT_THROW:
- /* Don't remove ERT_THROW regions if their outer region
- is reachable. */
- if (r->outer && TEST_BIT (reachable, r->outer->region_number))
- kill_it = false;
- break;
- case ERT_MUST_NOT_THROW:
- /* MUST_NOT_THROW regions are implementable solely in the
- runtime, but we need them when inlining function.
-
- Keep them if outer region is not MUST_NOT_THROW a well
- and if they contain some statement that might unwind through
- them. */
- if ((!r->outer || r->outer->type != ERT_MUST_NOT_THROW)
- && (!contains_stmt
- || can_be_reached_by_runtime (contains_stmt, r)))
- kill_it = false;
- break;
- case ERT_TRY:
- {
- /* TRY regions are reachable if any of its CATCH regions
- are reachable. */
- struct eh_region *c;
- for (c = r->u.eh_try.eh_catch; c;
- c = c->u.eh_catch.next_catch)
- if (TEST_BIT (reachable, c->region_number))
- {
- kill_it = false;
- break;
- }
- break;
- }
- default:
- break;
- }
+ new_lp = gen_eh_landing_pad (new_r);
+ slot = pointer_map_insert (data->eh_map, (void *)old_lp);
+ gcc_assert (*slot == NULL);
+ *slot = (void *)new_lp;
- if (kill_it)
- {
- if (dump_file)
- fprintf (dump_file, "Removing unreachable eh region %i\n",
- r->region_number);
- remove_eh_handler (r);
- }
- else if (r->type == ERT_MUST_NOT_THROW)
- {
- if (!first_must_not_throw)
- first_must_not_throw = r;
- VEC_safe_push (eh_region, heap, must_not_throws, r);
- }
- }
- else
- if (r->type == ERT_MUST_NOT_THROW)
- {
- if (!local_must_not_throw)
- local_must_not_throw = r;
- if (r->outer)
- VEC_safe_push (eh_region, heap, must_not_throws, r);
- }
+ new_lp->post_landing_pad
+ = data->label_map (old_lp->post_landing_pad, data->label_map_data);
+ EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
}
- /* MUST_NOT_THROW regions without local handler are all the same; they
- trigger terminate call in runtime.
- MUST_NOT_THROW handled locally can differ in debug info associated
- to std::terminate () call or if one is coming from Java and other
- from C++ whether they call terminate or abort.
+ /* Make sure to preserve the original use of __cxa_end_cleanup. */
+ new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;
- We merge all MUST_NOT_THROW regions handled by the run-time into one.
- We alsobring all local MUST_NOT_THROW regions to the roots of EH tree
- (since unwinding never continues to the outer region anyway).
- If MUST_NOT_THROW with local handler is present in the tree, we use
- that region to merge into, since it will remain in tree anyway;
- otherwise we use first MUST_NOT_THROW.
+ for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
+ duplicate_eh_regions_1 (data, old_r, new_r);
+}
- Merging of locally handled regions needs changes to the CFG. Crossjumping
- should take care of this, by looking at the actual code and
- ensuring that the cleanup actions are really the same. */
+/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
+ the current function and root the tree below OUTER_REGION.
+ The special case of COPY_REGION of NULL means all regions.
+ Remap labels using MAP/MAP_DATA callback. Return a pointer map
+ that allows the caller to remap uses of both EH regions and
+ EH landing pads. */
- if (local_must_not_throw)
- first_must_not_throw = local_must_not_throw;
+struct pointer_map_t *
+duplicate_eh_regions (struct function *ifun,
+ eh_region copy_region, int outer_lp,
+ duplicate_eh_regions_map map, void *map_data)
+{
+ struct duplicate_eh_regions_data data;
+ eh_region outer_region;
- for (i = 0; VEC_iterate (eh_region, must_not_throws, i, r); i++)
- {
- if (!r->label && !r->tree_label && r != first_must_not_throw)
- {
- if (dump_file)
- fprintf (dump_file, "Replacing MUST_NOT_THROW region %i by %i\n",
- r->region_number,
- first_must_not_throw->region_number);
- remove_eh_handler_and_replace (r, first_must_not_throw);
- }
- else
- bring_to_root (r);
- }
#ifdef ENABLE_CHECKING
- verify_eh_tree (cfun);
+ verify_eh_tree (ifun);
#endif
- VEC_free (eh_region, heap, must_not_throws);
-}
-/* Return array mapping LABEL_DECL_UID to region such that region's tree_label
- is identical to label. */
+ data.label_map = map;
+ data.label_map_data = map_data;
+ data.eh_map = pointer_map_create ();
-VEC(int,heap) *
-label_to_region_map (void)
-{
- VEC(int,heap) * label_to_region = NULL;
- int i;
+ outer_region = get_eh_region_from_lp_number (outer_lp);
- VEC_safe_grow_cleared (int, heap, label_to_region,
- cfun->cfg->last_label_uid + 1);
- for (i = cfun->eh->last_region_number; i > 0; --i)
+ /* Copy all the regions in the subtree. */
+ if (copy_region)
+ duplicate_eh_regions_1 (&data, copy_region, outer_region);
+ else
{
- struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
- if (r && r->region_number == i
- && r->tree_label && LABEL_DECL_UID (r->tree_label) >= 0)
- {
- VEC_replace (int, label_to_region, LABEL_DECL_UID (r->tree_label),
- i);
- }
+ eh_region r;
+ for (r = ifun->eh->region_tree; r ; r = r->next_peer)
+ duplicate_eh_regions_1 (&data, r, outer_region);
}
- return label_to_region;
-}
-/* Return number of EH regions. */
-int
-num_eh_regions (void)
-{
- return cfun->eh->last_region_number + 1;
+#ifdef ENABLE_CHECKING
+ verify_eh_tree (cfun);
+#endif
+
+ return data.eh_map;
}
-/* Remove all regions whose labels are not reachable from insns. */
+/* Return the region that is outer to both REGION_A and REGION_B in IFUN. */
-static void
-rtl_remove_unreachable_regions (rtx insns)
+eh_region
+eh_region_outermost (struct function *ifun, eh_region region_a,
+ eh_region region_b)
{
- int i, *uid_region_num;
- sbitmap reachable;
- struct eh_region *r;
- rtx insn;
+ sbitmap b_outer;
- uid_region_num = XCNEWVEC (int, get_max_uid ());
- reachable = sbitmap_alloc (cfun->eh->last_region_number + 1);
- sbitmap_zero (reachable);
+ gcc_assert (ifun->eh->region_array);
+ gcc_assert (ifun->eh->region_tree);
- for (i = cfun->eh->last_region_number; i > 0; --i)
- {
- r = VEC_index (eh_region, cfun->eh->region_array, i);
- if (!r || r->region_number != i)
- continue;
+ b_outer = sbitmap_alloc (VEC_length (eh_region, ifun->eh->region_array));
+ sbitmap_zero (b_outer);
- if (r->resume)
- {
- gcc_assert (!uid_region_num[INSN_UID (r->resume)]);
- uid_region_num[INSN_UID (r->resume)] = i;
- }
- if (r->label)
- {
- gcc_assert (!uid_region_num[INSN_UID (r->label)]);
- uid_region_num[INSN_UID (r->label)] = i;
- }
+ do
+ {
+ SET_BIT (b_outer, region_b->index);
+ region_b = region_b->outer;
}
+ while (region_b);
- for (insn = insns; insn; insn = NEXT_INSN (insn))
- SET_BIT (reachable, uid_region_num[INSN_UID (insn)]);
-
- remove_unreachable_regions (reachable, NULL);
-
- sbitmap_free (reachable);
- free (uid_region_num);
-}
-
-/* Set up EH labels for RTL. */
-
-void
-convert_from_eh_region_ranges (void)
-{
- rtx insns = get_insns ();
- int i, n = cfun->eh->last_region_number;
-
- /* Most of the work is already done at the tree level. All we need to
- do is collect the rtl labels that correspond to the tree labels that
- collect the rtl labels that correspond to the tree labels
- we allocated earlier. */
- for (i = 1; i <= n; ++i)
+ do
{
- struct eh_region *region;
-
- region = VEC_index (eh_region, cfun->eh->region_array, i);
- if (region && region->tree_label)
- region->label = DECL_RTL_IF_SET (region->tree_label);
+ if (TEST_BIT (b_outer, region_a->index))
+ break;
+ region_a = region_a->outer;
}
+ while (region_a);
- rtl_remove_unreachable_regions (insns);
+ sbitmap_free (b_outer);
+ return region_a;
}
-
-static void
-add_ehl_entry (rtx label, struct eh_region *region)
+\f
+static int
+t2r_eq (const void *pentry, const void *pdata)
{
- struct ehl_map_entry **slot, *entry;
-
- LABEL_PRESERVE_P (label) = 1;
-
- entry = GGC_NEW (struct ehl_map_entry);
- entry->label = label;
- entry->region = region;
-
- slot = (struct ehl_map_entry **)
- htab_find_slot (crtl->eh.exception_handler_label_map, entry, INSERT);
+ const_tree const entry = (const_tree) pentry;
+ const_tree const data = (const_tree) pdata;
- /* Before landing pad creation, each exception handler has its own
- label. After landing pad creation, the exception handlers may
- share landing pads. This is ok, since maybe_remove_eh_handler
- only requires the 1-1 mapping before landing pad creation. */
- gcc_assert (!*slot || crtl->eh.built_landing_pads);
+ return TREE_PURPOSE (entry) == data;
+}
- *slot = entry;
+static hashval_t
+t2r_hash (const void *pentry)
+{
+ const_tree const entry = (const_tree) pentry;
+ return TREE_HASH (TREE_PURPOSE (entry));
}
void
-find_exception_handler_labels (void)
+add_type_for_runtime (tree type)
{
- int i;
-
- if (crtl->eh.exception_handler_label_map)
- htab_empty (crtl->eh.exception_handler_label_map);
- else
- {
- /* ??? The expansion factor here (3/2) must be greater than the htab
- occupancy factor (4/3) to avoid unnecessary resizing. */
- crtl->eh.exception_handler_label_map
- = htab_create_ggc (cfun->eh->last_region_number * 3 / 2,
- ehl_hash, ehl_eq, NULL);
- }
+ tree *slot;
- if (cfun->eh->region_tree == NULL)
+ /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
+ if (TREE_CODE (type) == NOP_EXPR)
return;
- for (i = cfun->eh->last_region_number; i > 0; --i)
+ slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
+ TREE_HASH (type), INSERT);
+ if (*slot == NULL)
{
- struct eh_region *region;
- rtx lab;
-
- region = VEC_index (eh_region, cfun->eh->region_array, i);
- if (! region || region->region_number != i)
- continue;
- if (crtl->eh.built_landing_pads)
- lab = region->landing_pad;
- else
- lab = region->label;
-
- if (lab)
- add_ehl_entry (lab, region);
+ tree runtime = lang_hooks.eh_runtime_type (type);
+ *slot = tree_cons (type, runtime, NULL_TREE);
}
-
- /* For sjlj exceptions, need the return label to remain live until
- after landing pad generation. */
- if (USING_SJLJ_EXCEPTIONS && ! crtl->eh.built_landing_pads)
- add_ehl_entry (return_label, NULL);
}
-/* Returns true if the current function has exception handling regions. */
+tree
+lookup_type_for_runtime (tree type)
+{
+ tree *slot;
-bool
-current_function_has_exception_handlers (void)
-{
- int i;
-
- for (i = cfun->eh->last_region_number; i > 0; --i)
- {
- struct eh_region *region;
-
- region = VEC_index (eh_region, cfun->eh->region_array, i);
- if (region
- && region->region_number == i
- && region->type != ERT_THROW)
- return true;
- }
-
- return false;
-}
-\f
-/* A subroutine of duplicate_eh_regions. Search the region tree under O
- for the minimum and maximum region numbers. Update *MIN and *MAX. */
-
-static void
-duplicate_eh_regions_0 (eh_region o, int *min, int *max)
-{
- int i;
-
- if (o->aka)
- {
- i = bitmap_first_set_bit (o->aka);
- if (i < *min)
- *min = i;
- i = bitmap_last_set_bit (o->aka);
- if (i > *max)
- *max = i;
- }
- if (o->region_number < *min)
- *min = o->region_number;
- if (o->region_number > *max)
- *max = o->region_number;
-
- if (o->inner)
- {
- o = o->inner;
- duplicate_eh_regions_0 (o, min, max);
- while (o->next_peer)
- {
- o = o->next_peer;
- duplicate_eh_regions_0 (o, min, max);
- }
- }
-}
-
-/* A subroutine of duplicate_eh_regions. Copy the region tree under OLD.
- Root it at OUTER, and apply EH_OFFSET to the region number. Don't worry
- about the other internal pointers just yet, just the tree-like pointers. */
-
-static eh_region
-duplicate_eh_regions_1 (eh_region old, eh_region outer, int eh_offset)
-{
- eh_region ret, n;
-
- ret = n = GGC_NEW (struct eh_region);
-
- *n = *old;
- n->outer = outer;
- n->next_peer = NULL;
- if (old->aka)
- {
- unsigned i;
- bitmap_iterator bi;
- n->aka = BITMAP_GGC_ALLOC ();
-
- EXECUTE_IF_SET_IN_BITMAP (old->aka, 0, i, bi)
- {
- bitmap_set_bit (n->aka, i + eh_offset);
- VEC_replace (eh_region, cfun->eh->region_array, i + eh_offset, n);
- }
- }
-
- n->region_number += eh_offset;
- VEC_replace (eh_region, cfun->eh->region_array, n->region_number, n);
-
- if (old->inner)
- {
- old = old->inner;
- n = n->inner = duplicate_eh_regions_1 (old, ret, eh_offset);
- while (old->next_peer)
- {
- old = old->next_peer;
- n = n->next_peer = duplicate_eh_regions_1 (old, ret, eh_offset);
- }
- }
-
- return ret;
-}
-
-/* Duplicate the EH regions of IFUN, rooted at COPY_REGION, into current
- function and root the tree below OUTER_REGION. Remap labels using MAP
- callback. The special case of COPY_REGION of 0 means all regions. */
-
-int
-duplicate_eh_regions (struct function *ifun, duplicate_eh_regions_map map,
- void *data, int copy_region, int outer_region)
-{
- eh_region cur, prev_try, outer, *splice;
- int i, min_region, max_region, eh_offset, cfun_last_region_number;
- int num_regions;
-
- if (!ifun->eh)
- return 0;
-#ifdef ENABLE_CHECKING
- verify_eh_tree (ifun);
-#endif
-
- /* Find the range of region numbers to be copied. The interface we
- provide here mandates a single offset to find new number from old,
- which means we must look at the numbers present, instead of the
- count or something else. */
- if (copy_region > 0)
- {
- min_region = INT_MAX;
- max_region = 0;
-
- cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
- duplicate_eh_regions_0 (cur, &min_region, &max_region);
- }
- else
- min_region = 1, max_region = ifun->eh->last_region_number;
- num_regions = max_region - min_region + 1;
- cfun_last_region_number = cfun->eh->last_region_number;
- eh_offset = cfun_last_region_number + 1 - min_region;
-
- /* If we've not yet created a region array, do so now. */
- cfun->eh->last_region_number = cfun_last_region_number + num_regions;
- VEC_safe_grow_cleared (eh_region, gc, cfun->eh->region_array,
- cfun->eh->last_region_number + 1);
-
- /* Locate the spot at which to insert the new tree. */
- if (outer_region > 0)
- {
- outer = VEC_index (eh_region, cfun->eh->region_array, outer_region);
- if (outer)
- splice = &outer->inner;
- else
- splice = &cfun->eh->region_tree;
- }
- else
- {
- outer = NULL;
- splice = &cfun->eh->region_tree;
- }
- while (*splice)
- splice = &(*splice)->next_peer;
-
- if (!ifun->eh->region_tree)
- {
- if (outer)
- for (i = cfun_last_region_number + 1;
- i <= cfun->eh->last_region_number; i++)
- {
- VEC_replace (eh_region, cfun->eh->region_array, i, outer);
- if (outer->aka == NULL)
- outer->aka = BITMAP_GGC_ALLOC ();
- bitmap_set_bit (outer->aka, i);
- }
- return eh_offset;
- }
-
- /* Copy all the regions in the subtree. */
- if (copy_region > 0)
- {
- cur = VEC_index (eh_region, ifun->eh->region_array, copy_region);
- *splice = duplicate_eh_regions_1 (cur, outer, eh_offset);
- }
- else
- {
- eh_region n;
-
- cur = ifun->eh->region_tree;
- *splice = n = duplicate_eh_regions_1 (cur, outer, eh_offset);
- while (cur->next_peer)
- {
- cur = cur->next_peer;
- n = n->next_peer = duplicate_eh_regions_1 (cur, outer, eh_offset);
- }
- }
-
- /* Remap all the labels in the new regions. */
- for (i = cfun_last_region_number + 1;
- VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
- if (cur && cur->tree_label)
- cur->tree_label = map (cur->tree_label, data);
-
- /* Search for the containing ERT_TRY region to fix up
- the prev_try short-cuts for ERT_CLEANUP regions. */
- prev_try = NULL;
- if (outer_region > 0)
- for (prev_try =
- VEC_index (eh_region, cfun->eh->region_array, outer_region);
- prev_try && prev_try->type != ERT_TRY; prev_try = prev_try->outer)
- if (prev_try->type == ERT_MUST_NOT_THROW
- || (prev_try->type == ERT_ALLOWED_EXCEPTIONS
- && !prev_try->u.allowed.type_list))
- {
- prev_try = NULL;
- break;
- }
-
- /* Remap all of the internal catch and cleanup linkages. Since we
- duplicate entire subtrees, all of the referenced regions will have
- been copied too. And since we renumbered them as a block, a simple
- bit of arithmetic finds us the index for the replacement region. */
- for (i = cfun_last_region_number + 1;
- VEC_iterate (eh_region, cfun->eh->region_array, i, cur); ++i)
- {
- /* All removed EH that is toplevel in input function is now
- in outer EH of output function. */
- if (cur == NULL)
- {
- gcc_assert (VEC_index
- (eh_region, ifun->eh->region_array,
- i - eh_offset) == NULL);
- if (outer)
- {
- VEC_replace (eh_region, cfun->eh->region_array, i, outer);
- if (outer->aka == NULL)
- outer->aka = BITMAP_GGC_ALLOC ();
- bitmap_set_bit (outer->aka, i);
- }
- continue;
- }
- if (i != cur->region_number)
- continue;
-
-#define REMAP(REG) \
- (REG) = VEC_index (eh_region, cfun->eh->region_array, \
- (REG)->region_number + eh_offset)
-
- switch (cur->type)
- {
- case ERT_TRY:
- if (cur->u.eh_try.eh_catch)
- REMAP (cur->u.eh_try.eh_catch);
- if (cur->u.eh_try.last_catch)
- REMAP (cur->u.eh_try.last_catch);
- break;
-
- case ERT_CATCH:
- if (cur->u.eh_catch.next_catch)
- REMAP (cur->u.eh_catch.next_catch);
- if (cur->u.eh_catch.prev_catch)
- REMAP (cur->u.eh_catch.prev_catch);
- break;
-
- case ERT_CLEANUP:
- if (cur->u.cleanup.prev_try)
- REMAP (cur->u.cleanup.prev_try);
- else
- cur->u.cleanup.prev_try = prev_try;
- break;
-
- default:
- break;
- }
-
-#undef REMAP
- }
-#ifdef ENABLE_CHECKING
- verify_eh_tree (cfun);
-#endif
-
- return eh_offset;
-}
-
-/* Return true if REGION_A is outer to REGION_B in IFUN. */
-
-bool
-eh_region_outer_p (struct function *ifun, int region_a, int region_b)
-{
- struct eh_region *rp_a, *rp_b;
-
- gcc_assert (ifun->eh->last_region_number > 0);
- gcc_assert (ifun->eh->region_tree);
-
- rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
- rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
- gcc_assert (rp_a != NULL);
- gcc_assert (rp_b != NULL);
-
- do
- {
- if (rp_a == rp_b)
- return true;
- rp_b = rp_b->outer;
- }
- while (rp_b);
-
- return false;
-}
-
-/* Return region number of region that is outer to both if REGION_A and
- REGION_B in IFUN. */
-
-int
-eh_region_outermost (struct function *ifun, int region_a, int region_b)
-{
- struct eh_region *rp_a, *rp_b;
- sbitmap b_outer;
-
- gcc_assert (ifun->eh->last_region_number > 0);
- gcc_assert (ifun->eh->region_tree);
-
- rp_a = VEC_index (eh_region, ifun->eh->region_array, region_a);
- rp_b = VEC_index (eh_region, ifun->eh->region_array, region_b);
- gcc_assert (rp_a != NULL);
- gcc_assert (rp_b != NULL);
-
- b_outer = sbitmap_alloc (ifun->eh->last_region_number + 1);
- sbitmap_zero (b_outer);
-
- do
- {
- SET_BIT (b_outer, rp_b->region_number);
- rp_b = rp_b->outer;
- }
- while (rp_b);
-
- do
- {
- if (TEST_BIT (b_outer, rp_a->region_number))
- {
- sbitmap_free (b_outer);
- return rp_a->region_number;
- }
- rp_a = rp_a->outer;
- }
- while (rp_a);
-
- sbitmap_free (b_outer);
- return -1;
-}
-\f
-static int
-t2r_eq (const void *pentry, const void *pdata)
-{
- const_tree const entry = (const_tree) pentry;
- const_tree const data = (const_tree) pdata;
-
- return TREE_PURPOSE (entry) == data;
-}
-
-static hashval_t
-t2r_hash (const void *pentry)
-{
- const_tree const entry = (const_tree) pentry;
- return TREE_HASH (TREE_PURPOSE (entry));
-}
-
-static void
-add_type_for_runtime (tree type)
-{
- tree *slot;
-
- slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
- TREE_HASH (type), INSERT);
- if (*slot == NULL)
- {
- tree runtime = (*lang_eh_runtime_type) (type);
- *slot = tree_cons (type, runtime, NULL_TREE);
- }
-}
-
-static tree
-lookup_type_for_runtime (tree type)
-{
- tree *slot;
+ /* If TYPE is NOP_EXPR, it means that it already is a runtime type. */
+ if (TREE_CODE (type) == NOP_EXPR)
+ return type;
slot = (tree *) htab_find_slot_with_hash (type_to_runtime_map, type,
TREE_HASH (type), NO_INSERT);
\f
/* Represent an entry in @TTypes for either catch actions
or exception filter actions. */
-struct ttypes_filter GTY(())
-{
+struct ttypes_filter {
tree t;
int filter;
};
return h;
}
-/* Add TYPE (which may be NULL) to crtl->eh.ttype_data, using TYPES_HASH
+/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
to speed up the search. Return the filter value to be used. */
static int
n = XNEW (struct ttypes_filter);
n->t = type;
- n->filter = VEC_length (tree, crtl->eh.ttype_data) + 1;
+ n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
*slot = n;
- VEC_safe_push (tree, gc, crtl->eh.ttype_data, type);
+ VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
}
return n->filter;
}
-/* Add LIST to crtl->eh.ehspec_data, using EHSPEC_HASH and TYPES_HASH
+/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
to speed up the search. Return the filter value to be used. */
static int
if ((n = *slot) == NULL)
{
+ int len;
+
+ if (targetm.arm_eabi_unwinder)
+ len = VEC_length (tree, cfun->eh->ehspec_data.arm_eabi);
+ else
+ len = VEC_length (uchar, cfun->eh->ehspec_data.other);
+
/* Filter value is a -1 based byte index into a uleb128 buffer. */
n = XNEW (struct ttypes_filter);
n->t = list;
- n->filter = -(VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) + 1);
+ n->filter = -(len + 1);
*slot = n;
/* Generate a 0 terminated list of filter values. */
for (; list ; list = TREE_CHAIN (list))
{
if (targetm.arm_eabi_unwinder)
- VARRAY_PUSH_TREE (crtl->eh.ehspec_data, TREE_VALUE (list));
+ VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi,
+ TREE_VALUE (list));
else
{
/* Look up each type in the list and encode its filter
value as a uleb128. */
- push_uleb128 (&crtl->eh.ehspec_data,
- add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
+ push_uleb128 (&cfun->eh->ehspec_data.other,
+ add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
}
}
if (targetm.arm_eabi_unwinder)
- VARRAY_PUSH_TREE (crtl->eh.ehspec_data, NULL_TREE);
+ VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
else
- VARRAY_PUSH_UCHAR (crtl->eh.ehspec_data, 0);
+ VEC_safe_push (uchar, gc, cfun->eh->ehspec_data.other, 0);
}
return n->filter;
we use lots of landing pads, and so every type or list can share
the same filter value, which saves table space. */
-static void
+void
assign_filter_values (void)
{
int i;
htab_t ttypes, ehspec;
+ eh_region r;
+ eh_catch c;
- crtl->eh.ttype_data = VEC_alloc (tree, gc, 16);
+ cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
if (targetm.arm_eabi_unwinder)
- VARRAY_TREE_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
+ cfun->eh->ehspec_data.arm_eabi = VEC_alloc (tree, gc, 64);
else
- VARRAY_UCHAR_INIT (crtl->eh.ehspec_data, 64, "ehspec_data");
+ cfun->eh->ehspec_data.other = VEC_alloc (uchar, gc, 64);
ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
- for (i = cfun->eh->last_region_number; i > 0; --i)
+ for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
{
- struct eh_region *r;
-
- r = VEC_index (eh_region, cfun->eh->region_array, i);
-
- /* Mind we don't process a region more than once. */
- if (!r || r->region_number != i)
+ if (r == NULL)
continue;
switch (r->type)
{
- case ERT_CATCH:
- /* Whatever type_list is (NULL or true list), we build a list
- of filters for the region. */
- r->u.eh_catch.filter_list = NULL_TREE;
-
- if (r->u.eh_catch.type_list != NULL)
+ case ERT_TRY:
+ for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
{
- /* Get a filter value for each of the types caught and store
- them in the region's dedicated list. */
- tree tp_node = r->u.eh_catch.type_list;
+ /* Whatever type_list is (NULL or true list), we build a list
+ of filters for the region. */
+ c->filter_list = NULL_TREE;
- for (;tp_node; tp_node = TREE_CHAIN (tp_node))
+ if (c->type_list != NULL)
{
- int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
- tree flt_node = build_int_cst (NULL_TREE, flt);
+ /* Get a filter value for each of the types caught and store
+ them in the region's dedicated list. */
+ tree tp_node = c->type_list;
+
+ for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
+ {
+ int flt = add_ttypes_entry (ttypes, TREE_VALUE (tp_node));
+ tree flt_node = build_int_cst (integer_type_node, flt);
- r->u.eh_catch.filter_list
- = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
+ c->filter_list
+ = tree_cons (NULL_TREE, flt_node, c->filter_list);
+ }
}
- }
- else
- {
- /* Get a filter value for the NULL list also since it will need
- an action record anyway. */
- int flt = add_ttypes_entry (ttypes, NULL);
- tree flt_node = build_int_cst (NULL_TREE, flt);
+ else
+ {
+ /* Get a filter value for the NULL list also since it
+ will need an action record anyway. */
+ int flt = add_ttypes_entry (ttypes, NULL);
+ tree flt_node = build_int_cst (integer_type_node, flt);
- r->u.eh_catch.filter_list
- = tree_cons (NULL_TREE, flt_node, r->u.eh_catch.filter_list);
+ c->filter_list
+ = tree_cons (NULL_TREE, flt_node, NULL);
+ }
}
-
break;
case ERT_ALLOWED_EXCEPTIONS:
bb->flags |= BB_SUPERBLOCK;
return bb;
}
+\f
+/* A subroutine of dw2_build_landing_pads, also used for edge splitting
+ at the rtl level. Emit the code required by the target at a landing
+ pad for the given region. */
-/* Generate the code to actually handle exceptions, which will follow the
- landing pads. */
-
-static void
-build_post_landing_pads (void)
-{
- int i;
-
- for (i = cfun->eh->last_region_number; i > 0; --i)
- {
- struct eh_region *region;
- rtx seq;
-
- region = VEC_index (eh_region, cfun->eh->region_array, i);
- /* Mind we don't process a region more than once. */
- if (!region || region->region_number != i)
- continue;
-
- switch (region->type)
- {
- case ERT_TRY:
- /* ??? Collect the set of all non-overlapping catch handlers
- all the way up the chain until blocked by a cleanup. */
- /* ??? Outer try regions can share landing pads with inner
- try regions if the types are completely non-overlapping,
- and there are no intervening cleanups. */
-
- region->post_landing_pad = gen_label_rtx ();
-
- start_sequence ();
-
- emit_label (region->post_landing_pad);
-
- /* ??? It is mighty inconvenient to call back into the
- switch statement generation code in expand_end_case.
- Rapid prototyping sez a sequence of ifs. */
- {
- struct eh_region *c;
- for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
- {
- if (c->u.eh_catch.type_list == NULL)
- emit_jump (c->label);
- else
- {
- /* Need for one cmp/jump per type caught. Each type
- list entry has a matching entry in the filter list
- (see assign_filter_values). */
- tree tp_node = c->u.eh_catch.type_list;
- tree flt_node = c->u.eh_catch.filter_list;
-
- for (; tp_node; )
- {
- emit_cmp_and_jump_insns
- (crtl->eh.filter,
- GEN_INT (tree_low_cst (TREE_VALUE (flt_node), 0)),
- EQ, NULL_RTX,
- targetm.eh_return_filter_mode (), 0, c->label);
-
- tp_node = TREE_CHAIN (tp_node);
- flt_node = TREE_CHAIN (flt_node);
- }
- }
- }
- }
-
- /* We delay the generation of the _Unwind_Resume until we generate
- landing pads. We emit a marker here so as to get good control
- flow data in the meantime. */
- region->resume
- = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
- emit_barrier ();
-
- seq = get_insns ();
- end_sequence ();
-
- emit_to_new_bb_before (seq, region->u.eh_try.eh_catch->label);
-
- break;
-
- case ERT_ALLOWED_EXCEPTIONS:
- region->post_landing_pad = gen_label_rtx ();
-
- start_sequence ();
-
- emit_label (region->post_landing_pad);
-
- emit_cmp_and_jump_insns (crtl->eh.filter,
- GEN_INT (region->u.allowed.filter),
- EQ, NULL_RTX,
- targetm.eh_return_filter_mode (), 0, region->label);
-
- /* We delay the generation of the _Unwind_Resume until we generate
- landing pads. We emit a marker here so as to get good control
- flow data in the meantime. */
- region->resume
- = emit_jump_insn (gen_rtx_RESX (VOIDmode, region->region_number));
- emit_barrier ();
-
- seq = get_insns ();
- end_sequence ();
-
- emit_to_new_bb_before (seq, region->label);
- break;
-
- case ERT_CLEANUP:
- case ERT_MUST_NOT_THROW:
- region->post_landing_pad = region->label;
- break;
-
- case ERT_CATCH:
- case ERT_THROW:
- /* Nothing to do. */
- break;
-
- default:
- gcc_unreachable ();
- }
- }
-}
-
-/* Replace RESX patterns with jumps to the next handler if any, or calls to
- _Unwind_Resume otherwise. */
-
-static void
-connect_post_landing_pads (void)
-{
- int i;
-
- for (i = cfun->eh->last_region_number; i > 0; --i)
- {
- struct eh_region *region;
- struct eh_region *outer;
- rtx seq;
- rtx barrier;
-
- region = VEC_index (eh_region, cfun->eh->region_array, i);
- /* Mind we don't process a region more than once. */
- if (!region || region->region_number != i)
- continue;
-
- /* If there is no RESX, or it has been deleted by flow, there's
- nothing to fix up. */
- if (! region->resume || INSN_DELETED_P (region->resume))
- continue;
-
- /* Search for another landing pad in this function. */
- for (outer = region->outer; outer ; outer = outer->outer)
- if (outer->post_landing_pad)
- break;
-
- start_sequence ();
-
- if (outer)
- {
- edge e;
- basic_block src, dest;
-
- emit_jump (outer->post_landing_pad);
- src = BLOCK_FOR_INSN (region->resume);
- dest = BLOCK_FOR_INSN (outer->post_landing_pad);
- while (EDGE_COUNT (src->succs) > 0)
- remove_edge (EDGE_SUCC (src, 0));
- e = make_edge (src, dest, 0);
- e->probability = REG_BR_PROB_BASE;
- e->count = src->count;
- }
- else
- {
- emit_library_call (unwind_resume_libfunc, LCT_THROW,
- VOIDmode, 1, crtl->eh.exc_ptr, ptr_mode);
-
- /* What we just emitted was a throwing libcall, so it got a
- barrier automatically added after it. If the last insn in
- the libcall sequence isn't the barrier, it's because the
- target emits multiple insns for a call, and there are insns
- after the actual call insn (which are redundant and would be
- optimized away). The barrier is inserted exactly after the
- call insn, so let's go get that and delete the insns after
- it, because below we need the barrier to be the last insn in
- the sequence. */
- delete_insns_since (NEXT_INSN (last_call_insn ()));
- }
+void
+expand_dw2_landing_pad_for_region (eh_region region)
+{
+#ifdef HAVE_exception_receiver
+ if (HAVE_exception_receiver)
+ emit_insn (gen_exception_receiver ());
+ else
+#endif
+#ifdef HAVE_nonlocal_goto_receiver
+ if (HAVE_nonlocal_goto_receiver)
+ emit_insn (gen_nonlocal_goto_receiver ());
+ else
+#endif
+ { /* Nothing */ }
- seq = get_insns ();
- end_sequence ();
- barrier = emit_insn_before (seq, region->resume);
- /* Avoid duplicate barrier. */
- gcc_assert (BARRIER_P (barrier));
- delete_insn (barrier);
- delete_insn (region->resume);
-
- /* ??? From tree-ssa we can wind up with catch regions whose
- label is not instantiated, but whose resx is present. Now
- that we've dealt with the resx, kill the region. */
- if (region->label == NULL && region->type == ERT_CLEANUP)
- remove_eh_handler (region);
- }
+ if (region->exc_ptr_reg)
+ emit_move_insn (region->exc_ptr_reg,
+ gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
+ if (region->filter_reg)
+ emit_move_insn (region->filter_reg,
+ gen_rtx_REG (targetm.eh_return_filter_mode (),
+ EH_RETURN_DATA_REGNO (1)));
}
-\f
+/* Expand the extra code needed at landing pads for dwarf2 unwinding. */
+
static void
dw2_build_landing_pads (void)
{
int i;
+ eh_landing_pad lp;
+ int e_flags = EDGE_FALLTHRU;
- for (i = cfun->eh->last_region_number; i > 0; --i)
+ /* If we're going to partition blocks, we need to be able to add
+ new landing pads later, which means that we need to hold on to
+ the post-landing-pad block. Prevent it from being merged away.
+ We'll remove this bit after partitioning. */
+ if (flag_reorder_blocks_and_partition)
+ e_flags |= EDGE_PRESERVE;
+
+ for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
{
- struct eh_region *region;
- rtx seq;
basic_block bb;
+ rtx seq;
edge e;
- region = VEC_index (eh_region, cfun->eh->region_array, i);
- /* Mind we don't process a region more than once. */
- if (!region || region->region_number != i)
- continue;
-
- if (region->type != ERT_CLEANUP
- && region->type != ERT_TRY
- && region->type != ERT_ALLOWED_EXCEPTIONS)
+ if (lp == NULL || lp->post_landing_pad == NULL)
continue;
start_sequence ();
- region->landing_pad = gen_label_rtx ();
- emit_label (region->landing_pad);
-
-#ifdef HAVE_exception_receiver
- if (HAVE_exception_receiver)
- emit_insn (gen_exception_receiver ());
- else
-#endif
-#ifdef HAVE_nonlocal_goto_receiver
- if (HAVE_nonlocal_goto_receiver)
- emit_insn (gen_nonlocal_goto_receiver ());
- else
-#endif
- { /* Nothing */ }
+ lp->landing_pad = gen_label_rtx ();
+ emit_label (lp->landing_pad);
+ LABEL_PRESERVE_P (lp->landing_pad) = 1;
- emit_move_insn (crtl->eh.exc_ptr,
- gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
- emit_move_insn (crtl->eh.filter,
- gen_rtx_REG (targetm.eh_return_filter_mode (),
- EH_RETURN_DATA_REGNO (1)));
+ expand_dw2_landing_pad_for_region (lp->region);
seq = get_insns ();
end_sequence ();
- bb = emit_to_new_bb_before (seq, region->post_landing_pad);
- e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
+ bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
+ e = make_edge (bb, bb->next_bb, e_flags);
e->count = bb->count;
e->probability = REG_BR_PROB_BASE;
}
}
\f
-struct sjlj_lp_info
-{
- int directly_reachable;
- int action_index;
- int dispatch_index;
- int call_site_index;
-};
-
-static bool
-sjlj_find_directly_reachable_regions (struct sjlj_lp_info *lp_info)
-{
- rtx insn;
- bool found_one = false;
-
- for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
- {
- struct eh_region *region;
- enum reachable_code rc;
- tree type_thrown;
- rtx note;
-
- if (! INSN_P (insn))
- continue;
-
- note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
- if (!note || INTVAL (XEXP (note, 0)) <= 0)
- continue;
-
- region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
- if (!region)
- continue;
-
- type_thrown = NULL_TREE;
- if (region->type == ERT_THROW)
- {
- type_thrown = region->u.eh_throw.type;
- region = region->outer;
- }
-
- /* Find the first containing region that might handle the exception.
- That's the landing pad to which we will transfer control. */
- rc = RNL_NOT_CAUGHT;
- for (; region; region = region->outer)
- {
- rc = reachable_next_level (region, type_thrown, NULL, false);
- if (rc != RNL_NOT_CAUGHT)
- break;
- }
- if (rc == RNL_MAYBE_CAUGHT || rc == RNL_CAUGHT)
- {
- lp_info[region->region_number].directly_reachable = 1;
- found_one = true;
- }
- }
+static VEC (int, heap) *sjlj_lp_call_site_index;
- return found_one;
-}
+/* Process all active landing pads. Assign each one a compact dispatch
+ index, and a call-site index. */
-static void
-sjlj_assign_call_site_values (rtx dispatch_label, struct sjlj_lp_info *lp_info)
+static int
+sjlj_assign_call_site_values (void)
{
htab_t ar_hash;
- int i, index;
-
- /* First task: build the action table. */
+ int i, disp_index;
+ eh_landing_pad lp;
- VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
+ crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64);
ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
- for (i = cfun->eh->last_region_number; i > 0; --i)
- if (lp_info[i].directly_reachable)
- {
- struct eh_region *r = VEC_index (eh_region, cfun->eh->region_array, i);
-
- r->landing_pad = dispatch_label;
- lp_info[i].action_index = collect_one_action_chain (ar_hash, r);
- if (lp_info[i].action_index != -1)
- crtl->uses_eh_lsda = 1;
- }
-
- htab_delete (ar_hash);
-
- /* Next: assign dispatch values. In dwarf2 terms, this would be the
- landing pad label for the region. For sjlj though, there is one
- common landing pad from which we dispatch to the post-landing pads.
-
- A region receives a dispatch index if it is directly reachable
- and requires in-function processing. Regions that share post-landing
- pads may share dispatch indices. */
- /* ??? Post-landing pad sharing doesn't actually happen at the moment
- (see build_post_landing_pads) so we don't bother checking for it. */
-
- index = 0;
- for (i = cfun->eh->last_region_number; i > 0; --i)
- if (lp_info[i].directly_reachable)
- lp_info[i].dispatch_index = index++;
-
- /* Finally: assign call-site values. If dwarf2 terms, this would be
- the region number assigned by convert_to_eh_region_ranges, but
- handles no-action and must-not-throw differently. */
-
+ disp_index = 0;
call_site_base = 1;
- for (i = cfun->eh->last_region_number; i > 0; --i)
- if (lp_info[i].directly_reachable)
+ for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
+ if (lp && lp->post_landing_pad)
{
- int action = lp_info[i].action_index;
+ int action, call_site;
+
+ /* First: build the action table. */
+ action = collect_one_action_chain (ar_hash, lp->region);
+	  /* Next: assign call-site values. In dwarf2 terms, this would be
+ the region number assigned by convert_to_eh_region_ranges, but
+ handles no-action and must-not-throw differently. */
/* Map must-not-throw to otherwise unused call-site index 0. */
if (action == -2)
- index = 0;
+ call_site = 0;
/* Map no-action to otherwise unused call-site index -1. */
else if (action == -1)
- index = -1;
+ call_site = -1;
/* Otherwise, look it up in the table. */
else
- index = add_call_site (GEN_INT (lp_info[i].dispatch_index), action);
+ call_site = add_call_site (GEN_INT (disp_index), action, 0);
+ VEC_replace (int, sjlj_lp_call_site_index, i, call_site);
- lp_info[i].call_site_index = index;
+ disp_index++;
}
+
+ htab_delete (ar_hash);
+
+ return disp_index;
}
+/* Emit code to record the current call-site index before every
+ insn that can throw. */
+
static void
-sjlj_mark_call_sites (struct sjlj_lp_info *lp_info)
+sjlj_mark_call_sites (void)
{
int last_call_site = -2;
rtx insn, mem;
for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
{
- struct eh_region *region;
+ eh_landing_pad lp;
+ eh_region r;
+ bool nothrow;
int this_call_site;
- rtx note, before, p;
+ rtx before, p;
/* Reset value tracking at extended basic block boundaries. */
if (LABEL_P (insn))
if (! INSN_P (insn))
continue;
- note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
-
- /* Calls that are known to not throw need not be marked. */
- if (note && INTVAL (XEXP (note, 0)) <= 0)
+ nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
+ if (nothrow)
continue;
-
- if (note)
- region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
- else
- region = NULL;
-
- if (!region)
+ if (lp)
+ this_call_site = VEC_index (int, sjlj_lp_call_site_index, lp->index);
+ else if (r == NULL)
{
/* Calls (and trapping insns) without notes are outside any
exception handling region in this function. Mark them as
no action. */
- if (CALL_P (insn)
- || (flag_non_call_exceptions
- && may_trap_p (PATTERN (insn))))
- this_call_site = -1;
- else
- continue;
+ this_call_site = -1;
}
else
- this_call_site = lp_info[region->region_number].call_site_index;
+ {
+ gcc_assert (r->type == ERT_MUST_NOT_THROW);
+ this_call_site = 0;
+ }
+
+ if (this_call_site != -1)
+ crtl->uses_eh_lsda = 1;
if (this_call_site == last_call_site)
continue;
{
rtx fn_begin, fc, mem, seq;
bool fn_begin_outside_block;
+ rtx personality = get_personality_function (current_function_decl);
fc = crtl->eh.sjlj_fc;
/* We're storing this libcall's address into memory instead of
calling it directly. Thus, we must call assemble_external_libcall
here, as we can not depend on emit_library_call to do it for us. */
- assemble_external_libcall (eh_personality_libfunc);
+ assemble_external_libcall (personality);
mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
- emit_move_insn (mem, eh_personality_libfunc);
+ emit_move_insn (mem, personality);
mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
if (crtl->uses_eh_lsda)
else
emit_move_insn (mem, const0_rtx);
+ if (dispatch_label)
+ {
#ifdef DONT_USE_BUILTIN_SETJMP
- {
- rtx x;
- x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
- TYPE_MODE (integer_type_node), 1,
- plus_constant (XEXP (fc, 0),
- sjlj_fc_jbuf_ofs), Pmode);
-
- emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
- TYPE_MODE (integer_type_node), 0, dispatch_label);
- add_reg_br_prob_note (get_insns (), REG_BR_PROB_BASE/100);
- }
-#else
- expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0), sjlj_fc_jbuf_ofs),
+ rtx x, last;
+ x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
+ TYPE_MODE (integer_type_node), 1,
+ plus_constant (XEXP (fc, 0),
+ sjlj_fc_jbuf_ofs), Pmode);
+
+ emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
+ TYPE_MODE (integer_type_node), 0,
dispatch_label);
+ last = get_last_insn ();
+ if (JUMP_P (last) && any_condjump_p (last))
+ {
+ gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
+ add_reg_note (last, REG_BR_PROB, GEN_INT (REG_BR_PROB_BASE / 100));
+ }
+#else
+ expand_builtin_setjmp_setup (plus_constant (XEXP (fc, 0),
+ sjlj_fc_jbuf_ofs),
+ dispatch_label);
#endif
+ }
emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
1, XEXP (fc, 0), Pmode);
static void
sjlj_emit_function_exit (void)
{
- rtx seq;
- edge e;
- edge_iterator ei;
+ rtx seq, insn;
start_sequence ();
post-dominates all can_throw_internal instructions. This is
the last possible moment. */
- FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
- if (e->flags & EDGE_FALLTHRU)
- break;
- if (e)
- {
- rtx insn;
+ insn = crtl->eh.sjlj_exit_after;
+ if (LABEL_P (insn))
+ insn = NEXT_INSN (insn);
- /* Figure out whether the place we are supposed to insert libcall
- is inside the last basic block or after it. In the other case
- we need to emit to edge. */
- gcc_assert (e->src->next_bb == EXIT_BLOCK_PTR);
- for (insn = BB_HEAD (e->src); ; insn = NEXT_INSN (insn))
- {
- if (insn == crtl->eh.sjlj_exit_after)
- {
- if (LABEL_P (insn))
- insn = NEXT_INSN (insn);
- emit_insn_after (seq, insn);
- return;
- }
- if (insn == BB_END (e->src))
- break;
- }
- insert_insn_on_edge (seq, e);
- }
+ emit_insn_after (seq, insn);
}
static void
-sjlj_emit_dispatch_table (rtx dispatch_label, struct sjlj_lp_info *lp_info)
+sjlj_emit_dispatch_table (rtx dispatch_label, int num_dispatch)
{
enum machine_mode unwind_word_mode = targetm.unwind_word_mode ();
enum machine_mode filter_mode = targetm.eh_return_filter_mode ();
- int i, first_reachable;
- rtx mem, dispatch, seq, fc;
- rtx before;
+ eh_landing_pad lp;
+ rtx mem, seq, fc, before, exc_ptr_reg, filter_reg;
+ rtx first_reachable_label;
basic_block bb;
+ eh_region r;
edge e;
+ int i, disp_index;
+ gimple switch_stmt;
fc = crtl->eh.sjlj_fc;
#ifndef DONT_USE_BUILTIN_SETJMP
expand_builtin_setjmp_receiver (dispatch_label);
-#endif
- /* Load up dispatch index, exc_ptr and filter values from the
- function context. */
- mem = adjust_address (fc, TYPE_MODE (integer_type_node),
- sjlj_fc_call_site_ofs);
- dispatch = copy_to_reg (mem);
+ /* The caller of expand_builtin_setjmp_receiver is responsible for
+ making sure that the label doesn't vanish. The only other caller
+ is the expander for __builtin_setjmp_receiver, which places this
+ label on the nonlocal_goto_label list. Since we're modeling these
+ CFG edges more exactly, we can use the forced_labels list instead. */
+ LABEL_PRESERVE_P (dispatch_label) = 1;
+ forced_labels
+ = gen_rtx_EXPR_LIST (VOIDmode, dispatch_label, forced_labels);
+#endif
+ /* Load up exc_ptr and filter values from the function context. */
mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
if (unwind_word_mode != ptr_mode)
{
mem = convert_to_mode (ptr_mode, mem, 0);
#endif
}
- emit_move_insn (crtl->eh.exc_ptr, mem);
+ exc_ptr_reg = force_reg (ptr_mode, mem);
mem = adjust_address (fc, unwind_word_mode,
sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
if (unwind_word_mode != filter_mode)
mem = convert_to_mode (filter_mode, mem, 0);
- emit_move_insn (crtl->eh.filter, mem);
+ filter_reg = force_reg (filter_mode, mem);
/* Jump to one of the directly reachable regions. */
- /* ??? This really ought to be using a switch statement. */
- first_reachable = 0;
- for (i = cfun->eh->last_region_number; i > 0; --i)
+ disp_index = 0;
+ first_reachable_label = NULL;
+
+ /* If there's exactly one call site in the function, don't bother
+ generating a switch statement. */
+ switch_stmt = NULL;
+ if (num_dispatch > 1)
{
- if (! lp_info[i].directly_reachable)
- continue;
+ tree disp;
- if (! first_reachable)
- {
- first_reachable = i;
- continue;
- }
+ mem = adjust_address (fc, TYPE_MODE (integer_type_node),
+ sjlj_fc_call_site_ofs);
+ disp = make_tree (integer_type_node, mem);
+
+ switch_stmt = gimple_build_switch_nlabels (num_dispatch, disp, NULL);
+ }
+
+ for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
+ if (lp && lp->post_landing_pad)
+ {
+ rtx seq2, label;
+
+ start_sequence ();
+
+ lp->landing_pad = dispatch_label;
+
+ if (num_dispatch > 1)
+ {
+ tree t_label, case_elt, t;
+
+ t_label = create_artificial_label (UNKNOWN_LOCATION);
+ t = build_int_cst (integer_type_node, disp_index);
+ case_elt = build_case_label (t, NULL, t_label);
+ gimple_switch_set_label (switch_stmt, disp_index, case_elt);
+
+ label = label_rtx (t_label);
+ }
+ else
+ label = gen_label_rtx ();
+
+ if (disp_index == 0)
+ first_reachable_label = label;
+ emit_label (label);
+
+ r = lp->region;
+ if (r->exc_ptr_reg)
+ emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
+ if (r->filter_reg)
+ emit_move_insn (r->filter_reg, filter_reg);
+
+ seq2 = get_insns ();
+ end_sequence ();
+
+ before = label_rtx (lp->post_landing_pad);
+ bb = emit_to_new_bb_before (seq2, before);
+ e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
+ e->count = bb->count;
+ e->probability = REG_BR_PROB_BASE;
+
+ disp_index++;
+ }
+ gcc_assert (disp_index == num_dispatch);
- emit_cmp_and_jump_insns (dispatch, GEN_INT (lp_info[i].dispatch_index),
- EQ, NULL_RTX, TYPE_MODE (integer_type_node), 0,
- ((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, i))
- ->post_landing_pad);
+ if (num_dispatch > 1)
+ {
+ expand_case (switch_stmt);
+ expand_builtin_trap ();
}
seq = get_insns ();
end_sequence ();
- before = (((struct eh_region *)VEC_index (eh_region, cfun->eh->region_array, first_reachable))
- ->post_landing_pad);
-
- bb = emit_to_new_bb_before (seq, before);
- e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
- e->count = bb->count;
- e->probability = REG_BR_PROB_BASE;
+ bb = emit_to_new_bb_before (seq, first_reachable_label);
+ if (num_dispatch == 1)
+ {
+ e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
+ e->count = bb->count;
+ e->probability = REG_BR_PROB_BASE;
+ }
}
static void
sjlj_build_landing_pads (void)
{
- struct sjlj_lp_info *lp_info;
+ int num_dispatch;
- lp_info = XCNEWVEC (struct sjlj_lp_info, cfun->eh->last_region_number + 1);
+ num_dispatch = VEC_length (eh_landing_pad, cfun->eh->lp_array);
+ if (num_dispatch == 0)
+ return;
+ VEC_safe_grow (int, heap, sjlj_lp_call_site_index, num_dispatch);
- if (sjlj_find_directly_reachable_regions (lp_info))
+ num_dispatch = sjlj_assign_call_site_values ();
+ if (num_dispatch > 0)
{
rtx dispatch_label = gen_label_rtx ();
int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
int_size_in_bytes (sjlj_fc_type_node),
align);
- sjlj_assign_call_site_values (dispatch_label, lp_info);
- sjlj_mark_call_sites (lp_info);
-
+ sjlj_mark_call_sites ();
sjlj_emit_function_enter (dispatch_label);
- sjlj_emit_dispatch_table (dispatch_label, lp_info);
+ sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
+ sjlj_emit_function_exit ();
+ }
+
+ /* If we do not have any landing pads, we may still need to register a
+ personality routine and (empty) LSDA to handle must-not-throw regions. */
+ else if (function_needs_eh_personality (cfun) != eh_personality_none)
+ {
+ int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
+ TYPE_MODE (sjlj_fc_type_node),
+ TYPE_ALIGN (sjlj_fc_type_node));
+ crtl->eh.sjlj_fc
+ = assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
+ int_size_in_bytes (sjlj_fc_type_node),
+ align);
+
+ sjlj_mark_call_sites ();
+ sjlj_emit_function_enter (NULL_RTX);
sjlj_emit_function_exit ();
}
- free (lp_info);
+ VEC_free (int, heap, sjlj_lp_call_site_index);
}
-void
+/* After initial rtl generation, call back to finish generating
+ exception support code. */
+
+static void
finish_eh_generation (void)
{
basic_block bb;
- /* Nothing to do if no regions created. */
- if (cfun->eh->region_tree == NULL)
- return;
-
- /* The object here is to provide find_basic_blocks with detailed
- information (via reachable_handlers) on how exception control
- flows within the function. In this first pass, we can include
- type information garnered from ERT_THROW and ERT_ALLOWED_EXCEPTIONS
- regions, and hope that it will be useful in deleting unreachable
- handlers. Subsequently, we will generate landing pads which will
- connect many of the handlers, and then type information will not
- be effective. Still, this is a win over previous implementations. */
-
- /* These registers are used by the landing pads. Make sure they
- have been generated. */
- get_exception_pointer ();
- get_exception_filter ();
-
/* Construct the landing pads. */
-
- assign_filter_values ();
- build_post_landing_pads ();
- connect_post_landing_pads ();
- if (USING_SJLJ_EXCEPTIONS)
+ if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
sjlj_build_landing_pads ();
else
dw2_build_landing_pads ();
-
- crtl->eh.built_landing_pads = 1;
-
- /* We've totally changed the CFG. Start over. */
- find_exception_handler_labels ();
break_superblocks ();
- if (USING_SJLJ_EXCEPTIONS
+
+ if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
/* Kludge for Alpha/Tru64 (see alpha_gp_save_rtx). */
|| single_succ_edge (ENTRY_BLOCK_PTR)->insns.r)
commit_edge_insertions ();
+
+ /* Redirect all EH edges from the post_landing_pad to the landing pad. */
FOR_EACH_BB (bb)
{
- edge e;
+ eh_landing_pad lp;
edge_iterator ei;
- bool eh = false;
- for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
+ edge e;
+
+ lp = get_eh_landing_pad_from_rtx (BB_END (bb));
+
+ FOR_EACH_EDGE (e, ei, bb->succs)
+ if (e->flags & EDGE_EH)
+ break;
+
+ /* We should not have generated any new throwing insns during this
+ pass, and we should not have lost any EH edges, so we only need
+ to handle two cases here:
+ (1) reachable handler and an existing edge to post-landing-pad,
+ (2) no reachable handler and no edge. */
+ gcc_assert ((lp != NULL) == (e != NULL));
+ if (lp != NULL)
{
- if (e->flags & EDGE_EH)
- {
- remove_edge (e);
- eh = true;
- }
- else
- ei_next (&ei);
+ gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));
+
+ redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
+ e->flags |= (CALL_P (BB_END (bb))
+ ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
+ : EDGE_ABNORMAL);
}
- if (eh)
- rtl_make_eh_edge (NULL, bb, BB_END (bb));
}
}
-\f
-static hashval_t
-ehl_hash (const void *pentry)
-{
- const struct ehl_map_entry *const entry
- = (const struct ehl_map_entry *) pentry;
-
- /* 2^32 * ((sqrt(5) - 1) / 2) */
- const hashval_t scaled_golden_ratio = 0x9e3779b9;
- return CODE_LABEL_NUMBER (entry->label) * scaled_golden_ratio;
-}
-static int
-ehl_eq (const void *pentry, const void *pdata)
+static bool
+gate_handle_eh (void)
{
- const struct ehl_map_entry *const entry
- = (const struct ehl_map_entry *) pentry;
- const struct ehl_map_entry *const data
- = (const struct ehl_map_entry *) pdata;
-
- return entry->label == data->label;
+ /* Nothing to do if no regions created. */
+ return cfun->eh->region_tree != NULL;
}
-/* This section handles removing dead code for flow. */
-
-/* Remove LABEL from exception_handler_label_map. */
-
-static void
-remove_exception_handler_label (rtx label)
+/* Complete generation of exception handling code. */
+static unsigned int
+rest_of_handle_eh (void)
{
- struct ehl_map_entry **slot, tmp;
-
- /* If exception_handler_label_map was not built yet,
- there is nothing to do. */
- if (crtl->eh.exception_handler_label_map == NULL)
- return;
-
- tmp.label = label;
- slot = (struct ehl_map_entry **)
- htab_find_slot (crtl->eh.exception_handler_label_map, &tmp, NO_INSERT);
- gcc_assert (slot);
-
- htab_clear_slot (crtl->eh.exception_handler_label_map, (void **) slot);
+ finish_eh_generation ();
+ cleanup_cfg (CLEANUP_NO_INSN_DEL);
+ return 0;
}
-/* Splice REGION from the region tree and replace it by REPLACE etc. */
-
-static void
-remove_eh_handler_and_replace (struct eh_region *region,
- struct eh_region *replace)
+struct rtl_opt_pass pass_rtl_eh =
{
- struct eh_region **pp, **pp_start, *p, *outer, *inner;
- rtx lab;
-
- outer = region->outer;
- /* For the benefit of efficiently handling REG_EH_REGION notes,
- replace this region in the region array with its containing
- region. Note that previous region deletions may result in
- multiple copies of this region in the array, so we have a
- list of alternate numbers by which we are known. */
-
- VEC_replace (eh_region, cfun->eh->region_array, region->region_number,
- replace);
- if (region->aka)
- {
- unsigned i;
- bitmap_iterator bi;
-
- EXECUTE_IF_SET_IN_BITMAP (region->aka, 0, i, bi)
- {
- VEC_replace (eh_region, cfun->eh->region_array, i, replace);
- }
- }
-
- if (replace)
- {
- if (!replace->aka)
- replace->aka = BITMAP_GGC_ALLOC ();
- if (region->aka)
- bitmap_ior_into (replace->aka, region->aka);
- bitmap_set_bit (replace->aka, region->region_number);
- }
-
- if (crtl->eh.built_landing_pads)
- lab = region->landing_pad;
- else
- lab = region->label;
- if (lab)
- remove_exception_handler_label (lab);
-
- if (outer)
- pp_start = &outer->inner;
- else
- pp_start = &cfun->eh->region_tree;
- for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
- continue;
- *pp = region->next_peer;
-
- if (replace)
- pp_start = &replace->inner;
- else
- pp_start = &cfun->eh->region_tree;
- inner = region->inner;
- if (inner)
- {
- for (p = inner; p->next_peer ; p = p->next_peer)
- p->outer = replace;
- p->outer = replace;
-
- p->next_peer = *pp_start;
- *pp_start = inner;
- }
-
- if (region->type == ERT_CATCH)
- {
- struct eh_region *eh_try, *next, *prev;
-
- for (eh_try = region->next_peer;
- eh_try->type == ERT_CATCH;
- eh_try = eh_try->next_peer)
- continue;
- gcc_assert (eh_try->type == ERT_TRY);
-
- next = region->u.eh_catch.next_catch;
- prev = region->u.eh_catch.prev_catch;
+ {
+ RTL_PASS,
+ "rtl_eh", /* name */
+ gate_handle_eh, /* gate */
+ rest_of_handle_eh, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ TV_JUMP, /* tv_id */
+ 0, /* properties_required */
+ 0, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ 0 /* todo_flags_finish */
+ }
+};
+\f
+/* This section handles removing dead code for flow. */
- if (next)
- next->u.eh_catch.prev_catch = prev;
- else
- eh_try->u.eh_try.last_catch = prev;
- if (prev)
- prev->u.eh_catch.next_catch = next;
- else
- {
- eh_try->u.eh_try.eh_catch = next;
- if (! next)
- remove_eh_handler (eh_try);
- }
- }
-}
+void
+remove_eh_landing_pad (eh_landing_pad lp)
+{
+ eh_landing_pad *pp;
-/* Splice REGION from the region tree and replace it by the outer region
- etc. */
+ for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
+ continue;
+ *pp = lp->next_lp;
-static void
-remove_eh_handler (struct eh_region *region)
-{
- remove_eh_handler_and_replace (region, region->outer);
+ if (lp->post_landing_pad)
+ EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
+ VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL);
}
-/* LABEL heads a basic block that is about to be deleted. If this
- label corresponds to an exception region, we may be able to
- delete the region. */
+/* Splice REGION from the region tree. */
void
-maybe_remove_eh_handler (rtx label)
+remove_eh_handler (eh_region region)
{
- struct ehl_map_entry **slot, tmp;
- struct eh_region *region;
-
- /* ??? After generating landing pads, it's not so simple to determine
- if the region data is completely unused. One must examine the
- landing pad and the post landing pad, and whether an inner try block
- is referencing the catch handlers directly. */
- if (crtl->eh.built_landing_pads)
- return;
+ eh_region *pp, *pp_start, p, outer;
+ eh_landing_pad lp;
- tmp.label = label;
- slot = (struct ehl_map_entry **)
- htab_find_slot (crtl->eh.exception_handler_label_map, &tmp, NO_INSERT);
- if (! slot)
- return;
- region = (*slot)->region;
- if (! region)
- return;
-
- /* Flow will want to remove MUST_NOT_THROW regions as unreachable
- because there is no path to the fallback call to terminate.
- But the region continues to affect call-site data until there
- are no more contained calls, which we don't see here. */
- if (region->type == ERT_MUST_NOT_THROW)
+ for (lp = region->landing_pads; lp ; lp = lp->next_lp)
{
- htab_clear_slot (crtl->eh.exception_handler_label_map, (void **) slot);
- region->label = NULL_RTX;
+ if (lp->post_landing_pad)
+ EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
+ VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL);
}
- else
- remove_eh_handler (region);
-}
-
-/* Remove Eh region R that has turned out to have no code in its handler. */
-void
-remove_eh_region (int r)
-{
- struct eh_region *region;
+ outer = region->outer;
+ if (outer)
+ pp_start = &outer->inner;
+ else
+ pp_start = &cfun->eh->region_tree;
+ for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
+ continue;
+ if (region->inner)
+ {
+ *pp = p = region->inner;
+ do
+ {
+ p->outer = outer;
+ pp = &p->next_peer;
+ p = *pp;
+ }
+ while (p);
+ }
+ *pp = region->next_peer;
- region = VEC_index (eh_region, cfun->eh->region_array, r);
- remove_eh_handler (region);
+ VEC_replace (eh_region, cfun->eh->region_array, region->index, NULL);
}
-/* Invokes CALLBACK for every exception handler label. Only used by old
- loop hackery; should not be used by new code. */
+/* Invokes CALLBACK for every exception handler landing pad label.
+ Only used by reload hackery; should not be used by new code. */
void
for_each_eh_label (void (*callback) (rtx))
{
- htab_traverse (crtl->eh.exception_handler_label_map, for_each_eh_label_1,
- (void *) &callback);
-}
-
-static int
-for_each_eh_label_1 (void **pentry, void *data)
-{
- struct ehl_map_entry *entry = *(struct ehl_map_entry **)pentry;
- void (*callback) (rtx) = *(void (**) (rtx)) data;
-
- (*callback) (entry->label);
- return 1;
-}
-
-/* Invoke CALLBACK for every exception region in the current function. */
+ eh_landing_pad lp;
+ int i;
-void
-for_each_eh_region (void (*callback) (struct eh_region *))
-{
- int i, n = cfun->eh->last_region_number;
- for (i = 1; i <= n; ++i)
+ for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
{
- struct eh_region *region;
-
- region = VEC_index (eh_region, cfun->eh->region_array, i);
- if (region)
- (*callback) (region);
+ if (lp)
+ {
+ rtx lab = lp->landing_pad;
+ if (lab && LABEL_P (lab))
+ (*callback) (lab);
+ }
}
}
\f
-/* This section describes CFG exception edges for flow. */
-
-/* For communicating between calls to reachable_next_level. */
-struct reachable_info
-{
- tree types_caught;
- tree types_allowed;
- void (*callback) (struct eh_region *, void *);
- void *callback_data;
-};
-
-/* A subroutine of reachable_next_level. Return true if TYPE, or a
- base class of TYPE, is in HANDLED. */
+/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
+ call insn.
+
+ At the gimple level, we use LP_NR
+ > 0 : The statement transfers to landing pad LP_NR
+ = 0 : The statement is outside any EH region
+ < 0 : The statement is within MUST_NOT_THROW region -LP_NR.
+
+ At the rtl level, we use LP_NR
+ > 0 : The insn transfers to landing pad LP_NR
+ = 0 : The insn cannot throw
+ < 0 : The insn is within MUST_NOT_THROW region -LP_NR
+ = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
+ missing note: The insn is outside any EH region.
+
+ ??? This difference probably ought to be avoided. We could stand
+ to record nothrow for arbitrary gimple statements, and so avoid
+ some moderately complex lookups in stmt_could_throw_p. Perhaps
+ NOTHROW should be mapped on both sides to INT_MIN. Perhaps the
+ no-nonlocal-goto property should be recorded elsewhere as a bit
+ on the call_insn directly. Perhaps we should make more use of
+ attaching the trees to call_insns (reachable via symbol_ref in
+ direct call cases) and just pull the data out of the trees. */
-static int
-check_handled (tree handled, tree type)
+void
+make_reg_eh_region_note (rtx insn, int ecf_flags, int lp_nr)
{
- tree t;
-
- /* We can check for exact matches without front-end help. */
- if (! lang_eh_type_covers)
- {
- for (t = handled; t ; t = TREE_CHAIN (t))
- if (TREE_VALUE (t) == type)
- return 1;
- }
+ rtx value;
+ if (ecf_flags & ECF_NOTHROW)
+ value = const0_rtx;
+ else if (lp_nr != 0)
+ value = GEN_INT (lp_nr);
else
- {
- for (t = handled; t ; t = TREE_CHAIN (t))
- if ((*lang_eh_type_covers) (TREE_VALUE (t), type))
- return 1;
- }
-
- return 0;
+ return;
+ add_reg_note (insn, REG_EH_REGION, value);
}
-/* A subroutine of reachable_next_level. If we are collecting a list
- of handlers, add one. After landing pad generation, reference
- it instead of the handlers themselves. Further, the handlers are
- all wired together, so by referencing one, we've got them all.
- Before landing pad generation we reference each handler individually.
-
- LP_REGION contains the landing pad; REGION is the handler. */
+/* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
+ nor perform a non-local goto. Replace the region note if it
+ already exists. */
-static void
-add_reachable_handler (struct reachable_info *info,
- struct eh_region *lp_region, struct eh_region *region)
+void
+make_reg_eh_region_note_nothrow_nononlocal (rtx insn)
{
- if (! info)
- return;
+ rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ rtx intmin = GEN_INT (INT_MIN);
- if (crtl->eh.built_landing_pads)
- info->callback (lp_region, info->callback_data);
+ if (note != 0)
+ XEXP (note, 0) = intmin;
else
- info->callback (region, info->callback_data);
+ add_reg_note (insn, REG_EH_REGION, intmin);
}
-/* Process one level of exception regions for reachability.
- If TYPE_THROWN is non-null, then it is the *exact* type being
- propagated. If INFO is non-null, then collect handler labels
- and caught/allowed type information between invocations. */
+/* Return true if INSN could throw, assuming no REG_EH_REGION note
+ to the contrary. */
-static enum reachable_code
-reachable_next_level (struct eh_region *region, tree type_thrown,
- struct reachable_info *info,
- bool maybe_resx)
+bool
+insn_could_throw_p (const_rtx insn)
{
- switch (region->type)
- {
- case ERT_CLEANUP:
- /* Before landing-pad generation, we model control flow
- directly to the individual handlers. In this way we can
- see that catch handler types may shadow one another. */
- add_reachable_handler (info, region, region);
- return RNL_MAYBE_CAUGHT;
-
- case ERT_TRY:
- {
- struct eh_region *c;
- enum reachable_code ret = RNL_NOT_CAUGHT;
-
- for (c = region->u.eh_try.eh_catch; c ; c = c->u.eh_catch.next_catch)
- {
- /* A catch-all handler ends the search. */
- if (c->u.eh_catch.type_list == NULL)
- {
- add_reachable_handler (info, region, c);
- return RNL_CAUGHT;
- }
-
- if (type_thrown)
- {
- /* If we have at least one type match, end the search. */
- tree tp_node = c->u.eh_catch.type_list;
-
- for (; tp_node; tp_node = TREE_CHAIN (tp_node))
- {
- tree type = TREE_VALUE (tp_node);
-
- if (type == type_thrown
- || (lang_eh_type_covers
- && (*lang_eh_type_covers) (type, type_thrown)))
- {
- add_reachable_handler (info, region, c);
- return RNL_CAUGHT;
- }
- }
-
- /* If we have definitive information of a match failure,
- the catch won't trigger. */
- if (lang_eh_type_covers)
- return RNL_NOT_CAUGHT;
- }
-
- /* At this point, we either don't know what type is thrown or
- don't have front-end assistance to help deciding if it is
- covered by one of the types in the list for this region.
-
- We'd then like to add this region to the list of reachable
- handlers since it is indeed potentially reachable based on the
- information we have.
-
- Actually, this handler is for sure not reachable if all the
- types it matches have already been caught. That is, it is only
- potentially reachable if at least one of the types it catches
- has not been previously caught. */
-
- if (! info)
- ret = RNL_MAYBE_CAUGHT;
- else
- {
- tree tp_node = c->u.eh_catch.type_list;
- bool maybe_reachable = false;
-
- /* Compute the potential reachability of this handler and
- update the list of types caught at the same time. */
- for (; tp_node; tp_node = TREE_CHAIN (tp_node))
- {
- tree type = TREE_VALUE (tp_node);
-
- if (! check_handled (info->types_caught, type))
- {
- info->types_caught
- = tree_cons (NULL, type, info->types_caught);
-
- maybe_reachable = true;
- }
- }
-
- if (maybe_reachable)
- {
- add_reachable_handler (info, region, c);
-
- /* ??? If the catch type is a base class of every allowed
- type, then we know we can stop the search. */
- ret = RNL_MAYBE_CAUGHT;
- }
- }
- }
-
- return ret;
- }
-
- case ERT_ALLOWED_EXCEPTIONS:
- /* An empty list of types definitely ends the search. */
- if (region->u.allowed.type_list == NULL_TREE)
- {
- add_reachable_handler (info, region, region);
- return RNL_CAUGHT;
- }
+ if (!flag_exceptions)
+ return false;
+ if (CALL_P (insn))
+ return true;
+ if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
+ return may_trap_p (PATTERN (insn));
+ return false;
+}
- /* Collect a list of lists of allowed types for use in detecting
- when a catch may be transformed into a catch-all. */
- if (info)
- info->types_allowed = tree_cons (NULL_TREE,
- region->u.allowed.type_list,
- info->types_allowed);
-
- /* If we have definitive information about the type hierarchy,
- then we can tell if the thrown type will pass through the
- filter. */
- if (type_thrown && lang_eh_type_covers)
- {
- if (check_handled (region->u.allowed.type_list, type_thrown))
- return RNL_NOT_CAUGHT;
- else
- {
- add_reachable_handler (info, region, region);
- return RNL_CAUGHT;
- }
- }
+/* Copy a REG_EH_REGION note to each insn that might throw beginning
+ at FIRST and ending at LAST. NOTE_OR_INSN is either the source insn
+ to look for a note, or the note itself. */
- add_reachable_handler (info, region, region);
- return RNL_MAYBE_CAUGHT;
+void
+copy_reg_eh_region_note_forward (rtx note_or_insn, rtx first, rtx last)
+{
+ rtx insn, note = note_or_insn;
- case ERT_CATCH:
- /* Catch regions are handled by their controlling try region. */
- return RNL_NOT_CAUGHT;
+ if (INSN_P (note_or_insn))
+ {
+ note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
+ if (note == NULL)
+ return;
+ }
+ note = XEXP (note, 0);
- case ERT_MUST_NOT_THROW:
- /* Here we end our search, since no exceptions may propagate.
-
- Local landing pads of ERT_MUST_NOT_THROW instructions are reachable
- only via locally handled RESX instructions.
+ for (insn = first; insn != last ; insn = NEXT_INSN (insn))
+ if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
+ && insn_could_throw_p (insn))
+ add_reg_note (insn, REG_EH_REGION, note);
+}
- When we inline a function call, we can bring in new handlers. In order
- to avoid ERT_MUST_NOT_THROW landing pads from being deleted as unreachable
- assume that such handlers exists prior for any inlinable call prior
- inlining decisions are fixed. */
+/* Likewise, but iterate backward. */
- if (maybe_resx)
- {
- add_reachable_handler (info, region, region);
- return RNL_CAUGHT;
- }
- else
- return RNL_BLOCKED;
+void
+copy_reg_eh_region_note_backward (rtx note_or_insn, rtx last, rtx first)
+{
+ rtx insn, note = note_or_insn;
- case ERT_THROW:
- case ERT_UNKNOWN:
- /* Shouldn't see these here. */
- gcc_unreachable ();
- break;
- default:
- gcc_unreachable ();
+ if (INSN_P (note_or_insn))
+ {
+ note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
+ if (note == NULL)
+ return;
}
+ note = XEXP (note, 0);
+
+ for (insn = last; insn != first; insn = PREV_INSN (insn))
+ if (insn_could_throw_p (insn))
+ add_reg_note (insn, REG_EH_REGION, note);
}
-/* Invoke CALLBACK on each region reachable from REGION_NUMBER. */
-void
-foreach_reachable_handler (int region_number, bool is_resx, bool inlinable_call,
- void (*callback) (struct eh_region *, void *),
- void *callback_data)
+/* Extract all EH information from INSN. Return true if the insn
+ was marked NOTHROW. */
+
+static bool
+get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
+ eh_landing_pad *plp)
{
- struct reachable_info info;
- struct eh_region *region;
- tree type_thrown;
+ eh_landing_pad lp = NULL;
+ eh_region r = NULL;
+ bool ret = false;
+ rtx note;
+ int lp_nr;
- memset (&info, 0, sizeof (info));
- info.callback = callback;
- info.callback_data = callback_data;
+ if (! INSN_P (insn))
+ goto egress;
- region = VEC_index (eh_region, cfun->eh->region_array, region_number);
- if (!region)
- return;
+ if (NONJUMP_INSN_P (insn)
+ && GET_CODE (PATTERN (insn)) == SEQUENCE)
+ insn = XVECEXP (PATTERN (insn), 0, 0);
- type_thrown = NULL_TREE;
- if (is_resx)
+ note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note)
{
- /* A RESX leaves a region instead of entering it. Thus the
- region itself may have been deleted out from under us. */
- if (region == NULL)
- return;
- region = region->outer;
+ ret = !insn_could_throw_p (insn);
+ goto egress;
}
- else if (region->type == ERT_THROW)
+
+ lp_nr = INTVAL (XEXP (note, 0));
+ if (lp_nr == 0 || lp_nr == INT_MIN)
{
- type_thrown = region->u.eh_throw.type;
- region = region->outer;
+ ret = true;
+ goto egress;
}
- while (region)
+ if (lp_nr < 0)
+ r = VEC_index (eh_region, cfun->eh->region_array, -lp_nr);
+ else
{
- if (reachable_next_level (region, type_thrown, &info,
- inlinable_call || is_resx) >= RNL_CAUGHT)
- break;
- /* If we have processed one cleanup, there is no point in
- processing any more of them. Each cleanup will have an edge
- to the next outer cleanup region, so the flow graph will be
- accurate. */
- if (region->type == ERT_CLEANUP)
- region = region->u.cleanup.prev_try;
- else
- region = region->outer;
+ lp = VEC_index (eh_landing_pad, cfun->eh->lp_array, lp_nr);
+ r = lp->region;
}
+
+ egress:
+ *plp = lp;
+ *pr = r;
+ return ret;
}
-/* Retrieve a list of labels of exception handlers which can be
- reached by a given insn. */
+/* Return the landing pad to which INSN may go, or NULL if it does not
+ have a reachable landing pad within this function. */
-static void
-arh_to_landing_pad (struct eh_region *region, void *data)
+eh_landing_pad
+get_eh_landing_pad_from_rtx (const_rtx insn)
{
- rtx *p_handlers = (rtx *) data;
- if (! *p_handlers)
- *p_handlers = alloc_INSN_LIST (region->landing_pad, NULL_RTX);
-}
+ eh_landing_pad lp;
+ eh_region r;
-static void
-arh_to_label (struct eh_region *region, void *data)
-{
- rtx *p_handlers = (rtx *) data;
- *p_handlers = alloc_INSN_LIST (region->label, *p_handlers);
+ get_eh_region_and_lp_from_rtx (insn, &r, &lp);
+ return lp;
}
-rtx
-reachable_handlers (rtx insn)
+/* Return the region to which INSN may go, or NULL if it does not
+ have a reachable region within this function. */
+
+eh_region
+get_eh_region_from_rtx (const_rtx insn)
{
- bool is_resx = false;
- rtx handlers = NULL;
- int region_number;
+ eh_landing_pad lp;
+ eh_region r;
- if (JUMP_P (insn)
- && GET_CODE (PATTERN (insn)) == RESX)
- {
- region_number = XINT (PATTERN (insn), 0);
- is_resx = true;
- }
- else
- {
- rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
- if (!note || INTVAL (XEXP (note, 0)) <= 0)
- return NULL;
- region_number = INTVAL (XEXP (note, 0));
- }
+ get_eh_region_and_lp_from_rtx (insn, &r, &lp);
+ return r;
+}
- foreach_reachable_handler (region_number, is_resx, false,
- (crtl->eh.built_landing_pads
- ? arh_to_landing_pad
- : arh_to_label),
- &handlers);
+/* Return true if INSN throws and is caught by something in this function. */
- return handlers;
+bool
+can_throw_internal (const_rtx insn)
+{
+ return get_eh_landing_pad_from_rtx (insn) != NULL;
}
-/* Determine if the given INSN can throw an exception that is caught
- within the function. */
+/* Return true if INSN throws and escapes from the current function. */
bool
-can_throw_internal_1 (int region_number, bool is_resx, bool inlinable_call)
+can_throw_external (const_rtx insn)
{
- struct eh_region *region;
- tree type_thrown;
+ eh_landing_pad lp;
+ eh_region r;
+ bool nothrow;
- region = VEC_index (eh_region, cfun->eh->region_array, region_number);
- if (!region)
+ if (! INSN_P (insn))
return false;
- type_thrown = NULL_TREE;
- if (is_resx)
- region = region->outer;
- else if (region->type == ERT_THROW)
+ if (NONJUMP_INSN_P (insn)
+ && GET_CODE (PATTERN (insn)) == SEQUENCE)
{
- type_thrown = region->u.eh_throw.type;
- region = region->outer;
- }
+ rtx seq = PATTERN (insn);
+ int i, n = XVECLEN (seq, 0);
- /* If this exception is ignored by each and every containing region,
- then control passes straight out. The runtime may handle some
- regions, which also do not require processing internally. */
- for (; region; region = region->outer)
- {
- enum reachable_code how = reachable_next_level (region, type_thrown, 0,
- inlinable_call || is_resx);
- if (how == RNL_BLOCKED)
- return false;
- if (how != RNL_NOT_CAUGHT)
- return true;
- }
+ for (i = 0; i < n; i++)
+ if (can_throw_external (XVECEXP (seq, 0, i)))
+ return true;
- return false;
-}
+ return false;
+ }
-bool
-can_throw_internal (const_rtx insn)
-{
- rtx note;
+ nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
- if (! INSN_P (insn))
+ /* If we can't throw, we obviously can't throw external. */
+ if (nothrow)
return false;
- if (JUMP_P (insn)
- && GET_CODE (PATTERN (insn)) == RESX
- && XINT (PATTERN (insn), 0) > 0)
- return can_throw_internal_1 (XINT (PATTERN (insn), 0), true, false);
-
- if (NONJUMP_INSN_P (insn)
- && GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, 0);
-
- /* Every insn that might throw has an EH_REGION note. */
- note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
- if (!note || INTVAL (XEXP (note, 0)) <= 0)
+ /* If we have an internal landing pad, then we're not external. */
+ if (lp != NULL)
return false;
- return can_throw_internal_1 (INTVAL (XEXP (note, 0)), false, false);
+ /* If we're not within an EH region, then we are external. */
+ if (r == NULL)
+ return true;
+
+ /* The only thing that ought to be left is MUST_NOT_THROW regions,
+ which don't always have landing pads. */
+ gcc_assert (r->type == ERT_MUST_NOT_THROW);
+ return false;
}
-/* Determine if the given INSN can throw an exception that is
- visible outside the function. */
+/* Return true if INSN cannot throw at all. */
bool
-can_throw_external_1 (int region_number, bool is_resx, bool inlinable_call)
+insn_nothrow_p (const_rtx insn)
{
- struct eh_region *region;
- tree type_thrown;
+ eh_landing_pad lp;
+ eh_region r;
- region = VEC_index (eh_region, cfun->eh->region_array, region_number);
- if (!region)
+ if (! INSN_P (insn))
return true;
- type_thrown = NULL_TREE;
- if (is_resx)
- region = region->outer;
- else if (region->type == ERT_THROW)
+ if (NONJUMP_INSN_P (insn)
+ && GET_CODE (PATTERN (insn)) == SEQUENCE)
{
- type_thrown = region->u.eh_throw.type;
- region = region->outer;
- }
+ rtx seq = PATTERN (insn);
+ int i, n = XVECLEN (seq, 0);
- /* If the exception is caught or blocked by any containing region,
- then it is not seen by any calling function. */
- for (; region ; region = region->outer)
- if (reachable_next_level (region, type_thrown, NULL,
- inlinable_call || is_resx) >= RNL_CAUGHT)
- return false;
+ for (i = 0; i < n; i++)
+ if (!insn_nothrow_p (XVECEXP (seq, 0, i)))
+ return false;
- return true;
+ return true;
+ }
+
+ return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
}
+/* Return true if INSN can perform a non-local goto. */
+/* ??? This test is here in this file because it (ab)uses REG_EH_REGION. */
+
bool
-can_throw_external (const_rtx insn)
+can_nonlocal_goto (const_rtx insn)
{
- rtx note;
-
- if (! INSN_P (insn))
- return false;
-
- if (JUMP_P (insn)
- && GET_CODE (PATTERN (insn)) == RESX
- && XINT (PATTERN (insn), 0) > 0)
- return can_throw_external_1 (XINT (PATTERN (insn), 0), true, false);
-
- if (NONJUMP_INSN_P (insn)
- && GET_CODE (PATTERN (insn)) == SEQUENCE)
- insn = XVECEXP (PATTERN (insn), 0, 0);
-
- note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
- if (!note)
+ if (nonlocal_goto_handler_labels && CALL_P (insn))
{
- /* Calls (and trapping insns) without notes are outside any
- exception handling region in this function. We have to
- assume it might throw. Given that the front end and middle
- ends mark known NOTHROW functions, this isn't so wildly
- inaccurate. */
- return (CALL_P (insn)
- || (flag_non_call_exceptions
- && may_trap_p (PATTERN (insn))));
+ rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
+ if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
+ return true;
}
- if (INTVAL (XEXP (note, 0)) <= 0)
- return false;
-
- return can_throw_external_1 (INTVAL (XEXP (note, 0)), false, false);
+ return false;
}
-
+\f
/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls. */
-unsigned int
+static unsigned int
set_nothrow_function_flags (void)
{
rtx insn;
}
}
if (crtl->nothrow
- && (cgraph_function_body_availability (cgraph_node (current_function_decl))
+ && (cgraph_function_body_availability (cgraph_get_node
+ (current_function_decl))
>= AVAIL_AVAILABLE))
- TREE_NOTHROW (current_function_decl) = 1;
+ {
+ struct cgraph_node *node = cgraph_get_node (current_function_decl);
+ struct cgraph_edge *e;
+ for (e = node->callers; e; e = e->next_caller)
+ e->can_throw_external = false;
+ cgraph_set_nothrow_flag (node, true);
+
+ if (dump_file)
+ fprintf (dump_file, "Marking function nothrow: %s\n\n",
+ current_function_name ());
+ }
return 0;
}
{
{
RTL_PASS,
- NULL, /* name */
+ "nothrow", /* name */
NULL, /* gate */
set_nothrow_function_flags, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
- 0, /* tv_id */
+ TV_NONE, /* tv_id */
0, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- 0, /* todo_flags_finish */
+ 0 /* todo_flags_finish */
}
};
\f
/* Various hooks for unwind library. */
+/* Expand the EH support builtin functions:
+ __builtin_eh_pointer and __builtin_eh_filter. */
+
+static eh_region
+expand_builtin_eh_common (tree region_nr_t)
+{
+ HOST_WIDE_INT region_nr;
+ eh_region region;
+
+ gcc_assert (host_integerp (region_nr_t, 0));
+ region_nr = tree_low_cst (region_nr_t, 0);
+
+ region = VEC_index (eh_region, cfun->eh->region_array, region_nr);
+
+ /* ??? We shouldn't have been able to delete an EH region without
+ deleting all the code that depended on it. */
+ gcc_assert (region != NULL);
+
+ return region;
+}
+
+/* Expand to the exc_ptr value from the given eh region. */
+
+rtx
+expand_builtin_eh_pointer (tree exp)
+{
+ eh_region region
+ = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
+ if (region->exc_ptr_reg == NULL)
+ region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
+ return region->exc_ptr_reg;
+}
+
+/* Expand to the filter value from the given eh region. */
+
+rtx
+expand_builtin_eh_filter (tree exp)
+{
+ eh_region region
+ = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
+ if (region->filter_reg == NULL)
+ region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
+ return region->filter_reg;
+}
+
+/* Copy the exc_ptr and filter values from one landing pad's registers
+ to another. This is used to inline the resx statement. */
+
+rtx
+expand_builtin_eh_copy_values (tree exp)
+{
+ eh_region dst
+ = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
+ eh_region src
+ = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
+ enum machine_mode fmode = targetm.eh_return_filter_mode ();
+
+ if (dst->exc_ptr_reg == NULL)
+ dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
+ if (src->exc_ptr_reg == NULL)
+ src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
+
+ if (dst->filter_reg == NULL)
+ dst->filter_reg = gen_reg_rtx (fmode);
+ if (src->filter_reg == NULL)
+ src->filter_reg = gen_reg_rtx (fmode);
+
+ emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
+ emit_move_insn (dst->filter_reg, src->filter_reg);
+
+ return const0_rtx;
+}
+
/* Do any necessary initialization to access arbitrary stack frames.
On the SPARC, this means flushing the register windows. */
#endif
}
+/* Map a non-negative number to an eh return data register number; expands
+ to -1 if no return data register is associated with the input number.
+ At least the inputs 0 and 1 must be mapped; the target may provide more. */
+
rtx
expand_builtin_eh_return_data_regno (tree exp)
{
emit_jump (crtl->eh.ehr_label);
}
+/* Expand __builtin_eh_return. This exit path from the function loads up
+ the eh return data registers, adjusts the stack, and branches to a
+ given PC other than the normal return address. */
+
void
expand_eh_return (void)
{
if ((new_ar = *slot) == NULL)
{
new_ar = XNEW (struct action_record);
- new_ar->offset = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
+ new_ar->offset = VEC_length (uchar, crtl->eh.action_record_data) + 1;
new_ar->filter = filter;
new_ar->next = next;
*slot = new_ar;
push_sleb128 (&crtl->eh.action_record_data, filter);
if (next)
- next -= VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data) + 1;
+ next -= VEC_length (uchar, crtl->eh.action_record_data) + 1;
push_sleb128 (&crtl->eh.action_record_data, next);
}
}
static int
-collect_one_action_chain (htab_t ar_hash, struct eh_region *region)
+collect_one_action_chain (htab_t ar_hash, eh_region region)
{
- struct eh_region *c;
int next;
/* If we've reached the top of the region chain, then we have
switch (region->type)
{
case ERT_CLEANUP:
- /* A cleanup adds a zero filter to the beginning of the chain, but
- there are special cases to look out for. If there are *only*
- cleanups along a path, then it compresses to a zero action.
- Further, if there are multiple cleanups along a path, we only
- need to represent one of them, as that is enough to trigger
- entry to the landing pad at runtime. */
- next = collect_one_action_chain (ar_hash, region->outer);
- if (next <= 0)
- return 0;
- for (c = region->outer; c ; c = c->outer)
- if (c->type == ERT_CLEANUP)
- return next;
- return add_action_record (ar_hash, 0, next);
-
- case ERT_TRY:
- /* Process the associated catch regions in reverse order.
- If there's a catch-all handler, then we don't need to
- search outer regions. Use a magic -3 value to record
- that we haven't done the outer search. */
- next = -3;
- for (c = region->u.eh_try.last_catch; c ; c = c->u.eh_catch.prev_catch)
- {
- if (c->u.eh_catch.type_list == NULL)
- {
- /* Retrieve the filter from the head of the filter list
- where we have stored it (see assign_filter_values). */
- int filter
- = TREE_INT_CST_LOW (TREE_VALUE (c->u.eh_catch.filter_list));
+ {
+ eh_region r;
+ /* A cleanup adds a zero filter to the beginning of the chain, but
+ there are special cases to look out for. If there are *only*
+ cleanups along a path, then it compresses to a zero action.
+ Further, if there are multiple cleanups along a path, we only
+ need to represent one of them, as that is enough to trigger
+ entry to the landing pad at runtime. */
+ next = collect_one_action_chain (ar_hash, region->outer);
+ if (next <= 0)
+ return 0;
+ for (r = region->outer; r ; r = r->outer)
+ if (r->type == ERT_CLEANUP)
+ return next;
+ return add_action_record (ar_hash, 0, next);
+ }
- next = add_action_record (ar_hash, filter, 0);
- }
- else
- {
- /* Once the outer search is done, trigger an action record for
- each filter we have. */
- tree flt_node;
+ case ERT_TRY:
+ {
+ eh_catch c;
+
+ /* Process the associated catch regions in reverse order.
+ If there's a catch-all handler, then we don't need to
+ search outer regions. Use a magic -3 value to record
+ that we haven't done the outer search. */
+ next = -3;
+ for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
+ {
+ if (c->type_list == NULL)
+ {
+ /* Retrieve the filter from the head of the filter list
+ where we have stored it (see assign_filter_values). */
+ int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
+ next = add_action_record (ar_hash, filter, 0);
+ }
+ else
+ {
+ /* Once the outer search is done, trigger an action record for
+ each filter we have. */
+ tree flt_node;
- if (next == -3)
- {
- next = collect_one_action_chain (ar_hash, region->outer);
-
- /* If there is no next action, terminate the chain. */
- if (next == -1)
- next = 0;
- /* If all outer actions are cleanups or must_not_throw,
- we'll have no action record for it, since we had wanted
- to encode these states in the call-site record directly.
- Add a cleanup action to the chain to catch these. */
- else if (next <= 0)
- next = add_action_record (ar_hash, 0, 0);
- }
+ if (next == -3)
+ {
+ next = collect_one_action_chain (ar_hash, region->outer);
+
+ /* If there is no next action, terminate the chain. */
+ if (next == -1)
+ next = 0;
+ /* If all outer actions are cleanups or must_not_throw,
+ we'll have no action record for it, since we had wanted
+ to encode these states in the call-site record directly.
+ Add a cleanup action to the chain to catch these. */
+ else if (next <= 0)
+ next = add_action_record (ar_hash, 0, 0);
+ }
- flt_node = c->u.eh_catch.filter_list;
- for (; flt_node; flt_node = TREE_CHAIN (flt_node))
- {
- int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
- next = add_action_record (ar_hash, filter, next);
- }
- }
- }
- return next;
+ flt_node = c->filter_list;
+ for (; flt_node; flt_node = TREE_CHAIN (flt_node))
+ {
+ int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
+ next = add_action_record (ar_hash, filter, next);
+ }
+ }
+ }
+ return next;
+ }
case ERT_ALLOWED_EXCEPTIONS:
/* An exception specification adds its filter to the
the no handler or cleanup case in that we do require an lsda
to be generated. Return a magic -2 value to record this. */
return -2;
-
- case ERT_CATCH:
- case ERT_THROW:
- /* CATCH regions are handled in TRY above. THROW regions are
- for optimization information only and produce no output. */
- return collect_one_action_chain (ar_hash, region->outer);
-
- default:
- gcc_unreachable ();
}
+
+ gcc_unreachable ();
}
static int
-add_call_site (rtx landing_pad, int action)
+add_call_site (rtx landing_pad, int action, int section)
{
call_site_record record;
-
- record = GGC_NEW (struct call_site_record);
+
+ record = ggc_alloc_call_site_record_d ();
record->landing_pad = landing_pad;
record->action = action;
- VEC_safe_push (call_site_record, gc, crtl->eh.call_site_record, record);
+ VEC_safe_push (call_site_record, gc,
+ crtl->eh.call_site_record[section], record);
- return call_site_base + VEC_length (call_site_record, crtl->eh.call_site_record) - 1;
+ return call_site_base + VEC_length (call_site_record,
+ crtl->eh.call_site_record[section]) - 1;
}
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
The new note numbers will not refer to region numbers, but
instead to call site entries. */
-unsigned int
+static unsigned int
convert_to_eh_region_ranges (void)
{
rtx insn, iter, note;
rtx last_landing_pad = NULL_RTX;
rtx first_no_action_insn = NULL_RTX;
int call_site = 0;
+ int cur_sec = 0;
+ rtx section_switch_note = NULL_RTX;
+ rtx first_no_action_insn_before_switch = NULL_RTX;
+ rtx last_no_action_insn_before_switch = NULL_RTX;
+ int saved_call_site_base = call_site_base;
- if (USING_SJLJ_EXCEPTIONS || cfun->eh->region_tree == NULL)
- return 0;
-
- VARRAY_UCHAR_INIT (crtl->eh.action_record_data, 64, "action_record_data");
+ crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64);
ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
if (INSN_P (iter))
{
- struct eh_region *region;
+ eh_landing_pad lp;
+ eh_region region;
+ bool nothrow;
int this_action;
rtx this_landing_pad;
&& GET_CODE (PATTERN (insn)) == SEQUENCE)
insn = XVECEXP (PATTERN (insn), 0, 0);
- note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
- if (!note)
- {
- if (! (CALL_P (insn)
- || (flag_non_call_exceptions
- && may_trap_p (PATTERN (insn)))))
- continue;
- this_action = -1;
- region = NULL;
- }
+ nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
+ if (nothrow)
+ continue;
+ if (region)
+ this_action = collect_one_action_chain (ar_hash, region);
else
- {
- if (INTVAL (XEXP (note, 0)) <= 0)
- continue;
- region = VEC_index (eh_region, cfun->eh->region_array, INTVAL (XEXP (note, 0)));
- this_action = collect_one_action_chain (ar_hash, region);
- }
+ this_action = -1;
/* Existence of catch handlers, or must-not-throw regions
implies that an lsda is needed (even if empty). */
last_action = -1;
}
- /* Cleanups and handlers may share action chains but not
- landing pads. Collect the landing pad for this region. */
if (this_action >= 0)
- {
- struct eh_region *o;
- for (o = region; ! o->landing_pad ; o = o->outer)
- continue;
- this_landing_pad = o->landing_pad;
- }
+ this_landing_pad = lp->landing_pad;
else
this_landing_pad = NULL_RTX;
if (last_action != this_action
|| last_landing_pad != this_landing_pad)
{
+ /* If there is a queued no-action region in the other section
+ with hot/cold partitioning, emit it now. */
+ if (first_no_action_insn_before_switch)
+ {
+ gcc_assert (this_action != -1
+ && last_action == (first_no_action_insn
+ ? -1 : -3));
+ call_site = add_call_site (NULL_RTX, 0, 0);
+ note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
+ first_no_action_insn_before_switch);
+ NOTE_EH_HANDLER (note) = call_site;
+ note = emit_note_after (NOTE_INSN_EH_REGION_END,
+ last_no_action_insn_before_switch);
+ NOTE_EH_HANDLER (note) = call_site;
+ gcc_assert (last_action != -3
+ || (last_action_insn
+ == last_no_action_insn_before_switch));
+ first_no_action_insn_before_switch = NULL_RTX;
+ last_no_action_insn_before_switch = NULL_RTX;
+ call_site_base++;
+ }
/* If we'd not seen a previous action (-3) or the previous
action was must-not-throw (-2), then we do not need an
end note. */
/* If we delayed the creation of the begin, do it now. */
if (first_no_action_insn)
{
- call_site = add_call_site (NULL_RTX, 0);
+ call_site = add_call_site (NULL_RTX, 0, cur_sec);
note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
first_no_action_insn);
NOTE_EH_HANDLER (note) = call_site;
if (this_action >= -1)
{
call_site = add_call_site (this_landing_pad,
- this_action < 0 ? 0 : this_action);
+ this_action < 0 ? 0 : this_action,
+ cur_sec);
note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
NOTE_EH_HANDLER (note) = call_site;
}
}
last_action_insn = iter;
}
+ else if (NOTE_P (iter)
+ && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
+ {
+ gcc_assert (section_switch_note == NULL_RTX);
+ gcc_assert (flag_reorder_blocks_and_partition);
+ section_switch_note = iter;
+ if (first_no_action_insn)
+ {
+ first_no_action_insn_before_switch = first_no_action_insn;
+ last_no_action_insn_before_switch = last_action_insn;
+ first_no_action_insn = NULL_RTX;
+ gcc_assert (last_action == -1);
+ last_action = -3;
+ }
+ /* Force closing of current EH region before section switch and
+ opening a new one afterwards. */
+ else if (last_action != -3)
+ last_landing_pad = pc_rtx;
+ call_site_base += VEC_length (call_site_record,
+ crtl->eh.call_site_record[cur_sec]);
+ cur_sec++;
+ gcc_assert (crtl->eh.call_site_record[cur_sec] == NULL);
+ crtl->eh.call_site_record[cur_sec]
+ = VEC_alloc (call_site_record, gc, 10);
+ }
if (last_action >= -1 && ! first_no_action_insn)
{
NOTE_EH_HANDLER (note) = call_site;
}
+ call_site_base = saved_call_site_base;
+
htab_delete (ar_hash);
return 0;
}
+static bool
+gate_convert_to_eh_region_ranges (void)
+{
+ /* Nothing to do for SJLJ exceptions or if no regions created. */
+ if (cfun->eh->region_tree == NULL)
+ return false;
+ if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
+ return false;
+ return true;
+}
+
struct rtl_opt_pass pass_convert_to_eh_region_ranges =
{
{
RTL_PASS,
"eh_ranges", /* name */
- NULL, /* gate */
+ gate_convert_to_eh_region_ranges, /* gate */
convert_to_eh_region_ranges, /* execute */
NULL, /* sub */
NULL, /* next */
0, /* static_pass_number */
- 0, /* tv_id */
+ TV_NONE, /* tv_id */
0, /* properties_required */
0, /* properties_provided */
0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func, /* todo_flags_finish */
+ 0 /* todo_flags_finish */
}
};
-
\f
static void
-push_uleb128 (varray_type *data_area, unsigned int value)
+push_uleb128 (VEC (uchar, gc) **data_area, unsigned int value)
{
do
{
value >>= 7;
if (value)
byte |= 0x80;
- VARRAY_PUSH_UCHAR (*data_area, byte);
+ VEC_safe_push (uchar, gc, *data_area, byte);
}
while (value);
}
static void
-push_sleb128 (varray_type *data_area, int value)
+push_sleb128 (VEC (uchar, gc) **data_area, int value)
{
unsigned char byte;
int more;
|| (value == -1 && (byte & 0x40) != 0));
if (more)
byte |= 0x80;
- VARRAY_PUSH_UCHAR (*data_area, byte);
+ VEC_safe_push (uchar, gc, *data_area, byte);
}
while (more);
}
\f
#ifndef HAVE_AS_LEB128
static int
-dw2_size_of_call_site_table (void)
+dw2_size_of_call_site_table (int section)
{
- int n = VEC_length (call_site_record, crtl->eh.call_site_record);
+ int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
int size = n * (4 + 4 + 4);
int i;
for (i = 0; i < n; ++i)
{
- struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
+ struct call_site_record_d *cs =
+ VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
size += size_of_uleb128 (cs->action);
}
static int
sjlj_size_of_call_site_table (void)
{
- int n = VEC_length (call_site_record, crtl->eh.call_site_record);
+ int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
int size = 0;
int i;
for (i = 0; i < n; ++i)
{
- struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
+ struct call_site_record_d *cs =
+ VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
size += size_of_uleb128 (INTVAL (cs->landing_pad));
size += size_of_uleb128 (cs->action);
}
#endif
static void
-dw2_output_call_site_table (void)
+dw2_output_call_site_table (int cs_format, int section)
{
- int n = VEC_length (call_site_record, crtl->eh.call_site_record);
+ int n = VEC_length (call_site_record, crtl->eh.call_site_record[section]);
int i;
+ const char *begin;
+
+ if (section == 0)
+ begin = current_function_func_begin_label;
+ else if (first_function_block_is_cold)
+ begin = crtl->subsections.hot_section_label;
+ else
+ begin = crtl->subsections.cold_section_label;
for (i = 0; i < n; ++i)
{
- struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
+ struct call_site_record_d *cs =
+ VEC_index (call_site_record, crtl->eh.call_site_record[section], i);
char reg_start_lab[32];
char reg_end_lab[32];
char landing_pad_lab[32];
generic arithmetic. */
/* ??? Perhaps use attr_length to choose data1 or data2 instead of
data4 if the function is small enough. */
-#ifdef HAVE_AS_LEB128
- dw2_asm_output_delta_uleb128 (reg_start_lab,
- current_function_func_begin_label,
- "region %d start", i);
- dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
- "length");
- if (cs->landing_pad)
- dw2_asm_output_delta_uleb128 (landing_pad_lab,
- current_function_func_begin_label,
- "landing pad");
- else
- dw2_asm_output_data_uleb128 (0, "landing pad");
-#else
- dw2_asm_output_delta (4, reg_start_lab,
- current_function_func_begin_label,
- "region %d start", i);
- dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
- if (cs->landing_pad)
- dw2_asm_output_delta (4, landing_pad_lab,
- current_function_func_begin_label,
- "landing pad");
+ if (cs_format == DW_EH_PE_uleb128)
+ {
+ dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
+ "region %d start", i);
+ dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
+ "length");
+ if (cs->landing_pad)
+ dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
+ "landing pad");
+ else
+ dw2_asm_output_data_uleb128 (0, "landing pad");
+ }
else
- dw2_asm_output_data (4, 0, "landing pad");
-#endif
+ {
+ dw2_asm_output_delta (4, reg_start_lab, begin,
+ "region %d start", i);
+ dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
+ if (cs->landing_pad)
+ dw2_asm_output_delta (4, landing_pad_lab, begin,
+ "landing pad");
+ else
+ dw2_asm_output_data (4, 0, "landing pad");
+ }
dw2_asm_output_data_uleb128 (cs->action, "action");
}
static void
sjlj_output_call_site_table (void)
{
- int n = VEC_length (call_site_record, crtl->eh.call_site_record);
+ int n = VEC_length (call_site_record, crtl->eh.call_site_record[0]);
int i;
for (i = 0; i < n; ++i)
{
- struct call_site_record *cs = VEC_index (call_site_record, crtl->eh.call_site_record, i);
+ struct call_site_record_d *cs =
+ VEC_index (call_site_record, crtl->eh.call_site_record[0], i);
dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
"region %d landing pad", i);
call_site_base += n;
}
-#ifndef TARGET_UNWIND_INFO
/* Switch to the section that should be used for exception tables. */
static void
{
/* Compute the section and cache it into exception_section,
unless it depends on the function name. */
- if (targetm.have_named_sections)
+ if (targetm_common.have_named_sections)
{
int flags;
switch_to_section (s);
}
-#endif
/* Output a reference from an exception table to the type_info object TYPE.
{
struct varpool_node *node;
- type = lookup_type_for_runtime (type);
+ /* FIXME lto. pass_ipa_free_lang_data changes all types to
+ runtime types so TYPE should already be a runtime type
+ reference. When pass_ipa_free_lang data is made a default
+ pass, we can then remove the call to lookup_type_for_runtime
+ below. */
+ if (TYPE_P (type))
+ type = lookup_type_for_runtime (type);
+
value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
/* Let cgraph know that the rtti decl is used. Not all of the
dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}
-void
-output_function_exception_table (const char * ARG_UNUSED (fnname))
+static void
+output_one_function_exception_table (int section)
{
- int tt_format, cs_format, lp_format, i, n;
+ int tt_format, cs_format, lp_format, i;
#ifdef HAVE_AS_LEB128
char ttype_label[32];
char cs_after_size_label[32];
int have_tt_data;
int tt_format_size = 0;
- /* Not all functions need anything. */
- if (! crtl->uses_eh_lsda)
- return;
-
- if (eh_personality_libfunc)
- assemble_external_libcall (eh_personality_libfunc);
-
-#ifdef TARGET_UNWIND_INFO
- /* TODO: Move this into target file. */
- fputs ("\t.personality\t", asm_out_file);
- output_addr_const (asm_out_file, eh_personality_libfunc);
- fputs ("\n\t.handlerdata\n", asm_out_file);
- /* Note that varasm still thinks we're in the function's code section.
- The ".endp" directive that will immediately follow will take us back. */
-#else
- switch_to_exception_section (fnname);
-#endif
-
- /* If the target wants a label to begin the table, emit it here. */
- targetm.asm_out.except_table_label (asm_out_file);
-
- have_tt_data = (VEC_length (tree, crtl->eh.ttype_data) > 0
- || VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data) > 0);
+ have_tt_data = (VEC_length (tree, cfun->eh->ttype_data)
+ || (targetm.arm_eabi_unwinder
+ ? VEC_length (tree, cfun->eh->ehspec_data.arm_eabi)
+ : VEC_length (uchar, cfun->eh->ehspec_data.other)));
/* Indicate the format of the @TType entries. */
if (! have_tt_data)
{
tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
#ifdef HAVE_AS_LEB128
- ASM_GENERATE_INTERNAL_LABEL (ttype_label, "LLSDATT",
+ ASM_GENERATE_INTERNAL_LABEL (ttype_label,
+ section ? "LLSDATTC" : "LLSDATT",
current_function_funcdef_no);
#endif
tt_format_size = size_of_encoded_value (tt_format);
assemble_align (tt_format_size * BITS_PER_UNIT);
}
- targetm.asm_out.internal_label (asm_out_file, "LLSDA",
- current_function_funcdef_no);
+ targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
+ current_function_funcdef_no);
/* The LSDA header. */
eh_data_format_name (tt_format));
#ifndef HAVE_AS_LEB128
- if (USING_SJLJ_EXCEPTIONS)
+ if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
call_site_len = sjlj_size_of_call_site_table ();
else
- call_site_len = dw2_size_of_call_site_table ();
+ call_site_len = dw2_size_of_call_site_table (section);
#endif
/* A pc-relative 4-byte displacement to the @TType data. */
{
#ifdef HAVE_AS_LEB128
char ttype_after_disp_label[32];
- ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label, "LLSDATTD",
+ ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
+ section ? "LLSDATTDC" : "LLSDATTD",
current_function_funcdef_no);
dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
"@TType base offset");
before_disp = 1 + 1;
after_disp = (1 + size_of_uleb128 (call_site_len)
+ call_site_len
- + VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data)
- + (VEC_length (tree, crtl->eh.ttype_data)
+ + VEC_length (uchar, crtl->eh.action_record_data)
+ + (VEC_length (tree, cfun->eh->ttype_data)
* tt_format_size));
disp = after_disp;
eh_data_format_name (cs_format));
#ifdef HAVE_AS_LEB128
- ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label, "LLSDACSB",
+ ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
+ section ? "LLSDACSBC" : "LLSDACSB",
current_function_funcdef_no);
- ASM_GENERATE_INTERNAL_LABEL (cs_end_label, "LLSDACSE",
+ ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
+ section ? "LLSDACSEC" : "LLSDACSE",
current_function_funcdef_no);
dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
"Call-site table length");
ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
- if (USING_SJLJ_EXCEPTIONS)
+ if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
sjlj_output_call_site_table ();
else
- dw2_output_call_site_table ();
+ dw2_output_call_site_table (cs_format, section);
ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
#else
- dw2_asm_output_data_uleb128 (call_site_len,"Call-site table length");
- if (USING_SJLJ_EXCEPTIONS)
+ dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
+ if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
sjlj_output_call_site_table ();
else
- dw2_output_call_site_table ();
+ dw2_output_call_site_table (cs_format, section);
#endif
/* ??? Decode and interpret the data for flag_debug_asm. */
- n = VARRAY_ACTIVE_SIZE (crtl->eh.action_record_data);
- for (i = 0; i < n; ++i)
- dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.action_record_data, i),
- (i ? NULL : "Action record table"));
+ {
+ uchar uc;
+ FOR_EACH_VEC_ELT (uchar, crtl->eh.action_record_data, i, uc)
+ dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
+ }
if (have_tt_data)
assemble_align (tt_format_size * BITS_PER_UNIT);
- i = VEC_length (tree, crtl->eh.ttype_data);
+ i = VEC_length (tree, cfun->eh->ttype_data);
while (i-- > 0)
{
- tree type = VEC_index (tree, crtl->eh.ttype_data, i);
+ tree type = VEC_index (tree, cfun->eh->ttype_data, i);
output_ttype (type, tt_format, tt_format_size);
}
#endif
/* ??? Decode and interpret the data for flag_debug_asm. */
- n = VARRAY_ACTIVE_SIZE (crtl->eh.ehspec_data);
- for (i = 0; i < n; ++i)
+ if (targetm.arm_eabi_unwinder)
{
- if (targetm.arm_eabi_unwinder)
- {
- tree type = VARRAY_TREE (crtl->eh.ehspec_data, i);
- output_ttype (type, tt_format, tt_format_size);
- }
- else
- dw2_asm_output_data (1, VARRAY_UCHAR (crtl->eh.ehspec_data, i),
- (i ? NULL : "Exception specification table"));
+ tree type;
+ for (i = 0;
+ VEC_iterate (tree, cfun->eh->ehspec_data.arm_eabi, i, type); ++i)
+ output_ttype (type, tt_format, tt_format_size);
+ }
+ else
+ {
+ uchar uc;
+ for (i = 0;
+ VEC_iterate (uchar, cfun->eh->ehspec_data.other, i, uc); ++i)
+ dw2_asm_output_data (1, uc,
+ i ? NULL : "Exception specification table");
+ }
+}
+
+void
+output_function_exception_table (const char *fnname)
+{
+ rtx personality = get_personality_function (current_function_decl);
+
+ /* Not all functions need anything. */
+ if (! crtl->uses_eh_lsda)
+ return;
+
+ if (personality)
+ {
+ assemble_external_libcall (personality);
+
+ if (targetm.asm_out.emit_except_personality)
+ targetm.asm_out.emit_except_personality (personality);
}
+ switch_to_exception_section (fnname);
+
+ /* If the target wants a label to begin the table, emit it here. */
+ targetm.asm_out.emit_except_table_label (asm_out_file);
+
+ output_one_function_exception_table (0);
+ if (crtl->eh.call_site_record[1] != NULL)
+ output_one_function_exception_table (1);
+
switch_to_section (current_function_section ());
}
{
return fun->eh->throw_stmt_table;
}
+\f
+/* Determine if the function needs an EH personality function. */
+
+enum eh_personality_kind
+function_needs_eh_personality (struct function *fn)
+{
+ enum eh_personality_kind kind = eh_personality_none;
+ eh_region i;
+
+ FOR_ALL_EH_REGION_FN (i, fn)
+ {
+ switch (i->type)
+ {
+ case ERT_CLEANUP:
+ /* Can do with any personality including the generic C one. */
+ kind = eh_personality_any;
+ break;
+
+ case ERT_TRY:
+ case ERT_ALLOWED_EXCEPTIONS:
+ /* Always needs an EH personality function.  The generic C
+ personality doesn't handle these even for empty type lists. */
+ return eh_personality_lang;
+
+ case ERT_MUST_NOT_THROW:
+ /* Always needs an EH personality function.  The language may specify
+ which abort routine must be used, e.g. std::terminate. */
+ return eh_personality_lang;
+ }
+ }
+ return kind;
+}
+\f
/* Dump EH information to OUT. */
void
dump_eh_tree (FILE * out, struct function *fun)
{
- struct eh_region *i;
+ eh_region i;
int depth = 0;
- static const char *const type_name[] = { "unknown", "cleanup", "try", "catch",
- "allowed_exceptions", "must_not_throw",
- "throw"
- };
+ static const char *const type_name[] = {
+ "cleanup", "try", "allowed_exceptions", "must_not_throw"
+ };
i = fun->eh->region_tree;
if (!i)
while (1)
{
fprintf (out, " %*s %i %s", depth * 2, "",
- i->region_number, type_name[(int) i->type]);
- if (i->tree_label)
+ i->index, type_name[(int) i->type]);
+
+ if (i->landing_pads)
{
- fprintf (out, " tree_label:");
- print_generic_expr (out, i->tree_label, 0);
+ eh_landing_pad lp;
+
+ fprintf (out, " land:");
+ if (current_ir_type () == IR_GIMPLE)
+ {
+ for (lp = i->landing_pads; lp ; lp = lp->next_lp)
+ {
+ fprintf (out, "{%i,", lp->index);
+ print_generic_expr (out, lp->post_landing_pad, 0);
+ fputc ('}', out);
+ if (lp->next_lp)
+ fputc (',', out);
+ }
+ }
+ else
+ {
+ for (lp = i->landing_pads; lp ; lp = lp->next_lp)
+ {
+ fprintf (out, "{%i,", lp->index);
+ if (lp->landing_pad)
+ fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
+ NOTE_P (lp->landing_pad) ? "(del)" : "");
+ else
+ fprintf (out, "(nil),");
+ if (lp->post_landing_pad)
+ {
+ rtx lab = label_rtx (lp->post_landing_pad);
+ fprintf (out, "%i%s}", INSN_UID (lab),
+ NOTE_P (lab) ? "(del)" : "");
+ }
+ else
+ fprintf (out, "(nil)}");
+ if (lp->next_lp)
+ fputc (',', out);
+ }
+ }
}
+
switch (i->type)
{
case ERT_CLEANUP:
- if (i->u.cleanup.prev_try)
- fprintf (out, " prev try:%i",
- i->u.cleanup.prev_try->region_number);
+ case ERT_MUST_NOT_THROW:
break;
case ERT_TRY:
{
- struct eh_region *c;
- fprintf (out, " catch regions:");
- for (c = i->u.eh_try.eh_catch; c; c = c->u.eh_catch.next_catch)
- fprintf (out, " %i", c->region_number);
+ eh_catch c;
+ fprintf (out, " catch:");
+ for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
+ {
+ fputc ('{', out);
+ if (c->label)
+ {
+ fprintf (out, "lab:");
+ print_generic_expr (out, c->label, 0);
+ fputc (';', out);
+ }
+ print_generic_expr (out, c->type_list, 0);
+ fputc ('}', out);
+ if (c->next_catch)
+ fputc (',', out);
+ }
}
break;
- case ERT_CATCH:
- if (i->u.eh_catch.prev_catch)
- fprintf (out, " prev: %i",
- i->u.eh_catch.prev_catch->region_number);
- if (i->u.eh_catch.next_catch)
- fprintf (out, " next %i",
- i->u.eh_catch.next_catch->region_number);
- break;
-
case ERT_ALLOWED_EXCEPTIONS:
- fprintf (out, "filter :%i types:", i->u.allowed.filter);
+ fprintf (out, " filter :%i types:", i->u.allowed.filter);
print_generic_expr (out, i->u.allowed.type_list, 0);
break;
-
- case ERT_THROW:
- fprintf (out, "type:");
- print_generic_expr (out, i->u.eh_throw.type, 0);
- break;
-
- case ERT_MUST_NOT_THROW:
- break;
-
- case ERT_UNKNOWN:
- break;
- }
- if (i->aka)
- {
- fprintf (out, " also known as:");
- dump_bitmap (out, i->aka);
}
- else
- fprintf (out, "\n");
+ fputc ('\n', out);
+
/* If there are sub-regions, process them. */
if (i->inner)
i = i->inner, depth++;
}
}
-/* Verify some basic invariants on EH datastructures. Could be extended to
- catch more. */
-void
+/* Dump the EH tree for FN on stderr. */
+
+DEBUG_FUNCTION void
+debug_eh_tree (struct function *fn)
+{
+ dump_eh_tree (stderr, fn);
+}
+
+/* Verify invariants on EH datastructures. */
+
+DEBUG_FUNCTION void
verify_eh_tree (struct function *fun)
{
- struct eh_region *i, *outer = NULL;
+ eh_region r, outer;
+ int nvisited_lp, nvisited_r;
+ int count_lp, count_r, depth, i;
+ eh_landing_pad lp;
bool err = false;
- int nvisited = 0;
- int count = 0;
- int j;
- int depth = 0;
if (!fun->eh->region_tree)
return;
- for (j = fun->eh->last_region_number; j > 0; --j)
- if ((i = VEC_index (eh_region, fun->eh->region_array, j)))
+
+ count_r = 0;
+ for (i = 1; VEC_iterate (eh_region, fun->eh->region_array, i, r); ++i)
+ if (r)
+ {
+ if (r->index == i)
+ count_r++;
+ else
+ {
+ error ("region_array is corrupted for region %i", r->index);
+ err = true;
+ }
+ }
+
+ count_lp = 0;
+ for (i = 1; VEC_iterate (eh_landing_pad, fun->eh->lp_array, i, lp); ++i)
+ if (lp)
{
- if (i->region_number == j)
- count++;
- if (i->region_number != j && (!i->aka || !bitmap_bit_p (i->aka, j)))
+ if (lp->index == i)
+ count_lp++;
+ else
{
- error ("region_array is corrupted for region %i",
- i->region_number);
+ error ("lp_array is corrupted for lp %i", lp->index);
err = true;
}
}
- i = fun->eh->region_tree;
+ depth = nvisited_lp = nvisited_r = 0;
+ outer = NULL;
+ r = fun->eh->region_tree;
while (1)
{
- if (VEC_index (eh_region, fun->eh->region_array, i->region_number) != i)
+ if (VEC_index (eh_region, fun->eh->region_array, r->index) != r)
{
- error ("region_array is corrupted for region %i", i->region_number);
+ error ("region_array is corrupted for region %i", r->index);
err = true;
}
- if (i->outer != outer)
+ if (r->outer != outer)
{
- error ("outer block of region %i is wrong", i->region_number);
+ error ("outer block of region %i is wrong", r->index);
err = true;
}
- if (i->may_contain_throw && outer && !outer->may_contain_throw)
+ if (depth < 0)
{
- error
- ("region %i may contain throw and is contained in region that may not",
- i->region_number);
+ error ("negative nesting depth of region %i", r->index);
err = true;
}
- if (depth < 0)
+ nvisited_r++;
+
+ for (lp = r->landing_pads; lp ; lp = lp->next_lp)
{
- error ("negative nesting depth of region %i", i->region_number);
- err = true;
+ if (VEC_index (eh_landing_pad, fun->eh->lp_array, lp->index) != lp)
+ {
+ error ("lp_array is corrupted for lp %i", lp->index);
+ err = true;
+ }
+ if (lp->region != r)
+ {
+ error ("region of lp %i is wrong", lp->index);
+ err = true;
+ }
+ nvisited_lp++;
}
- nvisited++;
- /* If there are sub-regions, process them. */
- if (i->inner)
- outer = i, i = i->inner, depth++;
- /* If there are peers, process them. */
- else if (i->next_peer)
- i = i->next_peer;
- /* Otherwise, step back up the tree to the next peer. */
+
+ if (r->inner)
+ outer = r, r = r->inner, depth++;
+ else if (r->next_peer)
+ r = r->next_peer;
else
{
do
{
- i = i->outer;
+ r = r->outer;
+ if (r == NULL)
+ goto region_done;
depth--;
- if (i == NULL)
- {
- if (depth != -1)
- {
- error ("tree list ends on depth %i", depth + 1);
- err = true;
- }
- if (count != nvisited)
- {
- error ("array does not match the region tree");
- err = true;
- }
- if (err)
- {
- dump_eh_tree (stderr, fun);
- internal_error ("verify_eh_tree failed");
- }
- return;
- }
- outer = i->outer;
+ outer = r->outer;
}
- while (i->next_peer == NULL);
- i = i->next_peer;
+ while (r->next_peer == NULL);
+ r = r->next_peer;
}
}
-}
-
-/* Initialize unwind_resume_libfunc. */
+ region_done:
+ if (depth != 0)
+ {
+ error ("tree list ends on depth %i", depth);
+ err = true;
+ }
+ if (count_r != nvisited_r)
+ {
+ error ("region_array does not match region_tree");
+ err = true;
+ }
+ if (count_lp != nvisited_lp)
+ {
+ error ("lp_array does not match region_tree");
+ err = true;
+ }
-void
-default_init_unwind_resume_libfunc (void)
-{
- /* The default c++ routines aren't actually c++ specific, so use those. */
- unwind_resume_libfunc =
- init_one_libfunc ( USING_SJLJ_EXCEPTIONS ? "_Unwind_SjLj_Resume"
- : "_Unwind_Resume");
+ if (err)
+ {
+ dump_eh_tree (stderr, fun);
+ internal_error ("verify_eh_tree failed");
+ }
}
-
\f
-static bool
-gate_handle_eh (void)
-{
- return doing_eh (0);
-}
-
-/* Complete generation of exception handling code. */
-static unsigned int
-rest_of_handle_eh (void)
-{
- cleanup_cfg (CLEANUP_NO_INSN_DEL);
- finish_eh_generation ();
- cleanup_cfg (CLEANUP_NO_INSN_DEL);
- return 0;
-}
-
-struct rtl_opt_pass pass_rtl_eh =
-{
- {
- RTL_PASS,
- "eh", /* name */
- gate_handle_eh, /* gate */
- rest_of_handle_eh, /* execute */
- NULL, /* sub */
- NULL, /* next */
- 0, /* static_pass_number */
- TV_JUMP, /* tv_id */
- 0, /* properties_required */
- 0, /* properties_provided */
- 0, /* properties_destroyed */
- 0, /* todo_flags_start */
- TODO_dump_func /* todo_flags_finish */
- }
-};
-
#include "gt-except.h"