* lto-streamer-in.c (input_function): Stream it in.
* lto-streamer-out.c (output_function): Stream it out.
* function.c (allocate_struct_function): Set it.
(expand_function_end): Substitute cfun->can_throw_non_call_exceptions
for flag_non_call_exceptions.
* cfgbuild.c (control_flow_insn_p): Likewise.
(make_edges): Likewise.
* cfgexpand.c (expand_stack_alignment): Likewise.
* combine.c (distribute_notes): Likewise.
* cse.c (cse_extended_basic_block): Likewise.
* except.c (insn_could_throw_p): Likewise.
* gcse.c (simple_mem): Likewise.
* ipa-pure-const.c (check_call): Likewise.
(check_stmt): Likewise.
* lower-subreg.c (resolve_simple_move): Likewise.
* optabs.c (emit_libcall_block): Likewise.
(prepare_cmp_insn): Likewise.
* postreload-gcse.c (eliminate_partially_redundant_loads): Likewise.
* postreload.c (rest_of_handle_postreload): Likewise.
* reload1.c (reload_as_needed): Likewise.
(emit_input_reload_insns): Likewise.
(emit_output_reload_insns): Likewise.
(fixup_abnormal_edges): Likewise.
* sel-sched-ir.c (init_global_and_expr_for_insn): Likewise.
* store-motion.c (find_moveable_store): Likewise.
* tree-eh.c (stmt_could_throw_p): Likewise.
(tree_could_throw_p): Likewise.
* tree-ssa-dce.c (mark_stmt_if_obviously_necessary): Likewise.
* config/arm/arm.c (arm_expand_prologue): Likewise.
(thumb1_expand_prologue): Likewise.
* config/rx/rx.md (cbranchsf4): Likewise.
(cmpsf): Likewise.
* config/s390/s390.c (s390_emit_prologue): Likewise.
* tree-inline.c (initialize_cfun): Copy can_throw_non_call_exceptions.
(inline_forbidden_into_p): New predicate.
(expand_call_inline): Use it to forbid inlining.
(tree_can_inline_p): Likewise.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@159847 138bc75d-0d04-0410-961f-82ee72b054a4
+2010-05-25 Eric Botcazou <ebotcazou@adacore.com>
+
+ * function.h (struct function): Add can_throw_non_call_exceptions bit.
+ * lto-streamer-in.c (input_function): Stream it in.
+ * lto-streamer-out.c (output_function): Stream it out.
+ * function.c (allocate_struct_function): Set it.
+ (expand_function_end): Substitute cfun->can_throw_non_call_exceptions
+ for flag_non_call_exceptions.
+ * cfgbuild.c (control_flow_insn_p): Likewise.
+ (make_edges): Likewise.
+ * cfgexpand.c (expand_stack_alignment): Likewise.
+ * combine.c (distribute_notes): Likewise.
+ * cse.c (cse_extended_basic_block): Likewise.
+ * except.c (insn_could_throw_p): Likewise.
+ * gcse.c (simple_mem): Likewise.
+ * ipa-pure-const.c (check_call): Likewise.
+	(check_stmt): Likewise.
+	* lower-subreg.c (resolve_simple_move): Likewise.
+ * optabs.c (emit_libcall_block): Likewise.
+ (prepare_cmp_insn): Likewise.
+ * postreload-gcse.c (eliminate_partially_redundant_loads): Likewise.
+ * postreload.c (rest_of_handle_postreload): Likewise.
+ * reload1.c (reload_as_needed): Likewise.
+ (emit_input_reload_insns): Likewise.
+ (emit_output_reload_insns): Likewise.
+ (fixup_abnormal_edges): Likewise.
+ * sel-sched-ir.c (init_global_and_expr_for_insn): Likewise.
+ * store-motion.c (find_moveable_store): Likewise.
+ * tree-eh.c (stmt_could_throw_p): Likewise.
+ (tree_could_throw_p): Likewise.
+ * tree-ssa-dce.c (mark_stmt_if_obviously_necessary): Likewise.
+ * config/arm/arm.c (arm_expand_prologue): Likewise.
+ (thumb1_expand_prologue): Likewise.
+ * config/rx/rx.md (cbranchsf4): Likewise.
+ (cmpsf): Likewise.
+ * config/s390/s390.c (s390_emit_prologue): Likewise.
+ * tree-inline.c (initialize_cfun): Copy can_throw_non_call_exceptions.
+ (inline_forbidden_into_p): New predicate.
+ (expand_call_inline): Use it to forbid inlining.
+ (tree_can_inline_p): Likewise.
+
2010-05-25 Steven Bosscher <steven@gcc.gnu.org>
* config/i386/i386-c.c: Do not include rtl.h.
if (GET_CODE (PATTERN (insn)) == TRAP_IF
&& XEXP (PATTERN (insn), 0) == const1_rtx)
return true;
- if (!flag_non_call_exceptions)
+ if (!cfun->can_throw_non_call_exceptions)
return false;
break;
handler for this CALL_INSN. If we're handling non-call
exceptions then any insn can reach any of the active handlers.
Also mark the CALL_INSN as reaching any nonlocal goto handler. */
- else if (code == CALL_INSN || flag_non_call_exceptions)
+ else if (code == CALL_INSN || cfun->can_throw_non_call_exceptions)
{
/* Add any appropriate EH edges. */
rtl_make_eh_edge (edge_cache, bb, insn);
stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
exceptions since callgraph doesn't collect incoming stack alignment
in this case. */
- if (flag_non_call_exceptions
+ if (cfun->can_throw_non_call_exceptions
&& PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
else
place = i2;
else
{
- gcc_assert (flag_non_call_exceptions);
+ gcc_assert (cfun->can_throw_non_call_exceptions);
if (may_trap_p (i3))
place = i3;
else if (i2 && may_trap_p (i2))
using the EABI unwinder, to prevent faulting instructions from being
swapped with a stack adjustment. */
if (crtl->profile || !TARGET_SCHED_PROLOG
- || (ARM_EABI_UNWIND_TABLES && flag_non_call_exceptions))
+ || (ARM_EABI_UNWIND_TABLES && cfun->can_throw_non_call_exceptions))
emit_insn (gen_blockage ());
/* If the link register is being kept alive, with the return address in it,
using the EABI unwinder, to prevent faulting instructions from being
swapped with a stack adjustment. */
if (crtl->profile || !TARGET_SCHED_PROLOG
- || (ARM_EABI_UNWIND_TABLES && flag_non_call_exceptions))
+ || (ARM_EABI_UNWIND_TABLES && cfun->can_throw_non_call_exceptions))
emit_insn (gen_blockage ());
cfun->machine->lr_save_eliminated = !thumb_force_lr_save ();
[(cc0) (const_int 0)])
(label_ref (match_operand 3 ""))
(pc)))]
- "ALLOW_RX_FPU_INSNS && ! flag_non_call_exceptions"
+ "ALLOW_RX_FPU_INSNS && !cfun->can_throw_non_call_exceptions"
""
)
(set_attr "length" "2,2,3,4,5,6,5")]
)
-;; This pattern is disabled when -fnon-call-exceptions is active because
+;; This pattern is disabled if the function can throw non-call exceptions,
;; it could generate a floating point exception, which would introduce an
;; edge into the flow graph between this insn and the conditional branch
;; insn to follow, thus breaking the cc0 relationship. Run the g++ test
[(set (cc0)
(compare:CC (match_operand:SF 0 "register_operand" "r,r,r")
(match_operand:SF 1 "rx_source_operand" "r,i,Q")))]
- "ALLOW_RX_FPU_INSNS && ! flag_non_call_exceptions"
+ "ALLOW_RX_FPU_INSNS && !cfun->can_throw_non_call_exceptions"
{
rx_float_compare_mode = true;
return "fcmp\t%1, %0";
insn = emit_insn (gen_move_insn (addr, temp_reg));
}
- /* If we support asynchronous exceptions (e.g. for Java),
+ /* If we support non-call exceptions (e.g. for Java),
we need to make sure the backchain pointer is set up
before any possibly trapping memory access. */
-
- if (TARGET_BACKCHAIN && flag_non_call_exceptions)
+ if (TARGET_BACKCHAIN && cfun->can_throw_non_call_exceptions)
{
addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
emit_clobber (addr);
/* With non-call exceptions, we are not always able to update
the CFG properly inside cse_insn. So clean up possibly
redundant EH edges here. */
- if (flag_non_call_exceptions && have_eh_succ_edges (bb))
+ if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
cse_cfg_altered |= purge_dead_edges (bb);
/* If we changed a conditional jump, we may have terminated
{
if (CALL_P (insn))
return true;
- if (INSN_P (insn) && flag_non_call_exceptions)
+ if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
return may_trap_p (PATTERN (insn));
return false;
}
/* Assume all registers in stdarg functions need to be saved. */
cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
+
+ /* ??? This could be set on a per-function basis by the front-end
+ but is this worth the hassle? */
+ cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
}
}
allocate_struct_function (fndecl, false);
}
-/* Reset cfun, and other non-struct-function variables to defaults as
+/* Reset crtl and other non-struct-function variables to defaults as
appropriate for emitting rtl at the start of a function. */
static void
/* We want to ensure that instructions that may trap are not
moved into the epilogue by scheduling, because we don't
always emit unwind information for the epilogue. */
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
emit_insn (gen_blockage ());
}
/* @@@ This is a kludge. We want to ensure that instructions that
may trap are not moved into the epilogue by scheduling, because
we don't always emit unwind information for the epilogue. */
- if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
+ if (!USING_SJLJ_EXCEPTIONS && cfun->can_throw_non_call_exceptions)
emit_insn (gen_blockage ());
/* If stack protection is enabled for this function, check the guard. */
unsigned int after_inlining : 1;
unsigned int always_inline_functions_inlined : 1;
+ /* Nonzero if function being compiled can throw synchronous non-call
+ exceptions. */
+ unsigned int can_throw_non_call_exceptions : 1;
+
/* Fields below this point are not set for abstract functions; see
allocate_struct_function. */
return 0;
/* If we are handling exceptions, we must be careful with memory references
- that may trap. If we are not, the behavior is undefined, so we may just
+ that may trap. If we are not, the behavior is undefined, so we may just
continue. */
- if (flag_non_call_exceptions && may_trap_p (x))
+ if (cfun->can_throw_non_call_exceptions && may_trap_p (x))
return 0;
if (side_effects_p (x))
if (gimple_op (call, i)
&& tree_could_throw_p (gimple_op (call, i)))
{
- if (possibly_throws && flag_non_call_exceptions)
+ if (possibly_throws && cfun->can_throw_non_call_exceptions)
{
if (dump_file)
fprintf (dump_file, " operand can throw; looping\n");
those bits. */
else if (!ipa || !callee_t)
{
- if (possibly_throws && flag_non_call_exceptions)
+ if (possibly_throws && cfun->can_throw_non_call_exceptions)
{
if (dump_file)
fprintf (dump_file, " can throw; looping\n");
if (gimple_code (stmt) != GIMPLE_CALL
&& stmt_could_throw_p (stmt))
{
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
{
if (dump_file)
fprintf (dump_file, " can throw; looping");
basic block and still produce the correct control
flow graph for it. */
gcc_assert (!cfi
- || (flag_non_call_exceptions
+ || (cfun->can_throw_non_call_exceptions
&& can_throw_internal (insn)));
insn = resolve_simple_move (set, insn);
fn->after_tree_profile = bp_unpack_value (bp, 1);
fn->returns_pcc_struct = bp_unpack_value (bp, 1);
fn->returns_struct = bp_unpack_value (bp, 1);
+ fn->can_throw_non_call_exceptions = bp_unpack_value (bp, 1);
fn->always_inline_functions_inlined = bp_unpack_value (bp, 1);
fn->after_inlining = bp_unpack_value (bp, 1);
fn->dont_save_pending_sizes_p = bp_unpack_value (bp, 1);
bp_pack_value (bp, fn->after_tree_profile, 1);
bp_pack_value (bp, fn->returns_pcc_struct, 1);
bp_pack_value (bp, fn->returns_struct, 1);
+ bp_pack_value (bp, fn->can_throw_non_call_exceptions, 1);
bp_pack_value (bp, fn->always_inline_functions_inlined, 1);
bp_pack_value (bp, fn->after_inlining, 1);
bp_pack_value (bp, fn->dont_save_pending_sizes_p, 1);
/* If we're using non-call exceptions, a libcall corresponding to an
operation that may trap may also trap. */
/* ??? See the comment in front of make_reg_eh_region_note. */
- if (flag_non_call_exceptions && may_trap_p (equiv))
+ if (cfun->can_throw_non_call_exceptions && may_trap_p (equiv))
{
for (insn = insns; insn; insn = NEXT_INSN (insn))
if (CALL_P (insn))
/* Don't allow operands to the compare to trap, as that can put the
compare and branch in different basic blocks. */
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
{
if (may_trap_p (x))
x = force_reg (mode, x);
/* Are the operands unchanged since the start of the
block? */
&& oprs_unchanged_p (src, insn, false)
- && !(flag_non_call_exceptions && may_trap_p (src))
+ && !(cfun->can_throw_non_call_exceptions && may_trap_p (src))
&& !side_effects_p (src)
/* Is the expression recorded? */
&& (expr = lookup_expr_in_table (src)) != NULL)
reload_cse_regs (get_insns ());
/* Reload_cse_regs can eliminate potentially-trapping MEMs.
Remove any EH edges associated with them. */
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
purge_all_dead_edges ();
return 0;
subst_reloads (insn);
/* Adjust the exception region notes for loads and stores. */
- if (flag_non_call_exceptions && !CALL_P (insn))
+ if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
fixup_eh_region_note (insn, prev, next);
/* If this was an ASM, make sure that all the reload insns
rl->when_needed);
}
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
/* End this sequence. */
else
output_reload_insns[rl->opnum] = get_insns ();
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
end_sequence ();
}
/* We've possibly turned single trapping insn into multiple ones. */
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
{
sbitmap blocks;
blocks = sbitmap_alloc (last_basic_block);
|| SCHED_GROUP_P (insn)
|| prologue_epilogue_contains (insn)
/* Exception handling insns are always unique. */
- || (flag_non_call_exceptions && can_throw_internal (insn))
+ || (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
/* TRAP_IF though have an INSN code is control_flow_insn_p (). */
|| control_flow_insn_p (insn))
force_unique_p = true;
return;
/* If we are handling exceptions, we must be careful with memory references
- that may trap. If we are not, the behavior is undefined, so we may just
+ that may trap. If we are not, the behavior is undefined, so we may just
continue. */
- if (flag_non_call_exceptions && may_trap_p (dest))
+ if (cfun->can_throw_non_call_exceptions && may_trap_p (dest))
return;
/* Even if the destination cannot trap, the source may. In this case we'd
case GIMPLE_ASSIGN:
case GIMPLE_COND:
- if (!flag_non_call_exceptions)
+ if (!cfun->can_throw_non_call_exceptions)
return false;
return stmt_could_throw_1_p (stmt);
case GIMPLE_ASM:
- if (!flag_non_call_exceptions)
+ if (!cfun->can_throw_non_call_exceptions)
return false;
return gimple_asm_volatile_p (stmt);
return false;
if (TREE_CODE (t) == MODIFY_EXPR)
{
- if (flag_non_call_exceptions
+ if (cfun->can_throw_non_call_exceptions
&& tree_could_trap_p (TREE_OPERAND (t, 0)))
return true;
t = TREE_OPERAND (t, 1);
t = TREE_OPERAND (t, 0);
if (TREE_CODE (t) == CALL_EXPR)
return (call_expr_flags (t) & ECF_NOTHROW) == 0;
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
return tree_could_trap_p (t);
return false;
}
cfun->stdarg = src_cfun->stdarg;
cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
cfun->after_inlining = src_cfun->after_inlining;
+ cfun->can_throw_non_call_exceptions
+ = src_cfun->can_throw_non_call_exceptions;
cfun->returns_struct = src_cfun->returns_struct;
cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
cfun->after_tree_profile = src_cfun->after_tree_profile;
return forbidden_p;
}
+/* Return true if CALLEE cannot be inlined into CALLER. */
+
+static bool
+inline_forbidden_into_p (tree caller, tree callee)
+{
+ /* Don't inline if the functions have different EH personalities. */
+ if (DECL_FUNCTION_PERSONALITY (caller)
+ && DECL_FUNCTION_PERSONALITY (callee)
+ && (DECL_FUNCTION_PERSONALITY (caller)
+ != DECL_FUNCTION_PERSONALITY (callee)))
+ return true;
+
+ /* Don't inline if the callee can throw non-call exceptions but the
+ caller cannot. */
+ if (DECL_STRUCT_FUNCTION (callee)
+ && DECL_STRUCT_FUNCTION (callee)->can_throw_non_call_exceptions
+ && !(DECL_STRUCT_FUNCTION (caller)
+ && DECL_STRUCT_FUNCTION (caller)->can_throw_non_call_exceptions))
+ return true;
+
+ return false;
+}
+
/* Returns nonzero if FN is a function that does not have any
fundamental inline blocking properties. */
cg_edge = cgraph_edge (id->dst_node, stmt);
- /* Don't inline functions with different EH personalities. */
- if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
- && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
- && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
- != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
+ /* First check that inlining isn't simply forbidden in this case. */
+ if (inline_forbidden_into_p (cg_edge->caller->decl, cg_edge->callee->decl))
goto egress;
- /* Don't try to inline functions that are not well-suited to
- inlining. */
+ /* Don't try to inline functions that are not well-suited to inlining. */
if (!cgraph_inline_p (cg_edge, &reason))
{
/* If this call was originally indirect, we do not want to emit any
caller = e->caller->decl;
callee = e->callee->decl;
- /* We cannot inline a function that uses a different EH personality
- than the caller. */
- if (DECL_FUNCTION_PERSONALITY (caller)
- && DECL_FUNCTION_PERSONALITY (callee)
- && (DECL_FUNCTION_PERSONALITY (caller)
- != DECL_FUNCTION_PERSONALITY (callee)))
+ /* First check that inlining isn't simply forbidden in this case. */
+ if (inline_forbidden_into_p (caller, callee))
{
e->inline_failed = CIF_UNSPECIFIED;
gimple_call_set_cannot_inline (e->call_stmt, true);
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
tree lhs = NULL_TREE;
+
/* With non-call exceptions, we have to assume that all statements could
throw. If a statement may throw, it is inherently necessary. */
- if (flag_non_call_exceptions
- && stmt_could_throw_p (stmt))
+ if (cfun->can_throw_non_call_exceptions && stmt_could_throw_p (stmt))
{
mark_stmt_necessary (stmt, true);
return;