/* Tail call optimization on trees.
   Copyright (C) 2003-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "tree-flow.h"
#include "gimple-pretty-print.h"
#include "except.h"
#include "tree-pass.h"
#include "flags.h"
#include "langhooks.h"
#include "dbgcnt.h"
#include "target.h"
#include "cfgloop.h"
#include "common/common-target.h"

/* This file implements tail recursion elimination.  It is also used to
   analyze tail calls in general, passing the results to the RTL level
   where they are used for sibcall optimization.

   In addition to the standard tail recursion elimination, we handle the most
   trivial cases of making the call tail recursive by creating accumulators.
   For example the following function

   int sum (int n)
   {
     if (n > 0)
       return n + sum (n - 1);
     else
       return 0;
   }
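
   is transformed (conceptually -- the pass itself works on GIMPLE, not on
   source code) into

   int sum (int n)
   {
     int acc = 0;

     while (n > 0)
       acc += n--;

     return acc;
   }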

   To do this, we maintain two accumulators (a_acc and m_acc) that indicate
   that when we reach the return x statement, we should return
   a_acc + x * m_acc instead.  They are initialized to 0 and 1, respectively,
   so the semantics of the function is obviously preserved.  If we are
   guaranteed that the value of the accumulator never changes, we
   omit the accumulator.

   There are three ways in which the function may exit.  The first one is
   handled in adjust_return_value, the other two in adjust_accumulator_values
   (the second case is actually a special case of the third one and we
   present it separately just for clarity):

   1) Just return x, where x is not in any of the remaining special shapes.
      We rewrite this to a gimple equivalent of return m_acc * x + a_acc.

   2) return f (...), where f is the current function, is rewritten in a
      classical tail-recursion elimination way, into assignment of arguments
      and jump to the start of the function.  Values of the accumulators
      are unchanged.

   3) return a + m * f (...), where a and m do not depend on the call to f.
      To preserve the semantics described above, we want this to be rewritten
      in such a way that we finally return

      a_acc + (a + m * f (...)) * m_acc
        = (a_acc + a * m_acc) + (m * m_acc) * f (...).

      I.e., we increase a_acc by a * m_acc, multiply m_acc by m and
      eliminate the tail call to f.  Special cases when the value is just
      added or just multiplied are obtained by setting a = 0 or m = 1.
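
   For instance, with a_acc == 5 and m_acc == 2, rewriting
   return 3 + 4 * f (...) updates a_acc to 5 + 3 * 2 == 11 and m_acc to
   4 * 2 == 8, which matches the identity above.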

   TODO -- it is possible to do similar tricks for other operations.  */

/* A structure that describes the tailcall.  */

struct tailcall
{
  /* The iterator pointing to the call statement.  */
  gimple_stmt_iterator call_gsi;

  /* True if it is a call to the current function.  */
  bool tail_recursion;

  /* The return value of the caller is mult * f + add, where f is the return
     value of the call.  */
  tree mult, add;

  /* Next tailcall in the chain.  */
  struct tailcall *next;
};

/* The variables holding the value of multiplicative and additive
   accumulator.  */
static tree m_acc, a_acc;

static bool suitable_for_tail_opt_p (void);
static bool optimize_tail_call (struct tailcall *, bool);
static void eliminate_tail_call (struct tailcall *);
static void find_tail_calls (basic_block, struct tailcall **);

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).  */

static bool
suitable_for_tail_opt_p (void)
{
  if (cfun->stdarg)
    return false;

  return true;
}

/* Returns false when the function is not suitable for tail call optimization
   for some reason (e.g. if it takes a variable number of arguments).
   This test must pass in addition to suitable_for_tail_opt_p in order to make
   tail call discovery happen.  */

static bool
suitable_for_tail_call_opt_p (void)
{
  tree param;

  /* alloca (until we have stack slot life analysis) inhibits
     sibling call optimizations, but not tail recursion.  */
  if (cfun->calls_alloca)
    return false;

  /* If we are using sjlj exceptions, we may need to add a call to
     _Unwind_SjLj_Unregister at the exit of the function, which means
     that we cannot do any sibcall transformations.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      && current_function_has_exception_handlers ())
    return false;

  /* Any function that calls setjmp might have longjmp called from
     any called function.  ??? We really should represent this
     properly in the CFG so that this needn't be special cased.  */
  if (cfun->calls_setjmp)
    return false;

  /* ??? It is OK if the address of an argument of a function is taken
     in some cases, but not in all cases.  See PR15387 and PR19616.
     Revisit for 4.1.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    if (TREE_ADDRESSABLE (param))
      return false;

  return true;
}

/* Checks whether the expression EXPR in stmt AT is independent of the
   statement pointed to by GSI (in the sense that we already know EXPR's value
   at GSI).  We use the fact that we are only called from the chain of
   basic blocks that have only a single successor.  Returns the expression
   containing the value of EXPR at GSI.  */

static tree
independent_of_stmt_p (tree expr, gimple at, gimple_stmt_iterator gsi)
{
  basic_block bb, call_bb, at_bb;
  edge e;
  edge_iterator ei;

  if (is_gimple_min_invariant (expr))
    return expr;

  if (TREE_CODE (expr) != SSA_NAME)
    return NULL_TREE;

  /* Mark the blocks in the chain leading to the end.  */
  at_bb = gimple_bb (at);
  call_bb = gimple_bb (gsi_stmt (gsi));
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = &bb->aux;
  bb->aux = &bb->aux;

  while (1)
    {
      at = SSA_NAME_DEF_STMT (expr);
      bb = gimple_bb (at);

      /* The default definition or defined before the chain.  */
      if (!bb || !bb->aux)
        break;

      if (bb == call_bb)
        {
          for (; !gsi_end_p (gsi); gsi_next (&gsi))
            if (gsi_stmt (gsi) == at)
              break;

          if (!gsi_end_p (gsi))
            expr = NULL_TREE;

          break;
        }

      if (gimple_code (at) != GIMPLE_PHI)
        {
          expr = NULL_TREE;
          break;
        }

      FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->src->aux)
          break;
      gcc_assert (e);

      expr = PHI_ARG_DEF_FROM_EDGE (at, e);
      if (TREE_CODE (expr) != SSA_NAME)
        {
          /* The value is a constant.  */
          break;
        }
    }

  /* Unmark the blocks.  */
  for (bb = call_bb; bb != at_bb; bb = single_succ (bb))
    bb->aux = NULL;
  bb->aux = NULL;

  return expr;
}

/* Simulates the effect of an assignment STMT on the return value of the tail
   recursive CALL passed in ASS_VAR.  M and A are the multiplicative and the
   additive factor for the real return value.  */

static bool
process_assignment (gimple stmt, gimple_stmt_iterator call, tree *m,
                    tree *a, tree *ass_var)
{
  tree op0, op1 = NULL_TREE, non_ass_var = NULL_TREE;
  tree dest = gimple_assign_lhs (stmt);
  enum tree_code code = gimple_assign_rhs_code (stmt);
  enum gimple_rhs_class rhs_class = get_gimple_rhs_class (code);
  tree src_var = gimple_assign_rhs1 (stmt);

  /* See if this is a simple copy operation of an SSA name to the function
     result.  In that case we may have a simple tail call.  Ignore type
     conversions that can never produce extra code between the function
     call and the function return.  */
  if ((rhs_class == GIMPLE_SINGLE_RHS || gimple_assign_cast_p (stmt))
      && (TREE_CODE (src_var) == SSA_NAME))
    {
      /* Reject a tailcall if the type conversion might need
         additional code.  */
      if (gimple_assign_cast_p (stmt)
          && TYPE_MODE (TREE_TYPE (dest)) != TYPE_MODE (TREE_TYPE (src_var)))
        return false;

      if (src_var != *ass_var)
        return false;

      *ass_var = dest;
      return true;
    }

  switch (rhs_class)
    {
    case GIMPLE_BINARY_RHS:
      op1 = gimple_assign_rhs2 (stmt);

      /* Fall through.  */

    case GIMPLE_UNARY_RHS:
      op0 = gimple_assign_rhs1 (stmt);
      break;

    default:
      return false;
    }

  /* Accumulator optimizations will reverse the order of operations.
     We can only do that for floating-point types if we're assuming
     that addition and multiplication are associative.  */
  if (!flag_associative_math)
    if (FLOAT_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
      return false;

  if (rhs_class == GIMPLE_UNARY_RHS)
    ;
  else if (op0 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op1, stmt, call)))
    ;
  else if (op1 == *ass_var
           && (non_ass_var = independent_of_stmt_p (op0, stmt, call)))
    ;
  else
    return false;

  switch (code)
    {
    case PLUS_EXPR:
      *a = non_ass_var;
      *ass_var = dest;
      return true;

    case MULT_EXPR:
      *m = non_ass_var;
      *ass_var = dest;
      return true;

    case NEGATE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        *m = build_real (TREE_TYPE (op0), dconstm1);
      else if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
        *m = build_int_cst (TREE_TYPE (op0), -1);
      else
        return false;

      *ass_var = dest;
      return true;

    case MINUS_EXPR:
      if (*ass_var == op0)
        *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
      else
        {
          if (FLOAT_TYPE_P (TREE_TYPE (non_ass_var)))
            *m = build_real (TREE_TYPE (non_ass_var), dconstm1);
          else if (INTEGRAL_TYPE_P (TREE_TYPE (non_ass_var)))
            *m = build_int_cst (TREE_TYPE (non_ass_var), -1);
          else
            return false;

          *a = fold_build1 (NEGATE_EXPR, TREE_TYPE (non_ass_var), non_ass_var);
        }

      *ass_var = dest;
      return true;

      /* TODO -- Handle POINTER_PLUS_EXPR.  */

    default:
      return false;
    }
}
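
/* An illustration of process_assignment (the SSA names are hypothetical):
   after t1_1 = f (...), the statement t2_2 = t1_1 + c_3 yields *a == c_3,
   t2_2 = t1_1 * c_3 yields *m == c_3, and t2_2 = -t1_1 yields *m == -1,
   per the cases above.  */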

/* Propagate VAR through phis on edge E.  */

static tree
propagate_through_phis (tree var, edge e)
{
  basic_block dest = e->dest;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      if (PHI_ARG_DEF_FROM_EDGE (phi, e) == var)
        return PHI_RESULT (phi);
    }

  return var;
}
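
/* For example, if DEST contains x_3 = PHI <x_1 (E), x_2 (...)> and VAR is
   x_1, the phi result x_3 is returned; otherwise VAR is returned unchanged
   (the SSA names here are illustrative).  */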

/* Finds tailcalls falling into basic block BB.  The list of found tailcalls is
   added to the start of RET.  */

static void
find_tail_calls (basic_block bb, struct tailcall **ret)
{
  tree ass_var = NULL_TREE, ret_var, func, param;
  gimple stmt, call = NULL;
  gimple_stmt_iterator gsi, agsi;
  bool tail_recursion;
  struct tailcall *nw;
  edge e;
  tree m, a;
  basic_block abb;
  size_t idx;
  tree var;

  if (!single_succ_p (bb))
    return;

  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      stmt = gsi_stmt (gsi);

      /* Ignore labels, returns, clobbers and debug stmts.  */
      if (gimple_code (stmt) == GIMPLE_LABEL
          || gimple_code (stmt) == GIMPLE_RETURN
          || gimple_clobber_p (stmt)
          || is_gimple_debug (stmt))
        continue;

      /* Check for a call.  */
      if (is_gimple_call (stmt))
        {
          call = stmt;
          ass_var = gimple_call_lhs (stmt);
          break;
        }

      /* If the statement references memory or volatile operands, fail.  */
      if (gimple_references_memory_p (stmt)
          || gimple_has_volatile_ops (stmt))
        return;
    }

  if (gsi_end_p (gsi))
    {
      edge_iterator ei;

      /* Recurse to the predecessors.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        find_tail_calls (e->src, ret);

      return;
    }

  /* If the LHS of our call is not just a simple register, we can't
     transform this into a tail or sibling call.  This situation happens in,
     e.g., "*p = foo ()" where foo returns a struct.  In this case
     we won't have a temporary here, but we need to carry out the side
     effect anyway, so tailcall is impossible.

     ??? In some situations (when the struct is returned in memory via
     invisible argument) we could deal with this, e.g. by passing 'p'
     itself as that argument to foo, but it's too early to do this here,
     and expand_call () will not handle it anyway.  If it ever can, then
     we need to revisit this here, to allow that situation.  */
  if (ass_var && !is_gimple_reg (ass_var))
    return;

  /* We found the call, check whether it is suitable.  */
  tail_recursion = false;
  func = gimple_call_fndecl (call);
  if (func == current_function_decl)
    {
      tree arg;

      for (param = DECL_ARGUMENTS (func), idx = 0;
           param && idx < gimple_call_num_args (call);
           param = DECL_CHAIN (param), idx++)
        {
          arg = gimple_call_arg (call, idx);
          if (param != arg)
            {
              /* Make sure there are no problems with copying.  The parameter
                 must have a copyable type and the two arguments must have
                 reasonably equivalent types.  The latter requirement could be
                 relaxed if we emitted a suitable type conversion
                 statement.  */
              if (!is_gimple_reg_type (TREE_TYPE (param))
                  || !useless_type_conversion_p (TREE_TYPE (param),
                                                 TREE_TYPE (arg)))
                break;

              /* The parameter should be a real operand, so that the phi node
                 created for it at the start of the function has the meaning
                 of copying the value.  This test implies is_gimple_reg_type
                 from the previous condition; however, this one could be
                 relaxed by being more careful with copying the new value
                 of the parameter (emitting appropriate GIMPLE_ASSIGN and
                 updating the virtual operands).  */
              if (!is_gimple_reg (param))
                break;
            }
        }
      if (idx == gimple_call_num_args (call) && !param)
        tail_recursion = true;
    }

  /* Make sure the tail invocation of this function does not refer
     to local variables.  */
  FOR_EACH_LOCAL_DECL (cfun, idx, var)
    {
      if (TREE_CODE (var) != PARM_DECL
          && auto_var_in_fn_p (var, cfun->decl)
          && (ref_maybe_used_by_stmt_p (call, var)
              || call_may_clobber_ref_p (call, var)))
        return;
    }

  /* Now check the statements after the call.  None of them has virtual
     operands, so they may only depend on the call through its return
     value.  The return value should also be dependent on each of them,
     since we are running after dce.  */
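  /* For instance (with hypothetical SSA names), after t1_1 = foo (x_2)
     followed by t2_3 = t1_1 + c_4 and return t2_3, the loop below
     finishes with m == NULL_TREE and a == c_4.  */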
  m = NULL_TREE;
  a = NULL_TREE;

  abb = bb;
  agsi = gsi;
  while (1)
    {
      tree tmp_a = NULL_TREE;
      tree tmp_m = NULL_TREE;
      gsi_next (&agsi);

      while (gsi_end_p (agsi))
        {
          ass_var = propagate_through_phis (ass_var, single_succ_edge (abb));
          abb = single_succ (abb);
          agsi = gsi_start_bb (abb);
        }

      stmt = gsi_stmt (agsi);

      if (gimple_code (stmt) == GIMPLE_LABEL)
        continue;

      if (gimple_code (stmt) == GIMPLE_RETURN)
        break;

      if (gimple_clobber_p (stmt))
        continue;

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_code (stmt) != GIMPLE_ASSIGN)
        return;

      /* This is a gimple assign.  */
      if (!process_assignment (stmt, gsi, &tmp_m, &tmp_a, &ass_var))
        return;

      if (tmp_a)
        {
          tree type = TREE_TYPE (tmp_a);
          if (a)
            a = fold_build2 (PLUS_EXPR, type, fold_convert (type, a), tmp_a);
          else
            a = tmp_a;
        }
      if (tmp_m)
        {
          tree type = TREE_TYPE (tmp_m);
          if (m)
            m = fold_build2 (MULT_EXPR, type, fold_convert (type, m), tmp_m);
          else
            m = tmp_m;

          if (a)
            a = fold_build2 (MULT_EXPR, type, fold_convert (type, a), tmp_m);
        }
    }

  /* See if this is a tail call we can handle.  */
  ret_var = gimple_return_retval (stmt);

  /* We may proceed if there either is no return value, or the return value
     is identical to the call's return.  */
  if (ret_var
      && (ret_var != ass_var))
    return;

  /* If this is not a tail recursive call, we cannot handle addends or
     multiplicands.  */
  if (!tail_recursion && (m || a))
    return;

  /* For pointers don't allow additions or multiplications.  */
  if ((m || a)
      && POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (current_function_decl))))
    return;

  nw = XNEW (struct tailcall);

  nw->call_gsi = gsi;

  nw->tail_recursion = tail_recursion;

  nw->mult = m;
  nw->add = a;

  nw->next = *ret;
  *ret = nw;
}

/* Helper to insert PHI_ARG to the phi of VAR in the destination of edge E.  */

static void
add_successor_phi_arg (edge e, tree var, tree phi_arg)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    if (PHI_RESULT (gsi_stmt (gsi)) == var)
      break;

  gcc_assert (!gsi_end_p (gsi));
  add_phi_arg (gsi_stmt (gsi), phi_arg, e, UNKNOWN_LOCATION);
}

/* Creates a GIMPLE statement which applies the operation specified by
   CODE to ACC and OP1, assigns the result to a new variable named after
   LABEL, and inserts the statement at the position specified by GSI.
   Returns the tree node of the statement's result.  */

static tree
adjust_return_value_with_ops (enum tree_code code, const char *label,
                              tree acc, tree op1, gimple_stmt_iterator gsi)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree result = make_temp_ssa_name (ret_type, NULL, label);
  gimple stmt;

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, result, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_build2 (code,
                                            TREE_TYPE (op1),
                                            fold_convert (TREE_TYPE (op1), acc),
                                            op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, true, GSI_SAME_STMT);
      stmt = gimple_build_assign (result, rhs);
    }

  gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
  return result;
}

/* Creates a new GIMPLE statement that adjusts the value of accumulator ACC by
   the computation specified by CODE and OP1 and inserts the statement
   at the position specified by GSI as a new statement.  Returns the new SSA
   name of the updated accumulator.  */

static tree
update_accumulator_with_ops (enum tree_code code, tree acc, tree op1,
                             gimple_stmt_iterator gsi)
{
  gimple stmt;
  tree var = copy_ssa_name (acc, NULL);

  if (types_compatible_p (TREE_TYPE (acc), TREE_TYPE (op1)))
    stmt = gimple_build_assign_with_ops (code, var, acc, op1);
  else
    {
      tree rhs = fold_convert (TREE_TYPE (acc),
                               fold_build2 (code,
                                            TREE_TYPE (op1),
                                            fold_convert (TREE_TYPE (op1), acc),
                                            op1));
      rhs = force_gimple_operand_gsi (&gsi, rhs,
                                      false, NULL, false, GSI_CONTINUE_LINKING);
      stmt = gimple_build_assign (var, rhs);
    }

  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
  return var;
}

/* Adjust the accumulator values according to A and M after GSI, and update
   the phi nodes on edge BACK.  */

static void
adjust_accumulator_values (gimple_stmt_iterator gsi, tree m, tree a, edge back)
{
  tree var, a_acc_arg, m_acc_arg;

  if (m)
    m = force_gimple_operand_gsi (&gsi, m, true, NULL, true, GSI_SAME_STMT);
  if (a)
    a = force_gimple_operand_gsi (&gsi, a, true, NULL, true, GSI_SAME_STMT);

  a_acc_arg = a_acc;
  m_acc_arg = m_acc;
  if (a)
    {
      if (m_acc)
        {
          if (integer_onep (a))
            var = m_acc;
          else
            var = adjust_return_value_with_ops (MULT_EXPR, "acc_tmp", m_acc,
                                                a, gsi);
        }
      else
        var = a;

      a_acc_arg = update_accumulator_with_ops (PLUS_EXPR, a_acc, var, gsi);
    }

  if (m)
    m_acc_arg = update_accumulator_with_ops (MULT_EXPR, m_acc, m, gsi);

  if (a_acc)
    add_successor_phi_arg (back, a_acc, a_acc_arg);

  if (m_acc)
    add_successor_phi_arg (back, m_acc, m_acc_arg);
}

/* Adjust value of the return at the end of BB according to M and A
   accumulators.  */

static void
adjust_return_value (basic_block bb, tree m, tree a)
{
  tree retval;
  gimple ret_stmt = gimple_seq_last_stmt (bb_seq (bb));
  gimple_stmt_iterator gsi = gsi_last_bb (bb);

  gcc_assert (gimple_code (ret_stmt) == GIMPLE_RETURN);

  retval = gimple_return_retval (ret_stmt);
  if (!retval || retval == error_mark_node)
    return;

  if (m)
    retval = adjust_return_value_with_ops (MULT_EXPR, "mul_tmp", m_acc, retval,
                                           gsi);
  if (a)
    retval = adjust_return_value_with_ops (PLUS_EXPR, "acc_tmp", a_acc, retval,
                                           gsi);
  gimple_return_set_retval (ret_stmt, retval);
  update_stmt (ret_stmt);
}
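
/* E.g. a plain return x becomes the equivalent of
   mul_tmp = m_acc * x; acc_tmp = a_acc + mul_tmp; return acc_tmp;
   which is case 1 of the overview at the top of this file.  */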

/* Subtract COUNT and FREQUENCY from the basic block BB and its
   outgoing edge.  */

static void
decrease_profile (basic_block bb, gcov_type count, int frequency)
{
  edge e;

  bb->count -= count;
  if (bb->count < 0)
    bb->count = 0;
  bb->frequency -= frequency;
  if (bb->frequency < 0)
    bb->frequency = 0;
  if (!single_succ_p (bb))
    {
      gcc_assert (!EDGE_COUNT (bb->succs));
      return;
    }
  e = single_succ_edge (bb);
  e->count -= count;
  if (e->count < 0)
    e->count = 0;
}

/* Returns true if argument PARAM of the tail recursive call needs to be copied
   when the call is eliminated.  */

static bool
arg_needs_copy_p (tree param)
{
  tree def;

  if (!is_gimple_reg (param))
    return false;

  /* Parameters that are only defined but never used need not be copied.  */
  def = ssa_default_def (cfun, param);
  if (!def)
    return false;

  return true;
}

/* Eliminates the tail call described by T.  */

static void
eliminate_tail_call (struct tailcall *t)
{
  tree param, rslt;
  gimple stmt, call;
  tree arg;
  size_t idx;
  basic_block bb, first;
  edge e;
  gimple phi;
  gimple_stmt_iterator gsi;
  gimple orig_stmt;

  stmt = orig_stmt = gsi_stmt (t->call_gsi);
  bb = gsi_bb (t->call_gsi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Eliminated tail recursion in bb %d : ",
               bb->index);
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gcc_assert (is_gimple_call (stmt));

  first = single_succ (ENTRY_BLOCK_PTR);

  /* Remove the code after call_gsi that will become unreachable.  The
     possibly unreachable code in other blocks is removed later in
     cfg cleanup.  */
  gsi = t->call_gsi;
  gsi_next (&gsi);
  while (!gsi_end_p (gsi))
    {
      gimple t = gsi_stmt (gsi);
      /* Do not remove the return statement, so that redirect_edge_and_branch
         sees how the block ends.  */
      if (gimple_code (t) == GIMPLE_RETURN)
        break;

      gsi_remove (&gsi, true);
      release_defs (t);
    }

  /* The number of executions of the function has been reduced by the
     tailcall.  */
  e = single_succ_edge (gsi_bb (t->call_gsi));
  decrease_profile (EXIT_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  decrease_profile (ENTRY_BLOCK_PTR, e->count, EDGE_FREQUENCY (e));
  if (e->dest != EXIT_BLOCK_PTR)
    decrease_profile (e->dest, e->count, EDGE_FREQUENCY (e));

  /* Replace the call by a jump to the start of the function.  */
  e = redirect_edge_and_branch (single_succ_edge (gsi_bb (t->call_gsi)),
                                first);
  gcc_assert (e);
  PENDING_STMT (e) = NULL;

  /* Add phi node entries for arguments.  The ordering of the phi nodes should
     be the same as the ordering of the arguments.  */
  for (param = DECL_ARGUMENTS (current_function_decl),
         idx = 0, gsi = gsi_start_phis (first);
       param;
       param = DECL_CHAIN (param), idx++)
    {
      if (!arg_needs_copy_p (param))
        continue;

      arg = gimple_call_arg (stmt, idx);
      phi = gsi_stmt (gsi);
      gcc_assert (param == SSA_NAME_VAR (PHI_RESULT (phi)));

      add_phi_arg (phi, arg, e, gimple_location (stmt));
      gsi_next (&gsi);
    }

  /* Update the values of accumulators.  */
  adjust_accumulator_values (t->call_gsi, t->mult, t->add, e);

  call = gsi_stmt (t->call_gsi);
  rslt = gimple_call_lhs (call);
  if (rslt != NULL_TREE)
    {
      /* Result of the call will no longer be defined.  So adjust the
         SSA_NAME_DEF_STMT accordingly.  */
      SSA_NAME_DEF_STMT (rslt) = gimple_build_nop ();
    }

  gsi_remove (&t->call_gsi, true);
  release_defs (call);
}

/* Optimizes the tailcall described by T.  If OPT_TAILCALLS is true, also
   mark the tailcalls for the sibcall optimization.  */

static bool
optimize_tail_call (struct tailcall *t, bool opt_tailcalls)
{
  if (t->tail_recursion)
    {
      eliminate_tail_call (t);
      return true;
    }

  if (opt_tailcalls)
    {
      gimple stmt = gsi_stmt (t->call_gsi);

      gimple_call_set_tail (stmt, true);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Found tail call ");
          print_gimple_stmt (dump_file, stmt, 0, dump_flags);
          fprintf (dump_file, " in bb %i\n", (gsi_bb (t->call_gsi))->index);
        }
    }

  return false;
}

/* Creates a tail-call accumulator of the same type as the return type of the
   current function.  LABEL is the name used when creating the temporary
   variable for the accumulator.  The accumulator will be inserted in the
   phis of a basic block BB with a single predecessor, with an initial value
   INIT converted to the current function return type.  */

static tree
create_tailcall_accumulator (const char *label, basic_block bb, tree init)
{
  tree ret_type = TREE_TYPE (DECL_RESULT (current_function_decl));
  tree tmp = make_temp_ssa_name (ret_type, NULL, label);
  gimple phi;

  phi = create_phi_node (tmp, bb);
  /* RET_TYPE can be a float when -ffast-math is enabled.  */
  add_phi_arg (phi, fold_convert (ret_type, init), single_pred_edge (bb),
               UNKNOWN_LOCATION);
  return PHI_RESULT (phi);
}

/* Optimizes tail calls in the function, turning the tail recursion
   into iteration.  */

static unsigned int
tree_optimize_tail_calls_1 (bool opt_tailcalls)
{
  edge e;
  bool phis_constructed = false;
  struct tailcall *tailcalls = NULL, *act, *next;
  bool changed = false;
  basic_block first = single_succ (ENTRY_BLOCK_PTR);
  tree param;
  gimple stmt;
  edge_iterator ei;

  if (!suitable_for_tail_opt_p ())
    return 0;
  if (opt_tailcalls)
    opt_tailcalls = suitable_for_tail_call_opt_p ();

  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    {
      /* Only traverse the normal exits, i.e. those that end with a return
         statement.  */
      stmt = last_stmt (e->src);

      if (stmt
          && gimple_code (stmt) == GIMPLE_RETURN)
        find_tail_calls (e->src, &tailcalls);
    }

  /* Construct the phi nodes and accumulators if necessary.  */
  a_acc = m_acc = NULL_TREE;
  for (act = tailcalls; act; act = act->next)
    {
      if (!act->tail_recursion)
        continue;

      if (!phis_constructed)
        {
          /* Ensure that there is only one predecessor of the block
             and no existing degenerate PHI nodes; otherwise split
             the entry edge.  */
          if (!single_pred_p (first)
              || !gimple_seq_empty_p (phi_nodes (first)))
            first = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));

          /* Copy the args if needed.  */
          for (param = DECL_ARGUMENTS (current_function_decl);
               param;
               param = DECL_CHAIN (param))
            if (arg_needs_copy_p (param))
              {
                tree name = ssa_default_def (cfun, param);
                tree new_name = make_ssa_name (param, SSA_NAME_DEF_STMT (name));
                gimple phi;

                set_ssa_default_def (cfun, param, new_name);
                phi = create_phi_node (name, first);
                add_phi_arg (phi, new_name, single_pred_edge (first),
                             EXPR_LOCATION (param));
              }
          phis_constructed = true;
        }

      if (act->add && !a_acc)
        a_acc = create_tailcall_accumulator ("add_acc", first,
                                             integer_zero_node);

      if (act->mult && !m_acc)
        m_acc = create_tailcall_accumulator ("mult_acc", first,
                                             integer_one_node);
    }

  if (a_acc || m_acc)
    {
      /* When the tail call elimination using accumulators is performed,
         statements adding the accumulated value are inserted at all exits.
         This turns all other tail calls into non-tail ones.  */
      opt_tailcalls = false;
    }

  for (; tailcalls; tailcalls = next)
    {
      next = tailcalls->next;
      changed |= optimize_tail_call (tailcalls, opt_tailcalls);
      free (tailcalls);
    }

  if (a_acc || m_acc)
    {
      /* Modify the remaining return statements.  */
      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
        {
          stmt = last_stmt (e->src);

          if (stmt
              && gimple_code (stmt) == GIMPLE_RETURN)
            adjust_return_value (e->src, m_acc, a_acc);
        }
    }

  if (changed)
    {
      /* We may have created new loops.  Make them magically appear.  */
      if (current_loops)
        loops_state_set (LOOPS_NEED_FIXUP);
      free_dominance_info (CDI_DOMINATORS);
    }

  /* Add phi nodes for the virtual operands defined in the function to the
     header of the loop created by tail recursion elimination.  Do so
     by triggering the SSA renamer.  */
  if (phis_constructed)
    mark_virtual_operands_for_renaming (cfun);

  if (changed)
    return TODO_cleanup_cfg | TODO_update_ssa_only_virtuals;
  return 0;
}

/* Entry point of the tail recursion elimination pass; does not mark
   sibcalls.  */

static unsigned int
execute_tail_recursion (void)
{
  return tree_optimize_tail_calls_1 (false);
}

/* Gate for both passes below.  */

static bool
gate_tail_calls (void)
{
  return flag_optimize_sibling_calls != 0 && dbg_cnt (tail_call);
}

/* Entry point of the sibcall-marking pass.  */

static unsigned int
execute_tail_calls (void)
{
  return tree_optimize_tail_calls_1 (true);
}

struct gimple_opt_pass pass_tail_recursion =
{
 {
  GIMPLE_PASS,
  "tailr",                              /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_tail_calls,                      /* gate */
  execute_tail_recursion,               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa                       /* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_tail_calls =
{
 {
  GIMPLE_PASS,
  "tailc",                              /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  gate_tail_calls,                      /* gate */
  execute_tail_calls,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_NONE,                              /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_verify_ssa                       /* todo_flags_finish */
 }
};