/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"
#include "langhooks.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */

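/* As a purely illustrative sketch (the labels and temporaries below are
   invented for this comment and are not produced by any code in this
   file), a High GIMPLE body such as

       {
	 if (a) return 0;
	 return 0;
       }

   becomes, after lowering, a flat sequence in which the enclosing
   GIMPLE_BIND is gone and both returns branch to a single return site
   emitted at the end of the function:

       if (a) goto <L0>; else goto <L1>;
       <L0>:
       goto <ret>;
       <L1>:
       goto <ret>;
       <ret>:
       return 0;  */
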
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);

/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
	  || gimple_return_retval (VEC_last (return_statements_t,
			data.return_statements)->stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
	 objects.  */
      t = *VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
		    data.return_statements,
		    VEC_length (return_statements_t,
				data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);
  return 0;
}

struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",			/* name */
  NULL,				/* gate */
  lower_function_body,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_NONE,			/* tv_id */
  PROP_gimple_any,		/* properties_required */
  PROP_gimple_lcf,		/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  0				/* todo_flags_finish */
 }
};

/* Verify if the type of the argument matches that of the function
   declaration.  If we cannot verify this or there is a mismatch,
   return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify if the type of the argument matches that of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
	   i < nargs;
	   i++, p = DECL_CHAIN (p))
	{
	  tree arg;

	  /* We cannot distinguish a varargs function from the case
	     of excess parameters, still deferring the inlining decision
	     to the callee is possible.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (p == error_mark_node
	      || arg == error_mark_node
	      || (!types_compatible_p (DECL_ARG_TYPE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (DECL_ARG_TYPE (p), arg)))
	    return false;
	}
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
	{
	  tree arg;

	  /* If this is a varargs function defer inlining decision
	     to callee.  */
	  if (!p)
	    break;
	  arg = gimple_call_arg (stmt, i);
	  if (TREE_VALUE (p) == error_mark_node
	      || arg == error_mark_node
	      || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
	      || (!types_compatible_p (TREE_VALUE (p), TREE_TYPE (arg))
		  && !fold_convertible_p (TREE_VALUE (p), arg)))
	    return false;
	}
    }
  else
    {
      if (nargs != 0)
	return false;
    }

  return true;
}

/* Verify if the type of the argument and lhs of CALL_STMT matches
   that of the function declaration CALLEE.
   If we cannot verify this or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
				      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee))
    return false;
  return true;
}

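/* As a hedged illustration (the declarations below are invented for this
   comment), a call made through an incompatible function pointer type,
   e.g.

       extern int f (int);
       ...
       double (*fp) (double) = (double (*) (double)) f;
       fp (3.0);

   presents argument and return types that neither match f's declaration
   nor can be fold-converted to it, so this predicate returns false for
   such a call site.  */
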
/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}

/* Lower the OpenMP directive statement pointed by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body (stmt), data);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  gsi_insert_seq_before (gsi, gimple_omp_body (stmt), GSI_SAME_STMT);
  gimple_omp_set_body (stmt, NULL);
  gsi_remove (gsi, false);
}

/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

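/* As an illustrative example (not tied to any particular test case), in a
   sequence like

       return x;
       return x;

   the first return sets cannot_fallthru, so the second, unreachable return
   is simply deleted here instead of being lowered into a goto to the
   common return site.  */
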
static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      {
	bool try_cannot_fallthru;
	lower_sequence (gimple_try_eval (stmt), data);
	try_cannot_fallthru = data->cannot_fallthru;
	data->cannot_fallthru = false;
	lower_sequence (gimple_try_cleanup (stmt), data);
	/* See gimple_stmt_may_fallthru for the rationale.  */
	if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	  {
	    data->cannot_fallthru |= try_cannot_fallthru;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_CATCH:
      data->cannot_fallthru = false;
      lower_sequence (gimple_catch_handler (stmt), data);
      break;

    case GIMPLE_EH_FILTER:
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure (stmt), data);
      break;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body (stmt), data);
      lower_sequence (gimple_eh_else_e_body (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    lower_builtin_setjmp (gsi);
	    data->cannot_fallthru = false;
	    data->calls_builtin_setjmp = true;
	    return;
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}

/* Lower a bind_expr TSI.  DATA is passed through the recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;
	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (gimple_try_cleanup (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}

/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence can not fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      return lang_hooks.block_may_fallthru (stmt);
    }
}

/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
	      || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}

/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}

/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      void * setjmpvar.0;
      int D.1844;
      int D.2844;

      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3850>:;
      return;
      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */

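/* For reference only: the user-level pattern that reaches this function is
   a direct call to the builtin, e.g. (BUF and the called functions are
   made-up names for this sketch)

       void *buf[5];
       ...
       if (__builtin_setjmp (buf) == 0)
	 do_first_pass ();
       else
	 recover_after_longjmp ();

   Each such call site is rewritten in place into the setup/receiver pair
   shown above, while the single dispatcher block is appended once per
   function by lower_function_body.  */
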
static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  if (fn != current_function_decl)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs contains also function/type/constant declarations
	 we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
      if (gimple_referenced_vars (cfun))
	add_referenced_var (var);
    }

  if (fn != current_function_decl)
    pop_cfun ();
}

/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}