/* GIMPLE lowering pass.  Converts High GIMPLE into Low GIMPLE.

   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "flags.h"
#include "function.h"
#include "diagnostic-core.h"
#include "tree-pass.h"

/* The differences between High GIMPLE and Low GIMPLE are the
   following:

   1- Lexical scopes are removed (i.e., GIMPLE_BIND disappears).

   2- GIMPLE_TRY and GIMPLE_CATCH are converted to abnormal control
      flow and exception regions are built as an on-the-side region
      hierarchy (See tree-eh.c:lower_eh_constructs).

   3- Multiple identical return statements are grouped into a single
      return and gotos to the unique return site.  */
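
/* As an illustration only (the temporaries and labels below are made up),
   a gimplified function body such as

       {
         int x;

         x = a + b;
         if (x > 0) goto <T1>; else goto <T2>;
         <T1>:
         return x;
         <T2>:
         x = 0;
         return x;
       }

   is lowered by this pass into a flat sequence: the enclosing GIMPLE_BIND
   and its lexical scope disappear (X is recorded among the function's
   local declarations instead), and each "return x" is replaced by a goto
   to a shared label whose single "return x" statement is emitted at the
   end of the function by lower_function_body.  */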
/* Match a return statement with a label.  During lowering, we identify
   identical return statements and replace duplicates with a jump to
   the corresponding label.  */
struct return_statements_t
{
  tree label;
  gimple stmt;
};
typedef struct return_statements_t return_statements_t;

DEF_VEC_O(return_statements_t);
DEF_VEC_ALLOC_O(return_statements_t,heap);

struct lower_data
{
  /* Block the current statement belongs to.  */
  tree block;

  /* A vector of label and return statements to be moved to the end
     of the function.  */
  VEC(return_statements_t,heap) *return_statements;

  /* True if the current statement cannot fall through.  */
  bool cannot_fallthru;

  /* True if the function calls __builtin_setjmp.  */
  bool calls_builtin_setjmp;
};

static void lower_stmt (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_bind (gimple_stmt_iterator *, struct lower_data *);
static void lower_gimple_return (gimple_stmt_iterator *, struct lower_data *);
static void lower_builtin_setjmp (gimple_stmt_iterator *);

/* Lower the body of current_function_decl from High GIMPLE into Low
   GIMPLE.  */

static unsigned int
lower_function_body (void)
{
  struct lower_data data;
  gimple_seq body = gimple_body (current_function_decl);
  gimple_seq lowered_body;
  gimple_stmt_iterator i;
  gimple bind;
  tree t;
  gimple x;

  /* The gimplifier should've left a body of exactly one statement,
     namely a GIMPLE_BIND.  */
  gcc_assert (gimple_seq_first (body) == gimple_seq_last (body)
	      && gimple_code (gimple_seq_first_stmt (body)) == GIMPLE_BIND);

  memset (&data, 0, sizeof (data));
  data.block = DECL_INITIAL (current_function_decl);
  BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
  BLOCK_CHAIN (data.block) = NULL_TREE;
  TREE_ASM_WRITTEN (data.block) = 1;
  data.return_statements = VEC_alloc (return_statements_t, heap, 8);

  bind = gimple_seq_first_stmt (body);
  lowered_body = NULL;
  gimple_seq_add_stmt (&lowered_body, bind);
  i = gsi_start (lowered_body);
  lower_gimple_bind (&i, &data);

  /* Once the old body has been lowered, replace it with the new
     lowered sequence.  */
  gimple_set_body (current_function_decl, lowered_body);

  i = gsi_last (lowered_body);

  /* If the function falls off the end, we need a null return statement.
     If we've already got one in the return_statements vector, we don't
     need to do anything special.  Otherwise build one by hand.  */
  if (gimple_seq_may_fallthru (lowered_body)
      && (VEC_empty (return_statements_t, data.return_statements)
	  || gimple_return_retval (VEC_last (return_statements_t,
					     data.return_statements)->stmt) != NULL))
    {
      x = gimple_build_return (NULL);
      gimple_set_location (x, cfun->function_end_locus);
      gimple_set_block (x, DECL_INITIAL (current_function_decl));
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  /* If we lowered any return statements, emit the representative
     at the end of the function.  */
  while (!VEC_empty (return_statements_t, data.return_statements))
    {
      return_statements_t t;

      /* Unfortunately, we can't use VEC_pop because it returns void for
	 objects.  */
      t = *VEC_last (return_statements_t, data.return_statements);
      VEC_truncate (return_statements_t,
		    data.return_statements,
		    VEC_length (return_statements_t,
				data.return_statements) - 1);

      x = gimple_build_label (t.label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
    }

  /* If the function calls __builtin_setjmp, we need to emit the computed
     goto that will serve as the unique dispatcher for all the receivers.  */
  if (data.calls_builtin_setjmp)
    {
      tree disp_label, disp_var, arg;

      /* Build 'DISP_LABEL:' and insert.  */
      disp_label = create_artificial_label (cfun->function_end_locus);
      /* This mark will create forward edges from every call site.  */
      DECL_NONLOCAL (disp_label) = 1;
      cfun->has_nonlocal_label = 1;
      x = gimple_build_label (disp_label);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);

      /* Build 'DISP_VAR = __builtin_setjmp_dispatcher (DISP_LABEL);'
	 and insert.  */
      disp_var = create_tmp_var (ptr_type_node, "setjmpvar");
      arg = build_addr (disp_label, current_function_decl);
      t = builtin_decl_implicit (BUILT_IN_SETJMP_DISPATCHER);
      x = gimple_build_call (t, 1, arg);
      gimple_call_set_lhs (x, disp_var);

      /* Build 'goto DISP_VAR;' and insert.  */
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
      x = gimple_build_goto (disp_var);
      gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
    }

  gcc_assert (data.block == DECL_INITIAL (current_function_decl));
  BLOCK_SUBBLOCKS (data.block)
    = blocks_nreverse (BLOCK_SUBBLOCKS (data.block));

  clear_block_marks (data.block);
  VEC_free (return_statements_t, heap, data.return_statements);
  return 0;
}

struct gimple_opt_pass pass_lower_cf =
{
 {
  GIMPLE_PASS,
  "lower",				/* name */
  NULL,					/* gate */
  lower_function_body,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_gimple_any,			/* properties_required */
  PROP_gimple_lcf,			/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};

/* Verify if the types of the arguments match those of the function
   declaration.  If we cannot verify this or there is a mismatch,
   return false.  */

static bool
gimple_check_call_args (gimple stmt, tree fndecl)
{
  tree parms, p;
  unsigned int i, nargs;

  /* Calls to internal functions always match their signature.  */
  if (gimple_call_internal_p (stmt))
    return true;

  nargs = gimple_call_num_args (stmt);

  /* Get argument types for verification.  */
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (gimple_call_fntype (stmt));

  /* Verify if the types of the arguments match those of the function
     declaration.  If we cannot verify this or there is a mismatch,
     return false.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    {
      for (i = 0, p = DECL_ARGUMENTS (fndecl);
	   i < nargs;
	   i++, p = DECL_CHAIN (p))
	{
	  /* We cannot distinguish a varargs function from the case
	     of excess parameters; still, deferring the inlining decision
	     to the callee is possible.  */
	  if (!p)
	    break;
	  if (p == error_mark_node
	      || gimple_call_arg (stmt, i) == error_mark_node
	      || !fold_convertible_p (DECL_ARG_TYPE (p),
				      gimple_call_arg (stmt, i)))
	    return false;
	}
    }
  else if (parms)
    {
      for (i = 0, p = parms; i < nargs; i++, p = TREE_CHAIN (p))
	{
	  /* If this is a varargs function, defer the inlining decision
	     to the callee.  */
	  if (!p)
	    break;
	  if (TREE_VALUE (p) == error_mark_node
	      || gimple_call_arg (stmt, i) == error_mark_node
	      || TREE_CODE (TREE_VALUE (p)) == VOID_TYPE
	      || !fold_convertible_p (TREE_VALUE (p),
				      gimple_call_arg (stmt, i)))
	    return false;
	}
    }
  else
    {
      if (nargs != 0)
	return false;
    }
  return true;
}
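
/* A made-up example of the kind of mismatch this catches: if the only
   visible declaration is

       void callee (int);

   while a call site actually passes a structure by value, the argument's
   RECORD_TYPE is not fold-convertible to "int", so gimple_check_call_args
   returns false and the call is treated as having mismatched types.  */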

/* Verify if the types of the arguments and the lhs of CALL_STMT match
   those of the function declaration CALLEE.
   If we cannot verify this or there is a mismatch, return false.  */

bool
gimple_check_call_matching_types (gimple call_stmt, tree callee)
{
  tree lhs;

  if ((DECL_RESULT (callee)
       && !DECL_BY_REFERENCE (DECL_RESULT (callee))
       && (lhs = gimple_call_lhs (call_stmt)) != NULL_TREE
       && !useless_type_conversion_p (TREE_TYPE (DECL_RESULT (callee)),
				      TREE_TYPE (lhs))
       && !fold_convertible_p (TREE_TYPE (DECL_RESULT (callee)), lhs))
      || !gimple_check_call_args (call_stmt, callee))
    return false;
  return true;
}
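
/* Illustrative use only (not a quote of any particular caller): passes such
   as the inliner guard their decisions with

       if (!gimple_check_call_matching_types (call_stmt, callee_decl))
	 ... give up on substituting the body of CALLEE_DECL ...

   so that a call site is only transformed when both the arguments and the
   lhs/return value are type-compatible with the declaration.  */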

/* Lower sequence SEQ.  Unlike gimplification the statements are not relowered
   when they are changed -- if this has to be done, the lowering routine must
   do it explicitly.  DATA is passed through the recursion.  */

static void
lower_sequence (gimple_seq seq, struct lower_data *data)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start (seq); !gsi_end_p (gsi); )
    lower_stmt (&gsi, data);
}

/* Lower the OpenMP directive statement pointed to by GSI.  DATA is
   passed through the recursion.  */

static void
lower_omp_directive (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt;

  stmt = gsi_stmt (*gsi);

  lower_sequence (gimple_omp_body (stmt), data);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  gsi_insert_seq_before (gsi, gimple_omp_body (stmt), GSI_SAME_STMT);
  gimple_omp_set_body (stmt, NULL);
  gsi_remove (gsi, false);
}
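
/* Sketch of the effect (statement names made up): a directive that owns a
   nested body, e.g.

       GIMPLE_OMP_PARALLEL
	 <body-sequence>

   is rewritten into the flat form

       GIMPLE_OMP_PARALLEL   (its body is now NULL)
       <lowered body-sequence>

   i.e. the body is lowered, spliced inline right after the directive, and
   the directive itself no longer carries a nested sequence.  */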

/* Lower statement GSI.  DATA is passed through the recursion.  We try to
   track the fallthruness of statements and get rid of unreachable return
   statements in order to prevent the EH lowering pass from adding useless
   edges that can cause bogus warnings to be issued later; this guess need
   not be 100% accurate, simply be conservative and reset cannot_fallthru
   to false if we don't know.  */

static void
lower_stmt (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);

  gimple_set_block (stmt, data->block);

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      lower_gimple_bind (gsi, data);
      /* Propagate fallthruness.  */
      return;

    case GIMPLE_COND:
    case GIMPLE_GOTO:
    case GIMPLE_SWITCH:
      data->cannot_fallthru = true;
      gsi_next (gsi);
      return;

    case GIMPLE_RETURN:
      if (data->cannot_fallthru)
	{
	  gsi_remove (gsi, false);
	  /* Propagate fallthruness.  */
	}
      else
	{
	  lower_gimple_return (gsi, data);
	  data->cannot_fallthru = true;
	}
      return;

    case GIMPLE_TRY:
      {
	bool try_cannot_fallthru;
	lower_sequence (gimple_try_eval (stmt), data);
	try_cannot_fallthru = data->cannot_fallthru;
	data->cannot_fallthru = false;
	lower_sequence (gimple_try_cleanup (stmt), data);
	/* See gimple_stmt_may_fallthru for the rationale.  */
	if (gimple_try_kind (stmt) == GIMPLE_TRY_FINALLY)
	  {
	    data->cannot_fallthru |= try_cannot_fallthru;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_CATCH:
      data->cannot_fallthru = false;
      lower_sequence (gimple_catch_handler (stmt), data);
      break;

    case GIMPLE_EH_FILTER:
      data->cannot_fallthru = false;
      lower_sequence (gimple_eh_filter_failure (stmt), data);
      break;

    case GIMPLE_EH_ELSE:
      lower_sequence (gimple_eh_else_n_body (stmt), data);
      lower_sequence (gimple_eh_else_e_body (stmt), data);
      break;

    case GIMPLE_NOP:
    case GIMPLE_ASM:
    case GIMPLE_ASSIGN:
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
    case GIMPLE_EH_MUST_NOT_THROW:
    case GIMPLE_OMP_FOR:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SECTIONS_SWITCH:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_RETURN:
    case GIMPLE_OMP_ATOMIC_LOAD:
    case GIMPLE_OMP_ATOMIC_STORE:
    case GIMPLE_OMP_CONTINUE:
      break;

    case GIMPLE_CALL:
      {
	tree decl = gimple_call_fndecl (stmt);

	if (decl
	    && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	    && DECL_FUNCTION_CODE (decl) == BUILT_IN_SETJMP)
	  {
	    lower_builtin_setjmp (gsi);
	    data->cannot_fallthru = false;
	    data->calls_builtin_setjmp = true;
	    return;
	  }

	if (decl && (flags_from_decl_or_type (decl) & ECF_NORETURN))
	  {
	    data->cannot_fallthru = true;
	    gsi_next (gsi);
	    return;
	  }
      }
      break;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      data->cannot_fallthru = false;
      lower_omp_directive (gsi, data);
      data->cannot_fallthru = false;
      return;

    case GIMPLE_TRANSACTION:
      lower_sequence (gimple_transaction_body (stmt), data);
      break;

    default:
      gcc_unreachable ();
    }

  data->cannot_fallthru = false;
  gsi_next (gsi);
}
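
/* For instance (made-up fragment), in a High GIMPLE sequence such as

       goto <exit>;
       return;

   the goto sets cannot_fallthru, so the unreachable "return" is simply
   deleted here rather than being rewritten into a jump to the common
   return label; this keeps tree-eh.c from creating edges out of dead code
   that could later trigger bogus warnings.  */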

/* Lower a GIMPLE_BIND statement GSI.  DATA is passed through the
   recursion.  */

static void
lower_gimple_bind (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  tree old_block = data->block;
  gimple stmt = gsi_stmt (*gsi);
  tree new_block = gimple_bind_block (stmt);

  if (new_block)
    {
      if (new_block == old_block)
	{
	  /* The outermost block of the original function may not be the
	     outermost statement chain of the gimplified function.  So we
	     may see the outermost block just inside the function.  */
	  gcc_assert (new_block == DECL_INITIAL (current_function_decl));
	  new_block = NULL;
	}
      else
	{
	  /* We do not expect to handle duplicate blocks.  */
	  gcc_assert (!TREE_ASM_WRITTEN (new_block));
	  TREE_ASM_WRITTEN (new_block) = 1;

	  /* Block tree may get clobbered by inlining.  Normally this would
	     be fixed in rest_of_decl_compilation using block notes, but
	     since we are not going to emit them, it is up to us.  */
	  BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (old_block);
	  BLOCK_SUBBLOCKS (old_block) = new_block;
	  BLOCK_SUBBLOCKS (new_block) = NULL_TREE;
	  BLOCK_SUPERCONTEXT (new_block) = old_block;

	  data->block = new_block;
	}
    }

  record_vars (gimple_bind_vars (stmt));
  lower_sequence (gimple_bind_body (stmt), data);

  if (new_block)
    {
      gcc_assert (data->block == new_block);

      BLOCK_SUBBLOCKS (new_block)
	= blocks_nreverse (BLOCK_SUBBLOCKS (new_block));
      data->block = old_block;
    }

  /* The GIMPLE_BIND no longer carries any useful information -- kill it.  */
  gsi_insert_seq_before (gsi, gimple_bind_body (stmt), GSI_SAME_STMT);
  gsi_remove (gsi, false);
}

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}

/* Same as above, but for a GIMPLE_TRY_CATCH.  */

static bool
gimple_try_catch_may_fallthru (gimple stmt)
{
  gimple_stmt_iterator i;

  /* We don't handle GIMPLE_TRY_FINALLY.  */
  gcc_assert (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH);

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (gimple_seq_may_fallthru (gimple_try_eval (stmt)))
    return true;

  i = gsi_start (gimple_try_cleanup (stmt));
  switch (gimple_code (gsi_stmt (i)))
    {
    case GIMPLE_CATCH:
      /* We expect to see a sequence of GIMPLE_CATCH stmts, each with a
	 catch expression and a body.  The whole try/catch may fall
	 through iff any of the catch bodies falls through.  */
      for (; !gsi_end_p (i); gsi_next (&i))
	{
	  if (gimple_seq_may_fallthru (gimple_catch_handler (gsi_stmt (i))))
	    return true;
	}
      return false;

    case GIMPLE_EH_FILTER:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return gimple_seq_may_fallthru (gimple_eh_filter_failure (gsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a GIMPLE_RESX to resume execution after the exception.  So
	 in this case the try/catch never falls through.  */
      return false;
    }
}

/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence cannot fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    default:
      return true;
    }
}

/* Try to determine if we can continue executing the statement
   immediately following STMT.  This guess need not be 100% accurate;
   simply be conservative and return true if we don't know.  This is
   used only to avoid stupidly generating extra code.  If we're wrong,
   we'll just delete the extra code later.  */

bool
gimple_stmt_may_fallthru (gimple stmt)
{
  if (!stmt)
    return true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
    case GIMPLE_RETURN:
    case GIMPLE_RESX:
      /* Easy cases.  If the last statement of the seq implies
	 control transfer, then we can't fall through.  */
      return false;

    case GIMPLE_SWITCH:
      /* Switch has already been lowered and represents a branch
	 to a selected label and hence can't fall through.  */
      return false;

    case GIMPLE_COND:
      /* GIMPLE_COND's are already lowered into a two-way branch.  They
	 can't fall through.  */
      return false;

    case GIMPLE_BIND:
      return gimple_seq_may_fallthru (gimple_bind_body (stmt));

    case GIMPLE_TRY:
      if (gimple_try_kind (stmt) == GIMPLE_TRY_CATCH)
	return gimple_try_catch_may_fallthru (stmt);

      /* It must be a GIMPLE_TRY_FINALLY.  */

      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (gimple_seq_may_fallthru (gimple_try_eval (stmt))
	      && gimple_seq_may_fallthru (gimple_try_cleanup (stmt)));

    case GIMPLE_EH_ELSE:
      return (gimple_seq_may_fallthru (gimple_eh_else_n_body (stmt))
	      || gimple_seq_may_fallthru (gimple_eh_else_e_body (stmt)));

    case GIMPLE_CALL:
      /* Functions that do not return do not fall through.  */
      return (gimple_call_flags (stmt) & ECF_NORETURN) == 0;

    default:
      return true;
    }
}

/* Same as gimple_stmt_may_fallthru, but for the gimple sequence SEQ.  */

bool
gimple_seq_may_fallthru (gimple_seq seq)
{
  return gimple_stmt_may_fallthru (gimple_seq_last_stmt (seq));
}

/* Lower a GIMPLE_RETURN GSI.  DATA is passed through the recursion.  */

static void
lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple t;
  int i;
  return_statements_t tmp_rs;

  /* Match this up with an existing return statement that's been created.  */
  for (i = VEC_length (return_statements_t, data->return_statements) - 1;
       i >= 0; i--)
    {
      tmp_rs = *VEC_index (return_statements_t, data->return_statements, i);

      if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
	{
	  /* Remove the line number from the representative return statement.
	     It now fills in for many such returns.  Failure to remove this
	     will result in incorrect results for coverage analysis.  */
	  gimple_set_location (tmp_rs.stmt, UNKNOWN_LOCATION);

	  goto found;
	}
    }

  /* Not found.  Create a new label and record the return statement.  */
  tmp_rs.label = create_artificial_label (cfun->function_end_locus);
  tmp_rs.stmt = stmt;
  VEC_safe_push (return_statements_t, heap, data->return_statements, &tmp_rs);

  /* Generate a goto statement and remove the return statement.  */
 found:
  /* When not optimizing, make sure user returns are preserved.  */
  if (!optimize && gimple_has_location (stmt))
    DECL_ARTIFICIAL (tmp_rs.label) = 0;
  t = gimple_build_goto (tmp_rs.label);
  gimple_set_location (t, gimple_location (stmt));
  gimple_set_block (t, gimple_block (stmt));
  gsi_insert_before (gsi, t, GSI_SAME_STMT);
  gsi_remove (gsi, false);
}
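
/* A made-up before/after sketch: two occurrences of

       return D.1234;

   in the body are each rewritten into "goto <retlab>;", and the single
   representative

       <retlab>:
       return D.1234;

   is appended at the end of the function by lower_function_body.  The
   representative's location is cleared so that coverage analysis does not
   attribute every merged return to one source line.  */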

/* Lower a __builtin_setjmp GSI.

   __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.

   It is lowered into 3 other builtins, namely __builtin_setjmp_setup,
   __builtin_setjmp_dispatcher and __builtin_setjmp_receiver, but with
   __builtin_setjmp_dispatcher shared among all the instances; that's
   why it is only emitted at the end by lower_function_body.

   After full lowering, the body of the function should look like:

    {
      [...]

      __builtin_setjmp_setup (&buf, &<D1847>);
      D.1844 = 0;
      goto <D1846>;
      <D1847>:;
      __builtin_setjmp_receiver (&<D1847>);
      D.1844 = 1;
      <D1846>:;
      if (D.1844 == 0) goto <D1848>; else goto <D1849>;

      [...]

      __builtin_setjmp_setup (&buf, &<D2847>);
      D.2844 = 0;
      goto <D2846>;
      <D2847>:;
      __builtin_setjmp_receiver (&<D2847>);
      D.2844 = 1;
      <D2846>:;
      if (D.2844 == 0) goto <D2848>; else goto <D2849>;

      [...]

      <D3853>: [non-local];
      setjmpvar.0 = __builtin_setjmp_dispatcher (&<D3853>);
      goto setjmpvar.0;
    }

   The dispatcher block will be both the unique destination of all the
   abnormal call edges and the unique source of all the abnormal edges
   to the receivers, thus keeping the complexity explosion localized.  */

static void
lower_builtin_setjmp (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree cont_label = create_artificial_label (loc);
  tree next_label = create_artificial_label (loc);
  tree dest, t, arg;
  gimple g;

  /* NEXT_LABEL is the label __builtin_longjmp will jump to.  Its address is
     passed to both __builtin_setjmp_setup and __builtin_setjmp_receiver.  */
  FORCED_LABEL (next_label) = 1;

  dest = gimple_call_lhs (stmt);

  /* Build '__builtin_setjmp_setup (BUF, NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_SETUP);
  g = gimple_build_call (t, 2, gimple_call_arg (stmt, 0), arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 0' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, build_zero_cst (TREE_TYPE (dest)));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'goto CONT_LABEL' and insert.  */
  g = gimple_build_goto (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'NEXT_LABEL:' and insert.  */
  g = gimple_build_label (next_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build '__builtin_setjmp_receiver (NEXT_LABEL)' and insert.  */
  arg = build_addr (next_label, current_function_decl);
  t = builtin_decl_implicit (BUILT_IN_SETJMP_RECEIVER);
  g = gimple_build_call (t, 1, arg);
  gimple_set_location (g, loc);
  gimple_set_block (g, gimple_block (stmt));
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Build 'DEST = 1' and insert.  */
  if (dest)
    {
      g = gimple_build_assign (dest, fold_convert_loc (loc, TREE_TYPE (dest),
						       integer_one_node));
      gimple_set_location (g, loc);
      gimple_set_block (g, gimple_block (stmt));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }

  /* Build 'CONT_LABEL:' and insert.  */
  g = gimple_build_label (cont_label);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);

  /* Remove the call to __builtin_setjmp.  */
  gsi_remove (gsi, false);
}

/* Record the variables in VARS into function FN.  */

void
record_vars_into (tree vars, tree fn)
{
  if (fn != current_function_decl)
    push_cfun (DECL_STRUCT_FUNCTION (fn));

  for (; vars; vars = DECL_CHAIN (vars))
    {
      tree var = vars;

      /* BIND_EXPRs also contain function/type/constant declarations
	 that we don't need to care about.  */
      if (TREE_CODE (var) != VAR_DECL)
	continue;

      /* Nothing to do in this case.  */
      if (DECL_EXTERNAL (var))
	continue;

      /* Record the variable.  */
      add_local_decl (cfun, var);
      if (gimple_referenced_vars (cfun))
	add_referenced_var (var);
    }

  if (fn != current_function_decl)
    pop_cfun ();
}

/* Record the variables in VARS into current_function_decl.  */

void
record_vars (tree vars)
{
  record_vars_into (vars, current_function_decl);
}