2 Copyright (C) 2001-2016 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <aoliva@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
30 #include "tree-pass.h"
33 #include "tree-pretty-print.h"
34 #include "diagnostic-core.h"
35 #include "gimple-predict.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
39 #include "tree-inline.h"
40 #include "langhooks.h"
42 #include "tree-iterator.h"
44 #include "gimple-fold.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "gimple-walk.h"
51 #include "tree-into-ssa.h"
56 #include "value-prof.h"
59 #include "tree-chkp.h"
62 /* I'm not real happy about this, but we need to handle gimple and
65 /* Inlining, Cloning, Versioning, Parallelization
67 Inlining: a function body is duplicated, but the PARM_DECLs are
68 remapped into VAR_DECLs, and non-void RETURN_EXPRs become
69 MODIFY_EXPRs that store to a dedicated returned-value variable.
70 The duplicated eh_region info of the copy will later be appended
71 to the info for the caller; the eh_region info in copied throwing
72 statements and RESX statements are adjusted accordingly.
74 Cloning: (only in C++) We have one body for a con/de/structor, and
75 multiple function decls, each with a unique parameter list.
76 Duplicate the body, using the given splay tree; some parameters
77 will become constants (like 0 or 1).
79 Versioning: a function body is duplicated and the result is a new
80 function rather than into blocks of an existing function as with
81 inlining. Some parameters will become constants.
83 Parallelization: a region of a function is duplicated resulting in
84 a new function. Variables may be replaced with complex expressions
85 to enable shared variable semantics.
87 All of these will simultaneously lookup any callgraph edges. If
88 we're going to inline the duplicated function body, and the given
89 function has some cloned callgraph nodes (one for each place this
90 function will be inlined) those callgraph edges will be duplicated.
91 If we're cloning the body, those callgraph edges will be
92 updated to point into the new body. (Note that the original
93 callgraph node and edge list will not be altered.)
95 See the CALL_EXPR handling case in copy_tree_body_r (). */
99 o In order to make inlining-on-trees work, we pessimized
100 function-local static constants. In particular, they are now
101 always output, even when not addressed. Fix this by treating
102 function-local static constants just like global static
103 constants; the back-end already knows not to output them if they
106 o Provide heuristics to clamp inlining of recursive template
110 /* Weights that estimate_num_insns uses to estimate the size of the
113 eni_weights eni_size_weights;
115 /* Weights that estimate_num_insns uses to estimate the time necessary
116 to execute the produced code. */
118 eni_weights eni_time_weights;
122 static tree declare_return_variable (copy_body_data *, tree, tree, tree,
124 static void remap_block (tree *, copy_body_data *);
125 static void copy_bind_expr (tree *, int *, copy_body_data *);
126 static void declare_inline_vars (tree, tree);
127 static void remap_save_expr (tree *, hash_map<tree, tree> *, int *);
128 static void prepend_lexical_block (tree current_block, tree new_block);
129 static tree copy_decl_to_var (tree, copy_body_data *);
130 static tree copy_result_decl_to_var (tree, copy_body_data *);
131 static tree copy_decl_maybe_to_var (tree, copy_body_data *);
132 static gimple_seq remap_gimple_stmt (gimple *, copy_body_data *);
133 static bool delete_unreachable_blocks_update_callgraph (copy_body_data *id);
134 static void insert_init_stmt (copy_body_data *, basic_block, gimple *);
136 /* Insert a tree->tree mapping for ID. Despite the name suggests
137 that the trees should be variables, it is used for more than that. */
140 insert_decl_map (copy_body_data *id, tree key, tree value)
142 id->decl_map->put (key, value);
144 /* Always insert an identity map as well. If we see this same new
145 node again, we won't want to duplicate it a second time. */
147 id->decl_map->put (value, value);
150 /* Insert a tree->tree mapping for ID. This is only used for
154 insert_debug_decl_map (copy_body_data *id, tree key, tree value)
156 if (!gimple_in_ssa_p (id->src_cfun))
159 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
162 if (!target_for_debug_bind (key))
165 gcc_assert (TREE_CODE (key) == PARM_DECL);
166 gcc_assert (TREE_CODE (value) == VAR_DECL);
169 id->debug_map = new hash_map<tree, tree>;
171 id->debug_map->put (key, value);
174 /* If nonzero, we're remapping the contents of inlined debug
175 statements. If negative, an error has occurred, such as a
176 reference to a variable that isn't available in the inlined
178 static int processing_debug_stmt = 0;
180 /* Construct new SSA name for old NAME. ID is the inline context. */
183 remap_ssa_name (tree name, copy_body_data *id)
188 gcc_assert (TREE_CODE (name) == SSA_NAME);
190 n = id->decl_map->get (name);
192 return unshare_expr (*n);
194 if (processing_debug_stmt)
196 if (SSA_NAME_IS_DEFAULT_DEF (name)
197 && TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
198 && id->entry_bb == NULL
199 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
201 tree vexpr = make_node (DEBUG_EXPR_DECL);
203 gimple_stmt_iterator gsi;
204 tree val = SSA_NAME_VAR (name);
206 n = id->decl_map->get (val);
209 if (TREE_CODE (val) != PARM_DECL)
211 processing_debug_stmt = -1;
214 def_temp = gimple_build_debug_source_bind (vexpr, val, NULL);
215 DECL_ARTIFICIAL (vexpr) = 1;
216 TREE_TYPE (vexpr) = TREE_TYPE (name);
217 DECL_MODE (vexpr) = DECL_MODE (SSA_NAME_VAR (name));
218 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
219 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
223 processing_debug_stmt = -1;
227 /* Remap anonymous SSA names or SSA names of anonymous decls. */
228 var = SSA_NAME_VAR (name);
230 || (!SSA_NAME_IS_DEFAULT_DEF (name)
231 && TREE_CODE (var) == VAR_DECL
232 && !VAR_DECL_IS_VIRTUAL_OPERAND (var)
233 && DECL_ARTIFICIAL (var)
234 && DECL_IGNORED_P (var)
235 && !DECL_NAME (var)))
237 struct ptr_info_def *pi;
238 new_tree = make_ssa_name (remap_type (TREE_TYPE (name), id));
239 if (!var && SSA_NAME_IDENTIFIER (name))
240 SET_SSA_NAME_VAR_OR_IDENTIFIER (new_tree, SSA_NAME_IDENTIFIER (name));
241 insert_decl_map (id, name, new_tree);
242 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
243 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
244 /* At least IPA points-to info can be directly transferred. */
245 if (id->src_cfun->gimple_df
246 && id->src_cfun->gimple_df->ipa_pta
247 && POINTER_TYPE_P (TREE_TYPE (name))
248 && (pi = SSA_NAME_PTR_INFO (name))
251 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
257 /* Do not set DEF_STMT yet as statement is not copied yet. We do that
259 new_tree = remap_decl (var, id);
261 /* We might've substituted constant or another SSA_NAME for
264 Replace the SSA name representing RESULT_DECL by variable during
265 inlining: this saves us from need to introduce PHI node in a case
266 return value is just partly initialized. */
267 if ((TREE_CODE (new_tree) == VAR_DECL || TREE_CODE (new_tree) == PARM_DECL)
268 && (!SSA_NAME_VAR (name)
269 || TREE_CODE (SSA_NAME_VAR (name)) != RESULT_DECL
270 || !id->transform_return_to_modify))
272 struct ptr_info_def *pi;
273 new_tree = make_ssa_name (new_tree);
274 insert_decl_map (id, name, new_tree);
275 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_tree)
276 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name);
277 /* At least IPA points-to info can be directly transferred. */
278 if (id->src_cfun->gimple_df
279 && id->src_cfun->gimple_df->ipa_pta
280 && POINTER_TYPE_P (TREE_TYPE (name))
281 && (pi = SSA_NAME_PTR_INFO (name))
284 struct ptr_info_def *new_pi = get_ptr_info (new_tree);
287 if (SSA_NAME_IS_DEFAULT_DEF (name))
289 /* By inlining function having uninitialized variable, we might
290 extend the lifetime (variable might get reused). This cause
291 ICE in the case we end up extending lifetime of SSA name across
292 abnormal edge, but also increase register pressure.
294 We simply initialize all uninitialized vars by 0 except
295 for case we are inlining to very first BB. We can avoid
296 this for all BBs that are not inside strongly connected
297 regions of the CFG, but this is expensive to test. */
299 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name)
300 && (!SSA_NAME_VAR (name)
301 || TREE_CODE (SSA_NAME_VAR (name)) != PARM_DECL)
302 && (id->entry_bb != EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun),
304 || EDGE_COUNT (id->entry_bb->preds) != 1))
306 gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
308 tree zero = build_zero_cst (TREE_TYPE (new_tree));
310 init_stmt = gimple_build_assign (new_tree, zero);
311 gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
312 SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
316 SSA_NAME_DEF_STMT (new_tree) = gimple_build_nop ();
317 set_ssa_default_def (cfun, SSA_NAME_VAR (new_tree), new_tree);
322 insert_decl_map (id, name, new_tree);
326 /* Remap DECL during the copying of the BLOCK tree for the function. */
329 remap_decl (tree decl, copy_body_data *id)
333 /* We only remap local variables in the current function. */
335 /* See if we have remapped this declaration. */
337 n = id->decl_map->get (decl);
339 if (!n && processing_debug_stmt)
341 processing_debug_stmt = -1;
345 /* When remapping a type within copy_gimple_seq_and_replace_locals, all
346 necessary DECLs have already been remapped and we do not want to duplicate
347 a decl coming from outside of the sequence we are copying. */
349 && id->prevent_decl_creation_for_types
350 && id->remapping_type_depth > 0
351 && (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL))
354 /* If we didn't already have an equivalent for this declaration, create one
358 /* Make a copy of the variable or label. */
359 tree t = id->copy_decl (decl, id);
361 /* Remember it, so that if we encounter this local entity again
362 we can reuse this copy. Do this early because remap_type may
363 need this decl for TYPE_STUB_DECL. */
364 insert_decl_map (id, decl, t);
369 /* Remap types, if necessary. */
370 TREE_TYPE (t) = remap_type (TREE_TYPE (t), id);
371 if (TREE_CODE (t) == TYPE_DECL)
372 DECL_ORIGINAL_TYPE (t) = remap_type (DECL_ORIGINAL_TYPE (t), id);
374 /* Remap sizes as necessary. */
375 walk_tree (&DECL_SIZE (t), copy_tree_body_r, id, NULL);
376 walk_tree (&DECL_SIZE_UNIT (t), copy_tree_body_r, id, NULL);
378 /* If fields, do likewise for offset and qualifier. */
379 if (TREE_CODE (t) == FIELD_DECL)
381 walk_tree (&DECL_FIELD_OFFSET (t), copy_tree_body_r, id, NULL);
382 if (TREE_CODE (DECL_CONTEXT (t)) == QUAL_UNION_TYPE)
383 walk_tree (&DECL_QUALIFIER (t), copy_tree_body_r, id, NULL);
389 if (id->do_not_unshare)
392 return unshare_expr (*n);
396 remap_type_1 (tree type, copy_body_data *id)
400 /* We do need a copy. build and register it now. If this is a pointer or
401 reference type, remap the designated type and make a new pointer or
403 if (TREE_CODE (type) == POINTER_TYPE)
405 new_tree = build_pointer_type_for_mode (remap_type (TREE_TYPE (type), id),
407 TYPE_REF_CAN_ALIAS_ALL (type));
408 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
409 new_tree = build_type_attribute_qual_variant (new_tree,
410 TYPE_ATTRIBUTES (type),
412 insert_decl_map (id, type, new_tree);
415 else if (TREE_CODE (type) == REFERENCE_TYPE)
417 new_tree = build_reference_type_for_mode (remap_type (TREE_TYPE (type), id),
419 TYPE_REF_CAN_ALIAS_ALL (type));
420 if (TYPE_ATTRIBUTES (type) || TYPE_QUALS (type))
421 new_tree = build_type_attribute_qual_variant (new_tree,
422 TYPE_ATTRIBUTES (type),
424 insert_decl_map (id, type, new_tree);
428 new_tree = copy_node (type);
430 insert_decl_map (id, type, new_tree);
432 /* This is a new type, not a copy of an old type. Need to reassociate
433 variants. We can handle everything except the main variant lazily. */
434 t = TYPE_MAIN_VARIANT (type);
437 t = remap_type (t, id);
438 TYPE_MAIN_VARIANT (new_tree) = t;
439 TYPE_NEXT_VARIANT (new_tree) = TYPE_NEXT_VARIANT (t);
440 TYPE_NEXT_VARIANT (t) = new_tree;
444 TYPE_MAIN_VARIANT (new_tree) = new_tree;
445 TYPE_NEXT_VARIANT (new_tree) = NULL;
448 if (TYPE_STUB_DECL (type))
449 TYPE_STUB_DECL (new_tree) = remap_decl (TYPE_STUB_DECL (type), id);
451 /* Lazily create pointer and reference types. */
452 TYPE_POINTER_TO (new_tree) = NULL;
453 TYPE_REFERENCE_TO (new_tree) = NULL;
455 /* Copy all types that may contain references to local variables; be sure to
456 preserve sharing in between type and its main variant when possible. */
457 switch (TREE_CODE (new_tree))
461 case FIXED_POINT_TYPE:
464 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
466 gcc_checking_assert (TYPE_MIN_VALUE (type) == TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (type)));
467 gcc_checking_assert (TYPE_MAX_VALUE (type) == TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (type)));
469 TYPE_MIN_VALUE (new_tree) = TYPE_MIN_VALUE (TYPE_MAIN_VARIANT (new_tree));
470 TYPE_MAX_VALUE (new_tree) = TYPE_MAX_VALUE (TYPE_MAIN_VARIANT (new_tree));
474 t = TYPE_MIN_VALUE (new_tree);
475 if (t && TREE_CODE (t) != INTEGER_CST)
476 walk_tree (&TYPE_MIN_VALUE (new_tree), copy_tree_body_r, id, NULL);
478 t = TYPE_MAX_VALUE (new_tree);
479 if (t && TREE_CODE (t) != INTEGER_CST)
480 walk_tree (&TYPE_MAX_VALUE (new_tree), copy_tree_body_r, id, NULL);
485 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
486 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
487 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
489 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
490 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
491 && TYPE_ARG_TYPES (type) == TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (type)))
492 TYPE_ARG_TYPES (new_tree) = TYPE_ARG_TYPES (TYPE_MAIN_VARIANT (new_tree));
494 walk_tree (&TYPE_ARG_TYPES (new_tree), copy_tree_body_r, id, NULL);
498 if (TYPE_MAIN_VARIANT (new_tree) != new_tree
499 && TREE_TYPE (type) == TREE_TYPE (TYPE_MAIN_VARIANT (type)))
500 TREE_TYPE (new_tree) = TREE_TYPE (TYPE_MAIN_VARIANT (new_tree));
502 TREE_TYPE (new_tree) = remap_type (TREE_TYPE (new_tree), id);
504 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
506 gcc_checking_assert (TYPE_DOMAIN (type) == TYPE_DOMAIN (TYPE_MAIN_VARIANT (type)));
507 TYPE_DOMAIN (new_tree) = TYPE_DOMAIN (TYPE_MAIN_VARIANT (new_tree));
510 TYPE_DOMAIN (new_tree) = remap_type (TYPE_DOMAIN (new_tree), id);
515 case QUAL_UNION_TYPE:
516 if (TYPE_MAIN_VARIANT (type) != type
517 && TYPE_FIELDS (type) == TYPE_FIELDS (TYPE_MAIN_VARIANT (type)))
518 TYPE_FIELDS (new_tree) = TYPE_FIELDS (TYPE_MAIN_VARIANT (new_tree));
523 for (f = TYPE_FIELDS (new_tree); f ; f = DECL_CHAIN (f))
525 t = remap_decl (f, id);
526 DECL_CONTEXT (t) = new_tree;
530 TYPE_FIELDS (new_tree) = nreverse (nf);
536 /* Shouldn't have been thought variable sized. */
540 /* All variants of type share the same size, so use the already remaped data. */
541 if (TYPE_MAIN_VARIANT (new_tree) != new_tree)
543 gcc_checking_assert (TYPE_SIZE (type) == TYPE_SIZE (TYPE_MAIN_VARIANT (type)));
544 gcc_checking_assert (TYPE_SIZE_UNIT (type) == TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type)));
546 TYPE_SIZE (new_tree) = TYPE_SIZE (TYPE_MAIN_VARIANT (new_tree));
547 TYPE_SIZE_UNIT (new_tree) = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (new_tree));
551 walk_tree (&TYPE_SIZE (new_tree), copy_tree_body_r, id, NULL);
552 walk_tree (&TYPE_SIZE_UNIT (new_tree), copy_tree_body_r, id, NULL);
559 remap_type (tree type, copy_body_data *id)
567 /* See if we have remapped this type. */
568 node = id->decl_map->get (type);
572 /* The type only needs remapping if it's variably modified. */
573 if (! variably_modified_type_p (type, id->src_fn))
575 insert_decl_map (id, type, type);
579 id->remapping_type_depth++;
580 tmp = remap_type_1 (type, id);
581 id->remapping_type_depth--;
586 /* Decide if DECL can be put into BLOCK_NONLOCAL_VARs. */
589 can_be_nonlocal (tree decl, copy_body_data *id)
591 /* We can not duplicate function decls. */
592 if (TREE_CODE (decl) == FUNCTION_DECL)
595 /* Local static vars must be non-local or we get multiple declaration
597 if (TREE_CODE (decl) == VAR_DECL
598 && !auto_var_in_fn_p (decl, id->src_fn))
605 remap_decls (tree decls, vec<tree, va_gc> **nonlocalized_list,
609 tree new_decls = NULL_TREE;
611 /* Remap its variables. */
612 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
616 if (can_be_nonlocal (old_var, id))
618 /* We need to add this variable to the local decls as otherwise
619 nothing else will do so. */
620 if (TREE_CODE (old_var) == VAR_DECL
621 && ! DECL_EXTERNAL (old_var)
623 add_local_decl (cfun, old_var);
624 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
625 && !DECL_IGNORED_P (old_var)
626 && nonlocalized_list)
627 vec_safe_push (*nonlocalized_list, old_var);
631 /* Remap the variable. */
632 new_var = remap_decl (old_var, id);
634 /* If we didn't remap this variable, we can't mess with its
635 TREE_CHAIN. If we remapped this variable to the return slot, it's
636 already declared somewhere else, so don't declare it here. */
638 if (new_var == id->retvar)
642 if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
643 && !DECL_IGNORED_P (old_var)
644 && nonlocalized_list)
645 vec_safe_push (*nonlocalized_list, old_var);
649 gcc_assert (DECL_P (new_var));
650 DECL_CHAIN (new_var) = new_decls;
653 /* Also copy value-expressions. */
654 if (TREE_CODE (new_var) == VAR_DECL
655 && DECL_HAS_VALUE_EXPR_P (new_var))
657 tree tem = DECL_VALUE_EXPR (new_var);
658 bool old_regimplify = id->regimplify;
659 id->remapping_type_depth++;
660 walk_tree (&tem, copy_tree_body_r, id, NULL);
661 id->remapping_type_depth--;
662 id->regimplify = old_regimplify;
663 SET_DECL_VALUE_EXPR (new_var, tem);
668 return nreverse (new_decls);
671 /* Copy the BLOCK to contain remapped versions of the variables
672 therein. And hook the new block into the block-tree. */
675 remap_block (tree *block, copy_body_data *id)
680 /* Make the new block. */
682 new_block = make_node (BLOCK);
683 TREE_USED (new_block) = TREE_USED (old_block);
684 BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
685 BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
686 BLOCK_NONLOCALIZED_VARS (new_block)
687 = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
690 /* Remap its variables. */
691 BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
692 &BLOCK_NONLOCALIZED_VARS (new_block),
695 if (id->transform_lang_insert_block)
696 id->transform_lang_insert_block (new_block);
698 /* Remember the remapped block. */
699 insert_decl_map (id, old_block, new_block);
702 /* Copy the whole block tree and root it in id->block. */
704 remap_blocks (tree block, copy_body_data *id)
707 tree new_tree = block;
712 remap_block (&new_tree, id);
713 gcc_assert (new_tree != block);
714 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
715 prepend_lexical_block (new_tree, remap_blocks (t, id));
716 /* Blocks are in arbitrary order, but make things slightly prettier and do
717 not swap order when producing a copy. */
718 BLOCK_SUBBLOCKS (new_tree) = blocks_nreverse (BLOCK_SUBBLOCKS (new_tree));
722 /* Remap the block tree rooted at BLOCK to nothing. */
724 remap_blocks_to_null (tree block, copy_body_data *id)
727 insert_decl_map (id, block, NULL_TREE);
728 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
729 remap_blocks_to_null (t, id);
733 copy_statement_list (tree *tp)
735 tree_stmt_iterator oi, ni;
738 new_tree = alloc_stmt_list ();
739 ni = tsi_start (new_tree);
740 oi = tsi_start (*tp);
741 TREE_TYPE (new_tree) = TREE_TYPE (*tp);
744 for (; !tsi_end_p (oi); tsi_next (&oi))
746 tree stmt = tsi_stmt (oi);
747 if (TREE_CODE (stmt) == STATEMENT_LIST)
748 /* This copy is not redundant; tsi_link_after will smash this
749 STATEMENT_LIST into the end of the one we're building, and we
750 don't want to do that with the original. */
751 copy_statement_list (&stmt);
752 tsi_link_after (&ni, stmt, TSI_CONTINUE_LINKING);
757 copy_bind_expr (tree *tp, int *walk_subtrees, copy_body_data *id)
759 tree block = BIND_EXPR_BLOCK (*tp);
760 /* Copy (and replace) the statement. */
761 copy_tree_r (tp, walk_subtrees, NULL);
764 remap_block (&block, id);
765 BIND_EXPR_BLOCK (*tp) = block;
768 if (BIND_EXPR_VARS (*tp))
769 /* This will remap a lot of the same decls again, but this should be
771 BIND_EXPR_VARS (*tp) = remap_decls (BIND_EXPR_VARS (*tp), NULL, id);
775 /* Create a new gimple_seq by remapping all the statements in BODY
776 using the inlining information in ID. */
779 remap_gimple_seq (gimple_seq body, copy_body_data *id)
781 gimple_stmt_iterator si;
782 gimple_seq new_body = NULL;
784 for (si = gsi_start (body); !gsi_end_p (si); gsi_next (&si))
786 gimple_seq new_stmts = remap_gimple_stmt (gsi_stmt (si), id);
787 gimple_seq_add_seq (&new_body, new_stmts);
794 /* Copy a GIMPLE_BIND statement STMT, remapping all the symbols in its
795 block using the mapping information in ID. */
798 copy_gimple_bind (gbind *stmt, copy_body_data *id)
801 tree new_block, new_vars;
802 gimple_seq body, new_body;
804 /* Copy the statement. Note that we purposely don't use copy_stmt
805 here because we need to remap statements as we copy. */
806 body = gimple_bind_body (stmt);
807 new_body = remap_gimple_seq (body, id);
809 new_block = gimple_bind_block (stmt);
811 remap_block (&new_block, id);
813 /* This will remap a lot of the same decls again, but this should be
815 new_vars = gimple_bind_vars (stmt);
817 new_vars = remap_decls (new_vars, NULL, id);
819 new_bind = gimple_build_bind (new_vars, new_body, new_block);
824 /* Return true if DECL is a parameter or a SSA_NAME for a parameter. */
829 if (TREE_CODE (decl) == SSA_NAME)
831 decl = SSA_NAME_VAR (decl);
836 return (TREE_CODE (decl) == PARM_DECL);
839 /* Remap the dependence CLIQUE from the source to the destination function
840 as specified in ID. */
842 static unsigned short
843 remap_dependence_clique (copy_body_data *id, unsigned short clique)
845 if (clique == 0 || processing_debug_stmt)
847 if (!id->dependence_map)
848 id->dependence_map = new hash_map<dependence_hash, unsigned short>;
850 unsigned short &newc = id->dependence_map->get_or_insert (clique, &existed);
852 newc = ++cfun->last_clique;
856 /* Remap the GIMPLE operand pointed to by *TP. DATA is really a
857 'struct walk_stmt_info *'. DATA->INFO is a 'copy_body_data *'.
858 WALK_SUBTREES is used to indicate walk_gimple_op whether to keep
859 recursing into the children nodes of *TP. */
862 remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
864 struct walk_stmt_info *wi_p = (struct walk_stmt_info *) data;
865 copy_body_data *id = (copy_body_data *) wi_p->info;
866 tree fn = id->src_fn;
868 if (TREE_CODE (*tp) == SSA_NAME)
870 *tp = remap_ssa_name (*tp, id);
874 else if (auto_var_in_fn_p (*tp, fn))
876 /* Local variables and labels need to be replaced by equivalent
877 variables. We don't want to copy static variables; there's
878 only one of those, no matter how many times we inline the
879 containing function. Similarly for globals from an outer
883 /* Remap the declaration. */
884 new_decl = remap_decl (*tp, id);
885 gcc_assert (new_decl);
886 /* Replace this variable with the copy. */
887 STRIP_TYPE_NOPS (new_decl);
888 /* ??? The C++ frontend uses void * pointer zero to initialize
889 any other type. This confuses the middle-end type verification.
890 As cloned bodies do not go through gimplification again the fixup
891 there doesn't trigger. */
892 if (TREE_CODE (new_decl) == INTEGER_CST
893 && !useless_type_conversion_p (TREE_TYPE (*tp), TREE_TYPE (new_decl)))
894 new_decl = fold_convert (TREE_TYPE (*tp), new_decl);
898 else if (TREE_CODE (*tp) == STATEMENT_LIST)
900 else if (TREE_CODE (*tp) == SAVE_EXPR)
902 else if (TREE_CODE (*tp) == LABEL_DECL
903 && (!DECL_CONTEXT (*tp)
904 || decl_function_context (*tp) == id->src_fn))
905 /* These may need to be remapped for EH handling. */
906 *tp = remap_decl (*tp, id);
907 else if (TREE_CODE (*tp) == FIELD_DECL)
909 /* If the enclosing record type is variably_modified_type_p, the field
910 has already been remapped. Otherwise, it need not be. */
911 tree *n = id->decl_map->get (*tp);
916 else if (TYPE_P (*tp))
917 /* Types may need remapping as well. */
918 *tp = remap_type (*tp, id);
919 else if (CONSTANT_CLASS_P (*tp))
921 /* If this is a constant, we have to copy the node iff the type
922 will be remapped. copy_tree_r will not copy a constant. */
923 tree new_type = remap_type (TREE_TYPE (*tp), id);
925 if (new_type == TREE_TYPE (*tp))
928 else if (TREE_CODE (*tp) == INTEGER_CST)
929 *tp = wide_int_to_tree (new_type, *tp);
932 *tp = copy_node (*tp);
933 TREE_TYPE (*tp) = new_type;
938 /* Otherwise, just copy the node. Note that copy_tree_r already
939 knows not to copy VAR_DECLs, etc., so this is safe. */
941 if (TREE_CODE (*tp) == MEM_REF)
943 /* We need to re-canonicalize MEM_REFs from inline substitutions
944 that can happen when a pointer argument is an ADDR_EXPR.
945 Recurse here manually to allow that. */
946 tree ptr = TREE_OPERAND (*tp, 0);
947 tree type = remap_type (TREE_TYPE (*tp), id);
949 walk_tree (&ptr, remap_gimple_op_r, data, NULL);
950 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
951 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
952 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
953 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
954 if (MR_DEPENDENCE_CLIQUE (old) != 0)
956 MR_DEPENDENCE_CLIQUE (*tp)
957 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
958 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
960 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
961 remapped a parameter as the property might be valid only
962 for the parameter itself. */
963 if (TREE_THIS_NOTRAP (old)
964 && (!is_parm (TREE_OPERAND (old, 0))
965 || (!id->transform_parameter && is_parm (ptr))))
966 TREE_THIS_NOTRAP (*tp) = 1;
967 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
972 /* Here is the "usual case". Copy this tree node, and then
973 tweak some special cases. */
974 copy_tree_r (tp, walk_subtrees, NULL);
976 if (TREE_CODE (*tp) != OMP_CLAUSE)
977 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
979 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
981 /* The copied TARGET_EXPR has never been expanded, even if the
982 original node was expanded already. */
983 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
984 TREE_OPERAND (*tp, 3) = NULL_TREE;
986 else if (TREE_CODE (*tp) == ADDR_EXPR)
988 /* Variable substitution need not be simple. In particular,
989 the MEM_REF substitution above. Make sure that
990 TREE_CONSTANT and friends are up-to-date. */
991 int invariant = is_gimple_min_invariant (*tp);
992 walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
993 recompute_tree_invariant_for_addr_expr (*tp);
995 /* If this used to be invariant, but is not any longer,
996 then regimplification is probably needed. */
997 if (invariant && !is_gimple_min_invariant (*tp))
998 id->regimplify = true;
1004 /* Update the TREE_BLOCK for the cloned expr. */
1007 tree new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1008 tree old_block = TREE_BLOCK (*tp);
1012 n = id->decl_map->get (TREE_BLOCK (*tp));
1016 TREE_SET_BLOCK (*tp, new_block);
1019 /* Keep iterating. */
1024 /* Called from copy_body_id via walk_tree. DATA is really a
1025 `copy_body_data *'. */
1028 copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
1030 copy_body_data *id = (copy_body_data *) data;
1031 tree fn = id->src_fn;
1034 /* Begin by recognizing trees that we'll completely rewrite for the
1035 inlining context. Our output for these trees is completely
1036 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1037 into an edge). Further down, we'll handle trees that get
1038 duplicated and/or tweaked. */
1040 /* When requested, RETURN_EXPRs should be transformed to just the
1041 contained MODIFY_EXPR. The branch semantics of the return will
1042 be handled elsewhere by manipulating the CFG rather than a statement. */
1043 if (TREE_CODE (*tp) == RETURN_EXPR && id->transform_return_to_modify)
1045 tree assignment = TREE_OPERAND (*tp, 0);
1047 /* If we're returning something, just turn that into an
1048 assignment into the equivalent of the original RESULT_DECL.
1049 If the "assignment" is just the result decl, the result
1050 decl has already been set (e.g. a recent "foo (&result_decl,
1051 ...)"); just toss the entire RETURN_EXPR. */
1052 if (assignment && TREE_CODE (assignment) == MODIFY_EXPR)
1054 /* Replace the RETURN_EXPR with (a copy of) the
1055 MODIFY_EXPR hanging underneath. */
1056 *tp = copy_node (assignment);
1058 else /* Else the RETURN_EXPR returns no value. */
1061 return (tree) (void *)1;
1064 else if (TREE_CODE (*tp) == SSA_NAME)
1066 *tp = remap_ssa_name (*tp, id);
1071 /* Local variables and labels need to be replaced by equivalent
1072 variables. We don't want to copy static variables; there's only
1073 one of those, no matter how many times we inline the containing
1074 function. Similarly for globals from an outer function. */
1075 else if (auto_var_in_fn_p (*tp, fn))
1079 /* Remap the declaration. */
1080 new_decl = remap_decl (*tp, id);
1081 gcc_assert (new_decl);
1082 /* Replace this variable with the copy. */
1083 STRIP_TYPE_NOPS (new_decl);
1087 else if (TREE_CODE (*tp) == STATEMENT_LIST)
1088 copy_statement_list (tp);
1089 else if (TREE_CODE (*tp) == SAVE_EXPR
1090 || TREE_CODE (*tp) == TARGET_EXPR)
1091 remap_save_expr (tp, id->decl_map, walk_subtrees);
1092 else if (TREE_CODE (*tp) == LABEL_DECL
1093 && (! DECL_CONTEXT (*tp)
1094 || decl_function_context (*tp) == id->src_fn))
1095 /* These may need to be remapped for EH handling. */
1096 *tp = remap_decl (*tp, id);
1097 else if (TREE_CODE (*tp) == BIND_EXPR)
1098 copy_bind_expr (tp, walk_subtrees, id);
1099 /* Types may need remapping as well. */
1100 else if (TYPE_P (*tp))
1101 *tp = remap_type (*tp, id);
1103 /* If this is a constant, we have to copy the node iff the type will be
1104 remapped. copy_tree_r will not copy a constant. */
1105 else if (CONSTANT_CLASS_P (*tp))
1107 tree new_type = remap_type (TREE_TYPE (*tp), id);
1109 if (new_type == TREE_TYPE (*tp))
1112 else if (TREE_CODE (*tp) == INTEGER_CST)
1113 *tp = wide_int_to_tree (new_type, *tp);
1116 *tp = copy_node (*tp);
1117 TREE_TYPE (*tp) = new_type;
1121 /* Otherwise, just copy the node. Note that copy_tree_r already
1122 knows not to copy VAR_DECLs, etc., so this is safe. */
1125 /* Here we handle trees that are not completely rewritten.
1126 First we detect some inlining-induced bogosities for
1128 if (TREE_CODE (*tp) == MODIFY_EXPR
1129 && TREE_OPERAND (*tp, 0) == TREE_OPERAND (*tp, 1)
1130 && (auto_var_in_fn_p (TREE_OPERAND (*tp, 0), fn)))
1132 /* Some assignments VAR = VAR; don't generate any rtl code
1133 and thus don't count as variable modification. Avoid
1134 keeping bogosities like 0 = 0. */
1135 tree decl = TREE_OPERAND (*tp, 0), value;
1138 n = id->decl_map->get (decl);
1142 STRIP_TYPE_NOPS (value);
1143 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1145 *tp = build_empty_stmt (EXPR_LOCATION (*tp));
1146 return copy_tree_body_r (tp, walk_subtrees, data);
1150 else if (TREE_CODE (*tp) == INDIRECT_REF)
1152 /* Get rid of *& from inline substitutions that can happen when a
1153 pointer argument is an ADDR_EXPR. */
1154 tree decl = TREE_OPERAND (*tp, 0);
1155 tree *n = id->decl_map->get (decl);
1158 /* If we happen to get an ADDR_EXPR in n->value, strip
1159 it manually here as we'll eventually get ADDR_EXPRs
1160 which lie about their types pointed to. In this case
1161 build_fold_indirect_ref wouldn't strip the INDIRECT_REF,
1162 but we absolutely rely on that. As fold_indirect_ref
1163 does other useful transformations, try that first, though. */
1164 tree type = TREE_TYPE (*tp);
1165 tree ptr = id->do_not_unshare ? *n : unshare_expr (*n);
1167 *tp = gimple_fold_indirect_ref (ptr);
1170 if (TREE_CODE (ptr) == ADDR_EXPR)
1173 = fold_indirect_ref_1 (EXPR_LOCATION (ptr), type, ptr);
1174 /* ??? We should either assert here or build
1175 a VIEW_CONVERT_EXPR instead of blindly leaking
1176 incompatible types to our IL. */
1178 *tp = TREE_OPERAND (ptr, 0);
1182 *tp = build1 (INDIRECT_REF, type, ptr);
1183 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1184 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1185 TREE_READONLY (*tp) = TREE_READONLY (old);
1186 /* We cannot propagate the TREE_THIS_NOTRAP flag if we
1187 have remapped a parameter as the property might be
1188 valid only for the parameter itself. */
1189 if (TREE_THIS_NOTRAP (old)
1190 && (!is_parm (TREE_OPERAND (old, 0))
1191 || (!id->transform_parameter && is_parm (ptr))))
1192 TREE_THIS_NOTRAP (*tp) = 1;
1199 else if (TREE_CODE (*tp) == MEM_REF)
1201 /* We need to re-canonicalize MEM_REFs from inline substitutions
1202 that can happen when a pointer argument is an ADDR_EXPR.
1203 Recurse here manually to allow that. */
1204 tree ptr = TREE_OPERAND (*tp, 0);
1205 tree type = remap_type (TREE_TYPE (*tp), id);
1207 walk_tree (&ptr, copy_tree_body_r, data, NULL);
1208 *tp = fold_build2 (MEM_REF, type, ptr, TREE_OPERAND (*tp, 1));
1209 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
1210 TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
1211 TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
1212 if (MR_DEPENDENCE_CLIQUE (old) != 0)
1214 MR_DEPENDENCE_CLIQUE (*tp)
1215 = remap_dependence_clique (id, MR_DEPENDENCE_CLIQUE (old));
1216 MR_DEPENDENCE_BASE (*tp) = MR_DEPENDENCE_BASE (old);
1218 /* We cannot propagate the TREE_THIS_NOTRAP flag if we have
1219 remapped a parameter as the property might be valid only
1220 for the parameter itself. */
1221 if (TREE_THIS_NOTRAP (old)
1222 && (!is_parm (TREE_OPERAND (old, 0))
1223 || (!id->transform_parameter && is_parm (ptr))))
1224 TREE_THIS_NOTRAP (*tp) = 1;
1225 REF_REVERSE_STORAGE_ORDER (*tp) = REF_REVERSE_STORAGE_ORDER (old);
1230 /* Here is the "usual case". Copy this tree node, and then
1231 tweak some special cases. */
1232 copy_tree_r (tp, walk_subtrees, NULL);
1234 /* If EXPR has block defined, map it to newly constructed block.
1235 When inlining we want EXPRs without block appear in the block
1236 of function call if we are not remapping a type. */
1239 new_block = id->remapping_type_depth == 0 ? id->block : NULL;
1240 if (TREE_BLOCK (*tp))
1243 n = id->decl_map->get (TREE_BLOCK (*tp));
1247 TREE_SET_BLOCK (*tp, new_block);
1250 if (TREE_CODE (*tp) != OMP_CLAUSE)
1251 TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
1253 /* The copied TARGET_EXPR has never been expanded, even if the
1254 original node was expanded already. */
1255 if (TREE_CODE (*tp) == TARGET_EXPR && TREE_OPERAND (*tp, 3))
1257 TREE_OPERAND (*tp, 1) = TREE_OPERAND (*tp, 3);
1258 TREE_OPERAND (*tp, 3) = NULL_TREE;
1261 /* Variable substitution need not be simple. In particular, the
1262 INDIRECT_REF substitution above. Make sure that TREE_CONSTANT
1263 and friends are up-to-date. */
1264 else if (TREE_CODE (*tp) == ADDR_EXPR)
1266 int invariant = is_gimple_min_invariant (*tp);
1267 walk_tree (&TREE_OPERAND (*tp, 0), copy_tree_body_r, id, NULL);
1269 /* Handle the case where we substituted an INDIRECT_REF
1270 into the operand of the ADDR_EXPR. */
1271 if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
1273 tree t = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
1274 if (TREE_TYPE (t) != TREE_TYPE (*tp))
1275 t = fold_convert (remap_type (TREE_TYPE (*tp), id), t);
1279 recompute_tree_invariant_for_addr_expr (*tp);
1281 /* If this used to be invariant, but is not any longer,
1282 then regimplification is probably needed. */
1283 if (invariant && !is_gimple_min_invariant (*tp))
1284 id->regimplify = true;
1290 /* Keep iterating. */
1294 /* Helper for remap_gimple_stmt. Given an EH region number for the
1295 source function, map that to the duplicate EH region number in
1296 the destination function. */
1299 remap_eh_region_nr (int old_nr, copy_body_data *id)
1301 eh_region old_r, new_r;
1303 old_r = get_eh_region_from_number_fn (id->src_cfun, old_nr);
1304 new_r = static_cast<eh_region> (*id->eh_map->get (old_r));
1306 return new_r->index;
1309 /* Similar, but operate on INTEGER_CSTs. */
1312 remap_eh_region_tree_nr (tree old_t_nr, copy_body_data *id)
1316 old_nr = tree_to_shwi (old_t_nr);
1317 new_nr = remap_eh_region_nr (old_nr, id);
1319 return build_int_cst (integer_type_node, new_nr);
1322 /* Helper for copy_bb. Remap statement STMT using the inlining
1323 information in ID. Return the new statement copy. */
1326 remap_gimple_stmt (gimple *stmt, copy_body_data *id)
1328 gimple *copy = NULL;
1329 struct walk_stmt_info wi;
1330 bool skip_first = false;
1331 gimple_seq stmts = NULL;
1333 if (is_gimple_debug (stmt)
1334 && !opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
1337 /* Begin by recognizing trees that we'll completely rewrite for the
1338 inlining context. Our output for these trees is completely
1339 different from out input (e.g. RETURN_EXPR is deleted, and morphs
1340 into an edge). Further down, we'll handle trees that get
1341 duplicated and/or tweaked. */
1343 /* When requested, GIMPLE_RETURNs should be transformed to just the
1344 contained GIMPLE_ASSIGN. The branch semantics of the return will
1345 be handled elsewhere by manipulating the CFG rather than the
1347 if (gimple_code (stmt) == GIMPLE_RETURN && id->transform_return_to_modify)
1349 tree retval = gimple_return_retval (as_a <greturn *> (stmt));
1350 tree retbnd = gimple_return_retbnd (stmt);
1351 tree bndslot = id->retbnd;
1353 if (retbnd && bndslot)
1355 gimple *bndcopy = gimple_build_assign (bndslot, retbnd);
1356 memset (&wi, 0, sizeof (wi));
1358 walk_gimple_op (bndcopy, remap_gimple_op_r, &wi);
1359 gimple_seq_add_stmt (&stmts, bndcopy);
1362 /* If we're returning something, just turn that into an
1363 assignment into the equivalent of the original RESULT_DECL.
1364 If RETVAL is just the result decl, the result decl has
1365 already been set (e.g. a recent "foo (&result_decl, ...)");
1366 just toss the entire GIMPLE_RETURN. */
1368 && (TREE_CODE (retval) != RESULT_DECL
1369 && (TREE_CODE (retval) != SSA_NAME
1370 || ! SSA_NAME_VAR (retval)
1371 || TREE_CODE (SSA_NAME_VAR (retval)) != RESULT_DECL)))
1373 copy = gimple_build_assign (id->do_not_unshare
1374 ? id->retvar : unshare_expr (id->retvar),
1376 /* id->retvar is already substituted. Skip it on later remapping. */
1379 /* We need to copy bounds if return structure with pointers into
1380 instrumented function. */
1381 if (chkp_function_instrumented_p (id->dst_fn)
1383 && !BOUNDED_P (id->retvar)
1384 && chkp_type_has_pointer (TREE_TYPE (id->retvar)))
1385 id->assign_stmts.safe_push (copy);
1391 else if (gimple_has_substatements (stmt))
1395 /* When cloning bodies from the C++ front end, we will be handed bodies
1396 in High GIMPLE form. Handle here all the High GIMPLE statements that
1397 have embedded statements. */
1398 switch (gimple_code (stmt))
1401 copy = copy_gimple_bind (as_a <gbind *> (stmt), id);
1406 gcatch *catch_stmt = as_a <gcatch *> (stmt);
1407 s1 = remap_gimple_seq (gimple_catch_handler (catch_stmt), id);
1408 copy = gimple_build_catch (gimple_catch_types (catch_stmt), s1);
1412 case GIMPLE_EH_FILTER:
1413 s1 = remap_gimple_seq (gimple_eh_filter_failure (stmt), id);
1414 copy = gimple_build_eh_filter (gimple_eh_filter_types (stmt), s1);
1418 s1 = remap_gimple_seq (gimple_try_eval (stmt), id);
1419 s2 = remap_gimple_seq (gimple_try_cleanup (stmt), id);
1420 copy = gimple_build_try (s1, s2, gimple_try_kind (stmt));
1423 case GIMPLE_WITH_CLEANUP_EXPR:
1424 s1 = remap_gimple_seq (gimple_wce_cleanup (stmt), id);
1425 copy = gimple_build_wce (s1);
1428 case GIMPLE_OMP_PARALLEL:
1430 gomp_parallel *omp_par_stmt = as_a <gomp_parallel *> (stmt);
1431 s1 = remap_gimple_seq (gimple_omp_body (omp_par_stmt), id);
1432 copy = gimple_build_omp_parallel
1434 gimple_omp_parallel_clauses (omp_par_stmt),
1435 gimple_omp_parallel_child_fn (omp_par_stmt),
1436 gimple_omp_parallel_data_arg (omp_par_stmt));
1440 case GIMPLE_OMP_TASK:
1441 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1442 copy = gimple_build_omp_task
1444 gimple_omp_task_clauses (stmt),
1445 gimple_omp_task_child_fn (stmt),
1446 gimple_omp_task_data_arg (stmt),
1447 gimple_omp_task_copy_fn (stmt),
1448 gimple_omp_task_arg_size (stmt),
1449 gimple_omp_task_arg_align (stmt));
1452 case GIMPLE_OMP_FOR:
1453 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1454 s2 = remap_gimple_seq (gimple_omp_for_pre_body (stmt), id);
1455 copy = gimple_build_omp_for (s1, gimple_omp_for_kind (stmt),
1456 gimple_omp_for_clauses (stmt),
1457 gimple_omp_for_collapse (stmt), s2);
1460 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
1462 gimple_omp_for_set_index (copy, i,
1463 gimple_omp_for_index (stmt, i));
1464 gimple_omp_for_set_initial (copy, i,
1465 gimple_omp_for_initial (stmt, i));
1466 gimple_omp_for_set_final (copy, i,
1467 gimple_omp_for_final (stmt, i));
1468 gimple_omp_for_set_incr (copy, i,
1469 gimple_omp_for_incr (stmt, i));
1470 gimple_omp_for_set_cond (copy, i,
1471 gimple_omp_for_cond (stmt, i));
1476 case GIMPLE_OMP_MASTER:
1477 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1478 copy = gimple_build_omp_master (s1);
1481 case GIMPLE_OMP_TASKGROUP:
1482 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1483 copy = gimple_build_omp_taskgroup (s1);
1486 case GIMPLE_OMP_ORDERED:
1487 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1488 copy = gimple_build_omp_ordered
1490 gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)));
1493 case GIMPLE_OMP_SECTION:
1494 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1495 copy = gimple_build_omp_section (s1);
1498 case GIMPLE_OMP_SECTIONS:
1499 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1500 copy = gimple_build_omp_sections
1501 (s1, gimple_omp_sections_clauses (stmt));
1504 case GIMPLE_OMP_SINGLE:
1505 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1506 copy = gimple_build_omp_single
1507 (s1, gimple_omp_single_clauses (stmt));
1510 case GIMPLE_OMP_TARGET:
1511 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1512 copy = gimple_build_omp_target
1513 (s1, gimple_omp_target_kind (stmt),
1514 gimple_omp_target_clauses (stmt));
1517 case GIMPLE_OMP_TEAMS:
1518 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1519 copy = gimple_build_omp_teams
1520 (s1, gimple_omp_teams_clauses (stmt));
1523 case GIMPLE_OMP_CRITICAL:
1524 s1 = remap_gimple_seq (gimple_omp_body (stmt), id);
1525 copy = gimple_build_omp_critical (s1,
1526 gimple_omp_critical_name
1527 (as_a <gomp_critical *> (stmt)),
1528 gimple_omp_critical_clauses
1529 (as_a <gomp_critical *> (stmt)));
1532 case GIMPLE_TRANSACTION:
1534 gtransaction *old_trans_stmt = as_a <gtransaction *> (stmt);
1535 gtransaction *new_trans_stmt;
1536 s1 = remap_gimple_seq (gimple_transaction_body (old_trans_stmt),
1538 copy = new_trans_stmt = gimple_build_transaction (s1);
1539 gimple_transaction_set_subcode (new_trans_stmt,
1540 gimple_transaction_subcode (old_trans_stmt));
1541 gimple_transaction_set_label_norm (new_trans_stmt,
1542 gimple_transaction_label_norm (old_trans_stmt));
1543 gimple_transaction_set_label_uninst (new_trans_stmt,
1544 gimple_transaction_label_uninst (old_trans_stmt));
1545 gimple_transaction_set_label_over (new_trans_stmt,
1546 gimple_transaction_label_over (old_trans_stmt));
1556 if (gimple_assign_copy_p (stmt)
1557 && gimple_assign_lhs (stmt) == gimple_assign_rhs1 (stmt)
1558 && auto_var_in_fn_p (gimple_assign_lhs (stmt), id->src_fn))
1560 /* Here we handle statements that are not completely rewritten.
1561 First we detect some inlining-induced bogosities for
1564 /* Some assignments VAR = VAR; don't generate any rtl code
1565 and thus don't count as variable modification. Avoid
1566 keeping bogosities like 0 = 0. */
1567 tree decl = gimple_assign_lhs (stmt), value;
1570 n = id->decl_map->get (decl);
1574 STRIP_TYPE_NOPS (value);
1575 if (TREE_CONSTANT (value) || TREE_READONLY (value))
1580 /* For *ptr_N ={v} {CLOBBER}, if ptr_N is SSA_NAME defined
1581 in a block that we aren't copying during tree_function_versioning,
1582 just drop the clobber stmt. */
1583 if (id->blocks_to_copy && gimple_clobber_p (stmt))
1585 tree lhs = gimple_assign_lhs (stmt);
1586 if (TREE_CODE (lhs) == MEM_REF
1587 && TREE_CODE (TREE_OPERAND (lhs, 0)) == SSA_NAME)
1589 gimple *def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (lhs, 0));
1590 if (gimple_bb (def_stmt)
1591 && !bitmap_bit_p (id->blocks_to_copy,
1592 gimple_bb (def_stmt)->index))
1597 if (gimple_debug_bind_p (stmt))
1600 = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
1601 gimple_debug_bind_get_value (stmt),
1603 id->debug_stmts.safe_push (copy);
1604 gimple_seq_add_stmt (&stmts, copy);
1607 if (gimple_debug_source_bind_p (stmt))
1609 gdebug *copy = gimple_build_debug_source_bind
1610 (gimple_debug_source_bind_get_var (stmt),
1611 gimple_debug_source_bind_get_value (stmt),
1613 id->debug_stmts.safe_push (copy);
1614 gimple_seq_add_stmt (&stmts, copy);
1618 /* Create a new deep copy of the statement. */
1619 copy = gimple_copy (stmt);
1621 /* Clear flags that need revisiting. */
1622 if (gcall *call_stmt = dyn_cast <gcall *> (copy))
1624 if (gimple_call_tail_p (call_stmt))
1625 gimple_call_set_tail (call_stmt, false);
1626 if (gimple_call_from_thunk_p (call_stmt))
1627 gimple_call_set_from_thunk (call_stmt, false);
1628 if (gimple_call_internal_p (call_stmt))
1629 switch (gimple_call_internal_fn (call_stmt))
1631 case IFN_GOMP_SIMD_LANE:
1632 case IFN_GOMP_SIMD_VF:
1633 case IFN_GOMP_SIMD_LAST_LANE:
1634 case IFN_GOMP_SIMD_ORDERED_START:
1635 case IFN_GOMP_SIMD_ORDERED_END:
1636 DECL_STRUCT_FUNCTION (id->dst_fn)->has_simduid_loops = true;
1643 /* Remap the region numbers for __builtin_eh_{pointer,filter},
1644 RESX and EH_DISPATCH. */
1646 switch (gimple_code (copy))
1650 tree r, fndecl = gimple_call_fndecl (copy);
1651 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
1652 switch (DECL_FUNCTION_CODE (fndecl))
1654 case BUILT_IN_EH_COPY_VALUES:
1655 r = gimple_call_arg (copy, 1);
1656 r = remap_eh_region_tree_nr (r, id);
1657 gimple_call_set_arg (copy, 1, r);
1660 case BUILT_IN_EH_POINTER:
1661 case BUILT_IN_EH_FILTER:
1662 r = gimple_call_arg (copy, 0);
1663 r = remap_eh_region_tree_nr (r, id);
1664 gimple_call_set_arg (copy, 0, r);
1671 /* Reset alias info if we didn't apply measures to
1672 keep it valid over inlining by setting DECL_PT_UID. */
1673 if (!id->src_cfun->gimple_df
1674 || !id->src_cfun->gimple_df->ipa_pta)
1675 gimple_call_reset_alias_info (as_a <gcall *> (copy));
1681 gresx *resx_stmt = as_a <gresx *> (copy);
1682 int r = gimple_resx_region (resx_stmt);
1683 r = remap_eh_region_nr (r, id);
1684 gimple_resx_set_region (resx_stmt, r);
1688 case GIMPLE_EH_DISPATCH:
1690 geh_dispatch *eh_dispatch = as_a <geh_dispatch *> (copy);
1691 int r = gimple_eh_dispatch_region (eh_dispatch);
1692 r = remap_eh_region_nr (r, id);
1693 gimple_eh_dispatch_set_region (eh_dispatch, r);
1702 /* If STMT has a block defined, map it to the newly constructed
1704 if (gimple_block (copy))
1707 n = id->decl_map->get (gimple_block (copy));
1709 gimple_set_block (copy, *n);
1712 if (gimple_debug_bind_p (copy) || gimple_debug_source_bind_p (copy))
1714 gimple_seq_add_stmt (&stmts, copy);
1718 /* Remap all the operands in COPY. */
1719 memset (&wi, 0, sizeof (wi));
1722 walk_tree (gimple_op_ptr (copy, 1), remap_gimple_op_r, &wi, NULL);
1724 walk_gimple_op (copy, remap_gimple_op_r, &wi);
1726 /* Clear the copied virtual operands. We are not remapping them here
1727 but are going to recreate them from scratch. */
1728 if (gimple_has_mem_ops (copy))
1730 gimple_set_vdef (copy, NULL_TREE);
1731 gimple_set_vuse (copy, NULL_TREE);
1734 gimple_seq_add_stmt (&stmts, copy);
1739 /* Copy basic block, scale profile accordingly. Edges will be taken care of
1743 copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
1744 gcov_type count_scale)
1746 gimple_stmt_iterator gsi, copy_gsi, seq_gsi;
1747 basic_block copy_basic_block;
1752 /* Search for previous copied basic block. */
1755 prev = prev->prev_bb;
1757 /* create_basic_block() will append every new block to
1758 basic_block_info automatically. */
1759 copy_basic_block = create_basic_block (NULL, (basic_block) prev->aux);
1760 copy_basic_block->count = apply_scale (bb->count, count_scale);
1762 /* We are going to rebuild frequencies from scratch. These values
1763 have just small importance to drive canonicalize_loop_headers. */
1764 freq = apply_scale ((gcov_type)bb->frequency, frequency_scale);
1766 /* We recompute frequencies after inlining, so this is quite safe. */
1767 if (freq > BB_FREQ_MAX)
1769 copy_basic_block->frequency = freq;
1771 copy_gsi = gsi_start_bb (copy_basic_block);
1773 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1776 gimple *stmt = gsi_stmt (gsi);
1777 gimple *orig_stmt = stmt;
1778 gimple_stmt_iterator stmts_gsi;
1779 bool stmt_added = false;
1781 id->regimplify = false;
1782 stmts = remap_gimple_stmt (stmt, id);
1784 if (gimple_seq_empty_p (stmts))
1789 for (stmts_gsi = gsi_start (stmts);
1790 !gsi_end_p (stmts_gsi); )
1792 stmt = gsi_stmt (stmts_gsi);
1794 /* Advance iterator now before stmt is moved to seq_gsi. */
1795 gsi_next (&stmts_gsi);
1797 if (gimple_nop_p (stmt))
1800 gimple_duplicate_stmt_histograms (cfun, stmt, id->src_cfun,
1803 /* With return slot optimization we can end up with
1804 non-gimple (foo *)&this->m, fix that here. */
1805 if (is_gimple_assign (stmt)
1806 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
1807 && !is_gimple_val (gimple_assign_rhs1 (stmt)))
1810 new_rhs = force_gimple_operand_gsi (&seq_gsi,
1811 gimple_assign_rhs1 (stmt),
1813 GSI_CONTINUE_LINKING);
1814 gimple_assign_set_rhs1 (stmt, new_rhs);
1815 id->regimplify = false;
1818 gsi_insert_after (&seq_gsi, stmt, GSI_NEW_STMT);
1821 gimple_regimplify_operands (stmt, &seq_gsi);
1829 /* If copy_basic_block has been empty at the start of this iteration,
1830 call gsi_start_bb again to get at the newly added statements. */
1831 if (gsi_end_p (copy_gsi))
1832 copy_gsi = gsi_start_bb (copy_basic_block);
1834 gsi_next (©_gsi);
1836 /* Process the new statement. The call to gimple_regimplify_operands
1837 possibly turned the statement into multiple statements, we
1838 need to process all of them. */
1844 stmt = gsi_stmt (copy_gsi);
1845 call_stmt = dyn_cast <gcall *> (stmt);
1847 && gimple_call_va_arg_pack_p (call_stmt)
1850 /* __builtin_va_arg_pack () should be replaced by
1851 all arguments corresponding to ... in the caller. */
1855 size_t nargs = gimple_call_num_args (id->call_stmt);
1856 size_t n, i, nargs_to_copy;
1857 bool remove_bounds = false;
1859 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1862 /* Bounds should be removed from arg pack in case
1863 we handle not instrumented call in instrumented
1865 nargs_to_copy = nargs;
1866 if (gimple_call_with_bounds_p (id->call_stmt)
1867 && !gimple_call_with_bounds_p (stmt))
1869 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1870 i < gimple_call_num_args (id->call_stmt);
1872 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1874 remove_bounds = true;
1877 /* Create the new array of arguments. */
1878 n = nargs_to_copy + gimple_call_num_args (call_stmt);
1879 argarray.create (n);
1880 argarray.safe_grow_cleared (n);
1882 /* Copy all the arguments before '...' */
1883 memcpy (argarray.address (),
1884 gimple_call_arg_ptr (call_stmt, 0),
1885 gimple_call_num_args (call_stmt) * sizeof (tree));
1889 /* Append the rest of arguments removing bounds. */
1890 unsigned cur = gimple_call_num_args (call_stmt);
1891 i = gimple_call_num_args (id->call_stmt) - nargs;
1892 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1893 i < gimple_call_num_args (id->call_stmt);
1895 if (!POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1896 argarray[cur++] = gimple_call_arg (id->call_stmt, i);
1897 gcc_assert (cur == n);
1901 /* Append the arguments passed in '...' */
1902 memcpy (argarray.address () + gimple_call_num_args (call_stmt),
1903 gimple_call_arg_ptr (id->call_stmt, 0)
1904 + (gimple_call_num_args (id->call_stmt) - nargs),
1905 nargs * sizeof (tree));
1908 new_call = gimple_build_call_vec (gimple_call_fn (call_stmt),
1911 argarray.release ();
1913 /* Copy all GIMPLE_CALL flags, location and block, except
1914 GF_CALL_VA_ARG_PACK. */
1915 gimple_call_copy_flags (new_call, call_stmt);
1916 gimple_call_set_va_arg_pack (new_call, false);
1917 gimple_set_location (new_call, gimple_location (stmt));
1918 gimple_set_block (new_call, gimple_block (stmt));
1919 gimple_call_set_lhs (new_call, gimple_call_lhs (call_stmt));
1921 gsi_replace (©_gsi, new_call, false);
1926 && (decl = gimple_call_fndecl (stmt))
1927 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
1928 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_ARG_PACK_LEN)
1930 /* __builtin_va_arg_pack_len () should be replaced by
1931 the number of anonymous arguments. */
1932 size_t nargs = gimple_call_num_args (id->call_stmt), i;
1936 for (p = DECL_ARGUMENTS (id->src_fn); p; p = DECL_CHAIN (p))
1939 /* For instrumented calls we should ignore bounds. */
1940 for (i = gimple_call_num_args (id->call_stmt) - nargs;
1941 i < gimple_call_num_args (id->call_stmt);
1943 if (POINTER_BOUNDS_P (gimple_call_arg (id->call_stmt, i)))
1946 count = build_int_cst (integer_type_node, nargs);
1947 new_stmt = gimple_build_assign (gimple_call_lhs (stmt), count);
1948 gsi_replace (©_gsi, new_stmt, false);
1953 && gimple_call_internal_p (stmt)
1954 && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
1956 /* Drop TSAN_FUNC_EXIT () internal calls during inlining. */
1957 gsi_remove (©_gsi, false);
1961 /* Statements produced by inlining can be unfolded, especially
1962 when we constant propagated some operands. We can't fold
1963 them right now for two reasons:
1964 1) folding require SSA_NAME_DEF_STMTs to be correct
1965 2) we can't change function calls to builtins.
1966 So we just mark statement for later folding. We mark
1967 all new statements, instead just statements that has changed
1968 by some nontrivial substitution so even statements made
1969 foldable indirectly are updated. If this turns out to be
1970 expensive, copy_body can be told to watch for nontrivial
1972 if (id->statements_to_fold)
1973 id->statements_to_fold->add (stmt);
1975 /* We're duplicating a CALL_EXPR. Find any corresponding
1976 callgraph edges and update or duplicate them. */
1977 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
1979 struct cgraph_edge *edge;
1981 switch (id->transform_call_graph_edges)
1983 case CB_CGE_DUPLICATE:
1984 edge = id->src_node->get_edge (orig_stmt);
1987 int edge_freq = edge->frequency;
1989 struct cgraph_edge *old_edge = edge;
1990 edge = edge->clone (id->dst_node, call_stmt,
1992 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
1994 /* We could also just rescale the frequency, but
1995 doing so would introduce roundoff errors and make
1996 verifier unhappy. */
1997 new_freq = compute_call_stmt_bb_frequency (id->dst_node->decl,
2000 /* Speculative calls consist of two edges - direct and indirect.
2001 Duplicate the whole thing and distribute frequencies accordingly. */
2002 if (edge->speculative)
2004 struct cgraph_edge *direct, *indirect;
2005 struct ipa_ref *ref;
2007 gcc_assert (!edge->indirect_unknown_callee);
2008 old_edge->speculative_call_info (direct, indirect, ref);
2009 indirect = indirect->clone (id->dst_node, call_stmt,
2011 REG_BR_PROB_BASE, CGRAPH_FREQ_BASE,
2013 if (old_edge->frequency + indirect->frequency)
2015 edge->frequency = MIN (RDIV ((gcov_type)new_freq * old_edge->frequency,
2016 (old_edge->frequency + indirect->frequency)),
2018 indirect->frequency = MIN (RDIV ((gcov_type)new_freq * indirect->frequency,
2019 (old_edge->frequency + indirect->frequency)),
2022 id->dst_node->clone_reference (ref, stmt);
2026 edge->frequency = new_freq;
2028 && profile_status_for_fn (cfun) != PROFILE_ABSENT
2029 && (edge_freq > edge->frequency + 10
2030 || edge_freq < edge->frequency - 10))
2032 fprintf (dump_file, "Edge frequency estimated by "
2033 "cgraph %i diverge from inliner's estimate %i\n",
2037 "Orig bb: %i, orig bb freq %i, new bb freq %i\n",
2040 copy_basic_block->frequency);
2046 case CB_CGE_MOVE_CLONES:
2047 id->dst_node->set_call_stmt_including_clones (orig_stmt,
2049 edge = id->dst_node->get_edge (stmt);
2053 edge = id->dst_node->get_edge (orig_stmt);
2055 edge->set_call_stmt (call_stmt);
2062 /* Constant propagation on argument done during inlining
2063 may create new direct call. Produce an edge for it. */
2065 || (edge->indirect_inlining_edge
2066 && id->transform_call_graph_edges == CB_CGE_MOVE_CLONES))
2067 && id->dst_node->definition
2068 && (fn = gimple_call_fndecl (stmt)) != NULL)
2070 struct cgraph_node *dest = cgraph_node::get (fn);
2072 /* We have missing edge in the callgraph. This can happen
2073 when previous inlining turned an indirect call into a
2074 direct call by constant propagating arguments or we are
2075 producing dead clone (for further cloning). In all
2076 other cases we hit a bug (incorrect node sharing is the
2077 most common reason for missing edges). */
2078 gcc_assert (!dest->definition
2079 || dest->address_taken
2080 || !id->src_node->definition
2081 || !id->dst_node->definition);
2082 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES)
2083 id->dst_node->create_edge_including_clones
2084 (dest, orig_stmt, call_stmt, bb->count,
2085 compute_call_stmt_bb_frequency (id->dst_node->decl,
2087 CIF_ORIGINALLY_INDIRECT_CALL);
2089 id->dst_node->create_edge (dest, call_stmt,
2091 compute_call_stmt_bb_frequency
2092 (id->dst_node->decl,
2093 copy_basic_block))->inline_failed
2094 = CIF_ORIGINALLY_INDIRECT_CALL;
2097 fprintf (dump_file, "Created new direct edge to %s\n",
2102 notice_special_calls (as_a <gcall *> (stmt));
2105 maybe_duplicate_eh_stmt_fn (cfun, stmt, id->src_cfun, orig_stmt,
2106 id->eh_map, id->eh_lp_nr);
2108 if (gimple_in_ssa_p (cfun) && !is_gimple_debug (stmt))
2113 FOR_EACH_SSA_TREE_OPERAND (def, stmt, i, SSA_OP_DEF)
2114 if (TREE_CODE (def) == SSA_NAME)
2115 SSA_NAME_DEF_STMT (def) = stmt;
2118 gsi_next (©_gsi);
2120 while (!gsi_end_p (copy_gsi));
2122 copy_gsi = gsi_last_bb (copy_basic_block);
2125 return copy_basic_block;
2128 /* Inserting Single Entry Multiple Exit region in SSA form into code in SSA
2129 form is quite easy, since dominator relationship for old basic blocks does
2132 There is however exception where inlining might change dominator relation
2133 across EH edges from basic block within inlined functions destinating
2134 to landing pads in function we inline into.
2136 The function fills in PHI_RESULTs of such PHI nodes if they refer
2137 to gimple regs. Otherwise, the function mark PHI_RESULT of such
2138 PHI nodes for renaming. For non-gimple regs, renaming is safe: the
2139 EH edges are abnormal and SSA_NAME_OCCURS_IN_ABNORMAL_PHI must be
2140 set, and this means that there will be no overlapping live ranges
2141 for the underlying symbol.
2143 This might change in future if we allow redirecting of EH edges and
2144 we might want to change way build CFG pre-inlining to include
2145 all the possible edges then. */
2147 update_ssa_across_abnormal_edges (basic_block bb, basic_block ret_bb,
2148 bool can_throw, bool nonlocal_goto)
2153 FOR_EACH_EDGE (e, ei, bb->succs)
2155 || ((basic_block)e->dest->aux)->index == ENTRY_BLOCK)
2161 gcc_assert (e->flags & EDGE_EH);
2164 gcc_assert (!(e->flags & EDGE_EH));
2166 for (si = gsi_start_phis (e->dest); !gsi_end_p (si); gsi_next (&si))
2172 /* For abnormal goto/call edges the receiver can be the
2173 ENTRY_BLOCK. Do not assert this cannot happen. */
2175 gcc_assert ((e->flags & EDGE_EH)
2176 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)));
2178 re = find_edge (ret_bb, e->dest);
2179 gcc_checking_assert (re);
2180 gcc_assert ((re->flags & (EDGE_EH | EDGE_ABNORMAL))
2181 == (e->flags & (EDGE_EH | EDGE_ABNORMAL)));
2183 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
2184 USE_FROM_PTR (PHI_ARG_DEF_PTR_FROM_EDGE (phi, re)));
2190 /* Copy edges from BB into its copy constructed earlier, scale profile
2191 accordingly. Edges will be taken care of later. Assume aux
2192 pointers to point to the copies of each BB. Return true if any
2193 debug stmts are left after a statement that must end the basic block. */
2196 copy_edges_for_bb (basic_block bb, gcov_type count_scale, basic_block ret_bb,
2197 basic_block abnormal_goto_dest)
2199 basic_block new_bb = (basic_block) bb->aux;
2202 gimple_stmt_iterator si;
2204 bool need_debug_cleanup = false;
2206 /* Use the indices from the original blocks to create edges for the
2208 FOR_EACH_EDGE (old_edge, ei, bb->succs)
2209 if (!(old_edge->flags & EDGE_EH))
2213 flags = old_edge->flags;
2215 /* Return edges do get a FALLTHRU flag when the get inlined. */
2216 if (old_edge->dest->index == EXIT_BLOCK
2217 && !(old_edge->flags & (EDGE_TRUE_VALUE|EDGE_FALSE_VALUE|EDGE_FAKE))
2218 && old_edge->dest->aux != EXIT_BLOCK_PTR_FOR_FN (cfun))
2219 flags |= EDGE_FALLTHRU;
2220 new_edge = make_edge (new_bb, (basic_block) old_edge->dest->aux, flags);
2221 new_edge->count = apply_scale (old_edge->count, count_scale);
2222 new_edge->probability = old_edge->probability;
2225 if (bb->index == ENTRY_BLOCK || bb->index == EXIT_BLOCK)
2228 for (si = gsi_start_bb (new_bb); !gsi_end_p (si);)
2231 bool can_throw, nonlocal_goto;
2233 copy_stmt = gsi_stmt (si);
2234 if (!is_gimple_debug (copy_stmt))
2235 update_stmt (copy_stmt);
2237 /* Do this before the possible split_block. */
2240 /* If this tree could throw an exception, there are two
2241 cases where we need to add abnormal edge(s): the
2242 tree wasn't in a region and there is a "current
2243 region" in the caller; or the original tree had
2244 EH edges. In both cases split the block after the tree,
2245 and add abnormal edge(s) as needed; we need both
2246 those from the callee and the caller.
2247 We check whether the copy can throw, because the const
2248 propagation can change an INDIRECT_REF which throws
2249 into a COMPONENT_REF which doesn't. If the copy
2250 can throw, the original could also throw. */
2251 can_throw = stmt_can_throw_internal (copy_stmt);
2253 = (stmt_can_make_abnormal_goto (copy_stmt)
2254 && !computed_goto_p (copy_stmt));
2256 if (can_throw || nonlocal_goto)
2258 if (!gsi_end_p (si))
2260 while (!gsi_end_p (si) && is_gimple_debug (gsi_stmt (si)))
2263 need_debug_cleanup = true;
2265 if (!gsi_end_p (si))
2266 /* Note that bb's predecessor edges aren't necessarily
2267 right at this point; split_block doesn't care. */
2269 edge e = split_block (new_bb, copy_stmt);
2272 new_bb->aux = e->src->aux;
2273 si = gsi_start_bb (new_bb);
2277 if (gimple_code (copy_stmt) == GIMPLE_EH_DISPATCH)
2278 make_eh_dispatch_edges (as_a <geh_dispatch *> (copy_stmt));
2280 make_eh_edges (copy_stmt);
2282 /* If the call we inline cannot make abnormal goto do not add
2283 additional abnormal edges but only retain those already present
2284 in the original function body. */
2285 if (abnormal_goto_dest == NULL)
2286 nonlocal_goto = false;
2289 basic_block copy_stmt_bb = gimple_bb (copy_stmt);
2291 if (get_abnormal_succ_dispatcher (copy_stmt_bb))
2292 nonlocal_goto = false;
2293 /* ABNORMAL_DISPATCHER (1) is for longjmp/setjmp or nonlocal gotos
2294 in OpenMP regions which aren't allowed to be left abnormally.
2295 So, no need to add abnormal edge in that case. */
2296 else if (is_gimple_call (copy_stmt)
2297 && gimple_call_internal_p (copy_stmt)
2298 && (gimple_call_internal_fn (copy_stmt)
2299 == IFN_ABNORMAL_DISPATCHER)
2300 && gimple_call_arg (copy_stmt, 0) == boolean_true_node)
2301 nonlocal_goto = false;
2303 make_edge (copy_stmt_bb, abnormal_goto_dest, EDGE_ABNORMAL);
2306 if ((can_throw || nonlocal_goto)
2307 && gimple_in_ssa_p (cfun))
2308 update_ssa_across_abnormal_edges (gimple_bb (copy_stmt), ret_bb,
2309 can_throw, nonlocal_goto);
2311 return need_debug_cleanup;
2314 /* Copy the PHIs. All blocks and edges are copied, some blocks
2315 was possibly split and new outgoing EH edges inserted.
2316 BB points to the block of original function and AUX pointers links
2317 the original and newly copied blocks. */
2320 copy_phis_for_bb (basic_block bb, copy_body_data *id)
2322 basic_block const new_bb = (basic_block) bb->aux;
2327 bool inserted = false;
2329 for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
2335 res = PHI_RESULT (phi);
2337 if (!virtual_operand_p (res))
2339 walk_tree (&new_res, copy_tree_body_r, id, NULL);
2340 new_phi = create_phi_node (new_res, new_bb);
2341 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2343 edge old_edge = find_edge ((basic_block) new_edge->src->aux, bb);
2349 /* When doing partial cloning, we allow PHIs on the entry block
2350 as long as all the arguments are the same. Find any input
2351 edge to see argument to copy. */
2353 FOR_EACH_EDGE (old_edge, ei2, bb->preds)
2354 if (!old_edge->src->aux)
2357 arg = PHI_ARG_DEF_FROM_EDGE (phi, old_edge);
2359 walk_tree (&new_arg, copy_tree_body_r, id, NULL);
2360 gcc_assert (new_arg);
2361 /* With return slot optimization we can end up with
2362 non-gimple (foo *)&this->m, fix that here. */
2363 if (TREE_CODE (new_arg) != SSA_NAME
2364 && TREE_CODE (new_arg) != FUNCTION_DECL
2365 && !is_gimple_val (new_arg))
2367 gimple_seq stmts = NULL;
2368 new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
2369 gsi_insert_seq_on_edge (new_edge, stmts);
2372 locus = gimple_phi_arg_location_from_edge (phi, old_edge);
2373 if (LOCATION_BLOCK (locus))
2376 n = id->decl_map->get (LOCATION_BLOCK (locus));
2378 locus = set_block (locus, *n);
2381 locus = LOCATION_LOCUS (locus);
2383 add_phi_arg (new_phi, new_arg, new_edge, locus);
2388 /* Commit the delayed edge insertions. */
2390 FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
2391 gsi_commit_one_edge_insert (new_edge, NULL);
2395 /* Wrapper for remap_decl so it can be used as a callback. */
2398 remap_decl_1 (tree decl, void *data)
2400 return remap_decl (decl, (copy_body_data *) data);
2403 /* Build struct function and associated datastructures for the new clone
2404 NEW_FNDECL to be build. CALLEE_FNDECL is the original. Function changes
2405 the cfun to the function of new_fndecl (and current_function_decl too). */
2408 initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
2410 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2411 gcov_type count_scale;
2413 if (!DECL_ARGUMENTS (new_fndecl))
2414 DECL_ARGUMENTS (new_fndecl) = DECL_ARGUMENTS (callee_fndecl);
2415 if (!DECL_RESULT (new_fndecl))
2416 DECL_RESULT (new_fndecl) = DECL_RESULT (callee_fndecl);
2418 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2420 = GCOV_COMPUTE_SCALE (count,
2421 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2423 count_scale = REG_BR_PROB_BASE;
2425 /* Register specific tree functions. */
2426 gimple_register_cfg_hooks ();
2428 /* Get clean struct function. */
2429 push_struct_function (new_fndecl);
2431 /* We will rebuild these, so just sanity check that they are empty. */
2432 gcc_assert (VALUE_HISTOGRAMS (cfun) == NULL);
2433 gcc_assert (cfun->local_decls == NULL);
2434 gcc_assert (cfun->cfg == NULL);
2435 gcc_assert (cfun->decl == new_fndecl);
2437 /* Copy items we preserve during cloning. */
2438 cfun->static_chain_decl = src_cfun->static_chain_decl;
2439 cfun->nonlocal_goto_save_area = src_cfun->nonlocal_goto_save_area;
2440 cfun->function_end_locus = src_cfun->function_end_locus;
2441 cfun->curr_properties = src_cfun->curr_properties;
2442 cfun->last_verified = src_cfun->last_verified;
2443 cfun->va_list_gpr_size = src_cfun->va_list_gpr_size;
2444 cfun->va_list_fpr_size = src_cfun->va_list_fpr_size;
2445 cfun->has_nonlocal_label = src_cfun->has_nonlocal_label;
2446 cfun->stdarg = src_cfun->stdarg;
2447 cfun->after_inlining = src_cfun->after_inlining;
2448 cfun->can_throw_non_call_exceptions
2449 = src_cfun->can_throw_non_call_exceptions;
2450 cfun->can_delete_dead_exceptions = src_cfun->can_delete_dead_exceptions;
2451 cfun->returns_struct = src_cfun->returns_struct;
2452 cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
2454 init_empty_tree_cfg ();
2456 profile_status_for_fn (cfun) = profile_status_for_fn (src_cfun);
2457 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count =
2458 (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2460 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency
2461 = ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2462 EXIT_BLOCK_PTR_FOR_FN (cfun)->count =
2463 (EXIT_BLOCK_PTR_FOR_FN (src_cfun)->count * count_scale /
2465 EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency =
2466 EXIT_BLOCK_PTR_FOR_FN (src_cfun)->frequency;
2468 init_eh_for_function ();
2470 if (src_cfun->gimple_df)
2472 init_tree_ssa (cfun);
2473 cfun->gimple_df->in_ssa_p = true;
2474 init_ssa_operands (cfun);
2478 /* Helper function for copy_cfg_body. Move debug stmts from the end
2479 of NEW_BB to the beginning of successor basic blocks when needed. If the
2480 successor has multiple predecessors, reset them, otherwise keep
2484 maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
2488 gimple_stmt_iterator si = gsi_last_nondebug_bb (new_bb);
2491 || gsi_one_before_end_p (si)
2492 || !(stmt_can_throw_internal (gsi_stmt (si))
2493 || stmt_can_make_abnormal_goto (gsi_stmt (si))))
2496 FOR_EACH_EDGE (e, ei, new_bb->succs)
2498 gimple_stmt_iterator ssi = gsi_last_bb (new_bb);
2499 gimple_stmt_iterator dsi = gsi_after_labels (e->dest);
2500 while (is_gimple_debug (gsi_stmt (ssi)))
2502 gimple *stmt = gsi_stmt (ssi);
2507 /* For the last edge move the debug stmts instead of copying
2509 if (ei_one_before_end_p (ei))
2513 if (!single_pred_p (e->dest) && gimple_debug_bind_p (stmt))
2514 gimple_debug_bind_reset_value (stmt);
2515 gsi_remove (&si, false);
2516 gsi_insert_before (&dsi, stmt, GSI_SAME_STMT);
2520 if (gimple_debug_bind_p (stmt))
2522 var = gimple_debug_bind_get_var (stmt);
2523 if (single_pred_p (e->dest))
2525 value = gimple_debug_bind_get_value (stmt);
2526 value = unshare_expr (value);
2530 new_stmt = gimple_build_debug_bind (var, value, stmt);
2532 else if (gimple_debug_source_bind_p (stmt))
2534 var = gimple_debug_source_bind_get_var (stmt);
2535 value = gimple_debug_source_bind_get_value (stmt);
2536 new_stmt = gimple_build_debug_source_bind (var, value, stmt);
2540 gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
2541 id->debug_stmts.safe_push (new_stmt);
2547 /* Make a copy of the sub-loops of SRC_PARENT and place them
2548 as siblings of DEST_PARENT. */
2551 copy_loops (copy_body_data *id,
2552 struct loop *dest_parent, struct loop *src_parent)
2554 struct loop *src_loop = src_parent->inner;
2557 if (!id->blocks_to_copy
2558 || bitmap_bit_p (id->blocks_to_copy, src_loop->header->index))
2560 struct loop *dest_loop = alloc_loop ();
2562 /* Assign the new loop its header and latch and associate
2563 those with the new loop. */
2564 dest_loop->header = (basic_block)src_loop->header->aux;
2565 dest_loop->header->loop_father = dest_loop;
2566 if (src_loop->latch != NULL)
2568 dest_loop->latch = (basic_block)src_loop->latch->aux;
2569 dest_loop->latch->loop_father = dest_loop;
2572 /* Copy loop meta-data. */
2573 copy_loop_info (src_loop, dest_loop);
2575 /* Finally place it into the loop array and the loop tree. */
2576 place_new_loop (cfun, dest_loop);
2577 flow_loop_tree_node_add (dest_parent, dest_loop);
2579 dest_loop->safelen = src_loop->safelen;
2580 dest_loop->dont_vectorize = src_loop->dont_vectorize;
2581 if (src_loop->force_vectorize)
2583 dest_loop->force_vectorize = true;
2584 cfun->has_force_vectorize_loops = true;
2586 if (src_loop->simduid)
2588 dest_loop->simduid = remap_decl (src_loop->simduid, id);
2589 cfun->has_simduid_loops = true;
2593 copy_loops (id, dest_loop, src_loop);
2595 src_loop = src_loop->next;
2599 /* Call cgraph_redirect_edge_call_stmt_to_callee on all calls in BB */
2602 redirect_all_calls (copy_body_data * id, basic_block bb)
2604 gimple_stmt_iterator si;
2605 gimple *last = last_stmt (bb);
2606 for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
2608 gimple *stmt = gsi_stmt (si);
2609 if (is_gimple_call (stmt))
2611 struct cgraph_edge *edge = id->dst_node->get_edge (stmt);
2614 edge->redirect_call_stmt_to_callee ();
2615 if (stmt == last && id->call_stmt && maybe_clean_eh_stmt (stmt))
2616 gimple_purge_dead_eh_edges (bb);
2622 /* Convert estimated frequencies into counts for NODE, scaling COUNT
2623 with each bb's frequency. Used when NODE has a 0-weight entry
2624 but we are about to inline it into a non-zero count call bb.
2625 See the comments for handle_missing_profiles() in predict.c for
2626 when this can happen for COMDATs. */
2629 freqs_to_counts (struct cgraph_node *node, gcov_type count)
2634 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
2636 FOR_ALL_BB_FN(bb, fn)
2638 bb->count = apply_scale (count,
2639 GCOV_COMPUTE_SCALE (bb->frequency, BB_FREQ_MAX));
2640 FOR_EACH_EDGE (e, ei, bb->succs)
2641 e->count = apply_probability (e->src->count, e->probability);
2645 /* Make a copy of the body of FN so that it can be inserted inline in
2646 another function. Walks FN via CFG, returns new fndecl. */
2649 copy_cfg_body (copy_body_data * id, gcov_type count, int frequency_scale,
2650 basic_block entry_block_map, basic_block exit_block_map,
2651 basic_block new_entry)
2653 tree callee_fndecl = id->src_fn;
2654 /* Original cfun for the callee, doesn't change. */
2655 struct function *src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2656 struct function *cfun_to_copy;
2658 tree new_fndecl = NULL;
2659 bool need_debug_cleanup = false;
2660 gcov_type count_scale;
2662 int incoming_frequency = 0;
2663 gcov_type incoming_count = 0;
2665 /* This can happen for COMDAT routines that end up with 0 counts
2666 despite being called (see the comments for handle_missing_profiles()
2667 in predict.c as to why). Apply counts to the blocks in the callee
2668 before inlining, using the guessed edge frequencies, so that we don't
2669 end up with a 0-count inline body which can confuse downstream
2670 optimizations such as function splitting. */
2671 if (!ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count && count)
2673 /* Apply the larger of the call bb count and the total incoming
2674 call edge count to the callee. */
2675 gcov_type in_count = 0;
2676 struct cgraph_edge *in_edge;
2677 for (in_edge = id->src_node->callers; in_edge;
2678 in_edge = in_edge->next_caller)
2679 in_count += in_edge->count;
2680 freqs_to_counts (id->src_node, count > in_count ? count : in_count);
2683 if (ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count)
2685 = GCOV_COMPUTE_SCALE (count,
2686 ENTRY_BLOCK_PTR_FOR_FN (src_cfun)->count);
2688 count_scale = REG_BR_PROB_BASE;
2690 /* Register specific tree functions. */
2691 gimple_register_cfg_hooks ();
2693 /* If we are inlining just region of the function, make sure to connect
2694 new entry to ENTRY_BLOCK_PTR_FOR_FN (cfun). Since new entry can be
2695 part of loop, we must compute frequency and probability of
2696 ENTRY_BLOCK_PTR_FOR_FN (cfun) based on the frequencies and
2697 probabilities of edges incoming from nonduplicated region. */
2703 FOR_EACH_EDGE (e, ei, new_entry->preds)
2706 incoming_frequency += EDGE_FREQUENCY (e);
2707 incoming_count += e->count;
2709 incoming_count = apply_scale (incoming_count, count_scale);
2711 = apply_scale ((gcov_type)incoming_frequency, frequency_scale);
2712 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = incoming_count;
2713 ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = incoming_frequency;
2716 /* Must have a CFG here at this point. */
2717 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN
2718 (DECL_STRUCT_FUNCTION (callee_fndecl)));
2720 cfun_to_copy = id->src_cfun = DECL_STRUCT_FUNCTION (callee_fndecl);
2722 ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = entry_block_map;
2723 EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy)->aux = exit_block_map;
2724 entry_block_map->aux = ENTRY_BLOCK_PTR_FOR_FN (cfun_to_copy);
2725 exit_block_map->aux = EXIT_BLOCK_PTR_FOR_FN (cfun_to_copy);
2727 /* Duplicate any exception-handling regions. */
2729 id->eh_map = duplicate_eh_regions (cfun_to_copy, NULL, id->eh_lp_nr,
2732 /* Use aux pointers to map the original blocks to copy. */
2733 FOR_EACH_BB_FN (bb, cfun_to_copy)
2734 if (!id->blocks_to_copy || bitmap_bit_p (id->blocks_to_copy, bb->index))
2736 basic_block new_bb = copy_bb (id, bb, frequency_scale, count_scale);
2739 new_bb->loop_father = entry_block_map->loop_father;
2742 last = last_basic_block_for_fn (cfun);
2744 /* Now that we've duplicated the blocks, duplicate their edges. */
2745 basic_block abnormal_goto_dest = NULL;
2747 && stmt_can_make_abnormal_goto (id->call_stmt))
2749 gimple_stmt_iterator gsi = gsi_for_stmt (id->call_stmt);
2751 bb = gimple_bb (id->call_stmt);
2753 if (gsi_end_p (gsi))
2754 abnormal_goto_dest = get_abnormal_succ_dispatcher (bb);
2756 FOR_ALL_BB_FN (bb, cfun_to_copy)
2757 if (!id->blocks_to_copy
2758 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2759 need_debug_cleanup |= copy_edges_for_bb (bb, count_scale, exit_block_map,
2760 abnormal_goto_dest);
2764 edge e = make_edge (entry_block_map, (basic_block)new_entry->aux, EDGE_FALLTHRU);
2765 e->probability = REG_BR_PROB_BASE;
2766 e->count = incoming_count;
2769 /* Duplicate the loop tree, if available and wanted. */
2770 if (loops_for_fn (src_cfun) != NULL
2771 && current_loops != NULL)
2773 copy_loops (id, entry_block_map->loop_father,
2774 get_loop (src_cfun, 0));
2775 /* Defer to cfgcleanup to update loop-father fields of basic-blocks. */
2776 loops_state_set (LOOPS_NEED_FIXUP);
2779 /* If the loop tree in the source function needed fixup, mark the
2780 destination loop tree for fixup, too. */
2781 if (loops_for_fn (src_cfun)->state & LOOPS_NEED_FIXUP)
2782 loops_state_set (LOOPS_NEED_FIXUP);
2784 if (gimple_in_ssa_p (cfun))
2785 FOR_ALL_BB_FN (bb, cfun_to_copy)
2786 if (!id->blocks_to_copy
2787 || (bb->index > 0 && bitmap_bit_p (id->blocks_to_copy, bb->index)))
2788 copy_phis_for_bb (bb, id);
2790 FOR_ALL_BB_FN (bb, cfun_to_copy)
2793 if (need_debug_cleanup
2794 && bb->index != ENTRY_BLOCK
2795 && bb->index != EXIT_BLOCK)
2796 maybe_move_debug_stmts_to_successors (id, (basic_block) bb->aux);
2797 /* Update call edge destinations. This can not be done before loop
2798 info is updated, because we may split basic blocks. */
2799 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE
2800 && bb->index != ENTRY_BLOCK
2801 && bb->index != EXIT_BLOCK)
2802 redirect_all_calls (id, (basic_block)bb->aux);
2803 ((basic_block)bb->aux)->aux = NULL;
2807 /* Zero out AUX fields of newly created block during EH edge
2809 for (; last < last_basic_block_for_fn (cfun); last++)
2811 if (need_debug_cleanup)
2812 maybe_move_debug_stmts_to_successors (id,
2813 BASIC_BLOCK_FOR_FN (cfun, last));
2814 BASIC_BLOCK_FOR_FN (cfun, last)->aux = NULL;
2815 /* Update call edge destinations. This can not be done before loop
2816 info is updated, because we may split basic blocks. */
2817 if (id->transform_call_graph_edges == CB_CGE_DUPLICATE)
2818 redirect_all_calls (id, BASIC_BLOCK_FOR_FN (cfun, last));
2820 entry_block_map->aux = NULL;
2821 exit_block_map->aux = NULL;
2828 if (id->dependence_map)
2830 delete id->dependence_map;
2831 id->dependence_map = NULL;
2837 /* Copy the debug STMT using ID. We deal with these statements in a
2838 special way: if any variable in their VALUE expression wasn't
2839 remapped yet, we won't remap it, because that would get decl uids
2840 out of sync, causing codegen differences between -g and -g0. If
2841 this arises, we drop the VALUE expression altogether. */
2844 copy_debug_stmt (gdebug *stmt, copy_body_data *id)
2847 struct walk_stmt_info wi;
2849 if (gimple_block (stmt))
2851 n = id->decl_map->get (gimple_block (stmt));
2852 gimple_set_block (stmt, n ? *n : id->block);
2855 /* Remap all the operands in COPY. */
2856 memset (&wi, 0, sizeof (wi));
2859 processing_debug_stmt = 1;
2861 if (gimple_debug_source_bind_p (stmt))
2862 t = gimple_debug_source_bind_get_var (stmt);
2864 t = gimple_debug_bind_get_var (stmt);
2866 if (TREE_CODE (t) == PARM_DECL && id->debug_map
2867 && (n = id->debug_map->get (t)))
2869 gcc_assert (TREE_CODE (*n) == VAR_DECL);
2872 else if (TREE_CODE (t) == VAR_DECL
2873 && !is_global_var (t)
2874 && !id->decl_map->get (t))
2875 /* T is a non-localized variable. */;
2877 walk_tree (&t, remap_gimple_op_r, &wi, NULL);
2879 if (gimple_debug_bind_p (stmt))
2881 gimple_debug_bind_set_var (stmt, t);
2883 if (gimple_debug_bind_has_value_p (stmt))
2884 walk_tree (gimple_debug_bind_get_value_ptr (stmt),
2885 remap_gimple_op_r, &wi, NULL);
2887 /* Punt if any decl couldn't be remapped. */
2888 if (processing_debug_stmt < 0)
2889 gimple_debug_bind_reset_value (stmt);
2891 else if (gimple_debug_source_bind_p (stmt))
2893 gimple_debug_source_bind_set_var (stmt, t);
2894 /* When inlining and source bind refers to one of the optimized
2895 away parameters, change the source bind into normal debug bind
2896 referring to the corresponding DEBUG_EXPR_DECL that should have
2897 been bound before the call stmt. */
2898 t = gimple_debug_source_bind_get_value (stmt);
2900 && TREE_CODE (t) == PARM_DECL
2903 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
2905 if (debug_args != NULL)
2907 for (i = 0; i < vec_safe_length (*debug_args); i += 2)
2908 if ((**debug_args)[i] == DECL_ORIGIN (t)
2909 && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
2911 t = (**debug_args)[i + 1];
2912 stmt->subcode = GIMPLE_DEBUG_BIND;
2913 gimple_debug_bind_set_value (stmt, t);
2918 if (gimple_debug_source_bind_p (stmt))
2919 walk_tree (gimple_debug_source_bind_get_value_ptr (stmt),
2920 remap_gimple_op_r, &wi, NULL);
2923 processing_debug_stmt = 0;
2928 /* Process deferred debug stmts. In order to give values better odds
2929 of being successfully remapped, we delay the processing of debug
2930 stmts until all other stmts that might require remapping are
2934 copy_debug_stmts (copy_body_data *id)
2939 if (!id->debug_stmts.exists ())
2942 FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
2943 copy_debug_stmt (stmt, id);
2945 id->debug_stmts.release ();
2948 /* Make a copy of the body of SRC_FN so that it can be inserted inline in
2949 another function. */
2952 copy_tree_body (copy_body_data *id)
2954 tree fndecl = id->src_fn;
2955 tree body = DECL_SAVED_TREE (fndecl);
2957 walk_tree (&body, copy_tree_body_r, id, NULL);
2962 /* Make a copy of the body of FN so that it can be inserted inline in
2963 another function. */
2966 copy_body (copy_body_data *id, gcov_type count, int frequency_scale,
2967 basic_block entry_block_map, basic_block exit_block_map,
2968 basic_block new_entry)
2970 tree fndecl = id->src_fn;
2973 /* If this body has a CFG, walk CFG and copy. */
2974 gcc_assert (ENTRY_BLOCK_PTR_FOR_FN (DECL_STRUCT_FUNCTION (fndecl)));
2975 body = copy_cfg_body (id, count, frequency_scale, entry_block_map, exit_block_map,
2977 copy_debug_stmts (id);
2982 /* Return true if VALUE is an ADDR_EXPR of an automatic variable
2983 defined in function FN, or of a data member thereof. */
2986 self_inlining_addr_expr (tree value, tree fn)
2990 if (TREE_CODE (value) != ADDR_EXPR)
2993 var = get_base_address (TREE_OPERAND (value, 0));
2995 return var && auto_var_in_fn_p (var, fn);
2998 /* Append to BB a debug annotation that binds VAR to VALUE, inheriting
2999 lexical block and line number information from base_stmt, if given,
3000 or from the last stmt of the block otherwise. */
3003 insert_init_debug_bind (copy_body_data *id,
3004 basic_block bb, tree var, tree value,
3008 gimple_stmt_iterator gsi;
3011 if (!gimple_in_ssa_p (id->src_cfun))
3014 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
3017 tracked_var = target_for_debug_bind (var);
3023 gsi = gsi_last_bb (bb);
3024 if (!base_stmt && !gsi_end_p (gsi))
3025 base_stmt = gsi_stmt (gsi);
3028 note = gimple_build_debug_bind (tracked_var, unshare_expr (value), base_stmt);
3032 if (!gsi_end_p (gsi))
3033 gsi_insert_after (&gsi, note, GSI_SAME_STMT);
3035 gsi_insert_before (&gsi, note, GSI_SAME_STMT);
3042 insert_init_stmt (copy_body_data *id, basic_block bb, gimple *init_stmt)
3044 /* If VAR represents a zero-sized variable, it's possible that the
3045 assignment statement may result in no gimple statements. */
3048 gimple_stmt_iterator si = gsi_last_bb (bb);
3050 /* We can end up with init statements that store to a non-register
3051 from a rhs with a conversion. Handle that here by forcing the
3052 rhs into a temporary. gimple_regimplify_operands is not
3053 prepared to do this for us. */
3054 if (!is_gimple_debug (init_stmt)
3055 && !is_gimple_reg (gimple_assign_lhs (init_stmt))
3056 && is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (init_stmt)))
3057 && gimple_assign_rhs_class (init_stmt) == GIMPLE_UNARY_RHS)
3059 tree rhs = build1 (gimple_assign_rhs_code (init_stmt),
3060 gimple_expr_type (init_stmt),
3061 gimple_assign_rhs1 (init_stmt));
3062 rhs = force_gimple_operand_gsi (&si, rhs, true, NULL_TREE, false,
3064 gimple_assign_set_rhs_code (init_stmt, TREE_CODE (rhs));
3065 gimple_assign_set_rhs1 (init_stmt, rhs);
3067 gsi_insert_after (&si, init_stmt, GSI_NEW_STMT);
3068 gimple_regimplify_operands (init_stmt, &si);
3070 if (!is_gimple_debug (init_stmt))
3072 tree def = gimple_assign_lhs (init_stmt);
3073 insert_init_debug_bind (id, bb, def, def, init_stmt);
3078 /* Initialize parameter P with VALUE. If needed, produce init statement
3079 at the end of BB. When BB is NULL, we return init statement to be
3082 setup_one_parameter (copy_body_data *id, tree p, tree value, tree fn,
3083 basic_block bb, tree *vars)
3085 gimple *init_stmt = NULL;
3088 tree def = (gimple_in_ssa_p (cfun)
3089 ? ssa_default_def (id->src_cfun, p) : NULL);
3092 && value != error_mark_node
3093 && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
3095 /* If we can match up types by promotion/demotion do so. */
3096 if (fold_convertible_p (TREE_TYPE (p), value))
3097 rhs = fold_convert (TREE_TYPE (p), value);
3100 /* ??? For valid programs we should not end up here.
3101 Still if we end up with truly mismatched types here, fall back
3102 to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
3103 GIMPLE to the following passes. */
3104 if (!is_gimple_reg_type (TREE_TYPE (value))
3105 || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
3106 rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
3108 rhs = build_zero_cst (TREE_TYPE (p));
3112 /* Make an equivalent VAR_DECL. Note that we must NOT remap the type
3113 here since the type of this decl must be visible to the calling
3115 var = copy_decl_to_var (p, id);
3117 /* Declare this new variable. */
3118 DECL_CHAIN (var) = *vars;
3121 /* Make gimplifier happy about this variable. */
3122 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3124 /* If the parameter is never assigned to, has no SSA_NAMEs created,
3125 we would not need to create a new variable here at all, if it
3126 weren't for debug info. Still, we can just use the argument
3128 if (TREE_READONLY (p)
3129 && !TREE_ADDRESSABLE (p)
3130 && value && !TREE_SIDE_EFFECTS (value)
3133 /* We may produce non-gimple trees by adding NOPs or introduce
3134 invalid sharing when operand is not really constant.
3135 It is not big deal to prohibit constant propagation here as
3136 we will constant propagate in DOM1 pass anyway. */
3137 if (is_gimple_min_invariant (value)
3138 && useless_type_conversion_p (TREE_TYPE (p),
3140 /* We have to be very careful about ADDR_EXPR. Make sure
3141 the base variable isn't a local variable of the inlined
3142 function, e.g., when doing recursive inlining, direct or
3143 mutually-recursive or whatever, which is why we don't
3144 just test whether fn == current_function_decl. */
3145 && ! self_inlining_addr_expr (value, fn))
3147 insert_decl_map (id, p, value);
3148 insert_debug_decl_map (id, p, var);
3149 return insert_init_debug_bind (id, bb, var, value, NULL);
3153 /* Register the VAR_DECL as the equivalent for the PARM_DECL;
3154 that way, when the PARM_DECL is encountered, it will be
3155 automatically replaced by the VAR_DECL. */
3156 insert_decl_map (id, p, var);
3158 /* Even if P was TREE_READONLY, the new VAR should not be.
3159 In the original code, we would have constructed a
3160 temporary, and then the function body would have never
3161 changed the value of P. However, now, we will be
3162 constructing VAR directly. The constructor body may
3163 change its value multiple times as it is being
3164 constructed. Therefore, it must not be TREE_READONLY;
3165 the back-end assumes that TREE_READONLY variable is
3166 assigned to only once. */
3167 if (TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (p)))
3168 TREE_READONLY (var) = 0;
3170 /* If there is no setup required and we are in SSA, take the easy route
3171 replacing all SSA names representing the function parameter by the
3172 SSA name passed to function.
3174 We need to construct map for the variable anyway as it might be used
3175 in different SSA names when parameter is set in function.
3177 Do replacement at -O0 for const arguments replaced by constant.
3178 This is important for builtin_constant_p and other construct requiring
3179 constant argument to be visible in inlined function body. */
3180 if (gimple_in_ssa_p (cfun) && rhs && def && is_gimple_reg (p)
3182 || (TREE_READONLY (p)
3183 && is_gimple_min_invariant (rhs)))
3184 && (TREE_CODE (rhs) == SSA_NAME
3185 || is_gimple_min_invariant (rhs))
3186 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (def))
3188 insert_decl_map (id, def, rhs);
3189 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3192 /* If the value of argument is never used, don't care about initializing
3194 if (optimize && gimple_in_ssa_p (cfun) && !def && is_gimple_reg (p))
3196 gcc_assert (!value || !TREE_SIDE_EFFECTS (value));
3197 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3200 /* Initialize this VAR_DECL from the equivalent argument. Convert
3201 the argument to the proper type in case it was promoted. */
3204 if (rhs == error_mark_node)
3206 insert_decl_map (id, p, var);
3207 return insert_init_debug_bind (id, bb, var, rhs, NULL);
3210 STRIP_USELESS_TYPE_CONVERSION (rhs);
3212 /* If we are in SSA form properly remap the default definition
3213 or assign to a dummy SSA name if the parameter is unused and
3214 we are not optimizing. */
3215 if (gimple_in_ssa_p (cfun) && is_gimple_reg (p))
3219 def = remap_ssa_name (def, id);
3220 init_stmt = gimple_build_assign (def, rhs);
3221 SSA_NAME_IS_DEFAULT_DEF (def) = 0;
3222 set_ssa_default_def (cfun, var, NULL);
3226 def = make_ssa_name (var);
3227 init_stmt = gimple_build_assign (def, rhs);
3231 init_stmt = gimple_build_assign (var, rhs);
3233 if (bb && init_stmt)
3234 insert_init_stmt (id, bb, init_stmt);
3239 /* Generate code to initialize the parameters of the function at the
3240 top of the stack in ID from the GIMPLE_CALL STMT. */
3243 initialize_inlined_parameters (copy_body_data *id, gimple *stmt,
3244 tree fn, basic_block bb)
3249 tree vars = NULL_TREE;
3250 tree static_chain = gimple_call_chain (stmt);
3252 /* Figure out what the parameters are. */
3253 parms = DECL_ARGUMENTS (fn);
3255 /* Loop through the parameter declarations, replacing each with an
3256 equivalent VAR_DECL, appropriately initialized. */
3257 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3260 val = i < gimple_call_num_args (stmt) ? gimple_call_arg (stmt, i) : NULL;
3261 setup_one_parameter (id, p, val, fn, bb, &vars);
3263 /* After remapping parameters remap their types. This has to be done
3264 in a second loop over all parameters to appropriately remap
3265 variable sized arrays when the size is specified in a
3266 parameter following the array. */
3267 for (p = parms, i = 0; p; p = DECL_CHAIN (p), i++)
3269 tree *varp = id->decl_map->get (p);
3271 && TREE_CODE (*varp) == VAR_DECL)
3273 tree def = (gimple_in_ssa_p (cfun) && is_gimple_reg (p)
3274 ? ssa_default_def (id->src_cfun, p) : NULL);
3276 TREE_TYPE (var) = remap_type (TREE_TYPE (var), id);
3277 /* Also remap the default definition if it was remapped
3278 to the default definition of the parameter replacement
3279 by the parameter setup. */
3282 tree *defp = id->decl_map->get (def);
3284 && TREE_CODE (*defp) == SSA_NAME
3285 && SSA_NAME_VAR (*defp) == var)
3286 TREE_TYPE (*defp) = TREE_TYPE (var);
3291 /* Initialize the static chain. */
3292 p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
3293 gcc_assert (fn != current_function_decl);
3296 /* No static chain? Seems like a bug in tree-nested.c. */
3297 gcc_assert (static_chain);
3299 setup_one_parameter (id, p, static_chain, fn, bb, &vars);
3302 declare_inline_vars (id->block, vars);
3306 /* Declare a return variable to replace the RESULT_DECL for the
3307 function we are calling. An appropriate DECL_STMT is returned.
3308 The USE_STMT is filled to contain a use of the declaration to
3309 indicate the return value of the function.
3311 RETURN_SLOT, if non-null is place where to store the result. It
3312 is set only for CALL_EXPR_RETURN_SLOT_OPT. MODIFY_DEST, if non-null,
3313 was the LHS of the MODIFY_EXPR to which this call is the RHS.
3315 RETURN_BOUNDS holds a destination for returned bounds.
3317 The return value is a (possibly null) value that holds the result
3318 as seen by the caller. */
3321 declare_return_variable (copy_body_data *id, tree return_slot, tree modify_dest,
3322 tree return_bounds, basic_block entry_bb)
3324 tree callee = id->src_fn;
3325 tree result = DECL_RESULT (callee);
3326 tree callee_type = TREE_TYPE (result);
3330 /* Handle type-mismatches in the function declaration return type
3331 vs. the call expression. */
3333 caller_type = TREE_TYPE (modify_dest);
3335 caller_type = TREE_TYPE (TREE_TYPE (callee));
3337 /* We don't need to do anything for functions that don't return anything. */
3338 if (VOID_TYPE_P (callee_type))
3341 /* If there was a return slot, then the return value is the
3342 dereferenced address of that object. */
3345 /* The front end shouldn't have used both return_slot and
3346 a modify expression. */
3347 gcc_assert (!modify_dest);
3348 if (DECL_BY_REFERENCE (result))
3350 tree return_slot_addr = build_fold_addr_expr (return_slot);
3351 STRIP_USELESS_TYPE_CONVERSION (return_slot_addr);
3353 /* We are going to construct *&return_slot and we can't do that
3354 for variables believed to be not addressable.
3356 FIXME: This check possibly can match, because values returned
3357 via return slot optimization are not believed to have address
3358 taken by alias analysis. */
3359 gcc_assert (TREE_CODE (return_slot) != SSA_NAME);
3360 var = return_slot_addr;
3365 gcc_assert (TREE_CODE (var) != SSA_NAME);
3366 if (TREE_ADDRESSABLE (result))
3367 mark_addressable (var);
3369 if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3370 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3371 && !DECL_GIMPLE_REG_P (result)
3373 DECL_GIMPLE_REG_P (var) = 0;
3378 /* All types requiring non-trivial constructors should have been handled. */
3379 gcc_assert (!TREE_ADDRESSABLE (callee_type));
3381 /* Attempt to avoid creating a new temporary variable. */
3383 && TREE_CODE (modify_dest) != SSA_NAME)
3385 bool use_it = false;
3387 /* We can't use MODIFY_DEST if there's type promotion involved. */
3388 if (!useless_type_conversion_p (callee_type, caller_type))
3391 /* ??? If we're assigning to a variable sized type, then we must
3392 reuse the destination variable, because we've no good way to
3393 create variable sized temporaries at this point. */
3394 else if (TREE_CODE (TYPE_SIZE_UNIT (caller_type)) != INTEGER_CST)
3397 /* If the callee cannot possibly modify MODIFY_DEST, then we can
3398 reuse it as the result of the call directly. Don't do this if
3399 it would promote MODIFY_DEST to addressable. */
3400 else if (TREE_ADDRESSABLE (result))
3404 tree base_m = get_base_address (modify_dest);
3406 /* If the base isn't a decl, then it's a pointer, and we don't
3407 know where that's going to go. */
3408 if (!DECL_P (base_m))
3410 else if (is_global_var (base_m))
3412 else if ((TREE_CODE (TREE_TYPE (result)) == COMPLEX_TYPE
3413 || TREE_CODE (TREE_TYPE (result)) == VECTOR_TYPE)
3414 && !DECL_GIMPLE_REG_P (result)
3415 && DECL_GIMPLE_REG_P (base_m))
3417 else if (!TREE_ADDRESSABLE (base_m))
3429 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (callee_type)) == INTEGER_CST);
3431 var = copy_result_decl_to_var (result, id);
3432 DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
3434 /* Do not have the rest of GCC warn about this variable as it should
3435 not be visible to the user. */
3436 TREE_NO_WARNING (var) = 1;
3438 declare_inline_vars (id->block, var);
3440 /* Build the use expr. If the return type of the function was
3441 promoted, convert it back to the expected type. */
3443 if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
3445 /* If we can match up types by promotion/demotion do so. */
3446 if (fold_convertible_p (caller_type, var))
3447 use = fold_convert (caller_type, var);
3450 /* ??? For valid programs we should not end up here.
3451 Still if we end up with truly mismatched types here, fall back
3452 to using a MEM_REF to not leak invalid GIMPLE to the following
3454 /* Prevent var from being written into SSA form. */
3455 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
3456 || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
3457 DECL_GIMPLE_REG_P (var) = false;
3458 else if (is_gimple_reg_type (TREE_TYPE (var)))
3459 TREE_ADDRESSABLE (var) = true;
3460 use = fold_build2 (MEM_REF, caller_type,
3461 build_fold_addr_expr (var),
3462 build_int_cst (ptr_type_node, 0));
3466 STRIP_USELESS_TYPE_CONVERSION (use);
3468 if (DECL_BY_REFERENCE (result))
3470 TREE_ADDRESSABLE (var) = 1;
3471 var = build_fold_addr_expr (var);
3475 /* Register the VAR_DECL as the equivalent for the RESULT_DECL; that
3476 way, when the RESULT_DECL is encountered, it will be
3477 automatically replaced by the VAR_DECL.
3479 When returning by reference, ensure that RESULT_DECL remaps to
3481 if (DECL_BY_REFERENCE (result)
3482 && !is_gimple_val (var))
3484 tree temp = create_tmp_var (TREE_TYPE (result), "retvalptr");
3485 insert_decl_map (id, result, temp);
3486 /* When RESULT_DECL is in SSA form, we need to remap and initialize
3487 it's default_def SSA_NAME. */
3488 if (gimple_in_ssa_p (id->src_cfun)
3489 && is_gimple_reg (result))
3491 temp = make_ssa_name (temp);
3492 insert_decl_map (id, ssa_default_def (id->src_cfun, result), temp);
3494 insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
3497 insert_decl_map (id, result, var);
3499 /* Remember this so we can ignore it in remap_decls. */
3502 /* If returned bounds are used, then make var for them. */
3505 tree bndtemp = create_tmp_var (pointer_bounds_type_node, "retbnd");
3506 DECL_SEEN_IN_BIND_EXPR_P (bndtemp) = 1;
3507 TREE_NO_WARNING (bndtemp) = 1;
3508 declare_inline_vars (id->block, bndtemp);
3510 id->retbnd = bndtemp;
3511 insert_init_stmt (id, entry_bb,
3512 gimple_build_assign (bndtemp, chkp_get_zero_bounds_var ()));
3518 /* Determine if the function can be copied. If so return NULL. If
3519 not return a string describng the reason for failure. */
3522 copy_forbidden (struct function *fun)
3524 const char *reason = fun->cannot_be_copied_reason;
3526 /* Only examine the function once. */
3527 if (fun->cannot_be_copied_set)
3530 /* We cannot copy a function that receives a non-local goto
3531 because we cannot remap the destination label used in the
3532 function that is performing the non-local goto. */
3533 /* ??? Actually, this should be possible, if we work at it.
3534 No doubt there's just a handful of places that simply
3535 assume it doesn't happen and don't substitute properly. */
3536 if (fun->has_nonlocal_label)
3538 reason = G_("function %q+F can never be copied "
3539 "because it receives a non-local goto");
3543 if (fun->has_forced_label_in_static)
3545 reason = G_("function %q+F can never be copied because it saves "
3546 "address of local label in a static variable");
3551 fun->cannot_be_copied_reason = reason;
3552 fun->cannot_be_copied_set = true;
3557 static const char *inline_forbidden_reason;
3559 /* A callback for walk_gimple_seq to handle statements. Returns non-null
3560 iff a function can not be inlined. Also sets the reason why. */
3563 inline_forbidden_p_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3564 struct walk_stmt_info *wip)
3566 tree fn = (tree) wip->info;
3568 gimple *stmt = gsi_stmt (*gsi);
3570 switch (gimple_code (stmt))
3573 /* Refuse to inline alloca call unless user explicitly forced so as
3574 this may change program's memory overhead drastically when the
3575 function using alloca is called in loop. In GCC present in
3576 SPEC2000 inlining into schedule_block cause it to require 2GB of
3577 RAM instead of 256MB. Don't do so for alloca calls emitted for
3578 VLA objects as those can't cause unbounded growth (they're always
3579 wrapped inside stack_save/stack_restore regions. */
3580 if (gimple_alloca_call_p (stmt)
3581 && !gimple_call_alloca_for_var_p (as_a <gcall *> (stmt))
3582 && !lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
3584 inline_forbidden_reason
3585 = G_("function %q+F can never be inlined because it uses "
3586 "alloca (override using the always_inline attribute)");
3587 *handled_ops_p = true;
3591 t = gimple_call_fndecl (stmt);
3595 /* We cannot inline functions that call setjmp. */
3596 if (setjmp_call_p (t))
3598 inline_forbidden_reason
3599 = G_("function %q+F can never be inlined because it uses setjmp");
3600 *handled_ops_p = true;
3604 if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
3605 switch (DECL_FUNCTION_CODE (t))
3607 /* We cannot inline functions that take a variable number of
3609 case BUILT_IN_VA_START:
3610 case BUILT_IN_NEXT_ARG:
3611 case BUILT_IN_VA_END:
3612 inline_forbidden_reason
3613 = G_("function %q+F can never be inlined because it "
3614 "uses variable argument lists");
3615 *handled_ops_p = true;
3618 case BUILT_IN_LONGJMP:
3619 /* We can't inline functions that call __builtin_longjmp at
3620 all. The non-local goto machinery really requires the
3621 destination be in a different function. If we allow the
3622 function calling __builtin_longjmp to be inlined into the
3623 function calling __builtin_setjmp, Things will Go Awry. */
3624 inline_forbidden_reason
3625 = G_("function %q+F can never be inlined because "
3626 "it uses setjmp-longjmp exception handling");
3627 *handled_ops_p = true;
3630 case BUILT_IN_NONLOCAL_GOTO:
3632 inline_forbidden_reason
3633 = G_("function %q+F can never be inlined because "
3634 "it uses non-local goto");
3635 *handled_ops_p = true;
3638 case BUILT_IN_RETURN:
3639 case BUILT_IN_APPLY_ARGS:
3640 /* If a __builtin_apply_args caller would be inlined,
3641 it would be saving arguments of the function it has
3642 been inlined into. Similarly __builtin_return would
3643 return from the function the inline has been inlined into. */
3644 inline_forbidden_reason
3645 = G_("function %q+F can never be inlined because "
3646 "it uses __builtin_return or __builtin_apply_args");
3647 *handled_ops_p = true;
3656 t = gimple_goto_dest (stmt);
3658 /* We will not inline a function which uses computed goto. The
3659 addresses of its local labels, which may be tucked into
3660 global storage, are of course not constant across
3661 instantiations, which causes unexpected behavior. */
3662 if (TREE_CODE (t) != LABEL_DECL)
3664 inline_forbidden_reason
3665 = G_("function %q+F can never be inlined "
3666 "because it contains a computed goto");
3667 *handled_ops_p = true;
3676 *handled_ops_p = false;
3680 /* Return true if FNDECL is a function that cannot be inlined into
3684 inline_forbidden_p (tree fndecl)
3686 struct function *fun = DECL_STRUCT_FUNCTION (fndecl);
3687 struct walk_stmt_info wi;
3689 bool forbidden_p = false;
3691 /* First check for shared reasons not to copy the code. */
3692 inline_forbidden_reason = copy_forbidden (fun);
3693 if (inline_forbidden_reason != NULL)
3696 /* Next, walk the statements of the function looking for
3697 constraucts we can't handle, or are non-optimal for inlining. */
3698 hash_set<tree> visited_nodes;
3699 memset (&wi, 0, sizeof (wi));
3700 wi.info = (void *) fndecl;
3701 wi.pset = &visited_nodes;
3703 FOR_EACH_BB_FN (bb, fun)
3706 gimple_seq seq = bb_seq (bb);
3707 ret = walk_gimple_seq (seq, inline_forbidden_p_stmt, NULL, &wi);
3708 forbidden_p = (ret != NULL);
3716 /* Return false if the function FNDECL cannot be inlined on account of its
3717 attributes, true otherwise. */
3719 function_attribute_inlinable_p (const_tree fndecl)
3721 if (targetm.attribute_table)
3725 for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
3727 const_tree name = TREE_PURPOSE (a);
3730 for (i = 0; targetm.attribute_table[i].name != NULL; i++)
3731 if (is_attribute_p (targetm.attribute_table[i].name, name))
3732 return targetm.function_attribute_inlinable_p (fndecl);
3739 /* Returns nonzero if FN is a function that does not have any
3740 fundamental inline blocking properties. */
3743 tree_inlinable_function_p (tree fn)
3745 bool inlinable = true;
3749 /* If we've already decided this function shouldn't be inlined,
3750 there's no need to check again. */
3751 if (DECL_UNINLINABLE (fn))
3754 /* We only warn for functions declared `inline' by the user. */
3755 do_warning = (warn_inline
3756 && DECL_DECLARED_INLINE_P (fn)
3757 && !DECL_NO_INLINE_WARNING_P (fn)
3758 && !DECL_IN_SYSTEM_HEADER (fn));
3760 always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
3763 && always_inline == NULL)
3766 warning (OPT_Winline, "function %q+F can never be inlined because it "
3767 "is suppressed using -fno-inline", fn);
3771 else if (!function_attribute_inlinable_p (fn))
3774 warning (OPT_Winline, "function %q+F can never be inlined because it "
3775 "uses attributes conflicting with inlining", fn);
3779 else if (inline_forbidden_p (fn))
3781 /* See if we should warn about uninlinable functions. Previously,
3782 some of these warnings would be issued while trying to expand
3783 the function inline, but that would cause multiple warnings
3784 about functions that would for example call alloca. But since
3785 this a property of the function, just one warning is enough.
3786 As a bonus we can now give more details about the reason why a
3787 function is not inlinable. */
3789 error (inline_forbidden_reason, fn);
3790 else if (do_warning)
3791 warning (OPT_Winline, inline_forbidden_reason, fn);
3796 /* Squirrel away the result so that we don't have to check again. */
3797 DECL_UNINLINABLE (fn) = !inlinable;
3802 /* Estimate the cost of a memory move of type TYPE. Use machine dependent
3803 word size and take possible memcpy call into account and return
3804 cost based on whether optimizing for size or speed according to SPEED_P. */
3807 estimate_move_cost (tree type, bool ARG_UNUSED (speed_p))
3811 gcc_assert (!VOID_TYPE_P (type));
3813 if (TREE_CODE (type) == VECTOR_TYPE)
3815 machine_mode inner = TYPE_MODE (TREE_TYPE (type));
3817 = targetm.vectorize.preferred_simd_mode (inner);
3818 int simd_mode_size = GET_MODE_SIZE (simd);
3819 return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
3823 size = int_size_in_bytes (type);
3825 if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (speed_p))
3826 /* Cost of a memcpy call, 3 arguments and the call. */
3829 return ((size + MOVE_MAX_PIECES - 1) / MOVE_MAX_PIECES);
3832 /* Returns cost of operation CODE, according to WEIGHTS */
3835 estimate_operator_cost (enum tree_code code, eni_weights *weights,
3836 tree op1 ATTRIBUTE_UNUSED, tree op2)
3840 /* These are "free" conversions, or their presumed cost
3841 is folded into other operations. */
3846 case VIEW_CONVERT_EXPR:
3849 /* Assign cost of 1 to usual operations.
3850 ??? We may consider mapping RTL costs to this. */
3856 case POINTER_PLUS_EXPR:
3859 case MULT_HIGHPART_EXPR:
3862 case ADDR_SPACE_CONVERT_EXPR:
3863 case FIXED_CONVERT_EXPR:
3864 case FIX_TRUNC_EXPR:
3882 case TRUTH_ANDIF_EXPR:
3883 case TRUTH_ORIF_EXPR:
3884 case TRUTH_AND_EXPR:
3886 case TRUTH_XOR_EXPR:
3887 case TRUTH_NOT_EXPR:
3896 case UNORDERED_EXPR:
3907 case PREDECREMENT_EXPR:
3908 case PREINCREMENT_EXPR:
3909 case POSTDECREMENT_EXPR:
3910 case POSTINCREMENT_EXPR:
3912 case REALIGN_LOAD_EXPR:
3914 case REDUC_MAX_EXPR:
3915 case REDUC_MIN_EXPR:
3916 case REDUC_PLUS_EXPR:
3917 case WIDEN_SUM_EXPR:
3918 case WIDEN_MULT_EXPR:
3921 case WIDEN_MULT_PLUS_EXPR:
3922 case WIDEN_MULT_MINUS_EXPR:
3923 case WIDEN_LSHIFT_EXPR:
3925 case VEC_WIDEN_MULT_HI_EXPR:
3926 case VEC_WIDEN_MULT_LO_EXPR:
3927 case VEC_WIDEN_MULT_EVEN_EXPR:
3928 case VEC_WIDEN_MULT_ODD_EXPR:
3929 case VEC_UNPACK_HI_EXPR:
3930 case VEC_UNPACK_LO_EXPR:
3931 case VEC_UNPACK_FLOAT_HI_EXPR:
3932 case VEC_UNPACK_FLOAT_LO_EXPR:
3933 case VEC_PACK_TRUNC_EXPR:
3934 case VEC_PACK_SAT_EXPR:
3935 case VEC_PACK_FIX_TRUNC_EXPR:
3936 case VEC_WIDEN_LSHIFT_HI_EXPR:
3937 case VEC_WIDEN_LSHIFT_LO_EXPR:
3941 /* Few special cases of expensive operations. This is useful
3942 to avoid inlining on functions having too many of these. */
3943 case TRUNC_DIV_EXPR:
3945 case FLOOR_DIV_EXPR:
3946 case ROUND_DIV_EXPR:
3947 case EXACT_DIV_EXPR:
3948 case TRUNC_MOD_EXPR:
3950 case FLOOR_MOD_EXPR:
3951 case ROUND_MOD_EXPR:
3953 if (TREE_CODE (op2) != INTEGER_CST)
3954 return weights->div_mod_cost;
3958 /* We expect a copy assignment with no operator. */
3959 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_SINGLE_RHS);
3965 /* Estimate number of instructions that will be created by expanding
3966 the statements in the statement sequence STMTS.
3967 WEIGHTS contains weights attributed to various constructs. */
3970 estimate_num_insns_seq (gimple_seq stmts, eni_weights *weights)
3973 gimple_stmt_iterator gsi;
3976 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
3977 cost += estimate_num_insns (gsi_stmt (gsi), weights);
3983 /* Estimate number of instructions that will be created by expanding STMT.
3984 WEIGHTS contains weights attributed to various constructs. */
3987 estimate_num_insns (gimple *stmt, eni_weights *weights)
3990 enum gimple_code code = gimple_code (stmt);
3997 /* Try to estimate the cost of assignments. We have three cases to
3999 1) Simple assignments to registers;
4000 2) Stores to things that must live in memory. This includes
4001 "normal" stores to scalars, but also assignments of large
4002 structures, or constructors of big arrays;
4004 Let us look at the first two cases, assuming we have "a = b + C":
4005 <GIMPLE_ASSIGN <var_decl "a">
4006 <plus_expr <var_decl "b"> <constant C>>
4007 If "a" is a GIMPLE register, the assignment to it is free on almost
4008 any target, because "a" usually ends up in a real register. Hence
4009 the only cost of this expression comes from the PLUS_EXPR, and we
4010 can ignore the GIMPLE_ASSIGN.
4011 If "a" is not a GIMPLE register, the assignment to "a" will most
4012 likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
4013 of moving something into "a", which we compute using the function
4014 estimate_move_cost. */
4015 if (gimple_clobber_p (stmt))
4016 return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
4018 lhs = gimple_assign_lhs (stmt);
4019 rhs = gimple_assign_rhs1 (stmt);
4023 /* Account for the cost of moving to / from memory. */
4024 if (gimple_store_p (stmt))
4025 cost += estimate_move_cost (TREE_TYPE (lhs), weights->time_based);
4026 if (gimple_assign_load_p (stmt))
4027 cost += estimate_move_cost (TREE_TYPE (rhs), weights->time_based);
4029 cost += estimate_operator_cost (gimple_assign_rhs_code (stmt), weights,
4030 gimple_assign_rhs1 (stmt),
4031 get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
4032 == GIMPLE_BINARY_RHS
4033 ? gimple_assign_rhs2 (stmt) : NULL);
4037 cost = 1 + estimate_operator_cost (gimple_cond_code (stmt), weights,
4038 gimple_op (stmt, 0),
4039 gimple_op (stmt, 1));
4044 gswitch *switch_stmt = as_a <gswitch *> (stmt);
4045 /* Take into account cost of the switch + guess 2 conditional jumps for
4048 TODO: once the switch expansion logic is sufficiently separated, we can
4049 do better job on estimating cost of the switch. */
4050 if (weights->time_based)
4051 cost = floor_log2 (gimple_switch_num_labels (switch_stmt)) * 2;
4053 cost = gimple_switch_num_labels (switch_stmt) * 2;
4061 if (gimple_call_internal_p (stmt))
4063 else if ((decl = gimple_call_fndecl (stmt))
4064 && DECL_BUILT_IN (decl))
4066 /* Do not special case builtins where we see the body.
4067 This just confuse inliner. */
4068 struct cgraph_node *node;
4069 if (!(node = cgraph_node::get (decl))
4070 || node->definition)
4072 /* For buitins that are likely expanded to nothing or
4073 inlined do not account operand costs. */
4074 else if (is_simple_builtin (decl))
4076 else if (is_inexpensive_builtin (decl))
4077 return weights->target_builtin_call_cost;
4078 else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
4080 /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
4081 specialize the cheap expansion we do here.
4082 ??? This asks for a more general solution. */
4083 switch (DECL_FUNCTION_CODE (decl))
4088 if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
4090 (&TREE_REAL_CST (gimple_call_arg (stmt, 1)),
4092 return estimate_operator_cost
4093 (MULT_EXPR, weights, gimple_call_arg (stmt, 0),
4094 gimple_call_arg (stmt, 0));
4103 cost = decl ? weights->call_cost : weights->indirect_call_cost;
4104 if (gimple_call_lhs (stmt))
4105 cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)),
4106 weights->time_based);
4107 for (i = 0; i < gimple_call_num_args (stmt); i++)
4109 tree arg = gimple_call_arg (stmt, i);
4110 cost += estimate_move_cost (TREE_TYPE (arg),
4111 weights->time_based);
4117 return weights->return_cost;
4123 case GIMPLE_PREDICT:
4129 int count = asm_str_count (gimple_asm_string (as_a <gasm *> (stmt)));
4130 /* 1000 means infinity. This avoids overflows later
4131 with very long asm statements. */
4138 /* This is either going to be an external function call with one
4139 argument, or two register copy statements plus a goto. */
4142 case GIMPLE_EH_DISPATCH:
4143 /* ??? This is going to turn into a switch statement. Ideally
4144 we'd have a look at the eh region and estimate the number of
4149 return estimate_num_insns_seq (
4150 gimple_bind_body (as_a <gbind *> (stmt)),
4153 case GIMPLE_EH_FILTER:
4154 return estimate_num_insns_seq (gimple_eh_filter_failure (stmt), weights);
4157 return estimate_num_insns_seq (gimple_catch_handler (
4158 as_a <gcatch *> (stmt)),
4162 return (estimate_num_insns_seq (gimple_try_eval (stmt), weights)
4163 + estimate_num_insns_seq (gimple_try_cleanup (stmt), weights));
4165 /* OMP directives are generally very expensive. */
4167 case GIMPLE_OMP_RETURN:
4168 case GIMPLE_OMP_SECTIONS_SWITCH:
4169 case GIMPLE_OMP_ATOMIC_STORE:
4170 case GIMPLE_OMP_CONTINUE:
4171 /* ...except these, which are cheap. */
4174 case GIMPLE_OMP_ATOMIC_LOAD:
4175 return weights->omp_cost;
4177 case GIMPLE_OMP_FOR:
4178 return (weights->omp_cost
4179 + estimate_num_insns_seq (gimple_omp_body (stmt), weights)
4180 + estimate_num_insns_seq (gimple_omp_for_pre_body (stmt), weights));
4182 case GIMPLE_OMP_PARALLEL:
4183 case GIMPLE_OMP_TASK:
4184 case GIMPLE_OMP_CRITICAL:
4185 case GIMPLE_OMP_MASTER:
4186 case GIMPLE_OMP_TASKGROUP:
4187 case GIMPLE_OMP_ORDERED:
4188 case GIMPLE_OMP_SECTION:
4189 case GIMPLE_OMP_SECTIONS:
4190 case GIMPLE_OMP_SINGLE:
4191 case GIMPLE_OMP_TARGET:
4192 case GIMPLE_OMP_TEAMS:
4193 return (weights->omp_cost
4194 + estimate_num_insns_seq (gimple_omp_body (stmt), weights));
4196 case GIMPLE_TRANSACTION:
4197 return (weights->tm_cost
4198 + estimate_num_insns_seq (gimple_transaction_body (
4199 as_a <gtransaction *> (stmt)),
4209 /* Estimate number of instructions that will be created by expanding
4210 function FNDECL. WEIGHTS contains weights attributed to various
4214 estimate_num_insns_fn (tree fndecl, eni_weights *weights)
4216 struct function *my_function = DECL_STRUCT_FUNCTION (fndecl);
4217 gimple_stmt_iterator bsi;
4221 gcc_assert (my_function && my_function->cfg);
4222 FOR_EACH_BB_FN (bb, my_function)
4224 for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
4225 n += estimate_num_insns (gsi_stmt (bsi), weights);
4232 /* Initializes weights used by estimate_num_insns. */
4235 init_inline_once (void)
4237 eni_size_weights.call_cost = 1;
4238 eni_size_weights.indirect_call_cost = 3;
4239 eni_size_weights.target_builtin_call_cost = 1;
4240 eni_size_weights.div_mod_cost = 1;
4241 eni_size_weights.omp_cost = 40;
4242 eni_size_weights.tm_cost = 10;
4243 eni_size_weights.time_based = false;
4244 eni_size_weights.return_cost = 1;
4246 /* Estimating time for call is difficult, since we have no idea what the
4247 called function does. In the current uses of eni_time_weights,
4248 underestimating the cost does less harm than overestimating it, so
4249 we choose a rather small value here. */
4250 eni_time_weights.call_cost = 10;
4251 eni_time_weights.indirect_call_cost = 15;
4252 eni_time_weights.target_builtin_call_cost = 1;
4253 eni_time_weights.div_mod_cost = 10;
4254 eni_time_weights.omp_cost = 40;
4255 eni_time_weights.tm_cost = 40;
4256 eni_time_weights.time_based = true;
4257 eni_time_weights.return_cost = 2;
4261 /* Install new lexical TREE_BLOCK underneath 'current_block'. */
4264 prepend_lexical_block (tree current_block, tree new_block)
4266 BLOCK_CHAIN (new_block) = BLOCK_SUBBLOCKS (current_block);
4267 BLOCK_SUBBLOCKS (current_block) = new_block;
4268 BLOCK_SUPERCONTEXT (new_block) = current_block;
4271 /* Add local variables from CALLEE to CALLER. */
4274 add_local_variables (struct function *callee, struct function *caller,
4280 FOR_EACH_LOCAL_DECL (callee, ix, var)
4281 if (!can_be_nonlocal (var, id))
4283 tree new_var = remap_decl (var, id);
4285 /* Remap debug-expressions. */
4286 if (TREE_CODE (new_var) == VAR_DECL
4287 && DECL_HAS_DEBUG_EXPR_P (var)
4290 tree tem = DECL_DEBUG_EXPR (var);
4291 bool old_regimplify = id->regimplify;
4292 id->remapping_type_depth++;
4293 walk_tree (&tem, copy_tree_body_r, id, NULL);
4294 id->remapping_type_depth--;
4295 id->regimplify = old_regimplify;
4296 SET_DECL_DEBUG_EXPR (new_var, tem);
4297 DECL_HAS_DEBUG_EXPR_P (new_var) = 1;
4299 add_local_decl (caller, new_var);
4303 /* Add to BINDINGS a debug stmt resetting SRCVAR if inlining might
4304 have brought in or introduced any debug stmts for SRCVAR. */
4307 reset_debug_binding (copy_body_data *id, tree srcvar, gimple_seq *bindings)
4309 tree *remappedvarp = id->decl_map->get (srcvar);
4314 if (TREE_CODE (*remappedvarp) != VAR_DECL)
4317 if (*remappedvarp == id->retvar || *remappedvarp == id->retbnd)
4320 tree tvar = target_for_debug_bind (*remappedvarp);
4324 gdebug *stmt = gimple_build_debug_bind (tvar, NULL_TREE,
4326 gimple_seq_add_stmt (bindings, stmt);
4329 /* For each inlined variable for which we may have debug bind stmts,
4330 add before GSI a final debug stmt resetting it, marking the end of
4331 its life, so that var-tracking knows it doesn't have to compute
4332 further locations for it. */
4335 reset_debug_bindings (copy_body_data *id, gimple_stmt_iterator gsi)
4339 gimple_seq bindings = NULL;
4341 if (!gimple_in_ssa_p (id->src_cfun))
4344 if (!opt_for_fn (id->dst_fn, flag_var_tracking_assignments))
4347 for (var = DECL_ARGUMENTS (id->src_fn);
4348 var; var = DECL_CHAIN (var))
4349 reset_debug_binding (id, var, &bindings);
4351 FOR_EACH_LOCAL_DECL (id->src_cfun, ix, var)
4352 reset_debug_binding (id, var, &bindings);
4354 gsi_insert_seq_before_without_update (&gsi, bindings, GSI_SAME_STMT);
4357 /* If STMT is a GIMPLE_CALL, replace it with its inline expansion. */
4360 expand_call_inline (basic_block bb, gimple *stmt, copy_body_data *id)
4364 hash_map<tree, tree> *dst;
4365 hash_map<tree, tree> *st = NULL;
4368 tree return_bounds = NULL;
4369 struct cgraph_edge *cg_edge;
4370 cgraph_inline_failed_t reason;
4371 basic_block return_block;
4373 gimple_stmt_iterator gsi, stmt_gsi;
4374 bool successfully_inlined = false;
4375 bool purge_dead_abnormal_edges;
4379 /* The gimplifier uses input_location in too many places, such as
4380 internal_get_tmp_var (). */
4381 location_t saved_location = input_location;
4382 input_location = gimple_location (stmt);
4384 /* From here on, we're only interested in CALL_EXPRs. */
4385 call_stmt = dyn_cast <gcall *> (stmt);
4389 cg_edge = id->dst_node->get_edge (stmt);
4390 gcc_checking_assert (cg_edge);
4391 /* First, see if we can figure out what function is being called.
4392 If we cannot, then there is no hope of inlining the function. */
4393 if (cg_edge->indirect_unknown_callee)
4395 fn = cg_edge->callee->decl;
4396 gcc_checking_assert (fn);
4398 /* If FN is a declaration of a function in a nested scope that was
4399 globally declared inline, we don't set its DECL_INITIAL.
4400 However, we can't blindly follow DECL_ABSTRACT_ORIGIN because the
4401 C++ front-end uses it for cdtors to refer to their internal
4402 declarations, that are not real functions. Fortunately those
4403 don't have trees to be saved, so we can tell by checking their
4405 if (!DECL_INITIAL (fn)
4406 && DECL_ABSTRACT_ORIGIN (fn)
4407 && gimple_has_body_p (DECL_ABSTRACT_ORIGIN (fn)))
4408 fn = DECL_ABSTRACT_ORIGIN (fn);
4410 /* Don't try to inline functions that are not well-suited to inlining. */
4411 if (cg_edge->inline_failed)
4413 reason = cg_edge->inline_failed;
4414 /* If this call was originally indirect, we do not want to emit any
4415 inlining related warnings or sorry messages because there are no
4416 guarantees regarding those. */
4417 if (cg_edge->indirect_inlining_edge)
4420 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
4421 /* For extern inline functions that get redefined we always
4422 silently ignored always_inline flag. Better behavior would
4423 be to be able to keep both bodies and use extern inline body
4424 for inlining, but we can't do that because frontends overwrite
4426 && !cg_edge->callee->local.redefined_extern_inline
4427 /* During early inline pass, report only when optimization is
4429 && (symtab->global_info_ready
4431 || cgraph_inline_failed_type (reason) == CIF_FINAL_ERROR)
4432 /* PR 20090218-1_0.c. Body can be provided by another module. */
4433 && (reason != CIF_BODY_NOT_AVAILABLE || !flag_generate_lto))
4435 error ("inlining failed in call to always_inline %q+F: %s", fn,
4436 cgraph_inline_failed_string (reason));
4437 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4438 inform (gimple_location (stmt), "called from here");
4439 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4440 inform (DECL_SOURCE_LOCATION (cfun->decl),
4441 "called from this function");
4443 else if (warn_inline
4444 && DECL_DECLARED_INLINE_P (fn)
4445 && !DECL_NO_INLINE_WARNING_P (fn)
4446 && !DECL_IN_SYSTEM_HEADER (fn)
4447 && reason != CIF_UNSPECIFIED
4448 && !lookup_attribute ("noinline", DECL_ATTRIBUTES (fn))
4449 /* Do not warn about not inlined recursive calls. */
4450 && !cg_edge->recursive_p ()
4451 /* Avoid warnings during early inline pass. */
4452 && symtab->global_info_ready)
4454 if (warning (OPT_Winline, "inlining failed in call to %q+F: %s",
4455 fn, _(cgraph_inline_failed_string (reason))))
4457 if (gimple_location (stmt) != UNKNOWN_LOCATION)
4458 inform (gimple_location (stmt), "called from here");
4459 else if (DECL_SOURCE_LOCATION (cfun->decl) != UNKNOWN_LOCATION)
4460 inform (DECL_SOURCE_LOCATION (cfun->decl),
4461 "called from this function");
4466 fn = cg_edge->callee->decl;
4467 cg_edge->callee->get_untransformed_body ();
4469 if (flag_checking && cg_edge->callee->decl != id->dst_node->decl)
4470 cg_edge->callee->verify ();
4472 /* We will be inlining this callee. */
4473 id->eh_lp_nr = lookup_stmt_eh_lp (stmt);
4474 id->assign_stmts.create (0);
4476 /* Update the callers EH personality. */
4477 if (DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl))
4478 DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
4479 = DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl);
4481 /* Split the block holding the GIMPLE_CALL. */
4482 e = split_block (bb, stmt);
4484 return_block = e->dest;
4487 /* split_block splits after the statement; work around this by
4488 moving the call into the second block manually. Not pretty,
4489 but seems easier than doing the CFG manipulation by hand
4490 when the GIMPLE_CALL is in the last statement of BB. */
4491 stmt_gsi = gsi_last_bb (bb);
4492 gsi_remove (&stmt_gsi, false);
4494 /* If the GIMPLE_CALL was in the last statement of BB, it may have
4495 been the source of abnormal edges. In this case, schedule
4496 the removal of dead abnormal edges. */
4497 gsi = gsi_start_bb (return_block);
4498 if (gsi_end_p (gsi))
4500 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
4501 purge_dead_abnormal_edges = true;
4505 gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
4506 purge_dead_abnormal_edges = false;
4509 stmt_gsi = gsi_start_bb (return_block);
4511 /* Build a block containing code to initialize the arguments, the
4512 actual inline expansion of the body, and a label for the return
4513 statements within the function to jump to. The type of the
4514 statement expression is the return type of the function call.
4515 ??? If the call does not have an associated block then we will
4516 remap all callee blocks to NULL, effectively dropping most of
4517 its debug information. This should only happen for calls to
4518 artificial decls inserted by the compiler itself. We need to
4519 either link the inlined blocks into the caller block tree or
4520 not refer to them in any way to not break GC for locations. */
4521 if (gimple_block (stmt))
4523 id->block = make_node (BLOCK);
4524 BLOCK_ABSTRACT_ORIGIN (id->block) = fn;
4525 BLOCK_SOURCE_LOCATION (id->block)
4526 = LOCATION_LOCUS (gimple_location (stmt));
4527 prepend_lexical_block (gimple_block (stmt), id->block);
4530 /* Local declarations will be replaced by their equivalents in this
4533 id->decl_map = new hash_map<tree, tree>;
4534 dst = id->debug_map;
4535 id->debug_map = NULL;
4537 /* Record the function we are about to inline. */
4539 id->src_node = cg_edge->callee;
4540 id->src_cfun = DECL_STRUCT_FUNCTION (fn);
4541 id->call_stmt = stmt;
4543 /* If the src function contains an IFN_VA_ARG, then so will the dst
4544 function after inlining. */
4545 if ((id->src_cfun->curr_properties & PROP_gimple_lva) == 0)
4547 struct function *dst_cfun = DECL_STRUCT_FUNCTION (id->dst_fn);
4548 dst_cfun->curr_properties &= ~PROP_gimple_lva;
4551 gcc_assert (!id->src_cfun->after_inlining);
4554 if (lookup_attribute ("cold", DECL_ATTRIBUTES (fn)))
4556 gimple_stmt_iterator si = gsi_last_bb (bb);
4557 gsi_insert_after (&si, gimple_build_predict (PRED_COLD_FUNCTION,
4561 initialize_inlined_parameters (id, stmt, fn, bb);
4563 if (DECL_INITIAL (fn))
4565 if (gimple_block (stmt))
4569 prepend_lexical_block (id->block,
4570 remap_blocks (DECL_INITIAL (fn), id));
4571 gcc_checking_assert (BLOCK_SUBBLOCKS (id->block)
4572 && (BLOCK_CHAIN (BLOCK_SUBBLOCKS (id->block))
4574 /* Move vars for PARM_DECLs from DECL_INITIAL block to id->block,
4575 otherwise for DWARF DW_TAG_formal_parameter will not be children of
4576 DW_TAG_inlined_subroutine, but of a DW_TAG_lexical_block
4577 under it. The parameters can be then evaluated in the debugger,
4578 but don't show in backtraces. */
4579 for (var = &BLOCK_VARS (BLOCK_SUBBLOCKS (id->block)); *var; )
4580 if (TREE_CODE (DECL_ORIGIN (*var)) == PARM_DECL)
4583 *var = TREE_CHAIN (v);
4584 TREE_CHAIN (v) = BLOCK_VARS (id->block);
4585 BLOCK_VARS (id->block) = v;
4588 var = &TREE_CHAIN (*var);
4591 remap_blocks_to_null (DECL_INITIAL (fn), id);
4594 /* Return statements in the function body will be replaced by jumps
4595 to the RET_LABEL. */
4596 gcc_assert (DECL_INITIAL (fn));
4597 gcc_assert (TREE_CODE (DECL_INITIAL (fn)) == BLOCK);
4599 /* Find the LHS to which the result of this call is assigned. */
4601 if (gimple_call_lhs (stmt))
4603 modify_dest = gimple_call_lhs (stmt);
4605 /* Remember where to copy returned bounds. */
4606 if (gimple_call_with_bounds_p (stmt)
4607 && TREE_CODE (modify_dest) == SSA_NAME)
4609 gcall *retbnd = chkp_retbnd_call_by_val (modify_dest);
4612 return_bounds = gimple_call_lhs (retbnd);
4613 /* If returned bounds are not used then just
4614 remove unused call. */
4617 gimple_stmt_iterator iter = gsi_for_stmt (retbnd);
4618 gsi_remove (&iter, true);
4623 /* The function which we are inlining might not return a value,
4624 in which case we should issue a warning that the function
4625 does not return a value. In that case the optimizers will
4626 see that the variable to which the value is assigned was not
4627 initialized. We do not want to issue a warning about that
4628 uninitialized variable. */
4629 if (DECL_P (modify_dest))
4630 TREE_NO_WARNING (modify_dest) = 1;
4632 if (gimple_call_return_slot_opt_p (call_stmt))
4634 return_slot = modify_dest;
4641 /* If we are inlining a call to the C++ operator new, we don't want
4642 to use type based alias analysis on the return value. Otherwise
4643 we may get confused if the compiler sees that the inlined new
4644 function returns a pointer which was just deleted. See bug
4646 if (DECL_IS_OPERATOR_NEW (fn))
4652 /* Declare the return variable for the function. */
4653 use_retvar = declare_return_variable (id, return_slot, modify_dest,
4656 /* Add local vars in this inlined callee to caller. */
4657 add_local_variables (id->src_cfun, cfun, id);
4659 if (dump_file && (dump_flags & TDF_DETAILS))
4661 fprintf (dump_file, "Inlining ");
4662 print_generic_expr (dump_file, id->src_fn, 0);
4663 fprintf (dump_file, " to ");
4664 print_generic_expr (dump_file, id->dst_fn, 0);
4665 fprintf (dump_file, " with frequency %i\n", cg_edge->frequency);
4668 /* This is it. Duplicate the callee body. Assume callee is
4669 pre-gimplified. Note that we must not alter the caller
4670 function in any way before this point, as this CALL_EXPR may be
4671 a self-referential call; if we're calling ourselves, we need to
4672 duplicate our body before altering anything. */
4673 copy_body (id, cg_edge->callee->count,
4674 GCOV_COMPUTE_SCALE (cg_edge->frequency, CGRAPH_FREQ_BASE),
4675 bb, return_block, NULL);
4677 reset_debug_bindings (id, stmt_gsi);
4679 /* Reset the escaped solution. */
4680 if (cfun->gimple_df)
4681 pt_solution_reset (&cfun->gimple_df->escaped);
4686 delete id->debug_map;
4687 id->debug_map = dst;
4689 delete id->decl_map;
4692 /* Unlink the calls virtual operands before replacing it. */
4693 unlink_stmt_vdef (stmt);
4694 if (gimple_vdef (stmt)
4695 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
4696 release_ssa_name (gimple_vdef (stmt));
4698 /* If the inlined function returns a result that we care about,
4699 substitute the GIMPLE_CALL with an assignment of the return
4700 variable to the LHS of the call. That is, if STMT was
4701 'a = foo (...)', substitute the call with 'a = USE_RETVAR'. */
4702 if (use_retvar && gimple_call_lhs (stmt))
4704 gimple *old_stmt = stmt;
4705 stmt = gimple_build_assign (gimple_call_lhs (stmt), use_retvar);
4706 gsi_replace (&stmt_gsi, stmt, false);
4707 maybe_clean_or_replace_eh_stmt (old_stmt, stmt);
4709 /* Copy bounds if we copy structure with bounds. */
4710 if (chkp_function_instrumented_p (id->dst_fn)
4711 && !BOUNDED_P (use_retvar)
4712 && chkp_type_has_pointer (TREE_TYPE (use_retvar)))
4713 id->assign_stmts.safe_push (stmt);
4717 /* Handle the case of inlining a function with no return
4718 statement, which causes the return value to become undefined. */
4719 if (gimple_call_lhs (stmt)
4720 && TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
4722 tree name = gimple_call_lhs (stmt);
4723 tree var = SSA_NAME_VAR (name);
4724 tree def = ssa_default_def (cfun, var);
4728 /* If the variable is used undefined, make this name
4729 undefined via a move. */
4730 stmt = gimple_build_assign (gimple_call_lhs (stmt), def);
4731 gsi_replace (&stmt_gsi, stmt, true);
4735 /* Otherwise make this variable undefined. */
4736 gsi_remove (&stmt_gsi, true);
4737 set_ssa_default_def (cfun, var, name);
4738 SSA_NAME_DEF_STMT (name) = gimple_build_nop ();
4742 gsi_remove (&stmt_gsi, true);
4745 /* Put returned bounds into the correct place if required. */
4748 gimple *old_stmt = SSA_NAME_DEF_STMT (return_bounds);
4749 gimple *new_stmt = gimple_build_assign (return_bounds, id->retbnd);
4750 gimple_stmt_iterator bnd_gsi = gsi_for_stmt (old_stmt);
4751 unlink_stmt_vdef (old_stmt);
4752 gsi_replace (&bnd_gsi, new_stmt, false);
4753 maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt);
4754 cgraph_update_edges_for_call_stmt (old_stmt,
4755 gimple_call_fndecl (old_stmt),
4759 if (purge_dead_abnormal_edges)
4761 gimple_purge_dead_eh_edges (return_block);
4762 gimple_purge_dead_abnormal_call_edges (return_block);
4765 /* If the value of the new expression is ignored, that's OK. We
4766 don't warn about this for CALL_EXPRs, so we shouldn't warn about
4767 the equivalent inlined version either. */
4768 if (is_gimple_assign (stmt))
4770 gcc_assert (gimple_assign_single_p (stmt)
4771 || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)));
4772 TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
4775 /* Copy bounds for all generated assigns that need it. */
4776 for (i = 0; i < id->assign_stmts.length (); i++)
4777 chkp_copy_bounds_for_assign (id->assign_stmts[i], cg_edge);
4778 id->assign_stmts.release ();
4780 /* Output the inlining info for this abstract function, since it has been
4781 inlined. If we don't do this now, we can lose the information about the
4782 variables in the function when the blocks get blown away as soon as we
4783 remove the cgraph node. */
4784 if (gimple_block (stmt))
4785 (*debug_hooks->outlining_inline_function) (cg_edge->callee->decl);
4787 /* Update callgraph if needed. */
4788 cg_edge->callee->remove ();
4790 id->block = NULL_TREE;
4791 successfully_inlined = true;
4794 input_location = saved_location;
4795 return successfully_inlined;
4798 /* Expand call statements reachable from STMT_P.
4799 We can only have CALL_EXPRs as the "toplevel" tree code or nested
4800 in a MODIFY_EXPR. */
4803 gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
4805 gimple_stmt_iterator gsi;
4806 bool inlined = false;
4808 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
4810 gimple *stmt = gsi_stmt (gsi);
4813 if (is_gimple_call (stmt)
4814 && !gimple_call_internal_p (stmt))
4815 inlined |= expand_call_inline (bb, stmt, id);
4822 /* Walk all basic blocks created after FIRST and try to fold every statement
4823 in the STATEMENTS pointer set. */
4826 fold_marked_statements (int first, hash_set<gimple *> *statements)
4828 for (; first < n_basic_blocks_for_fn (cfun); first++)
4829 if (BASIC_BLOCK_FOR_FN (cfun, first))
4831 gimple_stmt_iterator gsi;
4833 for (gsi = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4836 if (statements->contains (gsi_stmt (gsi)))
4838 gimple *old_stmt = gsi_stmt (gsi);
4839 tree old_decl = is_gimple_call (old_stmt) ? gimple_call_fndecl (old_stmt) : 0;
4841 if (old_decl && DECL_BUILT_IN (old_decl))
4843 /* Folding builtins can create multiple instructions,
4844 we need to look at all of them. */
4845 gimple_stmt_iterator i2 = gsi;
4847 if (fold_stmt (&gsi))
4850 /* If a builtin at the end of a bb folded into nothing,
4851 the following loop won't work. */
4852 if (gsi_end_p (gsi))
4854 cgraph_update_edges_for_call_stmt (old_stmt,
4859 i2 = gsi_start_bb (BASIC_BLOCK_FOR_FN (cfun, first));
4864 new_stmt = gsi_stmt (i2);
4865 update_stmt (new_stmt);
4866 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4869 if (new_stmt == gsi_stmt (gsi))
4871 /* It is okay to check only for the very last
4872 of these statements. If it is a throwing
4873 statement nothing will change. If it isn't
4874 this can remove EH edges. If that weren't
4875 correct then because some intermediate stmts
4876 throw, but not the last one. That would mean
4877 we'd have to split the block, which we can't
4878 here and we'd loose anyway. And as builtins
4879 probably never throw, this all
4881 if (maybe_clean_or_replace_eh_stmt (old_stmt,
4883 gimple_purge_dead_eh_edges (
4884 BASIC_BLOCK_FOR_FN (cfun, first));
4891 else if (fold_stmt (&gsi))
4893 /* Re-read the statement from GSI as fold_stmt() may
4895 gimple *new_stmt = gsi_stmt (gsi);
4896 update_stmt (new_stmt);
4898 if (is_gimple_call (old_stmt)
4899 || is_gimple_call (new_stmt))
4900 cgraph_update_edges_for_call_stmt (old_stmt, old_decl,
4903 if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))
4904 gimple_purge_dead_eh_edges (BASIC_BLOCK_FOR_FN (cfun,
4911 /* Expand calls to inline functions in the body of FN. */
4914 optimize_inline_calls (tree fn)
4918 int last = n_basic_blocks_for_fn (cfun);
4919 bool inlined_p = false;
4922 memset (&id, 0, sizeof (id));
4924 id.src_node = id.dst_node = cgraph_node::get (fn);
4925 gcc_assert (id.dst_node->definition);
4927 /* Or any functions that aren't finished yet. */
4928 if (current_function_decl)
4929 id.dst_fn = current_function_decl;
4931 id.copy_decl = copy_decl_maybe_to_var;
4932 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
4933 id.transform_new_cfg = false;
4934 id.transform_return_to_modify = true;
4935 id.transform_parameter = true;
4936 id.transform_lang_insert_block = NULL;
4937 id.statements_to_fold = new hash_set<gimple *>;
4939 push_gimplify_context ();
4941 /* We make no attempts to keep dominance info up-to-date. */
4942 free_dominance_info (CDI_DOMINATORS);
4943 free_dominance_info (CDI_POST_DOMINATORS);
4945 /* Register specific gimple functions. */
4946 gimple_register_cfg_hooks ();
4948 /* Reach the trees by walking over the CFG, and note the
4949 enclosing basic-blocks in the call edges. */
4950 /* We walk the blocks going forward, because inlined function bodies
4951 will split id->current_basic_block, and the new blocks will
4952 follow it; we'll trudge through them, processing their CALL_EXPRs
4954 FOR_EACH_BB_FN (bb, cfun)
4955 inlined_p |= gimple_expand_calls_inline (bb, &id);
4957 pop_gimplify_context (NULL);
4961 struct cgraph_edge *e;
4963 id.dst_node->verify ();
4965 /* Double check that we inlined everything we are supposed to inline. */
4966 for (e = id.dst_node->callees; e; e = e->next_callee)
4967 gcc_assert (e->inline_failed);
4970 /* Fold queued statements. */
4971 fold_marked_statements (last, id.statements_to_fold);
4972 delete id.statements_to_fold;
4974 gcc_assert (!id.debug_stmts.exists ());
4976 /* If we didn't inline into the function there is nothing to do. */
4980 /* Renumber the lexical scoping (non-code) blocks consecutively. */
4983 delete_unreachable_blocks_update_callgraph (&id);
4985 id.dst_node->verify ();
4987 /* It would be nice to check SSA/CFG/statement consistency here, but it is
4988 not possible yet - the IPA passes might make various functions to not
4989 throw and they don't care to proactively update local EH info. This is
4990 done later in fixup_cfg pass that also execute the verification. */
4991 return (TODO_update_ssa
4993 | (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
4994 | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
4995 | (profile_status_for_fn (cfun) != PROFILE_ABSENT
4996 ? TODO_rebuild_frequencies : 0));
4999 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
5002 copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
5004 enum tree_code code = TREE_CODE (*tp);
5005 enum tree_code_class cl = TREE_CODE_CLASS (code);
5007 /* We make copies of most nodes. */
5008 if (IS_EXPR_CODE_CLASS (cl)
5009 || code == TREE_LIST
5011 || code == TYPE_DECL
5012 || code == OMP_CLAUSE)
5014 /* Because the chain gets clobbered when we make a copy, we save it
5016 tree chain = NULL_TREE, new_tree;
5018 if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
5019 chain = TREE_CHAIN (*tp);
5021 /* Copy the node. */
5022 new_tree = copy_node (*tp);
5026 /* Now, restore the chain, if appropriate. That will cause
5027 walk_tree to walk into the chain as well. */
5028 if (code == PARM_DECL
5029 || code == TREE_LIST
5030 || code == OMP_CLAUSE)
5031 TREE_CHAIN (*tp) = chain;
5033 /* For now, we don't update BLOCKs when we make copies. So, we
5034 have to nullify all BIND_EXPRs. */
5035 if (TREE_CODE (*tp) == BIND_EXPR)
5036 BIND_EXPR_BLOCK (*tp) = NULL_TREE;
5038 else if (code == CONSTRUCTOR)
5040 /* CONSTRUCTOR nodes need special handling because
5041 we need to duplicate the vector of elements. */
5044 new_tree = copy_node (*tp);
5045 CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
5048 else if (code == STATEMENT_LIST)
5049 /* We used to just abort on STATEMENT_LIST, but we can run into them
5050 with statement-expressions (c++/40975). */
5051 copy_statement_list (tp);
5052 else if (TREE_CODE_CLASS (code) == tcc_type)
5054 else if (TREE_CODE_CLASS (code) == tcc_declaration)
5056 else if (TREE_CODE_CLASS (code) == tcc_constant)
5061 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
5062 information indicating to what new SAVE_EXPR this one should be mapped,
5063 use that one. Otherwise, create a new node and enter it in ST. FN is
5064 the function into which the copy will be placed. */
5067 remap_save_expr (tree *tp, hash_map<tree, tree> *st, int *walk_subtrees)
5072 /* See if we already encountered this SAVE_EXPR. */
5075 /* If we didn't already remap this SAVE_EXPR, do so now. */
5078 t = copy_node (*tp);
5080 /* Remember this SAVE_EXPR. */
5082 /* Make sure we don't remap an already-remapped SAVE_EXPR. */
5087 /* We've already walked into this SAVE_EXPR; don't do it again. */
5092 /* Replace this SAVE_EXPR with the copy. */
5096 /* Called via walk_gimple_seq. If *GSIP points to a GIMPLE_LABEL for a local
5097 label, copies the declaration and enters it in the splay_tree in DATA (which
5098 is really a 'copy_body_data *'. */
5101 mark_local_labels_stmt (gimple_stmt_iterator *gsip,
5102 bool *handled_ops_p ATTRIBUTE_UNUSED,
5103 struct walk_stmt_info *wi)
5105 copy_body_data *id = (copy_body_data *) wi->info;
5106 glabel *stmt = dyn_cast <glabel *> (gsi_stmt (*gsip));
5110 tree decl = gimple_label_label (stmt);
5112 /* Copy the decl and remember the copy. */
5113 insert_decl_map (id, decl, id->copy_decl (decl, id));
5119 static gimple_seq duplicate_remap_omp_clause_seq (gimple_seq seq,
5120 struct walk_stmt_info *wi);
5122 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5123 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5124 remaps all local declarations to appropriate replacements in gimple
5128 replace_locals_op (tree *tp, int *walk_subtrees, void *data)
5130 struct walk_stmt_info *wi = (struct walk_stmt_info*) data;
5131 copy_body_data *id = (copy_body_data *) wi->info;
5132 hash_map<tree, tree> *st = id->decl_map;
5136 /* Only a local declaration (variable or label). */
5137 if ((TREE_CODE (expr) == VAR_DECL
5138 && !TREE_STATIC (expr))
5139 || TREE_CODE (expr) == LABEL_DECL)
5141 /* Lookup the declaration. */
5144 /* If it's there, remap it. */
5149 else if (TREE_CODE (expr) == STATEMENT_LIST
5150 || TREE_CODE (expr) == BIND_EXPR
5151 || TREE_CODE (expr) == SAVE_EXPR)
5153 else if (TREE_CODE (expr) == TARGET_EXPR)
5155 /* Don't mess with a TARGET_EXPR that hasn't been expanded.
5156 It's OK for this to happen if it was part of a subtree that
5157 isn't immediately expanded, such as operand 2 of another
5159 if (!TREE_OPERAND (expr, 1))
5161 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 3);
5162 TREE_OPERAND (expr, 3) = NULL_TREE;
5165 else if (TREE_CODE (expr) == OMP_CLAUSE)
5167 /* Before the omplower pass completes, some OMP clauses can contain
5168 sequences that are neither copied by gimple_seq_copy nor walked by
5169 walk_gimple_seq. To make copy_gimple_seq_and_replace_locals work even
5170 in those situations, we have to copy and process them explicitely. */
5172 if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LASTPRIVATE)
5174 gimple_seq seq = OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr);
5175 seq = duplicate_remap_omp_clause_seq (seq, wi);
5176 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (expr) = seq;
5178 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_LINEAR)
5180 gimple_seq seq = OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr);
5181 seq = duplicate_remap_omp_clause_seq (seq, wi);
5182 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (expr) = seq;
5184 else if (OMP_CLAUSE_CODE (expr) == OMP_CLAUSE_REDUCTION)
5186 gimple_seq seq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr);
5187 seq = duplicate_remap_omp_clause_seq (seq, wi);
5188 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (expr) = seq;
5189 seq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr);
5190 seq = duplicate_remap_omp_clause_seq (seq, wi);
5191 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (expr) = seq;
5195 /* Keep iterating. */
5200 /* Called via walk_gimple_seq by copy_gimple_seq_and_replace_local.
5201 Using the splay_tree pointed to by ST (which is really a `splay_tree'),
5202 remaps all local declarations to appropriate replacements in gimple
5206 replace_locals_stmt (gimple_stmt_iterator *gsip,
5207 bool *handled_ops_p ATTRIBUTE_UNUSED,
5208 struct walk_stmt_info *wi)
5210 copy_body_data *id = (copy_body_data *) wi->info;
5211 gimple *gs = gsi_stmt (*gsip);
5213 if (gbind *stmt = dyn_cast <gbind *> (gs))
5215 tree block = gimple_bind_block (stmt);
5219 remap_block (&block, id);
5220 gimple_bind_set_block (stmt, block);
5223 /* This will remap a lot of the same decls again, but this should be
5225 if (gimple_bind_vars (stmt))
5227 tree old_var, decls = gimple_bind_vars (stmt);
5229 for (old_var = decls; old_var; old_var = DECL_CHAIN (old_var))
5230 if (!can_be_nonlocal (old_var, id)
5231 && ! variably_modified_type_p (TREE_TYPE (old_var), id->src_fn))
5232 remap_decl (old_var, id);
5234 gcc_checking_assert (!id->prevent_decl_creation_for_types);
5235 id->prevent_decl_creation_for_types = true;
5236 gimple_bind_set_vars (stmt, remap_decls (decls, NULL, id));
5237 id->prevent_decl_creation_for_types = false;
5241 /* Keep iterating. */
5245 /* Create a copy of SEQ and remap all decls in it. */
5248 duplicate_remap_omp_clause_seq (gimple_seq seq, struct walk_stmt_info *wi)
5253 /* If there are any labels in OMP sequences, they can be only referred to in
5254 the sequence itself and therefore we can do both here. */
5255 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, wi);
5256 gimple_seq copy = gimple_seq_copy (seq);
5257 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, wi);
5261 /* Copies everything in SEQ and replaces variables and labels local to
5262 current_function_decl. */
5265 copy_gimple_seq_and_replace_locals (gimple_seq seq)
5268 struct walk_stmt_info wi;
5271 /* There's nothing to do for NULL_TREE. */
5276 memset (&id, 0, sizeof (id));
5277 id.src_fn = current_function_decl;
5278 id.dst_fn = current_function_decl;
5279 id.decl_map = new hash_map<tree, tree>;
5280 id.debug_map = NULL;
5282 id.copy_decl = copy_decl_no_change;
5283 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
5284 id.transform_new_cfg = false;
5285 id.transform_return_to_modify = false;
5286 id.transform_parameter = false;
5287 id.transform_lang_insert_block = NULL;
5289 /* Walk the tree once to find local labels. */
5290 memset (&wi, 0, sizeof (wi));
5291 hash_set<tree> visited;
5294 walk_gimple_seq (seq, mark_local_labels_stmt, NULL, &wi);
5296 copy = gimple_seq_copy (seq);
5298 /* Walk the copy, remapping decls. */
5299 memset (&wi, 0, sizeof (wi));
5301 walk_gimple_seq (copy, replace_locals_stmt, replace_locals_op, &wi);
5306 delete id.debug_map;
5307 if (id.dependence_map)
5309 delete id.dependence_map;
5310 id.dependence_map = NULL;
5317 /* Allow someone to determine if SEARCH is a child of TOP from gdb. */
5320 debug_find_tree_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED, void *data)
5329 debug_find_tree (tree top, tree search)
5331 return walk_tree_without_duplicates (&top, debug_find_tree_1, search) != 0;
5335 /* Declare the variables created by the inliner. Add all the variables in
5336 VARS to BIND_EXPR. */
5339 declare_inline_vars (tree block, tree vars)
5342 for (t = vars; t; t = DECL_CHAIN (t))
5344 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
5345 gcc_assert (!TREE_STATIC (t) && !TREE_ASM_WRITTEN (t));
5346 add_local_decl (cfun, t);
5350 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), vars);
5353 /* Copy NODE (which must be a DECL). The DECL originally was in the FROM_FN,
5354 but now it will be in the TO_FN. PARM_TO_VAR means enable PARM_DECL to
5355 VAR_DECL translation. */
5358 copy_decl_for_dup_finish (copy_body_data *id, tree decl, tree copy)
5360 /* Don't generate debug information for the copy if we wouldn't have
5361 generated it for the copy either. */
5362 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
5363 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
5365 /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
5366 declaration inspired this copy. */
5367 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
5369 /* The new variable/label has no RTL, yet. */
5370 if (CODE_CONTAINS_STRUCT (TREE_CODE (copy), TS_DECL_WRTL)
5371 && !TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
5372 SET_DECL_RTL (copy, 0);
5374 /* These args would always appear unused, if not for this. */
5375 TREE_USED (copy) = 1;
5377 /* Set the context for the new declaration. */
5378 if (!DECL_CONTEXT (decl))
5379 /* Globals stay global. */
5381 else if (DECL_CONTEXT (decl) != id->src_fn)
5382 /* Things that weren't in the scope of the function we're inlining
5383 from aren't in the scope we're inlining to, either. */
5385 else if (TREE_STATIC (decl))
5386 /* Function-scoped static variables should stay in the original
5390 /* Ordinary automatic local variables are now in the scope of the
5392 DECL_CONTEXT (copy) = id->dst_fn;
5398 copy_decl_to_var (tree decl, copy_body_data *id)
5402 gcc_assert (TREE_CODE (decl) == PARM_DECL
5403 || TREE_CODE (decl) == RESULT_DECL);
5405 type = TREE_TYPE (decl);
5407 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5408 VAR_DECL, DECL_NAME (decl), type);
5409 if (DECL_PT_UID_SET_P (decl))
5410 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5411 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5412 TREE_READONLY (copy) = TREE_READONLY (decl);
5413 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5414 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5416 return copy_decl_for_dup_finish (id, decl, copy);
5419 /* Like copy_decl_to_var, but create a return slot object instead of a
5420 pointer variable for return by invisible reference. */
5423 copy_result_decl_to_var (tree decl, copy_body_data *id)
5427 gcc_assert (TREE_CODE (decl) == PARM_DECL
5428 || TREE_CODE (decl) == RESULT_DECL);
5430 type = TREE_TYPE (decl);
5431 if (DECL_BY_REFERENCE (decl))
5432 type = TREE_TYPE (type);
5434 copy = build_decl (DECL_SOURCE_LOCATION (id->dst_fn),
5435 VAR_DECL, DECL_NAME (decl), type);
5436 if (DECL_PT_UID_SET_P (decl))
5437 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
5438 TREE_READONLY (copy) = TREE_READONLY (decl);
5439 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
5440 if (!DECL_BY_REFERENCE (decl))
5442 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
5443 DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
5446 return copy_decl_for_dup_finish (id, decl, copy);
5450 copy_decl_no_change (tree decl, copy_body_data *id)
5454 copy = copy_node (decl);
5456 /* The COPY is not abstract; it will be generated in DST_FN. */
5457 DECL_ABSTRACT_P (copy) = false;
5458 lang_hooks.dup_lang_specific_decl (copy);
5460 /* TREE_ADDRESSABLE isn't used to indicate that a label's address has
5461 been taken; it's for internal bookkeeping in expand_goto_internal. */
5462 if (TREE_CODE (copy) == LABEL_DECL)
5464 TREE_ADDRESSABLE (copy) = 0;
5465 LABEL_DECL_UID (copy) = -1;
5468 return copy_decl_for_dup_finish (id, decl, copy);
5472 copy_decl_maybe_to_var (tree decl, copy_body_data *id)
5474 if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
5475 return copy_decl_to_var (decl, id);
5477 return copy_decl_no_change (decl, id);
5480 /* Return a copy of the function's argument tree. */
5482 copy_arguments_for_versioning (tree orig_parm, copy_body_data * id,
5483 bitmap args_to_skip, tree *vars)
5486 tree new_parm = NULL;
5491 for (arg = orig_parm; arg; arg = DECL_CHAIN (arg), i++)
5492 if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
5494 tree new_tree = remap_decl (arg, id);
5495 if (TREE_CODE (new_tree) != PARM_DECL)
5496 new_tree = id->copy_decl (arg, id);
5497 lang_hooks.dup_lang_specific_decl (new_tree);
5499 parg = &DECL_CHAIN (new_tree);
5501 else if (!id->decl_map->get (arg))
5503 /* Make an equivalent VAR_DECL. If the argument was used
5504 as temporary variable later in function, the uses will be
5505 replaced by local variable. */
5506 tree var = copy_decl_to_var (arg, id);
5507 insert_decl_map (id, arg, var);
5508 /* Declare this new variable. */
5509 DECL_CHAIN (var) = *vars;
5515 /* Return a copy of the function's static chain. */
5517 copy_static_chain (tree static_chain, copy_body_data * id)
5519 tree *chain_copy, *pvar;
5521 chain_copy = &static_chain;
5522 for (pvar = chain_copy; *pvar; pvar = &DECL_CHAIN (*pvar))
5524 tree new_tree = remap_decl (*pvar, id);
5525 lang_hooks.dup_lang_specific_decl (new_tree);
5526 DECL_CHAIN (new_tree) = DECL_CHAIN (*pvar);
5529 return static_chain;
5532 /* Return true if the function is allowed to be versioned.
5533 This is a guard for the versioning functionality. */
5536 tree_versionable_function_p (tree fndecl)
5538 return (!lookup_attribute ("noclone", DECL_ATTRIBUTES (fndecl))
5539 && copy_forbidden (DECL_STRUCT_FUNCTION (fndecl)) == NULL);
5542 /* Delete all unreachable basic blocks and update callgraph.
5543 Doing so is somewhat nontrivial because we need to update all clones and
5544 remove inline function that become unreachable. */
5547 delete_unreachable_blocks_update_callgraph (copy_body_data *id)
5549 bool changed = false;
5550 basic_block b, next_bb;
5552 find_unreachable_blocks ();
5554 /* Delete all unreachable basic blocks. */
5556 for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
5557 != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
5559 next_bb = b->next_bb;
5561 if (!(b->flags & BB_REACHABLE))
5563 gimple_stmt_iterator bsi;
5565 for (bsi = gsi_start_bb (b); !gsi_end_p (bsi); gsi_next (&bsi))
5567 struct cgraph_edge *e;
5568 struct cgraph_node *node;
5570 id->dst_node->remove_stmt_references (gsi_stmt (bsi));
5572 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5573 &&(e = id->dst_node->get_edge (gsi_stmt (bsi))) != NULL)
5575 if (!e->inline_failed)
5576 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5580 if (id->transform_call_graph_edges == CB_CGE_MOVE_CLONES
5581 && id->dst_node->clones)
5582 for (node = id->dst_node->clones; node != id->dst_node;)
5584 node->remove_stmt_references (gsi_stmt (bsi));
5585 if (gimple_code (gsi_stmt (bsi)) == GIMPLE_CALL
5586 && (e = node->get_edge (gsi_stmt (bsi))) != NULL)
5588 if (!e->inline_failed)
5589 e->callee->remove_symbol_and_inline_clones (id->dst_node);
5595 node = node->clones;
5596 else if (node->next_sibling_clone)
5597 node = node->next_sibling_clone;
5600 while (node != id->dst_node && !node->next_sibling_clone)
5601 node = node->clone_of;
5602 if (node != id->dst_node)
5603 node = node->next_sibling_clone;
5607 delete_basic_block (b);
5615 /* Update clone info after duplication. */
5618 update_clone_info (copy_body_data * id)
5620 struct cgraph_node *node;
5621 if (!id->dst_node->clones)
5623 for (node = id->dst_node->clones; node != id->dst_node;)
5625 /* First update replace maps to match the new body. */
5626 if (node->clone.tree_map)
5629 for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
5631 struct ipa_replace_map *replace_info;
5632 replace_info = (*node->clone.tree_map)[i];
5633 walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
5634 walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
5638 node = node->clones;
5639 else if (node->next_sibling_clone)
5640 node = node->next_sibling_clone;
5643 while (node != id->dst_node && !node->next_sibling_clone)
5644 node = node->clone_of;
5645 if (node != id->dst_node)
5646 node = node->next_sibling_clone;
5651 /* Create a copy of a function's tree.
5652 OLD_DECL and NEW_DECL are FUNCTION_DECL tree nodes
5653 of the original function and the new copied function
5654 respectively. In case we want to replace a DECL
5655 tree with another tree while duplicating the function's
5656 body, TREE_MAP represents the mapping between these
5657 trees. If UPDATE_CLONES is set, the call_stmt fields
5658 of edges of clones of the function will be updated.
5660 If non-NULL ARGS_TO_SKIP determine function parameters to remove
5662 If SKIP_RETURN is true, the new version will return void.
5663 If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
5664 If non_NULL NEW_ENTRY determine new entry BB of the clone.
5667 tree_function_versioning (tree old_decl, tree new_decl,
5668 vec<ipa_replace_map *, va_gc> *tree_map,
5669 bool update_clones, bitmap args_to_skip,
5670 bool skip_return, bitmap blocks_to_copy,
5671 basic_block new_entry)
5673 struct cgraph_node *old_version_node;
5674 struct cgraph_node *new_version_node;
5678 struct ipa_replace_map *replace_info;
5679 basic_block old_entry_block, bb;
5680 auto_vec<gimple *, 10> init_stmts;
5681 tree vars = NULL_TREE;
5682 bitmap debug_args_to_skip = args_to_skip;
5684 gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
5685 && TREE_CODE (new_decl) == FUNCTION_DECL);
5686 DECL_POSSIBLY_INLINED (old_decl) = 1;
5688 old_version_node = cgraph_node::get (old_decl);
5689 gcc_checking_assert (old_version_node);
5690 new_version_node = cgraph_node::get (new_decl);
5691 gcc_checking_assert (new_version_node);
5693 /* Copy over debug args. */
5694 if (DECL_HAS_DEBUG_ARGS_P (old_decl))
5696 vec<tree, va_gc> **new_debug_args, **old_debug_args;
5697 gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
5698 DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
5699 old_debug_args = decl_debug_args_lookup (old_decl);
5702 new_debug_args = decl_debug_args_insert (new_decl);
5703 *new_debug_args = vec_safe_copy (*old_debug_args);
5707 /* Output the inlining info for this abstract function, since it has been
5708 inlined. If we don't do this now, we can lose the information about the
5709 variables in the function when the blocks get blown away as soon as we
5710 remove the cgraph node. */
5711 (*debug_hooks->outlining_inline_function) (old_decl);
5713 DECL_ARTIFICIAL (new_decl) = 1;
5714 DECL_ABSTRACT_ORIGIN (new_decl) = DECL_ORIGIN (old_decl);
5715 if (DECL_ORIGIN (old_decl) == old_decl)
5716 old_version_node->used_as_abstract_origin = true;
5717 DECL_FUNCTION_PERSONALITY (new_decl) = DECL_FUNCTION_PERSONALITY (old_decl);
5719 /* Prepare the data structures for the tree copy. */
5720 memset (&id, 0, sizeof (id));
5722 /* Generate a new name for the new version. */
5723 id.statements_to_fold = new hash_set<gimple *>;
5725 id.decl_map = new hash_map<tree, tree>;
5726 id.debug_map = NULL;
5727 id.src_fn = old_decl;
5728 id.dst_fn = new_decl;
5729 id.src_node = old_version_node;
5730 id.dst_node = new_version_node;
5731 id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
5732 id.blocks_to_copy = blocks_to_copy;
5734 id.copy_decl = copy_decl_no_change;
5735 id.transform_call_graph_edges
5736 = update_clones ? CB_CGE_MOVE_CLONES : CB_CGE_MOVE;
5737 id.transform_new_cfg = true;
5738 id.transform_return_to_modify = false;
5739 id.transform_parameter = false;
5740 id.transform_lang_insert_block = NULL;
5742 old_entry_block = ENTRY_BLOCK_PTR_FOR_FN
5743 (DECL_STRUCT_FUNCTION (old_decl));
5744 DECL_RESULT (new_decl) = DECL_RESULT (old_decl);
5745 DECL_ARGUMENTS (new_decl) = DECL_ARGUMENTS (old_decl);
5746 initialize_cfun (new_decl, old_decl,
5747 old_entry_block->count);
5748 if (DECL_STRUCT_FUNCTION (new_decl)->gimple_df)
5749 DECL_STRUCT_FUNCTION (new_decl)->gimple_df->ipa_pta
5750 = id.src_cfun->gimple_df->ipa_pta;
5752 /* Copy the function's static chain. */
5753 p = DECL_STRUCT_FUNCTION (old_decl)->static_chain_decl;
5755 DECL_STRUCT_FUNCTION (new_decl)->static_chain_decl
5756 = copy_static_chain (p, &id);
5758 /* If there's a tree_map, prepare for substitution. */
5760 for (i = 0; i < tree_map->length (); i++)
5763 replace_info = (*tree_map)[i];
5764 if (replace_info->replace_p)
5767 if (!replace_info->old_tree)
5769 int p = replace_info->parm_num;
5771 tree req_type, new_type;
5773 for (parm = DECL_ARGUMENTS (old_decl); p;
5774 parm = DECL_CHAIN (parm))
5776 replace_info->old_tree = parm;
5777 parm_num = replace_info->parm_num;
5778 req_type = TREE_TYPE (parm);
5779 new_type = TREE_TYPE (replace_info->new_tree);
5780 if (!useless_type_conversion_p (req_type, new_type))
5782 if (fold_convertible_p (req_type, replace_info->new_tree))
5783 replace_info->new_tree
5784 = fold_build1 (NOP_EXPR, req_type,
5785 replace_info->new_tree);
5786 else if (TYPE_SIZE (req_type) == TYPE_SIZE (new_type))
5787 replace_info->new_tree
5788 = fold_build1 (VIEW_CONVERT_EXPR, req_type,
5789 replace_info->new_tree);
5794 fprintf (dump_file, " const ");
5795 print_generic_expr (dump_file,
5796 replace_info->new_tree, 0);
5798 " can't be converted to param ");
5799 print_generic_expr (dump_file, parm, 0);
5800 fprintf (dump_file, "\n");
5802 replace_info->old_tree = NULL;
5807 gcc_assert (TREE_CODE (replace_info->old_tree) == PARM_DECL);
5808 if (replace_info->old_tree)
5810 init = setup_one_parameter (&id, replace_info->old_tree,
5811 replace_info->new_tree, id.src_fn,
5815 init_stmts.safe_push (init);
5816 if (MAY_HAVE_DEBUG_STMTS && args_to_skip)
5822 for (parm = DECL_ARGUMENTS (old_decl), p = 0; parm;
5823 parm = DECL_CHAIN (parm), p++)
5824 if (parm == replace_info->old_tree)
5832 if (debug_args_to_skip == args_to_skip)
5834 debug_args_to_skip = BITMAP_ALLOC (NULL);
5835 bitmap_copy (debug_args_to_skip, args_to_skip);
5837 bitmap_clear_bit (debug_args_to_skip, parm_num);
5843 /* Copy the function's arguments. */
5844 if (DECL_ARGUMENTS (old_decl) != NULL_TREE)
5845 DECL_ARGUMENTS (new_decl)
5846 = copy_arguments_for_versioning (DECL_ARGUMENTS (old_decl), &id,
5847 args_to_skip, &vars);
5849 DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
5850 BLOCK_SUPERCONTEXT (DECL_INITIAL (new_decl)) = new_decl;
5852 declare_inline_vars (DECL_INITIAL (new_decl), vars);
5854 if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
5855 /* Add local vars. */
5856 add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
5858 if (DECL_RESULT (old_decl) == NULL_TREE)
5860 else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
5862 DECL_RESULT (new_decl)
5863 = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
5864 RESULT_DECL, NULL_TREE, void_type_node);
5865 DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
5866 cfun->returns_struct = 0;
5867 cfun->returns_pcc_struct = 0;
5872 DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);
5873 lang_hooks.dup_lang_specific_decl (DECL_RESULT (new_decl));
5874 if (gimple_in_ssa_p (id.src_cfun)
5875 && DECL_BY_REFERENCE (DECL_RESULT (old_decl))
5876 && (old_name = ssa_default_def (id.src_cfun, DECL_RESULT (old_decl))))
5878 tree new_name = make_ssa_name (DECL_RESULT (new_decl));
5879 insert_decl_map (&id, old_name, new_name);
5880 SSA_NAME_DEF_STMT (new_name) = gimple_build_nop ();
5881 set_ssa_default_def (cfun, DECL_RESULT (new_decl), new_name);
5885 /* Set up the destination functions loop tree. */
5886 if (loops_for_fn (DECL_STRUCT_FUNCTION (old_decl)) != NULL)
5888 cfun->curr_properties &= ~PROP_loops;
5889 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
5890 cfun->curr_properties |= PROP_loops;
5893 /* Copy the Function's body. */
5894 copy_body (&id, old_entry_block->count, REG_BR_PROB_BASE,
5895 ENTRY_BLOCK_PTR_FOR_FN (cfun), EXIT_BLOCK_PTR_FOR_FN (cfun),
5898 /* Renumber the lexical scoping (non-code) blocks consecutively. */
5899 number_blocks (new_decl);
5901 /* We want to create the BB unconditionally, so that the addition of
5902 debug stmts doesn't affect BB count, which may in the end cause
5903 codegen differences. */
5904 bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5905 while (init_stmts.length ())
5906 insert_init_stmt (&id, bb, init_stmts.pop ());
5907 update_clone_info (&id);
5909 /* Remap the nonlocal_goto_save_area, if any. */
5910 if (cfun->nonlocal_goto_save_area)
5912 struct walk_stmt_info wi;
5914 memset (&wi, 0, sizeof (wi));
5916 walk_tree (&cfun->nonlocal_goto_save_area, remap_gimple_op_r, &wi, NULL);
5922 delete id.debug_map;
5923 free_dominance_info (CDI_DOMINATORS);
5924 free_dominance_info (CDI_POST_DOMINATORS);
5926 fold_marked_statements (0, id.statements_to_fold);
5927 delete id.statements_to_fold;
5928 delete_unreachable_blocks_update_callgraph (&id);
5929 if (id.dst_node->definition)
5930 cgraph_edge::rebuild_references ();
5931 if (loops_state_satisfies_p (LOOPS_NEED_FIXUP))
5933 calculate_dominance_info (CDI_DOMINATORS);
5934 fix_loop_structure (NULL);
5936 update_ssa (TODO_update_ssa);
5938 /* After partial cloning we need to rescale frequencies, so they are
5939 within proper range in the cloned function. */
5942 struct cgraph_edge *e;
5943 rebuild_frequencies ();
5945 new_version_node->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5946 for (e = new_version_node->callees; e; e = e->next_callee)
5948 basic_block bb = gimple_bb (e->call_stmt);
5949 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5951 e->count = bb->count;
5953 for (e = new_version_node->indirect_calls; e; e = e->next_callee)
5955 basic_block bb = gimple_bb (e->call_stmt);
5956 e->frequency = compute_call_stmt_bb_frequency (current_function_decl,
5958 e->count = bb->count;
5962 if (debug_args_to_skip && MAY_HAVE_DEBUG_STMTS)
5965 vec<tree, va_gc> **debug_args = NULL;
5966 unsigned int len = 0;
5967 for (parm = DECL_ARGUMENTS (old_decl), i = 0;
5968 parm; parm = DECL_CHAIN (parm), i++)
5969 if (bitmap_bit_p (debug_args_to_skip, i) && is_gimple_reg (parm))
5973 if (debug_args == NULL)
5975 debug_args = decl_debug_args_insert (new_decl);
5976 len = vec_safe_length (*debug_args);
5978 ddecl = make_node (DEBUG_EXPR_DECL);
5979 DECL_ARTIFICIAL (ddecl) = 1;
5980 TREE_TYPE (ddecl) = TREE_TYPE (parm);
5981 DECL_MODE (ddecl) = DECL_MODE (parm);
5982 vec_safe_push (*debug_args, DECL_ORIGIN (parm));
5983 vec_safe_push (*debug_args, ddecl);
5985 if (debug_args != NULL)
5987 /* On the callee side, add
5990 stmts to the first bb where var is a VAR_DECL created for the
5991 optimized away parameter in DECL_INITIAL block. This hints
5992 in the debug info that var (whole DECL_ORIGIN is the parm
5993 PARM_DECL) is optimized away, but could be looked up at the
5994 call site as value of D#X there. */
5995 tree var = vars, vexpr;
5996 gimple_stmt_iterator cgsi
5997 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
6000 i = vec_safe_length (*debug_args);
6004 while (var != NULL_TREE
6005 && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
6006 var = TREE_CHAIN (var);
6007 if (var == NULL_TREE)
6009 vexpr = make_node (DEBUG_EXPR_DECL);
6010 parm = (**debug_args)[i];
6011 DECL_ARTIFICIAL (vexpr) = 1;
6012 TREE_TYPE (vexpr) = TREE_TYPE (parm);
6013 DECL_MODE (vexpr) = DECL_MODE (parm);
6014 def_temp = gimple_build_debug_bind (var, vexpr, NULL);
6015 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6016 def_temp = gimple_build_debug_source_bind (vexpr, parm, NULL);
6017 gsi_insert_before (&cgsi, def_temp, GSI_NEW_STMT);
6023 if (debug_args_to_skip && debug_args_to_skip != args_to_skip)
6024 BITMAP_FREE (debug_args_to_skip);
6025 free_dominance_info (CDI_DOMINATORS);
6026 free_dominance_info (CDI_POST_DOMINATORS);
6028 gcc_assert (!id.debug_stmts.exists ());
6033 /* EXP is CALL_EXPR present in a GENERIC expression tree. Try to integrate
6034 the callee and return the inlined body on success. */
6037 maybe_inline_call_in_expr (tree exp)
6039 tree fn = get_callee_fndecl (exp);
6041 /* We can only try to inline "const" functions. */
6042 if (fn && TREE_READONLY (fn) && DECL_SAVED_TREE (fn))
6044 call_expr_arg_iterator iter;
6047 hash_map<tree, tree> decl_map;
6049 /* Remap the parameters. */
6050 for (param = DECL_ARGUMENTS (fn), arg = first_call_expr_arg (exp, &iter);
6052 param = DECL_CHAIN (param), arg = next_call_expr_arg (&iter))
6053 decl_map.put (param, arg);
6055 memset (&id, 0, sizeof (id));
6057 id.dst_fn = current_function_decl;
6058 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6059 id.decl_map = &decl_map;
6061 id.copy_decl = copy_decl_no_change;
6062 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6063 id.transform_new_cfg = false;
6064 id.transform_return_to_modify = true;
6065 id.transform_parameter = true;
6066 id.transform_lang_insert_block = NULL;
6068 /* Make sure not to unshare trees behind the front-end's back
6069 since front-end specific mechanisms may rely on sharing. */
6070 id.regimplify = false;
6071 id.do_not_unshare = true;
6073 /* We're not inside any EH region. */
6076 t = copy_tree_body (&id);
6078 /* We can only return something suitable for use in a GENERIC
6080 if (TREE_CODE (t) == MODIFY_EXPR)
6081 return TREE_OPERAND (t, 1);
6087 /* Duplicate a type, fields and all. */
6090 build_duplicate_type (tree type)
6092 struct copy_body_data id;
6094 memset (&id, 0, sizeof (id));
6095 id.src_fn = current_function_decl;
6096 id.dst_fn = current_function_decl;
6098 id.decl_map = new hash_map<tree, tree>;
6099 id.debug_map = NULL;
6100 id.copy_decl = copy_decl_no_change;
6102 type = remap_type_1 (type, &id);
6106 delete id.debug_map;
6108 TYPE_CANONICAL (type) = type;
6113 /* Unshare the entire DECL_SAVED_TREE of FN and return the remapped
6114 parameters and RESULT_DECL in PARMS and RESULT. Used by C++ constexpr
6118 copy_fn (tree fn, tree& parms, tree& result)
6122 hash_map<tree, tree> decl_map;
6127 memset (&id, 0, sizeof (id));
6129 id.dst_fn = current_function_decl;
6130 id.src_cfun = DECL_STRUCT_FUNCTION (fn);
6131 id.decl_map = &decl_map;
6133 id.copy_decl = copy_decl_no_change;
6134 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6135 id.transform_new_cfg = false;
6136 id.transform_return_to_modify = false;
6137 id.transform_parameter = true;
6138 id.transform_lang_insert_block = NULL;
6140 /* Make sure not to unshare trees behind the front-end's back
6141 since front-end specific mechanisms may rely on sharing. */
6142 id.regimplify = false;
6143 id.do_not_unshare = true;
6145 /* We're not inside any EH region. */
6148 /* Remap the parameters and result and return them to the caller. */
6149 for (param = DECL_ARGUMENTS (fn);
6151 param = DECL_CHAIN (param))
6153 *p = remap_decl (param, &id);
6154 p = &DECL_CHAIN (*p);
6157 if (DECL_RESULT (fn))
6158 result = remap_decl (DECL_RESULT (fn), &id);
6162 return copy_tree_body (&id);