/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt),
		gimple_assign_rhs2 (stmt),
		gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt),
		gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
	   && gimple_location (stmt) != EXPR_LOCATION (t))
	  || (gimple_block (stmt)
	      && currently_expanding_to_rtl
	      && EXPR_P (t)
	      && gimple_block (stmt) != TREE_BLOCK (t)))
	t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */

static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
	set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
	{
	  tree var = SSA_NAME_VAR (t);
	  /* If we don't yet have something recorded, just record it now.  */
	  if (!DECL_RTL_SET_P (var))
	    SET_DECL_RTL (var, x);
	  /* If we have it set already to "multiple places" don't
	     change this.  */
	  else if (DECL_RTL (var) == pc_rtx)
	    ;
	  /* If we have something recorded and it's not the same place
	     as we want to record now, we have multiple partitions for the
	     same base variable, with different places.  We can't just
	     randomly choose one, hence we have to say that we don't know.
	     This only happens with optimization, and there var-tracking
	     will figure out the right thing.  */
	  else if (DECL_RTL (var) != x)
	    SET_DECL_RTL (var, pc_rtx);
	}
    }
  else
    SET_DECL_RTL (t, x);
}
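
/* A note on the double duty of pc_rtx in this file (an observation, not
   from the original comments): add_stack_var below stores pc_rtx as a
   placeholder meaning "queued for partitioning, no rtl assigned yet",
   while here it marks a base DECL whose partitions live in multiple
   places.  Both uses rely on pc_rtx never being a valid variable
   location.  */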

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
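
/* A partition is thus a singly-linked list threaded through the
   stack_vars array: every member's representative field names the
   partition leader, and the leader's next field starts a chain of
   member indices that ends at EOC.  (Illustrative summary of the
   fields above.)  */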

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static struct pointer_map_t *decl_to_stack_part;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
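
/* For example (hypothetical target values): if STARTING_FRAME_OFFSET
   were 8 and PREFERRED_STACK_BOUNDARY 128 bits (16 bytes), then per
   the computation in expand_used_vars, off == 8 and frame_phase ==
   16 - 8 == 8.  */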

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
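
/* A worked example of the rounding above (purely illustrative): with a
   downward-growing frame, frame_phase == 0, frame_offset == 0, SIZE == 12
   and ALIGN == 8, new_frame_offset becomes -12, the &= -align step rounds
   it down to -16, and -16 is returned; the 4 slack bytes are padding.  */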

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = pointer_map_create ();

  v = &stack_vars[stack_vars_num];
  * (size_t *)pointer_map_insert (decl_to_stack_part, decl) = stack_vars_num;

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
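
/* The 3/2 growth factor above keeps the XRESIZEVEC reallocations
   amortized across many add_stack_var calls; the array only ever
   grows, and is freed wholesale in fini_vars_expansion.  */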

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
	return true;

  return false;
}
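
/* For instance, struct { union { short s; int i; } u; } contains a
   union (found via the RECORD_TYPE field walk), as does an array of
   such structs (found via the ARRAY_TYPE recursion).  */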

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union-containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
	{
	  tree type_j = TREE_TYPE (stack_vars[j].decl);
	  bool aggr_j = AGGREGATE_TYPE_P (type_j);
	  if (aggr_i != aggr_j
	      /* Either the objects conflict by means of type based
		 aliasing rules, or we need to add a conflict.  */
	      || !objects_must_conflict_p (type_i, type_j)
	      /* In case the types do not conflict ensure that access
		 to elements will conflict.  In case of unions we have
		 to be careful as type based aliasing rules may say
		 access to the same memory does not conflict.  So play
		 safe and add a conflict in this case when
		 -fstrict-aliasing is used.  */
	      || (contains_union && flag_strict_aliasing))
	    add_stack_var_conflict (i, j);
	}
    }
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = (size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v)
	bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple stmt ATTRIBUTE_UNUSED, tree op, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v =
	(size_t *) pointer_map_contains (decl_to_stack_part, op);
      if (v && bitmap_set_bit (active, *v))
	{
	  size_t num = *v;
	  bitmap_iterator bi;
	  unsigned i;
	  gcc_assert (num < stack_vars_num);
	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
	    add_stack_var_conflict (num, i);
	}
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  bool (*visit)(gimple, tree, void *);

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  size_t *v;
	  /* Nested function lowering might introduce LHSs
	     that are COMPONENT_REFs.  */
	  if (TREE_CODE (lhs) != VAR_DECL)
	    continue;
	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
	      && (v = (size_t *)
		  pointer_map_contains (decl_to_stack_part, lhs)))
	    bitmap_clear_bit (work, *v);
	}
      else if (!is_gimple_debug (stmt))
	{
	  if (for_conflict
	      && visit == visit_op)
	    {
	      /* If this is the first real instruction in this BB we need
		 to add conflicts for everything live at this point now.
		 Unlike classical liveness for named objects we can't
		 rely on seeing a def/use of the names we're interested in.
		 There might merely be indirect loads/stores.  We'd not add any
		 conflicts for such partitions.  */
	      bitmap_iterator bi;
	      unsigned i;
	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
		{
		  unsigned j;
		  bitmap_iterator bj;
		  EXECUTE_IF_SET_IN_BITMAP (work, i + 1, j, bj)
		    add_stack_var_conflict (i, j);
		}
	      visit = visit_conflict;
	    }
	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
	}
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward, without also moving a dereference
     to it upwards.  But it's conservatively correct as a variable can
     never hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB (bb)
    bb->aux = BITMAP_ALLOC (NULL);

  changed = true;
  while (changed)
    {
      changed = false;
      FOR_EACH_BB (bb)
	{
	  bitmap active = (bitmap)bb->aux;
	  add_scope_conflicts_1 (bb, work, false);
	  if (bitmap_ior_into (active, work))
	    changed = true;
	}
    }

  FOR_EACH_BB (bb)
    add_scope_conflicts_1 (bb, work, true);

  BITMAP_FREE (work);
  FOR_ALL_BB (bb)
    BITMAP_FREE (bb->aux);
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna > alignb)
    return -1;
  if (aligna < alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
	return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
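
/* An illustration of the resulting order: with only "small" alignments,
   a 16-byte object sorts before an 8-byte one; ties on size are broken
   by alignment (higher first), then by version/UID for stability.
   "Large"-aligned objects, if any, precede all of these.  */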

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
			       struct pointer_map_t *decls_to_partitions,
			       struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
	 visit it once.  */
      || pointer_set_insert(visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
	 || !bitmap_bit_p (temp, i))
	&& (part = (bitmap *) pointer_map_contains (decls_to_partitions,
						    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
	  || stack_vars[i].next == EOC)
	continue;

      if (!decls_to_partitions)
	{
	  decls_to_partitions = pointer_map_create ();
	  cfun->gimple_df->decls_to_pointers = pointer_map_create ();
	}

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
	 have been partitioned.  */
      if (var == NULL_TREE)
	var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  tree decl = stack_vars[j].decl;
	  unsigned int uid = DECL_PT_UID (decl);
	  /* We should never end up partitioning SSA names (though they
	     may end up on the stack).  Neither should we allocate stack
	     space to something that is unused and thus unreferenced, except
	     for -O0 where we are preserving even unreferenced variables.  */
	  gcc_assert (DECL_P (decl)
		      && (!optimize
			  || referenced_var_lookup (cfun, DECL_UID (decl))));
	  bitmap_set_bit (part, uid);
	  *((bitmap *) pointer_map_insert (decls_to_partitions,
					   (void *)(size_t) uid)) = part;
	  *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
					 decl)) = name;
	  if (TREE_ADDRESSABLE (decl))
	    TREE_ADDRESSABLE (name) = 1;
	}

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  struct ptr_info_def *pi;

	  if (name
	      && POINTER_TYPE_P (TREE_TYPE (name))
	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
					   visited, temp);
	}

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
				     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
	add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size in descending order.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	  }
	}
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;

      /* Ignore objects that aren't partition representatives. If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
	continue;

      for (sj = si + 1; sj < n; ++sj)
	{
	  size_t j = stack_vars_sorted[sj];
	  unsigned int jalign = stack_vars[j].alignb;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)
	    continue;

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))
	    continue;

	  /* Do not mix objects of "small" (supported) alignment
	     and "large" (unsupported) alignment.  */
	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
	    continue;

	  /* UNION the objects, placing J at OFFSET.  */
	  union_stack_vars (i, j);
	}
    }

  update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
	       " align %u\n", (unsigned long) i, stack_vars[i].size,
	       stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	}
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
			 HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
	 important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
	offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
	align = base_align;

      /* One would think that we could assert that we're not decreasing
	 alignment here, but (at least) the i386 port does exactly this
	 via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
	{
	  unsigned alignb;

	  i = stack_vars_sorted[si];
	  alignb = stack_vars[i].alignb;

	  /* Stop when we get to the first decl with "small" alignment.  */
	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    break;

	  /* Skip variables that aren't partition representatives.  */
	  if (stack_vars[i].representative != i)
	    continue;

	  /* Skip variables that have already had rtl assigned.  See also
	     add_stack_var where we perpetrate this pc_rtx hack.  */
	  decl = stack_vars[i].decl;
	  if ((TREE_CODE (decl) == SSA_NAME
	       ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
	       : DECL_RTL (decl)) != pc_rtx)
	    continue;

	  large_size += alignb - 1;
	  large_size &= -(HOST_WIDE_INT)alignb;
	  large_size += stack_vars[i].size;
	}

      /* If there were any, allocate space.  */
      if (large_size > 0)
	large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
						   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
	   ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
	   : DECL_RTL (decl)) != pc_rtx)
	continue;

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (decl))
	continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	{
	  offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
	  base = virtual_stack_vars_rtx;
	  base_align = crtl->max_used_stack_slot_alignment;
	}
      else
	{
	  /* Large alignment is only processed in the last pass.  */
	  if (pred)
	    continue;
	  gcc_assert (large_base != NULL);

	  large_alloc += alignb - 1;
	  large_alloc &= -(HOST_WIDE_INT)alignb;
	  offset = large_alloc;
	  large_alloc += stack_vars[i].size;

	  base = large_base;
	  base_align = large_align;
	}

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  expand_one_stack_var_at (stack_vars[j].decl,
				   base, base_align,
				   offset);
	}
    }

  gcc_assert (large_alloc == large_size);
}
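
/* The large_alloc rounding above mirrors alloc_stack_frame_space; as a
   purely illustrative example, with large_alloc == 4 and alignb == 16,
   adding 15 and masking with -16 rounds large_alloc up to 16 before the
   variable's offset is taken.  */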

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
	set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
			   crtl->max_used_stack_slot_alignment, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
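
/* To summarize the tests above: everything is deferred under
   -fstack-protector or for "large" over-aligned variables; otherwise
   toplevel variables are deferred only at -O2 and above, and at -O0
   small variables (under 32 bytes) are allocated immediately to keep
   the conflict graph small.  */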

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
	 we conservatively assume it will be on stack even if VAR is
	 eventually put into register after RA pass.  For non-automatic
	 variables, which won't be on stack, we collect alignment of
	 type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
				   TYPE_MODE (TREE_TYPE (var)),
				   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
	   or variables which were assigned a stack slot already by
	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
	   changed from the offset chosen to it.  */
	align = crtl->stack_alignment_estimated;
      else
	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
	 it, which means that the in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
	align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
         realign decision is made.  */
      gcc_assert(!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
		  || (!DECL_EXTERNAL (var)
		      && !DECL_HAS_VALUE_EXPR_P (var)
		      && !TREE_STATIC (var)
		      && TREE_TYPE (var) != error_mark_node
		      && !DECL_HARD_REGISTER (var)
		      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
	expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
	expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
	expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
	{
	  error ("size of variable %q+D is too large", var);
	  expand_one_error_var (var);
	}
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
	expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
	&& ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
	    || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
	|| !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !host_integerp (TYPE_SIZE_UNIT (type), 1))
	    len = max;
	  else
	    len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
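
/* For example, with the default --param ssp-buffer-size=8, char buf[4]
   classifies as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY while
   char buf[64] classifies as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
   a struct containing either adds SPCT_HAS_AGGREGATE.  */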

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
	if (ph_i != phase[j])
	  add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			   VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  pointer_map_destroy (decl_to_stack_part);
  decl_to_stack_part = NULL;
}

/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  tree old_cur_fun_decl = current_function_decl;
  referenced_var_iterator rvi;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  current_function_decl = node->decl;
  push_cfun (fn);

  gcc_checking_assert (gimple_referenced_vars (fn));
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
    size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
	stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
	expand_one_var (var, true, true);
      else
	{
	  /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
	     contain the default def (representing the parm or result itself)
	     we don't do anything here.  But those which don't contain the
	     default def (representing a temporary based on the parm/result)
	     we need to allocate space just like for normal VAR_DECLs.  */
	  if (!bitmap_bit_p (SA.partition_has_default_def, i))
	    {
	      expand_one_var (var, true, true);
	      gcc_assert (SA.partition_to_pseudo[i]);
	    }
	}
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
	{
	  TREE_USED (var) = 0;
	  goto next;
	}
      /* We didn't set a block for static or extern because it's hard
	 to tell the difference between a global variable (re)declared
	 in a local scope, and one that's really declared there to
	 begin with.  And it doesn't really matter much, since we're
	 not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	expand_now = true;

      /* If the variable is not associated with any block, then it
	 was created by the optimizers, and could be live anywhere
	 in the function.  */
      else if (TREE_USED (var))
	expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
	 this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
	expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
	{
	  rtx rtl = DECL_RTL_IF_SET (var);

	  /* Keep artificial non-ignored vars in cfun->local_decls
	     chain until instantiate_decls.  */
	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
	    add_local_decl (cfun, var);
	  else if (rtl == NULL_RTX)
	    /* If rtl isn't set yet, which can happen e.g. with
	       -fstack-protector, retry before returning from this
	       function.  */
	    VEC_safe_push (tree, heap, maybe_local_decls, var);
	}
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
		       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();
      /* Due to the way alias sets work, no variables with non-conflicting
	 alias sets may be assigned the same address.  Add conflicts to
	 reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
	  && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
	 array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
	 earlier, such that we naturally see these variables first,
	 and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
	{
	  /* Phase 1 contains only character arrays.  */
	  expand_stack_vars (stack_protect_decl_phase_1);

	  /* Phase 2 contains other kinds of arrays.  */
	  if (flag_stack_protect == 2)
	    expand_stack_vars (stack_protect_decl_phase_2);
	}

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
	 chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
	add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
	frame_offset += align - 1;
      frame_offset &= -align;
    }
}

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
			 TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
	break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
	break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}

/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
	 insert on the remaining edge we potentially will insert
	 it at the end of this block (if the dest block isn't feasible)
	 in order to avoid splitting the edge.  This insertion will take
	 place in front of the last jump.  But we might have emitted
	 multiple jumps (conditional and one unconditional) to the
	 same destination.  Inserting in front of the last one then
	 is a problem.  See PR 40021.  We fix this by deleting all
	 jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
	 confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
	{
	  insn = PREV_INSN (insn);
	  if (JUMP_P (NEXT_INSN (insn)))
	    {
	      if (!any_condjump_p (NEXT_INSN (insn)))
		{
		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
		}
	      delete_insn (NEXT_INSN (insn));
	    }
	}
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
	 ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
	{
	  enum tree_code code2 = gimple_assign_rhs_code (second);
	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
	    {
	      code = code2;
	      op0 = gimple_assign_rhs1 (second);
	      op1 = gimple_assign_rhs2 (second);
	    }
	  /* If jumps are cheap turn some more codes into
	     jumpy sequences.  */
	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
	    {
	      if ((code2 == BIT_AND_EXPR
		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
		  || code2 == TRUTH_AND_EXPR)
		{
		  code = TRUTH_ANDIF_EXPR;
		  op0 = gimple_assign_rhs1 (second);
		  op1 = gimple_assign_rhs2 (second);
		}
	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
		{
		  code = TRUTH_ORIF_EXPR;
		  op0 = gimple_assign_rhs1 (second);
		  op1 = gimple_assign_rhs2 (second);
		}
	    }
	}
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  set_curr_insn_source_location (gimple_location (stmt));
  set_curr_insn_block (gimple_block (stmt));

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
		true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
	{
	  set_curr_insn_source_location (true_edge->goto_locus);
	  set_curr_insn_block (true_edge->goto_block);
	  true_edge->goto_locus = curr_insn_locator ();
	}
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
		   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
	{
	  set_curr_insn_source_location (false_edge->goto_locus);
	  set_curr_insn_block (false_edge->goto_block);
	  false_edge->goto_locus = curr_insn_locator ();
	}
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
	    true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}

/* Mark all calls that can have a transaction restart.  */

static void
mark_transaction_restart_calls (gimple stmt)
{
  struct tm_restart_node dummy;
  void **slot;

  if (!cfun->gimple_df->tm_restart)
    return;

  dummy.stmt = stmt;
  slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
  if (slot)
    {
      struct tm_restart_node *n = (struct tm_restart_node *) *slot;
      tree list = n->label_or_list;
      rtx insn;

      for (insn = next_real_insn (get_last_insn ());
	   !CALL_P (insn);
	   insn = next_real_insn (insn))
	continue;

      if (TREE_CODE (list) == LABEL_DECL)
	add_reg_note (insn, REG_TM, label_rtx (list));
      else
	for (; list ; list = TREE_CHAIN (list))
	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
    }
}
1995 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1999 expand_call_stmt (gimple stmt)
2001 tree exp, decl, lhs;
2005 if (gimple_call_internal_p (stmt))
2007 expand_internal_call (stmt);
2011 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2013 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2014 decl = gimple_call_fndecl (stmt);
2015 builtin_p = decl && DECL_BUILT_IN (decl);
2017 /* If this is not a builtin function, the function type through which the
2018 call is made may be different from the type of the function. */
2021 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2022 CALL_EXPR_FN (exp));
2024 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2025 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2027 for (i = 0; i < gimple_call_num_args (stmt); i++)
2029 tree arg = gimple_call_arg (stmt, i);
2031 /* TER addresses into arguments of builtin functions so we have a
2032 chance to infer more correct alignment information. See PR39954. */
2034 && TREE_CODE (arg) == SSA_NAME
2035 && (def = get_gimple_for_ssa_name (arg))
2036 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2037 arg = gimple_assign_rhs1 (def);
2038 CALL_EXPR_ARG (exp, i) = arg;
2041 if (gimple_has_side_effects (stmt))
2042 TREE_SIDE_EFFECTS (exp) = 1;
2044 if (gimple_call_nothrow_p (stmt))
2045 TREE_NOTHROW (exp) = 1;
2047 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2048 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2050 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2051 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2052 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2053 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2055 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2056 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2057 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2058 TREE_BLOCK (exp) = gimple_block (stmt);
2060 /* Ensure RTL is created for debug args. */
2061 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2063 VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
2068 for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
2070 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2071 expand_debug_expr (dtemp);
2075 lhs = gimple_call_lhs (stmt);
2077 expand_assignment (lhs, exp, false);
2079 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
2081 mark_transaction_restart_calls (stmt);
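
/* Illustration only, not part of GCC: for a two-argument GIMPLE call
   "lhs = foo (a, b)" the code above in effect performs

     exp = build_vl_exp (CALL_EXPR, 2 + 3);
     CALL_EXPR_FN (exp) = foo;
     CALL_EXPR_ARG (exp, 0) = a;
     CALL_EXPR_ARG (exp, 1) = b;
     expand_assignment (lhs, exp, false);

   i.e. a transient CALL_EXPR tree is rebuilt from the GIMPLE_CALL and
   handed to the pre-existing tree expanders.  */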
2084 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
2085 STMT that doesn't require special handling for outgoing edges.  That
2086 is, no tailcalls and no GIMPLE_COND.  */
2089 expand_gimple_stmt_1 (gimple stmt)
2093 set_curr_insn_source_location (gimple_location (stmt));
2094 set_curr_insn_block (gimple_block (stmt));
2096 switch (gimple_code (stmt))
2099 op0 = gimple_goto_dest (stmt);
2100 if (TREE_CODE (op0) == LABEL_DECL)
2103 expand_computed_goto (op0);
2106 expand_label (gimple_label_label (stmt));
2109 case GIMPLE_PREDICT:
2115 expand_asm_stmt (stmt);
2118 expand_call_stmt (stmt);
2122 op0 = gimple_return_retval (stmt);
2124 if (op0 && op0 != error_mark_node)
2126 tree result = DECL_RESULT (current_function_decl);
2128 /* If we are not returning the current function's RESULT_DECL,
2129 build an assignment to it. */
2132 /* I believe that a function's RESULT_DECL is unique. */
2133 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
2135 /* ??? We'd like to simply use expand_assignment here,
2136 but this fails if the value is of BLKmode but the return
2137 decl is a register. expand_return has special handling
2138 for this combination, which eventually should move
2139 to common code. See comments there. Until then, let's
2140 build a modify expression :-/ */
2141 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
2146 expand_null_return ();
2148 expand_return (op0);
2153 tree lhs = gimple_assign_lhs (stmt);
2155 /* Tree expand used to fiddle with |= and &= of two bitfield
2156 COMPONENT_REFs here.  This can't happen with gimple: the LHS
2157 of binary assigns must be a gimple reg. */
2159 if (TREE_CODE (lhs) != SSA_NAME
2160 || get_gimple_rhs_class (gimple_expr_code (stmt))
2161 == GIMPLE_SINGLE_RHS)
2163 tree rhs = gimple_assign_rhs1 (stmt);
2164 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
2165 == GIMPLE_SINGLE_RHS);
2166 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
2167 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
2168 if (TREE_CLOBBER_P (rhs))
2169 /* This is a clobber to mark the going out of scope for
2173 expand_assignment (lhs, rhs,
2174 gimple_assign_nontemporal_move_p (stmt));
2179 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
2180 struct separate_ops ops;
2181 bool promoted = false;
2183 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
2184 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2187 ops.code = gimple_assign_rhs_code (stmt);
2188 ops.type = TREE_TYPE (lhs);
2189 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2191 case GIMPLE_TERNARY_RHS:
2192 ops.op2 = gimple_assign_rhs3 (stmt);
2194 case GIMPLE_BINARY_RHS:
2195 ops.op1 = gimple_assign_rhs2 (stmt);
2197 case GIMPLE_UNARY_RHS:
2198 ops.op0 = gimple_assign_rhs1 (stmt);
2203 ops.location = gimple_location (stmt);
2205 /* If we want to use a nontemporal store, force the value into a
2206 register first.  If we store into a promoted register,
2207 don't expand directly into the target.  */
2208 temp = nontemporal || promoted ? NULL_RTX : target;
2209 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2216 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2217 /* If TEMP is a VOIDmode constant, use convert_modes to make
2218 sure that we properly convert it. */
2219 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2221 temp = convert_modes (GET_MODE (target),
2222 TYPE_MODE (ops.type),
2224 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2225 GET_MODE (target), temp, unsignedp);
2228 convert_move (SUBREG_REG (target), temp, unsignedp);
2230 else if (nontemporal && emit_storent_insn (target, temp))
2234 temp = force_operand (temp, target);
2236 emit_move_insn (target, temp);
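
/* Summary of the store paths above, derived from the code itself rather
   than separate documentation: a promoted SUBREG target is written via
   convert_modes / convert_move into SUBREG_REG (target); a nontemporal
   move for which emit_storent_insn succeeds is already done; anything
   else goes through force_operand and a plain emit_move_insn.  */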
2247 /* Expand one gimple statement STMT and return the last RTL instruction
2248 before any of the newly generated ones.
2250 In addition to generating the necessary RTL instructions this also
2251 sets REG_EH_REGION notes if necessary and sets the current source
2252 location for diagnostics. */
2255 expand_gimple_stmt (gimple stmt)
2257 location_t saved_location = input_location;
2258 rtx last = get_last_insn ();
2263 /* We need to save and restore the current source location so that errors
2264 discovered during expansion are emitted with the right location. But
2265 it would be better if the diagnostic routines used the source location
2266 embedded in the tree nodes rather than globals. */
2267 if (gimple_has_location (stmt))
2268 input_location = gimple_location (stmt);
2270 expand_gimple_stmt_1 (stmt);
2272 /* Free any temporaries used to evaluate this statement. */
2275 input_location = saved_location;
2277 /* Mark all insns that may trap. */
2278 lp_nr = lookup_stmt_eh_lp (stmt);
2282 for (insn = next_real_insn (last); insn;
2283 insn = next_real_insn (insn))
2285 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2286 /* If we want exceptions for non-call insns, any
2287 may_trap_p instruction may throw. */
2288 && GET_CODE (PATTERN (insn)) != CLOBBER
2289 && GET_CODE (PATTERN (insn)) != USE
2290 && insn_could_throw_p (insn))
2291 make_reg_eh_region_note (insn, 0, lp_nr);
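
/* Illustration only: for LP_NR > 0 a potentially-trapping insn expanded
   from STMT ends up carrying something like

     (expr_list:REG_EH_REGION (const_int LP_NR) ...)

   so later RTL passes know it may throw and which landing pad it
   reaches.  The printed form shown here is approximate.  */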
2298 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2299 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2300 generated a tail call (something that might be denied by the ABI
2301 rules governing the call; see calls.c).
2303 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2304 can still reach the rest of BB. The case here is __builtin_sqrt,
2305 where the NaN result goes through the external function (with a
2306 tailcall) and the normal result happens via a sqrt instruction. */
2309 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2317 last2 = last = expand_gimple_stmt (stmt);
2319 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2320 if (CALL_P (last) && SIBLING_CALL_P (last))
2323 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2325 *can_fallthru = true;
2329 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2330 Any instructions emitted here are about to be deleted. */
2331 do_pending_stack_adjust ();
2333 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2334 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2335 EH or abnormal edges, we shouldn't have created a tail call in
2336 the first place. So it seems to me we should just be removing
2337 all edges here, or redirecting the existing fallthru edge to
2343 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2345 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2347 if (e->dest != EXIT_BLOCK_PTR)
2349 e->dest->count -= e->count;
2350 e->dest->frequency -= EDGE_FREQUENCY (e);
2351 if (e->dest->count < 0)
2353 if (e->dest->frequency < 0)
2354 e->dest->frequency = 0;
2357 probability += e->probability;
2364 /* This is somewhat ugly: the call_expr expander often emits instructions
2365 after the sibcall (to perform the function return). These confuse the
2366 find_many_sub_basic_blocks code, so we need to get rid of these. */
2367 last = NEXT_INSN (last);
2368 gcc_assert (BARRIER_P (last));
2370 *can_fallthru = false;
2371 while (NEXT_INSN (last))
2373 /* For instance, a sqrt builtin expander expands an if with a
2374 sibcall in the then-branch and a label for the `else` branch.  */
2375 if (LABEL_P (NEXT_INSN (last)))
2377 *can_fallthru = true;
2380 delete_insn (NEXT_INSN (last));
2383 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2384 e->probability += probability;
2387 update_bb_for_insn (bb);
2389 if (NEXT_INSN (last))
2391 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2394 if (BARRIER_P (last))
2395 BB_END (bb) = PREV_INSN (last);
2398 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2403 /* Return the difference between the floor and the truncated result of
2404 a signed division by OP1 with remainder MOD. */
2406 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2408 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2409 return gen_rtx_IF_THEN_ELSE
2410 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2411 gen_rtx_IF_THEN_ELSE
2412 (mode, gen_rtx_LT (BImode,
2413 gen_rtx_DIV (mode, op1, mod),
2415 constm1_rtx, const0_rtx),
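
/* Worked example (illustrative): -7 / 2 truncates to -3 with MOD = -1;
   OP1 / MOD = 2 / -1 = -2 < 0, so the adjustment is -1 and the floor
   result is -3 + -1 = -4, as expected for floor (-3.5).  */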
2419 /* Return the difference between the ceil and the truncated result of
2420 a signed division by OP1 with remainder MOD. */
2422 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2424 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2425 return gen_rtx_IF_THEN_ELSE
2426 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2427 gen_rtx_IF_THEN_ELSE
2428 (mode, gen_rtx_GT (BImode,
2429 gen_rtx_DIV (mode, op1, mod),
2431 const1_rtx, const0_rtx),
2435 /* Return the difference between the ceil and the truncated result of
2436 an unsigned division by OP1 with remainder MOD. */
2438 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2440 /* (mod != 0 ? 1 : 0) */
2441 return gen_rtx_IF_THEN_ELSE
2442 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2443 const1_rtx, const0_rtx);
2446 /* Return the difference between the rounded and the truncated result
2447 of a signed division by OP1 with remainder MOD. Halfway cases are
2448 rounded away from zero, rather than to the nearest even number. */
2450 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2452 /* (abs (mod) >= abs (op1) - abs (mod)
2453 ? (op1 / mod > 0 ? 1 : -1)
2455 return gen_rtx_IF_THEN_ELSE
2456 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2457 gen_rtx_MINUS (mode,
2458 gen_rtx_ABS (mode, op1),
2459 gen_rtx_ABS (mode, mod))),
2460 gen_rtx_IF_THEN_ELSE
2461 (mode, gen_rtx_GT (BImode,
2462 gen_rtx_DIV (mode, op1, mod),
2464 const1_rtx, constm1_rtx),
2468 /* Return the difference between the rounded and the truncated result
2469 of an unsigned division by OP1 with remainder MOD.  Halfway cases
2470 are rounded away from zero, rather than to the nearest even
2473 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2475 /* (mod >= op1 - mod ? 1 : 0) */
2476 return gen_rtx_IF_THEN_ELSE
2477 (mode, gen_rtx_GE (BImode, mod,
2478 gen_rtx_MINUS (mode, op1, mod)),
2479 const1_rtx, const0_rtx);
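
/* Illustration only, not part of GCC: the signed adjustments above,
   restated as a self-contained C sketch over plain ints (C99 "/" and
   "%" truncate toward zero, matching the RTL DIV/MOD used here).
   Kept under #if 0 so it cannot affect the build.  */
#if 0
static int
floor_div_example (int a, int b)
{
  int div = a / b, mod = a % b;
  /* Same test as floor_sdiv_adjust: a nonzero remainder whose sign
     differs from B's means truncation overshot the floor by one.  */
  return div + ((mod != 0 && (b ^ mod) < 0) ? -1 : 0);
}

static int
ceil_div_example (int a, int b)
{
  int div = a / b, mod = a % b;
  /* Mirror image, as in ceil_sdiv_adjust.  */
  return div + ((mod != 0 && (b ^ mod) >= 0) ? 1 : 0);
}
#endif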
2482 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2486 convert_debug_memory_address (enum machine_mode mode, rtx x,
2489 enum machine_mode xmode = GET_MODE (x);
2491 #ifndef POINTERS_EXTEND_UNSIGNED
2492 gcc_assert (mode == Pmode
2493 || mode == targetm.addr_space.address_mode (as));
2494 gcc_assert (xmode == mode || xmode == VOIDmode);
2498 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
2500 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2503 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
2504 x = simplify_gen_subreg (mode, x, xmode,
2505 subreg_lowpart_offset
2507 else if (POINTERS_EXTEND_UNSIGNED > 0)
2508 x = gen_rtx_ZERO_EXTEND (mode, x);
2509 else if (!POINTERS_EXTEND_UNSIGNED)
2510 x = gen_rtx_SIGN_EXTEND (mode, x);
2513 switch (GET_CODE (x))
2516 if ((SUBREG_PROMOTED_VAR_P (x)
2517 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2518 || (GET_CODE (SUBREG_REG (x)) == PLUS
2519 && REG_P (XEXP (SUBREG_REG (x), 0))
2520 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2521 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2522 && GET_MODE (SUBREG_REG (x)) == mode)
2523 return SUBREG_REG (x);
2526 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2527 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2530 temp = shallow_copy_rtx (x);
2531 PUT_MODE (temp, mode);
2534 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2536 temp = gen_rtx_CONST (mode, temp);
2540 if (CONST_INT_P (XEXP (x, 1)))
2542 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2544 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2550 /* Don't know how to express ptr_extend as an operation in debug info.  */
2553 #endif /* POINTERS_EXTEND_UNSIGNED */
2558 /* Return an RTX equivalent to the value of the parameter DECL. */
2561 expand_debug_parm_decl (tree decl)
2563 rtx incoming = DECL_INCOMING_RTL (decl);
2566 && GET_MODE (incoming) != BLKmode
2567 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
2568 || (MEM_P (incoming)
2569 && REG_P (XEXP (incoming, 0))
2570 && HARD_REGISTER_P (XEXP (incoming, 0)))))
2572 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
2574 #ifdef HAVE_window_save
2575 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
2576 If the target machine has an explicit window save instruction, the
2577 actual entry value is the corresponding OUTGOING_REGNO instead. */
2578 if (REG_P (incoming)
2579 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
2581 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
2582 OUTGOING_REGNO (REGNO (incoming)), 0);
2583 else if (MEM_P (incoming))
2585 rtx reg = XEXP (incoming, 0);
2586 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
2588 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
2589 incoming = replace_equiv_address_nv (incoming, reg);
2592 incoming = copy_rtx (incoming);
2596 ENTRY_VALUE_EXP (rtl) = incoming;
2601 && GET_MODE (incoming) != BLKmode
2602 && !TREE_ADDRESSABLE (decl)
2604 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
2605 || (GET_CODE (XEXP (incoming, 0)) == PLUS
2606 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
2607 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
2608 return copy_rtx (incoming);
2613 /* Return an RTX equivalent to the value of the tree expression EXP. */
2616 expand_debug_expr (tree exp)
2618 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2619 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2620 enum machine_mode inner_mode = VOIDmode;
2621 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2624 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2626 case tcc_expression:
2627 switch (TREE_CODE (exp))
2631 case WIDEN_MULT_PLUS_EXPR:
2632 case WIDEN_MULT_MINUS_EXPR:
2636 case TRUTH_ANDIF_EXPR:
2637 case TRUTH_ORIF_EXPR:
2638 case TRUTH_AND_EXPR:
2640 case TRUTH_XOR_EXPR:
2643 case TRUTH_NOT_EXPR:
2652 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2659 case tcc_comparison:
2660 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2667 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2668 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2678 case tcc_exceptional:
2679 case tcc_declaration:
2685 switch (TREE_CODE (exp))
2688 if (!lookup_constant_def (exp))
2690 if (strlen (TREE_STRING_POINTER (exp)) + 1
2691 != (size_t) TREE_STRING_LENGTH (exp))
2693 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2694 op0 = gen_rtx_MEM (BLKmode, op0);
2695 set_mem_attributes (op0, exp, 0);
2698 /* Fall through... */
2703 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2707 gcc_assert (COMPLEX_MODE_P (mode));
2708 op0 = expand_debug_expr (TREE_REALPART (exp));
2709 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2710 return gen_rtx_CONCAT (mode, op0, op1);
2712 case DEBUG_EXPR_DECL:
2713 op0 = DECL_RTL_IF_SET (exp);
2718 op0 = gen_rtx_DEBUG_EXPR (mode);
2719 DEBUG_EXPR_TREE_DECL (op0) = exp;
2720 SET_DECL_RTL (exp, op0);
2730 op0 = DECL_RTL_IF_SET (exp);
2732 /* This decl was probably optimized away. */
2735 if (TREE_CODE (exp) != VAR_DECL
2736 || DECL_EXTERNAL (exp)
2737 || !TREE_STATIC (exp)
2739 || DECL_HARD_REGISTER (exp)
2740 || DECL_IN_CONSTANT_POOL (exp)
2741 || mode == VOIDmode)
2744 op0 = make_decl_rtl_for_debug (exp);
2746 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2747 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2751 op0 = copy_rtx (op0);
2753 if (GET_MODE (op0) == BLKmode
2754 /* If op0 is not BLKmode but MODE is, the address adjustment
2755 below would ICE.  While it is likely a FE bug,
2756 try to be robust here. See PR43166. */
2758 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2760 gcc_assert (MEM_P (op0));
2761 op0 = adjust_address_nv (op0, mode, 0);
2772 inner_mode = GET_MODE (op0);
2774 if (mode == inner_mode)
2777 if (inner_mode == VOIDmode)
2779 if (TREE_CODE (exp) == SSA_NAME)
2780 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2782 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2783 if (mode == inner_mode)
2787 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2789 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2790 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2791 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2792 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2794 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2796 else if (FLOAT_MODE_P (mode))
2798 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2799 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2800 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2802 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2804 else if (FLOAT_MODE_P (inner_mode))
2807 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2809 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2811 else if (CONSTANT_P (op0)
2812 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
2813 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2814 subreg_lowpart_offset (mode,
2816 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2817 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2819 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
2821 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
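
/* The conversion ladder above, summarized from the code itself:
   float->float uses FLOAT_TRUNCATE or FLOAT_EXTEND, int->float uses
   FLOAT or UNSIGNED_FLOAT, float->int uses FIX or UNSIGNED_FIX, and
   int->int takes a lowpart SUBREG when narrowing (or when OP0 is
   constant) and ZERO_EXTEND / SIGN_EXTEND when widening.  */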
2827 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2829 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
2830 TREE_OPERAND (exp, 0),
2831 TREE_OPERAND (exp, 1));
2833 return expand_debug_expr (newexp);
2837 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2841 if (TREE_CODE (exp) == MEM_REF)
2843 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2844 || (GET_CODE (op0) == PLUS
2845 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2846 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2847 Instead just use get_inner_reference. */
2850 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2851 if (!op1 || !CONST_INT_P (op1))
2854 op0 = plus_constant (op0, INTVAL (op1));
2857 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2858 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2860 as = ADDR_SPACE_GENERIC;
2862 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2864 if (op0 == NULL_RTX)
2867 op0 = gen_rtx_MEM (mode, op0);
2868 set_mem_attributes (op0, exp, 0);
2869 if (TREE_CODE (exp) == MEM_REF
2870 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2871 set_mem_expr (op0, NULL_TREE);
2872 set_mem_addr_space (op0, as);
2876 case TARGET_MEM_REF:
2877 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2878 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2881 op0 = expand_debug_expr
2882 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2886 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2887 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2889 as = ADDR_SPACE_GENERIC;
2891 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2893 if (op0 == NULL_RTX)
2896 op0 = gen_rtx_MEM (mode, op0);
2898 set_mem_attributes (op0, exp, 0);
2899 set_mem_addr_space (op0, as);
2905 case ARRAY_RANGE_REF:
2910 case VIEW_CONVERT_EXPR:
2912 enum machine_mode mode1;
2913 HOST_WIDE_INT bitsize, bitpos;
2916 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2917 &mode1, &unsignedp, &volatilep, false);
2923 orig_op0 = op0 = expand_debug_expr (tem);
2930 enum machine_mode addrmode, offmode;
2935 op0 = XEXP (op0, 0);
2936 addrmode = GET_MODE (op0);
2937 if (addrmode == VOIDmode)
2940 op1 = expand_debug_expr (offset);
2944 offmode = GET_MODE (op1);
2945 if (offmode == VOIDmode)
2946 offmode = TYPE_MODE (TREE_TYPE (offset));
2948 if (addrmode != offmode)
2949 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2950 subreg_lowpart_offset (addrmode,
2953 /* Don't use offset_address here; we don't need a
2954 recognizable address, and we don't want to generate
2956 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
2962 if (mode1 == VOIDmode)
2964 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2965 if (bitpos >= BITS_PER_UNIT)
2967 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2968 bitpos %= BITS_PER_UNIT;
2970 else if (bitpos < 0)
2973 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2974 op0 = adjust_address_nv (op0, mode1, units);
2975 bitpos += units * BITS_PER_UNIT;
2977 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2978 op0 = adjust_address_nv (op0, mode, 0);
2979 else if (GET_MODE (op0) != mode1)
2980 op0 = adjust_address_nv (op0, mode1, 0);
2982 op0 = copy_rtx (op0);
2983 if (op0 == orig_op0)
2984 op0 = shallow_copy_rtx (op0);
2985 set_mem_attributes (op0, exp, 0);
2988 if (bitpos == 0 && mode == GET_MODE (op0))
2994 if (GET_MODE (op0) == BLKmode)
2997 if ((bitpos % BITS_PER_UNIT) == 0
2998 && bitsize == GET_MODE_BITSIZE (mode1))
3000 enum machine_mode opmode = GET_MODE (op0);
3002 if (opmode == VOIDmode)
3003 opmode = TYPE_MODE (TREE_TYPE (tem));
3005 /* This condition may hold if we're expanding the address
3006 right past the end of an array that turned out not to
3007 be addressable (i.e., the address was only computed in
3008 debug stmts). The gen_subreg below would rightfully
3009 crash, and the address doesn't really exist, so just
3011 if (bitpos >= GET_MODE_BITSIZE (opmode))
3014 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
3015 return simplify_gen_subreg (mode, op0, opmode,
3016 bitpos / BITS_PER_UNIT);
3019 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
3020 && TYPE_UNSIGNED (TREE_TYPE (exp))
3022 : ZERO_EXTRACT, mode,
3023 GET_MODE (op0) != VOIDmode
3025 : TYPE_MODE (TREE_TYPE (tem)),
3026 op0, GEN_INT (bitsize), GEN_INT (bitpos));
3030 return simplify_gen_unary (ABS, mode, op0, mode);
3033 return simplify_gen_unary (NEG, mode, op0, mode);
3036 return simplify_gen_unary (NOT, mode, op0, mode);
3039 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3041 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
3044 case FIX_TRUNC_EXPR:
3045 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
3048 case POINTER_PLUS_EXPR:
3049 /* For the rare target where pointers are not the same size as
3050 size_t, we need to check for mis-matched modes and correct
3053 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
3054 && GET_MODE (op0) != GET_MODE (op1))
3056 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
3057 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
3060 /* We always sign-extend, regardless of the signedness of
3061 the operand, because the operand is always unsigned
3062 here even if the original C expression is signed. */
3063 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
3068 return simplify_gen_binary (PLUS, mode, op0, op1);
3071 return simplify_gen_binary (MINUS, mode, op0, op1);
3074 return simplify_gen_binary (MULT, mode, op0, op1);
3077 case TRUNC_DIV_EXPR:
3078 case EXACT_DIV_EXPR:
3080 return simplify_gen_binary (UDIV, mode, op0, op1);
3082 return simplify_gen_binary (DIV, mode, op0, op1);
3084 case TRUNC_MOD_EXPR:
3085 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
3087 case FLOOR_DIV_EXPR:
3089 return simplify_gen_binary (UDIV, mode, op0, op1);
3092 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3093 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3094 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3095 return simplify_gen_binary (PLUS, mode, div, adj);
3098 case FLOOR_MOD_EXPR:
3100 return simplify_gen_binary (UMOD, mode, op0, op1);
3103 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3104 rtx adj = floor_sdiv_adjust (mode, mod, op1);
3105 adj = simplify_gen_unary (NEG, mode,
3106 simplify_gen_binary (MULT, mode, adj, op1),
3108 return simplify_gen_binary (PLUS, mode, mod, adj);
3114 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3115 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3116 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3117 return simplify_gen_binary (PLUS, mode, div, adj);
3121 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3122 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3123 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3124 return simplify_gen_binary (PLUS, mode, div, adj);
3130 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3131 rtx adj = ceil_udiv_adjust (mode, mod, op1);
3132 adj = simplify_gen_unary (NEG, mode,
3133 simplify_gen_binary (MULT, mode, adj, op1),
3135 return simplify_gen_binary (PLUS, mode, mod, adj);
3139 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3140 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
3141 adj = simplify_gen_unary (NEG, mode,
3142 simplify_gen_binary (MULT, mode, adj, op1),
3144 return simplify_gen_binary (PLUS, mode, mod, adj);
3147 case ROUND_DIV_EXPR:
3150 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
3151 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3152 rtx adj = round_udiv_adjust (mode, mod, op1);
3153 return simplify_gen_binary (PLUS, mode, div, adj);
3157 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
3158 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3159 rtx adj = round_sdiv_adjust (mode, mod, op1);
3160 return simplify_gen_binary (PLUS, mode, div, adj);
3163 case ROUND_MOD_EXPR:
3166 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
3167 rtx adj = round_udiv_adjust (mode, mod, op1);
3168 adj = simplify_gen_unary (NEG, mode,
3169 simplify_gen_binary (MULT, mode, adj, op1),
3171 return simplify_gen_binary (PLUS, mode, mod, adj);
3175 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
3176 rtx adj = round_sdiv_adjust (mode, mod, op1);
3177 adj = simplify_gen_unary (NEG, mode,
3178 simplify_gen_binary (MULT, mode, adj, op1),
3180 return simplify_gen_binary (PLUS, mode, mod, adj);
3184 return simplify_gen_binary (ASHIFT, mode, op0, op1);
3188 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
3190 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
3193 return simplify_gen_binary (ROTATE, mode, op0, op1);
3196 return simplify_gen_binary (ROTATERT, mode, op0, op1);
3199 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
3202 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
3205 case TRUTH_AND_EXPR:
3206 return simplify_gen_binary (AND, mode, op0, op1);
3210 return simplify_gen_binary (IOR, mode, op0, op1);
3213 case TRUTH_XOR_EXPR:
3214 return simplify_gen_binary (XOR, mode, op0, op1);
3216 case TRUTH_ANDIF_EXPR:
3217 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
3219 case TRUTH_ORIF_EXPR:
3220 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
3222 case TRUTH_NOT_EXPR:
3223 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
3226 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
3230 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
3234 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
3238 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
3242 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
3245 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
3247 case UNORDERED_EXPR:
3248 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
3251 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
3254 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
3257 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
3260 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
3263 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
3266 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
3269 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
3272 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3275 gcc_assert (COMPLEX_MODE_P (mode));
3276 if (GET_MODE (op0) == VOIDmode)
3277 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3278 if (GET_MODE (op1) == VOIDmode)
3279 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3280 return gen_rtx_CONCAT (mode, op0, op1);
3283 if (GET_CODE (op0) == CONCAT)
3284 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3285 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
3287 GET_MODE_INNER (mode)));
3290 enum machine_mode imode = GET_MODE_INNER (mode);
3295 re = adjust_address_nv (op0, imode, 0);
3296 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3300 enum machine_mode ifmode = int_mode_for_mode (mode);
3301 enum machine_mode ihmode = int_mode_for_mode (imode);
3303 if (ifmode == BLKmode || ihmode == BLKmode)
3305 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3308 re = gen_rtx_SUBREG (ifmode, re, 0);
3309 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3310 if (imode != ihmode)
3311 re = gen_rtx_SUBREG (imode, re, 0);
3312 im = copy_rtx (op0);
3314 im = gen_rtx_SUBREG (ifmode, im, 0);
3315 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3316 if (imode != ihmode)
3317 im = gen_rtx_SUBREG (imode, im, 0);
3319 im = gen_rtx_NEG (imode, im);
3320 return gen_rtx_CONCAT (mode, re, im);
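
/* Illustration: the (elided) case label here is evidently CONJ_EXPR;
   the CONCAT just built implements conj (a + b*i) = a - b*i, and the
   ZERO_EXTRACTs above carve the real and imaginary halves out of an
   integer-mode value when the operand is neither a CONCAT nor a MEM.  */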
3324 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3325 if (!op0 || !MEM_P (op0))
3327 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3328 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3329 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3330 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
3331 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
3332 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3334 if (handled_component_p (TREE_OPERAND (exp, 0)))
3336 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3338 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3339 &bitoffset, &bitsize, &maxsize);
3340 if ((TREE_CODE (decl) == VAR_DECL
3341 || TREE_CODE (decl) == PARM_DECL
3342 || TREE_CODE (decl) == RESULT_DECL)
3343 && (!TREE_ADDRESSABLE (decl)
3344 || target_for_debug_bind (decl))
3345 && (bitoffset % BITS_PER_UNIT) == 0
3347 && bitsize == maxsize)
3348 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3349 bitoffset / BITS_PER_UNIT);
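
/* Illustration only (hypothetical source): for "&s.f" where S is a
   non-addressable local and F sits at byte offset 4, the code above
   yields (plus (debug_implicit_ptr s) (const_int 4)), so var-tracking
   can describe the address without forcing S into memory.  */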
3355 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3356 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3361 exp = build_constructor_from_list (TREE_TYPE (exp),
3362 TREE_VECTOR_CST_ELTS (exp));
3366 if (TREE_CLOBBER_P (exp))
3368 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3373 op0 = gen_rtx_CONCATN
3374 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3376 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3378 op1 = expand_debug_expr (val);
3381 XVECEXP (op0, 0, i) = op1;
3384 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3386 op1 = expand_debug_expr
3387 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3392 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3393 XVECEXP (op0, 0, i) = op1;
3399 goto flag_unsupported;
3402 /* ??? Maybe handle some builtins? */
3407 gimple g = get_gimple_for_ssa_name (exp);
3410 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3416 int part = var_to_partition (SA.map, exp);
3418 if (part == NO_PARTITION)
3420 /* If this is a reference to an incoming value of a parameter
3421 that is never used in the code, or where the incoming
3422 value itself is never used in the code, use the PARM_DECL's
3424 if (SSA_NAME_IS_DEFAULT_DEF (exp)
3425 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
3427 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
3430 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
3437 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3439 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3447 /* Vector stuff. For most of the codes we don't have rtl codes. */
3448 case REALIGN_LOAD_EXPR:
3449 case REDUC_MAX_EXPR:
3450 case REDUC_MIN_EXPR:
3451 case REDUC_PLUS_EXPR:
3453 case VEC_LSHIFT_EXPR:
3454 case VEC_PACK_FIX_TRUNC_EXPR:
3455 case VEC_PACK_SAT_EXPR:
3456 case VEC_PACK_TRUNC_EXPR:
3457 case VEC_RSHIFT_EXPR:
3458 case VEC_UNPACK_FLOAT_HI_EXPR:
3459 case VEC_UNPACK_FLOAT_LO_EXPR:
3460 case VEC_UNPACK_HI_EXPR:
3461 case VEC_UNPACK_LO_EXPR:
3462 case VEC_WIDEN_MULT_HI_EXPR:
3463 case VEC_WIDEN_MULT_LO_EXPR:
3464 case VEC_WIDEN_LSHIFT_HI_EXPR:
3465 case VEC_WIDEN_LSHIFT_LO_EXPR:
3470 case ADDR_SPACE_CONVERT_EXPR:
3471 case FIXED_CONVERT_EXPR:
3473 case WITH_SIZE_EXPR:
3477 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3478 && SCALAR_INT_MODE_P (mode))
3481 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3483 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3486 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3488 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
3490 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3491 return simplify_gen_binary (PLUS, mode, op0, op2);
3495 case WIDEN_MULT_EXPR:
3496 case WIDEN_MULT_PLUS_EXPR:
3497 case WIDEN_MULT_MINUS_EXPR:
3498 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3499 && SCALAR_INT_MODE_P (mode))
3501 inner_mode = GET_MODE (op0);
3502 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3503 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3505 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3506 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3507 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3509 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3510 op0 = simplify_gen_binary (MULT, mode, op0, op1);
3511 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3513 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3514 return simplify_gen_binary (PLUS, mode, op0, op2);
3516 return simplify_gen_binary (MINUS, mode, op2, op0);
3520 case WIDEN_SUM_EXPR:
3521 case WIDEN_LSHIFT_EXPR:
3522 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3523 && SCALAR_INT_MODE_P (mode))
3526 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
3528 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
3530 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
3531 ? ASHIFT : PLUS, mode, op0, op1);
3536 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
3540 #ifdef ENABLE_CHECKING
3549 /* Return an RTX equivalent to the source bind value of the tree expression
3553 expand_debug_source_expr (tree exp)
3556 enum machine_mode mode = VOIDmode, inner_mode;
3558 switch (TREE_CODE (exp))
3562 mode = DECL_MODE (exp);
3563 op0 = expand_debug_parm_decl (exp);
3566 /* See if this isn't an argument that has been completely
3568 if (!DECL_RTL_SET_P (exp)
3569 && !DECL_INCOMING_RTL (exp)
3570 && DECL_ABSTRACT_ORIGIN (current_function_decl))
3573 if (DECL_ABSTRACT_ORIGIN (exp))
3574 aexp = DECL_ABSTRACT_ORIGIN (exp);
3575 if (DECL_CONTEXT (aexp)
3576 == DECL_ABSTRACT_ORIGIN (current_function_decl))
3578 VEC(tree, gc) **debug_args;
3581 #ifdef ENABLE_CHECKING
3583 for (parm = DECL_ARGUMENTS (current_function_decl);
3584 parm; parm = DECL_CHAIN (parm))
3585 gcc_assert (parm != exp
3586 && DECL_ABSTRACT_ORIGIN (parm) != aexp);
3588 debug_args = decl_debug_args_lookup (current_function_decl);
3589 if (debug_args != NULL)
3591 for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
3594 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
3604 if (op0 == NULL_RTX)
3607 inner_mode = GET_MODE (op0);
3608 if (mode == inner_mode)
3611 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
3613 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
3614 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
3615 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
3616 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
3618 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
3620 else if (FLOAT_MODE_P (mode))
3622 else if (FLOAT_MODE_P (inner_mode))
3624 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3625 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
3627 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
3629 else if (CONSTANT_P (op0)
3630 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
3631 op0 = simplify_gen_subreg (mode, op0, inner_mode,
3632 subreg_lowpart_offset (mode, inner_mode));
3633 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
3634 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3636 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3641 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbounded complexity.
3642 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
3643 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
3646 avoid_complex_debug_insns (rtx insn, rtx *exp_p, int depth)
3650 if (exp == NULL_RTX)
3653 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
3658 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
3659 rtx dval = make_debug_expr_from_rtl (exp);
3661 /* Emit a debug bind insn before INSN. */
3662 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
3663 DEBUG_EXPR_TREE_DECL (dval), exp,
3664 VAR_INIT_STATUS_INITIALIZED);
3666 emit_debug_insn_before (bind, insn);
3671 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
3673 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
3674 switch (*format_ptr++)
3677 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
3682 for (j = 0; j < XVECLEN (exp, i); j++)
3683 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
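
/* Illustration only: in a deeply nested location such as
   (plus (mult (plus (mult (plus (reg) (const_int 1)) (const_int 3))
               (const_int 5)) (const_int 7)) (const_int 9))
   the subexpression that exceeds the allowed nesting is replaced by a
   DEBUG_EXPR whose value is bound by a separate debug insn emitted
   before INSN, keeping each INSN_VAR_LOCATION shallow.  */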
3691 /* Expand the _LOCs in debug insns. We run this after expanding all
3692 regular insns, so that any variables referenced in the function
3693 will have their DECL_RTLs set. */
3696 expand_debug_locations (void)
3699 rtx last = get_last_insn ();
3700 int save_strict_alias = flag_strict_aliasing;
3702 /* New alias sets while setting up memory attributes cause
3703 -fcompare-debug failures, even though they don't bring about any
3705 flag_strict_aliasing = 0;
3707 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3708 if (DEBUG_INSN_P (insn))
3710 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3711 rtx val, prev_insn, insn2;
3712 enum machine_mode mode;
3714 if (value == NULL_TREE)
3718 if (INSN_VAR_LOCATION_STATUS (insn)
3719 == VAR_INIT_STATUS_UNINITIALIZED)
3720 val = expand_debug_source_expr (value);
3722 val = expand_debug_expr (value);
3723 gcc_assert (last == get_last_insn ());
3727 val = gen_rtx_UNKNOWN_VAR_LOC ();
3730 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3732 gcc_assert (mode == GET_MODE (val)
3733 || (GET_MODE (val) == VOIDmode
3734 && (CONST_INT_P (val)
3735 || GET_CODE (val) == CONST_FIXED
3736 || GET_CODE (val) == CONST_DOUBLE
3737 || GET_CODE (val) == LABEL_REF)));
3740 INSN_VAR_LOCATION_LOC (insn) = val;
3741 prev_insn = PREV_INSN (insn);
3742 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
3743 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
3746 flag_strict_aliasing = save_strict_alias;
3749 /* Expand basic block BB from GIMPLE trees to RTL. */
3752 expand_gimple_basic_block (basic_block bb)
3754 gimple_stmt_iterator gsi;
3763 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3766 /* Note that since we are now transitioning from GIMPLE to RTL, we
3767 cannot use the gsi_*_bb() routines because they expect the basic
3768 block to be in GIMPLE, instead of RTL. Therefore, we need to
3769 access the BB sequence directly. */
3770 stmts = bb_seq (bb);
3771 bb->il.gimple = NULL;
3772 rtl_profile_for_bb (bb);
3773 init_rtl_bb_info (bb);
3774 bb->flags |= BB_RTL;
3776 /* Remove the RETURN_EXPR if we may fall through to the exit
3778 gsi = gsi_last (stmts);
3779 if (!gsi_end_p (gsi)
3780 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3782 gimple ret_stmt = gsi_stmt (gsi);
3784 gcc_assert (single_succ_p (bb));
3785 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3787 if (bb->next_bb == EXIT_BLOCK_PTR
3788 && !gimple_return_retval (ret_stmt))
3790 gsi_remove (&gsi, false);
3791 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3795 gsi = gsi_start (stmts);
3796 if (!gsi_end_p (gsi))
3798 stmt = gsi_stmt (gsi);
3799 if (gimple_code (stmt) != GIMPLE_LABEL)
3803 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3807 last = get_last_insn ();
3811 expand_gimple_stmt (stmt);
3816 emit_label ((rtx) *elt);
3818 /* Java emits line number notes at the top of labels.
3819 ??? Make this go away once line number notes are obsoleted. */
3820 BB_HEAD (bb) = NEXT_INSN (last);
3821 if (NOTE_P (BB_HEAD (bb)))
3822 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3823 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3825 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3828 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3830 NOTE_BASIC_BLOCK (note) = bb;
3832 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3836 stmt = gsi_stmt (gsi);
3838 /* If this statement is a non-debug one, and we generate debug
3839 insns, then this one might be the last real use of a TERed
3840 SSA_NAME, but where there are still some debug uses further
3841 down. Expanding the current SSA name in such further debug
3842 uses by their RHS might lead to wrong debug info, as coalescing
3843 might make the operands of such RHS be placed into the same
3844 pseudo as something else. Like so:
3845 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3849 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3850 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3851 the write to a_2 would actually have clobbered the place which
3854 So, instead of that, we recognize the situation, and generate
3855 debug temporaries at the last real use of TERed SSA names:
3862 if (MAY_HAVE_DEBUG_INSNS
3864 && !is_gimple_debug (stmt))
3870 location_t sloc = get_curr_insn_source_location ();
3871 tree sblock = get_curr_insn_block ();
3873 /* Look for SSA names that have their last use here (TERed
3874 names always have only one real use). */
3875 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3876 if ((def = get_gimple_for_ssa_name (op)))
3878 imm_use_iterator imm_iter;
3879 use_operand_p use_p;
3880 bool have_debug_uses = false;
3882 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3884 if (gimple_debug_bind_p (USE_STMT (use_p)))
3886 have_debug_uses = true;
3891 if (have_debug_uses)
3893 /* OP is a TERed SSA name, with DEF its defining
3894 statement, and where OP is used in further debug
3895 instructions. Generate a debug temporary, and
3896 replace all uses of OP in debug insns with that
3899 tree value = gimple_assign_rhs_to_tree (def);
3900 tree vexpr = make_node (DEBUG_EXPR_DECL);
3902 enum machine_mode mode;
3904 set_curr_insn_source_location (gimple_location (def));
3905 set_curr_insn_block (gimple_block (def));
3907 DECL_ARTIFICIAL (vexpr) = 1;
3908 TREE_TYPE (vexpr) = TREE_TYPE (value);
3910 mode = DECL_MODE (value);
3912 mode = TYPE_MODE (TREE_TYPE (value));
3913 DECL_MODE (vexpr) = mode;
3915 val = gen_rtx_VAR_LOCATION
3916 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3918 emit_debug_insn (val);
3920 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3922 if (!gimple_debug_bind_p (debugstmt))
3925 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3926 SET_USE (use_p, vexpr);
3928 update_stmt (debugstmt);
3932 set_curr_insn_source_location (sloc);
3933 set_curr_insn_block (sblock);
3936 currently_expanding_gimple_stmt = stmt;
3938 /* Expand this statement, then evaluate the resulting RTL and
3939 fix up the CFG accordingly.  */
3940 if (gimple_code (stmt) == GIMPLE_COND)
3942 new_bb = expand_gimple_cond (bb, stmt);
3946 else if (gimple_debug_bind_p (stmt))
3948 location_t sloc = get_curr_insn_source_location ();
3949 tree sblock = get_curr_insn_block ();
3950 gimple_stmt_iterator nsi = gsi;
3954 tree var = gimple_debug_bind_get_var (stmt);
3957 enum machine_mode mode;
3959 if (TREE_CODE (var) != DEBUG_EXPR_DECL
3960 && TREE_CODE (var) != LABEL_DECL
3961 && !target_for_debug_bind (var))
3962 goto delink_debug_stmt;
3964 if (gimple_debug_bind_has_value_p (stmt))
3965 value = gimple_debug_bind_get_value (stmt);
3969 last = get_last_insn ();
3971 set_curr_insn_source_location (gimple_location (stmt));
3972 set_curr_insn_block (gimple_block (stmt));
3975 mode = DECL_MODE (var);
3977 mode = TYPE_MODE (TREE_TYPE (var));
3979 val = gen_rtx_VAR_LOCATION
3980 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3982 emit_debug_insn (val);
3984 if (dump_file && (dump_flags & TDF_DETAILS))
3986 /* We can't dump the insn with a TREE where an RTX
3988 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
3989 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3990 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
3994 /* In order not to generate too many debug temporaries,
3995 we delink all uses of debug statements we already expanded.
3996 Therefore debug statements between definition and real
3997 use of TERed SSA names will continue to use the SSA name,
3998 and not be replaced with debug temps. */
3999 delink_stmt_imm_use (stmt);
4003 if (gsi_end_p (nsi))
4005 stmt = gsi_stmt (nsi);
4006 if (!gimple_debug_bind_p (stmt))
4010 set_curr_insn_source_location (sloc);
4011 set_curr_insn_block (sblock);
4013 else if (gimple_debug_source_bind_p (stmt))
4015 location_t sloc = get_curr_insn_source_location ();
4016 tree sblock = get_curr_insn_block ();
4017 tree var = gimple_debug_source_bind_get_var (stmt);
4018 tree value = gimple_debug_source_bind_get_value (stmt);
4020 enum machine_mode mode;
4022 last = get_last_insn ();
4024 set_curr_insn_source_location (gimple_location (stmt));
4025 set_curr_insn_block (gimple_block (stmt));
4027 mode = DECL_MODE (var);
4029 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
4030 VAR_INIT_STATUS_UNINITIALIZED);
4032 emit_debug_insn (val);
4034 if (dump_file && (dump_flags & TDF_DETAILS))
4036 /* We can't dump the insn with a TREE where an RTX
4038 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
4039 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4040 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
4043 set_curr_insn_source_location (sloc);
4044 set_curr_insn_block (sblock);
4048 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
4051 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
4062 def_operand_p def_p;
4063 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
4067 /* Ignore this stmt if it is in the list of
4068 replaceable expressions. */
4070 && bitmap_bit_p (SA.values,
4071 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4074 last = expand_gimple_stmt (stmt);
4075 maybe_dump_rtl_for_gimple_stmt (stmt, last);
4080 currently_expanding_gimple_stmt = NULL;
4082 /* Expand implicit goto and convert goto_locus. */
4083 FOR_EACH_EDGE (e, ei, bb->succs)
4085 if (e->goto_locus && e->goto_block)
4087 set_curr_insn_source_location (e->goto_locus);
4088 set_curr_insn_block (e->goto_block);
4089 e->goto_locus = curr_insn_locator ();
4091 e->goto_block = NULL;
4092 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
4094 emit_jump (label_rtx_for_bb (e->dest));
4095 e->flags &= ~EDGE_FALLTHRU;
4099 /* Expanded RTL can create a jump in the last instruction of the block.
4100 This might later be assumed to be a jump to the successor and break edge insertion.
4101 We need to insert a dummy move to prevent this.  See PR41440.  */
4102 if (single_succ_p (bb)
4103 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
4104 && (last = get_last_insn ())
4107 rtx dummy = gen_reg_rtx (SImode);
4108 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
4111 do_pending_stack_adjust ();
4113 /* Find the block tail. The last insn in the block is the insn
4114 before a barrier and/or table jump insn. */
4115 last = get_last_insn ();
4116 if (BARRIER_P (last))
4117 last = PREV_INSN (last);
4118 if (JUMP_TABLE_DATA_P (last))
4119 last = PREV_INSN (PREV_INSN (last));
4122 update_bb_for_insn (bb);
4128 /* Create a basic block for initialization code. */
4131 construct_init_block (void)
4133 basic_block init_block, first_block;
4137 /* Multiple entry points not supported yet. */
4138 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
4139 init_rtl_bb_info (ENTRY_BLOCK_PTR);
4140 init_rtl_bb_info (EXIT_BLOCK_PTR);
4141 ENTRY_BLOCK_PTR->flags |= BB_RTL;
4142 EXIT_BLOCK_PTR->flags |= BB_RTL;
4144 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
4146 /* When the entry edge points to the first basic block, we don't need a jump;
4147 otherwise we have to jump to the proper target.  */
4148 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
4150 tree label = gimple_block_label (e->dest);
4152 emit_jump (label_rtx (label));
4156 flags = EDGE_FALLTHRU;
4158 init_block = create_basic_block (NEXT_INSN (get_insns ()),
4161 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
4162 init_block->count = ENTRY_BLOCK_PTR->count;
4165 first_block = e->dest;
4166 redirect_edge_succ (e, init_block);
4167 e = make_edge (init_block, first_block, flags);
4170 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4171 e->probability = REG_BR_PROB_BASE;
4172 e->count = ENTRY_BLOCK_PTR->count;
4174 update_bb_for_insn (init_block);
4178 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
4179 found in the block tree. */
4182 set_block_levels (tree block, int level)
4186 BLOCK_NUMBER (block) = level;
4187 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
4188 block = BLOCK_CHAIN (block);
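
/* Hypothetical usage (the actual call site is elsewhere in this file):
   set_block_levels (DECL_INITIAL (current_function_decl), 0) numbers
   the outermost scope 0, its subblocks 1, and so on down the tree.  */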
4192 /* Create a block containing landing pads and similar stuff. */
4195 construct_exit_block (void)
4197 rtx head = get_last_insn ();
4199 basic_block exit_block;
4203 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
4205 rtl_profile_for_bb (EXIT_BLOCK_PTR);
4207 /* Make sure the locus is set to the end of the function, so that
4208 epilogue line numbers and warnings are set properly. */
4209 if (cfun->function_end_locus != UNKNOWN_LOCATION)
4210 input_location = cfun->function_end_locus;
4212 /* The following insns belong to the top scope. */
4213 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4215 /* Generate rtl for function exit. */
4216 expand_function_end ();
4218 end = get_last_insn ();
4221 /* While emitting the function end we could move the end of the last basic block.
4223 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4224 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
4225 head = NEXT_INSN (head);
4226 exit_block = create_basic_block (NEXT_INSN (head), end,
4227 EXIT_BLOCK_PTR->prev_bb);
4228 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
4229 exit_block->count = EXIT_BLOCK_PTR->count;
4232 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
4234 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
4235 if (!(e->flags & EDGE_ABNORMAL))
4236 redirect_edge_succ (e, exit_block);
4241 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
4242 e->probability = REG_BR_PROB_BASE;
4243 e->count = EXIT_BLOCK_PTR->count;
4244 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
4247 e->count -= e2->count;
4248 exit_block->count -= e2->count;
4249 exit_block->frequency -= EDGE_FREQUENCY (e2);
4253 if (exit_block->count < 0)
4254 exit_block->count = 0;
4255 if (exit_block->frequency < 0)
4256 exit_block->frequency = 0;
4257 update_bb_for_insn (exit_block);
4260 /* Helper function for discover_nonconstant_array_refs.
4261 Look for ARRAY_REF nodes with non-constant indexes and mark them
4265 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
4266 void *data ATTRIBUTE_UNUSED)
4270 if (IS_TYPE_OR_DECL_P (t))
4272 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4274 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4275 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
4276 && (!TREE_OPERAND (t, 2)
4277 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4278 || (TREE_CODE (t) == COMPONENT_REF
4279 && (!TREE_OPERAND (t,2)
4280 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
4281 || TREE_CODE (t) == BIT_FIELD_REF
4282 || TREE_CODE (t) == REALPART_EXPR
4283 || TREE_CODE (t) == IMAGPART_EXPR
4284 || TREE_CODE (t) == VIEW_CONVERT_EXPR
4285 || CONVERT_EXPR_P (t))
4286 t = TREE_OPERAND (t, 0);
4288 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
4290 t = get_base_address (t);
4292 && DECL_MODE (t) != BLKmode)
4293 TREE_ADDRESSABLE (t) = 1;
4302 /* RTL expansion is not able to compile array references with variable
4303 offsets for arrays stored in a single register.  Discover such
4304 expressions and mark variables as addressable to avoid this
4308 discover_nonconstant_array_refs (void)
4311 gimple_stmt_iterator gsi;
4314 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4316 gimple stmt = gsi_stmt (gsi);
4317 if (!is_gimple_debug (stmt))
4318 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
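
/* Illustration only (hypothetical source): a case this catches is

     int v[2];
     ...
     return v[i];    where I is not a compile-time constant.

   Without TREE_ADDRESSABLE, V could be allocated to a register, which
   RTL expansion cannot index with a variable offset.  */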
4322 /* This function sets crtl->args.internal_arg_pointer to a virtual
4323 register if DRAP is needed.  The local register allocator will replace
4324 virtual_incoming_args_rtx with the virtual register. */
4327 expand_stack_alignment (void)
4330 unsigned int preferred_stack_boundary;
4332 if (! SUPPORTS_STACK_ALIGNMENT)
4335 if (cfun->calls_alloca
4336 || cfun->has_nonlocal_label
4337 || crtl->has_nonlocal_goto)
4338 crtl->need_drap = true;
4340 /* Call update_stack_boundary here again to update incoming stack
4341 boundary. It may set incoming stack alignment to a different
4342 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
4343 use the minimum incoming stack alignment to check if it is OK
4344 to perform sibcall optimization since sibcall optimization will
4345 only align the outgoing stack to incoming stack boundary. */
4346 if (targetm.calls.update_stack_boundary)
4347 targetm.calls.update_stack_boundary ();
4349 /* The incoming stack frame has to be aligned at least at
4350 parm_stack_boundary. */
4351 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
4353 /* Update crtl->stack_alignment_estimated and use it later to align
4354 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
4355 exceptions since callgraph doesn't collect incoming stack alignment
4357 if (cfun->can_throw_non_call_exceptions
4358 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
4359 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4361 preferred_stack_boundary = crtl->preferred_stack_boundary;
4362 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
4363 crtl->stack_alignment_estimated = preferred_stack_boundary;
4364 if (preferred_stack_boundary > crtl->stack_alignment_needed)
4365 crtl->stack_alignment_needed = preferred_stack_boundary;
4367 gcc_assert (crtl->stack_alignment_needed
4368 <= crtl->stack_alignment_estimated);
4370 crtl->stack_realign_needed
4371 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
4372 crtl->stack_realign_tried = crtl->stack_realign_needed;
4374 crtl->stack_realign_processed = true;
4376 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
4378 gcc_assert (targetm.calls.get_drap_rtx != NULL);
4379 drap_rtx = targetm.calls.get_drap_rtx ();
4381 /* stack_realign_drap and drap_rtx must match. */
4382 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
4384 /* Do nothing if NULL is returned, which means DRAP is not needed. */
4385 if (NULL != drap_rtx)
4387 crtl->args.internal_arg_pointer = drap_rtx;
4389 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
4391 fixup_tail_calls ();
4395 /* Translate the intermediate representation contained in the CFG
4396 from GIMPLE trees to RTL.
4398 We do conversion per basic block and preserve/update the tree CFG.
4399 This implies we have to do some magic as the CFG can simultaneously
4400 consist of basic blocks containing RTL and GIMPLE trees. This can
4401 confuse the CFG hooks, so be careful to not manipulate CFG during
4405 gimple_expand_cfg (void)
4407 basic_block bb, init_block;
4414 timevar_push (TV_OUT_OF_SSA);
4415 rewrite_out_of_ssa (&SA);
4416 timevar_pop (TV_OUT_OF_SSA);
4417 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
4420 /* Some backends want to know that we are expanding to RTL. */
4421 currently_expanding_to_rtl = 1;
4423 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
4425 insn_locators_alloc ();
4426 if (!DECL_IS_BUILTIN (current_function_decl))
4428 /* Eventually, all FEs should explicitly set function_start_locus. */
4429 if (cfun->function_start_locus == UNKNOWN_LOCATION)
4430 set_curr_insn_source_location
4431 (DECL_SOURCE_LOCATION (current_function_decl));
4433 set_curr_insn_source_location (cfun->function_start_locus);
4436 set_curr_insn_source_location (UNKNOWN_LOCATION);
4437 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4438 prologue_locator = curr_insn_locator ();
4440 #ifdef INSN_SCHEDULING
4441 init_sched_attrs ();
4444 /* Make sure the first insn is a note even if we don't want line numbers.
4445 This makes sure the first insn will never be deleted.
4446 Also, final expects a note to appear there. */
4447 emit_note (NOTE_INSN_DELETED);
4449 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4450 discover_nonconstant_array_refs ();
4452 targetm.expand_to_rtl_hook ();
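
  /* Seed the per-function stack alignment bookkeeping with the ABI
     minimum; expanding variables and calls below only raises it.  */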
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = 0;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict
     distance of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);
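
  /* VAR_SEQ now holds any insns that variable expansion emitted; they
     are placed into the function body once expand_function_start has
     created parm_birth_insn, below.  */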

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting local variables: "
                 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "stack protector not protecting function: "
                 "all local arrays are less than %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
         before parm_birth_insn.  We've just inserted an alloca call.
         Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
          && !SA.partition_to_pseudo[i])
        SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
         was a decl marked in REG_ATTR or MEM_ATTR.  We could use
         SET_DECL_RTL here making this available, but that would mean
         to select one of the potentially many RTLs for one DECL.  Instead
         of doing that we simply reset the MEM_EXPR of the RTL in question,
         then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
        {
          if (MEM_P (SA.partition_to_pseudo[i]))
            set_mem_expr (SA.partition_to_pseudo[i], NULL);
        }
    }

  /* If we have a class containing differently aligned pointers
     we need to merge those into the corresponding RTL pointer
     alignment.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      int part;
      rtx r;

      if (!name
          || !POINTER_TYPE_P (TREE_TYPE (name))
          /* We might have generated new SSA names in
             update_alias_info_with_stack_vars.  They will have a NULL
             defining statement, and won't be part of the partitioning,
             so ignore those.  */
          || !SSA_NAME_DEF_STMT (name))
        continue;

      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
        continue;
      r = SA.partition_to_pseudo[part];
      if (REG_P (r))
        mark_reg_pointer (r, get_pointer_alignment (name));
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();
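
  /* Replace the PHI nodes that out-of-SSA left in the IL with real
     copies on the incoming edges.  A PHI such as
       x_3 = PHI <x_1 (bb2), x_2 (bb4)>
     whose arguments landed in different partitions becomes a copy at
     the end of bb2 and another at the end of bb4; arguments that were
     coalesced into the PHI's own partition need no copy at all.  */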
  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;
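
  /* Expand each block.  expand_gimple_basic_block may split the block
     it was handed (for conditional jumps, tail calls and the like) and
     returns the block to resume from, hence the reassignment of BB.  */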
  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());
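
  /* Commit the insn sequences that expansion queued on edges.  A
     sequence queued on the lone entry edge is emitted right after
     parm_birth_insn instead, so nothing lands ahead of the parameter
     setup code; everything else goes through the generic edge
     insertion machinery, splitting edges where needed.  */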
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            {
              rebuild_jump_labels_chain (e->insns.r);
              /* Avoid putting insns before parm_birth_insn.  */
              if (e->src == ENTRY_BLOCK_PTR
                  && single_succ_p (ENTRY_BLOCK_PTR)
                  && parm_birth_insn)
                {
                  rtx insns = e->insns.r;
                  e->insns.r = NULL_RTX;
                  emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
                }
              else
                commit_one_edge_insertion (e);
            }
          else
            ei_next (&ei);
        }
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }
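
  /* Expansion emitted jumps and labels inside what the tree CFG treated
     as single blocks; rescan every block so the RTL CFG matches the
     actual insn stream, then discard any edges that became dead.  */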
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;
4687 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4688 /* And the pass manager will dump RTL for us. */

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (cfun->gimple_df->tm_restart)
    {
      htab_delete (cfun->gimple_df->tm_restart);
      cfun->gimple_df->tm_restart = NULL;
    }

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  timevar_pop (TV_POST_EXPAND);

  return 0;
}
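
/* Descriptor through which the pass manager invokes expansion: it
   requires SSA form with lowered EH, a CFG and lowered complex
   arithmetic, provides RTL, and destroys the tree/SSA properties.  */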
struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  gimple_expand_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,                  /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_ssa | PROP_trees,                /* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,                /* todo_flags_start */
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};