1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "basic-block.h"
31 #include "langhooks.h"
32 #include "tree-flow.h"
34 #include "tree-dump.h"
35 #include "tree-pass.h"
38 #include "diagnostic.h"
39 #include "tree-pretty-print.h"
40 #include "gimple-pretty-print.h"
44 #include "tree-inline.h"
45 #include "value-prof.h"
47 #include "ssaexpand.h"
50 #include "insn-attr.h" /* For INSN_SCHEDULING. */
52 /* This variable holds information helping the rewriting of SSA trees
56 /* This variable holds the currently expanded gimple statement for purposes
57 of communicating the profile info to the builtin expanders. */
58 gimple currently_expanding_gimple_stmt;
60 /* Return an expression tree corresponding to the RHS of GIMPLE
64 gimple_assign_rhs_to_tree (gimple stmt)
67 enum gimple_rhs_class grhs_class;
69 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
71 if (grhs_class == GIMPLE_TERNARY_RHS)
72 t = build3 (gimple_assign_rhs_code (stmt),
73 TREE_TYPE (gimple_assign_lhs (stmt)),
74 gimple_assign_rhs1 (stmt),
75 gimple_assign_rhs2 (stmt),
76 gimple_assign_rhs3 (stmt));
77 else if (grhs_class == GIMPLE_BINARY_RHS)
78 t = build2 (gimple_assign_rhs_code (stmt),
79 TREE_TYPE (gimple_assign_lhs (stmt)),
80 gimple_assign_rhs1 (stmt),
81 gimple_assign_rhs2 (stmt));
82 else if (grhs_class == GIMPLE_UNARY_RHS)
83 t = build1 (gimple_assign_rhs_code (stmt),
84 TREE_TYPE (gimple_assign_lhs (stmt)),
85 gimple_assign_rhs1 (stmt));
86 else if (grhs_class == GIMPLE_SINGLE_RHS)
88 t = gimple_assign_rhs1 (stmt);
89 /* Avoid modifying this tree in place below. */
90 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
91 && gimple_location (stmt) != EXPR_LOCATION (t))
92 || (gimple_block (stmt)
93 && currently_expanding_to_rtl
95 && gimple_block (stmt) != TREE_BLOCK (t)))
101 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
102 SET_EXPR_LOCATION (t, gimple_location (stmt));
103 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
104 TREE_BLOCK (t) = gimple_block (stmt);
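/* Editorial note, not part of the original source: as a worked example of
   the rebuilding above, a statement such as "a_1 = b_2 + c_3" has
   gimple_expr_code PLUS_EXPR, which classifies as GIMPLE_BINARY_RHS, so the
   RHS tree is reconstructed as
       t = build2 (PLUS_EXPR, TREE_TYPE (a_1), b_2, c_3);
   Only the GIMPLE_SINGLE_RHS case reuses an existing tree node, which is why
   it alone has to guard against modifying a shared tree in place before the
   location and block are copied over.  */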
110 #ifndef STACK_ALIGNMENT_NEEDED
111 #define STACK_ALIGNMENT_NEEDED 1
114 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
116 /* Associate declaration T with storage space X. If T is not an
117 SSA name this is exactly SET_DECL_RTL; otherwise make the
118 partition of T associated with X. */
120 set_rtl (tree t, rtx x)
122 if (TREE_CODE (t) == SSA_NAME)
124 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
126 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
127 /* For the benefit of debug information at -O0 (where vartracking
128 doesn't run) record the place also in the base DECL if it's
129 a normal variable (not a parameter). */
130 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
132 tree var = SSA_NAME_VAR (t);
133 /* If we don't yet have something recorded, just record it now. */
134 if (!DECL_RTL_SET_P (var))
135 SET_DECL_RTL (var, x);
136 /* If we have it set already to "multiple places" don't
138 else if (DECL_RTL (var) == pc_rtx)
140 /* If we have something recorded and it's not the same place
141 as we want to record now, we have multiple partitions for the
142 same base variable, with different places. We can't just
143 randomly choose one, hence we have to say that we don't know.
144 This only happens with optimization, and there var-tracking
145 will figure out the right thing. */
146 else if (DECL_RTL (var) != x)
147 SET_DECL_RTL (var, pc_rtx);
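/* Editorial note, not part of the original source: the DECL_RTL of the base
   variable thus acts as a three-state record.  Unset means no partition has
   been placed yet; a real location means exactly one partition of the
   variable lives there; pc_rtx is a sentinel meaning "multiple places",
   telling the -O0 debug-info consumer that no single location is valid
   (with optimization, var-tracking sorts it out later).  */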
154 /* This structure holds data relevant to one variable that will be
155 placed in a stack slot. */
161 /* The offset of the variable. During partitioning, this is the
162 offset relative to the partition. After partitioning, this
163 is relative to the stack frame. */
164 HOST_WIDE_INT offset;
166 /* Initially, the size of the variable. Later, the size of the partition,
167 if this variable becomes its partition's representative. */
170 /* The *byte* alignment required for this variable. Or, as with the
171 size, the alignment required for this partition. */
174 /* The partition representative. */
175 size_t representative;
177 /* The next stack variable in the partition, or EOC. */
180 /* The numbers of conflicting stack variables. */
184 #define EOC ((size_t)-1)
186 /* We have an array of such objects while deciding allocation. */
187 static struct stack_var *stack_vars;
188 static size_t stack_vars_alloc;
189 static size_t stack_vars_num;
191 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
192 is non-decreasing. */
193 static size_t *stack_vars_sorted;
195 /* The phase of the stack frame. This is the known misalignment of
196 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
197 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
198 static int frame_phase;
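/* Editorial note, not part of the original source: a worked example of the
   invariant above, with assumed target values.  With a
   PREFERRED_STACK_BOUNDARY of 128 bits (16 bytes) and a
   STARTING_FRAME_OFFSET of 8, expand_used_vars computes off = 8 % 16 = 8 and
   frame_phase = 16 - 8 = 8, so frame offsets X with (X + 8) % 16 == 0
   (e.g. -8, 8, 24) are the ones that land on a 16-byte-aligned address;
   the arithmetic in alloc_stack_frame_space compensates accordingly.  */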
200 /* Used during expand_used_vars to remember if we saw any decls for
201 which we'd like to enable stack smashing protection. */
202 static bool has_protected_decls;
204 /* Used during expand_used_vars. Remember if we saw a character buffer
205 smaller than our cutoff threshold. Used for -Wstack-protector. */
206 static bool has_short_buffer;
208 /* Discover the byte alignment to use for DECL. Ignore any alignment
209 we cannot honor, given the expected alignment of the stack boundary. */
212 get_decl_align_unit (tree decl)
214 unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
215 return align / BITS_PER_UNIT;
218 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
219 Return the frame offset. */
222 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
224 HOST_WIDE_INT offset, new_frame_offset;
226 new_frame_offset = frame_offset;
227 if (FRAME_GROWS_DOWNWARD)
229 new_frame_offset -= size + frame_phase;
230 new_frame_offset &= -align;
231 new_frame_offset += frame_phase;
232 offset = new_frame_offset;
236 new_frame_offset -= frame_phase;
237 new_frame_offset += align - 1;
238 new_frame_offset &= -align;
239 new_frame_offset += frame_phase;
240 offset = new_frame_offset;
241 new_frame_offset += size;
243 frame_offset = new_frame_offset;
245 if (frame_offset_overflow (frame_offset, cfun->decl))
246 frame_offset = offset = 0;
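/* Editorial note, not part of the original source: a worked example of the
   downward-growing case above.  With frame_phase 0, frame_offset -16,
   size 12 and align 8: new_frame_offset = -16 - 12 = -28, masking with -8
   rounds that down to -32, so the variable is placed at offset -32
   (occupying bytes [-32, -20)) and frame_offset becomes -32.  The
   upward-growing branch instead rounds the current offset up to the
   requested alignment and then advances past SIZE.  */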
251 /* Accumulate DECL into STACK_VARS. */
254 add_stack_var (tree decl)
256 if (stack_vars_num >= stack_vars_alloc)
258 if (stack_vars_alloc)
259 stack_vars_alloc = stack_vars_alloc * 3 / 2;
261 stack_vars_alloc = 32;
263 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
265 stack_vars[stack_vars_num].decl = decl;
266 stack_vars[stack_vars_num].offset = 0;
267 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
268 stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));
270 /* All variables are initially in their own partition. */
271 stack_vars[stack_vars_num].representative = stack_vars_num;
272 stack_vars[stack_vars_num].next = EOC;
274 /* All variables initially conflict with no other. */
275 stack_vars[stack_vars_num].conflicts = NULL;
277 /* Ensure that this decl doesn't get put onto the list twice. */
278 set_rtl (decl, pc_rtx);
283 /* Make the decls associated with luid's X and Y conflict. */
286 add_stack_var_conflict (size_t x, size_t y)
288 struct stack_var *a = &stack_vars[x];
289 struct stack_var *b = &stack_vars[y];
291 a->conflicts = BITMAP_ALLOC (NULL);
293 b->conflicts = BITMAP_ALLOC (NULL);
294 bitmap_set_bit (a->conflicts, y);
295 bitmap_set_bit (b->conflicts, x);
298 /* Check whether the decls associated with luid's X and Y conflict. */
301 stack_var_conflict_p (size_t x, size_t y)
303 struct stack_var *a = &stack_vars[x];
304 struct stack_var *b = &stack_vars[y];
305 if (!a->conflicts || !b->conflicts)
307 return bitmap_bit_p (a->conflicts, y);
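/* Editorial note, not part of the original source: add_stack_var_conflict
   records the conflict in both variables' bitmaps, so the single
   bitmap_bit_p test above is sufficient; stack_var_conflict_p (x, y) and
   stack_var_conflict_p (y, x) always agree.  */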
310 /* Returns true if TYPE is or contains a union type. */
313 aggregate_contains_union_type (tree type)
317 if (TREE_CODE (type) == UNION_TYPE
318 || TREE_CODE (type) == QUAL_UNION_TYPE)
320 if (TREE_CODE (type) == ARRAY_TYPE)
321 return aggregate_contains_union_type (TREE_TYPE (type));
322 if (TREE_CODE (type) != RECORD_TYPE)
325 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
326 if (TREE_CODE (field) == FIELD_DECL)
327 if (aggregate_contains_union_type (TREE_TYPE (field)))
333 /* A subroutine of expand_used_vars. If two variables X and Y have alias
334 sets that do not conflict, then do add a conflict for these variables
335 in the interference graph. We also need to make sure to add conflicts
336 for union-containing structures. Otherwise RTL alias analysis comes along
337 and, due to type-based aliasing rules, decides that for two overlapping
338 union temporaries { short s; int i; } accesses to the same memory through
339 different types may not alias, and happily reorders stores across
340 life-time boundaries of the temporaries (see PR25654).
341 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
344 add_alias_set_conflicts (void)
346 size_t i, j, n = stack_vars_num;
348 for (i = 0; i < n; ++i)
350 tree type_i = TREE_TYPE (stack_vars[i].decl);
351 bool aggr_i = AGGREGATE_TYPE_P (type_i);
354 contains_union = aggregate_contains_union_type (type_i);
355 for (j = 0; j < i; ++j)
357 tree type_j = TREE_TYPE (stack_vars[j].decl);
358 bool aggr_j = AGGREGATE_TYPE_P (type_j);
360 /* Either the objects conflict by means of type based
361 aliasing rules, or we need to add a conflict. */
362 || !objects_must_conflict_p (type_i, type_j)
363 /* In case the types do not conflict ensure that access
364 to elements will conflict. In case of unions we have
365 to be careful as type based aliasing rules may say
366 access to the same memory does not conflict. So play
367 safe and add a conflict in this case. */
369 add_stack_var_conflict (i, j);
374 /* A subroutine of partition_stack_vars. A comparison function for qsort,
375 sorting an array of indices by the properties of the object. */
378 stack_var_cmp (const void *a, const void *b)
380 size_t ia = *(const size_t *)a;
381 size_t ib = *(const size_t *)b;
382 unsigned int aligna = stack_vars[ia].alignb;
383 unsigned int alignb = stack_vars[ib].alignb;
384 HOST_WIDE_INT sizea = stack_vars[ia].size;
385 HOST_WIDE_INT sizeb = stack_vars[ib].size;
386 tree decla = stack_vars[ia].decl;
387 tree declb = stack_vars[ib].decl;
389 unsigned int uida, uidb;
391 /* Primary compare on "large" alignment. Large comes first. */
392 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
393 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
394 if (largea != largeb)
395 return (int)largeb - (int)largea;
397 /* Secondary compare on size, decreasing */
403 /* Tertiary compare on true alignment, decreasing. */
409 /* Final compare on ID for sort stability, increasing.
410 Two SSA names are compared by their version, SSA names come before
411 non-SSA names, and two normal decls are compared by their DECL_UID. */
412 if (TREE_CODE (decla) == SSA_NAME)
414 if (TREE_CODE (declb) == SSA_NAME)
415 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
419 else if (TREE_CODE (declb) == SSA_NAME)
422 uida = DECL_UID (decla), uidb = DECL_UID (declb);
431 /* If the points-to solution *PT points to variables that are in a partition
432 together with other variables, add all partition members to the pointed-to
436 add_partitioned_vars_to_ptset (struct pt_solution *pt,
437 struct pointer_map_t *decls_to_partitions,
438 struct pointer_set_t *visited, bitmap temp)
446 /* The pointed-to vars bitmap is shared, it is enough to
448 || pointer_set_insert(visited, pt->vars))
453 /* By using a temporary bitmap to store all members of the partitions
454 we have to add, we make sure to visit each of the partitions only
456 EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
458 || !bitmap_bit_p (temp, i))
459 && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
460 (void *)(size_t) i)))
461 bitmap_ior_into (temp, *part);
462 if (!bitmap_empty_p (temp))
463 bitmap_ior_into (pt->vars, temp);
466 /* Update points-to sets based on partition info, so we can use them on RTL.
467 The bitmaps representing stack partitions will be saved until expand,
468 where partitioned decls used as bases in memory expressions will be
472 update_alias_info_with_stack_vars (void)
474 struct pointer_map_t *decls_to_partitions = NULL;
476 tree var = NULL_TREE;
478 for (i = 0; i < stack_vars_num; i++)
482 struct ptr_info_def *pi;
484 /* Not interested in partitions with a single variable. */
485 if (stack_vars[i].representative != i
486 || stack_vars[i].next == EOC)
489 if (!decls_to_partitions)
491 decls_to_partitions = pointer_map_create ();
492 cfun->gimple_df->decls_to_pointers = pointer_map_create ();
495 /* Create an SSA_NAME that points to the partition for use
496 as base during alias-oracle queries on RTL for bases that
497 have been partitioned. */
498 if (var == NULL_TREE)
499 var = create_tmp_var (ptr_type_node, NULL);
500 name = make_ssa_name (var, NULL);
502 /* Create bitmaps representing partitions. They will be used for
503 points-to sets later, so use GGC alloc. */
504 part = BITMAP_GGC_ALLOC ();
505 for (j = i; j != EOC; j = stack_vars[j].next)
507 tree decl = stack_vars[j].decl;
508 unsigned int uid = DECL_PT_UID (decl);
509 /* We should never end up partitioning SSA names (though they
510 may end up on the stack). Neither should we allocate stack
511 space to something that is unused and thus unreferenced. */
512 gcc_assert (DECL_P (decl)
513 && referenced_var_lookup (DECL_UID (decl)));
514 bitmap_set_bit (part, uid);
515 *((bitmap *) pointer_map_insert (decls_to_partitions,
516 (void *)(size_t) uid)) = part;
517 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
521 /* Make the SSA name point to all partition members. */
522 pi = get_ptr_info (name);
523 pt_solution_set (&pi->pt, part, false, false);
526 /* Make all points-to sets that contain one member of a partition
527 contain all members of the partition. */
528 if (decls_to_partitions)
531 struct pointer_set_t *visited = pointer_set_create ();
532 bitmap temp = BITMAP_ALLOC (NULL);
534 for (i = 1; i < num_ssa_names; i++)
536 tree name = ssa_name (i);
537 struct ptr_info_def *pi;
540 && POINTER_TYPE_P (TREE_TYPE (name))
541 && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
542 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
546 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
547 decls_to_partitions, visited, temp);
549 pointer_set_destroy (visited);
550 pointer_map_destroy (decls_to_partitions);
555 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
556 partitioning algorithm. Partitions A and B are known to be non-conflicting.
557 Merge them into a single partition A.
559 At the same time, add OFFSET to all variables in partition B. At the end
560 of the partitioning process we'll have a nice block that is easy to lay out within
564 union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
567 struct stack_var *vb = &stack_vars[b];
571 /* Update each element of partition B with the given offset,
572 and merge them into partition A. */
573 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
575 stack_vars[i].offset += offset;
576 stack_vars[i].representative = a;
578 stack_vars[last].next = stack_vars[a].next;
579 stack_vars[a].next = b;
581 /* Update the required alignment of partition A to account for B. */
582 if (stack_vars[a].alignb < stack_vars[b].alignb)
583 stack_vars[a].alignb = stack_vars[b].alignb;
585 /* Update the interference graph and merge the conflicts. */
588 EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
589 add_stack_var_conflict (a, stack_vars[u].representative);
590 BITMAP_FREE (vb->conflicts);
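/* Editorial note, not part of the original source: as an example of the
   splice above, if partition A is the single variable a (size 16) and
   partition B is the single variable b (size 8), union_stack_vars (a, b, 16)
   sets b's representative to a, adds 16 to b's offset, links a->next = b,
   raises a's alignment to the larger of the two, and re-adds each of b's
   conflicts (mapped to their representatives) to a before freeing b's
   conflict bitmap.  */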
594 /* A subroutine of expand_used_vars. Binpack the variables into
595 partitions constrained by the interference graph. The overall
596 algorithm used is as follows:
598 Sort the objects by size.
603 Look for the largest non-conflicting object B with size <= S.
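/* Editorial sketch, not part of the original source (the remainder of the
   description is elided here): as implemented below, for each partition
   representative A taken in sorted order, keep a running offset O = 0 and
   remaining size S = size (A); scan the no-larger objects from largest to
   smallest, and each still-unplaced, non-conflicting B that fits in S
   (after alignment padding) is unioned into A at offset O, with O advanced
   and S shrunk by the space B consumed.  */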
613 partition_stack_vars (void)
615 size_t si, sj, n = stack_vars_num;
617 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
618 for (si = 0; si < n; ++si)
619 stack_vars_sorted[si] = si;
624 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);
626 for (si = 0; si < n; ++si)
628 size_t i = stack_vars_sorted[si];
629 HOST_WIDE_INT isize = stack_vars[i].size;
630 unsigned int ialign = stack_vars[i].alignb;
631 HOST_WIDE_INT offset = 0;
633 for (sj = si; sj-- > 0; )
635 size_t j = stack_vars_sorted[sj];
636 HOST_WIDE_INT jsize = stack_vars[j].size;
637 unsigned int jalign = stack_vars[j].alignb;
639 /* Ignore objects that aren't partition representatives. */
640 if (stack_vars[j].representative != j)
643 /* Ignore objects too large for the remaining space. */
647 /* Ignore conflicting objects. */
648 if (stack_var_conflict_p (i, j))
651 /* Do not mix objects of "small" (supported) alignment
652 and "large" (unsupported) alignment. */
653 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
654 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
657 /* Refine the remaining space check to include alignment. */
658 if (offset & (jalign - 1))
660 HOST_WIDE_INT toff = offset;
661 toff += jalign - 1;
662 toff &= -(HOST_WIDE_INT)jalign;
663 if (isize - (toff - offset) < jsize)
666 isize -= toff - offset;
670 /* UNION the objects, placing J at OFFSET. */
671 union_stack_vars (i, j, offset);
680 update_alias_info_with_stack_vars ();
683 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
686 dump_stack_var_partition (void)
688 size_t si, i, j, n = stack_vars_num;
690 for (si = 0; si < n; ++si)
692 i = stack_vars_sorted[si];
694 /* Skip variables that aren't partition representatives, for now. */
695 if (stack_vars[i].representative != i)
698 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
699 " align %u\n", (unsigned long) i, stack_vars[i].size,
700 stack_vars[i].alignb);
702 for (j = i; j != EOC; j = stack_vars[j].next)
704 fputc ('\t', dump_file);
705 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
706 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
707 stack_vars[j].offset);
712 /* Assign rtl to DECL at BASE + OFFSET. */
715 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
716 HOST_WIDE_INT offset)
721 /* If this fails, we've overflowed the stack frame. Error nicely? */
722 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
724 x = plus_constant (base, offset);
725 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
727 if (TREE_CODE (decl) != SSA_NAME)
729 /* Set the alignment we actually gave this decl if it isn't an SSA name.
730 If it is an SSA name, we generate stack slots only accidentally, so the
731 alignment isn't as important; we'll simply use the alignment that is already set. */
732 if (base == virtual_stack_vars_rtx)
733 offset -= frame_phase;
734 align = offset & -offset;
735 align *= BITS_PER_UNIT;
736 if (align == 0 || align > base_align)
739 /* One would think that we could assert that we're not decreasing
740 alignment here, but (at least) the i386 port does exactly this
741 via the MINIMUM_ALIGNMENT hook. */
743 DECL_ALIGN (decl) = align;
744 DECL_USER_ALIGN (decl) = 0;
747 set_mem_attributes (x, SSAVAR (decl), true);
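/* Editorial note, not part of the original source: "offset & -offset" above
   isolates the lowest set bit of the frame offset, i.e. the largest power of
   two that divides it.  For example an offset of -24 gives -24 & 24 = 8, so
   the slot is known to be 8-byte (64-bit) aligned relative to the frame
   base; the result is then capped at base_align before being recorded in
   DECL_ALIGN.  */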
751 /* A subroutine of expand_used_vars. Give each partition representative
752 a unique location within the stack frame. Update each partition member
753 with that location. */
756 expand_stack_vars (bool (*pred) (tree))
758 size_t si, i, j, n = stack_vars_num;
759 HOST_WIDE_INT large_size = 0, large_alloc = 0;
760 rtx large_base = NULL;
761 unsigned large_align = 0;
764 /* Determine if there are any variables requiring "large" alignment.
765 Since these are dynamically allocated, we only process these if
766 no predicate is involved. */
767 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
768 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
770 /* Find the total size of these variables. */
771 for (si = 0; si < n; ++si)
775 i = stack_vars_sorted[si];
776 alignb = stack_vars[i].alignb;
778 /* Stop when we get to the first decl with "small" alignment. */
779 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
782 /* Skip variables that aren't partition representatives. */
783 if (stack_vars[i].representative != i)
786 /* Skip variables that have already had rtl assigned. See also
787 add_stack_var where we perpetrate this pc_rtx hack. */
788 decl = stack_vars[i].decl;
789 if ((TREE_CODE (decl) == SSA_NAME
790 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
791 : DECL_RTL (decl)) != pc_rtx)
794 large_size += alignb - 1;
795 large_size &= -(HOST_WIDE_INT)alignb;
796 large_size += stack_vars[i].size;
799 /* If there were any, allocate space. */
801 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
805 for (si = 0; si < n; ++si)
808 unsigned base_align, alignb;
809 HOST_WIDE_INT offset;
811 i = stack_vars_sorted[si];
813 /* Skip variables that aren't partition representatives, for now. */
814 if (stack_vars[i].representative != i)
817 /* Skip variables that have already had rtl assigned. See also
818 add_stack_var where we perpetrate this pc_rtx hack. */
819 decl = stack_vars[i].decl;
820 if ((TREE_CODE (decl) == SSA_NAME
821 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
822 : DECL_RTL (decl)) != pc_rtx)
825 /* Check the predicate to see whether this variable should be
826 allocated in this pass. */
827 if (pred && !pred (decl))
830 alignb = stack_vars[i].alignb;
831 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
833 offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
834 base = virtual_stack_vars_rtx;
835 base_align = crtl->max_used_stack_slot_alignment;
839 /* Large alignment is only processed in the last pass. */
843 large_alloc += alignb - 1;
844 large_alloc &= -(HOST_WIDE_INT)alignb;
845 offset = large_alloc;
846 large_alloc += stack_vars[i].size;
849 base_align = large_align;
852 /* Create rtl for each variable based on their location within the
854 for (j = i; j != EOC; j = stack_vars[j].next)
856 gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
857 expand_one_stack_var_at (stack_vars[j].decl,
859 stack_vars[j].offset + offset);
863 gcc_assert (large_alloc == large_size);
866 /* Take into account all sizes of partitions and reset DECL_RTLs. */
868 account_stack_vars (void)
870 size_t si, j, i, n = stack_vars_num;
871 HOST_WIDE_INT size = 0;
873 for (si = 0; si < n; ++si)
875 i = stack_vars_sorted[si];
877 /* Skip variables that aren't partition representatives, for now. */
878 if (stack_vars[i].representative != i)
881 size += stack_vars[i].size;
882 for (j = i; j != EOC; j = stack_vars[j].next)
883 set_rtl (stack_vars[j].decl, NULL);
888 /* A subroutine of expand_one_var. Called to immediately assign rtl
889 to a variable to be allocated in the stack frame. */
892 expand_one_stack_var (tree var)
894 HOST_WIDE_INT size, offset;
897 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
898 byte_align = get_decl_align_unit (SSAVAR (var));
900 /* We handle highly aligned variables in expand_stack_vars. */
901 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
903 offset = alloc_stack_frame_space (size, byte_align);
905 expand_one_stack_var_at (var, virtual_stack_vars_rtx,
906 crtl->max_used_stack_slot_alignment, offset);
909 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
910 that will reside in a hard register. */
913 expand_one_hard_reg_var (tree var)
915 rest_of_decl_compilation (var, 0, 0);
918 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
919 that will reside in a pseudo register. */
922 expand_one_register_var (tree var)
924 tree decl = SSAVAR (var);
925 tree type = TREE_TYPE (decl);
926 enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
927 rtx x = gen_reg_rtx (reg_mode);
931 /* Note if the object is a user variable. */
932 if (!DECL_ARTIFICIAL (decl))
935 if (POINTER_TYPE_P (type))
936 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
939 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
940 has some associated error, e.g. its type is error-mark. We just need
941 to pick something that won't crash the rest of the compiler. */
944 expand_one_error_var (tree var)
946 enum machine_mode mode = DECL_MODE (var);
950 x = gen_rtx_MEM (BLKmode, const0_rtx);
951 else if (mode == VOIDmode)
954 x = gen_reg_rtx (mode);
956 SET_DECL_RTL (var, x);
959 /* A subroutine of expand_one_var. VAR is a variable that will be
960 allocated to the local stack frame. Return true if we wish to
961 add VAR to STACK_VARS so that it will be coalesced with other
962 variables. Return false to allocate VAR immediately.
964 This function is used to reduce the number of variables considered
965 for coalescing, which reduces the size of the quadratic problem. */
968 defer_stack_allocation (tree var, bool toplevel)
970 /* If stack protection is enabled, *all* stack variables must be deferred,
971 so that we can re-order the strings to the top of the frame. */
972 if (flag_stack_protect)
975 /* We handle "large" alignment via dynamic allocation. We want to handle
976 this extra complication in only one place, so defer them. */
977 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
980 /* Variables in the outermost scope automatically conflict with
981 every other variable. The only reason to want to defer them
982 at all is that, after sorting, we can more efficiently pack
983 small variables in the stack frame. Continue to defer at -O2. */
984 if (toplevel && optimize < 2)
987 /* Without optimization, *most* variables are allocated from the
988 stack, which makes the quadratic problem large exactly when we
989 want compilation to proceed as quickly as possible. On the
990 other hand, we don't want the function's stack frame size to
991 get completely out of hand. So we avoid adding scalars and
992 "small" aggregates to the list at all. */
993 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
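/* Editorial note, not part of the original source: concretely, at -O0 (and
   without -fstack-protector, which already returned true above) a 4-byte
   "int" local falls under the 32-byte cutoff above and gets a frame slot
   immediately, whereas a 64-byte character buffer is still deferred so the
   partitioning code can pack it with the other deferred variables.  */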
999 /* A subroutine of expand_used_vars. Expand one variable according to
1000 its flavor. Variables to be placed on the stack are not actually
1001 expanded yet, merely recorded.
1002 When REALLY_EXPAND is false, only add stack values to be allocated.
1003 Return the stack usage this variable is supposed to take.
1006 static HOST_WIDE_INT
1007 expand_one_var (tree var, bool toplevel, bool really_expand)
1009 unsigned int align = BITS_PER_UNIT;
1014 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
1016 /* Because we don't know if VAR will be in a register or on the stack,
1017 we conservatively assume it will be on the stack even if VAR is
1018 eventually put into a register after the RA pass. For non-automatic
1019 variables, which won't be on the stack, we collect the alignment of the
1020 type and ignore user-specified alignment. */
1021 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1022 align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
1023 TYPE_MODE (TREE_TYPE (var)),
1024 TYPE_ALIGN (TREE_TYPE (var)));
1025 else if (DECL_HAS_VALUE_EXPR_P (var)
1026 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
1027 /* Don't consider debug-only variables with DECL_HAS_VALUE_EXPR_P set,
1028 or variables which were already assigned a stack slot by
1029 expand_one_stack_var_at - in the latter case DECL_ALIGN has already been
1030 adjusted to match the offset chosen for them. */
1031 align = crtl->stack_alignment_estimated;
1033 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1035 /* If the variable alignment is very large we'll dynamically allocate
1036 it, which means that the in-frame portion is just a pointer. */
1037 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1038 align = POINTER_SIZE;
1041 if (SUPPORTS_STACK_ALIGNMENT
1042 && crtl->stack_alignment_estimated < align)
1044 /* stack_alignment_estimated shouldn't change after stack
1045 realign decision has been made. */
1046 gcc_assert(!crtl->stack_realign_processed);
1047 crtl->stack_alignment_estimated = align;
1050 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1051 So here we only make sure stack_alignment_needed >= align. */
1052 if (crtl->stack_alignment_needed < align)
1053 crtl->stack_alignment_needed = align;
1054 if (crtl->max_used_stack_slot_alignment < align)
1055 crtl->max_used_stack_slot_alignment = align;
1057 if (TREE_CODE (origvar) == SSA_NAME)
1059 gcc_assert (TREE_CODE (var) != VAR_DECL
1060 || (!DECL_EXTERNAL (var)
1061 && !DECL_HAS_VALUE_EXPR_P (var)
1062 && !TREE_STATIC (var)
1063 && TREE_TYPE (var) != error_mark_node
1064 && !DECL_HARD_REGISTER (var)
1067 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
1069 else if (DECL_EXTERNAL (var))
1071 else if (DECL_HAS_VALUE_EXPR_P (var))
1073 else if (TREE_STATIC (var))
1075 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1077 else if (TREE_TYPE (var) == error_mark_node)
1080 expand_one_error_var (var);
1082 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
1085 expand_one_hard_reg_var (var);
1087 else if (use_register_for_decl (var))
1090 expand_one_register_var (origvar);
1092 else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
1096 error ("size of variable %q+D is too large", var);
1097 expand_one_error_var (var);
1100 else if (defer_stack_allocation (var, toplevel))
1101 add_stack_var (origvar);
1105 expand_one_stack_var (origvar);
1106 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1111 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1112 expanding variables. Those variables that can be put into registers
1113 are allocated pseudos; those that can't are put on the stack.
1115 TOPLEVEL is true if this is the outermost BLOCK. */
1118 expand_used_vars_for_block (tree block, bool toplevel)
1120 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1123 old_sv_num = toplevel ? 0 : stack_vars_num;
1125 /* Expand all variables at this level. */
1126 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1128 expand_one_var (t, toplevel, true);
1130 this_sv_num = stack_vars_num;
1132 /* Expand all variables at containing levels. */
1133 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1134 expand_used_vars_for_block (t, false);
1136 /* Since we do not track exact variable lifetimes (which is not even
1137 possible for variables whose address escapes), we mirror the block
1138 tree in the interference graph. Here we cause all variables at this
1139 level, and all sublevels, to conflict. */
1140 if (old_sv_num < this_sv_num)
1142 new_sv_num = stack_vars_num;
1144 for (i = old_sv_num; i < new_sv_num; ++i)
1145 for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
1146 add_stack_var_conflict (i, j);
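/* Editorial note, not part of the original source: with the loop bounds
   above, variables declared directly in this block conflict with each other
   and with every variable from its sub-blocks, but variables in two disjoint
   sibling sub-blocks do not conflict, so the packer may later give them the
   same frame slot.  */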
1150 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1151 and clear TREE_USED on all local variables. */
1154 clear_tree_used (tree block)
1158 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1159 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1162 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1163 clear_tree_used (t);
1166 /* Examine TYPE and determine a bit mask of the following features. */
1168 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1169 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1170 #define SPCT_HAS_ARRAY 4
1171 #define SPCT_HAS_AGGREGATE 8
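/* Editorial note, not part of the original source: for example, with the
   default --param ssp-buffer-size=8, a "char buf[64]" classifies below as
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, a "char buf[4]" as
   SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, an "int a[10]" as
   SPCT_HAS_ARRAY only, and a struct or union gets SPCT_HAS_AGGREGATE or'd
   together with the bits contributed by its fields.  */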
1174 stack_protect_classify_type (tree type)
1176 unsigned int ret = 0;
1179 switch (TREE_CODE (type))
1182 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1183 if (t == char_type_node
1184 || t == signed_char_type_node
1185 || t == unsigned_char_type_node)
1187 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1188 unsigned HOST_WIDE_INT len;
1190 if (!TYPE_SIZE_UNIT (type)
1191 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1194 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1197 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1199 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1202 ret = SPCT_HAS_ARRAY;
1206 case QUAL_UNION_TYPE:
1208 ret = SPCT_HAS_AGGREGATE;
1209 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1210 if (TREE_CODE (t) == FIELD_DECL)
1211 ret |= stack_protect_classify_type (TREE_TYPE (t));
1221 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1222 part of the local stack frame. Remember if we ever return nonzero for
1223 any variable in this function. The return value is the phase number in
1224 which the variable should be allocated. */
1227 stack_protect_decl_phase (tree decl)
1229 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1232 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1233 has_short_buffer = true;
1235 if (flag_stack_protect == 2)
1237 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1238 && !(bits & SPCT_HAS_AGGREGATE))
1240 else if (bits & SPCT_HAS_ARRAY)
1244 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1247 has_protected_decls = true;
1252 /* Two helper routines that check for phase 1 and phase 2. These are used
1253 as callbacks for expand_stack_vars. */
1256 stack_protect_decl_phase_1 (tree decl)
1258 return stack_protect_decl_phase (decl) == 1;
1262 stack_protect_decl_phase_2 (tree decl)
1264 return stack_protect_decl_phase (decl) == 2;
1267 /* Ensure that variables in different stack protection phases conflict
1268 so that they do not get merged into the same stack slot. */
1271 add_stack_protection_conflicts (void)
1273 size_t i, j, n = stack_vars_num;
1274 unsigned char *phase;
1276 phase = XNEWVEC (unsigned char, n);
1277 for (i = 0; i < n; ++i)
1278 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1280 for (i = 0; i < n; ++i)
1282 unsigned char ph_i = phase[i];
1283 for (j = 0; j < i; ++j)
1284 if (ph_i != phase[j])
1285 add_stack_var_conflict (i, j);
1291 /* Create a decl for the guard at the top of the stack frame. */
1294 create_stack_guard (void)
1296 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1297 VAR_DECL, NULL, ptr_type_node);
1298 TREE_THIS_VOLATILE (guard) = 1;
1299 TREE_USED (guard) = 1;
1300 expand_one_stack_var (guard);
1301 crtl->stack_protect_guard = guard;
1304 /* A subroutine of estimated_stack_frame_size. Walk down through the BLOCK
1305 tree and account the sizes of all variables at this level and below,
1306 without actually assigning them rtl (expand_one_var with really_expand false).
1308 TOPLEVEL is true if this is the outermost BLOCK. */
1310 static HOST_WIDE_INT
1311 account_used_vars_for_block (tree block, bool toplevel)
1314 HOST_WIDE_INT size = 0;
1316 /* Expand all variables at this level. */
1317 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
1319 size += expand_one_var (t, toplevel, false);
1321 /* Expand all variables at containing levels. */
1322 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1323 size += account_used_vars_for_block (t, false);
1328 /* Prepare for expanding variables. */
1330 init_vars_expansion (void)
1334 /* Set TREE_USED on all variables in the local_decls. */
1335 FOR_EACH_LOCAL_DECL (cfun, ix, t)
1338 /* Clear TREE_USED on all variables associated with a block scope. */
1339 clear_tree_used (DECL_INITIAL (current_function_decl));
1341 /* Initialize local stack smashing state. */
1342 has_protected_decls = false;
1343 has_short_buffer = false;
1346 /* Free up stack variable graph data. */
1348 fini_vars_expansion (void)
1350 size_t i, n = stack_vars_num;
1351 for (i = 0; i < n; i++)
1352 BITMAP_FREE (stack_vars[i].conflicts);
1353 XDELETEVEC (stack_vars);
1354 XDELETEVEC (stack_vars_sorted);
1356 stack_vars_alloc = stack_vars_num = 0;
1359 /* Make a fair guess for the size of the stack frame of the decl
1360 passed. This doesn't have to be exact, the result is only used
1361 in the inline heuristics. So we don't want to run the full stack
1362 var packing algorithm (which is quadratic in the number of stack
1363 vars). Instead, we calculate the total size of all stack vars.
1364 This turns out to be a pretty fair estimate -- packing of stack
1365 vars doesn't happen very often. */
1368 estimated_stack_frame_size (tree decl)
1370 HOST_WIDE_INT size = 0;
1372 tree var, outer_block = DECL_INITIAL (current_function_decl);
1374 tree old_cur_fun_decl = current_function_decl;
1375 current_function_decl = decl;
1376 push_cfun (DECL_STRUCT_FUNCTION (decl));
1378 init_vars_expansion ();
1380 FOR_EACH_LOCAL_DECL (cfun, ix, var)
1382 if (TREE_USED (var))
1383 size += expand_one_var (var, true, false);
1384 TREE_USED (var) = 1;
1386 size += account_used_vars_for_block (outer_block, true);
1388 if (stack_vars_num > 0)
1390 /* Fake sorting the stack vars for account_stack_vars (). */
1391 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1392 for (i = 0; i < stack_vars_num; ++i)
1393 stack_vars_sorted[i] = i;
1394 size += account_stack_vars ();
1395 fini_vars_expansion ();
1398 current_function_decl = old_cur_fun_decl;
1402 /* Expand all variables used in the function. */
1405 expand_used_vars (void)
1407 tree var, outer_block = DECL_INITIAL (current_function_decl);
1408 VEC(tree,heap) *maybe_local_decls = NULL;
1412 /* Compute the phase of the stack frame for this function. */
1414 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1415 int off = STARTING_FRAME_OFFSET % align;
1416 frame_phase = off ? align - off : 0;
1419 init_vars_expansion ();
1421 for (i = 0; i < SA.map->num_partitions; i++)
1423 tree var = partition_to_var (SA.map, i);
1425 gcc_assert (is_gimple_reg (var));
1426 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1427 expand_one_var (var, true, true);
1430 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1431 contain the default def (representing the parm or result itself)
1432 we don't do anything here. But those which don't contain the
1433 default def (representing a temporary based on the parm/result)
1434 we need to allocate space just like for normal VAR_DECLs. */
1435 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1437 expand_one_var (var, true, true);
1438 gcc_assert (SA.partition_to_pseudo[i]);
1443 /* At this point all variables on the local_decls with TREE_USED
1444 set are not associated with any block scope. Lay them out. */
1446 len = VEC_length (tree, cfun->local_decls);
1447 FOR_EACH_LOCAL_DECL (cfun, i, var)
1449 bool expand_now = false;
1451 /* Expanded above already. */
1452 if (is_gimple_reg (var))
1454 TREE_USED (var) = 0;
1457 /* We didn't set a block for static or extern because it's hard
1458 to tell the difference between a global variable (re)declared
1459 in a local scope, and one that's really declared there to
1460 begin with. And it doesn't really matter much, since we're
1461 not giving them stack space. Expand them now. */
1462 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1465 /* If the variable is not associated with any block, then it
1466 was created by the optimizers, and could be live anywhere
1468 else if (TREE_USED (var))
1471 /* Finally, mark all variables on the list as used. We'll use
1472 this in a moment when we expand those associated with scopes. */
1473 TREE_USED (var) = 1;
1476 expand_one_var (var, true, true);
1479 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1481 rtx rtl = DECL_RTL_IF_SET (var);
1483 /* Keep artificial non-ignored vars in cfun->local_decls
1484 chain until instantiate_decls. */
1485 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1486 add_local_decl (cfun, var);
1487 else if (rtl == NULL_RTX)
1488 /* If rtl isn't set yet, which can happen e.g. with
1489 -fstack-protector, retry before returning from this
1491 VEC_safe_push (tree, heap, maybe_local_decls, var);
1495 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
1497 +-----------------+-----------------+
1498 | ...processed... | ...duplicates...|
1499 +-----------------+-----------------+
1501 +-- LEN points here.
1503 We just want the duplicates, as those are the artificial
1504 non-ignored vars that we want to keep until instantiate_decls.
1505 Move them down and truncate the array. */
1506 if (!VEC_empty (tree, cfun->local_decls))
1507 VEC_block_remove (tree, cfun->local_decls, 0, len);
1509 /* At this point, all variables within the block tree with TREE_USED
1510 set are actually used by the optimized function. Lay them out. */
1511 expand_used_vars_for_block (outer_block, true);
1513 if (stack_vars_num > 0)
1515 /* Due to the way alias sets work, no variables with non-conflicting
1516 alias sets may be assigned the same address. Add conflicts to
1518 add_alias_set_conflicts ();
1520 /* If stack protection is enabled, we don't share space between
1521 vulnerable data and non-vulnerable data. */
1522 if (flag_stack_protect)
1523 add_stack_protection_conflicts ();
1525 /* Now that we have collected all stack variables, and have computed a
1526 minimal interference graph, attempt to save some stack space. */
1527 partition_stack_vars ();
1529 dump_stack_var_partition ();
1532 /* There are several conditions under which we should create a
1533 stack guard: protect-all, alloca used, protected decls present. */
1534 if (flag_stack_protect == 2
1535 || (flag_stack_protect
1536 && (cfun->calls_alloca || has_protected_decls)))
1537 create_stack_guard ();
1539 /* Assign rtl to each variable based on these partitions. */
1540 if (stack_vars_num > 0)
1542 /* Reorder decls to be protected by iterating over the variables
1543 array multiple times, and allocating out of each phase in turn. */
1544 /* ??? We could probably integrate this into the qsort we did
1545 earlier, such that we naturally see these variables first,
1546 and thus naturally allocate things in the right order. */
1547 if (has_protected_decls)
1549 /* Phase 1 contains only character arrays. */
1550 expand_stack_vars (stack_protect_decl_phase_1);
1552 /* Phase 2 contains other kinds of arrays. */
1553 if (flag_stack_protect == 2)
1554 expand_stack_vars (stack_protect_decl_phase_2);
1557 expand_stack_vars (NULL);
1559 fini_vars_expansion ();
1562 /* If there were any artificial non-ignored vars without rtl
1563 found earlier, see if deferred stack allocation hasn't assigned
1565 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
1567 rtx rtl = DECL_RTL_IF_SET (var);
1569 /* Keep artificial non-ignored vars in cfun->local_decls
1570 chain until instantiate_decls. */
1571 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1572 add_local_decl (cfun, var);
1574 VEC_free (tree, heap, maybe_local_decls);
1576 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1577 if (STACK_ALIGNMENT_NEEDED)
1579 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1580 if (!FRAME_GROWS_DOWNWARD)
1581 frame_offset += align - 1;
1582 frame_offset &= -align;
1587 /* If we need to produce a detailed dump, print the tree representation
1588 for STMT to the dump file. SINCE is the last RTX after which the RTL
1589 generated for STMT should have been appended. */
1592 maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1594 if (dump_file && (dump_flags & TDF_DETAILS))
1596 fprintf (dump_file, "\n;; ");
1597 print_gimple_stmt (dump_file, stmt, 0,
1598 TDF_SLIM | (dump_flags & TDF_LINENO));
1599 fprintf (dump_file, "\n");
1601 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1605 /* Maps the blocks that do not contain tree labels to rtx labels. */
1607 static struct pointer_map_t *lab_rtx_for_bb;
1609 /* Returns the label_rtx expression for a label starting basic block BB. */
1612 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
1614 gimple_stmt_iterator gsi;
1619 if (bb->flags & BB_RTL)
1620 return block_label (bb);
1622 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1626 /* Find the tree label if it is present. */
1628 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1630 lab_stmt = gsi_stmt (gsi);
1631 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1634 lab = gimple_label_label (lab_stmt);
1635 if (DECL_NONLOCAL (lab))
1638 return label_rtx (lab);
1641 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1642 *elt = gen_label_rtx ();
1647 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1648 of a basic block where we just expanded the conditional at the end,
1649 possibly clean up the CFG and instruction sequence. LAST is the
1650 last instruction before the just emitted jump sequence. */
1653 maybe_cleanup_end_of_block (edge e, rtx last)
1655 /* Special case: when jumpif decides that the condition is
1656 trivial it emits an unconditional jump (and the necessary
1657 barrier). But we still have two edges, the fallthru one is
1658 wrong. purge_dead_edges would clean this up later. Unfortunately
1659 we have to insert insns (and split edges) before
1660 find_many_sub_basic_blocks and hence before purge_dead_edges.
1661 But splitting edges might create new blocks which depend on the
1662 fact that if there are two edges there's no barrier. So the
1663 barrier would get lost and verify_flow_info would ICE. Instead
1664 of auditing all edge splitters to care for the barrier (which
1665 normally isn't there in a cleaned CFG), fix it here. */
1666 if (BARRIER_P (get_last_insn ()))
1670 /* Now, we have a single successor block, if we have insns to
1671 insert on the remaining edge we potentially will insert
1672 it at the end of this block (if the dest block isn't feasible)
1673 in order to avoid splitting the edge. This insertion will take
1674 place in front of the last jump. But we might have emitted
1675 multiple jumps (conditional and one unconditional) to the
1676 same destination. Inserting in front of the last one then
1677 is a problem. See PR 40021. We fix this by deleting all
1678 jumps except the last unconditional one. */
1679 insn = PREV_INSN (get_last_insn ());
1680 /* Make sure we have an unconditional jump. Otherwise we're
1682 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1683 for (insn = PREV_INSN (insn); insn != last;)
1685 insn = PREV_INSN (insn);
1686 if (JUMP_P (NEXT_INSN (insn)))
1687 delete_insn (NEXT_INSN (insn));
1692 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1693 Returns a new basic block if we've terminated the current basic
1694 block and created a new one. */
1697 expand_gimple_cond (basic_block bb, gimple stmt)
1699 basic_block new_bb, dest;
1704 enum tree_code code;
1707 code = gimple_cond_code (stmt);
1708 op0 = gimple_cond_lhs (stmt);
1709 op1 = gimple_cond_rhs (stmt);
1710 /* We're sometimes presented with such code:
1714 This would expand to two comparisons which then later might
1715 be cleaned up by combine. But some pattern matchers like if-conversion
1716 work better when there's only one compare, so make up for this
1717 here as a special exception if TER would have made the same change. */
1718 if (gimple_cond_single_var_p (stmt)
1720 && TREE_CODE (op0) == SSA_NAME
1721 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
1723 gimple second = SSA_NAME_DEF_STMT (op0);
1724 if (gimple_code (second) == GIMPLE_ASSIGN)
1726 enum tree_code code2 = gimple_assign_rhs_code (second);
1727 if (TREE_CODE_CLASS (code2) == tcc_comparison)
1730 op0 = gimple_assign_rhs1 (second);
1731 op1 = gimple_assign_rhs2 (second);
1733 /* If jumps are cheap turn some more codes into
1735 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
1737 if ((code2 == BIT_AND_EXPR
1738 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
1739 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
1740 || code2 == TRUTH_AND_EXPR)
1742 code = TRUTH_ANDIF_EXPR;
1743 op0 = gimple_assign_rhs1 (second);
1744 op1 = gimple_assign_rhs2 (second);
1746 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
1748 code = TRUTH_ORIF_EXPR;
1749 op0 = gimple_assign_rhs1 (second);
1750 op1 = gimple_assign_rhs2 (second);
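/* Editorial note, not part of the original source: as an example of the
   folding above, for
       x_3 = a_1 < b_2;
       if (x_3 != 0) goto <L1>; else goto <L2>;
   where x_3 is a boolean that TER would have substituted, code/op0/op1
   become LT_EXPR/a_1/b_2, so a single compare-and-branch is emitted instead
   of materializing x_3 first.  */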
1756 last2 = last = get_last_insn ();
1758 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
1759 if (gimple_has_location (stmt))
1761 set_curr_insn_source_location (gimple_location (stmt));
1762 set_curr_insn_block (gimple_block (stmt));
1765 /* These flags have no purpose in RTL land. */
1766 true_edge->flags &= ~EDGE_TRUE_VALUE;
1767 false_edge->flags &= ~EDGE_FALSE_VALUE;
1769 /* We can either have a pure conditional jump with one fallthru edge or
1770 two-way jump that needs to be decomposed into two basic blocks. */
1771 if (false_edge->dest == bb->next_bb)
1773 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1774 true_edge->probability);
1775 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1776 if (true_edge->goto_locus)
1778 set_curr_insn_source_location (true_edge->goto_locus);
1779 set_curr_insn_block (true_edge->goto_block);
1780 true_edge->goto_locus = curr_insn_locator ();
1782 true_edge->goto_block = NULL;
1783 false_edge->flags |= EDGE_FALLTHRU;
1784 maybe_cleanup_end_of_block (false_edge, last);
1787 if (true_edge->dest == bb->next_bb)
1789 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
1790 false_edge->probability);
1791 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1792 if (false_edge->goto_locus)
1794 set_curr_insn_source_location (false_edge->goto_locus);
1795 set_curr_insn_block (false_edge->goto_block);
1796 false_edge->goto_locus = curr_insn_locator ();
1798 false_edge->goto_block = NULL;
1799 true_edge->flags |= EDGE_FALLTHRU;
1800 maybe_cleanup_end_of_block (true_edge, last);
1804 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
1805 true_edge->probability);
1806 last = get_last_insn ();
1807 if (false_edge->goto_locus)
1809 set_curr_insn_source_location (false_edge->goto_locus);
1810 set_curr_insn_block (false_edge->goto_block);
1811 false_edge->goto_locus = curr_insn_locator ();
1813 false_edge->goto_block = NULL;
1814 emit_jump (label_rtx_for_bb (false_edge->dest));
1817 if (BARRIER_P (BB_END (bb)))
1818 BB_END (bb) = PREV_INSN (BB_END (bb));
1819 update_bb_for_insn (bb);
1821 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1822 dest = false_edge->dest;
1823 redirect_edge_succ (false_edge, new_bb);
1824 false_edge->flags |= EDGE_FALLTHRU;
1825 new_bb->count = false_edge->count;
1826 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1827 new_edge = make_edge (new_bb, dest, 0);
1828 new_edge->probability = REG_BR_PROB_BASE;
1829 new_edge->count = new_bb->count;
1830 if (BARRIER_P (BB_END (new_bb)))
1831 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1832 update_bb_for_insn (new_bb);
1834 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
1836 if (true_edge->goto_locus)
1838 set_curr_insn_source_location (true_edge->goto_locus);
1839 set_curr_insn_block (true_edge->goto_block);
1840 true_edge->goto_locus = curr_insn_locator ();
1842 true_edge->goto_block = NULL;
1847 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
1851 expand_call_stmt (gimple stmt)
1854 tree lhs = gimple_call_lhs (stmt);
1859 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
1861 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
1862 decl = gimple_call_fndecl (stmt);
1863 builtin_p = decl && DECL_BUILT_IN (decl);
1865 TREE_TYPE (exp) = gimple_call_return_type (stmt);
1866 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
1868 for (i = 0; i < gimple_call_num_args (stmt); i++)
1870 tree arg = gimple_call_arg (stmt, i);
1872 /* TER addresses into arguments of builtin functions so we have a
1873 chance to infer more correct alignment information. See PR39954. */
1875 && TREE_CODE (arg) == SSA_NAME
1876 && (def = get_gimple_for_ssa_name (arg))
1877 && gimple_assign_rhs_code (def) == ADDR_EXPR)
1878 arg = gimple_assign_rhs1 (def);
1879 CALL_EXPR_ARG (exp, i) = arg;
1882 if (gimple_has_side_effects (stmt))
1883 TREE_SIDE_EFFECTS (exp) = 1;
1885 if (gimple_call_nothrow_p (stmt))
1886 TREE_NOTHROW (exp) = 1;
1888 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
1889 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
1890 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
1891 CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
1892 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
1893 SET_EXPR_LOCATION (exp, gimple_location (stmt));
1894 TREE_BLOCK (exp) = gimple_block (stmt);
1897 expand_assignment (lhs, exp, false);
1899 expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
1902 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
1903 STMT that doesn't require special handling for outgoing edges. That
1904 is, no tailcalls and no GIMPLE_COND. */
1907 expand_gimple_stmt_1 (gimple stmt)
1910 switch (gimple_code (stmt))
1913 op0 = gimple_goto_dest (stmt);
1914 if (TREE_CODE (op0) == LABEL_DECL)
1917 expand_computed_goto (op0);
1920 expand_label (gimple_label_label (stmt));
1923 case GIMPLE_PREDICT:
1929 expand_asm_stmt (stmt);
1932 expand_call_stmt (stmt);
1936 op0 = gimple_return_retval (stmt);
1938 if (op0 && op0 != error_mark_node)
1940 tree result = DECL_RESULT (current_function_decl);
1942 /* If we are not returning the current function's RESULT_DECL,
1943 build an assignment to it. */
1946 /* I believe that a function's RESULT_DECL is unique. */
1947 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
1949 /* ??? We'd like to use simply expand_assignment here,
1950 but this fails if the value is of BLKmode but the return
1951 decl is a register. expand_return has special handling
1952 for this combination, which eventually should move
1953 to common code. See comments there. Until then, let's
1954 build a modify expression :-/ */
1955 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
1960 expand_null_return ();
1962 expand_return (op0);
1967 tree lhs = gimple_assign_lhs (stmt);
1969 /* Tree expand used to fiddle with |= and &= of two bitfield
1970 COMPONENT_REFs here. This can't happen with GIMPLE, since the LHS
1971 of binary assigns must be a gimple register. */
1973 if (TREE_CODE (lhs) != SSA_NAME
1974 || get_gimple_rhs_class (gimple_expr_code (stmt))
1975 == GIMPLE_SINGLE_RHS)
1977 tree rhs = gimple_assign_rhs1 (stmt);
1978 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
1979 == GIMPLE_SINGLE_RHS);
1980 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
1981 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
1982 expand_assignment (lhs, rhs,
1983 gimple_assign_nontemporal_move_p (stmt));
1988 bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
1989 struct separate_ops ops;
1990 bool promoted = false;
1992 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
1993 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
1996 ops.code = gimple_assign_rhs_code (stmt);
1997 ops.type = TREE_TYPE (lhs);
1998 switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
2000 case GIMPLE_TERNARY_RHS:
2001 ops.op2 = gimple_assign_rhs3 (stmt);
2003 case GIMPLE_BINARY_RHS:
2004 ops.op1 = gimple_assign_rhs2 (stmt);
2006 case GIMPLE_UNARY_RHS:
2007 ops.op0 = gimple_assign_rhs1 (stmt);
2012 ops.location = gimple_location (stmt);
2014 /* If we want to use a nontemporal store, force the value to
2015 a register first. If we store into a promoted register,
2016 don't directly expand to target. */
2017 temp = nontemporal || promoted ? NULL_RTX : target;
2018 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
2025 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
2026 /* If TEMP is a VOIDmode constant, use convert_modes to make
2027 sure that we properly convert it. */
2028 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
2030 temp = convert_modes (GET_MODE (target),
2031 TYPE_MODE (ops.type),
2033 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
2034 GET_MODE (target), temp, unsignedp);
2037 convert_move (SUBREG_REG (target), temp, unsignedp);
2039 else if (nontemporal && emit_storent_insn (target, temp))
2043 temp = force_operand (temp, target);
2045 emit_move_insn (target, temp);
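/* Editorial note, not part of the original source: the tail above commits
   the expanded value in one of three ways: a store into a promoted SUBREG
   goes through convert_modes/convert_move so the value is extended according
   to the promotion, a nontemporal move is attempted via emit_storent_insn,
   and everything else is forced into a valid operand with force_operand and
   stored with a plain emit_move_insn.  */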
2056 /* Expand one gimple statement STMT and return the last RTL instruction
2057 before any of the newly generated ones.
2059 In addition to generating the necessary RTL instructions this also
2060 sets REG_EH_REGION notes if necessary and sets the current source
2061 location for diagnostics. */
2064 expand_gimple_stmt (gimple stmt)
2068 location_t saved_location = input_location;
2070 last = get_last_insn ();
2072 /* If this is an expression of some kind and it has an associated line
2073 number, then emit the line number before expanding the expression.
2075 We need to save and restore the file and line information so that
2076 errors discovered during expansion are emitted with the right
2077 information. It would be better if the diagnostic routines
2078 used the file/line information embedded in the tree nodes rather
2082 if (gimple_has_location (stmt))
2084 input_location = gimple_location (stmt);
2085 set_curr_insn_source_location (input_location);
2087 /* Record where the insns produced belong. */
2088 set_curr_insn_block (gimple_block (stmt));
2091 expand_gimple_stmt_1 (stmt);
2092 /* Free any temporaries used to evaluate this statement. */
2095 input_location = saved_location;
2097 /* Mark all insns that may trap. */
2098 lp_nr = lookup_stmt_eh_lp (stmt);
2102 for (insn = next_real_insn (last); insn;
2103 insn = next_real_insn (insn))
2105 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
2106 /* If we want exceptions for non-call insns, any
2107 may_trap_p instruction may throw. */
2108 && GET_CODE (PATTERN (insn)) != CLOBBER
2109 && GET_CODE (PATTERN (insn)) != USE
2110 && insn_could_throw_p (insn))
2111 make_reg_eh_region_note (insn, 0, lp_nr);
2118 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2119 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2120 generated a tail call (something that might be denied by the ABI
2121 rules governing the call; see calls.c).
2123 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2124 can still reach the rest of BB. The case here is __builtin_sqrt,
2125 where the NaN result goes through the external function (with a
2126 tailcall) and the normal result happens via a sqrt instruction. */
2129 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2137 last2 = last = expand_gimple_stmt (stmt);
2139 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2140 if (CALL_P (last) && SIBLING_CALL_P (last))
2143 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2145 *can_fallthru = true;
2149 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2150 Any instructions emitted here are about to be deleted. */
2151 do_pending_stack_adjust ();
2153 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2154 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2155 EH or abnormal edges, we shouldn't have created a tail call in
2156 the first place. So it seems to me we should just be removing
2157 all edges here, or redirecting the existing fallthru edge to
2163 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2165 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2167 if (e->dest != EXIT_BLOCK_PTR)
2169 e->dest->count -= e->count;
2170 e->dest->frequency -= EDGE_FREQUENCY (e);
2171 if (e->dest->count < 0)
2173 if (e->dest->frequency < 0)
2174 e->dest->frequency = 0;
2177 probability += e->probability;
2184 /* This is somewhat ugly: the call_expr expander often emits instructions
2185 after the sibcall (to perform the function return). These confuse the
2186 find_many_sub_basic_blocks code, so we need to get rid of them. */
2187 last = NEXT_INSN (last);
2188 gcc_assert (BARRIER_P (last));
2190 *can_fallthru = false;
2191 while (NEXT_INSN (last))
2193 /* For instance, the sqrt builtin expander expands an if with a
2194 sibcall in the then arm and a label for the `else` arm. */
2195 if (LABEL_P (NEXT_INSN (last)))
2197 *can_fallthru = true;
2200 delete_insn (NEXT_INSN (last));
2203 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2204 e->probability += probability;
2207 update_bb_for_insn (bb);
2209 if (NEXT_INSN (last))
2211 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2214 if (BARRIER_P (last))
2215 BB_END (bb) = PREV_INSN (last);
2218 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2223 /* Return the difference between the floor and the truncated result of
2224 a signed division by OP1 with remainder MOD. */
2226 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2228 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
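/* Worked example: for -7 / 2, truncating division gives -3 with
   remainder -1; op1 / mod = 2 / -1 < 0, so the adjustment is -1 and
   floor division yields -3 + -1 = -4.  */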
2229 return gen_rtx_IF_THEN_ELSE
2230 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2231 gen_rtx_IF_THEN_ELSE
2232 (mode, gen_rtx_LT (BImode,
2233 gen_rtx_DIV (mode, op1, mod),
2235 constm1_rtx, const0_rtx),
2239 /* Return the difference between the ceil and the truncated result of
2240 a signed division by OP1 with remainder MOD. */
2242 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2244 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
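/* Worked example: for 7 / 2, truncating division gives 3 with
   remainder 1; op1 / mod = 2 / 1 > 0, so the adjustment is 1 and
   ceiling division yields 3 + 1 = 4.  */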
2245 return gen_rtx_IF_THEN_ELSE
2246 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2247 gen_rtx_IF_THEN_ELSE
2248 (mode, gen_rtx_GT (BImode,
2249 gen_rtx_DIV (mode, op1, mod),
2251 const1_rtx, const0_rtx),
2255 /* Return the difference between the ceil and the truncated result of
2256 an unsigned division by OP1 with remainder MOD. */
2258 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2260 /* (mod != 0 ? 1 : 0) */
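/* Worked example: for 7u / 2u, truncating division gives 3 with
   remainder 1, so the adjustment is 1 and ceiling division yields 4.  */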
2261 return gen_rtx_IF_THEN_ELSE
2262 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2263 const1_rtx, const0_rtx);
2266 /* Return the difference between the rounded and the truncated result
2267 of a signed division by OP1 with remainder MOD. Halfway cases are
2268 rounded away from zero, rather than to the nearest even number. */
2270 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2272 /* (abs (mod) >= abs (op1) - abs (mod)
2273 ? (op1 / mod > 0 ? 1 : -1) : 0) */
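/* Worked example: for 7 / 4, truncating division gives 1 with
   remainder 3; abs (3) >= abs (4) - abs (3), and op1 / mod = 4 / 3 > 0,
   so the adjustment is 1 and rounding division yields 1 + 1 = 2.  */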
2275 return gen_rtx_IF_THEN_ELSE
2276 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2277 gen_rtx_MINUS (mode,
2278 gen_rtx_ABS (mode, op1),
2279 gen_rtx_ABS (mode, mod))),
2280 gen_rtx_IF_THEN_ELSE
2281 (mode, gen_rtx_GT (BImode,
2282 gen_rtx_DIV (mode, op1, mod),
2284 const1_rtx, constm1_rtx),
2288 /* Return the difference between the rounded and the truncated result
2289 of an unsigned division by OP1 with remainder MOD. Halfway cases
2290 are rounded away from zero, rather than to the nearest even number. */
2293 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2295 /* (mod >= op1 - mod ? 1 : 0) */
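/* Worked example: for 7u / 4u, truncating division gives 1 with
   remainder 3; 3 >= 4 - 3, so the adjustment is 1 and rounding
   division yields 2.  */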
2296 return gen_rtx_IF_THEN_ELSE
2297 (mode, gen_rtx_GE (BImode, mod,
2298 gen_rtx_MINUS (mode, op1, mod)),
2299 const1_rtx, const0_rtx);
2302 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting insns. */
2306 convert_debug_memory_address (enum machine_mode mode, rtx x)
2308 enum machine_mode xmode = GET_MODE (x);
2310 #ifndef POINTERS_EXTEND_UNSIGNED
2311 gcc_assert (mode == Pmode);
2312 gcc_assert (xmode == mode || xmode == VOIDmode);
2314 gcc_assert (mode == Pmode || mode == ptr_mode);
2316 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2319 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2320 x = simplify_gen_subreg (mode, x, xmode,
2321 subreg_lowpart_offset
2323 else if (POINTERS_EXTEND_UNSIGNED > 0)
2324 x = gen_rtx_ZERO_EXTEND (mode, x);
2325 else if (!POINTERS_EXTEND_UNSIGNED)
2326 x = gen_rtx_SIGN_EXTEND (mode, x);
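/* (Per the documented POINTERS_EXTEND_UNSIGNED convention: a value
   greater than zero means pointers are zero-extended from ptr_mode to
   Pmode, zero means they are sign-extended; a negative value would
   select the target's ptr_extend pattern instead.)  */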
2329 #endif /* POINTERS_EXTEND_UNSIGNED */
2334 /* Return an RTX equivalent to the value of the tree expression EXP. */
2338 expand_debug_expr (tree exp)
2340 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2341 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2342 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2345 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2347 case tcc_expression:
2348 switch (TREE_CODE (exp))
2352 case WIDEN_MULT_PLUS_EXPR:
2353 case WIDEN_MULT_MINUS_EXPR:
2356 case TRUTH_ANDIF_EXPR:
2357 case TRUTH_ORIF_EXPR:
2358 case TRUTH_AND_EXPR:
2360 case TRUTH_XOR_EXPR:
2363 case TRUTH_NOT_EXPR:
2372 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2379 case tcc_comparison:
2380 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2387 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2397 case tcc_exceptional:
2398 case tcc_declaration:
2404 switch (TREE_CODE (exp))
2407 if (!lookup_constant_def (exp))
2409 if (strlen (TREE_STRING_POINTER (exp)) + 1
2410 != (size_t) TREE_STRING_LENGTH (exp))
2412 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2413 op0 = gen_rtx_MEM (BLKmode, op0);
2414 set_mem_attributes (op0, exp, 0);
2417 /* Fall through... */
2422 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2426 gcc_assert (COMPLEX_MODE_P (mode));
2427 op0 = expand_debug_expr (TREE_REALPART (exp));
2428 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2429 return gen_rtx_CONCAT (mode, op0, op1);
2431 case DEBUG_EXPR_DECL:
2432 op0 = DECL_RTL_IF_SET (exp);
2437 op0 = gen_rtx_DEBUG_EXPR (mode);
2438 DEBUG_EXPR_TREE_DECL (op0) = exp;
2439 SET_DECL_RTL (exp, op0);
2449 op0 = DECL_RTL_IF_SET (exp);
2451 /* This decl was probably optimized away. */
2454 if (TREE_CODE (exp) != VAR_DECL
2455 || DECL_EXTERNAL (exp)
2456 || !TREE_STATIC (exp)
2458 || DECL_HARD_REGISTER (exp)
2459 || mode == VOIDmode)
2462 op0 = make_decl_rtl_for_debug (exp);
2464 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2465 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2469 op0 = copy_rtx (op0);
2471 if (GET_MODE (op0) == BLKmode
2472 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2473 below would ICE. While it is likely a FE bug,
2474 try to be robust here. See PR43166. */
2476 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2478 gcc_assert (MEM_P (op0));
2479 op0 = adjust_address_nv (op0, mode, 0);
2490 enum machine_mode inner_mode = GET_MODE (op0);
2492 if (mode == inner_mode)
2495 if (inner_mode == VOIDmode)
2497 if (TREE_CODE (exp) == SSA_NAME)
2498 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2500 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2501 if (mode == inner_mode)
2505 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2507 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2508 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2509 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2510 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2512 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2514 else if (FLOAT_MODE_P (mode))
2516 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2517 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2518 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2520 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2522 else if (FLOAT_MODE_P (inner_mode))
2525 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2527 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2529 else if (CONSTANT_P (op0)
2530 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2531 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2532 subreg_lowpart_offset (mode,
2534 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2535 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2537 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2539 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2546 if (!integer_zerop (TREE_OPERAND (exp, 1)))
2550 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2554 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2555 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2557 as = ADDR_SPACE_GENERIC;
2559 op0 = gen_rtx_MEM (mode, op0);
2561 set_mem_attributes (op0, exp, 0);
2562 set_mem_addr_space (op0, as);
2566 case TARGET_MEM_REF:
2567 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2568 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2571 op0 = expand_debug_expr
2572 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2576 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
2578 op0 = gen_rtx_MEM (mode, op0);
2580 set_mem_attributes (op0, exp, 0);
2581 set_mem_addr_space (op0, as);
2586 case ARRAY_RANGE_REF:
2591 case VIEW_CONVERT_EXPR:
2593 enum machine_mode mode1;
2594 HOST_WIDE_INT bitsize, bitpos;
2597 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2598 &mode1, &unsignedp, &volatilep, false);
2604 orig_op0 = op0 = expand_debug_expr (tem);
2611 enum machine_mode addrmode, offmode;
2616 op0 = XEXP (op0, 0);
2617 addrmode = GET_MODE (op0);
2618 if (addrmode == VOIDmode)
2621 op1 = expand_debug_expr (offset);
2625 offmode = GET_MODE (op1);
2626 if (offmode == VOIDmode)
2627 offmode = TYPE_MODE (TREE_TYPE (offset));
2629 if (addrmode != offmode)
2630 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2631 subreg_lowpart_offset (addrmode,
2634 /* Don't use offset_address here, we don't need a
2635 recognizable address, and we don't want to generate
2637 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2642 if (mode1 == VOIDmode)
2644 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2645 if (bitpos >= BITS_PER_UNIT)
2647 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2648 bitpos %= BITS_PER_UNIT;
2650 else if (bitpos < 0)
2653 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2654 op0 = adjust_address_nv (op0, mode1, units);
2655 bitpos += units * BITS_PER_UNIT;
2657 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2658 op0 = adjust_address_nv (op0, mode, 0);
2659 else if (GET_MODE (op0) != mode1)
2660 op0 = adjust_address_nv (op0, mode1, 0);
2662 op0 = copy_rtx (op0);
2663 if (op0 == orig_op0)
2664 op0 = shallow_copy_rtx (op0);
2665 set_mem_attributes (op0, exp, 0);
2668 if (bitpos == 0 && mode == GET_MODE (op0))
2674 if (GET_MODE (op0) == BLKmode)
2677 if ((bitpos % BITS_PER_UNIT) == 0
2678 && bitsize == GET_MODE_BITSIZE (mode1))
2680 enum machine_mode opmode = GET_MODE (op0);
2682 if (opmode == VOIDmode)
2685 /* This condition may hold if we're expanding the address
2686 right past the end of an array that turned out not to
2687 be addressable (i.e., the address was only computed in
2688 debug stmts). The gen_subreg below would rightfully
2689 crash, and the address doesn't really exist, so just
2691 if (bitpos >= GET_MODE_BITSIZE (opmode))
2694 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2695 return simplify_gen_subreg (mode, op0, opmode,
2696 bitpos / BITS_PER_UNIT);
2699 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2700 && TYPE_UNSIGNED (TREE_TYPE (exp))
2702 : ZERO_EXTRACT, mode,
2703 GET_MODE (op0) != VOIDmode
2704 ? GET_MODE (op0) : mode1,
2705 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2709 return gen_rtx_ABS (mode, op0);
2712 return gen_rtx_NEG (mode, op0);
2715 return gen_rtx_NOT (mode, op0);
2719 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2721 return gen_rtx_FLOAT (mode, op0);
2723 case FIX_TRUNC_EXPR:
2725 return gen_rtx_UNSIGNED_FIX (mode, op0);
2727 return gen_rtx_FIX (mode, op0);
2729 case POINTER_PLUS_EXPR:
2730 /* For the rare target where pointers are not the same size as
2731 size_t, we need to check for mis-matched modes and correct
2734 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2735 && GET_MODE (op0) != GET_MODE (op1))
2737 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2738 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2740 /* We always sign-extend, regardless of the signedness of
2741 the operand, because the operand is always unsigned
2742 here even if the original C expression is signed. */
2743 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2747 return gen_rtx_PLUS (mode, op0, op1);
2750 return gen_rtx_MINUS (mode, op0, op1);
2753 return gen_rtx_MULT (mode, op0, op1);
2756 case TRUNC_DIV_EXPR:
2757 case EXACT_DIV_EXPR:
2759 return gen_rtx_UDIV (mode, op0, op1);
2761 return gen_rtx_DIV (mode, op0, op1);
2763 case TRUNC_MOD_EXPR:
2765 return gen_rtx_UMOD (mode, op0, op1);
2767 return gen_rtx_MOD (mode, op0, op1);
2769 case FLOOR_DIV_EXPR:
2771 return gen_rtx_UDIV (mode, op0, op1);
2774 rtx div = gen_rtx_DIV (mode, op0, op1);
2775 rtx mod = gen_rtx_MOD (mode, op0, op1);
2776 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2777 return gen_rtx_PLUS (mode, div, adj);
2780 case FLOOR_MOD_EXPR:
2782 return gen_rtx_UMOD (mode, op0, op1);
2785 rtx mod = gen_rtx_MOD (mode, op0, op1);
2786 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2787 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2788 return gen_rtx_PLUS (mode, mod, adj);
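/* This uses the identity also applied to the CEIL_ and ROUND_ MOD cases
   below: since <rounding>_div = trunc_div + adjust, it follows that
   <rounding>_mod = trunc_mod - adjust * op1.  */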
2794 rtx div = gen_rtx_UDIV (mode, op0, op1);
2795 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2796 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2797 return gen_rtx_PLUS (mode, div, adj);
2801 rtx div = gen_rtx_DIV (mode, op0, op1);
2802 rtx mod = gen_rtx_MOD (mode, op0, op1);
2803 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2804 return gen_rtx_PLUS (mode, div, adj);
2810 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2811 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2812 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2813 return gen_rtx_PLUS (mode, mod, adj);
2817 rtx mod = gen_rtx_MOD (mode, op0, op1);
2818 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2819 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2820 return gen_rtx_PLUS (mode, mod, adj);
2823 case ROUND_DIV_EXPR:
2826 rtx div = gen_rtx_UDIV (mode, op0, op1);
2827 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2828 rtx adj = round_udiv_adjust (mode, mod, op1);
2829 return gen_rtx_PLUS (mode, div, adj);
2833 rtx div = gen_rtx_DIV (mode, op0, op1);
2834 rtx mod = gen_rtx_MOD (mode, op0, op1);
2835 rtx adj = round_sdiv_adjust (mode, mod, op1);
2836 return gen_rtx_PLUS (mode, div, adj);
2839 case ROUND_MOD_EXPR:
2842 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2843 rtx adj = round_udiv_adjust (mode, mod, op1);
2844 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2845 return gen_rtx_PLUS (mode, mod, adj);
2849 rtx mod = gen_rtx_MOD (mode, op0, op1);
2850 rtx adj = round_sdiv_adjust (mode, mod, op1);
2851 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2852 return gen_rtx_PLUS (mode, mod, adj);
2856 return gen_rtx_ASHIFT (mode, op0, op1);
2860 return gen_rtx_LSHIFTRT (mode, op0, op1);
2862 return gen_rtx_ASHIFTRT (mode, op0, op1);
2865 return gen_rtx_ROTATE (mode, op0, op1);
2868 return gen_rtx_ROTATERT (mode, op0, op1);
2872 return gen_rtx_UMIN (mode, op0, op1);
2874 return gen_rtx_SMIN (mode, op0, op1);
2878 return gen_rtx_UMAX (mode, op0, op1);
2880 return gen_rtx_SMAX (mode, op0, op1);
2883 case TRUTH_AND_EXPR:
2884 return gen_rtx_AND (mode, op0, op1);
2888 return gen_rtx_IOR (mode, op0, op1);
2891 case TRUTH_XOR_EXPR:
2892 return gen_rtx_XOR (mode, op0, op1);
2894 case TRUTH_ANDIF_EXPR:
2895 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2897 case TRUTH_ORIF_EXPR:
2898 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2900 case TRUTH_NOT_EXPR:
2901 return gen_rtx_EQ (mode, op0, const0_rtx);
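/* (For the TRUTH_* codes above, plain bitwise AND/IOR/XOR, and EQ against
   zero for negation, are presumably adequate here: the operands are
   expected to be boolean 0/1 values and these RTXes are only used for
   debug locations.)  */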
2905 return gen_rtx_LTU (mode, op0, op1);
2907 return gen_rtx_LT (mode, op0, op1);
2911 return gen_rtx_LEU (mode, op0, op1);
2913 return gen_rtx_LE (mode, op0, op1);
2917 return gen_rtx_GTU (mode, op0, op1);
2919 return gen_rtx_GT (mode, op0, op1);
2923 return gen_rtx_GEU (mode, op0, op1);
2925 return gen_rtx_GE (mode, op0, op1);
2928 return gen_rtx_EQ (mode, op0, op1);
2931 return gen_rtx_NE (mode, op0, op1);
2933 case UNORDERED_EXPR:
2934 return gen_rtx_UNORDERED (mode, op0, op1);
2937 return gen_rtx_ORDERED (mode, op0, op1);
2940 return gen_rtx_UNLT (mode, op0, op1);
2943 return gen_rtx_UNLE (mode, op0, op1);
2946 return gen_rtx_UNGT (mode, op0, op1);
2949 return gen_rtx_UNGE (mode, op0, op1);
2952 return gen_rtx_UNEQ (mode, op0, op1);
2955 return gen_rtx_LTGT (mode, op0, op1);
2958 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
2961 gcc_assert (COMPLEX_MODE_P (mode));
2962 if (GET_MODE (op0) == VOIDmode)
2963 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
2964 if (GET_MODE (op1) == VOIDmode)
2965 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
2966 return gen_rtx_CONCAT (mode, op0, op1);
2969 if (GET_CODE (op0) == CONCAT)
2970 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
2971 gen_rtx_NEG (GET_MODE_INNER (mode),
2975 enum machine_mode imode = GET_MODE_INNER (mode);
2980 re = adjust_address_nv (op0, imode, 0);
2981 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
2985 enum machine_mode ifmode = int_mode_for_mode (mode);
2986 enum machine_mode ihmode = int_mode_for_mode (imode);
2988 if (ifmode == BLKmode || ihmode == BLKmode)
2990 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
2993 re = gen_rtx_SUBREG (ifmode, re, 0);
2994 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
2995 if (imode != ihmode)
2996 re = gen_rtx_SUBREG (imode, re, 0);
2997 im = copy_rtx (op0);
2999 im = gen_rtx_SUBREG (ifmode, im, 0);
3000 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3001 if (imode != ihmode)
3002 im = gen_rtx_SUBREG (imode, im, 0);
3004 im = gen_rtx_NEG (imode, im);
3005 return gen_rtx_CONCAT (mode, re, im);
3009 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3010 if (!op0 || !MEM_P (op0))
3012 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3013 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3014 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3015 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3016 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
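/* (The DEBUG_IMPLICIT_PTR above, and the offset from one generated below,
   let the debugger refer to the address of a decl that was never made
   addressable and therefore has no actual memory location.)  */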
3018 if (handled_component_p (TREE_OPERAND (exp, 0)))
3020 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3022 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3023 &bitoffset, &bitsize, &maxsize);
3024 if ((TREE_CODE (decl) == VAR_DECL
3025 || TREE_CODE (decl) == PARM_DECL
3026 || TREE_CODE (decl) == RESULT_DECL)
3027 && !TREE_ADDRESSABLE (decl)
3028 && (bitoffset % BITS_PER_UNIT) == 0
3030 && bitsize == maxsize)
3031 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3032 bitoffset / BITS_PER_UNIT);
3038 op0 = convert_debug_memory_address (mode, XEXP (op0, 0));
3043 exp = build_constructor_from_list (TREE_TYPE (exp),
3044 TREE_VECTOR_CST_ELTS (exp));
3048 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3053 op0 = gen_rtx_CONCATN
3054 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3056 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3058 op1 = expand_debug_expr (val);
3061 XVECEXP (op0, 0, i) = op1;
3064 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3066 op1 = expand_debug_expr
3067 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node));
3072 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3073 XVECEXP (op0, 0, i) = op1;
3079 goto flag_unsupported;
3082 /* ??? Maybe handle some builtins? */
3087 gimple g = get_gimple_for_ssa_name (exp);
3090 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3096 int part = var_to_partition (SA.map, exp);
3098 if (part == NO_PARTITION)
3101 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3103 op0 = SA.partition_to_pseudo[part];
3111 /* Vector stuff. For most of these codes we have no corresponding rtl codes. */
3112 case REALIGN_LOAD_EXPR:
3113 case REDUC_MAX_EXPR:
3114 case REDUC_MIN_EXPR:
3115 case REDUC_PLUS_EXPR:
3117 case VEC_EXTRACT_EVEN_EXPR:
3118 case VEC_EXTRACT_ODD_EXPR:
3119 case VEC_INTERLEAVE_HIGH_EXPR:
3120 case VEC_INTERLEAVE_LOW_EXPR:
3121 case VEC_LSHIFT_EXPR:
3122 case VEC_PACK_FIX_TRUNC_EXPR:
3123 case VEC_PACK_SAT_EXPR:
3124 case VEC_PACK_TRUNC_EXPR:
3125 case VEC_RSHIFT_EXPR:
3126 case VEC_UNPACK_FLOAT_HI_EXPR:
3127 case VEC_UNPACK_FLOAT_LO_EXPR:
3128 case VEC_UNPACK_HI_EXPR:
3129 case VEC_UNPACK_LO_EXPR:
3130 case VEC_WIDEN_MULT_HI_EXPR:
3131 case VEC_WIDEN_MULT_LO_EXPR:
3135 case ADDR_SPACE_CONVERT_EXPR:
3136 case FIXED_CONVERT_EXPR:
3138 case WITH_SIZE_EXPR:
3142 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3143 && SCALAR_INT_MODE_P (mode))
3145 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3146 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3148 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3149 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3150 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3152 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3153 op0 = gen_rtx_MULT (mode, op0, op1);
3154 return gen_rtx_PLUS (mode, op0, op2);
3158 case WIDEN_MULT_EXPR:
3159 case WIDEN_MULT_PLUS_EXPR:
3160 case WIDEN_MULT_MINUS_EXPR:
3161 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3162 && SCALAR_INT_MODE_P (mode))
3164 enum machine_mode inner_mode = GET_MODE (op0);
3165 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3166 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3168 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3169 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3170 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3172 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3173 op0 = gen_rtx_MULT (mode, op0, op1);
3174 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3176 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3177 return gen_rtx_PLUS (mode, op0, op2);
3179 return gen_rtx_MINUS (mode, op2, op0);
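/* I.e. after extending both narrow operands and multiplying in MODE,
   WIDEN_MULT_PLUS_EXPR yields op0 * op1 + op2 and WIDEN_MULT_MINUS_EXPR
   yields op2 - op0 * op1.  */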
3183 case WIDEN_SUM_EXPR:
3184 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3185 && SCALAR_INT_MODE_P (mode))
3187 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3188 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3190 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3191 return gen_rtx_PLUS (mode, op0, op1);
3197 #ifdef ENABLE_CHECKING
3206 /* Expand the _LOCs in debug insns. We run this after expanding all
3207 regular insns, so that any variables referenced in the function
3208 will have their DECL_RTLs set. */
3211 expand_debug_locations (void)
3214 rtx last = get_last_insn ();
3215 int save_strict_alias = flag_strict_aliasing;
3217 /* New alias sets while setting up memory attributes cause
3218 -fcompare-debug failures, even though it doesn't bring about any codegen changes. */
3220 flag_strict_aliasing = 0;
3222 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3223 if (DEBUG_INSN_P (insn))
3225 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3227 enum machine_mode mode;
3229 if (value == NULL_TREE)
3233 val = expand_debug_expr (value);
3234 gcc_assert (last == get_last_insn ());
3238 val = gen_rtx_UNKNOWN_VAR_LOC ();
3241 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3243 gcc_assert (mode == GET_MODE (val)
3244 || (GET_MODE (val) == VOIDmode
3245 && (CONST_INT_P (val)
3246 || GET_CODE (val) == CONST_FIXED
3247 || GET_CODE (val) == CONST_DOUBLE
3248 || GET_CODE (val) == LABEL_REF)));
3251 INSN_VAR_LOCATION_LOC (insn) = val;
3254 flag_strict_aliasing = save_strict_alias;
3257 /* Expand basic block BB from GIMPLE trees to RTL. */
3260 expand_gimple_basic_block (basic_block bb)
3262 gimple_stmt_iterator gsi;
3271 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3274 /* Note that since we are now transitioning from GIMPLE to RTL, we
3275 cannot use the gsi_*_bb() routines because they expect the basic
3276 block to be in GIMPLE, instead of RTL. Therefore, we need to
3277 access the BB sequence directly. */
3278 stmts = bb_seq (bb);
3279 bb->il.gimple = NULL;
3280 rtl_profile_for_bb (bb);
3281 init_rtl_bb_info (bb);
3282 bb->flags |= BB_RTL;
3284 /* Remove the RETURN_EXPR if we may fall through to the exit instead. */
3286 gsi = gsi_last (stmts);
3287 if (!gsi_end_p (gsi)
3288 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3290 gimple ret_stmt = gsi_stmt (gsi);
3292 gcc_assert (single_succ_p (bb));
3293 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3295 if (bb->next_bb == EXIT_BLOCK_PTR
3296 && !gimple_return_retval (ret_stmt))
3298 gsi_remove (&gsi, false);
3299 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3303 gsi = gsi_start (stmts);
3304 if (!gsi_end_p (gsi))
3306 stmt = gsi_stmt (gsi);
3307 if (gimple_code (stmt) != GIMPLE_LABEL)
3311 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3315 last = get_last_insn ();
3319 expand_gimple_stmt (stmt);
3324 emit_label ((rtx) *elt);
3326 /* Java emits line number notes at the top of labels.
3327 ??? Make this go away once line number notes are obsoleted. */
3328 BB_HEAD (bb) = NEXT_INSN (last);
3329 if (NOTE_P (BB_HEAD (bb)))
3330 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3331 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3333 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3336 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3338 NOTE_BASIC_BLOCK (note) = bb;
3340 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3344 stmt = gsi_stmt (gsi);
3346 /* If this statement is a non-debug one, and we generate debug
3347 insns, then this one might be the last real use of a TERed
3348 SSA_NAME, but where there are still some debug uses further
3349 down. Expanding the current SSA name in such further debug
3350 uses by their RHS might lead to wrong debug info, as coalescing
3351 might make the operands of such RHS be placed into the same
3352 pseudo as something else. Like so:
3353 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3357 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3358 If we now were to expand a_1 by its RHS (a_0 + 1) in the debug use,
3359 the write to a_2 would actually have clobbered the place which
3362 So, instead of that, we recognize the situation, and generate
3363 debug temporaries at the last real use of TERed SSA names:
3370 if (MAY_HAVE_DEBUG_INSNS
3372 && !is_gimple_debug (stmt))
3378 location_t sloc = get_curr_insn_source_location ();
3379 tree sblock = get_curr_insn_block ();
3381 /* Look for SSA names that have their last use here (TERed
3382 names always have only one real use). */
3383 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3384 if ((def = get_gimple_for_ssa_name (op)))
3386 imm_use_iterator imm_iter;
3387 use_operand_p use_p;
3388 bool have_debug_uses = false;
3390 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3392 if (gimple_debug_bind_p (USE_STMT (use_p)))
3394 have_debug_uses = true;
3399 if (have_debug_uses)
3401 /* OP is a TERed SSA name, with DEF its defining
3402 statement, and OP is used in further debug
3403 instructions. Generate a debug temporary, and
3404 replace all uses of OP in debug insns with that
3407 tree value = gimple_assign_rhs_to_tree (def);
3408 tree vexpr = make_node (DEBUG_EXPR_DECL);
3410 enum machine_mode mode;
3412 set_curr_insn_source_location (gimple_location (def));
3413 set_curr_insn_block (gimple_block (def));
3415 DECL_ARTIFICIAL (vexpr) = 1;
3416 TREE_TYPE (vexpr) = TREE_TYPE (value);
3418 mode = DECL_MODE (value);
3420 mode = TYPE_MODE (TREE_TYPE (value));
3421 DECL_MODE (vexpr) = mode;
3423 val = gen_rtx_VAR_LOCATION
3424 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
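/* Note the (rtx) cast: VALUE is still a tree at this point.  It is
   stashed in the VAR_LOCATION and only turned into a real RTL location
   later, by expand_debug_locations.  */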
3426 val = emit_debug_insn (val);
3428 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3430 if (!gimple_debug_bind_p (debugstmt))
3433 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3434 SET_USE (use_p, vexpr);
3436 update_stmt (debugstmt);
3440 set_curr_insn_source_location (sloc);
3441 set_curr_insn_block (sblock);
3444 currently_expanding_gimple_stmt = stmt;
3446 /* Expand this statement, then evaluate the resulting RTL and
3447 fixup the CFG accordingly. */
3448 if (gimple_code (stmt) == GIMPLE_COND)
3450 new_bb = expand_gimple_cond (bb, stmt);
3454 else if (gimple_debug_bind_p (stmt))
3456 location_t sloc = get_curr_insn_source_location ();
3457 tree sblock = get_curr_insn_block ();
3458 gimple_stmt_iterator nsi = gsi;
3462 tree var = gimple_debug_bind_get_var (stmt);
3465 enum machine_mode mode;
3467 if (gimple_debug_bind_has_value_p (stmt))
3468 value = gimple_debug_bind_get_value (stmt);
3472 last = get_last_insn ();
3474 set_curr_insn_source_location (gimple_location (stmt));
3475 set_curr_insn_block (gimple_block (stmt));
3478 mode = DECL_MODE (var);
3480 mode = TYPE_MODE (TREE_TYPE (var));
3482 val = gen_rtx_VAR_LOCATION
3483 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3485 val = emit_debug_insn (val);
3487 if (dump_file && (dump_flags & TDF_DETAILS))
3489 /* We can't dump the insn with a TREE where an RTX
3491 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3492 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3493 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3496 /* In order not to generate too many debug temporaries,
3497 we delink all uses of debug statements we already expanded.
3498 Therefore debug statements between definition and real
3499 use of TERed SSA names will continue to use the SSA name,
3500 and not be replaced with debug temps. */
3501 delink_stmt_imm_use (stmt);
3505 if (gsi_end_p (nsi))
3507 stmt = gsi_stmt (nsi);
3508 if (!gimple_debug_bind_p (stmt))
3512 set_curr_insn_source_location (sloc);
3513 set_curr_insn_block (sblock);
3517 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3520 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3531 def_operand_p def_p;
3532 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3536 /* Ignore this stmt if it is in the list of
3537 replaceable expressions. */
3539 && bitmap_bit_p (SA.values,
3540 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3543 last = expand_gimple_stmt (stmt);
3544 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3549 currently_expanding_gimple_stmt = NULL;
3551 /* Expand implicit goto and convert goto_locus. */
3552 FOR_EACH_EDGE (e, ei, bb->succs)
3554 if (e->goto_locus && e->goto_block)
3556 set_curr_insn_source_location (e->goto_locus);
3557 set_curr_insn_block (e->goto_block);
3558 e->goto_locus = curr_insn_locator ();
3560 e->goto_block = NULL;
3561 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3563 emit_jump (label_rtx_for_bb (e->dest));
3564 e->flags &= ~EDGE_FALLTHRU;
3568 /* Expanded RTL can create a jump in the last instruction of the block.
3569 This later might be assumed to be a jump to the successor and break edge insertion.
3570 We need to insert a dummy move to prevent this. PR41440. */
3571 if (single_succ_p (bb)
3572 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3573 && (last = get_last_insn ())
3576 rtx dummy = gen_reg_rtx (SImode);
3577 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3580 do_pending_stack_adjust ();
3582 /* Find the block tail. The last insn in the block is the insn
3583 before a barrier and/or table jump insn. */
3584 last = get_last_insn ();
3585 if (BARRIER_P (last))
3586 last = PREV_INSN (last);
3587 if (JUMP_TABLE_DATA_P (last))
3588 last = PREV_INSN (PREV_INSN (last));
3591 update_bb_for_insn (bb);
3597 /* Create a basic block for initialization code. */
3600 construct_init_block (void)
3602 basic_block init_block, first_block;
3606 /* Multiple entry points not supported yet. */
3607 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3608 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3609 init_rtl_bb_info (EXIT_BLOCK_PTR);
3610 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3611 EXIT_BLOCK_PTR->flags |= BB_RTL;
3613 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3615 /* When the entry edge points to the first basic block, we don't need a
3616 jump; otherwise we have to jump to the proper target. */
3617 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3619 tree label = gimple_block_label (e->dest);
3621 emit_jump (label_rtx (label));
3625 flags = EDGE_FALLTHRU;
3627 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3630 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3631 init_block->count = ENTRY_BLOCK_PTR->count;
3634 first_block = e->dest;
3635 redirect_edge_succ (e, init_block);
3636 e = make_edge (init_block, first_block, flags);
3639 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3640 e->probability = REG_BR_PROB_BASE;
3641 e->count = ENTRY_BLOCK_PTR->count;
3643 update_bb_for_insn (init_block);
3647 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3648 found in the block tree. */
3651 set_block_levels (tree block, int level)
3655 BLOCK_NUMBER (block) = level;
3656 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3657 block = BLOCK_CHAIN (block);
3661 /* Create a block containing landing pads and similar stuff. */
3664 construct_exit_block (void)
3666 rtx head = get_last_insn ();
3668 basic_block exit_block;
3672 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3674 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3676 /* Make sure the locus is set to the end of the function, so that
3677 epilogue line numbers and warnings are set properly. */
3678 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3679 input_location = cfun->function_end_locus;
3681 /* The following insns belong to the top scope. */
3682 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3684 /* Generate rtl for function exit. */
3685 expand_function_end ();
3687 end = get_last_insn ();
3690 /* While emitting the function end we could move the end of the last basic block. */
3692 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3693 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3694 head = NEXT_INSN (head);
3695 exit_block = create_basic_block (NEXT_INSN (head), end,
3696 EXIT_BLOCK_PTR->prev_bb);
3697 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3698 exit_block->count = EXIT_BLOCK_PTR->count;
3701 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3703 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3704 if (!(e->flags & EDGE_ABNORMAL))
3705 redirect_edge_succ (e, exit_block);
3710 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3711 e->probability = REG_BR_PROB_BASE;
3712 e->count = EXIT_BLOCK_PTR->count;
3713 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3716 e->count -= e2->count;
3717 exit_block->count -= e2->count;
3718 exit_block->frequency -= EDGE_FREQUENCY (e2);
3722 if (exit_block->count < 0)
3723 exit_block->count = 0;
3724 if (exit_block->frequency < 0)
3725 exit_block->frequency = 0;
3726 update_bb_for_insn (exit_block);
3729 /* Helper function for discover_nonconstant_array_refs.
3730 Look for ARRAY_REF nodes with non-constant indexes and mark them
3734 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3735 void *data ATTRIBUTE_UNUSED)
3739 if (IS_TYPE_OR_DECL_P (t))
3741 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3743 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3744 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3745 && (!TREE_OPERAND (t, 2)
3746 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3747 || (TREE_CODE (t) == COMPONENT_REF
3748 && (!TREE_OPERAND (t,2)
3749 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3750 || TREE_CODE (t) == BIT_FIELD_REF
3751 || TREE_CODE (t) == REALPART_EXPR
3752 || TREE_CODE (t) == IMAGPART_EXPR
3753 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3754 || CONVERT_EXPR_P (t))
3755 t = TREE_OPERAND (t, 0);
3757 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3759 t = get_base_address (t);
3761 && DECL_MODE (t) != BLKmode)
3762 TREE_ADDRESSABLE (t) = 1;
3771 /* RTL expansion is not able to compile array references with variable
3772 offsets for arrays stored in a single register. Discover such
3773 expressions and mark variables as addressable to avoid this
3777 discover_nonconstant_array_refs (void)
3780 gimple_stmt_iterator gsi;
3783 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3785 gimple stmt = gsi_stmt (gsi);
3786 if (!is_gimple_debug (stmt))
3787 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
3791 /* This function sets crtl->args.internal_arg_pointer to a virtual
3792 register if DRAP is needed. Local register allocator will replace
3793 virtual_incoming_args_rtx with the virtual register. */
3796 expand_stack_alignment (void)
3799 unsigned int preferred_stack_boundary;
3801 if (! SUPPORTS_STACK_ALIGNMENT)
3804 if (cfun->calls_alloca
3805 || cfun->has_nonlocal_label
3806 || crtl->has_nonlocal_goto)
3807 crtl->need_drap = true;
3809 /* Call update_stack_boundary here again to update incoming stack
3810 boundary. It may set incoming stack alignment to a different
3811 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3812 use the minimum incoming stack alignment to check if it is OK
3813 to perform sibcall optimization since sibcall optimization will
3814 only align the outgoing stack to incoming stack boundary. */
3815 if (targetm.calls.update_stack_boundary)
3816 targetm.calls.update_stack_boundary ();
3818 /* The incoming stack frame has to be aligned at least at
3819 parm_stack_boundary. */
3820 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3822 /* Update crtl->stack_alignment_estimated and use it later to align
3823 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3824 exceptions since callgraph doesn't collect incoming stack alignment
3826 if (cfun->can_throw_non_call_exceptions
3827 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3828 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3830 preferred_stack_boundary = crtl->preferred_stack_boundary;
3831 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3832 crtl->stack_alignment_estimated = preferred_stack_boundary;
3833 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3834 crtl->stack_alignment_needed = preferred_stack_boundary;
3836 gcc_assert (crtl->stack_alignment_needed
3837 <= crtl->stack_alignment_estimated);
3839 crtl->stack_realign_needed
3840 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3841 crtl->stack_realign_tried = crtl->stack_realign_needed;
3843 crtl->stack_realign_processed = true;
3845 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3847 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3848 drap_rtx = targetm.calls.get_drap_rtx ();
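/* (DRAP: the dynamic realign argument pointer, a register the target
   supplies for addressing incoming arguments when the stack has to be
   realigned; NULL means no DRAP is needed, see below.)  */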
3850 /* stack_realign_drap and drap_rtx must match. */
3851 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3853 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3854 if (NULL != drap_rtx)
3856 crtl->args.internal_arg_pointer = drap_rtx;
3858 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3860 fixup_tail_calls ();
3864 /* Translate the intermediate representation contained in the CFG
3865 from GIMPLE trees to RTL.
3867 We do conversion per basic block and preserve/update the tree CFG.
3868 This implies we have to do some magic as the CFG can simultaneously
3869 consist of basic blocks containing RTL and GIMPLE trees. This can
3870 confuse the CFG hooks, so be careful not to manipulate the CFG during expansion. */
3874 gimple_expand_cfg (void)
3876 basic_block bb, init_block;
3883 timevar_push (TV_OUT_OF_SSA);
3884 rewrite_out_of_ssa (&SA);
3885 timevar_pop (TV_OUT_OF_SSA);
3886 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3889 /* Some backends want to know that we are expanding to RTL. */
3890 currently_expanding_to_rtl = 1;
3892 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3894 insn_locators_alloc ();
3895 if (!DECL_IS_BUILTIN (current_function_decl))
3897 /* Eventually, all FEs should explicitly set function_start_locus. */
3898 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3899 set_curr_insn_source_location
3900 (DECL_SOURCE_LOCATION (current_function_decl));
3902 set_curr_insn_source_location (cfun->function_start_locus);
3904 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3905 prologue_locator = curr_insn_locator ();
3907 #ifdef INSN_SCHEDULING
3908 init_sched_attrs ();
3911 /* Make sure the first insn is a note even if we don't want linenums.
3912 This makes sure the first insn will never be deleted.
3913 Also, final expects a note to appear there. */
3914 emit_note (NOTE_INSN_DELETED);
3916 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
3917 discover_nonconstant_array_refs ();
3919 targetm.expand_to_rtl_hook ();
3920 crtl->stack_alignment_needed = STACK_BOUNDARY;
3921 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
3922 crtl->stack_alignment_estimated = 0;
3923 crtl->preferred_stack_boundary = STACK_BOUNDARY;
3924 cfun->cfg->max_jumptable_ents = 0;
3926 /* Expand the variables recorded during gimple lowering. */
3927 timevar_push (TV_VAR_EXPAND);
3930 expand_used_vars ();
3932 var_seq = get_insns ();
3934 timevar_pop (TV_VAR_EXPAND);
3936 /* Honor stack protection warnings. */
3937 if (warn_stack_protect)
3939 if (cfun->calls_alloca)
3940 warning (OPT_Wstack_protector,
3941 "stack protector not protecting local variables: "
3942 "variable length buffer");
3943 if (has_short_buffer && !crtl->stack_protect_guard)
3944 warning (OPT_Wstack_protector,
3945 "stack protector not protecting function: "
3946 "all local arrays are less than %d bytes long",
3947 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
3950 /* Set up parameters and prepare for return, for the function. */
3951 expand_function_start (current_function_decl);
3953 /* If we emitted any instructions for setting up the variables,
3954 emit them before the FUNCTION_START note. */
3957 emit_insn_before (var_seq, parm_birth_insn);
3959 /* In expand_function_end we'll insert the alloca save/restore
3960 before parm_birth_insn. We've just inserted an alloca call.
3961 Adjust the pointer to match. */
3962 parm_birth_insn = var_seq;
3965 /* Now that we also have the parameter RTXs, copy them over to our
3967 for (i = 0; i < SA.map->num_partitions; i++)
3969 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
3971 if (TREE_CODE (var) != VAR_DECL
3972 && !SA.partition_to_pseudo[i])
3973 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
3974 gcc_assert (SA.partition_to_pseudo[i]);
3976 /* If this decl was marked as living in multiple places, reset
3977 this now to NULL. */
3978 if (DECL_RTL_IF_SET (var) == pc_rtx)
3979 SET_DECL_RTL (var, NULL);
3981 /* Some RTL parts really want to look at DECL_RTL(x) when x
3982 was a decl marked in REG_ATTR or MEM_ATTR. We could use
3983 SET_DECL_RTL here making this available, but that would mean
3984 selecting one of the potentially many RTLs for one DECL. Instead
3985 of doing that we simply reset the MEM_EXPR of the RTL in question,
3986 then nobody can get at it and hence nobody can call DECL_RTL on it. */
3987 if (!DECL_RTL_SET_P (var))
3989 if (MEM_P (SA.partition_to_pseudo[i]))
3990 set_mem_expr (SA.partition_to_pseudo[i], NULL);
3994 /* If this function is `main', emit a call to `__main'
3995 to run global initializers, etc. */
3996 if (DECL_NAME (current_function_decl)
3997 && MAIN_NAME_P (DECL_NAME (current_function_decl))
3998 && DECL_FILE_SCOPE_P (current_function_decl))
3999 expand_main_function ();
4001 /* Initialize the stack_protect_guard field. This must happen after the
4002 call to __main (if any) so that the external decl is initialized. */
4003 if (crtl->stack_protect_guard)
4004 stack_protect_prologue ();
4006 expand_phi_nodes (&SA);
4008 /* Register rtl specific functions for cfg. */
4009 rtl_register_cfg_hooks ();
4011 init_block = construct_init_block ();
4013 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4014 remaining edges later. */
4015 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4016 e->flags &= ~EDGE_EXECUTABLE;
4018 lab_rtx_for_bb = pointer_map_create ();
4019 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4020 bb = expand_gimple_basic_block (bb);
4022 if (MAY_HAVE_DEBUG_INSNS)
4023 expand_debug_locations ();
4025 execute_free_datastructures ();
4026 timevar_push (TV_OUT_OF_SSA);
4027 finish_out_of_ssa (&SA);
4028 timevar_pop (TV_OUT_OF_SSA);
4030 timevar_push (TV_POST_EXPAND);
4031 /* We are no longer in SSA form. */
4032 cfun->gimple_df->in_ssa_p = false;
4034 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4035 conservatively to true until they are all profile aware. */
4036 pointer_map_destroy (lab_rtx_for_bb);
4039 construct_exit_block ();
4040 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4041 insn_locators_finalize ();
4043 /* Zap the tree EH table. */
4044 set_eh_throw_stmt_table (cfun, NULL);
4046 rebuild_jump_labels (get_insns ());
4048 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4052 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4055 commit_one_edge_insertion (e);
4061 /* We're done expanding trees to RTL. */
4062 currently_expanding_to_rtl = 0;
4064 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4068 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4070 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4071 e->flags &= ~EDGE_EXECUTABLE;
4073 /* At the moment not all abnormal edges match the RTL
4074 representation. It is safe to remove them here as
4075 find_many_sub_basic_blocks will rediscover them.
4076 In the future we should get this fixed properly. */
4077 if ((e->flags & EDGE_ABNORMAL)
4078 && !(e->flags & EDGE_SIBCALL))
4085 blocks = sbitmap_alloc (last_basic_block);
4086 sbitmap_ones (blocks);
4087 find_many_sub_basic_blocks (blocks);
4088 sbitmap_free (blocks);
4089 purge_all_dead_edges ();
4093 expand_stack_alignment ();
4095 #ifdef ENABLE_CHECKING
4096 verify_flow_info ();
4099 /* There's no need to defer outputting this function any more; we
4100 know we want to output it. */
4101 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4103 /* Now that we're done expanding trees to RTL, we shouldn't have any
4104 more CONCATs anywhere. */
4105 generating_concat_p = 0;
4110 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4111 /* And the pass manager will dump RTL for us. */
4114 /* If we're emitting a nested function, make sure its parent gets
4115 emitted as well. Doing otherwise confuses debug info. */
4118 for (parent = DECL_CONTEXT (current_function_decl);
4119 parent != NULL_TREE;
4120 parent = get_containing_scope (parent))
4121 if (TREE_CODE (parent) == FUNCTION_DECL)
4122 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4125 /* We are now committed to emitting code for this function. Do any
4126 preparation, such as emitting abstract debug info for the inline
4127 before it gets mangled by optimization. */
4128 if (cgraph_function_possibly_inlined_p (current_function_decl))
4129 (*debug_hooks->outlining_inline_function) (current_function_decl);
4131 TREE_ASM_WRITTEN (current_function_decl) = 1;
4133 /* After expanding, the return labels are no longer needed. */
4134 return_label = NULL;
4135 naked_return_label = NULL;
4136 /* Tag the blocks with a depth number so that change_scope can find
4137 the common parent easily. */
4138 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4139 default_rtl_profile ();
4140 timevar_pop (TV_POST_EXPAND);
4144 struct rtl_opt_pass pass_expand =
4148 "expand", /* name */
4150 gimple_expand_cfg, /* execute */
4153 0, /* static_pass_number */
4154 TV_EXPAND, /* tv_id */
4155 PROP_ssa | PROP_gimple_leh | PROP_cfg
4156 | PROP_gimple_lcx, /* properties_required */
4157 PROP_rtl, /* properties_provided */
4158 PROP_ssa | PROP_trees, /* properties_destroyed */
4159 TODO_verify_ssa | TODO_verify_flow
4160 | TODO_verify_stmts, /* todo_flags_start */
4162 | TODO_ggc_collect /* todo_flags_finish */