/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "ssaexpand.h"
#include "insn-attr.h"	/* For INSN_SCHEDULING.  */
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt),
		gimple_assign_rhs2 (stmt),
		gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt),
		gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
	   && gimple_location (stmt) != EXPR_LOCATION (t))
	  || (gimple_block (stmt)
	      && currently_expanding_to_rtl
	      && EXPR_P (t)
	      && gimple_block (stmt) != TREE_BLOCK (t)))
	t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
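/* Illustrative note (added, not from the original sources): for a GIMPLE
   assignment "x = y + z" the RHS class is GIMPLE_BINARY_RHS, so the
   function above rebuilds the GENERIC tree
     build2 (PLUS_EXPR, TREE_TYPE (x), y, z)
   which can then be handed to the GENERIC expanders.  */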
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
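/* Illustrative note (added): SSAVAR maps the SSA name a_3 to its
   underlying VAR_DECL a, and is the identity on plain decls.  */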
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
	set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
	 doesn't run) record the place also in the base DECL if it's
	 a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
	{
	  tree var = SSA_NAME_VAR (t);
	  /* If we don't yet have something recorded, just record it now.  */
	  if (!DECL_RTL_SET_P (var))
	    SET_DECL_RTL (var, x);
	  /* If we have it set already to "multiple places" don't
	     change this.  */
	  else if (DECL_RTL (var) == pc_rtx)
	    ;
	  /* If we have something recorded and it's not the same place
	     as we want to record now, we have multiple partitions for the
	     same base variable, with different places.  We can't just
	     randomly choose one, hence we have to say that we don't know.
	     This only happens with optimization, and there var-tracking
	     will figure out the right thing.  */
	  else if (DECL_RTL (var) != x)
	    SET_DECL_RTL (var, pc_rtx);
	}
    }
  else
    SET_DECL_RTL (t, x);
}
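/* Illustrative scenario (added, not an original comment): at -O0 the
   partitions of a_1 and a_2, both based on variable a, typically get the
   same stack slot, which is recorded in DECL_RTL (a); with optimization
   they may land in different places, and DECL_RTL (a) is then set to the
   pc_rtx marker meaning "multiple places, unknown".  */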
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The indices of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
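/* Worked example (added): with a 16-byte PREFERRED_STACK_BOUNDARY and a
   STARTING_FRAME_OFFSET of 8, expand_used_vars computes off = 8 % 16 = 8
   and frame_phase = 16 - 8 = 8, so offsets are biased by 8 bytes before
   rounding to the preferred boundary.  */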
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't honor given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
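/* Worked example (added): on a downward-growing frame with frame_phase 0,
   frame_offset -4, size 8 and align 8, new_frame_offset becomes
   -4 - 8 = -12, and -12 & -8 rounds down to -16, so the new object lives
   at offset -16 with 4 bytes of padding above it.  */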
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  v = &stack_vars[stack_vars_num];

  v->decl = decl;
  v->offset = 0;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luids X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luids X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
	return true;

  return false;
}
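/* E.g. (added illustration): "struct s { int i; union { short h; int w; }
   u; }" contains a union, as does an array of such structs; both are
   reported as union-containing by the walk above.  */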
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union-containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
	{
	  tree type_j = TREE_TYPE (stack_vars[j].decl);
	  bool aggr_j = AGGREGATE_TYPE_P (type_j);
	  if (aggr_i != aggr_j
	      /* Either the objects conflict by means of type based
		 aliasing rules, or we need to add a conflict.  */
	      || !objects_must_conflict_p (type_i, type_j)
	      /* In case the types do not conflict ensure that access
		 to elements will conflict.  In case of unions we have
		 to be careful as type based aliasing rules may say
		 access to the same memory does not conflict.  So play
		 safe and add a conflict in this case.  */
	      || contains_union)
	    add_stack_var_conflict (i, j);
	}
    }
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */

  /* Tertiary compare on true alignment, decreasing.  */

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
	return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
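/* Illustrative note (added): on a target whose MAX_SUPPORTED_STACK_ALIGNMENT
   is 16 bytes, a 32-byte-aligned variable is "large" and sorts before every
   "small"-alignment variable; within each class the size, alignment and
   UID keys above decide the order deterministically.  */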
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
			       struct pointer_map_t *decls_to_partitions,
			       struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
	 visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
	 || !bitmap_bit_p (temp, i))
	&& (part = (bitmap *) pointer_map_contains (decls_to_partitions,
						    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
	  || stack_vars[i].next == EOC)
	continue;

      if (!decls_to_partitions)
	{
	  decls_to_partitions = pointer_map_create ();
	  cfun->gimple_df->decls_to_pointers = pointer_map_create ();
	}

      /* Create an SSA_NAME that points to the partition for use
	 as base during alias-oracle queries on RTL for bases that
	 have been partitioned.  */
      if (var == NULL_TREE)
	var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
	 points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  tree decl = stack_vars[j].decl;
	  unsigned int uid = DECL_PT_UID (decl);
	  /* We should never end up partitioning SSA names (though they
	     may end up on the stack).  Neither should we allocate stack
	     space to something that is unused and thus unreferenced, except
	     for -O0 where we are preserving even unreferenced variables.  */
	  gcc_assert (DECL_P (decl)
		      && (!optimize
			  || referenced_var_lookup (cfun, DECL_UID (decl))));
	  bitmap_set_bit (part, uid);
	  *((bitmap *) pointer_map_insert (decls_to_partitions,
					   (void *)(size_t) uid)) = part;
	  *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
					 decl)) = name;
	}

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  struct ptr_info_def *pi;

	  if (name
	      && POINTER_TYPE_P (TREE_TYPE (name))
	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
					   visited, temp);
	}

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
				     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the virtual stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
	add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	    offset(B) = O
	    O += size(B)
	    S -= size(B)
	  }
	}
*/
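/* Worked example (added): with sorted objects of sizes 16, 8 and 4 and no
   conflicts, A is the 16-byte object with S = 16; the 8-byte object is
   placed at offset 0 (S becomes 8), then the 4-byte object at offset 8
   (S becomes 4), so all three share one 16-byte partition.  */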
static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
	{
	  size_t j = stack_vars_sorted[sj];
	  HOST_WIDE_INT jsize = stack_vars[j].size;
	  unsigned int jalign = stack_vars[j].alignb;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)
	    continue;

	  /* Ignore objects too large for the remaining space.  */
	  if (isize < jsize)
	    continue;

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))
	    continue;

	  /* Do not mix objects of "small" (supported) alignment
	     and "large" (unsupported) alignment.  */
	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
	    continue;

	  /* Refine the remaining space check to include alignment.  */
	  if (offset & (jalign - 1))
	    {
	      HOST_WIDE_INT toff = offset;
	      toff += jalign - 1;
	      toff &= -(HOST_WIDE_INT)jalign;
	      if (isize - (toff - offset) < jsize)
		continue;

	      isize -= toff - offset;
	      offset = toff;
	    }

	  /* UNION the objects, placing J at OFFSET.  */
	  union_stack_vars (i, j, offset);

	  isize -= jsize;
	  if (isize == 0)
	    break;

	  offset += jsize;
	}
    }

  update_alias_info_with_stack_vars ();
}
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
	       " align %u\n", (unsigned long) i, stack_vars[i].size,
	       stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	  fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
		   stack_vars[j].offset);
	}
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
			 HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
	 If it is we generate stack slots only accidentally so it isn't as
	 important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
	offset -= frame_phase;
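      /* Note (added): offset & -offset isolates the lowest set bit of
	 OFFSET, i.e. the largest power of two dividing it; e.g. for an
	 offset of 24 this yields 8, so only 8-byte alignment can be
	 guaranteed by the slot's placement.  */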
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
	align = base_align;

      /* One would think that we could assert that we're not decreasing
	 alignment here, but (at least) the i386 port does exactly this
	 via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
	{
	  unsigned alignb;

	  i = stack_vars_sorted[si];
	  alignb = stack_vars[i].alignb;

	  /* Stop when we get to the first decl with "small" alignment.  */
	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    break;

	  /* Skip variables that aren't partition representatives.  */
	  if (stack_vars[i].representative != i)
	    continue;

	  /* Skip variables that have already had rtl assigned.  See also
	     add_stack_var where we perpetrate this pc_rtx hack.  */
	  decl = stack_vars[i].decl;
	  if ((TREE_CODE (decl) == SSA_NAME
	       ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
	       : DECL_RTL (decl)) != pc_rtx)
	    continue;

	  large_size += alignb - 1;
	  large_size &= -(HOST_WIDE_INT)alignb;
	  large_size += stack_vars[i].size;
	}

      /* If there were any, allocate space.  */
      if (large_size > 0)
	large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
						   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
	   ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
	   : DECL_RTL (decl)) != pc_rtx)
	continue;

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (decl))
	continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	{
	  offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
	  base = virtual_stack_vars_rtx;
	  base_align = crtl->max_used_stack_slot_alignment;
	}
      else
	{
	  /* Large alignment is only processed in the last pass.  */
	  if (pred)
	    continue;
	  gcc_assert (large_base != NULL);

	  large_alloc += alignb - 1;
	  large_alloc &= -(HOST_WIDE_INT)alignb;
	  offset = large_alloc;
	  large_alloc += stack_vars[i].size;

	  base = large_base;
	  base_align = large_align;
	}

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
	  expand_one_stack_var_at (stack_vars[j].decl,
				   base, base_align,
				   stack_vars[j].offset + offset);
	}
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */

static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
	set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = get_decl_align_unit (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
			   crtl->max_used_stack_slot_alignment, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
	 we conservatively assume it will be on stack even if VAR is
	 eventually put into register after RA pass.  For non-automatic
	 variables, which won't be on stack, we collect alignment of
	 type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
				   TYPE_MODE (TREE_TYPE (var)),
				   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
	   or variables which were assigned a stack slot already by
	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
	   changed from the offset chosen to it.  */
	align = crtl->stack_alignment_estimated;
      else
	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
	 it, which means that the in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
	align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
	 realign decision is made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
		  || (!DECL_EXTERNAL (var)
		      && !DECL_HAS_VALUE_EXPR_P (var)
		      && !TREE_STATIC (var)
		      && TREE_TYPE (var) != error_mark_node
		      && !DECL_HARD_REGISTER (var)
		      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
	expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
	expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
	expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
	{
	  error ("size of variable %q+D is too large", var);
	  expand_one_error_var (var);
	}
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
	expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
	for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
	  add_stack_var_conflict (i, j);
    }
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8
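/* E.g. (added illustration): with the default ssp-buffer-size of 8,
   "char buf[64]" classifies as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY,
   while "char buf[4]" yields SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY.  */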
static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !host_integerp (TYPE_SIZE_UNIT (type), 1))
	    len = max;
	  else
	    len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
	if (ph_i != phase[j])
	  add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			   VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  tree old_cur_fun_decl = current_function_decl;
  referenced_var_iterator rvi;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  current_function_decl = node->decl;
  push_cfun (fn);

  gcc_checking_assert (gimple_referenced_vars (fn));
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
    size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
	stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
	expand_one_var (var, true, true);
      else
	{
	  /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
	     contain the default def (representing the parm or result itself)
	     we don't do anything here.  But those which don't contain the
	     default def (representing a temporary based on the parm/result)
	     we need to allocate space just like for normal VAR_DECLs.  */
	  if (!bitmap_bit_p (SA.partition_has_default_def, i))
	    {
	      expand_one_var (var, true, true);
	      gcc_assert (SA.partition_to_pseudo[i]);
	    }
	}
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
	{
	  TREE_USED (var) = 0;
	  goto next;
	}
      /* We didn't set a block for static or extern because it's hard
	 to tell the difference between a global variable (re)declared
	 in a local scope, and one that's really declared there to
	 begin with.  And it doesn't really matter much, since we're
	 not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	expand_now = true;

      /* If the variable is not associated with any block, then it
	 was created by the optimizers, and could be live anywhere
	 in the function.  */
      else if (TREE_USED (var))
	expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
	 this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
	expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
	{
	  rtx rtl = DECL_RTL_IF_SET (var);

	  /* Keep artificial non-ignored vars in cfun->local_decls
	     chain until instantiate_decls.  */
	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
	    add_local_decl (cfun, var);
	  else if (rtl == NULL_RTX)
	    /* If rtl isn't set yet, which can happen e.g. with
	       -fstack-protector, retry before returning from this
	       function.  */
	    VEC_safe_push (tree, heap, maybe_local_decls, var);
	}
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
		       ^
		       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
	 alias sets may be assigned the same address.  Add conflicts to
	 reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
	  && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
	 array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
	 earlier, such that we naturally see these variables first,
	 and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
	{
	  /* Phase 1 contains only character arrays.  */
	  expand_stack_vars (stack_protect_decl_phase_1);

	  /* Phase 2 contains other kinds of arrays.  */
	  if (flag_stack_protect == 2)
	    expand_stack_vars (stack_protect_decl_phase_2);
	}

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
	 chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
	add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
	frame_offset += align - 1;
      frame_offset &= -align;
    }
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
			 TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
	break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
	break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
	 insert on the remaining edge we potentially will insert
	 it at the end of this block (if the dest block isn't feasible)
	 in order to avoid splitting the edge.  This insertion will take
	 place in front of the last jump.  But we might have emitted
	 multiple jumps (conditional and one unconditional) to the
	 same destination.  Inserting in front of the last one then
	 is a problem.  See PR 40021.  We fix this by deleting all
	 jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
	 confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
	{
	  insn = PREV_INSN (insn);
	  if (JUMP_P (NEXT_INSN (insn)))
	    {
	      if (!any_condjump_p (NEXT_INSN (insn)))
		{
		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
		}
	      delete_insn (NEXT_INSN (insn));
	    }
	}
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
	 ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
	{
	  enum tree_code code2 = gimple_assign_rhs_code (second);
	  if (TREE_CODE_CLASS (code2) == tcc_comparison)
	    {
	      code = code2;
	      op0 = gimple_assign_rhs1 (second);
	      op1 = gimple_assign_rhs2 (second);
	    }
	  /* If jumps are cheap turn some more codes into
	     jumpy sequences.  */
	  else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
	    {
	      if ((code2 == BIT_AND_EXPR
		   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
		   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
		  || code2 == TRUTH_AND_EXPR)
		{
		  code = TRUTH_ANDIF_EXPR;
		  op0 = gimple_assign_rhs1 (second);
		  op1 = gimple_assign_rhs2 (second);
		}
	      else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
		{
		  code = TRUTH_ORIF_EXPR;
		  op0 = gimple_assign_rhs1 (second);
		  op1 = gimple_assign_rhs2 (second);
		}
	    }
	}
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
		true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
	{
	  set_curr_insn_source_location (true_edge->goto_locus);
	  set_curr_insn_block (true_edge->goto_block);
	  true_edge->goto_locus = curr_insn_locator ();
	}
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
		   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
	{
	  set_curr_insn_source_location (false_edge->goto_locus);
	  set_curr_insn_block (false_edge->goto_block);
	  false_edge->goto_locus = curr_insn_locator ();
	}
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
	    true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gimple stmt)
{
  tree exp;
  tree lhs = gimple_call_lhs (stmt);
  size_t i;
  bool builtin_p;
  tree decl;

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple def;
      /* TER addresses into arguments of builtin functions so we have a
	 chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
	  && TREE_CODE (arg) == SSA_NAME
	  && (def = get_gimple_for_ssa_name (arg))
	  && gimple_assign_rhs_code (def) == ADDR_EXPR)
	arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
}
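/* Illustrative note (added): for the GIMPLE call "x = foo (a, b)" this
   builds a GENERIC CALL_EXPR with CALL_EXPR_FN taken from the statement
   and two CALL_EXPR_ARGs, and expands it through expand_assignment because
   the call has an LHS.  */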
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is, no tailcalls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple stmt)
{
  tree op0;
  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
	expand_goto (op0);
      else
	expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (stmt));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (stmt);
      break;
    case GIMPLE_CALL:
      expand_call_stmt (stmt);
      break;

    case GIMPLE_RETURN:
      op0 = gimple_return_retval (stmt);

      if (op0 && op0 != error_mark_node)
	{
	  tree result = DECL_RESULT (current_function_decl);

	  /* If we are not returning the current function's RESULT_DECL,
	     build an assignment to it.  */
	  if (op0 != result)
	    {
	      /* I believe that a function's RESULT_DECL is unique.  */
	      gcc_assert (TREE_CODE (op0) != RESULT_DECL);

	      /* ??? We'd like to use simply expand_assignment here,
		 but this fails if the value is of BLKmode but the return
		 decl is a register.  expand_return has special handling
		 for this combination, which eventually should move
		 to common code.  See comments there.  Until then, let's
		 build a modify expression :-/  */
	      op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
			    result, op0);
	    }
	}
      if (!op0)
	expand_null_return ();
      else
	expand_return (op0);
      break;

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);

	/* Tree expand used to fiddle with |= and &= of two bitfield
	   COMPONENT_REFs here.  This can't happen with gimple, the LHS
	   of binary assigns must be a gimple reg.  */

	if (TREE_CODE (lhs) != SSA_NAME
	    || get_gimple_rhs_class (gimple_expr_code (stmt))
	       == GIMPLE_SINGLE_RHS)
	  {
	    tree rhs = gimple_assign_rhs1 (stmt);
	    gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
			== GIMPLE_SINGLE_RHS);
	    if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
	      SET_EXPR_LOCATION (rhs, gimple_location (stmt));
	    expand_assignment (lhs, rhs,
			       gimple_assign_nontemporal_move_p (stmt));
	  }
	else
	  {
	    rtx target, temp;
	    bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
	    struct separate_ops ops;
	    bool promoted = false;

	    target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
	    if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
	      promoted = true;

	    ops.code = gimple_assign_rhs_code (stmt);
	    ops.type = TREE_TYPE (lhs);
	    switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
	      {
		case GIMPLE_TERNARY_RHS:
		  ops.op2 = gimple_assign_rhs3 (stmt);
		  /* Fallthru */
		case GIMPLE_BINARY_RHS:
		  ops.op1 = gimple_assign_rhs2 (stmt);
		  /* Fallthru */
		case GIMPLE_UNARY_RHS:
		  ops.op0 = gimple_assign_rhs1 (stmt);
		  break;
		default:
		  gcc_unreachable ();
	      }
	    ops.location = gimple_location (stmt);

	    /* If we want to use a nontemporal store, force the value to
	       register first.  If we store into a promoted register,
	       don't directly expand to target.  */
	    temp = nontemporal || promoted ? NULL_RTX : target;
	    temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
				       EXPAND_NORMAL);

	    if (temp == target)
	      ;
	    else if (promoted)
	      {
		int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
		/* If TEMP is a VOIDmode constant, use convert_modes to make
		   sure that we properly convert it.  */
		if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
		  {
		    temp = convert_modes (GET_MODE (target),
					  TYPE_MODE (ops.type),
					  temp, unsignedp);
		    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
					  GET_MODE (target), temp, unsignedp);
		  }

		convert_move (SUBREG_REG (target), temp, unsignedp);
	      }
	    else if (nontemporal && emit_storent_insn (target, temp))
	      ;
	    else
	      {
		temp = force_operand (temp, target);
		if (temp != target)
		  emit_move_insn (target, temp);
	      }
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx
expand_gimple_stmt (gimple stmt)
{
  rtx last;
  int lp_nr;
  location_t saved_location = input_location;

  last = get_last_insn ();

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (gimple_has_location (stmt))
    {
      input_location = gimple_location (stmt);
      set_curr_insn_source_location (input_location);

      /* Record where the insns produced belong.  */
      set_curr_insn_block (gimple_block (stmt));
    }

  expand_gimple_stmt_1 (stmt);
  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && insn_could_throw_p (insn))
	    make_reg_eh_region_note (insn, 0, lp_nr);
	}
    }

  return last;
}
2107 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
2108 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
2109 generated a tail call (something that might be denied by the ABI
2110 rules governing the call; see calls.c).
2112 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
2113 can still reach the rest of BB. The case here is __builtin_sqrt,
2114 where the NaN result goes through the external function (with a
2115 tailcall) and the normal result happens via a sqrt instruction. */
2118 expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
2126 last2 = last = expand_gimple_stmt (stmt);
2128 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
2129 if (CALL_P (last) && SIBLING_CALL_P (last))
2132 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2134 *can_fallthru = true;
2138 /* ??? Wouldn't it be better to just reset any pending stack adjust?
2139 Any instructions emitted here are about to be deleted. */
2140 do_pending_stack_adjust ();
2142 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
2143 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
2144 EH or abnormal edges, we shouldn't have created a tail call in
2145 the first place. So it seems to me we should just be removing
2146 all edges here, or redirecting the existing fallthru edge to
2152 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2154 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
2156 if (e->dest != EXIT_BLOCK_PTR)
2158 e->dest->count -= e->count;
2159 e->dest->frequency -= EDGE_FREQUENCY (e);
2160 if (e->dest->count < 0)
2162 if (e->dest->frequency < 0)
2163 e->dest->frequency = 0;
2166 probability += e->probability;
2173 /* This is somewhat ugly: the call_expr expander often emits instructions
2174 after the sibcall (to perform the function return). These confuse the
2175 find_many_sub_basic_blocks code, so we need to get rid of them. */
2176 last = NEXT_INSN (last);
2177 gcc_assert (BARRIER_P (last));
2179 *can_fallthru = false;
2180 while (NEXT_INSN (last))
2182 /* For instance the sqrt builtin expander expands an if with a
2183 sibcall in the then-branch and a label for the else-branch. */
2184 if (LABEL_P (NEXT_INSN (last)))
2186 *can_fallthru = true;
2189 delete_insn (NEXT_INSN (last));
2192 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
2193 e->probability += probability;
2196 update_bb_for_insn (bb);
2198 if (NEXT_INSN (last))
2200 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2203 if (BARRIER_P (last))
2204 BB_END (bb) = PREV_INSN (last);
2207 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2212 /* Return the difference between the floor and the truncated result of
2213 a signed division by OP1 with remainder MOD. */
2215 floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2217 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
2218 return gen_rtx_IF_THEN_ELSE
2219 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2220 gen_rtx_IF_THEN_ELSE
2221 (mode, gen_rtx_LT (BImode,
2222 gen_rtx_DIV (mode, op1, mod),
2224 constm1_rtx, const0_rtx),
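/* A worked example (illustrative only): for -7 / 2, truncation gives
   quotient -3 with MOD = -1. Since OP1 / MOD = 2 / -1 = -2 < 0, the
   adjustment is -1, and -3 + -1 = -4 = floor (-3.5) as expected. */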
2228 /* Return the difference between the ceil and the truncated result of
2229 a signed division by OP1 with remainder MOD. */
2231 ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2233 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
2234 return gen_rtx_IF_THEN_ELSE
2235 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2236 gen_rtx_IF_THEN_ELSE
2237 (mode, gen_rtx_GT (BImode,
2238 gen_rtx_DIV (mode, op1, mod),
2240 const1_rtx, const0_rtx),
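/* Illustrative check: for 7 / 2, truncation gives 3 with MOD = 1 and
   OP1 / MOD = 2 > 0, so the adjustment is 1 and the result is
   4 = ceil (3.5). For -7 / 2, OP1 / MOD = -2 is not positive, so the
   adjustment is 0 and the truncated -3 already equals ceil (-3.5). */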
2244 /* Return the difference between the ceil and the truncated result of
2245 an unsigned division by OP1 with remainder MOD. */
2247 ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
2249 /* (mod != 0 ? 1 : 0) */
2250 return gen_rtx_IF_THEN_ELSE
2251 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
2252 const1_rtx, const0_rtx);
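/* E.g. (illustrative only) 7u / 2 truncates to 3 with MOD = 1; as MOD
   is nonzero the adjustment is 1, yielding the ceiling result 4. */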
2255 /* Return the difference between the rounded and the truncated result
2256 of a signed division by OP1 with remainder MOD. Halfway cases are
2257 rounded away from zero, rather than to the nearest even number. */
2259 round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2261 /* (abs (mod) >= abs (op1) - abs (mod)
2262 ? (op1 / mod > 0 ? 1 : -1)
2264 return gen_rtx_IF_THEN_ELSE
2265 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
2266 gen_rtx_MINUS (mode,
2267 gen_rtx_ABS (mode, op1),
2268 gen_rtx_ABS (mode, mod))),
2269 gen_rtx_IF_THEN_ELSE
2270 (mode, gen_rtx_GT (BImode,
2271 gen_rtx_DIV (mode, op1, mod),
2273 const1_rtx, constm1_rtx),
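/* A worked example (illustrative only): for -7 / 2, truncation gives -3
   with MOD = -1. Then abs (MOD) = 1 equals abs (OP1) - abs (MOD) = 1, a
   halfway case, and OP1 / MOD = -2 is not positive, so the adjustment is
   -1 and the result is -4, i.e. -3.5 rounded away from zero. For 7 / 3,
   abs (MOD) = 1 < 2, so the adjustment is 0 and 7 / 3 rounds to 2. */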
2277 /* Return the difference between the rounded and the truncated result
2278 of an unsigned division by OP1 with remainder MOD. Halfway cases
2279 are rounded away from zero, rather than to the nearest even
2282 round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
2284 /* (mod >= op1 - mod ? 1 : 0) */
2285 return gen_rtx_IF_THEN_ELSE
2286 (mode, gen_rtx_GE (BImode, mod,
2287 gen_rtx_MINUS (mode, op1, mod)),
2288 const1_rtx, const0_rtx);
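/* E.g. (illustrative only) 7u / 2 gives MOD = 1 and OP1 - MOD = 1, so
   MOD >= OP1 - MOD holds and the adjustment is 1: 3.5 rounds to 4. */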
2291 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
2295 convert_debug_memory_address (enum machine_mode mode, rtx x,
2298 enum machine_mode xmode = GET_MODE (x);
2300 #ifndef POINTERS_EXTEND_UNSIGNED
2301 gcc_assert (mode == Pmode
2302 || mode == targetm.addr_space.address_mode (as));
2303 gcc_assert (xmode == mode || xmode == VOIDmode);
2306 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2307 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
2309 gcc_assert (mode == address_mode || mode == pointer_mode);
2311 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
2314 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
2315 x = simplify_gen_subreg (mode, x, xmode,
2316 subreg_lowpart_offset
2318 else if (POINTERS_EXTEND_UNSIGNED > 0)
2319 x = gen_rtx_ZERO_EXTEND (mode, x);
2320 else if (!POINTERS_EXTEND_UNSIGNED)
2321 x = gen_rtx_SIGN_EXTEND (mode, x);
2324 switch (GET_CODE (x))
2327 if ((SUBREG_PROMOTED_VAR_P (x)
2328 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
2329 || (GET_CODE (SUBREG_REG (x)) == PLUS
2330 && REG_P (XEXP (SUBREG_REG (x), 0))
2331 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
2332 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
2333 && GET_MODE (SUBREG_REG (x)) == mode)
2334 return SUBREG_REG (x);
2337 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
2338 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
2341 temp = shallow_copy_rtx (x);
2342 PUT_MODE (temp, mode);
2345 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2347 temp = gen_rtx_CONST (mode, temp);
2351 if (CONST_INT_P (XEXP (x, 1)))
2353 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
2355 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
2361 /* Don't know how to express ptr_extend as an operation in debug info. */
2364 #endif /* POINTERS_EXTEND_UNSIGNED */
2369 /* Return an RTX equivalent to the value of the tree expression
2373 expand_debug_expr (tree exp)
2375 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
2376 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
2377 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
2380 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
2382 case tcc_expression:
2383 switch (TREE_CODE (exp))
2387 case WIDEN_MULT_PLUS_EXPR:
2388 case WIDEN_MULT_MINUS_EXPR:
2392 case TRUTH_ANDIF_EXPR:
2393 case TRUTH_ORIF_EXPR:
2394 case TRUTH_AND_EXPR:
2396 case TRUTH_XOR_EXPR:
2399 case TRUTH_NOT_EXPR:
2408 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
2415 case tcc_comparison:
2416 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2423 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2433 case tcc_exceptional:
2434 case tcc_declaration:
2440 switch (TREE_CODE (exp))
2443 if (!lookup_constant_def (exp))
2445 if (strlen (TREE_STRING_POINTER (exp)) + 1
2446 != (size_t) TREE_STRING_LENGTH (exp))
2448 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
2449 op0 = gen_rtx_MEM (BLKmode, op0);
2450 set_mem_attributes (op0, exp, 0);
2453 /* Fall through... */
2458 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
2462 gcc_assert (COMPLEX_MODE_P (mode));
2463 op0 = expand_debug_expr (TREE_REALPART (exp));
2464 op1 = expand_debug_expr (TREE_IMAGPART (exp));
2465 return gen_rtx_CONCAT (mode, op0, op1);
2467 case DEBUG_EXPR_DECL:
2468 op0 = DECL_RTL_IF_SET (exp);
2473 op0 = gen_rtx_DEBUG_EXPR (mode);
2474 DEBUG_EXPR_TREE_DECL (op0) = exp;
2475 SET_DECL_RTL (exp, op0);
2485 op0 = DECL_RTL_IF_SET (exp);
2487 /* This decl was probably optimized away. */
2490 if (TREE_CODE (exp) != VAR_DECL
2491 || DECL_EXTERNAL (exp)
2492 || !TREE_STATIC (exp)
2494 || DECL_HARD_REGISTER (exp)
2495 || mode == VOIDmode)
2498 op0 = make_decl_rtl_for_debug (exp);
2500 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
2501 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
2505 op0 = copy_rtx (op0);
2507 if (GET_MODE (op0) == BLKmode
2508 /* If op0 is not BLKmode, but BLKmode is, adjust_mode
2509 below would ICE. While it is likely a FE bug,
2510 try to be robust here. See PR43166. */
2512 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
2514 gcc_assert (MEM_P (op0));
2515 op0 = adjust_address_nv (op0, mode, 0);
2526 enum machine_mode inner_mode = GET_MODE (op0);
2528 if (mode == inner_mode)
2531 if (inner_mode == VOIDmode)
2533 if (TREE_CODE (exp) == SSA_NAME)
2534 inner_mode = TYPE_MODE (TREE_TYPE (exp));
2536 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
2537 if (mode == inner_mode)
2541 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
2543 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
2544 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
2545 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
2546 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
2548 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
2550 else if (FLOAT_MODE_P (mode))
2552 gcc_assert (TREE_CODE (exp) != SSA_NAME);
2553 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
2554 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
2556 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
2558 else if (FLOAT_MODE_P (inner_mode))
2561 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
2563 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
2565 else if (CONSTANT_P (op0)
2566 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
2567 op0 = simplify_gen_subreg (mode, op0, inner_mode,
2568 subreg_lowpart_offset (mode,
2570 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
2571 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
2573 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
2575 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
2582 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
2586 if (TREE_CODE (exp) == MEM_REF)
2588 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
2589 || (GET_CODE (op0) == PLUS
2590 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
2591 /* (mem (debug_implicit_ptr)) might confuse aliasing.
2592 Instead just use get_inner_reference. */
2595 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
2596 if (!op1 || !CONST_INT_P (op1))
2599 op0 = plus_constant (op0, INTVAL (op1));
2602 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2603 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2605 as = ADDR_SPACE_GENERIC;
2607 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2609 if (op0 == NULL_RTX)
2612 op0 = gen_rtx_MEM (mode, op0);
2613 set_mem_attributes (op0, exp, 0);
2614 set_mem_addr_space (op0, as);
2618 case TARGET_MEM_REF:
2619 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
2620 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
2623 op0 = expand_debug_expr
2624 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
2628 if (POINTER_TYPE_P (TREE_TYPE (exp)))
2629 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
2631 as = ADDR_SPACE_GENERIC;
2633 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
2635 if (op0 == NULL_RTX)
2638 op0 = gen_rtx_MEM (mode, op0);
2640 set_mem_attributes (op0, exp, 0);
2641 set_mem_addr_space (op0, as);
2647 case ARRAY_RANGE_REF:
2652 case VIEW_CONVERT_EXPR:
2654 enum machine_mode mode1;
2655 HOST_WIDE_INT bitsize, bitpos;
2658 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
2659 &mode1, &unsignedp, &volatilep, false);
2665 orig_op0 = op0 = expand_debug_expr (tem);
2672 enum machine_mode addrmode, offmode;
2677 op0 = XEXP (op0, 0);
2678 addrmode = GET_MODE (op0);
2679 if (addrmode == VOIDmode)
2682 op1 = expand_debug_expr (offset);
2686 offmode = GET_MODE (op1);
2687 if (offmode == VOIDmode)
2688 offmode = TYPE_MODE (TREE_TYPE (offset));
2690 if (addrmode != offmode)
2691 op1 = simplify_gen_subreg (addrmode, op1, offmode,
2692 subreg_lowpart_offset (addrmode,
2695 /* Don't use offset_address here, we don't need a
2696 recognizable address, and we don't want to generate
2698 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
2703 if (mode1 == VOIDmode)
2705 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
2706 if (bitpos >= BITS_PER_UNIT)
2708 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
2709 bitpos %= BITS_PER_UNIT;
2711 else if (bitpos < 0)
2714 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
2715 op0 = adjust_address_nv (op0, mode1, units);
2716 bitpos += units * BITS_PER_UNIT;
2718 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
2719 op0 = adjust_address_nv (op0, mode, 0);
2720 else if (GET_MODE (op0) != mode1)
2721 op0 = adjust_address_nv (op0, mode1, 0);
2723 op0 = copy_rtx (op0);
2724 if (op0 == orig_op0)
2725 op0 = shallow_copy_rtx (op0);
2726 set_mem_attributes (op0, exp, 0);
2729 if (bitpos == 0 && mode == GET_MODE (op0))
2735 if (GET_MODE (op0) == BLKmode)
2738 if ((bitpos % BITS_PER_UNIT) == 0
2739 && bitsize == GET_MODE_BITSIZE (mode1))
2741 enum machine_mode opmode = GET_MODE (op0);
2743 if (opmode == VOIDmode)
2744 opmode = TYPE_MODE (TREE_TYPE (tem));
2746 /* This condition may hold if we're expanding the address
2747 right past the end of an array that turned out not to
2748 be addressable (i.e., the address was only computed in
2749 debug stmts). The gen_subreg below would rightfully
2750 crash, and the address doesn't really exist, so just
2752 if (bitpos >= GET_MODE_BITSIZE (opmode))
2755 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
2756 return simplify_gen_subreg (mode, op0, opmode,
2757 bitpos / BITS_PER_UNIT);
2760 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
2761 && TYPE_UNSIGNED (TREE_TYPE (exp))
2763 : ZERO_EXTRACT, mode,
2764 GET_MODE (op0) != VOIDmode
2766 : TYPE_MODE (TREE_TYPE (tem)),
2767 op0, GEN_INT (bitsize), GEN_INT (bitpos));
2771 return gen_rtx_ABS (mode, op0);
2774 return gen_rtx_NEG (mode, op0);
2777 return gen_rtx_NOT (mode, op0);
2781 return gen_rtx_UNSIGNED_FLOAT (mode, op0);
2783 return gen_rtx_FLOAT (mode, op0);
2785 case FIX_TRUNC_EXPR:
2787 return gen_rtx_UNSIGNED_FIX (mode, op0);
2789 return gen_rtx_FIX (mode, op0);
2791 case POINTER_PLUS_EXPR:
2792 /* For the rare target where pointers are not the same size as
2793 size_t, we need to check for mis-matched modes and correct
2796 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
2797 && GET_MODE (op0) != GET_MODE (op1))
2799 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
2800 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
2802 /* We always sign-extend, regardless of the signedness of
2803 the operand, because the operand is always unsigned
2804 here even if the original C expression is signed. */
2805 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
2809 return gen_rtx_PLUS (mode, op0, op1);
2812 return gen_rtx_MINUS (mode, op0, op1);
2815 return gen_rtx_MULT (mode, op0, op1);
2818 case TRUNC_DIV_EXPR:
2819 case EXACT_DIV_EXPR:
2821 return gen_rtx_UDIV (mode, op0, op1);
2823 return gen_rtx_DIV (mode, op0, op1);
2825 case TRUNC_MOD_EXPR:
2827 return gen_rtx_UMOD (mode, op0, op1);
2829 return gen_rtx_MOD (mode, op0, op1);
2831 case FLOOR_DIV_EXPR:
2833 return gen_rtx_UDIV (mode, op0, op1);
2836 rtx div = gen_rtx_DIV (mode, op0, op1);
2837 rtx mod = gen_rtx_MOD (mode, op0, op1);
2838 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2839 return gen_rtx_PLUS (mode, div, adj);
2842 case FLOOR_MOD_EXPR:
2844 return gen_rtx_UMOD (mode, op0, op1);
2847 rtx mod = gen_rtx_MOD (mode, op0, op1);
2848 rtx adj = floor_sdiv_adjust (mode, mod, op1);
2849 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2850 return gen_rtx_PLUS (mode, mod, adj);
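/* I.e. FLOOR_MOD = MOD - ADJ * OP1. Checking with -7 mod 2
   (illustrative only): truncation gives MOD = -1, the floor adjustment
   is -1, and -1 - 2 * -1 = 1, the flooring remainder. */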
2856 rtx div = gen_rtx_UDIV (mode, op0, op1);
2857 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2858 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2859 return gen_rtx_PLUS (mode, div, adj);
2863 rtx div = gen_rtx_DIV (mode, op0, op1);
2864 rtx mod = gen_rtx_MOD (mode, op0, op1);
2865 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2866 return gen_rtx_PLUS (mode, div, adj);
2872 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2873 rtx adj = ceil_udiv_adjust (mode, mod, op1);
2874 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2875 return gen_rtx_PLUS (mode, mod, adj);
2879 rtx mod = gen_rtx_MOD (mode, op0, op1);
2880 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
2881 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2882 return gen_rtx_PLUS (mode, mod, adj);
2885 case ROUND_DIV_EXPR:
2888 rtx div = gen_rtx_UDIV (mode, op0, op1);
2889 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2890 rtx adj = round_udiv_adjust (mode, mod, op1);
2891 return gen_rtx_PLUS (mode, div, adj);
2895 rtx div = gen_rtx_DIV (mode, op0, op1);
2896 rtx mod = gen_rtx_MOD (mode, op0, op1);
2897 rtx adj = round_sdiv_adjust (mode, mod, op1);
2898 return gen_rtx_PLUS (mode, div, adj);
2901 case ROUND_MOD_EXPR:
2904 rtx mod = gen_rtx_UMOD (mode, op0, op1);
2905 rtx adj = round_udiv_adjust (mode, mod, op1);
2906 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2907 return gen_rtx_PLUS (mode, mod, adj);
2911 rtx mod = gen_rtx_MOD (mode, op0, op1);
2912 rtx adj = round_sdiv_adjust (mode, mod, op1);
2913 adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
2914 return gen_rtx_PLUS (mode, mod, adj);
2918 return gen_rtx_ASHIFT (mode, op0, op1);
2922 return gen_rtx_LSHIFTRT (mode, op0, op1);
2924 return gen_rtx_ASHIFTRT (mode, op0, op1);
2927 return gen_rtx_ROTATE (mode, op0, op1);
2930 return gen_rtx_ROTATERT (mode, op0, op1);
2934 return gen_rtx_UMIN (mode, op0, op1);
2936 return gen_rtx_SMIN (mode, op0, op1);
2940 return gen_rtx_UMAX (mode, op0, op1);
2942 return gen_rtx_SMAX (mode, op0, op1);
2945 case TRUTH_AND_EXPR:
2946 return gen_rtx_AND (mode, op0, op1);
2950 return gen_rtx_IOR (mode, op0, op1);
2953 case TRUTH_XOR_EXPR:
2954 return gen_rtx_XOR (mode, op0, op1);
2956 case TRUTH_ANDIF_EXPR:
2957 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
2959 case TRUTH_ORIF_EXPR:
2960 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
2962 case TRUTH_NOT_EXPR:
2963 return gen_rtx_EQ (mode, op0, const0_rtx);
2967 return gen_rtx_LTU (mode, op0, op1);
2969 return gen_rtx_LT (mode, op0, op1);
2973 return gen_rtx_LEU (mode, op0, op1);
2975 return gen_rtx_LE (mode, op0, op1);
2979 return gen_rtx_GTU (mode, op0, op1);
2981 return gen_rtx_GT (mode, op0, op1);
2985 return gen_rtx_GEU (mode, op0, op1);
2987 return gen_rtx_GE (mode, op0, op1);
2990 return gen_rtx_EQ (mode, op0, op1);
2993 return gen_rtx_NE (mode, op0, op1);
2995 case UNORDERED_EXPR:
2996 return gen_rtx_UNORDERED (mode, op0, op1);
2999 return gen_rtx_ORDERED (mode, op0, op1);
3002 return gen_rtx_UNLT (mode, op0, op1);
3005 return gen_rtx_UNLE (mode, op0, op1);
3008 return gen_rtx_UNGT (mode, op0, op1);
3011 return gen_rtx_UNGE (mode, op0, op1);
3014 return gen_rtx_UNEQ (mode, op0, op1);
3017 return gen_rtx_LTGT (mode, op0, op1);
3020 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
3023 gcc_assert (COMPLEX_MODE_P (mode));
3024 if (GET_MODE (op0) == VOIDmode)
3025 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
3026 if (GET_MODE (op1) == VOIDmode)
3027 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
3028 return gen_rtx_CONCAT (mode, op0, op1);
3031 if (GET_CODE (op0) == CONCAT)
3032 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
3033 gen_rtx_NEG (GET_MODE_INNER (mode),
3037 enum machine_mode imode = GET_MODE_INNER (mode);
3042 re = adjust_address_nv (op0, imode, 0);
3043 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
3047 enum machine_mode ifmode = int_mode_for_mode (mode);
3048 enum machine_mode ihmode = int_mode_for_mode (imode);
3050 if (ifmode == BLKmode || ihmode == BLKmode)
3052 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
3055 re = gen_rtx_SUBREG (ifmode, re, 0);
3056 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
3057 if (imode != ihmode)
3058 re = gen_rtx_SUBREG (imode, re, 0);
3059 im = copy_rtx (op0);
3061 im = gen_rtx_SUBREG (ifmode, im, 0);
3062 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
3063 if (imode != ihmode)
3064 im = gen_rtx_SUBREG (imode, im, 0);
3066 im = gen_rtx_NEG (imode, im);
3067 return gen_rtx_CONCAT (mode, re, im);
3071 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
3072 if (!op0 || !MEM_P (op0))
3074 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3075 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
3076 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
3077 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
3078 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
3080 if (handled_component_p (TREE_OPERAND (exp, 0)))
3082 HOST_WIDE_INT bitoffset, bitsize, maxsize;
3084 = get_ref_base_and_extent (TREE_OPERAND (exp, 0),
3085 &bitoffset, &bitsize, &maxsize);
3086 if ((TREE_CODE (decl) == VAR_DECL
3087 || TREE_CODE (decl) == PARM_DECL
3088 || TREE_CODE (decl) == RESULT_DECL)
3089 && !TREE_ADDRESSABLE (decl)
3090 && (bitoffset % BITS_PER_UNIT) == 0
3092 && bitsize == maxsize)
3093 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
3094 bitoffset / BITS_PER_UNIT);
3100 as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
3101 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
3106 exp = build_constructor_from_list (TREE_TYPE (exp),
3107 TREE_VECTOR_CST_ELTS (exp));
3111 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
3116 op0 = gen_rtx_CONCATN
3117 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
3119 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
3121 op1 = expand_debug_expr (val);
3124 XVECEXP (op0, 0, i) = op1;
3127 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
3129 op1 = expand_debug_expr
3130 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
3135 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
3136 XVECEXP (op0, 0, i) = op1;
3142 goto flag_unsupported;
3145 /* ??? Maybe handle some builtins? */
3150 gimple g = get_gimple_for_ssa_name (exp);
3153 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
3159 int part = var_to_partition (SA.map, exp);
3161 if (part == NO_PARTITION)
3164 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
3166 op0 = copy_rtx (SA.partition_to_pseudo[part]);
3174 /* Vector stuff. For most of these tree codes we have no corresponding rtl codes. */
3175 case REALIGN_LOAD_EXPR:
3176 case REDUC_MAX_EXPR:
3177 case REDUC_MIN_EXPR:
3178 case REDUC_PLUS_EXPR:
3180 case VEC_EXTRACT_EVEN_EXPR:
3181 case VEC_EXTRACT_ODD_EXPR:
3182 case VEC_INTERLEAVE_HIGH_EXPR:
3183 case VEC_INTERLEAVE_LOW_EXPR:
3184 case VEC_LSHIFT_EXPR:
3185 case VEC_PACK_FIX_TRUNC_EXPR:
3186 case VEC_PACK_SAT_EXPR:
3187 case VEC_PACK_TRUNC_EXPR:
3188 case VEC_RSHIFT_EXPR:
3189 case VEC_UNPACK_FLOAT_HI_EXPR:
3190 case VEC_UNPACK_FLOAT_LO_EXPR:
3191 case VEC_UNPACK_HI_EXPR:
3192 case VEC_UNPACK_LO_EXPR:
3193 case VEC_WIDEN_MULT_HI_EXPR:
3194 case VEC_WIDEN_MULT_LO_EXPR:
3198 case ADDR_SPACE_CONVERT_EXPR:
3199 case FIXED_CONVERT_EXPR:
3201 case WITH_SIZE_EXPR:
3205 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3206 && SCALAR_INT_MODE_P (mode))
3208 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3209 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3211 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3212 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3213 op1 = gen_rtx_ZERO_EXTEND (mode, op1);
3215 op1 = gen_rtx_SIGN_EXTEND (mode, op1);
3216 op0 = gen_rtx_MULT (mode, op0, op1);
3217 return gen_rtx_PLUS (mode, op0, op2);
3221 case WIDEN_MULT_EXPR:
3222 case WIDEN_MULT_PLUS_EXPR:
3223 case WIDEN_MULT_MINUS_EXPR:
3224 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3225 && SCALAR_INT_MODE_P (mode))
3227 enum machine_mode inner_mode = GET_MODE (op0);
3228 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3229 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
3231 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
3232 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
3233 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
3235 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
3236 op0 = gen_rtx_MULT (mode, op0, op1);
3237 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
3239 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
3240 return gen_rtx_PLUS (mode, op0, op2);
3242 return gen_rtx_MINUS (mode, op2, op0);
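/* Illustratively, with 16-bit operands and a 32-bit MODE this encodes
   WIDEN_MULT_PLUS_EXPR as (int32) op0 * (int32) op1 + op2 (and the
   MINUS variant as op2 - (int32) op0 * (int32) op1), making the
   widening explicit in the debug RTL. */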
3246 case WIDEN_SUM_EXPR:
3247 if (SCALAR_INT_MODE_P (GET_MODE (op0))
3248 && SCALAR_INT_MODE_P (mode))
3250 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
3251 op0 = gen_rtx_ZERO_EXTEND (mode, op0);
3253 op0 = gen_rtx_SIGN_EXTEND (mode, op0);
3254 return gen_rtx_PLUS (mode, op0, op1);
3259 return gen_rtx_FMA (mode, op0, op1, op2);
3263 #ifdef ENABLE_CHECKING
3272 /* Expand the _LOCs in debug insns. We run this after expanding all
3273 regular insns, so that any variables referenced in the function
3274 will have their DECL_RTLs set. */
3277 expand_debug_locations (void)
3280 rtx last = get_last_insn ();
3281 int save_strict_alias = flag_strict_aliasing;
3283 /* New alias sets while setting up memory attributes cause
3284 -fcompare-debug failures, even though it doesn't bring about any
3286 flag_strict_aliasing = 0;
3288 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
3289 if (DEBUG_INSN_P (insn))
3291 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
3293 enum machine_mode mode;
3295 if (value == NULL_TREE)
3299 val = expand_debug_expr (value);
3300 gcc_assert (last == get_last_insn ());
3304 val = gen_rtx_UNKNOWN_VAR_LOC ();
3307 mode = GET_MODE (INSN_VAR_LOCATION (insn));
3309 gcc_assert (mode == GET_MODE (val)
3310 || (GET_MODE (val) == VOIDmode
3311 && (CONST_INT_P (val)
3312 || GET_CODE (val) == CONST_FIXED
3313 || GET_CODE (val) == CONST_DOUBLE
3314 || GET_CODE (val) == LABEL_REF)));
3317 INSN_VAR_LOCATION_LOC (insn) = val;
3320 flag_strict_aliasing = save_strict_alias;
3323 /* Expand basic block BB from GIMPLE trees to RTL. */
3326 expand_gimple_basic_block (basic_block bb)
3328 gimple_stmt_iterator gsi;
3337 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
3340 /* Note that since we are now transitioning from GIMPLE to RTL, we
3341 cannot use the gsi_*_bb() routines because they expect the basic
3342 block to be in GIMPLE, instead of RTL. Therefore, we need to
3343 access the BB sequence directly. */
3344 stmts = bb_seq (bb);
3345 bb->il.gimple = NULL;
3346 rtl_profile_for_bb (bb);
3347 init_rtl_bb_info (bb);
3348 bb->flags |= BB_RTL;
3350 /* Remove the RETURN_EXPR if we may fall through to the exit
3352 gsi = gsi_last (stmts);
3353 if (!gsi_end_p (gsi)
3354 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
3356 gimple ret_stmt = gsi_stmt (gsi);
3358 gcc_assert (single_succ_p (bb));
3359 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
3361 if (bb->next_bb == EXIT_BLOCK_PTR
3362 && !gimple_return_retval (ret_stmt))
3364 gsi_remove (&gsi, false);
3365 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
3369 gsi = gsi_start (stmts);
3370 if (!gsi_end_p (gsi))
3372 stmt = gsi_stmt (gsi);
3373 if (gimple_code (stmt) != GIMPLE_LABEL)
3377 elt = pointer_map_contains (lab_rtx_for_bb, bb);
3381 last = get_last_insn ();
3385 expand_gimple_stmt (stmt);
3390 emit_label ((rtx) *elt);
3392 /* Java emits line number notes at the top of labels.
3393 ??? Make this go away once line number notes are obsoleted. */
3394 BB_HEAD (bb) = NEXT_INSN (last);
3395 if (NOTE_P (BB_HEAD (bb)))
3396 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
3397 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
3399 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3402 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
3404 NOTE_BASIC_BLOCK (note) = bb;
3406 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3410 stmt = gsi_stmt (gsi);
3412 /* If this statement is a non-debug one, and we generate debug
3413 insns, then this one might be the last real use of a TERed
3414 SSA_NAME, but where there are still some debug uses further
3415 down. Expanding the current SSA name in such further debug
3416 uses by its RHS might lead to wrong debug info, as coalescing
3417 might make the operands of such RHS be placed into the same
3418 pseudo as something else. Like so:
3419 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
3423 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
3424 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use,
3425 the write to a_2 would actually have clobbered the place which
3428 So, instead of that, we recognize the situation, and generate
3429 debug temporaries at the last real use of TERed SSA names:
3436 if (MAY_HAVE_DEBUG_INSNS
3438 && !is_gimple_debug (stmt))
3444 location_t sloc = get_curr_insn_source_location ();
3445 tree sblock = get_curr_insn_block ();
3447 /* Look for SSA names that have their last use here (TERed
3448 names always have only one real use). */
3449 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3450 if ((def = get_gimple_for_ssa_name (op)))
3452 imm_use_iterator imm_iter;
3453 use_operand_p use_p;
3454 bool have_debug_uses = false;
3456 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
3458 if (gimple_debug_bind_p (USE_STMT (use_p)))
3460 have_debug_uses = true;
3465 if (have_debug_uses)
3467 /* OP is a TERed SSA name, with DEF as its defining
3468 statement, and where OP is used in further debug
3469 instructions. Generate a debug temporary, and
3470 replace all uses of OP in debug insns with that
3473 tree value = gimple_assign_rhs_to_tree (def);
3474 tree vexpr = make_node (DEBUG_EXPR_DECL);
3476 enum machine_mode mode;
3478 set_curr_insn_source_location (gimple_location (def));
3479 set_curr_insn_block (gimple_block (def));
3481 DECL_ARTIFICIAL (vexpr) = 1;
3482 TREE_TYPE (vexpr) = TREE_TYPE (value);
3484 mode = DECL_MODE (value);
3486 mode = TYPE_MODE (TREE_TYPE (value));
3487 DECL_MODE (vexpr) = mode;
3489 val = gen_rtx_VAR_LOCATION
3490 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3492 val = emit_debug_insn (val);
3494 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
3496 if (!gimple_debug_bind_p (debugstmt))
3499 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
3500 SET_USE (use_p, vexpr);
3502 update_stmt (debugstmt);
3506 set_curr_insn_source_location (sloc);
3507 set_curr_insn_block (sblock);
3510 currently_expanding_gimple_stmt = stmt;
3512 /* Expand this statement, then evaluate the resulting RTL and
3513 fixup the CFG accordingly. */
3514 if (gimple_code (stmt) == GIMPLE_COND)
3516 new_bb = expand_gimple_cond (bb, stmt);
3520 else if (gimple_debug_bind_p (stmt))
3522 location_t sloc = get_curr_insn_source_location ();
3523 tree sblock = get_curr_insn_block ();
3524 gimple_stmt_iterator nsi = gsi;
3528 tree var = gimple_debug_bind_get_var (stmt);
3531 enum machine_mode mode;
3533 if (gimple_debug_bind_has_value_p (stmt))
3534 value = gimple_debug_bind_get_value (stmt);
3538 last = get_last_insn ();
3540 set_curr_insn_source_location (gimple_location (stmt));
3541 set_curr_insn_block (gimple_block (stmt));
3544 mode = DECL_MODE (var);
3546 mode = TYPE_MODE (TREE_TYPE (var));
3548 val = gen_rtx_VAR_LOCATION
3549 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
3551 val = emit_debug_insn (val);
3553 if (dump_file && (dump_flags & TDF_DETAILS))
3555 /* We can't dump the insn with a TREE where an RTX
3557 INSN_VAR_LOCATION_LOC (val) = const0_rtx;
3558 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3559 INSN_VAR_LOCATION_LOC (val) = (rtx)value;
3562 /* In order not to generate too many debug temporaries,
3563 we delink all uses of debug statements we already expanded.
3564 Therefore debug statements between definition and real
3565 use of TERed SSA names will continue to use the SSA name,
3566 and not be replaced with debug temps. */
3567 delink_stmt_imm_use (stmt);
3571 if (gsi_end_p (nsi))
3573 stmt = gsi_stmt (nsi);
3574 if (!gimple_debug_bind_p (stmt))
3578 set_curr_insn_source_location (sloc);
3579 set_curr_insn_block (sblock);
3583 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
3586 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
3597 def_operand_p def_p;
3598 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
3602 /* Ignore this stmt if it is in the list of
3603 replaceable expressions. */
3605 && bitmap_bit_p (SA.values,
3606 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
3609 last = expand_gimple_stmt (stmt);
3610 maybe_dump_rtl_for_gimple_stmt (stmt, last);
3615 currently_expanding_gimple_stmt = NULL;
3617 /* Expand implicit goto and convert goto_locus. */
3618 FOR_EACH_EDGE (e, ei, bb->succs)
3620 if (e->goto_locus && e->goto_block)
3622 set_curr_insn_source_location (e->goto_locus);
3623 set_curr_insn_block (e->goto_block);
3624 e->goto_locus = curr_insn_locator ();
3626 e->goto_block = NULL;
3627 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
3629 emit_jump (label_rtx_for_bb (e->dest));
3630 e->flags &= ~EDGE_FALLTHRU;
3634 /* Expanded RTL can create a jump in the last instruction of the block.
3635 This might later be assumed to be a jump to the successor and break
3636 edge insertion. We need to insert a dummy move to prevent this. PR41440. */
3637 if (single_succ_p (bb)
3638 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
3639 && (last = get_last_insn ())
3642 rtx dummy = gen_reg_rtx (SImode);
3643 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
3646 do_pending_stack_adjust ();
3648 /* Find the block tail. The last insn in the block is the insn
3649 before a barrier and/or table jump insn. */
3650 last = get_last_insn ();
3651 if (BARRIER_P (last))
3652 last = PREV_INSN (last);
3653 if (JUMP_TABLE_DATA_P (last))
3654 last = PREV_INSN (PREV_INSN (last));
3657 update_bb_for_insn (bb);
3663 /* Create a basic block for initialization code. */
3666 construct_init_block (void)
3668 basic_block init_block, first_block;
3672 /* Multiple entry points not supported yet. */
3673 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
3674 init_rtl_bb_info (ENTRY_BLOCK_PTR);
3675 init_rtl_bb_info (EXIT_BLOCK_PTR);
3676 ENTRY_BLOCK_PTR->flags |= BB_RTL;
3677 EXIT_BLOCK_PTR->flags |= BB_RTL;
3679 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
3681 /* When the entry edge points to the first basic block, we don't need a
3682 jump; otherwise we have to jump to the proper target. */
3683 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
3685 tree label = gimple_block_label (e->dest);
3687 emit_jump (label_rtx (label));
3691 flags = EDGE_FALLTHRU;
3693 init_block = create_basic_block (NEXT_INSN (get_insns ()),
3696 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
3697 init_block->count = ENTRY_BLOCK_PTR->count;
3700 first_block = e->dest;
3701 redirect_edge_succ (e, init_block);
3702 e = make_edge (init_block, first_block, flags);
3705 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3706 e->probability = REG_BR_PROB_BASE;
3707 e->count = ENTRY_BLOCK_PTR->count;
3709 update_bb_for_insn (init_block);
3713 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
3714 found in the block tree. */
3717 set_block_levels (tree block, int level)
3721 BLOCK_NUMBER (block) = level;
3722 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
3723 block = BLOCK_CHAIN (block);
3727 /* Create a block containing landing pads and similar stuff. */
3730 construct_exit_block (void)
3732 rtx head = get_last_insn ();
3734 basic_block exit_block;
3738 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
3740 rtl_profile_for_bb (EXIT_BLOCK_PTR);
3742 /* Make sure the locus is set to the end of the function, so that
3743 epilogue line numbers and warnings are set properly. */
3744 if (cfun->function_end_locus != UNKNOWN_LOCATION)
3745 input_location = cfun->function_end_locus;
3747 /* The following insns belong to the top scope. */
3748 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3750 /* Generate rtl for function exit. */
3751 expand_function_end ();
3753 end = get_last_insn ();
3756 /* While emitting the function end we could have moved the end of the last basic block.
3758 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
3759 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
3760 head = NEXT_INSN (head);
3761 exit_block = create_basic_block (NEXT_INSN (head), end,
3762 EXIT_BLOCK_PTR->prev_bb);
3763 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
3764 exit_block->count = EXIT_BLOCK_PTR->count;
3767 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
3769 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
3770 if (!(e->flags & EDGE_ABNORMAL))
3771 redirect_edge_succ (e, exit_block);
3776 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
3777 e->probability = REG_BR_PROB_BASE;
3778 e->count = EXIT_BLOCK_PTR->count;
3779 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
3782 e->count -= e2->count;
3783 exit_block->count -= e2->count;
3784 exit_block->frequency -= EDGE_FREQUENCY (e2);
3788 if (exit_block->count < 0)
3789 exit_block->count = 0;
3790 if (exit_block->frequency < 0)
3791 exit_block->frequency = 0;
3792 update_bb_for_insn (exit_block);
3795 /* Helper function for discover_nonconstant_array_refs.
3796 Look for ARRAY_REF nodes with non-constant indexes and mark them
3800 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
3801 void *data ATTRIBUTE_UNUSED)
3805 if (IS_TYPE_OR_DECL_P (t))
3807 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3809 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3810 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
3811 && (!TREE_OPERAND (t, 2)
3812 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3813 || (TREE_CODE (t) == COMPONENT_REF
3814 && (!TREE_OPERAND (t,2)
3815 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
3816 || TREE_CODE (t) == BIT_FIELD_REF
3817 || TREE_CODE (t) == REALPART_EXPR
3818 || TREE_CODE (t) == IMAGPART_EXPR
3819 || TREE_CODE (t) == VIEW_CONVERT_EXPR
3820 || CONVERT_EXPR_P (t))
3821 t = TREE_OPERAND (t, 0);
3823 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3825 t = get_base_address (t);
3827 && DECL_MODE (t) != BLKmode)
3828 TREE_ADDRESSABLE (t) = 1;
3837 /* RTL expansion is not able to compile array references with variable
3838 offsets for arrays stored in a single register. Discover such
3839 expressions and mark variables as addressable to avoid this
3843 discover_nonconstant_array_refs (void)
3846 gimple_stmt_iterator gsi;
3849 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3851 gimple stmt = gsi_stmt (gsi);
3852 if (!is_gimple_debug (stmt))
3853 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
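/* A minimal sketch of the kind of code this pass guards against (the
   function F and array A below are hypothetical):

     int f (int i)
     {
       int a[2] = { 1, 2 };  // small enough to get a register-sized mode
       return a[i];          // variable index: A must live in memory
     }

   Marking A as TREE_ADDRESSABLE forces it onto the stack so that the
   variable-offset reference can be expanded. */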
3857 /* This function sets crtl->args.internal_arg_pointer to a virtual
3858 register if DRAP is needed. The local register allocator will replace
3859 virtual_incoming_args_rtx with the virtual register. */
3862 expand_stack_alignment (void)
3865 unsigned int preferred_stack_boundary;
3867 if (! SUPPORTS_STACK_ALIGNMENT)
3870 if (cfun->calls_alloca
3871 || cfun->has_nonlocal_label
3872 || crtl->has_nonlocal_goto)
3873 crtl->need_drap = true;
3875 /* Call update_stack_boundary here again to update incoming stack
3876 boundary. It may set incoming stack alignment to a different
3877 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
3878 use the minimum incoming stack alignment to check if it is OK
3879 to perform sibcall optimization since sibcall optimization will
3880 only align the outgoing stack to incoming stack boundary. */
3881 if (targetm.calls.update_stack_boundary)
3882 targetm.calls.update_stack_boundary ();
3884 /* The incoming stack frame has to be aligned at least at
3885 parm_stack_boundary. */
3886 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
3888 /* Update crtl->stack_alignment_estimated and use it later to align
3889 the stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3890 exceptions since callgraph doesn't collect incoming stack alignment
3892 if (cfun->can_throw_non_call_exceptions
3893 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
3894 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3896 preferred_stack_boundary = crtl->preferred_stack_boundary;
3897 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
3898 crtl->stack_alignment_estimated = preferred_stack_boundary;
3899 if (preferred_stack_boundary > crtl->stack_alignment_needed)
3900 crtl->stack_alignment_needed = preferred_stack_boundary;
3902 gcc_assert (crtl->stack_alignment_needed
3903 <= crtl->stack_alignment_estimated);
3905 crtl->stack_realign_needed
3906 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
3907 crtl->stack_realign_tried = crtl->stack_realign_needed;
3909 crtl->stack_realign_processed = true;
3911 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
3913 gcc_assert (targetm.calls.get_drap_rtx != NULL);
3914 drap_rtx = targetm.calls.get_drap_rtx ();
3916 /* stack_realign_drap and drap_rtx must match. */
3917 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
3919 /* Do nothing if NULL is returned, which means DRAP is not needed. */
3920 if (NULL != drap_rtx)
3922 crtl->args.internal_arg_pointer = drap_rtx;
3924 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
3926 fixup_tail_calls ();
3930 /* Translate the intermediate representation contained in the CFG
3931 from GIMPLE trees to RTL.
3933 We do conversion per basic block and preserve/update the tree CFG.
3934 This implies we have to do some magic as the CFG can simultaneously
3935 consist of basic blocks containing RTL and GIMPLE trees. This can
3936 confuse the CFG hooks, so be careful to not manipulate CFG during
3940 gimple_expand_cfg (void)
3942 basic_block bb, init_block;
3949 timevar_push (TV_OUT_OF_SSA);
3950 rewrite_out_of_ssa (&SA);
3951 timevar_pop (TV_OUT_OF_SSA);
3952 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
3955 /* Some backends want to know that we are expanding to RTL. */
3956 currently_expanding_to_rtl = 1;
3958 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
3960 insn_locators_alloc ();
3961 if (!DECL_IS_BUILTIN (current_function_decl))
3963 /* Eventually, all FEs should explicitly set function_start_locus. */
3964 if (cfun->function_start_locus == UNKNOWN_LOCATION)
3965 set_curr_insn_source_location
3966 (DECL_SOURCE_LOCATION (current_function_decl));
3968 set_curr_insn_source_location (cfun->function_start_locus);
3971 set_curr_insn_source_location (UNKNOWN_LOCATION);
3972 set_curr_insn_block (DECL_INITIAL (current_function_decl));
3973 prologue_locator = curr_insn_locator ();
3975 #ifdef INSN_SCHEDULING
3976 init_sched_attrs ();
3979 /* Make sure first insn is a note even if we don't want linenums.
3980 This makes sure the first insn will never be deleted.
3981 Also, final expects a note to appear there. */
3982 emit_note (NOTE_INSN_DELETED);
3984 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
3985 discover_nonconstant_array_refs ();
3987 targetm.expand_to_rtl_hook ();
3988 crtl->stack_alignment_needed = STACK_BOUNDARY;
3989 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
3990 crtl->stack_alignment_estimated = 0;
3991 crtl->preferred_stack_boundary = STACK_BOUNDARY;
3992 cfun->cfg->max_jumptable_ents = 0;
3994 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
3995 of the function section at expansion time to predict the distance of calls. */
3996 resolve_unique_section (current_function_decl, 0, flag_function_sections);
3998 /* Expand the variables recorded during gimple lowering. */
3999 timevar_push (TV_VAR_EXPAND);
4002 expand_used_vars ();
4004 var_seq = get_insns ();
4006 timevar_pop (TV_VAR_EXPAND);
4008 /* Honor stack protection warnings. */
4009 if (warn_stack_protect)
4011 if (cfun->calls_alloca)
4012 warning (OPT_Wstack_protector,
4013 "stack protector not protecting local variables: "
4014 "variable length buffer");
4015 if (has_short_buffer && !crtl->stack_protect_guard)
4016 warning (OPT_Wstack_protector,
4017 "stack protector not protecting function: "
4018 "all local arrays are less than %d bytes long",
4019 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
4022 /* Set up parameters and prepare for return, for the function. */
4023 expand_function_start (current_function_decl);
4025 /* If we emitted any instructions for setting up the variables,
4026 emit them before the FUNCTION_START note. */
4029 emit_insn_before (var_seq, parm_birth_insn);
4031 /* In expand_function_end we'll insert the alloca save/restore
4032 before parm_birth_insn. We've just inserted an alloca call.
4033 Adjust the pointer to match. */
4034 parm_birth_insn = var_seq;
4037 /* Now that we also have the parameter RTXs, copy them over to our
4039 for (i = 0; i < SA.map->num_partitions; i++)
4041 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
4043 if (TREE_CODE (var) != VAR_DECL
4044 && !SA.partition_to_pseudo[i])
4045 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
4046 gcc_assert (SA.partition_to_pseudo[i]);
4048 /* If this decl was marked as living in multiple places, reset
4049 this now to NULL. */
4050 if (DECL_RTL_IF_SET (var) == pc_rtx)
4051 SET_DECL_RTL (var, NULL);
4053 /* Some RTL parts really want to look at DECL_RTL(x) when x
4054 was a decl marked in REG_ATTR or MEM_ATTR. We could use
4055 SET_DECL_RTL here making this available, but that would mean
4056 selecting one of the potentially many RTLs for one DECL. Instead
4057 of doing that we simply reset the MEM_EXPR of the RTL in question,
4058 so that nobody can get at it and hence nobody can call DECL_RTL on it. */
4059 if (!DECL_RTL_SET_P (var))
4061 if (MEM_P (SA.partition_to_pseudo[i]))
4062 set_mem_expr (SA.partition_to_pseudo[i], NULL);
4066 /* If this function is `main', emit a call to `__main'
4067 to run global initializers, etc. */
4068 if (DECL_NAME (current_function_decl)
4069 && MAIN_NAME_P (DECL_NAME (current_function_decl))
4070 && DECL_FILE_SCOPE_P (current_function_decl))
4071 expand_main_function ();
4073 /* Initialize the stack_protect_guard field. This must happen after the
4074 call to __main (if any) so that the external decl is initialized. */
4075 if (crtl->stack_protect_guard)
4076 stack_protect_prologue ();
4078 expand_phi_nodes (&SA);
4080 /* Register rtl specific functions for cfg. */
4081 rtl_register_cfg_hooks ();
4083 init_block = construct_init_block ();
4085 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4086 remaining edges later. */
4087 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
4088 e->flags &= ~EDGE_EXECUTABLE;
4090 lab_rtx_for_bb = pointer_map_create ();
4091 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
4092 bb = expand_gimple_basic_block (bb);
4094 if (MAY_HAVE_DEBUG_INSNS)
4095 expand_debug_locations ();
4097 execute_free_datastructures ();
4098 timevar_push (TV_OUT_OF_SSA);
4099 finish_out_of_ssa (&SA);
4100 timevar_pop (TV_OUT_OF_SSA);
4102 timevar_push (TV_POST_EXPAND);
4103 /* We are no longer in SSA form. */
4104 cfun->gimple_df->in_ssa_p = false;
4106 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
4107 conservatively to true until they are all profile aware. */
4108 pointer_map_destroy (lab_rtx_for_bb);
4111 construct_exit_block ();
4112 set_curr_insn_block (DECL_INITIAL (current_function_decl));
4113 insn_locators_finalize ();
4115 /* Zap the tree EH table. */
4116 set_eh_throw_stmt_table (cfun, NULL);
4118 rebuild_jump_labels (get_insns ());
4120 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
4124 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4128 /* Avoid putting insns before parm_birth_insn. */
4129 if (e->src == ENTRY_BLOCK_PTR
4130 && single_succ_p (ENTRY_BLOCK_PTR)
4133 rtx insns = e->insns.r;
4134 e->insns.r = NULL_RTX;
4135 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
4138 commit_one_edge_insertion (e);
4145 /* We're done expanding trees to RTL. */
4146 currently_expanding_to_rtl = 0;
4148 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
4152 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
4154 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
4155 e->flags &= ~EDGE_EXECUTABLE;
4157 /* At the moment not all abnormal edges match the RTL
4158 representation. It is safe to remove them here as
4159 find_many_sub_basic_blocks will rediscover them.
4160 In the future we should get this fixed properly. */
4161 if ((e->flags & EDGE_ABNORMAL)
4162 && !(e->flags & EDGE_SIBCALL))
4169 blocks = sbitmap_alloc (last_basic_block);
4170 sbitmap_ones (blocks);
4171 find_many_sub_basic_blocks (blocks);
4172 sbitmap_free (blocks);
4173 purge_all_dead_edges ();
4177 expand_stack_alignment ();
4179 #ifdef ENABLE_CHECKING
4180 verify_flow_info ();
4183 /* There's no need to defer outputting this function any more; we
4184 know we want to output it. */
4185 DECL_DEFER_OUTPUT (current_function_decl) = 0;
4187 /* Now that we're done expanding trees to RTL, we shouldn't have any
4188 more CONCATs anywhere. */
4189 generating_concat_p = 0;
4194 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4195 /* And the pass manager will dump RTL for us. */
4198 /* If we're emitting a nested function, make sure its parent gets
4199 emitted as well. Doing otherwise confuses debug info. */
4202 for (parent = DECL_CONTEXT (current_function_decl);
4203 parent != NULL_TREE;
4204 parent = get_containing_scope (parent))
4205 if (TREE_CODE (parent) == FUNCTION_DECL)
4206 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
4209 /* We are now committed to emitting code for this function. Do any
4210 preparation, such as emitting abstract debug info for the inline
4211 function before it gets mangled by optimization. */
4212 if (cgraph_function_possibly_inlined_p (current_function_decl))
4213 (*debug_hooks->outlining_inline_function) (current_function_decl);
4215 TREE_ASM_WRITTEN (current_function_decl) = 1;
4217 /* After expanding, the return labels are no longer needed. */
4218 return_label = NULL;
4219 naked_return_label = NULL;
4220 /* Tag the blocks with a depth number so that change_scope can find
4221 the common parent easily. */
4222 set_block_levels (DECL_INITIAL (cfun->decl), 0);
4223 default_rtl_profile ();
4224 timevar_pop (TV_POST_EXPAND);
4228 struct rtl_opt_pass pass_expand =
4232 "expand", /* name */
4234 gimple_expand_cfg, /* execute */
4237 0, /* static_pass_number */
4238 TV_EXPAND, /* tv_id */
4239 PROP_ssa | PROP_gimple_leh | PROP_cfg
4240 | PROP_gimple_lcx, /* properties_required */
4241 PROP_rtl, /* properties_provided */
4242 PROP_ssa | PROP_trees, /* properties_destroyed */
4243 TODO_verify_ssa | TODO_verify_flow
4244 | TODO_verify_stmts, /* todo_flags_start */
4246 | TODO_ggc_collect /* todo_flags_finish */