/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
          && gimple_location (stmt) != EXPR_LOCATION (t))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
/* Verify that there is exactly one jump instruction emitted since LAST
   and attach a REG_BR_PROB note to it, specifying PROBABILITY.
   ??? We really ought to pass the probability down to RTL expanders and let it
   re-distribute it when the conditional expands into multiple conditionals.
   This is however difficult to do.  */

static void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
        /* It is common to emit condjump-around-jump sequence when we don't
           know how to reverse the conditional.  Special case this.  */
        if (!any_condjump_p (last)
            || !JUMP_P (NEXT_INSN (last))
            || !simplejump_p (NEXT_INSN (last))
            || !NEXT_INSN (NEXT_INSN (last))
            || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
            || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
            || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
            || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
          goto failed;
        gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
        add_reg_note (last, REG_BR_PROB,
                      GEN_INT (REG_BR_PROB_BASE - probability));
        return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
  return;
failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC  ((size_t)-1)
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
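
/* Worked example (illustrative only, not in the original sources): with
   STARTING_FRAME_OFFSET == 4 and a preferred stack boundary of 16 bytes
   (128 bits), expand_used_vars below computes off = 4 % 16 = 4 and thus
   frame_phase = 16 - 4 = 12, so that the initial frame_offset of 4
   satisfies (4 + 12) % 16 == 0 as stated above.  */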
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
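
/* Worked example (illustrative only): on a downward-growing frame with
   frame_phase == 0, allocating 12 bytes at 8-byte alignment starting
   from frame_offset == -16 gives new_frame_offset = -16 - 12 = -28,
   which the &= -align step rounds down to -32; the variable is placed
   at offset -32 and frame_offset becomes -32.  */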
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size
    = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static inline size_t
triangular_index (size_t i, size_t j)
{
  if (i < j)
    {
      size_t t;
      t = i, i = j, j = t;
    }
  return (i * (i + 1)) / 2 + j;
}
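
/* Worked example (illustrative only): the pair (3, 1) maps to
   3 * 4 / 2 + 1 = 7, and the symmetric pair (1, 3) is first swapped to
   (3, 1), so both coordinates name the same slot in the
   lower-triangular conflict array.  */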
/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
          (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}
/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then we must add a conflict for these
   variables in the interference graph.  We also need to make sure to add
   conflicts for structures containing unions.  Otherwise RTL alias
   analysis comes along and, due to type-based aliasing rules, decides
   that for two overlapping union temporaries { short s; int i; } accesses
   to the same mem through different types may not alias and happily
   reorders stores across the life-time boundaries of the temporaries
   (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size use the uid of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (uid));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);
      add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a single block that is easy to lay
   out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof (bool) == sizeof (char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
    return;

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;

          offset += jsize;
        }
    }
  update_alias_info_with_stack_vars ();
}
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}
/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
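
/* Note on the alignment computation above (illustrative example, not in
   the original sources): offset & -offset isolates the lowest set bit,
   i.e. the largest power of two dividing the offset.  For instance an
   offset of 24 gives 24 & -24 = 8, so only 8-byte alignment is
   guaranteed by the slot's placement.  */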
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the amount of stack space this variable is supposed to use.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after stack
             realign decision made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY       1
#define SPCT_HAS_SMALL_CHAR_ARRAY       2
#define SPCT_HAS_ARRAY                  4
#define SPCT_HAS_AGGREGATE              8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
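
/* Illustrative example (not in the original sources): with the default
   --param ssp-buffer-size=8, a declaration like `char buf[4];' classifies
   as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, `char buf[64];' yields
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, and a struct containing the
   latter additionally sets SPCT_HAS_AGGREGATE via the recursion above.  */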
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* A subroutine of estimated_stack_frame_size.  Walk down through the
   BLOCK tree and account for the stack space of used variables, without
   actually expanding them.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;
  HOST_WIDE_INT size = 0;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  this_sv_num = stack_vars_num;

  /* Account for all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
  return size;
}
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  XDELETEVEC (stack_vars_conflict);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  stack_vars_conflict = NULL;
  stack_vars_conflict_alloc = 0;
}
/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, next, outer_block = DECL_INITIAL (current_function_decl);
  unsigned i;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      next = TREE_CHAIN (t);

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        {
          expand_one_var (var, true, true);
          if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
            {
              rtx rtl = DECL_RTL_IF_SET (var);

              /* Keep artificial non-ignored vars in cfun->local_decls
                 chain until instantiate_decls.  */
              if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
                {
                  TREE_CHAIN (t) = cfun->local_decls;
                  cfun->local_decls = t;
                  continue;
                }
            }
        }

    next:
      ggc_free (t);
    }

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}
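
/* Illustrative example (not in the original sources) of the final
   rounding above: with a 16-byte boundary and FRAME_GROWS_DOWNWARD,
   a frame_offset of -20 is rounded by frame_offset &= -16 down to -32;
   on an upward-growing frame the preceding += align - 1 turns 20 into
   35, which the mask reduces to 32, rounding up instead.  */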
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  */

static void
maybe_cleanup_end_of_block (edge e)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      basic_block bb = e->src;
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != BB_HEAD (bb);)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
        }
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN
          && TREE_CODE_CLASS (gimple_assign_rhs_code (second))
             == tcc_comparison)
        {
          code = gimple_assign_rhs_code (second);
          op0 = gimple_assign_rhs1 (second);
          op1 = gimple_assign_rhs2 (second);
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest));
      add_reg_br_prob_note (last, true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest));
      add_reg_br_prob_note (last, false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest));
  add_reg_br_prob_note (last, true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gimple stmt)
{
  tree exp;
  tree lhs = gimple_call_lhs (stmt);
  tree fndecl = gimple_call_fndecl (stmt);
  size_t i;

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    CALL_EXPR_ARG (exp, i) = gimple_call_arg (stmt, i);

  if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_flags (stmt) & ECF_NOTHROW)
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

  /* Record the original call statement, as it may be used
     to retrieve profile information during expansion.  */

  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree_ann_common_t ann = get_tree_common_ann (exp);
      ann->stmt = stmt;
    }

  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);
}
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is no tailcalls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple stmt)
{
  tree op0;
  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
        expand_goto (op0);
      else
        expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (stmt));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (stmt);
      break;
    case GIMPLE_CALL:
      expand_call_stmt (stmt);
      break;

    case GIMPLE_RETURN:
      op0 = gimple_return_retval (stmt);

      if (op0 && op0 != error_mark_node)
        {
          tree result = DECL_RESULT (current_function_decl);

          /* If we are not returning the current function's RESULT_DECL,
             build an assignment to it.  */
          if (op0 != result)
            {
              /* I believe that a function's RESULT_DECL is unique.  */
              gcc_assert (TREE_CODE (op0) != RESULT_DECL);

              /* ??? We'd like to use simply expand_assignment here,
                 but this fails if the value is of BLKmode but the return
                 decl is a register.  expand_return has special handling
                 for this combination, which eventually should move
                 to common code.  See comments there.  Until then, let's
                 build a modify expression :-/  */
              op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
                            result, op0);
            }
        }
      if (!op0)
        expand_null_return ();
      else
        expand_return (op0);
      break;

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);

        /* Tree expand used to fiddle with |= and &= of two bitfield
           COMPONENT_REFs here.  This can't happen with gimple, the LHS
           of binary assigns must be a gimple reg.  */

        if (TREE_CODE (lhs) != SSA_NAME
            || get_gimple_rhs_class (gimple_expr_code (stmt))
               == GIMPLE_SINGLE_RHS)
          {
            tree rhs = gimple_assign_rhs1 (stmt);
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                        == GIMPLE_SINGLE_RHS);
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
            expand_assignment (lhs, rhs,
                               gimple_assign_nontemporal_move_p (stmt));
          }
        else
          {
            rtx target, temp;
            bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
            struct separate_ops ops;
            bool promoted = false;

            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
              promoted = true;

            ops.code = gimple_assign_rhs_code (stmt);
            ops.type = TREE_TYPE (lhs);
            switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
              {
                case GIMPLE_BINARY_RHS:
                  ops.op1 = gimple_assign_rhs2 (stmt);
                  /* Fallthru */
                case GIMPLE_UNARY_RHS:
                  ops.op0 = gimple_assign_rhs1 (stmt);
                  break;
                default:
                  gcc_unreachable ();
              }
            ops.location = gimple_location (stmt);

            /* If we want to use a nontemporal store, force the value to
               register first.  If we store into a promoted register,
               don't directly expand to target.  */
            temp = nontemporal || promoted ? NULL_RTX : target;
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
                                       EXPAND_NORMAL);

            if (temp == target)
              ;
            else if (promoted)
              {
                int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
                /* If TEMP is a VOIDmode constant, use convert_modes to make
                   sure that we properly convert it.  */
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
                  {
                    temp = convert_modes (GET_MODE (target),
                                          TYPE_MODE (ops.type),
                                          temp, unsignedp);
                    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                          GET_MODE (target), temp, unsignedp);
                  }

                convert_move (SUBREG_REG (target), temp, unsignedp);
              }
            else if (nontemporal && emit_storent_insn (target, temp))
              ;
            else
              {
                temp = force_operand (temp, target);
                if (temp != target)
                  emit_move_insn (target, temp);
              }
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx
expand_gimple_stmt (gimple stmt)
{
  int lp_nr = 0;
  rtx last;
  location_t saved_location = input_location;

  last = get_last_insn ();

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */

  if (gimple_has_location (stmt))
    {
      input_location = gimple_location (stmt);
      set_curr_insn_source_location (input_location);

      /* Record where the insns produced belong.  */
      set_curr_insn_block (gimple_block (stmt));
    }

  expand_gimple_stmt_1 (stmt);
  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
           insn = next_real_insn (insn))
        {
          if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
              /* If we want exceptions for non-call insns, any
                 may_trap_p instruction may throw.  */
              && GET_CODE (PATTERN (insn)) != CLOBBER
              && GET_CODE (PATTERN (insn)) != USE
              && insn_could_throw_p (insn))
            make_reg_eh_region_note (insn, 0, lp_nr);
        }
    }

  return last;
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = expand_gimple_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          if (e->dest != EXIT_BLOCK_PTR)
            {
              e->dest->count -= e->count;
              e->dest->frequency -= EDGE_FREQUENCY (e);
              if (e->dest->count < 0)
                e->dest->count = 0;
              if (e->dest->frequency < 0)
                e->dest->frequency = 0;
            }
          count += e->count;
          probability += e->probability;
          remove_edge (e);
        }
      else
        ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance, the sqrt builtin expander expands an if with a
         sibcall in the then-arm and a label for the `else` arm.  */
      if (LABEL_P (NEXT_INSN (last)))
        {
          *can_fallthru = true;
          break;
        }
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
        BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  return bb;
}
/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}
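/* Worked example (illustrative, not part of the original sources): for
   -7 / 2, truncation gives -3 with remainder MOD = -1.  MOD is nonzero
   and OP1 / MOD = 2 / -1 is negative, so the expression above selects
   -1, giving the floor result -3 + -1 = -4.  */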
/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}
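/* Worked example (illustrative, not part of the original sources): for
   7 / 2, truncation gives 3 with remainder MOD = 1; OP1 / MOD = 2 / 1
   is positive, so the adjustment is 1 and the ceiling result is
   3 + 1 = 4.  */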
/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */

static rtx
ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}
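/* Worked example (illustrative): for unsigned 7 / 2, truncation gives 3
   with remainder 1; any nonzero remainder means the true quotient was
   rounded down, so adding 1 yields the ceiling result 4.  A zero
   remainder needs no adjustment.  */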
/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */

static rtx
round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
        ? (op1 / mod > 0 ? 1 : -1)
        : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
                       gen_rtx_MINUS (mode,
                                      gen_rtx_ABS (mode, op1),
                                      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
                        gen_rtx_DIV (mode, op1, mod),
                        const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}
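/* Worked example (illustrative, not part of the original sources): for
   7 / 2, MOD = 1 and abs (OP1) - abs (MOD) = 1, so the condition holds;
   OP1 / MOD is positive, selecting +1, and the rounded result is
   3 + 1 = 4, i.e. the halfway case 3.5 rounds away from zero.  */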
/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
                       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
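/* Worked example (illustrative): for unsigned 7 / 2, MOD = 1 and
   OP1 - MOD = 1, so MOD >= OP1 - MOD holds and 1 is added: 3 + 1 = 4.
   For 5 / 3, MOD = 2 and OP1 - MOD = 1, so the result is likewise
   1 + 1 = 2, the nearest integer to 5/3.  */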
/* Wrap modeless constants in CONST:MODE.  */

static rtx
wrap_constant (enum machine_mode mode, rtx x)
{
  if (GET_MODE (x) != VOIDmode)
    return x;

  if (CONST_INT_P (x)
      || GET_CODE (x) == CONST_FIXED
      || GET_CODE (x) == CONST_DOUBLE
      || GET_CODE (x) == LABEL_REF)
    {
      gcc_assert (mode != VOIDmode);
      x = gen_rtx_CONST (mode, x);
    }

  return x;
}
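/* For illustration (not part of the original sources): wrapping turns a
   modeless (const_int 42) into (const:SI (const_int 42)) when MODE is
   SImode, so the mode survives into debug location expressions; RTXs
   that already carry a mode are returned unchanged.  */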
/* Remove the CONST wrapper added by wrap_constant ().  */

static rtx
unwrap_constant (rtx x)
{
  rtx ret = x;

  if (GET_CODE (x) != CONST)
    return x;

  x = XEXP (x, 0);

  if (CONST_INT_P (x)
      || GET_CODE (x) == CONST_FIXED
      || GET_CODE (x) == CONST_DOUBLE
      || GET_CODE (x) == LABEL_REF)
    ret = x;

  return ret;
}
/* Convert X to MODE, which must be Pmode or ptr_mode, without emitting
   any rtl.  */

static rtx
convert_debug_memory_address (enum machine_mode mode, rtx x)
{
  enum machine_mode xmode = GET_MODE (x);

#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (mode == Pmode);
  gcc_assert (xmode == mode || xmode == VOIDmode);
#else
  gcc_assert (mode == Pmode || mode == ptr_mode);

  if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
    return x;

  if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
    x = simplify_gen_subreg (mode, x, xmode,
                             subreg_lowpart_offset
                             (mode, xmode));
  else if (POINTERS_EXTEND_UNSIGNED > 0)
    x = gen_rtx_ZERO_EXTEND (mode, x);
  else if (!POINTERS_EXTEND_UNSIGNED)
    x = gen_rtx_SIGN_EXTEND (mode, x);
  else
    gcc_unreachable ();
#endif /* POINTERS_EXTEND_UNSIGNED */

  return x;
}
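/* For illustration (assumed target parameters, not from the original
   sources): on a target where Pmode is DImode, ptr_mode is SImode and
   POINTERS_EXTEND_UNSIGNED is positive, converting an SImode address X
   to Pmode yields (zero_extend:DI X), while the narrowing direction is
   handled by a lowpart subreg.  No insns are emitted either way.  */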
/* Return an RTX equivalent to the value of the tree expression EXP.  */

static rtx
expand_debug_expr (tree exp)
{
  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_expression:
      switch (TREE_CODE (exp))
        {
        case COND_EXPR:
          goto ternary;

        case TRUTH_ANDIF_EXPR:
        case TRUTH_ORIF_EXPR:
        case TRUTH_AND_EXPR:
        case TRUTH_OR_EXPR:
        case TRUTH_XOR_EXPR:
          goto binary;

        case TRUTH_NOT_EXPR:
          goto unary;

        default:
          break;
        }
      break;

    ternary:
      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
      if (!op2)
        return NULL_RTX;
      /* Fall through.  */

    binary:
    case tcc_binary:
    case tcc_comparison:
      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
      if (!op1)
        return NULL_RTX;
      /* Fall through.  */

    unary:
    case tcc_unary:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
        return NULL_RTX;
      break;

    case tcc_type:
    case tcc_statement:
      gcc_unreachable ();

    case tcc_constant:
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_reference:
    case tcc_vl_exp:
      break;
    }

  switch (TREE_CODE (exp))
    {
    case STRING_CST:
      if (!lookup_constant_def (exp))
        {
          if (strlen (TREE_STRING_POINTER (exp)) + 1
              != (size_t) TREE_STRING_LENGTH (exp))
            return NULL_RTX;
          op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
          op0 = gen_rtx_MEM (BLKmode, op0);
          set_mem_attributes (op0, exp, 0);
          return op0;
        }
      /* Fall through...  */

    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
      return op0;

    case COMPLEX_CST:
      gcc_assert (COMPLEX_MODE_P (mode));
      op0 = expand_debug_expr (TREE_REALPART (exp));
      op0 = wrap_constant (GET_MODE_INNER (mode), op0);
      op1 = expand_debug_expr (TREE_IMAGPART (exp));
      op1 = wrap_constant (GET_MODE_INNER (mode), op1);
      return gen_rtx_CONCAT (mode, op0, op1);
    case DEBUG_EXPR_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      if (op0)
        return op0;

      op0 = gen_rtx_DEBUG_EXPR (mode);
      DEBUG_EXPR_TREE_DECL (op0) = exp;
      SET_DECL_RTL (exp, op0);

      return op0;

    case VAR_DECL:
    case PARM_DECL:
    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case RESULT_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      /* This decl was probably optimized away.  */
      if (!op0)
        {
          if (TREE_CODE (exp) != VAR_DECL
              || DECL_EXTERNAL (exp)
              || !TREE_STATIC (exp)
              || !DECL_NAME (exp)
              || DECL_HARD_REGISTER (exp))
            return NULL;

          op0 = DECL_RTL (exp);
          SET_DECL_RTL (exp, NULL);
          if (!MEM_P (op0)
              || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
              || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
            return NULL;
        }
      else
        op0 = copy_rtx (op0);

      if (GET_MODE (op0) == BLKmode)
        {
          gcc_assert (MEM_P (op0));
          op0 = adjust_address_nv (op0, mode, 0);
          return op0;
        }

      /* Fall through.  */

    adjust_mode:
    case PAREN_EXPR:
    CASE_CONVERT:
      {
        enum machine_mode inner_mode = GET_MODE (op0);

        if (mode == inner_mode)
          return op0;

        if (inner_mode == VOIDmode)
          {
            inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
            if (mode == inner_mode)
              return op0;
          }

        if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
          {
            if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
              op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
            else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
              op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
            else
              op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
          }
        else if (FLOAT_MODE_P (mode))
          {
            if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
              op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
            else
              op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
          }
        else if (FLOAT_MODE_P (inner_mode))
          {
            if (unsignedp)
              op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
            else
              op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
          }
        else if (CONSTANT_P (op0)
                 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
          op0 = simplify_gen_subreg (mode, op0, inner_mode,
                                     subreg_lowpart_offset (mode,
                                                            inner_mode));
        else if (unsignedp)
          op0 = gen_rtx_ZERO_EXTEND (mode, op0);
        else
          op0 = gen_rtx_SIGN_EXTEND (mode, op0);

        return op0;
      }
    case INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case MISALIGNED_INDIRECT_REF:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
        return NULL;

      gcc_assert (GET_MODE (op0) == Pmode
                  || GET_MODE (op0) == ptr_mode
                  || GET_CODE (op0) == CONST_INT
                  || GET_CODE (op0) == CONST_DOUBLE);

      if (TREE_CODE (exp) == ALIGN_INDIRECT_REF)
        {
          int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp));
          op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
        }

      op0 = gen_rtx_MEM (mode, op0);

      set_mem_attributes (op0, exp, 0);

      return op0;

    case TARGET_MEM_REF:
      if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp)))
        return NULL;

      op0 = expand_debug_expr
            (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)),
                                exp));
      if (!op0)
        return NULL;

      gcc_assert (GET_MODE (op0) == Pmode
                  || GET_MODE (op0) == ptr_mode
                  || GET_CODE (op0) == CONST_INT
                  || GET_CODE (op0) == CONST_DOUBLE);

      op0 = gen_rtx_MEM (mode, op0);

      set_mem_attributes (op0, exp, 0);

      return op0;
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
        enum machine_mode mode1;
        HOST_WIDE_INT bitsize, bitpos;
        tree offset;
        int volatilep = 0;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep, false);
        rtx orig_op0;

        if (bitsize == 0)
          return NULL;

        orig_op0 = op0 = expand_debug_expr (tem);

        if (!op0)
          return NULL;

        if (offset)
          {
            enum machine_mode addrmode, offmode;

            gcc_assert (MEM_P (op0));

            op0 = XEXP (op0, 0);
            addrmode = GET_MODE (op0);
            if (addrmode == VOIDmode)
              addrmode = Pmode;

            op1 = expand_debug_expr (offset);
            if (!op1)
              return NULL;

            offmode = GET_MODE (op1);
            if (offmode == VOIDmode)
              offmode = TYPE_MODE (TREE_TYPE (offset));

            if (addrmode != offmode)
              op1 = simplify_gen_subreg (addrmode, op1, offmode,
                                         subreg_lowpart_offset (addrmode,
                                                                offmode));

            /* Don't use offset_address here, we don't need a
               recognizable address, and we don't want to generate
               code.  */
            op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
          }

        if (MEM_P (op0))
          {
            if (mode1 == VOIDmode)
              /* Bitfield.  */
              mode1 = smallest_mode_for_size (bitsize, MODE_INT);
            if (bitpos >= BITS_PER_UNIT)
              {
                op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
                bitpos %= BITS_PER_UNIT;
              }
            else if (bitpos < 0)
              {
                HOST_WIDE_INT units
                  = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
                op0 = adjust_address_nv (op0, mode1, units);
                bitpos += units * BITS_PER_UNIT;
              }
            else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
              op0 = adjust_address_nv (op0, mode, 0);
            else if (GET_MODE (op0) != mode1)
              op0 = adjust_address_nv (op0, mode1, 0);
            else
              op0 = copy_rtx (op0);
            if (op0 == orig_op0)
              op0 = shallow_copy_rtx (op0);
            set_mem_attributes (op0, exp, 0);
          }

        if (bitpos == 0 && mode == GET_MODE (op0))
          return op0;

        if ((bitpos % BITS_PER_UNIT) == 0
            && bitsize == GET_MODE_BITSIZE (mode1))
          {
            enum machine_mode opmode = GET_MODE (op0);

            gcc_assert (opmode != BLKmode);

            if (opmode == VOIDmode)
              opmode = mode1;

            /* This condition may hold if we're expanding the address
               right past the end of an array that turned out not to
               be addressable (i.e., the address was only computed in
               debug stmts).  The gen_subreg below would rightfully
               crash, and the address doesn't really exist, so just
               drop it.  */
            if (bitpos >= GET_MODE_BITSIZE (opmode))
              return NULL;

            return simplify_gen_subreg (mode, op0, opmode,
                                        bitpos / BITS_PER_UNIT);
          }

        return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
                                     && TYPE_UNSIGNED (TREE_TYPE (exp))
                                     ? SIGN_EXTRACT
                                     : ZERO_EXTRACT, mode,
                                     GET_MODE (op0) != VOIDmode
                                     ? GET_MODE (op0) : mode1,
                                     op0, GEN_INT (bitsize), GEN_INT (bitpos));
      }
    case ABS_EXPR:
      return gen_rtx_ABS (mode, op0);

    case NEGATE_EXPR:
      return gen_rtx_NEG (mode, op0);

    case BIT_NOT_EXPR:
      return gen_rtx_NOT (mode, op0);

    case FLOAT_EXPR:
      if (unsignedp)
        return gen_rtx_UNSIGNED_FLOAT (mode, op0);
      else
        return gen_rtx_FLOAT (mode, op0);

    case FIX_TRUNC_EXPR:
      if (unsignedp)
        return gen_rtx_UNSIGNED_FIX (mode, op0);
      else
        return gen_rtx_FIX (mode, op0);

    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      return gen_rtx_PLUS (mode, op0, op1);

    case MINUS_EXPR:
      return gen_rtx_MINUS (mode, op0, op1);

    case MULT_EXPR:
      return gen_rtx_MULT (mode, op0, op1);

    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (unsignedp)
        return gen_rtx_UDIV (mode, op0, op1);
      else
        return gen_rtx_DIV (mode, op0, op1);

    case TRUNC_MOD_EXPR:
      if (unsignedp)
        return gen_rtx_UMOD (mode, op0, op1);
      else
        return gen_rtx_MOD (mode, op0, op1);
    case FLOOR_DIV_EXPR:
      if (unsignedp)
        return gen_rtx_UDIV (mode, op0, op1);
      else
        {
          rtx div = gen_rtx_DIV (mode, op0, op1);
          rtx mod = gen_rtx_MOD (mode, op0, op1);
          rtx adj = floor_sdiv_adjust (mode, mod, op1);
          return gen_rtx_PLUS (mode, div, adj);
        }

    case FLOOR_MOD_EXPR:
      if (unsignedp)
        return gen_rtx_UMOD (mode, op0, op1);
      else
        {
          rtx mod = gen_rtx_MOD (mode, op0, op1);
          rtx adj = floor_sdiv_adjust (mode, mod, op1);
          adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
          return gen_rtx_PLUS (mode, mod, adj);
        }

    case CEIL_DIV_EXPR:
      if (unsignedp)
        {
          rtx div = gen_rtx_UDIV (mode, op0, op1);
          rtx mod = gen_rtx_UMOD (mode, op0, op1);
          rtx adj = ceil_udiv_adjust (mode, mod, op1);
          return gen_rtx_PLUS (mode, div, adj);
        }
      else
        {
          rtx div = gen_rtx_DIV (mode, op0, op1);
          rtx mod = gen_rtx_MOD (mode, op0, op1);
          rtx adj = ceil_sdiv_adjust (mode, mod, op1);
          return gen_rtx_PLUS (mode, div, adj);
        }

    case CEIL_MOD_EXPR:
      if (unsignedp)
        {
          rtx mod = gen_rtx_UMOD (mode, op0, op1);
          rtx adj = ceil_udiv_adjust (mode, mod, op1);
          adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
          return gen_rtx_PLUS (mode, mod, adj);
        }
      else
        {
          rtx mod = gen_rtx_MOD (mode, op0, op1);
          rtx adj = ceil_sdiv_adjust (mode, mod, op1);
          adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
          return gen_rtx_PLUS (mode, mod, adj);
        }

    case ROUND_DIV_EXPR:
      if (unsignedp)
        {
          rtx div = gen_rtx_UDIV (mode, op0, op1);
          rtx mod = gen_rtx_UMOD (mode, op0, op1);
          rtx adj = round_udiv_adjust (mode, mod, op1);
          return gen_rtx_PLUS (mode, div, adj);
        }
      else
        {
          rtx div = gen_rtx_DIV (mode, op0, op1);
          rtx mod = gen_rtx_MOD (mode, op0, op1);
          rtx adj = round_sdiv_adjust (mode, mod, op1);
          return gen_rtx_PLUS (mode, div, adj);
        }

    case ROUND_MOD_EXPR:
      if (unsignedp)
        {
          rtx mod = gen_rtx_UMOD (mode, op0, op1);
          rtx adj = round_udiv_adjust (mode, mod, op1);
          adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
          return gen_rtx_PLUS (mode, mod, adj);
        }
      else
        {
          rtx mod = gen_rtx_MOD (mode, op0, op1);
          rtx adj = round_sdiv_adjust (mode, mod, op1);
          adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
          return gen_rtx_PLUS (mode, mod, adj);
        }
    case LSHIFT_EXPR:
      return gen_rtx_ASHIFT (mode, op0, op1);

    case RSHIFT_EXPR:
      if (unsignedp)
        return gen_rtx_LSHIFTRT (mode, op0, op1);
      else
        return gen_rtx_ASHIFTRT (mode, op0, op1);

    case LROTATE_EXPR:
      return gen_rtx_ROTATE (mode, op0, op1);

    case RROTATE_EXPR:
      return gen_rtx_ROTATERT (mode, op0, op1);

    case MIN_EXPR:
      if (unsignedp)
        return gen_rtx_UMIN (mode, op0, op1);
      else
        return gen_rtx_SMIN (mode, op0, op1);

    case MAX_EXPR:
      if (unsignedp)
        return gen_rtx_UMAX (mode, op0, op1);
      else
        return gen_rtx_SMAX (mode, op0, op1);

    case BIT_AND_EXPR:
    case TRUTH_AND_EXPR:
      return gen_rtx_AND (mode, op0, op1);

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      return gen_rtx_IOR (mode, op0, op1);

    case BIT_XOR_EXPR:
    case TRUTH_XOR_EXPR:
      return gen_rtx_XOR (mode, op0, op1);

    case TRUTH_ANDIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);

    case TRUTH_ORIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);

    case TRUTH_NOT_EXPR:
      return gen_rtx_EQ (mode, op0, const0_rtx);

    case LT_EXPR:
      if (unsignedp)
        return gen_rtx_LTU (mode, op0, op1);
      else
        return gen_rtx_LT (mode, op0, op1);

    case LE_EXPR:
      if (unsignedp)
        return gen_rtx_LEU (mode, op0, op1);
      else
        return gen_rtx_LE (mode, op0, op1);

    case GT_EXPR:
      if (unsignedp)
        return gen_rtx_GTU (mode, op0, op1);
      else
        return gen_rtx_GT (mode, op0, op1);

    case GE_EXPR:
      if (unsignedp)
        return gen_rtx_GEU (mode, op0, op1);
      else
        return gen_rtx_GE (mode, op0, op1);

    case EQ_EXPR:
      return gen_rtx_EQ (mode, op0, op1);

    case NE_EXPR:
      return gen_rtx_NE (mode, op0, op1);

    case UNORDERED_EXPR:
      return gen_rtx_UNORDERED (mode, op0, op1);

    case ORDERED_EXPR:
      return gen_rtx_ORDERED (mode, op0, op1);

    case UNLT_EXPR:
      return gen_rtx_UNLT (mode, op0, op1);

    case UNLE_EXPR:
      return gen_rtx_UNLE (mode, op0, op1);

    case UNGT_EXPR:
      return gen_rtx_UNGT (mode, op0, op1);

    case UNGE_EXPR:
      return gen_rtx_UNGE (mode, op0, op1);

    case UNEQ_EXPR:
      return gen_rtx_UNEQ (mode, op0, op1);

    case LTGT_EXPR:
      return gen_rtx_LTGT (mode, op0, op1);

    case COND_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
    case COMPLEX_EXPR:
      gcc_assert (COMPLEX_MODE_P (mode));
      if (GET_MODE (op0) == VOIDmode)
        op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
      if (GET_MODE (op1) == VOIDmode)
        op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
      return gen_rtx_CONCAT (mode, op0, op1);

    case CONJ_EXPR:
      if (GET_CODE (op0) == CONCAT)
        return gen_rtx_CONCAT (mode, XEXP (op0, 0),
                               gen_rtx_NEG (GET_MODE_INNER (mode),
                                            XEXP (op0, 1)));
      else
        {
          enum machine_mode imode = GET_MODE_INNER (mode);
          rtx re, im;

          if (MEM_P (op0))
            {
              re = adjust_address_nv (op0, imode, 0);
              im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
            }
          else
            {
              enum machine_mode ifmode = int_mode_for_mode (mode);
              enum machine_mode ihmode = int_mode_for_mode (imode);
              rtx halfsize;
              if (ifmode == BLKmode || ihmode == BLKmode)
                return NULL;
              halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
              re = op0;
              if (mode != ifmode)
                re = gen_rtx_SUBREG (ifmode, re, 0);
              re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
              if (imode != ihmode)
                re = gen_rtx_SUBREG (imode, re, 0);
              im = copy_rtx (op0);
              if (mode != ifmode)
                im = gen_rtx_SUBREG (ifmode, im, 0);
              im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
              if (imode != ihmode)
                im = gen_rtx_SUBREG (imode, im, 0);
            }
          im = gen_rtx_NEG (imode, im);
          return gen_rtx_CONCAT (mode, re, im);
        }

    case ADDR_EXPR:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0 || !MEM_P (op0))
        return NULL;

      op0 = convert_debug_memory_address (mode, XEXP (op0, 0));

      return op0;
    case VECTOR_CST:
      exp = build_constructor_from_list (TREE_TYPE (exp),
                                         TREE_VECTOR_CST_ELTS (exp));
      /* Fall through.  */

    case CONSTRUCTOR:
      if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
        {
          unsigned i;
          tree val;

          op0 = gen_rtx_CONCATN
            (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));

          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
            {
              op1 = expand_debug_expr (val);
              if (!op1)
                return NULL;
              XVECEXP (op0, 0, i) = op1;
            }

          if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
            {
              op1 = expand_debug_expr
                (fold_convert (TREE_TYPE (TREE_TYPE (exp)),
                               integer_zero_node));
              if (!op1)
                return NULL;
              for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
                XVECEXP (op0, 0, i) = op1;
            }

          return op0;
        }
      else
        goto flag_unsupported;

    case CALL_EXPR:
      /* ??? Maybe handle some builtins?  */
      return NULL;

    case SSA_NAME:
      {
        int part = var_to_partition (SA.map, exp);

        if (part == NO_PARTITION)
          return NULL;

        gcc_assert (part >= 0 && (unsigned) part < SA.map->num_partitions);

        op0 = SA.partition_to_pseudo[part];
        goto adjust_mode;
      }

    default:
    flag_unsupported:
#ifdef ENABLE_CHECKING
      debug_tree (exp);
      gcc_unreachable ();
#else
      return NULL;
#endif
    }
}
/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

static void
expand_debug_locations (void)
{
  rtx insn;
  rtx last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though they don't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_INSN_P (insn))
      {
        tree value = (tree) INSN_VAR_LOCATION_LOC (insn);
        rtx val;
        enum machine_mode mode;

        if (value == NULL_TREE)
          val = NULL_RTX;
        else
          {
            val = expand_debug_expr (value);
            gcc_assert (last == get_last_insn ());
          }

        if (!val)
          val = gen_rtx_UNKNOWN_VAR_LOC ();
        else
          {
            mode = GET_MODE (INSN_VAR_LOCATION (insn));

            gcc_assert (mode == GET_MODE (val)
                        || (GET_MODE (val) == VOIDmode
                            && (CONST_INT_P (val)
                                || GET_CODE (val) == CONST_FIXED
                                || GET_CODE (val) == CONST_DOUBLE
                                || GET_CODE (val) == LABEL_REF)));
          }

        INSN_VAR_LOCATION_LOC (insn) = val;
      }

  flag_strict_aliasing = save_strict_alias;
}
/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb)
{
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;
  void **elt;

  if (dump_file)
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
             bb->index);

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  stmts = bb_seq (bb);
  bb->il.gimple = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    {
      gimple ret_stmt = gsi_stmt (gsi);

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);

      if (bb->next_bb == EXIT_BLOCK_PTR
          && !gimple_return_retval (ret_stmt))
        {
          gsi_remove (&gsi, false);
          single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
        }
    }

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        stmt = NULL;
    }

  elt = pointer_map_contains (lab_rtx_for_bb, bb);

  if (stmt || elt)
    {
      last = get_last_insn ();

      if (stmt)
        {
          expand_gimple_stmt (stmt);
          gsi_next (&gsi);
        }

      if (elt)
        emit_label ((rtx) *elt);

      /* Java emits line number notes in the top of labels.
         ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
        BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    {
      basic_block new_bb;

      stmt = gsi_stmt (gsi);

      /* Expand this statement, then evaluate the resulting RTL and
         fixup the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          new_bb = expand_gimple_cond (bb, stmt);
          if (new_bb)
            return new_bb;
        }
      else if (gimple_debug_bind_p (stmt))
        {
          location_t sloc = get_curr_insn_source_location ();
          tree sblock = get_curr_insn_block ();
          gimple_stmt_iterator nsi = gsi;

          for (;;)
            {
              tree var = gimple_debug_bind_get_var (stmt);
              tree value;
              rtx val;
              enum machine_mode mode;

              if (gimple_debug_bind_has_value_p (stmt))
                value = gimple_debug_bind_get_value (stmt);
              else
                value = NULL_TREE;

              last = get_last_insn ();

              set_curr_insn_source_location (gimple_location (stmt));
              set_curr_insn_block (gimple_block (stmt));

              if (DECL_P (var))
                mode = DECL_MODE (var);
              else
                mode = TYPE_MODE (TREE_TYPE (var));

              val = gen_rtx_VAR_LOCATION
                (mode, var, (rtx) value, VAR_INIT_STATUS_INITIALIZED);

              val = emit_debug_insn (val);

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  /* We can't dump the insn with a TREE where an RTX
                     is expected.  */
                  INSN_VAR_LOCATION_LOC (val) = const0_rtx;
                  maybe_dump_rtl_for_gimple_stmt (stmt, last);
                  INSN_VAR_LOCATION_LOC (val) = (rtx) value;
                }

              gsi = nsi;
              gsi_next (&nsi);
              if (gsi_end_p (nsi))
                break;
              stmt = gsi_stmt (nsi);
              if (!gimple_debug_bind_p (stmt))
                break;
            }

          set_curr_insn_source_location (sloc);
          set_curr_insn_block (sblock);
        }
      else
        {
          if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
            {
              bool can_fallthru;
              new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
              if (new_bb)
                {
                  if (can_fallthru)
                    bb = new_bb;
                  else
                    return new_bb;
                }
            }
          else
            {
              def_operand_p def_p;
              def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

              if (def_p != NULL)
                {
                  /* Ignore this stmt if it is in the list of
                     replaceable expressions.  */
                  if (SA.values
                      && bitmap_bit_p (SA.values,
                                       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
                    continue;
                }
              last = expand_gimple_stmt (stmt);
              maybe_dump_rtl_for_gimple_stmt (stmt, last);
            }
        }
    }

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->goto_locus && e->goto_block)
        {
          set_curr_insn_source_location (e->goto_locus);
          set_curr_insn_block (e->goto_block);
          e->goto_locus = curr_insn_locator ();
        }
      e->goto_block = NULL;
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
        {
          emit_jump (label_rtx_for_bb (e->dest));
          e->flags &= ~EDGE_FALLTHRU;
        }
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}
/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need
     a jump, otherwise we have to jump into the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
                                   get_last_insn (),
                                   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
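/* For illustration (not part of the original sources): in
   "void f (void) { int a; { int b; { int c; } } }" the outermost BLOCK
   gets number 0, the block declaring b gets 1 and the block declaring c
   gets 2; sibling blocks reached through BLOCK_CHAIN all share the same
   depth.  */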
/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR);

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  set_curr_insn_block (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we may move the end of the last
     basic block.  */
  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
                                   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
        redirect_edge_succ (e, exit_block);
      else
        ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
        e->count -= e2->count;
        exit_block->count -= e2->count;
        exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
                                   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t, 2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || CONVERT_EXPR_P (t))
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t)
              && DECL_MODE (t) != BLKmode)
            TREE_ADDRESSABLE (t) = 1;
        }

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  The local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

  /* Update crtl->stack_alignment_estimated and use it later to align
     the stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions, since the callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (flag_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* The target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up the REG_EQUIV note if DRAP is
         needed.  */
      fixup_tail_calls ();
    }
}
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate the CFG during
   the expansion.  */

static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  unsigned i;

  rewrite_out_of_ssa (&SA);
  SA.partition_to_pseudo = (rtx *) xcalloc (SA.map->num_partitions,
                                            sizeof (rtx));

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
        set_curr_insn_source_location
          (DECL_SOURCE_LOCATION (current_function_decl));
      else
        set_curr_insn_source_location (cfun->function_start_locus);
    }
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

  /* Make sure the first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = STACK_BOUNDARY;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "not protecting local variables: variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "not protecting function: no buffer at least %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
          && !SA.partition_to_pseudo[i])
        SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
         was a decl marked in REG_ATTR or MEM_ATTR.  We could use
         SET_DECL_RTL here making this available, but that would mean
         to select one of the potentially many RTLs for one DECL.  Instead
         of doing that we simply reset the MEM_EXPR of the RTL in question,
         then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
        {
          if (MEM_P (SA.partition_to_pseudo[i]))
            set_mem_expr (SA.partition_to_pseudo[i], NULL);
        }
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();
  /* Update the stack boundary if needed.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* Call update_stack_boundary here to update the incoming stack
         boundary before TARGET_FUNCTION_OK_FOR_SIBCALL is called.
         TARGET_FUNCTION_OK_FOR_SIBCALL needs to know the accurate
         incoming stack alignment to check if it is OK to perform
         sibcall optimization, since sibcall optimization will only
         align the outgoing stack to the incoming stack boundary.  */
      if (targetm.calls.update_stack_boundary)
        targetm.calls.update_stack_boundary ();

      /* The incoming stack frame has to be aligned at least at
         parm_stack_boundary.  */
      gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
    }

  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  finish_out_of_ssa (&SA);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            commit_one_edge_insertion (e);
          else
            ei_next (&ei);
        }
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  compact_blocks ();

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  return 0;
}
struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  gimple_expand_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg,/* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_ssa | PROP_trees,                /* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,                /* todo_flags_start */
  TODO_dump_func
    | TODO_ggc_collect                  /* todo_flags_finish */
 }
};