/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "tree-pass.h"
#include "regs.h" /* For reg_renumber.  */
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfgcleanup.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "tree-ssa-address.h"
#include "tree-chkp.h"

/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#define NAME__MAIN "__main"

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;

static rtx expand_debug_expr (tree);
static bool defer_stack_allocation (tree, bool);
static void record_alignment_for_reg_var (unsigned int);

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt),
		gimple_assign_rhs2 (stmt),
		gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt),
		gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
		TREE_TYPE (gimple_assign_lhs (stmt)),
		gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
	   && gimple_location (stmt) != EXPR_LOCATION (t))
	  || (gimple_block (stmt)
	      && currently_expanding_to_rtl
	      && EXPR_P (t)))
	t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce the ambiguity
   arising from the same user variable being in multiple partitions (this is
   less likely for compiler-introduced temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}
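/* Illustrative sketch (hypothetical decls, not exercised by GCC itself):
   given a user decl U with DECL_IGNORED_P clear and a compiler temp T with
   DECL_IGNORED_P set, both leader_merge (U, T) and leader_merge (T, U)
   return T, so the ignored decl wins regardless of argument order, while
   leader_merge (U, U) trivially returns U.  */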
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */

static inline void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
		       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
		       || (use_register_for_decl (t)
			   ? (REG_P (x)
			      || (GET_CODE (x) == CONCAT
				  && (REG_P (XEXP (x, 0))
				      || SUBREG_P (XEXP (x, 0)))
				  && (REG_P (XEXP (x, 1))
				      || SUBREG_P (XEXP (x, 1))))
			      /* We need to accept PARALLELs for RESULT_DECLs
				 because of vector types with BLKmode returned
				 in multiple registers, but they are supposed
				 to be uncoalesced.  */
			      || (GET_CODE (x) == PARALLEL
				  && SSAVAR (t)
				  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
				  && (GET_MODE (x) == BLKmode
				      || !flag_tree_coalesce_vars)))
			   : (MEM_P (x) || x == pc_rtx
			      || (GET_CODE (x) == CONCAT
				  && MEM_P (XEXP (x, 0))
				  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
		       || (SSA_NAME_VAR (t)
			   && TREE_CODE (SSAVAR (t)) == RESULT_DECL
			   && (promote_ssa_mode (t, NULL) == BLKmode
			       || !flag_tree_coalesce_vars))
		       || !use_register_for_decl (t)
		       || GET_MODE (x) == promote_ssa_mode (t, NULL));

  if (x)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;

    retry:
      if (MEM_P (xm))
	cur = MEM_EXPR (xm);
      else if (REG_P (xm))
	cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
	{
	  gcc_assert (subreg_lowpart_p (xm));
	  xm = SUBREG_REG (xm);
	  goto retry;
	}
      else if (GET_CODE (xm) == CONCAT)
	{
	  xm = XEXP (xm, 0);
	  goto retry;
	}
      else if (GET_CODE (xm) == PARALLEL)
	{
	  xm = XVECEXP (xm, 0, 0);
	  gcc_assert (GET_CODE (xm) == EXPR_LIST);
	  xm = XEXP (xm, 0);
	  goto retry;
	}
      else if (xm == pc_rtx)
	skip = true;
      else
	gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
	{
	  if (MEM_P (x))
	    set_mem_attributes (x,
				next && TREE_CODE (next) == SSA_NAME
				? TREE_TYPE (next)
				: next, true);
	  else
	    set_reg_attrs_for_decl_rtl (next, x);
	}
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
	{
	  if (SA.partition_to_pseudo[part])
	    gcc_assert (SA.partition_to_pseudo[part] == x);
	  else if (x != pc_rtx)
	    SA.partition_to_pseudo[part] = x;
	}
      /* For the benefit of debug information at -O0 (where
	 vartracking doesn't run) record the place also in the base
	 DECL.  For PARMs and RESULTs, do so only when setting the
	 default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
	  && (VAR_P (SSA_NAME_VAR (t))
	      || SSA_NAME_IS_DEFAULT_DEF (t)))
	{
	  tree var = SSA_NAME_VAR (t);
	  /* If we don't yet have something recorded, just record it now.  */
	  if (!DECL_RTL_SET_P (var))
	    SET_DECL_RTL (var, x);
	  /* If we have it set already to "multiple places" don't
	     change this.  */
	  else if (DECL_RTL (var) == pc_rtx)
	    ;
	  /* If we have something recorded and it's not the same place
	     as we want to record now, we have multiple partitions for the
	     same base variable, with different places.  We can't just
	     randomly choose one, hence we have to say that we don't know.
	     This only happens with optimization, and there var-tracking
	     will figure out the right thing.  */
	  else if (DECL_RTL (var) != x)
	    SET_DECL_RTL (var, pc_rtx);
	}
    }
  else
    SET_DECL_RTL (t, x);
}

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
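/* For illustration (hypothetical indices, not real data): if the variables
   with indices A, B and C have been merged into one partition led by A,
   then stack_vars[A].representative == A, stack_vars[A].next == B,
   stack_vars[B].next == C and stack_vars[C].next == EOC, i.e. the members
   form a singly-linked list threaded through NEXT while each of them names
   A in REPRESENTATIVE.  */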
/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      SET_DECL_ALIGN (decl, align);
    }
  return align / BITS_PER_UNIT;
}

/* Align given offset BASE with ALIGN.  Truncate up if ALIGN_UP is true,
   round down otherwise.  Return truncated BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}
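/* Worked example (illustrative only): with BASE == 37 and ALIGN == 16,
   align_base (37, 16, true) computes (37 + 15) & -16 == 52 & ~15 == 48,
   while align_base (37, 16, false) computes 37 & -16 == 32.  The mask
   trick relies on ALIGN being a power of two, so that -ALIGN has the
   low log2 (ALIGN) bits clear.  */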
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
	= align_base (frame_offset - frame_phase - size,
		      align, false) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
	= align_base (frame_offset - frame_phase, align, true) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
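/* Worked example (illustrative only): on a target where the frame grows
   downward, with frame_phase == 0, frame_offset == -20, SIZE == 8 and
   ALIGN == 16, we compute align_base (-28, 16, false) == -32, so the
   allocation is placed at offset -32 and frame_offset becomes -32; the
   next request then packs naturally below it.  */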
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
    : DECL_SIZE_UNIT (decl);
  v->size = tree_to_uhwi (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (decl);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
	bitmap_set_bit (active, *v);
    }
  return false;
}

/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
	{
	  size_t num = *v;
	  bitmap_iterator bi;
	  unsigned i;
	  gcc_assert (num < stack_vars_num);
	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
	    add_stack_var_conflict (num, i);
	}
    }
  return false;
}

/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  size_t *v;
	  /* Nested function lowering might introduce LHSs
	     that are COMPONENT_REFs.  */
	  if (TREE_CODE (lhs) != VAR_DECL)
	    continue;
	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
	      && (v = decl_to_stack_part->get (lhs)))
	    bitmap_clear_bit (work, *v);
	}
      else if (!is_gimple_debug (stmt))
	{
	  if (for_conflict
	      && visit == visit_op)
	    {
	      /* If this is the first real instruction in this BB we need
		 to add conflicts for everything live at this point now.
		 Unlike classical liveness for named objects we can't
		 rely on seeing a def/use of the names we're interested in.
		 There might merely be indirect loads/stores.  We'd not add any
		 conflicts for such partitions.  */
	      bitmap_iterator bi;
	      unsigned i;
	      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
		{
		  struct stack_var *a = &stack_vars[i];
		  if (!a->conflicts)
		    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
		  bitmap_ior_into (a->conflicts, work);
		}
	      visit = visit_conflict;
	    }
	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
	}
    }
}

/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case where we, for instance, moved an
     address-taken operation upward, without also moving a dereference to
     it upwards.  But it's conservatively correct as a variable can never
     hold values before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
	{
	  bitmap active;
	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
	  active = (bitmap)bb->aux;
	  add_scope_conflicts_1 (bb, work, false);
	  if (bitmap_ior_into (active, work))
	    changed = true;
	}
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return 1;
  if (aligna > alignb)
    return -1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
	return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
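/* Illustrative sketch of the resulting qsort order (hypothetical data):
   with MAX_SUPPORTED_STACK_ALIGNMENT == 128, a 64-byte object aligned to
   256 bits sorts before everything else ("large" alignment first); among
   the remaining "small"-alignment objects a 100-byte object precedes a
   16-byte one (size decreasing), equal-size objects are ordered by
   decreasing alignment, and IDs break the remaining ties.  */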
struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
typedef hash_map<size_t, bitmap, part_traits> part_hashmap;

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
			       part_hashmap *decls_to_partitions,
			       hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
	 visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
	 || !bitmap_bit_p (temp, i))
	&& (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
	  || stack_vars[i].next == EOC)
	continue;

      if (!decls_to_partitions)
	{
	  decls_to_partitions = new part_hashmap;
	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
	}

      /* Create an SSA_NAME that points to the partition for use
	 as base during alias-oracle queries on RTL for bases that
	 have been partitioned.  */
      if (var == NULL_TREE)
	var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
	 points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  tree decl = stack_vars[j].decl;
	  unsigned int uid = DECL_PT_UID (decl);
	  bitmap_set_bit (part, uid);
	  decls_to_partitions->put (uid, part);
	  cfun->gimple_df->decls_to_pointers->put (decl, name);
	  if (TREE_ADDRESSABLE (decl))
	    TREE_ADDRESSABLE (name) = 1;
	}

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      for (i = 1; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);
	  struct ptr_info_def *pi;

	  if (name
	      && POINTER_TYPE_P (TREE_TYPE (name))
	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
					   &visited, temp);
	}

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
				     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
	add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
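/* Illustrative sketch (hypothetical indices): if partition A currently
   forms the chain A -> X -> EOC and the singleton B is unioned in, the
   splice above yields A -> B -> X -> EOC with
   stack_vars[B].representative == A; B's conflicts are re-registered
   against A so conflict queries keep seeing the merged partition.  */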
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size in descending order.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	  }
	}
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      HOST_WIDE_INT isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
	 see a var that is not a partition representative, it must
	 have been merged earlier.  */
      if (stack_vars[i].representative != i)
	continue;

      for (sj = si + 1; sj < n; ++sj)
	{
	  size_t j = stack_vars_sorted[sj];
	  unsigned int jalign = stack_vars[j].alignb;
	  HOST_WIDE_INT jsize = stack_vars[j].size;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)
	    continue;

	  /* Do not mix objects of "small" (supported) alignment
	     and "large" (unsupported) alignment.  */
	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
	    continue;

	  /* For Address Sanitizer do not mix objects with different
	     sizes, as the shorter vars wouldn't be adequately protected.
	     Don't do that for "large" (unsupported) alignment objects,
	     those aren't protected anyway.  */
	  if ((asan_sanitize_stack_p ())
	      && isize != jsize
	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    continue;

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))
	    continue;

	  /* UNION the objects, placing J at OFFSET.  */
	  union_stack_vars (i, j);
	}
    }

  update_alias_info_with_stack_vars ();
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
	       " align %u\n", (unsigned long) i, stack_vars[i].size,
	       stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	}
      fputc ('\n', dump_file);
    }
}

/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
			 HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (Pmode, base, offset);
  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
		   ? TYPE_MODE (TREE_TYPE (decl))
		   : DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
	 If it is we generate stack slots only accidentally so it isn't as
	 important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
	offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
	align = base_align;

      /* One would think that we could assert that we're not decreasing
	 alignment here, but (at least) the i386 port does exactly this
	 via the MINIMUM_ALIGNMENT hook.  */

      SET_DECL_ALIGN (decl, align);
      DECL_USER_ALIGN (decl) = 0;
    }

  set_rtl (decl, x);
}
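/* Worked example (illustrative only): for OFFSET == 48, OFFSET & -OFFSET
   isolates the lowest set bit, i.e. 48 & -48 == 16, so the slot is known
   to be 16-byte aligned (128 bits after scaling by BITS_PER_UNIT)
   regardless of any stronger alignment BASE itself may have.  */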
struct stack_vars_data
{
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed, highest offset pairs come first.  */
  vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate is involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
	{
	  unsigned alignb;

	  i = stack_vars_sorted[si];
	  alignb = stack_vars[i].alignb;

	  /* All "large" alignment decls come before all "small" alignment
	     decls, but "large" alignment decls are not sorted based on
	     their alignment.  Increase large_align to track the largest
	     required alignment.  */
	  if ((alignb * BITS_PER_UNIT) > large_align)
	    large_align = alignb * BITS_PER_UNIT;

	  /* Stop when we get to the first decl with "small" alignment.  */
	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    break;

	  /* Skip variables that aren't partition representatives.  */
	  if (stack_vars[i].representative != i)
	    continue;

	  /* Skip variables that have already had rtl assigned.  See also
	     add_stack_var where we perpetrate this pc_rtx hack.  */
	  decl = stack_vars[i].decl;
	  if (TREE_CODE (decl) == SSA_NAME
	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
	      : DECL_RTL (decl) != pc_rtx)
	    continue;

	  large_size += alignb - 1;
	  large_size &= -(HOST_WIDE_INT)alignb;
	  large_size += stack_vars[i].size;
	}

      /* If there were any, allocate space.  */
      if (large_size > 0)
	large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
						   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
	  ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
	  : DECL_RTL (decl) != pc_rtx)
	continue;

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (i))
	continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	{
	  base = virtual_stack_vars_rtx;
	  if ((asan_sanitize_stack_p ())
	      && pred)
	    {
	      HOST_WIDE_INT prev_offset
		= align_base (frame_offset,
			      MAX (alignb, ASAN_RED_ZONE_SIZE),
			      !FRAME_GROWS_DOWNWARD);
	      tree repr_decl = NULL_TREE;
	      offset
		= alloc_stack_frame_space (stack_vars[i].size
					   + ASAN_RED_ZONE_SIZE,
					   MAX (alignb, ASAN_RED_ZONE_SIZE));

	      data->asan_vec.safe_push (prev_offset);
	      data->asan_vec.safe_push (offset + stack_vars[i].size);
	      /* Find best representative of the partition.
		 Prefer those with DECL_NAME, even better
		 satisfying asan_protect_stack_decl predicate.  */
	      for (j = i; j != EOC; j = stack_vars[j].next)
		if (asan_protect_stack_decl (stack_vars[j].decl)
		    && DECL_NAME (stack_vars[j].decl))
		  {
		    repr_decl = stack_vars[j].decl;
		    break;
		  }
		else if (repr_decl == NULL_TREE
			 && DECL_P (stack_vars[j].decl)
			 && DECL_NAME (stack_vars[j].decl))
		  repr_decl = stack_vars[j].decl;
	      if (repr_decl == NULL_TREE)
		repr_decl = stack_vars[i].decl;
	      data->asan_decl_vec.safe_push (repr_decl);
	      data->asan_alignb = MAX (data->asan_alignb, alignb);
	      if (data->asan_base == NULL)
		data->asan_base = gen_reg_rtx (Pmode);
	      base = data->asan_base;

	      if (!STRICT_ALIGNMENT)
		base_align = crtl->max_used_stack_slot_alignment;
	      else
		base_align = MAX (crtl->max_used_stack_slot_alignment,
				  GET_MODE_ALIGNMENT (SImode)
				  << ASAN_SHADOW_SHIFT);
	    }
	  else
	    {
	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
	      base_align = crtl->max_used_stack_slot_alignment;
	    }
	}
      else
	{
	  /* Large alignment is only processed in the last pass.  */
	  if (pred)
	    continue;
	  gcc_assert (large_base != NULL);

	  large_alloc += alignb - 1;
	  large_alloc &= -(HOST_WIDE_INT)alignb;
	  offset = large_alloc;
	  large_alloc += stack_vars[i].size;

	  base = large_base;
	  base_align = large_align;
	}

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  expand_one_stack_var_at (stack_vars[j].decl,
				   base, base_align,
				   offset);
	}
    }

  gcc_assert (large_alloc == large_size);
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
	set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* Record the RTL assignment X for the default def of PARM.  */

void
set_parm_rtl (tree parm, rtx x)
{
  gcc_assert (TREE_CODE (parm) == PARM_DECL
	      || TREE_CODE (parm) == RESULT_DECL);

  if (x && !MEM_P (x))
    {
      unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
					      TYPE_MODE (TREE_TYPE (parm)),
					      TYPE_ALIGN (TREE_TYPE (parm)));

      /* If the variable alignment is very large we'll dynamically
	 allocate it, which means that in-frame portion is just a
	 pointer.  ??? We've got a pseudo for sure here, do we
	 actually dynamically allocate its spilling area if needed?
	 ??? Isn't it a problem when POINTER_SIZE also exceeds
	 MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32?  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
	align = POINTER_SIZE;

      record_alignment_for_reg_var (align);
    }

  tree ssa = ssa_default_def (cfun, parm);
  if (!ssa)
    return set_rtl (parm, x);

  int part = var_to_partition (SA.map, ssa);
  gcc_assert (part != NO_PARTITION);

  bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
  gcc_assert (changed);

  set_rtl (ssa, x);
  gcc_assert (DECL_RTL (parm) == x);
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
      byte_align = TYPE_ALIGN_UNIT (type);
    }
  else
    {
      size = tree_to_uhwi (DECL_SIZE_UNIT (var));
      byte_align = align_local_variable (var);
    }

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
			   crtl->max_used_stack_slot_alignment, offset);
}

/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
	{
	  rtx x = SA.partition_to_pseudo[part];
	  gcc_assert (x);
	  gcc_assert (MEM_P (x));
	  return;
	}
    }

  return expand_one_stack_var_1 (var);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* Record the alignment requirements of some variable assigned to a
   pseudo register.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after the stack
	 realign decision is made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}
/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
					  TYPE_MODE (TREE_TYPE (var)),
					  TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = POINTER_SIZE;

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
	add_stack_var (var);
      else
	expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);
}

/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  if (!var)
    return;

  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  set_rtl (var, x);

  if (!REG_P (x))
    return;

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
	{
	  rtx x = SA.partition_to_pseudo[part];
	  gcc_assert (x);
	  gcc_assert (REG_P (x));
	  return;
	}
      gcc_unreachable ();
    }

  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  tree size_unit = TREE_CODE (var) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (var))
    : DECL_SIZE_UNIT (var);

  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = ((HOST_WIDE_INT) tree_to_uhwi (size_unit)
       < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || asan_sanitize_stack_p ())
    return true;

  unsigned int align = TREE_CODE (var) == SSA_NAME
    ? TYPE_ALIGN (TREE_TYPE (var))
    : DECL_ALIGN (var);

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  bool ignored = TREE_CODE (var) == SSA_NAME
    ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
    : DECL_IGNORED_P (var);

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && ignored && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.
*/

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      if (is_global_var (var))
	return 0;

      /* Because we don't know if VAR will be in register or on stack,
	 we conservatively assume it will be on stack even if VAR is
	 eventually put into register after RA pass.  For non-automatic
	 variables, which won't be on stack, we collect alignment of
	 type and ignore user specified alignment.  Similarly for
	 SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
	  || DECL_EXTERNAL (var)
	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
				   TYPE_MODE (TREE_TYPE (var)),
				   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
	   or variables which were assigned a stack slot already by
	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
	   changed from the offset chosen to it.  */
	align = crtl->stack_alignment_estimated;
      else
	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
	 it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
	align = POINTER_SIZE;
    }

  record_alignment_for_reg_var (align);

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
		  || (!DECL_EXTERNAL (var)
		      && !DECL_HAS_VALUE_EXPR_P (var)
		      && !TREE_STATIC (var)
		      && TREE_TYPE (var) != error_mark_node
		      && !DECL_HARD_REGISTER (var)
		      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
	expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
	{
	  expand_one_hard_reg_var (var);
	  if (!DECL_HARD_REGISTER (var))
	    /* Invalid register specification.  */
	    expand_one_error_var (var);
	}
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
	expand_one_register_var (origvar);
    }
  else if (! valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
	{
	  error ("size of variable %q+D is too large", var);
	  expand_one_error_var (var);
	}
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
	{
	  if (lookup_attribute ("naked",
				DECL_ATTRIBUTES (current_function_decl)))
	    error ("cannot allocate stack for variable %q+D, naked function.",
		   var);

	  expand_one_stack_var (origvar);
	}

      return tree_to_uhwi (DECL_SIZE_UNIT (var));
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
	&& ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
	    || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
	|| !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3,
  SPCT_FLAG_EXPLICIT = 4
};

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
	    len = max;
	  else
	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
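/* Worked example (illustrative only): for "char buf[64]" with the default
   ssp-buffer-size of 8, len (64) is not smaller than max (8), so the
   result is SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY (1 | 4 == 5); a
   struct containing such an array would add SPCT_HAS_AGGREGATE (8) on top
   via the recursion over its fields.  */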
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG
      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
	  && lookup_attribute ("stack_protect",
			       DECL_ATTRIBUTES (current_function_decl))))
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}

/* And helper function that checks for asan phase (with stack protector
   it is phase 3).  This is used as callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
	return true;
      i = stack_vars[i].next;
    }
  return false;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
	if (ph_i != phase[j])
	  add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			   VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, size_t>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}

/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
	stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return size;
}

/* Helper routine to check if a record or union contains an array field.  */

static int
record_or_union_type_has_array_p (const_tree tree_type)
{
  tree fields = TYPE_FIELDS (tree_type);
  tree f;

  for (f = fields; f; f = DECL_CHAIN (f))
    if (TREE_CODE (f) == FIELD_DECL)
      {
	tree field_type = TREE_TYPE (f);
	if (RECORD_OR_UNION_TYPE_P (field_type)
	    && record_or_union_type_has_array_p (field_type))
	  return 1;
	if (TREE_CODE (field_type) == ARRAY_TYPE)
	  return 1;
      }
  return 0;
}

/* Check if the current function has local referenced variables that
   have their addresses taken, contain an array, or are arrays.  */

static bool
stack_protect_decl_p ()
{
  unsigned i;
  tree var;

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    if (!is_global_var (var))
      {
	tree var_type = TREE_TYPE (var);
	if (TREE_CODE (var) == VAR_DECL
	    && (TREE_CODE (var_type) == ARRAY_TYPE
		|| TREE_ADDRESSABLE (var)
		|| (RECORD_OR_UNION_TYPE_P (var_type)
		    && record_or_union_type_has_array_p (var_type))))
	  return true;
      }
  return false;
}

/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	 !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple *stmt = gsi_stmt (gsi);
	/* This assumes that calls to internal-only functions never
	   use a return slot.  */
	if (is_gimple_call (stmt)
	    && !gimple_call_internal_p (stmt)
	    && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
				  gimple_call_fndecl (stmt)))
	  return true;
      }
  return false;
}

/* Expand all variables used in the function.  */

static rtx_insn *
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  vec<tree> maybe_local_decls = vNULL;
  rtx_insn *var_end_seq = NULL;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
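  /* Worked example (illustrative only): if PREFERRED_STACK_BOUNDARY is
     128 bits, align is 16 bytes; with a hypothetical STARTING_FRAME_OFFSET
     of 8 we get off == 8 and frame_phase == 16 - 8 == 8, which satisfies
     the invariant (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY
     == 0 documented above for frame_phase.  */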
2018 /* Set TREE_USED on all variables in the local_decls. */
2019 FOR_EACH_LOCAL_DECL (cfun, i, var)
2020 TREE_USED (var) = 1;
2021 /* Clear TREE_USED on all variables associated with a block scope. */
2022 clear_tree_used (DECL_INITIAL (current_function_decl));
2024 init_vars_expansion ();
2026 if (targetm.use_pseudo_pic_reg ())
2027 pic_offset_table_rtx = gen_reg_rtx (Pmode);
2029 for (i = 0; i < SA.map->num_partitions; i++)
2031 if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
2034 tree var = partition_to_var (SA.map, i);
2036 gcc_assert (!virtual_operand_p (var));
2038 expand_one_ssa_partition (var);
2041 if (flag_stack_protect == SPCT_FLAG_STRONG)
2042 gen_stack_protect_signal
2043 = stack_protect_decl_p () || stack_protect_return_slot_p ();
2045 /* At this point all variables on the local_decls with TREE_USED
2046 set are not associated with any block scope. Lay them out. */
2048 len = vec_safe_length (cfun->local_decls);
2049 FOR_EACH_LOCAL_DECL (cfun, i, var)
2051 bool expand_now = false;
2053 /* Expanded above already. */
2054 if (is_gimple_reg (var))
2056 TREE_USED (var) = 0;
2059 /* We didn't set a block for static or extern because it's hard
2060 to tell the difference between a global variable (re)declared
2061 in a local scope, and one that's really declared there to
2062 begin with. And it doesn't really matter much, since we're
2063 not giving them stack space. Expand them now. */
2064 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
2067 /* Expand variables not associated with any block now. Those created by
2068 the optimizers could be live anywhere in the function. Those that
2069 could possibly have been scoped originally and detached from their
2070 block will have their allocation deferred so we coalesce them with
2071 others when optimization is enabled. */
2072 else if (TREE_USED (var))
2075 /* Finally, mark all variables on the list as used. We'll use
2076 this in a moment when we expand those associated with scopes. */
2077 TREE_USED (var) = 1;
2080 expand_one_var (var, true, true);
2083 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
2085 rtx rtl = DECL_RTL_IF_SET (var);
2087 /* Keep artificial non-ignored vars in cfun->local_decls
2088 chain until instantiate_decls. */
2089 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2090 add_local_decl (cfun, var);
2091 else if (rtl == NULL_RTX)
2092 /* If rtl isn't set yet, which can happen e.g. with
2093 -fstack-protector, retry before returning from this
2095 maybe_local_decls.safe_push (var);
2099 /* We duplicated some of the decls in CFUN->LOCAL_DECLS.
2101 +-----------------+-----------------+
2102 | ...processed... | ...duplicates...|
2103 +-----------------+-----------------+
2105 +-- LEN points here.
2107 We just want the duplicates, as those are the artificial
2108 non-ignored vars that we want to keep until instantiate_decls.
2109 Move them down and truncate the array. */
2110 if (!vec_safe_is_empty (cfun->local_decls))
2111 cfun->local_decls->block_remove (0, len);
2113 /* At this point, all variables within the block tree with TREE_USED
2114 set are actually used by the optimized function. Lay them out. */
2115 expand_used_vars_for_block (outer_block, true);
2117 if (stack_vars_num > 0)
2119 add_scope_conflicts ();
2121 /* If stack protection is enabled, we don't share space between
2122 vulnerable data and non-vulnerable data. */
2123 if (flag_stack_protect != 0
2124 && (flag_stack_protect != SPCT_FLAG_EXPLICIT
2125 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2126 && lookup_attribute ("stack_protect",
2127 DECL_ATTRIBUTES (current_function_decl)))))
2128 add_stack_protection_conflicts ();
2130 /* Now that we have collected all stack variables, and have computed a
2131 minimal interference graph, attempt to save some stack space. */
2132 partition_stack_vars ();
2134 dump_stack_var_partition ();
2137 switch (flag_stack_protect)
2140 create_stack_guard ();
2143 case SPCT_FLAG_STRONG:
2144 if (gen_stack_protect_signal
2145 || cfun->calls_alloca || has_protected_decls
2146 || lookup_attribute ("stack_protect",
2147 DECL_ATTRIBUTES (current_function_decl)))
2148 create_stack_guard ();
2151 case SPCT_FLAG_DEFAULT:
2152 if (cfun->calls_alloca || has_protected_decls
2153 || lookup_attribute ("stack_protect",
2154 DECL_ATTRIBUTES (current_function_decl)))
2155 create_stack_guard ();
2158 case SPCT_FLAG_EXPLICIT:
2159 if (lookup_attribute ("stack_protect",
2160 DECL_ATTRIBUTES (current_function_decl)))
2161 create_stack_guard ();
2167 /* Assign rtl to each variable based on these partitions. */
2168 if (stack_vars_num > 0)
2170 struct stack_vars_data data;
2172 data.asan_vec = vNULL;
2173 data.asan_decl_vec = vNULL;
2174 data.asan_base = NULL_RTX;
2175 data.asan_alignb = 0;
2177 /* Reorder decls to be protected by iterating over the variables
2178 array multiple times, and allocating out of each phase in turn. */
2179 /* ??? We could probably integrate this into the qsort we did
2180 earlier, such that we naturally see these variables first,
2181 and thus naturally allocate things in the right order. */
2182 if (has_protected_decls)
2184 /* Phase 1 contains only character arrays. */
2185 expand_stack_vars (stack_protect_decl_phase_1, &data);
2187 /* Phase 2 contains other kinds of arrays. */
2188 if (flag_stack_protect == SPCT_FLAG_ALL
2189 || flag_stack_protect == SPCT_FLAG_STRONG
2190 || (flag_stack_protect == SPCT_FLAG_EXPLICIT
2191 && lookup_attribute ("stack_protect",
2192 DECL_ATTRIBUTES (current_function_decl))))
2193 expand_stack_vars (stack_protect_decl_phase_2, &data);
2196 if (asan_sanitize_stack_p ())
2197 /* Phase 3, any partitions that need asan protection
2198 in addition to phase 1 and 2. */
2199 expand_stack_vars (asan_decl_phase_3, &data);
2201 if (!data.asan_vec.is_empty ())
2203 HOST_WIDE_INT prev_offset = frame_offset;
2204 HOST_WIDE_INT offset, sz, redzonesz;
2205 redzonesz = ASAN_RED_ZONE_SIZE;
2206 sz = data.asan_vec[0] - prev_offset;
2207 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2208 && data.asan_alignb <= 4096
2209 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2210 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2211 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
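/* Worked example (editor's sketch, assuming ASAN_RED_ZONE_SIZE == 32):
   for sz == 40 and asan_alignb == 64,
     redzonesz = ((40 + 32 + 63) & ~63) - 40 = 128 - 40 = 88,
   so sz + redzonesz == 128, a multiple of the 64-byte alignment.  */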
offset
2213 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE);
2214 data.asan_vec.safe_push (prev_offset);
2215 data.asan_vec.safe_push (offset);
2216 /* Leave space for alignment if STRICT_ALIGNMENT. */
2217 if (STRICT_ALIGNMENT)
2218 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2219 << ASAN_SHADOW_SHIFT)
2220 / BITS_PER_UNIT, 1);
2223 = asan_emit_stack_protection (virtual_stack_vars_rtx,
2226 data.asan_vec.address (),
2227 data.asan_decl_vec.address (),
2228 data.asan_vec.length ());
2231 expand_stack_vars (NULL, &data);
2233 data.asan_vec.release ();
2234 data.asan_decl_vec.release ();
2237 fini_vars_expansion ();
2239 /* If there were any artificial non-ignored vars without rtl
2240 found earlier, see if deferred stack allocation hasn't assigned
rtl to them.  */
2242 FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
2244 rtx rtl = DECL_RTL_IF_SET (var);
2246 /* Keep artificial non-ignored vars in cfun->local_decls
2247 chain until instantiate_decls. */
2248 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
2249 add_local_decl (cfun, var);
2251 maybe_local_decls.release ();
2253 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2254 if (STACK_ALIGNMENT_NEEDED)
2256 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2257 if (!FRAME_GROWS_DOWNWARD)
2258 frame_offset += align - 1;
2259 frame_offset &= -align;
2266 /* If we need to produce a detailed dump, print the tree representation
2267 for STMT to the dump file. SINCE is the last RTX after which the RTL
2268 generated for STMT should have been appended. */
2271 maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
2273 if (dump_file && (dump_flags & TDF_DETAILS))
2275 fprintf (dump_file, "\n;; ");
2276 print_gimple_stmt (dump_file, stmt, 0,
2277 TDF_SLIM | (dump_flags & TDF_LINENO));
2278 fprintf (dump_file, "\n");
2280 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
2284 /* Maps the blocks that do not contain tree labels to rtx labels. */
2286 static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;
2288 /* Returns the label_rtx expression for a label starting basic block BB. */
2290 static rtx_code_label *
2291 label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
2293 gimple_stmt_iterator gsi;
2296 if (bb->flags & BB_RTL)
2297 return block_label (bb);
2299 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
if (elt)
return *elt;
2303 /* Find the tree label if it is present. */
2305 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2309 lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
if (!lab_stmt)
break;
2313 lab = gimple_label_label (lab_stmt);
2314 if (DECL_NONLOCAL (lab))
break;
2317 return jump_target_rtx (lab);
2320 rtx_code_label *l = gen_label_rtx ();
2321 lab_rtx_for_bb->put (bb, l);
return l;
2326 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2327 of a basic block where we just expanded the conditional at the end,
2328 possibly clean up the CFG and instruction sequence. LAST is the
2329 last instruction before the just emitted jump sequence. */
2332 maybe_cleanup_end_of_block (edge e, rtx_insn *last)
2334 /* Special case: when jumpif decides that the condition is
2335 trivial it emits an unconditional jump (and the necessary
2336 barrier). But we still have two edges, the fallthru one is
2337 wrong. purge_dead_edges would clean this up later. Unfortunately
2338 we have to insert insns (and split edges) before
2339 find_many_sub_basic_blocks and hence before purge_dead_edges.
2340 But splitting edges might create new blocks which depend on the
2341 fact that if there are two edges there's no barrier. So the
2342 barrier would get lost and verify_flow_info would ICE. Instead
2343 of auditing all edge splitters to care for the barrier (which
2344 normally isn't there in a cleaned CFG), fix it here. */
2345 if (BARRIER_P (get_last_insn ()))
{
rtx_insn *insn;
remove_edge (e);
2349 /* Now, we have a single successor block, if we have insns to
2350 insert on the remaining edge we potentially will insert
2351 it at the end of this block (if the dest block isn't feasible)
2352 in order to avoid splitting the edge. This insertion will take
2353 place in front of the last jump. But we might have emitted
2354 multiple jumps (conditional and one unconditional) to the
2355 same destination. Inserting in front of the last one then
2356 is a problem. See PR 40021. We fix this by deleting all
2357 jumps except the last unconditional one. */
2358 insn = PREV_INSN (get_last_insn ());
2359 /* Make sure we have an unconditional jump. Otherwise we're
confused.  */
2361 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
2362 for (insn = PREV_INSN (insn); insn != last;)
2364 insn = PREV_INSN (insn);
2365 if (JUMP_P (NEXT_INSN (insn)))
2367 if (!any_condjump_p (NEXT_INSN (insn)))
2369 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
2370 delete_insn (NEXT_INSN (NEXT_INSN (insn)));
2372 delete_insn (NEXT_INSN (insn));
2378 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2379 Returns a new basic block if we've terminated the current basic
2380 block and created a new one. */
2383 expand_gimple_cond (basic_block bb, gcond *stmt)
2385 basic_block new_bb, dest;
2389 rtx_insn *last2, *last;
2390 enum tree_code code;
2393 code = gimple_cond_code (stmt);
2394 op0 = gimple_cond_lhs (stmt);
2395 op1 = gimple_cond_rhs (stmt);
2396 /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
2400 This would expand to two comparisons which then later might
2401 be cleaned up by combine. But some pattern matchers like if-conversion
2402 work better when there's only one compare, so make up for this
2403 here as a special exception if TER would have made the same change. */
if (SA.values
2405 && TREE_CODE (op0) == SSA_NAME
2406 && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
2407 && TREE_CODE (op1) == INTEGER_CST
2408 && ((gimple_cond_code (stmt) == NE_EXPR
2409 && integer_zerop (op1))
2410 || (gimple_cond_code (stmt) == EQ_EXPR
2411 && integer_onep (op1)))
2412 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
2414 gimple *second = SSA_NAME_DEF_STMT (op0);
2415 if (gimple_code (second) == GIMPLE_ASSIGN)
2417 enum tree_code code2 = gimple_assign_rhs_code (second);
2418 if (TREE_CODE_CLASS (code2) == tcc_comparison)
2421 op0 = gimple_assign_rhs1 (second);
2422 op1 = gimple_assign_rhs2 (second);
2424 /* If jumps are cheap and the target does not support conditional
2425 compare, turn some more codes into jumpy sequences. */
2426 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2427 && targetm.gen_ccmp_first == NULL)
2429 if ((code2 == BIT_AND_EXPR
2430 && TYPE_PRECISION (TREE_TYPE (op0)) == 1
2431 && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
2432 || code2 == TRUTH_AND_EXPR)
2434 code = TRUTH_ANDIF_EXPR;
2435 op0 = gimple_assign_rhs1 (second);
2436 op1 = gimple_assign_rhs2 (second);
2438 else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
2440 code = TRUTH_ORIF_EXPR;
2441 op0 = gimple_assign_rhs1 (second);
2442 op1 = gimple_assign_rhs2 (second);
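/* Editor's sketch of the effect: for
     _1 = a < b;  _2 = c < d;  _3 = _1 & _2;  if (_3 != 0)
   the condition is re-expanded as TRUTH_ANDIF_EXPR (_1, _2), i.e. as
   two cheap conditional jumps, instead of materializing _3.  */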
2448 last2 = last = get_last_insn ();
2450 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2451 set_curr_insn_location (gimple_location (stmt));
2453 /* These flags have no purpose in RTL land. */
2454 true_edge->flags &= ~EDGE_TRUE_VALUE;
2455 false_edge->flags &= ~EDGE_FALSE_VALUE;
2457 /* We can either have a pure conditional jump with one fallthru edge or
2458 two-way jump that needs to be decomposed into two basic blocks. */
2459 if (false_edge->dest == bb->next_bb)
2461 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2462 true_edge->probability);
2463 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2464 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2465 set_curr_insn_location (true_edge->goto_locus);
2466 false_edge->flags |= EDGE_FALLTHRU;
2467 maybe_cleanup_end_of_block (false_edge, last);
return NULL;
2470 if (true_edge->dest == bb->next_bb)
2472 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
2473 false_edge->probability);
2474 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2475 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2476 set_curr_insn_location (false_edge->goto_locus);
2477 true_edge->flags |= EDGE_FALLTHRU;
2478 maybe_cleanup_end_of_block (true_edge, last);
return NULL;
2482 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
2483 true_edge->probability);
2484 last = get_last_insn ();
2485 if (false_edge->goto_locus != UNKNOWN_LOCATION)
2486 set_curr_insn_location (false_edge->goto_locus);
2487 emit_jump (label_rtx_for_bb (false_edge->dest));
2490 if (BARRIER_P (BB_END (bb)))
2491 BB_END (bb) = PREV_INSN (BB_END (bb));
2492 update_bb_for_insn (bb);
2494 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2495 dest = false_edge->dest;
2496 redirect_edge_succ (false_edge, new_bb);
2497 false_edge->flags |= EDGE_FALLTHRU;
2498 new_bb->count = false_edge->count;
2499 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2500 add_bb_to_loop (new_bb, bb->loop_father);
2501 new_edge = make_edge (new_bb, dest, 0);
2502 new_edge->probability = REG_BR_PROB_BASE;
2503 new_edge->count = new_bb->count;
2504 if (BARRIER_P (BB_END (new_bb)))
2505 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
2506 update_bb_for_insn (new_bb);
2508 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
2510 if (true_edge->goto_locus != UNKNOWN_LOCATION)
2512 set_curr_insn_location (true_edge->goto_locus);
2513 true_edge->goto_locus = curr_insn_location ();
return new_bb;
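/* Editor's note on the three shapes above: (1) the false edge falls
   through, so one conditional jump to the true label suffices; (2) the
   true edge falls through, so the inverted jump targets the false
   label; (3) neither falls through, so both jumps are emitted and the
   unconditional one is split off into NEW_BB.  */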
2519 /* Mark all calls that can have a transaction restart. */
2522 mark_transaction_restart_calls (gimple *stmt)
2524 struct tm_restart_node dummy;
2525 tm_restart_node **slot;
2527 if (!cfun->gimple_df->tm_restart)
return;
dummy.stmt = stmt;
2531 slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
if (slot)
2534 struct tm_restart_node *n = *slot;
2535 tree list = n->label_or_list;
2538 for (insn = next_real_insn (get_last_insn ());
2540 insn = next_real_insn (insn))
2543 if (TREE_CODE (list) == LABEL_DECL)
2544 add_reg_note (insn, REG_TM, label_rtx (list));
else
2546 for (; list ; list = TREE_CHAIN (list))
2547 add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
2551 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
statement STMT.  */
2555 expand_call_stmt (gcall *stmt)
2557 tree exp, decl, lhs;
2561 if (gimple_call_internal_p (stmt))
2563 expand_internal_call (stmt);
return;
2567 /* If this is a call to a built-in function and it has no effect other
2568 than setting the lhs, try to implement it using an internal function
instead.  */
2570 decl = gimple_call_fndecl (stmt);
2571 if (gimple_call_lhs (stmt)
2572 && !gimple_has_side_effects (stmt)
2573 && (optimize || (decl && called_as_built_in (decl))))
2575 internal_fn ifn = replacement_internal_fn (stmt);
2576 if (ifn != IFN_LAST)
2578 expand_internal_call (ifn, stmt);
return;
2583 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2585 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2586 builtin_p = decl && DECL_BUILT_IN (decl);
2588 /* If this is not a builtin function, the function type through which the
2589 call is made may be different from the type of the function. */
if (!builtin_p)
CALL_EXPR_FN (exp)
2592 = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
2593 CALL_EXPR_FN (exp));
2595 TREE_TYPE (exp) = gimple_call_return_type (stmt);
2596 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
2598 for (i = 0; i < gimple_call_num_args (stmt); i++)
2600 tree arg = gimple_call_arg (stmt, i);
2602 /* We TER addresses into arguments of builtin functions so that we
2603 have a chance to infer more precise alignment information. See PR39954. */
if (builtin_p
2605 && TREE_CODE (arg) == SSA_NAME
2606 && (def = get_gimple_for_ssa_name (arg))
2607 && gimple_assign_rhs_code (def) == ADDR_EXPR)
2608 arg = gimple_assign_rhs1 (def);
2609 CALL_EXPR_ARG (exp, i) = arg;
2612 if (gimple_has_side_effects (stmt))
2613 TREE_SIDE_EFFECTS (exp) = 1;
2615 if (gimple_call_nothrow_p (stmt))
2616 TREE_NOTHROW (exp) = 1;
2618 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2619 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
if (decl
2621 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2622 && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
2623 || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
2624 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2626 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2627 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2628 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2629 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2631 /* Ensure RTL is created for debug args. */
2632 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2634 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
if (debug_args)
{
unsigned ix;
tree dtemp;
2639 for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
2641 gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
2642 expand_debug_expr (dtemp);
2646 lhs = gimple_call_lhs (stmt);
if (lhs)
2648 expand_assignment (lhs, exp, false);
else
2650 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
2652 mark_transaction_restart_calls (stmt);
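/* Editor's sketch: for a gimple call "x = f (a, b)" the code above
   rebuilds the GENERIC tree "CALL_EXPR <f, a, b>" with the call's
   type, location and flags copied over, then lets expand_assignment
   store the result into x; calls without an lhs expand for side
   effects only.  */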
2656 /* Generate RTL for an asm statement (explicit assembler code).
2657 STRING is a STRING_CST node containing the assembler code text,
2658 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2659 insn is volatile; don't optimize it. */
2662 expand_asm_loc (tree string, int vol, location_t locus)
2666 if (TREE_CODE (string) == ADDR_EXPR)
2667 string = TREE_OPERAND (string, 0);
2669 body = gen_rtx_ASM_INPUT_loc (VOIDmode,
2670 ggc_strdup (TREE_STRING_POINTER (string)),
locus);
2673 MEM_VOLATILE_P (body) = vol;
emit_insn (body);
2678 /* Return the number of times character C occurs in string S. */
static int
2680 n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}
2688 /* A subroutine of expand_asm_operands. Check that all operands have
2689 the same number of alternatives. Return true if so. */
2692 check_operand_nalternatives (const vec<const char *> &constraints)
2694 unsigned len = constraints.length();
if (len == 0)
return true;
2697 int nalternatives = n_occurrences (',', constraints[0]);
2699 if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
{
2701 error ("too many alternatives in %<asm%>");
return false;
}
2705 for (unsigned i = 1; i < len; ++i)
2706 if (n_occurrences (',', constraints[i]) != nalternatives)
{
2708 error ("operand constraints for %<asm%> differ "
2709 "in number of alternatives");
return false;
}
return true;
2716 /* Check for overlap between registers marked in CLOBBERED_REGS and
2717 anything inappropriate in T. Emit error and return the register
2718 variable definition for error, NULL_TREE for ok. */
2721 tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
2723 /* Conflicts between asm-declared register variables and the clobber
2724 list are not allowed. */
2725 tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);
if (overlap)
{
2729 error ("asm-specifier for variable %qE conflicts with asm clobber list",
2730 DECL_NAME (overlap));
2732 /* Reset registerness to stop multiple errors emitted for a single
variable.  */
2734 DECL_REGISTER (overlap) = 0;
return overlap;
}
return NULL_TREE;
2741 /* Generate RTL for an asm statement with arguments.
2742 STRING is the instruction template.
2743 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2744 Each output or input has an expression in the TREE_VALUE and
2745 a tree list in TREE_PURPOSE which in turn contains a constraint
2746 name in TREE_PURPOSE (or NULL_TREE) and a constraint string
in TREE_VALUE.
2748 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2749 that is clobbered by this insn.
2751 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2752 should be the fallthru basic block of the asm goto.
2754 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2755 Some elements of OUTPUTS may be replaced with trees representing temporary
2756 values. The caller should copy those temporary values to the originally
specified lvalues.
2759 VOL nonzero means the insn is volatile; don't optimize it. */
2762 expand_asm_stmt (gasm *stmt)
2764 class save_input_location
2769 explicit save_input_location(location_t where)
2771 old = input_location;
2772 input_location = where;
2775 ~save_input_location()
2777 input_location = old;
2781 location_t locus = gimple_location (stmt);
2783 if (gimple_asm_input_p (stmt))
2785 const char *s = gimple_asm_string (stmt);
2786 tree string = build_string (strlen (s), s);
2787 expand_asm_loc (string, gimple_asm_volatile_p (stmt), locus);
2791 /* There are some legacy diagnostics in here, and this also avoids
2792 adding a sixth parameter to targetm.md_asm_adjust. */
2793 save_input_location s_i_l(locus);
2795 unsigned noutputs = gimple_asm_noutputs (stmt);
2796 unsigned ninputs = gimple_asm_ninputs (stmt);
2797 unsigned nlabels = gimple_asm_nlabels (stmt);
2800 /* ??? Diagnose during gimplification? */
2801 if (ninputs + noutputs + nlabels > MAX_RECOG_OPERANDS)
{
2803 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS);
return;
}
2807 auto_vec<tree, MAX_RECOG_OPERANDS> output_tvec;
2808 auto_vec<tree, MAX_RECOG_OPERANDS> input_tvec;
2809 auto_vec<const char *, MAX_RECOG_OPERANDS> constraints;
2811 /* Copy the gimple vectors into new vectors that we can manipulate. */
2813 output_tvec.safe_grow (noutputs);
2814 input_tvec.safe_grow (ninputs);
2815 constraints.safe_grow (noutputs + ninputs);
2817 for (i = 0; i < noutputs; ++i)
2819 tree t = gimple_asm_output_op (stmt, i);
2820 output_tvec[i] = TREE_VALUE (t);
2821 constraints[i] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2823 for (i = 0; i < ninputs; i++)
2825 tree t = gimple_asm_input_op (stmt, i);
2826 input_tvec[i] = TREE_VALUE (t);
2827 constraints[i + noutputs]
2828 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
2831 /* ??? Diagnose during gimplification? */
2832 if (! check_operand_nalternatives (constraints))
return;
2835 /* Count the number of meaningful clobbered registers, ignoring what
2836 we would ignore later. */
2837 auto_vec<rtx> clobber_rvec;
2838 HARD_REG_SET clobbered_regs;
2839 CLEAR_HARD_REG_SET (clobbered_regs);
2841 if (unsigned n = gimple_asm_nclobbers (stmt))
2843 clobber_rvec.reserve (n);
2844 for (i = 0; i < n; i++)
2846 tree t = gimple_asm_clobber_op (stmt, i);
2847 const char *regname = TREE_STRING_POINTER (TREE_VALUE (t));
2850 j = decode_reg_name_and_count (regname, &nregs);
2855 /* ??? Diagnose during gimplification? */
2856 error ("unknown register name %qs in %<asm%>", regname);
2860 rtx x = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2861 clobber_rvec.safe_push (x);
2865 /* Otherwise we should have -1 == empty string
2866 or -3 == cc, which is not a register. */
2867 gcc_assert (j == -1 || j == -3);
else
2871 for (int reg = j; reg < j + nregs; reg++)
2873 /* Clobbering the PIC register is an error. */
2874 if (reg == (int) PIC_OFFSET_TABLE_REGNUM)
2876 /* ??? Diagnose during gimplification? */
2877 error ("PIC register clobbered by %qs in %<asm%>",
2882 SET_HARD_REG_BIT (clobbered_regs, reg);
2883 rtx x = gen_rtx_REG (reg_raw_mode[reg], reg);
2884 clobber_rvec.safe_push (x);
2888 unsigned nclobbers = clobber_rvec.length();
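/* Editor's illustration (assumed clobber list): for
     asm volatile ("" : : : "memory", "cc", "eax")
   "memory" pushes the (mem:BLK (scratch)) above, "cc" decodes to a
   negative code and adds nothing, and "eax" pushes a (reg) clobber
   and sets its bit in CLOBBERED_REGS.  */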
2890 /* First pass over inputs and outputs checks validity and sets
2891 mark_addressable if needed. */
2892 /* ??? Diagnose during gimplification? */
2894 for (i = 0; i < noutputs; ++i)
2896 tree val = output_tvec[i];
2897 tree type = TREE_TYPE (val);
2898 const char *constraint;
2903 /* Try to parse the output constraint. If that fails, there's
2904 no point in going further. */
2905 constraint = constraints[i];
2906 if (!parse_output_constraint (&constraint, i, ninputs, noutputs,
2907 &allows_mem, &allows_reg, &is_inout))
return;
2914 && REG_P (DECL_RTL (val))
2915 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))))
2916 mark_addressable (val);
2919 for (i = 0; i < ninputs; ++i)
2921 bool allows_reg, allows_mem;
2922 const char *constraint;
2924 constraint = constraints[i + noutputs];
2925 if (! parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
2926 constraints.address (),
2927 &allows_mem, &allows_reg))
return;
2930 if (! allows_reg && allows_mem)
2931 mark_addressable (input_tvec[i]);
2934 /* Second pass evaluates arguments. */
2936 /* Make sure stack is consistent for asm goto. */
2938 do_pending_stack_adjust ();
2939 int old_generating_concat_p = generating_concat_p;
2941 /* Vector of RTX's of evaluated output operands. */
2942 auto_vec<rtx, MAX_RECOG_OPERANDS> output_rvec;
2943 auto_vec<int, MAX_RECOG_OPERANDS> inout_opnum;
2944 rtx_insn *after_rtl_seq = NULL, *after_rtl_end = NULL;
2946 output_rvec.safe_grow (noutputs);
2948 for (i = 0; i < noutputs; ++i)
2950 tree val = output_tvec[i];
2951 tree type = TREE_TYPE (val);
2952 bool is_inout, allows_reg, allows_mem, ok;
2955 ok = parse_output_constraint (&constraints[i], i, ninputs,
2956 noutputs, &allows_mem, &allows_reg,
&is_inout);
gcc_assert (ok);
2960 /* If an output operand is not a decl or indirect ref and our constraint
2961 allows a register, make a temporary to act as an intermediate.
2962 Make the asm insn write into that, then we will copy it to
2963 the real output operand. Likewise for promoted variables. */
2965 generating_concat_p = 0;
2967 if ((TREE_CODE (val) == INDIRECT_REF
2970 && (allows_mem || REG_P (DECL_RTL (val)))
2971 && ! (REG_P (DECL_RTL (val))
2972 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
2976 op = expand_expr (val, NULL_RTX, VOIDmode,
2977 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
if (MEM_P (op))
2979 op = validize_mem (op);
2981 if (! allows_reg && !MEM_P (op))
2982 error ("output number %d not directly addressable", i);
2983 if ((! allows_mem && MEM_P (op))
2984 || GET_CODE (op) == CONCAT)
rtx old_op = op;
2987 op = gen_reg_rtx (GET_MODE (op));
2989 generating_concat_p = old_generating_concat_p;
if (is_inout)
2992 emit_move_insn (op, old_op);
2994 push_to_sequence2 (after_rtl_seq, after_rtl_end);
2995 emit_move_insn (old_op, op);
2996 after_rtl_seq = get_insns ();
2997 after_rtl_end = get_last_insn ();
end_sequence ();
3003 op = assign_temp (type, 0, 1);
3004 op = validize_mem (op);
3005 if (!MEM_P (op) && TREE_CODE (val) == SSA_NAME)
3006 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val), op);
3008 generating_concat_p = old_generating_concat_p;
3010 push_to_sequence2 (after_rtl_seq, after_rtl_end);
3011 expand_assignment (val, make_tree (type, op), false);
3012 after_rtl_seq = get_insns ();
3013 after_rtl_end = get_last_insn ();
end_sequence ();
3016 output_rvec[i] = op;
if (is_inout)
3019 inout_opnum.safe_push (i);
3022 auto_vec<rtx, MAX_RECOG_OPERANDS> input_rvec;
3023 auto_vec<machine_mode, MAX_RECOG_OPERANDS> input_mode;
3025 input_rvec.safe_grow (ninputs);
3026 input_mode.safe_grow (ninputs);
3028 generating_concat_p = 0;
3030 for (i = 0; i < ninputs; ++i)
3032 tree val = input_tvec[i];
3033 tree type = TREE_TYPE (val);
3034 bool allows_reg, allows_mem, ok;
3035 const char *constraint;
3038 constraint = constraints[i + noutputs];
3039 ok = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
3040 constraints.address (),
3041 &allows_mem, &allows_reg);
gcc_assert (ok);
3044 /* EXPAND_INITIALIZER will not generate code for valid initializer
3045 constants, but will still generate code for other types of operand.
3046 This is the behavior we want for constant constraints. */
3047 op = expand_expr (val, NULL_RTX, VOIDmode,
3048 allows_reg ? EXPAND_NORMAL
3049 : allows_mem ? EXPAND_MEMORY
3050 : EXPAND_INITIALIZER);
3052 /* Never pass a CONCAT to an ASM. */
3053 if (GET_CODE (op) == CONCAT)
3054 op = force_reg (GET_MODE (op), op);
3055 else if (MEM_P (op))
3056 op = validize_mem (op);
3058 if (asm_operand_ok (op, constraint, NULL) <= 0)
3060 if (allows_reg && TYPE_MODE (type) != BLKmode)
3061 op = force_reg (TYPE_MODE (type), op);
3062 else if (!allows_mem)
3063 warning (0, "asm operand %d probably doesn%'t match constraints",
i + noutputs);
3065 else if (MEM_P (op))
3067 /* We won't recognize either volatile memory or memory
3068 with a queued address as a valid memory_operand at this
3069 point. Ignore it: clearly this *is* a memory. */
input_rvec[i] = op;
3075 input_mode[i] = TYPE_MODE (type);
3078 /* For in-out operands, copy output rtx to input rtx. */
3079 unsigned ninout = inout_opnum.length();
3080 for (i = 0; i < ninout; i++)
3082 int j = inout_opnum[i];
3083 rtx o = output_rvec[j];
3085 input_rvec.safe_push (o);
3086 input_mode.safe_push (GET_MODE (o));
char buffer[16];
3089 sprintf (buffer, "%d", j);
3090 constraints.safe_push (ggc_strdup (buffer));
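/* Editor's example: for asm ("..." : "+r" (x)) the '+' makes operand 0
   an in-out; the loop above appends a hidden input sharing x's rtx
   whose constraint is the digit string "0", tying it to output 0.  */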
3094 /* Sometimes we wish to automatically clobber registers across an asm.
3095 Case in point is when the i386 backend moved from cc0 to a hard reg --
3096 maintaining source-level compatibility means automatically clobbering
3097 the flags register. */
3098 rtx_insn *after_md_seq = NULL;
3099 if (targetm.md_asm_adjust)
3100 after_md_seq = targetm.md_asm_adjust (output_rvec, input_rvec,
3101 constraints, clobber_rvec,
clobbered_regs);
3104 /* Do not allow the hook to change the output and input count,
3105 lest it mess up the operand numbering. */
3106 gcc_assert (output_rvec.length() == noutputs);
3107 gcc_assert (input_rvec.length() == ninputs);
3108 gcc_assert (constraints.length() == noutputs + ninputs);
3110 /* But it certainly can adjust the clobbers. */
3111 nclobbers = clobber_rvec.length();
3113 /* Third pass checks for easy conflicts. */
3114 /* ??? Why are we doing this on trees instead of rtx?  */
3116 bool clobber_conflict_found = 0;
3117 for (i = 0; i < noutputs; ++i)
3118 if (tree_conflicts_with_clobbers_p (output_tvec[i], &clobbered_regs))
3119 clobber_conflict_found = 1;
3120 for (i = 0; i < ninputs - ninout; ++i)
3121 if (tree_conflicts_with_clobbers_p (input_tvec[i], &clobbered_regs))
3122 clobber_conflict_found = 1;
3124 /* Make vectors for the expression-rtx, constraint strings,
3125 and named operands. */
3127 rtvec argvec = rtvec_alloc (ninputs);
3128 rtvec constraintvec = rtvec_alloc (ninputs);
3129 rtvec labelvec = rtvec_alloc (nlabels);
3131 rtx body = gen_rtx_ASM_OPERANDS ((noutputs == 0 ? VOIDmode
3132 : GET_MODE (output_rvec[0])),
3133 ggc_strdup (gimple_asm_string (stmt)),
3134 empty_string, 0, argvec, constraintvec,
labelvec, locus);
3136 MEM_VOLATILE_P (body) = gimple_asm_volatile_p (stmt);
3138 for (i = 0; i < ninputs; ++i)
3140 ASM_OPERANDS_INPUT (body, i) = input_rvec[i];
3141 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body, i)
3142 = gen_rtx_ASM_INPUT_loc (input_mode[i],
3143 constraints[i + noutputs],
locus);
3147 /* Copy labels to the vector. */
3148 rtx_code_label *fallthru_label = NULL;
3151 basic_block fallthru_bb = NULL;
3152 edge fallthru = find_fallthru_edge (gimple_bb (stmt)->succs);
if (fallthru)
3154 fallthru_bb = fallthru->dest;
3156 for (i = 0; i < nlabels; ++i)
3158 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
rtx_code_label *r;
3160 /* If asm goto has any labels in the fallthru basic block, use
3161 a label that we emit immediately after the asm goto. Expansion
3162 may insert further instructions into the same basic block after
3163 asm goto and if we don't do this, insertion of instructions on
3164 the fallthru edge might misbehave. See PR58670. */
3165 if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb)
3167 if (fallthru_label == NULL_RTX)
3168 fallthru_label = gen_label_rtx ();
r = fallthru_label;
}
else
3172 r = label_rtx (label);
3173 ASM_OPERANDS_LABEL (body, i) = gen_rtx_LABEL_REF (Pmode, r);
3177 /* Now, for each output, construct an rtx
3178 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3179 ARGVEC CONSTRAINTS OPNAMES))
3180 If there is more than one, put them inside a PARALLEL. */
3182 if (nlabels > 0 && nclobbers == 0)
3184 gcc_assert (noutputs == 0);
3185 emit_jump_insn (body);
3187 else if (noutputs == 0 && nclobbers == 0)
3189 /* No output operands: put in a raw ASM_OPERANDS rtx. */
emit_insn (body);
3192 else if (noutputs == 1 && nclobbers == 0)
3194 ASM_OPERANDS_OUTPUT_CONSTRAINT (body) = constraints[0];
3195 emit_insn (gen_rtx_SET (output_rvec[0], body));
else
{
rtx obody = body;
int num = (noutputs ? noutputs : 1);
3205 body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num + nclobbers));
3207 /* For each output operand, store a SET. */
3208 for (i = 0; i < noutputs; ++i)
3210 rtx src, o = output_rvec[i];
if (i == 0)
{
3213 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody) = constraints[0];
src = obody;
}
else
{
3218 src = gen_rtx_ASM_OPERANDS (GET_MODE (o),
3219 ASM_OPERANDS_TEMPLATE (obody),
3220 constraints[i], i, argvec,
3221 constraintvec, labelvec, locus);
3222 MEM_VOLATILE_P (src) = gimple_asm_volatile_p (stmt);
}
3224 XVECEXP (body, 0, i) = gen_rtx_SET (o, src);
3227 /* If there are no outputs (but there are some clobbers)
3228 store the bare ASM_OPERANDS into the PARALLEL. */
if (i == 0)
3230 XVECEXP (body, 0, i++) = obody;
3232 /* Store (clobber REG) for each clobbered register specified. */
3233 for (unsigned j = 0; j < nclobbers; ++j)
3235 rtx clobbered_reg = clobber_rvec[j];
3237 /* Do sanity check for overlap between clobbers and respectively
3238 input and outputs that hasn't been handled. Such overlap
3239 should have been detected and reported above. */
3240 if (!clobber_conflict_found && REG_P (clobbered_reg))
3242 /* We test the old body (obody) contents to avoid
3243 tripping over the under-construction body. */
3244 for (unsigned k = 0; k < noutputs; ++k)
3245 if (reg_overlap_mentioned_p (clobbered_reg, output_rvec[k]))
3246 internal_error ("asm clobber conflict with output operand");
3248 for (unsigned k = 0; k < ninputs - ninout; ++k)
3249 if (reg_overlap_mentioned_p (clobbered_reg, input_rvec[k]))
3250 internal_error ("asm clobber conflict with input operand");
3253 XVECEXP (body, 0, i++) = gen_rtx_CLOBBER (VOIDmode, clobbered_reg);
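/* Editor's sketch of the result: with two outputs and one register
   clobber the PARALLEL looks roughly like
     (parallel [(set (reg o0) (asm_operands ...))
                (set (reg o1) (asm_operands ...))
                (clobber (reg r))])  */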
if (nlabels > 0)
3257 emit_jump_insn (body);
else
emit_insn (body);
3262 generating_concat_p = old_generating_concat_p;
if (fallthru_label)
3265 emit_label (fallthru_label);
if (after_md_seq)
3268 emit_insn (after_md_seq);
if (after_rtl_seq)
3270 emit_insn (after_rtl_seq);
3273 crtl->has_asm_statement = 1;
3276 /* Emit code to jump to the address
3277 specified by the pointer expression EXP. */
3280 expand_computed_goto (tree exp)
3282 rtx x = expand_normal (exp);
3284 do_pending_stack_adjust ();
3285 emit_indirect_jump (x);
3288 /* Generate RTL code for a `goto' statement with target label LABEL.
3289 LABEL should be a LABEL_DECL tree node that was or will later be
3290 defined with `expand_label'. */
3293 expand_goto (tree label)
3297 /* Check for a nonlocal goto to a containing function. Should have
3298 gotten translated to __builtin_nonlocal_goto. */
3299 tree context = decl_function_context (label);
3300 gcc_assert (!context || context == current_function_decl);
3303 emit_jump (jump_target_rtx (label));
3306 /* Output a return with no value. */
3309 expand_null_return_1 (void)
3311 clear_pending_stack_adjust ();
3312 do_pending_stack_adjust ();
3313 emit_jump (return_label);
3316 /* Generate RTL to return from the current function, with no value.
3317 (That is, we do not do anything about returning any value.) */
3320 expand_null_return (void)
3322 /* If this function was declared to return a value, but we
3323 didn't, clobber the return registers so that they are not
3324 propagated live to the rest of the function. */
3325 clobber_return_register ();
3327 expand_null_return_1 ();
3330 /* Generate RTL to return from the current function, with value VAL. */
3333 expand_value_return (rtx val)
3335 /* Copy the value to the return location unless it's already there. */
3337 tree decl = DECL_RESULT (current_function_decl);
3338 rtx return_reg = DECL_RTL (decl);
3339 if (return_reg != val)
3341 tree funtype = TREE_TYPE (current_function_decl);
3342 tree type = TREE_TYPE (decl);
3343 int unsignedp = TYPE_UNSIGNED (type);
3344 machine_mode old_mode = DECL_MODE (decl);
3346 if (DECL_BY_REFERENCE (decl))
3347 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
3349 mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);
3351 if (mode != old_mode)
3352 val = convert_modes (mode, old_mode, val, unsignedp);
3354 if (GET_CODE (return_reg) == PARALLEL)
3355 emit_group_load (return_reg, val, type, int_size_in_bytes (type));
3357 emit_move_insn (return_reg, val);
3360 expand_null_return_1 ();
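/* Editor's example: returning a char from a function whose ABI
   promotes return values leaves DECL_MODE as QImode while
   promote_function_mode yields SImode, so convert_modes above widens
   VAL before the move into the return register.  */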
3363 /* Generate RTL to evaluate the expression RETVAL and return it
3364 from the current function. */
3367 expand_return (tree retval, tree bounds)
3374 /* If function wants no value, give it none. */
3375 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3377 expand_normal (retval);
3378 expand_null_return ();
3382 if (retval == error_mark_node)
3384 /* Treat this like a return of no value from a function that
returned without a value.  */
3386 expand_null_return ();
3389 else if ((TREE_CODE (retval) == MODIFY_EXPR
3390 || TREE_CODE (retval) == INIT_EXPR)
3391 && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
3392 retval_rhs = TREE_OPERAND (retval, 1);
3394 retval_rhs = retval;
3396 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3398 /* Put returned bounds to the right place. */
3399 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3405 if (bounds && bounds != error_mark_node)
3407 bnd = expand_normal (bounds);
3408 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3410 else if (REG_P (bounds_rtl))
3413 bnd = chkp_expand_zero_bounds ();
3416 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3417 addr = gen_rtx_MEM (Pmode, addr);
3418 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3421 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3427 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3430 bnd = chkp_expand_zero_bounds ();
3433 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3434 addr = gen_rtx_MEM (Pmode, addr);
3437 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3439 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3442 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3443 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3444 bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3446 targetm.calls.store_returned_bounds (slot, bnd);
3450 else if (chkp_function_instrumented_p (current_function_decl)
3451 && !BOUNDED_P (retval_rhs)
3452 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3453 && TREE_CODE (retval_rhs) != RESULT_DECL)
3455 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3456 addr = gen_rtx_MEM (Pmode, addr);
3458 gcc_assert (MEM_P (result_rtl));
3460 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3463 /* If we are returning the RESULT_DECL, then the value has already
3464 been stored into it, so we don't have to do anything special. */
3465 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3466 expand_value_return (result_rtl);
3468 /* If the result is an aggregate that is being returned in one (or more)
3469 registers, load the registers here. */
3471 else if (retval_rhs != 0
3472 && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
3473 && REG_P (result_rtl))
3475 val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
if (val)
{
3478 /* Use the mode of the result value on the return register. */
3479 PUT_MODE (result_rtl, GET_MODE (val));
3480 expand_value_return (val);
}
else
3483 expand_null_return ();
3485 else if (retval_rhs != 0
3486 && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
3487 && (REG_P (result_rtl)
3488 || (GET_CODE (result_rtl) == PARALLEL)))
3490 /* Compute the return value into a temporary (usually a pseudo reg). */
val
3492 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
3493 val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
3494 val = force_not_mem (val);
3495 expand_value_return (val);
3499 /* No hard reg used; calculate value into hard return reg. */
3500 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3501 expand_value_return (result_rtl);
3505 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3506 STMT that doesn't require special handling for outgoing edges. That
3507 is no tailcalls and no GIMPLE_COND. */
3510 expand_gimple_stmt_1 (gimple *stmt)
3514 set_curr_insn_location (gimple_location (stmt));
3516 switch (gimple_code (stmt))
case GIMPLE_GOTO:
3519 op0 = gimple_goto_dest (stmt);
3520 if (TREE_CODE (op0) == LABEL_DECL)
expand_goto (op0);
else
3523 expand_computed_goto (op0);
break;
case GIMPLE_LABEL:
3526 expand_label (gimple_label_label (as_a <glabel *> (stmt)));
break;
3529 case GIMPLE_PREDICT:
break;
case GIMPLE_SWITCH:
3532 expand_case (as_a <gswitch *> (stmt));
break;
case GIMPLE_ASM:
3535 expand_asm_stmt (as_a <gasm *> (stmt));
break;
case GIMPLE_CALL:
3538 expand_call_stmt (as_a <gcall *> (stmt));
break;
case GIMPLE_RETURN:
{
3543 tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3544 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3546 if (op0 && op0 != error_mark_node)
3548 tree result = DECL_RESULT (current_function_decl);
3550 /* Mark that we have a return statement with missing bounds. */
3552 && chkp_function_instrumented_p (cfun->decl)
3554 bnd = error_mark_node;
3556 /* If we are not returning the current function's RESULT_DECL,
3557 build an assignment to it. */
if (op0 != result)
{
3560 /* I believe that a function's RESULT_DECL is unique. */
3561 gcc_assert (TREE_CODE (op0) != RESULT_DECL);
3563 /* ??? We'd like to use simply expand_assignment here,
3564 but this fails if the value is of BLKmode but the return
3565 decl is a register. expand_return has special handling
3566 for this combination, which eventually should move
3567 to common code. See comments there. Until then, let's
3568 build a modify expression :-/ */
3569 op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
result, op0);
if (!op0)
3575 expand_null_return ();
else
3577 expand_return (op0, bnd);
case GIMPLE_ASSIGN:
{
3583 gassign *assign_stmt = as_a <gassign *> (stmt);
3584 tree lhs = gimple_assign_lhs (assign_stmt);
3586 /* Tree expand used to fiddle with |= and &= of two bitfield
3587 COMPONENT_REFs here. This can't happen with gimple, the LHS
3588 of binary assigns must be a gimple reg. */
3590 if (TREE_CODE (lhs) != SSA_NAME
3591 || get_gimple_rhs_class (gimple_expr_code (stmt))
3592 == GIMPLE_SINGLE_RHS)
3594 tree rhs = gimple_assign_rhs1 (assign_stmt);
3595 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
3596 == GIMPLE_SINGLE_RHS);
3597 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)
3598 /* Do not put locations on possibly shared trees. */
3599 && !is_gimple_min_invariant (rhs))
3600 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3601 if (TREE_CLOBBER_P (rhs))
3602 /* This is a clobber to mark the going out of scope for
this variable.  */
;
else
3606 expand_assignment (lhs, rhs,
3607 gimple_assign_nontemporal_move_p (
assign_stmt));
3613 bool nontemporal = gimple_assign_nontemporal_move_p (assign_stmt);
3614 struct separate_ops ops;
3615 bool promoted = false;
3617 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
3618 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
promoted = true;
3621 ops.code = gimple_assign_rhs_code (assign_stmt);
3622 ops.type = TREE_TYPE (lhs);
3623 switch (get_gimple_rhs_class (ops.code))
3625 case GIMPLE_TERNARY_RHS:
3626 ops.op2 = gimple_assign_rhs3 (assign_stmt);
/* FALLTHRU */
3628 case GIMPLE_BINARY_RHS:
3629 ops.op1 = gimple_assign_rhs2 (assign_stmt);
/* FALLTHRU */
3631 case GIMPLE_UNARY_RHS:
3632 ops.op0 = gimple_assign_rhs1 (assign_stmt);
break;
default:
gcc_unreachable ();
3637 ops.location = gimple_location (stmt);
3639 /* If we want to use a nontemporal store, force the value to
3640 register first. If we store into a promoted register,
3641 don't directly expand to target. */
3642 temp = nontemporal || promoted ? NULL_RTX : target;
3643 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
EXPAND_NORMAL);
if (temp == target)
;
else if (promoted)
{
3650 int unsignedp = SUBREG_PROMOTED_SIGN (target);
3651 /* If TEMP is a VOIDmode constant, use convert_modes to make
3652 sure that we properly convert it. */
3653 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3655 temp = convert_modes (GET_MODE (target),
3656 TYPE_MODE (ops.type),
temp, unsignedp);
3658 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3659 GET_MODE (target), temp, unsignedp);
3662 convert_move (SUBREG_REG (target), temp, unsignedp);
3664 else if (nontemporal && emit_storent_insn (target, temp))
;
else
{
3668 temp = force_operand (temp, target);
if (temp != target)
3670 emit_move_insn (target, temp);
}
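/* Editor's note: a promoted SUBREG target means the variable lives in
   a wider hard register than its declared mode (e.g. a short kept in
   an SImode reg), so the store above goes through convert_move into
   SUBREG_REG rather than a plain move into TARGET.  */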
3681 /* Expand one gimple statement STMT and return the last RTL instruction
3682 before any of the newly generated ones.
3684 In addition to generating the necessary RTL instructions this also
3685 sets REG_EH_REGION notes if necessary and sets the current source
3686 location for diagnostics. */
3689 expand_gimple_stmt (gimple *stmt)
3691 location_t saved_location = input_location;
3692 rtx_insn *last = get_last_insn ();
3697 /* We need to save and restore the current source location so that errors
3698 discovered during expansion are emitted with the right location. But
3699 it would be better if the diagnostic routines used the source location
3700 embedded in the tree nodes rather than globals. */
3701 if (gimple_has_location (stmt))
3702 input_location = gimple_location (stmt);
3704 expand_gimple_stmt_1 (stmt);
3706 /* Free any temporaries used to evaluate this statement. */
3709 input_location = saved_location;
3711 /* Mark all insns that may trap. */
3712 lp_nr = lookup_stmt_eh_lp (stmt);
if (lp_nr)
3716 for (insn = next_real_insn (last); insn;
3717 insn = next_real_insn (insn))
3719 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3720 /* If we want exceptions for non-call insns, any
3721 may_trap_p instruction may throw. */
3722 && GET_CODE (PATTERN (insn)) != CLOBBER
3723 && GET_CODE (PATTERN (insn)) != USE
3724 && insn_could_throw_p (insn))
3725 make_reg_eh_region_note (insn, 0, lp_nr);
return last;
3732 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3733 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3734 generated a tail call (something that might be denied by the ABI
3735 rules governing the call; see calls.c).
3737 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3738 can still reach the rest of BB. The case here is __builtin_sqrt,
3739 where the NaN result goes through the external function (with a
3740 tailcall) and the normal result happens via a sqrt instruction. */
3743 expand_gimple_tailcall (basic_block bb, gcall *stmt, bool *can_fallthru)
3745 rtx_insn *last2, *last;
3751 last2 = last = expand_gimple_stmt (stmt);
3753 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
3754 if (CALL_P (last) && SIBLING_CALL_P (last))
goto found;
3757 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3759 *can_fallthru = true;
found:
3763 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3764 Any instructions emitted here are about to be deleted. */
3765 do_pending_stack_adjust ();
3767 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3768 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3769 EH or abnormal edges, we shouldn't have created a tail call in
3770 the first place. So it seems to me we should just be removing
3771 all edges here, or redirecting the existing fallthru edge to
the exit block.  */
3777 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3779 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3781 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3783 e->dest->count -= e->count;
3784 e->dest->frequency -= EDGE_FREQUENCY (e);
3785 if (e->dest->count < 0)
e->dest->count = 0;
3787 if (e->dest->frequency < 0)
3788 e->dest->frequency = 0;
3791 probability += e->probability;
3798 /* This is somewhat ugly: the call_expr expander often emits instructions
3799 after the sibcall (to perform the function return). These confuse the
3800 find_many_sub_basic_blocks code, so we need to get rid of these. */
3801 last = NEXT_INSN (last);
3802 gcc_assert (BARRIER_P (last));
3804 *can_fallthru = false;
3805 while (NEXT_INSN (last))
3807 /* For instance, the sqrt builtin expander may expand an if with a
3808 sibcall in the then-branch and a label for the else-branch. */
3809 if (LABEL_P (NEXT_INSN (last)))
{
3811 *can_fallthru = true;
break;
}
3814 delete_insn (NEXT_INSN (last));
3817 e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_ABNORMAL
| EDGE_SIBCALL);
3819 e->probability += probability;
3822 update_bb_for_insn (bb);
3824 if (NEXT_INSN (last))
3826 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
last = BB_END (bb);
3829 if (BARRIER_P (last))
3830 BB_END (bb) = PREV_INSN (last);
3833 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
3838 /* Return the difference between the floor and the truncated result of
3839 a signed division by OP1 with remainder MOD. */
3841 floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3843 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3844 return gen_rtx_IF_THEN_ELSE
3845 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3846 gen_rtx_IF_THEN_ELSE
3847 (mode, gen_rtx_LT (BImode,
3848 gen_rtx_DIV (mode, op1, mod),
const0_rtx),
3850 constm1_rtx, const0_rtx),
const0_rtx);
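/* Worked example (editor's sketch): for -7 / 2 truncation gives -3
   with MOD == -1; OP1 / MOD == 2 / -1 == -2 < 0, so the adjustment is
   -1 and -3 + -1 == -4 == floor (-3.5).  */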
3854 /* Return the difference between the ceil and the truncated result of
3855 a signed division by OP1 with remainder MOD. */
3857 ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3859 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3860 return gen_rtx_IF_THEN_ELSE
3861 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3862 gen_rtx_IF_THEN_ELSE
3863 (mode, gen_rtx_GT (BImode,
3864 gen_rtx_DIV (mode, op1, mod),
const0_rtx),
3866 const1_rtx, const0_rtx),
const0_rtx);
3870 /* Return the difference between the ceil and the truncated result of
3871 an unsigned division by OP1 with remainder MOD. */
3873 ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
3875 /* (mod != 0 ? 1 : 0) */
3876 return gen_rtx_IF_THEN_ELSE
3877 (mode, gen_rtx_NE (BImode, mod, const0_rtx),
3878 const1_rtx, const0_rtx);
3881 /* Return the difference between the rounded and the truncated result
3882 of a signed division by OP1 with remainder MOD. Halfway cases are
3883 rounded away from zero, rather than to the nearest even number. */
3885 round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
3887 /* (abs (mod) >= abs (op1) - abs (mod)
3888 ? (op1 / mod > 0 ? 1 : -1)
3890 return gen_rtx_IF_THEN_ELSE
3891 (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
3892 gen_rtx_MINUS (mode,
3893 gen_rtx_ABS (mode, op1),
3894 gen_rtx_ABS (mode, mod))),
3895 gen_rtx_IF_THEN_ELSE
3896 (mode, gen_rtx_GT (BImode,
3897 gen_rtx_DIV (mode, op1, mod),
const0_rtx),
3899 const1_rtx, constm1_rtx),
const0_rtx);
3903 /* Return the difference between the rounded and the truncated result
3904 of an unsigned division by OP1 with remainder MOD. Halfway cases
3905 are rounded away from zero, rather than to the nearest even
number.  */
3908 round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
3910 /* (mod >= op1 - mod ? 1 : 0) */
3911 return gen_rtx_IF_THEN_ELSE
3912 (mode, gen_rtx_GE (BImode, mod,
3913 gen_rtx_MINUS (mode, op1, mod)),
3914 const1_rtx, const0_rtx);
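/* Worked example (editor's sketch): for 7 / 2 truncation gives 3 with
   MOD == 1; since MOD >= OP1 - MOD (1 >= 1) the adjustment is 1, and
   3 + 1 == 4 == round (3.5) away from zero.  */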
3917 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
any rtl.  */
3921 convert_debug_memory_address (machine_mode mode, rtx x,
3924 machine_mode xmode = GET_MODE (x);
3926 #ifndef POINTERS_EXTEND_UNSIGNED
3927 gcc_assert (mode == Pmode
3928 || mode == targetm.addr_space.address_mode (as));
3929 gcc_assert (xmode == mode || xmode == VOIDmode);
#else
rtx temp;
3933 gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));
3935 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
3938 if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
3939 x = lowpart_subreg (mode, x, xmode);
3940 else if (POINTERS_EXTEND_UNSIGNED > 0)
3941 x = gen_rtx_ZERO_EXTEND (mode, x);
3942 else if (!POINTERS_EXTEND_UNSIGNED)
3943 x = gen_rtx_SIGN_EXTEND (mode, x);
3946 switch (GET_CODE (x))
case SUBREG:
3949 if ((SUBREG_PROMOTED_VAR_P (x)
3950 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
3951 || (GET_CODE (SUBREG_REG (x)) == PLUS
3952 && REG_P (XEXP (SUBREG_REG (x), 0))
3953 && REG_POINTER (XEXP (SUBREG_REG (x), 0))
3954 && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
3955 && GET_MODE (SUBREG_REG (x)) == mode)
3956 return SUBREG_REG (x);
break;
case LABEL_REF:
3959 temp = gen_rtx_LABEL_REF (mode, LABEL_REF_LABEL (x));
3960 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
return temp;
case SYMBOL_REF:
3963 temp = shallow_copy_rtx (x);
3964 PUT_MODE (temp, mode);
return temp;
case CONST:
3967 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3969 temp = gen_rtx_CONST (mode, temp);
return temp;
case PLUS:
case MINUS:
3973 if (CONST_INT_P (XEXP (x, 1)))
3975 temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
3977 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
3983 /* Don't know how to express ptr_extend as operation in debug info. */
3986 #endif /* POINTERS_EXTEND_UNSIGNED */
3991 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
3992 by avoid_deep_ter_for_debug. */
3994 static hash_map<tree, tree> *deep_ter_debug_map;
3996 /* Split too deep TER chains for debug stmts using debug temporaries. */
3999 avoid_deep_ter_for_debug (gimple *stmt, int depth)
4001 use_operand_p use_p;
ssa_op_iter iter;
4003 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
4005 tree use = USE_FROM_PTR (use_p);
4006 if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
continue;
4008 gimple *g = get_gimple_for_ssa_name (use);
if (g == NULL)
continue;
4011 if (depth > 6 && !stmt_ends_bb_p (g))
4013 if (deep_ter_debug_map == NULL)
4014 deep_ter_debug_map = new hash_map<tree, tree>;
4016 tree &vexpr = deep_ter_debug_map->get_or_insert (use);
if (vexpr != NULL)
continue;
4019 vexpr = make_node (DEBUG_EXPR_DECL);
4020 gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
4021 DECL_ARTIFICIAL (vexpr) = 1;
4022 TREE_TYPE (vexpr) = TREE_TYPE (use);
4023 DECL_MODE (vexpr) = TYPE_MODE (TREE_TYPE (use));
4024 gimple_stmt_iterator gsi = gsi_for_stmt (g);
4025 gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
4026 avoid_deep_ter_for_debug (def_temp, 0);
4029 avoid_deep_ter_for_debug (g, depth + 1);
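/* Editor's sketch: given a single-use chain like
     _1 = a + b;  _2 = _1 * c;  ...  _8 = _7 - d;
   TER would substitute the whole chain into one debug expression; once
   the recursion passes depth 6, a DEBUG_EXPR_DECL temporary is bound to
   the intermediate value and used in its place.  */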
4033 /* Return an RTX equivalent to the value of the parameter DECL. */
4036 expand_debug_parm_decl (tree decl)
4038 rtx incoming = DECL_INCOMING_RTL (decl);
if (incoming
4041 && GET_MODE (incoming) != BLKmode
4042 && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
4043 || (MEM_P (incoming)
4044 && REG_P (XEXP (incoming, 0))
4045 && HARD_REGISTER_P (XEXP (incoming, 0)))))
4047 rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
4049 #ifdef HAVE_window_save
4050 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4051 If the target machine has an explicit window save instruction, the
4052 actual entry value is the corresponding OUTGOING_REGNO instead. */
4053 if (REG_P (incoming)
4054 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
incoming
4056 = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
4057 OUTGOING_REGNO (REGNO (incoming)), 0);
4058 else if (MEM_P (incoming))
4060 rtx reg = XEXP (incoming, 0);
4061 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
4063 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
4064 incoming = replace_equiv_address_nv (incoming, reg);
4067 incoming = copy_rtx (incoming);
4071 ENTRY_VALUE_EXP (rtl) = incoming;
return rtl;
}
if (incoming
4076 && GET_MODE (incoming) != BLKmode
4077 && !TREE_ADDRESSABLE (decl)
&& MEM_P (incoming)
4079 && (XEXP (incoming, 0) == virtual_incoming_args_rtx
4080 || (GET_CODE (XEXP (incoming, 0)) == PLUS
4081 && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
4082 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
4083 return copy_rtx (incoming);
return NULL_RTX;
4088 /* Return an RTX equivalent to the value of the tree expression EXP. */
4091 expand_debug_expr (tree exp)
4093 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
4094 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4095 machine_mode inner_mode = VOIDmode;
4096 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4099 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4101 case tcc_expression:
4102 switch (TREE_CODE (exp))
4107 case WIDEN_MULT_PLUS_EXPR:
4108 case WIDEN_MULT_MINUS_EXPR:
4112 case TRUTH_ANDIF_EXPR:
4113 case TRUTH_ORIF_EXPR:
4114 case TRUTH_AND_EXPR:
4116 case TRUTH_XOR_EXPR:
4119 case TRUTH_NOT_EXPR:
4128 op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
4135 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4138 switch (TREE_CODE (exp))
4144 case WIDEN_LSHIFT_EXPR:
4145 /* Ensure second operand isn't wider than the first one. */
4146 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
4147 if (SCALAR_INT_MODE_P (inner_mode))
4149 machine_mode opmode = mode;
4150 if (VECTOR_MODE_P (mode))
4151 opmode = GET_MODE_INNER (mode);
4152 if (SCALAR_INT_MODE_P (opmode)
4153 && (GET_MODE_PRECISION (opmode)
4154 < GET_MODE_PRECISION (inner_mode)))
4155 op1 = lowpart_subreg (opmode, op1, inner_mode);
4165 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4166 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4171 case tcc_comparison:
4172 unsignedp = TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
4180 case tcc_exceptional:
4181 case tcc_declaration:
4187 switch (TREE_CODE (exp))
case STRING_CST:
4190 if (!lookup_constant_def (exp))
4192 if (strlen (TREE_STRING_POINTER (exp)) + 1
4193 != (size_t) TREE_STRING_LENGTH (exp))
return NULL_RTX;
4195 op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
4196 op0 = gen_rtx_MEM (BLKmode, op0);
4197 set_mem_attributes (op0, exp, 0);
return op0;
4205 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
return op0;
case COMPLEX_CST:
4209 gcc_assert (COMPLEX_MODE_P (mode));
4210 op0 = expand_debug_expr (TREE_REALPART (exp));
4211 op1 = expand_debug_expr (TREE_IMAGPART (exp));
4212 return gen_rtx_CONCAT (mode, op0, op1);
4214 case DEBUG_EXPR_DECL:
4215 op0 = DECL_RTL_IF_SET (exp);
if (op0)
return op0;
4220 op0 = gen_rtx_DEBUG_EXPR (mode);
4221 DEBUG_EXPR_TREE_DECL (op0) = exp;
4222 SET_DECL_RTL (exp, op0);
return op0;
4232 op0 = DECL_RTL_IF_SET (exp);
4234 /* This decl was probably optimized away. */
if (!op0)
{
4237 if (TREE_CODE (exp) != VAR_DECL
4238 || DECL_EXTERNAL (exp)
4239 || !TREE_STATIC (exp)
4241 || DECL_HARD_REGISTER (exp)
4242 || DECL_IN_CONSTANT_POOL (exp)
4243 || mode == VOIDmode)
return NULL;
4246 op0 = make_decl_rtl_for_debug (exp);
if (!MEM_P (op0)
4248 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
4249 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
return NULL;
}
else
4253 op0 = copy_rtx (op0);
4255 if (GET_MODE (op0) == BLKmode
4256 /* If op0 is not BLKmode, but mode is, adjust_mode
4257 below would ICE. While it is likely a FE bug,
4258 try to be robust here. See PR43166. */
4260 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
4262 gcc_assert (MEM_P (op0));
4263 op0 = adjust_address_nv (op0, mode, 0);
4273 inner_mode = GET_MODE (op0);
4275 if (mode == inner_mode)
4278 if (inner_mode == VOIDmode)
4280 if (TREE_CODE (exp) == SSA_NAME)
4281 inner_mode = TYPE_MODE (TREE_TYPE (exp));
4283 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4284 if (mode == inner_mode)
4288 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
4290 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
4291 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
4292 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
4293 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
4295 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
4297 else if (FLOAT_MODE_P (mode))
4299 gcc_assert (TREE_CODE (exp) != SSA_NAME);
4300 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
4301 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
4303 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
4305 else if (FLOAT_MODE_P (inner_mode))
4308 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
4310 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
4312 else if (CONSTANT_P (op0)
4313 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
4314 op0 = lowpart_subreg (mode, op0, inner_mode);
4315 else if (UNARY_CLASS_P (exp)
4316 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4318 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
4320 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
4326 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4328 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
4329 TREE_OPERAND (exp, 0),
4330 TREE_OPERAND (exp, 1));
if (newexp)
4332 return expand_debug_expr (newexp);
4336 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4337 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4341 if (TREE_CODE (exp) == MEM_REF)
4343 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4344 || (GET_CODE (op0) == PLUS
4345 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
4346 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4347 Instead just use get_inner_reference. */
4350 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4351 if (!op1 || !CONST_INT_P (op1))
return NULL;
4354 op0 = plus_constant (inner_mode, op0, INTVAL (op1));
4357 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4359 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4361 if (op0 == NULL_RTX)
return NULL;
4364 op0 = gen_rtx_MEM (mode, op0);
4365 set_mem_attributes (op0, exp, 0);
4366 if (TREE_CODE (exp) == MEM_REF
4367 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
4368 set_mem_expr (op0, NULL_TREE);
4369 set_mem_addr_space (op0, as);
return op0;
4373 case TARGET_MEM_REF:
4374 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
4375 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
return NULL;
4378 op0 = expand_debug_expr
4379 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
if (!op0)
return NULL;
4383 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4384 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4386 if (op0 == NULL_RTX)
4389 op0 = gen_rtx_MEM (mode, op0);
4391 set_mem_attributes (op0, exp, 0);
4392 set_mem_addr_space (op0, as);
4398 case ARRAY_RANGE_REF:
4403 case VIEW_CONVERT_EXPR:
4406 HOST_WIDE_INT bitsize, bitpos;
4408 int reversep, volatilep = 0;
4410 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4411 &unsignedp, &reversep, &volatilep, false);
4417 orig_op0 = op0 = expand_debug_expr (tem);
4424 machine_mode addrmode, offmode;
4429 op0 = XEXP (op0, 0);
4430 addrmode = GET_MODE (op0);
4431 if (addrmode == VOIDmode)
4434 op1 = expand_debug_expr (offset);
4438 offmode = GET_MODE (op1);
4439 if (offmode == VOIDmode)
4440 offmode = TYPE_MODE (TREE_TYPE (offset));
4442 if (addrmode != offmode)
4443 op1 = lowpart_subreg (addrmode, op1, offmode);
4445 /* Don't use offset_address here, we don't need a
4446 recognizable address, and we don't want to generate code. */
4448 op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
4454 if (mode1 == VOIDmode)
4456 mode1 = smallest_mode_for_size (bitsize, MODE_INT);
4457 if (bitpos >= BITS_PER_UNIT)
4459 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
4460 bitpos %= BITS_PER_UNIT;
4462 else if (bitpos < 0)
4465 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
4466 op0 = adjust_address_nv (op0, mode1, units);
4467 bitpos += units * BITS_PER_UNIT;
4469 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
4470 op0 = adjust_address_nv (op0, mode, 0);
4471 else if (GET_MODE (op0) != mode1)
4472 op0 = adjust_address_nv (op0, mode1, 0);
4474 op0 = copy_rtx (op0);
4475 if (op0 == orig_op0)
4476 op0 = shallow_copy_rtx (op0);
4477 set_mem_attributes (op0, exp, 0);
4480 if (bitpos == 0 && mode == GET_MODE (op0))
4486 if (GET_MODE (op0) == BLKmode)
4489 if ((bitpos % BITS_PER_UNIT) == 0
4490 && bitsize == GET_MODE_BITSIZE (mode1))
4492 machine_mode opmode = GET_MODE (op0);
4494 if (opmode == VOIDmode)
4495 opmode = TYPE_MODE (TREE_TYPE (tem));
4497 /* This condition may hold if we're expanding the address
4498 right past the end of an array that turned out not to
4499 be addressable (i.e., the address was only computed in
4500 debug stmts). The gen_subreg below would rightfully
4501 crash, and the address doesn't really exist, so just drop it. */
4503 if (bitpos >= GET_MODE_BITSIZE (opmode))
4506 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
4507 return simplify_gen_subreg (mode, op0, opmode,
4508 bitpos / BITS_PER_UNIT);
4511 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4512 && TYPE_UNSIGNED (TREE_TYPE (exp))
4514 : ZERO_EXTRACT, mode,
4515 GET_MODE (op0) != VOIDmode
4517 : TYPE_MODE (TREE_TYPE (tem)),
4518 op0, GEN_INT (bitsize), GEN_INT (bitpos));
4522 return simplify_gen_unary (ABS, mode, op0, mode);
4525 return simplify_gen_unary (NEG, mode, op0, mode);
4528 return simplify_gen_unary (NOT, mode, op0, mode);
4531 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
4533 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
4536 case FIX_TRUNC_EXPR:
4537 return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
4540 case POINTER_PLUS_EXPR:
4541 /* For the rare target where pointers are not the same size as
4542 size_t, we need to check for mismatched modes and correct the addition. */
4545 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
4546 && GET_MODE (op0) != GET_MODE (op1))
4548 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))
4549 /* If OP0 is a partial mode, then we must truncate, even if it has
4550 the same bitsize as OP1, as GCC's representation of partial modes is opaque. */
4552 || (GET_MODE_CLASS (GET_MODE (op0)) == MODE_PARTIAL_INT
4553 && GET_MODE_BITSIZE (GET_MODE (op0)) == GET_MODE_BITSIZE (GET_MODE (op1))))
4554 op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
4557 /* We always sign-extend, regardless of the signedness of
4558 the operand, because the operand is always unsigned
4559 here even if the original C expression is signed. */
4560 op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
4565 return simplify_gen_binary (PLUS, mode, op0, op1);
4568 return simplify_gen_binary (MINUS, mode, op0, op1);
4571 return simplify_gen_binary (MULT, mode, op0, op1);
4574 case TRUNC_DIV_EXPR:
4575 case EXACT_DIV_EXPR:
4577 return simplify_gen_binary (UDIV, mode, op0, op1);
4579 return simplify_gen_binary (DIV, mode, op0, op1);
4581 case TRUNC_MOD_EXPR:
4582 return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
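/* The FLOOR/CEIL/ROUND division and modulus cases below are all
   synthesized from truncating DIV/MOD plus an adjustment from the
   *_adjust helpers.  An illustrative worked example for signed floor
   division: -7 / 2 truncates to -3 with remainder -1; the remainder is
   nonzero and of opposite sign to the divisor, so the adjustment is -1,
   and div + adj gives -4 == floor(-3.5).  */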
4584 case FLOOR_DIV_EXPR:
4586 return simplify_gen_binary (UDIV, mode, op0, op1);
4589 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4590 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4591 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4592 return simplify_gen_binary (PLUS, mode, div, adj);
4595 case FLOOR_MOD_EXPR:
4597 return simplify_gen_binary (UMOD, mode, op0, op1);
4600 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4601 rtx adj = floor_sdiv_adjust (mode, mod, op1);
4602 adj = simplify_gen_unary (NEG, mode,
4603 simplify_gen_binary (MULT, mode, adj, op1),
4605 return simplify_gen_binary (PLUS, mode, mod, adj);
4611 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4612 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4613 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4614 return simplify_gen_binary (PLUS, mode, div, adj);
4618 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4619 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4620 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4621 return simplify_gen_binary (PLUS, mode, div, adj);
4627 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4628 rtx adj = ceil_udiv_adjust (mode, mod, op1);
4629 adj = simplify_gen_unary (NEG, mode,
4630 simplify_gen_binary (MULT, mode, adj, op1),
4632 return simplify_gen_binary (PLUS, mode, mod, adj);
4636 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4637 rtx adj = ceil_sdiv_adjust (mode, mod, op1);
4638 adj = simplify_gen_unary (NEG, mode,
4639 simplify_gen_binary (MULT, mode, adj, op1),
4641 return simplify_gen_binary (PLUS, mode, mod, adj);
4644 case ROUND_DIV_EXPR:
4647 rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
4648 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4649 rtx adj = round_udiv_adjust (mode, mod, op1);
4650 return simplify_gen_binary (PLUS, mode, div, adj);
4654 rtx div = simplify_gen_binary (DIV, mode, op0, op1);
4655 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4656 rtx adj = round_sdiv_adjust (mode, mod, op1);
4657 return simplify_gen_binary (PLUS, mode, div, adj);
4660 case ROUND_MOD_EXPR:
4663 rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
4664 rtx adj = round_udiv_adjust (mode, mod, op1);
4665 adj = simplify_gen_unary (NEG, mode,
4666 simplify_gen_binary (MULT, mode, adj, op1),
4668 return simplify_gen_binary (PLUS, mode, mod, adj);
4672 rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
4673 rtx adj = round_sdiv_adjust (mode, mod, op1);
4674 adj = simplify_gen_unary (NEG, mode,
4675 simplify_gen_binary (MULT, mode, adj, op1),
4677 return simplify_gen_binary (PLUS, mode, mod, adj);
4681 return simplify_gen_binary (ASHIFT, mode, op0, op1);
4685 return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
4687 return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
4690 return simplify_gen_binary (ROTATE, mode, op0, op1);
4693 return simplify_gen_binary (ROTATERT, mode, op0, op1);
4696 return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
4699 return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
4702 case TRUTH_AND_EXPR:
4703 return simplify_gen_binary (AND, mode, op0, op1);
4707 return simplify_gen_binary (IOR, mode, op0, op1);
4710 case TRUTH_XOR_EXPR:
4711 return simplify_gen_binary (XOR, mode, op0, op1);
4713 case TRUTH_ANDIF_EXPR:
4714 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
4716 case TRUTH_ORIF_EXPR:
4717 return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
4719 case TRUTH_NOT_EXPR:
4720 return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
4723 return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
4727 return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
4731 return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
4735 return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
4739 return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
4742 return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
4744 case UNORDERED_EXPR:
4745 return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
4748 return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
4751 return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
4754 return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
4757 return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
4760 return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
4763 return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
4766 return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
4769 return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
4772 gcc_assert (COMPLEX_MODE_P (mode));
4773 if (GET_MODE (op0) == VOIDmode)
4774 op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
4775 if (GET_MODE (op1) == VOIDmode)
4776 op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
4777 return gen_rtx_CONCAT (mode, op0, op1);
4780 if (GET_CODE (op0) == CONCAT)
4781 return gen_rtx_CONCAT (mode, XEXP (op0, 0),
4782 simplify_gen_unary (NEG, GET_MODE_INNER (mode),
4784 GET_MODE_INNER (mode)));
4787 machine_mode imode = GET_MODE_INNER (mode);
4792 re = adjust_address_nv (op0, imode, 0);
4793 im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
4797 machine_mode ifmode = int_mode_for_mode (mode);
4798 machine_mode ihmode = int_mode_for_mode (imode);
4800 if (ifmode == BLKmode || ihmode == BLKmode)
4802 halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
4805 re = gen_rtx_SUBREG (ifmode, re, 0);
4806 re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
4807 if (imode != ihmode)
4808 re = gen_rtx_SUBREG (imode, re, 0);
4809 im = copy_rtx (op0);
4811 im = gen_rtx_SUBREG (ifmode, im, 0);
4812 im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
4813 if (imode != ihmode)
4814 im = gen_rtx_SUBREG (imode, im, 0);
4816 im = gen_rtx_NEG (imode, im);
4817 return gen_rtx_CONCAT (mode, re, im);
4821 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4822 if (!op0 || !MEM_P (op0))
4824 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4825 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
4826 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
4827 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))
4828 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4829 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4831 if (handled_component_p (TREE_OPERAND (exp, 0)))
4833 HOST_WIDE_INT bitoffset, bitsize, maxsize;
4836 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4837 &bitsize, &maxsize, &reverse);
4838 if ((TREE_CODE (decl) == VAR_DECL
4839 || TREE_CODE (decl) == PARM_DECL
4840 || TREE_CODE (decl) == RESULT_DECL)
4841 && (!TREE_ADDRESSABLE (decl)
4842 || target_for_debug_bind (decl))
4843 && (bitoffset % BITS_PER_UNIT) == 0
4845 && bitsize == maxsize)
4847 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4848 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT);
4852 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4853 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4856 op0 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4859 && (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
4860 || (GET_CODE (op0) == PLUS
4861 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4862 && CONST_INT_P (XEXP (op0, 1)))))
4864 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4866 if (!op1 || !CONST_INT_P (op1))
4869 return plus_constant (mode, op0, INTVAL (op1));
4876 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
4877 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);
4885 op0 = gen_rtx_CONCATN
4886 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4888 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
4890 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4893 XVECEXP (op0, 0, i) = op1;
4900 if (TREE_CLOBBER_P (exp))
4902 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4907 op0 = gen_rtx_CONCATN
4908 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));
4910 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4912 op1 = expand_debug_expr (val);
4915 XVECEXP (op0, 0, i) = op1;
4918 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
4920 op1 = expand_debug_expr
4921 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4926 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
4927 XVECEXP (op0, 0, i) = op1;
4933 goto flag_unsupported;
4936 /* ??? Maybe handle some builtins? */
4941 gimple *g = get_gimple_for_ssa_name (exp);
4945 if (deep_ter_debug_map)
4947 tree *slot = deep_ter_debug_map->get (exp);
4952 t = gimple_assign_rhs_to_tree (g);
4953 op0 = expand_debug_expr (t);
4959 /* If this is a reference to an incoming value of a
4960 parameter that is never used in the code, or where the
4961 incoming value itself is never used, use the
4962 PARM_DECL's DECL_RTL if set. */
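/* For example (an illustrative case, not from the sources): in
   'int f (int i) { return 0; }' the default def i_1(D) has zero uses,
   so debug info for 'i' can fall back to the incoming register or
   stack slot recorded in the PARM_DECL's DECL_RTL.  */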
4963 if (SSA_NAME_IS_DEFAULT_DEF (exp)
4964 && SSA_NAME_VAR (exp)
4965 && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL
4966 && has_zero_uses (exp))
4968 op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
4971 op0 = expand_debug_expr (SSA_NAME_VAR (exp));
4976 int part = var_to_partition (SA.map, exp);
4978 if (part == NO_PARTITION)
4981 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
4983 op0 = copy_rtx (SA.partition_to_pseudo[part]);
4991 /* Vector stuff. For most of the codes we don't have rtl codes. */
4992 case REALIGN_LOAD_EXPR:
4993 case REDUC_MAX_EXPR:
4994 case REDUC_MIN_EXPR:
4995 case REDUC_PLUS_EXPR:
4997 case VEC_PACK_FIX_TRUNC_EXPR:
4998 case VEC_PACK_SAT_EXPR:
4999 case VEC_PACK_TRUNC_EXPR:
5000 case VEC_UNPACK_FLOAT_HI_EXPR:
5001 case VEC_UNPACK_FLOAT_LO_EXPR:
5002 case VEC_UNPACK_HI_EXPR:
5003 case VEC_UNPACK_LO_EXPR:
5004 case VEC_WIDEN_MULT_HI_EXPR:
5005 case VEC_WIDEN_MULT_LO_EXPR:
5006 case VEC_WIDEN_MULT_EVEN_EXPR:
5007 case VEC_WIDEN_MULT_ODD_EXPR:
5008 case VEC_WIDEN_LSHIFT_HI_EXPR:
5009 case VEC_WIDEN_LSHIFT_LO_EXPR:
5014 case ADDR_SPACE_CONVERT_EXPR:
5015 case FIXED_CONVERT_EXPR:
5017 case WITH_SIZE_EXPR:
5021 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5022 && SCALAR_INT_MODE_P (mode))
5025 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5027 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5030 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5032 ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
5034 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5035 return simplify_gen_binary (PLUS, mode, op0, op2);
5039 case WIDEN_MULT_EXPR:
5040 case WIDEN_MULT_PLUS_EXPR:
5041 case WIDEN_MULT_MINUS_EXPR:
5042 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5043 && SCALAR_INT_MODE_P (mode))
5045 inner_mode = GET_MODE (op0);
5046 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
5047 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5049 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5050 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
5051 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
5053 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
5054 op0 = simplify_gen_binary (MULT, mode, op0, op1);
5055 if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
5057 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
5058 return simplify_gen_binary (PLUS, mode, op0, op2);
5060 return simplify_gen_binary (MINUS, mode, op2, op0);
5064 case MULT_HIGHPART_EXPR:
5065 /* ??? Similar to the above. */
5068 case WIDEN_SUM_EXPR:
5069 case WIDEN_LSHIFT_EXPR:
5070 if (SCALAR_INT_MODE_P (GET_MODE (op0))
5071 && SCALAR_INT_MODE_P (mode))
5074 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
5076 ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
5078 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5079 ? ASHIFT : PLUS, mode, op0, op1);
5084 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
5097 /* Return an RTX equivalent to the source bind value of the tree expression EXP. */
5101 expand_debug_source_expr (tree exp)
5104 machine_mode mode = VOIDmode, inner_mode;
5106 switch (TREE_CODE (exp))
5110 mode = DECL_MODE (exp);
5111 op0 = expand_debug_parm_decl (exp);
5114 /* See if this isn't an argument that has been completely optimized out. */
5116 if (!DECL_RTL_SET_P (exp)
5117 && !DECL_INCOMING_RTL (exp)
5118 && DECL_ABSTRACT_ORIGIN (current_function_decl))
5120 tree aexp = DECL_ORIGIN (exp);
5121 if (DECL_CONTEXT (aexp)
5122 == DECL_ABSTRACT_ORIGIN (current_function_decl))
5124 vec<tree, va_gc> **debug_args;
5127 debug_args = decl_debug_args_lookup (current_function_decl);
5128 if (debug_args != NULL)
5130 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
5133 return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
5143 if (op0 == NULL_RTX)
5146 inner_mode = GET_MODE (op0);
5147 if (mode == inner_mode)
5150 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
5152 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
5153 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
5154 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
5155 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
5157 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
5159 else if (FLOAT_MODE_P (mode))
5161 else if (FLOAT_MODE_P (inner_mode))
5163 if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5164 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
5166 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
5168 else if (CONSTANT_P (op0)
5169 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
5170 op0 = lowpart_subreg (mode, op0, inner_mode);
5171 else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
5172 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
5174 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
5179 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
5180 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5181 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
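/* An illustrative sketch (not from the sources): given a location nested
   past four levels, such as
     (plus (mult (reg A) (reg B)) (mem (plus (reg C) (reg D))))
   buried inside further operations, the offending subexpression is pulled
   out into a fresh DEBUG_EXPR D#N, a DEBUG_INSN binding D#N to it is
   emitted before INSN, and the original location then refers to D#N.  */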
5184 avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
5188 if (exp == NULL_RTX)
5191 if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
5196 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5197 rtx dval = make_debug_expr_from_rtl (exp);
5199 /* Emit a debug bind insn before INSN. */
5200 rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
5201 DEBUG_EXPR_TREE_DECL (dval), exp,
5202 VAR_INIT_STATUS_INITIALIZED);
5204 emit_debug_insn_before (bind, insn);
5209 const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
5211 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
5212 switch (*format_ptr++)
5215 avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
5220 for (j = 0; j < XVECLEN (exp, i); j++)
5221 avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
5229 /* Expand the _LOCs in debug insns. We run this after expanding all
5230 regular insns, so that any variables referenced in the function
5231 will have their DECL_RTLs set. */
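/* For instance (an illustrative sketch): a debug bind of D.1234 to the
   tree 'x_1 + 1' is expanded here to RTL such as
     (plus:SI (reg:SI <x's pseudo>) (const_int 1))
   and if expansion fails, the location degrades to (unknown_var_loc)
   rather than the DEBUG_INSN being dropped.  */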
5234 expand_debug_locations (void)
5237 rtx_insn *last = get_last_insn ();
5238 int save_strict_alias = flag_strict_aliasing;
5240 /* New alias sets while setting up memory attributes cause
5241 -fcompare-debug failures, even though it doesn't bring about any codegen changes. */
5243 flag_strict_aliasing = 0;
5245 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5246 if (DEBUG_INSN_P (insn))
5248 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5250 rtx_insn *prev_insn, *insn2;
5253 if (value == NULL_TREE)
5257 if (INSN_VAR_LOCATION_STATUS (insn)
5258 == VAR_INIT_STATUS_UNINITIALIZED)
5259 val = expand_debug_source_expr (value);
5260 /* The avoid_deep_ter_for_debug function inserts
5261 debug bind stmts after SSA_NAME definition, with the
5262 SSA_NAME as the whole bind location. Temporarily disable
5263 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5264 being defined in this DEBUG_INSN. */
5265 else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
5267 tree *slot = deep_ter_debug_map->get (value);
5270 if (*slot == INSN_VAR_LOCATION_DECL (insn))
5275 val = expand_debug_expr (value);
5277 *slot = INSN_VAR_LOCATION_DECL (insn);
5280 val = expand_debug_expr (value);
5281 gcc_assert (last == get_last_insn ());
5285 val = gen_rtx_UNKNOWN_VAR_LOC ();
5288 mode = GET_MODE (INSN_VAR_LOCATION (insn));
5290 gcc_assert (mode == GET_MODE (val)
5291 || (GET_MODE (val) == VOIDmode
5292 && (CONST_SCALAR_INT_P (val)
5293 || GET_CODE (val) == CONST_FIXED
5294 || GET_CODE (val) == LABEL_REF)));
5297 INSN_VAR_LOCATION_LOC (insn) = val;
5298 prev_insn = PREV_INSN (insn);
5299 for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
5300 avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
5303 flag_strict_aliasing = save_strict_alias;
5306 /* Swap the operands of commutative operations so as to expand
5307 the more expensive one first. */
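/* An illustrative example (not from the sources): for 't = a + (b * c * d)'
   where the lattice cost of the second operand's def chain exceeds that
   of the first, the two operands of the commutative PLUS are swapped so
   the more expensive subtree is expanded first.  */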
5310 reorder_operands (basic_block bb)
5312 unsigned int *lattice; /* Hold cost of each statement. */
5313 unsigned int i = 0, n = 0;
5314 gimple_stmt_iterator gsi;
5320 use_operand_p use_p;
5321 gimple *def0, *def1;
5323 /* Compute cost of each statement using estimate_num_insns. */
5324 stmts = bb_seq (bb);
5325 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5327 stmt = gsi_stmt (gsi);
5328 if (!is_gimple_debug (stmt))
5329 gimple_set_uid (stmt, n++);
5331 lattice = XNEWVEC (unsigned int, n);
5332 for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
5335 stmt = gsi_stmt (gsi);
5336 if (is_gimple_debug (stmt))
5338 cost = estimate_num_insns (stmt, &eni_size_weights);
5340 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5342 tree use = USE_FROM_PTR (use_p);
5344 if (TREE_CODE (use) != SSA_NAME)
5346 def_stmt = get_gimple_for_ssa_name (use);
5349 lattice[i] += lattice[gimple_uid (def_stmt)];
5352 if (!is_gimple_assign (stmt)
5353 || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
5355 op0 = gimple_op (stmt, 1);
5356 op1 = gimple_op (stmt, 2);
5357 if (TREE_CODE (op0) != SSA_NAME
5358 || TREE_CODE (op1) != SSA_NAME)
5360 /* Swap operands if the second one is more expensive. */
5361 def0 = get_gimple_for_ssa_name (op0);
5362 def1 = get_gimple_for_ssa_name (op1);
5366 if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
5370 if (dump_file && (dump_flags & TDF_DETAILS))
5372 fprintf (dump_file, "Swap operands in stmt:\n");
5373 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5374 fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
5375 def0 ? lattice[gimple_uid (def0)] : 0,
5376 lattice[gimple_uid (def1)]);
5378 swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
5379 gimple_assign_rhs2_ptr (stmt));
5385 /* Expand basic block BB from GIMPLE trees to RTL. */
5388 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5390 gimple_stmt_iterator gsi;
5392 gimple *stmt = NULL;
5399 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
5402 /* Note that since we are now transitioning from GIMPLE to RTL, we
5403 cannot use the gsi_*_bb() routines because they expect the basic
5404 block to be in GIMPLE, instead of RTL. Therefore, we need to
5405 access the BB sequence directly. */
5407 reorder_operands (bb);
5408 stmts = bb_seq (bb);
5409 bb->il.gimple.seq = NULL;
5410 bb->il.gimple.phi_nodes = NULL;
5411 rtl_profile_for_bb (bb);
5412 init_rtl_bb_info (bb);
5413 bb->flags |= BB_RTL;
5415 /* Remove the RETURN_EXPR if we may fall through to the exit instead. */
5417 gsi = gsi_last (stmts);
5418 if (!gsi_end_p (gsi)
5419 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
5421 greturn *ret_stmt = as_a <greturn *> (gsi_stmt (gsi));
5423 gcc_assert (single_succ_p (bb));
5424 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun));
5426 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
5427 && !gimple_return_retval (ret_stmt))
5429 gsi_remove (&gsi, false);
5430 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
5434 gsi = gsi_start (stmts);
5435 if (!gsi_end_p (gsi))
5437 stmt = gsi_stmt (gsi);
5438 if (gimple_code (stmt) != GIMPLE_LABEL)
5442 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5446 last = get_last_insn ();
5450 expand_gimple_stmt (stmt);
5457 /* Java emits line number notes at the top of labels.
5458 ??? Make this go away once line number notes are obsoleted. */
5459 BB_HEAD (bb) = NEXT_INSN (last);
5460 if (NOTE_P (BB_HEAD (bb)))
5461 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5462 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5464 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5467 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5469 NOTE_BASIC_BLOCK (note) = bb;
5471 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5475 stmt = gsi_stmt (gsi);
5477 /* If this statement is a non-debug one, and we generate debug
5478 insns, then this one might be the last real use of a TERed
5479 SSA_NAME, but where there are still some debug uses further
5480 down. Expanding the current SSA name in such further debug
5481 uses by their RHS might lead to wrong debug info, as coalescing
5482 might make the operands of such RHS be placed into the same
5483 pseudo as something else. Like so:
5484 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
use(a_1);
a_2 = ...
#DEBUG ... => a_1
5488 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5489 If we now would expand a_1 by it's RHS (a_0 + 1) in the debug use,
5490 the write to a_2 would actually have clobbered the place which would be remembered.
5493 So, instead of that, we recognize the situation, and generate
5494 debug temporaries at the last real use of TERed SSA names:
a_1 = a_0 + 1;
#DEBUG #D1 => a_1
use(a_1);
a_2 = ...
#DEBUG ... => #D1 */
5501 if (MAY_HAVE_DEBUG_INSNS
5503 && !is_gimple_debug (stmt))
5509 location_t sloc = curr_insn_location ();
5511 /* Look for SSA names that have their last use here (TERed
5512 names always have only one real use). */
5513 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5514 if ((def = get_gimple_for_ssa_name (op)))
5516 imm_use_iterator imm_iter;
5517 use_operand_p use_p;
5518 bool have_debug_uses = false;
5520 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
5522 if (gimple_debug_bind_p (USE_STMT (use_p)))
5524 have_debug_uses = true;
5529 if (have_debug_uses)
5531 /* OP is a TERed SSA name, with DEF its defining
5532 statement, and where OP is used in further debug
5533 instructions. Generate a debug temporary, and
5534 replace all uses of OP in debug insns with that temporary. */
5537 tree value = gimple_assign_rhs_to_tree (def);
5538 tree vexpr = make_node (DEBUG_EXPR_DECL);
5542 set_curr_insn_location (gimple_location (def));
5544 DECL_ARTIFICIAL (vexpr) = 1;
5545 TREE_TYPE (vexpr) = TREE_TYPE (value);
5547 mode = DECL_MODE (value);
5549 mode = TYPE_MODE (TREE_TYPE (value));
5550 DECL_MODE (vexpr) = mode;
5552 val = gen_rtx_VAR_LOCATION
5553 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5555 emit_debug_insn (val);
5557 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
5559 if (!gimple_debug_bind_p (debugstmt))
5562 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
5563 SET_USE (use_p, vexpr);
5565 update_stmt (debugstmt);
5569 set_curr_insn_location (sloc);
5572 currently_expanding_gimple_stmt = stmt;
5574 /* Expand this statement, then evaluate the resulting RTL and
5575 fixup the CFG accordingly. */
5576 if (gimple_code (stmt) == GIMPLE_COND)
5578 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5582 else if (gimple_debug_bind_p (stmt))
5584 location_t sloc = curr_insn_location ();
5585 gimple_stmt_iterator nsi = gsi;
5589 tree var = gimple_debug_bind_get_var (stmt);
5594 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5595 && TREE_CODE (var) != LABEL_DECL
5596 && !target_for_debug_bind (var))
5597 goto delink_debug_stmt;
5599 if (gimple_debug_bind_has_value_p (stmt))
5600 value = gimple_debug_bind_get_value (stmt);
5604 last = get_last_insn ();
5606 set_curr_insn_location (gimple_location (stmt));
5609 mode = DECL_MODE (var);
5611 mode = TYPE_MODE (TREE_TYPE (var));
5613 val = gen_rtx_VAR_LOCATION
5614 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5616 emit_debug_insn (val);
5618 if (dump_file && (dump_flags & TDF_DETAILS))
5620 /* We can't dump the insn with a TREE where an RTX is expected. */
5622 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5623 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5624 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5628 /* In order not to generate too many debug temporaries,
5629 we delink all uses of debug statements we already expanded.
5630 Therefore debug statements between definition and real
5631 use of TERed SSA names will continue to use the SSA name,
5632 and not be replaced with debug temps. */
5633 delink_stmt_imm_use (stmt);
5637 if (gsi_end_p (nsi))
5639 stmt = gsi_stmt (nsi);
5640 if (!gimple_debug_bind_p (stmt))
5644 set_curr_insn_location (sloc);
5646 else if (gimple_debug_source_bind_p (stmt))
5648 location_t sloc = curr_insn_location ();
5649 tree var = gimple_debug_source_bind_get_var (stmt);
5650 tree value = gimple_debug_source_bind_get_value (stmt);
5654 last = get_last_insn ();
5656 set_curr_insn_location (gimple_location (stmt));
5658 mode = DECL_MODE (var);
5660 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5661 VAR_INIT_STATUS_UNINITIALIZED);
5663 emit_debug_insn (val);
5665 if (dump_file && (dump_flags & TDF_DETAILS))
5667 /* We can't dump the insn with a TREE where an RTX is expected. */
5669 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5670 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5671 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5674 set_curr_insn_location (sloc);
5678 gcall *call_stmt = dyn_cast <gcall *> (stmt);
5680 && gimple_call_tail_p (call_stmt)
5681 && disable_tail_calls)
5682 gimple_call_set_tail (call_stmt, false);
5684 if (call_stmt && gimple_call_tail_p (call_stmt))
5687 new_bb = expand_gimple_tailcall (bb, call_stmt, &can_fallthru);
5698 def_operand_p def_p;
5699 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
5703 /* Ignore this stmt if it is in the list of
5704 replaceable expressions. */
5706 && bitmap_bit_p (SA.values,
5707 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
5710 last = expand_gimple_stmt (stmt);
5711 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5716 currently_expanding_gimple_stmt = NULL;
5718 /* Expand implicit goto and convert goto_locus. */
5719 FOR_EACH_EDGE (e, ei, bb->succs)
5721 if (e->goto_locus != UNKNOWN_LOCATION)
5722 set_curr_insn_location (e->goto_locus);
5723 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
5725 emit_jump (label_rtx_for_bb (e->dest));
5726 e->flags &= ~EDGE_FALLTHRU;
5730 /* Expanded RTL can create a jump in the last instruction of a block.
5731 This might later be assumed to be a jump to the successor and break edge insertion.
5732 We need to insert a dummy move to prevent this. PR41440. */
5733 if (single_succ_p (bb)
5734 && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
5735 && (last = get_last_insn ())
5738 rtx dummy = gen_reg_rtx (SImode);
5739 emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
5742 do_pending_stack_adjust ();
5744 /* Find the block tail. The last insn in the block is the insn
5745 before a barrier and/or table jump insn. */
5746 last = get_last_insn ();
5747 if (BARRIER_P (last))
5748 last = PREV_INSN (last);
5749 if (JUMP_TABLE_DATA_P (last))
5750 last = PREV_INSN (PREV_INSN (last));
5753 update_bb_for_insn (bb);
5759 /* Create a basic block for initialization code. */
5762 construct_init_block (void)
5764 basic_block init_block, first_block;
5768 /* Multiple entry points not supported yet. */
5769 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs) == 1);
5770 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5771 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun));
5772 ENTRY_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5773 EXIT_BLOCK_PTR_FOR_FN (cfun)->flags |= BB_RTL;
5775 e = EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun), 0);
5777 /* When the entry edge points to the first basic block, we don't need a jump;
5778 otherwise we have to jump to the proper target. */
5779 if (e && e->dest != ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb)
5781 tree label = gimple_block_label (e->dest);
5783 emit_jump (jump_target_rtx (label));
5787 flags = EDGE_FALLTHRU;
5789 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5791 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5792 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5793 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5794 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5797 first_block = e->dest;
5798 redirect_edge_succ (e, init_block);
5799 e = make_edge (init_block, first_block, flags);
5802 e = make_edge (init_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5803 e->probability = REG_BR_PROB_BASE;
5804 e->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5806 update_bb_for_insn (init_block);
5810 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
5811 found in the block tree. */
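/* E.g. (illustrative): the pass calls this on DECL_INITIAL of the
   function decl with level 0, so the outermost scope gets BLOCK_NUMBER 0,
   its subblocks 1, and so on down the chain.  */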
5814 set_block_levels (tree block, int level)
5818 BLOCK_NUMBER (block) = level;
5819 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
5820 block = BLOCK_CHAIN (block);
5824 /* Create a block containing landing pads and similar stuff. */
5827 construct_exit_block (void)
5829 rtx_insn *head = get_last_insn ();
5831 basic_block exit_block;
5835 basic_block prev_bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
5836 rtx_insn *orig_end = BB_END (prev_bb);
5838 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));
5840 /* Make sure the locus is set to the end of the function, so that
5841 epilogue line numbers and warnings are set properly. */
5842 if (LOCATION_LOCUS (cfun->function_end_locus) != UNKNOWN_LOCATION)
5843 input_location = cfun->function_end_locus;
5845 /* Generate rtl for function exit. */
5846 expand_function_end ();
5848 end = get_last_insn ();
5851 /* While emitting the function end we could move the end of the last basic block. */
5853 BB_END (prev_bb) = orig_end;
5854 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5855 head = NEXT_INSN (head);
5856 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5857 bb frequency counting will be confused. Any instructions before that
5858 label are emitted for the case where PREV_BB falls through into the
5859 exit block, so append those instructions to prev_bb in that case. */
5860 if (NEXT_INSN (head) != return_label)
5862 while (NEXT_INSN (head) != return_label)
5864 if (!NOTE_P (NEXT_INSN (head)))
5865 BB_END (prev_bb) = NEXT_INSN (head);
5866 head = NEXT_INSN (head);
5869 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5870 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5871 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5872 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5875 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5877 e = EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun), ix);
5878 if (!(e->flags & EDGE_ABNORMAL))
5879 redirect_edge_succ (e, exit_block);
5884 e = make_edge (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FALLTHRU);
5885 e->probability = REG_BR_PROB_BASE;
5886 e->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5887 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5890 e->count -= e2->count;
5891 exit_block->count -= e2->count;
5892 exit_block->frequency -= EDGE_FREQUENCY (e2);
5896 if (exit_block->count < 0)
5897 exit_block->count = 0;
5898 if (exit_block->frequency < 0)
5899 exit_block->frequency = 0;
5900 update_bb_for_insn (exit_block);
5903 /* Helper function for discover_nonconstant_array_refs.
5904 Look for ARRAY_REF nodes with non-constant indexes and mark them addressable. */
5908 discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
5909 void *data ATTRIBUTE_UNUSED)
5913 if (IS_TYPE_OR_DECL_P (t))
5915 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5917 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5918 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
5919 && (!TREE_OPERAND (t, 2)
5920 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5921 || (TREE_CODE (t) == COMPONENT_REF
5922 && (!TREE_OPERAND (t,2)
5923 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
5924 || TREE_CODE (t) == BIT_FIELD_REF
5925 || TREE_CODE (t) == REALPART_EXPR
5926 || TREE_CODE (t) == IMAGPART_EXPR
5927 || TREE_CODE (t) == VIEW_CONVERT_EXPR
5928 || CONVERT_EXPR_P (t))
5929 t = TREE_OPERAND (t, 0);
5931 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
5933 t = get_base_address (t);
5935 && DECL_MODE (t) != BLKmode)
5936 TREE_ADDRESSABLE (t) = 1;
5945 /* RTL expansion is not able to compile array references with variable
5946 offsets for arrays stored in a single register. Discover such
5947 expressions and mark variables as addressable to avoid this scenario. */
5951 discover_nonconstant_array_refs (void)
5954 gimple_stmt_iterator gsi;
5956 FOR_EACH_BB_FN (bb, cfun)
5957 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5959 gimple *stmt = gsi_stmt (gsi);
5960 if (!is_gimple_debug (stmt))
5961 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
5965 /* This function sets crtl->args.internal_arg_pointer to a virtual
5966 register if DRAP is needed. The local register allocator will replace
5967 virtual_incoming_args_rtx with the virtual register. */
5970 expand_stack_alignment (void)
5973 unsigned int preferred_stack_boundary;
5975 if (! SUPPORTS_STACK_ALIGNMENT)
5978 if (cfun->calls_alloca
5979 || cfun->has_nonlocal_label
5980 || crtl->has_nonlocal_goto)
5981 crtl->need_drap = true;
5983 /* Call update_stack_boundary here again to update incoming stack
5984 boundary. It may set incoming stack alignment to a different
5985 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
5986 use the minimum incoming stack alignment to check if it is OK
5987 to perform sibcall optimization since sibcall optimization will
5988 only align the outgoing stack to incoming stack boundary. */
5989 if (targetm.calls.update_stack_boundary)
5990 targetm.calls.update_stack_boundary ();
5992 /* The incoming stack frame has to be aligned at least at
5993 parm_stack_boundary. */
5994 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
5996 /* Update crtl->stack_alignment_estimated and use it later to align
5997 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
5998 exceptions since callgraph doesn't collect incoming stack alignment in this case. */
6000 if (cfun->can_throw_non_call_exceptions
6001 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
6002 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
6004 preferred_stack_boundary = crtl->preferred_stack_boundary;
6005 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
6006 crtl->stack_alignment_estimated = preferred_stack_boundary;
6007 if (preferred_stack_boundary > crtl->stack_alignment_needed)
6008 crtl->stack_alignment_needed = preferred_stack_boundary;
6010 gcc_assert (crtl->stack_alignment_needed
6011 <= crtl->stack_alignment_estimated);
6013 crtl->stack_realign_needed
6014 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
6015 crtl->stack_realign_tried = crtl->stack_realign_needed;
6017 crtl->stack_realign_processed = true;
6019 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack realignment. */
6021 gcc_assert (targetm.calls.get_drap_rtx != NULL);
6022 drap_rtx = targetm.calls.get_drap_rtx ();
6024 /* stack_realign_drap and drap_rtx must match. */
6025 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6027 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6028 if (NULL != drap_rtx)
6030 crtl->args.internal_arg_pointer = drap_rtx;
6032 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is needed. */
6034 fixup_tail_calls ();
6040 expand_main_function (void)
6042 #if (defined(INVOKE__main) \
6043 || (!defined(HAS_INIT_SECTION) \
6044 && !defined(INIT_SECTION_ASM_OP) \
6045 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6046 emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode, 0);
6051 /* Expand code to initialize the stack_protect_guard. This is invoked at
6052 the beginning of a function to be protected. */
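/* Conceptually (a sketch; the guard symbol is target-dependent, e.g.
   __stack_chk_guard on typical glibc targets) this emits
     <this frame's guard slot> = __stack_chk_guard;
   preferably via the target's stack_protect_set pattern so the guard
   value is not left live in a scratch register.  */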
6055 stack_protect_prologue (void)
6057 tree guard_decl = targetm.stack_protect_guard ();
6060 x = expand_normal (crtl->stack_protect_guard);
6061 y = expand_normal (guard_decl);
6063 /* Allow the target to copy from Y to X without leaking Y into a register. */
6065 if (targetm.have_stack_protect_set ())
6066 if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
6072 /* Otherwise do a straight move. */
6073 emit_move_insn (x, y);
6076 /* Translate the intermediate representation contained in the CFG
6077 from GIMPLE trees to RTL.
6079 We do conversion per basic block and preserve/update the tree CFG.
6080 This implies we have to do some magic as the CFG can simultaneously
6081 consist of basic blocks containing RTL and GIMPLE trees. This can
6082 confuse the CFG hooks, so be careful to not manipulate CFG during the expansion. */
6087 const pass_data pass_data_expand =
6089 RTL_PASS, /* type */
6090 "expand", /* name */
6091 OPTGROUP_NONE, /* optinfo_flags */
6092 TV_EXPAND, /* tv_id */
6093 ( PROP_ssa | PROP_gimple_leh | PROP_cfg
6096 | PROP_gimple_lva), /* properties_required */
6097 PROP_rtl, /* properties_provided */
6098 ( PROP_ssa | PROP_trees ), /* properties_destroyed */
6099 0, /* todo_flags_start */
6100 0, /* todo_flags_finish */
6103 class pass_expand : public rtl_opt_pass
6106 pass_expand (gcc::context *ctxt)
6107 : rtl_opt_pass (pass_data_expand, ctxt)
6110 /* opt_pass methods: */
6111 virtual unsigned int execute (function *);
6113 }; // class pass_expand
6116 pass_expand::execute (function *fun)
6118 basic_block bb, init_block;
6122 rtx_insn *var_seq, *var_ret_seq;
6125 timevar_push (TV_OUT_OF_SSA);
6126 rewrite_out_of_ssa (&SA);
6127 timevar_pop (TV_OUT_OF_SSA);
6128 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6130 if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter)
6132 gimple_stmt_iterator gsi;
6133 FOR_EACH_BB_FN (bb, cfun)
6134 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6135 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6136 avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
6139 /* Make sure all values used by the optimization passes have sane defaults. */
6143 /* Some backends want to know that we are expanding to RTL. */
6144 currently_expanding_to_rtl = 1;
6145 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6146 free_dominance_info (CDI_DOMINATORS);
6148 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6150 if (chkp_function_instrumented_p (current_function_decl))
6151 chkp_reset_rtl_bounds ();
6153 insn_locations_init ();
6154 if (!DECL_IS_BUILTIN (current_function_decl))
6156 /* Eventually, all FEs should explicitly set function_start_locus. */
6157 if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
6158 set_curr_insn_location
6159 (DECL_SOURCE_LOCATION (current_function_decl));
6161 set_curr_insn_location (fun->function_start_locus);
6164 set_curr_insn_location (UNKNOWN_LOCATION);
6165 prologue_location = curr_insn_location ();
6167 #ifdef INSN_SCHEDULING
6168 init_sched_attrs ();
6171 /* Make sure first insn is a note even if we don't want linenums.
6172 This makes sure the first insn will never be deleted.
6173 Also, final expects a note to appear there. */
6174 emit_note (NOTE_INSN_DELETED);
6176 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6177 discover_nonconstant_array_refs ();
6179 targetm.expand_to_rtl_hook ();
6180 crtl->stack_alignment_needed = STACK_BOUNDARY;
6181 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
6182 crtl->stack_alignment_estimated = 0;
6183 crtl->preferred_stack_boundary = STACK_BOUNDARY;
6184 fun->cfg->max_jumptable_ents = 0;
6186 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge
6187 of the function section at expansion time to predict the distance of calls. */
6188 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6190 /* Expand the variables recorded during gimple lowering. */
6191 timevar_push (TV_VAR_EXPAND);
6194 var_ret_seq = expand_used_vars ();
6196 var_seq = get_insns ();
6198 timevar_pop (TV_VAR_EXPAND);
6200 /* Honor stack protection warnings. */
6201 if (warn_stack_protect)
6203 if (fun->calls_alloca)
6204 warning (OPT_Wstack_protector,
6205 "stack protector not protecting local variables: "
6206 "variable length buffer");
6207 if (has_short_buffer && !crtl->stack_protect_guard)
6208 warning (OPT_Wstack_protector,
6209 "stack protector not protecting function: "
6210 "all local arrays are less than %d bytes long",
6211 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
6214 /* Set up parameters and prepare for return, for the function. */
6215 expand_function_start (current_function_decl);
6217 /* If we emitted any instructions for setting up the variables,
6218 emit them before the FUNCTION_START note. */
6221 emit_insn_before (var_seq, parm_birth_insn);
6223 /* In expand_function_end we'll insert the alloca save/restore
6224 before parm_birth_insn. We've just inserted an alloca call.
6225 Adjust the pointer to match. */
6226 parm_birth_insn = var_seq;
6229 /* Now propagate the RTL assignment of each partition to the
6230 underlying var of each SSA_NAME. */
6231 for (i = 1; i < num_ssa_names; i++)
6233 tree name = ssa_name (i);
6236 /* We might have generated new SSA names in
6237 update_alias_info_with_stack_vars. They will have NULL
6238 defining statements, and won't be part of the partitioning, so ignore them. */
6240 || !SSA_NAME_DEF_STMT (name))
6243 adjust_one_expanded_partition_var (name);
6246 /* Clean up RTL of variables that straddle across multiple
6247 partitions, and check that the rtl of any PARM_DECLs that are not
6248 cleaned up is that of their default defs. */
6249 for (i = 1; i < num_ssa_names; i++)
6251 tree name = ssa_name (i);
6255 /* We might have generated new SSA names in
6256 update_alias_info_with_stack_vars. They will have NULL
6257 defining statements, and won't be part of the partitioning, so ignore them. */
6259 || !SSA_NAME_DEF_STMT (name))
6261 part = var_to_partition (SA.map, name);
6262 if (part == NO_PARTITION)
6265 /* If this decl was marked as living in multiple places, reset
6266 this now to NULL. */
6267 tree var = SSA_NAME_VAR (name);
6268 if (var && DECL_RTL_IF_SET (var) == pc_rtx)
6269 SET_DECL_RTL (var, NULL);
6270 /* Check that the pseudos chosen by assign_parms are those of
6271 the corresponding default defs. */
6272 else if (SSA_NAME_IS_DEFAULT_DEF (name)
6273 && (TREE_CODE (var) == PARM_DECL
6274 || TREE_CODE (var) == RESULT_DECL))
6276 rtx in = DECL_RTL_IF_SET (var);
6278 rtx out = SA.partition_to_pseudo[part];
6279 gcc_assert (in == out);
6281 /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
6282 those expected by debug backends for each parm and for
6283 the result. This is particularly important for stabs,
6284 whose register elimination from parm's DECL_RTL may cause
6285 -fcompare-debug differences as SET_DECL_RTL changes reg's
6286 attrs. So, make sure the RTL already has the parm as the
6287 EXPR, so that it won't change. */
6288 SET_DECL_RTL (var, NULL_RTX);
6290 set_mem_attributes (in, var, true);
6291 SET_DECL_RTL (var, in);
6295 /* If this function is `main', emit a call to `__main'
6296 to run global initializers, etc. */
6297 if (DECL_NAME (current_function_decl)
6298 && MAIN_NAME_P (DECL_NAME (current_function_decl))
6299 && DECL_FILE_SCOPE_P (current_function_decl))
6300 expand_main_function ();
6302 /* Initialize the stack_protect_guard field. This must happen after the
6303 call to __main (if any) so that the external decl is initialized. */
6304 if (crtl->stack_protect_guard)
6305 stack_protect_prologue ();
6307 expand_phi_nodes (&SA);
6309 /* Release any stale SSA redirection data. */
6310 redirect_edge_var_map_empty ();
6312 /* Register rtl specific functions for cfg. */
6313 rtl_register_cfg_hooks ();
6315 init_block = construct_init_block ();
6317 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
6318 remaining edges later. */
6319 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6320 e->flags &= ~EDGE_EXECUTABLE;
6322 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6323 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6325 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6327 if (MAY_HAVE_DEBUG_INSNS)
6328 expand_debug_locations ();
6330 if (deep_ter_debug_map)
6332 delete deep_ter_debug_map;
6333 deep_ter_debug_map = NULL;
6336 /* Free stuff we no longer need after GIMPLE optimizations. */
6337 free_dominance_info (CDI_DOMINATORS);
6338 free_dominance_info (CDI_POST_DOMINATORS);
6339 delete_tree_cfg_annotations (fun);
6341 timevar_push (TV_OUT_OF_SSA);
6342 finish_out_of_ssa (&SA);
6343 timevar_pop (TV_OUT_OF_SSA);
6345 timevar_push (TV_POST_EXPAND);
6346 /* We are no longer in SSA form. */
6347 fun->gimple_df->in_ssa_p = false;
6348 loops_state_clear (LOOP_CLOSED_SSA);
6350 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
6351 conservatively to true until they are all profile aware. */
6352 delete lab_rtx_for_bb;
6353 free_histograms (fun);
6355 construct_exit_block ();
6356 insn_locations_finalize ();
6360 rtx_insn *after = return_label;
6361 rtx_insn *next = NEXT_INSN (after);
6362 if (next && NOTE_INSN_BASIC_BLOCK_P (next))
6364 emit_insn_after (var_ret_seq, after);
6367 /* Zap the tree EH table. */
6368 set_eh_throw_stmt_table (fun, NULL);
6370 /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
6371 to split edges, which edge insertions might do. */
6372 rebuild_jump_labels (get_insns ());
6374 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun),
6375 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6379 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6383 rebuild_jump_labels_chain (e->insns.r);
6384 /* Put insns after parm birth, but before
6385 NOTE_INSN_FUNCTION_BEG. */
6386 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (fun)
6387 && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
6389 rtx_insn *insns = e->insns.r;
6391 if (NOTE_P (parm_birth_insn)
6392 && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
6393 emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
6395 emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
6398 commit_one_edge_insertion (e);
6405 /* We're done expanding trees to RTL. */
6406 currently_expanding_to_rtl = 0;
6408 flush_mark_addressable_queue ();
6410 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
6411 EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
6415 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
6417 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
6418 e->flags &= ~EDGE_EXECUTABLE;
6420 /* At the moment not all abnormal edges match the RTL
6421 representation. It is safe to remove them here as
6422 find_many_sub_basic_blocks will rediscover them.
6423 In the future we should get this fixed properly. */
6424 if ((e->flags & EDGE_ABNORMAL)
6425 && !(e->flags & EDGE_SIBCALL))
6432 blocks = sbitmap_alloc (last_basic_block_for_fn (fun));
6433 bitmap_ones (blocks);
6434 find_many_sub_basic_blocks (blocks);
6435 sbitmap_free (blocks);
6436 purge_all_dead_edges ();
6438 expand_stack_alignment ();
6440 /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
6442 if (crtl->tail_call_emit)
6443 fixup_tail_calls ();
6445 /* After initial rtl generation, call back to finish generating
6446 exception support code. We need to do this before cleaning up
6447 the CFG as the code does not expect dead landing pads. */
6448 if (fun->eh->region_tree != NULL)
6449 finish_eh_generation ();
6451 /* Remove unreachable blocks, otherwise we cannot compute dominators
6452 which are needed for loop state verification. As a side-effect
6453 this also compacts blocks.
6454 ??? We cannot remove trivially dead insns here as for example
6455 the DRAP reg on i?86 is not magically live at this point.
6456 gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise. */
6457 cleanup_cfg (CLEANUP_NO_INSN_DEL);
6459 checking_verify_flow_info ();
6461 /* Initialize pseudos allocated for hard registers. */
6462 emit_initial_value_sets ();
6464 /* And finally unshare all RTL. */
6467 /* There's no need to defer outputting this function any more; we
6468 know we want to output it. */
6469 DECL_DEFER_OUTPUT (current_function_decl) = 0;
6471 /* Now that we're done expanding trees to RTL, we shouldn't have any
6472 more CONCATs anywhere. */
6473 generating_concat_p = 0;
6478 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6479 /* And the pass manager will dump RTL for us. */
6482 /* If we're emitting a nested function, make sure its parent gets
6483 emitted as well. Doing otherwise confuses debug info. */
6486 for (parent = DECL_CONTEXT (current_function_decl);
6487 parent != NULL_TREE;
6488 parent = get_containing_scope (parent))
6489 if (TREE_CODE (parent) == FUNCTION_DECL)
6490 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
6493 /* We are now committed to emitting code for this function. Do any
6494 preparation, such as emitting abstract debug info for the inline function
6495 before it gets mangled by optimization. */
6496 if (cgraph_function_possibly_inlined_p (current_function_decl))
6497 (*debug_hooks->outlining_inline_function) (current_function_decl);
6499 TREE_ASM_WRITTEN (current_function_decl) = 1;
6501 /* After expanding, the return labels are no longer needed. */
6502 return_label = NULL;
6503 naked_return_label = NULL;
6505 /* After expanding, the tm_restart map is no longer needed. */
6506 if (fun->gimple_df->tm_restart)
6507 fun->gimple_df->tm_restart = NULL;
6509 /* Tag the blocks with a depth number so that change_scope can find
6510 the common parent easily. */
6511 set_block_levels (DECL_INITIAL (fun->decl), 0);
6512 default_rtl_profile ();
6514 timevar_pop (TV_POST_EXPAND);
6522 make_pass_expand (gcc::context *ctxt)
6524 return new pass_expand (ctxt);