/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2003, 2004 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "langhooks.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"

/* expr.h is needed for MOVE_RATIO.  */
#include "expr.h"
/* The object of this pass is to replace a non-addressable aggregate with a
   set of independent variables.  Most of the time, all of these variables
   will be scalars.  But a secondary objective is to break up larger
   aggregates into smaller aggregates.  In the process we may find that some
   bits of the larger aggregate can be deleted as unreferenced.

   This substitution is done globally.  More localized substitutions would
   be the purview of a load-store motion pass.

   The optimization proceeds in phases:

     (1) Identify variables that have types that are candidates for
	 decomposition.

     (2) Scan the function looking for the ways these variables are used.
	 In particular we're interested in the number of times a variable
	 (or member) is needed as a complete unit, and the number of times
	 a variable (or member) is copied.

     (3) Based on the usage profile, instantiate substitution variables.

     (4) Scan the function making replacements.  */
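
/* For illustration only (a hypothetical input, not part of the pass):
   given

     struct point { int x, y; };
     struct point p;
     p.x = 3;
     p.y = 4;
     return p.x + p.y;

   SRA replaces the aggregate P with one scalar per accessed member,
   roughly

     int p$x, p$y;
     p$x = 3;
     p$y = 4;
     return p$x + p$y;

   after which the scalar optimizers can propagate and fold as usual.  */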
/* The set of aggregate variables that are candidates for scalarization.  */
static bitmap sra_candidates;

/* Set of scalarizable PARM_DECLs that need copy-in operations at the
   beginning of the function.  */
static bitmap needs_copy_in;

/* Sets of bit pairs that cache type decomposition and instantiation.  */
static bitmap sra_type_decomp_cache;
static bitmap sra_type_inst_cache;
/* One of these structures is created for each candidate aggregate
   and each (accessed) member of such an aggregate.  */
struct sra_elt
{
  /* A tree of the elements.  Used when we want to traverse everything.  */
  struct sra_elt *parent;
  struct sra_elt *children;
  struct sra_elt *sibling;

  /* If this element is a root, then this is the VAR_DECL.  If this is
     a sub-element, this is some token used to identify the reference.
     In the case of COMPONENT_REF, this is the FIELD_DECL.  In the case
     of an ARRAY_REF, this is the (constant) index.  In the case of a
     complex number, this is zero or one.  */
  tree element;

  /* The type of the element.  */
  tree type;

  /* A VAR_DECL, for any sub-element we've decided to replace.  */
  tree replacement;

  /* The number of times the element is referenced as a whole.  I.e.
     given "a.b.c", this would be incremented for C, but not for A or B.  */
  unsigned int n_uses;

  /* The number of times the element is copied to or from another
     scalarizable element.  */
  unsigned int n_copies;

  /* True if TYPE is scalar.  */
  bool is_scalar;

  /* True if we saw something about this element that prevents scalarization,
     such as non-constant indexing.  */
  bool cannot_scalarize;

  /* True if we've decided that structure-to-structure assignment
     should happen via memcpy and not per-element.  */
  bool use_block_copy;

  /* A flag for use with/after random access traversals.  */
  bool visited;
};

/* Random access to the child of a parent is performed by hashing.
   This prevents quadratic behavior, and allows SRA to function
   reasonably on larger records.  */
static htab_t sra_map;
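
/* For example (illustrative), the reference "a.b[2]" is represented by a
   chain of three sra_elt nodes:

     { element = VAR_DECL a,    parent = NULL            }
     { element = FIELD_DECL b,  parent = node for a      }
     { element = INTEGER_CST 2, parent = node for a.b    }

   Each node is found in SRA_MAP by hashing its (parent, element) pair,
   so locating a given child of a given parent is O(1) on average.  */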
/* All structures are allocated out of the following obstack.  */
static struct obstack sra_obstack;

/* Debugging functions.  */
static void dump_sra_elt_name (FILE *, struct sra_elt *);
extern void debug_sra_elt_name (struct sra_elt *);
/* Return true if DECL is an SRA candidate.  */

static bool
is_sra_candidate_decl (tree decl)
{
  return DECL_P (decl) && bitmap_bit_p (sra_candidates, var_ann (decl)->uid);
}

/* Return true if TYPE is a scalar type.  */

static bool
is_sra_scalar_type (tree type)
{
  enum tree_code code = TREE_CODE (type);
  return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
	  || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
	  || code == CHAR_TYPE || code == POINTER_TYPE || code == OFFSET_TYPE
	  || code == REFERENCE_TYPE);
}
/* Return true if TYPE can be decomposed into a set of independent variables.

   Note that this doesn't imply that all elements of TYPE can be
   instantiated, just that if we decide to break up the type into
   separate pieces that it can be done.  */

static bool
type_can_be_decomposed_p (tree type)
{
  unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
  tree t;

  /* Avoid searching the same type twice.  */
  if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
    return true;
  if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
    return false;

  /* The type must have a definite nonzero size.  */
  if (TYPE_SIZE (type) == NULL || integer_zerop (TYPE_SIZE (type)))
    goto fail;

  /* The type must be a non-union aggregate.  */
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	bool saw_one_field = false;

	for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	  if (TREE_CODE (t) == FIELD_DECL)
	    {
	      /* Reject incorrectly represented bit fields.  */
	      if (DECL_BIT_FIELD (t)
		  && (tree_low_cst (DECL_SIZE (t), 1)
		      != TYPE_PRECISION (TREE_TYPE (t))))
		goto fail;

	      saw_one_field = true;
	    }

	/* Record types must have at least one field.  */
	if (!saw_one_field)
	  goto fail;
      }
      break;

    case ARRAY_TYPE:
      /* Array types must have a fixed lower and upper bound.  */
      t = TYPE_DOMAIN (type);
      if (t == NULL)
	goto fail;
      if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
	goto fail;
      if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
	goto fail;
      break;

    case COMPLEX_TYPE:
      break;

    default:
      goto fail;
    }

  bitmap_set_bit (sra_type_decomp_cache, cache+0);
  return true;

 fail:
  bitmap_set_bit (sra_type_decomp_cache, cache+1);
  return false;
}
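
/* Illustrative examples of the test above (not exhaustive):

     struct { int a; double b; }    decomposable
     union { int a; double b; }     rejected: union
     struct { int a : 3; }          rejected: the bit field's size (3)
				    differs from its type's precision
     int x[n]                       rejected: variable array bound  */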
/* Return true if DECL can be decomposed into a set of independent
   (though not necessarily scalar) variables.  */

static bool
decl_can_be_decomposed_p (tree var)
{
  /* Early out for scalars.  */
  if (is_sra_scalar_type (TREE_TYPE (var)))
    return false;

  /* The variable must not be aliased.  */
  if (!is_gimple_non_addressable (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it must live in memory\n");
	}
      return false;
    }

  /* The variable must not be volatile.  */
  if (TREE_THIS_VOLATILE (var))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because it is declared volatile\n");
	}
      return false;
    }

  /* We must be able to decompose the variable's type.  */
  if (!type_can_be_decomposed_p (TREE_TYPE (var)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Cannot scalarize variable ");
	  print_generic_expr (dump_file, var, dump_flags);
	  fprintf (dump_file, " because its type cannot be decomposed\n");
	}
      return false;
    }

  return true;
}
/* Return true if TYPE can be *completely* decomposed into scalars.  */

static bool
type_can_instantiate_all_elements (tree type)
{
  if (is_sra_scalar_type (type))
    return true;
  if (!type_can_be_decomposed_p (type))
    return false;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
	tree f;

	if (bitmap_bit_p (sra_type_inst_cache, cache+0))
	  return true;
	if (bitmap_bit_p (sra_type_inst_cache, cache+1))
	  return false;

	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    {
	      if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
		{
		  bitmap_set_bit (sra_type_inst_cache, cache+1);
		  return false;
		}
	    }

	bitmap_set_bit (sra_type_inst_cache, cache+0);
	return true;
      }

    case ARRAY_TYPE:
      return type_can_instantiate_all_elements (TREE_TYPE (type));

    case COMPLEX_TYPE:
      return true;

    default:
      gcc_unreachable ();
    }
}
/* Test whether ELT or some sub-element cannot be scalarized.  */

static bool
can_completely_scalarize_p (struct sra_elt *elt)
{
  struct sra_elt *c;

  if (elt->cannot_scalarize)
    return false;

  for (c = elt->children; c ; c = c->sibling)
    if (!can_completely_scalarize_p (c))
      return false;

  return true;
}
/* A simplified tree hashing algorithm that only handles the types of
   trees we expect to find in sra_elt->element.  */

static hashval_t
sra_hash_tree (tree t)
{
  hashval_t h;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      h = DECL_UID (t);
      break;

    case INTEGER_CST:
      h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
      break;

    case FIELD_DECL:
      /* We can have types that are compatible, but have different member
	 lists, so we can't hash fields by ID.  Use offsets instead.  */
      h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
      h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
      break;

    default:
      gcc_unreachable ();
    }

  return h;
}

/* Hash function for type SRA_PAIR.  */

static hashval_t
sra_elt_hash (const void *x)
{
  const struct sra_elt *e = x;
  const struct sra_elt *p;
  hashval_t h;

  h = sra_hash_tree (e->element);

  /* Take into account everything back up the chain.  Given that chain
     lengths are rarely very long, this should be acceptable.  If we
     truly identify this as a performance problem, it should work to
     hash the pointer value "e->parent".  */
  for (p = e->parent; p ; p = p->parent)
    h = (h * 65521) ^ sra_hash_tree (p->element);

  return h;
}
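
/* Note: 65521 is the largest prime below 2^16.  Multiplying the running
   hash by it before xor-ing in the next ancestor's hash spreads each
   contribution across the word, so chains containing the same elements
   at different depths still tend to hash differently.  */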
/* Equality function for type SRA_PAIR.  */

static int
sra_elt_eq (const void *x, const void *y)
{
  const struct sra_elt *a = x;
  const struct sra_elt *b = y;
  tree ae, be;

  if (a->parent != b->parent)
    return false;

  ae = a->element;
  be = b->element;

  if (ae == be)
    return true;
  if (TREE_CODE (ae) != TREE_CODE (be))
    return false;

  switch (TREE_CODE (ae))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* These are all pointer unique.  */
      return false;

    case INTEGER_CST:
      /* Integers are not pointer unique, so compare their values.  */
      return tree_int_cst_equal (ae, be);

    case FIELD_DECL:
      /* Fields are unique within a record, but not between
	 compatible records.  */
      if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
	return false;
      return fields_compatible_p (ae, be);

    default:
      gcc_unreachable ();
    }
}
/* Create or return the SRA_ELT structure for CHILD in PARENT.  PARENT
   may be null, in which case CHILD must be a DECL.  */

static struct sra_elt *
lookup_element (struct sra_elt *parent, tree child, tree type,
		enum insert_option insert)
{
  struct sra_elt dummy;
  struct sra_elt **slot;
  struct sra_elt *elt;

  dummy.parent = parent;
  dummy.element = child;

  slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
  if (!slot && insert == NO_INSERT)
    return NULL;

  elt = *slot;
  if (!elt && insert == INSERT)
    {
      *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
      memset (elt, 0, sizeof (*elt));

      elt->parent = parent;
      elt->element = child;
      elt->type = type;
      elt->is_scalar = is_sra_scalar_type (type);

      if (parent)
	{
	  elt->sibling = parent->children;
	  parent->children = elt;
	}

      /* If this is a parameter, then if we want to scalarize, we have
	 one copy from the true function parameter.  Count it now.  */
      if (TREE_CODE (child) == PARM_DECL)
	{
	  elt->n_copies = 1;
	  bitmap_set_bit (needs_copy_in, var_ann (child)->uid);
	}
    }

  return elt;
}
/* Return true if the ARRAY_REF in EXPR is a constant, in-bounds access.  */

static bool
is_valid_const_index (tree expr)
{
  tree dom, t, index = TREE_OPERAND (expr, 1);

  if (TREE_CODE (index) != INTEGER_CST)
    return false;

  /* Watch out for stupid user tricks, indexing outside the array.

     Careful, we're not called only on scalarizable types, so do not
     assume constant array bounds.  We needn't do anything with such
     cases, since they'll be referring to objects that we should have
     already rejected for scalarization, so returning false is fine.  */

  dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (expr, 0)));
  if (dom == NULL)
    return false;

  t = TYPE_MIN_VALUE (dom);
  if (!t || TREE_CODE (t) != INTEGER_CST)
    return false;
  if (tree_int_cst_lt (index, t))
    return false;

  t = TYPE_MAX_VALUE (dom);
  if (!t || TREE_CODE (t) != INTEGER_CST)
    return false;
  if (tree_int_cst_lt (t, index))
    return false;

  return true;
}
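
/* For example (illustrative): given "int a[4];", the reference a[2]
   passes the test above, while a[7] (out of bounds) and a[i] with
   non-constant I both fail, which ultimately forces the enclosing
   aggregate to be treated as a whole.  */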
/* Create or return the SRA_ELT structure for EXPR if the expression
   refers to a scalarizable variable.  */

static struct sra_elt *
maybe_lookup_element_for_expr (tree expr)
{
  struct sra_elt *elt;
  tree child;

  switch (TREE_CODE (expr))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      if (is_sra_candidate_decl (expr))
	return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
      return NULL;

    case ARRAY_REF:
      /* We can't scalarize variable array indices.  */
      if (is_valid_const_index (expr))
	child = TREE_OPERAND (expr, 1);
      else
	return NULL;
      break;

    case COMPONENT_REF:
      /* Don't look through unions.  */
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
	return NULL;
      child = TREE_OPERAND (expr, 1);
      break;

    case REALPART_EXPR:
      child = integer_zero_node;
      break;

    case IMAGPART_EXPR:
      child = integer_one_node;
      break;

    default:
      return NULL;
    }

  elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
  if (elt)
    return lookup_element (elt, child, TREE_TYPE (expr), INSERT);

  return NULL;
}
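
/* Illustrative trace (hypothetical): for the reference "x.f[3]" the
   recursion above walks ARRAY_REF -> COMPONENT_REF -> VAR_DECL, bottoms
   out at X, and then builds the chain x, x.f, x.f[3] on the way back up,
   creating any missing sra_elt nodes via lookup_element.  */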
/* Functions to walk just enough of the tree to see all scalarizable
   references, and categorize them.  */

/* A set of callbacks for phases 2 and 4.  They'll be invoked for the
   various kinds of references seen.  In all cases, *BSI is an iterator
   pointing to the statement being processed.  */
struct sra_walk_fns
{
  /* Invoked when ELT is required as a unit.  Note that ELT might refer to
     a leaf node, in which case this is a simple scalar reference.  *EXPR_P
     points to the location of the expression.  IS_OUTPUT is true if this
     is a left-hand-side reference.  */
  void (*use) (struct sra_elt *elt, tree *expr_p,
	       block_stmt_iterator *bsi, bool is_output);

  /* Invoked when we have a copy between two scalarizable references.  */
  void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi);

  /* Invoked when ELT is initialized from a constant.  VALUE may be NULL,
     in which case it should be treated as an empty CONSTRUCTOR.  */
  void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);

  /* Invoked when we have a copy between one scalarizable reference ELT
     and one non-scalarizable reference OTHER.  IS_OUTPUT is true if ELT
     is on the left-hand side.  */
  void (*ldst) (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output);

  /* True during phase 2, false during phase 4.  */
  /* ??? This is a hack.  */
  bool initial_scan;
};
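
/* Illustrative mapping from statement shapes to the callbacks above,
   assuming A and B are scalarizable aggregates and P is a pointer:

     x = a.f;   -> use  (a.f referenced as a unit)
     a = b;     -> copy
     a = {0};   -> init (CONSTRUCTOR on the right-hand side)
     a = *p;    -> ldst (non-scalarizable right-hand side)
     f (a);     -> use  (A required as a complete unit)  */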
#ifdef ENABLE_CHECKING
/* Invoked via walk_tree, if *TP contains a candidate decl, return it.  */

static tree
sra_find_candidate_decl (tree *tp, int *walk_subtrees,
			 void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    {
      *walk_subtrees = 0;
      if (is_sra_candidate_decl (t))
	return t;
    }
  else if (TYPE_P (t))
    *walk_subtrees = 0;

  return NULL;
}
#endif
/* Walk most expressions looking for a scalarizable aggregate.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
	       const struct sra_walk_fns *fns)
{
  tree expr = *expr_p;
  tree inner = expr;
  bool disable_scalarization = false;

  /* We're looking to collect a reference expression between EXPR and INNER,
     such that INNER is a scalarizable decl and all other nodes through EXPR
     are references that we can scalarize.  If we come across something that
     we can't scalarize, we reset EXPR.  This has the effect of making it
     appear that we're referring to the larger expression as a whole.  */

  for (;;)
    switch (TREE_CODE (inner))
      {
      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
	/* If there is a scalarizable decl at the bottom, then process it.  */
	if (is_sra_candidate_decl (inner))
	  {
	    struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
	    if (disable_scalarization)
	      elt->cannot_scalarize = true;
	    else
	      fns->use (elt, expr_p, bsi, is_output);
	  }
	return;

      case ARRAY_REF:
	/* Non-constant index means any member may be accessed.  Prevent the
	   expression from being scalarized.  If we were to treat this as a
	   reference to the whole array, we can wind up with a single dynamic
	   index reference inside a loop being overridden by several constant
	   index references during loop setup.  It's possible that this could
	   be avoided by using dynamic usage counts based on BB trip counts
	   (based on loop analysis or profiling), but that hardly seems worth
	   the effort.  */
	/* ??? Hack.  Figure out how to push this into the scan routines
	   without duplicating too much code.  */
	if (!is_valid_const_index (inner))
	  {
	    disable_scalarization = true;
	    goto use_all;
	  }
	/* ??? Are we assured that non-constant bounds and stride will have
	   the same value everywhere?  I don't think Fortran will...  */
	if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case COMPONENT_REF:
	/* A reference to a union member constitutes a reference to the
	   entire union.  */
	if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
	  goto use_all;
	/* ??? See above re non-constant stride.  */
	if (TREE_OPERAND (inner, 2))
	  goto use_all;
	inner = TREE_OPERAND (inner, 0);
	break;

      case REALPART_EXPR:
      case IMAGPART_EXPR:
	inner = TREE_OPERAND (inner, 0);
	break;

      case BIT_FIELD_REF:
	/* A bit field reference (access to *multiple* fields simultaneously)
	   is not currently scalarized.  Consider this an access to the
	   complete outer element, to which walk_tree will bring us next.  */
	goto use_all;

      case ARRAY_RANGE_REF:
	/* Similarly, a subrange reference is used to modify indexing, which
	   means that the canonical element names that we have won't work.  */
	goto use_all;

      case VIEW_CONVERT_EXPR:
      case NOP_EXPR:
	/* Similarly, a view/nop explicitly wants to look at an object in a
	   type other than the one we've scalarized.  */
	goto use_all;

      case WITH_SIZE_EXPR:
	/* This is a transparent wrapper.  The entire inner expression really
	   is being used.  */
	goto use_all;

      use_all:
	expr_p = &TREE_OPERAND (inner, 0);
	inner = expr = *expr_p;
	break;

      default:
#ifdef ENABLE_CHECKING
	/* Validate that we're not missing any references.  */
	gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
	return;
      }
}
/* Walk a TREE_LIST of values looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
		    const struct sra_walk_fns *fns)
{
  tree op;
  for (op = list; op ; op = TREE_CHAIN (op))
    sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
}

/* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
   If we find one, invoke FNS->USE.  */

static void
sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
		    const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
}

/* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
   aggregates.  If we find one, invoke FNS->USE.  */

static void
sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
		   const struct sra_walk_fns *fns)
{
  sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
  sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
}
/* Walk a MODIFY_EXPR and categorize the assignment appropriately.  */

static void
sra_walk_modify_expr (tree expr, block_stmt_iterator *bsi,
		      const struct sra_walk_fns *fns)
{
  struct sra_elt *lhs_elt, *rhs_elt;
  tree lhs, rhs;

  lhs = TREE_OPERAND (expr, 0);
  rhs = TREE_OPERAND (expr, 1);
  lhs_elt = maybe_lookup_element_for_expr (lhs);
  rhs_elt = maybe_lookup_element_for_expr (rhs);

  /* If both sides are scalarizable, this is a COPY operation.  */
  if (lhs_elt && rhs_elt)
    {
      fns->copy (lhs_elt, rhs_elt, bsi);
      return;
    }

  if (lhs_elt)
    {
      /* If this is an assignment from a constant, or constructor, then
	 we have access to all of the elements individually.  Invoke INIT.  */
      if (TREE_CODE (rhs) == COMPLEX_EXPR
	  || TREE_CODE (rhs) == COMPLEX_CST
	  || TREE_CODE (rhs) == CONSTRUCTOR)
	fns->init (lhs_elt, rhs, bsi);

      /* If this is an assignment from read-only memory, treat this as if
	 we'd been passed the constructor directly.  Invoke INIT.  */
      else if (TREE_CODE (rhs) == VAR_DECL
	       && TREE_STATIC (rhs)
	       && TREE_READONLY (rhs)
	       && targetm.binds_local_p (rhs))
	fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);

      /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
	 The lvalue requirement prevents us from trying to directly scalarize
	 the result of a function call, which would result in trying to call
	 the function multiple times, and other evil things.  */
      else if (!lhs_elt->is_scalar && is_gimple_addressable (rhs))
	fns->ldst (lhs_elt, rhs, bsi, true);

      /* Otherwise we're being used in some context that requires the
	 aggregate to be seen as a whole.  Invoke USE.  */
      else
	fns->use (lhs_elt, &TREE_OPERAND (expr, 0), bsi, true);
    }
  else
    {
      /* LHS_ELT being null only means that the LHS as a whole is not a
	 scalarizable reference.  There may be occurrences of scalarizable
	 variables within, which implies a USE.  */
      sra_walk_expr (&TREE_OPERAND (expr, 0), bsi, true, fns);
    }

  /* Likewise for the right-hand side.  The only difference here is that
     we don't have to handle constants, and the RHS may be a call.  */
  if (rhs_elt)
    {
      if (!rhs_elt->is_scalar)
	fns->ldst (rhs_elt, lhs, bsi, false);
      else
	fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false);
    }
  else
    {
      tree call = get_call_expr_in (rhs);
      if (call)
	sra_walk_call_expr (call, bsi, fns);
      else
	sra_walk_expr (&TREE_OPERAND (expr, 1), bsi, false, fns);
    }
}
/* Entry point to the walk functions.  Search the entire function,
   invoking the callbacks in FNS on each of the references to
   scalarizable variables.  */

static void
sra_walk_function (const struct sra_walk_fns *fns)
{
  basic_block bb;
  block_stmt_iterator si, ni;

  /* ??? Phase 4 could derive some benefit from walking the function in
     dominator tree order.  */

  FOR_EACH_BB (bb)
    for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
      {
	tree stmt, t;
	stmt_ann_t ann;

	stmt = bsi_stmt (si);
	ann = stmt_ann (stmt);

	ni = si;
	bsi_next (&ni);

	/* If the statement has no virtual operands, then it doesn't
	   make any structure references that we care about.  */
	if (NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann)) == 0
	    && NUM_VUSES (VUSE_OPS (ann)) == 0
	    && NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann)) == 0)
	  continue;

	switch (TREE_CODE (stmt))
	  {
	  case RETURN_EXPR:
	    /* If we have "return <retval>" then the return value is
	       already exposed for our pleasure.  Walk it as a USE to
	       force all the components back in place for the return.

	       If we have an embedded assignment, then <retval> is of
	       a type that gets returned in registers in this ABI, and
	       we do not wish to extend their lifetimes.  Treat this
	       as a USE of the variable on the RHS of this assignment.  */

	    t = TREE_OPERAND (stmt, 0);
	    if (TREE_CODE (t) == MODIFY_EXPR)
	      sra_walk_expr (&TREE_OPERAND (t, 1), &si, false, fns);
	    else
	      sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
	    break;

	  case MODIFY_EXPR:
	    sra_walk_modify_expr (stmt, &si, fns);
	    break;
	  case CALL_EXPR:
	    sra_walk_call_expr (stmt, &si, fns);
	    break;
	  case ASM_EXPR:
	    sra_walk_asm_expr (stmt, &si, fns);
	    break;

	  default:
	    break;
	  }
      }
}
/* Phase One: Scan all referenced variables in the program looking for
   structures that could be decomposed.  */

static bool
find_candidates_for_sra (void)
{
  size_t i;
  bool any_set = false;

  for (i = 0; i < num_referenced_vars; i++)
    {
      tree var = referenced_var (i);
      if (decl_can_be_decomposed_p (var))
	{
	  bitmap_set_bit (sra_candidates, var_ann (var)->uid);
	  any_set = true;
	}
    }

  return any_set;
}
/* Phase Two: Scan all references to scalarizable variables.  Count the
   number of times they are used or copied respectively.  */

/* Callbacks to fill in SRA_WALK_FNS.  Everything but USE is
   considered a copy, because we can decompose the reference such that
   the sub-elements needn't be contiguous.  */

static void
scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
	  block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	  bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_uses += 1;
}

static void
scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
  rhs_elt->n_copies += 1;
}

static void
scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
{
  lhs_elt->n_copies += 1;
}

static void
scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
	   block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
	   bool is_output ATTRIBUTE_UNUSED)
{
  elt->n_copies += 1;
}
/* Dump the values we collected during the scanning phase.  */

static void
scan_dump (struct sra_elt *elt)
{
  struct sra_elt *c;

  dump_sra_elt_name (dump_file, elt);
  fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);

  for (c = elt->children; c ; c = c->sibling)
    scan_dump (c);
}

/* Entry point to phase 2.  Scan the entire function, building up
   scalarization data structures, recording copies and uses.  */

static void
scan_function (void)
{
  static const struct sra_walk_fns fns = {
    scan_use, scan_copy, scan_init, scan_ldst, true
  };

  sra_walk_function (&fns);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      unsigned i;
      bitmap_iterator bi;

      fputs ("\nScan results:\n", dump_file);
      EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
	{
	  tree var = referenced_var (i);
	  struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
	  if (elt)
	    scan_dump (elt);
	}
      fputc ('\n', dump_file);
    }
}
/* Phase Three: Make decisions about which variables to scalarize, if any.
   All elements to be scalarized have replacement variables made for them.  */

/* A subroutine of build_element_name.  Recursively build the element
   name on the obstack.  */

static void
build_element_name_1 (struct sra_elt *elt)
{
  tree t;
  char buffer[32];

  if (elt->parent)
    {
      build_element_name_1 (elt->parent);
      obstack_1grow (&sra_obstack, '$');

      if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
	{
	  if (elt->element == integer_zero_node)
	    obstack_grow (&sra_obstack, "real", 4);
	  else
	    obstack_grow (&sra_obstack, "imag", 4);
	  return;
	}
    }

  t = elt->element;
  if (TREE_CODE (t) == INTEGER_CST)
    {
      /* ??? Eh.  Don't bother doing double-wide printing.  */
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
      obstack_grow (&sra_obstack, buffer, strlen (buffer));
    }
  else
    {
      tree name = DECL_NAME (t);
      if (name)
	obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
		      IDENTIFIER_LENGTH (name));
      else
	{
	  sprintf (buffer, "D%u", DECL_UID (t));
	  obstack_grow (&sra_obstack, buffer, strlen (buffer));
	}
    }
}

/* Construct a pretty variable name for an element's replacement variable.
   The name is built on the obstack.  */

static char *
build_element_name (struct sra_elt *elt)
{
  build_element_name_1 (elt);
  obstack_1grow (&sra_obstack, '\0');
  return obstack_finish (&sra_obstack);
}
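
/* For example (illustrative): the replacement scalar for the element
   a.b[2] is named "a$b$2", and the real part of a complex variable C is
   named "c$real".  */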
/* Instantiate an element as an independent variable.  */

static void
instantiate_element (struct sra_elt *elt)
{
  struct sra_elt *base_elt;
  tree var, base;

  for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
    continue;
  base = base_elt->element;

  elt->replacement = var = make_rename_temp (elt->type, "SR");
  DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
  TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
  DECL_ARTIFICIAL (var) = DECL_ARTIFICIAL (base);
  DECL_IGNORED_P (var) = DECL_IGNORED_P (base);

  if (DECL_NAME (base) && !DECL_IGNORED_P (base))
    {
      char *pretty_name = build_element_name (elt);
      DECL_NAME (var) = get_identifier (pretty_name);
      obstack_free (&sra_obstack, pretty_name);
    }

  if (dump_file)
    {
      fputs ("  ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputs (" -> ", dump_file);
      print_generic_expr (dump_file, var, dump_flags);
      fputc ('\n', dump_file);
    }
}
/* Make one pass across an element tree deciding whether or not it's
   profitable to instantiate individual leaf scalars.

   PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
   fields all the way up the tree.  */

static void
decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
			unsigned int parent_copies)
{
  if (dump_file && !elt->parent)
    {
      fputs ("Initial instantiation for ", dump_file);
      dump_sra_elt_name (dump_file, elt);
      fputc ('\n', dump_file);
    }

  if (elt->cannot_scalarize)
    return;

  if (elt->is_scalar)
    {
      /* The decision is simple: instantiate if we're used more frequently
	 than the parent needs to be seen as a complete unit.  */
      if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
	instantiate_element (elt);
    }
  else
    {
      struct sra_elt *c;
      unsigned int this_uses = elt->n_uses + parent_uses;
      unsigned int this_copies = elt->n_copies + parent_copies;

      for (c = elt->children; c ; c = c->sibling)
	decide_instantiation_1 (c, this_uses, this_copies);
    }
}
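
/* Worked example (hypothetical counts): for a leaf a.x with n_uses=3 and
   n_copies=1, whose parent A has n_uses=2 and n_copies=0, the test is
   3 + 1 + 0 > 2, so a.x gets its own scalar replacement.  Had A been used
   whole six times, 3 + 1 + 0 > 6 fails and a.x stays in the aggregate.  */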
/* Compute the size and number of all instantiated elements below ELT.
   We will only care about this if the size of the complete structure
   fits in a HOST_WIDE_INT, so we don't have to worry about overflow.  */

static unsigned int
sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
{
  if (elt->replacement)
    {
      *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
      return 1;
    }
  else
    {
      struct sra_elt *c;
      unsigned int count = 0;

      for (c = elt->children; c ; c = c->sibling)
	count += sum_instantiated_sizes (c, sizep);

      return count;
    }
}
/* Instantiate fields in ELT->TYPE that are not currently present as
   children of ELT.  */

static void instantiate_missing_elements (struct sra_elt *elt);

static void
instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
{
  struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
  if (sub->is_scalar)
    {
      if (sub->replacement == NULL)
	instantiate_element (sub);
    }
  else
    instantiate_missing_elements (sub);
}

static void
instantiate_missing_elements (struct sra_elt *elt)
{
  tree type = elt->type;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      {
	tree f;
	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
	  if (TREE_CODE (f) == FIELD_DECL)
	    instantiate_missing_elements_1 (elt, f, TREE_TYPE (f));
	break;
      }

    case ARRAY_TYPE:
      {
	tree i, max, subtype;

	i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	subtype = TREE_TYPE (type);

	while (1)
	  {
	    instantiate_missing_elements_1 (elt, i, subtype);
	    if (tree_int_cst_equal (i, max))
	      break;
	    i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
	  }

	break;
      }

    case COMPLEX_TYPE:
      type = TREE_TYPE (type);
      instantiate_missing_elements_1 (elt, integer_zero_node, type);
      instantiate_missing_elements_1 (elt, integer_one_node, type);
      break;

    default:
      gcc_unreachable ();
    }
}
/* Make one pass across an element tree deciding whether to perform block
   or element copies.  If we decide on element copies, instantiate all
   elements.  Return true if there are any instantiated sub-elements.  */

static bool
decide_block_copy (struct sra_elt *elt)
{
  struct sra_elt *c;
  bool any_inst;

  /* If scalarization is disabled, respect it.  */
  if (elt->cannot_scalarize)
    {
      elt->use_block_copy = 1;

      if (dump_file)
	{
	  fputs ("Scalarization disabled for ", dump_file);
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      return false;
    }

  /* Don't decide if we've no uses.  */
  if (elt->n_uses == 0 && elt->n_copies == 0)
    ;
  else if (!elt->is_scalar)
    {
      tree size_tree = TYPE_SIZE_UNIT (elt->type);
      bool use_block_copy = true;

      /* Don't bother trying to figure out the rest if the structure is
	 so large we can't do easy arithmetic.  This also forces block
	 copies for variable sized structures.  */
      if (host_integerp (size_tree, 1))
	{
	  unsigned HOST_WIDE_INT full_size, inst_size = 0;
	  unsigned int inst_count;

	  full_size = tree_low_cst (size_tree, 1);

	  /* ??? What to do here.  If there are two fields, and we've only
	     instantiated one, then instantiating the other is clearly a win.
	     If there are a large number of fields then the size of the copy
	     is much more of a factor.  */

	  /* If the structure is small, and we've made copies, go ahead
	     and instantiate, hoping that the copies will go away.  */
	  if (full_size <= (unsigned) MOVE_RATIO * UNITS_PER_WORD
	      && elt->n_copies > elt->n_uses)
	    use_block_copy = false;
	  else
	    {
	      inst_count = sum_instantiated_sizes (elt, &inst_size);

	      if (inst_size * 4 >= full_size * 3)
		use_block_copy = false;
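
	      /* Illustrative numbers (hypothetical): for a 16-byte
		 structure with 12 bytes' worth of fields already
		 instantiated, 12 * 4 >= 16 * 3 holds (48 >= 48), so we
		 commit to element copies; at 11 of 16 bytes (44 < 48)
		 we keep the block copy.  */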
	    }

	  /* In order to avoid block copy, we have to be able to instantiate
	     all elements of the type.  See if this is possible.  */
	  if (!use_block_copy
	      && (!can_completely_scalarize_p (elt)
		  || !type_can_instantiate_all_elements (elt->type)))
	    use_block_copy = true;
	}

      elt->use_block_copy = use_block_copy;

      if (dump_file)
	{
	  fprintf (dump_file, "Using %s for ",
		   use_block_copy ? "block-copy" : "element-copy");
	  dump_sra_elt_name (dump_file, elt);
	  fputc ('\n', dump_file);
	}

      if (!use_block_copy)
	{
	  instantiate_missing_elements (elt);
	  return true;
	}
    }

  any_inst = elt->replacement != NULL;

  for (c = elt->children; c ; c = c->sibling)
    any_inst |= decide_block_copy (c);

  return any_inst;
}
/* Entry point to phase 3.  Instantiate scalar replacement variables.  */

static void
decide_instantiations (void)
{
  unsigned int i;
  bool cleared_any;
  struct bitmap_head_def done_head;
  bitmap_iterator bi;

  /* We cannot clear bits from a bitmap we're iterating over,
     so save up all the bits to clear until the end.  */
  bitmap_initialize (&done_head, 1);
  cleared_any = false;

  EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      if (elt)
	{
	  decide_instantiation_1 (elt, 0, 0);
	  if (!decide_block_copy (elt))
	    elt = NULL;
	}
      if (!elt)
	{
	  bitmap_set_bit (&done_head, i);
	  cleared_any = true;
	}
    }

  if (cleared_any)
    {
      bitmap_operation (sra_candidates, sra_candidates, &done_head,
			BITMAP_AND_COMPL);
      bitmap_operation (needs_copy_in, needs_copy_in, &done_head,
			BITMAP_AND_COMPL);
    }
  bitmap_clear (&done_head);

  if (dump_file)
    fputc ('\n', dump_file);
}
/* Phase Four: Update the function to match the replacements created.  */

/* Mark all the variables in V_MAY_DEF or V_MUST_DEF operands for STMT for
   renaming.  This becomes necessary when we modify all of a non-scalar.  */

static void
mark_all_v_defs (tree stmt)
{
  ssa_op_iter iter;
  tree sym;

  get_stmt_operands (stmt);

  FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_VIRTUAL_DEFS)
    {
      if (TREE_CODE (sym) == SSA_NAME)
	sym = SSA_NAME_VAR (sym);
      bitmap_set_bit (vars_to_rename, var_ann (sym)->uid);
    }
}
/* Build a single level component reference to ELT rooted at BASE.  */

static tree
generate_one_element_ref (struct sra_elt *elt, tree base)
{
  switch (TREE_CODE (TREE_TYPE (base)))
    {
    case RECORD_TYPE:
      {
	tree field = elt->element;

	/* Watch out for compatible records with differing field lists.  */
	if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	  field = find_compatible_field (TREE_TYPE (base), field);

	return build (COMPONENT_REF, elt->type, base, field, NULL);
      }

    case ARRAY_TYPE:
      return build (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);

    case COMPLEX_TYPE:
      if (elt->element == integer_zero_node)
	return build (REALPART_EXPR, elt->type, base);
      else
	return build (IMAGPART_EXPR, elt->type, base);

    default:
      gcc_unreachable ();
    }
}

/* Build a full component reference to ELT rooted at its native variable.  */

static tree
generate_element_ref (struct sra_elt *elt)
{
  if (elt->parent)
    return generate_one_element_ref (elt, generate_element_ref (elt->parent));
  else
    return elt->element;
}
/* Generate a set of assignment statements in *LIST_P to copy all
   instantiated elements under ELT to or from the equivalent structure
   rooted at EXPR.  COPY_OUT controls the direction of the copy, with
   true meaning to copy out of EXPR into ELT.  */

static void
generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
		     tree *list_p)
{
  struct sra_elt *c;
  tree t;

  if (elt->replacement)
    {
      if (copy_out)
	t = build (MODIFY_EXPR, void_type_node, elt->replacement, expr);
      else
	t = build (MODIFY_EXPR, void_type_node, expr, elt->replacement);
      append_to_statement_list (t, list_p);
    }
  else
    {
      for (c = elt->children; c ; c = c->sibling)
	{
	  t = generate_one_element_ref (c, unshare_expr (expr));
	  generate_copy_inout (c, copy_out, t, list_p);
	}
    }
}
/* Generate a set of assignment statements in *LIST_P to copy all instantiated
   elements under SRC to their counterparts under DST.  There must be a 1-1
   correspondence of instantiated elements.  */

static void
generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
{
  struct sra_elt *dc, *sc;

  for (dc = dst->children; dc ; dc = dc->sibling)
    {
      sc = lookup_element (src, dc->element, NULL, NO_INSERT);
      gcc_assert (sc);
      generate_element_copy (dc, sc, list_p);
    }

  if (dst->replacement)
    {
      tree t;

      gcc_assert (src->replacement);

      t = build (MODIFY_EXPR, void_type_node, dst->replacement,
		 src->replacement);
      append_to_statement_list (t, list_p);
    }
}
/* Generate a set of assignment statements in *LIST_P to zero all instantiated
   elements under ELT.  In addition, do not assign to elements that have been
   marked VISITED but do reset the visited flag; this allows easy coordination
   with generate_element_init.  */

static void
generate_element_zero (struct sra_elt *elt, tree *list_p)
{
  struct sra_elt *c;

  if (elt->visited)
    {
      elt->visited = false;
      return;
    }

  for (c = elt->children; c ; c = c->sibling)
    generate_element_zero (c, list_p);

  if (elt->replacement)
    {
      tree t;

      gcc_assert (elt->is_scalar);
      t = fold_convert (elt->type, integer_zero_node);

      t = build (MODIFY_EXPR, void_type_node, elt->replacement, t);
      append_to_statement_list (t, list_p);
    }
}
/* Generate an assignment VAR = INIT, where INIT may need gimplification.
   Add the result to *LIST_P.  */

static void
generate_one_element_init (tree var, tree init, tree *list_p)
{
  tree stmt;

  /* The replacement can be almost arbitrarily complex.  Gimplify.  */
  stmt = build (MODIFY_EXPR, void_type_node, var, init);
  gimplify_stmt (&stmt);

  /* The replacement can expose previously unreferenced variables.  */
  if (TREE_CODE (stmt) == STATEMENT_LIST)
    {
      tree_stmt_iterator i;
      for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
	find_new_referenced_vars (tsi_stmt_ptr (i));
    }
  else
    find_new_referenced_vars (&stmt);

  append_to_statement_list (stmt, list_p);
}
/* Generate a set of assignment statements in *LIST_P to set all instantiated
   elements under ELT with the contents of the initializer INIT.  In addition,
   mark all assigned elements VISITED; this allows easy coordination with
   generate_element_zero.  Return false if we found a case we couldn't
   handle.  */

static bool
generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
{
  bool result = true;
  enum tree_code init_code;
  struct sra_elt *sub;
  tree t;

  /* We can be passed DECL_INITIAL of a static variable.  It might have a
     conversion, which we strip off here.  */
  STRIP_USELESS_TYPE_CONVERSION (init);
  init_code = TREE_CODE (init);

  if (elt->is_scalar)
    {
      if (elt->replacement)
	{
	  generate_one_element_init (elt->replacement, init, list_p);
	  elt->visited = true;
	}
      return result;
    }

  switch (init_code)
    {
    case COMPLEX_CST:
    case COMPLEX_EXPR:
      for (sub = elt->children; sub ; sub = sub->sibling)
	{
	  if (sub->element == integer_zero_node)
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
	  else
	    t = (init_code == COMPLEX_EXPR
		 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
	  result &= generate_element_init (sub, t, list_p);
	}
      break;

    case CONSTRUCTOR:
      for (t = CONSTRUCTOR_ELTS (init); t ; t = TREE_CHAIN (t))
	{
	  sub = lookup_element (elt, TREE_PURPOSE (t), NULL, NO_INSERT);
	  if (sub == NULL)
	    continue;
	  result &= generate_element_init (sub, TREE_VALUE (t), list_p);
	}
      break;

    default:
      elt->visited = true;
      result = false;
    }

  return result;
}
/* Insert STMT on all the outgoing edges out of BB.  Note that if BB
   has more than one edge, STMT will be replicated for each edge.  Also,
   abnormal edges will be ignored.  */

void
insert_edge_copies (tree stmt, basic_block bb)
{
  edge e;
  edge_iterator ei;
  bool first_copy;

  first_copy = true;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      /* We don't need to insert copies on abnormal edges.  The
	 value of the scalar replacement is not guaranteed to
	 be valid through an abnormal edge.  */
      if (!(e->flags & EDGE_ABNORMAL))
	{
	  if (first_copy)
	    {
	      bsi_insert_on_edge (e, stmt);
	      first_copy = false;
	    }
	  else
	    bsi_insert_on_edge (e, unsave_expr_now (stmt));
	}
    }
}
/* Helper function to insert LIST before BSI, and set up line number info.  */

static void
sra_insert_before (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
  bsi_insert_before (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but insert after BSI.  Handles insertion onto edges as well.  */

static void
sra_insert_after (block_stmt_iterator *bsi, tree list)
{
  tree stmt = bsi_stmt (*bsi);

  if (EXPR_HAS_LOCATION (stmt))
    annotate_all_with_locus (&list, EXPR_LOCATION (stmt));

  if (stmt_ends_bb_p (stmt))
    insert_edge_copies (list, bsi->bb);
  else
    bsi_insert_after (bsi, list, BSI_SAME_STMT);
}

/* Similarly, but replace the statement at BSI.  */

static void
sra_replace (block_stmt_iterator *bsi, tree list)
{
  sra_insert_before (bsi, list);
  bsi_remove (bsi);
  if (bsi_end_p (*bsi))
    *bsi = bsi_last (bsi->bb);
  else
    bsi_prev (bsi);
}
/* Scalarize a USE.  To recap, this is either a simple reference to ELT,
   if elt is scalar, or some occurrence of ELT that requires a complete
   aggregate.  IS_OUTPUT is true if ELT is being modified.  */

static void
scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
	       bool is_output)
{
  tree list = NULL, stmt = bsi_stmt (*bsi);

  if (elt->replacement)
    {
      /* If we have a replacement, then updating the reference is as
	 simple as modifying the existing statement in place.  */
      if (is_output)
	mark_all_v_defs (stmt);
      *expr_p = elt->replacement;
      modify_stmt (stmt);
    }
  else
    {
      /* Otherwise we need some copies.  If ELT is being read, then we want
	 to store all (modified) sub-elements back into the structure before
	 the reference takes place.  If ELT is being written, then we want to
	 load the changed values back into our shadow variables.  */
      /* ??? We don't check modified for reads, we just always write all of
	 the values.  We should be able to record the SSA number of the VOP
	 for which the values were last read.  If that number matches the
	 SSA number of the VOP in the current statement, then we needn't
	 emit an assignment.  This would also eliminate double writes when
	 a structure is passed as more than one argument to a function call.
	 This optimization would be most effective if sra_walk_function
	 processed the blocks in dominator order.  */

      generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
      if (list == NULL)
	return;
      mark_all_v_defs (expr_first (list));
      if (is_output)
	sra_insert_after (bsi, list);
      else
	sra_insert_before (bsi, list);
    }
}
/* Scalarize a COPY.  To recap, this is an assignment statement between
   two scalarizable references, LHS_ELT and RHS_ELT.  */

static void
scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
		block_stmt_iterator *bsi)
{
  tree list, stmt;

  if (lhs_elt->replacement && rhs_elt->replacement)
    {
      /* If we have two scalar operands, modify the existing statement.  */
      stmt = bsi_stmt (*bsi);

      /* See the commentary in sra_walk_function concerning
	 RETURN_EXPR, and why we should never see one here.  */
      gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR);

      TREE_OPERAND (stmt, 0) = lhs_elt->replacement;
      TREE_OPERAND (stmt, 1) = rhs_elt->replacement;
      modify_stmt (stmt);
    }
  else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
    {
      /* If either side requires a block copy, then sync the RHS back
	 to the original structure, leave the original assignment
	 statement (which will perform the block copy), then load the
	 LHS values out of its now-updated original structure.  */
      /* ??? Could perform a modified pair-wise element copy.  That
	 would at least allow those elements that are instantiated in
	 both structures to be optimized well.  */

      list = NULL;
      generate_copy_inout (rhs_elt, false,
			   generate_element_ref (rhs_elt), &list);
      if (list)
	{
	  mark_all_v_defs (expr_first (list));
	  sra_insert_before (bsi, list);
	}

      list = NULL;
      generate_copy_inout (lhs_elt, true,
			   generate_element_ref (lhs_elt), &list);
      if (list)
	sra_insert_after (bsi, list);
    }
  else
    {
      /* Otherwise both sides must be fully instantiated, in which
	 case we perform pair-wise element assignments and replace the
	 original block copy statement.  */

      stmt = bsi_stmt (*bsi);
      mark_all_v_defs (stmt);

      list = NULL;
      generate_element_copy (lhs_elt, rhs_elt, &list);

      sra_replace (bsi, list);
    }
}
/* Scalarize an INIT.  To recap, this is an assignment to a scalarizable
   reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
   COMPLEX_EXPR.  If RHS is NULL, it should be treated as an empty
   CONSTRUCTOR.  */

static void
scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
{
  bool result = true;
  tree list = NULL;

  /* Generate initialization statements for all members extant in the RHS.  */
  if (rhs)
    {
      push_gimplify_context ();
      result = generate_element_init (lhs_elt, rhs, &list);
      pop_gimplify_context (NULL);
    }

  /* CONSTRUCTOR is defined such that any member not mentioned is assigned
     a zero value.  Initialize the rest of the instantiated elements.  */
  generate_element_zero (lhs_elt, &list);

  if (!result)
    {
      /* If we failed to convert the entire initializer, then we must
	 leave the structure assignment in place and must load values
	 from the structure into the slots for which we did not find
	 constants.  The easiest way to do this is to generate a complete
	 copy-out, and then follow that with the constant assignments
	 that we were able to build.  DCE will clean things up.  */
      tree list0 = NULL;
      generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
			   &list0);
      append_to_statement_list (list, &list0);
      list = list0;
    }

  if (lhs_elt->use_block_copy || !result)
    {
      /* Since LHS is not fully instantiated, we must leave the structure
	 assignment in place.  Treating this case differently from a USE
	 exposes constants to later optimizations.  */
      if (list)
	{
	  mark_all_v_defs (expr_first (list));
	  sra_insert_after (bsi, list);
	}
    }
  else
    {
      /* The LHS is fully instantiated.  The list of initializations
	 replaces the original structure assignment.  */
      mark_all_v_defs (bsi_stmt (*bsi));
      sra_replace (bsi, list);
    }
}
/* A subroutine of scalarize_ldst called via walk_tree.  Set TREE_THIS_NOTRAP
   on all INDIRECT_REFs.  */

static tree
mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (TREE_CODE (t) == INDIRECT_REF)
    {
      TREE_THIS_NOTRAP (t) = 1;
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;

  return NULL;
}
/* Scalarize a LDST.  To recap, this is an assignment between one scalarizable
   reference ELT and one non-scalarizable reference OTHER.  IS_OUTPUT is true
   if ELT is on the left-hand side.  */

static void
scalarize_ldst (struct sra_elt *elt, tree other,
		block_stmt_iterator *bsi, bool is_output)
{
  /* Shouldn't have gotten called for a scalar.  */
  gcc_assert (!elt->replacement);

  if (elt->use_block_copy)
    {
      /* Since ELT is not fully instantiated, we have to leave the
	 block copy in place.  Treat this as a USE.  */
      scalarize_use (elt, NULL, bsi, is_output);
    }
  else
    {
      /* The interesting case is when ELT is fully instantiated.  In this
	 case we can have each element stored/loaded directly to/from the
	 corresponding slot in OTHER.  This avoids a block copy.  */

      tree list = NULL, stmt = bsi_stmt (*bsi);

      mark_all_v_defs (stmt);
      generate_copy_inout (elt, is_output, other, &list);
      gcc_assert (list);

      /* Preserve EH semantics.  */
      if (stmt_ends_bb_p (stmt))
	{
	  tree_stmt_iterator tsi;
	  tree first;

	  /* Extract the first statement from LIST.  */
	  tsi = tsi_start (list);
	  first = tsi_stmt (tsi);
	  tsi_delink (&tsi);

	  /* Replace the old statement with this new representative.  */
	  bsi_replace (bsi, first, true);

	  if (!tsi_end_p (tsi))
	    {
	      /* If any reference would trap, then they all would.  And more
		 to the point, the first would.  Therefore none of the rest
		 will trap since the first didn't.  Indicate this by
		 iterating over the remaining statements and set
		 TREE_THIS_NOTRAP in all INDIRECT_REFs.  */
	      do
		{
		  walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
		  tsi_next (&tsi);
		}
	      while (!tsi_end_p (tsi));

	      insert_edge_copies (list, bsi->bb);
	    }
	}
      else
	sra_replace (bsi, list);
    }
}
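
/* Illustrative effect (hypothetical): for "a = *p" where A is fully
   instantiated with fields x and y, the block copy is replaced by

     a$x = p->x;
     a$y = p->y;

   and if the original statement could throw, only the first load keeps
   its EH edge; the rest are marked TREE_THIS_NOTRAP as described above.  */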
/* Generate initializations for all scalarizable parameters.  */

static void
scalarize_parms (void)
{
  tree list = NULL;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
    {
      tree var = referenced_var (i);
      struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
      generate_copy_inout (elt, true, var, &list);
    }

  if (list)
    insert_edge_copies (list, ENTRY_BLOCK_PTR);
}

/* Entry point to phase 4.  Update the function to match replacements.  */

static void
scalarize_function (void)
{
  static const struct sra_walk_fns fns = {
    scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
  };

  sra_walk_function (&fns);
  scalarize_parms ();
  bsi_commit_edge_inserts (NULL);
}
/* Debug helper function.  Print ELT in a nice human-readable format.  */

static void
dump_sra_elt_name (FILE *f, struct sra_elt *elt)
{
  if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
    {
      fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
      dump_sra_elt_name (f, elt->parent);
    }
  else
    {
      if (elt->parent)
	dump_sra_elt_name (f, elt->parent);
      if (DECL_P (elt->element))
	{
	  if (TREE_CODE (elt->element) == FIELD_DECL)
	    fputc ('.', f);
	  print_generic_expr (f, elt->element, dump_flags);
	}
      else
	fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
		 TREE_INT_CST_LOW (elt->element));
    }
}

/* Likewise, but callable from the debugger.  */

void
debug_sra_elt_name (struct sra_elt *elt)
{
  dump_sra_elt_name (stderr, elt);
  fputc ('\n', stderr);
}
/* Main entry point.  */

static void
tree_sra (void)
{
  /* Initialize local variables.  */
  gcc_obstack_init (&sra_obstack);
  sra_candidates = BITMAP_XMALLOC ();
  needs_copy_in = BITMAP_XMALLOC ();
  sra_type_decomp_cache = BITMAP_XMALLOC ();
  sra_type_inst_cache = BITMAP_XMALLOC ();
  sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);

  /* Scan.  If we find anything, instantiate and scalarize.  */
  if (find_candidates_for_sra ())
    {
      scan_function ();
      decide_instantiations ();
      scalarize_function ();
    }

  /* Free allocated memory.  */
  htab_delete (sra_map);
  sra_map = NULL;
  BITMAP_XFREE (sra_candidates);
  BITMAP_XFREE (needs_copy_in);
  BITMAP_XFREE (sra_type_decomp_cache);
  BITMAP_XFREE (sra_type_inst_cache);
  obstack_free (&sra_obstack, NULL);
}

static bool
gate_sra (void)
{
  return flag_tree_sra != 0;
}
struct tree_opt_pass pass_sra =
{
  "sra",				/* name */
  gate_sra,				/* gate */
  tree_sra,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_SRA,				/* tv_id */
  PROP_cfg | PROP_ssa | PROP_alias,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_rename_vars
    | TODO_ggc_collect | TODO_verify_ssa,	/* todo_flags_finish */
  0					/* letter */
};