1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar optimizers.
4 Copyright (C) 2003, 2004 Free Software Foundation, Inc.
5 Contributed by Diego Novillo <dnovillo@redhat.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it
10 under the terms of the GNU General Public License as published by the
11 Free Software Foundation; either version 2, or (at your option) any
14 GCC is distributed in the hope that it will be useful, but WITHOUT
15 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
26 #include "coretypes.h"
32 /* These RTL headers are needed for basic-block.h. */
35 #include "hard-reg-set.h"
36 #include "basic-block.h"
37 #include "diagnostic.h"
38 #include "langhooks.h"
39 #include "tree-inline.h"
40 #include "tree-flow.h"
41 #include "tree-gimple.h"
42 #include "tree-dump.h"
43 #include "tree-pass.h"
49 /* expr.h is needed for MOVE_RATIO. */
53 /* The objective of this pass is to replace a non-addressable aggregate with a
54 set of independent variables. Most of the time, all of these variables
55 will be scalars. But a secondary objective is to break up larger
56 aggregates into smaller aggregates. In the process we may find that some
57 bits of the larger aggregate can be deleted as unreferenced.
59 This substitution is done globally. More localized substitutions would
60 be the purview of a load-store motion pass.
62 The optimization proceeds in phases:
64 (1) Identify variables that have types that are candidates for
67 (2) Scan the function looking for the ways these variables are used.
68 In particular we're interested in the number of times a variable
69 (or member) is needed as a complete unit, and the number of times
70 a variable (or member) is copied.
72 (3) Based on the usage profile, instantiate substitution variables.
74 (4) Scan the function making replacements.
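     As a rough source-level illustration (hypothetical user code, not taken
     from this file or any testcase), given

         struct pair { int a; int b; } p;
         p.a = 1;
         p.b = 2;
         use (p.a + p.b);

     the pass may end up replacing the members with independent scalars,
     roughly

         int p$a = 1;
         int p$b = 2;
         use (p$a + p$b);

     which exposes the values to constant propagation and dead code
     elimination.  The "$" naming follows build_element_name below; the
     exact temporaries chosen are an implementation detail.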
78 /* The set of aggregate variables that are candidates for scalarization. */
79 static bitmap sra_candidates;
81 /* Set of scalarizable PARM_DECLs that need copy-in operations at the
82 beginning of the function. */
83 static bitmap needs_copy_in;
85 /* Sets of bit pairs that cache type decomposition and instantiation. */
86 static bitmap sra_type_decomp_cache;
87 static bitmap sra_type_inst_cache;
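/* How the bit pairs are used by type_can_be_decomposed_p and
   type_can_instantiate_all_elements below: for a type whose main variant
   has TYPE_UID U, bit 2*U caches a "yes" answer and bit 2*U+1 caches a
   "no" answer, so repeated queries cost at most two bitmap lookups. */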
89 /* One of these structures is created for each candidate aggregate
90 and each (accessed) member of such an aggregate. */
93 /* A tree of the elements. Used when we want to traverse everything. */
94 struct sra_elt *parent;
95 struct sra_elt *children;
96 struct sra_elt *sibling;
98 /* If this element is a root, then this is the VAR_DECL. If this is
99 a sub-element, this is some token used to identify the reference.
100 In the case of COMPONENT_REF, this is the FIELD_DECL. In the case
101 of an ARRAY_REF, this is the (constant) index. In the case of a
102 complex number, this is a zero or one. */
105 /* The type of the element. */
108 /* A VAR_DECL, for any sub-element we've decided to replace. */
111 /* The number of times the element is referenced as a whole. I.e.
112 given "a.b.c", this would be incremented for C, but not for A or B. */
115 /* The number of times the element is copied to or from another
116 scalarizable element. */
117 unsigned int n_copies;
119 /* True if TYPE is scalar. */
122 /* True if we saw something about this element that prevents scalarization,
123 such as non-constant indexing. */
124 bool cannot_scalarize;
126 /* True if we've decided that structure-to-structure assignment
127 should happen via memcpy and not per-element. */
130 /* A flag for use with/after random access traversals. */
134 /* Random access to the child of a parent is performed by hashing.
135 This prevents quadratic behaviour, and allows SRA to function
136 reasonably on larger records. */
137 static htab_t sra_map;
139 /* All structures are allocated out of the following obstack. */
140 static struct obstack sra_obstack;
142 /* Debugging functions. */
143 static void dump_sra_elt_name (FILE *, struct sra_elt *);
144 extern void debug_sra_elt_name (struct sra_elt *);
147 /* Return true if DECL is an SRA candidate. */
150 is_sra_candidate_decl (tree decl)
152 return DECL_P (decl) && bitmap_bit_p (sra_candidates, var_ann (decl)->uid);
155 /* Return true if TYPE is a scalar type. */
158 is_sra_scalar_type (tree type)
160 enum tree_code code = TREE_CODE (type);
161 return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
162 || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
163 || code == CHAR_TYPE || code == POINTER_TYPE || code == OFFSET_TYPE
164 || code == REFERENCE_TYPE);
167 /* Return true if TYPE can be decomposed into a set of independent variables.
169 Note that this doesn't imply that all elements of TYPE can be
170 instantiated, just that if we decide to break up the type into
171 separate pieces, it can be done. */
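/* Informally, per the checks below: a struct of ordinary fields or a
   fixed-size array such as "int a[4]" can be decomposed, while a union,
   a zero-sized or incomplete type, a variably sized array, or a struct
   containing an oddly represented bit field cannot. */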
174 type_can_be_decomposed_p (tree type)
176 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
179 /* Avoid searching the same type twice. */
180 if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
182 if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
185 /* The type must have a definite non-zero size. */
186 if (TYPE_SIZE (type) == NULL || integer_zerop (TYPE_SIZE (type)))
189 /* The type must be a non-union aggregate. */
190 switch (TREE_CODE (type))
194 bool saw_one_field = false;
196 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
197 if (TREE_CODE (t) == FIELD_DECL)
199 /* Reject incorrectly represented bit fields. */
200 if (DECL_BIT_FIELD (t)
201 && (tree_low_cst (DECL_SIZE (t), 1)
202 != TYPE_PRECISION (TREE_TYPE (t))))
205 saw_one_field = true;
208 /* Record types must have at least one field. */
215 /* Array types must have a fixed lower and upper bound. */
216 t = TYPE_DOMAIN (type);
219 if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
221 if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
232 bitmap_set_bit (sra_type_decomp_cache, cache+0);
236 bitmap_set_bit (sra_type_decomp_cache, cache+1);
240 /* Return true if DECL can be decomposed into a set of independent
241 (though not necessarily scalar) variables. */
244 decl_can_be_decomposed_p (tree var)
246 /* Early out for scalars. */
247 if (is_sra_scalar_type (TREE_TYPE (var)))
250 /* The variable must not be aliased. */
251 if (!is_gimple_non_addressable (var))
253 if (dump_file && (dump_flags & TDF_DETAILS))
255 fprintf (dump_file, "Cannot scalarize variable ");
256 print_generic_expr (dump_file, var, dump_flags);
257 fprintf (dump_file, " because it must live in memory\n");
262 /* The variable must not be volatile. */
263 if (TREE_THIS_VOLATILE (var))
265 if (dump_file && (dump_flags & TDF_DETAILS))
267 fprintf (dump_file, "Cannot scalarize variable ");
268 print_generic_expr (dump_file, var, dump_flags);
269 fprintf (dump_file, " because it is declared volatile\n");
274 /* We must be able to decompose the variable's type. */
275 if (!type_can_be_decomposed_p (TREE_TYPE (var)))
277 if (dump_file && (dump_flags & TDF_DETAILS))
279 fprintf (dump_file, "Cannot scalarize variable ");
280 print_generic_expr (dump_file, var, dump_flags);
281 fprintf (dump_file, " because its type cannot be decomposed\n");
289 /* Return true if TYPE can be *completely* decomposed into scalars. */
292 type_can_instantiate_all_elements (tree type)
294 if (is_sra_scalar_type (type))
296 if (!type_can_be_decomposed_p (type))
299 switch (TREE_CODE (type))
303 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
306 if (bitmap_bit_p (sra_type_inst_cache, cache+0))
308 if (bitmap_bit_p (sra_type_inst_cache, cache+1))
311 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
312 if (TREE_CODE (f) == FIELD_DECL)
314 if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
316 bitmap_set_bit (sra_type_inst_cache, cache+1);
321 bitmap_set_bit (sra_type_inst_cache, cache+0);
326 return type_can_instantiate_all_elements (TREE_TYPE (type));
336 /* Test whether ELT or some sub-element cannot be scalarized. */
339 can_completely_scalarize_p (struct sra_elt *elt)
343 if (elt->cannot_scalarize)
346 for (c = elt->children; c ; c = c->sibling)
347 if (!can_completely_scalarize_p (c))
354 /* A simplified tree hashing algorithm that only handles the types of
355 trees we expect to find in sra_elt->element. */
358 sra_hash_tree (tree t)
360 switch (TREE_CODE (t))
368 return TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
374 /* Hash function for type SRA_PAIR. */
377 sra_elt_hash (const void *x)
379 const struct sra_elt *e = x;
380 const struct sra_elt *p;
383 h = sra_hash_tree (e->element);
385 /* Take into account everything back up the chain. Given that chain
386 lengths are rarely very long, this should be acceptable. If we
387 truly identify this as a performance problem, it should work to
388 hash the pointer value "e->parent". */
389 for (p = e->parent; p ; p = p->parent)
390 h = (h * 65521) ^ sra_hash_tree (p->element);
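  /* The multiplier 65521 above is the largest prime below 2^16 (the same
     constant Adler-32 uses); it spreads each parent's contribution across
     the hash while remaining cheap to compute. */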
395 /* Equality function for type SRA_PAIR. */
398 sra_elt_eq (const void *x, const void *y)
400 const struct sra_elt *a = x;
401 const struct sra_elt *b = y;
403 if (a->parent != b->parent)
406 /* All the field/decl stuff is unique. */
407 if (a->element == b->element)
410 /* The only thing left is integer equality. */
411 if (TREE_CODE (a->element) == INTEGER_CST
412 && TREE_CODE (b->element) == INTEGER_CST)
413 return tree_int_cst_equal (a->element, b->element);
418 /* Create or return the SRA_ELT structure for CHILD in PARENT. PARENT
419 may be null, in which case CHILD must be a DECL. */
421 static struct sra_elt *
422 lookup_element (struct sra_elt *parent, tree child, tree type,
423 enum insert_option insert)
425 struct sra_elt dummy;
426 struct sra_elt **slot;
429 dummy.parent = parent;
430 dummy.element = child;
432 slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
433 if (!slot && insert == NO_INSERT)
437 if (!elt && insert == INSERT)
439 *slot = elt = obstack_alloc (&sra_obstack, sizeof (*elt));
440 memset (elt, 0, sizeof (*elt));
442 elt->parent = parent;
443 elt->element = child;
445 elt->is_scalar = is_sra_scalar_type (type);
449 elt->sibling = parent->children;
450 parent->children = elt;
453 /* If this is a parameter, then if we want to scalarize, we have
454 one copy from the true function parameter. Count it now. */
455 if (TREE_CODE (child) == PARM_DECL)
458 bitmap_set_bit (needs_copy_in, var_ann (child)->uid);
465 /* Return true if the ARRAY_REF in EXPR is a constant, in-bounds access. */
468 is_valid_const_index (tree expr)
470 tree dom, t, index = TREE_OPERAND (expr, 1);
472 if (TREE_CODE (index) != INTEGER_CST)
475 /* Watch out for stupid user tricks, indexing outside the array.
477 Careful, we're not called only on scalarizable types, so do not
478 assume constant array bounds. We needn't do anything with such
479 cases, since they'll be referring to objects that we should have
480 already rejected for scalarization, so returning false is fine. */
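  /* For instance (hypothetical): given "int a[4];", the reference a[2]
     passes the bounds checks below, while a[7] has a constant index but
     lies outside the domain and is rejected. */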
482 dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (expr, 0)));
486 t = TYPE_MIN_VALUE (dom);
487 if (!t || TREE_CODE (t) != INTEGER_CST)
489 if (tree_int_cst_lt (index, t))
492 t = TYPE_MAX_VALUE (dom);
493 if (!t || TREE_CODE (t) != INTEGER_CST)
495 if (tree_int_cst_lt (t, index))
501 /* Create or return the SRA_ELT structure for EXPR if the expression
502 refers to a scalarizable variable. */
504 static struct sra_elt *
505 maybe_lookup_element_for_expr (tree expr)
510 switch (TREE_CODE (expr))
515 if (is_sra_candidate_decl (expr))
516 return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
520 /* We can't scalarize variable array indices. */
521 if (is_valid_const_index (expr))
522 child = TREE_OPERAND (expr, 1);
528 /* Don't look through unions. */
529 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) != RECORD_TYPE)
531 child = TREE_OPERAND (expr, 1);
535 child = integer_zero_node;
538 child = integer_one_node;
545 elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
547 return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
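/* To illustrate the function above (hypothetical reference, not from a
   testcase): for a candidate variable "x", the expression x.b[2] yields
   the element chain x -> b -> 2, each child keyed by its FIELD_DECL or
   constant index and found through the sra_map hash table. */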
552 /* Functions to walk just enough of the tree to see all scalarizable
553 references, and categorize them. */
555 /* A set of callbacks for phases 2 and 4. They'll be invoked for the
556 various kinds of references seen. In all cases, *BSI is an iterator
557 pointing to the statement being processed. */
560 /* Invoked when ELT is required as a unit. Note that ELT might refer to
561 a leaf node, in which case this is a simple scalar reference. *EXPR_P
562 points to the location of the expression. IS_OUTPUT is true if this
563 is a left-hand-side reference. */
564 void (*use) (struct sra_elt *elt, tree *expr_p,
565 block_stmt_iterator *bsi, bool is_output);
567 /* Invoked when we have a copy between two scalarizable references. */
568 void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
569 block_stmt_iterator *bsi);
571 /* Invoked when ELT is initialized from a constant. VALUE may be NULL,
572 in which case it should be treated as an empty CONSTRUCTOR. */
573 void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);
575 /* Invoked when we have a copy between one scalarizable reference ELT
576 and one non-scalarizable reference OTHER. IS_OUTPUT is true if ELT
577 is on the left-hand side. */
578 void (*ldst) (struct sra_elt *elt, tree other,
579 block_stmt_iterator *bsi, bool is_output);
581 /* True during phase 2, false during phase 4. */
582 /* ??? This is a hack. */
586 #ifdef ENABLE_CHECKING
587 /* Invoked via walk_tree, if *TP contains a candidate decl, return it. */
590 sra_find_candidate_decl (tree *tp, int *walk_subtrees,
591 void *data ATTRIBUTE_UNUSED)
594 enum tree_code code = TREE_CODE (t);
596 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
599 if (is_sra_candidate_decl (t))
609 /* Walk most expressions looking for a scalarizable aggregate.
610 If we find one, invoke FNS->USE. */
613 sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
614 const struct sra_walk_fns *fns)
619 /* We're looking to collect a reference expression between EXPR and INNER,
620 such that INNER is a scalarizable decl and all other nodes through EXPR
621 are references that we can scalarize. If we come across something that
622 we can't scalarize, we reset EXPR. This has the effect of making it
623 appear that we're referring to the larger expression as a whole. */
626 switch (TREE_CODE (inner))
631 /* If there is a scalarizable decl at the bottom, then process it. */
632 if (is_sra_candidate_decl (inner))
634 struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
635 fns->use (elt, expr_p, bsi, is_output);
640 /* Non-constant index means any member may be accessed. Prevent the
641 expression from being scalarized. If we were to treat this as a
642 reference to the whole array, we can wind up with a single dynamic
643 index reference inside a loop being overridden by several constant
644 index references during loop setup. It's possible that this could
645 be avoided by using dynamic usage counts based on BB trip counts
646 (based on loop analysis or profiling), but that hardly seems worth the effort. */
648 /* ??? Hack. Figure out how to push this into the scan routines
649 without duplicating too much code. */
650 if (!is_valid_const_index (inner))
652 if (fns->initial_scan)
655 = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
657 elt->cannot_scalarize = true;
661 /* ??? Are we assured that non-constant bounds and stride will have
662 the same value everywhere? I don't think Fortran will... */
663 if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
665 inner = TREE_OPERAND (inner, 0);
669 /* A reference to a union member constitutes a reference to the entire union. */
671 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) != RECORD_TYPE)
673 /* ??? See above re non-constant stride. */
674 if (TREE_OPERAND (inner, 2))
676 inner = TREE_OPERAND (inner, 0);
681 inner = TREE_OPERAND (inner, 0);
685 /* A bit field reference (access to *multiple* fields simultaneously)
686 is not currently scalarized. Consider this an access to the
687 complete outer element, to which walk_tree will bring us next. */
690 case ARRAY_RANGE_REF:
691 /* Similarly, a subrange reference is used to modify indexing, which
692 means that the canonical element names we have won't work. */
695 case VIEW_CONVERT_EXPR:
697 /* Similarly, a view/nop explicitly wants to look at an object in a
698 type other than the one we've scalarized. */
702 expr_p = &TREE_OPERAND (inner, 0);
703 inner = expr = *expr_p;
707 #ifdef ENABLE_CHECKING
708 /* Validate that we're not missing any references. */
709 if (walk_tree (&inner, sra_find_candidate_decl, NULL, NULL))
716 /* Walk a TREE_LIST of values looking for scalarizable aggregates.
717 If we find one, invoke FNS->USE. */
720 sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
721 const struct sra_walk_fns *fns)
724 for (op = list; op ; op = TREE_CHAIN (op))
725 sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
728 /* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
729 If we find one, invoke FNS->USE. */
732 sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
733 const struct sra_walk_fns *fns)
735 sra_walk_tree_list (TREE_OPERAND (expr, 1), bsi, false, fns);
738 /* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
739 aggregates. If we find one, invoke FNS->USE. */
742 sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
743 const struct sra_walk_fns *fns)
745 sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
746 sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
749 /* Walk a MODIFY_EXPR and categorize the assignment appropriately. */
752 sra_walk_modify_expr (tree expr, block_stmt_iterator *bsi,
753 const struct sra_walk_fns *fns)
755 struct sra_elt *lhs_elt, *rhs_elt;
758 lhs = TREE_OPERAND (expr, 0);
759 rhs = TREE_OPERAND (expr, 1);
760 lhs_elt = maybe_lookup_element_for_expr (lhs);
761 rhs_elt = maybe_lookup_element_for_expr (rhs);
763 /* If both sides are scalarizable, this is a COPY operation. */
764 if (lhs_elt && rhs_elt)
766 fns->copy (lhs_elt, rhs_elt, bsi);
772 /* If this is an assignment from a constant, or constructor, then
773 we have access to all of the elements individually. Invoke INIT. */
774 if (TREE_CODE (rhs) == COMPLEX_EXPR
775 || TREE_CODE (rhs) == COMPLEX_CST
776 || TREE_CODE (rhs) == CONSTRUCTOR)
777 fns->init (lhs_elt, rhs, bsi);
779 /* If this is an assignment from read-only memory, treat this as if
780 we'd been passed the constructor directly. Invoke INIT. */
781 else if (TREE_CODE (rhs) == VAR_DECL
783 && TREE_READONLY (rhs)
784 && targetm.binds_local_p (rhs))
786 if (DECL_INITIAL (rhs) != error_mark_node)
787 fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);
790 /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
791 The lvalue requirement prevents us from trying to directly scalarize
792 the result of a function call, which would result in trying to call
793 the function multiple times, and other evil things.
794 else if (!lhs_elt->is_scalar && is_gimple_addr_expr_arg (rhs))
795 fns->ldst (lhs_elt, rhs, bsi, true);
797 /* Otherwise we're being used in some context that requires the
798 aggregate to be seen as a whole. Invoke USE. */
800 fns->use (lhs_elt, &TREE_OPERAND (expr, 0), bsi, true);
804 /* LHS_ELT being null only means that the LHS as a whole is not a
805 scalarizable reference. There may be occurrences of scalarizable
806 variables within, which implies a USE. */
807 sra_walk_expr (&TREE_OPERAND (expr, 0), bsi, true, fns);
810 /* Likewise for the right-hand side. The only difference here is that
811 we don't have to handle constants, and the RHS may be a call. */
814 if (!rhs_elt->is_scalar)
815 fns->ldst (rhs_elt, lhs, bsi, false);
817 fns->use (rhs_elt, &TREE_OPERAND (expr, 1), bsi, false);
819 else if (TREE_CODE (rhs) == CALL_EXPR)
820 sra_walk_call_expr (rhs, bsi, fns);
822 sra_walk_expr (&TREE_OPERAND (expr, 1), bsi, false, fns);
825 /* Entry point to the walk functions. Search the entire function,
826 invoking the callbacks in FNS on each of the references to
827 scalarizable variables. */
830 sra_walk_function (const struct sra_walk_fns *fns)
833 block_stmt_iterator si;
835 /* ??? Phase 4 could derive some benefit from walking the function in
836 dominator tree order. */
839 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
844 stmt = bsi_stmt (si);
845 ann = stmt_ann (stmt);
847 /* If the statement has no virtual operands, then it doesn't
848 make any structure references that we care about. */
849 if (NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann)) == 0
850 && NUM_VUSES (VUSE_OPS (ann)) == 0
851 && NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann)) == 0)
854 switch (TREE_CODE (stmt))
857 /* If we have "return <retval>" then the return value is
858 already exposed for our pleasure. Walk it as a USE to
859 force all the components back in place for the return.
861 If we have an embedded assignment, then <retval> is of
862 a type that gets returned in registers in this ABI, and
863 we do not wish to extend their lifetimes. Treat this
864 as a USE of the variable on the RHS of this assignment. */
866 t = TREE_OPERAND (stmt, 0);
867 if (TREE_CODE (t) == MODIFY_EXPR)
868 sra_walk_expr (&TREE_OPERAND (t, 1), &si, false, fns);
870 sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
874 sra_walk_modify_expr (stmt, &si, fns);
877 sra_walk_call_expr (stmt, &si, fns);
880 sra_walk_asm_expr (stmt, &si, fns);
889 /* Phase One: Scan all referenced variables in the program looking for
890 structures that could be decomposed. */
893 find_candidates_for_sra (void)
896 bool any_set = false;
898 for (i = 0; i < num_referenced_vars; i++)
900 tree var = referenced_var (i);
901 if (decl_can_be_decomposed_p (var))
903 bitmap_set_bit (sra_candidates, var_ann (var)->uid);
912 /* Phase Two: Scan all references to scalarizable variables. Count the
913 number of times they are used or copied respectively. */
915 /* Callbacks to fill in SRA_WALK_FNS. Everything but USE is
916 considered a copy, because we can decompose the reference such that
917 the sub-elements needn't be contiguous. */
920 scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
921 block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
922 bool is_output ATTRIBUTE_UNUSED)
928 scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
929 block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
931 lhs_elt->n_copies += 1;
932 rhs_elt->n_copies += 1;
936 scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
937 block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
939 lhs_elt->n_copies += 1;
943 scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
944 block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
945 bool is_output ATTRIBUTE_UNUSED)
950 /* Dump the values we collected during the scanning phase. */
953 scan_dump (struct sra_elt *elt)
957 dump_sra_elt_name (dump_file, elt);
958 fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);
960 for (c = elt->children; c ; c = c->sibling)
964 /* Entry point to phase 2. Scan the entire function, building up
965 scalarization data structures, recording copies and uses. */
970 static const struct sra_walk_fns fns = {
971 scan_use, scan_copy, scan_init, scan_ldst, true
974 sra_walk_function (&fns);
976 if (dump_file && (dump_flags & TDF_DETAILS))
980 fputs ("\nScan results:\n", dump_file);
981 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i,
983 tree var = referenced_var (i);
984 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
988 fputc ('\n', dump_file);
992 /* Phase Three: Make decisions about which variables to scalarize, if any.
993 All elements to be scalarized have replacement variables made for them. */
995 /* A subroutine of build_element_name. Recursively build the element
996 name on the obstack. */
999 build_element_name_1 (struct sra_elt *elt)
1006 build_element_name_1 (elt->parent);
1007 obstack_1grow (&sra_obstack, '$');
1009 if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
1011 if (elt->element == integer_zero_node)
1012 obstack_grow (&sra_obstack, "real", 4);
1014 obstack_grow (&sra_obstack, "imag", 4);
1020 if (TREE_CODE (t) == INTEGER_CST)
1022 /* ??? Eh. Don't bother doing double-wide printing. */
1023 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
1024 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1028 tree name = DECL_NAME (t);
1030 obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
1031 IDENTIFIER_LENGTH (name));
1034 sprintf (buffer, "D%u", DECL_UID (t));
1035 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1040 /* Construct a pretty variable name for an element's replacement variable.
1041 The name is built on the obstack. */
1044 build_element_name (struct sra_elt *elt)
1046 build_element_name_1 (elt);
1047 obstack_1grow (&sra_obstack, '\0');
1048 return obstack_finish (&sra_obstack);
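/* For example (hypothetical): a variable "a", array index 3 and field "b"
   produce the replacement name "a$3$b"; the halves of a complex element
   are named "...$real" and "...$imag". */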
1051 /* Instantiate an element as an independent variable. */
1054 instantiate_element (struct sra_elt *elt)
1056 struct sra_elt *base_elt;
1059 for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
1061 base = base_elt->element;
1063 elt->replacement = var = make_rename_temp (elt->type, "SR");
1064 DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
1065 TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
1066 DECL_ARTIFICIAL (var) = DECL_ARTIFICIAL (base);
1068 if (DECL_NAME (base) && !DECL_IGNORED_P (base))
1070 char *pretty_name = build_element_name (elt);
1071 DECL_NAME (var) = get_identifier (pretty_name);
1072 obstack_free (&sra_obstack, pretty_name);
1077 fputs (" ", dump_file);
1078 dump_sra_elt_name (dump_file, elt);
1079 fputs (" -> ", dump_file);
1080 print_generic_expr (dump_file, var, dump_flags);
1081 fputc ('\n', dump_file);
1085 /* Make one pass across an element tree deciding whether or not it's
1086 profitable to instantiate individual leaf scalars.
1088 PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
1089 fields all the way up the tree. */
1092 decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
1093 unsigned int parent_copies)
1095 if (dump_file && !elt->parent)
1097 fputs ("Initial instantiation for ", dump_file);
1098 dump_sra_elt_name (dump_file, elt);
1099 fputc ('\n', dump_file);
1102 if (elt->cannot_scalarize)
1107 /* The decision is simple: instantiate if we're used more frequently
1108 than the parent needs to be seen as a complete unit. */
1109 if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
1110 instantiate_element (elt);
1115 unsigned int this_uses = elt->n_uses + parent_uses;
1116 unsigned int this_copies = elt->n_copies + parent_copies;
1118 for (c = elt->children; c ; c = c->sibling)
1119 decide_instantiation_1 (c, this_uses, this_copies);
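/* A worked example of the test above (numbers are hypothetical): a scalar
   leaf with n_uses == 3 and n_copies == 1, whose enclosing aggregates
   contribute parent_copies == 2 against parent_uses == 4, satisfies
   3 + 1 + 2 = 6 > 4 and receives its own replacement variable; with
   parent_uses == 8 it would not. */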
1123 /* Compute the size and number of all instantiated elements below ELT.
1124 We will only care about this if the size of the complete structure
1125 fits in a HOST_WIDE_INT, so we don't have to worry about overflow. */
1128 sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
1130 if (elt->replacement)
1132 *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
1138 unsigned int count = 0;
1140 for (c = elt->children; c ; c = c->sibling)
1141 count += sum_instantiated_sizes (c, sizep);
1147 /* Instantiate fields in ELT->TYPE that are not currently present as children of ELT. */
1150 static void instantiate_missing_elements (struct sra_elt *elt);
1153 instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
1155 struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
1158 if (sub->replacement == NULL)
1159 instantiate_element (sub);
1162 instantiate_missing_elements (sub);
1166 instantiate_missing_elements (struct sra_elt *elt)
1168 tree type = elt->type;
1170 switch (TREE_CODE (type))
1175 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
1176 if (TREE_CODE (f) == FIELD_DECL)
1177 instantiate_missing_elements_1 (elt, f, TREE_TYPE (f));
1183 tree i, max, subtype;
1185 i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1186 max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1187 subtype = TREE_TYPE (type);
1191 instantiate_missing_elements_1 (elt, i, subtype);
1192 if (tree_int_cst_equal (i, max))
1194 i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
1201 type = TREE_TYPE (type);
1202 instantiate_missing_elements_1 (elt, integer_zero_node, type);
1203 instantiate_missing_elements_1 (elt, integer_one_node, type);
1211 /* Make one pass across an element tree deciding whether to perform block
1212 or element copies. If we decide on element copies, instantiate all
1213 elements. Return true if there are any instantiated sub-elements. */
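/* A rough example of the heuristics below (hypothetical target numbers):
   with UNITS_PER_WORD == 4 and MOVE_RATIO == 4, a 16-byte struct that is
   copied more often than it is used as a unit is switched to element
   copies; a second test also abandons block copy once at least three
   quarters of the aggregate's bytes are instantiated, i.e. when
   inst_size * 4 >= full_size * 3. */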
1216 decide_block_copy (struct sra_elt *elt)
1221 /* If scalarization is disabled, respect it. */
1222 if (elt->cannot_scalarize)
1224 elt->use_block_copy = 1;
1228 fputs ("Scalarization disabled for ", dump_file);
1229 dump_sra_elt_name (dump_file, elt);
1230 fputc ('\n', dump_file);
1236 /* Don't decide if we've no uses. */
1237 if (elt->n_uses == 0 && elt->n_copies == 0)
1240 else if (!elt->is_scalar)
1242 tree size_tree = TYPE_SIZE_UNIT (elt->type);
1243 bool use_block_copy = true;
1245 /* Don't bother trying to figure out the rest if the structure is
1246 so large we can't do easy arithmetic. This also forces block
1247 copies for variable sized structures. */
1248 if (host_integerp (size_tree, 1))
1250 unsigned HOST_WIDE_INT full_size, inst_size = 0;
1251 unsigned int inst_count;
1253 full_size = tree_low_cst (size_tree, 1);
1255 /* ??? What to do here. If there are two fields, and we've only
1256 instantiated one, then instantiating the other is clearly a win.
1257 If there are a large number of fields then the size of the copy
1258 is much more of a factor. */
1260 /* If the structure is small, and we've made copies, go ahead
1261 and instantiate, hoping that the copies will go away. */
1262 if (full_size <= (unsigned) MOVE_RATIO * UNITS_PER_WORD
1263 && elt->n_copies > elt->n_uses)
1264 use_block_copy = false;
1267 inst_count = sum_instantiated_sizes (elt, &inst_size);
1269 if (inst_size * 4 >= full_size * 3)
1270 use_block_copy = false;
1273 /* In order to avoid block copy, we have to be able to instantiate
1274 all elements of the type. See if this is possible. */
1276 && (!can_completely_scalarize_p (elt)
1277 || !type_can_instantiate_all_elements (elt->type)))
1278 use_block_copy = true;
1280 elt->use_block_copy = use_block_copy;
1284 fprintf (dump_file, "Using %s for ",
1285 use_block_copy ? "block-copy" : "element-copy");
1286 dump_sra_elt_name (dump_file, elt);
1287 fputc ('\n', dump_file);
1290 if (!use_block_copy)
1292 instantiate_missing_elements (elt);
1297 any_inst = elt->replacement != NULL;
1299 for (c = elt->children; c ; c = c->sibling)
1300 any_inst |= decide_block_copy (c);
1305 /* Entry point to phase 3. Instantiate scalar replacement variables. */
1308 decide_instantiations (void)
1312 struct bitmap_head_def done_head;
1314 /* We cannot clear bits from a bitmap we're iterating over,
1315 so save up all the bits to clear until the end. */
1316 bitmap_initialize (&done_head, 1);
1317 cleared_any = false;
1319 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i,
1321 tree var = referenced_var (i);
1322 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1325 decide_instantiation_1 (elt, 0, 0);
1326 if (!decide_block_copy (elt))
1331 bitmap_set_bit (&done_head, i);
1338 bitmap_operation (sra_candidates, sra_candidates, &done_head,
1340 bitmap_operation (needs_copy_in, needs_copy_in, &done_head,
1343 bitmap_clear (&done_head);
1346 fputc ('\n', dump_file);
1350 /* Phase Four: Update the function to match the replacements created. */
1352 /* Mark all the variables in V_MAY_DEF or V_MUST_DEF operands for STMT for
1353 renaming. This becomes necessary when we modify all of a non-scalar. */
1356 mark_all_v_defs (tree stmt)
1358 v_may_def_optype v_may_defs;
1359 v_must_def_optype v_must_defs;
1362 get_stmt_operands (stmt);
1364 v_may_defs = V_MAY_DEF_OPS (stmt_ann (stmt));
1365 n = NUM_V_MAY_DEFS (v_may_defs);
1366 for (i = 0; i < n; i++)
1368 tree sym = V_MAY_DEF_RESULT (v_may_defs, i);
1369 if (TREE_CODE (sym) == SSA_NAME)
1370 sym = SSA_NAME_VAR (sym);
1371 bitmap_set_bit (vars_to_rename, var_ann (sym)->uid);
1374 v_must_defs = V_MUST_DEF_OPS (stmt_ann (stmt));
1375 n = NUM_V_MUST_DEFS (v_must_defs);
1376 for (i = 0; i < n; i++)
1378 tree sym = V_MUST_DEF_OP (v_must_defs, i);
1379 if (TREE_CODE (sym) == SSA_NAME)
1380 sym = SSA_NAME_VAR (sym);
1381 bitmap_set_bit (vars_to_rename, var_ann (sym)->uid);
1385 /* Build a single level component reference to ELT rooted at BASE. */
1388 generate_one_element_ref (struct sra_elt *elt, tree base)
1390 switch (TREE_CODE (TREE_TYPE (base)))
1393 return build (COMPONENT_REF, elt->type, base, elt->element, NULL);
1396 return build (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);
1399 if (elt->element == integer_zero_node)
1400 return build (REALPART_EXPR, elt->type, base);
1402 return build (IMAGPART_EXPR, elt->type, base);
1409 /* Build a full component reference to ELT rooted at its native variable. */
1412 generate_element_ref (struct sra_elt *elt)
1415 return generate_one_element_ref (elt, generate_element_ref (elt->parent));
1417 return elt->element;
1420 /* Generate a set of assignment statements in *LIST_P to copy all
1421 instantiated elements under ELT to or from the equivalent structure
1422 rooted at EXPR. COPY_OUT controls the direction of the copy, with
1423 true meaning to copy out of EXPR into ELT. */
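/* For instance (hypothetical): for a parameter "p" with instantiated
   fields a and b, the copy-in direction generates roughly
   "p$a = p.a; p$b = p.b;", while the opposite direction writes the
   replacements back into the aggregate before it is needed as a whole. */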
1426 generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
1432 if (elt->replacement)
1435 t = build (MODIFY_EXPR, void_type_node, elt->replacement, expr);
1437 t = build (MODIFY_EXPR, void_type_node, expr, elt->replacement);
1438 append_to_statement_list (t, list_p);
1442 for (c = elt->children; c ; c = c->sibling)
1444 t = generate_one_element_ref (c, unshare_expr (expr));
1445 generate_copy_inout (c, copy_out, t, list_p);
1450 /* Generate a set of assignment statements in *LIST_P to copy all instantiated
1451 elements under SRC to their counterparts under DST. There must be a 1-1
1452 correspondence of instantiated elements. */
1455 generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
1457 struct sra_elt *dc, *sc;
1459 for (dc = dst->children; dc ; dc = dc->sibling)
1461 sc = lookup_element (src, dc->element, NULL, NO_INSERT);
1464 generate_element_copy (dc, sc, list_p);
1467 if (dst->replacement)
1471 if (src->replacement == NULL)
1474 t = build (MODIFY_EXPR, void_type_node, dst->replacement,
1476 append_to_statement_list (t, list_p);
1480 /* Generate a set of assignment statements in *LIST_P to zero all instantiated
1481 elements under ELT. In addition, do not assign to elements that have been
1482 marked VISITED but do reset the visited flag; this allows easy coordination
1483 with generate_element_init. */
1486 generate_element_zero (struct sra_elt *elt, tree *list_p)
1490 for (c = elt->children; c ; c = c->sibling)
1491 generate_element_zero (c, list_p);
1494 elt->visited = false;
1495 else if (elt->replacement)
1500 t = fold_convert (elt->type, integer_zero_node);
1502 /* We generated a replacement for a non-scalar? */
1505 t = build (MODIFY_EXPR, void_type_node, elt->replacement, t);
1506 append_to_statement_list (t, list_p);
1510 /* Generate a set of assignment statements in *LIST_P to set all instantiated
1511 elements under ELT with the contents of the initializer INIT. In addition,
1512 mark all assigned elements VISITED; this allows easy coordination with
1513 generate_element_zero. */
1516 generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
1518 enum tree_code init_code = TREE_CODE (init);
1519 struct sra_elt *sub;
1524 if (elt->replacement)
1526 t = build (MODIFY_EXPR, void_type_node, elt->replacement, init);
1527 append_to_statement_list (t, list_p);
1528 elt->visited = true;
1537 for (sub = elt->children; sub ; sub = sub->sibling)
1539 if (sub->element == integer_zero_node)
1540 t = (init_code == COMPLEX_EXPR
1541 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
1543 t = (init_code == COMPLEX_EXPR
1544 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
1545 generate_element_init (sub, t, list_p);
1550 for (t = CONSTRUCTOR_ELTS (init); t ; t = TREE_CHAIN (t))
1552 sub = lookup_element (elt, TREE_PURPOSE (t), NULL, NO_INSERT);
1555 generate_element_init (sub, TREE_VALUE (t), list_p);
1564 /* Insert STMT on all the outgoing edges out of BB. Note that if BB
1565 has more than one edge, STMT will be replicated for each edge. Also,
1566 abnormal edges will be ignored. */
1569 insert_edge_copies (tree stmt, basic_block bb)
1575 for (e = bb->succ; e; e = e->succ_next)
1577 /* We don't need to insert copies on abnormal edges. The
1578 value of the scalar replacement is not guaranteed to
1579 be valid through an abnormal edge. */
1580 if (!(e->flags & EDGE_ABNORMAL))
1584 bsi_insert_on_edge (e, stmt);
1588 bsi_insert_on_edge (e, lhd_unsave_expr_now (stmt));
1593 /* Helper function to insert LIST before BSI, and set up line number info. */
1596 sra_insert_before (block_stmt_iterator *bsi, tree list)
1598 tree stmt = bsi_stmt (*bsi);
1600 if (EXPR_HAS_LOCATION (stmt))
1601 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
1602 bsi_insert_before (bsi, list, BSI_SAME_STMT);
1605 /* Similarly, but insert after BSI. Handles insertion onto edges as well. */
1608 sra_insert_after (block_stmt_iterator *bsi, tree list)
1610 tree stmt = bsi_stmt (*bsi);
1612 if (EXPR_HAS_LOCATION (stmt))
1613 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
1615 if (stmt_ends_bb_p (stmt))
1616 insert_edge_copies (list, bsi->bb);
1618 bsi_insert_after (bsi, list, BSI_CONTINUE_LINKING);
1621 /* Similarly, but replace the statement at BSI. */
1624 sra_replace (block_stmt_iterator *bsi, tree list)
1626 sra_insert_before (bsi, list);
1628 if (bsi_end_p (*bsi))
1629 *bsi = bsi_last (bsi->bb);
1634 /* Scalarize a USE. To recap, this is either a simple reference to ELT,
1635 if ELT is scalar, or some occurrence of ELT that requires a complete
1636 aggregate. IS_OUTPUT is true if ELT is being modified. */
1639 scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
1642 tree list = NULL, stmt = bsi_stmt (*bsi);
1644 if (elt->replacement)
1646 /* If we have a replacement, then updating the reference is as
1647 simple as modifying the existing statement in place. */
1649 mark_all_v_defs (stmt);
1650 *expr_p = elt->replacement;
1655 /* Otherwise we need some copies. If ELT is being read, then we want
1656 to store all (modified) sub-elements back into the structure before
1657 the reference takes place. If ELT is being written, then we want to
1658 load the changed values back into our shadow variables. */
1659 /* ??? We don't check modified for reads, we just always write all of
1660 the values. We should be able to record the SSA number of the VOP
1661 for which the values were last read. If that number matches the
1662 SSA number of the VOP in the current statement, then we needn't
1663 emit an assignment. This would also eliminate double writes when
1664 a structure is passed as more than one argument to a function call.
1665 This optimization would be most effective if sra_walk_function
1666 processed the blocks in dominator order. */
1668 generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
1673 mark_all_v_defs (expr_first (list));
1674 sra_insert_after (bsi, list);
1677 sra_insert_before (bsi, list);
1681 /* Scalarize a COPY. To recap, this is an assignment statement between
1682 two scalarizable references, LHS_ELT and RHS_ELT. */
1685 scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
1686 block_stmt_iterator *bsi)
1690 if (lhs_elt->replacement && rhs_elt->replacement)
1692 /* If we have two scalar operands, modify the existing statement. */
1693 stmt = bsi_stmt (*bsi);
1695 #ifdef ENABLE_CHECKING
1696 /* See the commentary in sra_walk_function concerning
1697 RETURN_EXPR, and why we should never see one here. */
1698 if (TREE_CODE (stmt) != MODIFY_EXPR)
1702 TREE_OPERAND (stmt, 0) = lhs_elt->replacement;
1703 TREE_OPERAND (stmt, 1) = rhs_elt->replacement;
1706 else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
1708 /* If either side requires a block copy, then sync the RHS back
1709 to the original structure, leave the original assignment
1710 statement (which will perform the block copy), then load the
1711 LHS values out of its now-updated original structure. */
1712 /* ??? Could perform a modified pair-wise element copy. That
1713 would at least allow those elements that are instantiated in
1714 both structures to be optimized well. */
1717 generate_copy_inout (rhs_elt, false,
1718 generate_element_ref (rhs_elt), &list);
1721 mark_all_v_defs (expr_first (list));
1722 sra_insert_before (bsi, list);
1726 generate_copy_inout (lhs_elt, true,
1727 generate_element_ref (lhs_elt), &list);
1729 sra_insert_after (bsi, list);
1733 /* Otherwise both sides must be fully instantiated. In which
1734 case perform pair-wise element assignments and replace the
1735 original block copy statement. */
1737 stmt = bsi_stmt (*bsi);
1738 mark_all_v_defs (stmt);
1741 generate_element_copy (lhs_elt, rhs_elt, &list);
1744 sra_replace (bsi, list);
1748 /* Scalarize an INIT. To recap, this is an assignment to a scalarizable
1749 reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
1750 COMPLEX_EXPR. If RHS is NULL, it should be treated as an empty CONSTRUCTOR. */
1754 scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
1758 /* Generate initialization statements for all members extant in the RHS. */
1760 generate_element_init (lhs_elt, rhs, &list);
1762 /* CONSTRUCTOR is defined such that any member not mentioned is assigned
1763 a zero value. Initialize the rest of the instantiated elements. */
1764 generate_element_zero (lhs_elt, &list);
1768 if (lhs_elt->use_block_copy)
1770 /* Since LHS is not fully instantiated, we must leave the structure
1771 assignment in place. Treating this case differently from a USE
1772 exposes constants to later optimizations. */
1773 mark_all_v_defs (expr_first (list));
1774 sra_insert_after (bsi, list);
1778 /* The LHS is fully instantiated. The list of initializations
1779 replaces the original structure assignment. */
1780 mark_all_v_defs (bsi_stmt (*bsi));
1781 sra_replace (bsi, list);
1785 /* A subroutine of scalarize_ldst called via walk_tree. Set TREE_THIS_NOTRAP
1786 on all INDIRECT_REFs. */
1789 mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1793 if (TREE_CODE (t) == INDIRECT_REF)
1795 TREE_THIS_NOTRAP (t) = 1;
1798 else if (DECL_P (t) || TYPE_P (t))
1804 /* Scalarize a LDST. To recap, this is an assignment between one scalarizable
1805 reference ELT and one non-scalarizable reference OTHER. IS_OUTPUT is true
1806 if ELT is on the left-hand side. */
1809 scalarize_ldst (struct sra_elt *elt, tree other,
1810 block_stmt_iterator *bsi, bool is_output)
1812 /* Shouldn't have gotten called for a scalar. */
1813 if (elt->replacement)
1816 if (elt->use_block_copy)
1818 /* Since ELT is not fully instantiated, we have to leave the
1819 block copy in place. Treat this as a USE. */
1820 scalarize_use (elt, NULL, bsi, is_output);
1824 /* The interesting case is when ELT is fully instantiated. In this
1825 case we can have each element stored/loaded directly to/from the
1826 corresponding slot in OTHER. This avoids a block copy. */
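      /* For instance (hypothetical): for "*p = x" with x fully
         instantiated, the block copy is replaced by the per-element form
         "p->a = x$a; p->b = x$b;" built by generate_copy_inout. */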
1828 tree list = NULL, stmt = bsi_stmt (*bsi);
1830 mark_all_v_defs (stmt);
1831 generate_copy_inout (elt, is_output, other, &list);
1835 /* Preserve EH semantics. */
1836 if (stmt_ends_bb_p (stmt))
1838 tree_stmt_iterator tsi;
1841 /* Extract the first statement from LIST. */
1842 tsi = tsi_start (list);
1843 first = tsi_stmt (tsi);
1846 /* Replace the old statement with this new representative. */
1847 bsi_replace (bsi, first, true);
1849 if (!tsi_end_p (tsi))
1851 /* If any reference would trap, then they all would. And more
1852 to the point, the first would. Therefore none of the rest
1853 will trap since the first didn't. Indicate this by
1854 iterating over the remaining statements and set
1855 TREE_THIS_NOTRAP in all INDIRECT_REFs. */
1858 walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
1861 while (!tsi_end_p (tsi));
1863 insert_edge_copies (list, bsi->bb);
1867 sra_replace (bsi, list);
1871 /* Generate initializations for all scalarizable parameters. */
1874 scalarize_parms (void)
1879 EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i,
1881 tree var = referenced_var (i);
1882 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1883 generate_copy_inout (elt, true, var, &list);
1887 insert_edge_copies (list, ENTRY_BLOCK_PTR);
1890 /* Entry point to phase 4. Update the function to match replacements. */
1893 scalarize_function (void)
1895 static const struct sra_walk_fns fns = {
1896 scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
1899 sra_walk_function (&fns);
1901 bsi_commit_edge_inserts (NULL);
1905 /* Debug helper function. Print ELT in a nice human-readable format. */
1908 dump_sra_elt_name (FILE *f, struct sra_elt *elt)
1910 if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
1912 fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
1913 dump_sra_elt_name (f, elt->parent);
1918 dump_sra_elt_name (f, elt->parent);
1919 if (DECL_P (elt->element))
1921 if (TREE_CODE (elt->element) == FIELD_DECL)
1923 print_generic_expr (f, elt->element, dump_flags);
1926 fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
1927 TREE_INT_CST_LOW (elt->element));
1931 /* Likewise, but callable from the debugger. */
1934 debug_sra_elt_name (struct sra_elt *elt)
1936 dump_sra_elt_name (stderr, elt);
1937 fputc ('\n', stderr);
1940 /* Main entry point. */
1945 /* Initialize local variables. */
1946 gcc_obstack_init (&sra_obstack);
1947 sra_candidates = BITMAP_XMALLOC ();
1948 needs_copy_in = BITMAP_XMALLOC ();
1949 sra_type_decomp_cache = BITMAP_XMALLOC ();
1950 sra_type_inst_cache = BITMAP_XMALLOC ();
1951 sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);
1953 /* Scan. If we find anything, instantiate and scalarize. */
1954 if (find_candidates_for_sra ())
1957 decide_instantiations ();
1958 scalarize_function ();
1961 /* Free allocated memory. */
1962 htab_delete (sra_map);
1964 BITMAP_XFREE (sra_candidates);
1965 BITMAP_XFREE (needs_copy_in);
1966 BITMAP_XFREE (sra_type_decomp_cache);
1967 BITMAP_XFREE (sra_type_inst_cache);
1968 obstack_free (&sra_obstack, NULL);
1974 return flag_tree_sra != 0;
1977 struct tree_opt_pass pass_sra =
1980 gate_sra, /* gate */
1981 tree_sra, /* execute */
1984 0, /* static_pass_number */
1985 TV_TREE_SRA, /* tv_id */
1986 PROP_cfg | PROP_ssa, /* properties_required */
1987 0, /* properties_provided */
1988 0, /* properties_destroyed */
1989 0, /* todo_flags_start */
1990 TODO_dump_func | TODO_rename_vars
1991 | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */