2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify
10 it under the terms of the GNU General Public License as published by
11 the Free Software Foundation; either version 3, or (at your option)
14 GCC is distributed in the hope that it will be useful,
15 but WITHOUT ANY WARRANTY; without even the implied warranty of
16 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 GNU General Public License for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
25 #include "coretypes.h"
28 #include "basic-block.h"
29 #include "tree-pretty-print.h"
30 #include "gimple-pretty-print.h"
31 #include "tree-inline.h"
32 #include "tree-flow.h"
34 #include "tree-dump.h"
38 #include "tree-iterator.h"
39 #include "alloc-pool.h"
41 #include "tree-pass.h"
44 #include "langhooks.h"
46 #include "tree-ssa-sccvn.h"
47 #include "tree-scalar-evolution.h"
53 1. Avail sets can be shared by making an avail_find_leader that
54 walks up the dominator tree and looks in those avail sets.
55 This might affect code optimality; it's unclear right now.
56 2. Strength reduction can be performed by anticipating expressions
57 we can repair later on.
58 3. We can do back-substitution or smarter value numbering to catch
59 commutative expressions split up over multiple statements.
62 /* For ease of terminology, "expression node" in the below refers to
63 every expression node but GIMPLE_ASSIGN, because GIMPLE_ASSIGNs
64 represent the actual statement containing the expressions we care about,
65 and we cache the value number by putting it in the expression. */
69 First we walk the statements to generate the AVAIL sets, the
70 EXP_GEN sets, and the tmp_gen sets. EXP_GEN sets represent the
71 generation of values/expressions by a given block. We use them
72 when computing the ANTIC sets. The AVAIL sets consist of
73 SSA_NAME's that represent values, so we know what values are
74 available in what blocks. AVAIL is a forward dataflow problem. In
75 SSA, values are never killed, so we don't need a kill set, or a
76 fixpoint iteration, in order to calculate the AVAIL sets. In
77 traditional parlance, AVAIL sets tell us the downsafety of the expressions/values.
80 Next, we generate the ANTIC sets. These sets represent the
81 anticipatable expressions. ANTIC is a backwards dataflow
82 problem. An expression is anticipatable in a given block if it could
83 be generated in that block. This means that if we had to perform
84 an insertion in that block, of the value of that expression, we
85 could. Calculating the ANTIC sets requires phi translation of
86 expressions, because the flow goes backwards through phis. We must
87 iterate to a fixpoint of the ANTIC sets, because we have a kill
88 set. Even in SSA form, values are not live over the entire
89 function, only from their definition point onwards. So we have to
90 remove values from the ANTIC set once we go past the definition
91 point of the leaders that make them up.
92 compute_antic/compute_antic_aux performs this computation.
94 Third, we perform insertions to make partially redundant
95 expressions fully redundant.
97 An expression is partially redundant (excluding partial anticipation) if:
100 1. It is AVAIL in some, but not all, of the predecessors of a given block.
102 2. It is ANTIC in all the predecessors.
104 In order to make it fully redundant, we insert the expression into
105 the predecessors where it is not available, but is ANTIC.
107 For the partial anticipation case, we only perform insertion if it
108 is partially anticipated in some block, and fully available in all of the predecessors.
111 insert/insert_aux/do_regular_insertion/do_partial_partial_insertion
112 performs these steps.
114 Fourth, we eliminate fully redundant expressions.
115 This is a simple statement walk that replaces redundant
116 calculations with the now available values. */
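/* Purely illustrative, source-level sketch of what the four phases above
   accomplish.  The pass works on GIMPLE, not C source, and the functions
   below are hypothetical examples, not part of this file: "a + b" is
   computed on only one of the two paths reaching the final use, so it is
   partially redundant there; inserting it on the other path makes it
   fully redundant, after which elimination reuses the available value.  */

#include <stdio.h>

/* Before PRE: "a + b" is available on the "then" path but not on the
   "else" path, yet it is anticipatable at the join, so the computation
   at the return is partially redundant.  */
static int
before_pre (int a, int b, int c)
{
  int x = 0;
  if (c)
    x = a + b;          /* a + b available on this predecessor only.  */
  return x + (a + b);   /* redundant on the path through the "then" arm.  */
}

/* After insertion and elimination: the value is computed once on every
   path, and the original computation reuses the available temporary
   (in GIMPLE the two copies would feed a PHI at the join).  */
static int
after_pre (int a, int b, int c)
{
  int x = 0, t;
  if (c)
    {
      t = a + b;
      x = t;
    }
  else
    t = a + b;          /* insertion in the predecessor that lacked it.  */
  return x + t;         /* the fully redundant computation is replaced.  */
}

int
main (void)
{
  printf ("%d %d\n", before_pre (2, 3, 1), after_pre (2, 3, 1));
  return 0;
}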
118 /* Representations of value numbers:
120 Value numbers are represented by a representative SSA_NAME. We
121 will create fake SSA_NAME's in situations where we need a
122 representative but do not have one (because it is a complex
123 expression). In order to facilitate storing the value numbers in
124 bitmaps, and keep the number of wasted SSA_NAME's down, we also
125 associate a value_id with each value number, and create full blown
126 ssa_name's only where we actually need them (IE in operands of
127 existing expressions).
129 Theoretically you could replace all the value_id's with
130 SSA_NAME_VERSION, but this would allocate a large number of
131 SSA_NAME's (which are each > 30 bytes) just to get a 4 byte number.
132 It would also require an additional indirection at each point we use the value id. */
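/* Minimal standalone sketch of the value-id idea described above,
   separate from the pass itself: distinct values get dense small
   integers so they can index bitmaps, and a representative name is only
   recorded where one is actually needed.  All identifiers below
   (get_or_alloc_value_id, value_repr, canon, ...) are hypothetical.  */

#include <stdio.h>
#include <string.h>

#define MAX_VALUES 64

static const char *canon[MAX_VALUES];      /* canonical form per value id */
static const char *value_repr[MAX_VALUES]; /* representative name, if any */
static unsigned next_value_id = 1;         /* 0 means "no value"          */

/* Hypothetical canonicalization: identical strings denote one value.  */
static unsigned
get_or_alloc_value_id (const char *expr)
{
  for (unsigned i = 1; i < next_value_id; i++)
    if (strcmp (canon[i], expr) == 0)
      return i;
  canon[next_value_id] = expr;
  return next_value_id++;
}

int
main (void)
{
  /* x_1 = a_2 + b_3 and y_4 = a_2 + b_3 share one value id; the value
     of a_2 * b_3 gets a different one.  */
  unsigned v1 = get_or_alloc_value_id ("a_2 + b_3");
  unsigned v2 = get_or_alloc_value_id ("a_2 + b_3");
  unsigned v3 = get_or_alloc_value_id ("a_2 * b_3");
  value_repr[v1] = "x_1";  /* only materialize a name where needed */
  printf ("v1=%u v2=%u v3=%u repr(v1)=%s\n", v1, v2, v3, value_repr[v1]);
  return 0;
}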
135 /* Representation of expressions on value numbers:
137 Expressions consisting of value numbers are represented the same
138 way as our VN internally represents them, with an additional
139 "pre_expr" wrapping around them in order to facilitate storing all
140 of the expressions in the same sets. */
142 /* Representation of sets:
144 The dataflow sets do not need to be sorted in any particular order
145 for the majority of their lifetime, and are simply represented as two
146 bitmaps, one that keeps track of values present in the set, and one
147 that keeps track of expressions present in the set.
149 When we need them in topological order, we produce it on demand by
150 transforming the bitmap into an array and sorting it into topo order. */
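/* Minimal standalone sketch of the two-bitmap representation just
   described, assuming toy 64-bit sets: one word indexed by expression
   id, one by value id, with a value-wise insert that only adds an
   expression whose value is not yet present (compare
   bitmap_value_insert_into_set below).  The toy_* identifiers are
   hypothetical.  */

#include <stdint.h>
#include <stdio.h>

struct toy_set
{
  uint64_t expressions; /* bit i set <=> expression id i is in the set    */
  uint64_t values;      /* bit v set <=> some expr with value id v is in  */
};

/* expr_value[i] is the value id of expression id i (assumed known).  */
static const unsigned expr_value[] = { 0, 1, 1, 2, 3 };

/* Insert expression EXPR_ID only if its value is not yet represented.  */
static void
toy_value_insert (struct toy_set *set, unsigned expr_id)
{
  unsigned val = expr_value[expr_id];
  if (!(set->values & ((uint64_t) 1 << val)))
    {
      set->values |= (uint64_t) 1 << val;
      set->expressions |= (uint64_t) 1 << expr_id;
    }
}

int
main (void)
{
  struct toy_set s = { 0, 0 };
  toy_value_insert (&s, 1);
  toy_value_insert (&s, 2); /* same value id as expr 1: not added */
  toy_value_insert (&s, 3);
  printf ("exprs=%#llx values=%#llx\n",
          (unsigned long long) s.expressions,
          (unsigned long long) s.values);
  return 0;
}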
153 /* Type of expression, used to know which member of the PRE_EXPR union is valid. */
164 typedef union pre_expr_union_d
169 vn_reference_t reference;
172 typedef struct pre_expr_d
174 enum pre_expr_kind kind;
179 #define PRE_EXPR_NAME(e) (e)->u.name
180 #define PRE_EXPR_NARY(e) (e)->u.nary
181 #define PRE_EXPR_REFERENCE(e) (e)->u.reference
182 #define PRE_EXPR_CONSTANT(e) (e)->u.constant
185 pre_expr_eq (const void *p1, const void *p2)
187 const struct pre_expr_d *e1 = (const struct pre_expr_d *) p1;
188 const struct pre_expr_d *e2 = (const struct pre_expr_d *) p2;
190 if (e1->kind != e2->kind)
196 return vn_constant_eq_with_type (PRE_EXPR_CONSTANT (e1),
197 PRE_EXPR_CONSTANT (e2));
199 return PRE_EXPR_NAME (e1) == PRE_EXPR_NAME (e2);
201 return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
203 return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
204 PRE_EXPR_REFERENCE (e2));
211 pre_expr_hash (const void *p1)
213 const struct pre_expr_d *e = (const struct pre_expr_d *) p1;
217 return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
219 return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
221 return PRE_EXPR_NARY (e)->hashcode;
223 return PRE_EXPR_REFERENCE (e)->hashcode;
230 /* Next global expression id number. */
231 static unsigned int next_expression_id;
233 /* Mapping from expression to id number we can use in bitmap sets. */
234 DEF_VEC_P (pre_expr);
235 DEF_VEC_ALLOC_P (pre_expr, heap);
236 static VEC(pre_expr, heap) *expressions;
237 static htab_t expression_to_id;
238 static VEC(unsigned, heap) *name_to_id;
240 /* Allocate an expression id for EXPR. */
242 static inline unsigned int
243 alloc_expression_id (pre_expr expr)
246 /* Make sure we won't overflow. */
247 gcc_assert (next_expression_id + 1 > next_expression_id);
248 expr->id = next_expression_id++;
249 VEC_safe_push (pre_expr, heap, expressions, expr);
250 if (expr->kind == NAME)
252 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
253 /* VEC_safe_grow_cleared allocates no headroom. Avoid frequent
254 re-allocations by using VEC_reserve upfront. There is no
255 VEC_quick_grow_cleared unfortunately. */
256 VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
257 VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
258 gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
259 VEC_replace (unsigned, name_to_id, version, expr->id);
263 slot = htab_find_slot (expression_to_id, expr, INSERT);
267 return next_expression_id - 1;
270 /* Return the expression id for tree EXPR. */
272 static inline unsigned int
273 get_expression_id (const pre_expr expr)
278 static inline unsigned int
279 lookup_expression_id (const pre_expr expr)
283 if (expr->kind == NAME)
285 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
286 if (VEC_length (unsigned, name_to_id) <= version)
288 return VEC_index (unsigned, name_to_id, version);
292 slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
295 return ((pre_expr)*slot)->id;
299 /* Return the existing expression id for EXPR, or create one if one
300 does not exist yet. */
302 static inline unsigned int
303 get_or_alloc_expression_id (pre_expr expr)
305 unsigned int id = lookup_expression_id (expr);
307 return alloc_expression_id (expr);
308 return expr->id = id;
311 /* Return the expression that has expression id ID */
313 static inline pre_expr
314 expression_for_id (unsigned int id)
316 return VEC_index (pre_expr, expressions, id);
319 /* Free the expression id field in all of our expressions,
320 and then destroy the expressions array. */
323 clear_expression_ids (void)
325 VEC_free (pre_expr, heap, expressions);
328 static alloc_pool pre_expr_pool;
330 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */
333 get_or_alloc_expr_for_name (tree name)
335 struct pre_expr_d expr;
337 unsigned int result_id;
341 PRE_EXPR_NAME (&expr) = name;
342 result_id = lookup_expression_id (&expr);
344 return expression_for_id (result_id);
346 result = (pre_expr) pool_alloc (pre_expr_pool);
348 PRE_EXPR_NAME (result) = name;
349 alloc_expression_id (result);
353 static bool in_fre = false;
355 /* An unordered bitmap set. One bitmap tracks values, the other, expressions. */
357 typedef struct bitmap_set
359 bitmap_head expressions;
363 #define FOR_EACH_EXPR_ID_IN_SET(set, id, bi) \
364 EXECUTE_IF_SET_IN_BITMAP(&(set)->expressions, 0, (id), (bi))
366 #define FOR_EACH_VALUE_ID_IN_SET(set, id, bi) \
367 EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))
369 /* Mapping from value id to expressions with that value_id. */
370 DEF_VEC_P (bitmap_set_t);
371 DEF_VEC_ALLOC_P (bitmap_set_t, heap);
372 static VEC(bitmap_set_t, heap) *value_expressions;
374 /* Sets that we need to keep track of. */
375 typedef struct bb_bitmap_sets
377 /* The EXP_GEN set, which represents expressions/values generated in
379 bitmap_set_t exp_gen;
381 /* The PHI_GEN set, which represents PHI results generated in a
383 bitmap_set_t phi_gen;
385 /* The TMP_GEN set, which represents results/temporaries generated
386 in a basic block. IE the LHS of an expression. */
387 bitmap_set_t tmp_gen;
389 /* The AVAIL_OUT set, which represents which values are available in
390 a given basic block. */
391 bitmap_set_t avail_out;
393 /* The ANTIC_IN set, which represents which values are anticipatable
394 in a given basic block. */
395 bitmap_set_t antic_in;
397 /* The PA_IN set, which represents which values are
398 partially anticipatable in a given basic block. */
401 /* The NEW_SETS set, which is used during insertion to augment the
402 AVAIL_OUT set of blocks with the new insertions performed during
403 the current iteration. */
404 bitmap_set_t new_sets;
406 /* A cache for value_dies_in_block_x. */
409 /* True if we have visited this block during ANTIC calculation. */
410 unsigned int visited : 1;
412 /* True if we have deferred processing this block during ANTIC
413 calculation until its successor is processed. */
414 unsigned int deferred : 1;
416 /* True when the block contains a call that might not return. */
417 unsigned int contains_may_not_return_call : 1;
420 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
421 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
422 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
423 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out
424 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in
425 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
426 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
427 #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
428 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
429 #define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
430 #define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
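/* Standalone sketch of the pattern used by the macros above: the
   per-block sets live in a separately allocated structure reached
   through the block's aux pointer.  The toy_* names below are
   hypothetical stand-ins for basic_block and bb_bitmap_sets.  */

#include <stdio.h>
#include <stdlib.h>

struct toy_bb
{
  int index;
  void *aux;      /* points at the pass-private per-block data */
};

struct toy_bb_sets
{
  unsigned long exp_gen;   /* placeholders for the real bitmap sets */
  unsigned long avail_out;
  unsigned visited : 1;
};

#define TOY_EXP_GEN(BB)   (((struct toy_bb_sets *) (BB)->aux)->exp_gen)
#define TOY_AVAIL_OUT(BB) (((struct toy_bb_sets *) (BB)->aux)->avail_out)
#define TOY_VISITED(BB)   (((struct toy_bb_sets *) (BB)->aux)->visited)

int
main (void)
{
  struct toy_bb bb = { 3, NULL };
  bb.aux = calloc (1, sizeof (struct toy_bb_sets));
  TOY_EXP_GEN (&bb) |= 1ul << 5;   /* expression id 5 generated here */
  TOY_AVAIL_OUT (&bb) = TOY_EXP_GEN (&bb);
  TOY_VISITED (&bb) = 1;
  printf ("bb %d: exp_gen=%#lx visited=%u\n",
          bb.index, TOY_EXP_GEN (&bb), TOY_VISITED (&bb));
  free (bb.aux);
  return 0;
}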
433 /* Basic block list in postorder. */
434 static int *postorder;
436 /* This structure is used to keep track of statistics on what
437 optimization PRE was able to perform. */
440 /* The number of RHS computations eliminated by PRE. */
443 /* The number of new expressions/temporaries generated by PRE. */
446 /* The number of inserts found due to partial anticipation */
449 /* The number of new PHI nodes added by PRE. */
453 static bool do_partial_partial;
454 static pre_expr bitmap_find_leader (bitmap_set_t, unsigned int, gimple);
455 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
456 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
457 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
458 static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
459 static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
460 static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
462 static bitmap_set_t bitmap_set_new (void);
463 static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
465 static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
467 static unsigned int get_expr_value_id (pre_expr);
469 /* We can add and remove elements and entries to and from sets
470 and hash tables, so we use alloc pools for them. */
472 static alloc_pool bitmap_set_pool;
473 static bitmap_obstack grand_bitmap_obstack;
475 /* To avoid adding 300 temporary variables when we only need one, we
476 only create one temporary variable, on demand, and build ssa names
477 off that. We do have to change the variable if the types don't
478 match the current variable's type. */
480 static tree storetemp;
481 static tree prephitemp;
483 /* Set of blocks with statements that have had their EH properties changed. */
484 static bitmap need_eh_cleanup;
486 /* Set of blocks with statements that have had their AB properties changed. */
487 static bitmap need_ab_cleanup;
489 /* The phi_translate_table caches phi translations for a given
490 expression and predecessor. */
492 static htab_t phi_translate_table;
494 /* A three tuple {e, pred, v} used to cache phi translations in the
495 phi_translate_table. */
497 typedef struct expr_pred_trans_d
499 /* The expression. */
502 /* The predecessor block along which we translated the expression. */
505 /* The value that resulted from the translation. */
508 /* The hashcode for the expression, pred pair. This is cached for
511 } *expr_pred_trans_t;
512 typedef const struct expr_pred_trans_d *const_expr_pred_trans_t;
514 /* Return the hash value for a phi translation table entry. */
517 expr_pred_trans_hash (const void *p)
519 const_expr_pred_trans_t const ve = (const_expr_pred_trans_t) p;
523 /* Return true if two phi translation table entries are the same.
524 P1 and P2 should point to the expr_pred_trans_t's to be compared. */
527 expr_pred_trans_eq (const void *p1, const void *p2)
529 const_expr_pred_trans_t const ve1 = (const_expr_pred_trans_t) p1;
530 const_expr_pred_trans_t const ve2 = (const_expr_pred_trans_t) p2;
531 basic_block b1 = ve1->pred;
532 basic_block b2 = ve2->pred;
534 /* If they are not translations for the same basic block, they can't be equal. */
538 return pre_expr_eq (ve1->e, ve2->e);
541 /* Search in the phi translation table for the translation of
542 expression E in basic block PRED.
543 Return the translated value, if found, NULL otherwise. */
545 static inline pre_expr
546 phi_trans_lookup (pre_expr e, basic_block pred)
549 struct expr_pred_trans_d ept;
553 ept.hashcode = iterative_hash_hashval_t (pre_expr_hash (e), pred->index);
554 slot = htab_find_slot_with_hash (phi_translate_table, &ept, ept.hashcode,
559 return ((expr_pred_trans_t) *slot)->v;
563 /* Add the tuple mapping from {expression E, basic block PRED} to
564 value V, to the phi translation table. */
567 phi_trans_add (pre_expr e, pre_expr v, basic_block pred)
570 expr_pred_trans_t new_pair = XNEW (struct expr_pred_trans_d);
572 new_pair->pred = pred;
574 new_pair->hashcode = iterative_hash_hashval_t (pre_expr_hash (e),
577 slot = htab_find_slot_with_hash (phi_translate_table, new_pair,
578 new_pair->hashcode, INSERT);
580 *slot = (void *) new_pair;
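/* Standalone sketch of the phi-translation cache shape used by
   phi_trans_lookup/phi_trans_add above: results are keyed on the pair
   {expression, predecessor block}, with a hash that mixes the
   expression's hash and the block index.  The hash mix and the
   fixed-size open-addressed table below are illustrative choices, not
   the hashtab.c behaviour.  */

#include <stdio.h>

#define CACHE_SIZE 64 /* power of two; toy sized */

struct trans_entry
{
  unsigned expr_id;   /* expression being translated             */
  int pred_index;     /* predecessor edge the translation is for  */
  unsigned value;     /* cached result (0 = empty slot)           */
};

static struct trans_entry cache[CACHE_SIZE];

/* Generic hash combine; stands in for iterative_hash_hashval_t.  */
static unsigned
combine (unsigned h, unsigned v)
{
  return (h ^ v) * 0x9e3779b1u;
}

/* Return the slot for {EXPR_ID, PRED_INDEX}: either its cached entry
   or the empty slot where it would be inserted.  */
static struct trans_entry *
slot_for (unsigned expr_id, int pred_index)
{
  unsigned h = combine (expr_id, (unsigned) pred_index);
  for (unsigned probe = 0; probe < CACHE_SIZE; probe++)
    {
      struct trans_entry *e = &cache[(h + probe) & (CACHE_SIZE - 1)];
      if (e->value == 0
          || (e->expr_id == expr_id && e->pred_index == pred_index))
        return e;
    }
  return NULL; /* table full; a real table would grow */
}

int
main (void)
{
  struct trans_entry *e = slot_for (7, 2);
  e->expr_id = 7; e->pred_index = 2; e->value = 42;   /* like phi_trans_add    */
  printf ("cached: %u\n", slot_for (7, 2)->value);    /* like phi_trans_lookup */
  return 0;
}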
584 /* Add expression E to the expression set of value id V. */
587 add_to_value (unsigned int v, pre_expr e)
591 gcc_assert (get_expr_value_id (e) == v);
593 if (v >= VEC_length (bitmap_set_t, value_expressions))
595 VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
599 set = VEC_index (bitmap_set_t, value_expressions, v);
602 set = bitmap_set_new ();
603 VEC_replace (bitmap_set_t, value_expressions, v, set);
606 bitmap_insert_into_set_1 (set, e, v, true);
609 /* Create a new bitmap set and return it. */
612 bitmap_set_new (void)
614 bitmap_set_t ret = (bitmap_set_t) pool_alloc (bitmap_set_pool);
615 bitmap_initialize (&ret->expressions, &grand_bitmap_obstack);
616 bitmap_initialize (&ret->values, &grand_bitmap_obstack);
620 /* Return the value id for a PRE expression EXPR. */
623 get_expr_value_id (pre_expr expr)
630 id = get_constant_value_id (PRE_EXPR_CONSTANT (expr));
633 id = get_or_alloc_constant_value_id (PRE_EXPR_CONSTANT (expr));
634 add_to_value (id, expr);
639 return VN_INFO (PRE_EXPR_NAME (expr))->value_id;
641 return PRE_EXPR_NARY (expr)->value_id;
643 return PRE_EXPR_REFERENCE (expr)->value_id;
649 /* Remove an expression EXPR from a bitmapped set. */
652 bitmap_remove_from_set (bitmap_set_t set, pre_expr expr)
654 unsigned int val = get_expr_value_id (expr);
655 if (!value_id_constant_p (val))
657 bitmap_clear_bit (&set->values, val);
658 bitmap_clear_bit (&set->expressions, get_expression_id (expr));
663 bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
664 unsigned int val, bool allow_constants)
666 if (allow_constants || !value_id_constant_p (val))
668 /* We specifically expect this and only this function to be able to
669 insert constants into a set. */
670 bitmap_set_bit (&set->values, val);
671 bitmap_set_bit (&set->expressions, get_or_alloc_expression_id (expr));
675 /* Insert an expression EXPR into a bitmapped set. */
678 bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
680 bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
683 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
686 bitmap_set_copy (bitmap_set_t dest, bitmap_set_t orig)
688 bitmap_copy (&dest->expressions, &orig->expressions);
689 bitmap_copy (&dest->values, &orig->values);
693 /* Free memory used up by SET. */
695 bitmap_set_free (bitmap_set_t set)
697 bitmap_clear (&set->expressions);
698 bitmap_clear (&set->values);
702 /* Generate a topologically-ordered array of bitmap set SET. */
704 static VEC(pre_expr, heap) *
705 sorted_array_from_bitmap_set (bitmap_set_t set)
708 bitmap_iterator bi, bj;
709 VEC(pre_expr, heap) *result;
711 /* Pre-allocate roughly enough space for the array. */
712 result = VEC_alloc (pre_expr, heap, bitmap_count_bits (&set->values));
714 FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
716 /* The number of expressions having a given value is usually
717 relatively small. Thus, rather than making a vector of all
718 the expressions and sorting it by value-id, we walk the values
719 and check in the reverse mapping that tells us what expressions
720 have a given value, to filter those in our set. As a result,
721 the expressions are inserted in value-id order, which means they are put in topological order.
724 If this is somehow a significant loss for some cases, we can
725 choose which set to walk based on the set size. */
726 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, i);
727 FOR_EACH_EXPR_ID_IN_SET (exprset, j, bj)
729 if (bitmap_bit_p (&set->expressions, j))
730 VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
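/* Standalone sketch of how sorted_array_from_bitmap_set obtains
   topological order: walk value ids in increasing order and, for each,
   emit the member expressions that are also in the set, using the
   reverse value->expressions mapping.  The toy bitmaps below are
   hypothetical.  */

#include <stdint.h>
#include <stdio.h>

/* value_exprs[v] is the (toy) bitmap of expression ids with value id v,
   mirroring the value_expressions reverse mapping used above.  */
static const uint64_t value_exprs[] = { 0, (1u << 1) | (1u << 4), 1u << 2, 1u << 3 };

/* Emit the expressions of a set (bitmap over expression ids) in
   increasing value-id order.  */
static void
emit_sorted (uint64_t set_exprs, uint64_t set_values)
{
  for (unsigned v = 0; v < sizeof value_exprs / sizeof *value_exprs; v++)
    if (set_values & ((uint64_t) 1 << v))
      for (unsigned i = 0; i < 64; i++)
        if ((value_exprs[v] & ((uint64_t) 1 << i))
            && (set_exprs & ((uint64_t) 1 << i)))
          printf ("expr %u (value %u)\n", i, v);
}

int
main (void)
{
  /* Set holds expressions 2, 3 and 4 whose values are 2, 3 and 1.  */
  emit_sorted ((1u << 2) | (1u << 3) | (1u << 4),
               (1u << 1) | (1u << 2) | (1u << 3));
  return 0;
}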
737 /* Perform bitmapped set operation DEST &= ORIG. */
740 bitmap_set_and (bitmap_set_t dest, bitmap_set_t orig)
748 bitmap_initialize (&temp, &grand_bitmap_obstack);
750 bitmap_and_into (&dest->values, &orig->values);
751 bitmap_copy (&temp, &dest->expressions);
752 EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
754 pre_expr expr = expression_for_id (i);
755 unsigned int value_id = get_expr_value_id (expr);
756 if (!bitmap_bit_p (&dest->values, value_id))
757 bitmap_clear_bit (&dest->expressions, i);
759 bitmap_clear (&temp);
763 /* Subtract all values and expressions contained in ORIG from DEST. */
766 bitmap_set_subtract (bitmap_set_t dest, bitmap_set_t orig)
768 bitmap_set_t result = bitmap_set_new ();
772 bitmap_and_compl (&result->expressions, &dest->expressions,
775 FOR_EACH_EXPR_ID_IN_SET (result, i, bi)
777 pre_expr expr = expression_for_id (i);
778 unsigned int value_id = get_expr_value_id (expr);
779 bitmap_set_bit (&result->values, value_id);
785 /* Subtract all the values in bitmap set B from bitmap set A. */
788 bitmap_set_subtract_values (bitmap_set_t a, bitmap_set_t b)
794 bitmap_initialize (&temp, &grand_bitmap_obstack);
796 bitmap_copy (&temp, &a->expressions);
797 EXECUTE_IF_SET_IN_BITMAP (&temp, 0, i, bi)
799 pre_expr expr = expression_for_id (i);
800 if (bitmap_set_contains_value (b, get_expr_value_id (expr)))
801 bitmap_remove_from_set (a, expr);
803 bitmap_clear (&temp);
807 /* Return true if bitmapped set SET contains the value VALUE_ID. */
810 bitmap_set_contains_value (bitmap_set_t set, unsigned int value_id)
812 if (value_id_constant_p (value_id))
815 if (!set || bitmap_empty_p (&set->expressions))
818 return bitmap_bit_p (&set->values, value_id);
822 bitmap_set_contains_expr (bitmap_set_t set, const pre_expr expr)
824 return bitmap_bit_p (&set->expressions, get_expression_id (expr));
827 /* Replace an instance of value LOOKFOR with expression EXPR in SET. */
830 bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
833 bitmap_set_t exprset;
837 if (value_id_constant_p (lookfor))
840 if (!bitmap_set_contains_value (set, lookfor))
843 /* The number of expressions having a given value is usually
844 significantly less than the total number of expressions in SET.
845 Thus, rather than check, for each expression in SET, whether it
846 has the value LOOKFOR, we walk the reverse mapping that tells us
847 what expressions have a given value, and see if any of those
848 expressions are in our set. For large testcases, this is about
849 5-10x faster than walking the bitmap. If this is somehow a
850 significant loss for some cases, we can choose which set to walk
851 based on the set size. */
852 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
853 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
855 if (bitmap_clear_bit (&set->expressions, i))
857 bitmap_set_bit (&set->expressions, get_expression_id (expr));
865 /* Return true if two bitmap sets are equal. */
868 bitmap_set_equal (bitmap_set_t a, bitmap_set_t b)
870 return bitmap_equal_p (&a->values, &b->values);
873 /* Replace an instance of EXPR's VALUE with EXPR in SET if it exists,
874 and add it otherwise. */
877 bitmap_value_replace_in_set (bitmap_set_t set, pre_expr expr)
879 unsigned int val = get_expr_value_id (expr);
881 if (bitmap_set_contains_value (set, val))
882 bitmap_set_replace_value (set, val, expr);
884 bitmap_insert_into_set (set, expr);
887 /* Insert EXPR into SET if EXPR's value is not already present in the set. */
891 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
893 unsigned int val = get_expr_value_id (expr);
895 gcc_checking_assert (expr->id == get_or_alloc_expression_id (expr));
897 /* Constant values are always considered to be part of the set. */
898 if (value_id_constant_p (val))
901 /* If the value membership changed, add the expression. */
902 if (bitmap_set_bit (&set->values, val))
903 bitmap_set_bit (&set->expressions, expr->id);
906 /* Print out EXPR to outfile. */
909 print_pre_expr (FILE *outfile, const pre_expr expr)
914 print_generic_expr (outfile, PRE_EXPR_CONSTANT (expr), 0);
917 print_generic_expr (outfile, PRE_EXPR_NAME (expr), 0);
922 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
923 fprintf (outfile, "{%s,", tree_code_name [nary->opcode]);
924 for (i = 0; i < nary->length; i++)
926 print_generic_expr (outfile, nary->op[i], 0);
927 if (i != (unsigned) nary->length - 1)
928 fprintf (outfile, ",");
930 fprintf (outfile, "}");
936 vn_reference_op_t vro;
938 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
939 fprintf (outfile, "{");
941 VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
944 bool closebrace = false;
945 if (vro->opcode != SSA_NAME
946 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
948 fprintf (outfile, "%s", tree_code_name [vro->opcode]);
951 fprintf (outfile, "<");
957 print_generic_expr (outfile, vro->op0, 0);
960 fprintf (outfile, ",");
961 print_generic_expr (outfile, vro->op1, 0);
965 fprintf (outfile, ",");
966 print_generic_expr (outfile, vro->op2, 0);
970 fprintf (outfile, ">");
971 if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
972 fprintf (outfile, ",");
974 fprintf (outfile, "}");
977 fprintf (outfile, "@");
978 print_generic_expr (outfile, ref->vuse, 0);
984 void debug_pre_expr (pre_expr);
986 /* Like print_pre_expr but always prints to stderr. */
988 debug_pre_expr (pre_expr e)
990 print_pre_expr (stderr, e);
991 fprintf (stderr, "\n");
994 /* Print out SET to OUTFILE. */
997 print_bitmap_set (FILE *outfile, bitmap_set_t set,
998 const char *setname, int blockindex)
1000 fprintf (outfile, "%s[%d] := { ", setname, blockindex);
1007 FOR_EACH_EXPR_ID_IN_SET (set, i, bi)
1009 const pre_expr expr = expression_for_id (i);
1012 fprintf (outfile, ", ");
1014 print_pre_expr (outfile, expr);
1016 fprintf (outfile, " (%04d)", get_expr_value_id (expr));
1019 fprintf (outfile, " }\n");
1022 void debug_bitmap_set (bitmap_set_t);
1025 debug_bitmap_set (bitmap_set_t set)
1027 print_bitmap_set (stderr, set, "debug", 0);
1030 /* Print out the expressions that have VAL to OUTFILE. */
1033 print_value_expressions (FILE *outfile, unsigned int val)
1035 bitmap_set_t set = VEC_index (bitmap_set_t, value_expressions, val);
1039 sprintf (s, "%04d", val);
1040 print_bitmap_set (outfile, set, s, 0);
1046 debug_value_expressions (unsigned int val)
1048 print_value_expressions (stderr, val);
1051 /* Given a CONSTANT, allocate a new CONSTANT type PRE_EXPR to represent it. */
1055 get_or_alloc_expr_for_constant (tree constant)
1057 unsigned int result_id;
1058 unsigned int value_id;
1059 struct pre_expr_d expr;
1062 expr.kind = CONSTANT;
1063 PRE_EXPR_CONSTANT (&expr) = constant;
1064 result_id = lookup_expression_id (&expr);
1066 return expression_for_id (result_id);
1068 newexpr = (pre_expr) pool_alloc (pre_expr_pool);
1069 newexpr->kind = CONSTANT;
1070 PRE_EXPR_CONSTANT (newexpr) = constant;
1071 alloc_expression_id (newexpr);
1072 value_id = get_or_alloc_constant_value_id (constant);
1073 add_to_value (value_id, newexpr);
1077 /* Given a value id V, find the actual tree representing the constant
1078 value if there is one, and return it. Return NULL if we can't find one. */
1082 get_constant_for_value_id (unsigned int v)
1084 if (value_id_constant_p (v))
1088 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, v);
1090 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
1092 pre_expr expr = expression_for_id (i);
1093 if (expr->kind == CONSTANT)
1094 return PRE_EXPR_CONSTANT (expr);
1100 /* Get or allocate a pre_expr for a piece of GIMPLE, and return it.
1101 Currently only supports constants and SSA_NAMES. */
1103 get_or_alloc_expr_for (tree t)
1105 if (TREE_CODE (t) == SSA_NAME)
1106 return get_or_alloc_expr_for_name (t);
1107 else if (is_gimple_min_invariant (t))
1108 return get_or_alloc_expr_for_constant (t);
1111 /* More complex expressions can result from SCCVN expression
1112 simplification that inserts values for them. As they all
1113 do not have VOPs, they get handled by the nary ops struct. */
1114 vn_nary_op_t result;
1115 unsigned int result_id;
1116 vn_nary_op_lookup (t, &result);
1119 pre_expr e = (pre_expr) pool_alloc (pre_expr_pool);
1121 PRE_EXPR_NARY (e) = result;
1122 result_id = lookup_expression_id (e);
1125 pool_free (pre_expr_pool, e);
1126 e = expression_for_id (result_id);
1129 alloc_expression_id (e);
1136 /* Return the folded version of T if T, when folded, is a gimple
1137 min_invariant. Otherwise, return T. */
1140 fully_constant_expression (pre_expr e)
1148 vn_nary_op_t nary = PRE_EXPR_NARY (e);
1149 switch (TREE_CODE_CLASS (nary->opcode))
1152 case tcc_comparison:
1154 /* We have to go from trees to pre exprs to value ids to constants. */
1156 tree naryop0 = nary->op[0];
1157 tree naryop1 = nary->op[1];
1159 if (!is_gimple_min_invariant (naryop0))
1161 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1162 unsigned int vrep0 = get_expr_value_id (rep0);
1163 tree const0 = get_constant_for_value_id (vrep0);
1165 naryop0 = fold_convert (TREE_TYPE (naryop0), const0);
1167 if (!is_gimple_min_invariant (naryop1))
1169 pre_expr rep1 = get_or_alloc_expr_for (naryop1);
1170 unsigned int vrep1 = get_expr_value_id (rep1);
1171 tree const1 = get_constant_for_value_id (vrep1);
1173 naryop1 = fold_convert (TREE_TYPE (naryop1), const1);
1175 result = fold_binary (nary->opcode, nary->type,
1177 if (result && is_gimple_min_invariant (result))
1178 return get_or_alloc_expr_for_constant (result);
1179 /* We might have simplified the expression to a
1180 SSA_NAME for example from x_1 * 1. But we cannot
1181 insert a PHI for x_1 unconditionally as x_1 might
1182 not be available readily. */
1186 if (nary->opcode != REALPART_EXPR
1187 && nary->opcode != IMAGPART_EXPR
1188 && nary->opcode != VIEW_CONVERT_EXPR)
1193 /* We have to go from trees to pre exprs to value ids to constants. */
1195 tree naryop0 = nary->op[0];
1196 tree const0, result;
1197 if (is_gimple_min_invariant (naryop0))
1201 pre_expr rep0 = get_or_alloc_expr_for (naryop0);
1202 unsigned int vrep0 = get_expr_value_id (rep0);
1203 const0 = get_constant_for_value_id (vrep0);
1208 tree type1 = TREE_TYPE (nary->op[0]);
1209 const0 = fold_convert (type1, const0);
1210 result = fold_unary (nary->opcode, nary->type, const0);
1212 if (result && is_gimple_min_invariant (result))
1213 return get_or_alloc_expr_for_constant (result);
1222 vn_reference_t ref = PRE_EXPR_REFERENCE (e);
1224 if ((folded = fully_constant_vn_reference_p (ref)))
1225 return get_or_alloc_expr_for_constant (folded);
1234 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that
1235 it has the value it would have in BLOCK. Set *SAME_VALID to true
1236 in case the new vuse doesn't change the value id of the OPERANDS. */
1239 translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
1240 alias_set_type set, tree type, tree vuse,
1241 basic_block phiblock,
1242 basic_block block, bool *same_valid)
1244 gimple phi = SSA_NAME_DEF_STMT (vuse);
1251 if (gimple_bb (phi) != phiblock)
1254 use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands);
1256 /* Use the alias-oracle to find either the PHI node in this block,
1257 the first VUSE used in this block that is equivalent to vuse, or
1258 the first VUSE whose definition in this block kills the value. */
1259 if (gimple_code (phi) == GIMPLE_PHI)
1260 e = find_edge (block, phiblock);
1261 else if (use_oracle)
1262 while (!stmt_may_clobber_ref_p_1 (phi, &ref))
1264 vuse = gimple_vuse (phi);
1265 phi = SSA_NAME_DEF_STMT (vuse);
1266 if (gimple_bb (phi) != phiblock)
1268 if (gimple_code (phi) == GIMPLE_PHI)
1270 e = find_edge (block, phiblock);
1281 bitmap visited = NULL;
1282 /* Try to find a vuse that dominates this phi node by skipping
1283 non-clobbering statements. */
1284 vuse = get_continuation_for_phi (phi, &ref, &visited, false);
1286 BITMAP_FREE (visited);
1292 /* If we didn't find any, the value ID can't stay the same,
1293 but return the translated vuse. */
1294 *same_valid = false;
1295 vuse = PHI_ARG_DEF (phi, e->dest_idx);
1297 /* ??? We would like to return vuse here as this is the canonical
1298 upmost vdef that this reference is associated with. But during
1299 insertion of the references into the hash tables we only ever
1300 directly insert with their direct gimple_vuse, hence returning
1301 something else would make us not find the other expression. */
1302 return PHI_ARG_DEF (phi, e->dest_idx);
1308 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or*
1309 SET2. This is used to avoid making a set consisting of the union
1310 of PA_IN and ANTIC_IN during insert. */
1312 static inline pre_expr
1313 find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2)
1317 result = bitmap_find_leader (set1, val, NULL);
1318 if (!result && set2)
1319 result = bitmap_find_leader (set2, val, NULL);
1323 /* Get the tree type for our PRE expression e. */
1326 get_expr_type (const pre_expr e)
1331 return TREE_TYPE (PRE_EXPR_NAME (e));
1333 return TREE_TYPE (PRE_EXPR_CONSTANT (e));
1335 return PRE_EXPR_REFERENCE (e)->type;
1337 return PRE_EXPR_NARY (e)->type;
1342 /* Get a representative SSA_NAME for a given expression.
1343 Since all of our sub-expressions are treated as values, we require
1344 them to be SSA_NAME's for simplicity.
1345 Prior versions of GVNPRE used to use "value handles" here, so that
1346 an expression would be VH.11 + VH.10 instead of d_3 + e_6. In
1347 either case, the operands are really values (IE we do not expect
1348 them to be usable without finding leaders). */
1351 get_representative_for (const pre_expr e)
1355 unsigned int value_id = get_expr_value_id (e);
1360 return PRE_EXPR_NAME (e);
1362 return PRE_EXPR_CONSTANT (e);
1366 /* Go through all of the expressions representing this value
1367 and pick out an SSA_NAME. */
1370 bitmap_set_t exprs = VEC_index (bitmap_set_t, value_expressions,
1372 FOR_EACH_EXPR_ID_IN_SET (exprs, i, bi)
1374 pre_expr rep = expression_for_id (i);
1375 if (rep->kind == NAME)
1376 return PRE_EXPR_NAME (rep);
1381 /* If we reached here we couldn't find an SSA_NAME. This can
1382 happen when we've discovered a value that has never appeared in
1383 the program as set to an SSA_NAME, most likely as the result of
1388 "Could not find SSA_NAME representative for expression:");
1389 print_pre_expr (dump_file, e);
1390 fprintf (dump_file, "\n");
1393 exprtype = get_expr_type (e);
1395 /* Build and insert the assignment of the end result to the temporary
1396 that we will return. */
1397 if (!pretemp || exprtype != TREE_TYPE (pretemp))
1399 pretemp = create_tmp_reg (exprtype, "pretmp");
1400 add_referenced_var (pretemp);
1403 name = make_ssa_name (pretemp, gimple_build_nop ());
1404 VN_INFO_GET (name)->value_id = value_id;
1405 if (e->kind == CONSTANT)
1406 VN_INFO (name)->valnum = PRE_EXPR_CONSTANT (e);
1408 VN_INFO (name)->valnum = name;
1410 add_to_value (value_id, get_or_alloc_expr_for_name (name));
1413 fprintf (dump_file, "Created SSA_NAME representative ");
1414 print_generic_expr (dump_file, name, 0);
1415 fprintf (dump_file, " for expression:");
1416 print_pre_expr (dump_file, e);
1417 fprintf (dump_file, "\n");
1426 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1427 basic_block pred, basic_block phiblock);
1429 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1430 the phis in PRED. Return NULL if we can't find a leader for each part
1431 of the translated expression. */
1434 phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1435 basic_block pred, basic_block phiblock)
1442 bool changed = false;
1443 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1444 vn_nary_op_t newnary = XALLOCAVAR (struct vn_nary_op_s,
1445 sizeof_vn_nary_op (nary->length));
1446 memcpy (newnary, nary, sizeof_vn_nary_op (nary->length));
1448 for (i = 0; i < newnary->length; i++)
1450 if (TREE_CODE (newnary->op[i]) != SSA_NAME)
1454 pre_expr leader, result;
1455 unsigned int op_val_id = VN_INFO (newnary->op[i])->value_id;
1456 leader = find_leader_in_sets (op_val_id, set1, set2);
1457 result = phi_translate (leader, set1, set2, pred, phiblock);
1458 if (result && result != leader)
1460 tree name = get_representative_for (result);
1463 newnary->op[i] = name;
1468 changed |= newnary->op[i] != nary->op[i];
1474 unsigned int new_val_id;
1476 tree result = vn_nary_op_lookup_pieces (newnary->length,
1481 if (result && is_gimple_min_invariant (result))
1482 return get_or_alloc_expr_for_constant (result);
1484 expr = (pre_expr) pool_alloc (pre_expr_pool);
1489 PRE_EXPR_NARY (expr) = nary;
1490 constant = fully_constant_expression (expr);
1491 if (constant != expr)
1494 new_val_id = nary->value_id;
1495 get_or_alloc_expression_id (expr);
1499 new_val_id = get_next_value_id ();
1500 VEC_safe_grow_cleared (bitmap_set_t, heap,
1502 get_max_value_id() + 1);
1503 nary = vn_nary_op_insert_pieces (newnary->length,
1507 result, new_val_id);
1508 PRE_EXPR_NARY (expr) = nary;
1509 constant = fully_constant_expression (expr);
1510 if (constant != expr)
1512 get_or_alloc_expression_id (expr);
1514 add_to_value (new_val_id, expr);
1522 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
1523 VEC (vn_reference_op_s, heap) *operands = ref->operands;
1524 tree vuse = ref->vuse;
1525 tree newvuse = vuse;
1526 VEC (vn_reference_op_s, heap) *newoperands = NULL;
1527 bool changed = false, same_valid = true;
1528 unsigned int i, j, n;
1529 vn_reference_op_t operand;
1530 vn_reference_t newref;
1533 VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
1538 tree type = operand->type;
1539 vn_reference_op_s newop = *operand;
1540 op[0] = operand->op0;
1541 op[1] = operand->op1;
1542 op[2] = operand->op2;
1543 for (n = 0; n < 3; ++n)
1545 unsigned int op_val_id;
1548 if (TREE_CODE (op[n]) != SSA_NAME)
1550 /* We can't possibly insert these. */
1552 && !is_gimple_min_invariant (op[n]))
1556 op_val_id = VN_INFO (op[n])->value_id;
1557 leader = find_leader_in_sets (op_val_id, set1, set2);
1560 /* Make sure we do not recursively translate ourselves
1561 like for translating a[n_1] with the leader for
1562 n_1 being a[n_1]. */
1563 if (get_expression_id (leader) != get_expression_id (expr))
1565 opresult = phi_translate (leader, set1, set2,
1569 if (opresult != leader)
1571 tree name = get_representative_for (opresult);
1574 changed |= name != op[n];
1582 VEC_free (vn_reference_op_s, heap, newoperands);
1586 newoperands = VEC_copy (vn_reference_op_s, heap, operands);
1587 /* We may have changed from an SSA_NAME to a constant */
1588 if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
1589 newop.opcode = TREE_CODE (op[0]);
1594 /* If it transforms a non-constant ARRAY_REF into a constant
1595 one, adjust the constant offset. */
1596 if (newop.opcode == ARRAY_REF
1598 && TREE_CODE (op[0]) == INTEGER_CST
1599 && TREE_CODE (op[1]) == INTEGER_CST
1600 && TREE_CODE (op[2]) == INTEGER_CST)
1602 double_int off = tree_to_double_int (op[0]);
1603 off = double_int_add (off,
1605 (tree_to_double_int (op[1])));
1606 off = double_int_mul (off, tree_to_double_int (op[2]));
1607 if (double_int_fits_in_shwi_p (off))
1608 newop.off = off.low;
1610 VEC_replace (vn_reference_op_s, newoperands, j, &newop);
1611 /* If it transforms from an SSA_NAME to an address, fold with
1612 a preceding indirect reference. */
1613 if (j > 0 && op[0] && TREE_CODE (op[0]) == ADDR_EXPR
1614 && VEC_index (vn_reference_op_s,
1615 newoperands, j - 1)->opcode == MEM_REF)
1616 vn_reference_fold_indirect (&newoperands, &j);
1618 if (i != VEC_length (vn_reference_op_s, operands))
1621 VEC_free (vn_reference_op_s, heap, newoperands);
1627 newvuse = translate_vuse_through_block (newoperands,
1628 ref->set, ref->type,
1629 vuse, phiblock, pred,
1631 if (newvuse == NULL_TREE)
1633 VEC_free (vn_reference_op_s, heap, newoperands);
1638 if (changed || newvuse != vuse)
1640 unsigned int new_val_id;
1642 bool converted = false;
1644 tree result = vn_reference_lookup_pieces (newvuse, ref->set,
1649 VEC_free (vn_reference_op_s, heap, newoperands);
1652 && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
1654 result = fold_build1 (VIEW_CONVERT_EXPR, ref->type, result);
1657 else if (!result && newref
1658 && !useless_type_conversion_p (ref->type, newref->type))
1660 VEC_free (vn_reference_op_s, heap, newoperands);
1664 if (result && is_gimple_min_invariant (result))
1666 gcc_assert (!newoperands);
1667 return get_or_alloc_expr_for_constant (result);
1670 expr = (pre_expr) pool_alloc (pre_expr_pool);
1671 expr->kind = REFERENCE;
1679 gcc_assert (CONVERT_EXPR_P (result)
1680 || TREE_CODE (result) == VIEW_CONVERT_EXPR);
1682 nresult = vn_nary_op_lookup_pieces (1, TREE_CODE (result),
1684 &TREE_OPERAND (result, 0),
1686 if (nresult && is_gimple_min_invariant (nresult))
1687 return get_or_alloc_expr_for_constant (nresult);
1692 PRE_EXPR_NARY (expr) = nary;
1693 constant = fully_constant_expression (expr);
1694 if (constant != expr)
1697 new_val_id = nary->value_id;
1698 get_or_alloc_expression_id (expr);
1702 new_val_id = get_next_value_id ();
1703 VEC_safe_grow_cleared (bitmap_set_t, heap,
1705 get_max_value_id() + 1);
1706 nary = vn_nary_op_insert_pieces (1, TREE_CODE (result),
1708 &TREE_OPERAND (result, 0),
1711 PRE_EXPR_NARY (expr) = nary;
1712 constant = fully_constant_expression (expr);
1713 if (constant != expr)
1715 get_or_alloc_expression_id (expr);
1720 PRE_EXPR_REFERENCE (expr) = newref;
1721 constant = fully_constant_expression (expr);
1722 if (constant != expr)
1725 new_val_id = newref->value_id;
1726 get_or_alloc_expression_id (expr);
1730 if (changed || !same_valid)
1732 new_val_id = get_next_value_id ();
1733 VEC_safe_grow_cleared (bitmap_set_t, heap,
1735 get_max_value_id() + 1);
1738 new_val_id = ref->value_id;
1739 newref = vn_reference_insert_pieces (newvuse, ref->set,
1742 result, new_val_id);
1744 PRE_EXPR_REFERENCE (expr) = newref;
1745 constant = fully_constant_expression (expr);
1746 if (constant != expr)
1748 get_or_alloc_expression_id (expr);
1750 add_to_value (new_val_id, expr);
1752 VEC_free (vn_reference_op_s, heap, newoperands);
1762 tree name = PRE_EXPR_NAME (expr);
1764 def_stmt = SSA_NAME_DEF_STMT (name);
1765 if (gimple_code (def_stmt) == GIMPLE_PHI
1766 && gimple_bb (def_stmt) == phiblock)
1771 e = find_edge (pred, gimple_bb (phi));
1774 tree def = PHI_ARG_DEF (phi, e->dest_idx);
1777 if (TREE_CODE (def) == SSA_NAME)
1778 def = VN_INFO (def)->valnum;
1780 /* Handle constant. */
1781 if (is_gimple_min_invariant (def))
1782 return get_or_alloc_expr_for_constant (def);
1784 if (TREE_CODE (def) == SSA_NAME && ssa_undefined_value_p (def))
1787 newexpr = get_or_alloc_expr_for_name (def);
1798 /* Wrapper around phi_translate_1 providing caching functionality. */
1801 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1802 basic_block pred, basic_block phiblock)
1809 /* Constants contain no values that need translation. */
1810 if (expr->kind == CONSTANT)
1813 if (value_id_constant_p (get_expr_value_id (expr)))
1816 if (expr->kind != NAME)
1818 phitrans = phi_trans_lookup (expr, pred);
1824 phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
1826 /* Don't add empty translations to the cache, nor
1827 translations of NAMEs, as those are cheap to translate. */
1829 && expr->kind != NAME)
1830 phi_trans_add (expr, phitrans, pred);
1836 /* For each expression in SET, translate the values through phi nodes
1837 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1838 expressions in DEST. */
1841 phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
1842 basic_block phiblock)
1844 VEC (pre_expr, heap) *exprs;
1848 if (gimple_seq_empty_p (phi_nodes (phiblock)))
1850 bitmap_set_copy (dest, set);
1854 exprs = sorted_array_from_bitmap_set (set);
1855 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
1857 pre_expr translated;
1858 translated = phi_translate (expr, set, NULL, pred, phiblock);
1862 /* We might end up with multiple expressions from SET being
1863 translated to the same value. In this case we do not want
1864 to retain the NARY or REFERENCE expression but prefer a NAME
1865 which would be the leader. */
1866 if (translated->kind == NAME)
1867 bitmap_value_replace_in_set (dest, translated);
1869 bitmap_value_insert_into_set (dest, translated);
1871 VEC_free (pre_expr, heap, exprs);
1874 /* Find the leader for a value (i.e., the name representing that
1875 value) in a given set, and return it. If STMT is non-NULL it
1876 makes sure the defining statement for the leader dominates it.
1877 Return NULL if no leader is found. */
1880 bitmap_find_leader (bitmap_set_t set, unsigned int val, gimple stmt)
1882 if (value_id_constant_p (val))
1886 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);
1888 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
1890 pre_expr expr = expression_for_id (i);
1891 if (expr->kind == CONSTANT)
1895 if (bitmap_set_contains_value (set, val))
1897 /* Rather than walk the entire bitmap of expressions, and see
1898 whether any of them has the value we are looking for, we look
1899 at the reverse mapping, which tells us the set of expressions
1900 that have a given value (IE value->expressions with that
1901 value) and see if any of those expressions are in our set.
1902 The number of expressions per value is usually significantly
1903 less than the number of expressions in the set. In fact, for
1904 large testcases, doing it this way is roughly 5-10x faster
1905 than walking the bitmap.
1906 If this is somehow a significant loss for some cases, we can
1907 choose which set to walk based on which set is smaller. */
1910 bitmap_set_t exprset = VEC_index (bitmap_set_t, value_expressions, val);
1912 EXECUTE_IF_AND_IN_BITMAP (&exprset->expressions,
1913 &set->expressions, 0, i, bi)
1915 pre_expr val = expression_for_id (i);
1916 /* At the point where stmt is not null, there should always
1917 be an SSA_NAME first in the list of expressions. */
1920 gimple def_stmt = SSA_NAME_DEF_STMT (PRE_EXPR_NAME (val));
1921 if (gimple_code (def_stmt) != GIMPLE_PHI
1922 && gimple_bb (def_stmt) == gimple_bb (stmt)
1923 /* PRE insertions are at the end of the basic-block and have an increasing uid. */
1925 && (gimple_uid (def_stmt) == 0
1926 || gimple_uid (def_stmt) >= gimple_uid (stmt)))
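/* Standalone sketch of the leader lookup above: instead of scanning the
   whole set, intersect the set's expression bitmap with the bitmap of
   expressions known to have the wanted value (the reverse mapping), and
   take the first member.  Toy 64-bit bitmaps stand in for the real
   bitmap type; the dominance/uid checks are omitted.  */

#include <stdint.h>
#include <stdio.h>

/* Toy reverse mapping: exprs_of_value[v] = expressions with value id v.  */
static const uint64_t exprs_of_value[] = { 0, (1u << 3) | (1u << 9), 1u << 5 };

/* Find the leader (lowest-numbered expression) for VAL in the set.  */
static int
find_leader (uint64_t set_exprs, unsigned val)
{
  uint64_t both = set_exprs & exprs_of_value[val];
  if (both == 0)
    return -1;
  /* Lowest set bit = first expression in the intersection.  */
  int i = 0;
  while (!(both & ((uint64_t) 1 << i)))
    i++;
  return i;
}

int
main (void)
{
  uint64_t set = (1u << 2) | (1u << 9);    /* set contains exprs 2 and 9 */
  printf ("leader for value 1: %d\n", find_leader (set, 1)); /* -> 9  */
  printf ("leader for value 2: %d\n", find_leader (set, 2)); /* -> -1 */
  return 0;
}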
1935 /* Determine if EXPR, a memory expression, is ANTIC_IN at the top of
1936 BLOCK by seeing if it is not killed in the block. Note that we are
1937 only determining whether there is a store that kills it. Because
1938 of the order in which clean iterates over values, we are guaranteed
1939 that altered operands will have caused us to be eliminated from the
1940 ANTIC_IN set already. */
1943 value_dies_in_block_x (pre_expr expr, basic_block block)
1945 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse;
1946 vn_reference_t refx = PRE_EXPR_REFERENCE (expr);
1948 gimple_stmt_iterator gsi;
1949 unsigned id = get_expression_id (expr);
1956 /* Lookup a previously calculated result. */
1957 if (EXPR_DIES (block)
1958 && bitmap_bit_p (EXPR_DIES (block), id * 2))
1959 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1);
1961 /* A memory expression {e, VUSE} dies in the block if there is a
1962 statement that may clobber e. If, when walking statements from the
1963 top of the basic block, a statement uses VUSE, there can be no kill
1964 in between that use and the original statement that loaded {e, VUSE},
1965 so we can stop walking. */
1966 ref.base = NULL_TREE;
1967 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
1969 tree def_vuse, def_vdef;
1970 def = gsi_stmt (gsi);
1971 def_vuse = gimple_vuse (def);
1972 def_vdef = gimple_vdef (def);
1974 /* Not a memory statement. */
1978 /* Not a may-def. */
1981 /* A load with the same VUSE, we're done. */
1982 if (def_vuse == vuse)
1988 /* Init ref only if we really need it. */
1989 if (ref.base == NULL_TREE
1990 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type,
1996 /* If the statement may clobber expr, it dies. */
1997 if (stmt_may_clobber_ref_p_1 (def, &ref))
2004 /* Remember the result. */
2005 if (!EXPR_DIES (block))
2006 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack);
2007 bitmap_set_bit (EXPR_DIES (block), id * 2);
2009 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1);
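/* Standalone sketch of the EXPR_DIES caching scheme above, assuming a
   toy 64-bit cache: bit 2*id records that the question has been
   answered for this block, bit 2*id+1 records the answer, so the
   (potentially expensive) statement walk runs at most once per
   expression.  The identifiers below are hypothetical.  */

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

static uint64_t dies_cache;

/* Return whether expression ID dies, computing the answer at most once.  */
static bool
expr_dies (unsigned id, bool (*compute) (unsigned))
{
  if (dies_cache & ((uint64_t) 1 << (2 * id)))
    return (dies_cache & ((uint64_t) 1 << (2 * id + 1))) != 0;

  bool result = compute (id);
  dies_cache |= (uint64_t) 1 << (2 * id);
  if (result)
    dies_cache |= (uint64_t) 1 << (2 * id + 1);
  return result;
}

static bool
expensive_walk (unsigned id)
{
  printf ("walking block for expression %u\n", id); /* happens once */
  return id % 2 == 0;
}

int
main (void)
{
  printf ("%d\n", expr_dies (4, expensive_walk));
  printf ("%d\n", expr_dies (4, expensive_walk)); /* served from cache */
  return 0;
}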
2015 #define union_contains_value(SET1, SET2, VAL) \
2016 (bitmap_set_contains_value ((SET1), (VAL)) \
2017 || ((SET2) && bitmap_set_contains_value ((SET2), (VAL))))
2019 /* Determine if vn_reference_op_t VRO is legal in SET1 U SET2.
2022 vro_valid_in_sets (bitmap_set_t set1, bitmap_set_t set2,
2023 vn_reference_op_t vro)
2025 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
2027 struct pre_expr_d temp;
2030 PRE_EXPR_NAME (&temp) = vro->op0;
2031 temp.id = lookup_expression_id (&temp);
2034 if (!union_contains_value (set1, set2,
2035 get_expr_value_id (&temp)))
2038 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
2040 struct pre_expr_d temp;
2043 PRE_EXPR_NAME (&temp) = vro->op1;
2044 temp.id = lookup_expression_id (&temp);
2047 if (!union_contains_value (set1, set2,
2048 get_expr_value_id (&temp)))
2052 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
2054 struct pre_expr_d temp;
2057 PRE_EXPR_NAME (&temp) = vro->op2;
2058 temp.id = lookup_expression_id (&temp);
2061 if (!union_contains_value (set1, set2,
2062 get_expr_value_id (&temp)))
2069 /* Determine if the expression EXPR is valid in SET1 U SET2.
2070 ONLY SET2 CAN BE NULL.
2071 This means that we have a leader for each part of the expression
2072 (if it consists of values), or the expression is an SSA_NAME.
2073 For loads/calls, we also see if the vuse is killed in this block. */
2076 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
2082 return bitmap_set_contains_expr (AVAIL_OUT (block), expr);
2086 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
2087 for (i = 0; i < nary->length; i++)
2089 if (TREE_CODE (nary->op[i]) == SSA_NAME)
2091 struct pre_expr_d temp;
2094 PRE_EXPR_NAME (&temp) = nary->op[i];
2095 temp.id = lookup_expression_id (&temp);
2098 if (!union_contains_value (set1, set2,
2099 get_expr_value_id (&temp)))
2103 /* If the NARY may trap, make sure the block does not contain
2104 a possible exit point.
2105 ??? This is overly conservative if we translate AVAIL_OUT
2106 as the available expression might be after the exit point. */
2107 if (BB_MAY_NOTRETURN (block)
2108 && vn_nary_may_trap (nary))
2115 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
2116 vn_reference_op_t vro;
2119 FOR_EACH_VEC_ELT (vn_reference_op_s, ref->operands, i, vro)
2121 if (!vro_valid_in_sets (set1, set2, vro))
2126 gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse);
2127 if (!gimple_nop_p (def_stmt)
2128 && gimple_bb (def_stmt) != block
2129 && !dominated_by_p (CDI_DOMINATORS,
2130 block, gimple_bb (def_stmt)))
2133 return !value_dies_in_block_x (expr, block);
2140 /* Clean the set of expressions that are no longer valid in SET1 or
2141 SET2. This means expressions that are made up of values we have no
2142 leaders for in SET1 or SET2. This version is used for partial
2143 anticipation, which means it is not valid in either ANTIC_IN or PA_IN. */
2147 dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
2149 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1);
2153 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
2155 if (!valid_in_sets (set1, set2, expr, block))
2156 bitmap_remove_from_set (set1, expr);
2158 VEC_free (pre_expr, heap, exprs);
2161 /* Clean the set of expressions that are no longer valid in SET. This
2162 means expressions that are made up of values we have no leaders for in SET. */
2166 clean (bitmap_set_t set, basic_block block)
2168 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set);
2172 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
2174 if (!valid_in_sets (set, NULL, expr, block))
2175 bitmap_remove_from_set (set, expr);
2177 VEC_free (pre_expr, heap, exprs);
2180 static sbitmap has_abnormal_preds;
2182 /* List of blocks that may have changed during ANTIC computation and
2183 thus need to be iterated over. */
2185 static sbitmap changed_blocks;
2187 /* Decide whether to defer a block for a later iteration, or PHI
2188 translate SOURCE to DEST using phis in PHIBLOCK. Return false if we
2189 should defer the block, and true if we processed it. */
2192 defer_or_phi_translate_block (bitmap_set_t dest, bitmap_set_t source,
2193 basic_block block, basic_block phiblock)
2195 if (!BB_VISITED (phiblock))
2197 SET_BIT (changed_blocks, block->index);
2198 BB_VISITED (block) = 0;
2199 BB_DEFERRED (block) = 1;
2203 phi_translate_set (dest, source, block, phiblock);
2207 /* Compute the ANTIC set for BLOCK.
2209 If succs(BLOCK) > 1 then
2210 ANTIC_OUT[BLOCK] = intersection of ANTIC_IN[b] for all succ(BLOCK)
2211 else if succs(BLOCK) == 1 then
2212 ANTIC_OUT[BLOCK] = phi_translate (ANTIC_IN[succ(BLOCK)])
2214 ANTIC_IN[BLOCK] = clean(ANTIC_OUT[BLOCK] U EXP_GEN[BLOCK] - TMP_GEN[BLOCK])
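/* Standalone sketch of the equations above on a four-block diamond
   (0 -> {1,2} -> 3), iterated to a fixpoint.  Sets are toy bitmaps over
   value ids; phi translation and clean() are omitted, so ANTIC_OUT is
   just the intersection (or copy) of the successors' ANTIC_IN.  The
   CFG and set contents below are hypothetical.  */

#include <stdint.h>
#include <stdio.h>

#define NBLOCKS 4

static const int succs[NBLOCKS][2] = { {1, 2}, {3, -1}, {3, -1}, {-1, -1} };
static const uint32_t exp_gen[NBLOCKS] = { 0x1, 0x6, 0x4, 0x8 };
static const uint32_t tmp_gen[NBLOCKS] = { 0x0, 0x2, 0x0, 0x0 };

int
main (void)
{
  uint32_t antic_in[NBLOCKS] = { 0 };
  int changed = 1;

  while (changed)
    {
      changed = 0;
      /* Visit in reverse program order, as the real pass does.  */
      for (int b = NBLOCKS - 1; b >= 0; b--)
        {
          uint32_t antic_out;
          if (succs[b][0] < 0)
            antic_out = 0;                          /* no successors     */
          else if (succs[b][1] < 0)
            antic_out = antic_in[succs[b][0]];      /* single successor  */
          else
            antic_out = antic_in[succs[b][0]]       /* intersection      */
                        & antic_in[succs[b][1]];
          /* ANTIC_IN = (ANTIC_OUT U EXP_GEN) - TMP_GEN, without clean().  */
          uint32_t in = (antic_out | exp_gen[b]) & ~tmp_gen[b];
          if (in != antic_in[b])
            {
              antic_in[b] = in;
              changed = 1;
            }
        }
    }

  for (int b = 0; b < NBLOCKS; b++)
    printf ("ANTIC_IN[%d] = %#x\n", b, antic_in[b]);
  return 0;
}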
2218 compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
2220 bool changed = false;
2221 bitmap_set_t S, old, ANTIC_OUT;
2227 old = ANTIC_OUT = S = NULL;
2228 BB_VISITED (block) = 1;
2230 /* If any edges from predecessors are abnormal, antic_in is empty, so do nothing. */
2232 if (block_has_abnormal_pred_edge)
2233 goto maybe_dump_sets;
2235 old = ANTIC_IN (block);
2236 ANTIC_OUT = bitmap_set_new ();
2238 /* If the block has no successors, ANTIC_OUT is empty. */
2239 if (EDGE_COUNT (block->succs) == 0)
2241 /* If we have one successor, we could have some phi nodes to
2242 translate through. */
2243 else if (single_succ_p (block))
2245 basic_block succ_bb = single_succ (block);
2247 /* We trade iterations of the dataflow equations for having to
2248 phi translate the maximal set, which is incredibly slow
2249 (since the maximal set often has 300+ members, even when you
2250 have a small number of blocks).
2251 Basically, we defer the computation of ANTIC for this block
2252 until we have processed its successor, which will inevitably
2253 have a *much* smaller set of values to phi translate once
2254 clean has been run on it.
2255 The cost of doing this is that we technically perform more
2256 iterations; however, they are lower cost iterations.
2258 Timings for PRE on tramp3d-v4:
2259 without maximal set fix: 11 seconds
2260 with maximal set fix/without deferring: 26 seconds
2261 with maximal set fix/with deferring: 11 seconds
2264 if (!defer_or_phi_translate_block (ANTIC_OUT, ANTIC_IN (succ_bb),
2268 goto maybe_dump_sets;
2271 /* If we have multiple successors, we take the intersection of all of
2272 them. Note that in the case of loop exit phi nodes, we may have
2273 phis to translate through. */
2276 VEC(basic_block, heap) * worklist;
2278 basic_block bprime, first = NULL;
2280 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2281 FOR_EACH_EDGE (e, ei, block->succs)
2284 && BB_VISITED (e->dest))
2286 else if (BB_VISITED (e->dest))
2287 VEC_quick_push (basic_block, worklist, e->dest);
2290 /* With multiple successors, we have to have visited one of them already. */
2293 SET_BIT (changed_blocks, block->index);
2294 BB_VISITED (block) = 0;
2295 BB_DEFERRED (block) = 1;
2297 VEC_free (basic_block, heap, worklist);
2298 goto maybe_dump_sets;
2301 if (!gimple_seq_empty_p (phi_nodes (first)))
2302 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2304 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
2306 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
2308 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2310 bitmap_set_t tmp = bitmap_set_new ();
2311 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2312 bitmap_set_and (ANTIC_OUT, tmp);
2313 bitmap_set_free (tmp);
2316 bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
2318 VEC_free (basic_block, heap, worklist);
2321 /* Generate ANTIC_OUT - TMP_GEN. */
2322 S = bitmap_set_subtract (ANTIC_OUT, TMP_GEN (block));
2324 /* Start ANTIC_IN with EXP_GEN - TMP_GEN. */
2325 ANTIC_IN (block) = bitmap_set_subtract (EXP_GEN (block),
2328 /* Then union in the ANTIC_OUT - TMP_GEN values,
2329 to get ANTIC_OUT U EXP_GEN - TMP_GEN */
2330 FOR_EACH_EXPR_ID_IN_SET (S, bii, bi)
2331 bitmap_value_insert_into_set (ANTIC_IN (block),
2332 expression_for_id (bii));
2334 clean (ANTIC_IN (block), block);
2336 if (!bitmap_set_equal (old, ANTIC_IN (block)))
2339 SET_BIT (changed_blocks, block->index);
2340 FOR_EACH_EDGE (e, ei, block->preds)
2341 SET_BIT (changed_blocks, e->src->index);
2344 RESET_BIT (changed_blocks, block->index);
2347 if (dump_file && (dump_flags & TDF_DETAILS))
2349 if (!BB_DEFERRED (block) || BB_VISITED (block))
2352 print_bitmap_set (dump_file, ANTIC_OUT, "ANTIC_OUT", block->index);
2354 print_bitmap_set (dump_file, ANTIC_IN (block), "ANTIC_IN",
2358 print_bitmap_set (dump_file, S, "S", block->index);
2363 "Block %d was deferred for a future iteration.\n",
2368 bitmap_set_free (old);
2370 bitmap_set_free (S);
2372 bitmap_set_free (ANTIC_OUT);
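/* A standalone, hypothetical sketch (not part of this file) of the local
   ANTIC_IN step performed above, using plain machine-word bitmasks as toy
   value sets: ANTIC_IN = clean ((ANTIC_OUT U EXP_GEN) - TMP_GEN).  The
   "clean" step is only approximated here by intersecting with the values
   that still have an available leader.  */

typedef unsigned long toy_value_set;

static toy_value_set
toy_local_antic_in (toy_value_set antic_out, toy_value_set exp_gen,
		    toy_value_set tmp_gen, toy_value_set has_leader)
{
  /* ANTIC_OUT - TMP_GEN unioned with EXP_GEN - TMP_GEN, i.e.
     (ANTIC_OUT U EXP_GEN) - TMP_GEN.  */
  toy_value_set s = (antic_out | exp_gen) & ~tmp_gen;

  /* Rough stand-in for clean (): drop values without an available leader.  */
  return s & has_leader;
}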
2376 /* Compute PARTIAL_ANTIC for BLOCK.
2378 If succs(BLOCK) > 1 then
2379 PA_OUT[BLOCK] = value-wise union of PA_IN[b] + all ANTIC_IN not
2380 in ANTIC_OUT for all succ(BLOCK)
2381 else if succs(BLOCK) == 1 then
2382 PA_OUT[BLOCK] = phi_translate (PA_IN[succ(BLOCK)])
2384 PA_IN[BLOCK] = dependent_clean(PA_OUT[BLOCK] - TMP_GEN[BLOCK] - ANTIC_IN[BLOCK])
2389 compute_partial_antic_aux (basic_block block,
2390 bool block_has_abnormal_pred_edge)
2392 bool changed = false;
2393 bitmap_set_t old_PA_IN;
2394 bitmap_set_t PA_OUT;
2397 unsigned long max_pa = PARAM_VALUE (PARAM_MAX_PARTIAL_ANTIC_LENGTH);
2399 old_PA_IN = PA_OUT = NULL;
2401 /* If any edges from predecessors are abnormal, antic_in is empty,
2403 if (block_has_abnormal_pred_edge)
2404 goto maybe_dump_sets;
2406 /* If there are too many partially anticipatable values in the
2407 block, phi_translate_set can take an exponential time: stop
2408 before the translation starts. */
2410 && single_succ_p (block)
2411 && bitmap_count_bits (&PA_IN (single_succ (block))->values) > max_pa)
2412 goto maybe_dump_sets;
2414 old_PA_IN = PA_IN (block);
2415 PA_OUT = bitmap_set_new ();
2417 /* If the block has no successors, PA_OUT is empty. */
2418 if (EDGE_COUNT (block->succs) == 0)
2420 /* If we have one successor, we could have some phi nodes to
2421 translate through. Note that we can't phi translate across DFS
2422 back edges in partial antic, because it uses a union operation on
2423 the successors. For recurrences like IVs, we will end up
2424 generating a new value in the set on each go around (i + 3 (VH.1),
2425 VH.1 + 1 (VH.2), VH.2 + 1 (VH.3), etc.), forever. */
2426 else if (single_succ_p (block))
2428 basic_block succ = single_succ (block);
2429 if (!(single_succ_edge (block)->flags & EDGE_DFS_BACK))
2430 phi_translate_set (PA_OUT, PA_IN (succ), block, succ);
2432 /* If we have multiple successors, we take the union of all of
2436 VEC(basic_block, heap) * worklist;
2440 worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
2441 FOR_EACH_EDGE (e, ei, block->succs)
2443 if (e->flags & EDGE_DFS_BACK)
2445 VEC_quick_push (basic_block, worklist, e->dest);
2447 if (VEC_length (basic_block, worklist) > 0)
2449 FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
2454 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2455 bitmap_value_insert_into_set (PA_OUT,
2456 expression_for_id (i));
2457 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2459 bitmap_set_t pa_in = bitmap_set_new ();
2460 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2461 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2462 bitmap_value_insert_into_set (PA_OUT,
2463 expression_for_id (i));
2464 bitmap_set_free (pa_in);
2467 FOR_EACH_EXPR_ID_IN_SET (PA_IN (bprime), i, bi)
2468 bitmap_value_insert_into_set (PA_OUT,
2469 expression_for_id (i));
2472 VEC_free (basic_block, heap, worklist);
2475 /* PA_IN starts with PA_OUT - TMP_GEN.
2476 Then we subtract the values that are also in ANTIC_IN. */
2477 PA_IN (block) = bitmap_set_subtract (PA_OUT, TMP_GEN (block));
2479 /* For partial antic, we want to put back in the phi results, since
2480 we will properly avoid making them partially antic over backedges. */
2481 bitmap_ior_into (&PA_IN (block)->values, &PHI_GEN (block)->values);
2482 bitmap_ior_into (&PA_IN (block)->expressions, &PHI_GEN (block)->expressions);
2484 /* PA_IN[block] = PA_IN[block] - ANTIC_IN[block] */
2485 bitmap_set_subtract_values (PA_IN (block), ANTIC_IN (block));
2487 dependent_clean (PA_IN (block), ANTIC_IN (block), block);
2489 if (!bitmap_set_equal (old_PA_IN, PA_IN (block)))
2492 SET_BIT (changed_blocks, block->index);
2493 FOR_EACH_EDGE (e, ei, block->preds)
2494 SET_BIT (changed_blocks, e->src->index);
2497 RESET_BIT (changed_blocks, block->index);
2500 if (dump_file && (dump_flags & TDF_DETAILS))
2503 print_bitmap_set (dump_file, PA_OUT, "PA_OUT", block->index);
2505 print_bitmap_set (dump_file, PA_IN (block), "PA_IN", block->index);
2508 bitmap_set_free (old_PA_IN);
2510 bitmap_set_free (PA_OUT);
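/* A matching hypothetical sketch (again standalone, not part of this file)
   of the PA_IN step above, on the same toy bitmask representation used in
   the sketch after compute_antic_aux.  Phi translation is omitted and
   dependent_clean is approximated by the has_leader mask.  */

static unsigned long
toy_local_pa_in (unsigned long pa_out, unsigned long tmp_gen,
		 unsigned long phi_gen, unsigned long antic_in,
		 unsigned long has_leader)
{
  unsigned long pa_in = pa_out & ~tmp_gen;	/* PA_OUT - TMP_GEN  */
  pa_in |= phi_gen;				/* put phi results back in  */
  pa_in &= ~antic_in;				/* minus fully anticipatable values  */
  return pa_in & has_leader;			/* crude dependent_clean stand-in  */
}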
2514 /* Compute ANTIC and partial ANTIC sets. */
2517 compute_antic (void)
2519 bool changed = true;
2520 int num_iterations = 0;
2524 /* If any predecessor edges are abnormal, we punt, so antic_in is empty.
2525 We pre-build the map of blocks with incoming abnormal edges here. */
2526 has_abnormal_preds = sbitmap_alloc (last_basic_block);
2527 sbitmap_zero (has_abnormal_preds);
2534 FOR_EACH_EDGE (e, ei, block->preds)
2536 e->flags &= ~EDGE_DFS_BACK;
2537 if (e->flags & EDGE_ABNORMAL)
2539 SET_BIT (has_abnormal_preds, block->index);
2544 BB_VISITED (block) = 0;
2545 BB_DEFERRED (block) = 0;
2547 /* While we are here, give empty ANTIC_IN sets to each block. */
2548 ANTIC_IN (block) = bitmap_set_new ();
2549 PA_IN (block) = bitmap_set_new ();
2552 /* At the exit block we anticipate nothing. */
2553 ANTIC_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2554 BB_VISITED (EXIT_BLOCK_PTR) = 1;
2555 PA_IN (EXIT_BLOCK_PTR) = bitmap_set_new ();
2557 changed_blocks = sbitmap_alloc (last_basic_block + 1);
2558 sbitmap_ones (changed_blocks);
2561 if (dump_file && (dump_flags & TDF_DETAILS))
2562 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2563 /* ??? We need to clear our PHI translation cache here as the
2564 ANTIC sets shrink and we restrict valid translations to
2565 those having operands with leaders in ANTIC. Same below
2566 for PA ANTIC computation. */
2569 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
2571 if (TEST_BIT (changed_blocks, postorder[i]))
2573 basic_block block = BASIC_BLOCK (postorder[i]);
2574 changed |= compute_antic_aux (block,
2575 TEST_BIT (has_abnormal_preds,
2579 /* Theoretically possible, but *highly* unlikely. */
2580 gcc_checking_assert (num_iterations < 500);
2583 statistics_histogram_event (cfun, "compute_antic iterations",
2586 if (do_partial_partial)
2588 sbitmap_ones (changed_blocks);
2589 mark_dfs_back_edges ();
2594 if (dump_file && (dump_flags & TDF_DETAILS))
2595 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2598 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
2600 if (TEST_BIT (changed_blocks, postorder[i]))
2602 basic_block block = BASIC_BLOCK (postorder[i]);
2604 |= compute_partial_antic_aux (block,
2605 TEST_BIT (has_abnormal_preds,
2609 /* Theoretically possible, but *highly* unlikely. */
2610 gcc_checking_assert (num_iterations < 500);
2612 statistics_histogram_event (cfun, "compute_partial_antic iterations",
2615 sbitmap_free (has_abnormal_preds);
2616 sbitmap_free (changed_blocks);
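/* Hypothetical standalone sketch (not part of this file) of the
   changed_blocks-driven fixpoint used by compute_antic above: sweep the
   blocks in a fixed (post-)order, recompute each one, and iterate until a
   whole sweep changes nothing.  The real code additionally skips blocks not
   marked in changed_blocks and only re-marks the predecessors of blocks
   whose sets changed; the transfer callback here is an assumed stand-in
   for compute_antic_aux.  */

static void
toy_antic_fixpoint (int nblocks, unsigned long *antic_in,
		    unsigned long (*transfer) (int block,
					       const unsigned long *antic_in))
{
  int changed = 1;
  while (changed)
    {
      int i;
      changed = 0;
      for (i = 0; i < nblocks; i++)
	{
	  unsigned long new_in = transfer (i, antic_in);
	  if (new_in != antic_in[i])
	    {
	      antic_in[i] = new_in;
	      changed = 1;
	    }
	}
    }
}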
2619 /* Return true if OP is a tree which we can perform PRE on.
2620 This may not match the operations we can value number, but in
2621 a perfect world would. */
2624 can_PRE_operation (tree op)
2626 return UNARY_CLASS_P (op)
2627 || BINARY_CLASS_P (op)
2628 || COMPARISON_CLASS_P (op)
2629 || TREE_CODE (op) == MEM_REF
2630 || TREE_CODE (op) == COMPONENT_REF
2631 || TREE_CODE (op) == VIEW_CONVERT_EXPR
2632 || TREE_CODE (op) == CALL_EXPR
2633 || TREE_CODE (op) == ARRAY_REF;
2637 /* Inserted expressions are placed onto this worklist, which is used
2638 for performing quick dead code elimination of insertions we made
2639 that didn't turn out to be necessary. */
2640 static bitmap inserted_exprs;
2642 /* Pool allocated fake store expressions are placed onto this
2643 worklist, which, after performing dead code elimination, is walked
2644 to see which expressions need to be put into GC'able memory. */
2645 static VEC(gimple, heap) *need_creation;
2647 /* The actual worker for create_component_ref_by_pieces. */
2650 create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
2651 unsigned int *operand, gimple_seq *stmts,
2654 vn_reference_op_t currop = VEC_index (vn_reference_op_s, ref->operands,
2658 switch (currop->opcode)
2662 tree folded, sc = NULL_TREE;
2663 unsigned int nargs = 0;
2665 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2669 pre_expr op0 = get_or_alloc_expr_for (currop->op0);
2670 fn = find_or_generate_expression (block, op0, stmts, domstmt);
2676 pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
2677 sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2681 args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
2682 ref->operands) - 1);
2683 while (*operand < VEC_length (vn_reference_op_s, ref->operands))
2685 args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2695 folded = build_call_array (currop->type,
2696 (TREE_CODE (fn) == FUNCTION_DECL
2697 ? build_fold_addr_expr (fn) : fn),
2701 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2707 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2709 tree offset = currop->op0;
2712 if (TREE_CODE (baseop) == ADDR_EXPR
2713 && handled_component_p (TREE_OPERAND (baseop, 0)))
2717 base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
2720 offset = int_const_binop (PLUS_EXPR, offset,
2721 build_int_cst (TREE_TYPE (offset),
2723 baseop = build_fold_addr_expr (base);
2725 return fold_build2 (MEM_REF, currop->type, baseop, offset);
2728 case TARGET_MEM_REF:
2730 pre_expr op0expr, op1expr;
2731 tree genop0 = NULL_TREE, genop1 = NULL_TREE;
2732 vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
2734 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
2740 op0expr = get_or_alloc_expr_for (currop->op0);
2741 genop0 = find_or_generate_expression (block, op0expr,
2748 op1expr = get_or_alloc_expr_for (nextop->op0);
2749 genop1 = find_or_generate_expression (block, op1expr,
2754 return build5 (TARGET_MEM_REF, currop->type,
2755 baseop, currop->op2, genop0, currop->op1, genop1);
2761 gcc_assert (is_gimple_min_invariant (currop->op0));
2767 case VIEW_CONVERT_EXPR:
2770 tree genop0 = create_component_ref_by_pieces_1 (block, ref,
2775 folded = fold_build1 (currop->opcode, currop->type,
2780 case WITH_SIZE_EXPR:
2782 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2784 pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2790 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2794 return fold_build2 (currop->opcode, currop->type, genop0, genop1);
2800 tree genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2802 pre_expr op1expr = get_or_alloc_expr_for (currop->op0);
2803 pre_expr op2expr = get_or_alloc_expr_for (currop->op1);
2809 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2812 genop2 = find_or_generate_expression (block, op2expr, stmts, domstmt);
2815 folded = fold_build3 (BIT_FIELD_REF, currop->type, genop0, genop1,
2820 /* For array ref vn_reference_op's, operand 1 of the array ref
2821 is op0 of the reference op and operand 3 of the array ref is
2823 case ARRAY_RANGE_REF:
2827 tree genop1 = currop->op0;
2829 tree genop2 = currop->op1;
2831 tree genop3 = currop->op2;
2833 genop0 = create_component_ref_by_pieces_1 (block, ref, operand,
2837 op1expr = get_or_alloc_expr_for (genop1);
2838 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2843 tree domain_type = TYPE_DOMAIN (TREE_TYPE (genop0));
2844 /* Drop zero minimum index if redundant. */
2845 if (integer_zerop (genop2)
2847 || integer_zerop (TYPE_MIN_VALUE (domain_type))))
2851 op2expr = get_or_alloc_expr_for (genop2);
2852 genop2 = find_or_generate_expression (block, op2expr, stmts,
2860 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2861 /* We can't always put a size in units of the element alignment
2862 here as the element alignment may not be visible. See
2863 PR43783. Simply drop the element size for constant
2865 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2869 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2870 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2871 op3expr = get_or_alloc_expr_for (genop3);
2872 genop3 = find_or_generate_expression (block, op3expr, stmts,
2878 return build4 (currop->opcode, currop->type, genop0, genop1,
2885 tree genop2 = currop->op1;
2887 op0 = create_component_ref_by_pieces_1 (block, ref, operand,
2891 /* op1 should be a FIELD_DECL, which are represented by
2896 op2expr = get_or_alloc_expr_for (genop2);
2897 genop2 = find_or_generate_expression (block, op2expr, stmts,
2903 return fold_build3 (COMPONENT_REF, TREE_TYPE (op1), op0, op1,
2909 pre_expr op0expr = get_or_alloc_expr_for (currop->op0);
2910 genop = find_or_generate_expression (block, op0expr, stmts, domstmt);
2931 /* For COMPONENT_REFs and ARRAY_REFs, we can't have any intermediates for the
2932 COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up
2933 trying to rename aggregates into SSA form directly, which is not allowed.
2935 Thus, this routine doesn't create temporaries, it just builds a
2936 single access expression for the array, calling
2937 find_or_generate_expression to build the innermost pieces.
2939 This function is a subroutine of create_expression_by_pieces, and
2940 should not be called on its own unless you really know what you
2944 create_component_ref_by_pieces (basic_block block, vn_reference_t ref,
2945 gimple_seq *stmts, gimple domstmt)
2947 unsigned int op = 0;
2948 return create_component_ref_by_pieces_1 (block, ref, &op, stmts, domstmt);
2951 /* Find a leader for an expression, or generate one using
2952 create_expression_by_pieces if it's ANTIC but
2954 BLOCK is the basic_block we are looking for leaders in.
2955 EXPR is the expression to find a leader or generate for.
2956 STMTS is the statement list to put the inserted expressions on.
2957 Returns the SSA_NAME of the LHS of the generated expression or the
2959 DOMSTMT if non-NULL is a statement that should be dominated by
2960 all uses in the generated expression. If DOMSTMT is non-NULL this
2961 routine can fail and return NULL_TREE. Otherwise it will assert
2965 find_or_generate_expression (basic_block block, pre_expr expr,
2966 gimple_seq *stmts, gimple domstmt)
2968 pre_expr leader = bitmap_find_leader (AVAIL_OUT (block),
2969 get_expr_value_id (expr), domstmt);
2973 if (leader->kind == NAME)
2974 genop = PRE_EXPR_NAME (leader);
2975 else if (leader->kind == CONSTANT)
2976 genop = PRE_EXPR_CONSTANT (leader);
2979 /* If it's still NULL, it must be a complex expression, so generate
2980 it recursively. Not so if inserting expressions for values generated
2985 bitmap_set_t exprset;
2986 unsigned int lookfor = get_expr_value_id (expr);
2987 bool handled = false;
2991 exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
2992 FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
2994 pre_expr temp = expression_for_id (i);
2995 if (temp->kind != NAME)
2998 genop = create_expression_by_pieces (block, temp, stmts,
3000 get_expr_type (expr));
3004 if (!handled && domstmt)
3007 gcc_assert (handled);
3012 #define NECESSARY GF_PLF_1
3014 /* Create an expression in pieces, so that we can handle very complex
3015 expressions that may be ANTIC, but not necessarily GIMPLE.
3016 BLOCK is the basic block the expression will be inserted into,
3017 EXPR is the expression to insert (in value form)
3018 STMTS is a statement list to append the necessary insertions into.
3020 This function will die if we hit some value that shouldn't be
3021 ANTIC but is (i.e. there is no leader for it or its components).
3022 This function may also generate expressions that are themselves
3023 partially or fully redundant. Those that are will be either made
3024 fully redundant during the next iteration of insert (for partially
3025 redundant ones), or eliminated by eliminate (for fully redundant
3028 If DOMSTMT is non-NULL then we make sure that all uses in the
3029 expressions dominate that statement. In this case the function
3030 can return NULL_TREE to signal failure. */
3033 create_expression_by_pieces (basic_block block, pre_expr expr,
3034 gimple_seq *stmts, gimple domstmt, tree type)
3038 gimple_seq forced_stmts = NULL;
3039 unsigned int value_id;
3040 gimple_stmt_iterator gsi;
3041 tree exprtype = type ? type : get_expr_type (expr);
3047 /* We may hit the NAME/CONSTANT case if we have to convert types
3048 that value numbering saw through. */
3050 folded = PRE_EXPR_NAME (expr);
3053 folded = PRE_EXPR_CONSTANT (expr);
3057 vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
3058 folded = create_component_ref_by_pieces (block, ref, stmts, domstmt);
3063 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
3064 tree *genop = XALLOCAVEC (tree, nary->length);
3066 for (i = 0; i < nary->length; ++i)
3068 pre_expr op = get_or_alloc_expr_for (nary->op[i]);
3069 genop[i] = find_or_generate_expression (block, op,
3073 /* Ensure genop[] is properly typed for POINTER_PLUS_EXPR. It
3074 may have conversions stripped. */
3075 if (nary->opcode == POINTER_PLUS_EXPR)
3078 genop[i] = fold_convert (nary->type, genop[i]);
3080 genop[i] = convert_to_ptrofftype (genop[i]);
3083 genop[i] = fold_convert (TREE_TYPE (nary->op[i]), genop[i]);
3085 if (nary->opcode == CONSTRUCTOR)
3087 VEC(constructor_elt,gc) *elts = NULL;
3088 for (i = 0; i < nary->length; ++i)
3089 CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
3090 folded = build_constructor (nary->type, elts);
3094 switch (nary->length)
3097 folded = fold_build1 (nary->opcode, nary->type,
3101 folded = fold_build2 (nary->opcode, nary->type,
3102 genop[0], genop[1]);
3105 folded = fold_build3 (nary->opcode, nary->type,
3106 genop[0], genop[1], genop[2]);
3118 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded)))
3119 folded = fold_convert (exprtype, folded);
3121 /* Force the generated expression to be a sequence of GIMPLE
3123 We have to call unshare_expr because force_gimple_operand may
3124 modify the tree we pass to it. */
3125 folded = force_gimple_operand (unshare_expr (folded), &forced_stmts,
3128 /* If forcing the expression produced any intermediate statements, add
3129 their results to the value sets and chain them into the instruction stream. */
3132 gsi = gsi_start (forced_stmts);
3133 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3135 gimple stmt = gsi_stmt (gsi);
3136 tree forcedname = gimple_get_lhs (stmt);
3139 if (TREE_CODE (forcedname) == SSA_NAME)
3141 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
3142 VN_INFO_GET (forcedname)->valnum = forcedname;
3143 VN_INFO (forcedname)->value_id = get_next_value_id ();
3144 nameexpr = get_or_alloc_expr_for_name (forcedname);
3145 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3147 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3148 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3150 mark_symbols_for_renaming (stmt);
3152 gimple_seq_add_seq (stmts, forced_stmts);
3155 /* Build and insert the assignment of the end result to the temporary
3156 that we will return. */
3157 if (!pretemp || exprtype != TREE_TYPE (pretemp))
3158 pretemp = create_tmp_reg (exprtype, "pretmp");
3161 add_referenced_var (temp);
3163 newstmt = gimple_build_assign (temp, folded);
3164 name = make_ssa_name (temp, newstmt);
3165 gimple_assign_set_lhs (newstmt, name);
3166 gimple_set_plf (newstmt, NECESSARY, false);
3168 gimple_seq_add_stmt (stmts, newstmt);
3169 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
3171 /* All the symbols in the newly built statement should be put into SSA form. */
3172 mark_symbols_for_renaming (newstmt);
3174 /* Fold the last statement. */
3175 gsi = gsi_last (*stmts);
3176 if (fold_stmt_inplace (&gsi))
3177 update_stmt (gsi_stmt (gsi));
3179 /* Add a value number to the temporary.
3180 The value may already exist in either NEW_SETS, or AVAIL_OUT, because
3181 we are creating the expression by pieces, and this particular piece of
3182 the expression may have been represented. There is no harm in replacing
3184 VN_INFO_GET (name)->valnum = name;
3185 value_id = get_expr_value_id (expr);
3186 VN_INFO (name)->value_id = value_id;
3187 nameexpr = get_or_alloc_expr_for_name (name);
3188 add_to_value (value_id, nameexpr);
3189 if (NEW_SETS (block))
3190 bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
3191 bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
3193 pre_stats.insertions++;
3194 if (dump_file && (dump_flags & TDF_DETAILS))
3196 fprintf (dump_file, "Inserted ");
3197 print_gimple_stmt (dump_file, newstmt, 0, 0);
3198 fprintf (dump_file, " in predecessor %d\n", block->index);
3205 /* Returns true if we want to inhibit the insertion of PHI nodes
3206 for the given EXPR in basic block BB (a member of a loop).
3207 We want to do this when we fear that the induction variable we
3208 create might inhibit vectorization. */
3211 inhibit_phi_insertion (basic_block bb, pre_expr expr)
3213 vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
3214 VEC (vn_reference_op_s, heap) *ops = vr->operands;
3215 vn_reference_op_t op;
3218 /* If we aren't going to vectorize we don't inhibit anything. */
3219 if (!flag_tree_vectorize)
3222 /* Otherwise we inhibit the insertion when the address of the
3223 memory reference is a simple induction variable. In other
3224 cases the vectorizer won't do anything anyway (either it's
3225 loop invariant or a complicated expression). */
3226 FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
3231 case ARRAY_RANGE_REF:
3232 if (TREE_CODE (op->op0) != SSA_NAME)
3237 basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
3239 /* Default defs are loop invariant. */
3242 /* Defined outside this loop, also loop invariant. */
3243 if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
3245 /* If it's a simple induction variable, inhibit insertion;
3246 the vectorizer might be interested in this one. */
3247 if (simple_iv (bb->loop_father, bb->loop_father,
3248 op->op0, &iv, true))
3250 /* No simple IV, vectorizer can't do anything, hence no
3251 reason to inhibit the transformation for this operand. */
3261 /* Insert the to-be-made-available values of expression EXPRNUM for each
3262 predecessor, stored in AVAIL, into the predecessors of BLOCK, and
3263 merge the result with a phi node, given the same value number as
3264 NODE. Return true if we have inserted new stuff. */
3267 insert_into_preds_of_block (basic_block block, unsigned int exprnum,
3270 pre_expr expr = expression_for_id (exprnum);
3272 unsigned int val = get_expr_value_id (expr);
3274 bool insertions = false;
3279 tree type = get_expr_type (expr);
3283 if (dump_file && (dump_flags & TDF_DETAILS))
3285 fprintf (dump_file, "Found partial redundancy for expression ");
3286 print_pre_expr (dump_file, expr);
3287 fprintf (dump_file, " (%04d)\n", val);
3290 /* Make sure we aren't creating an induction variable. */
3291 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2)
3293 bool firstinsideloop = false;
3294 bool secondinsideloop = false;
3295 firstinsideloop = flow_bb_inside_loop_p (block->loop_father,
3296 EDGE_PRED (block, 0)->src);
3297 secondinsideloop = flow_bb_inside_loop_p (block->loop_father,
3298 EDGE_PRED (block, 1)->src);
3299 /* Induction variables only have one edge inside the loop. */
3300 if ((firstinsideloop ^ secondinsideloop)
3301 && (expr->kind != REFERENCE
3302 || inhibit_phi_insertion (block, expr)))
3304 if (dump_file && (dump_flags & TDF_DETAILS))
3305 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3310 /* Make the necessary insertions. */
3311 FOR_EACH_EDGE (pred, ei, block->preds)
3313 gimple_seq stmts = NULL;
3316 eprime = avail[bprime->index];
3318 if (eprime->kind != NAME && eprime->kind != CONSTANT)
3320 builtexpr = create_expression_by_pieces (bprime,
3324 gcc_assert (!(pred->flags & EDGE_ABNORMAL));
3325 gsi_insert_seq_on_edge (pred, stmts);
3326 avail[bprime->index] = get_or_alloc_expr_for_name (builtexpr);
3329 else if (eprime->kind == CONSTANT)
3331 /* Constants may not have the right type, fold_convert
3332 should give us back a constant with the right type.
3334 tree constant = PRE_EXPR_CONSTANT (eprime);
3335 if (!useless_type_conversion_p (type, TREE_TYPE (constant)))
3337 tree builtexpr = fold_convert (type, constant);
3338 if (!is_gimple_min_invariant (builtexpr))
3340 tree forcedexpr = force_gimple_operand (builtexpr,
3343 if (!is_gimple_min_invariant (forcedexpr))
3345 if (forcedexpr != builtexpr)
3347 VN_INFO_GET (forcedexpr)->valnum = PRE_EXPR_CONSTANT (eprime);
3348 VN_INFO (forcedexpr)->value_id = get_expr_value_id (eprime);
3352 gimple_stmt_iterator gsi;
3353 gsi = gsi_start (stmts);
3354 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3356 gimple stmt = gsi_stmt (gsi);
3357 tree lhs = gimple_get_lhs (stmt);
3358 if (TREE_CODE (lhs) == SSA_NAME)
3359 bitmap_set_bit (inserted_exprs,
3360 SSA_NAME_VERSION (lhs));
3361 gimple_set_plf (stmt, NECESSARY, false);
3363 gsi_insert_seq_on_edge (pred, stmts);
3365 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3369 avail[bprime->index] = get_or_alloc_expr_for_constant (builtexpr);
3372 else if (eprime->kind == NAME)
3374 /* We may have to do a conversion because our value
3375 numbering can look through types in certain cases, but
3376 our IL requires all operands of a phi node have the same
3378 tree name = PRE_EXPR_NAME (eprime);
3379 if (!useless_type_conversion_p (type, TREE_TYPE (name)))
3383 builtexpr = fold_convert (type, name);
3384 forcedexpr = force_gimple_operand (builtexpr,
3388 if (forcedexpr != name)
3390 VN_INFO_GET (forcedexpr)->valnum = VN_INFO (name)->valnum;
3391 VN_INFO (forcedexpr)->value_id = VN_INFO (name)->value_id;
3396 gimple_stmt_iterator gsi;
3397 gsi = gsi_start (stmts);
3398 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3400 gimple stmt = gsi_stmt (gsi);
3401 tree lhs = gimple_get_lhs (stmt);
3402 if (TREE_CODE (lhs) == SSA_NAME)
3403 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
3404 gimple_set_plf (stmt, NECESSARY, false);
3406 gsi_insert_seq_on_edge (pred, stmts);
3408 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3412 /* If we didn't want a phi node, and we made insertions, we still have
3413 inserted new stuff, and thus return true. If we didn't want a phi node,
3414 and didn't make insertions, we haven't added anything new, so return false. */
3416 if (nophi && insertions)
3418 else if (nophi && !insertions)
3421 /* Now build a phi for the new variable. */
3422 if (!prephitemp || TREE_TYPE (prephitemp) != type)
3423 prephitemp = create_tmp_var (type, "prephitmp");
3426 add_referenced_var (temp);
3428 if (TREE_CODE (type) == COMPLEX_TYPE
3429 || TREE_CODE (type) == VECTOR_TYPE)
3430 DECL_GIMPLE_REG_P (temp) = 1;
3431 phi = create_phi_node (temp, block);
3433 gimple_set_plf (phi, NECESSARY, false);
3434 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
3435 VN_INFO (gimple_phi_result (phi))->value_id = val;
3436 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (gimple_phi_result (phi)));
3437 FOR_EACH_EDGE (pred, ei, block->preds)
3439 pre_expr ae = avail[pred->src->index];
3440 gcc_assert (get_expr_type (ae) == type
3441 || useless_type_conversion_p (type, get_expr_type (ae)));
3442 if (ae->kind == CONSTANT)
3443 add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION);
3445 add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred,
3449 newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi));
3450 add_to_value (val, newphi);
3452 /* The value should *not* exist in PHI_GEN, or else we wouldn't be doing
3453 this insertion, since we test for the existence of this value in PHI_GEN
3454 before proceeding with the partial redundancy checks in insert_aux.
3456 The value may exist in AVAIL_OUT, in particular, it could be represented
3457 by the expression we are trying to eliminate, in which case we want the
3458 replacement to occur. If it does not exist in AVAIL_OUT, we want it
3461 Similarly to the PHI_GEN case, the value should not exist in NEW_SETS of
3462 this block, because if it did, it would have existed in our dominator's
3463 AVAIL_OUT, and would have been skipped due to the full redundancy check.
3466 bitmap_insert_into_set (PHI_GEN (block), newphi);
3467 bitmap_value_replace_in_set (AVAIL_OUT (block),
3469 bitmap_insert_into_set (NEW_SETS (block),
3472 if (dump_file && (dump_flags & TDF_DETAILS))
3474 fprintf (dump_file, "Created phi ");
3475 print_gimple_stmt (dump_file, phi, 0, 0);
3476 fprintf (dump_file, " in block %d\n", block->index);
3484 /* Perform insertion of partially redundant values.
3485 For BLOCK, do the following:
3486 1. Propagate the NEW_SETS of the dominator into the current block.
3487 If the block has multiple predecessors,
3488 2a. Iterate over the ANTIC expressions for the block to see if
3489 any of them are partially redundant.
3490 2b. If so, insert them into the necessary predecessors to make
3491 the expression fully redundant.
3492 2c. Insert a new PHI merging the values of the predecessors.
3493 2d. Insert the new PHI, and the new expressions, into the
3495 3. Recursively call ourselves on the dominator children of BLOCK.
3497 Steps 1, 2a, and 3 are done by insert_aux. 2b, 2c and 2d are done by
3498 do_regular_insertion and do_partial_partial_insertion.
3503 do_regular_insertion (basic_block block, basic_block dom)
3505 bool new_stuff = false;
3506 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
3510 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
3512 if (expr->kind == NARY
3513 || expr->kind == REFERENCE)
3517 bool by_some = false;
3518 bool cant_insert = false;
3519 bool all_same = true;
3520 pre_expr first_s = NULL;
3523 pre_expr eprime = NULL;
3525 pre_expr edoubleprime = NULL;
3526 bool do_insertion = false;
3528 val = get_expr_value_id (expr);
3529 if (bitmap_set_contains_value (PHI_GEN (block), val))
3531 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3533 if (dump_file && (dump_flags & TDF_DETAILS))
3534 fprintf (dump_file, "Found fully redundant value\n");
3538 avail = XCNEWVEC (pre_expr, last_basic_block);
3539 FOR_EACH_EDGE (pred, ei, block->preds)
3541 unsigned int vprime;
3543 /* We should never run insertion for the exit block
3544 and so not come across fake pred edges. */
3545 gcc_assert (!(pred->flags & EDGE_FAKE));
3547 eprime = phi_translate (expr, ANTIC_IN (block), NULL,
3550 /* eprime will generally only be NULL if the
3551 value of the expression, translated
3552 through the PHI for this predecessor, is
3553 undefined. If that is the case, we can't
3554 make the expression fully redundant,
3555 because its value is undefined along a
3556 predecessor path. We can thus break out
3557 early because it doesn't matter what the
3558 rest of the results are. */
3565 eprime = fully_constant_expression (eprime);
3566 vprime = get_expr_value_id (eprime);
3567 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3569 if (edoubleprime == NULL)
3571 avail[bprime->index] = eprime;
3576 avail[bprime->index] = edoubleprime;
3578 /* We want to perform insertions to remove a redundancy on
3579 a path in the CFG we want to optimize for speed. */
3580 if (optimize_edge_for_speed_p (pred))
3581 do_insertion = true;
3582 if (first_s == NULL)
3583 first_s = edoubleprime;
3584 else if (!pre_expr_eq (first_s, edoubleprime))
3588 /* If we can insert it, it is not the same value
3589 already existing along every predecessor, and
3590 it is defined by some predecessor, then it is
3591 partially redundant. */
3592 if (!cant_insert && !all_same && by_some)
3596 if (dump_file && (dump_flags & TDF_DETAILS))
3598 fprintf (dump_file, "Skipping partial redundancy for "
3600 print_pre_expr (dump_file, expr);
3601 fprintf (dump_file, " (%04d), no redundancy on to be "
3602 "optimized for speed edge\n", val);
3605 else if (dbg_cnt (treepre_insert)
3606 && insert_into_preds_of_block (block,
3607 get_expression_id (expr),
3611 /* If all edges produce the same value and that value is
3612 an invariant, then the PHI has the same value on all
3613 edges. Note this. */
3614 else if (!cant_insert && all_same)
3616 tree exprtype = get_expr_type (expr);
3620 gimple_stmt_iterator gsi;
3622 gcc_assert (edoubleprime->kind == CONSTANT
3623 || edoubleprime->kind == NAME);
3625 if (!pretemp || TREE_TYPE (pretemp) != exprtype)
3627 pretemp = create_tmp_reg (exprtype, "pretmp");
3628 add_referenced_var (pretemp);
3630 temp = make_ssa_name (pretemp, NULL);
3631 assign = gimple_build_assign (temp,
3632 edoubleprime->kind == CONSTANT ? PRE_EXPR_CONSTANT (edoubleprime) : PRE_EXPR_NAME (edoubleprime));
3633 gsi = gsi_after_labels (block);
3634 gsi_insert_before (&gsi, assign, GSI_NEW_STMT);
3636 gimple_set_plf (assign, NECESSARY, false);
3637 VN_INFO_GET (temp)->value_id = val;
3638 VN_INFO (temp)->valnum = temp;
3639 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
3640 newe = get_or_alloc_expr_for_name (temp);
3641 add_to_value (val, newe);
3642 bitmap_value_replace_in_set (AVAIL_OUT (block), newe);
3643 bitmap_insert_into_set (NEW_SETS (block), newe);
3649 VEC_free (pre_expr, heap, exprs);
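/* Hypothetical standalone sketch (not part of this file) of the
   partial-redundancy test that drives the loop above: an expression is a
   candidate for insertion when its value is available out of some, but not
   all, predecessors of the block.  avail_in_pred[i] is an assumed toy input,
   nonzero when the i-th predecessor already computes the value.  */

static int
toy_partially_redundant_p (int npreds, const int *avail_in_pred)
{
  int by_some = 0, by_all = 1;
  int i;

  for (i = 0; i < npreds; i++)
    {
      if (avail_in_pred[i])
	by_some = 1;
      else
	by_all = 0;
    }

  /* Available along some edges and missing along others: inserting the
     expression on the edges where it is missing (plus a PHI) makes it
     fully redundant.  A fully available value (by_all) needs no insertion.  */
  return by_some && !by_all;
}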
3654 /* Perform insertion for partially anticipatable expressions. There
3655 is only one case in which we will perform insertion for these: when
3656 the expression is partially anticipatable and fully available.
3657 In this case, we know that putting it earlier will enable us to
3658 remove the later computation. */
3662 do_partial_partial_insertion (basic_block block, basic_block dom)
3664 bool new_stuff = false;
3665 VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (PA_IN (block));
3669 FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
3671 if (expr->kind == NARY
3672 || expr->kind == REFERENCE)
3677 bool cant_insert = false;
3680 pre_expr eprime = NULL;
3683 val = get_expr_value_id (expr);
3684 if (bitmap_set_contains_value (PHI_GEN (block), val))
3686 if (bitmap_set_contains_value (AVAIL_OUT (dom), val))
3689 avail = XCNEWVEC (pre_expr, last_basic_block);
3690 FOR_EACH_EDGE (pred, ei, block->preds)
3692 unsigned int vprime;
3693 pre_expr edoubleprime;
3695 /* We should never run insertion for the exit block
3696 and so not come across fake pred edges. */
3697 gcc_assert (!(pred->flags & EDGE_FAKE));
3699 eprime = phi_translate (expr, ANTIC_IN (block),
3703 /* eprime will generally only be NULL if the
3704 value of the expression, translated
3705 through the PHI for this predecessor, is
3706 undefined. If that is the case, we can't
3707 make the expression fully redundant,
3708 because its value is undefined along a
3709 predecessor path. We can thus break out
3710 early because it doesn't matter what the
3711 rest of the results are. */
3718 eprime = fully_constant_expression (eprime);
3719 vprime = get_expr_value_id (eprime);
3720 edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime),
3722 if (edoubleprime == NULL)
3728 avail[bprime->index] = edoubleprime;
3732 /* If we can insert it and its value is available
3733 along every predecessor, the expression is fully
3734 available there, so hoisting it here removes the
3735 later computations. */
3736 if (!cant_insert && by_all && dbg_cnt (treepre_insert))
3738 pre_stats.pa_insert++;
3739 if (insert_into_preds_of_block (block, get_expression_id (expr),
3747 VEC_free (pre_expr, heap, exprs);
3752 insert_aux (basic_block block)
3755 bool new_stuff = false;
3760 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3765 bitmap_set_t newset = NEW_SETS (dom);
3768 /* Note that we need to do value replacement in both NEW_SETS and
3769 AVAIL_OUT. In either set, the value may currently be
3770 represented by some non-simple expression that we want
3771 to replace with the new one. */
3772 FOR_EACH_EXPR_ID_IN_SET (newset, i, bi)
3774 pre_expr expr = expression_for_id (i);
3775 bitmap_value_replace_in_set (NEW_SETS (block), expr);
3776 bitmap_value_replace_in_set (AVAIL_OUT (block), expr);
3779 if (!single_pred_p (block))
3781 new_stuff |= do_regular_insertion (block, dom);
3782 if (do_partial_partial)
3783 new_stuff |= do_partial_partial_insertion (block, dom);
3787 for (son = first_dom_son (CDI_DOMINATORS, block);
3789 son = next_dom_son (CDI_DOMINATORS, son))
3791 new_stuff |= insert_aux (son);
3797 /* Perform insertion of partially redundant values. */
3802 bool new_stuff = true;
3804 int num_iterations = 0;
3807 NEW_SETS (bb) = bitmap_set_new ();
3812 new_stuff = insert_aux (ENTRY_BLOCK_PTR);
3814 statistics_histogram_event (cfun, "insert iterations", num_iterations);
3818 /* Add OP to EXP_GEN (block). */
3821 add_to_exp_gen (basic_block block, tree op)
3826 if (TREE_CODE (op) == SSA_NAME && ssa_undefined_value_p (op))
3828 result = get_or_alloc_expr_for_name (op);
3829 bitmap_value_insert_into_set (EXP_GEN (block), result);
3833 /* Create value ids for PHI in BLOCK. */
3836 make_values_for_phi (gimple phi, basic_block block)
3838 tree result = gimple_phi_result (phi);
3840 /* We have no need for virtual phis, as they don't represent
3841 actual computations. */
3842 if (is_gimple_reg (result))
3844 pre_expr e = get_or_alloc_expr_for_name (result);
3845 add_to_value (get_expr_value_id (e), e);
3846 bitmap_insert_into_set (PHI_GEN (block), e);
3847 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3851 for (i = 0; i < gimple_phi_num_args (phi); ++i)
3853 tree arg = gimple_phi_arg_def (phi, i);
3854 if (TREE_CODE (arg) == SSA_NAME)
3856 e = get_or_alloc_expr_for_name (arg);
3857 add_to_value (get_expr_value_id (e), e);
3864 /* Compute the AVAIL set for all basic blocks.
3866 This function performs value numbering of the statements in each basic
3867 block. The AVAIL sets are built from information we glean while doing
3868 this value numbering, since the AVAIL sets contain only one entry per
3871 AVAIL_IN[BLOCK] = AVAIL_OUT[dom(BLOCK)].
3872 AVAIL_OUT[BLOCK] = AVAIL_IN[BLOCK] U PHI_GEN[BLOCK] U TMP_GEN[BLOCK]. */
3875 compute_avail (void)
3878 basic_block block, son;
3879 basic_block *worklist;
3883 /* We pretend that default definitions are defined in the entry block.
3884 This includes function arguments and the static chain decl. */
3885 for (i = 1; i < num_ssa_names; ++i)
3887 tree name = ssa_name (i);
3890 || !SSA_NAME_IS_DEFAULT_DEF (name)
3891 || has_zero_uses (name)
3892 || !is_gimple_reg (name))
3895 e = get_or_alloc_expr_for_name (name);
3896 add_to_value (get_expr_value_id (e), e);
3898 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e);
3899 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e);
3902 /* Allocate the worklist. */
3903 worklist = XNEWVEC (basic_block, n_basic_blocks);
3905 /* Seed the algorithm by putting the dominator children of the entry
3906 block on the worklist. */
3907 for (son = first_dom_son (CDI_DOMINATORS, ENTRY_BLOCK_PTR);
3909 son = next_dom_son (CDI_DOMINATORS, son))
3910 worklist[sp++] = son;
3912 /* Loop until the worklist is empty. */
3915 gimple_stmt_iterator gsi;
3918 unsigned int stmt_uid = 1;
3920 /* Pick a block from the worklist. */
3921 block = worklist[--sp];
3923 /* Initially, the set of available values in BLOCK is that of
3924 its immediate dominator. */
3925 dom = get_immediate_dominator (CDI_DOMINATORS, block);
3927 bitmap_set_copy (AVAIL_OUT (block), AVAIL_OUT (dom));
3929 /* Generate values for PHI nodes. */
3930 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
3931 make_values_for_phi (gsi_stmt (gsi), block);
3933 BB_MAY_NOTRETURN (block) = 0;
3935 /* Now compute value numbers and populate value sets with all
3936 the expressions computed in BLOCK. */
3937 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
3942 stmt = gsi_stmt (gsi);
3943 gimple_set_uid (stmt, stmt_uid++);
3945 /* Cache whether the basic-block has any non-visible side-effect
3947 If this isn't a call or it is the last stmt in the
3948 basic-block then the CFG represents things correctly. */
3949 if (is_gimple_call (stmt) && !stmt_ends_bb_p (stmt))
3951 /* Non-looping const functions always return normally.
3952 Otherwise the call might not return or have side-effects
3953 that forbid hoisting possibly trapping expressions
3955 int flags = gimple_call_flags (stmt);
3956 if (!(flags & ECF_CONST)
3957 || (flags & ECF_LOOPING_CONST_OR_PURE))
3958 BB_MAY_NOTRETURN (block) = 1;
3961 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3963 pre_expr e = get_or_alloc_expr_for_name (op);
3965 add_to_value (get_expr_value_id (e), e);
3967 bitmap_insert_into_set (TMP_GEN (block), e);
3968 bitmap_value_insert_into_set (AVAIL_OUT (block), e);
3971 if (gimple_has_side_effects (stmt) || stmt_could_throw_p (stmt))
3974 switch (gimple_code (stmt))
3977 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
3978 add_to_exp_gen (block, op);
3985 vn_reference_op_t vro;
3986 pre_expr result = NULL;
3987 VEC(vn_reference_op_s, heap) *ops = NULL;
3989 /* We can value number only calls to real functions. */
3990 if (gimple_call_internal_p (stmt))
3993 copy_reference_ops_from_call (stmt, &ops);
3994 vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
3995 gimple_expr_type (stmt),
3996 ops, &ref, VN_NOWALK);
3997 VEC_free (vn_reference_op_s, heap, ops);
4001 for (i = 0; VEC_iterate (vn_reference_op_s,
4005 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4006 add_to_exp_gen (block, vro->op0);
4007 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4008 add_to_exp_gen (block, vro->op1);
4009 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4010 add_to_exp_gen (block, vro->op2);
4012 result = (pre_expr) pool_alloc (pre_expr_pool);
4013 result->kind = REFERENCE;
4015 PRE_EXPR_REFERENCE (result) = ref;
4017 get_or_alloc_expression_id (result);
4018 add_to_value (get_expr_value_id (result), result);
4020 bitmap_value_insert_into_set (EXP_GEN (block), result);
4026 pre_expr result = NULL;
4027 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
4031 case tcc_comparison:
4036 vn_nary_op_lookup_pieces (gimple_num_ops (stmt) - 1,
4037 gimple_assign_rhs_code (stmt),
4038 gimple_expr_type (stmt),
4039 gimple_assign_rhs1_ptr (stmt),
4045 for (i = 0; i < nary->length; i++)
4046 if (TREE_CODE (nary->op[i]) == SSA_NAME)
4047 add_to_exp_gen (block, nary->op[i]);
4049 result = (pre_expr) pool_alloc (pre_expr_pool);
4050 result->kind = NARY;
4052 PRE_EXPR_NARY (result) = nary;
4056 case tcc_declaration:
4061 vn_reference_op_t vro;
4063 vn_reference_lookup (gimple_assign_rhs1 (stmt),
4069 for (i = 0; VEC_iterate (vn_reference_op_s,
4073 if (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME)
4074 add_to_exp_gen (block, vro->op0);
4075 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
4076 add_to_exp_gen (block, vro->op1);
4077 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
4078 add_to_exp_gen (block, vro->op2);
4080 result = (pre_expr) pool_alloc (pre_expr_pool);
4081 result->kind = REFERENCE;
4083 PRE_EXPR_REFERENCE (result) = ref;
4088 /* For any other statement that we don't
4089 recognize, simply add all referenced
4090 SSA_NAMEs to EXP_GEN. */
4091 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4092 add_to_exp_gen (block, op);
4096 get_or_alloc_expression_id (result);
4097 add_to_value (get_expr_value_id (result), result);
4099 bitmap_value_insert_into_set (EXP_GEN (block), result);
4108 /* Put the dominator children of BLOCK on the worklist of blocks
4109 to compute available sets for. */
4110 for (son = first_dom_son (CDI_DOMINATORS, block);
4112 son = next_dom_son (CDI_DOMINATORS, son))
4113 worklist[sp++] = son;
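/* Hypothetical standalone sketch (not part of this file) of the AVAIL
   propagation performed above: because SSA values are never killed,
   AVAIL_OUT of a block is just AVAIL_OUT of its immediate dominator plus
   what the block itself generates, computed in one pass over the blocks in
   dominator-tree preorder.  preorder, idom and gen are assumed toy inputs
   using the same machine-word bitmasks as the earlier sketches.  */

static void
toy_compute_avail (int nblocks, const int *preorder, const int *idom,
		   const unsigned long *gen, unsigned long *avail_out)
{
  int i;
  for (i = 0; i < nblocks; i++)
    {
      int b = preorder[i];	/* dominators are visited before dominated blocks  */
      unsigned long avail_in = idom[b] >= 0 ? avail_out[idom[b]] : 0;

      /* AVAIL_OUT[B] = AVAIL_IN[B] U PHI_GEN[B] U TMP_GEN[B].  */
      avail_out[b] = avail_in | gen[b];
    }
}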
4119 /* Insert the expression for SSA_VN that SCCVN thought would be simpler
4120 than the available expressions for it. The insertion point is
4121 right before the first use in STMT. Returns the SSA_NAME that should
4122 be used for replacement. */
4125 do_SCCVN_insertion (gimple stmt, tree ssa_vn)
4127 basic_block bb = gimple_bb (stmt);
4128 gimple_stmt_iterator gsi;
4129 gimple_seq stmts = NULL;
4133 /* First create a value expression from the expression we want
4134 to insert and associate it with the value handle for SSA_VN. */
4135 e = get_or_alloc_expr_for (vn_get_expr_for (ssa_vn));
4139 /* Then use create_expression_by_pieces to generate a valid
4140 expression to insert at this point of the IL stream. */
4141 expr = create_expression_by_pieces (bb, e, &stmts, stmt, NULL);
4142 if (expr == NULL_TREE)
4144 gsi = gsi_for_stmt (stmt);
4145 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
4150 /* Eliminate fully redundant computations. */
4155 VEC (gimple, heap) *to_remove = NULL;
4156 VEC (gimple, heap) *to_update = NULL;
4158 unsigned int todo = 0;
4159 gimple_stmt_iterator gsi;
4165 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi))
4167 tree lhs = NULL_TREE;
4168 tree rhs = NULL_TREE;
4170 stmt = gsi_stmt (gsi);
4172 if (gimple_has_lhs (stmt))
4173 lhs = gimple_get_lhs (stmt);
4175 if (gimple_assign_single_p (stmt))
4176 rhs = gimple_assign_rhs1 (stmt);
4178 /* Lookup the RHS of the expression, see if we have an
4179 available computation for it. If so, replace the RHS with
4180 the available computation.
4183 We don't replace a global register variable when it is the RHS of
4184 a single assignment. We do replace local register variables, since GCC
4185 does not guarantee that a local variable will be allocated in a register. */
4186 if (gimple_has_lhs (stmt)
4187 && TREE_CODE (lhs) == SSA_NAME
4188 && !gimple_assign_ssa_name_copy_p (stmt)
4189 && (!gimple_assign_single_p (stmt)
4190 || (!is_gimple_min_invariant (rhs)
4191 && (gimple_assign_rhs_code (stmt) != VAR_DECL
4192 || !is_global_var (rhs)
4193 || !DECL_HARD_REGISTER (rhs))))
4194 && !gimple_has_volatile_ops (stmt)
4195 && !has_zero_uses (lhs))
4198 pre_expr lhsexpr = get_or_alloc_expr_for_name (lhs);
4199 pre_expr sprimeexpr;
4200 gimple orig_stmt = stmt;
4202 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4203 get_expr_value_id (lhsexpr),
4208 if (sprimeexpr->kind == CONSTANT)
4209 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4210 else if (sprimeexpr->kind == NAME)
4211 sprime = PRE_EXPR_NAME (sprimeexpr);
4216 /* If there is no existing leader but SCCVN knows this
4217 value is constant, use that constant. */
4218 if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum))
4220 sprime = VN_INFO (lhs)->valnum;
4221 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4222 TREE_TYPE (sprime)))
4223 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4225 if (dump_file && (dump_flags & TDF_DETAILS))
4227 fprintf (dump_file, "Replaced ");
4228 print_gimple_expr (dump_file, stmt, 0, 0);
4229 fprintf (dump_file, " with ");
4230 print_generic_expr (dump_file, sprime, 0);
4231 fprintf (dump_file, " in ");
4232 print_gimple_stmt (dump_file, stmt, 0, 0);
4234 pre_stats.eliminations++;
4235 propagate_tree_value_into_stmt (&gsi, sprime);
4236 stmt = gsi_stmt (gsi);
4239 /* If we removed EH side-effects from the statement, clean
4240 its EH information. */
4241 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4243 bitmap_set_bit (need_eh_cleanup,
4244 gimple_bb (stmt)->index);
4245 if (dump_file && (dump_flags & TDF_DETAILS))
4246 fprintf (dump_file, " Removed EH side-effects.\n");
4251 /* If there is no existing usable leader but SCCVN thinks
4252 it has an expression it wants to use as replacement,
4254 if (!sprime || sprime == lhs)
4256 tree val = VN_INFO (lhs)->valnum;
4258 && TREE_CODE (val) == SSA_NAME
4259 && VN_INFO (val)->needs_insertion
4260 && can_PRE_operation (vn_get_expr_for (val)))
4261 sprime = do_SCCVN_insertion (stmt, val);
4265 && (rhs == NULL_TREE
4266 || TREE_CODE (rhs) != SSA_NAME
4267 || may_propagate_copy (rhs, sprime)))
4269 bool can_make_abnormal_goto
4270 = is_gimple_call (stmt)
4271 && stmt_can_make_abnormal_goto (stmt);
4273 gcc_assert (sprime != rhs);
4275 if (dump_file && (dump_flags & TDF_DETAILS))
4277 fprintf (dump_file, "Replaced ");
4278 print_gimple_expr (dump_file, stmt, 0, 0);
4279 fprintf (dump_file, " with ");
4280 print_generic_expr (dump_file, sprime, 0);
4281 fprintf (dump_file, " in ");
4282 print_gimple_stmt (dump_file, stmt, 0, 0);
4285 if (TREE_CODE (sprime) == SSA_NAME)
4286 gimple_set_plf (SSA_NAME_DEF_STMT (sprime),
4288 /* We need to make sure the new and old types actually match,
4289 which may require adding a simple cast, which fold_convert
4291 if ((!rhs || TREE_CODE (rhs) != SSA_NAME)
4292 && !useless_type_conversion_p (gimple_expr_type (stmt),
4293 TREE_TYPE (sprime)))
4294 sprime = fold_convert (gimple_expr_type (stmt), sprime);
4296 pre_stats.eliminations++;
4297 propagate_tree_value_into_stmt (&gsi, sprime);
4298 stmt = gsi_stmt (gsi);
4301 /* If we removed EH side-effects from the statement, clean
4302 its EH information. */
4303 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4305 bitmap_set_bit (need_eh_cleanup,
4306 gimple_bb (stmt)->index);
4307 if (dump_file && (dump_flags & TDF_DETAILS))
4308 fprintf (dump_file, " Removed EH side-effects.\n");
4311 /* Likewise for AB side-effects. */
4312 if (can_make_abnormal_goto
4313 && !stmt_can_make_abnormal_goto (stmt))
4315 bitmap_set_bit (need_ab_cleanup,
4316 gimple_bb (stmt)->index);
4317 if (dump_file && (dump_flags & TDF_DETAILS))
4318 fprintf (dump_file, " Removed AB side-effects.\n");
4322 /* If the statement is a scalar store, see if the expression
4323 has the same value number as its rhs. If so, the store is
4325 else if (gimple_assign_single_p (stmt)
4326 && !gimple_has_volatile_ops (stmt)
4327 && !is_gimple_reg (gimple_assign_lhs (stmt))
4328 && (TREE_CODE (rhs) == SSA_NAME
4329 || is_gimple_min_invariant (rhs)))
4332 val = vn_reference_lookup (gimple_assign_lhs (stmt),
4333 gimple_vuse (stmt), VN_WALK, NULL);
4334 if (TREE_CODE (rhs) == SSA_NAME)
4335 rhs = VN_INFO (rhs)->valnum;
4337 && operand_equal_p (val, rhs, 0))
4339 if (dump_file && (dump_flags & TDF_DETAILS))
4341 fprintf (dump_file, "Deleted redundant store ");
4342 print_gimple_stmt (dump_file, stmt, 0, 0);
4345 /* Queue stmt for removal. */
4346 VEC_safe_push (gimple, heap, to_remove, stmt);
4349 /* Visit COND_EXPRs and fold the comparison with the
4350 available value-numbers. */
4351 else if (gimple_code (stmt) == GIMPLE_COND)
4353 tree op0 = gimple_cond_lhs (stmt);
4354 tree op1 = gimple_cond_rhs (stmt);
4357 if (TREE_CODE (op0) == SSA_NAME)
4358 op0 = VN_INFO (op0)->valnum;
4359 if (TREE_CODE (op1) == SSA_NAME)
4360 op1 = VN_INFO (op1)->valnum;
4361 result = fold_binary (gimple_cond_code (stmt), boolean_type_node,
4363 if (result && TREE_CODE (result) == INTEGER_CST)
4365 if (integer_zerop (result))
4366 gimple_cond_make_false (stmt);
4368 gimple_cond_make_true (stmt);
4370 todo = TODO_cleanup_cfg;
4373 /* Visit indirect calls and turn them into direct calls if
4375 if (is_gimple_call (stmt))
4377 tree orig_fn = gimple_call_fn (stmt);
4381 if (TREE_CODE (orig_fn) == SSA_NAME)
4382 fn = VN_INFO (orig_fn)->valnum;
4383 else if (TREE_CODE (orig_fn) == OBJ_TYPE_REF
4384 && TREE_CODE (OBJ_TYPE_REF_EXPR (orig_fn)) == SSA_NAME)
4385 fn = VN_INFO (OBJ_TYPE_REF_EXPR (orig_fn))->valnum;
4388 if (gimple_call_addr_fndecl (fn) != NULL_TREE
4389 && useless_type_conversion_p (TREE_TYPE (orig_fn),
4392 bool can_make_abnormal_goto
4393 = stmt_can_make_abnormal_goto (stmt);
4394 bool was_noreturn = gimple_call_noreturn_p (stmt);
4396 if (dump_file && (dump_flags & TDF_DETAILS))
4398 fprintf (dump_file, "Replacing call target with ");
4399 print_generic_expr (dump_file, fn, 0);
4400 fprintf (dump_file, " in ");
4401 print_gimple_stmt (dump_file, stmt, 0, 0);
4404 gimple_call_set_fn (stmt, fn);
4405 VEC_safe_push (gimple, heap, to_update, stmt);
4407 /* When changing a call into a noreturn call, cfg cleanup
4408 is needed to fix up the noreturn call. */
4409 if (!was_noreturn && gimple_call_noreturn_p (stmt))
4410 todo |= TODO_cleanup_cfg;
4412 /* If we removed EH side-effects from the statement, clean
4413 its EH information. */
4414 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
4416 bitmap_set_bit (need_eh_cleanup,
4417 gimple_bb (stmt)->index);
4418 if (dump_file && (dump_flags & TDF_DETAILS))
4419 fprintf (dump_file, " Removed EH side-effects.\n");
4422 /* Likewise for AB side-effects. */
4423 if (can_make_abnormal_goto
4424 && !stmt_can_make_abnormal_goto (stmt))
4426 bitmap_set_bit (need_ab_cleanup,
4427 gimple_bb (stmt)->index);
4428 if (dump_file && (dump_flags & TDF_DETAILS))
4429 fprintf (dump_file, " Removed AB side-effects.\n");
4432 /* Changing an indirect call to a direct call may
4433 have exposed different semantics. This may
4434 require an SSA update. */
4435 todo |= TODO_update_ssa_only_virtuals;
4440 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
4442 gimple stmt, phi = gsi_stmt (gsi);
4443 tree sprime = NULL_TREE, res = PHI_RESULT (phi);
4444 pre_expr sprimeexpr, resexpr;
4445 gimple_stmt_iterator gsi2;
4447 /* We want to perform redundant PHI elimination. Do so by
4448 replacing the PHI with a single copy if possible.
4449 Do not touch inserted, single-argument or virtual PHIs. */
4450 if (gimple_phi_num_args (phi) == 1
4451 || !is_gimple_reg (res))
4457 resexpr = get_or_alloc_expr_for_name (res);
4458 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b),
4459 get_expr_value_id (resexpr), NULL);
4462 if (sprimeexpr->kind == CONSTANT)
4463 sprime = PRE_EXPR_CONSTANT (sprimeexpr);
4464 else if (sprimeexpr->kind == NAME)
4465 sprime = PRE_EXPR_NAME (sprimeexpr);
4469 if (!sprime && is_gimple_min_invariant (VN_INFO (res)->valnum))
4471 sprime = VN_INFO (res)->valnum;
4472 if (!useless_type_conversion_p (TREE_TYPE (res),
4473 TREE_TYPE (sprime)))
4474 sprime = fold_convert (TREE_TYPE (res), sprime);
4483 if (dump_file && (dump_flags & TDF_DETAILS))
4485 fprintf (dump_file, "Replaced redundant PHI node defining ");
4486 print_generic_expr (dump_file, res, 0);
4487 fprintf (dump_file, " with ");
4488 print_generic_expr (dump_file, sprime, 0);
4489 fprintf (dump_file, "\n");
4492 remove_phi_node (&gsi, false);
4494 if (!bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4495 && TREE_CODE (sprime) == SSA_NAME)
4496 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4498 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4499 sprime = fold_convert (TREE_TYPE (res), sprime);
4500 stmt = gimple_build_assign (res, sprime);
4501 SSA_NAME_DEF_STMT (res) = stmt;
4502 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4504 gsi2 = gsi_after_labels (b);
4505 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4506 /* Queue the copy for eventual removal. */
4507 VEC_safe_push (gimple, heap, to_remove, stmt);
4508 /* If we inserted this PHI node ourselves, it's not an elimination. */
4509 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4512 pre_stats.eliminations++;
4516 /* We cannot remove stmts during the BB walk, especially not release SSA
4517 names there, as this confuses the VN machinery. The stmts ending
4518 up in to_remove are either stores or simple copies. */
4519 FOR_EACH_VEC_ELT (gimple, to_remove, i, stmt)
4521 tree lhs = gimple_assign_lhs (stmt);
4522 tree rhs = gimple_assign_rhs1 (stmt);
4523 use_operand_p use_p;
4526 /* If there is a single use only, propagate the equivalency
4527 instead of keeping the copy. */
4528 if (TREE_CODE (lhs) == SSA_NAME
4529 && TREE_CODE (rhs) == SSA_NAME
4530 && single_imm_use (lhs, &use_p, &use_stmt)
4531 && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4533 SET_USE (use_p, rhs);
4534 update_stmt (use_stmt);
4535 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
4536 && TREE_CODE (rhs) == SSA_NAME)
4537 gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
4540 /* If this is a store or a now unused copy, remove it. */
4541 if (TREE_CODE (lhs) != SSA_NAME
4542 || has_zero_uses (lhs))
4544 basic_block bb = gimple_bb (stmt);
4545 gsi = gsi_for_stmt (stmt);
4546 unlink_stmt_vdef (stmt);
4547 gsi_remove (&gsi, true);
4548 /* ??? gsi_remove doesn't tell us whether the stmt was
4549 in EH tables and thus whether we need to purge EH edges.
4550 Simply schedule the block for a cleanup. */
4551 bitmap_set_bit (need_eh_cleanup, bb->index);
4552 if (TREE_CODE (lhs) == SSA_NAME)
4553 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4554 release_defs (stmt);
4557 VEC_free (gimple, heap, to_remove);
4559 /* We cannot update call statements with virtual operands during
4560 the SSA walk. This might remove them, which in turn makes our
4561 VN lattice invalid. */
4562 FOR_EACH_VEC_ELT (gimple, to_update, i, stmt)
4564 VEC_free (gimple, heap, to_update);
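/* Hypothetical standalone sketch (not part of this file) of the core
   replacement decision in eliminate () above: when the value computed by a
   name already has an available leader that is a different name (or a
   constant), the computation is fully redundant and uses of the name can be
   rewritten to use the leader.  value_of and leader_of are assumed toy maps
   from names to value ids and from value ids to the chosen leader name
   (or -1 when no leader is available).  */

static int
toy_redundancy_leader (int name, const int *value_of, const int *leader_of)
{
  int leader = leader_of[value_of[name]];

  /* Only a leader different from the name itself makes the statement
     defining NAME removable.  */
  if (leader >= 0 && leader != name)
    return leader;
  return name;
}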
4569 /* Borrow a bit of tree-ssa-dce.c for the moment.
4570 XXX: In 4.1, we should be able to just run a DCE pass after PRE, though
4571 this may be a bit faster, and we may want critical edges kept split. */
4573 /* If OP's defining statement has not already been determined to be necessary,
4574 mark that statement necessary. Return the stmt, if it is newly
4577 static inline gimple
4578 mark_operand_necessary (tree op)
4584 if (TREE_CODE (op) != SSA_NAME)
4587 stmt = SSA_NAME_DEF_STMT (op);
4590 if (gimple_plf (stmt, NECESSARY)
4591 || gimple_nop_p (stmt))
4594 gimple_set_plf (stmt, NECESSARY, true);
4598 /* Because we don't follow the standard PRE algorithm exactly, sometimes
4599 decide not to insert PHI nodes, and because value numbering of casts isn't
4600 perfect, we sometimes end up inserting dead code. This simple DCE-like
4601 pass removes any insertions we made that weren't actually used. */
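/* For instance, insert () may have added something along the lines of

       pretmp_8 = a_2 + b_3;

   to a predecessor block in the expectation that elimination would reuse
   it (the name pretmp_8 is purely illustrative).  If the elimination walk
   ends up never using that temporary, its definition is never marked
   NECESSARY and is deleted again here.  */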
static void
remove_dead_inserted_code (void)
{
  bitmap worklist;
  unsigned i;
  bitmap_iterator bi;
  gimple t;

  worklist = BITMAP_ALLOC (NULL);
  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (gimple_plf (t, NECESSARY))
        bitmap_set_bit (worklist, i);
    }
  while (!bitmap_empty_p (worklist))
    {
      i = bitmap_first_set_bit (worklist);
      bitmap_clear_bit (worklist, i);
      t = SSA_NAME_DEF_STMT (ssa_name (i));

      /* PHI nodes are somewhat special in that each PHI alternative has
         data and control dependencies.  All the statements feeding the
         PHI node's arguments are always necessary.  */
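      /* E.g. for a (hypothetical) inserted node

             prephitmp_5 = PHI <a_1(3), b_2(4)>

         the definitions of both a_1 and b_2 must be kept live, so they are
         marked necessary and queued on the worklist below.  */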
      if (gimple_code (t) == GIMPLE_PHI)
        {
          unsigned k;

          for (k = 0; k < gimple_phi_num_args (t); k++)
            {
              tree arg = PHI_ARG_DEF (t, k);
              if (TREE_CODE (arg) == SSA_NAME)
                {
                  gimple n = mark_operand_necessary (arg);
                  if (n)
                    bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
                }
            }
        }
      else
        {
          /* Propagate through the operands.  Examine all the USE, VUSE and
             VDEF operands in this statement.  Mark all the statements
             which feed this statement's uses as necessary.  */
          ssa_op_iter iter;
          tree use;

          /* The operands of VDEF expressions are also needed as they
             represent potential definitions that may reach this
             statement (VDEF operands allow us to follow def-def
             links).  */
          FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
            {
              gimple n = mark_operand_necessary (use);
              if (n)
                bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
            }
        }
    }

  EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
    {
      t = SSA_NAME_DEF_STMT (ssa_name (i));
      if (!gimple_plf (t, NECESSARY))
        {
          gimple_stmt_iterator gsi;

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unnecessary insertion:");
              print_gimple_stmt (dump_file, t, 0, 0);
            }

          gsi = gsi_for_stmt (t);
          if (gimple_code (t) == GIMPLE_PHI)
            remove_phi_node (&gsi, true);
          else
            {
              gsi_remove (&gsi, true);
              release_defs (t);
            }
        }
    }
  BITMAP_FREE (worklist);
}
/* Compute a reverse post-order in *POST_ORDER.  If INCLUDE_ENTRY_EXIT is
   true, then ENTRY_BLOCK and EXIT_BLOCK are included.  Returns
   the number of visited blocks.  */
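/* As a small, purely illustrative example: for a diamond CFG

       ENTRY -> A,  A -> B,  A -> C,  B -> D,  C -> D,  D -> EXIT

   the walk below starts at EXIT and explores predecessor edges, so the
   resulting array comes out roughly as A, B, C, D (the relative order of
   B and C depends on which of D's incoming edges is walked first).  */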
static int
my_rev_post_order_compute (int *post_order, bool include_entry_exit)
{
  edge_iterator *stack;
  int sp;
  int post_order_num = 0;
  sbitmap visited;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the last edge on to the stack.  */
  stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge source has been visited yet.  */
      if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
        {
          /* Mark that we have visited the source.  */
          SET_BIT (visited, src->index);

          if (EDGE_COUNT (src->preds) > 0)
            /* Since the SRC node has been visited for the first
               time, check its predecessors.  */
            stack[sp++] = ei_start (src->preds);
          else
            post_order[post_order_num++] = src->index;
        }
      else
        {
          if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
            post_order[post_order_num++] = dest->index;

          if (!ei_one_before_end_p (ei))
            ei_next (&stack[sp - 1]);
          else
            sp--;
        }
    }

  if (include_entry_exit)
    post_order[post_order_num++] = ENTRY_BLOCK;

  free (stack);
  sbitmap_free (visited);
  return post_order_num;
}
/* Initialize data structures used by PRE.  */

static void
init_pre (bool do_fre)
{
  basic_block bb;

  next_expression_id = 1;
  VEC_safe_push (pre_expr, heap, expressions, NULL);
  value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
  VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
                         get_max_value_id () + 1);

  inserted_exprs = BITMAP_ALLOC (NULL);
  need_creation = NULL;
  pretemp = NULL_TREE;
  storetemp = NULL_TREE;
  prephitemp = NULL_TREE;

  connect_infinite_loops_to_exit ();
  memset (&pre_stats, 0, sizeof (pre_stats));

  postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
  my_rev_post_order_compute (postorder, false);

  alloc_aux_for_blocks (sizeof (struct bb_bitmap_sets));

  calculate_dominance_info (CDI_POST_DOMINATORS);
  calculate_dominance_info (CDI_DOMINATORS);

  bitmap_obstack_initialize (&grand_bitmap_obstack);
  phi_translate_table = htab_create (5110, expr_pred_trans_hash,
                                     expr_pred_trans_eq, free);
  expression_to_id = htab_create (num_ssa_names * 3,
                                  pre_expr_hash,
                                  pre_expr_eq, NULL);
  bitmap_set_pool = create_alloc_pool ("Bitmap sets",
                                       sizeof (struct bitmap_set), 30);
  pre_expr_pool = create_alloc_pool ("pre_expr nodes",
                                     sizeof (struct pre_expr_d), 30);
  FOR_ALL_BB (bb)
    {
      EXP_GEN (bb) = bitmap_set_new ();
      PHI_GEN (bb) = bitmap_set_new ();
      TMP_GEN (bb) = bitmap_set_new ();
      AVAIL_OUT (bb) = bitmap_set_new ();
    }

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}
/* Deallocate data structures used by PRE.  */

static unsigned
fini_pre (bool do_fre)
{
  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
  unsigned todo = 0;

  free (postorder);
  VEC_free (bitmap_set_t, heap, value_expressions);
  BITMAP_FREE (inserted_exprs);
  VEC_free (gimple, heap, need_creation);
  bitmap_obstack_release (&grand_bitmap_obstack);
  free_alloc_pool (bitmap_set_pool);
  free_alloc_pool (pre_expr_pool);
  htab_delete (phi_translate_table);
  htab_delete (expression_to_id);
  VEC_free (unsigned, heap, name_to_id);

  free_aux_for_blocks ();

  free_dominance_info (CDI_POST_DOMINATORS);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    todo = TODO_cleanup_cfg;

  if (!do_fre)
    loop_optimizer_finalize ();

  return todo;
}
/* Main entry point to the SSA-PRE pass.  DO_FRE is true if the caller
   only wants to do full redundancy elimination.  */

static unsigned int
execute_pre (bool do_fre)
{
  unsigned int todo = 0;

  do_partial_partial = optimize > 2 && optimize_function_for_speed_p (cfun);

  /* This has to happen before SCCVN runs because
     loop_optimizer_init may create new phis, etc.  */
  if (!do_fre)
    loop_optimizer_init (LOOPS_NORMAL);

  if (!run_scc_vn (do_fre ? VN_WALKREWRITE : VN_WALK))
    {
      if (!do_fre)
        loop_optimizer_finalize ();
      return 0;
    }

  init_pre (do_fre);

  /* Collect and value number expressions computed in each basic block.  */
  compute_avail ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;

      FOR_ALL_BB (bb)
        {
          print_bitmap_set (dump_file, EXP_GEN (bb), "exp_gen", bb->index);
          print_bitmap_set (dump_file, PHI_GEN (bb), "phi_gen", bb->index);
          print_bitmap_set (dump_file, TMP_GEN (bb), "tmp_gen", bb->index);
          print_bitmap_set (dump_file, AVAIL_OUT (bb), "avail_out", bb->index);
        }
    }

  /* Insert can get quite slow on an incredibly large number of basic
     blocks due to some quadratic behavior.  Until this behavior is
     fixed, don't run it when we have an incredibly large number of
     bb's.  If we aren't going to run insert, there is no point in
     computing ANTIC, either, even though it's plenty fast.  */
  if (!do_fre && n_basic_blocks < 4000)
    {
      compute_antic ();
      insert ();
    }

  /* Make sure to remove fake edges before committing our inserts.
     This makes sure we don't end up with extra critical edges that
     we would need to split.  */
  remove_fake_exit_edges ();
  gsi_commit_edge_inserts ();

  /* Remove all the redundant expressions.  */
  todo |= eliminate ();

  statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
  statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
  statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
  statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);

  clear_expression_ids ();
  if (!do_fre)
    {
      remove_dead_inserted_code ();
      todo |= TODO_verify_flow;
    }

  todo |= fini_pre (do_fre);

  if (!do_fre)
    /* TODO: tail_merge_optimize may merge all predecessors of a block, in which
       case we can merge the block with the remaining predecessor of the block.
       It should either:
       - call merge_blocks after each tail merge iteration
       - call merge_blocks after all tail merge iterations
       - mark TODO_cleanup_cfg when necessary
       - share the cfg cleanup with fini_pre.  */
    todo |= tail_merge_optimize (todo);

  free_scc_vn ();

  /* Tail merging invalidates the virtual SSA web, together with
     cfg-cleanup opportunities exposed by PRE this will wreck the
     SSA updating machinery.  So make sure to run update-ssa
     manually, before eventually scheduling cfg-cleanup as part of
     the todo.  */
  update_ssa (TODO_update_ssa_only_virtuals);

  return todo;
}
/* Gate and execute functions for PRE.  */

static unsigned int
do_pre (void)
{
  return execute_pre (false);
}

static bool
gate_pre (void)
{
  return flag_tree_pre != 0;
}

struct gimple_opt_pass pass_pre =
{
 {
  GIMPLE_PASS,
  "pre",                                /* name */
  gate_pre,                             /* gate */
  do_pre,                               /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_PRE,                          /* tv_id */
  PROP_no_crit_edges | PROP_cfg
    | PROP_ssa,                         /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  TODO_rebuild_alias,                   /* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa    /* todo_flags_finish */
 }
};
/* Gate and execute functions for FRE.  */

static unsigned int
execute_fre (void)
{
  return execute_pre (true);
}

static bool
gate_fre (void)
{
  return flag_tree_fre != 0;
}

struct gimple_opt_pass pass_fre =
{
 {
  GIMPLE_PASS,
  "fre",                                /* name */
  gate_fre,                             /* gate */
  execute_fre,                          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_TREE_FRE,                          /* tv_id */
  PROP_cfg | PROP_ssa,                  /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_ggc_collect | TODO_verify_ssa    /* todo_flags_finish */
 }
};