1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
43 #include "langhooks.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
until it stabilizes).
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
structure copies.
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
113 struct obstack nary_obstack;
114 alloc_pool phis_pool;
115 alloc_pool references_pool;
118 static htab_t constant_to_value_id;
119 static bitmap constant_value_ids;
122 /* Valid hashtables storing information we have proven to be
125 static vn_tables_t valid_info;
127 /* Optimistic hashtables storing information we are making assumptions about
128 during iterations. */
130 static vn_tables_t optimistic_info;
132 /* Pointer to the set of hashtables that is currently being used.
133 Should always point to either the optimistic_info, or the
136 static vn_tables_t current_info;
139 /* Reverse post order index for each basic block. */
141 static int *rpo_numbers;
143 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
145 /* This represents the top of the VN lattice, which is the universal
150 /* Unique counter for our value ids. */
152 static unsigned int next_value_id;
154 /* Next DFS number and the stack for strongly connected component
157 static unsigned int next_dfs_num;
158 static VEC (tree, heap) *sccstack;
160 static bool may_insert;
163 DEF_VEC_P(vn_ssa_aux_t);
164 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
166 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
167 are allocated on an obstack for locality reasons, and to free them
168 without looping over the VEC. */
170 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
171 static struct obstack vn_ssa_aux_obstack;
173 /* Return the value numbering information for a given SSA name. */
178 vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
179 SSA_NAME_VERSION (name));
184 /* Set the value numbering info for a given SSA name to a given
188 VN_INFO_SET (tree name, vn_ssa_aux_t value)
190 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
191 SSA_NAME_VERSION (name), value);
194 /* Initialize the value numbering info for a given SSA name.
195 This should be called just once for every SSA name. */
198 VN_INFO_GET (tree name)
200 vn_ssa_aux_t newinfo;
202 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
203 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
204 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
205 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
206 SSA_NAME_VERSION (name) + 1);
207 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
208 SSA_NAME_VERSION (name), newinfo);
213 /* Get the representative expression for the SSA_NAME NAME. Returns
214 the representative SSA_NAME if there is no expression associated with it. */
217 vn_get_expr_for (tree name)
219 vn_ssa_aux_t vn = VN_INFO (name);
221 tree expr = NULL_TREE;
223 if (vn->valnum == VN_TOP)
226 /* If the value-number is a constant it is the representative
228 if (TREE_CODE (vn->valnum) != SSA_NAME)
231 /* Get to the information of the value of this SSA_NAME. */
232 vn = VN_INFO (vn->valnum);
234 /* If the value-number is a constant it is the representative
236 if (TREE_CODE (vn->valnum) != SSA_NAME)
239 /* Else if we have an expression, return it. */
240 if (vn->expr != NULL_TREE)
243 /* Otherwise use the defining statement to build the expression. */
244 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
246 /* If the value number is a default-definition or a PHI result
248 if (gimple_nop_p (def_stmt)
249 || gimple_code (def_stmt) == GIMPLE_PHI)
252 if (!is_gimple_assign (def_stmt))
255 /* FIXME tuples. This is incomplete and likely will miss some
257 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
260 if (gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
261 && gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
262 && gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
263 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
264 gimple_expr_type (def_stmt),
265 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
269 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
270 gimple_expr_type (def_stmt),
271 gimple_assign_rhs1 (def_stmt));
275 expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
276 gimple_expr_type (def_stmt),
277 gimple_assign_rhs1 (def_stmt),
278 gimple_assign_rhs2 (def_stmt));
283 if (expr == NULL_TREE)
286 /* Cache the expression. */
293 /* Free a phi operation structure VP. */
298 vn_phi_t phi = (vn_phi_t) vp;
299 VEC_free (tree, heap, phi->phiargs);
302 /* Free a reference operation structure VP. */
305 free_reference (void *vp)
307 vn_reference_t vr = (vn_reference_t) vp;
308 VEC_free (vn_reference_op_s, heap, vr->operands);
311 /* Hash table equality function for vn_constant_t. */
314 vn_constant_eq (const void *p1, const void *p2)
316 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
317 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
319 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
322 /* Hash table hash function for vn_constant_t. */
325 vn_constant_hash (const void *p1)
327 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
328 return vc1->hashcode;
331 /* Lookup a value id for CONSTANT and return it. If it does not
335 get_constant_value_id (tree constant)
338 struct vn_constant_s vc;
340 vc.hashcode = vn_hash_constant_with_type (constant);
341 vc.constant = constant;
342 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
343 vc.hashcode, NO_INSERT);
345 return ((vn_constant_t)*slot)->value_id;
349 /* Lookup a value id for CONSTANT, and if it does not exist, create a
350 new one and return it. If it does exist, return it. */
353 get_or_alloc_constant_value_id (tree constant)
356 vn_constant_t vc = XNEW (struct vn_constant_s);
358 vc->hashcode = vn_hash_constant_with_type (constant);
359 vc->constant = constant;
360 slot = htab_find_slot_with_hash (constant_to_value_id, vc,
361 vc->hashcode, INSERT);
365 return ((vn_constant_t)*slot)->value_id;
367 vc->value_id = get_next_value_id ();
369 bitmap_set_bit (constant_value_ids, vc->value_id);
373 /* Return true if V is a value id for a constant. */
376 value_id_constant_p (unsigned int v)
378 return bitmap_bit_p (constant_value_ids, v);
381 /* Compare two reference operands P1 and P2 for equality. Return true if
382 they are equal, and false otherwise. */
385 vn_reference_op_eq (const void *p1, const void *p2)
387 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
388 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
389 return vro1->opcode == vro2->opcode
390 && vro1->type == vro2->type
391 && expressions_equal_p (vro1->op0, vro2->op0)
392 && expressions_equal_p (vro1->op1, vro2->op1)
393 && expressions_equal_p (vro1->op2, vro2->op2);
396 /* Compute the hash for a reference operand VRO1. */
399 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
401 return iterative_hash_expr (vro1->op0, vro1->opcode)
402 + iterative_hash_expr (vro1->op1, vro1->opcode)
403 + iterative_hash_expr (vro1->op2, vro1->opcode);
406 /* Return the hashcode for a given reference operation P1. */
409 vn_reference_hash (const void *p1)
411 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
412 return vr1->hashcode;
415 /* Compute a hash for the reference operation VR1 and return it. */
418 vn_reference_compute_hash (const vn_reference_t vr1)
420 hashval_t result = 0;
423 vn_reference_op_t vro;
425 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
426 result += iterative_hash_expr (v, 0);
427 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
428 result += vn_reference_op_compute_hash (vro);
433 /* Return true if reference operations P1 and P2 are equivalent. This
434 means they have the same set of operands and vuses. */
437 vn_reference_eq (const void *p1, const void *p2)
441 vn_reference_op_t vro;
443 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
444 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
446 if (vr1->vuses == vr2->vuses
447 && vr1->operands == vr2->operands)
450 /* Impossible for them to be equivalent if they have different
452 if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
455 /* We require that address operands be canonicalized in a way that
456 two memory references will have the same operands if they are
458 if (VEC_length (vn_reference_op_s, vr1->operands)
459 != VEC_length (vn_reference_op_s, vr2->operands))
462 /* The memory state is more often different than the address of the
463 store/load, so check it first. */
464 for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
466 if (VEC_index (tree, vr2->vuses, i) != v)
470 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
472 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
479 /* Place the vuses from STMT into *result. */
482 vuses_to_vec (gimple stmt, VEC (tree, gc) **result)
490 VEC_reserve_exact (tree, gc, *result,
491 num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));
493 FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
494 VEC_quick_push (tree, *result, vuse);
498 /* Copy the VUSE names in STMT into a vector, and return
502 copy_vuses_from_stmt (gimple stmt)
504 VEC (tree, gc) *vuses = NULL;
506 vuses_to_vec (stmt, &vuses);
511 /* Place the vdefs from STMT into *result. */
514 vdefs_to_vec (gimple stmt, VEC (tree, gc) **result)
522 *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));
524 FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
525 VEC_quick_push (tree, *result, vdef);
528 /* Copy the names of vdef results in STMT into a vector, and return
531 static VEC (tree, gc) *
532 copy_vdefs_from_stmt (gimple stmt)
534 VEC (tree, gc) *vdefs = NULL;
536 vdefs_to_vec (stmt, &vdefs);
541 /* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs. */
542 static VEC (tree, gc) *shared_lookup_vops;
544 /* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
545 This function will overwrite the current SHARED_LOOKUP_VOPS
549 shared_vuses_from_stmt (gimple stmt)
551 VEC_truncate (tree, shared_lookup_vops, 0);
552 vuses_to_vec (stmt, &shared_lookup_vops);
554 return shared_lookup_vops;
557 /* Copy the operations present in load/store REF into RESULT, a vector of
558 vn_reference_op_s's. */
561 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
563 if (TREE_CODE (ref) == TARGET_MEM_REF)
565 vn_reference_op_s temp;
567 memset (&temp, 0, sizeof (temp));
568 /* We do not care for spurious type qualifications. */
569 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
570 temp.opcode = TREE_CODE (ref);
571 temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
572 temp.op1 = TMR_INDEX (ref);
573 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
575 memset (&temp, 0, sizeof (temp));
576 temp.type = NULL_TREE;
577 temp.opcode = TREE_CODE (ref);
578 temp.op0 = TMR_STEP (ref);
579 temp.op1 = TMR_OFFSET (ref);
580 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
584 /* For non-calls, store the information that makes up the address. */
588 vn_reference_op_s temp;
590 memset (&temp, 0, sizeof (temp));
591 /* We do not care for spurious type qualifications. */
592 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
593 temp.opcode = TREE_CODE (ref);
597 case ALIGN_INDIRECT_REF:
598 case MISALIGNED_INDIRECT_REF:
600 /* The only operand is the address, which gets its own
601 vn_reference_op_s structure. */
604 /* Record bits and position. */
605 temp.op0 = TREE_OPERAND (ref, 1);
606 temp.op1 = TREE_OPERAND (ref, 2);
609 /* The field decl is enough to unambiguously specify the field,
610 a matching type is not necessary and a mismatching type
611 is always a spurious difference. */
612 temp.type = NULL_TREE;
614 /* If this is a reference to a union member, record the union
615 member size as operand. Do so only if we are doing
616 expression insertion (during FRE), as PRE currently gets
617 confused with this. */
619 && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
620 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
621 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
622 temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
625 /* Record field as operand. */
626 temp.op0 = TREE_OPERAND (ref, 1);
627 temp.op1 = TREE_OPERAND (ref, 2);
629 case ARRAY_RANGE_REF:
631 /* Record index as operand. */
632 temp.op0 = TREE_OPERAND (ref, 1);
633 temp.op1 = TREE_OPERAND (ref, 2);
634 temp.op2 = TREE_OPERAND (ref, 3);
650 if (is_gimple_min_invariant (ref))
656 /* These are only interesting for their operands, their
657 existence, and their type. They will never be the last
658 ref in the chain of references (IE they require an
659 operand), so we don't have to put anything
660 for op* as it will be handled by the iteration */
663 case VIEW_CONVERT_EXPR:
668 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
670 if (REFERENCE_CLASS_P (ref)
671 || (TREE_CODE (ref) == ADDR_EXPR
672 && !is_gimple_min_invariant (ref)))
673 ref = TREE_OPERAND (ref, 0);
679 /* Copy the operations present in load/store/call REF into RESULT, a vector of
680 vn_reference_op_s's. */
683 copy_reference_ops_from_call (gimple call,
684 VEC(vn_reference_op_s, heap) **result)
686 vn_reference_op_s temp;
689 /* Copy the type, opcode, function being called and static chain. */
690 memset (&temp, 0, sizeof (temp));
691 temp.type = gimple_call_return_type (call);
692 temp.opcode = CALL_EXPR;
693 temp.op0 = gimple_call_fn (call);
694 temp.op1 = gimple_call_chain (call);
695 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
697 /* Copy the call arguments. As they can be references as well,
698 just chain them together. */
699 for (i = 0; i < gimple_call_num_args (call); ++i)
701 tree callarg = gimple_call_arg (call, i);
702 copy_reference_ops_from_ref (callarg, result);
706 /* Create a vector of vn_reference_op_s structures from REF, a
707 REFERENCE_CLASS_P tree. The vector is not shared. */
709 static VEC(vn_reference_op_s, heap) *
710 create_reference_ops_from_ref (tree ref)
712 VEC (vn_reference_op_s, heap) *result = NULL;
714 copy_reference_ops_from_ref (ref, &result);
718 /* Create a vector of vn_reference_op_s structures from CALL, a
719 call statement. The vector is not shared. */
721 static VEC(vn_reference_op_s, heap) *
722 create_reference_ops_from_call (gimple call)
724 VEC (vn_reference_op_s, heap) *result = NULL;
726 copy_reference_ops_from_call (call, &result);
730 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
732 /* Create a vector of vn_reference_op_s structures from REF, a
733 REFERENCE_CLASS_P tree. The vector is shared among all callers of
736 static VEC(vn_reference_op_s, heap) *
737 shared_reference_ops_from_ref (tree ref)
741 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
742 copy_reference_ops_from_ref (ref, &shared_lookup_references);
743 return shared_lookup_references;
746 /* Create a vector of vn_reference_op_s structures from CALL, a
747 call statement. The vector is shared among all callers of
750 static VEC(vn_reference_op_s, heap) *
751 shared_reference_ops_from_call (gimple call)
755 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
756 copy_reference_ops_from_call (call, &shared_lookup_references);
757 return shared_lookup_references;
761 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
762 structures into their value numbers. This is done in-place, and
763 the vector passed in is returned. */
765 static VEC (vn_reference_op_s, heap) *
766 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
768 vn_reference_op_t vro;
771 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
773 if (vro->opcode == SSA_NAME
774 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
776 vro->op0 = SSA_VAL (vro->op0);
777 /* If it transforms from an SSA_NAME to a constant, update
779 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
780 vro->opcode = TREE_CODE (vro->op0);
782 /* TODO: Do we want to valueize op2 and op1 of
783 ARRAY_REF/COMPONENT_REF for Ada */
790 /* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
791 their value numbers. This is done in-place, and the vector passed
794 static VEC (tree, gc) *
795 valueize_vuses (VEC (tree, gc) *orig)
797 bool made_replacement = false;
801 for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
803 if (vuse != SSA_VAL (vuse))
805 made_replacement = true;
806 VEC_replace (tree, orig, i, SSA_VAL (vuse));
810 if (made_replacement && VEC_length (tree, orig) > 1)
816 /* Return the single reference statement defining all virtual uses
817 in VUSES or NULL_TREE, if there are multiple defining statements.
818 Take into account only definitions that alias REF if following
822 get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
828 gcc_assert (VEC_length (tree, vuses) >= 1);
830 def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
831 if (gimple_code (def_stmt) == GIMPLE_PHI)
833 /* We can only handle lookups over PHI nodes for a single
835 if (VEC_length (tree, vuses) == 1)
837 def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
844 /* Verify each VUSE reaches the same defining stmt. */
845 for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
847 gimple tmp = SSA_NAME_DEF_STMT (vuse);
852 /* Now see if the definition aliases ref, and loop until it does. */
855 && is_gimple_assign (def_stmt)
856 && !refs_may_alias_p (ref, gimple_get_lhs (def_stmt)))
857 def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);
862 /* Lookup a SCCVN reference operation VR in the current hash table.
863 Returns the resulting value number if it exists in the hash table,
864 NULL_TREE otherwise. VNRESULT will be filled in with the actual
865 vn_reference_t stored in the hashtable if something is found. */
868 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
874 slot = htab_find_slot_with_hash (current_info->references, vr,
876 if (!slot && current_info == optimistic_info)
877 slot = htab_find_slot_with_hash (valid_info->references, vr,
882 *vnresult = (vn_reference_t)*slot;
883 return ((vn_reference_t)*slot)->result;
890 /* Lookup a reference operation by it's parts, in the current hash table.
891 Returns the resulting value number if it exists in the hash table,
892 NULL_TREE otherwise. VNRESULT will be filled in with the actual
893 vn_reference_t stored in the hashtable if something is found. */
896 vn_reference_lookup_pieces (VEC (tree, gc) *vuses,
897 VEC (vn_reference_op_s, heap) *operands,
898 vn_reference_t *vnresult)
900 struct vn_reference_s vr1;
905 vr1.vuses = valueize_vuses (vuses);
906 vr1.operands = valueize_refs (operands);
907 vr1.hashcode = vn_reference_compute_hash (&vr1);
908 result = vn_reference_lookup_1 (&vr1, vnresult);
913 /* Lookup OP in the current hash table, and return the resulting value
914 number if it exists in the hash table. Return NULL_TREE if it does
915 not exist in the hash table or if the result field of the structure
916 was NULL.. VNRESULT will be filled in with the vn_reference_t
917 stored in the hashtable if one exists. */
920 vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
921 vn_reference_t *vnresult)
923 struct vn_reference_s vr1;
929 vr1.vuses = valueize_vuses (vuses);
930 vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
931 vr1.hashcode = vn_reference_compute_hash (&vr1);
932 result = vn_reference_lookup_1 (&vr1, vnresult);
934 /* If there is a single defining statement for all virtual uses, we can
935 use that, following virtual use-def chains. */
939 && VEC_length (tree, vr1.vuses) >= 1
940 && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
941 && is_gimple_assign (def_stmt))
943 /* We are now at an aliasing definition for the vuses we want to
944 look up. Re-do the lookup with the vdefs for this stmt. */
945 vdefs_to_vec (def_stmt, &vuses);
946 vr1.vuses = valueize_vuses (vuses);
947 vr1.hashcode = vn_reference_compute_hash (&vr1);
948 result = vn_reference_lookup_1 (&vr1, vnresult);
955 /* Insert OP into the current hash table with a value number of
956 RESULT, and return the resulting reference structure we created. */
959 vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
964 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
965 if (TREE_CODE (result) == SSA_NAME)
966 vr1->value_id = VN_INFO (result)->value_id;
968 vr1->value_id = get_or_alloc_constant_value_id (result);
969 vr1->vuses = valueize_vuses (vuses);
970 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
971 vr1->hashcode = vn_reference_compute_hash (vr1);
972 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
974 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
977 /* Because we lookup stores using vuses, and value number failures
978 using the vdefs (see visit_reference_op_store for how and why),
979 it's possible that on failure we may try to insert an already
980 inserted store. This is not wrong, there is no ssa name for a
981 store that we could use as a differentiator anyway. Thus, unlike
982 the other lookup functions, you cannot gcc_assert (!*slot)
985 /* But free the old slot in case of a collision. */
987 free_reference (*slot);
993 /* Insert a reference by it's pieces into the current hash table with
994 a value number of RESULT. Return the resulting reference
995 structure we created. */
998 vn_reference_insert_pieces (VEC (tree, gc) *vuses,
999 VEC (vn_reference_op_s, heap) *operands,
1000 tree result, unsigned int value_id)
1006 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1007 vr1->value_id = value_id;
1008 vr1->vuses = valueize_vuses (vuses);
1009 vr1->operands = valueize_refs (operands);
1010 vr1->hashcode = vn_reference_compute_hash (vr1);
1011 if (result && TREE_CODE (result) == SSA_NAME)
1012 result = SSA_VAL (result);
1013 vr1->result = result;
1015 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1018 /* At this point we should have all the things inserted that we have
1019 seen before, and we should never try inserting something that
1021 gcc_assert (!*slot);
1023 free_reference (*slot);
1029 /* Compute and return the hash value for nary operation VBO1. */
1032 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1037 for (i = 0; i < vno1->length; ++i)
1038 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
1039 vno1->op[i] = SSA_VAL (vno1->op[i]);
1041 if (vno1->length == 2
1042 && commutative_tree_code (vno1->opcode)
1043 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
1045 tree temp = vno1->op[0];
1046 vno1->op[0] = vno1->op[1];
1050 for (i = 0; i < vno1->length; ++i)
1051 hash += iterative_hash_expr (vno1->op[i], vno1->opcode);
1056 /* Return the computed hashcode for nary operation P1. */
1059 vn_nary_op_hash (const void *p1)
1061 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1062 return vno1->hashcode;
1065 /* Compare nary operations P1 and P2 and return true if they are
1069 vn_nary_op_eq (const void *p1, const void *p2)
1071 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1072 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
1075 if (vno1->opcode != vno2->opcode
1076 || vno1->type != vno2->type)
1079 for (i = 0; i < vno1->length; ++i)
1080 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
1086 /* Lookup a n-ary operation by its pieces and return the resulting value
1087 number if it exists in the hash table. Return NULL_TREE if it does
1088 not exist in the hash table or if the result field of the operation
1089 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1093 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
1094 tree type, tree op0, tree op1, tree op2,
1095 tree op3, vn_nary_op_t *vnresult)
1098 struct vn_nary_op_s vno1;
1102 vno1.length = length;
1108 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1109 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1111 if (!slot && current_info == optimistic_info)
1112 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1117 *vnresult = (vn_nary_op_t)*slot;
1118 return ((vn_nary_op_t)*slot)->result;
1121 /* Lookup OP in the current hash table, and return the resulting value
1122 number if it exists in the hash table. Return NULL_TREE if it does
1123 not exist in the hash table or if the result field of the operation
1124 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1128 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
1131 struct vn_nary_op_s vno1;
1136 vno1.opcode = TREE_CODE (op);
1137 vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
1138 vno1.type = TREE_TYPE (op);
1139 for (i = 0; i < vno1.length; ++i)
1140 vno1.op[i] = TREE_OPERAND (op, i);
1141 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1142 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1144 if (!slot && current_info == optimistic_info)
1145 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1150 *vnresult = (vn_nary_op_t)*slot;
1151 return ((vn_nary_op_t)*slot)->result;
1154 /* Lookup the rhs of STMT in the current hash table, and return the resulting
1155 value number if it exists in the hash table. Return NULL_TREE if
1156 it does not exist in the hash table. VNRESULT will contain the
1157 vn_nary_op_t from the hashtable if it exists. */
1160 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
1163 struct vn_nary_op_s vno1;
1168 vno1.opcode = gimple_assign_rhs_code (stmt);
1169 vno1.length = gimple_num_ops (stmt) - 1;
1170 vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
1171 for (i = 0; i < vno1.length; ++i)
1172 vno1.op[i] = gimple_op (stmt, i + 1);
1173 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1174 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1176 if (!slot && current_info == optimistic_info)
1177 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1182 *vnresult = (vn_nary_op_t)*slot;
1183 return ((vn_nary_op_t)*slot)->result;
1186 /* Insert a n-ary operation into the current hash table using it's
1187 pieces. Return the vn_nary_op_t structure we created and put in
1191 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
1192 tree type, tree op0,
1193 tree op1, tree op2, tree op3,
1195 unsigned int value_id)
1200 vno1 = (vn_nary_op_t) obstack_alloc (¤t_info->nary_obstack,
1201 (sizeof (struct vn_nary_op_s)
1202 - sizeof (tree) * (4 - length)));
1203 vno1->value_id = value_id;
1204 vno1->opcode = code;
1205 vno1->length = length;
1215 vno1->result = result;
1216 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1217 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1219 gcc_assert (!*slot);
1226 /* Insert OP into the current hash table with a value number of
1227 RESULT. Return the vn_nary_op_t structure we created and put in
1231 vn_nary_op_insert (tree op, tree result)
1233 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
1238 vno1 = (vn_nary_op_t) obstack_alloc (¤t_info->nary_obstack,
1239 (sizeof (struct vn_nary_op_s)
1240 - sizeof (tree) * (4 - length)));
1241 vno1->value_id = VN_INFO (result)->value_id;
1242 vno1->opcode = TREE_CODE (op);
1243 vno1->length = length;
1244 vno1->type = TREE_TYPE (op);
1245 for (i = 0; i < vno1->length; ++i)
1246 vno1->op[i] = TREE_OPERAND (op, i);
1247 vno1->result = result;
1248 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1249 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1251 gcc_assert (!*slot);
1257 /* Insert the rhs of STMT into the current hash table with a value number of
1261 vn_nary_op_insert_stmt (gimple stmt, tree result)
1263 unsigned length = gimple_num_ops (stmt) - 1;
1268 vno1 = (vn_nary_op_t) obstack_alloc (¤t_info->nary_obstack,
1269 (sizeof (struct vn_nary_op_s)
1270 - sizeof (tree) * (4 - length)));
1271 vno1->value_id = VN_INFO (result)->value_id;
1272 vno1->opcode = gimple_assign_rhs_code (stmt);
1273 vno1->length = length;
1274 vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
1275 for (i = 0; i < vno1->length; ++i)
1276 vno1->op[i] = gimple_op (stmt, i + 1);
1277 vno1->result = result;
1278 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1279 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1281 gcc_assert (!*slot);
1287 /* Compute a hashcode for PHI operation VP1 and return it. */
1289 static inline hashval_t
1290 vn_phi_compute_hash (vn_phi_t vp1)
1292 hashval_t result = 0;
/* Seed with the basic-block index so phis in different blocks tend to
   hash differently.  */
1296 result = vp1->block->index;
1298 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
/* VN_TOP args are equivalent to everything; presumably they are
   skipped here (the skip statement is not visible in this listing --
   TODO confirm against the full source).  */
1300 if (phi1op == VN_TOP)
1302 result += iterative_hash_expr (phi1op, result);
1308 /* Return the computed hashcode for phi operation P1. */
1311 vn_phi_hash (const void *p1)
1313 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
/* The hash was precomputed by vn_phi_compute_hash at build time.  */
1314 return vp1->hashcode;
1317 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
1320 vn_phi_eq (const void *p1, const void *p2)
1322 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1323 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
/* Phis can only be equal when they live in the same basic block.  */
1325 if (vp1->block == vp2->block)
1330 /* Any phi in the same block will have it's arguments in the
1331 same edge order, because of how we store phi nodes. */
1332 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1334 tree phi2op = VEC_index (tree, vp2->phiargs, i);
/* VN_TOP is equivalent to anything, so a pair containing it does not
   disqualify equality.  */
1335 if (phi1op == VN_TOP || phi2op == VN_TOP)
1337 if (!expressions_equal_p (phi1op, phi2op))
/* Scratch vector reused by vn_phi_lookup so each lookup does not have
   to allocate a fresh phi-argument vector.  */
1345 static VEC(tree, heap) *shared_lookup_phiargs;
1347 /* Lookup PHI in the current hash table, and return the resulting
1348 value number if it exists in the hash table. Return NULL_TREE if
1349 it does not exist in the hash table. */
1352 vn_phi_lookup (gimple phi)
1355 struct vn_phi_s vp1;
/* Reuse the shared scratch vector for the canonicalized arguments.  */
1358 VEC_truncate (tree, shared_lookup_phiargs, 0);
1360 /* Canonicalize the SSA_NAME's to their value number. */
1361 for (i = 0; i < gimple_phi_num_args (phi); i++)
1363 tree def = PHI_ARG_DEF (phi, i);
1364 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1365 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
1367 vp1.phiargs = shared_lookup_phiargs;
1368 vp1.block = gimple_bb (phi);
1369 vp1.hashcode = vn_phi_compute_hash (&vp1);
1370 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
/* When value numbering optimistically, fall back to the valid table
   if the optimistic table has no entry.  */
1372 if (!slot && current_info == optimistic_info)
1373 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
1377 return ((vn_phi_t)*slot)->result;
1380 /* Insert PHI into the current hash table with a value number of
1384 vn_phi_insert (gimple phi, tree result)
1387 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
/* Unlike lookup, the inserted entry owns a freshly built argument
   vector instead of the shared scratch one.  */
1389 VEC (tree, heap) *args = NULL;
1391 /* Canonicalize the SSA_NAME's to their value number. */
1392 for (i = 0; i < gimple_phi_num_args (phi); i++)
1394 tree def = PHI_ARG_DEF (phi, i);
1395 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1396 VEC_safe_push (tree, heap, args, def);
1398 vp1->value_id = VN_INFO (result)->value_id;
1399 vp1->phiargs = args;
1400 vp1->block = gimple_bb (phi);
1401 vp1->result = result;
1402 vp1->hashcode = vn_phi_compute_hash (vp1);
1404 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1407 /* Because we iterate over phi operations more than once, it's
1408 possible the slot might already exist here, hence no assert.*/
1414 /* Print set of components in strongly connected component SCC to OUT. */
/* Debug helper used when detailed dumping is enabled.  */
1417 print_scc (FILE *out, VEC (tree, heap) *scc)
1422 fprintf (out, "SCC consists of: ");
1423 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1425 print_generic_expr (out, var, 0);
1428 fprintf (out, "\n");
1431 /* Set the value number of FROM to TO, return true if it has changed
1435 set_ssa_val_to (tree from, tree to)
/* NOTE(review): the guard below is only partially visible; it appears
   to reject SSA names occurring in abnormal phis as value numbers --
   confirm against the full source.  */
1440 && TREE_CODE (to) == SSA_NAME
1441 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1444 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1445 and invariants. So assert that here. */
1446 gcc_assert (to != NULL_TREE
1448 || TREE_CODE (to) == SSA_NAME
1449 || is_gimple_min_invariant (to)));
1451 if (dump_file && (dump_flags & TDF_DETAILS))
1453 fprintf (dump_file, "Setting value number of ");
1454 print_generic_expr (dump_file, from, 0);
1455 fprintf (dump_file, " to ");
1456 print_generic_expr (dump_file, to, 0);
1457 fprintf (dump_file, "\n");
1460 currval = SSA_VAL (from);
/* Record a change only when the new value differs structurally from
   the current one.  */
1462 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1464 SSA_VAL (from) = to;
1470 /* Set all definitions in STMT to value number to themselves.
1471 Return true if a value number changed. */
1474 defs_to_varying (gimple stmt)
1476 bool changed = false;
1480 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1482 tree def = DEF_FROM_PTR (defp);
/* A name value-numbered to itself is "varying" in SCCVN terms.  */
1484 VN_INFO (def)->use_processed = true;
1485 changed |= set_ssa_val_to (def, def);
/* Forward declarations; the definitions appear later in this file.  */
1490 static bool expr_has_constants (tree expr);
1491 static tree try_to_simplify (gimple stmt);
1493 /* Visit a copy between LHS and RHS, return true if the value number
1497 visit_copy (tree lhs, tree rhs)
1499 /* Follow chains of copies to their destination. */
1500 while (SSA_VAL (rhs) != rhs && TREE_CODE (SSA_VAL (rhs)) == SSA_NAME)
1501 rhs = SSA_VAL (rhs);
1503 /* The copy may have a more interesting constant filled expression
1504 (we don't, since we know our RHS is just an SSA name). */
1505 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1506 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
/* Finally, record RHS as LHS's value number.  */
1508 return set_ssa_val_to (lhs, rhs);
1511 /* Visit a unary operator RHS, value number it, and return true if the
1512 value number of LHS has changed as a result. */
1515 visit_unary_op (tree lhs, gimple stmt)
1517 bool changed = false;
1518 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
/* If an equivalent expression was already numbered, reuse its value;
   otherwise value LHS to itself and record the expression.  */
1522 changed = set_ssa_val_to (lhs, result);
1526 changed = set_ssa_val_to (lhs, lhs);
1527 vn_nary_op_insert_stmt (stmt, lhs);
1533 /* Visit a binary operator RHS, value number it, and return true if the
1534 value number of LHS has changed as a result. */
1537 visit_binary_op (tree lhs, gimple stmt)
1539 bool changed = false;
1540 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
/* Same scheme as visit_unary_op: reuse a prior value number if one
   exists, else self-value LHS and remember the expression.  */
1544 changed = set_ssa_val_to (lhs, result);
1548 changed = set_ssa_val_to (lhs, lhs);
1549 vn_nary_op_insert_stmt (stmt, lhs);
1555 /* Visit a call STMT storing into LHS. Return true if the value number
1556 of the LHS has changed as a result. */
1559 visit_reference_op_call (tree lhs, gimple stmt)
1561 bool changed = false;
1562 struct vn_reference_s vr1;
/* Build a reference lookup key from the call's vuses and arguments.  */
1565 vr1.vuses = valueize_vuses (shared_vuses_from_stmt (stmt));
1566 vr1.operands = valueize_refs (shared_reference_ops_from_call (stmt));
1567 vr1.hashcode = vn_reference_compute_hash (&vr1);
1568 result = vn_reference_lookup_1 (&vr1, NULL);
1571 changed = set_ssa_val_to (lhs, result);
1572 if (TREE_CODE (result) == SSA_NAME
1573 && VN_INFO (result)->has_constants)
1574 VN_INFO (lhs)->has_constants = true;
/* No earlier equivalent call: self-value LHS and insert a fresh entry
   built from copied (not shared) operand vectors.  */
1580 changed = set_ssa_val_to (lhs, lhs);
1581 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
1582 vr2->vuses = valueize_vuses (copy_vuses_from_stmt (stmt));
1583 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
1584 vr2->hashcode = vr1.hashcode;
1586 slot = htab_find_slot_with_hash (current_info->references,
1587 vr2, vr2->hashcode, INSERT);
/* An occupied slot is replaced; release the old reference first.  */
1589 free_reference (*slot);
1596 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1597 and return true if the value number of the LHS has changed as a result. */
1600 visit_reference_op_load (tree lhs, tree op, gimple stmt)
1602 bool changed = false;
1603 tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true,
1606 /* We handle type-punning through unions by value-numbering based
1607 on offset and size of the access. Be prepared to handle a
1608 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
1610 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
1612 /* We will be setting the value number of lhs to the value number
1613 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
1614 So first simplify and lookup this expression to see if it
1615 is already available. */
1616 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
1618 && !is_gimple_min_invariant (val)
1619 && TREE_CODE (val) != SSA_NAME)
1621 tree tem = try_to_simplify (stmt);
1626 if (!is_gimple_min_invariant (val)
1627 && TREE_CODE (val) != SSA_NAME)
1628 result = vn_nary_op_lookup (val, NULL);
1629 /* If the expression is not yet available, value-number lhs to
1630 a new SSA_NAME we create. */
1631 if (!result && may_insert)
1633 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
1634 /* Initialize value-number information properly. */
1635 VN_INFO_GET (result)->valnum = result;
1636 VN_INFO (result)->value_id = get_next_value_id ();
1637 VN_INFO (result)->expr = val;
1638 VN_INFO (result)->has_constants = expr_has_constants (val);
1639 VN_INFO (result)->needs_insertion = true;
1640 /* As all "inserted" statements are singleton SCCs, insert
1641 to the valid table. This is strictly needed to
1642 avoid re-generating new value SSA_NAMEs for the same
1643 expression during SCC iteration over and over (the
1644 optimistic table gets cleared after each iteration).
1645 We do not need to insert into the optimistic table, as
1646 lookups there will fall back to the valid table. */
1647 if (current_info == optimistic_info)
/* Temporarily switch tables so the insertion lands in the valid
   table even while iterating optimistically.  */
1649 current_info = valid_info;
1650 vn_nary_op_insert (val, result);
1651 current_info = optimistic_info;
1654 vn_nary_op_insert (val, result);
1655 if (dump_file && (dump_flags & TDF_DETAILS))
1657 fprintf (dump_file, "Inserting name ");
1658 print_generic_expr (dump_file, result, 0);
1659 fprintf (dump_file, " for expression ");
1660 print_generic_expr (dump_file, val, 0);
1661 fprintf (dump_file, "\n");
/* Found (or synthesized) a value for the load: propagate it, along
   with constant-ness and the simplified expression.  */
1668 changed = set_ssa_val_to (lhs, result);
1669 if (TREE_CODE (result) == SSA_NAME
1670 && VN_INFO (result)->has_constants)
1672 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
1673 VN_INFO (lhs)->has_constants = true;
/* No prior value for this load: self-value LHS and remember the
   reference for later lookups.  */
1678 changed = set_ssa_val_to (lhs, lhs);
1679 vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
1686 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1687 and return true if the value number of the LHS has changed as a result. */
1690 visit_reference_op_store (tree lhs, tree op, gimple stmt)
1692 bool changed = false;
1694 bool resultsame = false;
1696 /* First we want to lookup using the *vuses* from the store and see
1697 if there the last store to this location with the same address
1700 The vuses represent the memory state before the store. If the
1701 memory state, address, and value of the store is the same as the
1702 last store to this location, then this store will produce the
1703 same memory state as that store.
1705 In this case the vdef versions for this store are value numbered to those
1706 vuse versions, since they represent the same memory state after
1709 Otherwise, the vdefs for the store are used when inserting into
1710 the table, since the store generates a new memory state. */
1712 result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false,
1717 if (TREE_CODE (result) == SSA_NAME)
1718 result = SSA_VAL (result);
1719 if (TREE_CODE (op) == SSA_NAME)
/* Compare the canonicalized stored value with the prior store's.  */
1721 resultsame = expressions_equal_p (result, op);
1724 if (!result || !resultsame)
1726 VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
1730 if (dump_file && (dump_flags & TDF_DETAILS))
1732 fprintf (dump_file, "No store match\n");
1733 fprintf (dump_file, "Value numbering store ");
1734 print_generic_expr (dump_file, lhs, 0);
1735 fprintf (dump_file, " to ");
1736 print_generic_expr (dump_file, op, 0);
1737 fprintf (dump_file, "\n");
1739 /* Have to set value numbers before insert, since insert is
1740 going to valueize the references in-place. */
1741 for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
1743 VN_INFO (vdef)->use_processed = true;
1744 changed |= set_ssa_val_to (vdef, vdef);
1747 /* Do not insert structure copies into the tables. */
1748 if (is_gimple_min_invariant (op)
1749 || is_gimple_reg (op))
1750 vn_reference_insert (lhs, op, vdefs);
1754 /* We had a match, so value number the vdefs to have the value
1755 number of the vuses they came from. */
1756 ssa_op_iter op_iter;
1760 if (dump_file && (dump_flags & TDF_DETAILS))
1761 fprintf (dump_file, "Store matched earlier value,"
1762 "value numbering store vdefs to matching vuses.\n")
1764 FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
1766 tree def = DEF_FROM_PTR (var);
1769 /* Uh, if the vuse is a multiuse, we can't really do much
1770 here, sadly, since we don't know which value number of
1771 which vuse to use. */
1772 if (VUSE_VECT_NUM_ELEM (*vv) != 1)
1775 use = VUSE_ELEMENT_VAR (*vv, 0);
1777 VN_INFO (def)->use_processed = true;
1778 changed |= set_ssa_val_to (def, SSA_VAL (use));
1785 /* Visit and value number PHI, return true if the value number
1789 visit_phi (gimple phi)
1791 bool changed = false;
1793 tree sameval = VN_TOP;
1794 bool allsame = true;
1797 /* TODO: We could check for this in init_sccvn, and replace this
1798 with a gcc_assert. */
1799 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1800 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1802 /* See if all non-TOP arguments have the same value. TOP is
1803 equivalent to everything, so we can ignore it. */
1804 for (i = 0; i < gimple_phi_num_args (phi); i++)
1806 tree def = PHI_ARG_DEF (phi, i);
1808 if (TREE_CODE (def) == SSA_NAME)
1809 def = SSA_VAL (def);
1812 if (sameval == VN_TOP)
1818 if (!expressions_equal_p (def, sameval))
1826 /* If all value numbered to the same value, the phi node has that
1830 if (is_gimple_min_invariant (sameval))
1832 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1833 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1837 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1838 VN_INFO (PHI_RESULT (phi))->expr = sameval;
/* Degenerate phi: forward to the common value, as a copy when it is
   an SSA name, as a direct value otherwise.  */
1841 if (TREE_CODE (sameval) == SSA_NAME)
1842 return visit_copy (PHI_RESULT (phi), sameval);
1844 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1847 /* Otherwise, see if it is equivalent to a phi node in this block. */
1848 result = vn_phi_lookup (phi);
1851 if (TREE_CODE (result) == SSA_NAME)
1852 changed = visit_copy (PHI_RESULT (phi), result);
1854 changed = set_ssa_val_to (PHI_RESULT (phi), result);
/* No equivalent phi found: record this one and self-value it.  */
1858 vn_phi_insert (phi, PHI_RESULT (phi));
1859 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1860 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1861 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1867 /* Return true if EXPR contains constants. */
1870 expr_has_constants (tree expr)
/* Dispatch on the tree-code class: unary ops check one operand,
   binary ops check both, references/declarations are skipped.  */
1872 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1875 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
1878 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1879 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1880 /* Constants inside reference ops are rarely interesting, but
1881 it can take a lot of looking to find them. */
1883 case tcc_declaration:
1886 return is_gimple_min_invariant (expr);
1891 /* Return true if STMT contains constants. */
1894 stmt_has_constants (gimple stmt)
/* Only assignments carry interesting rhs operands here.  */
1896 if (gimple_code (stmt) != GIMPLE_ASSIGN)
1899 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
1901 case GIMPLE_UNARY_RHS:
1902 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
1904 case GIMPLE_BINARY_RHS:
1905 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
1906 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
1907 case GIMPLE_SINGLE_RHS:
1908 /* Constants inside reference ops are rarely interesting, but
1909 it can take a lot of looking to find them. */
1910 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
1917 /* Replace SSA_NAMES in expr with their value numbers, and return the
1919 This is performed in place. */
1922 valueize_expr (tree expr)
1924 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
/* Unary case: substitute operand 0 only; VN_TOP values are kept as-is
   because TOP is not a usable replacement.  */
1927 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1928 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1929 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
/* Binary case: substitute both operands.  */
1932 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
1933 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
1934 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
1935 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
1936 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
1937 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
1945 /* Simplify the binary expression RHS, and return the result if
1949 simplify_binary_expression (gimple stmt)
1951 tree result = NULL_TREE;
1952 tree op0 = gimple_assign_rhs1 (stmt);
1953 tree op1 = gimple_assign_rhs2 (stmt);
1955 /* This will not catch every single case we could combine, but will
1956 catch those with constants. The goal here is to simultaneously
1957 combine constants between expressions, but avoid infinite
1958 expansion of expressions during simplification. */
1959 if (TREE_CODE (op0) == SSA_NAME)
/* Expand op0 when it may fold with constants (or for comparisons),
   otherwise just canonicalize it to its value number.  */
1961 if (VN_INFO (op0)->has_constants
1962 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
1963 op0 = valueize_expr (vn_get_expr_for (op0));
1964 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
1965 op0 = SSA_VAL (op0);
1968 if (TREE_CODE (op1) == SSA_NAME)
1970 if (VN_INFO (op1)->has_constants)
1971 op1 = valueize_expr (vn_get_expr_for (op1));
1972 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
1973 op1 = SSA_VAL (op1);
1976 /* Avoid folding if nothing changed. */
1977 if (op0 == gimple_assign_rhs1 (stmt)
1978 && op1 == gimple_assign_rhs2 (stmt))
/* Defer overflow warnings: folding here may not correspond to code
   the user actually wrote.  */
1981 fold_defer_overflow_warnings ();
1983 result = fold_binary (gimple_assign_rhs_code (stmt),
1984 TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);
1986 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
1989 /* Make sure result is not a complex expression consisting
1990 of operators of operators (IE (a + b) + (a + c))
1991 Otherwise, we will end up with unbounded expressions if
1992 fold does anything at all. */
1993 if (result && valid_gimple_rhs_p (result))
1999 /* Simplify the unary expression RHS, and return the result if
2003 simplify_unary_expression (gimple stmt)
2005 tree result = NULL_TREE;
2006 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2008 /* We handle some tcc_reference codes here that are all
2009 GIMPLE_ASSIGN_SINGLE codes. */
2010 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2011 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2012 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
/* For these codes the interesting operand is one level down.  */
2013 op0 = TREE_OPERAND (op0, 0);
2015 if (TREE_CODE (op0) != SSA_NAME)
2019 if (VN_INFO (op0)->has_constants)
2020 op0 = valueize_expr (vn_get_expr_for (op0));
2021 else if (gimple_assign_cast_p (stmt)
2022 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2023 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2024 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2026 /* We want to do tree-combining on conversion-like expressions.
2027 Make sure we feed only SSA_NAMEs or constants to fold though. */
2028 tree tem = valueize_expr (vn_get_expr_for (op0));
2029 if (UNARY_CLASS_P (tem)
2030 || BINARY_CLASS_P (tem)
2031 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2032 || TREE_CODE (tem) == SSA_NAME
2033 || is_gimple_min_invariant (tem))
2037 /* Avoid folding if nothing changed, but remember the expression. */
2038 if (op0 == orig_op0)
2041 result = fold_unary (gimple_assign_rhs_code (stmt),
2042 gimple_expr_type (stmt), op0);
/* Strip conversions that do not change semantics before validating
   the folded result as a gimple rhs.  */
2045 STRIP_USELESS_TYPE_CONVERSION (result);
2046 if (valid_gimple_rhs_p (result))
2053 /* Try to simplify RHS using equivalences and constant folding. */
2056 try_to_simplify (gimple stmt)
2060 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
2061 in this case, there is no point in doing extra work. */
2062 if (gimple_assign_copy_p (stmt)
2063 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
/* Dispatch on the class of the rhs code.  */
2066 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2068 case tcc_declaration:
2069 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2075 /* Do not do full-blown reference lookup here, but simplify
2076 reads from constant aggregates. */
2077 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2081 /* Fallthrough for some codes that can operate on registers. */
2082 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2083 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2084 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2086 /* We could do a little more with unary ops, if they expand
2087 into binary ops, but it's debatable whether it is worth it. */
2089 return simplify_unary_expression (stmt);
2091 case tcc_comparison:
2093 return simplify_binary_expression (stmt);
2102 /* Visit and value number USE, return true if the value number
2106 visit_use (tree use)
2108 bool changed = false;
2109 gimple stmt = SSA_NAME_DEF_STMT (use);
2111 VN_INFO (use)->use_processed = true;
2113 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2114 if (dump_file && (dump_flags & TDF_DETAILS)
2115 && !SSA_NAME_IS_DEFAULT_DEF (use))
2117 fprintf (dump_file, "Value numbering ");
2118 print_generic_expr (dump_file, use, 0);
2119 fprintf (dump_file, " stmt = ");
2120 print_gimple_stmt (dump_file, stmt, 0, 0);
2123 /* Handle uninitialized uses. */
2124 if (SSA_NAME_IS_DEFAULT_DEF (use))
2125 changed = set_ssa_val_to (use, use);
/* Dispatch on the defining statement: phi, un-analyzable statements,
   assignments, then calls.  */
2128 if (gimple_code (stmt) == GIMPLE_PHI)
2129 changed = visit_phi (stmt);
2130 else if (!gimple_has_lhs (stmt)
2131 || gimple_has_volatile_ops (stmt)
2132 || stmt_could_throw_p (stmt))
2133 changed = defs_to_varying (stmt);
2134 else if (is_gimple_assign (stmt))
2136 tree lhs = gimple_assign_lhs (stmt);
2139 /* Shortcut for copies. Simplifying copies is pointless,
2140 since we copy the expression and value they represent. */
2141 if (gimple_assign_copy_p (stmt)
2142 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2143 && TREE_CODE (lhs) == SSA_NAME)
2145 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2148 simplified = try_to_simplify (stmt);
2151 if (dump_file && (dump_flags & TDF_DETAILS))
2153 fprintf (dump_file, "RHS ");
2154 print_gimple_expr (dump_file, stmt, 0, 0);
2155 fprintf (dump_file, " simplified to ");
2156 print_generic_expr (dump_file, simplified, 0);
2157 if (TREE_CODE (lhs) == SSA_NAME)
2158 fprintf (dump_file, " has constants %d\n",
2159 expr_has_constants (simplified));
2161 fprintf (dump_file, "\n");
2164 /* Setting value numbers to constants will occasionally
2165 screw up phi congruence because constants are not
2166 uniquely associated with a single ssa name that can be
2169 && is_gimple_min_invariant (simplified)
2170 && TREE_CODE (lhs) == SSA_NAME)
2172 VN_INFO (lhs)->expr = simplified;
2173 VN_INFO (lhs)->has_constants = true;
2174 changed = set_ssa_val_to (lhs, simplified);
/* Simplified to another SSA name: treat as a copy.  */
2178 && TREE_CODE (simplified) == SSA_NAME
2179 && TREE_CODE (lhs) == SSA_NAME)
2181 changed = visit_copy (lhs, simplified);
2184 else if (simplified)
2186 if (TREE_CODE (lhs) == SSA_NAME)
2188 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2189 /* We have to unshare the expression or else
2190 valuizing may change the IL stream. */
2191 VN_INFO (lhs)->expr = unshare_expr (simplified);
2194 else if (stmt_has_constants (stmt)
2195 && TREE_CODE (lhs) == SSA_NAME)
2196 VN_INFO (lhs)->has_constants = true;
2197 else if (TREE_CODE (lhs) == SSA_NAME)
2199 /* We reset expr and constantness here because we may
2200 have been value numbering optimistically, and
2201 iterating. They may become non-constant in this case,
2202 even if they were optimistically constant. */
2204 VN_INFO (lhs)->has_constants = false;
2205 VN_INFO (lhs)->expr = NULL_TREE;
2208 if (TREE_CODE (lhs) == SSA_NAME
2209 /* We can substitute SSA_NAMEs that are live over
2210 abnormal edges with their constant value. */
2211 && !(gimple_assign_copy_p (stmt)
2212 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2214 && is_gimple_min_invariant (simplified))
2215 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2216 changed = defs_to_varying (stmt);
2217 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2219 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2221 else if (TREE_CODE (lhs) == SSA_NAME)
2223 if ((gimple_assign_copy_p (stmt)
2224 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2226 && is_gimple_min_invariant (simplified)))
2228 VN_INFO (lhs)->has_constants = true;
2230 changed = set_ssa_val_to (lhs, simplified);
2232 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
/* Otherwise classify by the shape of the rhs.  */
2236 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2238 case GIMPLE_UNARY_RHS:
2239 changed = visit_unary_op (lhs, stmt);
2241 case GIMPLE_BINARY_RHS:
2242 changed = visit_binary_op (lhs, stmt);
2244 case GIMPLE_SINGLE_RHS:
2245 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2247 case tcc_declaration:
2249 changed = visit_reference_op_load
2250 (lhs, gimple_assign_rhs1 (stmt), stmt);
2252 case tcc_expression:
2253 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2255 changed = visit_unary_op (lhs, stmt);
2260 changed = defs_to_varying (stmt);
2264 changed = defs_to_varying (stmt);
2270 changed = defs_to_varying (stmt);
2272 else if (is_gimple_call (stmt))
2274 tree lhs = gimple_call_lhs (stmt);
2276 /* ??? We could try to simplify calls. */
2278 if (stmt_has_constants (stmt)
2279 && TREE_CODE (lhs) == SSA_NAME)
2280 VN_INFO (lhs)->has_constants = true;
2281 else if (TREE_CODE (lhs) == SSA_NAME)
2283 /* We reset expr and constantness here because we may
2284 have been value numbering optimistically, and
2285 iterating. They may become non-constant in this case,
2286 even if they were optimistically constant. */
2287 VN_INFO (lhs)->has_constants = false;
2288 VN_INFO (lhs)->expr = NULL_TREE;
2291 if (TREE_CODE (lhs) == SSA_NAME
2292 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2293 changed = defs_to_varying (stmt);
2294 /* ??? We should handle stores from calls. */
2295 else if (TREE_CODE (lhs) == SSA_NAME)
/* Only pure/const calls are safe to value number as references.  */
2297 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2298 changed = visit_reference_op_call (lhs, stmt)
2300 changed = defs_to_varying (stmt);
2303 changed = defs_to_varying (stmt);
2310 /* Compare two operands by reverse postorder index */
2313 compare_ops (const void *pa, const void *pb)
2315 const tree opa = *((const tree *)pa);
2316 const tree opb = *((const tree *)pb);
2317 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2318 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
/* Names with no defining statement (default defs) sort first.  */
2322 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2324 else if (gimple_nop_p (opstmta))
2326 else if (gimple_nop_p (opstmtb))
2329 bba = gimple_bb (opstmta);
2330 bbb = gimple_bb (opstmtb);
/* Within a block, phis precede ordinary statements; ordinary
   statements are ordered by their uids.  */
2341 if (gimple_code (opstmta) == GIMPLE_PHI
2342 && gimple_code (opstmtb) == GIMPLE_PHI)
2344 else if (gimple_code (opstmta) == GIMPLE_PHI)
2346 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2348 return gimple_uid (opstmta) - gimple_uid (opstmtb);
/* Different blocks: order by the blocks' RPO numbers.  */
2350 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2353 /* Sort an array containing members of a strongly connected component
2354 SCC so that the members are ordered by RPO number.
2355 This means that when the sort is complete, iterating through the
2356 array will give you the members in RPO order. */
2359 sort_scc (VEC (tree, heap) *scc)
/* qsort over the vector's backing array using compare_ops.  */
2361 qsort (VEC_address (tree, scc),
2362 VEC_length (tree, scc),
2367 /* Process a strongly connected component in the SSA graph. */
2370 process_scc (VEC (tree, heap) *scc)
2372 /* If the SCC has a single member, just visit it. */
2374 if (VEC_length (tree, scc) == 1)
2376 tree use = VEC_index (tree, scc, 0)
2377 if (!VN_INFO (use)->use_processed)
2384 unsigned int iterations = 0;
2385 bool changed = true;
2387 /* Iterate over the SCC with the optimistic table until it stops
2389 current_info = optimistic_info;
2394 /* As we are value-numbering optimistically we have to
2395 clear the expression tables and the simplified expressions
2396 in each iteration until we converge. */
2397 htab_empty (optimistic_info->nary);
2398 htab_empty (optimistic_info->phis);
2399 htab_empty (optimistic_info->references);
2400 obstack_free (&optimistic_info->nary_obstack, NULL);
2401 gcc_obstack_init (&optimistic_info->nary_obstack);
2402 empty_alloc_pool (optimistic_info->phis_pool);
2403 empty_alloc_pool (optimistic_info->references_pool);
2404 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2405 VN_INFO (var)->expr = NULL_TREE;
2406 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2407 changed |= visit_use (var);
2410 statistics_histogram_event (cfun, "SCC iterations", iterations);
2412 /* Finally, visit the SCC once using the valid table. */
2413 current_info = valid_info;
2414 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
/* Instantiate a heap-allocated VEC of ssa_op_iter, used by the
   iterative DFS below to save/restore operand-walk state.  */
2419 DEF_VEC_O(ssa_op_iter);
2420 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2422 /* Pop the components of the found SCC for NAME off the SCC stack
2423 and process them. Returns true if all went well, false if
2424 we run into resource limits. */
2427 extract_and_process_scc_for_name (tree name)
2429 VEC (tree, heap) *scc = NULL;
2432 /* Found an SCC, pop the components off the SCC stack and
/* NAME is the SCC root; keep popping until it comes off.  */
2436 x = VEC_pop (tree, sccstack);
2438 VN_INFO (x)->on_sccstack = false;
2439 VEC_safe_push (tree, heap, scc, x);
2440 } while (x != name);
2442 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
2443 if (VEC_length (tree, scc)
2444 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2447 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2448 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2449 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
/* Multi-member SCCs are sorted into RPO order before processing.  */
2453 if (VEC_length (tree, scc) > 1)
2456 if (dump_file && (dump_flags & TDF_DETAILS))
2457 print_scc (dump_file, scc);
2461 VEC_free (tree, heap, scc);
2466 /* Depth first search on NAME to discover and process SCC's in the SSA
2468 Execution of this algorithm relies on the fact that the SCC's are
2469 popped off the stack in topological order.
2470 Returns true if successful, false if we stopped processing SCC's due
2471 to resource constraints. */
/* Iterative Tarjan-style walk: explicit stacks (namevec/itervec)
   replace recursion to avoid blowing the call stack.  */
2476 VEC(ssa_op_iter, heap) *itervec = NULL;
2477 VEC(tree, heap) *namevec = NULL;
2478 use_operand_p usep = NULL;
/* Assign the DFS number and low-link for NAME and push it.  */
2485 VN_INFO (name)->dfsnum = next_dfs_num++;
2486 VN_INFO (name)->visited = true;
2487 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
2489 VEC_safe_push (tree, heap, sccstack, name);
2490 VN_INFO (name)->on_sccstack = true;
2491 defstmt = SSA_NAME_DEF_STMT (name);
2493 /* Recursively DFS on our operands, looking for SCC's. */
2494 if (!gimple_nop_p (defstmt))
2496 /* Push a new iterator. */
2497 if (gimple_code (defstmt) == GIMPLE_PHI)
2498 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES)
2500 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
2507 /* If we are done processing uses of a name, go up the stack
2508 of iterators and process SCCs as we found them. */
2509 if (op_iter_done (&iter))
2511 /* See if we found an SCC. */
2512 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
2513 if (!extract_and_process_scc_for_name (name))
2515 VEC_free (tree, heap, namevec);
2516 VEC_free (ssa_op_iter, heap, itervec);
2520 /* Check if we are done. */
2521 if (VEC_empty (tree, namevec))
2523 VEC_free (tree, heap, namevec);
2524 VEC_free (ssa_op_iter, heap, itervec);
2528 /* Restore the last use walker and continue walking there. */
2530 name = VEC_pop (tree, namevec);
2531 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
2532 sizeof (ssa_op_iter));
2533 VEC_pop (ssa_op_iter, itervec);
2534 goto continue_walking;
2537 use = USE_FROM_PTR (usep);
2539 /* Since we handle phi nodes, we will sometimes get
2540 invariants in the use expression. */
2541 if (TREE_CODE (use) == SSA_NAME)
2543 if (! (VN_INFO (use)->visited))
2545 /* Recurse by pushing the current use walking state on
2546 the stack and starting over. */
2547 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
2548 VEC_safe_push(tree, heap, namevec, name);
/* Back from the simulated recursion: fold the child's low-link
   into ours.  */
2553 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
2554 VN_INFO (use)->low);
/* Back-edge to a name still on the SCC stack updates low-link.  */
2556 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
2557 && VN_INFO (use)->on_sccstack)
2559 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
2560 VN_INFO (name)->low);
2564 usep = op_iter_next_use (&iter);
2568 /* Allocate a value number table. */
2571 allocate_vn_table (vn_tables_t table)
/* Three hash tables (phis, n-ary ops, references) plus the backing
   allocators: an obstack for n-ary ops and pools for the rest.  */
2573 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2574 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2575 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
2578 gcc_obstack_init (&table->nary_obstack);
2579 table->phis_pool = create_alloc_pool ("VN phis",
2580 sizeof (struct vn_phi_s),
2582 table->references_pool = create_alloc_pool ("VN references",
2583 sizeof (struct vn_reference_s),
2587 /* Free a value number table. */
/* Release everything allocate_vn_table created for TABLE: the three
   hash tables first, then the obstack and alloc-pools that back their
   entries.  */
2590 free_vn_table (vn_tables_t table)
2592 htab_delete (table->phis);
2593 htab_delete (table->nary);
2594 htab_delete (table->references);
/* Freeing with a NULL object releases the entire obstack at once.  */
2595 obstack_free (&table->nary_obstack, NULL);
2596 free_alloc_pool (table->phis_pool);
2597 free_alloc_pool (table->references_pool);
/* Body of the SCCVN initializer: builds the constant->value-id map,
   the per-SSA-name auxiliary table, a block->RPO-number mapping, the
   VN_TOP lattice sentinel, and the valid/optimistic hash tables.
   NOTE(review): the function header itself is elided from this
   excerpt; the surrounding declarations (i, j) are likewise not
   visible.  */
2605 int *rpo_numbers_temp;
2607 calculate_dominance_info (CDI_DOMINATORS);
/* Constants get their own value ids; the bitmap records which ids
   denote constants.  */
2609 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
2612 constant_value_ids = BITMAP_ALLOC (NULL);
/* One aux slot per SSA name (plus one for safety at index
   num_ssa_names).  */
2617 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2618 /* VEC_alloc doesn't actually grow it to the right size, it just
2619 preallocates the space to do so. */
2620 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2621 gcc_obstack_init (&vn_ssa_aux_obstack);
/* Scratch vectors shared by the lookup routines; (re)start empty.  */
2623 shared_lookup_phiargs = NULL;
2624 shared_lookup_vops = NULL;
2625 shared_lookup_references = NULL;
2626 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2627 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2628 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2630 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
2631 the i'th block in RPO order is bb. We want to map bb's to RPO
2632 numbers, so we need to rearrange this array. */
2633 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2634 rpo_numbers[rpo_numbers_temp[j]] = j;
2636 XDELETE (rpo_numbers_temp)
2636 is replaced below; see original — the temp array is freed here.
2636 XDELETE (rpo_numbers_temp);
/* VN_TOP is a fresh dummy variable used as the optimistic "unknown"
   (lattice top) value number.  */
2638 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2640 /* Create the VN_INFO structures, and initialize value numbers to
2642 for (i = 0; i < num_ssa_names; i++)
2644 tree name = ssa_name (i);
/* Every name starts at top with no expression and no value id.  */
2647 VN_INFO_GET (name)->valnum = VN_TOP;
2648 VN_INFO (name)->expr = NULL_TREE;
2649 VN_INFO (name)->value_id = 0;
2653 renumber_gimple_stmt_uids ();
2655 /* Create the valid and optimistic value numbering tables. */
2656 valid_info = XCNEW (struct vn_tables_s);
2657 allocate_vn_table (valid_info);
2658 optimistic_info = XCNEW (struct vn_tables_s);
2659 allocate_vn_table (optimistic_info);
/* Body of the SCCVN finalizer: releases every structure init_scc_vn
   built, in roughly the reverse order of construction.
   NOTE(review): the function header and loop-variable declarations
   are elided from this excerpt.  */
2667 htab_delete (constant_to_value_id);
2668 BITMAP_FREE (constant_value_ids);
2669 VEC_free (tree, heap, shared_lookup_phiargs);
2670 VEC_free (tree, gc, shared_lookup_vops);
2671 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2672 XDELETEVEC (rpo_numbers);
/* SSA names that value numbering itself created (needs_insertion)
   are no longer referenced; return them to the SSA name pool.  */
2674 for (i = 0; i < num_ssa_names; i++)
2676 tree name = ssa_name (i);
2678 && VN_INFO (name)->needs_insertion)
2679 release_ssa_name (name);
/* The aux structures all live on this obstack — free them in one go,
   then drop the pointer table itself.  */
2681 obstack_free (&vn_ssa_aux_obstack, NULL);
2682 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2684 VEC_free (tree, heap, sccstack);
2685 free_vn_table (valid_info);
2686 XDELETE (valid_info);
2687 free_vn_table (optimistic_info);
2688 XDELETE (optimistic_info);
2691 /* Set the value ids in the valid hash tables. */
/* Walk all three valid_info hash tables (n-ary operations, PHIs,
   references) and stamp each entry with the value id of its result:
   the SSA name's value id when the result is an SSA name, or a
   constant value id when the result is a gimple invariant.  Entries
   whose result is neither are left with their current id.  */
2694 set_hashtable_value_ids (void)
2701 /* Now set the value ids of the things we had put in the hash
2704 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
2705 vno, vn_nary_op_t, hi)
2709 if (TREE_CODE (vno->result) == SSA_NAME)
2710 vno->value_id = VN_INFO (vno->result)->value_id;
2711 else if (is_gimple_min_invariant (vno->result))
2712 vno->value_id = get_or_alloc_constant_value_id (vno->result);
/* Same stamping for PHI entries.  */
2716 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
2721 if (TREE_CODE (vp->result) == SSA_NAME)
2722 vp->value_id = VN_INFO (vp->result)->value_id;
2723 else if (is_gimple_min_invariant (vp->result))
2724 vp->value_id = get_or_alloc_constant_value_id (vp->result);
/* And for reference entries.  */
2728 FOR_EACH_HTAB_ELEMENT (valid_info->references,
2729 vr, vn_reference_t, hi)
2733 if (TREE_CODE (vr->result) == SSA_NAME)
2734 vr->value_id = VN_INFO (vr->result)->value_id;
2735 else if (is_gimple_min_invariant (vr->result))
2736 vr->value_id = get_or_alloc_constant_value_id (vr->result);
2741 /* Do SCCVN. Returns true if it finished, false if we bailed out
2742 due to resource constraints. */
/* Main driver.  MAY_INSERT_ARG sets the global may_insert flag.
   Phases: (1) value-number each parameter's default definition to
   itself, (2) DFS-walk every live SSA name to value-number the SCCs,
   (3) hand out value ids, (4) propagate ids until nothing changes,
   (5) record ids in the hash tables, (6) optionally dump.
   NOTE(review): several lines (declarations, the bail-out path that
   returns false, loop braces) are elided from this excerpt.  */
2745 run_scc_vn (bool may_insert_arg)
2749 bool changed = true;
2751 may_insert = may_insert_arg;
2754 current_info = valid_info;
/* A parameter's default def has no defining statement; its value
   number is itself.  */
2756 for (param = DECL_ARGUMENTS (current_function_decl);
2758 param = TREE_CHAIN (param))
2760 if (gimple_default_def (cfun, param) != NULL)
2762 tree def = gimple_default_def (cfun, param);
2763 SSA_VAL (def) = def;
/* Walk all not-yet-visited names that have uses; the DFS discovers
   and value-numbers SCCs as it goes.  */
2767 for (i = 1; i < num_ssa_names; ++i)
2769 tree name = ssa_name (i);
2771 && VN_INFO (name)->visited == false
2772 && !has_zero_uses (name))
2781 /* Initialize the value ids. */
2783 for (i = 1; i < num_ssa_names; ++i)
2785 tree name = ssa_name (i);
2789 info = VN_INFO (name);
/* A name that is its own value number leads an equivalence class and
   gets a fresh id; names valued to a constant share that constant's
   id.  */
2790 if (info->valnum == name)
2791 info->value_id = get_next_value_id ();
2792 else if (is_gimple_min_invariant (info->valnum))
2793 info->value_id = get_or_alloc_constant_value_id (info->valnum);
2796 /* Propagate until they stop changing. */
/* Fixed-point loop (controlled by CHANGED — the loop header is
   elided here): pull each name's id from its value number's id.  */
2800 for (i = 1; i < num_ssa_names; ++i)
2802 tree name = ssa_name (i);
2806 info = VN_INFO (name);
2807 if (TREE_CODE (info->valnum) == SSA_NAME
2808 && info->valnum != name
2809 && info->value_id != VN_INFO (info->valnum)->value_id)
2812 info->value_id = VN_INFO (info->valnum)->value_id;
2817 set_hashtable_value_ids ();
2819 if (dump_file && (dump_flags & TDF_DETAILS))
2821 fprintf (dump_file, "Value numbers:\n");
2822 for (i = 0; i < num_ssa_names; i++)
2824 tree name = ssa_name (i);
/* Only dump names whose value number differs from themselves —
   the interesting equivalences.  */
2826 && VN_INFO (name)->visited
2827 && SSA_VAL (name) != name
2829 print_generic_expr (dump_file, name, 0);
2830 fprintf (dump_file, " = ");
2831 print_generic_expr (dump_file, SSA_VAL (name), 0);
2832 fprintf (dump_file, "\n");
2841 /* Return the maximum value id we have ever seen. */
2844 get_max_value_id (void)
/* next_value_id is the id get_next_value_id will hand out next, so it
   is an exclusive upper bound on every id allocated so far.  */
2846 return next_value_id;
2849 /* Return the next unique value id. */
2852 get_next_value_id (void)
/* Post-increment: return the current id and reserve it.  */
2854 return next_value_id++;
2858 /* Compare two expressions E1 and E2 and return true if they are equal. */
/* Structural equality for VN purposes.  Handles identical pointers,
   NULL mismatches, element-wise TREE_LIST comparison, and otherwise
   defers to operand_equal_p.  NOTE(review): several return statements
   and loop conditions are elided from this excerpt.  */
2861 expressions_equal_p (tree e1, tree e2)
2863 /* The obvious case. */
2867 /* If only one of them is null, they cannot be equal. */
2871 /* Recurse on elements of lists. */
2872 if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
/* Walk both chains in lockstep, comparing each TREE_VALUE.  */
2876 for (lop1 = e1, lop2 = e2;
2878 lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
2882 if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
2888 /* Now perform the actual comparison. */
/* Same tree code plus operand_equal_p with OEP_PURE_SAME — which
   presumably lets equivalent pure calls compare equal; confirm
   against fold-const's OEP_* documentation.  */
2889 if (TREE_CODE (e1) == TREE_CODE (e2)
2890 && operand_equal_p (e1, e2, OEP_PURE_SAME))
2896 /* Sort the VUSE array so that we can do equality comparisons
2897 quicker on two vuse vecs. */
/* In-place qsort of a GC-allocated VUSE vector; a canonical order
   lets two vuse vectors be compared element-wise.  Length 0 or 1 is
   already sorted, so skip the call.  NOTE(review): the comparator
   argument to qsort is elided from this excerpt.  */
2900 sort_vuses (VEC (tree,gc) *vuses)
2902 if (VEC_length (tree, vuses) > 1)
2903 qsort (VEC_address (tree, vuses),
2904 VEC_length (tree, vuses),
2909 /* Sort the VUSE array so that we can do equality comparisons
2910 quicker on two vuse vecs. */
2913 sort_vuses_heap (VEC (tree,heap) *vuses)
2915 if (VEC_length (tree, vuses) > 1)
2916 qsort (VEC_address (tree, vuses),
2917 VEC_length (tree, vuses),