/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "langhooks.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
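/* A minimal, self-contained sketch (not GCC code; all names below are
   hypothetical) of why optimistic iteration over an SSA cycle
   converges.  For the cycle

       x1 = PHI <0, x2>
       x2 = x1 + 0

   we start both names at TOP and re-evaluate the members of the SCC
   until no value number changes, ending with both value numbered
   to 0.  */
#if 0
#include <stdio.h>

#define TOP (-1)

int
main (void)
{
  int vn_x1 = TOP, vn_x2 = TOP;
  int changed = 1;

  while (changed)
    {
      int new_x1, new_x2;

      /* x1 = PHI <0, x2>: TOP is equivalent to everything, so if the
	 non-TOP arguments agree on 0, the PHI gets value 0.  */
      new_x1 = (vn_x2 == TOP || vn_x2 == 0) ? 0 : vn_x1;
      /* x2 = x1 + 0 simplifies to the value of x1.  */
      new_x2 = new_x1;

      changed = (new_x1 != vn_x1 || new_x2 != vn_x2);
      vn_x1 = new_x1;
      vn_x2 = new_x2;
    }

  printf ("VN(x1) = %d, VN(x2) = %d\n", vn_x1, vn_x2);
  return 0;
}
#endif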
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;

static bool may_insert;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
				SSA_NAME_VERSION (name));
  gcc_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
		   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
	       SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}


/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if (gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
	  || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
	  || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
	expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			    gimple_expr_type (def_stmt),
			    TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
			  gimple_expr_type (def_stmt),
			  gimple_assign_rhs1 (def_stmt),
			  gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
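/* A minimal sketch (not GCC code; names are hypothetical) of the
   pattern used by the equality callbacks in this file: the hashcode is
   precomputed and stored in each entry, so it doubles as a cheap
   early-out before the expensive structural comparison.  */
#if 0
struct entry
{
  unsigned hashcode;
  /* ... key fields ... */
};

static int
entry_eq (const void *p1, const void *p2)
{
  const struct entry *e1 = (const struct entry *) p1;
  const struct entry *e2 = (const struct entry *) p2;

  if (e1->hashcode != e2->hashcode)
    return 0;			/* Different hashes: cannot be equal.  */
  /* ... full comparison of the key fields goes here ...  */
  return 1;
}
#endif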
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
				   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  vn_constant_t vc = XNEW (struct vn_constant_s);

  vc->hashcode = vn_hash_constant_with_type (constant);
  vc->constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, vc,
				   vc->hashcode, INSERT);
  if (*slot)
    {
      free (vc);
      return ((vn_constant_t)*slot)->value_id;
    }
  vc->value_id = get_next_value_id ();
  *slot = vc;
  bitmap_set_bit (constant_value_ids, vc->value_id);
  return vc->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1)
{
  hashval_t result = 0;

  result += iterative_hash_expr (vro1->op0, vro1->opcode);
  if (vro1->op1)
    result += iterative_hash_expr (vro1->op1, vro1->opcode);
  if (vro1->op2)
    result += iterative_hash_expr (vro1->op2, vro1->opcode);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  tree v;
  vn_reference_op_t vro;

  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
    result += iterative_hash_expr (v, 0);
  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    result += vn_reference_op_compute_hash (vro);

  return result;
}

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  tree v;
  int i;
  vn_reference_op_t vro;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
  if (vr1->hashcode != vr2->hashcode)
    return false;

  if (vr1->vuses == vr2->vuses
      && vr1->operands == vr2->operands)
    return true;

  /* Impossible for them to be equivalent if they have different
     number of vuses.  */
  if (VEC_length (tree, vr1->vuses) != VEC_length (tree, vr2->vuses))
    return false;

  /* We require that address operands be canonicalized in a way that
     two memory references will have the same operands if they are
     equivalent.  */
  if (VEC_length (vn_reference_op_s, vr1->operands)
      != VEC_length (vn_reference_op_s, vr2->operands))
    return false;

  /* The memory state is more often different than the address of the
     store/load, so check it first.  */
  for (i = 0; VEC_iterate (tree, vr1->vuses, i, v); i++)
    if (VEC_index (tree, vr2->vuses, i) != v)
      return false;

  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
			     vro))
      return false;

  return true;
}

/* Place the vuses from STMT into *RESULT.  */

static inline void
vuses_to_vec (gimple stmt, VEC (tree, gc) **result)
{
  ssa_op_iter iter;
  tree vuse;

  if (!gimple_references_memory_p (stmt))
    return;

  VEC_reserve_exact (tree, gc, *result,
		     num_ssa_operands (stmt, SSA_OP_VIRTUAL_USES));

  FOR_EACH_SSA_TREE_OPERAND (vuse, stmt, iter, SSA_OP_VIRTUAL_USES)
    VEC_quick_push (tree, *result, vuse);
}

/* Copy the VUSE names in STMT into a vector, and return
   the vector.  */

static VEC (tree, gc) *
copy_vuses_from_stmt (gimple stmt)
{
  VEC (tree, gc) *vuses = NULL;

  vuses_to_vec (stmt, &vuses);

  return vuses;
}

/* Place the vdefs from STMT into *RESULT.  */

static inline void
vdefs_to_vec (gimple stmt, VEC (tree, gc) **result)
{
  ssa_op_iter iter;
  tree vdef;

  if (!gimple_references_memory_p (stmt))
    return;

  *result = VEC_alloc (tree, gc, num_ssa_operands (stmt, SSA_OP_VIRTUAL_DEFS));

  FOR_EACH_SSA_TREE_OPERAND (vdef, stmt, iter, SSA_OP_VIRTUAL_DEFS)
    VEC_quick_push (tree, *result, vdef);
}

/* Copy the names of vdef results in STMT into a vector, and return
   the vector.  */

static VEC (tree, gc) *
copy_vdefs_from_stmt (gimple stmt)
{
  VEC (tree, gc) *vdefs = NULL;

  vdefs_to_vec (stmt, &vdefs);

  return vdefs;
}

/* Place for shared_v{uses/defs}_from_stmt to shove vuses/vdefs.  */
static VEC (tree, gc) *shared_lookup_vops;

/* Copy the virtual uses from STMT into SHARED_LOOKUP_VOPS.
   This function will overwrite the current SHARED_LOOKUP_VOPS
   variable.  */

VEC (tree, gc) *
shared_vuses_from_stmt (gimple stmt)
{
  VEC_truncate (tree, shared_lookup_vops, 0);
  vuses_to_vec (stmt, &shared_lookup_vops);

  return shared_lookup_vops;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
      temp.op1 = TMR_INDEX (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_STEP (ref);
      temp.op1 = TMR_OFFSET (ref);
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);

      switch (temp.opcode)
	{
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  /* The only operand is the address, which gets its own
	     vn_reference_op_s structure.  */
	  break;
	case MISALIGNED_INDIRECT_REF:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case BIT_FIELD_REF:
	  /* Record bits and position.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  /* If this is a reference to a union member, record the union
	     member size as operand.  Do so only if we are doing
	     expression insertion (during FRE), as PRE currently gets
	     confused with this.  */
	  if (may_insert
	      && TREE_OPERAND (ref, 2) == NULL_TREE
	      && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
	      && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
	      && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
	    temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
	  else
	    {
	      /* Record field as operand.  */
	      temp.op0 = TREE_OPERAND (ref, 1);
	      temp.op1 = TREE_OPERAND (ref, 2);
	    }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* Record index as operand.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  temp.op2 = TREE_OPERAND (ref, 3);
	  break;

	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthrough.  */
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (i.e. they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case IMAGPART_EXPR:
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  break;
	default:
	  gcc_unreachable ();
	}
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
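/* For illustration (a hypothetical example, not produced verbatim by
   the code above): a load from a.b[i_1] is decomposed outermost-first
   into the operand vector

     { ARRAY_REF,     op0 = i_1 }
     { COMPONENT_REF, op0 = b   }
     { VAR_DECL a }

   so two references are considered equal exactly when their operand
   vectors (and vuses) compare equal.  */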
/* Re-create a reference tree from the reference ops OPS.
   Returns NULL_TREE if the ops were not handled.
   This routine needs to be kept in sync with copy_reference_ops_from_ref.  */

static tree
get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree ref, *op0_p = &ref;

  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
	{
	case CALL_EXPR:
	  return NULL_TREE;

	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  *op0_p = build1 (op->opcode, op->type, NULL_TREE);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case MISALIGNED_INDIRECT_REF:
	  *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
			   NULL_TREE, op->op0);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case BIT_FIELD_REF:
	  *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
			   op->op0, op->op1);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case COMPONENT_REF:
	  *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
			   op->op0, op->op1);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  *op0_p = build4 (op->opcode, op->type, NULL_TREE,
			   op->op0, op->op1, op->op2);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case ADDR_EXPR:
	  if (op->op0 != NULL_TREE)
	    {
	      gcc_assert (is_gimple_min_invariant (op->op0));
	      *op0_p = op->op0;
	      break;
	    }
	  /* Fallthrough.  */
	case IMAGPART_EXPR:
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  *op0_p = build1 (op->opcode, op->type, NULL_TREE);
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	default:
	  return NULL_TREE;
	}
    }

  return ref;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gimple call,
			      VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  return shared_lookup_references;
}


/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  int i;

  for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  vro->op0 = SSA_VAL (vro->op0);
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	  /* TODO: Do we want to valueize op2 and op1 of
	     ARRAY_REF/COMPONENT_REF for Ada?  */
	}
    }

  return orig;
}
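/* For example (hypothetical SSA names): if SSA_VAL (i_1) is i_2, the
   operand vector for a[i_1] is rewritten in place to refer to i_2, so
   a[i_1] and a[i_2] hash identically and compare equal in the tables.  */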
/* Transform any SSA_NAME's in ORIG, a vector of vuse trees, into
   their value numbers.  This is done in-place, and the vector passed
   in is returned.  */

static VEC (tree, gc) *
valueize_vuses (VEC (tree, gc) *orig)
{
  bool made_replacement = false;
  tree vuse;
  int i;

  for (i = 0; VEC_iterate (tree, orig, i, vuse); i++)
    {
      if (vuse != SSA_VAL (vuse))
	{
	  made_replacement = true;
	  VEC_replace (tree, orig, i, SSA_VAL (vuse));
	}
    }

  if (made_replacement && VEC_length (tree, orig) > 1)
    sort_vuses (orig);

  return orig;
}

/* Return the single reference statement defining all virtual uses
   in VUSES or NULL_TREE, if there are multiple defining statements.
   Take into account only definitions that alias REF if following
   virtual use-def chains.  */

static gimple
get_def_ref_stmt_vuses (tree ref, VEC (tree, gc) *vuses)
{
  gimple def_stmt;
  tree vuse;
  unsigned int i;

  gcc_assert (VEC_length (tree, vuses) >= 1);

  def_stmt = SSA_NAME_DEF_STMT (VEC_index (tree, vuses, 0));
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      /* We can only handle lookups over PHI nodes for a single
	 virtual operand.  */
      if (VEC_length (tree, vuses) == 1)
	{
	  def_stmt = get_single_def_stmt_from_phi (ref, def_stmt);
	  goto cont;
	}
      else
	return NULL;
    }

  /* Verify each VUSE reaches the same defining stmt.  */
  for (i = 1; VEC_iterate (tree, vuses, i, vuse); ++i)
    {
      gimple tmp = SSA_NAME_DEF_STMT (vuse);
      if (tmp != def_stmt)
	return NULL;
    }

  /* Now see if the definition aliases ref, and loop until it does.  */
cont:
  while (def_stmt
	 && is_gimple_assign (def_stmt)
	 && !refs_may_alias_p (ref, gimple_get_lhs (def_stmt)))
    def_stmt = get_single_def_stmt_with_phi (ref, def_stmt);

  return def_stmt;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;

  slot = htab_find_slot_with_hash (current_info->references, vr,
				   vr->hashcode, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
				     vr->hashcode, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (VEC (tree, gc) *vuses,
			    VEC (vn_reference_op_s, heap) *operands,
			    vn_reference_t *vnresult, bool maywalk)
{
  struct vn_reference_s vr1;
  tree result;

  if (vnresult)
    *vnresult = NULL;
  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (operands);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, vnresult);

  /* If there is a single defining statement for all virtual uses, we can
     use that, following virtual use-def chains.  */
  if (!result
      && maywalk
      && VEC_length (tree, vr1.vuses) >= 1)
    {
      tree ref = get_ref_from_reference_ops (operands);
      gimple def_stmt;
      if (ref
	  && (def_stmt = get_def_ref_stmt_vuses (ref, vr1.vuses))
	  && is_gimple_assign (def_stmt))
	{
	  /* We are now at an aliasing definition for the vuses we want to
	     look up.  Re-do the lookup with the vdefs for this stmt.  */
	  vdefs_to_vec (def_stmt, &vuses);
	  vr1.vuses = valueize_vuses (vuses);
	  vr1.hashcode = vn_reference_compute_hash (&vr1);
	  result = vn_reference_lookup_1 (&vr1, vnresult);
	}
    }

  return result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, VEC (tree, gc) *vuses, bool maywalk,
		     vn_reference_t *vnresult)
{
  struct vn_reference_s vr1;
  tree result;
  gimple def_stmt;

  if (vnresult)
    *vnresult = NULL;
  vr1.vuses = valueize_vuses (vuses);
  vr1.operands = valueize_refs (shared_reference_ops_from_ref (op));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, vnresult);

  /* If there is a single defining statement for all virtual uses, we can
     use that, following virtual use-def chains.  */
  if (!result
      && maywalk
      && VEC_length (tree, vr1.vuses) >= 1
      && (def_stmt = get_def_ref_stmt_vuses (op, vr1.vuses))
      && is_gimple_assign (def_stmt))
    {
      /* We are now at an aliasing definition for the vuses we want to
	 look up.  Re-do the lookup with the vdefs for this stmt.  */
      vdefs_to_vec (def_stmt, &vuses);
      vr1.vuses = valueize_vuses (vuses);
      vr1.hashcode = vn_reference_compute_hash (&vr1);
      result = vn_reference_lookup_1 (&vr1, vnresult);
    }

  return result;
}

/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, VEC (tree, gc) *vuses)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (VEC (tree, gc) *vuses,
			    VEC (vn_reference_op_s, heap) *operands,
			    tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuses = valueize_vuses (vuses);
  vr1->operands = valueize_refs (operands);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
				   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}

/* Compute and return the hash value for nary operation VNO1.  */

inline hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash = 0;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  for (i = 0; i < vno1->length; ++i)
    hash += iterative_hash_expr (vno1->op[i], vno1->opcode);

  return hash;
}
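/* Note that the operand canonicalization above is what makes the hash
   commutativity-insensitive: e.g. (hypothetically) b_2 * a_1 is swapped
   into the same canonical operand order as a_1 * b_2 before hashing,
   so both forms land in the same hash bucket and compare equal.  */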
/* Return the computed hashcode for nary operation P1.  */

hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}

/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0, tree op1, tree op2,
			  tree op3, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = code;
  vno1.length = length;
  vno1.type = type;
  vno1.op[0] = op0;
  vno1.op[1] = op1;
  vno1.op[2] = op2;
  vno1.op[3] = op3;
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = TREE_CODE (op);
  vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno1.type = TREE_TYPE (op);
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = TREE_OPERAND (op, i);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
{
  void **slot;
  struct vn_nary_op_s vno1;
  unsigned i;

  if (vnresult)
    *vnresult = NULL;
  vno1.opcode = gimple_assign_rhs_code (stmt);
  vno1.length = gimple_num_ops (stmt) - 1;
  vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1.length; ++i)
    vno1.op[i] = gimple_op (stmt, i + 1);
  if (vno1.opcode == REALPART_EXPR
      || vno1.opcode == IMAGPART_EXPR
      || vno1.opcode == VIEW_CONVERT_EXPR)
    vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
  vno1.hashcode = vn_nary_op_compute_hash (&vno1);
  slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = (vn_nary_op_t)*slot;
  return ((vn_nary_op_t)*slot)->result;
}

/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree op0,
			  tree op1, tree op2, tree op3,
			  tree result,
			  unsigned int value_id)
{
  void **slot;
  vn_nary_op_t vno1;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = value_id;
  vno1->opcode = code;
  vno1->length = length;
  vno1->type = type;
  vno1->op[0] = op0;
  vno1->op[1] = op1;
  vno1->op[2] = op2;
  vno1->op[3] = op3;
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = TREE_CODE (op);
  vno1->length = length;
  vno1->type = TREE_TYPE (op);
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = TREE_OPERAND (op, i);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

vn_nary_op_t
vn_nary_op_insert_stmt (gimple stmt, tree result)
{
  unsigned length = gimple_num_ops (stmt) - 1;
  void **slot;
  vn_nary_op_t vno1;
  unsigned i;

  vno1 = (vn_nary_op_t) obstack_alloc (&current_info->nary_obstack,
				       (sizeof (struct vn_nary_op_s)
					- sizeof (tree) * (4 - length)));
  vno1->value_id = VN_INFO (result)->value_id;
  vno1->opcode = gimple_assign_rhs_code (stmt);
  vno1->length = length;
  vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
  for (i = 0; i < vno1->length; ++i)
    vno1->op[i] = gimple_op (stmt, i + 1);
  if (vno1->opcode == REALPART_EXPR
      || vno1->opcode == IMAGPART_EXPR
      || vno1->opcode == VIEW_CONVERT_EXPR)
    vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
  vno1->result = result;
  vno1->hashcode = vn_nary_op_compute_hash (vno1);
  slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
				   INSERT);
  gcc_assert (!*slot);

  *slot = vno1;
  return vno1;
}

/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  hashval_t result = 0;
  int i;
  tree phi1op;
  tree type;

  result = vp1->block->index;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
  result += (INTEGRAL_TYPE_P (type)
	     + (INTEGRAL_TYPE_P (type)
		? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));

  for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
    {
      if (phi1op == VN_TOP)
	continue;
      result += iterative_hash_expr (phi1op, result);
    }

  return result;
}

/* Return the computed hashcode for phi operation P1.  */

static hashval_t
vn_phi_hash (const void *p1)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const void *p1, const void *p2)
{
  const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
  const_vn_phi_t const vp2 = (const_vn_phi_t) p2;

  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block == vp2->block)
    {
      int i;
      tree phi1op;

      /* If the PHI nodes do not have compatible types
	 they are not the same.  */
      if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
			       TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
	return false;

      /* Any phi in the same block will have its arguments in the
	 same edge order, because of how we store phi nodes.  */
      for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
	{
	  tree phi2op = VEC_index (tree, vp2->phiargs, i);
	  if (phi1op == VN_TOP || phi2op == VN_TOP)
	    continue;
	  if (!expressions_equal_p (phi1op, phi2op))
	    return false;
	}
      return true;
    }
  return false;
}
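/* VN_TOP is equivalent to everything, so an argument that is still
   VN_TOP (i.e. not yet visited during the optimistic iteration) is
   skipped rather than compared, both here and in vn_phi_compute_hash
   above.  */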
static VEC(tree, heap) *shared_lookup_phiargs;

/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple phi)
{
  void **slot;
  struct vn_phi_s vp1;
  unsigned i;

  VEC_truncate (tree, shared_lookup_phiargs, 0);

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
    }
  vp1.phiargs = shared_lookup_phiargs;
  vp1.block = gimple_bb (phi);
  vp1.hashcode = vn_phi_compute_hash (&vp1);
  slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
				   NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
				     NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return ((vn_phi_t)*slot)->result;
}

/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple phi, tree result)
{
  void **slot;
  vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
  unsigned i;
  VEC (tree, heap) *args = NULL;

  /* Canonicalize the SSA_NAME's to their value number.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);
      def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
      VEC_safe_push (tree, heap, args, def);
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->phiargs = args;
  vp1->block = gimple_bb (phi);
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
				   INSERT);

  /* Because we iterate over phi operations more than once, it's
     possible the slot might already exist here, hence no assert.  */
  *slot = vp1;
  return vp1;
}


/* Print set of components in strongly connected component SCC to OUT.  */

static void
print_scc (FILE *out, VEC (tree, heap) *scc)
{
  tree var;
  unsigned int i;

  fprintf (out, "SCC consists of: ");
  for (i = 0; VEC_iterate (tree, scc, i, var); i++)
    {
      print_generic_expr (out, var, 0);
      fprintf (out, " ");
    }
  fprintf (out, "\n");
}

/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  tree currval;

  if (from != to
      && TREE_CODE (to) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
    to = from;

  /* The only thing we allow as value numbers are VN_TOP, ssa_names
     and invariants.  So assert that here.  */
  gcc_assert (to != NULL_TREE
	      && (to == VN_TOP
		  || TREE_CODE (to) == SSA_NAME
		  || is_gimple_min_invariant (to)));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from, 0);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to, 0);
    }

  currval = SSA_VAL (from);

  if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
    {
      SSA_VAL (from) = to;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}

/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);

      VN_INFO (def)->use_processed = true;
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

static bool expr_has_constants (tree expr);
static tree valueize_expr (tree expr);

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Follow chains of copies to their destination.  */
  while (TREE_CODE (rhs) == SSA_NAME
	 && SSA_VAL (rhs) != rhs)
    rhs = SSA_VAL (rhs);

  /* The copy may have a more interesting constant filled expression
     (we don't, since we know our RHS is just an SSA name).  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
      VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
    }

  return set_ssa_val_to (lhs, rhs);
}

/* Visit a unary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_unary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a binary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_binary_op (tree lhs, gimple stmt)
{
  bool changed = false;
  tree result = vn_nary_op_lookup_stmt (stmt, NULL);

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_nary_op_insert_stmt (stmt, lhs);
    }

  return changed;
}

/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gimple stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  tree result;

  vr1.vuses = valueize_vuses (shared_vuses_from_stmt (stmt));
  vr1.operands = valueize_refs (shared_reference_ops_from_call (stmt));
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  result = vn_reference_lookup_1 (&vr1, NULL);
  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	VN_INFO (lhs)->has_constants = true;
    }
  else
    {
      void **slot;
      vn_reference_t vr2;
      changed = set_ssa_val_to (lhs, lhs);
      vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
      vr2->vuses = valueize_vuses (copy_vuses_from_stmt (stmt));
      vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      slot = htab_find_slot_with_hash (current_info->references,
				       vr2, vr2->hashcode, INSERT);
      if (*slot)
	free_reference (*slot);
      *slot = vr2;
    }

  return changed;
}

/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result = vn_reference_lookup (op, shared_vuses_from_stmt (stmt), true,
				     NULL);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      if ((CONVERT_EXPR_P (val)
	   || TREE_CODE (val) == VIEW_CONVERT_EXPR)
	  && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
	{
	  tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
	  if ((CONVERT_EXPR_P (tem)
	       || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
	      && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
						    TREE_TYPE (val), tem)))
	    val = tem;
	}
      result = val;
      if (!is_gimple_min_invariant (val)
	  && TREE_CODE (val) != SSA_NAME)
	result = vn_nary_op_lookup (val, NULL);
      /* If the expression is not yet available, value-number lhs to
	 a new SSA_NAME we create.  */
      if (!result && may_insert)
	{
	  result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
	  /* Initialize value-number information properly.  */
	  VN_INFO_GET (result)->valnum = result;
	  VN_INFO (result)->value_id = get_next_value_id ();
	  VN_INFO (result)->expr = val;
	  VN_INFO (result)->has_constants = expr_has_constants (val);
	  VN_INFO (result)->needs_insertion = true;
	  /* As all "inserted" statements are singleton SCCs, insert
	     to the valid table.  This is strictly needed to
	     avoid re-generating new value SSA_NAMEs for the same
	     expression during SCC iteration over and over (the
	     optimistic table gets cleared after each iteration).
	     We do not need to insert into the optimistic table, as
	     lookups there will fall back to the valid table.  */
	  if (current_info == optimistic_info)
	    {
	      current_info = valid_info;
	      vn_nary_op_insert (val, result);
	      current_info = optimistic_info;
	    }
	  else
	    vn_nary_op_insert (val, result);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Inserting name ");
	      print_generic_expr (dump_file, result, 0);
	      fprintf (dump_file, " for expression ");
	      print_generic_expr (dump_file, val, 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  if (result)
    {
      changed = set_ssa_val_to (lhs, result);
      if (TREE_CODE (result) == SSA_NAME
	  && VN_INFO (result)->has_constants)
	{
	  VN_INFO (lhs)->expr = VN_INFO (result)->expr;
	  VN_INFO (lhs)->has_constants = true;
	}
    }
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, copy_vuses_from_stmt (stmt));
    }

  return changed;
}


/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple stmt)
{
  bool changed = false;
  tree result;
  bool resultsame = false;

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
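  /* For illustration (hypothetical virtual operands, not produced by
     this file):

	 # MEM_3 = VDEF <MEM_1>
	 *p_1 = x_2;
	 ...
	 # MEM_5 = VDEF <MEM_3>
	 *p_1 = x_2;

     The first store is entered into the table under its vdef MEM_3.
     The second store looks itself up using its vuse, which is also
     MEM_3; the lookup finds the first store with the same address and
     value, so MEM_5 is value numbered to MEM_3 rather than to itself.  */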
  result = vn_reference_lookup (lhs, shared_vuses_from_stmt (stmt), false,
				NULL);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
      if (TREE_CODE (op) == SSA_NAME)
	op = SSA_VAL (op);
      resultsame = expressions_equal_p (result, op);
    }

  if (!result || !resultsame)
    {
      VEC(tree, gc) *vdefs = copy_vdefs_from_stmt (stmt);
      int i;
      tree vdef;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs, 0);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op, 0);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      for (i = 0; VEC_iterate (tree, vdefs, i, vdef); i++)
	{
	  VN_INFO (vdef)->use_processed = true;
	  changed |= set_ssa_val_to (vdef, vdef);
	}

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdefs);
    }
  else
    {
      /* We had a match, so value number the vdefs to have the value
	 number of the vuses they came from.  */
      ssa_op_iter op_iter;
      def_operand_p var;
      vuse_vec_p vv;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      FOR_EACH_SSA_VDEF_OPERAND (var, vv, stmt, op_iter)
	{
	  tree def = DEF_FROM_PTR (var);
	  tree use;

	  /* Uh, if the vuse is a multiuse, we can't really do much
	     here, sadly, since we don't know which value number of
	     which vuse to use.  */
	  if (VUSE_VECT_NUM_ELEM (*vv) != 1)
	    use = def;
	  else
	    use = VUSE_ELEMENT_VAR (*vv, 0);

	  VN_INFO (def)->use_processed = true;
	  changed |= set_ssa_val_to (def, SSA_VAL (use));
	}
    }

  return changed;
}

/* Visit and value number PHI, return true if the value number
   changed.  */

static bool
visit_phi (gimple phi)
{
  bool changed = false;
  tree result;
  tree sameval = VN_TOP;
  bool allsame = true;
  unsigned i;

  /* TODO: We could check for this in init_sccvn, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree def = PHI_ARG_DEF (phi, i);

      if (TREE_CODE (def) == SSA_NAME)
	def = SSA_VAL (def);
      if (def == VN_TOP)
	continue;
      if (sameval == VN_TOP)
	sameval = def;
      else
	{
	  if (!expressions_equal_p (def, sameval))
	    {
	      allsame = false;
	      break;
	    }
	}
    }

  /* If all value numbered to the same value, the phi node has that
     value.  */
  if (allsame)
    {
      if (is_gimple_min_invariant (sameval))
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = true;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}
      else
	{
	  VN_INFO (PHI_RESULT (phi))->has_constants = false;
	  VN_INFO (PHI_RESULT (phi))->expr = sameval;
	}

      if (TREE_CODE (sameval) == SSA_NAME)
	return visit_copy (PHI_RESULT (phi), sameval);

      return set_ssa_val_to (PHI_RESULT (phi), sameval);
    }

  /* Otherwise, see if it is equivalent to a phi node in this block.  */
  result = vn_phi_lookup (phi);
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
	changed = visit_copy (PHI_RESULT (phi), result);
      else
	changed = set_ssa_val_to (PHI_RESULT (phi), result);
    }
  else
    {
      vn_phi_insert (phi, PHI_RESULT (phi));
      VN_INFO (PHI_RESULT (phi))->has_constants = false;
      VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
      changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
    }

  return changed;
}

/* Return true if EXPR contains constants.  */

static bool
expr_has_constants (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0));

    case tcc_binary:
      return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
	|| is_gimple_min_invariant (TREE_OPERAND (expr, 1));
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
    case tcc_reference:
    case tcc_declaration:
      return false;
    default:
      return is_gimple_min_invariant (expr);
    }
}

/* Return true if STMT contains constants.  */

static bool
stmt_has_constants (gimple stmt)
{
  if (gimple_code (stmt) != GIMPLE_ASSIGN)
    return false;

  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
    {
    case GIMPLE_UNARY_RHS:
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));

    case GIMPLE_BINARY_RHS:
      return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
	      || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
    case GIMPLE_SINGLE_RHS:
      /* Constants inside reference ops are rarely interesting, but
	 it can take a lot of looking to find them.  */
      return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
    default:
      gcc_unreachable ();
    }
  return false;
}

/* Replace SSA_NAMES in expr with their value numbers, and return the
   result.
   This is performed in place.  */

static tree
valueize_expr (tree expr)
{
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_unary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      break;
    case tcc_binary:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
	TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
	  && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
	TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
      break;
    default:
      break;
    }
  return expr;
}

/* Simplify the binary expression RHS, and return the result if
   simplified.  */

static tree
simplify_binary_expression (gimple stmt)
{
  tree result = NULL_TREE;
  tree op0 = gimple_assign_rhs1 (stmt);
  tree op1 = gimple_assign_rhs2 (stmt);

  /* This will not catch every single case we could combine, but will
     catch those with constants.  The goal here is to simultaneously
     combine constants between expressions, but avoid infinite
     expansion of expressions during simplification.  */
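  /* For example (hypothetical SSA names): if VN_INFO (x_1)->expr is
     a_2 + 1, then for y_3 = x_1 + 2 we hand (a_2 + 1) + 2 to fold and
     get a_2 + 3 back, combining the two constants without expanding
     any non-constant subexpressions.  */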
2123 if (TREE_CODE (op0) == SSA_NAME)
2125 if (VN_INFO (op0)->has_constants
2126 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2127 op0 = valueize_expr (vn_get_expr_for (op0));
2128 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2129 op0 = SSA_VAL (op0);
2132 if (TREE_CODE (op1) == SSA_NAME)
2134 if (VN_INFO (op1)->has_constants)
2135 op1 = valueize_expr (vn_get_expr_for (op1));
2136 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2137 op1 = SSA_VAL (op1);
2140 /* Avoid folding if nothing changed. */
2141 if (op0 == gimple_assign_rhs1 (stmt)
2142 && op1 == gimple_assign_rhs2 (stmt))
2145 fold_defer_overflow_warnings ();
2147 result = fold_binary (gimple_assign_rhs_code (stmt),
2148 TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);
2150 STRIP_USELESS_TYPE_CONVERSION (result);
2152 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2155 /* Make sure result is not a complex expression consisting
2156 of operators of operators (IE (a + b) + (a + c))
2157 Otherwise, we will end up with unbounded expressions if
2158 fold does anything at all. */
2159 if (result && valid_gimple_rhs_p (result))
2165 /* Simplify the unary expression RHS, and return the result if
2169 simplify_unary_expression (gimple stmt)
2171 tree result = NULL_TREE;
2172 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2174 /* We handle some tcc_reference codes here that are all
2175 GIMPLE_ASSIGN_SINGLE codes. */
2176 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2177 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2178 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2179 op0 = TREE_OPERAND (op0, 0);
2181 if (TREE_CODE (op0) != SSA_NAME)
2185 if (VN_INFO (op0)->has_constants)
2186 op0 = valueize_expr (vn_get_expr_for (op0));
2187 else if (gimple_assign_cast_p (stmt)
2188 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2189 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2190 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2192 /* We want to do tree-combining on conversion-like expressions.
2193 Make sure we feed only SSA_NAMEs or constants to fold though. */
2194 tree tem = valueize_expr (vn_get_expr_for (op0));
2195 if (UNARY_CLASS_P (tem)
2196 || BINARY_CLASS_P (tem)
2197 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2198 || TREE_CODE (tem) == SSA_NAME
2199 || is_gimple_min_invariant (tem))
2203 /* Avoid folding if nothing changed, but remember the expression. */
2204 if (op0 == orig_op0)
2207 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2208 gimple_expr_type (stmt), op0);
2211 STRIP_USELESS_TYPE_CONVERSION (result);
2212 if (valid_gimple_rhs_p (result))
2219 /* Try to simplify RHS using equivalences and constant folding. */
2222 try_to_simplify (gimple stmt)
2226 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
2227 in this case, there is no point in doing extra work. */
2228 if (gimple_assign_copy_p (stmt)
2229 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2232 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2234 case tcc_declaration:
2235 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2241 /* Do not do full-blown reference lookup here, but simplify
2242 reads from constant aggregates. */
2243 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2247 /* Fallthrough for some codes that can operate on registers. */
2248 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2249 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2250 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2252 /* We could do a little more with unary ops, if they expand
2253 into binary ops, but it's debatable whether it is worth it. */
2255 return simplify_unary_expression (stmt);
2257 case tcc_comparison:
2259 return simplify_binary_expression (stmt);
2268 /* Visit and value number USE, return true if the value number
2272 visit_use (tree use)
2274 bool changed = false;
2275 gimple stmt = SSA_NAME_DEF_STMT (use);
2277 VN_INFO (use)->use_processed = true;
2279 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2280 if (dump_file && (dump_flags & TDF_DETAILS)
2281 && !SSA_NAME_IS_DEFAULT_DEF (use))
2283 fprintf (dump_file, "Value numbering ");
2284 print_generic_expr (dump_file, use, 0);
2285 fprintf (dump_file, " stmt = ");
2286 print_gimple_stmt (dump_file, stmt, 0, 0);
2289 /* Handle uninitialized uses. */
2290 if (SSA_NAME_IS_DEFAULT_DEF (use))
2291 changed = set_ssa_val_to (use, use);
2294 if (gimple_code (stmt) == GIMPLE_PHI)
2295 changed = visit_phi (stmt);
2296 else if (!gimple_has_lhs (stmt)
2297 || gimple_has_volatile_ops (stmt)
2298 || stmt_could_throw_p (stmt))
2299 changed = defs_to_varying (stmt);
2300 else if (is_gimple_assign (stmt))
2302 tree lhs = gimple_assign_lhs (stmt);
2305 /* Shortcut for copies. Simplifying copies is pointless,
2306 since we copy the expression and value they represent. */
          if (gimple_assign_copy_p (stmt)
              && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
              && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
              goto done;
            }

          simplified = try_to_simplify (stmt);
          if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "RHS ");
                  print_gimple_expr (dump_file, stmt, 0, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified, 0);
                  if (TREE_CODE (lhs) == SSA_NAME)
                    fprintf (dump_file, " has constants %d\n",
                             expr_has_constants (simplified));
                  else
                    fprintf (dump_file, "\n");
                }
            }
          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified)
              && TREE_CODE (lhs) == SSA_NAME)
            {
              VN_INFO (lhs)->expr = simplified;
              VN_INFO (lhs)->has_constants = true;
              changed = set_ssa_val_to (lhs, simplified);
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME
                   && TREE_CODE (lhs) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              goto done;
            }
          else if (simplified)
            {
              if (TREE_CODE (lhs) == SSA_NAME)
                {
                  VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
                  /* We have to unshare the expression or else
                     valuizing may change the IL stream.  */
                  VN_INFO (lhs)->expr = unshare_expr (simplified);
                }
            }
          else if (stmt_has_constants (stmt)
                   && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */
              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }
          if ((TREE_CODE (lhs) == SSA_NAME
               /* We can substitute SSA_NAMEs that are live over
                  abnormal edges with their constant value.  */
               && !(gimple_assign_copy_p (stmt)
                    && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
               && !(simplified
                    && is_gimple_min_invariant (simplified))
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
              /* Stores or copies from SSA_NAMEs that are live over
                 abnormal edges are a problem.  */
              || (gimple_assign_single_p (stmt)
                  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
                  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
            changed = defs_to_varying (stmt);
          else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
            changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if ((gimple_assign_copy_p (stmt)
                   && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
                  || (simplified
                      && is_gimple_min_invariant (simplified)))
                {
                  VN_INFO (lhs)->has_constants = true;
                  if (simplified)
                    changed = set_ssa_val_to (lhs, simplified);
                  else
                    changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
                }
              else
                {
                  switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
                    {
                    case GIMPLE_UNARY_RHS:
                      changed = visit_unary_op (lhs, stmt);
                      break;
                    case GIMPLE_BINARY_RHS:
                      changed = visit_binary_op (lhs, stmt);
                      break;
                    case GIMPLE_SINGLE_RHS:
                      switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
                        {
                        case tcc_reference:
                          /* VOP-less references can go through unary case.  */
                          if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
                               || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
                               || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
                              && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
                            {
                              changed = visit_unary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        case tcc_declaration:
                          changed = visit_reference_op_load
                              (lhs, gimple_assign_rhs1 (stmt), stmt);
                          break;
                        case tcc_expression:
                          if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
                            {
                              changed = visit_unary_op (lhs, stmt);
                              break;
                            }
                          /* Fallthrough.  */
                        default:
                          changed = defs_to_varying (stmt);
                          break;
                        }
                      break;
                    default:
                      changed = defs_to_varying (stmt);
                      break;
                    }
                }
            }
          else
            changed = defs_to_varying (stmt);
        }
      else if (is_gimple_call (stmt))
        {
          tree lhs = gimple_call_lhs (stmt);

          /* ???  We could try to simplify calls.  */

          if (stmt_has_constants (stmt)
              && TREE_CODE (lhs) == SSA_NAME)
            VN_INFO (lhs)->has_constants = true;
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              /* We reset expr and constantness here because we may
                 have been value numbering optimistically, and
                 iterating.  They may become non-constant in this case,
                 even if they were optimistically constant.  */
              VN_INFO (lhs)->has_constants = false;
              VN_INFO (lhs)->expr = NULL_TREE;
            }

          if (TREE_CODE (lhs) == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            changed = defs_to_varying (stmt);
          /* ???  We should handle stores from calls.  */
          else if (TREE_CODE (lhs) == SSA_NAME)
            {
              if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
                changed = visit_reference_op_call (lhs, stmt);
              else
                changed = defs_to_varying (stmt);
            }
          else
            changed = defs_to_varying (stmt);
        }
      else
        changed = defs_to_varying (stmt);
    }

 done:
  return changed;
}
/* Compare two operands by reverse postorder index.  */
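/* Undefined (nop-defined) names sort first, then PHI results before
   other definitions within the same block (ordered by gimple uid),
   then definitions from blocks that come earlier in reverse
   postorder (descriptive note).  */
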
static int
compare_ops (const void *pa, const void *pb)
{
  const tree opa = *((const tree *)pa);
  const tree opb = *((const tree *)pb);
  gimple opstmta = SSA_NAME_DEF_STMT (opa);
  gimple opstmtb = SSA_NAME_DEF_STMT (opb);
  basic_block bba;
  basic_block bbb;

  if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
    return 0;
  else if (gimple_nop_p (opstmta))
    return -1;
  else if (gimple_nop_p (opstmtb))
    return 1;

  bba = gimple_bb (opstmta);
  bbb = gimple_bb (opstmtb);

  if (!bba && !bbb)
    return 0;
  else if (!bba)
    return -1;
  else if (!bbb)
    return 1;

  if (bba == bbb)
    {
      if (gimple_code (opstmta) == GIMPLE_PHI
          && gimple_code (opstmtb) == GIMPLE_PHI)
        return 0;
      else if (gimple_code (opstmta) == GIMPLE_PHI)
        return -1;
      else if (gimple_code (opstmtb) == GIMPLE_PHI)
        return 1;
      return gimple_uid (opstmta) - gimple_uid (opstmtb);
    }
  return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
}
/* Sort an array containing members of a strongly connected component
   SCC so that the members are ordered by RPO number.
   This means that when the sort is complete, iterating through the
   array will give you the members in RPO order.  */

static void
sort_scc (VEC (tree, heap) *scc)
{
  qsort (VEC_address (tree, scc),
         VEC_length (tree, scc),
         sizeof (tree),
         compare_ops);
}
/* Process a strongly connected component in the SSA graph.  */

static void
process_scc (VEC (tree, heap) *scc)
{
  /* If the SCC has a single member, just visit it.  */
  if (VEC_length (tree, scc) == 1)
    {
      tree use = VEC_index (tree, scc, 0);
      if (!VN_INFO (use)->use_processed)
        visit_use (use);
    }
  else
    {
      tree var;
      unsigned int i;
      unsigned int iterations = 0;
      bool changed = true;

      /* Iterate over the SCC with the optimistic table until it stops
         changing.  */
      current_info = optimistic_info;
      while (changed)
        {
          changed = false;
          iterations++;
          /* As we are value-numbering optimistically we have to
             clear the expression tables and the simplified expressions
             in each iteration until we converge.  */
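          /* Sketch of convergence (illustrative, not from a testcase):
             for the cycle
               i_1 = PHI <0(entry), i_2(latch)>
               i_2 = i_1 + 0
             the first iteration still sees i_2 as VN_TOP, so the PHI
             optimistically gets the value 0 and i_2 then folds to
             0 + 0 = 0; the second iteration computes the same numbers,
             so the SCC has converged.  */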
          htab_empty (optimistic_info->nary);
          htab_empty (optimistic_info->phis);
          htab_empty (optimistic_info->references);
          obstack_free (&optimistic_info->nary_obstack, NULL);
          gcc_obstack_init (&optimistic_info->nary_obstack);
          empty_alloc_pool (optimistic_info->phis_pool);
          empty_alloc_pool (optimistic_info->references_pool);
          for (i = 0; VEC_iterate (tree, scc, i, var); i++)
            VN_INFO (var)->expr = NULL_TREE;
          for (i = 0; VEC_iterate (tree, scc, i, var); i++)
            changed |= visit_use (var);
        }

      statistics_histogram_event (cfun, "SCC iterations", iterations);

      /* Finally, visit the SCC once using the valid table.  */
      current_info = valid_info;
      for (i = 0; VEC_iterate (tree, scc, i, var); i++)
        visit_use (var);
    }
}
DEF_VEC_O(ssa_op_iter);
DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the components of the found SCC for NAME off the SCC stack
   and process them.  Returns true if all went well, false if
   we run into resource limits.  */

static bool
extract_and_process_scc_for_name (tree name)
{
  VEC (tree, heap) *scc = NULL;
  tree x;

  /* Found an SCC, pop the components off the SCC stack and
     process them.  */
  do
    {
      x = VEC_pop (tree, sccstack);

      VN_INFO (x)->on_sccstack = false;
      VEC_safe_push (tree, heap, scc, x);
    } while (x != name);

  /* Bail out of SCCVN in case a SCC turns out to be incredibly large.  */
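  /* The limit is controlled by --param sccvn-max-scc-size; giving up
     here makes the whole SCCVN run fail (see run_scc_vn).  */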
  if (VEC_length (tree, scc)
      > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
    {
      if (dump_file)
        fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
                 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
                 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
      return false;
    }

  if (VEC_length (tree, scc) > 1)
    sort_scc (scc);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_scc (dump_file, scc);

  process_scc (scc);

  VEC_free (tree, heap, scc);

  return true;
}
/* Depth first search on NAME to discover and process SCC's in the SSA
   graph.
   Execution of this algorithm relies on the fact that the SCC's are
   popped off the stack in topological order.
   Returns true if successful, false if we stopped processing SCC's due
   to resource constraints.  */
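/* This is an iterative, explicit-stack variant of Tarjan's SCC
   algorithm: LOW tracks the smallest DFS number reachable from a
   name, and a name roots an SCC exactly when its LOW still equals
   its own DFS number once all of its uses have been walked.  */
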
static bool
DFS (tree name)
{
  VEC(ssa_op_iter, heap) *itervec = NULL;
  VEC(tree, heap) *namevec = NULL;
  use_operand_p usep = NULL;
  gimple defstmt;
  tree use;
  ssa_op_iter iter;

start_over:
  /* SCC info */
  VN_INFO (name)->dfsnum = next_dfs_num++;
  VN_INFO (name)->visited = true;
  VN_INFO (name)->low = VN_INFO (name)->dfsnum;

  VEC_safe_push (tree, heap, sccstack, name);
  VN_INFO (name)->on_sccstack = true;
  defstmt = SSA_NAME_DEF_STMT (name);

  /* Recursively DFS on our operands, looking for SCC's.  */
  if (!gimple_nop_p (defstmt))
    {
      /* Push a new iterator.  */
      if (gimple_code (defstmt) == GIMPLE_PHI)
        usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
      else
        usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
    }
  else
    clear_and_done_ssa_iter (&iter);

  while (1)
    {
      /* If we are done processing uses of a name, go up the stack
         of iterators and process SCCs as we found them.  */
      if (op_iter_done (&iter))
        {
          /* See if we found an SCC.  */
          if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
            if (!extract_and_process_scc_for_name (name))
              {
                VEC_free (tree, heap, namevec);
                VEC_free (ssa_op_iter, heap, itervec);
                return false;
              }

          /* Check if we are done.  */
          if (VEC_empty (tree, namevec))
            {
              VEC_free (tree, heap, namevec);
              VEC_free (ssa_op_iter, heap, itervec);
              return true;
            }

          /* Restore the last use walker and continue walking there.  */
          use = name;
          name = VEC_pop (tree, namevec);
          memcpy (&iter, VEC_last (ssa_op_iter, itervec),
                  sizeof (ssa_op_iter));
          VEC_pop (ssa_op_iter, itervec);
          goto continue_walking;
        }

      use = USE_FROM_PTR (usep);

      /* Since we handle phi nodes, we will sometimes get
         invariants in the use expression.  */
      if (TREE_CODE (use) == SSA_NAME)
        {
          if (! (VN_INFO (use)->visited))
            {
              /* Recurse by pushing the current use walking state on
                 the stack and starting over.  */
              VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
              VEC_safe_push(tree, heap, namevec, name);
              name = use;
              goto start_over;

continue_walking:
              VN_INFO (name)->low = MIN (VN_INFO (name)->low,
                                         VN_INFO (use)->low);
            }

          if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
              && VN_INFO (use)->on_sccstack)
            {
              VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
                                         VN_INFO (name)->low);
            }
        }

      usep = op_iter_next_use (&iter);
    }
}
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table)
{
  table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
  table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
  table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
                                   free_reference);

  gcc_obstack_init (&table->nary_obstack);
  table->phis_pool = create_alloc_pool ("VN phis",
                                        sizeof (struct vn_phi_s),
                                        30);
  table->references_pool = create_alloc_pool ("VN references",
                                              sizeof (struct vn_reference_s),
                                              30);
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  htab_delete (table->phis);
  htab_delete (table->nary);
  htab_delete (table->references);
  obstack_free (&table->nary_obstack, NULL);
  free_alloc_pool (table->phis_pool);
  free_alloc_pool (table->references_pool);
}
static void
init_scc_vn (void)
{
  size_t i;
  int j;
  int *rpo_numbers_temp;

  calculate_dominance_info (CDI_DOMINATORS);
  sccstack = NULL;
  constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
                                      free);

  constant_value_ids = BITMAP_ALLOC (NULL);

  next_dfs_num = 1;
  next_value_id = 1;

  vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
  /* VEC_alloc doesn't actually grow it to the right size, it just
     preallocates the space to do so.  */
  VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  shared_lookup_phiargs = NULL;
  shared_lookup_vops = NULL;
  shared_lookup_references = NULL;
  rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
  pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);

  /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
     the i'th block in RPO order is bb.  We want to map bb's to RPO
     numbers, so we need to rearrange this array.  */
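  /* For instance, if the RPO order of the blocks is bb2, bb4, bb3,
     rpo_numbers_temp holds {2, 4, 3} and the loop below produces
     rpo_numbers[2] == 0, rpo_numbers[4] == 1 and rpo_numbers[3] == 2.  */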
  for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
    rpo_numbers[rpo_numbers_temp[j]] = j;

  XDELETE (rpo_numbers_temp);

  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  /* Create the VN_INFO structures, and initialize value numbers to
     TOP.  */
  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name)
        {
          VN_INFO_GET (name)->valnum = VN_TOP;
          VN_INFO (name)->expr = NULL_TREE;
          VN_INFO (name)->value_id = 0;
        }
    }

  renumber_gimple_stmt_uids ();

  /* Create the valid and optimistic value numbering tables.  */
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info);
  optimistic_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (optimistic_info);
}
void
free_scc_vn (void)
{
  size_t i;

  htab_delete (constant_to_value_id);
  BITMAP_FREE (constant_value_ids);
  VEC_free (tree, heap, shared_lookup_phiargs);
  VEC_free (tree, gc, shared_lookup_vops);
  VEC_free (vn_reference_op_s, heap, shared_lookup_references);
  XDELETEVEC (rpo_numbers);

  for (i = 0; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->needs_insertion)
        release_ssa_name (name);
    }
  obstack_free (&vn_ssa_aux_obstack, NULL);
  VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);

  VEC_free (tree, heap, sccstack);
  free_vn_table (valid_info);
  XDELETE (valid_info);
  free_vn_table (optimistic_info);
  XDELETE (optimistic_info);
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     tables.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
                         vno, vn_nary_op_t, hi)
    {
      if (vno->result)
        {
          if (TREE_CODE (vno->result) == SSA_NAME)
            vno->value_id = VN_INFO (vno->result)->value_id;
          else if (is_gimple_min_invariant (vno->result))
            vno->value_id = get_or_alloc_constant_value_id (vno->result);
        }
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
                         vp, vn_phi_t, hi)
    {
      if (vp->result)
        {
          if (TREE_CODE (vp->result) == SSA_NAME)
            vp->value_id = VN_INFO (vp->result)->value_id;
          else if (is_gimple_min_invariant (vp->result))
            vp->value_id = get_or_alloc_constant_value_id (vp->result);
        }
    }

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
                         vr, vn_reference_t, hi)
    {
      if (vr->result)
        {
          if (TREE_CODE (vr->result) == SSA_NAME)
            vr->value_id = VN_INFO (vr->result)->value_id;
          else if (is_gimple_min_invariant (vr->result))
            vr->value_id = get_or_alloc_constant_value_id (vr->result);
        }
    }
}
/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  */

bool
run_scc_vn (bool may_insert_arg)
{
  size_t i;
  tree param;
  bool changed = true;

  may_insert = may_insert_arg;

  init_scc_vn ();
  current_info = valid_info;

  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = TREE_CHAIN (param))
    {
      if (gimple_default_def (cfun, param) != NULL)
        {
          tree def = gimple_default_def (cfun, param);
          SSA_VAL (def) = def;
        }
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            may_insert = false;
            return false;
          }
    }

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate until they stop changing.  */
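  /* Value numbers can form chains: a_1's value may be b_2, whose
     value in turn is c_3.  Iterating to a fixpoint lets every name
     pick up the value id from the end of its chain.  */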
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
        {
          tree name = ssa_name (i);
          vn_ssa_aux_t info;
          if (!name)
            continue;
          info = VN_INFO (name);
          if (TREE_CODE (info->valnum) == SSA_NAME
              && info->valnum != name
              && info->value_id != VN_INFO (info->valnum)->value_id)
            {
              changed = true;
              info->value_id = VN_INFO (info->valnum)->value_id;
            }
        }
    }

  set_hashtable_value_ids ();
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Recurse on elements of lists.  */
  if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
    {
      tree lop1, lop2;
      for (lop1 = e1, lop2 = e2;
           lop1 || lop2;
           lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
        {
          if (!lop1 || !lop2)
            return false;
          if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
            return false;
        }
      return true;
    }

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Sort the VUSE array so that we can do equality comparisons
   quicker on two vuse vecs.  */

void
sort_vuses (VEC (tree,gc) *vuses)
{
  if (VEC_length (tree, vuses) > 1)
    qsort (VEC_address (tree, vuses),
           VEC_length (tree, vuses),
           sizeof (tree),
           operand_build_cmp);
}

/* Sort the VUSE array so that we can do equality comparisons
   quicker on two vuse vecs.  */

void
sort_vuses_heap (VEC (tree,heap) *vuses)
{
  if (VEC_length (tree, vuses) > 1)
    qsort (VEC_address (tree, vuses),
           VEC_length (tree, vuses),
           sizeof (tree),
           operand_build_cmp);
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */
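/* E.g. a signed addition can trap with -ftrapv, and a floating point
   comparison can trap on signaling NaNs with -fsignaling-nans
   (illustrative examples).  */
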
bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}