1 /* SCC value numbering for trees
2 Copyright (C) 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
28 #include "basic-block.h"
29 #include "diagnostic.h"
30 #include "tree-inline.h"
31 #include "tree-flow.h"
33 #include "tree-dump.h"
37 #include "tree-iterator.h"
39 #include "alloc-pool.h"
40 #include "tree-pass.h"
43 #include "langhooks.h"
46 #include "tree-ssa-propagate.h"
47 #include "tree-ssa-sccvn.h"
49 /* This algorithm is based on the SCC algorithm presented by Keith
50 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
51 (http://citeseer.ist.psu.edu/41805.html). In
52 straight line code, it is equivalent to a regular hash based value
53 numbering that is performed in reverse postorder.
55 For code with cycles, there are two alternatives, both of which
56 require keeping the hashtables separate from the actual list of
57 value numbers for SSA names.
59 1. Iterate value numbering in an RPO walk of the blocks, removing
60 all the entries from the hashtable after each iteration (but
61 keeping the SSA name->value number mapping between iterations).
62 Iterate until it does not change.
64 2. Perform value numbering as part of an SCC walk on the SSA graph,
65 iterating only the cycles in the SSA graph until they do not change
66 (using a separate, optimistic hashtable for value numbering the SCC
69 The second is not just faster in practice (because most SSA graph
70 cycles do not involve all the variables in the graph), it also has
73 One of these nice properties is that when we pop an SCC off the
74 stack, we are guaranteed to have processed all the operands coming from
75 *outside of that SCC*, so we do not need to do anything special to
76 ensure they have value numbers.
78 Another nice property is that the SCC walk is done as part of a DFS
79 of the SSA graph, which makes it easy to perform combining and
80 simplifying operations at the same time.
82 The code below is deliberately written in a way that makes it easy
83 to separate the SCC walk from the other work it does.
85 In order to propagate constants through the code, we track which
86 expressions contain constants, and use those while folding. In
87 theory, we could also track expressions whose value numbers are
88 replaced, in case we end up folding based on expression
91 In order to value number memory, we assign value numbers to vuses.
92 This enables us to note that, for example, stores to the same
93 address of the same value from the same starting memory states are
97 1. We can iterate only the changing portions of the SCC's, but
98 I have not seen an SCC big enough for this to be a win.
99 2. If you differentiate between phi nodes for loops and phi nodes
100 for if-then-else, you can properly consider phi nodes in different
101 blocks for equivalence.
102 3. We could value number vuses in more cases, particularly, whole
106 /* The set of hashtables and alloc_pool's for their items. */
108 typedef struct vn_tables_s
113 struct obstack nary_obstack;
114 alloc_pool phis_pool;
115 alloc_pool references_pool;
118 static htab_t constant_to_value_id;
119 static bitmap constant_value_ids;
122 /* Valid hashtables storing information we have proven to be
125 static vn_tables_t valid_info;
127 /* Optimistic hashtables storing information we are making assumptions about
128 during iterations. */
130 static vn_tables_t optimistic_info;
132 /* Pointer to the set of hashtables that is currently being used.
133 Should always point to either the optimistic_info, or the
136 static vn_tables_t current_info;
139 /* Reverse post order index for each basic block. */
141 static int *rpo_numbers;
143 #define SSA_VAL(x) (VN_INFO ((x))->valnum)
145 /* This represents the top of the VN lattice, which is the universal
150 /* Unique counter for our value ids. */
152 static unsigned int next_value_id;
154 /* Next DFS number and the stack for strongly connected component
157 static unsigned int next_dfs_num;
158 static VEC (tree, heap) *sccstack;
160 static bool may_insert;
163 DEF_VEC_P(vn_ssa_aux_t);
164 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
166 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
167 are allocated on an obstack for locality reasons, and to free them
168 without looping over the VEC. */
170 static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
171 static struct obstack vn_ssa_aux_obstack;
173 /* Return the value numbering information for a given SSA name. */
178 vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
179 SSA_NAME_VERSION (name));
184 /* Set the value numbering info for a given SSA name to a given
188 VN_INFO_SET (tree name, vn_ssa_aux_t value)
190 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
191 SSA_NAME_VERSION (name), value);
194 /* Initialize the value numbering info for a given SSA name.
195 This should be called just once for every SSA name. */
198 VN_INFO_GET (tree name)
200 vn_ssa_aux_t newinfo;
202 newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
203 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
204 if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
205 VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
206 SSA_NAME_VERSION (name) + 1);
207 VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
208 SSA_NAME_VERSION (name), newinfo);
213 /* Get the representative expression for the SSA_NAME NAME. Returns
214 the representative SSA_NAME if there is no expression associated with it. */
217 vn_get_expr_for (tree name)
219 vn_ssa_aux_t vn = VN_INFO (name);
221 tree expr = NULL_TREE;
223 if (vn->valnum == VN_TOP)
226 /* If the value-number is a constant it is the representative
228 if (TREE_CODE (vn->valnum) != SSA_NAME)
231 /* Get to the information of the value of this SSA_NAME. */
232 vn = VN_INFO (vn->valnum);
234 /* If the value-number is a constant it is the representative
236 if (TREE_CODE (vn->valnum) != SSA_NAME)
239 /* Else if we have an expression, return it. */
240 if (vn->expr != NULL_TREE)
243 /* Otherwise use the defining statement to build the expression. */
244 def_stmt = SSA_NAME_DEF_STMT (vn->valnum);
246 /* If the value number is a default-definition or a PHI result
248 if (gimple_nop_p (def_stmt)
249 || gimple_code (def_stmt) == GIMPLE_PHI)
252 if (!is_gimple_assign (def_stmt))
255 /* FIXME tuples. This is incomplete and likely will miss some
257 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
260 if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
261 || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
262 || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
263 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
264 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
265 gimple_expr_type (def_stmt),
266 TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
270 expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
271 gimple_expr_type (def_stmt),
272 gimple_assign_rhs1 (def_stmt));
276 expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
277 gimple_expr_type (def_stmt),
278 gimple_assign_rhs1 (def_stmt),
279 gimple_assign_rhs2 (def_stmt));
284 if (expr == NULL_TREE)
287 /* Cache the expression. */
294 /* Free a phi operation structure VP. */
299 vn_phi_t phi = (vn_phi_t) vp;
300 VEC_free (tree, heap, phi->phiargs);
303 /* Free a reference operation structure VP. */
306 free_reference (void *vp)
308 vn_reference_t vr = (vn_reference_t) vp;
309 VEC_free (vn_reference_op_s, heap, vr->operands);
312 /* Hash table equality function for vn_constant_t. */
315 vn_constant_eq (const void *p1, const void *p2)
317 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
318 const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;
320 if (vc1->hashcode != vc2->hashcode)
323 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
326 /* Hash table hash function for vn_constant_t. */
329 vn_constant_hash (const void *p1)
331 const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
332 return vc1->hashcode;
335 /* Lookup a value id for CONSTANT and return it. If it does not
339 get_constant_value_id (tree constant)
342 struct vn_constant_s vc;
344 vc.hashcode = vn_hash_constant_with_type (constant);
345 vc.constant = constant;
346 slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
347 vc.hashcode, NO_INSERT);
349 return ((vn_constant_t)*slot)->value_id;
353 /* Lookup a value id for CONSTANT, and if it does not exist, create a
354 new one and return it. If it does exist, return it. */
357 get_or_alloc_constant_value_id (tree constant)
360 vn_constant_t vc = XNEW (struct vn_constant_s);
362 vc->hashcode = vn_hash_constant_with_type (constant);
363 vc->constant = constant;
364 slot = htab_find_slot_with_hash (constant_to_value_id, vc,
365 vc->hashcode, INSERT);
369 return ((vn_constant_t)*slot)->value_id;
371 vc->value_id = get_next_value_id ();
373 bitmap_set_bit (constant_value_ids, vc->value_id);
377 /* Return true if V is a value id for a constant. */
380 value_id_constant_p (unsigned int v)
382 return bitmap_bit_p (constant_value_ids, v);
385 /* Compare two reference operands P1 and P2 for equality. Return true if
386 they are equal, and false otherwise. */
389 vn_reference_op_eq (const void *p1, const void *p2)
391 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
392 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
394 return vro1->opcode == vro2->opcode
395 && types_compatible_p (vro1->type, vro2->type)
396 && expressions_equal_p (vro1->op0, vro2->op0)
397 && expressions_equal_p (vro1->op1, vro2->op1)
398 && expressions_equal_p (vro1->op2, vro2->op2);
401 /* Compute the hash for a reference operand VRO1. */
404 vn_reference_op_compute_hash (const vn_reference_op_t vro1)
406 hashval_t result = 0;
408 result += iterative_hash_expr (vro1->op0, vro1->opcode);
410 result += iterative_hash_expr (vro1->op1, vro1->opcode);
412 result += iterative_hash_expr (vro1->op2, vro1->opcode);
416 /* Return the hashcode for a given reference operation P1. */
419 vn_reference_hash (const void *p1)
421 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
422 return vr1->hashcode;
425 /* Compute a hash for the reference operation VR1 and return it. */
428 vn_reference_compute_hash (const vn_reference_t vr1)
432 vn_reference_op_t vro;
434 result = iterative_hash_expr (vr1->vuse, 0);
435 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
436 result += vn_reference_op_compute_hash (vro);
441 /* Return true if reference operations P1 and P2 are equivalent. This
442 means they have the same set of operands and vuses. */
445 vn_reference_eq (const void *p1, const void *p2)
448 vn_reference_op_t vro;
450 const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
451 const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
452 if (vr1->hashcode != vr2->hashcode)
455 /* Early out if this is not a hash collision. */
456 if (vr1->hashcode != vr2->hashcode)
459 /* The VOP needs to be the same. */
460 if (vr1->vuse != vr2->vuse)
463 /* If the operands are the same we are done. */
464 if (vr1->operands == vr2->operands)
467 /* We require that address operands be canonicalized in a way that
468 two memory references will have the same operands if they are
470 if (VEC_length (vn_reference_op_s, vr1->operands)
471 != VEC_length (vn_reference_op_s, vr2->operands))
474 for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
475 if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
482 /* Copy the operations present in load/store REF into RESULT, a vector of
483 vn_reference_op_s's. */
486 copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
488 if (TREE_CODE (ref) == TARGET_MEM_REF)
490 vn_reference_op_s temp;
492 memset (&temp, 0, sizeof (temp));
493 /* We do not care for spurious type qualifications. */
494 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
495 temp.opcode = TREE_CODE (ref);
496 temp.op0 = TMR_SYMBOL (ref) ? TMR_SYMBOL (ref) : TMR_BASE (ref);
497 temp.op1 = TMR_INDEX (ref);
498 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
500 memset (&temp, 0, sizeof (temp));
501 temp.type = NULL_TREE;
502 temp.opcode = TREE_CODE (ref);
503 temp.op0 = TMR_STEP (ref);
504 temp.op1 = TMR_OFFSET (ref);
505 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
509 /* For non-calls, store the information that makes up the address. */
513 vn_reference_op_s temp;
515 memset (&temp, 0, sizeof (temp));
516 /* We do not care for spurious type qualifications. */
517 temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
518 temp.opcode = TREE_CODE (ref);
522 case ALIGN_INDIRECT_REF:
524 /* The only operand is the address, which gets its own
525 vn_reference_op_s structure. */
527 case MISALIGNED_INDIRECT_REF:
528 temp.op0 = TREE_OPERAND (ref, 1);
531 /* Record bits and position. */
532 temp.op0 = TREE_OPERAND (ref, 1);
533 temp.op1 = TREE_OPERAND (ref, 2);
536 /* The field decl is enough to unambiguously specify the field,
537 a matching type is not necessary and a mismatching type
538 is always a spurious difference. */
539 temp.type = NULL_TREE;
540 /* If this is a reference to a union member, record the union
541 member size as operand. Do so only if we are doing
542 expression insertion (during FRE), as PRE currently gets
543 confused with this. */
545 && TREE_OPERAND (ref, 2) == NULL_TREE
546 && TREE_CODE (DECL_CONTEXT (TREE_OPERAND (ref, 1))) == UNION_TYPE
547 && integer_zerop (DECL_FIELD_OFFSET (TREE_OPERAND (ref, 1)))
548 && integer_zerop (DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1))))
549 temp.op0 = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)));
552 /* Record field as operand. */
553 temp.op0 = TREE_OPERAND (ref, 1);
554 temp.op1 = TREE_OPERAND (ref, 2);
557 case ARRAY_RANGE_REF:
559 /* Record index as operand. */
560 temp.op0 = TREE_OPERAND (ref, 1);
561 temp.op1 = TREE_OPERAND (ref, 2);
562 temp.op2 = TREE_OPERAND (ref, 3);
580 if (is_gimple_min_invariant (ref))
586 /* These are only interesting for their operands, their
587 existence, and their type. They will never be the last
588 ref in the chain of references (IE they require an
589 operand), so we don't have to put anything
590 for op* as it will be handled by the iteration */
593 case VIEW_CONVERT_EXPR:
598 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
600 if (REFERENCE_CLASS_P (ref)
601 || (TREE_CODE (ref) == ADDR_EXPR
602 && !is_gimple_min_invariant (ref)))
603 ref = TREE_OPERAND (ref, 0);
609 /* Re-create a reference tree from the reference ops OPS.
610 Returns NULL_TREE if the ops were not handled.
611 This routine needs to be kept in sync with copy_reference_ops_from_ref. */
614 get_ref_from_reference_ops (VEC(vn_reference_op_s, heap) *ops)
616 vn_reference_op_t op;
618 tree ref, *op0_p = &ref;
620 for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
627 case ALIGN_INDIRECT_REF:
629 *op0_p = build1 (op->opcode, op->type, NULL_TREE);
630 op0_p = &TREE_OPERAND (*op0_p, 0);
633 case MISALIGNED_INDIRECT_REF:
634 *op0_p = build2 (MISALIGNED_INDIRECT_REF, op->type,
636 op0_p = &TREE_OPERAND (*op0_p, 0);
640 *op0_p = build3 (BIT_FIELD_REF, op->type, NULL_TREE,
642 op0_p = &TREE_OPERAND (*op0_p, 0);
646 *op0_p = build3 (COMPONENT_REF, TREE_TYPE (op->op0), NULL_TREE,
648 op0_p = &TREE_OPERAND (*op0_p, 0);
651 case ARRAY_RANGE_REF:
653 *op0_p = build4 (op->opcode, op->type, NULL_TREE,
654 op->op0, op->op1, op->op2);
655 op0_p = &TREE_OPERAND (*op0_p, 0);
675 if (op->op0 != NULL_TREE)
677 gcc_assert (is_gimple_min_invariant (op->op0));
684 case VIEW_CONVERT_EXPR:
685 *op0_p = build1 (op->opcode, op->type, NULL_TREE);
686 op0_p = &TREE_OPERAND (*op0_p, 0);
697 /* Copy the operations present in load/store/call REF into RESULT, a vector of
698 vn_reference_op_s's. */
701 copy_reference_ops_from_call (gimple call,
702 VEC(vn_reference_op_s, heap) **result)
704 vn_reference_op_s temp;
707 /* Copy the type, opcode, function being called and static chain. */
708 memset (&temp, 0, sizeof (temp));
709 temp.type = gimple_call_return_type (call);
710 temp.opcode = CALL_EXPR;
711 temp.op0 = gimple_call_fn (call);
712 temp.op1 = gimple_call_chain (call);
713 VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
715 /* Copy the call arguments. As they can be references as well,
716 just chain them together. */
717 for (i = 0; i < gimple_call_num_args (call); ++i)
719 tree callarg = gimple_call_arg (call, i);
720 copy_reference_ops_from_ref (callarg, result);
724 /* Create a vector of vn_reference_op_s structures from REF, a
725 REFERENCE_CLASS_P tree. The vector is not shared. */
727 static VEC(vn_reference_op_s, heap) *
728 create_reference_ops_from_ref (tree ref)
730 VEC (vn_reference_op_s, heap) *result = NULL;
732 copy_reference_ops_from_ref (ref, &result);
736 /* Create a vector of vn_reference_op_s structures from CALL, a
737 call statement. The vector is not shared. */
739 static VEC(vn_reference_op_s, heap) *
740 create_reference_ops_from_call (gimple call)
742 VEC (vn_reference_op_s, heap) *result = NULL;
744 copy_reference_ops_from_call (call, &result);
748 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
749 *I_P to point to the last element of the replacement. */
751 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
754 VEC(vn_reference_op_s, heap) *mem = NULL;
755 vn_reference_op_t op;
756 unsigned int i = *i_p;
759 /* Get ops for the addressed object. */
760 op = VEC_index (vn_reference_op_s, *ops, i);
761 /* ??? If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
762 around it to avoid later ICEs. */
763 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
764 && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
766 vn_reference_op_s aref;
768 aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
769 aref.opcode = ARRAY_REF;
770 aref.op0 = integer_zero_node;
771 if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
772 && TYPE_MIN_VALUE (dom))
773 aref.op0 = TYPE_MIN_VALUE (dom);
774 aref.op1 = NULL_TREE;
775 aref.op2 = NULL_TREE;
776 VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
778 copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);
780 /* Do the replacement - we should have at least one op in mem now. */
781 if (VEC_length (vn_reference_op_s, mem) == 1)
783 VEC_replace (vn_reference_op_s, *ops, i - 1,
784 VEC_index (vn_reference_op_s, mem, 0));
785 VEC_ordered_remove (vn_reference_op_s, *ops, i);
788 else if (VEC_length (vn_reference_op_s, mem) == 2)
790 VEC_replace (vn_reference_op_s, *ops, i - 1,
791 VEC_index (vn_reference_op_s, mem, 0));
792 VEC_replace (vn_reference_op_s, *ops, i,
793 VEC_index (vn_reference_op_s, mem, 1));
795 else if (VEC_length (vn_reference_op_s, mem) > 2)
797 VEC_replace (vn_reference_op_s, *ops, i - 1,
798 VEC_index (vn_reference_op_s, mem, 0));
799 VEC_replace (vn_reference_op_s, *ops, i,
800 VEC_index (vn_reference_op_s, mem, 1));
801 /* ??? There is no VEC_splice. */
802 for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
803 VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
808 VEC_free (vn_reference_op_s, heap, mem);
812 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
813 structures into their value numbers. This is done in-place, and
814 the vector passed in is returned. */
816 static VEC (vn_reference_op_s, heap) *
817 valueize_refs (VEC (vn_reference_op_s, heap) *orig)
819 vn_reference_op_t vro;
822 for (i = 0; VEC_iterate (vn_reference_op_s, orig, i, vro); i++)
824 if (vro->opcode == SSA_NAME
825 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
827 vro->op0 = SSA_VAL (vro->op0);
828 /* If it transforms from an SSA_NAME to a constant, update
830 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
831 vro->opcode = TREE_CODE (vro->op0);
832 /* If it transforms from an SSA_NAME to an address, fold with
833 a preceding indirect reference. */
834 if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
835 && VEC_index (vn_reference_op_s,
836 orig, i - 1)->opcode == INDIRECT_REF)
838 vn_reference_fold_indirect (&orig, &i);
842 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
843 vro->op1 = SSA_VAL (vro->op1);
844 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
845 vro->op2 = SSA_VAL (vro->op2);
851 static VEC(vn_reference_op_s, heap) *shared_lookup_references;
853 /* Create a vector of vn_reference_op_s structures from REF, a
854 REFERENCE_CLASS_P tree. The vector is shared among all callers of
857 static VEC(vn_reference_op_s, heap) *
858 valueize_shared_reference_ops_from_ref (tree ref)
862 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
863 copy_reference_ops_from_ref (ref, &shared_lookup_references);
864 shared_lookup_references = valueize_refs (shared_lookup_references);
865 return shared_lookup_references;
868 /* Create a vector of vn_reference_op_s structures from CALL, a
869 call statement. The vector is shared among all callers of
872 static VEC(vn_reference_op_s, heap) *
873 valueize_shared_reference_ops_from_call (gimple call)
877 VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
878 copy_reference_ops_from_call (call, &shared_lookup_references);
879 shared_lookup_references = valueize_refs (shared_lookup_references);
880 return shared_lookup_references;
883 /* Lookup a SCCVN reference operation VR in the current hash table.
884 Returns the resulting value number if it exists in the hash table,
885 NULL_TREE otherwise. VNRESULT will be filled in with the actual
886 vn_reference_t stored in the hashtable if something is found. */
889 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
895 slot = htab_find_slot_with_hash (current_info->references, vr,
897 if (!slot && current_info == optimistic_info)
898 slot = htab_find_slot_with_hash (valid_info->references, vr,
903 *vnresult = (vn_reference_t)*slot;
904 return ((vn_reference_t)*slot)->result;
910 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
911 with the current VUSE and performs the expression lookup. */
914 vn_reference_lookup_2 (tree op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
916 vn_reference_t vr = (vn_reference_t)vr_;
920 /* Fixup vuse and hash. */
921 vr->hashcode = vr->hashcode - iterative_hash_expr (vr->vuse, 0);
922 vr->vuse = SSA_VAL (vuse);
923 vr->hashcode = vr->hashcode + iterative_hash_expr (vr->vuse, 0);
926 slot = htab_find_slot_with_hash (current_info->references, vr,
928 if (!slot && current_info == optimistic_info)
929 slot = htab_find_slot_with_hash (valid_info->references, vr,
937 /* Lookup a reference operation by it's parts, in the current hash table.
938 Returns the resulting value number if it exists in the hash table,
939 NULL_TREE otherwise. VNRESULT will be filled in with the actual
940 vn_reference_t stored in the hashtable if something is found. */
943 vn_reference_lookup_pieces (tree vuse,
944 VEC (vn_reference_op_s, heap) *operands,
945 vn_reference_t *vnresult, bool maywalk)
947 struct vn_reference_s vr1;
954 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
955 vr1.operands = valueize_refs (operands);
956 vr1.hashcode = vn_reference_compute_hash (&vr1);
957 vn_reference_lookup_1 (&vr1, vnresult);
963 tree ref = get_ref_from_reference_ops (operands);
967 (vn_reference_t)walk_non_aliased_vuses (ref, vr1.vuse,
968 vn_reference_lookup_2, &vr1);
972 return (*vnresult)->result;
977 /* Lookup OP in the current hash table, and return the resulting value
978 number if it exists in the hash table. Return NULL_TREE if it does
979 not exist in the hash table or if the result field of the structure
980 was NULL.. VNRESULT will be filled in with the vn_reference_t
981 stored in the hashtable if one exists. */
984 vn_reference_lookup (tree op, tree vuse, bool maywalk,
985 vn_reference_t *vnresult)
987 struct vn_reference_s vr1;
992 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
993 vr1.operands = valueize_shared_reference_ops_from_ref (op);
994 vr1.hashcode = vn_reference_compute_hash (&vr1);
999 vn_reference_t wvnresult;
1001 (vn_reference_t)walk_non_aliased_vuses (op, vr1.vuse,
1002 vn_reference_lookup_2, &vr1);
1006 *vnresult = wvnresult;
1007 return wvnresult->result;
1013 return vn_reference_lookup_1 (&vr1, vnresult);
1017 /* Insert OP into the current hash table with a value number of
1018 RESULT, and return the resulting reference structure we created. */
1021 vn_reference_insert (tree op, tree result, tree vuse)
1026 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1027 if (TREE_CODE (result) == SSA_NAME)
1028 vr1->value_id = VN_INFO (result)->value_id;
1030 vr1->value_id = get_or_alloc_constant_value_id (result);
1031 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1032 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1033 vr1->hashcode = vn_reference_compute_hash (vr1);
1034 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1036 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1039 /* Because we lookup stores using vuses, and value number failures
1040 using the vdefs (see visit_reference_op_store for how and why),
1041 it's possible that on failure we may try to insert an already
1042 inserted store. This is not wrong, there is no ssa name for a
1043 store that we could use as a differentiator anyway. Thus, unlike
1044 the other lookup functions, you cannot gcc_assert (!*slot)
1047 /* But free the old slot in case of a collision. */
1049 free_reference (*slot);
1055 /* Insert a reference by it's pieces into the current hash table with
1056 a value number of RESULT. Return the resulting reference
1057 structure we created. */
1060 vn_reference_insert_pieces (tree vuse,
1061 VEC (vn_reference_op_s, heap) *operands,
1062 tree result, unsigned int value_id)
1068 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1069 vr1->value_id = value_id;
1070 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1071 vr1->operands = valueize_refs (operands);
1072 vr1->hashcode = vn_reference_compute_hash (vr1);
1073 if (result && TREE_CODE (result) == SSA_NAME)
1074 result = SSA_VAL (result);
1075 vr1->result = result;
1077 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1080 /* At this point we should have all the things inserted that we have
1081 seen before, and we should never try inserting something that
1083 gcc_assert (!*slot);
1085 free_reference (*slot);
1091 /* Compute and return the hash value for nary operation VBO1. */
1094 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1099 for (i = 0; i < vno1->length; ++i)
1100 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
1101 vno1->op[i] = SSA_VAL (vno1->op[i]);
1103 if (vno1->length == 2
1104 && commutative_tree_code (vno1->opcode)
1105 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
1107 tree temp = vno1->op[0];
1108 vno1->op[0] = vno1->op[1];
1112 for (i = 0; i < vno1->length; ++i)
1113 hash += iterative_hash_expr (vno1->op[i], vno1->opcode);
1118 /* Return the computed hashcode for nary operation P1. */
1121 vn_nary_op_hash (const void *p1)
1123 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1124 return vno1->hashcode;
1127 /* Compare nary operations P1 and P2 and return true if they are
1131 vn_nary_op_eq (const void *p1, const void *p2)
1133 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
1134 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
1137 if (vno1->hashcode != vno2->hashcode)
1140 if (vno1->opcode != vno2->opcode
1141 || !types_compatible_p (vno1->type, vno2->type))
1144 for (i = 0; i < vno1->length; ++i)
1145 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
1151 /* Lookup a n-ary operation by its pieces and return the resulting value
1152 number if it exists in the hash table. Return NULL_TREE if it does
1153 not exist in the hash table or if the result field of the operation
1154 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1158 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
1159 tree type, tree op0, tree op1, tree op2,
1160 tree op3, vn_nary_op_t *vnresult)
1163 struct vn_nary_op_s vno1;
1167 vno1.length = length;
1173 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1174 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1176 if (!slot && current_info == optimistic_info)
1177 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1182 *vnresult = (vn_nary_op_t)*slot;
1183 return ((vn_nary_op_t)*slot)->result;
1186 /* Lookup OP in the current hash table, and return the resulting value
1187 number if it exists in the hash table. Return NULL_TREE if it does
1188 not exist in the hash table or if the result field of the operation
1189 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1193 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
1196 struct vn_nary_op_s vno1;
1201 vno1.opcode = TREE_CODE (op);
1202 vno1.length = TREE_CODE_LENGTH (TREE_CODE (op));
1203 vno1.type = TREE_TYPE (op);
1204 for (i = 0; i < vno1.length; ++i)
1205 vno1.op[i] = TREE_OPERAND (op, i);
1206 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1207 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1209 if (!slot && current_info == optimistic_info)
1210 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1215 *vnresult = (vn_nary_op_t)*slot;
1216 return ((vn_nary_op_t)*slot)->result;
1219 /* Lookup the rhs of STMT in the current hash table, and return the resulting
1220 value number if it exists in the hash table. Return NULL_TREE if
1221 it does not exist in the hash table. VNRESULT will contain the
1222 vn_nary_op_t from the hashtable if it exists. */
1225 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
1228 struct vn_nary_op_s vno1;
1233 vno1.opcode = gimple_assign_rhs_code (stmt);
1234 vno1.length = gimple_num_ops (stmt) - 1;
1235 vno1.type = TREE_TYPE (gimple_assign_lhs (stmt));
1236 for (i = 0; i < vno1.length; ++i)
1237 vno1.op[i] = gimple_op (stmt, i + 1);
1238 if (vno1.opcode == REALPART_EXPR
1239 || vno1.opcode == IMAGPART_EXPR
1240 || vno1.opcode == VIEW_CONVERT_EXPR)
1241 vno1.op[0] = TREE_OPERAND (vno1.op[0], 0);
1242 vno1.hashcode = vn_nary_op_compute_hash (&vno1);
1243 slot = htab_find_slot_with_hash (current_info->nary, &vno1, vno1.hashcode,
1245 if (!slot && current_info == optimistic_info)
1246 slot = htab_find_slot_with_hash (valid_info->nary, &vno1, vno1.hashcode,
1251 *vnresult = (vn_nary_op_t)*slot;
1252 return ((vn_nary_op_t)*slot)->result;
1255 /* Insert a n-ary operation into the current hash table using it's
1256 pieces. Return the vn_nary_op_t structure we created and put in
1260 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
1261 tree type, tree op0,
1262 tree op1, tree op2, tree op3,
1264 unsigned int value_id)
1269 vno1 = (vn_nary_op_t) obstack_alloc (¤t_info->nary_obstack,
1270 (sizeof (struct vn_nary_op_s)
1271 - sizeof (tree) * (4 - length)));
1272 vno1->value_id = value_id;
1273 vno1->opcode = code;
1274 vno1->length = length;
1284 vno1->result = result;
1285 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1286 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1288 gcc_assert (!*slot);
1295 /* Insert OP into the current hash table with a value number of
1296 RESULT. Return the vn_nary_op_t structure we created and put in
1300 vn_nary_op_insert (tree op, tree result)
1302 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
1307 vno1 = (vn_nary_op_t) obstack_alloc (¤t_info->nary_obstack,
1308 (sizeof (struct vn_nary_op_s)
1309 - sizeof (tree) * (4 - length)));
1310 vno1->value_id = VN_INFO (result)->value_id;
1311 vno1->opcode = TREE_CODE (op);
1312 vno1->length = length;
1313 vno1->type = TREE_TYPE (op);
1314 for (i = 0; i < vno1->length; ++i)
1315 vno1->op[i] = TREE_OPERAND (op, i);
1316 vno1->result = result;
1317 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1318 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1320 gcc_assert (!*slot);
1326 /* Insert the rhs of STMT into the current hash table with a value number of
1330 vn_nary_op_insert_stmt (gimple stmt, tree result)
1332 unsigned length = gimple_num_ops (stmt) - 1;
1337 vno1 = (vn_nary_op_t) obstack_alloc (¤t_info->nary_obstack,
1338 (sizeof (struct vn_nary_op_s)
1339 - sizeof (tree) * (4 - length)));
1340 vno1->value_id = VN_INFO (result)->value_id;
1341 vno1->opcode = gimple_assign_rhs_code (stmt);
1342 vno1->length = length;
1343 vno1->type = TREE_TYPE (gimple_assign_lhs (stmt));
1344 for (i = 0; i < vno1->length; ++i)
1345 vno1->op[i] = gimple_op (stmt, i + 1);
1346 if (vno1->opcode == REALPART_EXPR
1347 || vno1->opcode == IMAGPART_EXPR
1348 || vno1->opcode == VIEW_CONVERT_EXPR)
1349 vno1->op[0] = TREE_OPERAND (vno1->op[0], 0);
1350 vno1->result = result;
1351 vno1->hashcode = vn_nary_op_compute_hash (vno1);
1352 slot = htab_find_slot_with_hash (current_info->nary, vno1, vno1->hashcode,
1354 gcc_assert (!*slot);
1360 /* Compute a hashcode for PHI operation VP1 and return it. */
1362 static inline hashval_t
1363 vn_phi_compute_hash (vn_phi_t vp1)
1365 hashval_t result = 0;
/* Seed the hash with the basic block index; vn_phi_eq only compares
   PHIs from the same block.  */
1370 result = vp1->block->index;
1372 /* If all PHI arguments are constants we need to distinguish
1373 the PHI node via its type. */
1374 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
1375 result += (INTEGRAL_TYPE_P (type)
1376 + (INTEGRAL_TYPE_P (type)
1377 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
/* Fold each argument into the hash; VN_TOP arguments are skipped so
   that the hash agrees with vn_phi_eq, which ignores VN_TOP.  */
1379 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1381 if (phi1op == VN_TOP)
1383 result += iterative_hash_expr (phi1op, result);
1389 /* Return the computed hashcode for phi operation P1. */
/* htab hash callback; the hashcode was precomputed by vn_phi_compute_hash.  */
1392 vn_phi_hash (const void *p1)
1394 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1395 return vp1->hashcode;
1398 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
/* htab equality callback for the phis table.  */
1401 vn_phi_eq (const void *p1, const void *p2)
1403 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
1404 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
/* Cheap rejection first: differing hashcodes can never be equal.  */
1406 if (vp1->hashcode != vp2->hashcode)
1409 if (vp1->block == vp2->block)
1414 /* If the PHI nodes do not have compatible types
1415 they are not the same. */
1416 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
1417 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
1420 /* Any phi in the same block will have its arguments in the
1421 same edge order, because of how we store phi nodes. */
1422 for (i = 0; VEC_iterate (tree, vp1->phiargs, i, phi1op); i++)
1424 tree phi2op = VEC_index (tree, vp2->phiargs, i);
/* VN_TOP is equivalent to anything, so skip the comparison for it.  */
1425 if (phi1op == VN_TOP || phi2op == VN_TOP)
1427 if (!expressions_equal_p (phi1op, phi2op))
/* Scratch vector reused by every vn_phi_lookup call to avoid reallocating
   the argument list on each lookup.  */
1435 static VEC(tree, heap) *shared_lookup_phiargs;
1437 /* Lookup PHI in the current hash table, and return the resulting
1438 value number if it exists in the hash table. Return NULL_TREE if
1439 it does not exist in the hash table. */
1442 vn_phi_lookup (gimple phi)
1445 struct vn_phi_s vp1;
1448 VEC_truncate (tree, shared_lookup_phiargs, 0);
1450 /* Canonicalize the SSA_NAME's to their value number. */
1451 for (i = 0; i < gimple_phi_num_args (phi); i++)
1453 tree def = PHI_ARG_DEF (phi, i);
1454 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1455 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
1457 vp1.phiargs = shared_lookup_phiargs;
1458 vp1.block = gimple_bb (phi);
1459 vp1.hashcode = vn_phi_compute_hash (&vp1);
1460 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
/* When value numbering optimistically, a miss falls back to the valid
   (converged) table.  */
1462 if (!slot && current_info == optimistic_info)
1463 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
1467 return ((vn_phi_t)*slot)->result;
1470 /* Insert PHI into the current hash table with a value number of
/* Unlike lookup, insertion builds a heap-allocated args vector owned by the
   table entry, allocated from the phis_pool.  */
1474 vn_phi_insert (gimple phi, tree result)
1477 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
1479 VEC (tree, heap) *args = NULL;
1481 /* Canonicalize the SSA_NAME's to their value number. */
1482 for (i = 0; i < gimple_phi_num_args (phi); i++)
1484 tree def = PHI_ARG_DEF (phi, i);
1485 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
1486 VEC_safe_push (tree, heap, args, def);
1488 vp1->value_id = VN_INFO (result)->value_id;
1489 vp1->phiargs = args;
1490 vp1->block = gimple_bb (phi);
1491 vp1->result = result;
1492 vp1->hashcode = vn_phi_compute_hash (vp1);
1494 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
1497 /* Because we iterate over phi operations more than once, it's
1498 possible the slot might already exist here, hence no assert.*/
1504 /* Print set of components in strongly connected component SCC to OUT. */
/* Debug-dump helper; used with dump_file under TDF_DETAILS.  */
1507 print_scc (FILE *out, VEC (tree, heap) *scc)
1512 fprintf (out, "SCC consists of: ");
1513 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
1515 print_generic_expr (out, var, 0);
1518 fprintf (out, "\n");
1521 /* Set the value number of FROM to TO, return true if it has changed
/* NOTE(review): extraction gaps — the abnormal-PHI special case and parts of
   the assert condition are missing in this view.  */
1525 set_ssa_val_to (tree from, tree to)
1530 && TREE_CODE (to) == SSA_NAME
1531 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
1534 /* The only thing we allow as value numbers are VN_TOP, ssa_names
1535 and invariants. So assert that here. */
1536 gcc_assert (to != NULL_TREE
1538 || TREE_CODE (to) == SSA_NAME
1539 || is_gimple_min_invariant (to)));
1541 if (dump_file && (dump_flags & TDF_DETAILS))
1543 fprintf (dump_file, "Setting value number of ");
1544 print_generic_expr (dump_file, from, 0);
1545 fprintf (dump_file, " to ");
1546 print_generic_expr (dump_file, to, 0);
1549 currval = SSA_VAL (from);
/* Only record a change when the new value really differs; OEP_PURE_SAME
   treats equal-valued constants as unchanged.  */
1551 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
1553 VN_INFO (from)->valnum = to;
1554 if (dump_file && (dump_flags & TDF_DETAILS))
1555 fprintf (dump_file, " (changed)\n");
1558 if (dump_file && (dump_flags & TDF_DETAILS))
1559 fprintf (dump_file, "\n");
1563 /* Set all definitions in STMT to value number to themselves.
1564 Return true if a value number changed. */
/* Valuing a def to itself is how this pass represents VARYING.  */
1567 defs_to_varying (gimple stmt)
1569 bool changed = false;
1573 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
1575 tree def = DEF_FROM_PTR (defp);
1577 VN_INFO (def)->use_processed = true;
1578 changed |= set_ssa_val_to (def, def);
/* Forward declarations for helpers defined later in this file.  */
1583 static bool expr_has_constants (tree expr);
1584 static tree valueize_expr (tree expr);
1586 /* Visit a copy between LHS and RHS, return true if the value number
1590 visit_copy (tree lhs, tree rhs)
1592 /* Follow chains of copies to their destination. */
1593 while (TREE_CODE (rhs) == SSA_NAME
1594 && SSA_VAL (rhs) != rhs)
1595 rhs = SSA_VAL (rhs);
1597 /* The copy may have a more interesting constant filled expression
1598 (we don't, since we know our RHS is just an SSA name). */
1599 if (TREE_CODE (rhs) == SSA_NAME)
1601 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
1602 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
/* Record the final (chain-followed) value for LHS.  */
1605 return set_ssa_val_to (lhs, rhs);
1608 /* Visit a unary operator RHS, value number it, and return true if the
1609 value number of LHS has changed as a result. */
1612 visit_unary_op (tree lhs, gimple stmt)
1614 bool changed = false;
1615 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
/* On a hit, LHS takes the previously recorded value; otherwise LHS is
   its own value and the expression is entered into the table.  */
1619 changed = set_ssa_val_to (lhs, result);
1623 changed = set_ssa_val_to (lhs, lhs);
1624 vn_nary_op_insert_stmt (stmt, lhs);
1630 /* Visit a binary operator RHS, value number it, and return true if the
1631 value number of LHS has changed as a result. */
/* Same lookup-or-insert protocol as visit_unary_op.  */
1634 visit_binary_op (tree lhs, gimple stmt)
1636 bool changed = false;
1637 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
1641 changed = set_ssa_val_to (lhs, result);
1645 changed = set_ssa_val_to (lhs, lhs);
1646 vn_nary_op_insert_stmt (stmt, lhs);
1652 /* Visit a call STMT storing into LHS. Return true if the value number
1653 of the LHS has changed as a result. */
/* Only reached for pure/const calls (see visit_use); keyed on the call's
   valueized arguments plus the incoming vuse.  */
1656 visit_reference_op_call (tree lhs, gimple stmt)
1658 bool changed = false;
1659 struct vn_reference_s vr1;
1661 tree vuse = gimple_vuse (stmt);
1663 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1664 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
1665 vr1.hashcode = vn_reference_compute_hash (&vr1);
1666 result = vn_reference_lookup_1 (&vr1, NULL);
1669 changed = set_ssa_val_to (lhs, result);
1670 if (TREE_CODE (result) == SSA_NAME
1671 && VN_INFO (result)->has_constants)
1672 VN_INFO (lhs)->has_constants = true;
/* Miss: LHS is its own value; build a pool-allocated entry with its own
   (non-shared) operand vector and insert it.  */
1678 changed = set_ssa_val_to (lhs, lhs);
1679 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
1680 vr2->vuse = vr1.vuse;
1681 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
1682 vr2->hashcode = vr1.hashcode;
1684 slot = htab_find_slot_with_hash (current_info->references,
1685 vr2, vr2->hashcode, INSERT);
/* Replacing an existing entry: release the old one first.  */
1687 free_reference (*slot);
1694 /* Visit a load from a reference operator RHS, part of STMT, value number it,
1695 and return true if the value number of the LHS has changed as a result. */
1698 visit_reference_op_load (tree lhs, tree op, gimple stmt)
1700 bool changed = false;
1701 tree result = vn_reference_lookup (op, gimple_vuse (stmt), true, NULL);
1703 /* We handle type-punning through unions by value-numbering based
1704 on offset and size of the access. Be prepared to handle a
1705 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
1707 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
1709 /* We will be setting the value number of lhs to the value number
1710 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
1711 So first simplify and lookup this expression to see if it
1712 is already available. */
1713 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
/* Try to collapse a conversion-of-conversion through the defining
   expression of the converted name.  */
1714 if ((CONVERT_EXPR_P (val)
1715 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
1716 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
1718 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
1719 if ((CONVERT_EXPR_P (tem)
1720 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
1721 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
1722 TREE_TYPE (val), tem)))
1726 if (!is_gimple_min_invariant (val)
1727 && TREE_CODE (val) != SSA_NAME)
1728 result = vn_nary_op_lookup (val, NULL);
1729 /* If the expression is not yet available, value-number lhs to
1730 a new SSA_NAME we create. */
/* may_insert gates creation of new names; presumably set only by PRE-style
   callers — TODO confirm against the pass entry points.  */
1731 if (!result && may_insert)
1733 result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
1734 /* Initialize value-number information properly. */
1735 VN_INFO_GET (result)->valnum = result;
1736 VN_INFO (result)->value_id = get_next_value_id ();
1737 VN_INFO (result)->expr = val;
1738 VN_INFO (result)->has_constants = expr_has_constants (val);
1739 VN_INFO (result)->needs_insertion = true;
1740 /* As all "inserted" statements are singleton SCCs, insert
1741 to the valid table. This is strictly needed to
1742 avoid re-generating new value SSA_NAMEs for the same
1743 expression during SCC iteration over and over (the
1744 optimistic table gets cleared after each iteration).
1745 We do not need to insert into the optimistic table, as
1746 lookups there will fall back to the valid table. */
1747 if (current_info == optimistic_info)
1749 current_info = valid_info;
1750 vn_nary_op_insert (val, result);
1751 current_info = optimistic_info;
1754 vn_nary_op_insert (val, result);
1755 if (dump_file && (dump_flags & TDF_DETAILS))
1757 fprintf (dump_file, "Inserting name ");
1758 print_generic_expr (dump_file, result, 0);
1759 fprintf (dump_file, " for expression ");
1760 print_generic_expr (dump_file, val, 0);
1761 fprintf (dump_file, "\n");
1768 changed = set_ssa_val_to (lhs, result);
/* Propagate constant-ness and the simplified expression from the value.  */
1769 if (TREE_CODE (result) == SSA_NAME
1770 && VN_INFO (result)->has_constants)
1772 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
1773 VN_INFO (lhs)->has_constants = true;
/* Lookup miss: LHS is its own value; remember this load for later hits.  */
1778 changed = set_ssa_val_to (lhs, lhs);
1779 vn_reference_insert (op, lhs, gimple_vuse (stmt));
1786 /* Visit a store to a reference operator LHS, part of STMT, value number it,
1787 and return true if the value number of the LHS has changed as a result. */
1790 visit_reference_op_store (tree lhs, tree op, gimple stmt)
1792 bool changed = false;
1794 bool resultsame = false;
1796 /* First we want to lookup using the *vuses* from the store and see
1797 if there the last store to this location with the same address
1800 The vuses represent the memory state before the store. If the
1801 memory state, address, and value of the store is the same as the
1802 last store to this location, then this store will produce the
1803 same memory state as that store.
1805 In this case the vdef versions for this store are value numbered to those
1806 vuse versions, since they represent the same memory state after
1809 Otherwise, the vdefs for the store are used when inserting into
1810 the table, since the store generates a new memory state. */
1812 result = vn_reference_lookup (lhs, gimple_vuse (stmt), false, NULL);
/* Compare the stored value against the previously stored one, both
   canonicalized to their value numbers.  */
1816 if (TREE_CODE (result) == SSA_NAME)
1817 result = SSA_VAL (result);
1818 if (TREE_CODE (op) == SSA_NAME)
1820 resultsame = expressions_equal_p (result, op);
1823 if (!result || !resultsame)
1827 if (dump_file && (dump_flags & TDF_DETAILS))
1829 fprintf (dump_file, "No store match\n");
1830 fprintf (dump_file, "Value numbering store ");
1831 print_generic_expr (dump_file, lhs, 0);
1832 fprintf (dump_file, " to ");
1833 print_generic_expr (dump_file, op, 0);
1834 fprintf (dump_file, "\n");
1836 /* Have to set value numbers before insert, since insert is
1837 going to valueize the references in-place. */
1838 if ((vdef = gimple_vdef (stmt)))
1840 VN_INFO (vdef)->use_processed = true;
1841 changed |= set_ssa_val_to (vdef, vdef);
1844 /* Do not insert structure copies into the tables. */
1845 if (is_gimple_min_invariant (op)
1846 || is_gimple_reg (op))
1847 vn_reference_insert (lhs, op, vdef);
1851 /* We had a match, so value number the vdef to have the value
1852 number of the vuse it came from. */
1855 if (dump_file && (dump_flags & TDF_DETAILS))
1856 fprintf (dump_file, "Store matched earlier value,"
1857 "value numbering store vdefs to matching vuses.\n");
1859 def = gimple_vdef (stmt);
1860 use = gimple_vuse (stmt);
1862 VN_INFO (def)->use_processed = true;
1863 changed |= set_ssa_val_to (def, SSA_VAL (use));
1869 /* Visit and value number PHI, return true if the value number
1873 visit_phi (gimple phi)
1875 bool changed = false;
1877 tree sameval = VN_TOP;
1878 bool allsame = true;
1881 /* TODO: We could check for this in init_sccvn, and replace this
1882 with a gcc_assert. */
/* PHIs live across abnormal edges are always VARYING (self-valued).  */
1883 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
1884 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1886 /* See if all non-TOP arguments have the same value. TOP is
1887 equivalent to everything, so we can ignore it. */
1888 for (i = 0; i < gimple_phi_num_args (phi); i++)
1890 tree def = PHI_ARG_DEF (phi, i);
1892 if (TREE_CODE (def) == SSA_NAME)
1893 def = SSA_VAL (def);
1896 if (sameval == VN_TOP)
1902 if (!expressions_equal_p (def, sameval))
1910 /* If all value numbered to the same value, the phi node has that
1914 if (is_gimple_min_invariant (sameval))
1916 VN_INFO (PHI_RESULT (phi))->has_constants = true;
1917 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1921 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1922 VN_INFO (PHI_RESULT (phi))->expr = sameval;
1925 if (TREE_CODE (sameval) == SSA_NAME)
1926 return visit_copy (PHI_RESULT (phi), sameval);
1928 return set_ssa_val_to (PHI_RESULT (phi), sameval);
1931 /* Otherwise, see if it is equivalent to a phi node in this block. */
1932 result = vn_phi_lookup (phi);
1935 if (TREE_CODE (result) == SSA_NAME)
1936 changed = visit_copy (PHI_RESULT (phi), result);
1938 changed = set_ssa_val_to (PHI_RESULT (phi), result);
/* No equivalent PHI: record this one and value it to itself.  */
1942 vn_phi_insert (phi, PHI_RESULT (phi));
1943 VN_INFO (PHI_RESULT (phi))->has_constants = false;
1944 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
1945 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
1951 /* Return true if EXPR contains constants. */
/* Only unary/binary classes inspect operands; for everything else the
   expression itself is tested for invariant-ness.  */
1954 expr_has_constants (tree expr)
1956 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
1959 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
1962 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
1963 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
1964 /* Constants inside reference ops are rarely interesting, but
1965 it can take a lot of looking to find them. */
1967 case tcc_declaration:
1970 return is_gimple_min_invariant (expr);
1975 /* Return true if STMT contains constants. */
/* gimple counterpart of expr_has_constants, dispatching on the rhs class
   of an assignment.  */
1978 stmt_has_constants (gimple stmt)
1980 if (gimple_code (stmt) != GIMPLE_ASSIGN)
1983 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
1985 case GIMPLE_UNARY_RHS:
1986 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
1988 case GIMPLE_BINARY_RHS:
1989 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
1990 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
1991 case GIMPLE_SINGLE_RHS:
1992 /* Constants inside reference ops are rarely interesting, but
1993 it can take a lot of looking to find them. */
1994 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2001 /* Replace SSA_NAMES in expr with their value numbers, and return the
2003 This is performed in place. */
/* Note: EXPR is mutated in place, so callers are expected to pass an
   unshared tree (see vn_get_expr_for users).  */
2006 valueize_expr (tree expr)
2008 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2011 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2012 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2013 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2016 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2017 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2018 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2019 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2020 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2021 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2029 /* Simplify the binary expression RHS, and return the result if
2033 simplify_binary_expression (gimple stmt)
2035 tree result = NULL_TREE;
2036 tree op0 = gimple_assign_rhs1 (stmt);
2037 tree op1 = gimple_assign_rhs2 (stmt);
2039 /* This will not catch every single case we could combine, but will
2040 catch those with constants. The goal here is to simultaneously
2041 combine constants between expressions, but avoid infinite
2042 expansion of expressions during simplification. */
/* Expand an operand to its defining expression only when it carries
   constants (or for comparisons, where combining is always tried);
   otherwise just canonicalize to its value number.  */
2043 if (TREE_CODE (op0) == SSA_NAME)
2045 if (VN_INFO (op0)->has_constants
2046 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2047 op0 = valueize_expr (vn_get_expr_for (op0));
2048 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2049 op0 = SSA_VAL (op0);
2052 if (TREE_CODE (op1) == SSA_NAME)
2054 if (VN_INFO (op1)->has_constants)
2055 op1 = valueize_expr (vn_get_expr_for (op1));
2056 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2057 op1 = SSA_VAL (op1);
2060 /* Avoid folding if nothing changed. */
2061 if (op0 == gimple_assign_rhs1 (stmt)
2062 && op1 == gimple_assign_rhs2 (stmt))
/* Defer overflow warnings: folding here is speculative and must not
   emit user-visible diagnostics for code that may be discarded.  */
2065 fold_defer_overflow_warnings ();
2067 result = fold_binary (gimple_assign_rhs_code (stmt),
2068 TREE_TYPE (gimple_get_lhs (stmt)), op0, op1);
2070 STRIP_USELESS_TYPE_CONVERSION (result);
2072 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2075 /* Make sure result is not a complex expression consisting
2076 of operators of operators (IE (a + b) + (a + c))
2077 Otherwise, we will end up with unbounded expressions if
2078 fold does anything at all. */
2079 if (result && valid_gimple_rhs_p (result))
2085 /* Simplify the unary expression RHS, and return the result if
2089 simplify_unary_expression (gimple stmt)
2091 tree result = NULL_TREE;
2092 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2094 /* We handle some tcc_reference codes here that are all
2095 GIMPLE_ASSIGN_SINGLE codes. */
2096 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2097 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2098 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2099 op0 = TREE_OPERAND (op0, 0);
2101 if (TREE_CODE (op0) != SSA_NAME)
2105 if (VN_INFO (op0)->has_constants)
2106 op0 = valueize_expr (vn_get_expr_for (op0));
2107 else if (gimple_assign_cast_p (stmt)
2108 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2109 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2110 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2112 /* We want to do tree-combining on conversion-like expressions.
2113 Make sure we feed only SSA_NAMEs or constants to fold though. */
2114 tree tem = valueize_expr (vn_get_expr_for (op0));
2115 if (UNARY_CLASS_P (tem)
2116 || BINARY_CLASS_P (tem)
2117 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2118 || TREE_CODE (tem) == SSA_NAME
2119 || is_gimple_min_invariant (tem))
2123 /* Avoid folding if nothing changed, but remember the expression. */
2124 if (op0 == orig_op0)
/* fold_unary_ignore_overflow: like fold_unary but will not introduce
   overflow-infected results.  */
2127 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2128 gimple_expr_type (stmt), op0);
2131 STRIP_USELESS_TYPE_CONVERSION (result);
2132 if (valid_gimple_rhs_p (result))
2139 /* Try to simplify RHS using equivalences and constant folding. */
/* Dispatches on the tree-code class of the rhs; returns a simplified
   replacement tree or (presumably) NULL_TREE when nothing applies —
   missing lines prevent confirming the default return here.  */
2142 try_to_simplify (gimple stmt)
2146 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
2147 in this case, there is no point in doing extra work. */
2148 if (gimple_assign_copy_p (stmt)
2149 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2152 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2154 case tcc_declaration:
2155 tem = get_symbol_constant_value (gimple_assign_rhs1 (stmt));
2161 /* Do not do full-blown reference lookup here, but simplify
2162 reads from constant aggregates. */
2163 tem = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
2167 /* Fallthrough for some codes that can operate on registers. */
2168 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2169 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2170 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2172 /* We could do a little more with unary ops, if they expand
2173 into binary ops, but it's debatable whether it is worth it. */
2175 return simplify_unary_expression (stmt);
2177 case tcc_comparison:
2179 return simplify_binary_expression (stmt);
2188 /* Visit and value number USE, return true if the value number
/* Central per-SSA-name dispatcher of the SCC walk: classifies the defining
   statement of USE and forwards to the visit_* helpers above.
   NOTE(review): extraction gaps — several condition and brace lines of the
   original are missing in this view.  */
2192 visit_use (tree use)
2194 bool changed = false;
2195 gimple stmt = SSA_NAME_DEF_STMT (use);
2197 VN_INFO (use)->use_processed = true;
2199 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2200 if (dump_file && (dump_flags & TDF_DETAILS)
2201 && !SSA_NAME_IS_DEFAULT_DEF (use))
2203 fprintf (dump_file, "Value numbering ");
2204 print_generic_expr (dump_file, use, 0);
2205 fprintf (dump_file, " stmt = ");
2206 print_gimple_stmt (dump_file, stmt, 0, 0);
2209 /* Handle uninitialized uses. */
2210 if (SSA_NAME_IS_DEFAULT_DEF (use))
2211 changed = set_ssa_val_to (use, use);
2214 if (gimple_code (stmt) == GIMPLE_PHI)
2215 changed = visit_phi (stmt);
/* Statements with no lhs, volatile operands, or that may throw cannot
   be value numbered; their defs become varying.  */
2216 else if (!gimple_has_lhs (stmt)
2217 || gimple_has_volatile_ops (stmt)
2218 || stmt_could_throw_p (stmt))
2219 changed = defs_to_varying (stmt);
2220 else if (is_gimple_assign (stmt))
2222 tree lhs = gimple_assign_lhs (stmt);
2225 /* Shortcut for copies. Simplifying copies is pointless,
2226 since we copy the expression and value they represent. */
2227 if (gimple_assign_copy_p (stmt)
2228 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2229 && TREE_CODE (lhs) == SSA_NAME)
2231 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2234 simplified = try_to_simplify (stmt);
2237 if (dump_file && (dump_flags & TDF_DETAILS))
2239 fprintf (dump_file, "RHS ");
2240 print_gimple_expr (dump_file, stmt, 0, 0);
2241 fprintf (dump_file, " simplified to ");
2242 print_generic_expr (dump_file, simplified, 0);
2243 if (TREE_CODE (lhs) == SSA_NAME)
2244 fprintf (dump_file, " has constants %d\n",
2245 expr_has_constants (simplified));
2247 fprintf (dump_file, "\n");
2250 /* Setting value numbers to constants will occasionally
2251 screw up phi congruence because constants are not
2252 uniquely associated with a single ssa name that can be
2255 && is_gimple_min_invariant (simplified)
2256 && TREE_CODE (lhs) == SSA_NAME)
2258 VN_INFO (lhs)->expr = simplified;
2259 VN_INFO (lhs)->has_constants = true;
2260 changed = set_ssa_val_to (lhs, simplified);
2264 && TREE_CODE (simplified) == SSA_NAME
2265 && TREE_CODE (lhs) == SSA_NAME)
2267 changed = visit_copy (lhs, simplified);
2270 else if (simplified)
2272 if (TREE_CODE (lhs) == SSA_NAME)
2274 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
2275 /* We have to unshare the expression or else
2276 valuizing may change the IL stream. */
2277 VN_INFO (lhs)->expr = unshare_expr (simplified);
2280 else if (stmt_has_constants (stmt)
2281 && TREE_CODE (lhs) == SSA_NAME)
2282 VN_INFO (lhs)->has_constants = true;
2283 else if (TREE_CODE (lhs) == SSA_NAME)
2285 /* We reset expr and constantness here because we may
2286 have been value numbering optimistically, and
2287 iterating. They may become non-constant in this case,
2288 even if they were optimistically constant. */
2290 VN_INFO (lhs)->has_constants = false;
2291 VN_INFO (lhs)->expr = NULL_TREE;
2294 if ((TREE_CODE (lhs) == SSA_NAME
2295 /* We can substitute SSA_NAMEs that are live over
2296 abnormal edges with their constant value. */
2297 && !(gimple_assign_copy_p (stmt)
2298 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2300 && is_gimple_min_invariant (simplified))
2301 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2302 /* Stores or copies from SSA_NAMEs that are live over
2303 abnormal edges are a problem. */
2304 || (gimple_assign_single_p (stmt)
2305 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2306 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
2307 changed = defs_to_varying (stmt);
/* A non-register lhs is a store to memory.  */
2308 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
2310 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
2312 else if (TREE_CODE (lhs) == SSA_NAME)
2314 if ((gimple_assign_copy_p (stmt)
2315 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
2317 && is_gimple_min_invariant (simplified)))
2319 VN_INFO (lhs)->has_constants = true;
2321 changed = set_ssa_val_to (lhs, simplified);
2323 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
/* Not a constant or copy: classify the rhs shape.  */
2327 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2329 case GIMPLE_UNARY_RHS:
2330 changed = visit_unary_op (lhs, stmt);
2332 case GIMPLE_BINARY_RHS:
2333 changed = visit_binary_op (lhs, stmt);
2335 case GIMPLE_SINGLE_RHS:
2336 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2339 /* VOP-less references can go through unary case. */
2340 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
2341 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2342 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR )
2343 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
2345 changed = visit_unary_op (lhs, stmt);
2349 case tcc_declaration:
2350 changed = visit_reference_op_load
2351 (lhs, gimple_assign_rhs1 (stmt), stmt);
2353 case tcc_expression:
2354 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
2356 changed = visit_unary_op (lhs, stmt);
2361 changed = defs_to_varying (stmt);
2365 changed = defs_to_varying (stmt);
2371 changed = defs_to_varying (stmt);
2373 else if (is_gimple_call (stmt))
2375 tree lhs = gimple_call_lhs (stmt);
2377 /* ??? We could try to simplify calls. */
2379 if (stmt_has_constants (stmt)
2380 && TREE_CODE (lhs) == SSA_NAME)
2381 VN_INFO (lhs)->has_constants = true;
2382 else if (TREE_CODE (lhs) == SSA_NAME)
2384 /* We reset expr and constantness here because we may
2385 have been value numbering optimistically, and
2386 iterating. They may become non-constant in this case,
2387 even if they were optimistically constant. */
2388 VN_INFO (lhs)->has_constants = false;
2389 VN_INFO (lhs)->expr = NULL_TREE;
2392 if (TREE_CODE (lhs) == SSA_NAME
2393 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
2394 changed = defs_to_varying (stmt);
2395 /* ??? We should handle stores from calls. */
2396 else if (TREE_CODE (lhs) == SSA_NAME)
/* Only pure/const calls are safe to value number by arguments.  */
2398 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
2399 changed = visit_reference_op_call (lhs, stmt);
2401 changed = defs_to_varying (stmt);
2404 changed = defs_to_varying (stmt);
2411 /* Compare two operands by reverse postorder index */
/* qsort comparator used by sort_scc.  Ordering: nop-defined (default defs)
   first, then by block RPO number; within a block PHIs precede normal
   statements, which are ordered by statement uid; SSA version breaks
   all remaining ties so the sort is deterministic.  */
2414 compare_ops (const void *pa, const void *pb)
2416 const tree opa = *((const tree *)pa);
2417 const tree opb = *((const tree *)pb);
2418 gimple opstmta = SSA_NAME_DEF_STMT (opa);
2419 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
2423 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
2424 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2425 else if (gimple_nop_p (opstmta))
2427 else if (gimple_nop_p (opstmtb))
2430 bba = gimple_bb (opstmta);
2431 bbb = gimple_bb (opstmtb);
2434 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2442 if (gimple_code (opstmta) == GIMPLE_PHI
2443 && gimple_code (opstmtb) == GIMPLE_PHI)
2444 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2445 else if (gimple_code (opstmta) == GIMPLE_PHI)
2447 else if (gimple_code (opstmtb) == GIMPLE_PHI)
2449 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
2450 return gimple_uid (opstmta) - gimple_uid (opstmtb);
2452 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
2454 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
2457 /* Sort an array containing members of a strongly connected component
2458 SCC so that the members are ordered by RPO number.
2459 This means that when the sort is complete, iterating through the
2460 array will give you the members in RPO order. */
/* Sorts the VEC's backing storage in place via compare_ops.  */
2463 sort_scc (VEC (tree, heap) *scc)
2465 qsort (VEC_address (tree, scc),
2466 VEC_length (tree, scc),
2471 /* Process a strongly connected component in the SSA graph. */
2474 process_scc (VEC (tree, heap) *scc)
2476 /* If the SCC has a single member, just visit it. */
/* Singletons cannot participate in a cycle, so one visit suffices and
   the optimistic iteration below is skipped.  */
2478 if (VEC_length (tree, scc) == 1)
2480 tree use = VEC_index (tree, scc, 0);
2481 if (!VN_INFO (use)->use_processed)
2488 unsigned int iterations = 0;
2489 bool changed = true;
2491 /* Iterate over the SCC with the optimistic table until it stops
2493 current_info = optimistic_info;
2498 /* As we are value-numbering optimistically we have to
2499 clear the expression tables and the simplified expressions
2500 in each iteration until we converge. */
2501 htab_empty (optimistic_info->nary);
2502 htab_empty (optimistic_info->phis);
2503 htab_empty (optimistic_info->references);
/* The nary entries live on an obstack: free and reinit it wholesale
   rather than releasing entries one by one.  */
2504 obstack_free (&optimistic_info->nary_obstack, NULL);
2505 gcc_obstack_init (&optimistic_info->nary_obstack);
2506 empty_alloc_pool (optimistic_info->phis_pool);
2507 empty_alloc_pool (optimistic_info->references_pool);
2508 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2509 VN_INFO (var)->expr = NULL_TREE;
2510 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
2511 changed |= visit_use (var);
2514 statistics_histogram_event (cfun, "SCC iterations", iterations);
2516 /* Finally, visit the SCC once using the valid table. */
2517 current_info = valid_info;
2518 for (i = 0; VEC_iterate (tree, scc, i, var); i++)
/* Instantiate VEC support for ssa_op_iter, used by the iterative DFS below.  */
2523 DEF_VEC_O(ssa_op_iter);
2524 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
2526 /* Pop the components of the found SCC for NAME off the SCC stack
2527 and process them. Returns true if all went well, false if
2528 we run into resource limits. */
2531 extract_and_process_scc_for_name (tree name)
2533 VEC (tree, heap) *scc = NULL;
2536 /* Found an SCC, pop the components off the SCC stack and
/* NAME is the SCC root, so popping until NAME collects exactly its SCC.  */
2540 x = VEC_pop (tree, sccstack);
2542 VN_INFO (x)->on_sccstack = false;
2543 VEC_safe_push (tree, heap, scc, x);
2544 } while (x != name);
2546 /* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
2547 if (VEC_length (tree, scc)
2548 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
2551 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
2552 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
2553 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
/* Multi-member SCCs must be sorted into RPO order before processing.  */
2557 if (VEC_length (tree, scc) > 1)
2560 if (dump_file && (dump_flags & TDF_DETAILS))
2561 print_scc (dump_file, scc);
2565 VEC_free (tree, heap, scc);
2570 /* Depth first search on NAME to discover and process SCC's in the SSA
2572 Execution of this algorithm relies on the fact that the SCC's are
2573 popped off the stack in topological order.
2574 Returns true if successful, false if we stopped processing SCC's due
2575 to resource constraints. */
2580 VEC(ssa_op_iter, heap) *itervec = NULL;
2581 VEC(tree, heap) *namevec = NULL;
2582 use_operand_p usep = NULL;
2589 VN_INFO (name)->dfsnum = next_dfs_num++;
2590 VN_INFO (name)->visited = true;
2591 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
2593 VEC_safe_push (tree, heap, sccstack, name);
2594 VN_INFO (name)->on_sccstack = true;
2595 defstmt = SSA_NAME_DEF_STMT (name);
2597 /* Recursively DFS on our operands, looking for SCC's. */
2598 if (!gimple_nop_p (defstmt))
2600 /* Push a new iterator. */
2601 if (gimple_code (defstmt) == GIMPLE_PHI)
2602 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
2604 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
2607 clear_and_done_ssa_iter (&iter);
2611 /* If we are done processing uses of a name, go up the stack
2612 of iterators and process SCCs as we found them. */
2613 if (op_iter_done (&iter))
2615 /* See if we found an SCC. */
2616 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
2617 if (!extract_and_process_scc_for_name (name))
2619 VEC_free (tree, heap, namevec);
2620 VEC_free (ssa_op_iter, heap, itervec);
2624 /* Check if we are done. */
2625 if (VEC_empty (tree, namevec))
2627 VEC_free (tree, heap, namevec);
2628 VEC_free (ssa_op_iter, heap, itervec);
2632 /* Restore the last use walker and continue walking there. */
2634 name = VEC_pop (tree, namevec);
2635 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
2636 sizeof (ssa_op_iter));
2637 VEC_pop (ssa_op_iter, itervec);
2638 goto continue_walking;
2641 use = USE_FROM_PTR (usep);
2643 /* Since we handle phi nodes, we will sometimes get
2644 invariants in the use expression. */
2645 if (TREE_CODE (use) == SSA_NAME)
2647 if (! (VN_INFO (use)->visited))
2649 /* Recurse by pushing the current use walking state on
2650 the stack and starting over. */
2651 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
2652 VEC_safe_push(tree, heap, namevec, name);
2657 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
2658 VN_INFO (use)->low);
2660 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
2661 && VN_INFO (use)->on_sccstack)
2663 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
2664 VN_INFO (name)->low);
2668 usep = op_iter_next_use (&iter);
2672 /* Allocate a value number table. */
2675 allocate_vn_table (vn_tables_t table)
2677 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
2678 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
2679 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
2682 gcc_obstack_init (&table->nary_obstack);
2683 table->phis_pool = create_alloc_pool ("VN phis",
2684 sizeof (struct vn_phi_s),
2686 table->references_pool = create_alloc_pool ("VN references",
2687 sizeof (struct vn_reference_s),
2691 /* Free a value number table. */
2694 free_vn_table (vn_tables_t table)
2696 htab_delete (table->phis);
2697 htab_delete (table->nary);
2698 htab_delete (table->references);
2699 obstack_free (&table->nary_obstack, NULL);
2700 free_alloc_pool (table->phis_pool);
2701 free_alloc_pool (table->references_pool);
2709 int *rpo_numbers_temp;
2711 calculate_dominance_info (CDI_DOMINATORS);
2713 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
2716 constant_value_ids = BITMAP_ALLOC (NULL);
2721 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
2722 /* VEC_alloc doesn't actually grow it to the right size, it just
2723 preallocates the space to do so. */
2724 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
2725 gcc_obstack_init (&vn_ssa_aux_obstack);
2727 shared_lookup_phiargs = NULL;
2728 shared_lookup_references = NULL;
2729 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2730 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
2731 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
2733 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
2734 the i'th block in RPO order is bb. We want to map bb's to RPO
2735 numbers, so we need to rearrange this array. */
2736 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
2737 rpo_numbers[rpo_numbers_temp[j]] = j;
2739 XDELETE (rpo_numbers_temp);
2741 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
2743 /* Create the VN_INFO structures, and initialize value numbers to
2745 for (i = 0; i < num_ssa_names; i++)
2747 tree name = ssa_name (i);
2750 VN_INFO_GET (name)->valnum = VN_TOP;
2751 VN_INFO (name)->expr = NULL_TREE;
2752 VN_INFO (name)->value_id = 0;
2756 renumber_gimple_stmt_uids ();
2758 /* Create the valid and optimistic value numbering tables. */
2759 valid_info = XCNEW (struct vn_tables_s);
2760 allocate_vn_table (valid_info);
2761 optimistic_info = XCNEW (struct vn_tables_s);
2762 allocate_vn_table (optimistic_info);
2770 htab_delete (constant_to_value_id);
2771 BITMAP_FREE (constant_value_ids);
2772 VEC_free (tree, heap, shared_lookup_phiargs);
2773 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
2774 XDELETEVEC (rpo_numbers);
2776 for (i = 0; i < num_ssa_names; i++)
2778 tree name = ssa_name (i);
2780 && VN_INFO (name)->needs_insertion)
2781 release_ssa_name (name);
2783 obstack_free (&vn_ssa_aux_obstack, NULL);
2784 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
2786 VEC_free (tree, heap, sccstack);
2787 free_vn_table (valid_info);
2788 XDELETE (valid_info);
2789 free_vn_table (optimistic_info);
2790 XDELETE (optimistic_info);
2793 /* Set the value ids in the valid hash tables. */
2796 set_hashtable_value_ids (void)
2803 /* Now set the value ids of the things we had put in the hash
2806 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
2807 vno, vn_nary_op_t, hi)
2811 if (TREE_CODE (vno->result) == SSA_NAME)
2812 vno->value_id = VN_INFO (vno->result)->value_id;
2813 else if (is_gimple_min_invariant (vno->result))
2814 vno->value_id = get_or_alloc_constant_value_id (vno->result);
2818 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
2823 if (TREE_CODE (vp->result) == SSA_NAME)
2824 vp->value_id = VN_INFO (vp->result)->value_id;
2825 else if (is_gimple_min_invariant (vp->result))
2826 vp->value_id = get_or_alloc_constant_value_id (vp->result);
2830 FOR_EACH_HTAB_ELEMENT (valid_info->references,
2831 vr, vn_reference_t, hi)
2835 if (TREE_CODE (vr->result) == SSA_NAME)
2836 vr->value_id = VN_INFO (vr->result)->value_id;
2837 else if (is_gimple_min_invariant (vr->result))
2838 vr->value_id = get_or_alloc_constant_value_id (vr->result);
2843 /* Do SCCVN. Returns true if it finished, false if we bailed out
2844 due to resource constraints. */
2847 run_scc_vn (bool may_insert_arg)
2851 bool changed = true;
2853 may_insert = may_insert_arg;
2856 current_info = valid_info;
2858 for (param = DECL_ARGUMENTS (current_function_decl);
2860 param = TREE_CHAIN (param))
2862 if (gimple_default_def (cfun, param) != NULL)
2864 tree def = gimple_default_def (cfun, param);
2865 VN_INFO (def)->valnum = def;
2869 for (i = 1; i < num_ssa_names; ++i)
2871 tree name = ssa_name (i);
2873 && VN_INFO (name)->visited == false
2874 && !has_zero_uses (name))
2883 /* Initialize the value ids. */
2885 for (i = 1; i < num_ssa_names; ++i)
2887 tree name = ssa_name (i);
2891 info = VN_INFO (name);
2892 if (info->valnum == name
2893 || info->valnum == VN_TOP)
2894 info->value_id = get_next_value_id ();
2895 else if (is_gimple_min_invariant (info->valnum))
2896 info->value_id = get_or_alloc_constant_value_id (info->valnum);
2899 /* Propagate until they stop changing. */
2903 for (i = 1; i < num_ssa_names; ++i)
2905 tree name = ssa_name (i);
2909 info = VN_INFO (name);
2910 if (TREE_CODE (info->valnum) == SSA_NAME
2911 && info->valnum != name
2912 && info->value_id != VN_INFO (info->valnum)->value_id)
2915 info->value_id = VN_INFO (info->valnum)->value_id;
2920 set_hashtable_value_ids ();
2922 if (dump_file && (dump_flags & TDF_DETAILS))
2924 fprintf (dump_file, "Value numbers:\n");
2925 for (i = 0; i < num_ssa_names; i++)
2927 tree name = ssa_name (i);
2929 && VN_INFO (name)->visited
2930 && SSA_VAL (name) != name)
2932 print_generic_expr (dump_file, name, 0);
2933 fprintf (dump_file, " = ");
2934 print_generic_expr (dump_file, SSA_VAL (name), 0);
2935 fprintf (dump_file, "\n");
2944 /* Return the maximum value id we have ever seen. */
2947 get_max_value_id (void)
2949 return next_value_id;
2952 /* Return the next unique value id. */
2955 get_next_value_id (void)
2957 return next_value_id++;
2961 /* Compare two expressions E1 and E2 and return true if they are equal. */
2964 expressions_equal_p (tree e1, tree e2)
2966 /* The obvious case. */
2970 /* If only one of them is null, they cannot be equal. */
2974 /* Recurse on elements of lists. */
2975 if (TREE_CODE (e1) == TREE_LIST && TREE_CODE (e2) == TREE_LIST)
2979 for (lop1 = e1, lop2 = e2;
2981 lop1 = TREE_CHAIN (lop1), lop2 = TREE_CHAIN (lop2))
2985 if (!expressions_equal_p (TREE_VALUE (lop1), TREE_VALUE (lop2)))
2991 /* Now perform the actual comparison. */
2992 if (TREE_CODE (e1) == TREE_CODE (e2)
2993 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3000 /* Return true if the nary operation NARY may trap. This is a copy
3001 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3004 vn_nary_may_trap (vn_nary_op_t nary)
3008 bool honor_nans = false;
3009 bool honor_snans = false;
3010 bool fp_operation = false;
3011 bool honor_trapv = false;
3015 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3016 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3017 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3020 fp_operation = FLOAT_TYPE_P (type);
3023 honor_nans = flag_trapping_math && !flag_finite_math_only;
3024 honor_snans = flag_signaling_nans != 0;
3026 else if (INTEGRAL_TYPE_P (type)
3027 && TYPE_OVERFLOW_TRAPS (type))
3031 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3033 honor_nans, honor_snans, rhs2,
3039 for (i = 0; i < nary->length; ++i)
3040 if (tree_could_trap_p (nary->op[i]))