/* Conditional constant propagation pass for the GNU compiler.
   Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011, 2012 Free Software Foundation, Inc.
   Adapted from original RTL SSA-CCP by Daniel Berlin <dberlin@dberlin.org>
   Adapted to GIMPLE trees by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Conditional constant propagation (CCP) is based on the SSA
   propagation engine (tree-ssa-propagate.c).  Constant assignments of
   the form VAR = CST are propagated from the assignments into uses of
   VAR, which in turn may generate new constants.  The simulation uses
   a four level lattice to keep track of constant values associated
   with SSA names.  Given an SSA name V_i, it may take one of the
   following values:

	UNINITIALIZED   ->  the initial state of the value.  This value
			    is replaced with a correct initial value
			    the first time the value is used, so the
			    rest of the pass does not need to care about
			    it.  Using this value simplifies initialization
			    of the pass, and prevents us from needlessly
			    scanning statements that are never reached.

	UNDEFINED	->  V_i is a local variable whose definition
			    has not been processed yet.  Therefore we
			    don't yet know if its value is a constant
			    or not.

	CONSTANT	->  V_i has been found to hold a constant
			    value C.

	VARYING		->  V_i cannot take a constant value, or if it
			    does, it is not possible to determine it
			    at compile time.

   The core of SSA-CCP is in ccp_visit_stmt and ccp_visit_phi_node:

   1- In ccp_visit_stmt, we are interested in assignments whose RHS
      evaluates into a constant and conditional jumps whose predicate
      evaluates into a boolean true or false.  When an assignment of
      the form V_i = CONST is found, V_i's lattice value is set to
      CONSTANT and CONST is associated with it.  This causes the
      propagation engine to add all the SSA edges coming out of the
      assignment into the worklists, so that statements that use V_i
      can be visited.

      If the statement is a conditional with a constant predicate, we
      mark the outgoing edges as executable or not executable
      depending on the predicate's value.  This is then used when
      visiting PHI nodes to know when a PHI argument can be ignored.

   2- In ccp_visit_phi_node, if all the PHI arguments evaluate to the
      same constant C, then the LHS of the PHI is set to C.  This
      evaluation is known as the "meet operation".  Since one of the
      goals of this evaluation is to optimistically return constant
      values as often as possible, it uses two main short cuts:

      - If an argument is flowing in through a non-executable edge, it
	is ignored.  This is useful in cases like this:

			if (PRED)
			  a_9 = 3;
			else
			  a_10 = 100;
			a_11 = PHI (a_9, a_10)

	If PRED is known to always evaluate to false, then we can
	assume that a_11 will always take its value from a_10, meaning
	that instead of considering it VARYING (a_9 and a_10 have
	different values), we can consider it CONSTANT 100.

      - If an argument has an UNDEFINED value, then it does not affect
	the outcome of the meet operation.  If a variable V_i has an
	UNDEFINED value, it means that either its defining statement
	hasn't been visited yet or V_i has no defining statement, in
	which case the original symbol 'V' is being used
	uninitialized.  Since 'V' is a local variable, the compiler
	may assume any initial value for it.
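	For example, in a_11 = PHI (a_9, 100), if a_9 is still
	UNDEFINED we may optimistically treat a_11 as CONSTANT 100;
	assuming that a_9 is also 100 cannot change the meaning of
	the program.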
   After propagation, every variable V_i that ends up with a lattice
   value of CONSTANT will have the associated constant value in the
   array CONST_VAL[i].VALUE.  That is fed into substitute_and_fold for
   final substitution and folding.

   References:

     Constant propagation with conditional branches,
     Wegman and Zadeck, ACM TOPLAS 13(2):181-210.

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 12.6  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "value-prof.h"
#include "langhooks.h"
#include "target.h"
#include "diagnostic-core.h"
#include "dbgcnt.h"
#include "gimple-fold.h"
#include "params.h"
/* Possible lattice values.  */
typedef enum
{
  UNINITIALIZED,
  UNDEFINED,
  CONSTANT,
  VARYING
} ccp_lattice_t;

struct prop_value_d {
    /* Lattice value.  */
    ccp_lattice_t lattice_val;

    /* Propagated value.  */
    tree value;

    /* Mask that applies to the propagated value during CCP.  For
       X with a CONSTANT lattice value X & ~mask == value & ~mask.  */
    double_int mask;
};

typedef struct prop_value_d prop_value_t;
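/* For example, lattice_val == CONSTANT with value == 0x10 and
   mask == 0x3 describes the set { 0x10, 0x11, 0x12, 0x13 }: the two
   low bits are unknown, all other bits are known.  A zero mask means
   the value is fully constant; an all-ones mask conveys nothing.  */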
/* Array of propagated constant values.  After propagation,
   CONST_VAL[I].VALUE holds the constant value for SSA_NAME(I).  If
   the constant is held in an SSA name representing a memory store
   (i.e., a VDEF), CONST_VAL[I].MEM_REF will contain the actual
   memory reference used to store (i.e., the LHS of the assignment
   doing the store).  */
static prop_value_t *const_val;

static void canonicalize_float_value (prop_value_t *);
static bool ccp_fold_stmt (gimple_stmt_iterator *);
/* Dump constant propagation value VAL to file OUTF prefixed by PREFIX.  */

static void
dump_lattice_value (FILE *outf, const char *prefix, prop_value_t val)
{
  switch (val.lattice_val)
    {
    case UNINITIALIZED:
      fprintf (outf, "%sUNINITIALIZED", prefix);
      break;
    case UNDEFINED:
      fprintf (outf, "%sUNDEFINED", prefix);
      break;
    case VARYING:
      fprintf (outf, "%sVARYING", prefix);
      break;
    case CONSTANT:
      fprintf (outf, "%sCONSTANT ", prefix);
      if (TREE_CODE (val.value) != INTEGER_CST
	  || double_int_zero_p (val.mask))
	print_generic_expr (outf, val.value, dump_flags);
      else
	{
	  double_int cval = double_int_and_not (tree_to_double_int (val.value),
						val.mask);
	  fprintf (outf, "%sCONSTANT " HOST_WIDE_INT_PRINT_DOUBLE_HEX,
		   prefix, cval.high, cval.low);
	  fprintf (outf, " (" HOST_WIDE_INT_PRINT_DOUBLE_HEX ")",
		   val.mask.high, val.mask.low);
	}
      break;
    default:
      gcc_unreachable ();
    }
}


/* Print lattice value VAL to stderr.  */

void debug_lattice_value (prop_value_t val);

DEBUG_FUNCTION void
debug_lattice_value (prop_value_t val)
{
  dump_lattice_value (stderr, "", val);
  fprintf (stderr, "\n");
}
/* Compute a default value for variable VAR and store it in the
   CONST_VAL array.  The following rules are used to get default
   values:

   1- Global and static variables that are declared constant are
      considered CONSTANT.

   2- Any other value is considered UNDEFINED.  This is useful when
      considering PHI nodes.  PHI arguments that are undefined do not
      change the constant value of the PHI node, which allows for more
      constants to be propagated.

   3- Variables defined by statements other than assignments and PHI
      nodes are considered VARYING.

   4- Initial values of variables that are not GIMPLE registers are
      considered VARYING.  */
static prop_value_t
get_default_value (tree var)
{
  tree sym = SSA_NAME_VAR (var);
  prop_value_t val = { UNINITIALIZED, NULL_TREE, { 0, 0 } };
  gimple stmt;

  stmt = SSA_NAME_DEF_STMT (var);

  if (gimple_nop_p (stmt))
    {
      /* Variables defined by an empty statement are those used
	 before being initialized.  If VAR is a local variable, we
	 can assume initially that it is UNDEFINED, otherwise we must
	 consider it VARYING.  */
      if (is_gimple_reg (sym)
	  && TREE_CODE (sym) == VAR_DECL)
	val.lattice_val = UNDEFINED;
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}
    }
  else if (is_gimple_assign (stmt)
	   /* Value-returning GIMPLE_CALL statements assign to
	      a variable, and are treated similarly to GIMPLE_ASSIGN.  */
	   || (is_gimple_call (stmt)
	       && gimple_call_lhs (stmt) != NULL_TREE)
	   || gimple_code (stmt) == GIMPLE_PHI)
    {
      tree cst;
      if (gimple_assign_single_p (stmt)
	  && DECL_P (gimple_assign_rhs1 (stmt))
	  && (cst = get_symbol_constant_value (gimple_assign_rhs1 (stmt))))
	{
	  val.lattice_val = CONSTANT;
	  val.value = cst;
	}
      else
	/* Any other variable defined by an assignment or a PHI node
	   is considered UNDEFINED.  */
	val.lattice_val = UNDEFINED;
    }
  else
    {
      /* Otherwise, VAR will never take on a constant value.  */
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
    }

  return val;
}
/* Get the constant value associated with variable VAR.  */

static inline prop_value_t *
get_value (tree var)
{
  prop_value_t *val;

  if (const_val == NULL)
    return NULL;

  val = &const_val[SSA_NAME_VERSION (var)];
  if (val->lattice_val == UNINITIALIZED)
    *val = get_default_value (var);

  canonicalize_float_value (val);

  return val;
}
/* Return the constant tree value associated with VAR.  */

static tree
get_constant_value (tree var)
{
  prop_value_t *val;
  if (TREE_CODE (var) != SSA_NAME)
    {
      if (is_gimple_min_invariant (var))
	return var;
      return NULL_TREE;
    }
  val = get_value (var);
  if (val
      && val->lattice_val == CONSTANT
      && (TREE_CODE (val->value) != INTEGER_CST
	  || double_int_zero_p (val->mask)))
    return val->value;
  return NULL_TREE;
}
/* Sets the value associated with VAR to VARYING.  */

static void
set_value_varying (tree var)
{
  prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];

  val->lattice_val = VARYING;
  val->value = NULL_TREE;
  val->mask = double_int_minus_one;
}
/* For float types, modify the value of VAL to make ccp work correctly
   for non-standard values (-0, NaN):

   If HONOR_SIGNED_ZEROS is false, and VAL = -0, we canonicalize it to 0.
   If HONOR_NANS is false, and VAL is NaN, we canonicalize it to UNDEFINED.
   This is to fix the following problem (see PR 29921): Suppose we have

     x = 0 * y

   and we set value of y to NaN.  This causes value of x to be set to NaN.
   When we later determine that y is in fact VARYING, fold uses the fact
   that HONOR_NANS is false, and we try to change the value of x to 0,
   causing an ICE.  With HONOR_NANS being false, the real appearance of
   NaN would cause undefined behavior, though, so claiming that y (and x)
   are UNDEFINED initially is correct.  */
static void
canonicalize_float_value (prop_value_t *val)
{
  enum machine_mode mode;
  tree type;
  REAL_VALUE_TYPE d;

  if (val->lattice_val != CONSTANT
      || TREE_CODE (val->value) != REAL_CST)
    return;

  d = TREE_REAL_CST (val->value);
  type = TREE_TYPE (val->value);
  mode = TYPE_MODE (type);

  if (!HONOR_SIGNED_ZEROS (mode)
      && REAL_VALUE_MINUS_ZERO (d))
    {
      val->value = build_real (type, dconst0);
      return;
    }

  if (!HONOR_NANS (mode)
      && REAL_VALUE_ISNAN (d))
    {
      val->lattice_val = UNDEFINED;
      val->value = NULL;
      return;
    }
}
/* Return whether the lattice transition is valid.  */

static bool
valid_lattice_transition (prop_value_t old_val, prop_value_t new_val)
{
  /* Lattice transitions must always be monotonically increasing in
     value.  */
  if (old_val.lattice_val < new_val.lattice_val)
    return true;

  if (old_val.lattice_val != new_val.lattice_val)
    return false;

  if (!old_val.value && !new_val.value)
    return true;

  /* Now both lattice values are CONSTANT.  */

  /* Allow transitioning from &x to &x & ~3.  */
  if (TREE_CODE (old_val.value) != INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return true;

  /* Bit-lattices have to agree in the still valid bits.  */
  if (TREE_CODE (old_val.value) == INTEGER_CST
      && TREE_CODE (new_val.value) == INTEGER_CST)
    return double_int_equal_p
	     (double_int_and_not (tree_to_double_int (old_val.value),
				  new_val.mask),
	      double_int_and_not (tree_to_double_int (new_val.value),
				  new_val.mask));

  /* Otherwise constant values have to agree.  */
  return operand_equal_p (old_val.value, new_val.value, 0);
}
/* Set the value for variable VAR to NEW_VAL.  Return true if the new
   value is different from VAR's previous value.  */

static bool
set_lattice_value (tree var, prop_value_t new_val)
{
  /* We can deal with old UNINITIALIZED values just fine here.  */
  prop_value_t *old_val = &const_val[SSA_NAME_VERSION (var)];

  canonicalize_float_value (&new_val);

  /* We have to be careful to not go up the bitwise lattice
     represented by the mask.
     ??? This doesn't seem to be the best place to enforce this.  */
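  /* For example, merging old value 4 (mask 0) with new value 6 (mask 0)
     gives diff == 2, so the result is value 6 with mask 2: exactly the
     bit on which the two values disagree becomes unknown.  */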
  if (new_val.lattice_val == CONSTANT
      && old_val->lattice_val == CONSTANT
      && TREE_CODE (new_val.value) == INTEGER_CST
      && TREE_CODE (old_val->value) == INTEGER_CST)
    {
      double_int diff;
      diff = double_int_xor (tree_to_double_int (new_val.value),
			     tree_to_double_int (old_val->value));
      new_val.mask = double_int_ior (new_val.mask,
				     double_int_ior (old_val->mask, diff));
    }
  gcc_assert (valid_lattice_transition (*old_val, new_val));

  /* If *OLD_VAL and NEW_VAL are the same, return false to inform the
     caller that this was a non-transition.  */
  if (old_val->lattice_val != new_val.lattice_val
      || (new_val.lattice_val == CONSTANT
	  && TREE_CODE (new_val.value) == INTEGER_CST
	  && (TREE_CODE (old_val->value) != INTEGER_CST
	      || !double_int_equal_p (new_val.mask, old_val->mask))))
    {
      /* ??? We would like to delay creation of INTEGER_CSTs from
	 partially constants here.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  dump_lattice_value (dump_file, "Lattice value changed to ", new_val);
	  fprintf (dump_file, ".  Adding SSA edges to worklist.\n");
	}

      *old_val = new_val;

      gcc_assert (new_val.lattice_val != UNINITIALIZED);
      return true;
    }

  return false;
}
static prop_value_t get_value_for_expr (tree, bool);
static prop_value_t bit_value_binop (enum tree_code, tree, tree, tree);
static void bit_value_binop_1 (enum tree_code, tree, double_int *, double_int *,
			       tree, double_int, double_int,
			       tree, double_int, double_int);
/* Return a double_int that can be used for bitwise simplifications
   from VAL.  */

static double_int
value_to_double_int (prop_value_t val)
{
  if (val.value
      && TREE_CODE (val.value) == INTEGER_CST)
    return tree_to_double_int (val.value);

  return double_int_zero;
}
/* Return the value for the address expression EXPR based on alignment
   information.  */

static prop_value_t
get_value_from_alignment (tree expr)
{
  tree type = TREE_TYPE (expr);
  prop_value_t val;
  unsigned HOST_WIDE_INT bitpos;
  unsigned int align;

  gcc_assert (TREE_CODE (expr) == ADDR_EXPR);

  get_object_alignment_1 (TREE_OPERAND (expr, 0), &align, &bitpos);
  val.mask
    = double_int_and_not (POINTER_TYPE_P (type) || TYPE_UNSIGNED (type)
			  ? double_int_mask (TYPE_PRECISION (type))
			  : double_int_minus_one,
			  uhwi_to_double_int (align / BITS_PER_UNIT - 1));
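  /* E.g. an 8-byte aligned address has its low three bits known to be
     zero, so only those bits are removed from the mask.  */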
  val.lattice_val = double_int_minus_one_p (val.mask) ? VARYING : CONSTANT;
  if (val.lattice_val == CONSTANT)
    val.value
      = double_int_to_tree (type, uhwi_to_double_int (bitpos / BITS_PER_UNIT));
  else
    val.value = NULL_TREE;

  return val;
}
/* Return the value for the tree operand EXPR.  If FOR_BITS_P is true
   return constant bits extracted from alignment information for
   invariant addresses.  */

static prop_value_t
get_value_for_expr (tree expr, bool for_bits_p)
{
  prop_value_t val;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      val = *get_value (expr);
      if (for_bits_p
	  && val.lattice_val == CONSTANT
	  && TREE_CODE (val.value) == ADDR_EXPR)
	val = get_value_from_alignment (val.value);
    }
  else if (is_gimple_min_invariant (expr)
	   && (!for_bits_p || TREE_CODE (expr) != ADDR_EXPR))
    {
      val.lattice_val = CONSTANT;
      val.value = expr;
      val.mask = double_int_zero;
      canonicalize_float_value (&val);
    }
  else if (TREE_CODE (expr) == ADDR_EXPR)
    val = get_value_from_alignment (expr);
  else
    {
      val.lattice_val = VARYING;
      val.mask = double_int_minus_one;
      val.value = NULL_TREE;
    }
  return val;
}
/* Return the likely CCP lattice value for STMT.

   If STMT has no operands, then return CONSTANT.

   Else if undefinedness of operands of STMT cause its value to be
   undefined, then return UNDEFINED.

   Else if any operands of STMT are constants, then return CONSTANT.

   Else return VARYING.  */
static ccp_lattice_t
likely_value (gimple stmt)
{
  bool has_constant_operand, has_undefined_operand, all_undefined_operands;
  tree use;
  ssa_op_iter iter;
  unsigned i;

  enum gimple_code code = gimple_code (stmt);

  /* This function appears to be called only for assignments, calls,
     conditionals, and switches, due to the logic in visit_stmt.  */
  gcc_assert (code == GIMPLE_ASSIGN
              || code == GIMPLE_CALL
              || code == GIMPLE_COND
              || code == GIMPLE_SWITCH);

  /* If the statement has volatile operands, it won't fold to a
     constant value.  */
  if (gimple_has_volatile_ops (stmt))
    return VARYING;

  /* Arrive here for more complex cases.  */
  has_constant_operand = false;
  has_undefined_operand = false;
  all_undefined_operands = true;
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
    {
      prop_value_t *val = get_value (use);

      if (val->lattice_val == UNDEFINED)
	has_undefined_operand = true;
      else
	all_undefined_operands = false;

      if (val->lattice_val == CONSTANT)
	has_constant_operand = true;
    }

  /* There may be constants in regular rhs operands.  For calls we
     have to ignore lhs, fndecl and static chain, otherwise only
     the lhs.  */
  for (i = (is_gimple_call (stmt) ? 2 : 0) + gimple_has_lhs (stmt);
       i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      if (!op || TREE_CODE (op) == SSA_NAME)
	continue;
      if (is_gimple_min_invariant (op))
	has_constant_operand = true;
    }

  if (has_constant_operand)
    all_undefined_operands = false;

  /* If the operation combines operands like COMPLEX_EXPR make sure to
     not mark the result UNDEFINED if only one part of the result is
     undefined.  */
  if (has_undefined_operand && all_undefined_operands)
    return UNDEFINED;
  else if (code == GIMPLE_ASSIGN && has_undefined_operand)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	/* Unary operators are handled with all_undefined_operands.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case POINTER_PLUS_EXPR:
	  /* Not MIN_EXPR, MAX_EXPR.  One VARYING operand may be selected.
	     Not bitwise operators, one VARYING operand may specify the
	     result completely.  Not logical operators for the same reason.
	     Not COMPLEX_EXPR as one VARYING operand makes the result partly
	     not UNDEFINED.  Not *DIV_EXPR, comparisons and shifts because
	     the undefined operand may be promoted.  */
	  return UNDEFINED;

	default:
	  break;
	}
    }
  /* If there was an UNDEFINED operand but the result may be not UNDEFINED
     fall back to CONSTANT.  During iteration UNDEFINED may still drop
     to UNDEFINED.  */
  if (has_undefined_operand)
    return CONSTANT;

  /* We do not consider virtual operands here -- load from read-only
     memory may have only VARYING virtual operands, but still be
     constant.  */
  if (has_constant_operand
      || gimple_references_memory_p (stmt))
    return CONSTANT;

  return VARYING;
}
/* Returns true if STMT cannot be constant.  */

static bool
surely_varying_stmt_p (gimple stmt)
{
  /* If the statement has operands that we cannot handle, it cannot be
     constant.  */
  if (gimple_has_volatile_ops (stmt))
    return true;

  /* If it is a call and does not return a value or is not a
     builtin and not an indirect call, it is varying.  */
  if (is_gimple_call (stmt))
    {
      tree fndecl;
      if (!gimple_call_lhs (stmt)
	  || ((fndecl = gimple_call_fndecl (stmt)) != NULL_TREE
	      && !DECL_BUILT_IN (fndecl)))
	return true;
    }

  /* Any other store operation is not interesting.  */
  else if (gimple_vdef (stmt))
    return true;

  /* Anything other than assignments and conditional jumps are not
     interesting for CCP.  */
  if (gimple_code (stmt) != GIMPLE_ASSIGN
      && gimple_code (stmt) != GIMPLE_COND
      && gimple_code (stmt) != GIMPLE_SWITCH
      && gimple_code (stmt) != GIMPLE_CALL)
    return true;

  return false;
}
/* Initialize local data structures for CCP.  */

static void
ccp_initialize (void)
{
  basic_block bb;

  const_val = XCNEWVEC (prop_value_t, num_ssa_names);

  /* Initialize simulation flags for PHI nodes and statements.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  bool is_varying;

	  /* If the statement is a control insn, then we do not
	     want to avoid simulating the statement once.  Failure
	     to do so means that those edges will never get added.  */
	  if (stmt_ends_bb_p (stmt))
	    is_varying = false;
	  else
	    is_varying = surely_varying_stmt_p (stmt);

	  if (is_varying)
	    {
	      tree def;
	      ssa_op_iter iter;

	      /* If the statement will not produce a constant, mark
		 all its outputs VARYING.  */
	      FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
		set_value_varying (def);
	    }
	  prop_set_simulate_again (stmt, !is_varying);
	}
    }

  /* Now process PHI nodes.  We never clear the simulate_again flag on
     phi nodes, since we do not know which edges are executable yet,
     except for phi nodes for virtual operands when we do not do store ccp.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_phis (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple phi = gsi_stmt (i);

	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    prop_set_simulate_again (phi, false);
	  else
	    prop_set_simulate_again (phi, true);
	}
    }
}
/* Debug count support.  Reset the values of ssa names
   VARYING when the total number of ssa names analyzed is
   beyond the debug count specified.  */

static void
do_dbg_cnt (void)
{
  unsigned i;
  for (i = 0; i < num_ssa_names; i++)
    {
      if (!dbg_cnt (ccp))
	{
	  const_val[i].lattice_val = VARYING;
	  const_val[i].mask = double_int_minus_one;
	  const_val[i].value = NULL_TREE;
	}
    }
}
/* Do final substitution of propagated values, cleanup the flowgraph and
   free allocated storage.

   Return TRUE when something was optimized.  */

static bool
ccp_finalize (void)
{
  bool something_changed;
  unsigned i;

  do_dbg_cnt ();

  /* Derive alignment and misalignment information from partially
     constant pointers in the lattice.  */
  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      prop_value_t *val;
      unsigned int tem, align;

      if (!name
	  || !POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      val = get_value (name);
      if (val->lattice_val != CONSTANT
	  || TREE_CODE (val->value) != INTEGER_CST)
	continue;

      /* Trailing constant bits specify the alignment, trailing value
	 bits the misalignment.  */
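      /* For example, if the low mask bits are ...11111000 and the low
	 value bits are 100, the pointer is 8-byte aligned with a
	 misalignment of 4 within that alignment.  */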
      tem = val->mask.low;
      align = (tem & -tem);
      if (align > 1)
	set_ptr_info_alignment (get_ptr_info (name), align,
				TREE_INT_CST_LOW (val->value) & (align - 1));
    }

  /* Perform substitutions based on the known constant values.  */
  something_changed = substitute_and_fold (get_constant_value,
					   ccp_fold_stmt, true);

  free (const_val);
  const_val = NULL;

  return something_changed;
}
/* Compute the meet operator between *VAL1 and *VAL2.  Store the result
   in VAL1.

		any  M UNDEFINED   = any
		any  M VARYING     = VARYING
		Ci   M Cj	   = Ci		if (i == j)
		Ci   M Cj	   = VARYING	if (i != j)
   */
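/* For INTEGER_CSTs the meet is bitwise; e.g. meeting 4 (100) and
   6 (110) yields value 4 with mask 2 ("1?0"), which stays partially
   constant instead of dropping all the way to VARYING.  */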
static void
ccp_lattice_meet (prop_value_t *val1, prop_value_t *val2)
{
  if (val1->lattice_val == UNDEFINED)
    {
      /* UNDEFINED M any = any   */
      *val1 = *val2;
    }
  else if (val2->lattice_val == UNDEFINED)
    {
      /* any M UNDEFINED = any
         Nothing to do.  VAL1 already contains the value we want.  */
      ;
    }
  else if (val1->lattice_val == VARYING
           || val2->lattice_val == VARYING)
    {
      /* any M VARYING = VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && TREE_CODE (val1->value) == INTEGER_CST
	   && TREE_CODE (val2->value) == INTEGER_CST)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

         For INTEGER_CSTs mask unequal bits.  If no equal bits remain,
	 drop to varying.  */
      val1->mask
	= double_int_ior (double_int_ior (val1->mask,
					  val2->mask),
			  double_int_xor (tree_to_double_int (val1->value),
					  tree_to_double_int (val2->value)));
      if (double_int_minus_one_p (val1->mask))
	{
	  val1->lattice_val = VARYING;
	  val1->value = NULL_TREE;
	}
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && simple_cst_equal (val1->value, val2->value) == 1)
    {
      /* Ci M Cj = Ci		if (i == j)
	 Ci M Cj = VARYING	if (i != j)

	 VAL1 already contains the value we want for equivalent values.  */
    }
  else if (val1->lattice_val == CONSTANT
	   && val2->lattice_val == CONSTANT
	   && (TREE_CODE (val1->value) == ADDR_EXPR
	       || TREE_CODE (val2->value) == ADDR_EXPR))
    {
      /* When not equal addresses are involved try meeting for
	 alignment.  */
      prop_value_t tem = *val2;
      if (TREE_CODE (val1->value) == ADDR_EXPR)
	*val1 = get_value_for_expr (val1->value, true);
      if (TREE_CODE (val2->value) == ADDR_EXPR)
	tem = get_value_for_expr (val2->value, true);
      ccp_lattice_meet (val1, &tem);
    }
  else
    {
      /* Any other combination is VARYING.  */
      val1->lattice_val = VARYING;
      val1->mask = double_int_minus_one;
      val1->value = NULL_TREE;
    }
}
/* Loop through the PHI_NODE's parameters for BLOCK and compare their
   lattice values to determine PHI_NODE's lattice value.  The value of a
   PHI node is determined calling ccp_lattice_meet with all the arguments
   of the PHI node that are incoming via executable edges.  */

static enum ssa_prop_result
ccp_visit_phi_node (gimple phi)
{
  unsigned i;
  prop_value_t *old_val, new_val;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting PHI node: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }

  old_val = get_value (gimple_phi_result (phi));
  switch (old_val->lattice_val)
    {
    case VARYING:
      return SSA_PROP_VARYING;

    case CONSTANT:
      break;

    default:
      gcc_unreachable ();
    }

  new_val.lattice_val = UNDEFINED;
  new_val.value = NULL_TREE;
  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      /* Compute the meet operator over all the PHI arguments flowing
	 through executable edges.  */
      edge e = gimple_phi_arg_edge (phi, i);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file,
	      "\n    Argument #%d (%d -> %d %sexecutable)\n",
	      i, e->src->index, e->dest->index,
	      (e->flags & EDGE_EXECUTABLE) ? "" : "not ");
	}

      /* If the incoming edge is executable, compute the meet operator for
	 the existing value of the PHI node and the current PHI argument.  */
      if (e->flags & EDGE_EXECUTABLE)
	{
	  tree arg = gimple_phi_arg (phi, i)->def;
	  prop_value_t arg_val = get_value_for_expr (arg, false);

	  ccp_lattice_meet (&new_val, &arg_val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\t");
	      print_generic_expr (dump_file, arg, dump_flags);
	      dump_lattice_value (dump_file, "\tValue: ", arg_val);
	      fprintf (dump_file, "\n");
	    }

	  if (new_val.lattice_val == VARYING)
	    break;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      dump_lattice_value (dump_file, "\n    PHI node value: ", new_val);
      fprintf (dump_file, "\n\n");
    }

  /* Make the transition to the new value.  */
  if (set_lattice_value (gimple_phi_result (phi), new_val))
    {
      if (new_val.lattice_val == VARYING)
	return SSA_PROP_VARYING;
      else
	return SSA_PROP_INTERESTING;
    }
  else
    return SSA_PROP_NOT_INTERESTING;
}
/* Return the constant value for OP or OP otherwise.  */

static tree
valueize_op (tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    {
      tree tem = get_constant_value (op);
      if (tem)
	return tem;
    }
  return op;
}
/* CCP specific front-end to the non-destructive constant folding
   routines.

   Attempt to simplify the RHS of STMT knowing that one or more
   operands are constants.

   If simplification is possible, return the simplified RHS,
   otherwise return the original RHS or NULL_TREE.  */

static tree
ccp_fold (gimple stmt)
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	/* Handle comparison operators that can appear in GIMPLE form.  */
	tree op0 = valueize_op (gimple_cond_lhs (stmt));
	tree op1 = valueize_op (gimple_cond_rhs (stmt));
	enum tree_code code = gimple_cond_code (stmt);
	return fold_binary_loc (loc, code, boolean_type_node, op0, op1);
      }

    case GIMPLE_SWITCH:
      {
	/* Return the constant switch index.  */
	return valueize_op (gimple_switch_index (stmt));
      }

    case GIMPLE_ASSIGN:
    case GIMPLE_CALL:
      return gimple_fold_stmt_to_constant_1 (stmt, valueize_op);

    default:
      gcc_unreachable ();
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pair
   RVAL and RMASK representing a value of type RTYPE and set
   the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_unop_1 (enum tree_code code, tree type,
		  double_int *val, double_int *mask,
		  tree rtype, double_int rval, double_int rmask)
{
  switch (code)
    {
    case BIT_NOT_EXPR:
      *mask = rmask;
      *val = double_int_not (rval);
      break;

    case NEGATE_EXPR:
      {
	double_int temv, temm;
	/* Return ~rval + 1.  */
	bit_value_unop_1 (BIT_NOT_EXPR, type, &temv, &temm, type, rval, rmask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   type, temv, temm,
			   type, double_int_one, double_int_zero);
	break;
      }

    CASE_CONVERT:
      {
	bool uns;

	/* First extend mask and value according to the original type.  */
	uns = TYPE_UNSIGNED (rtype);
	*mask = double_int_ext (rmask, TYPE_PRECISION (rtype), uns);
	*val = double_int_ext (rval, TYPE_PRECISION (rtype), uns);

	/* Then extend mask and value according to the target type.  */
	uns = TYPE_UNSIGNED (type);
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	*val = double_int_ext (*val, TYPE_PRECISION (type), uns);
	break;
      }

    default:
      *mask = double_int_minus_one;
      break;
    }
}
/* Apply the operation CODE in type TYPE to the value, mask pairs
   R1VAL, R1MASK and R2VAL, R2MASK representing values of type R1TYPE
   and R2TYPE and set the value, mask pair *VAL and *MASK to the result.  */

static void
bit_value_binop_1 (enum tree_code code, tree type,
		   double_int *val, double_int *mask,
		   tree r1type, double_int r1val, double_int r1mask,
		   tree r2type, double_int r2val, double_int r2mask)
{
  bool uns = TYPE_UNSIGNED (type);
  /* Assume we'll get a constant result.  Use an initial varying value,
     we fall back to varying in the end if necessary.  */
  *mask = double_int_minus_one;

  switch (code)
    {
    case BIT_AND_EXPR:
      /* The mask is constant where there is a known not
	 set bit, (m1 | m2) & ((v1 | m1) & (v2 | m2))  */
      *mask = double_int_and (double_int_ior (r1mask, r2mask),
			      double_int_and (double_int_ior (r1val, r1mask),
					      double_int_ior (r2val, r2mask)));
      *val = double_int_and (r1val, r2val);
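      /* E.g. r1 = "1?" (v1 = 10, m1 = 01) and r2 = "?0" (v2 = 00,
	 m2 = 10): bit 0 is known zero from r2, so the result is "?0"
	 (mask = 10, val = 00).  */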
      break;

    case BIT_IOR_EXPR:
      /* The mask is constant where there is a known
	 set bit, (m1 | m2) & ~((v1 & ~m1) | (v2 & ~m2)).  */
      *mask = double_int_and_not
		(double_int_ior (r1mask, r2mask),
		 double_int_ior (double_int_and_not (r1val, r1mask),
				 double_int_and_not (r2val, r2mask)));
      *val = double_int_ior (r1val, r2val);
      break;

    case BIT_XOR_EXPR:
      /* m1 | m2  */
      *mask = double_int_ior (r1mask, r2mask);
      *val = double_int_xor (r1val, r2val);
      break;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RROTATE_EXPR)
	    shift = -shift;
	  *mask = double_int_lrotate (r1mask, shift, TYPE_PRECISION (type));
	  *val = double_int_lrotate (r1val, shift, TYPE_PRECISION (type));
	}
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* ??? We can handle partially known shift counts if we know
	 its sign.  That way we can tell that (x << (y | 8)) & 255
	 is zero.  */
      if (double_int_zero_p (r2mask))
	{
	  HOST_WIDE_INT shift = r2val.low;
	  if (code == RSHIFT_EXPR)
	    shift = -shift;
	  /* We need to know if we are doing a left or a right shift
	     to properly shift in zeros for left shift and unsigned
	     right shifts and the sign bit for signed right shifts.
	     For signed right shifts we shift in varying in case
	     the sign bit was varying.  */
	  if (shift > 0)
	    {
	      *mask = double_int_lshift (r1mask, shift,
					 TYPE_PRECISION (type), false);
	      *val = double_int_lshift (r1val, shift,
					TYPE_PRECISION (type), false);
	    }
	  else if (shift < 0)
	    {
	      shift = -shift;
	      *mask = double_int_rshift (r1mask, shift,
					 TYPE_PRECISION (type), !uns);
	      *val = double_int_rshift (r1val, shift,
					TYPE_PRECISION (type), !uns);
	    }
	  else
	    {
	      *mask = r1mask;
	      *val = r1val;
	    }
	}
      break;
    case PLUS_EXPR:
    case POINTER_PLUS_EXPR:
      {
	double_int lo, hi;
	/* Do the addition with unknown bits set to zero, to give carry-ins of
	   zero wherever possible.  */
	lo = double_int_add (double_int_and_not (r1val, r1mask),
			     double_int_and_not (r2val, r2mask));
	lo = double_int_ext (lo, TYPE_PRECISION (type), uns);
	/* Do the addition with unknown bits set to one, to give carry-ins of
	   one wherever possible.  */
	hi = double_int_add (double_int_ior (r1val, r1mask),
			     double_int_ior (r2val, r2mask));
	hi = double_int_ext (hi, TYPE_PRECISION (type), uns);
	/* Each bit in the result is known if (a) the corresponding bits in
	   both inputs are known, and (b) the carry-in to that bit position
	   is known.  We can check condition (b) by seeing if we got the same
	   result with minimised carries as with maximised carries.  */
	*mask = double_int_ior (double_int_ior (r1mask, r2mask),
				double_int_xor (lo, hi));
	*mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	/* It shouldn't matter whether we choose lo or hi here.  */
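	/* E.g. r1 in {01, 11} (val 01, mask 10) plus r2 = 01:
	   lo = 010, hi = 100, so mask = 010 | (010 ^ 100) = 110 and
	   the result is "??0" -- bit 0 is known zero, as expected for
	   the possible sums {2, 4}.  */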
	*val = lo;
	break;
      }

    case MINUS_EXPR:
      {
	double_int temv, temm;
	bit_value_unop_1 (NEGATE_EXPR, r2type, &temv, &temm,
			  r2type, r2val, r2mask);
	bit_value_binop_1 (PLUS_EXPR, type, val, mask,
			   r1type, r1val, r1mask,
			   r2type, temv, temm);
	break;
      }
    case MULT_EXPR:
      {
	/* Just track trailing zeros in both operands and transfer
	   them to the other.  */
	int r1tz = double_int_ctz (double_int_ior (r1val, r1mask));
	int r2tz = double_int_ctz (double_int_ior (r2val, r2mask));
	if (r1tz + r2tz >= HOST_BITS_PER_DOUBLE_INT)
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (r1tz + r2tz > 0)
	  {
	    *mask = double_int_not (double_int_mask (r1tz + r2tz));
	    *mask = double_int_ext (*mask, TYPE_PRECISION (type), uns);
	    *val = double_int_zero;
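	    /* E.g. if r1 is known to end in two zero bits and r2 in
	       one, the product must end in three zero bits.  */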
	  }
	break;
      }

    case EQ_EXPR:
    case NE_EXPR:
      {
	double_int m = double_int_ior (r1mask, r2mask);
	if (!double_int_equal_p (double_int_and_not (r1val, m),
				 double_int_and_not (r2val, m)))
	  {
	    *mask = double_int_zero;
	    *val = ((code == EQ_EXPR) ? double_int_zero : double_int_one);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }
    case GE_EXPR:
    case GT_EXPR:
      {
	double_int tem = r1val;
	r1val = r2val;
	r2val = tem;
	tem = r1mask;
	r1mask = r2mask;
	r2mask = tem;
	code = swap_tree_comparison (code);
      }
      /* Fall through.  */
    case LT_EXPR:
    case LE_EXPR:
      {
	int minmax, maxmin;

	/* If the most significant bits are not known we know nothing.  */
	if (double_int_negative_p (r1mask) || double_int_negative_p (r2mask))
	  break;

	/* For comparisons the signedness is in the comparison operands.  */
	uns = TYPE_UNSIGNED (r1type);

	/* If we know the most significant bits we know the values
	   value ranges by means of treating varying bits as zero
	   or one.  Do a cross comparison of the max/min pairs.  */
	maxmin = double_int_cmp (double_int_ior (r1val, r1mask),
				 double_int_and_not (r2val, r2mask), uns);
	minmax = double_int_cmp (double_int_and_not (r1val, r1mask),
				 double_int_ior (r2val, r2mask), uns);
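	/* E.g. for unsigned operands r1 = "0??" (at most 3) and
	   r2 = "1??" (at least 4), maxmin < 0, so r1 < r2 is known to
	   hold even though neither operand is fully constant.  */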
	if (maxmin < 0)  /* r1 is less than r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_one;
	  }
	else if (minmax > 0)  /* r1 is not less or equal to r2.  */
	  {
	    *mask = double_int_zero;
	    *val = double_int_zero;
	  }
	else if (maxmin == minmax)  /* r1 and r2 are equal.  */
	  {
	    /* This probably should never happen as we'd have
	       folded the thing during fully constant value folding.  */
	    *mask = double_int_zero;
	    *val = (code == LE_EXPR ? double_int_one : double_int_zero);
	  }
	else
	  {
	    /* We know the result of a comparison is always one or zero.  */
	    *mask = double_int_one;
	    *val = double_int_zero;
	  }
	break;
      }

    default:;
    }
}
/* Return the propagation value when applying the operation CODE to
   the value RHS yielding type TYPE.  */

static prop_value_t
bit_value_unop (enum tree_code code, tree type, tree rhs)
{
  prop_value_t rval = get_value_for_expr (rhs, true);
  double_int value, mask;
  prop_value_t val;

  if (rval.lattice_val == UNDEFINED)
    return rval;

  gcc_assert ((rval.lattice_val == CONSTANT
	       && TREE_CODE (rval.value) == INTEGER_CST)
	      || double_int_minus_one_p (rval.mask));
  bit_value_unop_1 (code, type, &value, &mask,
		    TREE_TYPE (rhs), value_to_double_int (rval), rval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
/* Return the propagation value when applying the operation CODE to
   the values RHS1 and RHS2 yielding type TYPE.  */

static prop_value_t
bit_value_binop (enum tree_code code, tree type, tree rhs1, tree rhs2)
{
  prop_value_t r1val = get_value_for_expr (rhs1, true);
  prop_value_t r2val = get_value_for_expr (rhs2, true);
  double_int value, mask;
  prop_value_t val;

  if (r1val.lattice_val == UNDEFINED
      || r2val.lattice_val == UNDEFINED)
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      return val;
    }

  gcc_assert ((r1val.lattice_val == CONSTANT
	       && TREE_CODE (r1val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r1val.mask));
  gcc_assert ((r2val.lattice_val == CONSTANT
	       && TREE_CODE (r2val.value) == INTEGER_CST)
	      || double_int_minus_one_p (r2val.mask));
  bit_value_binop_1 (code, type, &value, &mask,
		     TREE_TYPE (rhs1), value_to_double_int (r1val), r1val.mask,
		     TREE_TYPE (rhs2), value_to_double_int (r2val), r2val.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
/* Return the propagation value when applying __builtin_assume_aligned to
   its arguments.  */

static prop_value_t
bit_value_assume_aligned (gimple stmt)
{
  tree ptr = gimple_call_arg (stmt, 0), align, misalign = NULL_TREE;
  tree type = TREE_TYPE (ptr);
  unsigned HOST_WIDE_INT aligni, misaligni = 0;
  prop_value_t ptrval = get_value_for_expr (ptr, true);
  prop_value_t alignval;
  double_int value, mask;
  prop_value_t val;

  if (ptrval.lattice_val == UNDEFINED)
    return ptrval;
  gcc_assert ((ptrval.lattice_val == CONSTANT
	       && TREE_CODE (ptrval.value) == INTEGER_CST)
	      || double_int_minus_one_p (ptrval.mask));
  align = gimple_call_arg (stmt, 1);
  if (!host_integerp (align, 1))
    return ptrval;
  aligni = tree_low_cst (align, 1);
  if (aligni <= 1
      || (aligni & (aligni - 1)) != 0)
    return ptrval;
  if (gimple_call_num_args (stmt) > 2)
    {
      misalign = gimple_call_arg (stmt, 2);
      if (!host_integerp (misalign, 1))
	return ptrval;
      misaligni = tree_low_cst (misalign, 1);
      if (misaligni >= aligni)
	return ptrval;
    }
  align = build_int_cst_type (type, -aligni);
  alignval = get_value_for_expr (align, true);
  bit_value_binop_1 (BIT_AND_EXPR, type, &value, &mask,
		     type, value_to_double_int (ptrval), ptrval.mask,
		     type, value_to_double_int (alignval), alignval.mask);
  if (!double_int_minus_one_p (mask))
    {
      val.lattice_val = CONSTANT;
      val.mask = mask;
      gcc_assert ((mask.low & (aligni - 1)) == 0);
      gcc_assert ((value.low & (aligni - 1)) == 0);
      value.low |= misaligni;
      /* ??? Delay building trees here.  */
      val.value = double_int_to_tree (type, value);
    }
  else
    {
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
    }
  return val;
}
/* Evaluate statement STMT.
   Valid only for assignments, calls, conditionals, and switches.  */

static prop_value_t
evaluate_stmt (gimple stmt)
{
  prop_value_t val;
  tree simplified = NULL_TREE;
  ccp_lattice_t likelyvalue = likely_value (stmt);
  bool is_constant = false;
  unsigned int align;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "which is likely ");
      switch (likelyvalue)
	{
	case CONSTANT:
	  fprintf (dump_file, "CONSTANT");
	  break;
	case UNDEFINED:
	  fprintf (dump_file, "UNDEFINED");
	  break;
	case VARYING:
	  fprintf (dump_file, "VARYING");
	  break;
	default:;
	}
      fprintf (dump_file, "\n");
    }
  /* If the statement is likely to have a CONSTANT result, then try
     to fold the statement to determine the constant value.  */
  /* FIXME.  This is the only place that we call ccp_fold.
     Since likely_value never returns CONSTANT for calls, we will
     not attempt to fold them, including builtins that may profit.  */
  if (likelyvalue == CONSTANT)
    {
      fold_defer_overflow_warnings ();
      simplified = ccp_fold (stmt);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      fold_undefer_overflow_warnings (is_constant, stmt, 0);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }
  /* If the statement is likely to have a VARYING result, then do not
     bother folding the statement.  */
  else if (likelyvalue == VARYING)
    {
      enum gimple_code code = gimple_code (stmt);
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);

	  /* Other cases cannot satisfy is_gimple_min_invariant
	     without folding.  */
	  if (get_gimple_rhs_class (subcode) == GIMPLE_SINGLE_RHS)
	    simplified = gimple_assign_rhs1 (stmt);
	}
      else if (code == GIMPLE_SWITCH)
	simplified = gimple_switch_index (stmt);
      else
	/* These cannot satisfy is_gimple_min_invariant without folding.  */
	gcc_assert (code == GIMPLE_CALL || code == GIMPLE_COND);
      is_constant = simplified && is_gimple_min_invariant (simplified);
      if (is_constant)
	{
	  /* The statement produced a constant value.  */
	  val.lattice_val = CONSTANT;
	  val.value = simplified;
	  val.mask = double_int_zero;
	}
    }
  /* Resort to simplification for bitwise tracking.  */
  if (flag_tree_bit_ccp
      && (likelyvalue == CONSTANT || is_gimple_call (stmt))
      && !is_constant)
    {
      enum gimple_code code = gimple_code (stmt);
      tree fndecl;
      val.lattice_val = VARYING;
      val.value = NULL_TREE;
      val.mask = double_int_minus_one;
      if (code == GIMPLE_ASSIGN)
	{
	  enum tree_code subcode = gimple_assign_rhs_code (stmt);
	  tree rhs1 = gimple_assign_rhs1 (stmt);
	  switch (get_gimple_rhs_class (subcode))
	    {
	    case GIMPLE_SINGLE_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		val = get_value_for_expr (rhs1, true);
	      break;

	    case GIMPLE_UNARY_RHS:
	      if ((INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		   || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		  && (INTEGRAL_TYPE_P (gimple_expr_type (stmt))
		      || POINTER_TYPE_P (gimple_expr_type (stmt))))
		val = bit_value_unop (subcode, gimple_expr_type (stmt), rhs1);
	      break;

	    case GIMPLE_BINARY_RHS:
	      if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		  || POINTER_TYPE_P (TREE_TYPE (rhs1)))
		{
		  tree lhs = gimple_assign_lhs (stmt);
		  tree rhs2 = gimple_assign_rhs2 (stmt);
		  val = bit_value_binop (subcode,
					 TREE_TYPE (lhs), rhs1, rhs2);
		}
	      break;

	    default:;
	    }
	}
      else if (code == GIMPLE_COND)
	{
	  enum tree_code code = gimple_cond_code (stmt);
	  tree rhs1 = gimple_cond_lhs (stmt);
	  tree rhs2 = gimple_cond_rhs (stmt);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
	      || POINTER_TYPE_P (TREE_TYPE (rhs1)))
	    val = bit_value_binop (code, TREE_TYPE (rhs1), rhs1, rhs2);
	}
      else if (code == GIMPLE_CALL
	       && (fndecl = gimple_call_fndecl (stmt))
	       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    case BUILT_IN_MALLOC:
	    case BUILT_IN_REALLOC:
	    case BUILT_IN_CALLOC:
	    case BUILT_IN_STRDUP:
	    case BUILT_IN_STRNDUP:
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) MALLOC_ABI_ALIGNMENT)
			      / BITS_PER_UNIT - 1));
	      break;

	    case BUILT_IN_ALLOCA:
	    case BUILT_IN_ALLOCA_WITH_ALIGN:
	      align = (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN
		       ? TREE_INT_CST_LOW (gimple_call_arg (stmt, 1))
		       : BIGGEST_ALIGNMENT);
	      val.lattice_val = CONSTANT;
	      val.value = build_int_cst (TREE_TYPE (gimple_get_lhs (stmt)), 0);
	      val.mask = shwi_to_double_int
			   (~(((HOST_WIDE_INT) align)
			      / BITS_PER_UNIT - 1));
	      break;

	    /* These builtins return their first argument, unmodified.  */
	    case BUILT_IN_MEMCPY:
	    case BUILT_IN_MEMMOVE:
	    case BUILT_IN_MEMSET:
	    case BUILT_IN_STRCPY:
	    case BUILT_IN_STRNCPY:
	    case BUILT_IN_MEMCPY_CHK:
	    case BUILT_IN_MEMMOVE_CHK:
	    case BUILT_IN_MEMSET_CHK:
	    case BUILT_IN_STRCPY_CHK:
	    case BUILT_IN_STRNCPY_CHK:
	      val = get_value_for_expr (gimple_call_arg (stmt, 0), true);
	      break;

	    case BUILT_IN_ASSUME_ALIGNED:
	      val = bit_value_assume_aligned (stmt);
	      break;

	    default:;
	    }
	}
      is_constant = (val.lattice_val == CONSTANT);
    }

  if (!is_constant)
    {
      /* The statement produced a nonconstant value.  If the statement
	 had UNDEFINED operands, then the result of the statement
	 should be UNDEFINED.  Otherwise, the statement is VARYING.  */
      if (likelyvalue == UNDEFINED)
	{
	  val.lattice_val = likelyvalue;
	  val.mask = double_int_zero;
	}
      else
	{
	  val.lattice_val = VARYING;
	  val.mask = double_int_minus_one;
	}
      val.value = NULL_TREE;
    }

  return val;
}
/* Given a BUILT_IN_STACK_SAVE value SAVED_VAL, insert a clobber of VAR before
   each matching BUILT_IN_STACK_RESTORE.  Mark visited phis in VISITED.  */

static void
insert_clobber_before_stack_restore (tree saved_val, tree var, htab_t *visited)
{
  gimple stmt, clobber_stmt;
  tree clobber;
  imm_use_iterator iter;
  gimple_stmt_iterator i;
  gimple *slot;

  FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
    if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
      {
	clobber = build_constructor (TREE_TYPE (var), NULL);
	TREE_THIS_VOLATILE (clobber) = 1;
	clobber_stmt = gimple_build_assign (var, clobber);

	i = gsi_for_stmt (stmt);
	gsi_insert_before (&i, clobber_stmt, GSI_SAME_STMT);
      }
    else if (gimple_code (stmt) == GIMPLE_PHI)
      {
	if (*visited == NULL)
	  *visited = htab_create (10, htab_hash_pointer, htab_eq_pointer, NULL);

	slot = (gimple *)htab_find_slot (*visited, stmt, INSERT);
	if (*slot != NULL)
	  continue;

	*slot = stmt;
	insert_clobber_before_stack_restore (gimple_phi_result (stmt), var,
					     visited);
      }
    else
      gcc_assert (is_gimple_debug (stmt));
}
/* Advance the iterator to the previous non-debug gimple statement in the same
   or dominating basic block.  */

static inline void
gsi_prev_dom_bb_nondebug (gimple_stmt_iterator *i)
{
  basic_block dom;

  gsi_prev_nondebug (i);
  while (gsi_end_p (*i))
    {
      dom = get_immediate_dominator (CDI_DOMINATORS, i->bb);
      if (dom == NULL || dom == ENTRY_BLOCK_PTR)
	return;

      *i = gsi_last_bb (dom);
    }
}
/* Find a BUILT_IN_STACK_SAVE dominating gsi_stmt (I), and insert
   a clobber of VAR before each matching BUILT_IN_STACK_RESTORE.

   It is possible that BUILT_IN_STACK_SAVE cannot be found in a dominator when
   a previous pass (such as DOM) duplicated it along multiple paths to a BB.
   In that case the function gives up without inserting the clobbers.  */

static void
insert_clobbers_for_var (gimple_stmt_iterator i, tree var)
{
  gimple stmt;
  tree saved_val;
  htab_t visited = NULL;

  for (; !gsi_end_p (i); gsi_prev_dom_bb_nondebug (&i))
    {
      stmt = gsi_stmt (i);

      if (!gimple_call_builtin_p (stmt, BUILT_IN_STACK_SAVE))
	continue;

      saved_val = gimple_call_lhs (stmt);
      if (saved_val == NULL_TREE)
	continue;

      insert_clobber_before_stack_restore (saved_val, var, &visited);
      break;
    }

  if (visited != NULL)
    htab_delete (visited);
}
/* Detects a __builtin_alloca_with_align with constant size argument.  Declares
   fixed-size array and returns the address, if found, otherwise returns
   NULL_TREE.  */

static tree
fold_builtin_alloca_with_align (gimple stmt)
{
  unsigned HOST_WIDE_INT size, threshold, n_elem;
  tree lhs, arg, block, var, elem_type, array_type;

  /* Get lhs.  */
  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    return NULL_TREE;

  /* Detect constant argument.  */
  arg = get_constant_value (gimple_call_arg (stmt, 0));
  if (arg == NULL_TREE
      || TREE_CODE (arg) != INTEGER_CST
      || !host_integerp (arg, 1))
    return NULL_TREE;

  size = TREE_INT_CST_LOW (arg);

  /* Heuristic: don't fold large allocas.  */
  threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
  /* In case the alloca is located at function entry, it has the same lifetime
     as a declared array, so we allow a larger size.  */
  block = gimple_block (stmt);
  if (!(cfun->after_inlining
        && TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL))
    threshold /= 10;
  if (size > threshold)
    return NULL_TREE;

  /* Declare array.  */
  elem_type = build_nonstandard_integer_type (BITS_PER_UNIT, 1);
  n_elem = size * 8 / BITS_PER_UNIT;
  array_type = build_array_type_nelts (elem_type, n_elem);
  var = create_tmp_var (array_type, NULL);
  DECL_ALIGN (var) = TREE_INT_CST_LOW (gimple_call_arg (stmt, 1));
  {
    struct ptr_info_def *pi = SSA_NAME_PTR_INFO (lhs);
    if (pi != NULL && !pi->pt.anything)
      {
	bool singleton_p;
	unsigned uid;
	singleton_p = pt_solution_singleton_p (&pi->pt, &uid);
	gcc_assert (singleton_p);
	SET_DECL_PT_UID (var, uid);
      }
  }

  /* Fold alloca to the address of the array.  */
  return fold_convert (TREE_TYPE (lhs), build_fold_addr_expr (var));
}
/* Fold the stmt at *GSI with CCP specific information that propagating
   and regular folding does not catch.  */

static bool
ccp_fold_stmt (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	prop_value_t val;
	/* Statement evaluation will handle type mismatches in constants
	   more gracefully than the final propagation.  This allows us to
	   fold more conditionals here.  */
	val = evaluate_stmt (stmt);
	if (val.lattice_val != CONSTANT
	    || !double_int_zero_p (val.mask))
	  return false;

	if (dump_file)
	  {
	    fprintf (dump_file, "Folding predicate ");
	    print_gimple_expr (dump_file, stmt, 0, 0);
	    fprintf (dump_file, " to ");
	    print_generic_expr (dump_file, val.value, 0);
	    fprintf (dump_file, "\n");
	  }

	if (integer_zerop (val.value))
	  gimple_cond_make_false (stmt);
	else
	  gimple_cond_make_true (stmt);

	return true;
      }

    case GIMPLE_CALL:
      {
	tree lhs = gimple_call_lhs (stmt);
	int flags = gimple_call_flags (stmt);
	tree val;
	tree argt;
	bool changed = false;
	unsigned i;

	/* If the call was folded into a constant make sure it goes
	   away even if we cannot propagate into all uses because of
	   type issues.  */
	if (lhs
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs))
	    /* Don't optimize away calls that have side-effects.  */
	    && (flags & (ECF_CONST|ECF_PURE)) != 0
	    && (flags & ECF_LOOPING_CONST_OR_PURE) == 0)
	  {
	    tree new_rhs = unshare_expr (val);
	    bool res;
	    if (!useless_type_conversion_p (TREE_TYPE (lhs),
					    TREE_TYPE (new_rhs)))
	      new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	    res = update_call_from_tree (gsi, new_rhs);
	    gcc_assert (res);
	    return true;
	  }

	/* Internal calls provide no argument types, so the extra laxity
	   for normal calls does not apply.  */
	if (gimple_call_internal_p (stmt))
	  return false;

	/* The heuristic of fold_builtin_alloca_with_align differs before and
	   after inlining, so we don't require the arg to be changed into a
	   constant for folding, but just to be constant.  */
	if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
	  {
	    tree new_rhs = fold_builtin_alloca_with_align (stmt);
	    if (new_rhs)
	      {
		bool res = update_call_from_tree (gsi, new_rhs);
		tree var = TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 0);
		gcc_assert (res);
		insert_clobbers_for_var (*gsi, var);
		return true;
	      }
	  }

	/* Propagate into the call arguments.  Compared to replace_uses_in
	   this can use the argument slot types for type verification
	   instead of the current argument type.  We also can safely
	   drop qualifiers here as we are dealing with constants anyway.  */
	argt = TYPE_ARG_TYPES (gimple_call_fntype (stmt));
	for (i = 0; i < gimple_call_num_args (stmt) && argt;
	     ++i, argt = TREE_CHAIN (argt))
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) == SSA_NAME
		&& (val = get_constant_value (arg))
		&& useless_type_conversion_p
		     (TYPE_MAIN_VARIANT (TREE_VALUE (argt)),
		      TYPE_MAIN_VARIANT (TREE_TYPE (val))))
	      {
		gimple_call_set_arg (stmt, i, unshare_expr (val));
		changed = true;
	      }
	  }

	return changed;
      }

    case GIMPLE_ASSIGN:
      {
	tree lhs = gimple_assign_lhs (stmt);
	tree val;

	/* If we have a load that turned out to be constant replace it
	   as we cannot propagate into all uses in all cases.  */
	if (gimple_assign_single_p (stmt)
	    && TREE_CODE (lhs) == SSA_NAME
	    && (val = get_constant_value (lhs)))
	  {
	    tree rhs = unshare_expr (val);
	    if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	      rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
	    gimple_assign_set_rhs_from_tree (gsi, rhs);
	    return true;
	  }

	return false;
      }

    default:
      return false;
    }
}
/* Visit the assignment statement STMT.  Set the value of its LHS to the
   value computed by the RHS and store LHS in *OUTPUT_P.  If STMT
   creates virtual definitions, set the value of each new name to that
   of the RHS (if we can derive a constant out of the RHS).
   Value-returning call statements also perform an assignment, and
   are handled here.  */

static enum ssa_prop_result
visit_assignment (gimple stmt, tree *output_p)
{
  prop_value_t val;
  enum ssa_prop_result retval;

  tree lhs = gimple_get_lhs (stmt);

  gcc_assert (gimple_code (stmt) != GIMPLE_CALL
              || gimple_call_lhs (stmt) != NULL_TREE);

  if (gimple_assign_single_p (stmt)
      && gimple_assign_rhs_code (stmt) == SSA_NAME)
    /* For a simple copy operation, we copy the lattice values.  */
    val = *get_value (gimple_assign_rhs1 (stmt));
  else
    /* Evaluate the statement, which could be
       either a GIMPLE_ASSIGN or a GIMPLE_CALL.  */
    val = evaluate_stmt (stmt);

  retval = SSA_PROP_NOT_INTERESTING;

  /* Set the lattice value of the statement's output.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      /* If STMT is an assignment to an SSA_NAME, we only have one
	 value to set.  */
      if (set_lattice_value (lhs, val))
	{
	  *output_p = lhs;
	  if (val.lattice_val == VARYING)
	    retval = SSA_PROP_VARYING;
	  else
	    retval = SSA_PROP_INTERESTING;
	}
    }

  return retval;
}
/* Visit the conditional statement STMT.  Return SSA_PROP_INTERESTING
   if it can determine which edge will be taken.  Otherwise, return
   SSA_PROP_VARYING.  */

static enum ssa_prop_result
visit_cond_stmt (gimple stmt, edge *taken_edge_p)
{
  prop_value_t val;
  basic_block block;

  block = gimple_bb (stmt);
  val = evaluate_stmt (stmt);
  if (val.lattice_val != CONSTANT
      || !double_int_zero_p (val.mask))
    return SSA_PROP_VARYING;

  /* Find which edge out of the conditional block will be taken and add it
     to the worklist.  If no single edge can be determined statically,
     return SSA_PROP_VARYING to feed all the outgoing edges to the
     propagation engine.  */
  *taken_edge_p = find_taken_edge (block, val.value);
  if (*taken_edge_p)
    return SSA_PROP_INTERESTING;
  else
    return SSA_PROP_VARYING;
}
/* Evaluate statement STMT.  If the statement produces an output value and
   its evaluation changes the lattice value of its output, return
   SSA_PROP_INTERESTING and set *OUTPUT_P to the SSA_NAME holding the
   output value.

   If STMT is a conditional branch and we can determine its truth
   value, set *TAKEN_EDGE_P accordingly.  If STMT produces a varying
   value, return SSA_PROP_VARYING.  */

static enum ssa_prop_result
ccp_visit_stmt (gimple stmt, edge *taken_edge_p, tree *output_p)
{
  tree def;
  ssa_op_iter iter;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nVisiting statement:\n");
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
    }

  switch (gimple_code (stmt))
    {
      case GIMPLE_ASSIGN:
        /* If the statement is an assignment that produces a single
	   output value, evaluate its RHS to see if the lattice value of
	   its output has changed.  */
	return visit_assignment (stmt, output_p);

      case GIMPLE_CALL:
	/* A value-returning call also performs an assignment.  */
	if (gimple_call_lhs (stmt) != NULL_TREE)
	  return visit_assignment (stmt, output_p);
	break;

      case GIMPLE_COND:
      case GIMPLE_SWITCH:
	/* If STMT is a conditional branch, see if we can determine
	   which branch will be taken.  */
	/* FIXME.  It appears that we should be able to optimize
	   computed GOTOs here as well.  */
	return visit_cond_stmt (stmt, taken_edge_p);

      default:
	break;
    }

  /* Any other kind of statement is not interesting for constant
     propagation and, therefore, not worth simulating.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "No interesting values produced.  Marked VARYING.\n");

  /* Definitions made by statements other than assignments to
     SSA_NAMEs represent unknown modifications to their outputs.
     Mark them VARYING.  */
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_ALL_DEFS)
    {
      prop_value_t v = { VARYING, NULL_TREE, { -1, (HOST_WIDE_INT) -1 } };
      set_lattice_value (def, v);
    }

  return SSA_PROP_VARYING;
}
/* Main entry point for SSA Conditional Constant Propagation.  */

static unsigned int
do_ssa_ccp (void)
{
  unsigned int todo = 0;
  calculate_dominance_info (CDI_DOMINATORS);
  ccp_initialize ();
  ssa_propagate (ccp_visit_stmt, ccp_visit_phi_node);
  if (ccp_finalize ())
    todo = (TODO_cleanup_cfg | TODO_update_ssa | TODO_remove_unused_locals);
  free_dominance_info (CDI_DOMINATORS);
  return todo;
}


static bool
gate_ccp (void)
{
  return flag_tree_ccp != 0;
}
struct gimple_opt_pass pass_ccp =
{
 {
  GIMPLE_PASS,
  "ccp",				/* name */
  gate_ccp,				/* gate */
  do_ssa_ccp,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CCP,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa
  | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
 }
};
/* Try to optimize out __builtin_stack_restore.  Optimize it out
   if there is another __builtin_stack_restore in the same basic
   block and no calls or ASM_EXPRs are in between, or if this block's
   only outgoing edge is to EXIT_BLOCK and there are no calls or
   ASM_EXPRs after this __builtin_stack_restore.  */

static tree
optimize_stack_restore (gimple_stmt_iterator i)
{
  tree callee;
  gimple stmt;

  basic_block bb = gsi_bb (i);
  gimple call = gsi_stmt (i);

  if (gimple_code (call) != GIMPLE_CALL
      || gimple_call_num_args (call) != 1
      || TREE_CODE (gimple_call_arg (call, 0)) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, 0))))
    return NULL_TREE;

  for (gsi_next (&i); !gsi_end_p (i); gsi_next (&i))
    {
      stmt = gsi_stmt (i);
      if (gimple_code (stmt) == GIMPLE_ASM)
	return NULL_TREE;
      if (gimple_code (stmt) != GIMPLE_CALL)
	continue;

      callee = gimple_call_fndecl (stmt);
      if (!callee
	  || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	  /* All regular builtins are ok, just obviously not alloca.  */
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
	  || DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA_WITH_ALIGN)
	return NULL_TREE;

      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_RESTORE)
	goto second_stack_restore;
    }

  if (!gsi_end_p (i))
    return NULL_TREE;

  /* Allow one successor of the exit block, or zero successors.  */
  switch (EDGE_COUNT (bb->succs))
    {
    case 0:
      break;
    case 1:
      if (single_succ_edge (bb)->dest != EXIT_BLOCK_PTR)
	return NULL_TREE;
      break;
    default:
      return NULL_TREE;
    }
 second_stack_restore:

  /* If there's exactly one use, then zap the call to __builtin_stack_save.
     If there are multiple uses, then the last one should remove the call.
     In any case, whether the call to __builtin_stack_save can be removed
     or not is irrelevant to removing the call to __builtin_stack_restore.  */
  if (has_single_use (gimple_call_arg (call, 0)))
    {
      gimple stack_save = SSA_NAME_DEF_STMT (gimple_call_arg (call, 0));
      if (is_gimple_call (stack_save))
	{
	  callee = gimple_call_fndecl (stack_save);
	  if (callee
	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STACK_SAVE)
	    {
	      gimple_stmt_iterator stack_save_gsi;
	      tree rhs;

	      stack_save_gsi = gsi_for_stmt (stack_save);
	      rhs = build_int_cst (TREE_TYPE (gimple_call_arg (call, 0)), 0);
	      update_call_from_tree (&stack_save_gsi, rhs);
	    }
	}
    }

  /* No effect, so the statement will be deleted.  */
  return integer_zero_node;
}
/* If va_list type is a simple pointer and nothing special is needed,
   optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
   __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
   pointer assignment.  */

static tree
optimize_stdarg_builtin (gimple call)
{
  tree callee, lhs, rhs, cfun_va_list;
  bool va_list_simple_ptr;
  location_t loc = gimple_location (call);

  if (gimple_code (call) != GIMPLE_CALL)
    return NULL_TREE;

  callee = gimple_call_fndecl (call);

  cfun_va_list = targetm.fn_abi_va_list (callee);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_VA_START:
      if (!va_list_simple_ptr
	  || targetm.expand_builtin_va_start != NULL
	  || !builtin_decl_explicit_p (BUILT_IN_NEXT_ARG))
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = build_call_expr_loc (loc, builtin_decl_explicit (BUILT_IN_NEXT_ARG),
                                 1, integer_zero_node);
      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_COPY:
      if (!va_list_simple_ptr)
	return NULL_TREE;

      if (gimple_call_num_args (call) != 2)
	return NULL_TREE;

      lhs = gimple_call_arg (call, 0);
      if (!POINTER_TYPE_P (TREE_TYPE (lhs))
	  || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
	     != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      lhs = build_fold_indirect_ref_loc (loc, lhs);
      rhs = gimple_call_arg (call, 1);
      if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
	  != TYPE_MAIN_VARIANT (cfun_va_list))
	return NULL_TREE;

      rhs = fold_convert_loc (loc, TREE_TYPE (lhs), rhs);
      return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);

    case BUILT_IN_VA_END:
      /* No effect, so the statement will be deleted.  */
      return integer_zero_node;

    default:
      gcc_unreachable ();
    }
}
/* A simple pass that attempts to fold all builtin functions.  This pass
   is run after we've propagated as many constants as we can.  */

static unsigned int
execute_fold_all_builtins (void)
{
  bool cfg_changed = false;
  basic_block bb;
  unsigned int todoflags = 0;

  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator i;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
	{
          gimple stmt, old_stmt;
	  tree callee, result;
	  enum built_in_function fcode;

	  stmt = gsi_stmt (i);

          if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  fcode = DECL_FUNCTION_CODE (callee);

	  result = gimple_fold_builtin (stmt);

	  if (result)
	    gimple_remove_stmt_histograms (cfun, stmt);

	  if (!result)
	    switch (DECL_FUNCTION_CODE (callee))
	      {
	      case BUILT_IN_CONSTANT_P:
		/* Resolve __builtin_constant_p.  If it hasn't been
		   folded to integer_one_node by now, it's fairly
		   certain that the value simply isn't constant.  */
                result = integer_zero_node;
		break;

	      case BUILT_IN_ASSUME_ALIGNED:
		/* Remove __builtin_assume_aligned.  */
		result = gimple_call_arg (stmt, 0);
		break;

	      case BUILT_IN_STACK_RESTORE:
		result = optimize_stack_restore (i);
		if (result)
		  break;
		gsi_next (&i);
		continue;

	      case BUILT_IN_VA_START:
	      case BUILT_IN_VA_END:
	      case BUILT_IN_VA_COPY:
		/* These shouldn't be folded before pass_stdarg.  */
		result = optimize_stdarg_builtin (stmt);
		if (result)
		  break;
		/* FALLTHRU */

	      default:
		gsi_next (&i);
		continue;
	      }

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Simplified\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	    }

          old_stmt = stmt;
          if (!update_call_from_tree (&i, result))
	    {
	      gimplify_and_update_call_from_tree (&i, result);
	      todoflags |= TODO_update_address_taken;
	    }

	  stmt = gsi_stmt (i);
	  update_stmt (stmt);

	  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt)
	      && gimple_purge_dead_eh_edges (bb))
	    cfg_changed = true;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "to\n  ");
	      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	      fprintf (dump_file, "\n");
	    }

	  /* Retry the same statement if it changed into another
	     builtin, there might be new opportunities now.  */
          if (gimple_code (stmt) != GIMPLE_CALL)
	    {
	      gsi_next (&i);
	      continue;
	    }
	  callee = gimple_call_fndecl (stmt);
	  if (!callee
              || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
	      || DECL_FUNCTION_CODE (callee) == fcode)
	    gsi_next (&i);
	}
    }

  /* Delete unreachable blocks.  */
  if (cfg_changed)
    todoflags |= TODO_cleanup_cfg;

  return todoflags;
}
struct gimple_opt_pass pass_fold_builtins =
{
 {
  GIMPLE_PASS,
  "fab",				/* name */
  NULL,					/* gate */
  execute_fold_all_builtins,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_ssa
  | TODO_update_ssa			/* todo_flags_finish */
 }
};