1 /* Predicate aware uninitialized variable warning.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2010 Free Software
4 Contributed by Xinliang David Li <davidxl@google.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
29 #include "langhooks.h"
30 #include "basic-block.h"
33 #include "gimple-pretty-print.h"
35 #include "pointer-set.h"
36 #include "tree-flow.h"
38 #include "tree-inline.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "diagnostic-core.h"
47 /* This implements the pass that does predicate aware warning on uses of
48 possibly uninitialized variables. The pass first collects the set of
49 possibly uninitialized SSA names. For each such name, it walks through
50 all its immediate uses. For each immediate use, it rebuilds the condition
51 expression (the predicate) that guards the use. The predicate is then
52 examined to see if the variable is always defined under that same condition.
53 This is done either by pruning the unrealizable paths that lead to the
54 default definitions or by checking if the predicate set that guards the
55 defining paths is a superset of the use predicate. */
/* Pointer set of potentially undefined ssa names, i.e.,
   ssa names that are defined by phi with operands that
   are not defined or potentially undefined.  */
static struct pointer_set_t *possibly_undefined_names = 0;
/* Bit mask handling macros.  Arguments and the whole expansion are
   parenthesized so the macros compose safely in larger expressions,
   and the shifted constant is unsigned: `1 << 31` on a signed int is
   undefined behavior, while `1U << 31` is well defined.  */
#define MASK_SET_BIT(mask, pos) ((mask) |= (1U << (pos)))
#define MASK_TEST_BIT(mask, pos) ((mask) & (1U << (pos)))
#define MASK_EMPTY(mask) ((mask) == 0)
/* Returns the first bit position (starting from LSB)
   in MASK that is non zero.  Returns -1 if the mask is empty.
   Uses an unsigned shift so that probing bit 31 is well defined.  */

static int
get_mask_first_set_bit (unsigned mask)
{
  int pos = 0;

  if (mask == 0)
    return -1;

  while ((mask & (1U << pos)) == 0)
    pos++;

  return pos;
}
#define MASK_FIRST_SET_BIT(mask) get_mask_first_set_bit (mask)
85 /* Return true if T, an SSA_NAME, has an undefined value. */
88 ssa_undefined_value_p (tree t)
90 tree var = SSA_NAME_VAR (t);
92 /* Parameters get their initial value from the function entry. */
93 if (TREE_CODE (var) == PARM_DECL)
96 /* When returning by reference the return address is actually a hidden
98 if (TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL
99 && DECL_BY_REFERENCE (SSA_NAME_VAR (t)))
102 /* Hard register variables get their initial value from the ether. */
103 if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
106 /* The value is undefined iff its definition statement is empty. */
107 return (gimple_nop_p (SSA_NAME_DEF_STMT (t))
108 || (possibly_undefined_names
109 && pointer_set_contains (possibly_undefined_names, t)));
112 /* Checks if the operand OPND of PHI is defined by
113 another phi with one operand defined by this PHI,
114 but the rest operands are all defined. If yes,
115 returns true to skip this this operand as being
116 redundant. Can be enhanced to be more general. */
119 can_skip_redundant_opnd (tree opnd, gimple phi)
125 phi_def = gimple_phi_result (phi);
126 op_def = SSA_NAME_DEF_STMT (opnd);
127 if (gimple_code (op_def) != GIMPLE_PHI)
129 n = gimple_phi_num_args (op_def);
130 for (i = 0; i < n; ++i)
132 tree op = gimple_phi_arg_def (op_def, i);
133 if (TREE_CODE (op) != SSA_NAME)
135 if (op != phi_def && ssa_undefined_value_p (op))
142 /* Returns a bit mask holding the positions of arguments in PHI
143 that have empty (or possibly empty) definitions. */
146 compute_uninit_opnds_pos (gimple phi)
149 unsigned uninit_opnds = 0;
151 n = gimple_phi_num_args (phi);
153 for (i = 0; i < n; ++i)
155 tree op = gimple_phi_arg_def (phi, i);
156 if (TREE_CODE (op) == SSA_NAME
157 && ssa_undefined_value_p (op)
158 && !can_skip_redundant_opnd (op, phi))
159 MASK_SET_BIT (uninit_opnds, i);
164 /* Find the immediate postdominator PDOM of the specified
165 basic block BLOCK. */
167 static inline basic_block
168 find_pdom (basic_block block)
170 if (block == EXIT_BLOCK_PTR)
171 return EXIT_BLOCK_PTR;
175 = get_immediate_dominator (CDI_POST_DOMINATORS, block);
177 return EXIT_BLOCK_PTR;
182 /* Find the immediate DOM of the specified
183 basic block BLOCK. */
185 static inline basic_block
186 find_dom (basic_block block)
188 if (block == ENTRY_BLOCK_PTR)
189 return ENTRY_BLOCK_PTR;
192 basic_block bb = get_immediate_dominator (CDI_DOMINATORS, block);
194 return ENTRY_BLOCK_PTR;
199 /* Returns true if BB1 is postdominating BB2 and BB1 is
200 not a loop exit bb. The loop exit bb check is simple and does
201 not cover all cases. */
204 is_non_loop_exit_postdominating (basic_block bb1, basic_block bb2)
206 if (!dominated_by_p (CDI_POST_DOMINATORS, bb2, bb1))
209 if (single_pred_p (bb1) && !single_succ_p (bb2))
215 /* Find the closest postdominator of a specified BB, which is control
218 static inline basic_block
219 find_control_equiv_block (basic_block bb)
223 pdom = find_pdom (bb);
225 /* Skip the postdominating bb that is also loop exit. */
226 if (!is_non_loop_exit_postdominating (pdom, bb))
229 if (dominated_by_p (CDI_DOMINATORS, pdom, bb))
/* Upper bounds on the number of control dependence chains collected
   and on the length of a single chain -- keeps the analysis cheap.  */
#define MAX_NUM_CHAINS 8
#define MAX_CHAIN_LEN 5
238 /* Computes the control dependence chains (paths of edges)
239 for DEP_BB up to the dominating basic block BB (the head node of a
240 chain should be dominated by it). CD_CHAINS is pointer to a
241 dynamic array holding the result chains. CUR_CD_CHAIN is the current
242 chain being computed. *NUM_CHAINS is total number of chains. The
243 function returns true if the information is successfully computed,
244 return false if there is no control dependence or not computed. */
247 compute_control_dep_chain (basic_block bb, basic_block dep_bb,
248 VEC(edge, heap) **cd_chains,
250 VEC(edge, heap) **cur_cd_chain)
255 bool found_cd_chain = false;
256 size_t cur_chain_len = 0;
258 if (EDGE_COUNT (bb->succs) < 2)
261 /* Could use a set instead. */
262 cur_chain_len = VEC_length (edge, *cur_cd_chain);
263 if (cur_chain_len > MAX_CHAIN_LEN)
266 for (i = 0; i < cur_chain_len; i++)
268 edge e = VEC_index (edge, *cur_cd_chain, i);
269 /* cycle detected. */
274 FOR_EACH_EDGE (e, ei, bb->succs)
277 if (e->flags & (EDGE_FAKE | EDGE_ABNORMAL))
281 VEC_safe_push (edge, heap, *cur_cd_chain, e);
282 while (!is_non_loop_exit_postdominating (cd_bb, bb))
286 /* Found a direct control dependence. */
287 if (*num_chains < MAX_NUM_CHAINS)
289 cd_chains[*num_chains]
290 = VEC_copy (edge, heap, *cur_cd_chain);
293 found_cd_chain = true;
294 /* check path from next edge. */
298 /* Now check if DEP_BB is indirectly control dependent on BB. */
299 if (compute_control_dep_chain (cd_bb, dep_bb, cd_chains,
300 num_chains, cur_cd_chain))
302 found_cd_chain = true;
306 cd_bb = find_pdom (cd_bb);
307 if (cd_bb == EXIT_BLOCK_PTR)
310 VEC_pop (edge, *cur_cd_chain);
311 gcc_assert (VEC_length (edge, *cur_cd_chain) == cur_chain_len);
313 gcc_assert (VEC_length (edge, *cur_cd_chain) == cur_chain_len);
315 return found_cd_chain;
318 typedef struct use_pred_info
324 DEF_VEC_P(use_pred_info_t);
325 DEF_VEC_ALLOC_P(use_pred_info_t, heap);
328 /* Converts the chains of control dependence edges into a set of
329 predicates. A control dependence chain is represented by a vector
330 edges. DEP_CHAINS points to an array of dependence chains.
331 NUM_CHAINS is the size of the chain array. One edge in a dependence
332 chain is mapped to predicate expression represented by use_pred_info_t
333 type. One dependence chain is converted to a composite predicate that
334 is the result of AND operation of use_pred_info_t mapped to each edge.
335 A composite predicate is presented by a vector of use_pred_info_t. On
336 return, *PREDS points to the resulting array of composite predicates.
337 *NUM_PREDS is the number of composite predictes. */
340 convert_control_dep_chain_into_preds (VEC(edge, heap) **dep_chains,
342 VEC(use_pred_info_t, heap) ***preds,
345 bool has_valid_pred = false;
347 if (num_chains == 0 || num_chains >= MAX_NUM_CHAINS)
350 /* Now convert CD chains into predicates */
351 has_valid_pred = true;
353 /* Now convert the control dep chain into a set
355 *preds = XCNEWVEC (VEC(use_pred_info_t, heap) *,
357 *num_preds = num_chains;
359 for (i = 0; i < num_chains; i++)
361 VEC(edge, heap) *one_cd_chain = dep_chains[i];
362 for (j = 0; j < VEC_length (edge, one_cd_chain); j++)
365 gimple_stmt_iterator gsi;
366 basic_block guard_bb;
367 use_pred_info_t one_pred;
370 e = VEC_index (edge, one_cd_chain, j);
372 gsi = gsi_last_bb (guard_bb);
375 has_valid_pred = false;
378 cond_stmt = gsi_stmt (gsi);
379 if (gimple_code (cond_stmt) == GIMPLE_CALL
380 && EDGE_COUNT (e->src->succs) >= 2)
382 /* Ignore EH edge. Can add assertion
383 on the other edge's flag. */
386 /* Skip if there is essentially one succesor. */
387 if (EDGE_COUNT (e->src->succs) == 2)
393 FOR_EACH_EDGE (e1, ei1, e->src->succs)
395 if (EDGE_COUNT (e1->dest->succs) == 0)
404 if (gimple_code (cond_stmt) != GIMPLE_COND)
406 has_valid_pred = false;
409 one_pred = XNEW (struct use_pred_info);
410 one_pred->cond = cond_stmt;
411 one_pred->invert = !!(e->flags & EDGE_FALSE_VALUE);
412 VEC_safe_push (use_pred_info_t, heap, (*preds)[i], one_pred);
418 return has_valid_pred;
421 /* Computes all control dependence chains for USE_BB. The control
422 dependence chains are then converted to an array of composite
423 predicates pointed to by PREDS. PHI_BB is the basic block of
424 the phi whose result is used in USE_BB. */
427 find_predicates (VEC(use_pred_info_t, heap) ***preds,
432 size_t num_chains = 0, i;
433 VEC(edge, heap) **dep_chains = 0;
434 VEC(edge, heap) *cur_chain = 0;
435 bool has_valid_pred = false;
436 basic_block cd_root = 0;
438 dep_chains = XCNEWVEC (VEC(edge, heap) *, MAX_NUM_CHAINS);
440 /* First find the closest bb that is control equivalent to PHI_BB
441 that also dominates USE_BB. */
443 while (dominated_by_p (CDI_DOMINATORS, use_bb, cd_root))
445 basic_block ctrl_eq_bb = find_control_equiv_block (cd_root);
446 if (ctrl_eq_bb && dominated_by_p (CDI_DOMINATORS, use_bb, ctrl_eq_bb))
447 cd_root = ctrl_eq_bb;
452 compute_control_dep_chain (cd_root, use_bb,
453 dep_chains, &num_chains,
457 = convert_control_dep_chain_into_preds (dep_chains,
461 /* Free individual chain */
462 VEC_free (edge, heap, cur_chain);
463 for (i = 0; i < num_chains; i++)
464 VEC_free (edge, heap, dep_chains[i]);
466 return has_valid_pred;
469 /* Computes the set of incoming edges of PHI that have non empty
470 definitions of a phi chain. The collection will be done
471 recursively on operands that are defined by phis. CD_ROOT
472 is the control dependence root. *EDGES holds the result, and
473 VISITED_PHIS is a pointer set for detecting cycles. */
476 collect_phi_def_edges (gimple phi, basic_block cd_root,
477 VEC(edge, heap) **edges,
478 struct pointer_set_t *visited_phis)
484 if (pointer_set_insert (visited_phis, phi))
487 n = gimple_phi_num_args (phi);
488 for (i = 0; i < n; i++)
490 opnd_edge = gimple_phi_arg_edge (phi, i);
491 opnd = gimple_phi_arg_def (phi, i);
493 if (TREE_CODE (opnd) != SSA_NAME
494 || !ssa_undefined_value_p (opnd))
495 VEC_safe_push (edge, heap, *edges, opnd_edge);
498 gimple def = SSA_NAME_DEF_STMT (opnd);
499 if (gimple_code (def) == GIMPLE_PHI
500 && dominated_by_p (CDI_DOMINATORS,
501 gimple_bb (def), cd_root))
502 collect_phi_def_edges (def, cd_root, edges,
508 /* For each use edge of PHI, computes all control dependence chains.
509 The control dependence chains are then converted to an array of
510 composite predicates pointed to by PREDS. */
513 find_def_preds (VEC(use_pred_info_t, heap) ***preds,
514 size_t *num_preds, gimple phi)
516 size_t num_chains = 0, i, n;
517 VEC(edge, heap) **dep_chains = 0;
518 VEC(edge, heap) *cur_chain = 0;
519 VEC(edge, heap) *def_edges = 0;
520 bool has_valid_pred = false;
521 basic_block phi_bb, cd_root = 0;
522 struct pointer_set_t *visited_phis;
524 dep_chains = XCNEWVEC (VEC(edge, heap) *, MAX_NUM_CHAINS);
526 phi_bb = gimple_bb (phi);
527 /* First find the closest dominating bb to be
528 the control dependence root */
529 cd_root = find_dom (phi_bb);
533 visited_phis = pointer_set_create ();
534 collect_phi_def_edges (phi, cd_root, &def_edges, visited_phis);
535 pointer_set_destroy (visited_phis);
537 n = VEC_length (edge, def_edges);
541 for (i = 0; i < n; i++)
546 opnd_edge = VEC_index (edge, def_edges, i);
547 prev_nc = num_chains;
548 compute_control_dep_chain (cd_root, opnd_edge->src,
549 dep_chains, &num_chains,
551 /* Free individual chain */
552 VEC_free (edge, heap, cur_chain);
555 /* Now update the newly added chains with
556 the phi operand edge: */
557 if (EDGE_COUNT (opnd_edge->src->succs) > 1)
559 if (prev_nc == num_chains
560 && num_chains < MAX_NUM_CHAINS)
562 for (j = prev_nc; j < num_chains; j++)
564 VEC_safe_push (edge, heap, dep_chains[j], opnd_edge);
570 = convert_control_dep_chain_into_preds (dep_chains,
574 for (i = 0; i < num_chains; i++)
575 VEC_free (edge, heap, dep_chains[i]);
577 return has_valid_pred;
580 /* Dumps the predicates (PREDS) for USESTMT. */
583 dump_predicates (gimple usestmt, size_t num_preds,
584 VEC(use_pred_info_t, heap) **preds,
588 VEC(use_pred_info_t, heap) *one_pred_chain;
589 fprintf (dump_file, msg);
590 print_gimple_stmt (dump_file, usestmt, 0, 0);
591 fprintf (dump_file, "is guarded by :\n");
592 /* do some dumping here: */
593 for (i = 0; i < num_preds; i++)
597 one_pred_chain = preds[i];
598 np = VEC_length (use_pred_info_t, one_pred_chain);
600 for (j = 0; j < np; j++)
602 use_pred_info_t one_pred
603 = VEC_index (use_pred_info_t, one_pred_chain, j);
604 if (one_pred->invert)
605 fprintf (dump_file, " (.NOT.) ");
606 print_gimple_stmt (dump_file, one_pred->cond, 0, 0);
608 fprintf (dump_file, "(.AND.)\n");
610 if (i < num_preds - 1)
611 fprintf (dump_file, "(.OR.)\n");
615 /* Destroys the predicate set *PREDS. */
618 destroy_predicate_vecs (size_t n,
619 VEC(use_pred_info_t, heap) ** preds)
622 for (i = 0; i < n; i++)
624 for (j = 0; j < VEC_length (use_pred_info_t, preds[i]); j++)
625 free (VEC_index (use_pred_info_t, preds[i], j));
626 VEC_free (use_pred_info_t, heap, preds[i]);
632 /* Computes the 'normalized' conditional code with operand
633 swapping and condition inversion. */
635 static enum tree_code
636 get_cmp_code (enum tree_code orig_cmp_code,
637 bool swap_cond, bool invert)
639 enum tree_code tc = orig_cmp_code;
642 tc = swap_tree_comparison (orig_cmp_code);
644 tc = invert_tree_comparison (tc, false);
661 /* Returns true if VAL falls in the range defined by BOUNDARY and CMPC, i.e.
662 all values in the range satisfies (x CMPC BOUNDARY) == true. */
665 is_value_included_in (tree val, tree boundary, enum tree_code cmpc)
667 bool inverted = false;
671 /* Only handle integer constant here. */
672 if (TREE_CODE (val) != INTEGER_CST
673 || TREE_CODE (boundary) != INTEGER_CST)
676 is_unsigned = TYPE_UNSIGNED (TREE_TYPE (val));
678 if (cmpc == GE_EXPR || cmpc == GT_EXPR
681 cmpc = invert_tree_comparison (cmpc, false);
688 result = tree_int_cst_equal (val, boundary);
689 else if (cmpc == LT_EXPR)
690 result = INT_CST_LT_UNSIGNED (val, boundary);
693 gcc_assert (cmpc == LE_EXPR);
694 result = (tree_int_cst_equal (val, boundary)
695 || INT_CST_LT_UNSIGNED (val, boundary));
701 result = tree_int_cst_equal (val, boundary);
702 else if (cmpc == LT_EXPR)
703 result = INT_CST_LT (val, boundary);
706 gcc_assert (cmpc == LE_EXPR);
707 result = (tree_int_cst_equal (val, boundary)
708 || INT_CST_LT (val, boundary));
718 /* Returns true if PRED is common among all the predicate
719 chains (PREDS) (and therefore can be factored out).
720 NUM_PRED_CHAIN is the size of array PREDS. */
723 find_matching_predicate_in_rest_chains (use_pred_info_t pred,
724 VEC(use_pred_info_t, heap) **preds,
725 size_t num_pred_chains)
730 if (num_pred_chains == 1)
733 for (i = 1; i < num_pred_chains; i++)
736 VEC(use_pred_info_t, heap) *one_chain = preds[i];
737 n = VEC_length (use_pred_info_t, one_chain);
738 for (j = 0; j < n; j++)
740 use_pred_info_t pred2
741 = VEC_index (use_pred_info_t, one_chain, j);
742 /* can relax the condition comparison to not
743 use address comparison. However, the most common
744 case is that multiple control dependent paths share
745 a common path prefix, so address comparison should
748 if (pred2->cond == pred->cond
749 && pred2->invert == pred->invert)
761 /* Forward declaration. */
763 is_use_properly_guarded (gimple use_stmt,
766 unsigned uninit_opnds,
767 struct pointer_set_t *visited_phis);
769 /* A helper function that determines if the predicate set
770 of the use is not overlapping with that of the uninit paths.
771 The most common senario of guarded use is in Example 1:
784 The real world examples are usually more complicated, but similar
785 and usually result from inlining:
787 bool init_func (int * x)
806 Another possible use scenario is in the following trivial example:
818 Predicate analysis needs to compute the composite predicate:
820 1) 'x' use predicate: (n > 0) .AND. (m < 2)
821 2) 'x' default value (non-def) predicate: .NOT. (n > 0)
822 (the predicate chain for phi operand defs can be computed
823 starting from a bb that is control equivalent to the phi's
824 bb and is dominating the operand def.)
826 and check overlapping:
827 (n > 0) .AND. (m < 2) .AND. (.NOT. (n > 0))
830 This implementation provides framework that can handle
831 scenarios. (Note that many simple cases are handled properly
832 without the predicate analysis -- this is due to jump threading
833 transformation which eliminates the merge point thus makes
834 path sensitive analysis unnecessary.)
836 NUM_PREDS is the number of predicate chains, PREDS is
837 the array of chains, PHI is the phi node whose incoming (undefined)
838 paths need to be pruned, and UNINIT_OPNDS is the bitmap holding
839 uninit operand positions. VISITED_PHIS is the pointer set of phi
840 stmts being checked. */
/* NOTE(review): this listing is truncated -- the return-type line,
   several local declarations, braces and statements are missing.
   The annotations below describe only what the visible lines show;
   do not treat elided control flow as reconstructed.  */
844 use_pred_not_overlap_with_undef_path_pred (
846 VEC(use_pred_info_t, heap) **preds,
847 gimple phi, unsigned uninit_opnds,
848 struct pointer_set_t *visited_phis)
852 tree boundary_cst = 0;
853 enum tree_code cmp_code;
854 bool swap_cond = false;
856 VEC(use_pred_info_t, heap) *the_pred_chain;
858 gcc_assert (num_preds > 0);
859 /* Find within the common prefix of multiple predicate chains
860 a predicate that is a comparison of a flag variable against
/* Scan the first chain for a "flag CMP constant" predicate that is
   shared by the remaining chains.  */
862 the_pred_chain = preds[0];
863 n = VEC_length (use_pred_info_t, the_pred_chain);
864 for (i = 0; i < n; i++)
867 tree cond_lhs, cond_rhs, flag = 0;
869 use_pred_info_t the_pred
870 = VEC_index (use_pred_info_t, the_pred_chain, i);
872 cond = the_pred->cond;
873 invert = the_pred->invert;
874 cond_lhs = gimple_cond_lhs (cond);
875 cond_rhs = gimple_cond_rhs (cond);
876 cmp_code = gimple_cond_code (cond);
/* Accept either "SSA_NAME CMP const" or the swapped form
   "const CMP SSA_NAME" (the latter sets swap_cond below,
   presumably in an elided line -- confirm).  */
878 if (cond_lhs != NULL_TREE && TREE_CODE (cond_lhs) == SSA_NAME
879 && cond_rhs != NULL_TREE && is_gimple_constant (cond_rhs))
881 boundary_cst = cond_rhs;
884 else if (cond_rhs != NULL_TREE && TREE_CODE (cond_rhs) == SSA_NAME
885 && cond_lhs != NULL_TREE && is_gimple_constant (cond_lhs))
887 boundary_cst = cond_lhs;
895 flag_def = SSA_NAME_DEF_STMT (flag);
/* The flag must be defined by a phi in PHI's own block, and the
   predicate must be present in every other chain.  */
900 if ((gimple_code (flag_def) == GIMPLE_PHI)
901 && (gimple_bb (flag_def) == gimple_bb (phi))
902 && find_matching_predicate_in_rest_chains (
903 the_pred, preds, num_preds))
912 /* Now check all the uninit incoming edge has a constant flag value
913 that is in conflict with the use guard/predicate. */
914 cmp_code = get_cmp_code (cmp_code, swap_cond, invert);
916 if (cmp_code == ERROR_MARK)
/* NOTE(review): `sizeof (unsigned)' iterates only 4 bit positions,
   not the 32 the mask can hold -- looks like a latent limitation;
   confirm intent before changing.  */
919 for (i = 0; i < sizeof (unsigned); i++)
923 if (!MASK_TEST_BIT (uninit_opnds, i))
926 flag_arg = gimple_phi_arg_def (flag_def, i);
927 if (!is_gimple_constant (flag_arg))
930 /* Now check if the constant is in the guarded range. */
931 if (is_value_included_in (flag_arg, boundary_cst, cmp_code))
936 /* Now that we know that this undefined edge is not
937 pruned. If the operand is defined by another phi,
938 we can further prune the incoming edges of that
939 phi by checking the predicates of this operands. */
941 opnd = gimple_phi_arg_def (phi, i);
942 opnd_def = SSA_NAME_DEF_STMT (opnd);
943 if (gimple_code (opnd_def) == GIMPLE_PHI)
946 unsigned uninit_opnds2
947 = compute_uninit_opnds_pos (opnd_def);
948 gcc_assert (!MASK_EMPTY (uninit_opnds2));
949 opnd_edge = gimple_phi_arg_edge (phi, i);
/* Recurse through the forward-declared checker on the nested phi.  */
950 if (!is_use_properly_guarded (phi,
965 /* Returns true if TC is AND or OR */
968 is_and_or_or (enum tree_code tc, tree typ)
970 return (tc == TRUTH_AND_EXPR
971 || tc == TRUTH_OR_EXPR
972 || tc == BIT_IOR_EXPR
973 || (tc == BIT_AND_EXPR
974 && (typ == 0 || TREE_CODE (typ) == BOOLEAN_TYPE)));
977 typedef struct norm_cond
979 VEC(gimple, heap) *conds;
980 enum tree_code cond_code;
985 /* Normalizes gimple condition COND. The normalization follows
986 UD chains to form larger condition expression trees. NORM_COND
987 holds the normalized result. COND_CODE is the logical opcode
988 (AND or OR) of the normalized tree. */
991 normalize_cond_1 (gimple cond,
992 norm_cond_t norm_cond,
993 enum tree_code cond_code)
996 enum tree_code cur_cond_code;
999 gc = gimple_code (cond);
1000 if (gc != GIMPLE_ASSIGN)
1002 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1006 cur_cond_code = gimple_assign_rhs_code (cond);
1007 rhs1 = gimple_assign_rhs1 (cond);
1008 rhs2 = gimple_assign_rhs2 (cond);
1009 if (cur_cond_code == NE_EXPR)
1011 if (integer_zerop (rhs2)
1012 && (TREE_CODE (rhs1) == SSA_NAME))
1014 SSA_NAME_DEF_STMT (rhs1),
1015 norm_cond, cond_code);
1016 else if (integer_zerop (rhs1)
1017 && (TREE_CODE (rhs2) == SSA_NAME))
1019 SSA_NAME_DEF_STMT (rhs2),
1020 norm_cond, cond_code);
1022 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1027 if (is_and_or_or (cur_cond_code, TREE_TYPE (rhs1))
1028 && (cond_code == cur_cond_code || cond_code == ERROR_MARK)
1029 && (TREE_CODE (rhs1) == SSA_NAME && TREE_CODE (rhs2) == SSA_NAME))
1031 normalize_cond_1 (SSA_NAME_DEF_STMT (rhs1),
1032 norm_cond, cur_cond_code);
1033 normalize_cond_1 (SSA_NAME_DEF_STMT (rhs2),
1034 norm_cond, cur_cond_code);
1035 norm_cond->cond_code = cur_cond_code;
1038 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1041 /* See normalize_cond_1 for details. INVERT is a flag to indicate
1042 if COND needs to be inverted or not. */
1045 normalize_cond (gimple cond, norm_cond_t norm_cond, bool invert)
1047 enum tree_code cond_code;
1049 norm_cond->cond_code = ERROR_MARK;
1050 norm_cond->invert = false;
1051 norm_cond->conds = NULL;
1052 gcc_assert (gimple_code (cond) == GIMPLE_COND);
1053 cond_code = gimple_cond_code (cond);
1055 cond_code = invert_tree_comparison (cond_code, false);
1057 if (cond_code == NE_EXPR)
1059 if (integer_zerop (gimple_cond_rhs (cond))
1060 && (TREE_CODE (gimple_cond_lhs (cond)) == SSA_NAME))
1062 SSA_NAME_DEF_STMT (gimple_cond_lhs (cond)),
1063 norm_cond, ERROR_MARK);
1064 else if (integer_zerop (gimple_cond_lhs (cond))
1065 && (TREE_CODE (gimple_cond_rhs (cond)) == SSA_NAME))
1067 SSA_NAME_DEF_STMT (gimple_cond_rhs (cond)),
1068 norm_cond, ERROR_MARK);
1071 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1072 norm_cond->invert = invert;
1077 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1078 norm_cond->invert = invert;
1081 gcc_assert (VEC_length (gimple, norm_cond->conds) == 1
1082 || is_and_or_or (norm_cond->cond_code, NULL));
1085 /* Returns true if the domain for condition COND1 is a subset of
1086 COND2. REVERSE is a flag. when it is true the function checks
1087 if COND1 is a superset of COND2. INVERT1 and INVERT2 are flags
1088 to indicate if COND1 and COND2 need to be inverted or not. */
/* NOTE(review): this listing is truncated -- the return-type line,
   braces, several early-returns and the swap/invert handling around
   REVERSE are missing.  Annotations describe the visible lines only.  */
1091 is_gcond_subset_of (gimple cond1, bool invert1,
1092 gimple cond2, bool invert2,
1095 enum gimple_code gc1, gc2;
1096 enum tree_code cond1_code, cond2_code;
1098 tree cond1_lhs, cond1_rhs, cond2_lhs, cond2_rhs;
1100 /* Take the short cut. */
1111 gc1 = gimple_code (cond1);
1112 gc2 = gimple_code (cond2);
/* Only comparisons expressed as assignments or gimple conds can be
   reasoned about; anything else falls back to identity.  */
1114 if ((gc1 != GIMPLE_ASSIGN && gc1 != GIMPLE_COND)
1115 || (gc2 != GIMPLE_ASSIGN && gc2 != GIMPLE_COND))
1116 return cond1 == cond2;
1118 cond1_code = ((gc1 == GIMPLE_ASSIGN)
1119 ? gimple_assign_rhs_code (cond1)
1120 : gimple_cond_code (cond1));
1122 cond2_code = ((gc2 == GIMPLE_ASSIGN)
1123 ? gimple_assign_rhs_code (cond2)
1124 : gimple_cond_code (cond2));
1126 if (TREE_CODE_CLASS (cond1_code) != tcc_comparison
1127 || TREE_CODE_CLASS (cond2_code) != tcc_comparison)
/* Inversion of the comparison codes -- the guarding `if (invertN)'
   lines appear to be elided here.  */
1131 cond1_code = invert_tree_comparison (cond1_code, false);
1133 cond2_code = invert_tree_comparison (cond2_code, false);
1135 cond1_lhs = ((gc1 == GIMPLE_ASSIGN)
1136 ? gimple_assign_rhs1 (cond1)
1137 : gimple_cond_lhs (cond1));
1138 cond1_rhs = ((gc1 == GIMPLE_ASSIGN)
1139 ? gimple_assign_rhs2 (cond1)
1140 : gimple_cond_rhs (cond1));
1141 cond2_lhs = ((gc2 == GIMPLE_ASSIGN)
1142 ? gimple_assign_rhs1 (cond2)
1143 : gimple_cond_lhs (cond2));
1144 cond2_rhs = ((gc2 == GIMPLE_ASSIGN)
1145 ? gimple_assign_rhs2 (cond2)
1146 : gimple_cond_rhs (cond2));
1148 /* Assuming const operands have been swapped to the
1149 rhs at this point of the analysis. */
/* Different lhs variables: no containment can be concluded.  */
1151 if (cond1_lhs != cond2_lhs)
/* Non-integer-constant rhs: only identical rhs trees compare.  */
1154 if (!is_gimple_constant (cond1_rhs)
1155 || TREE_CODE (cond1_rhs) != INTEGER_CST)
1156 return (cond1_rhs == cond2_rhs);
1158 if (!is_gimple_constant (cond2_rhs)
1159 || TREE_CODE (cond2_rhs) != INTEGER_CST)
1160 return (cond1_rhs == cond2_rhs);
/* EQ: a point domain, included iff the point satisfies COND2.  */
1162 if (cond1_code == EQ_EXPR)
1163 return is_value_included_in (cond1_rhs,
1164 cond2_rhs, cond2_code);
1165 if (cond1_code == NE_EXPR || cond2_code == EQ_EXPR)
1166 return ((cond2_code == cond1_code)
1167 && tree_int_cst_equal (cond1_rhs, cond2_rhs));
/* Opposite-direction ranges can never be subsets.  */
1169 if (((cond1_code == GE_EXPR || cond1_code == GT_EXPR)
1170 && (cond2_code == LE_EXPR || cond2_code == LT_EXPR))
1171 || ((cond1_code == LE_EXPR || cond1_code == LT_EXPR)
1172 && (cond2_code == GE_EXPR || cond2_code == GT_EXPR)))
1175 if (cond1_code != GE_EXPR && cond1_code != GT_EXPR
1176 && cond1_code != LE_EXPR && cond1_code != LT_EXPR)
/* Canonicalize strict comparisons to non-strict by adjusting the
   boundary constant by one.  */
1179 if (cond1_code == GT_EXPR)
1181 cond1_code = GE_EXPR;
1182 cond1_rhs = fold_binary (PLUS_EXPR, TREE_TYPE (cond1_rhs),
1184 fold_convert (TREE_TYPE (cond1_rhs),
1187 else if (cond1_code == LT_EXPR)
1189 cond1_code = LE_EXPR;
1190 cond1_rhs = fold_binary (MINUS_EXPR, TREE_TYPE (cond1_rhs),
1192 fold_convert (TREE_TYPE (cond1_rhs),
1199 gcc_assert (cond1_code == GE_EXPR || cond1_code == LE_EXPR);
/* Same-direction range: check the boundary value inclusion.  */
1201 if (cond2_code == GE_EXPR || cond2_code == GT_EXPR ||
1202 cond2_code == LE_EXPR || cond2_code == LT_EXPR)
1203 return is_value_included_in (cond1_rhs,
1204 cond2_rhs, cond2_code);
1205 else if (cond2_code == NE_EXPR)
1207 (is_value_included_in (cond1_rhs,
1208 cond2_rhs, cond2_code)
1209 && !is_value_included_in (cond2_rhs,
1210 cond1_rhs, cond1_code));
1214 /* Returns true if the domain of the condition expression
1215 in COND is a subset of any of the sub-conditions
1216 of the normalized condtion NORM_COND. INVERT is a flag
1217 to indicate of the COND needs to be inverted.
1218 REVERSE is a flag. When it is true, the check is reversed --
1219 it returns true if COND is a superset of any of the subconditions
1223 is_subset_of_any (gimple cond, bool invert,
1224 norm_cond_t norm_cond, bool reverse)
1227 size_t len = VEC_length (gimple, norm_cond->conds);
1229 for (i = 0; i < len; i++)
1231 if (is_gcond_subset_of (cond, invert,
1232 VEC_index (gimple, norm_cond->conds, i),
1239 /* NORM_COND1 and NORM_COND2 are normalized logical/BIT OR
1240 expressions (formed by following UD chains not control
1241 dependence chains). The function returns true of domain
1242 of and expression NORM_COND1 is a subset of NORM_COND2's.
1243 The implementation is conservative, and it returns false if
1244 it the inclusion relationship may not hold. */
1247 is_or_set_subset_of (norm_cond_t norm_cond1,
1248 norm_cond_t norm_cond2)
1251 size_t len = VEC_length (gimple, norm_cond1->conds);
1253 for (i = 0; i < len; i++)
1255 if (!is_subset_of_any (VEC_index (gimple, norm_cond1->conds, i),
1256 false, norm_cond2, false))
1262 /* NORM_COND1 and NORM_COND2 are normalized logical AND
1263 expressions (formed by following UD chains not control
1264 dependence chains). The function returns true of domain
1265 of and expression NORM_COND1 is a subset of NORM_COND2's. */
1268 is_and_set_subset_of (norm_cond_t norm_cond1,
1269 norm_cond_t norm_cond2)
1272 size_t len = VEC_length (gimple, norm_cond2->conds);
1274 for (i = 0; i < len; i++)
1276 if (!is_subset_of_any (VEC_index (gimple, norm_cond2->conds, i),
1277 false, norm_cond1, true))
1283 /* Returns true of the domain if NORM_COND1 is a subset
1284 of that of NORM_COND2. Returns false if it can not be
1288 is_norm_cond_subset_of (norm_cond_t norm_cond1,
1289 norm_cond_t norm_cond2)
1292 enum tree_code code1, code2;
1294 code1 = norm_cond1->cond_code;
1295 code2 = norm_cond2->cond_code;
1297 if (code1 == TRUTH_AND_EXPR || code1 == BIT_AND_EXPR)
1299 /* Both conditions are AND expressions. */
1300 if (code2 == TRUTH_AND_EXPR || code2 == BIT_AND_EXPR)
1301 return is_and_set_subset_of (norm_cond1, norm_cond2);
1302 /* NORM_COND1 is an AND expression, and NORM_COND2 is an OR
1303 expression. In this case, returns true if any subexpression
1304 of NORM_COND1 is a subset of any subexpression of NORM_COND2. */
1305 else if (code2 == TRUTH_OR_EXPR || code2 == BIT_IOR_EXPR)
1308 len1 = VEC_length (gimple, norm_cond1->conds);
1309 for (i = 0; i < len1; i++)
1311 gimple cond1 = VEC_index (gimple, norm_cond1->conds, i);
1312 if (is_subset_of_any (cond1, false, norm_cond2, false))
1319 gcc_assert (code2 == ERROR_MARK);
1320 gcc_assert (VEC_length (gimple, norm_cond2->conds) == 1);
1321 return is_subset_of_any (VEC_index (gimple, norm_cond2->conds, 0),
1322 norm_cond2->invert, norm_cond1, true);
1325 /* NORM_COND1 is an OR expression */
1326 else if (code1 == TRUTH_OR_EXPR || code1 == BIT_IOR_EXPR)
1331 return is_or_set_subset_of (norm_cond1, norm_cond2);
1335 gcc_assert (code1 == ERROR_MARK);
1336 gcc_assert (VEC_length (gimple, norm_cond1->conds) == 1);
1337 /* Conservatively returns false if NORM_COND1 is non-decomposible
1338 and NORM_COND2 is an AND expression. */
1339 if (code2 == TRUTH_AND_EXPR || code2 == BIT_AND_EXPR)
1342 if (code2 == TRUTH_OR_EXPR || code2 == BIT_IOR_EXPR)
1343 return is_subset_of_any (VEC_index (gimple, norm_cond1->conds, 0),
1344 norm_cond1->invert, norm_cond2, false);
1346 gcc_assert (code2 == ERROR_MARK);
1347 gcc_assert (VEC_length (gimple, norm_cond2->conds) == 1);
1348 return is_gcond_subset_of (VEC_index (gimple, norm_cond1->conds, 0),
1350 VEC_index (gimple, norm_cond2->conds, 0),
1351 norm_cond2->invert, false);
1355 /* Returns true of the domain of single predicate expression
1356 EXPR1 is a subset of that of EXPR2. Returns false if it
1357 can not be proved. */
/* NOTE(review): this extract is lossy -- the gaps in the embedded
   numbering (1358-1359, 1362, 1367, 1372-1373, 1375, 1377, 1382-1383,
   1388, 1390-1391, 1394-1396) mark elided lines, including the
   "static bool" return-type line and the function's braces.  Confirm
   against the full source before editing.  */
1360 is_pred_expr_subset_of (use_pred_info_t expr1,
1361 use_pred_info_t expr2)
/* The gimple conditions guarding the two predicates.  */
1363 gimple cond1, cond2;
1364 enum tree_code code1, code2;
/* Normalized forms of the two conditions (conds vectors are
   heap-allocated and freed at the end of this function).  */
1365 struct norm_cond norm_cond1, norm_cond2;
1366 bool is_subset = false;
1368 cond1 = expr1->cond;
1369 cond2 = expr2->cond;
1370 code1 = gimple_cond_code (cond1);
1371 code2 = gimple_cond_code (cond2);
/* NOTE(review): the guards for these two inversions (presumably
   "if (expr1->invert)" / "if (expr2->invert)" on the elided lines
   1372-1373 and 1375) are not visible in this extract -- verify.  */
1374 code1 = invert_tree_comparison (code1, false);
1376 code2 = invert_tree_comparison (code2, false);
1378 /* Fast path -- match exactly */
/* Identical lhs, rhs and (inversion-adjusted) comparison code means
   the domains are equal, hence trivially subsets of each other.
   (The "return true" body at elided lines 1382-1383 is not shown.)  */
1379 if ((gimple_cond_lhs (cond1) == gimple_cond_lhs (cond2))
1380 && (gimple_cond_rhs (cond1) == gimple_cond_rhs (cond2))
1381 && (code1 == code2))
1384 /* Normalize conditions. To keep NE_EXPR, do not invert
1385 with both need inversion. */
1386 normalize_cond (cond1, &norm_cond1, (expr1->invert));
1387 normalize_cond (cond2, &norm_cond2, (expr2->invert));
/* Delegate the real subset check to the normalized-condition
   comparison.  */
1389 is_subset = is_norm_cond_subset_of (&norm_cond1, &norm_cond2);
/* Release the condition vectors built by normalize_cond.  */
1392 VEC_free (gimple, heap, norm_cond1.conds);
1393 VEC_free (gimple, heap, norm_cond2.conds);
1397 /* Returns true if the domain of PRED1 is a subset
1398 of that of PRED2. Returns false if it can not be proved so. */
/* NOTE(review): elided lines (1399-1400, 1403, 1405, 1408, 1410-1411,
   1415, 1419-1429) hide the return type, braces, and the loop
   epilogues (the found/not-found bookkeeping and final return).  */
1401 is_pred_chain_subset_of (VEC(use_pred_info_t, heap) *pred1,
1402 VEC(use_pred_info_t, heap) *pred2)
1404 size_t np1, np2, i1, i2;
1406 np1 = VEC_length (use_pred_info_t, pred1);
1407 np2 = VEC_length (use_pred_info_t, pred2);
/* For each predicate expression in PRED2, scan PRED1 for an
   expression whose domain is a subset of it; is_pred_expr_subset_of
   does the per-expression comparison.  */
1409 for (i2 = 0; i2 < np2; i2++)
1412 use_pred_info_t info2
1413 = VEC_index (use_pred_info_t, pred2, i2)
1414 for (i1 = 0; i1 < np1; i1++)
1416 use_pred_info_t info1
1417 = VEC_index (use_pred_info_t, pred1, i1);
1418 if (is_pred_expr_subset_of (info1, info2))
1430 /* Returns true if the domain defined by
1431 one pred chain ONE_PRED is a subset of the domain
1432 of *PREDS. It returns false if ONE_PRED's domain is
1433 not a subset of any of the sub-domains of PREDS (
1434 corresponding to each individual chains in it), even
1435 though it may be still be a subset of whole domain
1436 of PREDS which is the union (ORed) of all its subdomains.
1437 In other words, the result is conservative. */
/* NOTE(review): the third parameter (elided line 1442, presumably
   "size_t n" -- the number of chains in PREDS), the return type,
   braces, and the returns are not visible in this extract.  */
1440 is_included_in (VEC(use_pred_info_t, heap) *one_pred,
1441 VEC(use_pred_info_t, heap) **preds,
/* ONE_PRED is included as soon as it is a subset of any single
   chain of PREDS.  */
1446 for (i = 0; i < n; i++)
1448 if (is_pred_chain_subset_of (one_pred, preds[i]))
1455 /* compares two predicate sets PREDS1 and PREDS2 and returns
1456 true if the domain defined by PREDS1 is a superset
1457 of PREDS2's domain. N1 and N2 are array sizes of PREDS1 and
1458 PREDS2 respectively. The implementation chooses not to build
1459 generic trees (and relying on the folding capability of the
1460 compiler), but instead performs brute force comparison of
1461 individual predicate chains (won't be a compile time problem
1462 as the chains are pretty short). When the function returns
1463 false, it does not necessarily mean *PREDS1 is not a superset
1464 of *PREDS2, but mean it may not be so since the analysis can
1465 not prove it. In such cases, false warnings may still be
/* NOTE(review): lines 1466-1468 (end of comment and return type),
   the N1/N2 parameter lines, braces, and the returns are elided
   from this extract.  */
1469 is_superset_of (VEC(use_pred_info_t, heap) **preds1,
1471 VEC(use_pred_info_t, heap) **preds2,
1475 VEC(use_pred_info_t, heap) *one_pred_chain;
/* PREDS1 is a superset iff every chain of PREDS2 is included in
   PREDS1's domain; bail out (elided) on the first chain that is
   not.  */
1477 for (i = 0; i < n2; i++)
1479 one_pred_chain = preds2[i];
1480 if (!is_included_in (one_pred_chain, preds1, n1))
1487 /* Computes the predicates that guard the use and checks
1488 if the incoming paths that have empty (or possibly
1489 empty) defintion can be pruned/filtered. The function returns
1490 true if it can be determined that the use of PHI's def in
1491 USE_STMT is guarded with a predicate set not overlapping with
1492 predicate sets of all runtime paths that do not have a definition.
1493 Returns false if it is not or it can not be determined. USE_BB is
1494 the bb of the use (for phi operand use, the bb is not the bb of
1495 the phi stmt, but the src bb of the operand edge). UNINIT_OPNDS
1496 is a bit vector. If an operand of PHI is uninitialized, the
1497 correponding bit in the vector is 1. VISIED_PHIS is a pointer
1498 set of phis being visted. */
/* NOTE(review): the return-type line, the USE_BB and PHI parameter
   lines (elided 1502-1503), braces, and several statement lines are
   missing from this extract (see numbering gaps).  */
1501 is_use_properly_guarded (gimple use_stmt,
1504 unsigned uninit_opnds,
1505 struct pointer_set_t *visited_phis)
/* Predicates guarding the use, and predicates guarding the
   defining (initialized) incoming paths of PHI.  */
1508 VEC(use_pred_info_t, heap) **preds = 0;
1509 VEC(use_pred_info_t, heap) **def_preds = 0;
1510 size_t num_preds = 0, num_def_preds = 0;
1511 bool has_valid_preds = false;
1512 bool is_properly_guarded = false;
/* Already visiting this phi -- avoid infinite recursion through
   phi cycles; the guard's (elided) body presumably returns.  */
1514 if (pointer_set_insert (visited_phis, phi))
1517 phi_bb = gimple_bb (phi);
/* If the use post-dominates the phi (ignoring loop exits), no
   predicate can separate it from the undefined paths.  */
1519 if (is_non_loop_exit_postdominating (use_bb, phi_bb))
/* Collect the predicate chains that guard USE_STMT.  */
1522 has_valid_preds = find_predicates (&preds, &num_preds,
1525 if (!has_valid_preds)
1527 destroy_predicate_vecs (num_preds, preds);
1532 dump_predicates (use_stmt, num_preds, preds,
/* Collect the predicate chains guarding the initialized operand
   definitions of PHI.  */
1535 has_valid_preds = find_def_preds (&def_preds,
1536 &num_def_preds, phi);
1538 if (has_valid_preds)
1541 dump_predicates (phi, num_def_preds, def_preds,
1542 "Operand defs of phi ");
/* The use is safe if the defining paths' predicate domain covers
   the use's predicate domain.  */
1543 is_properly_guarded =
1544 is_superset_of (def_preds, num_def_preds,
1548 /* further prune the dead incoming phi edges. */
1549 if (!is_properly_guarded)
1551 = use_pred_not_overlap_with_undef_path_pred (
1552 num_preds, preds, phi, uninit_opnds, visited_phis);
/* Free both predicate vectors before returning.  */
1554 destroy_predicate_vecs (num_preds, preds);
1555 destroy_predicate_vecs (num_def_preds, def_preds);
1556 return is_properly_guarded;
1559 /* Searches through all uses of a potentially
1560 uninitialized variable defined by PHI and returns a use
1561 statement if the use is not properly guarded. It returns
1562 NULL if all uses are guarded. UNINIT_OPNDS is a bitvector
1563 holding the position(s) of uninit PHI operands. WORKLIST
1564 is the vector of candidate phis that may be updated by this
1565 function. ADDED_TO_WORKLIST is the pointer set tracking
1566 if the new phi is already in the worklist. */
/* NOTE(review): the "static gimple" return-type line, braces, and
   several statements (e.g. the use_bb reassignment for phi-operand
   uses around elided lines 1599-1606) are missing from this
   extract.  */
1569 find_uninit_use (gimple phi, unsigned uninit_opnds,
1570 VEC(gimple, heap) **worklist,
1571 struct pointer_set_t *added_to_worklist)
1574 use_operand_p use_p;
1576 imm_use_iterator iter;
1578 phi_result = gimple_phi_result (phi);
/* Walk every immediate use of the phi result.  */
1580 FOR_EACH_IMM_USE_FAST (use_p, iter, phi_result)
1582 struct pointer_set_t *visited_phis;
1585 use_stmt = use_p->loc.stmt;
/* Fresh visited set per use: cycle detection is per-query.  */
1587 visited_phis = pointer_set_create ();
1589 use_bb = gimple_bb (use_stmt);
/* For a phi use, the relevant bb is the source bb of the matching
   incoming edge, not the phi's own bb (see header comment of
   is_use_properly_guarded).  */
1590 if (gimple_code (use_stmt) == GIMPLE_PHI)
1593 n = gimple_phi_num_args (use_stmt);
1595 /* Find the matching phi argument of the use. */
1596 for (i = 0; i < n; ++i)
1598 if (gimple_phi_arg_def_ptr (use_stmt, i) == use_p->use)
1600 edge e = gimple_phi_arg_edge (use_stmt, i);
1607 if (is_use_properly_guarded (use_stmt,
1613 pointer_set_destroy (visited_phis);
1616 pointer_set_destroy (visited_phis);
1618 /* Found one real use, return. */
1619 if (gimple_code (use_stmt) != GIMPLE_PHI)
1622 /* Found a phi use that is not guarded,
1623 add the phi to the worklist. */
/* Only queue each phi once; the pointer set doubles as the
   "already queued" test.  */
1624 if (!pointer_set_insert (added_to_worklist,
1627 VEC_safe_push (gimple, heap, *worklist, use_stmt);
/* The phi's own result is now possibly undefined too, so record
   it for downstream queries.  */
1628 pointer_set_insert (possibly_undefined_names,
1636 /* Look for inputs to PHI that are SSA_NAMEs that have empty definitions
1637 and gives warning if there exists a runtime path from the entry to a
1638 use of the PHI def that does not contain a definition. In other words,
1639 the warning is on the real use. The more dead paths that can be pruned
1640 by the compiler, the fewer false positives the warning is. WORKLIST
1641 is a vector of candidate phis to be examined. ADDED_TO_WORKLIST is
1642 a pointer set tracking if the new phi is added to the worklist or not. */
/* NOTE(review): the "static void" line, braces, the uninit_op
   declaration, and the early returns' bodies are elided in this
   extract (numbering gaps 1643-1644, 1647, 1650-1651, ...).  */
1645 warn_uninitialized_phi (gimple phi, VEC(gimple, heap) **worklist,
1646 struct pointer_set_t *added_to_worklist)
1648 unsigned uninit_opnds;
1649 gimple uninit_use_stmt = 0;
1652 /* Don't look at memory tags. */
1653 if (!is_gimple_reg (gimple_phi_result (phi)))
/* Bit vector of operand positions that are (possibly) uninit.  */
1656 uninit_opnds = compute_uninit_opnds_pos (phi);
/* No uninitialized operands -- nothing to warn about.  */
1658 if (MASK_EMPTY (uninit_opnds))
1661 /* Now check if we have any use of the value without proper guard. */
1662 uninit_use_stmt = find_uninit_use (phi, uninit_opnds,
1663 worklist, added_to_worklist);
1665 /* All uses are properly guarded. */
1666 if (!uninit_use_stmt)
/* Report the first uninitialized operand as the warning subject.  */
1669 uninit_op = gimple_phi_arg_def (phi, MASK_FIRST_SET_BIT (uninit_opnds));
1670 warn_uninit (uninit_op,
1671 "%qD may be used uninitialized in this function",
1677 /* Entry point to the late uninitialized warning pass. */
/* NOTE(review): the "static unsigned int" line, braces, the
   FOR_EACH_BB / basic_block declarations, the continue statements,
   and the final "return 0;" are elided in this extract.  */
1680 execute_late_warn_uninitialized (void)
1683 gimple_stmt_iterator gsi;
/* Worklist of candidate phis; grows as find_uninit_use discovers
   unguarded phi uses.  */
1684 VEC(gimple, heap) *worklist = 0;
1685 struct pointer_set_t *added_to_worklist;
/* Predicate reconstruction needs both dominator directions.  */
1687 calculate_dominance_info (CDI_DOMINATORS);
1688 calculate_dominance_info (CDI_POST_DOMINATORS);
1689 /* Re-do the plain uninitialized variable check, as optimization may have
1690 straightened control flow. Do this first so that we don't accidentally
1691 get a "may be" warning when we'd have seen an "is" warning later. */
1692 warn_uninitialized_vars (/*warn_possibly_uninitialized=*/1);
1694 timevar_push (TV_TREE_UNINIT);
1696 possibly_undefined_names = pointer_set_create ();
1697 added_to_worklist = pointer_set_create ();
1699 /* Initialize worklist */
/* Seed with every register phi that has at least one undefined
   SSA-name operand.  */
1701 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1703 gimple phi = gsi_stmt (gsi);
1706 n = gimple_phi_num_args (phi);
1708 /* Don't look at memory tags. */
1709 if (!is_gimple_reg (gimple_phi_result (phi)))
1712 for (i = 0; i < n; ++i)
1714 tree op = gimple_phi_arg_def (phi, i);
1715 if (TREE_CODE (op) == SSA_NAME
1716 && ssa_undefined_value_p (op))
1718 VEC_safe_push (gimple, heap, worklist, phi);
1719 pointer_set_insert (added_to_worklist, phi);
/* Drain the worklist; warn_uninitialized_phi may push new phis.  */
1725 while (VEC_length (gimple, worklist) != 0)
1728 cur_phi = VEC_pop (gimple, worklist);
1729 warn_uninitialized_phi (cur_phi, &worklist, added_to_worklist);
/* Tear down all pass-local state.  */
1732 VEC_free (gimple, heap, worklist);
1733 pointer_set_destroy (added_to_worklist);
1734 pointer_set_destroy (possibly_undefined_names);
1735 possibly_undefined_names = NULL;
1736 free_dominance_info (CDI_POST_DOMINATORS);
1737 timevar_pop (TV_TREE_UNINIT);
/* Gate function for the pass: run only when -Wuninitialized (or an
   option implying it) set the warn_uninitialized flag.
   NOTE(review): the "static bool" line and braces are elided in this
   extract.  */
1742 gate_warn_uninitialized (void)
1744 return warn_uninitialized != 0;
1747 struct gimple_opt_pass pass_late_warn_uninitialized =
1751 "uninit", /* name */
1752 gate_warn_uninitialized, /* gate */
1753 execute_late_warn_uninitialized, /* execute */
1756 0, /* static_pass_number */
1757 TV_NONE, /* tv_id */
1758 PROP_ssa, /* properties_required */
1759 0, /* properties_provided */
1760 0, /* properties_destroyed */
1761 0, /* todo_flags_start */
1762 0 /* todo_flags_finish */