gcc/tree-ssa-uninit.c (official-gcc.git)
1 /* Predicate aware uninitialized variable warning.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2010 Free Software
3 Foundation, Inc.
4 Contributed by Xinliang David Li <davidxl@google.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "tm_p.h"
29 #include "basic-block.h"
30 #include "function.h"
31 #include "gimple-pretty-print.h"
32 #include "bitmap.h"
33 #include "pointer-set.h"
34 #include "tree-flow.h"
35 #include "gimple.h"
36 #include "tree-inline.h"
37 #include "hashtab.h"
38 #include "tree-pass.h"
39 #include "diagnostic-core.h"
41 /* This implements the pass that does predicate aware warning on uses of
42 possibly uninitialized variables. The pass first collects the set of
43 possibly uninitialized SSA names. For each such name, it walks through
44 all its immediate uses. For each immediate use, it rebuilds the condition
45 expression (the predicate) that guards the use. The predicate is then
46 examined to see if the variable is always defined under that same condition.
47 This is done either by pruning the unrealizable paths that lead to the
48 default definitions or by checking if the predicate set that guards the
49 defining paths is a superset of the use predicate. */
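/* Illustration (not part of the original sources): a minimal example of
   the kind of code this pass reasons about.

     int f (int n)
     {
       int x;
       if (n > 0)
         x = 1;        // definition guarded by (n > 0)
       if (n > 0)
         return x;     // use guarded by (n > 0): provably initialized
       return 0;
     }

   The use predicate (n > 0) is a subset of the definition predicate
   (n > 0), so no warning is needed here.  If the second test were
   changed to, say, (n >= 0), the subset relation would no longer hold
   and a "may be used uninitialized" warning could be emitted for x.  */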
52 /* Pointer set of potentially undefined ssa names, i.e.,
53 ssa names that are defined by phi with operands that
54 are not defined or potentially undefined. */
55 static struct pointer_set_t *possibly_undefined_names = 0;
57 /* Bit mask handling macros. */
58 #define MASK_SET_BIT(mask, pos) mask |= (1 << pos)
59 #define MASK_TEST_BIT(mask, pos) (mask & (1 << pos))
60 #define MASK_EMPTY(mask) (mask == 0)
62 /* Returns the first bit position (starting from LSB)
63 in mask that is non zero. Returns -1 if the mask is empty. */
64 static int
65 get_mask_first_set_bit (unsigned mask)
67 int pos = 0;
68 if (mask == 0)
69 return -1;
71 while ((mask & (1 << pos)) == 0)
72 pos++;
74 return pos;
76 #define MASK_FIRST_SET_BIT(mask) get_mask_first_set_bit (mask)
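/* Illustrative example (assumed, not from the original sources): for a
   phi node with four arguments in which arguments 0 and 2 are possibly
   undefined, the mask computed by compute_uninit_opnds_pos below would
   be binary 0101 (decimal 5); MASK_TEST_BIT (mask, 2) is then non-zero
   and MASK_FIRST_SET_BIT (mask) evaluates to 0.  */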
79 /* Return true if T, an SSA_NAME, has an undefined value. */
81 bool
82 ssa_undefined_value_p (tree t)
84 tree var = SSA_NAME_VAR (t);
86 if (!var)
88 /* Parameters get their initial value from the function entry. */
89 else if (TREE_CODE (var) == PARM_DECL)
90 return false;
91 /* When returning by reference the return address is actually a hidden
92 parameter. */
93 else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
94 return false;
95 /* Hard register variables get their initial value from the ether. */
96 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
97 return false;
99 /* The value is undefined iff its definition statement is empty. */
100 return (gimple_nop_p (SSA_NAME_DEF_STMT (t))
101 || (possibly_undefined_names
102 && pointer_set_contains (possibly_undefined_names, t)));
105 /* Checks if the operand OPND of PHI is defined by
106 another phi with one operand defined by this PHI,
107 but with all the remaining operands defined. If so,
108 returns true so that this operand can be skipped as
109 redundant. Can be enhanced to be more general. */
111 static bool
112 can_skip_redundant_opnd (tree opnd, gimple phi)
114 gimple op_def;
115 tree phi_def;
116 int i, n;
118 phi_def = gimple_phi_result (phi);
119 op_def = SSA_NAME_DEF_STMT (opnd);
120 if (gimple_code (op_def) != GIMPLE_PHI)
121 return false;
122 n = gimple_phi_num_args (op_def);
123 for (i = 0; i < n; ++i)
125 tree op = gimple_phi_arg_def (op_def, i);
126 if (TREE_CODE (op) != SSA_NAME)
127 continue;
128 if (op != phi_def && ssa_undefined_value_p (op))
129 return false;
132 return true;
135 /* Returns a bit mask holding the positions of arguments in PHI
136 that have empty (or possibly empty) definitions. */
138 static unsigned
139 compute_uninit_opnds_pos (gimple phi)
141 size_t i, n;
142 unsigned uninit_opnds = 0;
144 n = gimple_phi_num_args (phi);
145 /* Bail out for phi with too many args. */
146 if (n > 32)
147 return 0;
149 for (i = 0; i < n; ++i)
151 tree op = gimple_phi_arg_def (phi, i);
152 if (TREE_CODE (op) == SSA_NAME
153 && ssa_undefined_value_p (op)
154 && !can_skip_redundant_opnd (op, phi))
155 MASK_SET_BIT (uninit_opnds, i);
157 return uninit_opnds;
160 /* Find the immediate postdominator PDOM of the specified
161 basic block BLOCK. */
163 static inline basic_block
164 find_pdom (basic_block block)
166 if (block == EXIT_BLOCK_PTR)
167 return EXIT_BLOCK_PTR;
168 else
170 basic_block bb
171 = get_immediate_dominator (CDI_POST_DOMINATORS, block);
172 if (! bb)
173 return EXIT_BLOCK_PTR;
174 return bb;
178 /* Find the immediate DOM of the specified
179 basic block BLOCK. */
181 static inline basic_block
182 find_dom (basic_block block)
184 if (block == ENTRY_BLOCK_PTR)
185 return ENTRY_BLOCK_PTR;
186 else
188 basic_block bb = get_immediate_dominator (CDI_DOMINATORS, block);
189 if (! bb)
190 return ENTRY_BLOCK_PTR;
191 return bb;
195 /* Returns true if BB1 is postdominating BB2 and BB1 is
196 not a loop exit bb. The loop exit bb check is simple and does
197 not cover all cases. */
199 static bool
200 is_non_loop_exit_postdominating (basic_block bb1, basic_block bb2)
202 if (!dominated_by_p (CDI_POST_DOMINATORS, bb2, bb1))
203 return false;
205 if (single_pred_p (bb1) && !single_succ_p (bb2))
206 return false;
208 return true;
211 /* Find the closest postdominator of a specified BB, which is control
212 equivalent to BB. */
214 static inline basic_block
215 find_control_equiv_block (basic_block bb)
217 basic_block pdom;
219 pdom = find_pdom (bb);
221 /* Skip the postdominating bb that is also loop exit. */
222 if (!is_non_loop_exit_postdominating (pdom, bb))
223 return NULL;
225 if (dominated_by_p (CDI_DOMINATORS, pdom, bb))
226 return pdom;
228 return NULL;
231 #define MAX_NUM_CHAINS 8
232 #define MAX_CHAIN_LEN 5
234 /* Computes the control dependence chains (paths of edges)
235 for DEP_BB up to the dominating basic block BB (the head node of a
236 chain should be dominated by it). CD_CHAINS is a pointer to a
237 dynamic array holding the result chains. CUR_CD_CHAIN is the current
238 chain being computed. *NUM_CHAINS is the total number of chains. The
239 function returns true if the information is successfully computed, and
240 false if there is no control dependence or the chains could not be computed. */
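/* Illustrative sketch (assumed, not from the original sources): given

     if (a)        // bb2
       {
         if (b)    // bb3
           foo (); // bb4
       }

   bb4 is control dependent on the true edge out of bb2 and on the true
   edge out of bb3, so computing the chains from bb2 to bb4 records the
   single chain { bb2->bb3, bb3->bb4 }.  The number and length of the
   chains are bounded by MAX_NUM_CHAINS and MAX_CHAIN_LEN.  */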
242 static bool
243 compute_control_dep_chain (basic_block bb, basic_block dep_bb,
244 VEC(edge, heap) **cd_chains,
245 size_t *num_chains,
246 VEC(edge, heap) **cur_cd_chain)
248 edge_iterator ei;
249 edge e;
250 size_t i;
251 bool found_cd_chain = false;
252 size_t cur_chain_len = 0;
254 if (EDGE_COUNT (bb->succs) < 2)
255 return false;
257 /* Could use a set instead. */
258 cur_chain_len = VEC_length (edge, *cur_cd_chain);
259 if (cur_chain_len > MAX_CHAIN_LEN)
260 return false;
262 for (i = 0; i < cur_chain_len; i++)
264 edge e = VEC_index (edge, *cur_cd_chain, i);
265 /* cycle detected. */
266 if (e->src == bb)
267 return false;
270 FOR_EACH_EDGE (e, ei, bb->succs)
272 basic_block cd_bb;
273 if (e->flags & (EDGE_FAKE | EDGE_ABNORMAL))
274 continue;
276 cd_bb = e->dest;
277 VEC_safe_push (edge, heap, *cur_cd_chain, e);
278 while (!is_non_loop_exit_postdominating (cd_bb, bb))
280 if (cd_bb == dep_bb)
282 /* Found a direct control dependence. */
283 if (*num_chains < MAX_NUM_CHAINS)
285 cd_chains[*num_chains]
286 = VEC_copy (edge, heap, *cur_cd_chain);
287 (*num_chains)++;
289 found_cd_chain = true;
290 /* check path from next edge. */
291 break;
294 /* Now check if DEP_BB is indirectly control dependent on BB. */
295 if (compute_control_dep_chain (cd_bb, dep_bb, cd_chains,
296 num_chains, cur_cd_chain))
298 found_cd_chain = true;
299 break;
302 cd_bb = find_pdom (cd_bb);
303 if (cd_bb == EXIT_BLOCK_PTR)
304 break;
306 VEC_pop (edge, *cur_cd_chain);
307 gcc_assert (VEC_length (edge, *cur_cd_chain) == cur_chain_len);
309 gcc_assert (VEC_length (edge, *cur_cd_chain) == cur_chain_len);
311 return found_cd_chain;
314 typedef struct use_pred_info
316 gimple cond;
317 bool invert;
318 } *use_pred_info_t;
320 DEF_VEC_P(use_pred_info_t);
321 DEF_VEC_ALLOC_P(use_pred_info_t, heap);
324 /* Converts the chains of control dependence edges into a set of
325 predicates. A control dependence chain is represented by a vector
326 of edges. DEP_CHAINS points to an array of dependence chains.
327 NUM_CHAINS is the size of the chain array. One edge in a dependence
328 chain is mapped to a predicate expression represented by the
329 use_pred_info_t type. One dependence chain is converted to a composite
330 predicate that is the result of ANDing the use_pred_info_t predicates
331 mapped to each edge. A composite predicate is represented by a vector
332 of use_pred_info_t. On return, *PREDS points to the resulting array of
333 composite predicates. *NUM_PREDS is the number of composite predicates. */
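/* Illustrative example (assumed, not from the original sources): if
   DEP_CHAINS holds the two chains

     chain 0: edge on which (a > 0) is true, then edge on which (b != 0) is true
     chain 1: edge on which (a > 0) is false

   the resulting composite predicates are

     preds[0] = (a > 0) .AND. (b != 0)
     preds[1] = .NOT. (a > 0)

   and the guarded block executes when preds[0] .OR. preds[1] holds
   (the same notation is used by dump_predicates below).  */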
335 static bool
336 convert_control_dep_chain_into_preds (VEC(edge, heap) **dep_chains,
337 size_t num_chains,
338 VEC(use_pred_info_t, heap) ***preds,
339 size_t *num_preds)
341 bool has_valid_pred = false;
342 size_t i, j;
343 if (num_chains == 0 || num_chains >= MAX_NUM_CHAINS)
344 return false;
346 /* Now convert the control dep chain into a set
347 of predicates. */
348 *preds = XCNEWVEC (VEC(use_pred_info_t, heap) *,
349 num_chains);
350 *num_preds = num_chains;
352 for (i = 0; i < num_chains; i++)
354 VEC(edge, heap) *one_cd_chain = dep_chains[i];
356 has_valid_pred = false;
357 for (j = 0; j < VEC_length (edge, one_cd_chain); j++)
359 gimple cond_stmt;
360 gimple_stmt_iterator gsi;
361 basic_block guard_bb;
362 use_pred_info_t one_pred;
363 edge e;
365 e = VEC_index (edge, one_cd_chain, j);
366 guard_bb = e->src;
367 gsi = gsi_last_bb (guard_bb);
368 if (gsi_end_p (gsi))
370 has_valid_pred = false;
371 break;
373 cond_stmt = gsi_stmt (gsi);
374 if (gimple_code (cond_stmt) == GIMPLE_CALL
375 && EDGE_COUNT (e->src->succs) >= 2)
377 /* Ignore EH edge. Can add assertion
378 on the other edge's flag. */
379 continue;
381 /* Skip if there is essentially one successor. */
382 if (EDGE_COUNT (e->src->succs) == 2)
384 edge e1;
385 edge_iterator ei1;
386 bool skip = false;
388 FOR_EACH_EDGE (e1, ei1, e->src->succs)
390 if (EDGE_COUNT (e1->dest->succs) == 0)
392 skip = true;
393 break;
396 if (skip)
397 continue;
399 if (gimple_code (cond_stmt) != GIMPLE_COND)
401 has_valid_pred = false;
402 break;
404 one_pred = XNEW (struct use_pred_info);
405 one_pred->cond = cond_stmt;
406 one_pred->invert = !!(e->flags & EDGE_FALSE_VALUE);
407 VEC_safe_push (use_pred_info_t, heap, (*preds)[i], one_pred);
408 has_valid_pred = true;
411 if (!has_valid_pred)
412 break;
414 return has_valid_pred;
417 /* Computes all control dependence chains for USE_BB. The control
418 dependence chains are then converted to an array of composite
419 predicates pointed to by PREDS. PHI_BB is the basic block of
420 the phi whose result is used in USE_BB. */
422 static bool
423 find_predicates (VEC(use_pred_info_t, heap) ***preds,
424 size_t *num_preds,
425 basic_block phi_bb,
426 basic_block use_bb)
428 size_t num_chains = 0, i;
429 VEC(edge, heap) **dep_chains = 0;
430 VEC(edge, heap) *cur_chain = 0;
431 bool has_valid_pred = false;
432 basic_block cd_root = 0;
434 dep_chains = XCNEWVEC (VEC(edge, heap) *, MAX_NUM_CHAINS);
436 /* First find the closest bb that is control equivalent to PHI_BB
437 that also dominates USE_BB. */
438 cd_root = phi_bb;
439 while (dominated_by_p (CDI_DOMINATORS, use_bb, cd_root))
441 basic_block ctrl_eq_bb = find_control_equiv_block (cd_root);
442 if (ctrl_eq_bb && dominated_by_p (CDI_DOMINATORS, use_bb, ctrl_eq_bb))
443 cd_root = ctrl_eq_bb;
444 else
445 break;
448 compute_control_dep_chain (cd_root, use_bb,
449 dep_chains, &num_chains,
450 &cur_chain);
452 has_valid_pred
453 = convert_control_dep_chain_into_preds (dep_chains,
454 num_chains,
455 preds,
456 num_preds);
457 /* Free individual chain */
458 VEC_free (edge, heap, cur_chain);
459 for (i = 0; i < num_chains; i++)
460 VEC_free (edge, heap, dep_chains[i]);
461 free (dep_chains);
462 return has_valid_pred;
465 /* Computes the set of incoming edges of PHI that have non empty
466 definitions of a phi chain. The collection will be done
467 recursively on operands that are defined by phis. CD_ROOT
468 is the control dependence root. *EDGES holds the result, and
469 VISITED_PHIS is a pointer set for detecting cycles. */
471 static void
472 collect_phi_def_edges (gimple phi, basic_block cd_root,
473 VEC(edge, heap) **edges,
474 struct pointer_set_t *visited_phis)
476 size_t i, n;
477 edge opnd_edge;
478 tree opnd;
480 if (pointer_set_insert (visited_phis, phi))
481 return;
483 n = gimple_phi_num_args (phi);
484 for (i = 0; i < n; i++)
486 opnd_edge = gimple_phi_arg_edge (phi, i);
487 opnd = gimple_phi_arg_def (phi, i);
489 if (TREE_CODE (opnd) != SSA_NAME)
491 if (dump_file && (dump_flags & TDF_DETAILS))
493 fprintf (dump_file, "\n[CHECK] Found def edge %d in ", (int)i);
494 print_gimple_stmt (dump_file, phi, 0, 0);
496 VEC_safe_push (edge, heap, *edges, opnd_edge);
498 else
500 gimple def = SSA_NAME_DEF_STMT (opnd);
502 if (gimple_code (def) == GIMPLE_PHI
503 && dominated_by_p (CDI_DOMINATORS,
504 gimple_bb (def), cd_root))
505 collect_phi_def_edges (def, cd_root, edges,
506 visited_phis);
507 else if (!ssa_undefined_value_p (opnd))
509 if (dump_file && (dump_flags & TDF_DETAILS))
511 fprintf (dump_file, "\n[CHECK] Found def edge %d in ", (int)i);
512 print_gimple_stmt (dump_file, phi, 0, 0);
514 VEC_safe_push (edge, heap, *edges, opnd_edge);
520 /* For each use edge of PHI, computes all control dependence chains.
521 The control dependence chains are then converted to an array of
522 composite predicates pointed to by PREDS. */
524 static bool
525 find_def_preds (VEC(use_pred_info_t, heap) ***preds,
526 size_t *num_preds, gimple phi)
528 size_t num_chains = 0, i, n;
529 VEC(edge, heap) **dep_chains = 0;
530 VEC(edge, heap) *cur_chain = 0;
531 VEC(edge, heap) *def_edges = 0;
532 bool has_valid_pred = false;
533 basic_block phi_bb, cd_root = 0;
534 struct pointer_set_t *visited_phis;
536 dep_chains = XCNEWVEC (VEC(edge, heap) *, MAX_NUM_CHAINS);
538 phi_bb = gimple_bb (phi);
539 /* First find the closest dominating bb to be
540 the control dependence root */
541 cd_root = find_dom (phi_bb);
542 if (!cd_root)
543 return false;
545 visited_phis = pointer_set_create ();
546 collect_phi_def_edges (phi, cd_root, &def_edges, visited_phis);
547 pointer_set_destroy (visited_phis);
549 n = VEC_length (edge, def_edges);
550 if (n == 0)
551 return false;
553 for (i = 0; i < n; i++)
555 size_t prev_nc, j;
556 edge opnd_edge;
558 opnd_edge = VEC_index (edge, def_edges, i);
559 prev_nc = num_chains;
560 compute_control_dep_chain (cd_root, opnd_edge->src,
561 dep_chains, &num_chains,
562 &cur_chain);
563 /* Free individual chain */
564 VEC_free (edge, heap, cur_chain);
565 cur_chain = 0;
567 /* Now update the newly added chains with
568 the phi operand edge: */
569 if (EDGE_COUNT (opnd_edge->src->succs) > 1)
571 if (prev_nc == num_chains
572 && num_chains < MAX_NUM_CHAINS)
573 num_chains++;
574 for (j = prev_nc; j < num_chains; j++)
576 VEC_safe_push (edge, heap, dep_chains[j], opnd_edge);
581 has_valid_pred
582 = convert_control_dep_chain_into_preds (dep_chains,
583 num_chains,
584 preds,
585 num_preds);
586 for (i = 0; i < num_chains; i++)
587 VEC_free (edge, heap, dep_chains[i]);
588 free (dep_chains);
589 return has_valid_pred;
592 /* Dumps the predicates (PREDS) for USESTMT. */
594 static void
595 dump_predicates (gimple usestmt, size_t num_preds,
596 VEC(use_pred_info_t, heap) **preds,
597 const char* msg)
599 size_t i, j;
600 VEC(use_pred_info_t, heap) *one_pred_chain;
601 fprintf (dump_file, msg);
602 print_gimple_stmt (dump_file, usestmt, 0, 0);
603 fprintf (dump_file, "is guarded by :\n");
604 /* do some dumping here: */
605 for (i = 0; i < num_preds; i++)
607 size_t np;
609 one_pred_chain = preds[i];
610 np = VEC_length (use_pred_info_t, one_pred_chain);
612 for (j = 0; j < np; j++)
614 use_pred_info_t one_pred
615 = VEC_index (use_pred_info_t, one_pred_chain, j);
616 if (one_pred->invert)
617 fprintf (dump_file, " (.NOT.) ");
618 print_gimple_stmt (dump_file, one_pred->cond, 0, 0);
619 if (j < np - 1)
620 fprintf (dump_file, "(.AND.)\n");
622 if (i < num_preds - 1)
623 fprintf (dump_file, "(.OR.)\n");
627 /* Destroys the predicate set *PREDS. */
629 static void
630 destroy_predicate_vecs (size_t n,
631 VEC(use_pred_info_t, heap) ** preds)
633 size_t i, j;
634 for (i = 0; i < n; i++)
636 for (j = 0; j < VEC_length (use_pred_info_t, preds[i]); j++)
637 free (VEC_index (use_pred_info_t, preds[i], j));
638 VEC_free (use_pred_info_t, heap, preds[i]);
640 free (preds);
644 /* Computes the 'normalized' conditional code with operand
645 swapping and condition inversion. */
647 static enum tree_code
648 get_cmp_code (enum tree_code orig_cmp_code,
649 bool swap_cond, bool invert)
651 enum tree_code tc = orig_cmp_code;
653 if (swap_cond)
654 tc = swap_tree_comparison (orig_cmp_code);
655 if (invert)
656 tc = invert_tree_comparison (tc, false);
658 switch (tc)
660 case LT_EXPR:
661 case LE_EXPR:
662 case GT_EXPR:
663 case GE_EXPR:
664 case EQ_EXPR:
665 case NE_EXPR:
666 break;
667 default:
668 return ERROR_MARK;
670 return tc;
673 /* Returns true if VAL falls in the range defined by BOUNDARY and CMPC, i.e.
674 all values in the range satisfy (x CMPC BOUNDARY) == true. */
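/* Example (illustrative, not from the original sources): with VAL == 3,
   BOUNDARY == 5 and CMPC == LT_EXPR the function returns true since
   3 < 5 holds, whereas VAL == 7 with CMPC == LE_EXPR returns false since
   7 <= 5 does not hold.  Non-constant operands conservatively yield
   true.  */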
676 static bool
677 is_value_included_in (tree val, tree boundary, enum tree_code cmpc)
679 bool inverted = false;
680 bool is_unsigned;
681 bool result;
683 /* Only handle integer constant here. */
684 if (TREE_CODE (val) != INTEGER_CST
685 || TREE_CODE (boundary) != INTEGER_CST)
686 return true;
688 is_unsigned = TYPE_UNSIGNED (TREE_TYPE (val));
690 if (cmpc == GE_EXPR || cmpc == GT_EXPR
691 || cmpc == NE_EXPR)
693 cmpc = invert_tree_comparison (cmpc, false);
694 inverted = true;
697 if (is_unsigned)
699 if (cmpc == EQ_EXPR)
700 result = tree_int_cst_equal (val, boundary);
701 else if (cmpc == LT_EXPR)
702 result = INT_CST_LT_UNSIGNED (val, boundary);
703 else
705 gcc_assert (cmpc == LE_EXPR);
706 result = (tree_int_cst_equal (val, boundary)
707 || INT_CST_LT_UNSIGNED (val, boundary));
710 else
712 if (cmpc == EQ_EXPR)
713 result = tree_int_cst_equal (val, boundary);
714 else if (cmpc == LT_EXPR)
715 result = INT_CST_LT (val, boundary);
716 else
718 gcc_assert (cmpc == LE_EXPR);
719 result = (tree_int_cst_equal (val, boundary)
720 || INT_CST_LT (val, boundary));
724 if (inverted)
725 result ^= 1;
727 return result;
730 /* Returns true if PRED is common among all the predicate
731 chains (PREDS) (and therefore can be factored out).
732 NUM_PRED_CHAINS is the size of the array PREDS. */
734 static bool
735 find_matching_predicate_in_rest_chains (use_pred_info_t pred,
736 VEC(use_pred_info_t, heap) **preds,
737 size_t num_pred_chains)
739 size_t i, j, n;
741 /* trivial case */
742 if (num_pred_chains == 1)
743 return true;
745 for (i = 1; i < num_pred_chains; i++)
747 bool found = false;
748 VEC(use_pred_info_t, heap) *one_chain = preds[i];
749 n = VEC_length (use_pred_info_t, one_chain);
750 for (j = 0; j < n; j++)
752 use_pred_info_t pred2
753 = VEC_index (use_pred_info_t, one_chain, j);
754 /* We could relax the condition comparison to not
755 use address comparison. However, the most common
756 case is that multiple control dependent paths share
757 a common path prefix, so address comparison should
758 be ok. */
760 if (pred2->cond == pred->cond
761 && pred2->invert == pred->invert)
763 found = true;
764 break;
767 if (!found)
768 return false;
770 return true;
773 /* Forward declaration. */
774 static bool
775 is_use_properly_guarded (gimple use_stmt,
776 basic_block use_bb,
777 gimple phi,
778 unsigned uninit_opnds,
779 struct pointer_set_t *visited_phis);
781 /* Returns true if all uninitialized opnds are pruned. Returns false
782 otherwise. PHI is the phi node with uninitialized operands,
783 UNINIT_OPNDS is the bitmap of the uninitialized operand positions,
784 FLAG_DEF is the statement defining the flag guarding the use of the
785 PHI output, BOUNDARY_CST is the const value used in the predicate
786 associated with the flag, CMP_CODE is the comparison code used in
787 the predicate, VISITED_PHIS is the pointer set of phis visited, and
788 VISITED_FLAG_PHIS is a pointer to the bitmap of flag definitions
789 that are also phis and have already been visited.
791 Example scenario:
793 BB1:
794 flag_1 = phi <0, 1> // (1)
795 var_1 = phi <undef, some_val>
798 BB2:
799 flag_2 = phi <0, flag_1, flag_1> // (2)
800 var_2 = phi <undef, var_1, var_1>
801 if (flag_2 == 1)
802 goto BB3;
804 BB3:
805 use of var_2 // (3)
807 Because some flag arg in (1) is not constant, if we do not look into the
808 flag phis recursively, it is conservatively treated as unknown and var_1
809 is assumed to flow into the use at (3). Since var_1 is potentially uninitialized,
810 a false warning will be emitted. Checking recursively into (1), the compiler can
811 find out that only some_val (which is defined) can flow into (3), which is OK. */
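/* Source level sketch of the scenario above (illustrative, not from the
   original sources):

     int v;
     int ok = 0;
     if (c1)
       {
         v = foo ();
         ok = 1;       // merge point (1): flag_1 / var_1
       }
     if (c2)
       bar ();         // merge point (2): flag_2 / var_2
     if (ok == 1)
       use (v);        // (3)

   At merge point (2) the flag operand is flag_1, itself a phi result
   rather than a constant, so the pruning must recurse into flag_1's phi
   to prove that v is defined on every path where ok == 1 holds.  */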
815 static bool
816 prune_uninit_phi_opnds_in_unrealizable_paths (
817 gimple phi, unsigned uninit_opnds,
818 gimple flag_def, tree boundary_cst,
819 enum tree_code cmp_code,
820 struct pointer_set_t *visited_phis,
821 bitmap *visited_flag_phis)
823 unsigned i;
825 for (i = 0; i < MIN (32, gimple_phi_num_args (flag_def)); i++)
827 tree flag_arg;
829 if (!MASK_TEST_BIT (uninit_opnds, i))
830 continue;
832 flag_arg = gimple_phi_arg_def (flag_def, i);
833 if (!is_gimple_constant (flag_arg))
835 gimple flag_arg_def, phi_arg_def;
836 tree phi_arg;
837 unsigned uninit_opnds_arg_phi;
839 if (TREE_CODE (flag_arg) != SSA_NAME)
840 return false;
841 flag_arg_def = SSA_NAME_DEF_STMT (flag_arg);
842 if (gimple_code (flag_arg_def) != GIMPLE_PHI)
843 return false;
845 phi_arg = gimple_phi_arg_def (phi, i);
846 if (TREE_CODE (phi_arg) != SSA_NAME)
847 return false;
849 phi_arg_def = SSA_NAME_DEF_STMT (phi_arg);
850 if (gimple_code (phi_arg_def) != GIMPLE_PHI)
851 return false;
853 if (gimple_bb (phi_arg_def) != gimple_bb (flag_arg_def))
854 return false;
856 if (!*visited_flag_phis)
857 *visited_flag_phis = BITMAP_ALLOC (NULL);
859 if (bitmap_bit_p (*visited_flag_phis,
860 SSA_NAME_VERSION (gimple_phi_result (flag_arg_def))))
861 return false;
863 bitmap_set_bit (*visited_flag_phis,
864 SSA_NAME_VERSION (gimple_phi_result (flag_arg_def)));
866 /* Now recursively prune the uninitialized phi args. */
867 uninit_opnds_arg_phi = compute_uninit_opnds_pos (phi_arg_def);
868 if (!prune_uninit_phi_opnds_in_unrealizable_paths (
869 phi_arg_def, uninit_opnds_arg_phi,
870 flag_arg_def, boundary_cst, cmp_code,
871 visited_phis, visited_flag_phis))
872 return false;
874 bitmap_clear_bit (*visited_flag_phis,
875 SSA_NAME_VERSION (gimple_phi_result (flag_arg_def)));
876 continue;
879 /* Now check if the constant is in the guarded range. */
880 if (is_value_included_in (flag_arg, boundary_cst, cmp_code))
882 tree opnd;
883 gimple opnd_def;
885 /* We now know that this undefined edge is not
886 pruned. If the operand is defined by another phi,
887 we can further prune the incoming edges of that
888 phi by checking the predicates of this operand. */
890 opnd = gimple_phi_arg_def (phi, i);
891 opnd_def = SSA_NAME_DEF_STMT (opnd);
892 if (gimple_code (opnd_def) == GIMPLE_PHI)
894 edge opnd_edge;
895 unsigned uninit_opnds2
896 = compute_uninit_opnds_pos (opnd_def);
897 gcc_assert (!MASK_EMPTY (uninit_opnds2));
898 opnd_edge = gimple_phi_arg_edge (phi, i);
899 if (!is_use_properly_guarded (phi,
900 opnd_edge->src,
901 opnd_def,
902 uninit_opnds2,
903 visited_phis))
904 return false;
906 else
907 return false;
911 return true;
914 /* A helper function that determines if the predicate set
915 of the use does not overlap with that of the uninit paths.
916 The most common scenario of guarded use is in Example 1:
917 Example 1:
918 if (some_cond)
920 x = ...;
921 flag = true;
924 ... some code ...
926 if (flag)
927 use (x);
929 The real world examples are usually more complicated, but similar
930 and usually result from inlining:
932 bool init_func (int * x)
934 if (some_cond)
935 return false;
936 *x = ..
937 return true;
940 void foo(..)
942 int x;
944 if (!init_func(&x))
945 return;
947 .. some_code ...
948 use (x);
951 Another possible use scenario is in the following trivial example:
953 Example 2:
954 if (n > 0)
955 x = 1;
957 if (n > 0)
959 if (m < 2)
960 .. = x;
963 Predicate analysis needs to compute the composite predicate:
965 1) 'x' use predicate: (n > 0) .AND. (m < 2)
966 2) 'x' default value (non-def) predicate: .NOT. (n > 0)
967 (the predicate chain for phi operand defs can be computed
968 starting from a bb that is control equivalent to the phi's
969 bb and is dominating the operand def.)
971 and check overlapping:
972 (n > 0) .AND. (m < 2) .AND. (.NOT. (n > 0))
973 <==> false
975 This implementation provides a framework that can handle such
976 scenarios. (Note that many simple cases are handled properly
977 without the predicate analysis -- this is due to the jump threading
978 transformation, which eliminates the merge point and thus makes
979 path sensitive analysis unnecessary.)
981 NUM_PREDS is the number of predicate chains, PREDS is
982 the array of chains, PHI is the phi node whose incoming (undefined)
983 paths need to be pruned, and UNINIT_OPNDS is the bitmap holding
984 uninit operand positions. VISITED_PHIS is the pointer set of phi
985 stmts being checked. */
988 static bool
989 use_pred_not_overlap_with_undef_path_pred (
990 size_t num_preds,
991 VEC(use_pred_info_t, heap) **preds,
992 gimple phi, unsigned uninit_opnds,
993 struct pointer_set_t *visited_phis)
995 unsigned int i, n;
996 gimple flag_def = 0;
997 tree boundary_cst = 0;
998 enum tree_code cmp_code;
999 bool swap_cond = false;
1000 bool invert = false;
1001 VEC(use_pred_info_t, heap) *the_pred_chain;
1002 bitmap visited_flag_phis = NULL;
1003 bool all_pruned = false;
1005 gcc_assert (num_preds > 0);
1006 /* Find within the common prefix of multiple predicate chains
1007 a predicate that is a comparison of a flag variable against
1008 a constant. */
1009 the_pred_chain = preds[0];
1010 n = VEC_length (use_pred_info_t, the_pred_chain);
1011 for (i = 0; i < n; i++)
1013 gimple cond;
1014 tree cond_lhs, cond_rhs, flag = 0;
1016 use_pred_info_t the_pred
1017 = VEC_index (use_pred_info_t, the_pred_chain, i);
1019 cond = the_pred->cond;
1020 invert = the_pred->invert;
1021 cond_lhs = gimple_cond_lhs (cond);
1022 cond_rhs = gimple_cond_rhs (cond);
1023 cmp_code = gimple_cond_code (cond);
1025 if (cond_lhs != NULL_TREE && TREE_CODE (cond_lhs) == SSA_NAME
1026 && cond_rhs != NULL_TREE && is_gimple_constant (cond_rhs))
1028 boundary_cst = cond_rhs;
1029 flag = cond_lhs;
1031 else if (cond_rhs != NULL_TREE && TREE_CODE (cond_rhs) == SSA_NAME
1032 && cond_lhs != NULL_TREE && is_gimple_constant (cond_lhs))
1034 boundary_cst = cond_lhs;
1035 flag = cond_rhs;
1036 swap_cond = true;
1039 if (!flag)
1040 continue;
1042 flag_def = SSA_NAME_DEF_STMT (flag);
1044 if (!flag_def)
1045 continue;
1047 if ((gimple_code (flag_def) == GIMPLE_PHI)
1048 && (gimple_bb (flag_def) == gimple_bb (phi))
1049 && find_matching_predicate_in_rest_chains (
1050 the_pred, preds, num_preds))
1051 break;
1053 flag_def = 0;
1056 if (!flag_def)
1057 return false;
1059 /* Now check that all the uninit incoming edges have a constant flag value
1060 that is in conflict with the use guard/predicate. */
1061 cmp_code = get_cmp_code (cmp_code, swap_cond, invert);
1063 if (cmp_code == ERROR_MARK)
1064 return false;
1066 all_pruned = prune_uninit_phi_opnds_in_unrealizable_paths (phi,
1067 uninit_opnds,
1068 flag_def,
1069 boundary_cst,
1070 cmp_code,
1071 visited_phis,
1072 &visited_flag_phis);
1074 if (visited_flag_phis)
1075 BITMAP_FREE (visited_flag_phis);
1077 return all_pruned;
1080 /* Returns true if TC is AND or OR */
1082 static inline bool
1083 is_and_or_or (enum tree_code tc, tree typ)
1085 return (tc == BIT_IOR_EXPR
1086 || (tc == BIT_AND_EXPR
1087 && (typ == 0 || TREE_CODE (typ) == BOOLEAN_TYPE)));
1090 typedef struct norm_cond
1092 VEC(gimple, heap) *conds;
1093 enum tree_code cond_code;
1094 bool invert;
1095 } *norm_cond_t;
1098 /* Normalizes gimple condition COND. The normalization follows
1099 UD chains to form larger condition expression trees. NORM_COND
1100 holds the normalized result. COND_CODE is the logical opcode
1101 (AND or OR) of the normalized tree. */
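/* Illustrative sketch (assumed, not from the original sources): source
   code such as

     if (a > 0 && b != 0)
       ...

   may be gimplified into

     t1 = a > 0;
     t2 = b != 0;
     t3 = t1 & t2;
     if (t3 != 0) ...

   Normalizing the GIMPLE_COND (t3 != 0) then follows the use-def chain
   of t3, yielding a NORM_COND whose conds vector holds the definitions
   of t1 and t2 and whose cond_code is BIT_AND_EXPR.  */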
1103 static void
1104 normalize_cond_1 (gimple cond,
1105 norm_cond_t norm_cond,
1106 enum tree_code cond_code)
1108 enum gimple_code gc;
1109 enum tree_code cur_cond_code;
1110 tree rhs1, rhs2;
1112 gc = gimple_code (cond);
1113 if (gc != GIMPLE_ASSIGN)
1115 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1116 return;
1119 cur_cond_code = gimple_assign_rhs_code (cond);
1120 rhs1 = gimple_assign_rhs1 (cond);
1121 rhs2 = gimple_assign_rhs2 (cond);
1122 if (cur_cond_code == NE_EXPR)
1124 if (integer_zerop (rhs2)
1125 && (TREE_CODE (rhs1) == SSA_NAME))
1126 normalize_cond_1 (
1127 SSA_NAME_DEF_STMT (rhs1),
1128 norm_cond, cond_code);
1129 else if (integer_zerop (rhs1)
1130 && (TREE_CODE (rhs2) == SSA_NAME))
1131 normalize_cond_1 (
1132 SSA_NAME_DEF_STMT (rhs2),
1133 norm_cond, cond_code);
1134 else
1135 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1137 return;
1140 if (is_and_or_or (cur_cond_code, TREE_TYPE (rhs1))
1141 && (cond_code == cur_cond_code || cond_code == ERROR_MARK)
1142 && (TREE_CODE (rhs1) == SSA_NAME && TREE_CODE (rhs2) == SSA_NAME))
1144 normalize_cond_1 (SSA_NAME_DEF_STMT (rhs1),
1145 norm_cond, cur_cond_code);
1146 normalize_cond_1 (SSA_NAME_DEF_STMT (rhs2),
1147 norm_cond, cur_cond_code);
1148 norm_cond->cond_code = cur_cond_code;
1150 else
1151 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1154 /* See normalize_cond_1 for details. INVERT is a flag to indicate
1155 if COND needs to be inverted or not. */
1157 static void
1158 normalize_cond (gimple cond, norm_cond_t norm_cond, bool invert)
1160 enum tree_code cond_code;
1162 norm_cond->cond_code = ERROR_MARK;
1163 norm_cond->invert = false;
1164 norm_cond->conds = NULL;
1165 gcc_assert (gimple_code (cond) == GIMPLE_COND);
1166 cond_code = gimple_cond_code (cond);
1167 if (invert)
1168 cond_code = invert_tree_comparison (cond_code, false);
1170 if (cond_code == NE_EXPR)
1172 if (integer_zerop (gimple_cond_rhs (cond))
1173 && (TREE_CODE (gimple_cond_lhs (cond)) == SSA_NAME))
1174 normalize_cond_1 (
1175 SSA_NAME_DEF_STMT (gimple_cond_lhs (cond)),
1176 norm_cond, ERROR_MARK);
1177 else if (integer_zerop (gimple_cond_lhs (cond))
1178 && (TREE_CODE (gimple_cond_rhs (cond)) == SSA_NAME))
1179 normalize_cond_1 (
1180 SSA_NAME_DEF_STMT (gimple_cond_rhs (cond)),
1181 norm_cond, ERROR_MARK);
1182 else
1184 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1185 norm_cond->invert = invert;
1188 else
1190 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1191 norm_cond->invert = invert;
1194 gcc_assert (VEC_length (gimple, norm_cond->conds) == 1
1195 || is_and_or_or (norm_cond->cond_code, NULL));
1198 /* Returns true if the domain for condition COND1 is a subset of
1199 COND2. REVERSE is a flag. When it is true, the function checks
1200 if COND1 is a superset of COND2. INVERT1 and INVERT2 are flags
1201 to indicate if COND1 and COND2 need to be inverted or not. */
1203 static bool
1204 is_gcond_subset_of (gimple cond1, bool invert1,
1205 gimple cond2, bool invert2,
1206 bool reverse)
1208 enum gimple_code gc1, gc2;
1209 enum tree_code cond1_code, cond2_code;
1210 gimple tmp;
1211 tree cond1_lhs, cond1_rhs, cond2_lhs, cond2_rhs;
1213 /* Take the short cut. */
1214 if (cond1 == cond2)
1215 return true;
1217 if (reverse)
1219 tmp = cond1;
1220 cond1 = cond2;
1221 cond2 = tmp;
1224 gc1 = gimple_code (cond1);
1225 gc2 = gimple_code (cond2);
1227 if ((gc1 != GIMPLE_ASSIGN && gc1 != GIMPLE_COND)
1228 || (gc2 != GIMPLE_ASSIGN && gc2 != GIMPLE_COND))
1229 return cond1 == cond2;
1231 cond1_code = ((gc1 == GIMPLE_ASSIGN)
1232 ? gimple_assign_rhs_code (cond1)
1233 : gimple_cond_code (cond1));
1235 cond2_code = ((gc2 == GIMPLE_ASSIGN)
1236 ? gimple_assign_rhs_code (cond2)
1237 : gimple_cond_code (cond2));
1239 if (TREE_CODE_CLASS (cond1_code) != tcc_comparison
1240 || TREE_CODE_CLASS (cond2_code) != tcc_comparison)
1241 return false;
1243 if (invert1)
1244 cond1_code = invert_tree_comparison (cond1_code, false);
1245 if (invert2)
1246 cond2_code = invert_tree_comparison (cond2_code, false);
1248 cond1_lhs = ((gc1 == GIMPLE_ASSIGN)
1249 ? gimple_assign_rhs1 (cond1)
1250 : gimple_cond_lhs (cond1));
1251 cond1_rhs = ((gc1 == GIMPLE_ASSIGN)
1252 ? gimple_assign_rhs2 (cond1)
1253 : gimple_cond_rhs (cond1));
1254 cond2_lhs = ((gc2 == GIMPLE_ASSIGN)
1255 ? gimple_assign_rhs1 (cond2)
1256 : gimple_cond_lhs (cond2));
1257 cond2_rhs = ((gc2 == GIMPLE_ASSIGN)
1258 ? gimple_assign_rhs2 (cond2)
1259 : gimple_cond_rhs (cond2));
1261 /* Assuming const operands have been swapped to the
1262 rhs at this point of the analysis. */
1264 if (cond1_lhs != cond2_lhs)
1265 return false;
1267 if (!is_gimple_constant (cond1_rhs)
1268 || TREE_CODE (cond1_rhs) != INTEGER_CST)
1269 return (cond1_rhs == cond2_rhs);
1271 if (!is_gimple_constant (cond2_rhs)
1272 || TREE_CODE (cond2_rhs) != INTEGER_CST)
1273 return (cond1_rhs == cond2_rhs);
1275 if (cond1_code == EQ_EXPR)
1276 return is_value_included_in (cond1_rhs,
1277 cond2_rhs, cond2_code);
1278 if (cond1_code == NE_EXPR || cond2_code == EQ_EXPR)
1279 return ((cond2_code == cond1_code)
1280 && tree_int_cst_equal (cond1_rhs, cond2_rhs));
1282 if (((cond1_code == GE_EXPR || cond1_code == GT_EXPR)
1283 && (cond2_code == LE_EXPR || cond2_code == LT_EXPR))
1284 || ((cond1_code == LE_EXPR || cond1_code == LT_EXPR)
1285 && (cond2_code == GE_EXPR || cond2_code == GT_EXPR)))
1286 return false;
1288 if (cond1_code != GE_EXPR && cond1_code != GT_EXPR
1289 && cond1_code != LE_EXPR && cond1_code != LT_EXPR)
1290 return false;
1292 if (cond1_code == GT_EXPR)
1294 cond1_code = GE_EXPR;
1295 cond1_rhs = fold_binary (PLUS_EXPR, TREE_TYPE (cond1_rhs),
1296 cond1_rhs,
1297 fold_convert (TREE_TYPE (cond1_rhs),
1298 integer_one_node));
1300 else if (cond1_code == LT_EXPR)
1302 cond1_code = LE_EXPR;
1303 cond1_rhs = fold_binary (MINUS_EXPR, TREE_TYPE (cond1_rhs),
1304 cond1_rhs,
1305 fold_convert (TREE_TYPE (cond1_rhs),
1306 integer_one_node));
1309 if (!cond1_rhs)
1310 return false;
1312 gcc_assert (cond1_code == GE_EXPR || cond1_code == LE_EXPR);
1314 if (cond2_code == GE_EXPR || cond2_code == GT_EXPR ||
1315 cond2_code == LE_EXPR || cond2_code == LT_EXPR)
1316 return is_value_included_in (cond1_rhs,
1317 cond2_rhs, cond2_code);
1318 else if (cond2_code == NE_EXPR)
1319 return
1320 (is_value_included_in (cond1_rhs,
1321 cond2_rhs, cond2_code)
1322 && !is_value_included_in (cond2_rhs,
1323 cond1_rhs, cond1_code));
1324 return false;
1327 /* Returns true if the domain of the condition expression
1328 in COND is a subset of any of the sub-conditions
1329 of the normalized condition NORM_COND. INVERT is a flag
1330 to indicate if COND needs to be inverted.
1331 REVERSE is a flag. When it is true, the check is reversed --
1332 it returns true if COND is a superset of any of the subconditions
1333 of NORM_COND. */
1335 static bool
1336 is_subset_of_any (gimple cond, bool invert,
1337 norm_cond_t norm_cond, bool reverse)
1339 size_t i;
1340 size_t len = VEC_length (gimple, norm_cond->conds);
1342 for (i = 0; i < len; i++)
1344 if (is_gcond_subset_of (cond, invert,
1345 VEC_index (gimple, norm_cond->conds, i),
1346 false, reverse))
1347 return true;
1349 return false;
1352 /* NORM_COND1 and NORM_COND2 are normalized logical/BIT OR
1353 expressions (formed by following UD chains not control
1354 dependence chains). The function returns true if the domain
1355 of the expression NORM_COND1 is a subset of NORM_COND2's.
1356 The implementation is conservative, and it returns false if
1357 the inclusion relationship may not hold. */
1359 static bool
1360 is_or_set_subset_of (norm_cond_t norm_cond1,
1361 norm_cond_t norm_cond2)
1363 size_t i;
1364 size_t len = VEC_length (gimple, norm_cond1->conds);
1366 for (i = 0; i < len; i++)
1368 if (!is_subset_of_any (VEC_index (gimple, norm_cond1->conds, i),
1369 false, norm_cond2, false))
1370 return false;
1372 return true;
1375 /* NORM_COND1 and NORM_COND2 are normalized logical AND
1376 expressions (formed by following UD chains not control
1377 dependence chains). The function returns true if the domain
1378 of the AND expression NORM_COND1 is a subset of NORM_COND2's. */
1380 static bool
1381 is_and_set_subset_of (norm_cond_t norm_cond1,
1382 norm_cond_t norm_cond2)
1384 size_t i;
1385 size_t len = VEC_length (gimple, norm_cond2->conds);
1387 for (i = 0; i < len; i++)
1389 if (!is_subset_of_any (VEC_index (gimple, norm_cond2->conds, i),
1390 false, norm_cond1, true))
1391 return false;
1393 return true;
1396 /* Returns true if the domain of NORM_COND1 is a subset
1397 of that of NORM_COND2. Returns false if it can not be
1398 proved to be so. */
1400 static bool
1401 is_norm_cond_subset_of (norm_cond_t norm_cond1,
1402 norm_cond_t norm_cond2)
1404 size_t i;
1405 enum tree_code code1, code2;
1407 code1 = norm_cond1->cond_code;
1408 code2 = norm_cond2->cond_code;
1410 if (code1 == BIT_AND_EXPR)
1412 /* Both conditions are AND expressions. */
1413 if (code2 == BIT_AND_EXPR)
1414 return is_and_set_subset_of (norm_cond1, norm_cond2);
1415 /* NORM_COND1 is an AND expression, and NORM_COND2 is an OR
1416 expression. In this case, returns true if any subexpression
1417 of NORM_COND1 is a subset of any subexpression of NORM_COND2. */
1418 else if (code2 == BIT_IOR_EXPR)
1420 size_t len1;
1421 len1 = VEC_length (gimple, norm_cond1->conds);
1422 for (i = 0; i < len1; i++)
1424 gimple cond1 = VEC_index (gimple, norm_cond1->conds, i);
1425 if (is_subset_of_any (cond1, false, norm_cond2, false))
1426 return true;
1428 return false;
1430 else
1432 gcc_assert (code2 == ERROR_MARK);
1433 gcc_assert (VEC_length (gimple, norm_cond2->conds) == 1);
1434 return is_subset_of_any (VEC_index (gimple, norm_cond2->conds, 0),
1435 norm_cond2->invert, norm_cond1, true);
1438 /* NORM_COND1 is an OR expression */
1439 else if (code1 == BIT_IOR_EXPR)
1441 if (code2 != code1)
1442 return false;
1444 return is_or_set_subset_of (norm_cond1, norm_cond2);
1446 else
1448 gcc_assert (code1 == ERROR_MARK);
1449 gcc_assert (VEC_length (gimple, norm_cond1->conds) == 1);
1450 /* Conservatively returns false if NORM_COND1 is non-decomposable
1451 and NORM_COND2 is an AND expression. */
1452 if (code2 == BIT_AND_EXPR)
1453 return false;
1455 if (code2 == BIT_IOR_EXPR)
1456 return is_subset_of_any (VEC_index (gimple, norm_cond1->conds, 0),
1457 norm_cond1->invert, norm_cond2, false);
1459 gcc_assert (code2 == ERROR_MARK);
1460 gcc_assert (VEC_length (gimple, norm_cond2->conds) == 1);
1461 return is_gcond_subset_of (VEC_index (gimple, norm_cond1->conds, 0),
1462 norm_cond1->invert,
1463 VEC_index (gimple, norm_cond2->conds, 0),
1464 norm_cond2->invert, false);
1468 /* Returns true if the domain of the single predicate expression
1469 EXPR1 is a subset of that of EXPR2. Returns false if it
1470 can not be proved. */
1472 static bool
1473 is_pred_expr_subset_of (use_pred_info_t expr1,
1474 use_pred_info_t expr2)
1476 gimple cond1, cond2;
1477 enum tree_code code1, code2;
1478 struct norm_cond norm_cond1, norm_cond2;
1479 bool is_subset = false;
1481 cond1 = expr1->cond;
1482 cond2 = expr2->cond;
1483 code1 = gimple_cond_code (cond1);
1484 code2 = gimple_cond_code (cond2);
1486 if (expr1->invert)
1487 code1 = invert_tree_comparison (code1, false);
1488 if (expr2->invert)
1489 code2 = invert_tree_comparison (code2, false);
1491 /* Fast path -- match exactly */
1492 if ((gimple_cond_lhs (cond1) == gimple_cond_lhs (cond2))
1493 && (gimple_cond_rhs (cond1) == gimple_cond_rhs (cond2))
1494 && (code1 == code2))
1495 return true;
1497 /* Normalize conditions. To keep NE_EXPR, do not invert
1498 with both need inversion. */
1499 normalize_cond (cond1, &norm_cond1, (expr1->invert));
1500 normalize_cond (cond2, &norm_cond2, (expr2->invert));
1502 is_subset = is_norm_cond_subset_of (&norm_cond1, &norm_cond2);
1504 /* Free memory */
1505 VEC_free (gimple, heap, norm_cond1.conds);
1506 VEC_free (gimple, heap, norm_cond2.conds);
1507 return is_subset ;
1510 /* Returns true if the domain of PRED1 is a subset
1511 of that of PRED2. Returns false if it can not be proved so. */
1513 static bool
1514 is_pred_chain_subset_of (VEC(use_pred_info_t, heap) *pred1,
1515 VEC(use_pred_info_t, heap) *pred2)
1517 size_t np1, np2, i1, i2;
1519 np1 = VEC_length (use_pred_info_t, pred1);
1520 np2 = VEC_length (use_pred_info_t, pred2);
1522 for (i2 = 0; i2 < np2; i2++)
1524 bool found = false;
1525 use_pred_info_t info2
1526 = VEC_index (use_pred_info_t, pred2, i2);
1527 for (i1 = 0; i1 < np1; i1++)
1529 use_pred_info_t info1
1530 = VEC_index (use_pred_info_t, pred1, i1);
1531 if (is_pred_expr_subset_of (info1, info2))
1533 found = true;
1534 break;
1537 if (!found)
1538 return false;
1540 return true;
1543 /* Returns true if the domain defined by
1544 one pred chain ONE_PRED is a subset of the domain
1545 of *PREDS. It returns false if ONE_PRED's domain is
1546 not a subset of any of the sub-domains of PREDS
1547 (corresponding to the individual chains in it), even
1548 though it may still be a subset of the whole domain
1549 of PREDS, which is the union (ORed) of all its subdomains.
1550 In other words, the result is conservative. */
1552 static bool
1553 is_included_in (VEC(use_pred_info_t, heap) *one_pred,
1554 VEC(use_pred_info_t, heap) **preds,
1555 size_t n)
1557 size_t i;
1559 for (i = 0; i < n; i++)
1561 if (is_pred_chain_subset_of (one_pred, preds[i]))
1562 return true;
1565 return false;
1568 /* Compares two predicate sets PREDS1 and PREDS2 and returns
1569 true if the domain defined by PREDS1 is a superset
1570 of PREDS2's domain. N1 and N2 are array sizes of PREDS1 and
1571 PREDS2 respectively. The implementation chooses not to build
1572 generic trees (relying on the folding capability of the
1573 compiler), but instead performs brute force comparison of
1574 individual predicate chains (won't be a compile time problem
1575 as the chains are pretty short). When the function returns
1576 false, it does not necessarily mean *PREDS1 is not a superset
1577 of *PREDS2, but only that the analysis can not prove that it
1578 is. In such cases, false warnings may still be
1579 emitted. */
1581 static bool
1582 is_superset_of (VEC(use_pred_info_t, heap) **preds1,
1583 size_t n1,
1584 VEC(use_pred_info_t, heap) **preds2,
1585 size_t n2)
1587 size_t i;
1588 VEC(use_pred_info_t, heap) *one_pred_chain;
1590 for (i = 0; i < n2; i++)
1592 one_pred_chain = preds2[i];
1593 if (!is_included_in (one_pred_chain, preds1, n1))
1594 return false;
1597 return true;
1600 /* Comparison function used by qsort. It is used to
1601 sort predicate chains to allow predicate
1602 simplification. */
1604 static int
1605 pred_chain_length_cmp (const void *p1, const void *p2)
1607 use_pred_info_t i1, i2;
1608 VEC(use_pred_info_t, heap) * const *chain1
1609 = (VEC(use_pred_info_t, heap) * const *)p1;
1610 VEC(use_pred_info_t, heap) * const *chain2
1611 = (VEC(use_pred_info_t, heap) * const *)p2;
1613 if (VEC_length (use_pred_info_t, *chain1)
1614 != VEC_length (use_pred_info_t, *chain2))
1615 return (VEC_length (use_pred_info_t, *chain1)
1616 - VEC_length (use_pred_info_t, *chain2));
1618 i1 = VEC_index (use_pred_info_t, *chain1, 0);
1619 i2 = VEC_index (use_pred_info_t, *chain2, 0);
1621 /* Allow predicates with similar prefix to come together. */
1622 if (!i1->invert && i2->invert)
1623 return -1;
1624 else if (i1->invert && !i2->invert)
1625 return 1;
1627 return gimple_uid (i1->cond) - gimple_uid (i2->cond);
1630 /* x OR (!x AND y) is equivalent to x OR y.
1631 This function normalizes x1 OR (!x1 AND x2) OR (!x1 AND !x2 AND x3)
1632 into x1 OR x2 OR x3. PREDS is the array of predicate chains, and N is
1633 the number of chains. Returns true if normalization happens. */
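/* Worked example (illustrative, not from the original sources):
   predicate chains of the form

     (x1) .OR. (!x1 .AND. x2) .OR. (!x1 .AND. !x2 .AND. x3)

   arise naturally from a cascade of guards such as

     if (x1) goto init;
     else if (x2) goto init;
     else if (x3) goto init;

   and are rewritten here into the equivalent x1 .OR. x2 .OR. x3, which
   makes the later superset check against the use predicates simpler.  */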
1635 static bool
1636 normalize_preds (VEC(use_pred_info_t, heap) **preds, size_t *n)
1638 size_t i, j, ll;
1639 VEC(use_pred_info_t, heap) *pred_chain;
1640 VEC(use_pred_info_t, heap) *x = 0;
1641 use_pred_info_t xj = 0, nxj = 0;
1643 if (*n < 2)
1644 return false;
1646 /* First sort the chains in ascending order of lengths. */
1647 qsort (preds, *n, sizeof (void *), pred_chain_length_cmp);
1648 pred_chain = preds[0];
1649 ll = VEC_length (use_pred_info_t, pred_chain);
1650 if (ll != 1)
1652 if (ll == 2)
1654 use_pred_info_t xx, yy, xx2, nyy;
1655 VEC(use_pred_info_t, heap) *pred_chain2 = preds[1];
1656 if (VEC_length (use_pred_info_t, pred_chain2) != 2)
1657 return false;
1659 /* See if simplification x AND y OR x AND !y is possible. */
1660 xx = VEC_index (use_pred_info_t, pred_chain, 0);
1661 yy = VEC_index (use_pred_info_t, pred_chain, 1);
1662 xx2 = VEC_index (use_pred_info_t, pred_chain2, 0);
1663 nyy = VEC_index (use_pred_info_t, pred_chain2, 1);
1664 if (gimple_cond_lhs (xx->cond) != gimple_cond_lhs (xx2->cond)
1665 || gimple_cond_rhs (xx->cond) != gimple_cond_rhs (xx2->cond)
1666 || gimple_cond_code (xx->cond) != gimple_cond_code (xx2->cond)
1667 || (xx->invert != xx2->invert))
1668 return false;
1669 if (gimple_cond_lhs (yy->cond) != gimple_cond_lhs (nyy->cond)
1670 || gimple_cond_rhs (yy->cond) != gimple_cond_rhs (nyy->cond)
1671 || gimple_cond_code (yy->cond) != gimple_cond_code (nyy->cond)
1672 || (yy->invert == nyy->invert))
1673 return false;
1675 /* Now merge the first two chains. */
1676 free (yy);
1677 free (nyy);
1678 free (xx2);
1679 VEC_free (use_pred_info_t, heap, pred_chain);
1680 VEC_free (use_pred_info_t, heap, pred_chain2);
1681 pred_chain = 0;
1682 VEC_safe_push (use_pred_info_t, heap, pred_chain, xx);
1683 preds[0] = pred_chain;
1684 for (i = 1; i < *n - 1; i++)
1685 preds[i] = preds[i + 1];
1687 preds[*n - 1] = 0;
1688 *n = *n - 1;
1690 else
1691 return false;
1694 VEC_safe_push (use_pred_info_t, heap, x,
1695 VEC_index (use_pred_info_t, pred_chain, 0));
1697 /* The loop extracts x1, x2, x3, etc from chains
1698 x1 OR (!x1 AND x2) OR (!x1 AND !x2 AND x3) OR ... */
1699 for (i = 1; i < *n; i++)
1701 pred_chain = preds[i];
1702 if (VEC_length (use_pred_info_t, pred_chain) != i + 1)
1703 return false;
1705 for (j = 0; j < i; j++)
1707 xj = VEC_index (use_pred_info_t, x, j);
1708 nxj = VEC_index (use_pred_info_t, pred_chain, j);
1710 /* Check if nxj is !xj */
1711 if (gimple_cond_lhs (xj->cond) != gimple_cond_lhs (nxj->cond)
1712 || gimple_cond_rhs (xj->cond) != gimple_cond_rhs (nxj->cond)
1713 || gimple_cond_code (xj->cond) != gimple_cond_code (nxj->cond)
1714 || (xj->invert == nxj->invert))
1715 return false;
1718 VEC_safe_push (use_pred_info_t, heap, x,
1719 VEC_index (use_pred_info_t, pred_chain, i));
1722 /* Now normalize the pred chains using the extracted x1, x2, x3 etc. */
1723 for (j = 0; j < *n; j++)
1725 use_pred_info_t t;
1726 xj = VEC_index (use_pred_info_t, x, j);
1728 t = XNEW (struct use_pred_info);
1729 *t = *xj;
1731 VEC_replace (use_pred_info_t, x, j, t);
1734 for (i = 0; i < *n; i++)
1736 pred_chain = preds[i];
1737 for (j = 0; j < VEC_length (use_pred_info_t, pred_chain); j++)
1738 free (VEC_index (use_pred_info_t, pred_chain, j));
1739 VEC_free (use_pred_info_t, heap, pred_chain);
1740 pred_chain = 0;
1741 /* A new chain. */
1742 VEC_safe_push (use_pred_info_t, heap, pred_chain,
1743 VEC_index (use_pred_info_t, x, i));
1744 preds[i] = pred_chain;
1746 return true;
1751 /* Computes the predicates that guard the use and checks
1752 if the incoming paths that have empty (or possibly
1753 empty) definition can be pruned/filtered. The function returns
1754 true if it can be determined that the use of PHI's def in
1755 USE_STMT is guarded with a predicate set not overlapping with
1756 predicate sets of all runtime paths that do not have a definition.
1757 Returns false if it is not or it can not be determined. USE_BB is
1758 the bb of the use (for phi operand use, the bb is not the bb of
1759 the phi stmt, but the src bb of the operand edge). UNINIT_OPNDS
1760 is a bit vector. If an operand of PHI is uninitialized, the
1761 corresponding bit in the vector is 1. VISITED_PHIS is a pointer
1762 set of phis being visited. */
1764 static bool
1765 is_use_properly_guarded (gimple use_stmt,
1766 basic_block use_bb,
1767 gimple phi,
1768 unsigned uninit_opnds,
1769 struct pointer_set_t *visited_phis)
1771 basic_block phi_bb;
1772 VEC(use_pred_info_t, heap) **preds = 0;
1773 VEC(use_pred_info_t, heap) **def_preds = 0;
1774 size_t num_preds = 0, num_def_preds = 0;
1775 bool has_valid_preds = false;
1776 bool is_properly_guarded = false;
1778 if (pointer_set_insert (visited_phis, phi))
1779 return false;
1781 phi_bb = gimple_bb (phi);
1783 if (is_non_loop_exit_postdominating (use_bb, phi_bb))
1784 return false;
1786 has_valid_preds = find_predicates (&preds, &num_preds,
1787 phi_bb, use_bb);
1789 if (!has_valid_preds)
1791 destroy_predicate_vecs (num_preds, preds);
1792 return false;
1795 if (dump_file)
1796 dump_predicates (use_stmt, num_preds, preds,
1797 "\nUse in stmt ");
1799 has_valid_preds = find_def_preds (&def_preds,
1800 &num_def_preds, phi);
1802 if (has_valid_preds)
1804 bool normed;
1805 if (dump_file)
1806 dump_predicates (phi, num_def_preds, def_preds,
1807 "Operand defs of phi ");
1809 normed = normalize_preds (def_preds, &num_def_preds);
1810 if (normed && dump_file)
1812 fprintf (dump_file, "\nNormalized to\n");
1813 dump_predicates (phi, num_def_preds, def_preds,
1814 "Operand defs of phi ");
1816 is_properly_guarded =
1817 is_superset_of (def_preds, num_def_preds,
1818 preds, num_preds);
1821 /* further prune the dead incoming phi edges. */
1822 if (!is_properly_guarded)
1823 is_properly_guarded
1824 = use_pred_not_overlap_with_undef_path_pred (
1825 num_preds, preds, phi, uninit_opnds, visited_phis);
1827 destroy_predicate_vecs (num_preds, preds);
1828 destroy_predicate_vecs (num_def_preds, def_preds);
1829 return is_properly_guarded;
1832 /* Searches through all uses of a potentially
1833 uninitialized variable defined by PHI and returns a use
1834 statement if the use is not properly guarded. It returns
1835 NULL if all uses are guarded. UNINIT_OPNDS is a bitvector
1836 holding the position(s) of uninit PHI operands. WORKLIST
1837 is the vector of candidate phis that may be updated by this
1838 function. ADDED_TO_WORKLIST is the pointer set tracking
1839 if the new phi is already in the worklist. */
1841 static gimple
1842 find_uninit_use (gimple phi, unsigned uninit_opnds,
1843 VEC(gimple, heap) **worklist,
1844 struct pointer_set_t *added_to_worklist)
1846 tree phi_result;
1847 use_operand_p use_p;
1848 gimple use_stmt;
1849 imm_use_iterator iter;
1851 phi_result = gimple_phi_result (phi);
1853 FOR_EACH_IMM_USE_FAST (use_p, iter, phi_result)
1855 struct pointer_set_t *visited_phis;
1856 basic_block use_bb;
1858 use_stmt = USE_STMT (use_p);
1859 if (is_gimple_debug (use_stmt))
1860 continue;
1862 visited_phis = pointer_set_create ();
1864 if (gimple_code (use_stmt) == GIMPLE_PHI)
1865 use_bb = gimple_phi_arg_edge (use_stmt,
1866 PHI_ARG_INDEX_FROM_USE (use_p))->src;
1867 else
1868 use_bb = gimple_bb (use_stmt);
1870 if (is_use_properly_guarded (use_stmt,
1871 use_bb,
1872 phi,
1873 uninit_opnds,
1874 visited_phis))
1876 pointer_set_destroy (visited_phis);
1877 continue;
1879 pointer_set_destroy (visited_phis);
1881 if (dump_file && (dump_flags & TDF_DETAILS))
1883 fprintf (dump_file, "[CHECK]: Found unguarded use: ");
1884 print_gimple_stmt (dump_file, use_stmt, 0, 0);
1886 /* Found one real use, return. */
1887 if (gimple_code (use_stmt) != GIMPLE_PHI)
1888 return use_stmt;
1890 /* Found a phi use that is not guarded,
1891 add the phi to the worklist. */
1892 if (!pointer_set_insert (added_to_worklist,
1893 use_stmt))
1895 if (dump_file && (dump_flags & TDF_DETAILS))
1897 fprintf (dump_file, "[WORKLIST]: Update worklist with phi: ");
1898 print_gimple_stmt (dump_file, use_stmt, 0, 0);
1901 VEC_safe_push (gimple, heap, *worklist, use_stmt);
1902 pointer_set_insert (possibly_undefined_names,
1903 phi_result);
1907 return NULL;
1910 /* Looks for inputs to PHI that are SSA_NAMEs that have empty definitions
1911 and gives a warning if there exists a runtime path from the entry to a
1912 use of the PHI def that does not contain a definition. In other words,
1913 the warning is on the real use. The more dead paths that can be pruned
1914 by the compiler, the fewer false positives the warning produces. WORKLIST
1915 is a vector of candidate phis to be examined. ADDED_TO_WORKLIST is
1916 a pointer set tracking if the new phi is added to the worklist or not. */
1918 static void
1919 warn_uninitialized_phi (gimple phi, VEC(gimple, heap) **worklist,
1920 struct pointer_set_t *added_to_worklist)
1922 unsigned uninit_opnds;
1923 gimple uninit_use_stmt = 0;
1924 tree uninit_op;
1926 /* Don't look at virtual operands. */
1927 if (virtual_operand_p (gimple_phi_result (phi)))
1928 return;
1930 uninit_opnds = compute_uninit_opnds_pos (phi);
1932 if (MASK_EMPTY (uninit_opnds))
1933 return;
1935 if (dump_file && (dump_flags & TDF_DETAILS))
1937 fprintf (dump_file, "[CHECK]: examining phi: ");
1938 print_gimple_stmt (dump_file, phi, 0, 0);
1941 /* Now check if we have any use of the value without proper guard. */
1942 uninit_use_stmt = find_uninit_use (phi, uninit_opnds,
1943 worklist, added_to_worklist);
1945 /* All uses are properly guarded. */
1946 if (!uninit_use_stmt)
1947 return;
1949 uninit_op = gimple_phi_arg_def (phi, MASK_FIRST_SET_BIT (uninit_opnds));
1950 if (SSA_NAME_VAR (uninit_op) == NULL_TREE)
1951 return;
1952 warn_uninit (OPT_Wmaybe_uninitialized, uninit_op, SSA_NAME_VAR (uninit_op),
1953 SSA_NAME_VAR (uninit_op),
1954 "%qD may be used uninitialized in this function",
1955 uninit_use_stmt);
1960 /* Entry point to the late uninitialized warning pass. */
1962 static unsigned int
1963 execute_late_warn_uninitialized (void)
1965 basic_block bb;
1966 gimple_stmt_iterator gsi;
1967 VEC(gimple, heap) *worklist = 0;
1968 struct pointer_set_t *added_to_worklist;
1970 calculate_dominance_info (CDI_DOMINATORS);
1971 calculate_dominance_info (CDI_POST_DOMINATORS);
1972 /* Re-do the plain uninitialized variable check, as optimization may have
1973 straightened control flow. Do this first so that we don't accidentally
1974 get a "may be" warning when we'd have seen an "is" warning later. */
1975 warn_uninitialized_vars (/*warn_possibly_uninitialized=*/1);
1977 timevar_push (TV_TREE_UNINIT);
1979 possibly_undefined_names = pointer_set_create ();
1980 added_to_worklist = pointer_set_create ();
1982 /* Initialize worklist */
1983 FOR_EACH_BB (bb)
1984 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1986 gimple phi = gsi_stmt (gsi);
1987 size_t n, i;
1989 n = gimple_phi_num_args (phi);
1991 /* Don't look at virtual operands. */
1992 if (virtual_operand_p (gimple_phi_result (phi)))
1993 continue;
1995 for (i = 0; i < n; ++i)
1997 tree op = gimple_phi_arg_def (phi, i);
1998 if (TREE_CODE (op) == SSA_NAME
1999 && ssa_undefined_value_p (op))
2001 VEC_safe_push (gimple, heap, worklist, phi);
2002 pointer_set_insert (added_to_worklist, phi);
2003 if (dump_file && (dump_flags & TDF_DETAILS))
2005 fprintf (dump_file, "[WORKLIST]: add to initial list: ");
2006 print_gimple_stmt (dump_file, phi, 0, 0);
2008 break;
2013 while (VEC_length (gimple, worklist) != 0)
2015 gimple cur_phi = 0;
2016 cur_phi = VEC_pop (gimple, worklist);
2017 warn_uninitialized_phi (cur_phi, &worklist, added_to_worklist);
2020 VEC_free (gimple, heap, worklist);
2021 pointer_set_destroy (added_to_worklist);
2022 pointer_set_destroy (possibly_undefined_names);
2023 possibly_undefined_names = NULL;
2024 free_dominance_info (CDI_POST_DOMINATORS);
2025 timevar_pop (TV_TREE_UNINIT);
2026 return 0;
2029 static bool
2030 gate_warn_uninitialized (void)
2032 return warn_uninitialized != 0;
2035 struct gimple_opt_pass pass_late_warn_uninitialized =
2038 GIMPLE_PASS,
2039 "uninit", /* name */
2040 gate_warn_uninitialized, /* gate */
2041 execute_late_warn_uninitialized, /* execute */
2042 NULL, /* sub */
2043 NULL, /* next */
2044 0, /* static_pass_number */
2045 TV_NONE, /* tv_id */
2046 PROP_ssa, /* properties_required */
2047 0, /* properties_provided */
2048 0, /* properties_destroyed */
2049 0, /* todo_flags_start */
2050 0 /* todo_flags_finish */