gcc/tree-ssa-uninit.c
1 /* Predicate aware uninitialized variable warning.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2010 Free Software
3 Foundation, Inc.
4 Contributed by Xinliang David Li <davidxl@google.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "tm_p.h"
29 #include "langhooks.h"
30 #include "basic-block.h"
31 #include "output.h"
32 #include "function.h"
33 #include "gimple-pretty-print.h"
34 #include "bitmap.h"
35 #include "pointer-set.h"
36 #include "tree-flow.h"
37 #include "gimple.h"
38 #include "tree-inline.h"
39 #include "timevar.h"
40 #include "hashtab.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "diagnostic-core.h"
44 #include "timevar.h"
46 /* This implements the pass that does predicate aware warning on uses of
47 possibly uninitialized variables. The pass first collects the set of
48 possibly uninitialized SSA names. For each such name, it walks through
49 all its immediate uses. For each immediate use, it rebuilds the condition
50 expression (the predicate) that guards the use. The predicate is then
51 examined to see if the variable is always defined under that same condition.
52 This is done either by pruning the unrealizable paths that lead to the
53 default definitions or by checking if the predicate set that guards the
54 defining paths is a superset of the use predicate. */
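/* As a small illustration (the variable and condition names here are
   only examples), the pass distinguishes cases like:

       int x;
       if (cond)
         x = 1;
       ...
       if (cond)
         foo (x);   // use guarded by the same predicate: no warning
       bar (x);     // unguarded use: may be used uninitialized

   The first use is accepted because its guarding predicate (cond) is a
   subset of the predicate set guarding the definition of x; the second
   use is not, so it is warned about.  */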
57 /* Pointer set of potentially undefined ssa names, i.e.,
 58 ssa names that are defined by a phi with operands that
 59 are undefined or potentially undefined. */
60 static struct pointer_set_t *possibly_undefined_names = 0;
62 /* Bit mask handling macros. */
63 #define MASK_SET_BIT(mask, pos) mask |= (1 << pos)
64 #define MASK_TEST_BIT(mask, pos) (mask & (1 << pos))
65 #define MASK_EMPTY(mask) (mask == 0)
67 /* Returns the first bit position (starting from LSB)
68 in mask that is non zero. Returns -1 if the mask is empty. */
69 static int
70 get_mask_first_set_bit (unsigned mask)
72 int pos = 0;
73 if (mask == 0)
74 return -1;
76 while ((mask & (1 << pos)) == 0)
77 pos++;
79 return pos;
81 #define MASK_FIRST_SET_BIT(mask) get_mask_first_set_bit (mask)
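/* For example, if a phi's arguments at positions 0 and 2 are possibly
   undefined, the mask computed by compute_uninit_opnds_pos below is
   0x5 (binary 101): MASK_TEST_BIT (mask, 2) is non-zero,
   MASK_EMPTY (mask) is false, and MASK_FIRST_SET_BIT (mask) is 0.  */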
84 /* Return true if T, an SSA_NAME, has an undefined value. */
86 bool
87 ssa_undefined_value_p (tree t)
89 tree var = SSA_NAME_VAR (t);
91 /* Parameters get their initial value from the function entry. */
92 if (TREE_CODE (var) == PARM_DECL)
93 return false;
95 /* When returning by reference the return address is actually a hidden
96 parameter. */
97 if (TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL
98 && DECL_BY_REFERENCE (SSA_NAME_VAR (t)))
99 return false;
101 /* Hard register variables get their initial value from the ether. */
102 if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
103 return false;
105 /* The value is undefined iff its definition statement is empty. */
106 return (gimple_nop_p (SSA_NAME_DEF_STMT (t))
107 || (possibly_undefined_names
108 && pointer_set_contains (possibly_undefined_names, t)));
111 /* Checks if the operand OPND of PHI is defined by
112 another phi with one operand defined by this PHI,
 113 but the rest of the operands are all defined. If yes,
 114 returns true to skip this operand as being
115 redundant. Can be enhanced to be more general. */
117 static bool
118 can_skip_redundant_opnd (tree opnd, gimple phi)
120 gimple op_def;
121 tree phi_def;
122 int i, n;
124 phi_def = gimple_phi_result (phi);
125 op_def = SSA_NAME_DEF_STMT (opnd);
126 if (gimple_code (op_def) != GIMPLE_PHI)
127 return false;
128 n = gimple_phi_num_args (op_def);
129 for (i = 0; i < n; ++i)
131 tree op = gimple_phi_arg_def (op_def, i);
132 if (TREE_CODE (op) != SSA_NAME)
133 continue;
134 if (op != phi_def && ssa_undefined_value_p (op))
135 return false;
138 return true;
141 /* Returns a bit mask holding the positions of arguments in PHI
142 that have empty (or possibly empty) definitions. */
144 static unsigned
145 compute_uninit_opnds_pos (gimple phi)
147 size_t i, n;
148 unsigned uninit_opnds = 0;
150 n = gimple_phi_num_args (phi);
151 /* Bail out for phi with too many args. */
152 if (n > 32)
153 return 0;
155 for (i = 0; i < n; ++i)
157 tree op = gimple_phi_arg_def (phi, i);
158 if (TREE_CODE (op) == SSA_NAME
159 && ssa_undefined_value_p (op)
160 && !can_skip_redundant_opnd (op, phi))
161 MASK_SET_BIT (uninit_opnds, i);
163 return uninit_opnds;
166 /* Find the immediate postdominator PDOM of the specified
167 basic block BLOCK. */
169 static inline basic_block
170 find_pdom (basic_block block)
172 if (block == EXIT_BLOCK_PTR)
173 return EXIT_BLOCK_PTR;
174 else
176 basic_block bb
177 = get_immediate_dominator (CDI_POST_DOMINATORS, block);
178 if (! bb)
179 return EXIT_BLOCK_PTR;
180 return bb;
184 /* Find the immediate DOM of the specified
185 basic block BLOCK. */
187 static inline basic_block
188 find_dom (basic_block block)
190 if (block == ENTRY_BLOCK_PTR)
191 return ENTRY_BLOCK_PTR;
192 else
194 basic_block bb = get_immediate_dominator (CDI_DOMINATORS, block);
195 if (! bb)
196 return ENTRY_BLOCK_PTR;
197 return bb;
 201 /* Returns true if BB1 postdominates BB2 and BB1 is
202 not a loop exit bb. The loop exit bb check is simple and does
203 not cover all cases. */
205 static bool
206 is_non_loop_exit_postdominating (basic_block bb1, basic_block bb2)
208 if (!dominated_by_p (CDI_POST_DOMINATORS, bb2, bb1))
209 return false;
211 if (single_pred_p (bb1) && !single_succ_p (bb2))
212 return false;
214 return true;
217 /* Find the closest postdominator of a specified BB, which is control
218 equivalent to BB. */
220 static inline basic_block
221 find_control_equiv_block (basic_block bb)
223 basic_block pdom;
225 pdom = find_pdom (bb);
227 /* Skip the postdominating bb that is also loop exit. */
228 if (!is_non_loop_exit_postdominating (pdom, bb))
229 return NULL;
231 if (dominated_by_p (CDI_DOMINATORS, pdom, bb))
232 return pdom;
234 return NULL;
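/* For instance, in a simple diamond (block names are illustrative)

       bb0:  if (cond) goto bb1; else goto bb2;
       bb1:  ...
       bb2:  ...
       bb3:  join

   bb3 postdominates bb0 and is dominated by it, so bb3 is the control
   equivalent block of bb0; bb1 and bb2 are not, since neither of them
   postdominates bb0.  */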
237 #define MAX_NUM_CHAINS 8
238 #define MAX_CHAIN_LEN 5
240 /* Computes the control dependence chains (paths of edges)
241 for DEP_BB up to the dominating basic block BB (the head node of a
 242 chain should be dominated by it). CD_CHAINS is a pointer to a
243 dynamic array holding the result chains. CUR_CD_CHAIN is the current
244 chain being computed. *NUM_CHAINS is total number of chains. The
 245 function returns true if the information is successfully computed,
 246 and false if there is no control dependence or it cannot be computed. */
248 static bool
249 compute_control_dep_chain (basic_block bb, basic_block dep_bb,
250 VEC(edge, heap) **cd_chains,
251 size_t *num_chains,
252 VEC(edge, heap) **cur_cd_chain)
254 edge_iterator ei;
255 edge e;
256 size_t i;
257 bool found_cd_chain = false;
258 size_t cur_chain_len = 0;
260 if (EDGE_COUNT (bb->succs) < 2)
261 return false;
263 /* Could use a set instead. */
264 cur_chain_len = VEC_length (edge, *cur_cd_chain);
265 if (cur_chain_len > MAX_CHAIN_LEN)
266 return false;
268 for (i = 0; i < cur_chain_len; i++)
270 edge e = VEC_index (edge, *cur_cd_chain, i);
271 /* cycle detected. */
272 if (e->src == bb)
273 return false;
276 FOR_EACH_EDGE (e, ei, bb->succs)
278 basic_block cd_bb;
279 if (e->flags & (EDGE_FAKE | EDGE_ABNORMAL))
280 continue;
282 cd_bb = e->dest;
283 VEC_safe_push (edge, heap, *cur_cd_chain, e);
284 while (!is_non_loop_exit_postdominating (cd_bb, bb))
286 if (cd_bb == dep_bb)
288 /* Found a direct control dependence. */
289 if (*num_chains < MAX_NUM_CHAINS)
291 cd_chains[*num_chains]
292 = VEC_copy (edge, heap, *cur_cd_chain);
293 (*num_chains)++;
295 found_cd_chain = true;
296 /* check path from next edge. */
297 break;
300 /* Now check if DEP_BB is indirectly control dependent on BB. */
301 if (compute_control_dep_chain (cd_bb, dep_bb, cd_chains,
302 num_chains, cur_cd_chain))
304 found_cd_chain = true;
305 break;
308 cd_bb = find_pdom (cd_bb);
309 if (cd_bb == EXIT_BLOCK_PTR)
310 break;
312 VEC_pop (edge, *cur_cd_chain);
313 gcc_assert (VEC_length (edge, *cur_cd_chain) == cur_chain_len);
315 gcc_assert (VEC_length (edge, *cur_cd_chain) == cur_chain_len);
317 return found_cd_chain;
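/* A small worked example (block names are illustrative): for

       bb1:  if (a) goto bb2; else goto bb5;
       bb2:  if (b) goto bb3; else goto bb5;
       bb3:  ...      <-- DEP_BB
       bb5:  join

   calling the function with BB = bb1 and DEP_BB = bb3 records a single
   chain made of the edges bb1->bb2 and bb2->bb3, i.e. bb3 is reached
   only when both conditions are taken in the true direction.  */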
320 typedef struct use_pred_info
322 gimple cond;
323 bool invert;
324 } *use_pred_info_t;
326 DEF_VEC_P(use_pred_info_t);
327 DEF_VEC_ALLOC_P(use_pred_info_t, heap);
330 /* Converts the chains of control dependence edges into a set of
331 predicates. A control dependence chain is represented by a vector
 332 of edges. DEP_CHAINS points to an array of dependence chains.
 333 NUM_CHAINS is the size of the chain array. One edge in a dependence
 334 chain is mapped to a predicate expression represented by the use_pred_info_t
 335 type. One dependence chain is converted to a composite predicate that
 336 is the result of ANDing the use_pred_info_t predicates mapped to each edge.
 337 A composite predicate is represented by a vector of use_pred_info_t. On
 338 return, *PREDS points to the resulting array of composite predicates.
 339 *NUM_PREDS is the number of composite predicates. */
341 static bool
342 convert_control_dep_chain_into_preds (VEC(edge, heap) **dep_chains,
343 size_t num_chains,
344 VEC(use_pred_info_t, heap) ***preds,
345 size_t *num_preds)
347 bool has_valid_pred = false;
348 size_t i, j;
349 if (num_chains == 0 || num_chains >= MAX_NUM_CHAINS)
350 return false;
352 /* Now convert CD chains into predicates */
353 has_valid_pred = true;
355 /* Now convert the control dep chain into a set
356 of predicates. */
357 *preds = XCNEWVEC (VEC(use_pred_info_t, heap) *,
358 num_chains);
359 *num_preds = num_chains;
361 for (i = 0; i < num_chains; i++)
363 VEC(edge, heap) *one_cd_chain = dep_chains[i];
364 for (j = 0; j < VEC_length (edge, one_cd_chain); j++)
366 gimple cond_stmt;
367 gimple_stmt_iterator gsi;
368 basic_block guard_bb;
369 use_pred_info_t one_pred;
370 edge e;
372 e = VEC_index (edge, one_cd_chain, j);
373 guard_bb = e->src;
374 gsi = gsi_last_bb (guard_bb);
375 if (gsi_end_p (gsi))
377 has_valid_pred = false;
378 break;
380 cond_stmt = gsi_stmt (gsi);
381 if (gimple_code (cond_stmt) == GIMPLE_CALL
382 && EDGE_COUNT (e->src->succs) >= 2)
384 /* Ignore EH edge. Can add assertion
385 on the other edge's flag. */
386 continue;
 388 /* Skip if there is essentially one successor. */
389 if (EDGE_COUNT (e->src->succs) == 2)
391 edge e1;
392 edge_iterator ei1;
393 bool skip = false;
395 FOR_EACH_EDGE (e1, ei1, e->src->succs)
397 if (EDGE_COUNT (e1->dest->succs) == 0)
399 skip = true;
400 break;
403 if (skip)
404 continue;
406 if (gimple_code (cond_stmt) != GIMPLE_COND)
408 has_valid_pred = false;
409 break;
411 one_pred = XNEW (struct use_pred_info);
412 one_pred->cond = cond_stmt;
413 one_pred->invert = !!(e->flags & EDGE_FALSE_VALUE);
414 VEC_safe_push (use_pred_info_t, heap, (*preds)[i], one_pred);
417 if (!has_valid_pred)
418 break;
420 return has_valid_pred;
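/* For example, an edge in a chain that is taken when a GIMPLE_COND such
   as 'if (flag_1 != 0)' (the name is illustrative) evaluates to false
   carries EDGE_FALSE_VALUE, so it is converted into a use_pred_info_t
   whose cond field points at that GIMPLE_COND and whose invert field is
   true; the composite predicate then contains .NOT. (flag_1 != 0).  */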
423 /* Computes all control dependence chains for USE_BB. The control
424 dependence chains are then converted to an array of composite
425 predicates pointed to by PREDS. PHI_BB is the basic block of
426 the phi whose result is used in USE_BB. */
428 static bool
429 find_predicates (VEC(use_pred_info_t, heap) ***preds,
430 size_t *num_preds,
431 basic_block phi_bb,
432 basic_block use_bb)
434 size_t num_chains = 0, i;
435 VEC(edge, heap) **dep_chains = 0;
436 VEC(edge, heap) *cur_chain = 0;
437 bool has_valid_pred = false;
438 basic_block cd_root = 0;
440 dep_chains = XCNEWVEC (VEC(edge, heap) *, MAX_NUM_CHAINS);
442 /* First find the closest bb that is control equivalent to PHI_BB
443 that also dominates USE_BB. */
444 cd_root = phi_bb;
445 while (dominated_by_p (CDI_DOMINATORS, use_bb, cd_root))
447 basic_block ctrl_eq_bb = find_control_equiv_block (cd_root);
448 if (ctrl_eq_bb && dominated_by_p (CDI_DOMINATORS, use_bb, ctrl_eq_bb))
449 cd_root = ctrl_eq_bb;
450 else
451 break;
454 compute_control_dep_chain (cd_root, use_bb,
455 dep_chains, &num_chains,
456 &cur_chain);
458 has_valid_pred
459 = convert_control_dep_chain_into_preds (dep_chains,
460 num_chains,
461 preds,
462 num_preds);
463 /* Free individual chain */
464 VEC_free (edge, heap, cur_chain);
465 for (i = 0; i < num_chains; i++)
466 VEC_free (edge, heap, dep_chains[i]);
467 free (dep_chains);
468 return has_valid_pred;
471 /* Computes the set of incoming edges of PHI that have non empty
472 definitions of a phi chain. The collection will be done
473 recursively on operands that are defined by phis. CD_ROOT
474 is the control dependence root. *EDGES holds the result, and
475 VISITED_PHIS is a pointer set for detecting cycles. */
477 static void
478 collect_phi_def_edges (gimple phi, basic_block cd_root,
479 VEC(edge, heap) **edges,
480 struct pointer_set_t *visited_phis)
482 size_t i, n;
483 edge opnd_edge;
484 tree opnd;
486 if (pointer_set_insert (visited_phis, phi))
487 return;
489 n = gimple_phi_num_args (phi);
490 for (i = 0; i < n; i++)
492 opnd_edge = gimple_phi_arg_edge (phi, i);
493 opnd = gimple_phi_arg_def (phi, i);
495 if (TREE_CODE (opnd) != SSA_NAME)
497 if (dump_file && (dump_flags & TDF_DETAILS))
499 fprintf (dump_file, "\n[CHECK] Found def edge %d in ", (int)i);
500 print_gimple_stmt (dump_file, phi, 0, 0);
502 VEC_safe_push (edge, heap, *edges, opnd_edge);
504 else
506 gimple def = SSA_NAME_DEF_STMT (opnd);
508 if (gimple_code (def) == GIMPLE_PHI
509 && dominated_by_p (CDI_DOMINATORS,
510 gimple_bb (def), cd_root))
511 collect_phi_def_edges (def, cd_root, edges,
512 visited_phis);
513 else if (!ssa_undefined_value_p (opnd))
515 if (dump_file && (dump_flags & TDF_DETAILS))
517 fprintf (dump_file, "\n[CHECK] Found def edge %d in ", (int)i);
518 print_gimple_stmt (dump_file, phi, 0, 0);
520 VEC_safe_push (edge, heap, *edges, opnd_edge);
526 /* For each use edge of PHI, computes all control dependence chains.
527 The control dependence chains are then converted to an array of
528 composite predicates pointed to by PREDS. */
530 static bool
531 find_def_preds (VEC(use_pred_info_t, heap) ***preds,
532 size_t *num_preds, gimple phi)
534 size_t num_chains = 0, i, n;
535 VEC(edge, heap) **dep_chains = 0;
536 VEC(edge, heap) *cur_chain = 0;
537 VEC(edge, heap) *def_edges = 0;
538 bool has_valid_pred = false;
539 basic_block phi_bb, cd_root = 0;
540 struct pointer_set_t *visited_phis;
542 dep_chains = XCNEWVEC (VEC(edge, heap) *, MAX_NUM_CHAINS);
544 phi_bb = gimple_bb (phi);
545 /* First find the closest dominating bb to be
546 the control dependence root */
547 cd_root = find_dom (phi_bb);
548 if (!cd_root)
549 return false;
551 visited_phis = pointer_set_create ();
552 collect_phi_def_edges (phi, cd_root, &def_edges, visited_phis);
553 pointer_set_destroy (visited_phis);
555 n = VEC_length (edge, def_edges);
556 if (n == 0)
557 return false;
559 for (i = 0; i < n; i++)
561 size_t prev_nc, j;
562 edge opnd_edge;
564 opnd_edge = VEC_index (edge, def_edges, i);
565 prev_nc = num_chains;
566 compute_control_dep_chain (cd_root, opnd_edge->src,
567 dep_chains, &num_chains,
568 &cur_chain);
569 /* Free individual chain */
570 VEC_free (edge, heap, cur_chain);
571 cur_chain = 0;
573 /* Now update the newly added chains with
574 the phi operand edge: */
575 if (EDGE_COUNT (opnd_edge->src->succs) > 1)
577 if (prev_nc == num_chains
578 && num_chains < MAX_NUM_CHAINS)
579 num_chains++;
580 for (j = prev_nc; j < num_chains; j++)
582 VEC_safe_push (edge, heap, dep_chains[j], opnd_edge);
587 has_valid_pred
588 = convert_control_dep_chain_into_preds (dep_chains,
589 num_chains,
590 preds,
591 num_preds);
592 for (i = 0; i < num_chains; i++)
593 VEC_free (edge, heap, dep_chains[i]);
594 free (dep_chains);
595 return has_valid_pred;
598 /* Dumps the predicates (PREDS) for USESTMT. */
600 static void
601 dump_predicates (gimple usestmt, size_t num_preds,
602 VEC(use_pred_info_t, heap) **preds,
603 const char* msg)
605 size_t i, j;
606 VEC(use_pred_info_t, heap) *one_pred_chain;
607 fprintf (dump_file, msg);
608 print_gimple_stmt (dump_file, usestmt, 0, 0);
609 fprintf (dump_file, "is guarded by :\n");
610 /* do some dumping here: */
611 for (i = 0; i < num_preds; i++)
613 size_t np;
615 one_pred_chain = preds[i];
616 np = VEC_length (use_pred_info_t, one_pred_chain);
618 for (j = 0; j < np; j++)
620 use_pred_info_t one_pred
621 = VEC_index (use_pred_info_t, one_pred_chain, j);
622 if (one_pred->invert)
623 fprintf (dump_file, " (.NOT.) ");
624 print_gimple_stmt (dump_file, one_pred->cond, 0, 0);
625 if (j < np - 1)
626 fprintf (dump_file, "(.AND.)\n");
628 if (i < num_preds - 1)
629 fprintf (dump_file, "(.OR.)\n");
633 /* Destroys the predicate set *PREDS. */
635 static void
636 destroy_predicate_vecs (size_t n,
637 VEC(use_pred_info_t, heap) ** preds)
639 size_t i, j;
640 for (i = 0; i < n; i++)
642 for (j = 0; j < VEC_length (use_pred_info_t, preds[i]); j++)
643 free (VEC_index (use_pred_info_t, preds[i], j));
644 VEC_free (use_pred_info_t, heap, preds[i]);
646 free (preds);
650 /* Computes the 'normalized' conditional code with operand
651 swapping and condition inversion. */
653 static enum tree_code
654 get_cmp_code (enum tree_code orig_cmp_code,
655 bool swap_cond, bool invert)
657 enum tree_code tc = orig_cmp_code;
659 if (swap_cond)
660 tc = swap_tree_comparison (orig_cmp_code);
661 if (invert)
662 tc = invert_tree_comparison (tc, false);
664 switch (tc)
666 case LT_EXPR:
667 case LE_EXPR:
668 case GT_EXPR:
669 case GE_EXPR:
670 case EQ_EXPR:
671 case NE_EXPR:
672 break;
673 default:
674 return ERROR_MARK;
676 return tc;
679 /* Returns true if VAL falls in the range defined by BOUNDARY and CMPC, i.e.
 680 all values in the range satisfy (x CMPC BOUNDARY) == true. */
682 static bool
683 is_value_included_in (tree val, tree boundary, enum tree_code cmpc)
685 bool inverted = false;
686 bool is_unsigned;
687 bool result;
689 /* Only handle integer constant here. */
690 if (TREE_CODE (val) != INTEGER_CST
691 || TREE_CODE (boundary) != INTEGER_CST)
692 return true;
694 is_unsigned = TYPE_UNSIGNED (TREE_TYPE (val));
696 if (cmpc == GE_EXPR || cmpc == GT_EXPR
697 || cmpc == NE_EXPR)
699 cmpc = invert_tree_comparison (cmpc, false);
700 inverted = true;
703 if (is_unsigned)
705 if (cmpc == EQ_EXPR)
706 result = tree_int_cst_equal (val, boundary);
707 else if (cmpc == LT_EXPR)
708 result = INT_CST_LT_UNSIGNED (val, boundary);
709 else
711 gcc_assert (cmpc == LE_EXPR);
712 result = (tree_int_cst_equal (val, boundary)
713 || INT_CST_LT_UNSIGNED (val, boundary));
716 else
718 if (cmpc == EQ_EXPR)
719 result = tree_int_cst_equal (val, boundary);
720 else if (cmpc == LT_EXPR)
721 result = INT_CST_LT (val, boundary);
722 else
724 gcc_assert (cmpc == LE_EXPR);
725 result = (tree_int_cst_equal (val, boundary)
726 || INT_CST_LT (val, boundary));
730 if (inverted)
731 result ^= 1;
733 return result;
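/* A couple of small examples: with VAL = 3, BOUNDARY = 5 and
   CMPC = LT_EXPR the function returns true since 3 < 5 holds, while
   VAL = 7, BOUNDARY = 5 and CMPC = LE_EXPR yields false. GE_EXPR,
   GT_EXPR and NE_EXPR are handled by inverting the comparison and
   flipping the result, as done above.  */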
736 /* Returns true if PRED is common among all the predicate
737 chains (PREDS) (and therefore can be factored out).
 738 NUM_PRED_CHAINS is the size of the array PREDS. */
740 static bool
741 find_matching_predicate_in_rest_chains (use_pred_info_t pred,
742 VEC(use_pred_info_t, heap) **preds,
743 size_t num_pred_chains)
745 size_t i, j, n;
 747 /* Trivial case. */
748 if (num_pred_chains == 1)
749 return true;
751 for (i = 1; i < num_pred_chains; i++)
753 bool found = false;
754 VEC(use_pred_info_t, heap) *one_chain = preds[i];
755 n = VEC_length (use_pred_info_t, one_chain);
756 for (j = 0; j < n; j++)
758 use_pred_info_t pred2
759 = VEC_index (use_pred_info_t, one_chain, j);
 760 /* We could relax the condition comparison to not
761 use address comparison. However, the most common
762 case is that multiple control dependent paths share
763 a common path prefix, so address comparison should
764 be ok. */
766 if (pred2->cond == pred->cond
767 && pred2->invert == pred->invert)
769 found = true;
770 break;
773 if (!found)
774 return false;
776 return true;
779 /* Forward declaration. */
780 static bool
781 is_use_properly_guarded (gimple use_stmt,
782 basic_block use_bb,
783 gimple phi,
784 unsigned uninit_opnds,
785 struct pointer_set_t *visited_phis);
787 /* Returns true if all uninitialized opnds are pruned. Returns false
788 otherwise. PHI is the phi node with uninitialized operands,
 789 UNINIT_OPNDS is the bitmap of the uninitialized operand positions,
790 FLAG_DEF is the statement defining the flag guarding the use of the
791 PHI output, BOUNDARY_CST is the const value used in the predicate
792 associated with the flag, CMP_CODE is the comparison code used in
793 the predicate, VISITED_PHIS is the pointer set of phis visited, and
794 VISITED_FLAG_PHIS is the pointer to the pointer set of flag definitions
795 that are also phis.
797 Example scenario:
799 BB1:
800 flag_1 = phi <0, 1> // (1)
801 var_1 = phi <undef, some_val>
804 BB2:
805 flag_2 = phi <0, flag_1, flag_1> // (2)
806 var_2 = phi <undef, var_1, var_1>
807 if (flag_2 == 1)
808 goto BB3;
810 BB3:
811 use of var_2 // (3)
 813 Because some flag arg in (1) is not constant, if we do not look into the
 814 flag phis recursively, it is conservatively treated as unknown and var_1
 815 is assumed to flow into the use at (3). Since var_1 is potentially uninitialized,
 816 a false warning will be emitted. By checking into (1) recursively, the compiler can
 817 find out that only some_val (which is defined) can flow into (3), which is OK.
821 static bool
822 prune_uninit_phi_opnds_in_unrealizable_paths (
823 gimple phi, unsigned uninit_opnds,
824 gimple flag_def, tree boundary_cst,
825 enum tree_code cmp_code,
826 struct pointer_set_t *visited_phis,
827 bitmap *visited_flag_phis)
829 unsigned i;
831 for (i = 0; i < MIN (32, gimple_phi_num_args (flag_def)); i++)
833 tree flag_arg;
835 if (!MASK_TEST_BIT (uninit_opnds, i))
836 continue;
838 flag_arg = gimple_phi_arg_def (flag_def, i);
839 if (!is_gimple_constant (flag_arg))
841 gimple flag_arg_def, phi_arg_def;
842 tree phi_arg;
843 unsigned uninit_opnds_arg_phi;
845 if (TREE_CODE (flag_arg) != SSA_NAME)
846 return false;
847 flag_arg_def = SSA_NAME_DEF_STMT (flag_arg);
848 if (gimple_code (flag_arg_def) != GIMPLE_PHI)
849 return false;
851 phi_arg = gimple_phi_arg_def (phi, i);
852 if (TREE_CODE (phi_arg) != SSA_NAME)
853 return false;
855 phi_arg_def = SSA_NAME_DEF_STMT (phi_arg);
856 if (gimple_code (phi_arg_def) != GIMPLE_PHI)
857 return false;
859 if (gimple_bb (phi_arg_def) != gimple_bb (flag_arg_def))
860 return false;
862 if (!*visited_flag_phis)
863 *visited_flag_phis = BITMAP_ALLOC (NULL);
865 if (bitmap_bit_p (*visited_flag_phis,
866 SSA_NAME_VERSION (gimple_phi_result (flag_arg_def))))
867 return false;
869 bitmap_set_bit (*visited_flag_phis,
870 SSA_NAME_VERSION (gimple_phi_result (flag_arg_def)));
872 /* Now recursively prune the uninitialized phi args. */
873 uninit_opnds_arg_phi = compute_uninit_opnds_pos (phi_arg_def);
874 if (!prune_uninit_phi_opnds_in_unrealizable_paths (
875 phi_arg_def, uninit_opnds_arg_phi,
876 flag_arg_def, boundary_cst, cmp_code,
877 visited_phis, visited_flag_phis))
878 return false;
880 bitmap_clear_bit (*visited_flag_phis,
881 SSA_NAME_VERSION (gimple_phi_result (flag_arg_def)));
882 continue;
885 /* Now check if the constant is in the guarded range. */
886 if (is_value_included_in (flag_arg, boundary_cst, cmp_code))
888 tree opnd;
889 gimple opnd_def;
 891 /* Now we know that this undefined edge is not
 892 pruned. If the operand is defined by another phi,
 893 we can further prune the incoming edges of that
 894 phi by checking the predicates of this operand. */
896 opnd = gimple_phi_arg_def (phi, i);
897 opnd_def = SSA_NAME_DEF_STMT (opnd);
898 if (gimple_code (opnd_def) == GIMPLE_PHI)
900 edge opnd_edge;
901 unsigned uninit_opnds2
902 = compute_uninit_opnds_pos (opnd_def);
903 gcc_assert (!MASK_EMPTY (uninit_opnds2));
904 opnd_edge = gimple_phi_arg_edge (phi, i);
905 if (!is_use_properly_guarded (phi,
906 opnd_edge->src,
907 opnd_def,
908 uninit_opnds2,
909 visited_phis))
910 return false;
912 else
913 return false;
917 return true;
920 /* A helper function that determines if the predicate set
 921 of the use does not overlap with that of the uninit paths.
 922 The most common scenario of guarded use is in Example 1:
923 Example 1:
924 if (some_cond)
926 x = ...;
927 flag = true;
930 ... some code ...
932 if (flag)
933 use (x);
935 The real world examples are usually more complicated, but similar
936 and usually result from inlining:
938 bool init_func (int * x)
940 if (some_cond)
941 return false;
942 *x = ..
943 return true;
946 void foo(..)
948 int x;
950 if (!init_func(&x))
951 return;
953 .. some_code ...
954 use (x);
957 Another possible use scenario is in the following trivial example:
959 Example 2:
960 if (n > 0)
961 x = 1;
963 if (n > 0)
965 if (m < 2)
966 .. = x;
969 Predicate analysis needs to compute the composite predicate:
971 1) 'x' use predicate: (n > 0) .AND. (m < 2)
972 2) 'x' default value (non-def) predicate: .NOT. (n > 0)
973 (the predicate chain for phi operand defs can be computed
974 starting from a bb that is control equivalent to the phi's
975 bb and is dominating the operand def.)
977 and check overlapping:
978 (n > 0) .AND. (m < 2) .AND. (.NOT. (n > 0))
979 <==> false
 981 This implementation provides a framework that can handle such
 982 scenarios. (Note that many simple cases are handled properly
 983 without the predicate analysis -- this is due to the jump threading
 984 transformation, which eliminates the merge point and thus makes
 985 path-sensitive analysis unnecessary.)
 987 NUM_PREDS is the number of predicate chains, PREDS is
988 the array of chains, PHI is the phi node whose incoming (undefined)
989 paths need to be pruned, and UNINIT_OPNDS is the bitmap holding
990 uninit operand positions. VISITED_PHIS is the pointer set of phi
991 stmts being checked. */
994 static bool
995 use_pred_not_overlap_with_undef_path_pred (
996 size_t num_preds,
997 VEC(use_pred_info_t, heap) **preds,
998 gimple phi, unsigned uninit_opnds,
999 struct pointer_set_t *visited_phis)
1001 unsigned int i, n;
1002 gimple flag_def = 0;
1003 tree boundary_cst = 0;
1004 enum tree_code cmp_code;
1005 bool swap_cond = false;
1006 bool invert = false;
1007 VEC(use_pred_info_t, heap) *the_pred_chain;
1008 bitmap visited_flag_phis = NULL;
1009 bool all_pruned = false;
1011 gcc_assert (num_preds > 0);
1012 /* Find within the common prefix of multiple predicate chains
1013 a predicate that is a comparison of a flag variable against
1014 a constant. */
1015 the_pred_chain = preds[0];
1016 n = VEC_length (use_pred_info_t, the_pred_chain);
1017 for (i = 0; i < n; i++)
1019 gimple cond;
1020 tree cond_lhs, cond_rhs, flag = 0;
1022 use_pred_info_t the_pred
1023 = VEC_index (use_pred_info_t, the_pred_chain, i);
1025 cond = the_pred->cond;
1026 invert = the_pred->invert;
1027 cond_lhs = gimple_cond_lhs (cond);
1028 cond_rhs = gimple_cond_rhs (cond);
1029 cmp_code = gimple_cond_code (cond);
1031 if (cond_lhs != NULL_TREE && TREE_CODE (cond_lhs) == SSA_NAME
1032 && cond_rhs != NULL_TREE && is_gimple_constant (cond_rhs))
1034 boundary_cst = cond_rhs;
1035 flag = cond_lhs;
1037 else if (cond_rhs != NULL_TREE && TREE_CODE (cond_rhs) == SSA_NAME
1038 && cond_lhs != NULL_TREE && is_gimple_constant (cond_lhs))
1040 boundary_cst = cond_lhs;
1041 flag = cond_rhs;
1042 swap_cond = true;
1045 if (!flag)
1046 continue;
1048 flag_def = SSA_NAME_DEF_STMT (flag);
1050 if (!flag_def)
1051 continue;
1053 if ((gimple_code (flag_def) == GIMPLE_PHI)
1054 && (gimple_bb (flag_def) == gimple_bb (phi))
1055 && find_matching_predicate_in_rest_chains (
1056 the_pred, preds, num_preds))
1057 break;
1059 flag_def = 0;
1062 if (!flag_def)
1063 return false;
 1065 /* Now check that every uninit incoming edge has a constant flag value
 1066 that is in conflict with the use guard/predicate. */
1067 cmp_code = get_cmp_code (cmp_code, swap_cond, invert);
1069 if (cmp_code == ERROR_MARK)
1070 return false;
1072 all_pruned = prune_uninit_phi_opnds_in_unrealizable_paths (phi,
1073 uninit_opnds,
1074 flag_def,
1075 boundary_cst,
1076 cmp_code,
1077 visited_phis,
1078 &visited_flag_phis);
1080 if (visited_flag_phis)
1081 BITMAP_FREE (visited_flag_phis);
1083 return all_pruned;
1086 /* Returns true if TC is AND or OR */
1088 static inline bool
1089 is_and_or_or (enum tree_code tc, tree typ)
1091 return (tc == TRUTH_AND_EXPR
1092 || tc == TRUTH_OR_EXPR
1093 || tc == BIT_IOR_EXPR
1094 || (tc == BIT_AND_EXPR
1095 && (typ == 0 || TREE_CODE (typ) == BOOLEAN_TYPE)));
1098 typedef struct norm_cond
1100 VEC(gimple, heap) *conds;
1101 enum tree_code cond_code;
1102 bool invert;
1103 } *norm_cond_t;
1106 /* Normalizes gimple condition COND. The normalization follows
1107 UD chains to form larger condition expression trees. NORM_COND
1108 holds the normalized result. COND_CODE is the logical opcode
1109 (AND or OR) of the normalized tree. */
1111 static void
1112 normalize_cond_1 (gimple cond,
1113 norm_cond_t norm_cond,
1114 enum tree_code cond_code)
1116 enum gimple_code gc;
1117 enum tree_code cur_cond_code;
1118 tree rhs1, rhs2;
1120 gc = gimple_code (cond);
1121 if (gc != GIMPLE_ASSIGN)
1123 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1124 return;
1127 cur_cond_code = gimple_assign_rhs_code (cond);
1128 rhs1 = gimple_assign_rhs1 (cond);
1129 rhs2 = gimple_assign_rhs2 (cond);
1130 if (cur_cond_code == NE_EXPR)
1132 if (integer_zerop (rhs2)
1133 && (TREE_CODE (rhs1) == SSA_NAME))
1134 normalize_cond_1 (
1135 SSA_NAME_DEF_STMT (rhs1),
1136 norm_cond, cond_code);
1137 else if (integer_zerop (rhs1)
1138 && (TREE_CODE (rhs2) == SSA_NAME))
1139 normalize_cond_1 (
1140 SSA_NAME_DEF_STMT (rhs2),
1141 norm_cond, cond_code);
1142 else
1143 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1145 return;
1148 if (is_and_or_or (cur_cond_code, TREE_TYPE (rhs1))
1149 && (cond_code == cur_cond_code || cond_code == ERROR_MARK)
1150 && (TREE_CODE (rhs1) == SSA_NAME && TREE_CODE (rhs2) == SSA_NAME))
1152 normalize_cond_1 (SSA_NAME_DEF_STMT (rhs1),
1153 norm_cond, cur_cond_code);
1154 normalize_cond_1 (SSA_NAME_DEF_STMT (rhs2),
1155 norm_cond, cur_cond_code);
1156 norm_cond->cond_code = cur_cond_code;
1158 else
1159 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1162 /* See normalize_cond_1 for details. INVERT is a flag to indicate
1163 if COND needs to be inverted or not. */
1165 static void
1166 normalize_cond (gimple cond, norm_cond_t norm_cond, bool invert)
1168 enum tree_code cond_code;
1170 norm_cond->cond_code = ERROR_MARK;
1171 norm_cond->invert = false;
1172 norm_cond->conds = NULL;
1173 gcc_assert (gimple_code (cond) == GIMPLE_COND);
1174 cond_code = gimple_cond_code (cond);
1175 if (invert)
1176 cond_code = invert_tree_comparison (cond_code, false);
1178 if (cond_code == NE_EXPR)
1180 if (integer_zerop (gimple_cond_rhs (cond))
1181 && (TREE_CODE (gimple_cond_lhs (cond)) == SSA_NAME))
1182 normalize_cond_1 (
1183 SSA_NAME_DEF_STMT (gimple_cond_lhs (cond)),
1184 norm_cond, ERROR_MARK);
1185 else if (integer_zerop (gimple_cond_lhs (cond))
1186 && (TREE_CODE (gimple_cond_rhs (cond)) == SSA_NAME))
1187 normalize_cond_1 (
1188 SSA_NAME_DEF_STMT (gimple_cond_rhs (cond)),
1189 norm_cond, ERROR_MARK);
1190 else
1192 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1193 norm_cond->invert = invert;
1196 else
1198 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1199 norm_cond->invert = invert;
1202 gcc_assert (VEC_length (gimple, norm_cond->conds) == 1
1203 || is_and_or_or (norm_cond->cond_code, NULL));
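/* For instance (the SSA names are illustrative), for a GIMPLE_COND
   't_1 != 0' where t_1 = a_2 & b_3 and the operands are boolean,
   the use-def chain is followed and the resulting NORM_COND holds the
   defining statements of a_2 and b_3 in its conds vector, with
   cond_code set to BIT_AND_EXPR.  */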
 1206 /* Returns true if the domain of condition COND1 is a subset of
 1207 that of COND2. REVERSE is a flag; when it is true the function checks
1208 if COND1 is a superset of COND2. INVERT1 and INVERT2 are flags
1209 to indicate if COND1 and COND2 need to be inverted or not. */
1211 static bool
1212 is_gcond_subset_of (gimple cond1, bool invert1,
1213 gimple cond2, bool invert2,
1214 bool reverse)
1216 enum gimple_code gc1, gc2;
1217 enum tree_code cond1_code, cond2_code;
1218 gimple tmp;
1219 tree cond1_lhs, cond1_rhs, cond2_lhs, cond2_rhs;
1221 /* Take the short cut. */
1222 if (cond1 == cond2)
1223 return true;
1225 if (reverse)
1227 tmp = cond1;
1228 cond1 = cond2;
1229 cond2 = tmp;
1232 gc1 = gimple_code (cond1);
1233 gc2 = gimple_code (cond2);
1235 if ((gc1 != GIMPLE_ASSIGN && gc1 != GIMPLE_COND)
1236 || (gc2 != GIMPLE_ASSIGN && gc2 != GIMPLE_COND))
1237 return cond1 == cond2;
1239 cond1_code = ((gc1 == GIMPLE_ASSIGN)
1240 ? gimple_assign_rhs_code (cond1)
1241 : gimple_cond_code (cond1));
1243 cond2_code = ((gc2 == GIMPLE_ASSIGN)
1244 ? gimple_assign_rhs_code (cond2)
1245 : gimple_cond_code (cond2));
1247 if (TREE_CODE_CLASS (cond1_code) != tcc_comparison
1248 || TREE_CODE_CLASS (cond2_code) != tcc_comparison)
1249 return false;
1251 if (invert1)
1252 cond1_code = invert_tree_comparison (cond1_code, false);
1253 if (invert2)
1254 cond2_code = invert_tree_comparison (cond2_code, false);
1256 cond1_lhs = ((gc1 == GIMPLE_ASSIGN)
1257 ? gimple_assign_rhs1 (cond1)
1258 : gimple_cond_lhs (cond1));
1259 cond1_rhs = ((gc1 == GIMPLE_ASSIGN)
1260 ? gimple_assign_rhs2 (cond1)
1261 : gimple_cond_rhs (cond1));
1262 cond2_lhs = ((gc2 == GIMPLE_ASSIGN)
1263 ? gimple_assign_rhs1 (cond2)
1264 : gimple_cond_lhs (cond2));
1265 cond2_rhs = ((gc2 == GIMPLE_ASSIGN)
1266 ? gimple_assign_rhs2 (cond2)
1267 : gimple_cond_rhs (cond2));
1269 /* Assuming const operands have been swapped to the
1270 rhs at this point of the analysis. */
1272 if (cond1_lhs != cond2_lhs)
1273 return false;
1275 if (!is_gimple_constant (cond1_rhs)
1276 || TREE_CODE (cond1_rhs) != INTEGER_CST)
1277 return (cond1_rhs == cond2_rhs);
1279 if (!is_gimple_constant (cond2_rhs)
1280 || TREE_CODE (cond2_rhs) != INTEGER_CST)
1281 return (cond1_rhs == cond2_rhs);
1283 if (cond1_code == EQ_EXPR)
1284 return is_value_included_in (cond1_rhs,
1285 cond2_rhs, cond2_code);
1286 if (cond1_code == NE_EXPR || cond2_code == EQ_EXPR)
1287 return ((cond2_code == cond1_code)
1288 && tree_int_cst_equal (cond1_rhs, cond2_rhs));
1290 if (((cond1_code == GE_EXPR || cond1_code == GT_EXPR)
1291 && (cond2_code == LE_EXPR || cond2_code == LT_EXPR))
1292 || ((cond1_code == LE_EXPR || cond1_code == LT_EXPR)
1293 && (cond2_code == GE_EXPR || cond2_code == GT_EXPR)))
1294 return false;
1296 if (cond1_code != GE_EXPR && cond1_code != GT_EXPR
1297 && cond1_code != LE_EXPR && cond1_code != LT_EXPR)
1298 return false;
1300 if (cond1_code == GT_EXPR)
1302 cond1_code = GE_EXPR;
1303 cond1_rhs = fold_binary (PLUS_EXPR, TREE_TYPE (cond1_rhs),
1304 cond1_rhs,
1305 fold_convert (TREE_TYPE (cond1_rhs),
1306 integer_one_node));
1308 else if (cond1_code == LT_EXPR)
1310 cond1_code = LE_EXPR;
1311 cond1_rhs = fold_binary (MINUS_EXPR, TREE_TYPE (cond1_rhs),
1312 cond1_rhs,
1313 fold_convert (TREE_TYPE (cond1_rhs),
1314 integer_one_node));
1317 if (!cond1_rhs)
1318 return false;
1320 gcc_assert (cond1_code == GE_EXPR || cond1_code == LE_EXPR);
1322 if (cond2_code == GE_EXPR || cond2_code == GT_EXPR ||
1323 cond2_code == LE_EXPR || cond2_code == LT_EXPR)
1324 return is_value_included_in (cond1_rhs,
1325 cond2_rhs, cond2_code);
1326 else if (cond2_code == NE_EXPR)
1327 return
1328 (is_value_included_in (cond1_rhs,
1329 cond2_rhs, cond2_code)
1330 && !is_value_included_in (cond2_rhs,
1331 cond1_rhs, cond1_code));
1332 return false;
1335 /* Returns true if the domain of the condition expression
1336 in COND is a subset of any of the sub-conditions
 1337 of the normalized condition NORM_COND. INVERT is a flag
 1338 to indicate whether COND needs to be inverted.
1339 REVERSE is a flag. When it is true, the check is reversed --
1340 it returns true if COND is a superset of any of the subconditions
1341 of NORM_COND. */
1343 static bool
1344 is_subset_of_any (gimple cond, bool invert,
1345 norm_cond_t norm_cond, bool reverse)
1347 size_t i;
1348 size_t len = VEC_length (gimple, norm_cond->conds);
1350 for (i = 0; i < len; i++)
1352 if (is_gcond_subset_of (cond, invert,
1353 VEC_index (gimple, norm_cond->conds, i),
1354 false, reverse))
1355 return true;
1357 return false;
1360 /* NORM_COND1 and NORM_COND2 are normalized logical/BIT OR
1361 expressions (formed by following UD chains not control
 1362 dependence chains). The function returns true if the domain
 1363 of the expression NORM_COND1 is a subset of NORM_COND2's.
 1364 The implementation is conservative, and it returns false if
 1365 the inclusion relationship cannot be proved to hold.
1367 static bool
1368 is_or_set_subset_of (norm_cond_t norm_cond1,
1369 norm_cond_t norm_cond2)
1371 size_t i;
1372 size_t len = VEC_length (gimple, norm_cond1->conds);
1374 for (i = 0; i < len; i++)
1376 if (!is_subset_of_any (VEC_index (gimple, norm_cond1->conds, i),
1377 false, norm_cond2, false))
1378 return false;
1380 return true;
1383 /* NORM_COND1 and NORM_COND2 are normalized logical AND
1384 expressions (formed by following UD chains not control
 1385 dependence chains). The function returns true if the domain
 1386 of the expression NORM_COND1 is a subset of NORM_COND2's. */
1388 static bool
1389 is_and_set_subset_of (norm_cond_t norm_cond1,
1390 norm_cond_t norm_cond2)
1392 size_t i;
1393 size_t len = VEC_length (gimple, norm_cond2->conds);
1395 for (i = 0; i < len; i++)
1397 if (!is_subset_of_any (VEC_index (gimple, norm_cond2->conds, i),
1398 false, norm_cond1, true))
1399 return false;
1401 return true;
 1404 /* Returns true if the domain of NORM_COND1 is a subset
 1405 of that of NORM_COND2. Returns false if it cannot be
1406 proved to be so. */
1408 static bool
1409 is_norm_cond_subset_of (norm_cond_t norm_cond1,
1410 norm_cond_t norm_cond2)
1412 size_t i;
1413 enum tree_code code1, code2;
1415 code1 = norm_cond1->cond_code;
1416 code2 = norm_cond2->cond_code;
1418 if (code1 == TRUTH_AND_EXPR || code1 == BIT_AND_EXPR)
1420 /* Both conditions are AND expressions. */
1421 if (code2 == TRUTH_AND_EXPR || code2 == BIT_AND_EXPR)
1422 return is_and_set_subset_of (norm_cond1, norm_cond2);
1423 /* NORM_COND1 is an AND expression, and NORM_COND2 is an OR
1424 expression. In this case, returns true if any subexpression
1425 of NORM_COND1 is a subset of any subexpression of NORM_COND2. */
1426 else if (code2 == TRUTH_OR_EXPR || code2 == BIT_IOR_EXPR)
1428 size_t len1;
1429 len1 = VEC_length (gimple, norm_cond1->conds);
1430 for (i = 0; i < len1; i++)
1432 gimple cond1 = VEC_index (gimple, norm_cond1->conds, i);
1433 if (is_subset_of_any (cond1, false, norm_cond2, false))
1434 return true;
1436 return false;
1438 else
1440 gcc_assert (code2 == ERROR_MARK);
1441 gcc_assert (VEC_length (gimple, norm_cond2->conds) == 1);
1442 return is_subset_of_any (VEC_index (gimple, norm_cond2->conds, 0),
1443 norm_cond2->invert, norm_cond1, true);
1446 /* NORM_COND1 is an OR expression */
1447 else if (code1 == TRUTH_OR_EXPR || code1 == BIT_IOR_EXPR)
1449 if (code2 != code1)
1450 return false;
1452 return is_or_set_subset_of (norm_cond1, norm_cond2);
1454 else
1456 gcc_assert (code1 == ERROR_MARK);
1457 gcc_assert (VEC_length (gimple, norm_cond1->conds) == 1);
 1458 /* Conservatively returns false if NORM_COND1 is non-decomposable
1459 and NORM_COND2 is an AND expression. */
1460 if (code2 == TRUTH_AND_EXPR || code2 == BIT_AND_EXPR)
1461 return false;
1463 if (code2 == TRUTH_OR_EXPR || code2 == BIT_IOR_EXPR)
1464 return is_subset_of_any (VEC_index (gimple, norm_cond1->conds, 0),
1465 norm_cond1->invert, norm_cond2, false);
1467 gcc_assert (code2 == ERROR_MARK);
1468 gcc_assert (VEC_length (gimple, norm_cond2->conds) == 1);
1469 return is_gcond_subset_of (VEC_index (gimple, norm_cond1->conds, 0),
1470 norm_cond1->invert,
1471 VEC_index (gimple, norm_cond2->conds, 0),
1472 norm_cond2->invert, false);
 1476 /* Returns true if the domain of the single predicate expression
 1477 EXPR1 is a subset of that of EXPR2. Returns false if it
 1478 cannot be proved. */
1480 static bool
1481 is_pred_expr_subset_of (use_pred_info_t expr1,
1482 use_pred_info_t expr2)
1484 gimple cond1, cond2;
1485 enum tree_code code1, code2;
1486 struct norm_cond norm_cond1, norm_cond2;
1487 bool is_subset = false;
1489 cond1 = expr1->cond;
1490 cond2 = expr2->cond;
1491 code1 = gimple_cond_code (cond1);
1492 code2 = gimple_cond_code (cond2);
1494 if (expr1->invert)
1495 code1 = invert_tree_comparison (code1, false);
1496 if (expr2->invert)
1497 code2 = invert_tree_comparison (code2, false);
1499 /* Fast path -- match exactly */
1500 if ((gimple_cond_lhs (cond1) == gimple_cond_lhs (cond2))
1501 && (gimple_cond_rhs (cond1) == gimple_cond_rhs (cond2))
1502 && (code1 == code2))
1503 return true;
1505 /* Normalize conditions. To keep NE_EXPR, do not invert
1506 with both need inversion. */
1507 normalize_cond (cond1, &norm_cond1, (expr1->invert));
1508 normalize_cond (cond2, &norm_cond2, (expr2->invert));
1510 is_subset = is_norm_cond_subset_of (&norm_cond1, &norm_cond2);
1512 /* Free memory */
1513 VEC_free (gimple, heap, norm_cond1.conds);
1514 VEC_free (gimple, heap, norm_cond2.conds);
1515 return is_subset ;
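/* E.g. (with an illustrative SSA name), the domain of 'n_1 < 5' is a
   subset of that of 'n_1 < 10': is_gcond_subset_of rewrites the first
   comparison into 'n_1 <= 4' and then checks
   is_value_included_in (4, 10, LT_EXPR), which holds.  */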
1518 /* Returns true if the domain of PRED1 is a subset
1519 of that of PRED2. Returns false if it can not be proved so. */
1521 static bool
1522 is_pred_chain_subset_of (VEC(use_pred_info_t, heap) *pred1,
1523 VEC(use_pred_info_t, heap) *pred2)
1525 size_t np1, np2, i1, i2;
1527 np1 = VEC_length (use_pred_info_t, pred1);
1528 np2 = VEC_length (use_pred_info_t, pred2);
1530 for (i2 = 0; i2 < np2; i2++)
1532 bool found = false;
1533 use_pred_info_t info2
1534 = VEC_index (use_pred_info_t, pred2, i2);
1535 for (i1 = 0; i1 < np1; i1++)
1537 use_pred_info_t info1
1538 = VEC_index (use_pred_info_t, pred1, i1);
1539 if (is_pred_expr_subset_of (info1, info2))
1541 found = true;
1542 break;
1545 if (!found)
1546 return false;
1548 return true;
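/* For example (with illustrative predicates), the chain
   { n_1 > 0, m_2 < 2 } is a subset of the chain { n_1 > 0 }: every
   predicate of the second chain is implied by some predicate of the
   first, so the ANDed domain of the first chain is contained in the
   domain of the second.  */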
1551 /* Returns true if the domain defined by
1552 one pred chain ONE_PRED is a subset of the domain
1553 of *PREDS. It returns false if ONE_PRED's domain is
 1554 not a subset of any of the sub-domains of PREDS
 1555 (each corresponding to an individual chain in it), even
 1556 though it may still be a subset of the whole domain
 1557 of PREDS, which is the union (ORed) of all its subdomains.
1558 In other words, the result is conservative. */
1560 static bool
1561 is_included_in (VEC(use_pred_info_t, heap) *one_pred,
1562 VEC(use_pred_info_t, heap) **preds,
1563 size_t n)
1565 size_t i;
1567 for (i = 0; i < n; i++)
1569 if (is_pred_chain_subset_of (one_pred, preds[i]))
1570 return true;
1573 return false;
 1576 /* Compares two predicate sets PREDS1 and PREDS2 and returns
1577 true if the domain defined by PREDS1 is a superset
1578 of PREDS2's domain. N1 and N2 are array sizes of PREDS1 and
1579 PREDS2 respectively. The implementation chooses not to build
 1580 generic trees (and rely on the folding capability of the
1581 compiler), but instead performs brute force comparison of
1582 individual predicate chains (won't be a compile time problem
1583 as the chains are pretty short). When the function returns
 1584 false, it does not necessarily mean *PREDS1 is not a superset
 1585 of *PREDS2, but only that the analysis cannot prove that it
 1586 is. In such cases, false warnings may still be
1587 emitted. */
1589 static bool
1590 is_superset_of (VEC(use_pred_info_t, heap) **preds1,
1591 size_t n1,
1592 VEC(use_pred_info_t, heap) **preds2,
1593 size_t n2)
1595 size_t i;
1596 VEC(use_pred_info_t, heap) *one_pred_chain;
1598 for (i = 0; i < n2; i++)
1600 one_pred_chain = preds2[i];
1601 if (!is_included_in (one_pred_chain, preds1, n1))
1602 return false;
1605 return true;
1608 /* Computes the predicates that guard the use and checks
1609 if the incoming paths that have empty (or possibly
 1610 empty) definition can be pruned/filtered. The function returns
1611 true if it can be determined that the use of PHI's def in
1612 USE_STMT is guarded with a predicate set not overlapping with
1613 predicate sets of all runtime paths that do not have a definition.
1614 Returns false if it is not or it can not be determined. USE_BB is
1615 the bb of the use (for phi operand use, the bb is not the bb of
1616 the phi stmt, but the src bb of the operand edge). UNINIT_OPNDS
1617 is a bit vector. If an operand of PHI is uninitialized, the
 1618 corresponding bit in the vector is 1. VISITED_PHIS is a pointer
 1619 set of phis being visited. */
1621 static bool
1622 is_use_properly_guarded (gimple use_stmt,
1623 basic_block use_bb,
1624 gimple phi,
1625 unsigned uninit_opnds,
1626 struct pointer_set_t *visited_phis)
1628 basic_block phi_bb;
1629 VEC(use_pred_info_t, heap) **preds = 0;
1630 VEC(use_pred_info_t, heap) **def_preds = 0;
1631 size_t num_preds = 0, num_def_preds = 0;
1632 bool has_valid_preds = false;
1633 bool is_properly_guarded = false;
1635 if (pointer_set_insert (visited_phis, phi))
1636 return false;
1638 phi_bb = gimple_bb (phi);
1640 if (is_non_loop_exit_postdominating (use_bb, phi_bb))
1641 return false;
1643 has_valid_preds = find_predicates (&preds, &num_preds,
1644 phi_bb, use_bb);
1646 if (!has_valid_preds)
1648 destroy_predicate_vecs (num_preds, preds);
1649 return false;
1652 if (dump_file)
1653 dump_predicates (use_stmt, num_preds, preds,
1654 "\nUse in stmt ");
1656 has_valid_preds = find_def_preds (&def_preds,
1657 &num_def_preds, phi);
1659 if (has_valid_preds)
1661 if (dump_file)
1662 dump_predicates (phi, num_def_preds, def_preds,
1663 "Operand defs of phi ");
1664 is_properly_guarded =
1665 is_superset_of (def_preds, num_def_preds,
1666 preds, num_preds);
1669 /* further prune the dead incoming phi edges. */
1670 if (!is_properly_guarded)
1671 is_properly_guarded
1672 = use_pred_not_overlap_with_undef_path_pred (
1673 num_preds, preds, phi, uninit_opnds, visited_phis);
1675 destroy_predicate_vecs (num_preds, preds);
1676 destroy_predicate_vecs (num_def_preds, def_preds);
1677 return is_properly_guarded;
1680 /* Searches through all uses of a potentially
1681 uninitialized variable defined by PHI and returns a use
1682 statement if the use is not properly guarded. It returns
1683 NULL if all uses are guarded. UNINIT_OPNDS is a bitvector
1684 holding the position(s) of uninit PHI operands. WORKLIST
1685 is the vector of candidate phis that may be updated by this
1686 function. ADDED_TO_WORKLIST is the pointer set tracking
1687 if the new phi is already in the worklist. */
1689 static gimple
1690 find_uninit_use (gimple phi, unsigned uninit_opnds,
1691 VEC(gimple, heap) **worklist,
1692 struct pointer_set_t *added_to_worklist)
1694 tree phi_result;
1695 use_operand_p use_p;
1696 gimple use_stmt;
1697 imm_use_iterator iter;
1699 phi_result = gimple_phi_result (phi);
1701 FOR_EACH_IMM_USE_FAST (use_p, iter, phi_result)
1703 struct pointer_set_t *visited_phis;
1704 basic_block use_bb;
1706 use_stmt = USE_STMT (use_p);
1707 if (is_gimple_debug (use_stmt))
1708 continue;
1710 visited_phis = pointer_set_create ();
1712 if (gimple_code (use_stmt) == GIMPLE_PHI)
1713 use_bb = gimple_phi_arg_edge (use_stmt,
1714 PHI_ARG_INDEX_FROM_USE (use_p))->src;
1715 else
1716 use_bb = gimple_bb (use_stmt);
1718 if (is_use_properly_guarded (use_stmt,
1719 use_bb,
1720 phi,
1721 uninit_opnds,
1722 visited_phis))
1724 pointer_set_destroy (visited_phis);
1725 continue;
1727 pointer_set_destroy (visited_phis);
1729 if (dump_file && (dump_flags & TDF_DETAILS))
1731 fprintf (dump_file, "[CHECK]: Found unguarded use: ");
1732 print_gimple_stmt (dump_file, use_stmt, 0, 0);
1734 /* Found one real use, return. */
1735 if (gimple_code (use_stmt) != GIMPLE_PHI)
1736 return use_stmt;
1738 /* Found a phi use that is not guarded,
1739 add the phi to the worklist. */
1740 if (!pointer_set_insert (added_to_worklist,
1741 use_stmt))
1743 if (dump_file && (dump_flags & TDF_DETAILS))
1745 fprintf (dump_file, "[WORKLIST]: Update worklist with phi: ");
1746 print_gimple_stmt (dump_file, use_stmt, 0, 0);
1749 VEC_safe_push (gimple, heap, *worklist, use_stmt);
1750 pointer_set_insert (possibly_undefined_names,
1751 phi_result);
1755 return NULL;
1758 /* Look for inputs to PHI that are SSA_NAMEs that have empty definitions
 1759 and gives a warning if there exists a runtime path from the entry to a
1760 use of the PHI def that does not contain a definition. In other words,
1761 the warning is on the real use. The more dead paths that can be pruned
 1762 by the compiler, the fewer false positives are reported. WORKLIST
1763 is a vector of candidate phis to be examined. ADDED_TO_WORKLIST is
1764 a pointer set tracking if the new phi is added to the worklist or not. */
1766 static void
1767 warn_uninitialized_phi (gimple phi, VEC(gimple, heap) **worklist,
1768 struct pointer_set_t *added_to_worklist)
1770 unsigned uninit_opnds;
1771 gimple uninit_use_stmt = 0;
1772 tree uninit_op;
1774 /* Don't look at memory tags. */
1775 if (!is_gimple_reg (gimple_phi_result (phi)))
1776 return;
1778 uninit_opnds = compute_uninit_opnds_pos (phi);
1780 if (MASK_EMPTY (uninit_opnds))
1781 return;
1783 if (dump_file && (dump_flags & TDF_DETAILS))
1785 fprintf (dump_file, "[CHECK]: examining phi: ");
1786 print_gimple_stmt (dump_file, phi, 0, 0);
1789 /* Now check if we have any use of the value without proper guard. */
1790 uninit_use_stmt = find_uninit_use (phi, uninit_opnds,
1791 worklist, added_to_worklist);
1793 /* All uses are properly guarded. */
1794 if (!uninit_use_stmt)
1795 return;
1797 uninit_op = gimple_phi_arg_def (phi, MASK_FIRST_SET_BIT (uninit_opnds));
1798 warn_uninit (uninit_op,
1799 "%qD may be used uninitialized in this function",
1800 uninit_use_stmt);
1805 /* Entry point to the late uninitialized warning pass. */
1807 static unsigned int
1808 execute_late_warn_uninitialized (void)
1810 basic_block bb;
1811 gimple_stmt_iterator gsi;
1812 VEC(gimple, heap) *worklist = 0;
1813 struct pointer_set_t *added_to_worklist;
1815 calculate_dominance_info (CDI_DOMINATORS);
1816 calculate_dominance_info (CDI_POST_DOMINATORS);
1817 /* Re-do the plain uninitialized variable check, as optimization may have
1818 straightened control flow. Do this first so that we don't accidentally
1819 get a "may be" warning when we'd have seen an "is" warning later. */
1820 warn_uninitialized_vars (/*warn_possibly_uninitialized=*/1);
1822 timevar_push (TV_TREE_UNINIT);
1824 possibly_undefined_names = pointer_set_create ();
1825 added_to_worklist = pointer_set_create ();
1827 /* Initialize worklist */
1828 FOR_EACH_BB (bb)
1829 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1831 gimple phi = gsi_stmt (gsi);
1832 size_t n, i;
1834 n = gimple_phi_num_args (phi);
1836 /* Don't look at memory tags. */
1837 if (!is_gimple_reg (gimple_phi_result (phi)))
1838 continue;
1840 for (i = 0; i < n; ++i)
1842 tree op = gimple_phi_arg_def (phi, i);
1843 if (TREE_CODE (op) == SSA_NAME
1844 && ssa_undefined_value_p (op))
1846 VEC_safe_push (gimple, heap, worklist, phi);
1847 pointer_set_insert (added_to_worklist, phi);
1848 if (dump_file && (dump_flags & TDF_DETAILS))
1850 fprintf (dump_file, "[WORKLIST]: add to initial list: ");
1851 print_gimple_stmt (dump_file, phi, 0, 0);
1853 break;
1858 while (VEC_length (gimple, worklist) != 0)
1860 gimple cur_phi = 0;
1861 cur_phi = VEC_pop (gimple, worklist);
1862 warn_uninitialized_phi (cur_phi, &worklist, added_to_worklist);
1865 VEC_free (gimple, heap, worklist);
1866 pointer_set_destroy (added_to_worklist);
1867 pointer_set_destroy (possibly_undefined_names);
1868 possibly_undefined_names = NULL;
1869 free_dominance_info (CDI_POST_DOMINATORS);
1870 timevar_pop (TV_TREE_UNINIT);
1871 return 0;
1874 static bool
1875 gate_warn_uninitialized (void)
1877 return warn_uninitialized != 0;
1880 struct gimple_opt_pass pass_late_warn_uninitialized =
1883 GIMPLE_PASS,
1884 "uninit", /* name */
1885 gate_warn_uninitialized, /* gate */
1886 execute_late_warn_uninitialized, /* execute */
1887 NULL, /* sub */
1888 NULL, /* next */
1889 0, /* static_pass_number */
1890 TV_NONE, /* tv_id */
1891 PROP_ssa, /* properties_required */
1892 0, /* properties_provided */
1893 0, /* properties_destroyed */
1894 0, /* todo_flags_start */
1895 0 /* todo_flags_finish */