1 /* Predicate aware uninitialized variable warning.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2010 Free Software
3 Foundation, Inc.
4 Contributed by Xinliang David Li <davidxl@google.com>
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "tm_p.h"
29 #include "langhooks.h"
30 #include "basic-block.h"
31 #include "function.h"
32 #include "gimple-pretty-print.h"
33 #include "bitmap.h"
34 #include "pointer-set.h"
35 #include "tree-flow.h"
36 #include "gimple.h"
37 #include "tree-inline.h"
38 #include "timevar.h"
39 #include "hashtab.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
42 #include "diagnostic-core.h"
43 #include "timevar.h"
45 /* This implements the pass that issues predicate-aware warnings on uses of
46 possibly uninitialized variables. The pass first collects the set of
47 possibly uninitialized SSA names. For each such name, it walks through
48 all its immediate uses. For each immediate use, it rebuilds the condition
49 expression (the predicate) that guards the use. The predicate is then
50 examined to see if the variable is always defined under that same condition.
51 This is done either by pruning the unrealizable paths that lead to the
52 default definitions or by checking if the predicate set that guards the
53 defining paths is a superset of the use predicate. */
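/* Illustrative example (not part of the original sources): for

       int foo (int n)
       {
         int x;
         if (n > 0)
           x = 1;
         if (n > 0)
           return x;
         return 0;
       }

   the use of x is guarded by (n > 0), the same predicate that guards
   its only definition, so the use predicate is a subset of the
   definition predicate and no -Wmaybe-uninitialized warning should be
   issued.  */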
56 /* Pointer set of potentially undefined ssa names, i.e.,
57 ssa names that are defined by a phi with operands that
58 are undefined or potentially undefined.  */
59 static struct pointer_set_t *possibly_undefined_names = 0;
61 /* Bit mask handling macros. */
62 #define MASK_SET_BIT(mask, pos) mask |= (1 << pos)
63 #define MASK_TEST_BIT(mask, pos) (mask & (1 << pos))
64 #define MASK_EMPTY(mask) (mask == 0)
66 /* Returns the first bit position (starting from LSB)
67 in mask that is non zero. Returns -1 if the mask is empty. */
68 static int
69 get_mask_first_set_bit (unsigned mask)
71 int pos = 0;
72 if (mask == 0)
73 return -1;
75 while ((mask & (1 << pos)) == 0)
76 pos++;
78 return pos;
80 #define MASK_FIRST_SET_BIT(mask) get_mask_first_set_bit (mask)
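/* Illustrative usage of the mask macros (not part of the original
   sources): for a phi whose operands 0 and 2 are possibly undefined,

       unsigned mask = 0;
       MASK_SET_BIT (mask, 0);      // mask == 0x1
       MASK_SET_BIT (mask, 2);      // mask == 0x5
       MASK_TEST_BIT (mask, 1);     // 0, operand 1 is defined
       MASK_FIRST_SET_BIT (mask);   // 0, first uninit position

   The mask is a plain unsigned int, which is why
   compute_uninit_opnds_pos below bails out for phis with more than
   32 arguments.  */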
83 /* Return true if T, an SSA_NAME, has an undefined value. */
85 bool
86 ssa_undefined_value_p (tree t)
88 tree var = SSA_NAME_VAR (t);
90 /* Parameters get their initial value from the function entry. */
91 if (TREE_CODE (var) == PARM_DECL)
92 return false;
94 /* When returning by reference the return address is actually a hidden
95 parameter. */
96 if (TREE_CODE (SSA_NAME_VAR (t)) == RESULT_DECL
97 && DECL_BY_REFERENCE (SSA_NAME_VAR (t)))
98 return false;
100 /* Hard register variables get their initial value from the ether. */
101 if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
102 return false;
104 /* The value is undefined iff its definition statement is empty. */
105 return (gimple_nop_p (SSA_NAME_DEF_STMT (t))
106 || (possibly_undefined_names
107 && pointer_set_contains (possibly_undefined_names, t)));
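/* Illustrative example (not part of the original sources): in

       int x;
       return x;

   the SSA name of the returned value is the default definition of x;
   its defining statement is a GIMPLE_NOP, so ssa_undefined_value_p
   returns true for it.  For a PARM_DECL or a hard-register variable
   the function returns false even for the default definition, since
   those get their initial value from the caller or the register.  */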
110 /* Checks if the operand OPND of PHI is defined by
111 another phi with one operand defined by this PHI,
112 but whose remaining operands are all defined. If so,
113 returns true to skip this operand as being
114 redundant. Can be enhanced to be more general. */
116 static bool
117 can_skip_redundant_opnd (tree opnd, gimple phi)
119 gimple op_def;
120 tree phi_def;
121 int i, n;
123 phi_def = gimple_phi_result (phi);
124 op_def = SSA_NAME_DEF_STMT (opnd);
125 if (gimple_code (op_def) != GIMPLE_PHI)
126 return false;
127 n = gimple_phi_num_args (op_def);
128 for (i = 0; i < n; ++i)
130 tree op = gimple_phi_arg_def (op_def, i);
131 if (TREE_CODE (op) != SSA_NAME)
132 continue;
133 if (op != phi_def && ssa_undefined_value_p (op))
134 return false;
137 return true;
140 /* Returns a bit mask holding the positions of arguments in PHI
141 that have empty (or possibly empty) definitions. */
143 static unsigned
144 compute_uninit_opnds_pos (gimple phi)
146 size_t i, n;
147 unsigned uninit_opnds = 0;
149 n = gimple_phi_num_args (phi);
150 /* Bail out for phi with too many args. */
151 if (n > 32)
152 return 0;
154 for (i = 0; i < n; ++i)
156 tree op = gimple_phi_arg_def (phi, i);
157 if (TREE_CODE (op) == SSA_NAME
158 && ssa_undefined_value_p (op)
159 && !can_skip_redundant_opnd (op, phi))
160 MASK_SET_BIT (uninit_opnds, i);
162 return uninit_opnds;
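/* Illustrative example (not part of the original sources): for

       x_4 = PHI <x_1(D) (bb2), 5 (bb3), x_3 (bb4)>

   where x_1(D) is a default definition and x_3 is fully defined, the
   returned mask is 0x1: only bit 0, the position of the possibly
   undefined operand, is set.  */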
165 /* Find the immediate postdominator PDOM of the specified
166 basic block BLOCK. */
168 static inline basic_block
169 find_pdom (basic_block block)
171 if (block == EXIT_BLOCK_PTR)
172 return EXIT_BLOCK_PTR;
173 else
175 basic_block bb
176 = get_immediate_dominator (CDI_POST_DOMINATORS, block);
177 if (! bb)
178 return EXIT_BLOCK_PTR;
179 return bb;
183 /* Find the immediate DOM of the specified
184 basic block BLOCK. */
186 static inline basic_block
187 find_dom (basic_block block)
189 if (block == ENTRY_BLOCK_PTR)
190 return ENTRY_BLOCK_PTR;
191 else
193 basic_block bb = get_immediate_dominator (CDI_DOMINATORS, block);
194 if (! bb)
195 return ENTRY_BLOCK_PTR;
196 return bb;
200 /* Returns true if BB1 postdominates BB2 and BB1 is
201 not a loop exit bb. The loop exit bb check is simple and does
202 not cover all cases. */
204 static bool
205 is_non_loop_exit_postdominating (basic_block bb1, basic_block bb2)
207 if (!dominated_by_p (CDI_POST_DOMINATORS, bb2, bb1))
208 return false;
210 if (single_pred_p (bb1) && !single_succ_p (bb2))
211 return false;
213 return true;
216 /* Find the closest postdominator of a specified BB, which is control
217 equivalent to BB. */
219 static inline basic_block
220 find_control_equiv_block (basic_block bb)
222 basic_block pdom;
224 pdom = find_pdom (bb);
226 /* Skip the postdominating bb that is also loop exit. */
227 if (!is_non_loop_exit_postdominating (pdom, bb))
228 return NULL;
230 if (dominated_by_p (CDI_DOMINATORS, pdom, bb))
231 return pdom;
233 return NULL;
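/* Illustrative example (not part of the original sources): in a plain
   diamond

       bb1: if (c) goto bb2; else goto bb3;
       bb2: ...            bb3: ...
       bb4: <join>

   bb4 postdominates bb1 and bb1 dominates bb4, so
   find_control_equiv_block (bb1) returns bb4.  For bb2 the
   postdominator is also bb4, but bb2 does not dominate it, so the
   function returns NULL.  */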
236 #define MAX_NUM_CHAINS 8
237 #define MAX_CHAIN_LEN 5
239 /* Computes the control dependence chains (paths of edges)
240 for DEP_BB up to the dominating basic block BB (the head node of a
241 chain should be dominated by it). CD_CHAINS is a pointer to a
242 dynamic array holding the result chains. CUR_CD_CHAIN is the current
243 chain being computed. *NUM_CHAINS is the total number of chains. The
244 function returns true if the information is successfully computed, and
245 false if there is no control dependence or it is not computed. */
247 static bool
248 compute_control_dep_chain (basic_block bb, basic_block dep_bb,
249 VEC(edge, heap) **cd_chains,
250 size_t *num_chains,
251 VEC(edge, heap) **cur_cd_chain)
253 edge_iterator ei;
254 edge e;
255 size_t i;
256 bool found_cd_chain = false;
257 size_t cur_chain_len = 0;
259 if (EDGE_COUNT (bb->succs) < 2)
260 return false;
262 /* Could use a set instead. */
263 cur_chain_len = VEC_length (edge, *cur_cd_chain);
264 if (cur_chain_len > MAX_CHAIN_LEN)
265 return false;
267 for (i = 0; i < cur_chain_len; i++)
269 edge e = VEC_index (edge, *cur_cd_chain, i);
270 /* cycle detected. */
271 if (e->src == bb)
272 return false;
275 FOR_EACH_EDGE (e, ei, bb->succs)
277 basic_block cd_bb;
278 if (e->flags & (EDGE_FAKE | EDGE_ABNORMAL))
279 continue;
281 cd_bb = e->dest;
282 VEC_safe_push (edge, heap, *cur_cd_chain, e);
283 while (!is_non_loop_exit_postdominating (cd_bb, bb))
285 if (cd_bb == dep_bb)
287 /* Found a direct control dependence. */
288 if (*num_chains < MAX_NUM_CHAINS)
290 cd_chains[*num_chains]
291 = VEC_copy (edge, heap, *cur_cd_chain);
292 (*num_chains)++;
294 found_cd_chain = true;
295 /* check path from next edge. */
296 break;
299 /* Now check if DEP_BB is indirectly control dependent on BB. */
300 if (compute_control_dep_chain (cd_bb, dep_bb, cd_chains,
301 num_chains, cur_cd_chain))
303 found_cd_chain = true;
304 break;
307 cd_bb = find_pdom (cd_bb);
308 if (cd_bb == EXIT_BLOCK_PTR)
309 break;
311 VEC_pop (edge, *cur_cd_chain);
312 gcc_assert (VEC_length (edge, *cur_cd_chain) == cur_chain_len);
314 gcc_assert (VEC_length (edge, *cur_cd_chain) == cur_chain_len);
316 return found_cd_chain;
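/* Illustrative example (not part of the original sources): with

       bb1: if (a) goto bb2; else goto bb4;
       bb2: if (b) goto bb3; else goto bb4;
       bb3: ...   <- DEP_BB
       bb4: <join>

   compute_control_dep_chain (bb1, bb3, ...) records the single chain
   { bb1->bb2, bb2->bb3 }: bb3 is control dependent on bb2, which in
   turn is control dependent on bb1.  */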
319 typedef struct use_pred_info
321 gimple cond;
322 bool invert;
323 } *use_pred_info_t;
325 DEF_VEC_P(use_pred_info_t);
326 DEF_VEC_ALLOC_P(use_pred_info_t, heap);
329 /* Converts the chains of control dependence edges into a set of
330 predicates. A control dependence chain is represented by a vector
331 of edges. DEP_CHAINS points to an array of dependence chains.
332 NUM_CHAINS is the size of the chain array. One edge in a dependence
333 chain is mapped to a predicate expression represented by the
334 use_pred_info_t type. One dependence chain is converted to a composite
335 predicate that is the AND of the predicates mapped to its edges.
336 A composite predicate is represented by a vector of use_pred_info_t. On
337 return, *PREDS points to the resulting array of composite predicates.
338 *NUM_PREDS is the number of composite predicates. */
340 static bool
341 convert_control_dep_chain_into_preds (VEC(edge, heap) **dep_chains,
342 size_t num_chains,
343 VEC(use_pred_info_t, heap) ***preds,
344 size_t *num_preds)
346 bool has_valid_pred = false;
347 size_t i, j;
348 if (num_chains == 0 || num_chains >= MAX_NUM_CHAINS)
349 return false;
351 /* Now convert the control dep chain into a set
352 of predicates. */
353 *preds = XCNEWVEC (VEC(use_pred_info_t, heap) *,
354 num_chains);
355 *num_preds = num_chains;
357 for (i = 0; i < num_chains; i++)
359 VEC(edge, heap) *one_cd_chain = dep_chains[i];
361 has_valid_pred = false;
362 for (j = 0; j < VEC_length (edge, one_cd_chain); j++)
364 gimple cond_stmt;
365 gimple_stmt_iterator gsi;
366 basic_block guard_bb;
367 use_pred_info_t one_pred;
368 edge e;
370 e = VEC_index (edge, one_cd_chain, j);
371 guard_bb = e->src;
372 gsi = gsi_last_bb (guard_bb);
373 if (gsi_end_p (gsi))
375 has_valid_pred = false;
376 break;
378 cond_stmt = gsi_stmt (gsi);
379 if (gimple_code (cond_stmt) == GIMPLE_CALL
380 && EDGE_COUNT (e->src->succs) >= 2)
382 /* Ignore EH edge. Can add assertion
383 on the other edge's flag. */
384 continue;
386 /* Skip if there is essentially one successor. */
387 if (EDGE_COUNT (e->src->succs) == 2)
389 edge e1;
390 edge_iterator ei1;
391 bool skip = false;
393 FOR_EACH_EDGE (e1, ei1, e->src->succs)
395 if (EDGE_COUNT (e1->dest->succs) == 0)
397 skip = true;
398 break;
401 if (skip)
402 continue;
404 if (gimple_code (cond_stmt) != GIMPLE_COND)
406 has_valid_pred = false;
407 break;
409 one_pred = XNEW (struct use_pred_info);
410 one_pred->cond = cond_stmt;
411 one_pred->invert = !!(e->flags & EDGE_FALSE_VALUE);
412 VEC_safe_push (use_pred_info_t, heap, (*preds)[i], one_pred);
413 has_valid_pred = true;
416 if (!has_valid_pred)
417 break;
419 return has_valid_pred;
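/* Illustrative example (not part of the original sources): for the
   chain { bb1->bb2, bb2->bb3 } above, where bb1 ends in
   'if (a_1 > 0)' and bb1->bb2 is its true edge, while bb2 ends in
   'if (b_2 != 0)' and bb2->bb3 is its false edge, the resulting
   composite predicate is the vector

       { {cond = 'if (a_1 > 0)',  invert = false},
         {cond = 'if (b_2 != 0)', invert = true} }

   i.e. (a_1 > 0) .AND. (.NOT.) (b_2 != 0).  */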
422 /* Computes all control dependence chains for USE_BB. The control
423 dependence chains are then converted to an array of composite
424 predicates pointed to by PREDS. PHI_BB is the basic block of
425 the phi whose result is used in USE_BB. */
427 static bool
428 find_predicates (VEC(use_pred_info_t, heap) ***preds,
429 size_t *num_preds,
430 basic_block phi_bb,
431 basic_block use_bb)
433 size_t num_chains = 0, i;
434 VEC(edge, heap) **dep_chains = 0;
435 VEC(edge, heap) *cur_chain = 0;
436 bool has_valid_pred = false;
437 basic_block cd_root = 0;
439 dep_chains = XCNEWVEC (VEC(edge, heap) *, MAX_NUM_CHAINS);
441 /* First find the closest bb that is control equivalent to PHI_BB
442 that also dominates USE_BB. */
443 cd_root = phi_bb;
444 while (dominated_by_p (CDI_DOMINATORS, use_bb, cd_root))
446 basic_block ctrl_eq_bb = find_control_equiv_block (cd_root);
447 if (ctrl_eq_bb && dominated_by_p (CDI_DOMINATORS, use_bb, ctrl_eq_bb))
448 cd_root = ctrl_eq_bb;
449 else
450 break;
453 compute_control_dep_chain (cd_root, use_bb,
454 dep_chains, &num_chains,
455 &cur_chain);
457 has_valid_pred
458 = convert_control_dep_chain_into_preds (dep_chains,
459 num_chains,
460 preds,
461 num_preds);
462 /* Free individual chain */
463 VEC_free (edge, heap, cur_chain);
464 for (i = 0; i < num_chains; i++)
465 VEC_free (edge, heap, dep_chains[i]);
466 free (dep_chains);
467 return has_valid_pred;
470 /* Computes the set of incoming edges of PHI that have non-empty
471 definitions of a phi chain. The collection will be done
472 recursively on operands that are defined by phis. CD_ROOT
473 is the control dependence root. *EDGES holds the result, and
474 VISITED_PHIS is a pointer set for detecting cycles. */
476 static void
477 collect_phi_def_edges (gimple phi, basic_block cd_root,
478 VEC(edge, heap) **edges,
479 struct pointer_set_t *visited_phis)
481 size_t i, n;
482 edge opnd_edge;
483 tree opnd;
485 if (pointer_set_insert (visited_phis, phi))
486 return;
488 n = gimple_phi_num_args (phi);
489 for (i = 0; i < n; i++)
491 opnd_edge = gimple_phi_arg_edge (phi, i);
492 opnd = gimple_phi_arg_def (phi, i);
494 if (TREE_CODE (opnd) != SSA_NAME)
496 if (dump_file && (dump_flags & TDF_DETAILS))
498 fprintf (dump_file, "\n[CHECK] Found def edge %d in ", (int)i);
499 print_gimple_stmt (dump_file, phi, 0, 0);
501 VEC_safe_push (edge, heap, *edges, opnd_edge);
503 else
505 gimple def = SSA_NAME_DEF_STMT (opnd);
507 if (gimple_code (def) == GIMPLE_PHI
508 && dominated_by_p (CDI_DOMINATORS,
509 gimple_bb (def), cd_root))
510 collect_phi_def_edges (def, cd_root, edges,
511 visited_phis);
512 else if (!ssa_undefined_value_p (opnd))
514 if (dump_file && (dump_flags & TDF_DETAILS))
516 fprintf (dump_file, "\n[CHECK] Found def edge %d in ", (int)i);
517 print_gimple_stmt (dump_file, phi, 0, 0);
519 VEC_safe_push (edge, heap, *edges, opnd_edge);
525 /* For each use edge of PHI, computes all control dependence chains.
526 The control dependence chains are then converted to an array of
527 composite predicates pointed to by PREDS. */
529 static bool
530 find_def_preds (VEC(use_pred_info_t, heap) ***preds,
531 size_t *num_preds, gimple phi)
533 size_t num_chains = 0, i, n;
534 VEC(edge, heap) **dep_chains = 0;
535 VEC(edge, heap) *cur_chain = 0;
536 VEC(edge, heap) *def_edges = 0;
537 bool has_valid_pred = false;
538 basic_block phi_bb, cd_root = 0;
539 struct pointer_set_t *visited_phis;
541 dep_chains = XCNEWVEC (VEC(edge, heap) *, MAX_NUM_CHAINS);
543 phi_bb = gimple_bb (phi);
544 /* First find the closest dominating bb to be
545 the control dependence root */
546 cd_root = find_dom (phi_bb);
547 if (!cd_root)
548 return false;
550 visited_phis = pointer_set_create ();
551 collect_phi_def_edges (phi, cd_root, &def_edges, visited_phis);
552 pointer_set_destroy (visited_phis);
554 n = VEC_length (edge, def_edges);
555 if (n == 0)
556 return false;
558 for (i = 0; i < n; i++)
560 size_t prev_nc, j;
561 edge opnd_edge;
563 opnd_edge = VEC_index (edge, def_edges, i);
564 prev_nc = num_chains;
565 compute_control_dep_chain (cd_root, opnd_edge->src,
566 dep_chains, &num_chains,
567 &cur_chain);
568 /* Free individual chain */
569 VEC_free (edge, heap, cur_chain);
570 cur_chain = 0;
572 /* Now update the newly added chains with
573 the phi operand edge: */
574 if (EDGE_COUNT (opnd_edge->src->succs) > 1)
576 if (prev_nc == num_chains
577 && num_chains < MAX_NUM_CHAINS)
578 num_chains++;
579 for (j = prev_nc; j < num_chains; j++)
581 VEC_safe_push (edge, heap, dep_chains[j], opnd_edge);
586 has_valid_pred
587 = convert_control_dep_chain_into_preds (dep_chains,
588 num_chains,
589 preds,
590 num_preds);
591 for (i = 0; i < num_chains; i++)
592 VEC_free (edge, heap, dep_chains[i]);
593 free (dep_chains);
594 return has_valid_pred;
597 /* Dumps the predicates (PREDS) for USESTMT. */
599 static void
600 dump_predicates (gimple usestmt, size_t num_preds,
601 VEC(use_pred_info_t, heap) **preds,
602 const char* msg)
604 size_t i, j;
605 VEC(use_pred_info_t, heap) *one_pred_chain;
606 fprintf (dump_file, msg);
607 print_gimple_stmt (dump_file, usestmt, 0, 0);
608 fprintf (dump_file, "is guarded by :\n");
609 /* do some dumping here: */
610 for (i = 0; i < num_preds; i++)
612 size_t np;
614 one_pred_chain = preds[i];
615 np = VEC_length (use_pred_info_t, one_pred_chain);
617 for (j = 0; j < np; j++)
619 use_pred_info_t one_pred
620 = VEC_index (use_pred_info_t, one_pred_chain, j);
621 if (one_pred->invert)
622 fprintf (dump_file, " (.NOT.) ");
623 print_gimple_stmt (dump_file, one_pred->cond, 0, 0);
624 if (j < np - 1)
625 fprintf (dump_file, "(.AND.)\n");
627 if (i < num_preds - 1)
628 fprintf (dump_file, "(.OR.)\n");
632 /* Destroys the predicate set *PREDS. */
634 static void
635 destroy_predicate_vecs (size_t n,
636 VEC(use_pred_info_t, heap) ** preds)
638 size_t i, j;
639 for (i = 0; i < n; i++)
641 for (j = 0; j < VEC_length (use_pred_info_t, preds[i]); j++)
642 free (VEC_index (use_pred_info_t, preds[i], j));
643 VEC_free (use_pred_info_t, heap, preds[i]);
645 free (preds);
649 /* Computes the 'normalized' conditional code with operand
650 swapping and condition inversion. */
652 static enum tree_code
653 get_cmp_code (enum tree_code orig_cmp_code,
654 bool swap_cond, bool invert)
656 enum tree_code tc = orig_cmp_code;
658 if (swap_cond)
659 tc = swap_tree_comparison (orig_cmp_code);
660 if (invert)
661 tc = invert_tree_comparison (tc, false);
663 switch (tc)
665 case LT_EXPR:
666 case LE_EXPR:
667 case GT_EXPR:
668 case GE_EXPR:
669 case EQ_EXPR:
670 case NE_EXPR:
671 break;
672 default:
673 return ERROR_MARK;
675 return tc;
678 /* Returns true if VAL falls in the range defined by BOUNDARY and CMPC, i.e.
679 all values in the range satisfy (x CMPC BOUNDARY) == true. */
681 static bool
682 is_value_included_in (tree val, tree boundary, enum tree_code cmpc)
684 bool inverted = false;
685 bool is_unsigned;
686 bool result;
688 /* Only handle integer constant here. */
689 if (TREE_CODE (val) != INTEGER_CST
690 || TREE_CODE (boundary) != INTEGER_CST)
691 return true;
693 is_unsigned = TYPE_UNSIGNED (TREE_TYPE (val));
695 if (cmpc == GE_EXPR || cmpc == GT_EXPR
696 || cmpc == NE_EXPR)
698 cmpc = invert_tree_comparison (cmpc, false);
699 inverted = true;
702 if (is_unsigned)
704 if (cmpc == EQ_EXPR)
705 result = tree_int_cst_equal (val, boundary);
706 else if (cmpc == LT_EXPR)
707 result = INT_CST_LT_UNSIGNED (val, boundary);
708 else
710 gcc_assert (cmpc == LE_EXPR);
711 result = (tree_int_cst_equal (val, boundary)
712 || INT_CST_LT_UNSIGNED (val, boundary));
715 else
717 if (cmpc == EQ_EXPR)
718 result = tree_int_cst_equal (val, boundary);
719 else if (cmpc == LT_EXPR)
720 result = INT_CST_LT (val, boundary);
721 else
723 gcc_assert (cmpc == LE_EXPR);
724 result = (tree_int_cst_equal (val, boundary)
725 || INT_CST_LT (val, boundary));
729 if (inverted)
730 result ^= 1;
732 return result;
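/* Illustrative examples (not part of the original sources):

       is_value_included_in (3, 5, LT_EXPR)  -> true   (3 < 5)
       is_value_included_in (7, 5, LE_EXPR)  -> false  (7 <= 5 does not hold)
       is_value_included_in (1, 0, NE_EXPR)  -> true   (1 != 0)

   Arguments are shown as plain integers for brevity; the real
   arguments are INTEGER_CST trees.  */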
735 /* Returns true if PRED is common among all the predicate
736 chains (PREDS) (and therefore can be factored out).
737 NUM_PRED_CHAIN is the size of array PREDS. */
739 static bool
740 find_matching_predicate_in_rest_chains (use_pred_info_t pred,
741 VEC(use_pred_info_t, heap) **preds,
742 size_t num_pred_chains)
744 size_t i, j, n;
746 /* Trivial case. */
747 if (num_pred_chains == 1)
748 return true;
750 for (i = 1; i < num_pred_chains; i++)
752 bool found = false;
753 VEC(use_pred_info_t, heap) *one_chain = preds[i];
754 n = VEC_length (use_pred_info_t, one_chain);
755 for (j = 0; j < n; j++)
757 use_pred_info_t pred2
758 = VEC_index (use_pred_info_t, one_chain, j);
759 /* We can relax the condition comparison to not
760 use address comparison. However, the most common
761 case is that multiple control dependent paths share
762 a common path prefix, so address comparison should
763 be ok. */
765 if (pred2->cond == pred->cond
766 && pred2->invert == pred->invert)
768 found = true;
769 break;
772 if (!found)
773 return false;
775 return true;
778 /* Forward declaration. */
779 static bool
780 is_use_properly_guarded (gimple use_stmt,
781 basic_block use_bb,
782 gimple phi,
783 unsigned uninit_opnds,
784 struct pointer_set_t *visited_phis);
786 /* Returns true if all uninitialized opnds are pruned. Returns false
787 otherwise. PHI is the phi node with uninitialized operands,
788 UNINIT_OPNDS is the bitmap of the uninitialized operand positions,
789 FLAG_DEF is the statement defining the flag guarding the use of the
790 PHI output, BOUNDARY_CST is the const value used in the predicate
791 associated with the flag, CMP_CODE is the comparison code used in
792 the predicate, VISITED_PHIS is the pointer set of phis visited, and
793 VISITED_FLAG_PHIS is the pointer to the pointer set of flag definitions
794 that are also phis.
796 Example scenario:
798 BB1:
799 flag_1 = phi <0, 1> // (1)
800 var_1 = phi <undef, some_val>
803 BB2:
804 flag_2 = phi <0, flag_1, flag_1> // (2)
805 var_2 = phi <undef, var_1, var_1>
806 if (flag_2 == 1)
807 goto BB3;
809 BB3:
810 use of var_2 // (3)
812 Because some flag arg in (1) is not constant, if we do not look into the
813 flag phis recursively, it is conservatively treated as unknown and var_1
814 is assumed to flow into the use at (3). Since var_1 is potentially uninitialized,
815 a false warning will be emitted. Checking recursively into (1), the compiler can
816 find out that only some_val (which is defined) can flow into (3), which is OK.
820 static bool
821 prune_uninit_phi_opnds_in_unrealizable_paths (
822 gimple phi, unsigned uninit_opnds,
823 gimple flag_def, tree boundary_cst,
824 enum tree_code cmp_code,
825 struct pointer_set_t *visited_phis,
826 bitmap *visited_flag_phis)
828 unsigned i;
830 for (i = 0; i < MIN (32, gimple_phi_num_args (flag_def)); i++)
832 tree flag_arg;
834 if (!MASK_TEST_BIT (uninit_opnds, i))
835 continue;
837 flag_arg = gimple_phi_arg_def (flag_def, i);
838 if (!is_gimple_constant (flag_arg))
840 gimple flag_arg_def, phi_arg_def;
841 tree phi_arg;
842 unsigned uninit_opnds_arg_phi;
844 if (TREE_CODE (flag_arg) != SSA_NAME)
845 return false;
846 flag_arg_def = SSA_NAME_DEF_STMT (flag_arg);
847 if (gimple_code (flag_arg_def) != GIMPLE_PHI)
848 return false;
850 phi_arg = gimple_phi_arg_def (phi, i);
851 if (TREE_CODE (phi_arg) != SSA_NAME)
852 return false;
854 phi_arg_def = SSA_NAME_DEF_STMT (phi_arg);
855 if (gimple_code (phi_arg_def) != GIMPLE_PHI)
856 return false;
858 if (gimple_bb (phi_arg_def) != gimple_bb (flag_arg_def))
859 return false;
861 if (!*visited_flag_phis)
862 *visited_flag_phis = BITMAP_ALLOC (NULL);
864 if (bitmap_bit_p (*visited_flag_phis,
865 SSA_NAME_VERSION (gimple_phi_result (flag_arg_def))))
866 return false;
868 bitmap_set_bit (*visited_flag_phis,
869 SSA_NAME_VERSION (gimple_phi_result (flag_arg_def)));
871 /* Now recursively prune the uninitialized phi args. */
872 uninit_opnds_arg_phi = compute_uninit_opnds_pos (phi_arg_def);
873 if (!prune_uninit_phi_opnds_in_unrealizable_paths (
874 phi_arg_def, uninit_opnds_arg_phi,
875 flag_arg_def, boundary_cst, cmp_code,
876 visited_phis, visited_flag_phis))
877 return false;
879 bitmap_clear_bit (*visited_flag_phis,
880 SSA_NAME_VERSION (gimple_phi_result (flag_arg_def)));
881 continue;
884 /* Now check if the constant is in the guarded range. */
885 if (is_value_included_in (flag_arg, boundary_cst, cmp_code))
887 tree opnd;
888 gimple opnd_def;
890 /* Now we know that this undefined edge is not
891 pruned. If the operand is defined by another phi,
892 we can further prune the incoming edges of that
893 phi by checking the predicates of this operand. */
895 opnd = gimple_phi_arg_def (phi, i);
896 opnd_def = SSA_NAME_DEF_STMT (opnd);
897 if (gimple_code (opnd_def) == GIMPLE_PHI)
899 edge opnd_edge;
900 unsigned uninit_opnds2
901 = compute_uninit_opnds_pos (opnd_def);
902 gcc_assert (!MASK_EMPTY (uninit_opnds2));
903 opnd_edge = gimple_phi_arg_edge (phi, i);
904 if (!is_use_properly_guarded (phi,
905 opnd_edge->src,
906 opnd_def,
907 uninit_opnds2,
908 visited_phis))
909 return false;
911 else
912 return false;
916 return true;
919 /* A helper function that determines if the predicate set
920 of the use is not overlapping with that of the uninit paths.
921 The most common scenario of guarded use is in Example 1:
922 Example 1:
923 if (some_cond)
925 x = ...;
926 flag = true;
929 ... some code ...
931 if (flag)
932 use (x);
934 The real world examples are usually more complicated, but similar
935 and usually result from inlining:
937 bool init_func (int * x)
939 if (some_cond)
940 return false;
941 *x = ..
942 return true;
945 void foo(..)
947 int x;
949 if (!init_func(&x))
950 return;
952 .. some_code ...
953 use (x);
956 Another possible use scenario is in the following trivial example:
958 Example 2:
959 if (n > 0)
960 x = 1;
962 if (n > 0)
964 if (m < 2)
965 .. = x;
968 Predicate analysis needs to compute the composite predicate:
970 1) 'x' use predicate: (n > 0) .AND. (m < 2)
971 2) 'x' default value (non-def) predicate: .NOT. (n > 0)
972 (the predicate chain for phi operand defs can be computed
973 starting from a bb that is control equivalent to the phi's
974 bb and is dominating the operand def.)
976 and check overlapping:
977 (n > 0) .AND. (m < 2) .AND. (.NOT. (n > 0))
978 <==> false
980 This implementation provides a framework that can handle these
981 scenarios. (Note that many simple cases are handled properly
982 without the predicate analysis -- this is due to jump threading
983 transformation which eliminates the merge point thus makes
984 path sensitive analysis unnecessary.)
986 NUM_PREDS is the number of predicate chains, PREDS is
987 the array of chains, PHI is the phi node whose incoming (undefined)
988 paths need to be pruned, and UNINIT_OPNDS is the bitmap holding
989 uninit operand positions. VISITED_PHIS is the pointer set of phi
990 stmts being checked. */
993 static bool
994 use_pred_not_overlap_with_undef_path_pred (
995 size_t num_preds,
996 VEC(use_pred_info_t, heap) **preds,
997 gimple phi, unsigned uninit_opnds,
998 struct pointer_set_t *visited_phis)
1000 unsigned int i, n;
1001 gimple flag_def = 0;
1002 tree boundary_cst = 0;
1003 enum tree_code cmp_code;
1004 bool swap_cond = false;
1005 bool invert = false;
1006 VEC(use_pred_info_t, heap) *the_pred_chain;
1007 bitmap visited_flag_phis = NULL;
1008 bool all_pruned = false;
1010 gcc_assert (num_preds > 0);
1011 /* Find within the common prefix of multiple predicate chains
1012 a predicate that is a comparison of a flag variable against
1013 a constant. */
1014 the_pred_chain = preds[0];
1015 n = VEC_length (use_pred_info_t, the_pred_chain);
1016 for (i = 0; i < n; i++)
1018 gimple cond;
1019 tree cond_lhs, cond_rhs, flag = 0;
1021 use_pred_info_t the_pred
1022 = VEC_index (use_pred_info_t, the_pred_chain, i);
1024 cond = the_pred->cond;
1025 invert = the_pred->invert;
1026 cond_lhs = gimple_cond_lhs (cond);
1027 cond_rhs = gimple_cond_rhs (cond);
1028 cmp_code = gimple_cond_code (cond);
1030 if (cond_lhs != NULL_TREE && TREE_CODE (cond_lhs) == SSA_NAME
1031 && cond_rhs != NULL_TREE && is_gimple_constant (cond_rhs))
1033 boundary_cst = cond_rhs;
1034 flag = cond_lhs;
1036 else if (cond_rhs != NULL_TREE && TREE_CODE (cond_rhs) == SSA_NAME
1037 && cond_lhs != NULL_TREE && is_gimple_constant (cond_lhs))
1039 boundary_cst = cond_lhs;
1040 flag = cond_rhs;
1041 swap_cond = true;
1044 if (!flag)
1045 continue;
1047 flag_def = SSA_NAME_DEF_STMT (flag);
1049 if (!flag_def)
1050 continue;
1052 if ((gimple_code (flag_def) == GIMPLE_PHI)
1053 && (gimple_bb (flag_def) == gimple_bb (phi))
1054 && find_matching_predicate_in_rest_chains (
1055 the_pred, preds, num_preds))
1056 break;
1058 flag_def = 0;
1061 if (!flag_def)
1062 return false;
1064 /* Now check that every uninit incoming edge has a constant flag value
1065 that is in conflict with the use guard/predicate. */
1066 cmp_code = get_cmp_code (cmp_code, swap_cond, invert);
1068 if (cmp_code == ERROR_MARK)
1069 return false;
1071 all_pruned = prune_uninit_phi_opnds_in_unrealizable_paths (phi,
1072 uninit_opnds,
1073 flag_def,
1074 boundary_cst,
1075 cmp_code,
1076 visited_phis,
1077 &visited_flag_phis);
1079 if (visited_flag_phis)
1080 BITMAP_FREE (visited_flag_phis);
1082 return all_pruned;
1085 /* Returns true if TC is BIT_IOR_EXPR, or BIT_AND_EXPR with a boolean operand type. */
1087 static inline bool
1088 is_and_or_or (enum tree_code tc, tree typ)
1090 return (tc == BIT_IOR_EXPR
1091 || (tc == BIT_AND_EXPR
1092 && (typ == 0 || TREE_CODE (typ) == BOOLEAN_TYPE)));
1095 typedef struct norm_cond
1097 VEC(gimple, heap) *conds;
1098 enum tree_code cond_code;
1099 bool invert;
1100 } *norm_cond_t;
1103 /* Normalizes gimple condition COND. The normalization follows
1104 UD chains to form larger condition expression trees. NORM_COND
1105 holds the normalized result. COND_CODE is the logical opcode
1106 (AND or OR) of the normalized tree. */
1108 static void
1109 normalize_cond_1 (gimple cond,
1110 norm_cond_t norm_cond,
1111 enum tree_code cond_code)
1113 enum gimple_code gc;
1114 enum tree_code cur_cond_code;
1115 tree rhs1, rhs2;
1117 gc = gimple_code (cond);
1118 if (gc != GIMPLE_ASSIGN)
1120 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1121 return;
1124 cur_cond_code = gimple_assign_rhs_code (cond);
1125 rhs1 = gimple_assign_rhs1 (cond);
1126 rhs2 = gimple_assign_rhs2 (cond);
1127 if (cur_cond_code == NE_EXPR)
1129 if (integer_zerop (rhs2)
1130 && (TREE_CODE (rhs1) == SSA_NAME))
1131 normalize_cond_1 (
1132 SSA_NAME_DEF_STMT (rhs1),
1133 norm_cond, cond_code);
1134 else if (integer_zerop (rhs1)
1135 && (TREE_CODE (rhs2) == SSA_NAME))
1136 normalize_cond_1 (
1137 SSA_NAME_DEF_STMT (rhs2),
1138 norm_cond, cond_code);
1139 else
1140 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1142 return;
1145 if (is_and_or_or (cur_cond_code, TREE_TYPE (rhs1))
1146 && (cond_code == cur_cond_code || cond_code == ERROR_MARK)
1147 && (TREE_CODE (rhs1) == SSA_NAME && TREE_CODE (rhs2) == SSA_NAME))
1149 normalize_cond_1 (SSA_NAME_DEF_STMT (rhs1),
1150 norm_cond, cur_cond_code);
1151 normalize_cond_1 (SSA_NAME_DEF_STMT (rhs2),
1152 norm_cond, cur_cond_code);
1153 norm_cond->cond_code = cur_cond_code;
1155 else
1156 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1159 /* See normalize_cond_1 for details. INVERT is a flag to indicate
1160 if COND needs to be inverted or not. */
1162 static void
1163 normalize_cond (gimple cond, norm_cond_t norm_cond, bool invert)
1165 enum tree_code cond_code;
1167 norm_cond->cond_code = ERROR_MARK;
1168 norm_cond->invert = false;
1169 norm_cond->conds = NULL;
1170 gcc_assert (gimple_code (cond) == GIMPLE_COND);
1171 cond_code = gimple_cond_code (cond);
1172 if (invert)
1173 cond_code = invert_tree_comparison (cond_code, false);
1175 if (cond_code == NE_EXPR)
1177 if (integer_zerop (gimple_cond_rhs (cond))
1178 && (TREE_CODE (gimple_cond_lhs (cond)) == SSA_NAME))
1179 normalize_cond_1 (
1180 SSA_NAME_DEF_STMT (gimple_cond_lhs (cond)),
1181 norm_cond, ERROR_MARK);
1182 else if (integer_zerop (gimple_cond_lhs (cond))
1183 && (TREE_CODE (gimple_cond_rhs (cond)) == SSA_NAME))
1184 normalize_cond_1 (
1185 SSA_NAME_DEF_STMT (gimple_cond_rhs (cond)),
1186 norm_cond, ERROR_MARK);
1187 else
1189 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1190 norm_cond->invert = invert;
1193 else
1195 VEC_safe_push (gimple, heap, norm_cond->conds, cond);
1196 norm_cond->invert = invert;
1199 gcc_assert (VEC_length (gimple, norm_cond->conds) == 1
1200 || is_and_or_or (norm_cond->cond_code, NULL));
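/* Illustrative example (not part of the original sources): for the
   gimple sequence

       t1_3 = a_1 > 0;
       t2_4 = b_2 < 5;
       t3_5 = t1_3 & t2_4;
       if (t3_5 != 0) ...

   normalize_cond on the GIMPLE_COND (with INVERT false) follows the
   use-def chain of t3_5 and produces conds = { 't1_3 = a_1 > 0',
   't2_4 = b_2 < 5' } with cond_code BIT_AND_EXPR.  */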
1203 /* Returns true if the domain for condition COND1 is a subset of
1204 COND2. REVERSE is a flag; when it is true the function checks
1205 if COND1 is a superset of COND2. INVERT1 and INVERT2 are flags
1206 to indicate if COND1 and COND2 need to be inverted or not. */
1208 static bool
1209 is_gcond_subset_of (gimple cond1, bool invert1,
1210 gimple cond2, bool invert2,
1211 bool reverse)
1213 enum gimple_code gc1, gc2;
1214 enum tree_code cond1_code, cond2_code;
1215 gimple tmp;
1216 tree cond1_lhs, cond1_rhs, cond2_lhs, cond2_rhs;
1218 /* Take the short cut. */
1219 if (cond1 == cond2)
1220 return true;
1222 if (reverse)
1224 tmp = cond1;
1225 cond1 = cond2;
1226 cond2 = tmp;
1229 gc1 = gimple_code (cond1);
1230 gc2 = gimple_code (cond2);
1232 if ((gc1 != GIMPLE_ASSIGN && gc1 != GIMPLE_COND)
1233 || (gc2 != GIMPLE_ASSIGN && gc2 != GIMPLE_COND))
1234 return cond1 == cond2;
1236 cond1_code = ((gc1 == GIMPLE_ASSIGN)
1237 ? gimple_assign_rhs_code (cond1)
1238 : gimple_cond_code (cond1));
1240 cond2_code = ((gc2 == GIMPLE_ASSIGN)
1241 ? gimple_assign_rhs_code (cond2)
1242 : gimple_cond_code (cond2));
1244 if (TREE_CODE_CLASS (cond1_code) != tcc_comparison
1245 || TREE_CODE_CLASS (cond2_code) != tcc_comparison)
1246 return false;
1248 if (invert1)
1249 cond1_code = invert_tree_comparison (cond1_code, false);
1250 if (invert2)
1251 cond2_code = invert_tree_comparison (cond2_code, false);
1253 cond1_lhs = ((gc1 == GIMPLE_ASSIGN)
1254 ? gimple_assign_rhs1 (cond1)
1255 : gimple_cond_lhs (cond1));
1256 cond1_rhs = ((gc1 == GIMPLE_ASSIGN)
1257 ? gimple_assign_rhs2 (cond1)
1258 : gimple_cond_rhs (cond1));
1259 cond2_lhs = ((gc2 == GIMPLE_ASSIGN)
1260 ? gimple_assign_rhs1 (cond2)
1261 : gimple_cond_lhs (cond2));
1262 cond2_rhs = ((gc2 == GIMPLE_ASSIGN)
1263 ? gimple_assign_rhs2 (cond2)
1264 : gimple_cond_rhs (cond2));
1266 /* Assuming const operands have been swapped to the
1267 rhs at this point of the analysis. */
1269 if (cond1_lhs != cond2_lhs)
1270 return false;
1272 if (!is_gimple_constant (cond1_rhs)
1273 || TREE_CODE (cond1_rhs) != INTEGER_CST)
1274 return (cond1_rhs == cond2_rhs);
1276 if (!is_gimple_constant (cond2_rhs)
1277 || TREE_CODE (cond2_rhs) != INTEGER_CST)
1278 return (cond1_rhs == cond2_rhs);
1280 if (cond1_code == EQ_EXPR)
1281 return is_value_included_in (cond1_rhs,
1282 cond2_rhs, cond2_code);
1283 if (cond1_code == NE_EXPR || cond2_code == EQ_EXPR)
1284 return ((cond2_code == cond1_code)
1285 && tree_int_cst_equal (cond1_rhs, cond2_rhs));
1287 if (((cond1_code == GE_EXPR || cond1_code == GT_EXPR)
1288 && (cond2_code == LE_EXPR || cond2_code == LT_EXPR))
1289 || ((cond1_code == LE_EXPR || cond1_code == LT_EXPR)
1290 && (cond2_code == GE_EXPR || cond2_code == GT_EXPR)))
1291 return false;
1293 if (cond1_code != GE_EXPR && cond1_code != GT_EXPR
1294 && cond1_code != LE_EXPR && cond1_code != LT_EXPR)
1295 return false;
1297 if (cond1_code == GT_EXPR)
1299 cond1_code = GE_EXPR;
1300 cond1_rhs = fold_binary (PLUS_EXPR, TREE_TYPE (cond1_rhs),
1301 cond1_rhs,
1302 fold_convert (TREE_TYPE (cond1_rhs),
1303 integer_one_node));
1305 else if (cond1_code == LT_EXPR)
1307 cond1_code = LE_EXPR;
1308 cond1_rhs = fold_binary (MINUS_EXPR, TREE_TYPE (cond1_rhs),
1309 cond1_rhs,
1310 fold_convert (TREE_TYPE (cond1_rhs),
1311 integer_one_node));
1314 if (!cond1_rhs)
1315 return false;
1317 gcc_assert (cond1_code == GE_EXPR || cond1_code == LE_EXPR);
1319 if (cond2_code == GE_EXPR || cond2_code == GT_EXPR ||
1320 cond2_code == LE_EXPR || cond2_code == LT_EXPR)
1321 return is_value_included_in (cond1_rhs,
1322 cond2_rhs, cond2_code);
1323 else if (cond2_code == NE_EXPR)
1324 return
1325 (is_value_included_in (cond1_rhs,
1326 cond2_rhs, cond2_code)
1327 && !is_value_included_in (cond2_rhs,
1328 cond1_rhs, cond1_code));
1329 return false;
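/* Illustrative example (not part of the original sources): for
   cond1 'if (x_1 > 10)' and cond2 'if (x_1 > 5)' (neither inverted),
   cond1 is canonicalized from GT to 'x_1 >= 11', and
   is_value_included_in (11, 5, GT_EXPR) returns true, so the domain
   of cond1 is recognized as a subset of that of cond2.  */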
1332 /* Returns true if the domain of the condition expression
1333 in COND is a subset of any of the sub-conditions
1334 of the normalized condition NORM_COND. INVERT is a flag
1335 to indicate if COND needs to be inverted.
1336 REVERSE is a flag. When it is true, the check is reversed --
1337 it returns true if COND is a superset of any of the subconditions
1338 of NORM_COND. */
1340 static bool
1341 is_subset_of_any (gimple cond, bool invert,
1342 norm_cond_t norm_cond, bool reverse)
1344 size_t i;
1345 size_t len = VEC_length (gimple, norm_cond->conds);
1347 for (i = 0; i < len; i++)
1349 if (is_gcond_subset_of (cond, invert,
1350 VEC_index (gimple, norm_cond->conds, i),
1351 false, reverse))
1352 return true;
1354 return false;
1357 /* NORM_COND1 and NORM_COND2 are normalized logical/BIT OR
1358 expressions (formed by following UD chains not control
1359 dependence chains). The function returns true if the domain
1360 of the OR expression NORM_COND1 is a subset of NORM_COND2's.
1361 The implementation is conservative, and it returns false if
1362 the inclusion relationship may not hold. */
1364 static bool
1365 is_or_set_subset_of (norm_cond_t norm_cond1,
1366 norm_cond_t norm_cond2)
1368 size_t i;
1369 size_t len = VEC_length (gimple, norm_cond1->conds);
1371 for (i = 0; i < len; i++)
1373 if (!is_subset_of_any (VEC_index (gimple, norm_cond1->conds, i),
1374 false, norm_cond2, false))
1375 return false;
1377 return true;
1380 /* NORM_COND1 and NORM_COND2 are normalized logical AND
1381 expressions (formed by following UD chains not control
1382 dependence chains). The function returns true if the domain
1383 of the AND expression NORM_COND1 is a subset of NORM_COND2's. */
1385 static bool
1386 is_and_set_subset_of (norm_cond_t norm_cond1,
1387 norm_cond_t norm_cond2)
1389 size_t i;
1390 size_t len = VEC_length (gimple, norm_cond2->conds);
1392 for (i = 0; i < len; i++)
1394 if (!is_subset_of_any (VEC_index (gimple, norm_cond2->conds, i),
1395 false, norm_cond1, true))
1396 return false;
1398 return true;
1401 /* Returns true if the domain of NORM_COND1 is a subset
1402 of that of NORM_COND2. Returns false if it cannot be
1403 proved to be so. */
1405 static bool
1406 is_norm_cond_subset_of (norm_cond_t norm_cond1,
1407 norm_cond_t norm_cond2)
1409 size_t i;
1410 enum tree_code code1, code2;
1412 code1 = norm_cond1->cond_code;
1413 code2 = norm_cond2->cond_code;
1415 if (code1 == BIT_AND_EXPR)
1417 /* Both conditions are AND expressions. */
1418 if (code2 == BIT_AND_EXPR)
1419 return is_and_set_subset_of (norm_cond1, norm_cond2);
1420 /* NORM_COND1 is an AND expression, and NORM_COND2 is an OR
1421 expression. In this case, returns true if any subexpression
1422 of NORM_COND1 is a subset of any subexpression of NORM_COND2. */
1423 else if (code2 == BIT_IOR_EXPR)
1425 size_t len1;
1426 len1 = VEC_length (gimple, norm_cond1->conds);
1427 for (i = 0; i < len1; i++)
1429 gimple cond1 = VEC_index (gimple, norm_cond1->conds, i);
1430 if (is_subset_of_any (cond1, false, norm_cond2, false))
1431 return true;
1433 return false;
1435 else
1437 gcc_assert (code2 == ERROR_MARK);
1438 gcc_assert (VEC_length (gimple, norm_cond2->conds) == 1);
1439 return is_subset_of_any (VEC_index (gimple, norm_cond2->conds, 0),
1440 norm_cond2->invert, norm_cond1, true);
1443 /* NORM_COND1 is an OR expression */
1444 else if (code1 == BIT_IOR_EXPR)
1446 if (code2 != code1)
1447 return false;
1449 return is_or_set_subset_of (norm_cond1, norm_cond2);
1451 else
1453 gcc_assert (code1 == ERROR_MARK);
1454 gcc_assert (VEC_length (gimple, norm_cond1->conds) == 1);
1455 /* Conservatively returns false if NORM_COND1 is non-decomposable
1456 and NORM_COND2 is an AND expression. */
1457 if (code2 == BIT_AND_EXPR)
1458 return false;
1460 if (code2 == BIT_IOR_EXPR)
1461 return is_subset_of_any (VEC_index (gimple, norm_cond1->conds, 0),
1462 norm_cond1->invert, norm_cond2, false);
1464 gcc_assert (code2 == ERROR_MARK);
1465 gcc_assert (VEC_length (gimple, norm_cond2->conds) == 1);
1466 return is_gcond_subset_of (VEC_index (gimple, norm_cond1->conds, 0),
1467 norm_cond1->invert,
1468 VEC_index (gimple, norm_cond2->conds, 0),
1469 norm_cond2->invert, false);
1473 /* Returns true if the domain of the single predicate expression
1474 EXPR1 is a subset of that of EXPR2. Returns false if it
1475 cannot be proved. */
1477 static bool
1478 is_pred_expr_subset_of (use_pred_info_t expr1,
1479 use_pred_info_t expr2)
1481 gimple cond1, cond2;
1482 enum tree_code code1, code2;
1483 struct norm_cond norm_cond1, norm_cond2;
1484 bool is_subset = false;
1486 cond1 = expr1->cond;
1487 cond2 = expr2->cond;
1488 code1 = gimple_cond_code (cond1);
1489 code2 = gimple_cond_code (cond2);
1491 if (expr1->invert)
1492 code1 = invert_tree_comparison (code1, false);
1493 if (expr2->invert)
1494 code2 = invert_tree_comparison (code2, false);
1496 /* Fast path -- match exactly */
1497 if ((gimple_cond_lhs (cond1) == gimple_cond_lhs (cond2))
1498 && (gimple_cond_rhs (cond1) == gimple_cond_rhs (cond2))
1499 && (code1 == code2))
1500 return true;
1502 /* Normalize the conditions. To keep NE_EXPR, do not invert
1503 when both need inversion. */
1504 normalize_cond (cond1, &norm_cond1, (expr1->invert));
1505 normalize_cond (cond2, &norm_cond2, (expr2->invert));
1507 is_subset = is_norm_cond_subset_of (&norm_cond1, &norm_cond2);
1509 /* Free memory */
1510 VEC_free (gimple, heap, norm_cond1.conds);
1511 VEC_free (gimple, heap, norm_cond2.conds);
1512 return is_subset;
1515 /* Returns true if the domain of PRED1 is a subset
1516 of that of PRED2. Returns false if it can not be proved so. */
1518 static bool
1519 is_pred_chain_subset_of (VEC(use_pred_info_t, heap) *pred1,
1520 VEC(use_pred_info_t, heap) *pred2)
1522 size_t np1, np2, i1, i2;
1524 np1 = VEC_length (use_pred_info_t, pred1);
1525 np2 = VEC_length (use_pred_info_t, pred2);
1527 for (i2 = 0; i2 < np2; i2++)
1529 bool found = false;
1530 use_pred_info_t info2
1531 = VEC_index (use_pred_info_t, pred2, i2);
1532 for (i1 = 0; i1 < np1; i1++)
1534 use_pred_info_t info1
1535 = VEC_index (use_pred_info_t, pred1, i1);
1536 if (is_pred_expr_subset_of (info1, info2))
1538 found = true;
1539 break;
1542 if (!found)
1543 return false;
1545 return true;
1548 /* Returns true if the domain defined by
1549 one pred chain ONE_PRED is a subset of the domain
1550 of *PREDS. It returns false if ONE_PRED's domain is
1551 not a subset of any of the sub-domains of PREDS
1552 (corresponding to each individual chain in it), even
1553 though it may still be a subset of the whole domain
1554 of PREDS, which is the union (ORed) of all its subdomains.
1555 In other words, the result is conservative. */
1557 static bool
1558 is_included_in (VEC(use_pred_info_t, heap) *one_pred,
1559 VEC(use_pred_info_t, heap) **preds,
1560 size_t n)
1562 size_t i;
1564 for (i = 0; i < n; i++)
1566 if (is_pred_chain_subset_of (one_pred, preds[i]))
1567 return true;
1570 return false;
1573 /* Compares two predicate sets PREDS1 and PREDS2 and returns
1574 true if the domain defined by PREDS1 is a superset
1575 of PREDS2's domain. N1 and N2 are array sizes of PREDS1 and
1576 PREDS2 respectively. The implementation chooses not to build
1577 generic trees (and relying on the folding capability of the
1578 compiler), but instead performs brute force comparison of
1579 individual predicate chains (won't be a compile time problem
1580 as the chains are pretty short). When the function returns
1581 false, it does not necessarily mean *PREDS1 is not a superset
1582 of *PREDS2, but that it may not be so, since the analysis
1583 cannot prove it. In such cases, false warnings may still be
1584 emitted. */
1586 static bool
1587 is_superset_of (VEC(use_pred_info_t, heap) **preds1,
1588 size_t n1,
1589 VEC(use_pred_info_t, heap) **preds2,
1590 size_t n2)
1592 size_t i;
1593 VEC(use_pred_info_t, heap) *one_pred_chain;
1595 for (i = 0; i < n2; i++)
1597 one_pred_chain = preds2[i];
1598 if (!is_included_in (one_pred_chain, preds1, n1))
1599 return false;
1602 return true;
1605 /* Comparison function used by qsort. It is used to
1606 sort predicate chains to allow predicate
1607 simplification. */
1609 static int
1610 pred_chain_length_cmp (const void *p1, const void *p2)
1612 use_pred_info_t i1, i2;
1613 VEC(use_pred_info_t, heap) * const *chain1
1614 = (VEC(use_pred_info_t, heap) * const *)p1;
1615 VEC(use_pred_info_t, heap) * const *chain2
1616 = (VEC(use_pred_info_t, heap) * const *)p2;
1618 if (VEC_length (use_pred_info_t, *chain1)
1619 != VEC_length (use_pred_info_t, *chain2))
1620 return (VEC_length (use_pred_info_t, *chain1)
1621 - VEC_length (use_pred_info_t, *chain2));
1623 i1 = VEC_index (use_pred_info_t, *chain1, 0);
1624 i2 = VEC_index (use_pred_info_t, *chain2, 0);
1626 /* Allow predicates with similar prefixes to come together. */
1627 if (!i1->invert && i2->invert)
1628 return -1;
1629 else if (i1->invert && !i2->invert)
1630 return 1;
1632 return gimple_uid (i1->cond) - gimple_uid (i2->cond);
1635 /* x OR (!x AND y) is equivalent to x OR y.
1636 This function normalizes x1 OR (!x1 AND x2) OR (!x1 AND !x2 AND x3)
1637 into x1 OR x2 OR x3. PREDS is the predicate chains, and N is
1638 the number of chains. Returns true if normalization happens. */
1640 static bool
1641 normalize_preds (VEC(use_pred_info_t, heap) **preds, size_t *n)
1643 size_t i, j, ll;
1644 VEC(use_pred_info_t, heap) *pred_chain;
1645 VEC(use_pred_info_t, heap) *x = 0;
1646 use_pred_info_t xj = 0, nxj = 0;
1648 if (*n < 2)
1649 return false;
1651 /* First sort the chains in ascending order of lengths. */
1652 qsort (preds, *n, sizeof (void *), pred_chain_length_cmp);
1653 pred_chain = preds[0];
1654 ll = VEC_length (use_pred_info_t, pred_chain);
1655 if (ll != 1)
1657 if (ll == 2)
1659 use_pred_info_t xx, yy, xx2, nyy;
1660 VEC(use_pred_info_t, heap) *pred_chain2 = preds[1];
1661 if (VEC_length (use_pred_info_t, pred_chain2) != 2)
1662 return false;
1664 /* See if simplification x AND y OR x AND !y is possible. */
1665 xx = VEC_index (use_pred_info_t, pred_chain, 0);
1666 yy = VEC_index (use_pred_info_t, pred_chain, 1);
1667 xx2 = VEC_index (use_pred_info_t, pred_chain2, 0);
1668 nyy = VEC_index (use_pred_info_t, pred_chain2, 1);
1669 if (gimple_cond_lhs (xx->cond) != gimple_cond_lhs (xx2->cond)
1670 || gimple_cond_rhs (xx->cond) != gimple_cond_rhs (xx2->cond)
1671 || gimple_cond_code (xx->cond) != gimple_cond_code (xx2->cond)
1672 || (xx->invert != xx2->invert))
1673 return false;
1674 if (gimple_cond_lhs (yy->cond) != gimple_cond_lhs (nyy->cond)
1675 || gimple_cond_rhs (yy->cond) != gimple_cond_rhs (nyy->cond)
1676 || gimple_cond_code (yy->cond) != gimple_cond_code (nyy->cond)
1677 || (yy->invert == nyy->invert))
1678 return false;
1680 /* Now merge the first two chains. */
1681 free (yy);
1682 free (nyy);
1683 free (xx2);
1684 VEC_free (use_pred_info_t, heap, pred_chain);
1685 VEC_free (use_pred_info_t, heap, pred_chain2);
1686 pred_chain = 0;
1687 VEC_safe_push (use_pred_info_t, heap, pred_chain, xx);
1688 preds[0] = pred_chain;
1689 for (i = 1; i < *n - 1; i++)
1690 preds[i] = preds[i + 1];
1692 preds[*n - 1] = 0;
1693 *n = *n - 1;
1695 else
1696 return false;
1699 VEC_safe_push (use_pred_info_t, heap, x,
1700 VEC_index (use_pred_info_t, pred_chain, 0));
1702 /* The loop extracts x1, x2, x3, etc from chains
1703 x1 OR (!x1 AND x2) OR (!x1 AND !x2 AND x3) OR ... */
1704 for (i = 1; i < *n; i++)
1706 pred_chain = preds[i];
1707 if (VEC_length (use_pred_info_t, pred_chain) != i + 1)
1708 return false;
1710 for (j = 0; j < i; j++)
1712 xj = VEC_index (use_pred_info_t, x, j);
1713 nxj = VEC_index (use_pred_info_t, pred_chain, j);
1715 /* Check if nxj is !xj */
1716 if (gimple_cond_lhs (xj->cond) != gimple_cond_lhs (nxj->cond)
1717 || gimple_cond_rhs (xj->cond) != gimple_cond_rhs (nxj->cond)
1718 || gimple_cond_code (xj->cond) != gimple_cond_code (nxj->cond)
1719 || (xj->invert == nxj->invert))
1720 return false;
1723 VEC_safe_push (use_pred_info_t, heap, x,
1724 VEC_index (use_pred_info_t, pred_chain, i));
1727 /* Now normalize the pred chains using the extracted x1, x2, x3 etc. */
1728 for (j = 0; j < *n; j++)
1730 use_pred_info_t t;
1731 xj = VEC_index (use_pred_info_t, x, j);
1733 t = XNEW (struct use_pred_info);
1734 *t = *xj;
1736 VEC_replace (use_pred_info_t, x, j, t);
1739 for (i = 0; i < *n; i++)
1741 pred_chain = preds[i];
1742 for (j = 0; j < VEC_length (use_pred_info_t, pred_chain); j++)
1743 free (VEC_index (use_pred_info_t, pred_chain, j));
1744 VEC_free (use_pred_info_t, heap, pred_chain);
1745 pred_chain = 0;
1746 /* A new chain. */
1747 VEC_safe_push (use_pred_info_t, heap, pred_chain,
1748 VEC_index (use_pred_info_t, x, i));
1749 preds[i] = pred_chain;
1751 return true;
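/* Illustrative example (not part of the original sources): the phi
   operand def chains for

       if (a) x = 1;
       else if (b) x = 2;

   come out as { a } and { (.NOT.) a, b }.  normalize_preds detects
   that the second chain has the form (!a AND b) and, using
   x OR (!x AND y) == x OR y, rewrites the two chains as { a } and
   { b }.  */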
1756 /* Computes the predicates that guard the use and checks
1757 if the incoming paths that have empty (or possibly
1758 empty) definition can be pruned/filtered. The function returns
1759 true if it can be determined that the use of PHI's def in
1760 USE_STMT is guarded with a predicate set not overlapping with
1761 predicate sets of all runtime paths that do not have a definition.
1762 Returns false if it is not or it can not be determined. USE_BB is
1763 the bb of the use (for phi operand use, the bb is not the bb of
1764 the phi stmt, but the src bb of the operand edge). UNINIT_OPNDS
1765 is a bit vector. If an operand of PHI is uninitialized, the
1766 corresponding bit in the vector is 1. VISITED_PHIS is a pointer
1767 set of phis being visited. */
1769 static bool
1770 is_use_properly_guarded (gimple use_stmt,
1771 basic_block use_bb,
1772 gimple phi,
1773 unsigned uninit_opnds,
1774 struct pointer_set_t *visited_phis)
1776 basic_block phi_bb;
1777 VEC(use_pred_info_t, heap) **preds = 0;
1778 VEC(use_pred_info_t, heap) **def_preds = 0;
1779 size_t num_preds = 0, num_def_preds = 0;
1780 bool has_valid_preds = false;
1781 bool is_properly_guarded = false;
1783 if (pointer_set_insert (visited_phis, phi))
1784 return false;
1786 phi_bb = gimple_bb (phi);
1788 if (is_non_loop_exit_postdominating (use_bb, phi_bb))
1789 return false;
1791 has_valid_preds = find_predicates (&preds, &num_preds,
1792 phi_bb, use_bb);
1794 if (!has_valid_preds)
1796 destroy_predicate_vecs (num_preds, preds);
1797 return false;
1800 if (dump_file)
1801 dump_predicates (use_stmt, num_preds, preds,
1802 "\nUse in stmt ");
1804 has_valid_preds = find_def_preds (&def_preds,
1805 &num_def_preds, phi);
1807 if (has_valid_preds)
1809 bool normed;
1810 if (dump_file)
1811 dump_predicates (phi, num_def_preds, def_preds,
1812 "Operand defs of phi ");
1814 normed = normalize_preds (def_preds, &num_def_preds);
1815 if (normed && dump_file)
1817 fprintf (dump_file, "\nNormalized to\n");
1818 dump_predicates (phi, num_def_preds, def_preds,
1819 "Operand defs of phi ");
1821 is_properly_guarded =
1822 is_superset_of (def_preds, num_def_preds,
1823 preds, num_preds);
1826 /* further prune the dead incoming phi edges. */
1827 if (!is_properly_guarded)
1828 is_properly_guarded
1829 = use_pred_not_overlap_with_undef_path_pred (
1830 num_preds, preds, phi, uninit_opnds, visited_phis);
1832 destroy_predicate_vecs (num_preds, preds);
1833 destroy_predicate_vecs (num_def_preds, def_preds);
1834 return is_properly_guarded;
1837 /* Searches through all uses of a potentially
1838 uninitialized variable defined by PHI and returns a use
1839 statement if the use is not properly guarded. It returns
1840 NULL if all uses are guarded. UNINIT_OPNDS is a bitvector
1841 holding the position(s) of uninit PHI operands. WORKLIST
1842 is the vector of candidate phis that may be updated by this
1843 function. ADDED_TO_WORKLIST is the pointer set tracking
1844 if the new phi is already in the worklist. */
1846 static gimple
1847 find_uninit_use (gimple phi, unsigned uninit_opnds,
1848 VEC(gimple, heap) **worklist,
1849 struct pointer_set_t *added_to_worklist)
1851 tree phi_result;
1852 use_operand_p use_p;
1853 gimple use_stmt;
1854 imm_use_iterator iter;
1856 phi_result = gimple_phi_result (phi);
1858 FOR_EACH_IMM_USE_FAST (use_p, iter, phi_result)
1860 struct pointer_set_t *visited_phis;
1861 basic_block use_bb;
1863 use_stmt = USE_STMT (use_p);
1864 if (is_gimple_debug (use_stmt))
1865 continue;
1867 visited_phis = pointer_set_create ();
1869 if (gimple_code (use_stmt) == GIMPLE_PHI)
1870 use_bb = gimple_phi_arg_edge (use_stmt,
1871 PHI_ARG_INDEX_FROM_USE (use_p))->src;
1872 else
1873 use_bb = gimple_bb (use_stmt);
1875 if (is_use_properly_guarded (use_stmt,
1876 use_bb,
1877 phi,
1878 uninit_opnds,
1879 visited_phis))
1881 pointer_set_destroy (visited_phis);
1882 continue;
1884 pointer_set_destroy (visited_phis);
1886 if (dump_file && (dump_flags & TDF_DETAILS))
1888 fprintf (dump_file, "[CHECK]: Found unguarded use: ");
1889 print_gimple_stmt (dump_file, use_stmt, 0, 0);
1891 /* Found one real use, return. */
1892 if (gimple_code (use_stmt) != GIMPLE_PHI)
1893 return use_stmt;
1895 /* Found a phi use that is not guarded,
1896 add the phi to the worklist. */
1897 if (!pointer_set_insert (added_to_worklist,
1898 use_stmt))
1900 if (dump_file && (dump_flags & TDF_DETAILS))
1902 fprintf (dump_file, "[WORKLIST]: Update worklist with phi: ");
1903 print_gimple_stmt (dump_file, use_stmt, 0, 0);
1906 VEC_safe_push (gimple, heap, *worklist, use_stmt);
1907 pointer_set_insert (possibly_undefined_names,
1908 phi_result);
1912 return NULL;
1915 /* Looks for inputs to PHI that are SSA_NAMEs that have empty definitions
1916 and gives a warning if there exists a runtime path from the entry to a
1917 use of the PHI def that does not contain a definition. In other words,
1918 the warning is on the real use. The more dead paths the compiler can
1919 prune, the fewer false positives the warning produces. WORKLIST
1920 is a vector of candidate phis to be examined. ADDED_TO_WORKLIST is
1921 a pointer set tracking if the new phi is added to the worklist or not. */
1923 static void
1924 warn_uninitialized_phi (gimple phi, VEC(gimple, heap) **worklist,
1925 struct pointer_set_t *added_to_worklist)
1927 unsigned uninit_opnds;
1928 gimple uninit_use_stmt = 0;
1929 tree uninit_op;
1931 /* Don't look at memory tags. */
1932 if (!is_gimple_reg (gimple_phi_result (phi)))
1933 return;
1935 uninit_opnds = compute_uninit_opnds_pos (phi);
1937 if (MASK_EMPTY (uninit_opnds))
1938 return;
1940 if (dump_file && (dump_flags & TDF_DETAILS))
1942 fprintf (dump_file, "[CHECK]: examining phi: ");
1943 print_gimple_stmt (dump_file, phi, 0, 0);
1946 /* Now check if we have any use of the value without proper guard. */
1947 uninit_use_stmt = find_uninit_use (phi, uninit_opnds,
1948 worklist, added_to_worklist);
1950 /* All uses are properly guarded. */
1951 if (!uninit_use_stmt)
1952 return;
1954 uninit_op = gimple_phi_arg_def (phi, MASK_FIRST_SET_BIT (uninit_opnds));
1955 warn_uninit (OPT_Wmaybe_uninitialized, uninit_op, SSA_NAME_VAR (uninit_op),
1956 SSA_NAME_VAR (uninit_op),
1957 "%qD may be used uninitialized in this function",
1958 uninit_use_stmt);
1963 /* Entry point to the late uninitialized warning pass. */
1965 static unsigned int
1966 execute_late_warn_uninitialized (void)
1968 basic_block bb;
1969 gimple_stmt_iterator gsi;
1970 VEC(gimple, heap) *worklist = 0;
1971 struct pointer_set_t *added_to_worklist;
1973 calculate_dominance_info (CDI_DOMINATORS);
1974 calculate_dominance_info (CDI_POST_DOMINATORS);
1975 /* Re-do the plain uninitialized variable check, as optimization may have
1976 straightened control flow. Do this first so that we don't accidentally
1977 get a "may be" warning when we'd have seen an "is" warning later. */
1978 warn_uninitialized_vars (/*warn_possibly_uninitialized=*/1);
1980 timevar_push (TV_TREE_UNINIT);
1982 possibly_undefined_names = pointer_set_create ();
1983 added_to_worklist = pointer_set_create ();
1985 /* Initialize worklist */
1986 FOR_EACH_BB (bb)
1987 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1989 gimple phi = gsi_stmt (gsi);
1990 size_t n, i;
1992 n = gimple_phi_num_args (phi);
1994 /* Don't look at memory tags. */
1995 if (!is_gimple_reg (gimple_phi_result (phi)))
1996 continue;
1998 for (i = 0; i < n; ++i)
2000 tree op = gimple_phi_arg_def (phi, i);
2001 if (TREE_CODE (op) == SSA_NAME
2002 && ssa_undefined_value_p (op))
2004 VEC_safe_push (gimple, heap, worklist, phi);
2005 pointer_set_insert (added_to_worklist, phi);
2006 if (dump_file && (dump_flags & TDF_DETAILS))
2008 fprintf (dump_file, "[WORKLIST]: add to initial list: ");
2009 print_gimple_stmt (dump_file, phi, 0, 0);
2011 break;
2016 while (VEC_length (gimple, worklist) != 0)
2018 gimple cur_phi = 0;
2019 cur_phi = VEC_pop (gimple, worklist);
2020 warn_uninitialized_phi (cur_phi, &worklist, added_to_worklist);
2023 VEC_free (gimple, heap, worklist);
2024 pointer_set_destroy (added_to_worklist);
2025 pointer_set_destroy (possibly_undefined_names);
2026 possibly_undefined_names = NULL;
2027 free_dominance_info (CDI_POST_DOMINATORS);
2028 timevar_pop (TV_TREE_UNINIT);
2029 return 0;
2032 static bool
2033 gate_warn_uninitialized (void)
2035 return warn_uninitialized != 0;
2038 struct gimple_opt_pass pass_late_warn_uninitialized =
2041 GIMPLE_PASS,
2042 "uninit", /* name */
2043 gate_warn_uninitialized, /* gate */
2044 execute_late_warn_uninitialized, /* execute */
2045 NULL, /* sub */
2046 NULL, /* next */
2047 0, /* static_pass_number */
2048 TV_NONE, /* tv_id */
2049 PROP_ssa, /* properties_required */
2050 0, /* properties_provided */
2051 0, /* properties_destroyed */
2052 0, /* todo_flags_start */
2053 0 /* todo_flags_finish */