gcc/tree-ssa-ifcombine.c
/* Combining of if-expressions on trees.
   Copyright (C) 2007-2014 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
/* rtl is needed only because arm back-end requires it for
   BRANCH_COST.  */
#include "rtl.h"
#include "tm_p.h"
#include "tree.h"
#include "stor-layout.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-pass.h"
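
/* If the target does not provide its own definition of
   LOGICAL_OP_NON_SHORT_CIRCUIT, fall back to the default below, which
   treats merging two conditions into one non-short-circuit test as
   worthwhile whenever branches are costly, i.e. whenever BRANCH_COST
   is at least 2.  */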
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif

/* This pass combines COND_EXPRs to simplify control flow.  It
   currently recognizes bit tests and comparisons in chains that
   represent logical and or logical or of two COND_EXPRs.

   It does so by walking basic blocks in an approximate reverse
   post-dominator order and trying to match CFG patterns that
   represent logical and or logical or of two COND_EXPRs.
   Transformations are done if the COND_EXPR conditions match
   either

     1. two single bit tests X & (1 << Yn) (for logical and)

     2. two bit tests X & Yn (for logical or)

     3. two comparisons X OPn Y (for logical or)

   To simplify this pass, removing basic blocks and dead code
   is left to CFG cleanup and DCE.  */

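/* For illustration only, a rough source-level sketch of the chains this
   pass targets (the names x, a, b, c and foo below are made up):

     1. Logical and of two single bit tests:
          if (x & (1 << a))
            if (x & (1 << b))
              foo ();

     2. Logical or of two bit tests:
          if ((x & 0xf0) || (x & 0x0f))
            foo ();

     3. Logical or of two comparisons:
          if (a < b || a < c)
            foo ();

   In each case the two GIMPLE_CONDs are merged into one condition on the
   inner block and the now-redundant outer condition is replaced by a
   constant; removing the resulting dead blocks is left to CFG cleanup.  */
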
/* Recognize an if-then-else CFG pattern starting to match with the
   COND_BB basic-block containing the COND_EXPR.  The recognized
   then and else blocks are stored to *THEN_BB and *ELSE_BB.  If
   *THEN_BB and/or *ELSE_BB are already set, they are required to
   match the then and else basic-blocks to make the pattern match.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_if_then_else (basic_block cond_bb,
                        basic_block *then_bb, basic_block *else_bb)
{
  edge t, e;

  if (EDGE_COUNT (cond_bb->succs) != 2)
    return false;

  /* Find the then/else edges.  */
  t = EDGE_SUCC (cond_bb, 0);
  e = EDGE_SUCC (cond_bb, 1);
  if (!(t->flags & EDGE_TRUE_VALUE))
    {
      edge tmp = t;
      t = e;
      e = tmp;
    }
  if (!(t->flags & EDGE_TRUE_VALUE)
      || !(e->flags & EDGE_FALSE_VALUE))
    return false;

  /* Check if the edge destinations point to the required block.  */
  if (*then_bb
      && t->dest != *then_bb)
    return false;
  if (*else_bb
      && e->dest != *else_bb)
    return false;

  if (!*then_bb)
    *then_bb = t->dest;
  if (!*else_bb)
    *else_bb = e->dest;

  return true;
}

/* Verify if the basic block BB does not have side-effects.  Return
   true in this case, else false.  */

static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_has_side_effects (stmt)
          || gimple_could_trap_p (stmt)
          || gimple_vuse (stmt))
        return false;
    }

  return true;
}

/* Return true if BB is an empty forwarder block to TO_BB.  */

static bool
forwarder_block_to (basic_block bb, basic_block to_bb)
{
  return empty_block_p (bb)
         && single_succ_p (bb)
         && single_succ (bb) == to_bb;
}

/* Verify if all PHI node arguments in DEST for edges from BB1 or
   BB2 to DEST are the same.  This makes the CFG merge point
   free from side-effects.  Return true in this case, else false.  */

static bool
same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
{
  edge e1 = find_edge (bb1, dest);
  edge e2 = find_edge (bb2, dest);
  gimple_stmt_iterator gsi;
  gimple phi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi_stmt (gsi);
      if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
                            PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
        return false;
    }

  return true;
}

/* Return the best representative SSA name for CANDIDATE which is used
   in a bit test.  */

static tree
get_name_for_bit_test (tree candidate)
{
  /* Skip single-use names in favor of using the name from a
     non-widening conversion definition.  */
  if (TREE_CODE (candidate) == SSA_NAME
      && has_single_use (candidate))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (candidate);
      if (is_gimple_assign (def_stmt)
          && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
        {
          if (TYPE_PRECISION (TREE_TYPE (candidate))
              <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
            return gimple_assign_rhs1 (def_stmt);
        }
    }

  return candidate;
}

/* Recognize a single bit test pattern in GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bit
   in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_single_bit_test (gimple cond, tree *name, tree *bit, bool inv)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = op0 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
         find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      while (is_gimple_assign (stmt)
             && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
                      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
                  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
                 || gimple_assign_ssa_name_copy_p (stmt)))
        stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
        {
          /* op0 & (1 << op1) */
          *bit = gimple_assign_rhs2 (stmt);
          *name = gimple_assign_rhs1 (stmt);
        }
      else
        {
          /* t & 1 */
          *bit = integer_zero_node;
          *name = get_name_for_bit_test (orig_name);
        }

      return true;
    }

  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      *bit = build_int_cst (integer_type_node,
                            tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
         specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
          && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
          && integer_onep (gimple_assign_rhs1 (tmp)))
        {
          *name = gimple_assign_rhs2 (stmt);
          *bit = gimple_assign_rhs2 (tmp);
          return true;
        }

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
          && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
          && integer_onep (gimple_assign_rhs1 (tmp)))
        {
          *name = gimple_assign_rhs1 (stmt);
          *bit = gimple_assign_rhs2 (tmp);
          return true;
        }
    }

  return false;
}

/* Recognize a bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bits
   in *BITS.  The COND_EXPR computes *NAME & *BITS.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_bits_test (gimple cond, tree *name, tree *bits, bool inv)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
    return false;

  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
  *bits = gimple_assign_rhs2 (stmt);

  return true;
}

/* If-convert an AND pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer one by OUTER_COND_BB.
   INNER_INV, OUTER_INV and RESULT_INV indicate whether the conditions
   are inverted.
   Returns true if the edges to the common else basic-block were merged.  */

static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
                   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
  gimple_stmt_iterator gsi;
  gimple inner_cond, outer_cond;
  tree name1, name2, bit1, bit2, bits1, bits2;

  inner_cond = last_stmt (inner_cond_bb);
  if (!inner_cond
      || gimple_code (inner_cond) != GIMPLE_COND)
    return false;

  outer_cond = last_stmt (outer_cond_bb);
  if (!outer_cond
      || gimple_code (outer_cond) != GIMPLE_COND)
    return false;

  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & (bit1 | bit2) == (bit1 | bit2).  */
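  /* Roughly, a chain that tests x & (1 << a) and then x & (1 << b) on
     the same x collapses into the single test
       (x & ((1 << a) | (1 << b))) == ((1 << a) | (1 << b))
     (with == replaced by != when the result is inverted).  */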
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                        build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
                                     true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
                       boolean_type_node, t2, t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
        outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing double bit test to ");
          print_generic_expr (dump_file, name1, 0);
          fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
          print_generic_expr (dump_file, bit1, 0);
          fprintf (dump_file, ") | (1 << ");
          print_generic_expr (dump_file, bit2, 0);
          fprintf (dump_file, ")\n");
        }

      return true;
    }
  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
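  /* Roughly, two tests such as x & 0xf0 and x & 0x0f on the same x
     collapse into a single test of x & (0xf0 | 0x0f) against zero.  */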
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
           && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      gimple_stmt_iterator gsi;
      tree t;

      /* Find the common name which is bit-tested.  */
      if (name1 == name2)
        ;
      else if (bits1 == bits2)
        {
          t = name2;
          name2 = bits2;
          bits2 = t;
          t = name1;
          name1 = bits1;
          bits1 = t;
        }
      else if (name1 == bits2)
        {
          t = name2;
          name2 = bits2;
          bits2 = t;
        }
      else if (bits1 == name2)
        {
          t = name1;
          name1 = bits1;
          bits1 = t;
        }
      else
        return false;

      /* As we strip non-widening conversions in finding a common
         name that is tested, make sure to end up with an integral
         type for building the bit operations.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
          >= TYPE_PRECISION (TREE_TYPE (bits2)))
        {
          bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
          name1 = fold_convert (TREE_TYPE (bits1), name1);
          bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
          bits2 = fold_convert (TREE_TYPE (bits1), bits2);
        }
      else
        {
          bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
          name1 = fold_convert (TREE_TYPE (bits2), name1);
          bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
          bits1 = fold_convert (TREE_TYPE (bits2), bits1);
        }

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
                       build_int_cst (TREE_TYPE (t), 0));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
        outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing bits or bits test to ");
          print_generic_expr (dump_file, name1, 0);
          fprintf (dump_file, " & T != 0\nwith temporary T = ");
          print_generic_expr (dump_file, bits1, 0);
          fprintf (dump_file, " | ");
          print_generic_expr (dump_file, bits2, 0);
          fprintf (dump_file, "\n");
        }

      return true;
    }
  /* See if we have two comparisons that we can merge into one.  */
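  /* For example, two comparisons of the same value against constants,
     such as a <= 7 and a <= 10, can often be folded by
     maybe_fold_and_comparisons into the single test a <= 7; if folding
     fails and non-short-circuit logical ops are acceptable, the two
     conditions are instead combined with a TRUTH_AND_EXPR.  */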
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
           && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible).  */
      if (inner_inv)
        inner_cond_code = invert_tree_comparison (inner_cond_code,
          HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (inner_cond)))));
      if (inner_cond_code == ERROR_MARK)
        return false;
      if (outer_inv)
        outer_cond_code = invert_tree_comparison (outer_cond_code,
          HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (outer_cond)))));
      if (outer_cond_code == ERROR_MARK)
        return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

      if (!(t = maybe_fold_and_comparisons (inner_cond_code,
                                            gimple_cond_lhs (inner_cond),
                                            gimple_cond_rhs (inner_cond),
                                            outer_cond_code,
                                            gimple_cond_lhs (outer_cond),
                                            gimple_cond_rhs (outer_cond))))
        {
          tree t1, t2;
          gimple_stmt_iterator gsi;
          if (!LOGICAL_OP_NON_SHORT_CIRCUIT)
            return false;
          /* Only do this optimization if the inner bb contains only the
             conditional.  */
          if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
            return false;
          t1 = fold_build2_loc (gimple_location (inner_cond),
                                inner_cond_code,
                                boolean_type_node,
                                gimple_cond_lhs (inner_cond),
                                gimple_cond_rhs (inner_cond));
          t2 = fold_build2_loc (gimple_location (outer_cond),
                                outer_cond_code,
                                boolean_type_node,
                                gimple_cond_lhs (outer_cond),
                                gimple_cond_rhs (outer_cond));
          t = fold_build2_loc (gimple_location (inner_cond),
                               TRUTH_AND_EXPR, boolean_type_node, t1, t2);
          if (result_inv)
            {
              t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
              result_inv = false;
            }
          gsi = gsi_for_stmt (inner_cond);
          t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL, true,
                                          GSI_SAME_STMT);
        }
      if (result_inv)
        t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
        outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing two comparisons to ");
          print_generic_expr (dump_file, t, 0);
          fprintf (dump_file, "\n");
        }

      return true;
    }

  return false;
}

/* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
   dispatch to the appropriate if-conversion helper for a particular
   set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
   PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB.  */

static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
                         basic_block then_bb, basic_block else_bb,
                         basic_block phi_pred_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto inner_cond_bb; else goto else_bb;
           <inner_cond_bb>
             if (p) goto ...; else goto else_bb;
             ...
           <else_bb>
             ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
                                false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto else_bb; else goto inner_cond_bb;
           <inner_cond_bb>
             if (p) goto ...; else goto else_bb;
             ...
           <else_bb>
             ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
                                false);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto then_bb; else goto inner_cond_bb;
           <inner_cond_bb>
             if (p) goto then_bb; else goto ...;
           <then_bb>
             ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
                                true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto inner_cond_bb; else goto then_bb;
           <inner_cond_bb>
             if (p) goto then_bb; else goto ...;
           <then_bb>
             ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
                                true);
    }

  return false;
}

/* Recognize a CFG pattern and dispatch to the appropriate
   if-conversion helper.  We start with BB as the innermost
   worker basic-block.  Returns true if a transformation was done.  */

static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  basic_block then_bb = NULL, else_bb = NULL;

  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block whose entry edges we can merge.  That is:
       if (a || b)
         ;
     and
       if (a && b)
         ;
     This requires a single predecessor of the inner cond_bb.  */
  if (single_pred_p (inner_cond_bb))
    {
      basic_block outer_cond_bb = single_pred (inner_cond_bb);

      if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
                                   then_bb, else_bb, inner_cond_bb))
        return true;

      if (forwarder_block_to (else_bb, then_bb))
        {
          /* Other possibilities for the && form, if else_bb is an
             empty forwarder block to then_bb.  Compared to the above
             simpler forms this can be treated as if then_bb and else_bb
             were swapped, and the corresponding inner_cond_bb not
             inverted because of that.  For same_phi_args_p we look at
             equality of arguments between edge from outer_cond_bb and
             the forwarder block.  */
          if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
                                       then_bb, else_bb))
            return true;
        }
      else if (forwarder_block_to (then_bb, else_bb))
        {
          /* Other possibilities for the || form, if then_bb is an
             empty forwarder block to else_bb.  Compared to the above
             simpler forms this can be treated as if then_bb and else_bb
             were swapped, and the corresponding inner_cond_bb not
             inverted because of that.  For same_phi_args_p we look at
             equality of arguments between edge from outer_cond_bb and
             the forwarder block.  */
          if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
                                       then_bb, then_bb))
            return true;
        }
    }

  return false;
}

/* Main entry for the tree if-conversion pass.  */

namespace {

const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_tree_ifcombine

unsigned int
pass_tree_ifcombine::execute (function *fun)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = single_pred_before_succ_order ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Search every basic block for COND_EXPRs we may be able to optimize.

     We walk the blocks in an order that guarantees that a block with
     a single predecessor is processed after the predecessor.
     This ensures that we collapse outer ifs before visiting the
     inner ones, and also that we do not try to visit a removed
     block.  This is the opposite of PHI-OPT, because we cascade the
     combining rather than cascading PHIs.  */
  for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
    {
      basic_block bb = bbs[i];
      gimple stmt = last_stmt (bb);

      if (stmt
          && gimple_code (stmt) == GIMPLE_COND)
        cfg_changed |= tree_ssa_ifcombine_bb (bb);
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_tree_ifcombine (gcc::context *ctxt)
{
  return new pass_tree_ifcombine (ctxt);
}