/* Combining of if-expressions on trees.
   Copyright (C) 2007-2016 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "tm_p.h"
#include "ssa.h"
#include "tree-pretty-print.h"
/* rtl is needed only because arm back-end requires it for
   BRANCH_COST.  */
#include "fold-const.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-ssa.h"

#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif

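/* Informal note: LOGICAL_OP_NON_SHORT_CIRCUIT is true when the target's
   BRANCH_COST makes taken branches expensive enough that evaluating both
   operands of a logical AND/OR unconditionally (as done below when merging
   two comparisons) is expected to pay off.  Targets may override it.  */
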
/* This pass combines COND_EXPRs to simplify control flow.  It
   currently recognizes bit tests and comparisons in chains that
   represent a logical AND or logical OR of two COND_EXPRs.

   It does so by walking basic blocks in an approximate reverse
   post-dominator order and trying to match CFG patterns that
   represent a logical AND or logical OR of two COND_EXPRs.
   Transformations are done if the COND_EXPR conditions match
   either

     1. two single bit tests X & (1 << Yn) (for logical and)

     2. two bit tests X & Yn (for logical or)

     3. two comparisons X OPn Y (for logical or)

   To simplify this pass, removing basic blocks and dead code
   is left to CFG cleanup and DCE.  */

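/* Illustrative sketch (informal, for orientation only): for
   side-effect-free blocks, transformation 1 turns

     if (x & (1 << a))
       if (x & (1 << b))
         foo ();

   into the equivalent of

     T = (1 << a) | (1 << b);
     if ((x & T) == T)
       foo ();

   with the outer condition replaced by a constant so that CFG cleanup
   removes the now-redundant block.  */
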
/* Recognize an if-then-else CFG pattern starting to match with the
   COND_BB basic-block containing the COND_EXPR.  The recognized
   then and else blocks are stored to *THEN_BB and *ELSE_BB.  If
   *THEN_BB and/or *ELSE_BB are already set, they are required to
   match the then and else basic-blocks to make the pattern match.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_if_then_else (basic_block cond_bb,
			basic_block *then_bb, basic_block *else_bb)
{
  edge t, e;

  if (EDGE_COUNT (cond_bb->succs) != 2)
    return false;

  /* Find the then/else edges.  */
  t = EDGE_SUCC (cond_bb, 0);
  e = EDGE_SUCC (cond_bb, 1);
  if (!(t->flags & EDGE_TRUE_VALUE))
    std::swap (t, e);
  if (!(t->flags & EDGE_TRUE_VALUE)
      || !(e->flags & EDGE_FALSE_VALUE))
    return false;

  /* Check if the edge destinations point to the required block.  */
  if (*then_bb
      && t->dest != *then_bb)
    return false;
  if (*else_bb
      && e->dest != *else_bb)
    return false;

  if (!*then_bb)
    *then_bb = t->dest;
  if (!*else_bb)
    *else_bb = e->dest;

  return true;
}

/* Verify if the basic block BB does not have side-effects.  Return
   true in this case, else false.  */

static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_has_side_effects (stmt)
	  || gimple_uses_undefined_value_p (stmt)
	  || gimple_could_trap_p (stmt)
	  || gimple_vuse (stmt))
	return false;
    }

  return true;
}

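/* For example, an inner block containing a memory load such as

     tmp_1 = *p_2(D);

   is rejected by the check above because the load carries a virtual use
   (gimple_vuse), so the block cannot safely be evaluated unconditionally.  */
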
/* Return true if BB is an empty forwarder block to TO_BB.  */

static bool
forwarder_block_to (basic_block bb, basic_block to_bb)
{
  return empty_block_p (bb)
	 && single_succ_p (bb)
	 && single_succ (bb) == to_bb;
}

/* Verify if all PHI node arguments in DEST for edges from BB1 or
   BB2 to DEST are the same.  This makes the CFG merge point
   free from side-effects.  Return true in this case, else false.  */

static bool
same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
{
  edge e1 = find_edge (bb1, dest);
  edge e2 = find_edge (bb2, dest);
  gphi_iterator gsi;
  gphi *phi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi.phi ();
      if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
			    PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
	return false;
    }

  return true;
}

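/* For example, a merge point with

     # x_1 = PHI <0(bb1), 0(bb2)>

   passes the check above (both edges provide the same argument), while
   # x_1 = PHI <0(bb1), 1(bb2)> does not.  */
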
/* Return the best representative SSA name for CANDIDATE which is used
   in a bit test.  */

static tree
get_name_for_bit_test (tree candidate)
{
  /* Skip single-use names in favor of using the name from a
     non-widening conversion definition.  */
  if (TREE_CODE (candidate) == SSA_NAME
      && has_single_use (candidate))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (candidate);
      if (is_gimple_assign (def_stmt)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
	{
	  if (TYPE_PRECISION (TREE_TYPE (candidate))
	      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
	    return gimple_assign_rhs1 (def_stmt);
	}
    }

  return candidate;
}

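/* For example, given a single-use name defined by a non-widening
   conversion such as

     _1 = (unsigned char) x_2(D);
     _3 = _1 & 1;

   get_name_for_bit_test (_1) returns x_2, so the bit test can be
   expressed on the name from before the conversion.  */
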
/* Recognize a single bit test pattern in GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bit
   in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
  gimple *stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = op0 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
	 find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      while (is_gimple_assign (stmt)
	     && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
		      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
		 || gimple_assign_ssa_name_copy_p (stmt)))
	stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
	{
	  /* op0 & (1 << op1) */
	  *bit = gimple_assign_rhs2 (stmt);
	  *name = gimple_assign_rhs1 (stmt);
	}
      else
	{
	  /* t & 1 */
	  *bit = integer_zero_node;
	  *name = get_name_for_bit_test (orig_name);
	}

      return true;
    }

  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      *bit = build_int_cst (integer_type_node,
			    tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple *tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
	 specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs2 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs1 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}
    }

  return false;
}

/* Recognize a bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bits
   in *BITS.  The COND_EXPR computes *NAME & *BITS.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
{
  gimple *stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
    return false;

  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
  *bits = gimple_assign_rhs2 (stmt);

  return true;
}

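/* Informal sketch of how the above feeds transformation 2: two bit
   tests of the same name branching to a common destination, e.g.

     if (x & 3)
       goto common;
     else if (x & 12)
       goto common;

   can be merged by ifcombine_ifandif below into a single test

     if (x & 15)
       goto common;

   (this is the logical-or case from the overview at the top of the file).  */
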
/* If-convert an AND pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   INNER_INV, OUTER_INV and RESULT_INV indicate whether the conditions
   are inverted.
   Returns true if the edges to the common else basic-block were merged.  */

static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
		   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
  gimple_stmt_iterator gsi;
  gimple *inner_stmt, *outer_stmt;
  gcond *inner_cond, *outer_cond;
  tree name1, name2, bit1, bit2, bits1, bits2;

  inner_stmt = last_stmt (inner_cond_bb);
  if (!inner_stmt
      || gimple_code (inner_stmt) != GIMPLE_COND)
    return false;
  inner_cond = as_a <gcond *> (inner_stmt);

  outer_stmt = last_stmt (outer_cond_bb);
  if (!outer_stmt
      || gimple_code (outer_stmt) != GIMPLE_COND)
    return false;
  outer_cond = as_a <gcond *> (outer_stmt);

  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & (bit1 | bit2) == (bit1 | bit2).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
		       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
			build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
				     true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
		       boolean_type_node, t2, t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing double bit test to ");
	  print_generic_expr (dump_file, name1, 0);
	  fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
	  print_generic_expr (dump_file, bit1, 0);
	  fprintf (dump_file, ") | (1 << ");
	  print_generic_expr (dump_file, bit2, 0);
	  fprintf (dump_file, ")\n");
	}

      return true;
    }
  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
	   && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      gimple_stmt_iterator gsi;
      tree t;

      /* Find the common name which is bit-tested.  */
      if (name1 == name2)
	;
      else if (bits1 == bits2)
	{
	  std::swap (name2, bits2);
	  std::swap (name1, bits1);
	}
      else if (name1 == bits2)
	std::swap (name2, bits2);
      else if (bits1 == name2)
	std::swap (name1, bits1);
      else
	return false;

      /* As we strip non-widening conversions in finding a common
	 name that is tested make sure to end up with an integral
	 type for building the bit operations.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
	  >= TYPE_PRECISION (TREE_TYPE (bits2)))
	{
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  name1 = fold_convert (TREE_TYPE (bits1), name1);
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  bits2 = fold_convert (TREE_TYPE (bits1), bits2);
	}
      else
	{
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  name1 = fold_convert (TREE_TYPE (bits2), name1);
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  bits1 = fold_convert (TREE_TYPE (bits2), bits1);
	}

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
		       build_int_cst (TREE_TYPE (t), 0));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing bits or bits test to ");
	  print_generic_expr (dump_file, name1, 0);
	  fprintf (dump_file, " & T != 0\nwith temporary T = ");
	  print_generic_expr (dump_file, bits1, 0);
	  fprintf (dump_file, " | ");
	  print_generic_expr (dump_file, bits2, 0);
	  fprintf (dump_file, "\n");
	}

      return true;
    }
  /* See if we have two comparisons that we can merge into one.  */
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
	   && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible).  */
      if (inner_inv)
	inner_cond_code = invert_tree_comparison (inner_cond_code,
	  HONOR_NANS (gimple_cond_lhs (inner_cond)));
      if (inner_cond_code == ERROR_MARK)
	return false;
      if (outer_inv)
	outer_cond_code = invert_tree_comparison (outer_cond_code,
	  HONOR_NANS (gimple_cond_lhs (outer_cond)));
      if (outer_cond_code == ERROR_MARK)
	return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

      if (!(t = maybe_fold_and_comparisons (inner_cond_code,
					    gimple_cond_lhs (inner_cond),
					    gimple_cond_rhs (inner_cond),
					    outer_cond_code,
					    gimple_cond_lhs (outer_cond),
					    gimple_cond_rhs (outer_cond))))
	{
	  tree t1, t2;
	  gimple_stmt_iterator gsi;
	  if (!LOGICAL_OP_NON_SHORT_CIRCUIT)
	    return false;
	  /* Only do this optimization if the inner bb contains only the
	     conditional.  */
	  if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
	    return false;
	  t1 = fold_build2_loc (gimple_location (inner_cond),
				inner_cond_code,
				boolean_type_node,
				gimple_cond_lhs (inner_cond),
				gimple_cond_rhs (inner_cond));
	  t2 = fold_build2_loc (gimple_location (outer_cond),
				outer_cond_code,
				boolean_type_node,
				gimple_cond_lhs (outer_cond),
				gimple_cond_rhs (outer_cond));
	  t = fold_build2_loc (gimple_location (inner_cond),
			       TRUTH_AND_EXPR, boolean_type_node, t1, t2);
	  if (result_inv)
	    {
	      t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
	      result_inv = false;
	    }
	  gsi = gsi_for_stmt (inner_cond);
	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL, true,
					  GSI_SAME_STMT);
	}
      if (result_inv)
	t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing two comparisons to ");
	  print_generic_expr (dump_file, t, 0);
	  fprintf (dump_file, "\n");
	}

      return true;
    }

  return false;
}

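/* Informal sketch of the two-comparison case above: for

     if (a_1 > 2)
       if (a_1 > 5)
         foo ();

   maybe_fold_and_comparisons typically folds the conjunction to a_1 > 5,
   which becomes the new inner condition; if it cannot fold and
   LOGICAL_OP_NON_SHORT_CIRCUIT holds, the two comparisons are instead
   combined into a single  (a_1 > 2) & (a_1 > 5)  style condition.  */
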
/* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
   dispatch to the appropriate if-conversion helper for a particular
   set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
   PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB.  */

static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
			 basic_block then_bb, basic_block else_bb,
			 basic_block phi_pred_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto else_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	   <else_bb>
	     ...  */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
				false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto else_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	   <else_bb>
	     ...  */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
				false);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto then_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto then_bb; else goto ...;
	   <then_bb>
	     ...  */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
				true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto then_bb;
	   <inner_cond_bb>
	     if (p) goto then_bb; else goto ...;
	   <then_bb>
	     ...  */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
				true);
    }

  return false;
}

/* Recognize a CFG pattern and dispatch to the appropriate
   if-conversion helper.  We start with BB as the innermost
   worker basic-block.  Returns true if a transformation was done.  */

static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  basic_block then_bb = NULL, else_bb = NULL;

  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block whose entry edges we can merge.  That is:
       if (a || b)
	 ;
     and
       if (a && b)
	 ;
     This requires a single predecessor of the inner cond_bb.  */
  if (single_pred_p (inner_cond_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      basic_block outer_cond_bb = single_pred (inner_cond_bb);

      if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
				   then_bb, else_bb, inner_cond_bb))
	return true;

      if (forwarder_block_to (else_bb, then_bb))
	{
	  /* Other possibilities for the && form, if else_bb is an
	     empty forwarder block to then_bb.  Compared to the above simpler
	     forms this can be treated as if then_bb and else_bb were swapped,
	     and the corresponding inner_cond_bb not inverted because of that.
	     For same_phi_args_p we look at equality of arguments between
	     edge from outer_cond_bb and the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, else_bb))
	    return true;
	}
      else if (forwarder_block_to (then_bb, else_bb))
	{
	  /* Other possibilities for the || form, if then_bb is an
	     empty forwarder block to else_bb.  Compared to the above simpler
	     forms this can be treated as if then_bb and else_bb were swapped,
	     and the corresponding inner_cond_bb not inverted because of that.
	     For same_phi_args_p we look at equality of arguments between
	     edge from outer_cond_bb and the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, then_bb))
	    return true;
	}
    }

  return false;
}

/* Main entry for the tree if-conversion pass.  */

namespace {

const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_tree_ifcombine

unsigned int
pass_tree_ifcombine::execute (function *fun)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = single_pred_before_succ_order ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Search every basic block for COND_EXPR we may be able to optimize.

     We walk the blocks in an order that guarantees that a block with
     a single predecessor is processed after the predecessor.
     This ensures that we collapse outer ifs before visiting the
     inner ones, and also that we do not try to visit a removed
     block.  This is opposite of PHI-OPT, because we cascade the
     combining rather than cascading PHIs.  */
  for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
    {
      basic_block bb = bbs[i];
      gimple *stmt = last_stmt (bb);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_COND)
	if (tree_ssa_ifcombine_bb (bb))
	  {
	    /* Clear range info from all stmts in BB which is now executed
	       conditional on an always true/false condition.  */
	    reset_flow_sensitive_info_in_bb (bb);
	    cfg_changed |= true;
	  }
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_tree_ifcombine (gcc::context *ctxt)
{
  return new pass_tree_ifcombine (ctxt);
}