gcc/tree-ssa-ifcombine.c
1 /* Combining of if-expressions on trees.
2 Copyright (C) 2007-2020 Free Software Foundation, Inc.
3 Contributed by Richard Guenther <rguenther@suse.de>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "cfghooks.h"
29 #include "tree-pass.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "ssa.h"
33 #include "tree-pretty-print.h"
34 /* rtl is needed only because arm back-end requires it for
35 BRANCH_COST. */
36 #include "fold-const.h"
37 #include "cfganal.h"
38 #include "gimple-fold.h"
39 #include "gimple-iterator.h"
40 #include "gimplify-me.h"
41 #include "tree-cfg.h"
42 #include "tree-ssa.h"
44 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
45 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
46 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
47 false) >= 2)
48 #endif
50 /* This pass combines COND_EXPRs to simplify control flow. It
51 currently recognizes bit tests and comparisons in chains that
52 represent a logical AND or a logical OR of two COND_EXPRs.
54 It does so by walking basic blocks in an approximate reverse
55 post-dominator order and trying to match CFG patterns that
56 represent a logical AND or a logical OR of two COND_EXPRs.
57 Transformations are done if the COND_EXPR conditions match
58 either
60 1. two single bit tests X & (1 << Yn) (for logical and)
62 2. two bit tests X & Yn (for logical or)
64 3. two comparisons X OPn Y (for logical or)
66 To simplify this pass, removing basic blocks and dead code
67 is left to CFG cleanup and DCE. */
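/* Illustrative source-level sketch of the three shapes listed above and of
   the single test each pair of conditions is combined into.  This function
   and all names in it are hypothetical, used for exposition only, and are
   not referenced anywhere else in this file.  */

static inline int
ifcombine_example_sketch (unsigned flags, unsigned mask1, unsigned mask2,
                          int a, int b, int x, int y)
{
  /* 1. Two single bit tests combined for logical and:
       if (flags & (1u << a)) if (flags & (1u << b)) ...
     becomes
       if ((flags & ((1u << a) | (1u << b))) == ((1u << a) | (1u << b))) ...  */
  if (flags & (1u << a))
    if (flags & (1u << b))
      return 1;

  /* 2. Two bit tests combined for logical or:
       if (flags & mask1) ... else if (flags & mask2) ...
     becomes
       if (flags & (mask1 | mask2)) ...  */
  if (flags & mask1)
    return 2;
  else if (flags & mask2)
    return 2;

  /* 3. Two comparisons combined for logical or:
       if (x < y) ... else if (x == y) ...
     becomes
       if (x <= y) ...  */
  if (x < y)
    return 3;
  else if (x == y)
    return 3;

  return 0;
}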
70 /* Recognize an if-then-else CFG pattern starting at the COND_BB
71 basic-block containing the COND_EXPR. The recognized then and
72 else blocks are stored to *THEN_BB and *ELSE_BB. If
73 *THEN_BB and/or *ELSE_BB are already set, they are required to
74 match the then and else basic-blocks to make the pattern match.
75 Returns true if the pattern matched, false otherwise. */
77 static bool
78 recognize_if_then_else (basic_block cond_bb,
79 basic_block *then_bb, basic_block *else_bb)
81 edge t, e;
83 if (EDGE_COUNT (cond_bb->succs) != 2)
84 return false;
86 /* Find the then/else edges. */
87 t = EDGE_SUCC (cond_bb, 0);
88 e = EDGE_SUCC (cond_bb, 1);
89 if (!(t->flags & EDGE_TRUE_VALUE))
90 std::swap (t, e);
91 if (!(t->flags & EDGE_TRUE_VALUE)
92 || !(e->flags & EDGE_FALSE_VALUE))
93 return false;
95 /* Check if the edge destinations point to the required block. */
96 if (*then_bb
97 && t->dest != *then_bb)
98 return false;
99 if (*else_bb
100 && e->dest != *else_bb)
101 return false;
103 if (!*then_bb)
104 *then_bb = t->dest;
105 if (!*else_bb)
106 *else_bb = e->dest;
108 return true;
111 /* Return true if the basic block BB does not have side effects,
112 else false. */
114 static bool
115 bb_no_side_effects_p (basic_block bb)
117 gimple_stmt_iterator gsi;
119 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
121 gimple *stmt = gsi_stmt (gsi);
123 if (is_gimple_debug (stmt))
124 continue;
126 if (gimple_has_side_effects (stmt)
127 || gimple_uses_undefined_value_p (stmt)
128 || gimple_could_trap_p (stmt)
129 || gimple_vuse (stmt)
130 /* const calls don't match any of the above, yet they could
131 still have some side-effects - they could contain
132 gimple_could_trap_p statements, like floating point
133 exceptions or integer division by zero. See PR70586.
134 FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p
135 should handle this. */
136 || is_gimple_call (stmt))
137 return false;
140 return true;
143 /* Return true if BB is an empty forwarder block to TO_BB. */
145 static bool
146 forwarder_block_to (basic_block bb, basic_block to_bb)
148 return empty_block_p (bb)
149 && single_succ_p (bb)
150 && single_succ (bb) == to_bb;
153 /* Return true if all PHI node arguments in DEST for the edges from
154 BB1 and BB2 to DEST are the same, else false. This makes the CFG
155 merge point free from side-effects. */
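/* For example (with illustrative SSA names), given
     x_4 = PHI <a_1(bb1), a_1(bb2), b_3(bb5)>
   in DEST, the arguments on the edges from bb1 and bb2 are equal, so the
   two edges can be merged without changing the value of x_4.  */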
157 static bool
158 same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
160 edge e1 = find_edge (bb1, dest);
161 edge e2 = find_edge (bb2, dest);
162 gphi_iterator gsi;
163 gphi *phi;
165 for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
167 phi = gsi.phi ();
168 if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
169 PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
170 return false;
173 return true;
176 /* Return the best representative SSA name for CANDIDATE which is used
177 in a bit test. */
179 static tree
180 get_name_for_bit_test (tree candidate)
182 /* Skip single-use names in favor of using the name from a
183 non-widening conversion definition. */
184 if (TREE_CODE (candidate) == SSA_NAME
185 && has_single_use (candidate))
187 gimple *def_stmt = SSA_NAME_DEF_STMT (candidate);
188 if (is_gimple_assign (def_stmt)
189 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
191 if (TYPE_PRECISION (TREE_TYPE (candidate))
192 <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
193 return gimple_assign_rhs1 (def_stmt);
197 return candidate;
200 /* Recognize a single bit test pattern in GIMPLE_COND and its defining
201 statements. Store the name being tested in *NAME and the bit
202 in *BIT. The GIMPLE_COND computes *NAME & (1 << *BIT).
203 Returns true if the pattern matched, false otherwise. */
205 static bool
206 recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
208 gimple *stmt;
210 /* Get at the definition of the result of the bit test. */
211 if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
212 || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
213 || !integer_zerop (gimple_cond_rhs (cond)))
214 return false;
215 stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
216 if (!is_gimple_assign (stmt))
217 return false;
219 /* Look at which bit is tested. One form to recognize is
220 D.1985_5 = state_3(D) >> control1_4(D);
221 D.1986_6 = (int) D.1985_5;
222 D.1987_7 = op0 & 1;
223 if (D.1987_7 != 0) */
224 if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
225 && integer_onep (gimple_assign_rhs2 (stmt))
226 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
228 tree orig_name = gimple_assign_rhs1 (stmt);
230 /* Look through copies and conversions to eventually
231 find the stmt that computes the shift. */
232 stmt = SSA_NAME_DEF_STMT (orig_name);
234 while (is_gimple_assign (stmt)
235 && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
236 && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
237 <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
238 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
239 || gimple_assign_ssa_name_copy_p (stmt)))
240 stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
242 /* If we found such a shift, decompose it. */
243 if (is_gimple_assign (stmt)
244 && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
246 /* op0 & (1 << op1) */
247 *bit = gimple_assign_rhs2 (stmt);
248 *name = gimple_assign_rhs1 (stmt);
250 else
252 /* t & 1 */
253 *bit = integer_zero_node;
254 *name = get_name_for_bit_test (orig_name);
257 return true;
260 /* Another form is
261 D.1987_7 = op0 & (1 << CST)
262 if (D.1987_7 != 0) */
263 if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
264 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
265 && integer_pow2p (gimple_assign_rhs2 (stmt)))
267 *name = gimple_assign_rhs1 (stmt);
268 *bit = build_int_cst (integer_type_node,
269 tree_log2 (gimple_assign_rhs2 (stmt)));
270 return true;
273 /* Another form is
274 D.1986_6 = 1 << control1_4(D)
275 D.1987_7 = op0 & D.1986_6
276 if (D.1987_7 != 0) */
277 if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
278 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
279 && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
281 gimple *tmp;
283 /* Both arguments of the BIT_AND_EXPR can be the single-bit
284 specifying expression. */
285 tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
286 if (is_gimple_assign (tmp)
287 && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
288 && integer_onep (gimple_assign_rhs1 (tmp)))
290 *name = gimple_assign_rhs2 (stmt);
291 *bit = gimple_assign_rhs2 (tmp);
292 return true;
295 tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
296 if (is_gimple_assign (tmp)
297 && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
298 && integer_onep (gimple_assign_rhs1 (tmp)))
300 *name = gimple_assign_rhs1 (stmt);
301 *bit = gimple_assign_rhs2 (tmp);
302 return true;
306 return false;
309 /* Recognize a bit test pattern in a GIMPLE_COND and its defining
310 statements. Store the name being tested in *NAME and the bits
311 in *BITS. The COND_EXPR computes *NAME & *BITS.
312 Returns true if the pattern matched, false otherwise. */
314 static bool
315 recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
317 gimple *stmt;
319 /* Get at the definition of the result of the bit test. */
320 if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
321 || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
322 || !integer_zerop (gimple_cond_rhs (cond)))
323 return false;
324 stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
325 if (!is_gimple_assign (stmt)
326 || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
327 return false;
329 *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
330 *bits = gimple_assign_rhs2 (stmt);
332 return true;
336 /* Update the profile after the code in outer_cond_bb was adjusted so
337 that outer_cond_bb no longer has a condition. */
339 static void
340 update_profile_after_ifcombine (basic_block inner_cond_bb,
341 basic_block outer_cond_bb)
343 edge outer_to_inner = find_edge (outer_cond_bb, inner_cond_bb);
344 edge outer2 = (EDGE_SUCC (outer_cond_bb, 0) == outer_to_inner
345 ? EDGE_SUCC (outer_cond_bb, 1)
346 : EDGE_SUCC (outer_cond_bb, 0));
347 edge inner_taken = EDGE_SUCC (inner_cond_bb, 0);
348 edge inner_not_taken = EDGE_SUCC (inner_cond_bb, 1);
350 if (inner_taken->dest != outer2->dest)
351 std::swap (inner_taken, inner_not_taken);
352 gcc_assert (inner_taken->dest == outer2->dest);
354 /* In the following we assume that inner_cond_bb has a single predecessor. */
355 gcc_assert (single_pred_p (inner_cond_bb));
357 /* Path outer_cond_bb->(outer2) needs to be merged into path
358 outer_cond_bb->(outer_to_inner)->inner_cond_bb->(inner_taken)
359 and probability of inner_not_taken updated. */
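/* For instance (illustrative numbers): if outer_to_inner has probability
   0.6, outer2 has 0.4 and inner_taken has 0.5, the update below makes
   inner_taken 0.4 + 0.6 * 0.5 = 0.7 and inner_not_taken the remaining
   0.3, while inner_cond_bb inherits outer_cond_bb's count.  */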
361 inner_cond_bb->count = outer_cond_bb->count;
363 /* Handle the special case where the inner_taken probability is always. In
364 this case we know that the overall outcome will be always as well, but
365 combining probabilities will be conservative because it does not know that
366 outer2->probability is the inverse of outer_to_inner->probability. */
367 if (inner_taken->probability == profile_probability::always ())
368 ;
369 else
370 inner_taken->probability = outer2->probability + outer_to_inner->probability
371 * inner_taken->probability;
372 inner_not_taken->probability = profile_probability::always ()
373 - inner_taken->probability;
375 outer_to_inner->probability = profile_probability::always ();
376 outer2->probability = profile_probability::never ();
379 /* If-convert an && pattern with a common else block. The inner
380 if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
381 inner_inv, outer_inv and result_inv indicate whether the conditions
382 are inverted.
383 Returns true if the edges to the common else basic-block were merged. */
385 static bool
386 ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
387 basic_block outer_cond_bb, bool outer_inv, bool result_inv)
389 gimple_stmt_iterator gsi;
390 gimple *inner_stmt, *outer_stmt;
391 gcond *inner_cond, *outer_cond;
392 tree name1, name2, bit1, bit2, bits1, bits2;
394 inner_stmt = last_stmt (inner_cond_bb);
395 if (!inner_stmt
396 || gimple_code (inner_stmt) != GIMPLE_COND)
397 return false;
398 inner_cond = as_a <gcond *> (inner_stmt);
400 outer_stmt = last_stmt (outer_cond_bb);
401 if (!outer_stmt
402 || gimple_code (outer_stmt) != GIMPLE_COND)
403 return false;
404 outer_cond = as_a <gcond *> (outer_stmt);
406 /* See if we test a single bit of the same name in both tests. In
407 that case remove the outer test, merging both else edges,
408 and change the inner one to test for
409 name & (bit1 | bit2) == (bit1 | bit2). */
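/* For example, testing bit 2 and bit 5 of the same name this way becomes
   (name & 0x24) == 0x24 (illustrative constants: 0x24 == (1 << 2) | (1 << 5)).  */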
410 if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
411 && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
412 && name1 == name2)
414 tree t, t2;
416 /* Do it. */
417 gsi = gsi_for_stmt (inner_cond);
418 t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
419 build_int_cst (TREE_TYPE (name1), 1), bit1);
420 t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
421 build_int_cst (TREE_TYPE (name1), 1), bit2);
422 t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
423 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
424 true, GSI_SAME_STMT);
425 t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
426 t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
427 true, GSI_SAME_STMT);
428 t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
429 boolean_type_node, t2, t);
430 t = canonicalize_cond_expr_cond (t);
431 if (!t)
432 return false;
433 gimple_cond_set_condition_from_tree (inner_cond, t);
434 update_stmt (inner_cond);
436 /* Leave CFG optimization to cfg_cleanup. */
437 gimple_cond_set_condition_from_tree (outer_cond,
438 outer_inv ? boolean_false_node : boolean_true_node);
439 update_stmt (outer_cond);
441 update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);
443 if (dump_file)
445 fprintf (dump_file, "optimizing double bit test to ");
446 print_generic_expr (dump_file, name1);
447 fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
448 print_generic_expr (dump_file, bit1);
449 fprintf (dump_file, ") | (1 << ");
450 print_generic_expr (dump_file, bit2);
451 fprintf (dump_file, ")\n");
454 return true;
457 /* See if we have two bit tests of the same name in both tests.
458 In that case remove the outer test and change the inner one to
459 test for name & (bits1 | bits2) != 0. */
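/* For example, bit tests against 0x0f and 0xf0 on the same name are merged
   into a single test of name & 0xff (illustrative constants).  */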
460 else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
461 && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
463 gimple_stmt_iterator gsi;
464 tree t;
466 /* Find the common name which is bit-tested. */
467 if (name1 == name2)
469 else if (bits1 == bits2)
471 std::swap (name2, bits2);
472 std::swap (name1, bits1);
474 else if (name1 == bits2)
475 std::swap (name2, bits2);
476 else if (bits1 == name2)
477 std::swap (name1, bits1);
478 else
479 return false;
481 /* As we strip non-widening conversions in finding a common
482 name that is tested, make sure to end up with an integral
483 type for building the bit operations. */
484 if (TYPE_PRECISION (TREE_TYPE (bits1))
485 >= TYPE_PRECISION (TREE_TYPE (bits2)))
487 bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
488 name1 = fold_convert (TREE_TYPE (bits1), name1);
489 bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
490 bits2 = fold_convert (TREE_TYPE (bits1), bits2);
492 else
494 bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
495 name1 = fold_convert (TREE_TYPE (bits2), name1);
496 bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
497 bits1 = fold_convert (TREE_TYPE (bits2), bits1);
500 /* Do it. */
501 gsi = gsi_for_stmt (inner_cond);
502 t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
503 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
504 true, GSI_SAME_STMT);
505 t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
506 t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
507 true, GSI_SAME_STMT);
508 t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
509 build_int_cst (TREE_TYPE (t), 0));
510 t = canonicalize_cond_expr_cond (t);
511 if (!t)
512 return false;
513 gimple_cond_set_condition_from_tree (inner_cond, t);
514 update_stmt (inner_cond);
516 /* Leave CFG optimization to cfg_cleanup. */
517 gimple_cond_set_condition_from_tree (outer_cond,
518 outer_inv ? boolean_false_node : boolean_true_node);
519 update_stmt (outer_cond);
520 update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);
522 if (dump_file)
524 fprintf (dump_file, "optimizing bits or bits test to ");
525 print_generic_expr (dump_file, name1);
526 fprintf (dump_file, " & T != 0\nwith temporary T = ");
527 print_generic_expr (dump_file, bits1);
528 fprintf (dump_file, " | ");
529 print_generic_expr (dump_file, bits2);
530 fprintf (dump_file, "\n");
533 return true;
536 /* See if we have two comparisons that we can merge into one. */
537 else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
538 && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
540 tree t;
541 enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
542 enum tree_code outer_cond_code = gimple_cond_code (outer_cond);
544 /* Invert comparisons if necessary (and possible). */
545 if (inner_inv)
546 inner_cond_code = invert_tree_comparison (inner_cond_code,
547 HONOR_NANS (gimple_cond_lhs (inner_cond)));
548 if (inner_cond_code == ERROR_MARK)
549 return false;
550 if (outer_inv)
551 outer_cond_code = invert_tree_comparison (outer_cond_code,
552 HONOR_NANS (gimple_cond_lhs (outer_cond)));
553 if (outer_cond_code == ERROR_MARK)
554 return false;
555 /* Don't return false so fast, try maybe_fold_or_comparisons? */
557 if (!(t = maybe_fold_and_comparisons (boolean_type_node, inner_cond_code,
558 gimple_cond_lhs (inner_cond),
559 gimple_cond_rhs (inner_cond),
560 outer_cond_code,
561 gimple_cond_lhs (outer_cond),
562 gimple_cond_rhs (outer_cond))))
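/* Illustrative note: when the two comparisons do not fold into one
   (e.g. a_1 > 0 and b_2 > 0), this fallback can still merge the blocks by
   evaluating both conditions and branching on their non-short-circuit
   AND (a_1 > 0 & b_2 > 0), provided branches are considered expensive
   enough below.  */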
564 tree t1, t2;
565 gimple_stmt_iterator gsi;
566 bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
567 if (param_logical_op_non_short_circuit != -1)
568 logical_op_non_short_circuit
569 = param_logical_op_non_short_circuit;
570 if (!logical_op_non_short_circuit || flag_sanitize_coverage)
571 return false;
572 /* Only do this optimization if the inner bb contains only the conditional. */
573 if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
574 return false;
575 t1 = fold_build2_loc (gimple_location (inner_cond),
576 inner_cond_code,
577 boolean_type_node,
578 gimple_cond_lhs (inner_cond),
579 gimple_cond_rhs (inner_cond));
580 t2 = fold_build2_loc (gimple_location (outer_cond),
581 outer_cond_code,
582 boolean_type_node,
583 gimple_cond_lhs (outer_cond),
584 gimple_cond_rhs (outer_cond));
585 t = fold_build2_loc (gimple_location (inner_cond),
586 TRUTH_AND_EXPR, boolean_type_node, t1, t2);
587 if (result_inv)
589 t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
590 result_inv = false;
592 gsi = gsi_for_stmt (inner_cond);
593 t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL, true,
594 GSI_SAME_STMT);
596 if (result_inv)
597 t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
598 t = canonicalize_cond_expr_cond (t);
599 if (!t)
600 return false;
601 if (!is_gimple_condexpr_for_cond (t))
603 gsi = gsi_for_stmt (inner_cond);
604 t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr_for_cond,
605 NULL, true, GSI_SAME_STMT);
607 gimple_cond_set_condition_from_tree (inner_cond, t);
608 update_stmt (inner_cond);
610 /* Leave CFG optimization to cfg_cleanup. */
611 gimple_cond_set_condition_from_tree (outer_cond,
612 outer_inv ? boolean_false_node : boolean_true_node);
613 update_stmt (outer_cond);
614 update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);
616 if (dump_file)
618 fprintf (dump_file, "optimizing two comparisons to ");
619 print_generic_expr (dump_file, t);
620 fprintf (dump_file, "\n");
623 return true;
626 return false;
629 /* Helper function for tree_ssa_ifcombine_bb. Recognize a CFG pattern and
630 dispatch to the appropriate if-conversion helper for a particular
631 set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
632 PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB. */
634 static bool
635 tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
636 basic_block then_bb, basic_block else_bb,
637 basic_block phi_pred_bb)
639 /* The && form is characterized by a common else_bb with
640 the two edges leading to it mergable. The latter is
641 guaranteed by matching PHI arguments in the else_bb and
642 the inner cond_bb having no side-effects. */
643 if (phi_pred_bb != else_bb
644 && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
645 && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
647 /* We have
648 <outer_cond_bb>
649 if (q) goto inner_cond_bb; else goto else_bb;
650 <inner_cond_bb>
651 if (p) goto ...; else goto else_bb;
653 <else_bb>
656 return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
657 false);
660 /* And a version where the outer condition is negated. */
661 if (phi_pred_bb != else_bb
662 && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
663 && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
665 /* We have
666 <outer_cond_bb>
667 if (q) goto else_bb; else goto inner_cond_bb;
668 <inner_cond_bb>
669 if (p) goto ...; else goto else_bb;
671 <else_bb>
674 return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
675 false);
678 /* The || form is characterized by a common then_bb with the
679 two edges leading to it mergable. The latter is guaranteed
680 by matching PHI arguments in the then_bb and the inner cond_bb
681 having no side-effects. */
682 if (phi_pred_bb != then_bb
683 && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
684 && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
686 /* We have
687 <outer_cond_bb>
688 if (q) goto then_bb; else goto inner_cond_bb;
689 <inner_cond_bb>
690 if (p) goto then_bb; else goto ...;
691 <then_bb>
694 return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
695 true);
698 /* And a version where the outer condition is negated. */
699 if (phi_pred_bb != then_bb
700 && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
701 && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
703 /* We have
704 <outer_cond_bb>
705 if (q) goto inner_cond_bb; else goto then_bb;
706 <inner_cond_bb>
707 if (p) goto then_bb; else goto ...;
708 <then_bb>
711 return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
712 true);
715 return false;
718 /* Recognize a CFG pattern and dispatch to the appropriate
719 if-conversion helper. We start with BB as the innermost
720 worker basic-block. Returns true if a transformation was done. */
722 static bool
723 tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
725 basic_block then_bb = NULL, else_bb = NULL;
727 if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
728 return false;
730 /* Recognize && and || of two conditions with a common
731 then/else block whose entry edges we can merge. That is:
732 if (a || b)
733 ;
734 and
735 if (a && b)
736 ;
737 This requires a single predecessor of the inner cond_bb. */
738 if (single_pred_p (inner_cond_bb)
739 && bb_no_side_effects_p (inner_cond_bb))
741 basic_block outer_cond_bb = single_pred (inner_cond_bb);
743 if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
744 then_bb, else_bb, inner_cond_bb))
745 return true;
747 if (forwarder_block_to (else_bb, then_bb))
749 /* Other possibilities for the && form, if else_bb is an
750 empty forwarder block to then_bb. Compared to the above simpler
751 forms this can be treated as if then_bb and else_bb were swapped,
752 and the corresponding inner_cond_bb not inverted because of that.
753 For same_phi_args_p we look at equality of arguments between
754 edge from outer_cond_bb and the forwarder block. */
755 if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
756 then_bb, else_bb))
757 return true;
759 else if (forwarder_block_to (then_bb, else_bb))
761 /* Other possibilities for the || form, if then_bb is an
762 empty forwarder block to else_bb. Compared to the above simpler
763 forms this can be treated as if then_bb and else_bb were swapped,
764 and the corresponding inner_cond_bb not inverted because of that.
765 For same_phi_args_p we look at equality of arguments between
766 edge from outer_cond_bb and the forwarder block. */
767 if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
768 then_bb, then_bb))
769 return true;
773 return false;
776 /* Main entry for the tree if-conversion pass. */
778 namespace {
780 const pass_data pass_data_tree_ifcombine =
782 GIMPLE_PASS, /* type */
783 "ifcombine", /* name */
784 OPTGROUP_NONE, /* optinfo_flags */
785 TV_TREE_IFCOMBINE, /* tv_id */
786 ( PROP_cfg | PROP_ssa ), /* properties_required */
787 0, /* properties_provided */
788 0, /* properties_destroyed */
789 0, /* todo_flags_start */
790 TODO_update_ssa, /* todo_flags_finish */
793 class pass_tree_ifcombine : public gimple_opt_pass
795 public:
796 pass_tree_ifcombine (gcc::context *ctxt)
797 : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
800 /* opt_pass methods: */
801 virtual unsigned int execute (function *);
803 }; // class pass_tree_ifcombine
805 unsigned int
806 pass_tree_ifcombine::execute (function *fun)
808 basic_block *bbs;
809 bool cfg_changed = false;
810 int i;
812 bbs = single_pred_before_succ_order ();
813 calculate_dominance_info (CDI_DOMINATORS);
815 /* Search every basic block for COND_EXPRs we may be able to optimize.
817 We walk the blocks in an order that guarantees that a block with
818 a single predecessor is processed after the predecessor.
819 This ensures that we collapse outer ifs before visiting the
820 inner ones, and also that we do not try to visit a removed
821 block. This is the opposite of PHI-OPT, because we cascade the
822 combining rather than cascading PHIs. */
823 for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
825 basic_block bb = bbs[i];
826 gimple *stmt = last_stmt (bb);
828 if (stmt
829 && gimple_code (stmt) == GIMPLE_COND)
830 if (tree_ssa_ifcombine_bb (bb))
832 /* Clear range info from all stmts in BB which is now executed
833 conditional on an always true/false condition. */
834 reset_flow_sensitive_info_in_bb (bb);
835 cfg_changed |= true;
839 free (bbs);
841 return cfg_changed ? TODO_cleanup_cfg : 0;
844 } // anon namespace
846 gimple_opt_pass *
847 make_pass_tree_ifcombine (gcc::context *ctxt)
849 return new pass_tree_ifcombine (ctxt);