gcc/tree-ssa-ifcombine.cc
/* Combining of if-expressions on trees.
   Copyright (C) 2007-2023 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "memmodel.h"
#include "tm_p.h"
#include "ssa.h"
#include "tree-pretty-print.h"
/* rtl is needed only because arm back-end requires it for
   BRANCH_COST.  */
#include "fold-const.h"
#include "cfganal.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "attribs.h"
#include "asan.h"

#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif
/* This pass combines COND_EXPRs to simplify control flow.  It
   currently recognizes bit tests and comparisons in chains that
   represent a logical AND or logical OR of two COND_EXPRs.

   It does so by walking basic blocks in an approximate reverse
   post-dominator order and trying to match CFG patterns that
   represent a logical AND or logical OR of two COND_EXPRs.
   Transformations are done if the COND_EXPR conditions match
   either

     1. two single bit tests X & (1 << Yn) (for logical AND)

     2. two bit tests X & Yn (for logical OR)

     3. two comparisons X OPn Y (for logical OR)

   To simplify this pass, removing basic blocks and dead code
   is left to CFG cleanup and DCE.  */
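
/* Illustration of form 1 (an editorial sketch with hypothetical variable
   names, not taken from the GCC sources or testsuite): source like

     if (x & (1 << a))
       if (x & (1 << b))
	 do_something ();

   is rewritten so that the inner condition tests both bits at once,

     T = (1 << a) | (1 << b);
     if ((x & T) == T)
       do_something ();

   while the outer condition becomes constant true and the dead edge is
   left for CFG cleanup to remove.  Similarly, form 2 combines
   (x & a) || (x & b) into (x & (a | b)) != 0, and form 3 hands two
   comparisons to maybe_fold_and_comparisons.  */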
/* Recognize an if-then-else CFG pattern starting at the COND_BB
   basic-block containing the COND_EXPR.  The recognized then and
   else blocks are stored to *THEN_BB and *ELSE_BB.  If *THEN_BB
   and/or *ELSE_BB are already set, they are required to match the
   then and else basic-blocks to make the pattern match.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_if_then_else (basic_block cond_bb,
			basic_block *then_bb, basic_block *else_bb)
{
  edge t, e;

  if (EDGE_COUNT (cond_bb->succs) != 2)
    return false;

  /* Find the then/else edges.  */
  t = EDGE_SUCC (cond_bb, 0);
  e = EDGE_SUCC (cond_bb, 1);
  if (!(t->flags & EDGE_TRUE_VALUE))
    std::swap (t, e);
  if (!(t->flags & EDGE_TRUE_VALUE)
      || !(e->flags & EDGE_FALSE_VALUE))
    return false;

  /* Check if the edge destinations point to the required block.  */
  if (*then_bb
      && t->dest != *then_bb)
    return false;
  if (*else_bb
      && e->dest != *else_bb)
    return false;

  if (!*then_bb)
    *then_bb = t->dest;
  if (!*else_bb)
    *else_bb = e->dest;

  return true;
}
/* Verify that the basic block BB has no side-effects.  Return
   true in this case, else false.  */

static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      gassign *ass;
      enum tree_code rhs_code;
      if (gimple_has_side_effects (stmt)
	  || gimple_could_trap_p (stmt)
	  || gimple_vuse (stmt)
	  /* We need to rewrite stmts with undefined overflow to use
	     unsigned arithmetic but cannot do so for signed division.  */
	  || ((ass = dyn_cast <gassign *> (stmt))
	      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (ass)))
	      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (gimple_assign_lhs (ass)))
	      && ((rhs_code = gimple_assign_rhs_code (ass)), true)
	      && (rhs_code == TRUNC_DIV_EXPR
		  || rhs_code == CEIL_DIV_EXPR
		  || rhs_code == FLOOR_DIV_EXPR
		  || rhs_code == ROUND_DIV_EXPR)
	      /* We cannot use expr_not_equal_to since we'd have to restrict
		 flow-sensitive info to what's known at the outer if.  */
	      && (TREE_CODE (gimple_assign_rhs2 (ass)) != INTEGER_CST
		  || !integer_minus_onep (gimple_assign_rhs2 (ass))))
	  /* const calls don't match any of the above, yet they could
	     still have some side-effects - they could contain
	     gimple_could_trap_p statements, like floating point
	     exceptions or integer division by zero.  See PR70586.
	     FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p
	     should handle this.  */
	  || is_gimple_call (stmt))
	return false;

      ssa_op_iter it;
      tree use;
      FOR_EACH_SSA_TREE_OPERAND (use, stmt, it, SSA_OP_USE)
	if (ssa_name_maybe_undef_p (use))
	  return false;
    }

  return true;
}
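
/* For illustration (an editorial sketch with hypothetical names): the checks
   above are what prevent combining

     if (a)
       {
	 n = x / y;
	 if (b)
	   use (n);
       }

   because the division may trap, and rewriting the outer condition to a
   constant would execute it even when A is false.  */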
/* Return true if BB is an empty forwarder block to TO_BB.  */

static bool
forwarder_block_to (basic_block bb, basic_block to_bb)
{
  return empty_block_p (bb)
	 && single_succ_p (bb)
	 && single_succ (bb) == to_bb;
}
/* Verify if all PHI node arguments in DEST for edges from BB1 or
   BB2 to DEST are the same.  This makes the CFG merge point
   free from side-effects.  Return true in this case, else false.  */

static bool
same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
{
  edge e1 = find_edge (bb1, dest);
  edge e2 = find_edge (bb2, dest);
  gphi_iterator gsi;
  gphi *phi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi.phi ();
      if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
			    PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
	return false;
    }

  return true;
}
/* Return the best representative SSA name for CANDIDATE which is used
   in a bit test.  */

static tree
get_name_for_bit_test (tree candidate)
{
  /* Skip single-use names in favor of using the name from a
     non-widening conversion definition.  */
  if (TREE_CODE (candidate) == SSA_NAME
      && has_single_use (candidate))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (candidate);
      if (is_gimple_assign (def_stmt)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
	{
	  if (TYPE_PRECISION (TREE_TYPE (candidate))
	      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
	    return gimple_assign_rhs1 (def_stmt);
	}
    }

  return candidate;
}
/* Recognize a single bit test pattern in GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bit
   in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
  gimple *stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = op0 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
	 find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      while (is_gimple_assign (stmt)
	     && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
		      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
		 || gimple_assign_ssa_name_copy_p (stmt)))
	stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
	{
	  /* op0 & (1 << op1) */
	  *bit = gimple_assign_rhs2 (stmt);
	  *name = gimple_assign_rhs1 (stmt);
	}
      else
	{
	  /* t & 1 */
	  *bit = integer_zero_node;
	  *name = get_name_for_bit_test (orig_name);
	}

      return true;
    }

  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      *bit = build_int_cst (integer_type_node,
			    tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple *tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
	 specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs2 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs1 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}
    }

  return false;
}
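
/* For illustration (an editorial note with hypothetical names): C source like

     if ((x >> b) & 1)   ...   matches the first form above
     if (x & (1 << b))   ...   matches the second form for constant B,
				the third form for variable B

   in all cases ends up with X stored in *NAME and B in *BIT.  */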
/* Recognize a bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bits
   in *BITS.  The COND_EXPR computes *NAME & *BITS.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
{
  gimple *stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
    return false;

  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
  *bits = gimple_assign_rhs2 (stmt);

  return true;
}
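
/* For illustration (an editorial sketch with hypothetical names and
   constants): two conditions recognized here, such as

     if (x & 0x0f) goto common;
     if (x & 0xf0) goto common;

   are combined by ifcombine_ifandif below into the single test

     if (x & 0xff) goto common;

   matching the "optimizing bits or bits test" dump message.  */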
/* Update the profile after the code in outer_cond_bb was adjusted so
   that outer_cond_bb has no condition.  */

static void
update_profile_after_ifcombine (basic_block inner_cond_bb,
				basic_block outer_cond_bb)
{
  edge outer_to_inner = find_edge (outer_cond_bb, inner_cond_bb);
  edge outer2 = (EDGE_SUCC (outer_cond_bb, 0) == outer_to_inner
		 ? EDGE_SUCC (outer_cond_bb, 1)
		 : EDGE_SUCC (outer_cond_bb, 0));
  edge inner_taken = EDGE_SUCC (inner_cond_bb, 0);
  edge inner_not_taken = EDGE_SUCC (inner_cond_bb, 1);

  if (inner_taken->dest != outer2->dest)
    std::swap (inner_taken, inner_not_taken);
  gcc_assert (inner_taken->dest == outer2->dest);

  /* In the following we assume that inner_cond_bb has a single predecessor.  */
  gcc_assert (single_pred_p (inner_cond_bb));

  /* Path outer_cond_bb->(outer2) needs to be merged into path
     outer_cond_bb->(outer_to_inner)->inner_cond_bb->(inner_taken)
     and the probability of inner_not_taken updated.  */

  inner_cond_bb->count = outer_cond_bb->count;

  /* Handle the special case where the inner_taken probability is always.
     In this case we know that the overall outcome will be always as well,
     but combining probabilities will be conservative because it does not
     know that outer2->probability is the inverse of
     outer_to_inner->probability.  */
  if (inner_taken->probability == profile_probability::always ())
    ;
  else
    inner_taken->probability = outer2->probability + outer_to_inner->probability
			       * inner_taken->probability;
  inner_not_taken->probability = profile_probability::always ()
				 - inner_taken->probability;

  outer_to_inner->probability = profile_probability::always ();
  outer2->probability = profile_probability::never ();
}
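
/* Worked example for the update above (editorial illustration with made-up
   numbers): if outer_to_inner is 60%, outer2 is 40% and inner_taken was
   previously 50%, the merged inner_taken becomes 40% + 60% * 50% = 70%,
   and inner_not_taken gets the remaining 30%.  */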
/* If-convert on an AND pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   inner_inv, outer_inv and result_inv indicate whether the conditions
   are inverted.
   Returns true if the edges to the common else basic-block were merged.  */

static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
		   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
  gimple_stmt_iterator gsi;
  tree name1, name2, bit1, bit2, bits1, bits2;

  gcond *inner_cond = safe_dyn_cast <gcond *> (*gsi_last_bb (inner_cond_bb));
  if (!inner_cond)
    return false;

  gcond *outer_cond = safe_dyn_cast <gcond *> (*gsi_last_bb (outer_cond_bb));
  if (!outer_cond)
    return false;

  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & (bit1 | bit2) == (bit1 | bit2).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
		       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
			build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
				     true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
		       boolean_type_node, t2, t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      if (!is_gimple_condexpr_for_cond (t))
	{
	  gsi = gsi_for_stmt (inner_cond);
	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr_for_cond,
					  NULL, true, GSI_SAME_STMT);
	}
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing double bit test to ");
	  print_generic_expr (dump_file, name1);
	  fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
	  print_generic_expr (dump_file, bit1);
	  fprintf (dump_file, ") | (1 << ");
	  print_generic_expr (dump_file, bit2);
	  fprintf (dump_file, ")\n");
	}

      return true;
    }
  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
	   && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      gimple_stmt_iterator gsi;
      tree t;

      /* Find the common name which is bit-tested.  */
      if (name1 == name2)
	;
      else if (bits1 == bits2)
	{
	  std::swap (name2, bits2);
	  std::swap (name1, bits1);
	}
      else if (name1 == bits2)
	std::swap (name2, bits2);
      else if (bits1 == name2)
	std::swap (name1, bits1);
      else
	return false;

      /* As we strip non-widening conversions in finding a common
	 name that is tested make sure to end up with an integral
	 type for building the bit operations.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
	  >= TYPE_PRECISION (TREE_TYPE (bits2)))
	{
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  name1 = fold_convert (TREE_TYPE (bits1), name1);
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  bits2 = fold_convert (TREE_TYPE (bits1), bits2);
	}
      else
	{
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  name1 = fold_convert (TREE_TYPE (bits2), name1);
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  bits1 = fold_convert (TREE_TYPE (bits2), bits1);
	}

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
		       build_int_cst (TREE_TYPE (t), 0));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      if (!is_gimple_condexpr_for_cond (t))
	{
	  gsi = gsi_for_stmt (inner_cond);
	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr_for_cond,
					  NULL, true, GSI_SAME_STMT);
	}
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);
      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing bits or bits test to ");
	  print_generic_expr (dump_file, name1);
	  fprintf (dump_file, " & T != 0\nwith temporary T = ");
	  print_generic_expr (dump_file, bits1);
	  fprintf (dump_file, " | ");
	  print_generic_expr (dump_file, bits2);
	  fprintf (dump_file, "\n");
	}

      return true;
    }
  /* See if we have two comparisons that we can merge into one.  */
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
	   && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible).  */
      if (inner_inv)
	inner_cond_code = invert_tree_comparison (inner_cond_code,
	  HONOR_NANS (gimple_cond_lhs (inner_cond)));
      if (inner_cond_code == ERROR_MARK)
	return false;
      if (outer_inv)
	outer_cond_code = invert_tree_comparison (outer_cond_code,
	  HONOR_NANS (gimple_cond_lhs (outer_cond)));
      if (outer_cond_code == ERROR_MARK)
	return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

      if (!(t = maybe_fold_and_comparisons (boolean_type_node, inner_cond_code,
					    gimple_cond_lhs (inner_cond),
					    gimple_cond_rhs (inner_cond),
					    outer_cond_code,
					    gimple_cond_lhs (outer_cond),
					    gimple_cond_rhs (outer_cond),
					    gimple_bb (outer_cond))))
	{
	  tree t1, t2;
	  gimple_stmt_iterator gsi;
	  bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
	  if (param_logical_op_non_short_circuit != -1)
	    logical_op_non_short_circuit
	      = param_logical_op_non_short_circuit;
	  if (!logical_op_non_short_circuit || sanitize_coverage_p ())
	    return false;
	  /* Only do this optimization if the inner bb contains only the
	     conditional.  */
	  if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
	    return false;
	  t1 = fold_build2_loc (gimple_location (inner_cond),
				inner_cond_code,
				boolean_type_node,
				gimple_cond_lhs (inner_cond),
				gimple_cond_rhs (inner_cond));
	  t2 = fold_build2_loc (gimple_location (outer_cond),
				outer_cond_code,
				boolean_type_node,
				gimple_cond_lhs (outer_cond),
				gimple_cond_rhs (outer_cond));
	  t = fold_build2_loc (gimple_location (inner_cond),
			       TRUTH_AND_EXPR, boolean_type_node, t1, t2);
	  if (result_inv)
	    {
	      t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
	      result_inv = false;
	    }
	  gsi = gsi_for_stmt (inner_cond);
	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr_for_cond,
					  NULL, true, GSI_SAME_STMT);
	}
      if (result_inv)
	t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      if (!is_gimple_condexpr_for_cond (t))
	{
	  gsi = gsi_for_stmt (inner_cond);
	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr_for_cond,
					  NULL, true, GSI_SAME_STMT);
	}
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);
      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing two comparisons to ");
	  print_generic_expr (dump_file, t);
	  fprintf (dump_file, "\n");
	}

      return true;
    }

  return false;
}
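
/* For illustration (an editorial sketch with hypothetical names): the
   two-comparison case above takes a chain such as

     if (i < n)
       if (i < 5)
	 body ();

   and asks maybe_fold_and_comparisons for (i < 5) && (i < n).  When one
   comparison implies the other (e.g. i < 5 and i < 10) the result is the
   single stronger test; otherwise a TRUTH_AND_EXPR of both conditions is
   emitted, but only when LOGICAL_OP_NON_SHORT_CIRCUIT permits it.  */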
/* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
   dispatch to the appropriate if-conversion helper for a particular
   set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
   PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB.  */

static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
			 basic_block then_bb, basic_block else_bb,
			 basic_block phi_pred_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergeable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto else_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	   <else_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
				false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto else_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	   <else_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
				false);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergeable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto then_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto then_bb; else goto ...;
	   <then_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
				true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto then_bb;
	   <inner_cond_bb>
	     if (p) goto then_bb; else goto ...;
	   <then_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
				true);
    }

  return false;
}
/* Recognize a CFG pattern and dispatch to the appropriate
   if-conversion helper.  We start with BB as the innermost
   worker basic-block.  Returns true if a transformation was done.  */

static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  basic_block then_bb = NULL, else_bb = NULL;

  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block whose entry edges we can merge.  That is:
       if (a || b)
	 ;
     and
       if (a && b)
	 ;
     This requires a single predecessor of the inner cond_bb.  */
  if (single_pred_p (inner_cond_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      basic_block outer_cond_bb = single_pred (inner_cond_bb);

      if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
				   then_bb, else_bb, inner_cond_bb))
	return true;

      if (forwarder_block_to (else_bb, then_bb))
	{
	  /* Other possibilities for the && form, if else_bb is an
	     empty forwarder block to then_bb.  Compared to the above simpler
	     forms this can be treated as if then_bb and else_bb were swapped,
	     and the corresponding inner_cond_bb not inverted because of that.
	     For same_phi_args_p we look at equality of arguments between
	     the edge from outer_cond_bb and the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, else_bb))
	    return true;
	}
      else if (forwarder_block_to (then_bb, else_bb))
	{
	  /* Other possibilities for the || form, if then_bb is an
	     empty forwarder block to else_bb.  Compared to the above simpler
	     forms this can be treated as if then_bb and else_bb were swapped,
	     and the corresponding inner_cond_bb not inverted because of that.
	     For same_phi_args_p we look at equality of arguments between
	     the edge from outer_cond_bb and the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, then_bb))
	    return true;
	}
    }

  return false;
}
/* Main entry for the tree if-conversion pass.  */

namespace {

const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override;

}; // class pass_tree_ifcombine

unsigned int
pass_tree_ifcombine::execute (function *fun)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = single_pred_before_succ_order ();
  calculate_dominance_info (CDI_DOMINATORS);
  mark_ssa_maybe_undefs ();

  /* Search every basic block for COND_EXPRs we may be able to optimize.

     We walk the blocks in an order that guarantees that a block with
     a single predecessor is processed after the predecessor.
     This ensures that we collapse outer ifs before visiting the
     inner ones, and also that we do not try to visit a removed
     block.  This is the opposite of PHI-OPT, because we cascade the
     combining rather than cascading PHIs.  */
  for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
    {
      basic_block bb = bbs[i];

      if (safe_is_a <gcond *> (*gsi_last_bb (bb)))
	if (tree_ssa_ifcombine_bb (bb))
	  {
	    /* Clear range info from all stmts in BB which are now executed
	       conditionally on an always true/false condition.  */
	    reset_flow_sensitive_info_in_bb (bb);
	    for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
		 gsi_next (&gsi))
	      {
		gassign *ass = dyn_cast <gassign *> (gsi_stmt (gsi));
		if (!ass)
		  continue;
		tree lhs = gimple_assign_lhs (ass);
		if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		     || POINTER_TYPE_P (TREE_TYPE (lhs)))
		    && arith_code_with_undefined_signed_overflow
			 (gimple_assign_rhs_code (ass)))
		  rewrite_to_defined_overflow (ass, true);
	      }
	    cfg_changed |= true;
	  }
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_tree_ifcombine (gcc::context *ctxt)
{
  return new pass_tree_ifcombine (ctxt);
}