1 /* Combining of if-expressions on trees.
2 Copyright (C) 2007-2023 Free Software Foundation, Inc.
3 Contributed by Richard Guenther <rguenther@suse.de>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "tree-pass.h"
33 #include "tree-pretty-print.h"
34 /* rtl is needed only because arm back-end requires it for
36 #include "fold-const.h"
38 #include "gimple-iterator.h"
39 #include "gimple-fold.h"
40 #include "gimplify-me.h"
/* Default for the target-overridable macro: treat non-short-circuit
   evaluation of logical ops as profitable when branches are costly.  */
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
52 /* This pass combines COND_EXPRs to simplify control flow. It
53 currently recognizes bit tests and comparisons in chains that
54 represent logical and or logical or of two COND_EXPRs.
56 It does so by walking basic blocks in a approximate reverse
57 post-dominator order and trying to match CFG patterns that
58 represent logical and or logical or of two COND_EXPRs.
59 Transformations are done if the COND_EXPR conditions match
62 1. two single bit tests X & (1 << Yn) (for logical and)
64 2. two bit tests X & Yn (for logical or)
66 3. two comparisons X OPn Y (for logical or)
68 To simplify this pass, removing basic blocks and dead code
69 is left to CFG cleanup and DCE. */
72 /* Recognize a if-then-else CFG pattern starting to match with the
73 COND_BB basic-block containing the COND_EXPR. The recognized
74 then end else blocks are stored to *THEN_BB and *ELSE_BB. If
75 *THEN_BB and/or *ELSE_BB are already set, they are required to
76 match the then and else basic-blocks to make the pattern match.
77 Returns true if the pattern matched, false otherwise. */
80 recognize_if_then_else (basic_block cond_bb
,
81 basic_block
*then_bb
, basic_block
*else_bb
)
85 if (EDGE_COUNT (cond_bb
->succs
) != 2)
88 /* Find the then/else edges. */
89 t
= EDGE_SUCC (cond_bb
, 0);
90 e
= EDGE_SUCC (cond_bb
, 1);
91 if (!(t
->flags
& EDGE_TRUE_VALUE
))
93 if (!(t
->flags
& EDGE_TRUE_VALUE
)
94 || !(e
->flags
& EDGE_FALSE_VALUE
))
97 /* Check if the edge destinations point to the required block. */
99 && t
->dest
!= *then_bb
)
102 && e
->dest
!= *else_bb
)
113 /* Verify if the basic block BB does not have side-effects. Return
114 true in this case, else false. */
117 bb_no_side_effects_p (basic_block bb
)
119 gimple_stmt_iterator gsi
;
121 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
123 gimple
*stmt
= gsi_stmt (gsi
);
125 if (is_gimple_debug (stmt
))
129 enum tree_code rhs_code
;
130 if (gimple_has_side_effects (stmt
)
131 || gimple_could_trap_p (stmt
)
132 || gimple_vuse (stmt
)
133 /* We need to rewrite stmts with undefined overflow to use
134 unsigned arithmetic but cannot do so for signed division. */
135 || ((ass
= dyn_cast
<gassign
*> (stmt
))
136 && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (ass
)))
137 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (gimple_assign_lhs (ass
)))
138 && ((rhs_code
= gimple_assign_rhs_code (ass
)), true)
139 && (rhs_code
== TRUNC_DIV_EXPR
140 || rhs_code
== CEIL_DIV_EXPR
141 || rhs_code
== FLOOR_DIV_EXPR
142 || rhs_code
== ROUND_DIV_EXPR
)
143 /* We cannot use expr_not_equal_to since we'd have to restrict
144 flow-sensitive info to whats known at the outer if. */
145 && (TREE_CODE (gimple_assign_rhs2 (ass
)) != INTEGER_CST
146 || !integer_minus_onep (gimple_assign_rhs2 (ass
))))
147 /* const calls don't match any of the above, yet they could
148 still have some side-effects - they could contain
149 gimple_could_trap_p statements, like floating point
150 exceptions or integer division by zero. See PR70586.
151 FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p
152 should handle this. */
153 || is_gimple_call (stmt
))
158 FOR_EACH_SSA_TREE_OPERAND (use
, stmt
, it
, SSA_OP_USE
)
159 if (ssa_name_maybe_undef_p (use
))
166 /* Return true if BB is an empty forwarder block to TO_BB. */
169 forwarder_block_to (basic_block bb
, basic_block to_bb
)
171 return empty_block_p (bb
)
172 && single_succ_p (bb
)
173 && single_succ (bb
) == to_bb
;
176 /* Verify if all PHI node arguments in DEST for edges from BB1 or
177 BB2 to DEST are the same. This makes the CFG merge point
178 free from side-effects. Return true in this case, else false. */
181 same_phi_args_p (basic_block bb1
, basic_block bb2
, basic_block dest
)
183 edge e1
= find_edge (bb1
, dest
);
184 edge e2
= find_edge (bb2
, dest
);
188 for (gsi
= gsi_start_phis (dest
); !gsi_end_p (gsi
); gsi_next (&gsi
))
191 if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi
, e1
),
192 PHI_ARG_DEF_FROM_EDGE (phi
, e2
), 0))
199 /* Return the best representative SSA name for CANDIDATE which is used
203 get_name_for_bit_test (tree candidate
)
205 /* Skip single-use names in favor of using the name from a
206 non-widening conversion definition. */
207 if (TREE_CODE (candidate
) == SSA_NAME
208 && has_single_use (candidate
))
210 gimple
*def_stmt
= SSA_NAME_DEF_STMT (candidate
);
211 if (is_gimple_assign (def_stmt
)
212 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt
)))
214 if (TYPE_PRECISION (TREE_TYPE (candidate
))
215 <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt
))))
216 return gimple_assign_rhs1 (def_stmt
);
223 /* Recognize a single bit test pattern in GIMPLE_COND and its defining
224 statements. Store the name being tested in *NAME and the bit
225 in *BIT. The GIMPLE_COND computes *NAME & (1 << *BIT).
226 Returns true if the pattern matched, false otherwise. */
229 recognize_single_bit_test (gcond
*cond
, tree
*name
, tree
*bit
, bool inv
)
233 /* Get at the definition of the result of the bit test. */
234 if (gimple_cond_code (cond
) != (inv
? EQ_EXPR
: NE_EXPR
)
235 || TREE_CODE (gimple_cond_lhs (cond
)) != SSA_NAME
236 || !integer_zerop (gimple_cond_rhs (cond
)))
238 stmt
= SSA_NAME_DEF_STMT (gimple_cond_lhs (cond
));
239 if (!is_gimple_assign (stmt
))
242 /* Look at which bit is tested. One form to recognize is
243 D.1985_5 = state_3(D) >> control1_4(D);
244 D.1986_6 = (int) D.1985_5;
246 if (D.1987_7 != 0) */
247 if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
248 && integer_onep (gimple_assign_rhs2 (stmt
))
249 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
251 tree orig_name
= gimple_assign_rhs1 (stmt
);
253 /* Look through copies and conversions to eventually
254 find the stmt that computes the shift. */
255 stmt
= SSA_NAME_DEF_STMT (orig_name
);
257 while (is_gimple_assign (stmt
)
258 && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
259 && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt
)))
260 <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt
))))
261 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
262 || gimple_assign_ssa_name_copy_p (stmt
)))
263 stmt
= SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt
));
265 /* If we found such, decompose it. */
266 if (is_gimple_assign (stmt
)
267 && gimple_assign_rhs_code (stmt
) == RSHIFT_EXPR
)
269 /* op0 & (1 << op1) */
270 *bit
= gimple_assign_rhs2 (stmt
);
271 *name
= gimple_assign_rhs1 (stmt
);
276 *bit
= integer_zero_node
;
277 *name
= get_name_for_bit_test (orig_name
);
284 D.1987_7 = op0 & (1 << CST)
285 if (D.1987_7 != 0) */
286 if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
287 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
288 && integer_pow2p (gimple_assign_rhs2 (stmt
)))
290 *name
= gimple_assign_rhs1 (stmt
);
291 *bit
= build_int_cst (integer_type_node
,
292 tree_log2 (gimple_assign_rhs2 (stmt
)));
297 D.1986_6 = 1 << control1_4(D)
298 D.1987_7 = op0 & D.1986_6
299 if (D.1987_7 != 0) */
300 if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
301 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
302 && TREE_CODE (gimple_assign_rhs2 (stmt
)) == SSA_NAME
)
306 /* Both arguments of the BIT_AND_EXPR can be the single-bit
307 specifying expression. */
308 tmp
= SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt
));
309 if (is_gimple_assign (tmp
)
310 && gimple_assign_rhs_code (tmp
) == LSHIFT_EXPR
311 && integer_onep (gimple_assign_rhs1 (tmp
)))
313 *name
= gimple_assign_rhs2 (stmt
);
314 *bit
= gimple_assign_rhs2 (tmp
);
318 tmp
= SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt
));
319 if (is_gimple_assign (tmp
)
320 && gimple_assign_rhs_code (tmp
) == LSHIFT_EXPR
321 && integer_onep (gimple_assign_rhs1 (tmp
)))
323 *name
= gimple_assign_rhs1 (stmt
);
324 *bit
= gimple_assign_rhs2 (tmp
);
332 /* Recognize a bit test pattern in a GIMPLE_COND and its defining
333 statements. Store the name being tested in *NAME and the bits
334 in *BITS. The COND_EXPR computes *NAME & *BITS.
335 Returns true if the pattern matched, false otherwise. */
338 recognize_bits_test (gcond
*cond
, tree
*name
, tree
*bits
, bool inv
)
342 /* Get at the definition of the result of the bit test. */
343 if (gimple_cond_code (cond
) != (inv
? EQ_EXPR
: NE_EXPR
)
344 || TREE_CODE (gimple_cond_lhs (cond
)) != SSA_NAME
345 || !integer_zerop (gimple_cond_rhs (cond
)))
347 stmt
= SSA_NAME_DEF_STMT (gimple_cond_lhs (cond
));
348 if (!is_gimple_assign (stmt
)
349 || gimple_assign_rhs_code (stmt
) != BIT_AND_EXPR
)
352 *name
= get_name_for_bit_test (gimple_assign_rhs1 (stmt
));
353 *bits
= gimple_assign_rhs2 (stmt
);
359 /* Update profile after code in outer_cond_bb was adjusted so
360 outer_cond_bb has no condition. */
363 update_profile_after_ifcombine (basic_block inner_cond_bb
,
364 basic_block outer_cond_bb
)
366 edge outer_to_inner
= find_edge (outer_cond_bb
, inner_cond_bb
);
367 edge outer2
= (EDGE_SUCC (outer_cond_bb
, 0) == outer_to_inner
368 ? EDGE_SUCC (outer_cond_bb
, 1)
369 : EDGE_SUCC (outer_cond_bb
, 0));
370 edge inner_taken
= EDGE_SUCC (inner_cond_bb
, 0);
371 edge inner_not_taken
= EDGE_SUCC (inner_cond_bb
, 1);
373 if (inner_taken
->dest
!= outer2
->dest
)
374 std::swap (inner_taken
, inner_not_taken
);
375 gcc_assert (inner_taken
->dest
== outer2
->dest
);
377 /* In the following we assume that inner_cond_bb has single predecessor. */
378 gcc_assert (single_pred_p (inner_cond_bb
));
380 /* Path outer_cond_bb->(outer2) needs to be merged into path
381 outer_cond_bb->(outer_to_inner)->inner_cond_bb->(inner_taken)
382 and probability of inner_not_taken updated. */
384 inner_cond_bb
->count
= outer_cond_bb
->count
;
386 /* Handle special case where inner_taken probability is always. In this case
387 we know that the overall outcome will be always as well, but combining
388 probabilities will be conservative because it does not know that
389 outer2->probability is inverse of outer_to_inner->probability. */
390 if (inner_taken
->probability
== profile_probability::always ())
393 inner_taken
->probability
= outer2
->probability
+ outer_to_inner
->probability
394 * inner_taken
->probability
;
395 inner_not_taken
->probability
= profile_probability::always ()
396 - inner_taken
->probability
;
398 outer_to_inner
->probability
= profile_probability::always ();
399 outer2
->probability
= profile_probability::never ();
402 /* If-convert on a and pattern with a common else block. The inner
403 if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
404 inner_inv, outer_inv and result_inv indicate whether the conditions
406 Returns true if the edges to the common else basic-block were merged. */
409 ifcombine_ifandif (basic_block inner_cond_bb
, bool inner_inv
,
410 basic_block outer_cond_bb
, bool outer_inv
, bool result_inv
)
412 gimple_stmt_iterator gsi
;
413 tree name1
, name2
, bit1
, bit2
, bits1
, bits2
;
415 gcond
*inner_cond
= safe_dyn_cast
<gcond
*> (*gsi_last_bb (inner_cond_bb
));
419 gcond
*outer_cond
= safe_dyn_cast
<gcond
*> (*gsi_last_bb (outer_cond_bb
));
423 /* See if we test a single bit of the same name in both tests. In
424 that case remove the outer test, merging both else edges,
425 and change the inner one to test for
426 name & (bit1 | bit2) == (bit1 | bit2). */
427 if (recognize_single_bit_test (inner_cond
, &name1
, &bit1
, inner_inv
)
428 && recognize_single_bit_test (outer_cond
, &name2
, &bit2
, outer_inv
)
433 if (TREE_CODE (name1
) == SSA_NAME
434 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name1
))
438 gsi
= gsi_for_stmt (inner_cond
);
439 t
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (name1
),
440 build_int_cst (TREE_TYPE (name1
), 1), bit1
);
441 t2
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (name1
),
442 build_int_cst (TREE_TYPE (name1
), 1), bit2
);
443 t
= fold_build2 (BIT_IOR_EXPR
, TREE_TYPE (name1
), t
, t2
);
444 t
= force_gimple_operand_gsi (&gsi
, t
, true, NULL_TREE
,
445 true, GSI_SAME_STMT
);
446 t2
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (name1
), name1
, t
);
447 t2
= force_gimple_operand_gsi (&gsi
, t2
, true, NULL_TREE
,
448 true, GSI_SAME_STMT
);
449 t
= fold_build2 (result_inv
? NE_EXPR
: EQ_EXPR
,
450 boolean_type_node
, t2
, t
);
451 t
= canonicalize_cond_expr_cond (t
);
454 if (!is_gimple_condexpr_for_cond (t
))
456 gsi
= gsi_for_stmt (inner_cond
);
457 t
= force_gimple_operand_gsi_1 (&gsi
, t
, is_gimple_condexpr_for_cond
,
458 NULL
, true, GSI_SAME_STMT
);
460 gimple_cond_set_condition_from_tree (inner_cond
, t
);
461 update_stmt (inner_cond
);
463 /* Leave CFG optimization to cfg_cleanup. */
464 gimple_cond_set_condition_from_tree (outer_cond
,
465 outer_inv
? boolean_false_node
: boolean_true_node
);
466 update_stmt (outer_cond
);
468 update_profile_after_ifcombine (inner_cond_bb
, outer_cond_bb
);
472 fprintf (dump_file
, "optimizing double bit test to ");
473 print_generic_expr (dump_file
, name1
);
474 fprintf (dump_file
, " & T == T\nwith temporary T = (1 << ");
475 print_generic_expr (dump_file
, bit1
);
476 fprintf (dump_file
, ") | (1 << ");
477 print_generic_expr (dump_file
, bit2
);
478 fprintf (dump_file
, ")\n");
484 /* See if we have two bit tests of the same name in both tests.
485 In that case remove the outer test and change the inner one to
486 test for name & (bits1 | bits2) != 0. */
487 else if (recognize_bits_test (inner_cond
, &name1
, &bits1
, !inner_inv
)
488 && recognize_bits_test (outer_cond
, &name2
, &bits2
, !outer_inv
))
490 gimple_stmt_iterator gsi
;
493 if ((TREE_CODE (name1
) == SSA_NAME
494 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name1
))
495 || (TREE_CODE (name2
) == SSA_NAME
496 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (name2
)))
499 /* Find the common name which is bit-tested. */
502 else if (bits1
== bits2
)
504 std::swap (name2
, bits2
);
505 std::swap (name1
, bits1
);
507 else if (name1
== bits2
)
508 std::swap (name2
, bits2
);
509 else if (bits1
== name2
)
510 std::swap (name1
, bits1
);
514 /* As we strip non-widening conversions in finding a common
515 name that is tested make sure to end up with an integral
516 type for building the bit operations. */
517 if (TYPE_PRECISION (TREE_TYPE (bits1
))
518 >= TYPE_PRECISION (TREE_TYPE (bits2
)))
520 bits1
= fold_convert (unsigned_type_for (TREE_TYPE (bits1
)), bits1
);
521 name1
= fold_convert (TREE_TYPE (bits1
), name1
);
522 bits2
= fold_convert (unsigned_type_for (TREE_TYPE (bits2
)), bits2
);
523 bits2
= fold_convert (TREE_TYPE (bits1
), bits2
);
527 bits2
= fold_convert (unsigned_type_for (TREE_TYPE (bits2
)), bits2
);
528 name1
= fold_convert (TREE_TYPE (bits2
), name1
);
529 bits1
= fold_convert (unsigned_type_for (TREE_TYPE (bits1
)), bits1
);
530 bits1
= fold_convert (TREE_TYPE (bits2
), bits1
);
534 gsi
= gsi_for_stmt (inner_cond
);
535 t
= fold_build2 (BIT_IOR_EXPR
, TREE_TYPE (name1
), bits1
, bits2
);
536 t
= force_gimple_operand_gsi (&gsi
, t
, true, NULL_TREE
,
537 true, GSI_SAME_STMT
);
538 t
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (name1
), name1
, t
);
539 t
= force_gimple_operand_gsi (&gsi
, t
, true, NULL_TREE
,
540 true, GSI_SAME_STMT
);
541 t
= fold_build2 (result_inv
? NE_EXPR
: EQ_EXPR
, boolean_type_node
, t
,
542 build_int_cst (TREE_TYPE (t
), 0));
543 t
= canonicalize_cond_expr_cond (t
);
546 if (!is_gimple_condexpr_for_cond (t
))
548 gsi
= gsi_for_stmt (inner_cond
);
549 t
= force_gimple_operand_gsi_1 (&gsi
, t
, is_gimple_condexpr_for_cond
,
550 NULL
, true, GSI_SAME_STMT
);
552 gimple_cond_set_condition_from_tree (inner_cond
, t
);
553 update_stmt (inner_cond
);
555 /* Leave CFG optimization to cfg_cleanup. */
556 gimple_cond_set_condition_from_tree (outer_cond
,
557 outer_inv
? boolean_false_node
: boolean_true_node
);
558 update_stmt (outer_cond
);
559 update_profile_after_ifcombine (inner_cond_bb
, outer_cond_bb
);
563 fprintf (dump_file
, "optimizing bits or bits test to ");
564 print_generic_expr (dump_file
, name1
);
565 fprintf (dump_file
, " & T != 0\nwith temporary T = ");
566 print_generic_expr (dump_file
, bits1
);
567 fprintf (dump_file
, " | ");
568 print_generic_expr (dump_file
, bits2
);
569 fprintf (dump_file
, "\n");
575 /* See if we have two comparisons that we can merge into one. */
576 else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond
)) == tcc_comparison
577 && TREE_CODE_CLASS (gimple_cond_code (outer_cond
)) == tcc_comparison
)
580 enum tree_code inner_cond_code
= gimple_cond_code (inner_cond
);
581 enum tree_code outer_cond_code
= gimple_cond_code (outer_cond
);
583 /* Invert comparisons if necessary (and possible). */
585 inner_cond_code
= invert_tree_comparison (inner_cond_code
,
586 HONOR_NANS (gimple_cond_lhs (inner_cond
)));
587 if (inner_cond_code
== ERROR_MARK
)
590 outer_cond_code
= invert_tree_comparison (outer_cond_code
,
591 HONOR_NANS (gimple_cond_lhs (outer_cond
)));
592 if (outer_cond_code
== ERROR_MARK
)
594 /* Don't return false so fast, try maybe_fold_or_comparisons? */
596 if (!(t
= maybe_fold_and_comparisons (boolean_type_node
, inner_cond_code
,
597 gimple_cond_lhs (inner_cond
),
598 gimple_cond_rhs (inner_cond
),
600 gimple_cond_lhs (outer_cond
),
601 gimple_cond_rhs (outer_cond
),
602 gimple_bb (outer_cond
))))
605 gimple_stmt_iterator gsi
;
606 bool logical_op_non_short_circuit
= LOGICAL_OP_NON_SHORT_CIRCUIT
;
607 if (param_logical_op_non_short_circuit
!= -1)
608 logical_op_non_short_circuit
609 = param_logical_op_non_short_circuit
;
610 if (!logical_op_non_short_circuit
|| sanitize_coverage_p ())
612 /* Only do this optimization if the inner bb contains only the conditional. */
613 if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb
)))
615 t1
= fold_build2_loc (gimple_location (inner_cond
),
618 gimple_cond_lhs (inner_cond
),
619 gimple_cond_rhs (inner_cond
));
620 t2
= fold_build2_loc (gimple_location (outer_cond
),
623 gimple_cond_lhs (outer_cond
),
624 gimple_cond_rhs (outer_cond
));
625 t
= fold_build2_loc (gimple_location (inner_cond
),
626 TRUTH_AND_EXPR
, boolean_type_node
, t1
, t2
);
629 t
= fold_build1 (TRUTH_NOT_EXPR
, TREE_TYPE (t
), t
);
632 gsi
= gsi_for_stmt (inner_cond
);
633 t
= force_gimple_operand_gsi_1 (&gsi
, t
, is_gimple_condexpr_for_cond
,
634 NULL
, true, GSI_SAME_STMT
);
637 t
= fold_build1 (TRUTH_NOT_EXPR
, TREE_TYPE (t
), t
);
638 t
= canonicalize_cond_expr_cond (t
);
641 if (!is_gimple_condexpr_for_cond (t
))
643 gsi
= gsi_for_stmt (inner_cond
);
644 t
= force_gimple_operand_gsi_1 (&gsi
, t
, is_gimple_condexpr_for_cond
,
645 NULL
, true, GSI_SAME_STMT
);
647 gimple_cond_set_condition_from_tree (inner_cond
, t
);
648 update_stmt (inner_cond
);
650 /* Leave CFG optimization to cfg_cleanup. */
651 gimple_cond_set_condition_from_tree (outer_cond
,
652 outer_inv
? boolean_false_node
: boolean_true_node
);
653 update_stmt (outer_cond
);
654 update_profile_after_ifcombine (inner_cond_bb
, outer_cond_bb
);
658 fprintf (dump_file
, "optimizing two comparisons to ");
659 print_generic_expr (dump_file
, t
);
660 fprintf (dump_file
, "\n");
669 /* Helper function for tree_ssa_ifcombine_bb. Recognize a CFG pattern and
670 dispatch to the appropriate if-conversion helper for a particular
671 set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
672 PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB. */
675 tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb
, basic_block outer_cond_bb
,
676 basic_block then_bb
, basic_block else_bb
,
677 basic_block phi_pred_bb
)
679 /* The && form is characterized by a common else_bb with
680 the two edges leading to it mergable. The latter is
681 guaranteed by matching PHI arguments in the else_bb and
682 the inner cond_bb having no side-effects. */
683 if (phi_pred_bb
!= else_bb
684 && recognize_if_then_else (outer_cond_bb
, &inner_cond_bb
, &else_bb
)
685 && same_phi_args_p (outer_cond_bb
, phi_pred_bb
, else_bb
))
689 if (q) goto inner_cond_bb; else goto else_bb;
691 if (p) goto ...; else goto else_bb;
696 return ifcombine_ifandif (inner_cond_bb
, false, outer_cond_bb
, false,
700 /* And a version where the outer condition is negated. */
701 if (phi_pred_bb
!= else_bb
702 && recognize_if_then_else (outer_cond_bb
, &else_bb
, &inner_cond_bb
)
703 && same_phi_args_p (outer_cond_bb
, phi_pred_bb
, else_bb
))
707 if (q) goto else_bb; else goto inner_cond_bb;
709 if (p) goto ...; else goto else_bb;
714 return ifcombine_ifandif (inner_cond_bb
, false, outer_cond_bb
, true,
718 /* The || form is characterized by a common then_bb with the
719 two edges leading to it mergable. The latter is guaranteed
720 by matching PHI arguments in the then_bb and the inner cond_bb
721 having no side-effects. */
722 if (phi_pred_bb
!= then_bb
723 && recognize_if_then_else (outer_cond_bb
, &then_bb
, &inner_cond_bb
)
724 && same_phi_args_p (outer_cond_bb
, phi_pred_bb
, then_bb
))
728 if (q) goto then_bb; else goto inner_cond_bb;
730 if (q) goto then_bb; else goto ...;
734 return ifcombine_ifandif (inner_cond_bb
, true, outer_cond_bb
, true,
738 /* And a version where the outer condition is negated. */
739 if (phi_pred_bb
!= then_bb
740 && recognize_if_then_else (outer_cond_bb
, &inner_cond_bb
, &then_bb
)
741 && same_phi_args_p (outer_cond_bb
, phi_pred_bb
, then_bb
))
745 if (q) goto inner_cond_bb; else goto then_bb;
747 if (q) goto then_bb; else goto ...;
751 return ifcombine_ifandif (inner_cond_bb
, true, outer_cond_bb
, false,
758 /* Recognize a CFG pattern and dispatch to the appropriate
759 if-conversion helper. We start with BB as the innermost
760 worker basic-block. Returns true if a transformation was done. */
763 tree_ssa_ifcombine_bb (basic_block inner_cond_bb
)
765 basic_block then_bb
= NULL
, else_bb
= NULL
;
767 if (!recognize_if_then_else (inner_cond_bb
, &then_bb
, &else_bb
))
770 /* Recognize && and || of two conditions with a common
771 then/else block which entry edges we can merge. That is:
777 This requires a single predecessor of the inner cond_bb. */
778 if (single_pred_p (inner_cond_bb
)
779 && bb_no_side_effects_p (inner_cond_bb
))
781 basic_block outer_cond_bb
= single_pred (inner_cond_bb
);
783 if (tree_ssa_ifcombine_bb_1 (inner_cond_bb
, outer_cond_bb
,
784 then_bb
, else_bb
, inner_cond_bb
))
787 if (forwarder_block_to (else_bb
, then_bb
))
789 /* Other possibilities for the && form, if else_bb is
790 empty forwarder block to then_bb. Compared to the above simpler
791 forms this can be treated as if then_bb and else_bb were swapped,
792 and the corresponding inner_cond_bb not inverted because of that.
793 For same_phi_args_p we look at equality of arguments between
794 edge from outer_cond_bb and the forwarder block. */
795 if (tree_ssa_ifcombine_bb_1 (inner_cond_bb
, outer_cond_bb
, else_bb
,
799 else if (forwarder_block_to (then_bb
, else_bb
))
801 /* Other possibilities for the || form, if then_bb is
802 empty forwarder block to else_bb. Compared to the above simpler
803 forms this can be treated as if then_bb and else_bb were swapped,
804 and the corresponding inner_cond_bb not inverted because of that.
805 For same_phi_args_p we look at equality of arguments between
806 edge from outer_cond_bb and the forwarder block. */
807 if (tree_ssa_ifcombine_bb_1 (inner_cond_bb
, outer_cond_bb
, else_bb
,
816 /* Main entry for the tree if-conversion pass. */
820 const pass_data pass_data_tree_ifcombine
=
822 GIMPLE_PASS
, /* type */
823 "ifcombine", /* name */
824 OPTGROUP_NONE
, /* optinfo_flags */
825 TV_TREE_IFCOMBINE
, /* tv_id */
826 ( PROP_cfg
| PROP_ssa
), /* properties_required */
827 0, /* properties_provided */
828 0, /* properties_destroyed */
829 0, /* todo_flags_start */
830 TODO_update_ssa
, /* todo_flags_finish */
833 class pass_tree_ifcombine
: public gimple_opt_pass
836 pass_tree_ifcombine (gcc::context
*ctxt
)
837 : gimple_opt_pass (pass_data_tree_ifcombine
, ctxt
)
840 /* opt_pass methods: */
841 unsigned int execute (function
*) final override
;
843 }; // class pass_tree_ifcombine
846 pass_tree_ifcombine::execute (function
*fun
)
849 bool cfg_changed
= false;
852 bbs
= single_pred_before_succ_order ();
853 calculate_dominance_info (CDI_DOMINATORS
);
854 mark_ssa_maybe_undefs ();
856 /* Search every basic block for COND_EXPR we may be able to optimize.
858 We walk the blocks in order that guarantees that a block with
859 a single predecessor is processed after the predecessor.
860 This ensures that we collapse outter ifs before visiting the
861 inner ones, and also that we do not try to visit a removed
862 block. This is opposite of PHI-OPT, because we cascade the
863 combining rather than cascading PHIs. */
864 for (i
= n_basic_blocks_for_fn (fun
) - NUM_FIXED_BLOCKS
- 1; i
>= 0; i
--)
866 basic_block bb
= bbs
[i
];
868 if (safe_is_a
<gcond
*> (*gsi_last_bb (bb
)))
869 if (tree_ssa_ifcombine_bb (bb
))
871 /* Clear range info from all stmts in BB which is now executed
872 conditional on a always true/false condition. */
873 reset_flow_sensitive_info_in_bb (bb
);
874 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);
877 gassign
*ass
= dyn_cast
<gassign
*> (gsi_stmt (gsi
));
880 tree lhs
= gimple_assign_lhs (ass
);
881 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
882 || POINTER_TYPE_P (TREE_TYPE (lhs
)))
883 && arith_code_with_undefined_signed_overflow
884 (gimple_assign_rhs_code (ass
)))
885 rewrite_to_defined_overflow (&gsi
);
893 return cfg_changed
? TODO_cleanup_cfg
: 0;
899 make_pass_tree_ifcombine (gcc::context
*ctxt
)
901 return new pass_tree_ifcombine (ctxt
);