1 /* Combining of if-expressions on trees.
2 Copyright (C) 2007-2022 Free Software Foundation, Inc.
3 Contributed by Richard Guenther <rguenther@suse.de>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "tree-pass.h"
33 #include "tree-pretty-print.h"
34 /* rtl is needed only because arm back-end requires it for
36 #include "fold-const.h"
38 #include "gimple-iterator.h"
39 #include "gimple-fold.h"
40 #include "gimplify-me.h"
46 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
47 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
48 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
52 /* This pass combines COND_EXPRs to simplify control flow. It
53 currently recognizes bit tests and comparisons in chains that
54 represent logical and or logical or of two COND_EXPRs.
56 It does so by walking basic blocks in an approximate reverse
57 post-dominator order and trying to match CFG patterns that
58 represent logical and or logical or of two COND_EXPRs.
59 Transformations are done if the COND_EXPR conditions match
62 1. two single bit tests X & (1 << Yn) (for logical and)
64 2. two bit tests X & Yn (for logical or)
66 3. two comparisons X OPn Y (for logical or)
68 To simplify this pass, removing basic blocks and dead code
69 is left to CFG cleanup and DCE. */
72 /* Recognize a if-then-else CFG pattern starting to match with the
73 COND_BB basic-block containing the COND_EXPR. The recognized
74 then end else blocks are stored to *THEN_BB and *ELSE_BB. If
75 *THEN_BB and/or *ELSE_BB are already set, they are required to
76 match the then and else basic-blocks to make the pattern match.
77 Returns true if the pattern matched, false otherwise. */
80 recognize_if_then_else (basic_block cond_bb
,
81 basic_block
*then_bb
, basic_block
*else_bb
)
85 if (EDGE_COUNT (cond_bb
->succs
) != 2)
88 /* Find the then/else edges. */
89 t
= EDGE_SUCC (cond_bb
, 0);
90 e
= EDGE_SUCC (cond_bb
, 1);
91 if (!(t
->flags
& EDGE_TRUE_VALUE
))
93 if (!(t
->flags
& EDGE_TRUE_VALUE
)
94 || !(e
->flags
& EDGE_FALSE_VALUE
))
97 /* Check if the edge destinations point to the required block. */
99 && t
->dest
!= *then_bb
)
102 && e
->dest
!= *else_bb
)
113 /* Verify if the basic block BB does not have side-effects. Return
114 true in this case, else false. */
117 bb_no_side_effects_p (basic_block bb
)
119 gimple_stmt_iterator gsi
;
121 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
123 gimple
*stmt
= gsi_stmt (gsi
);
125 if (is_gimple_debug (stmt
))
129 enum tree_code rhs_code
;
130 if (gimple_has_side_effects (stmt
)
131 || gimple_uses_undefined_value_p (stmt
)
132 || gimple_could_trap_p (stmt
)
133 || gimple_vuse (stmt
)
134 /* We need to rewrite stmts with undefined overflow to use
135 unsigned arithmetic but cannot do so for signed division. */
136 || ((ass
= dyn_cast
<gassign
*> (stmt
))
137 && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (ass
)))
138 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (gimple_assign_lhs (ass
)))
139 && ((rhs_code
= gimple_assign_rhs_code (ass
)), true)
140 && (rhs_code
== TRUNC_DIV_EXPR
141 || rhs_code
== CEIL_DIV_EXPR
142 || rhs_code
== FLOOR_DIV_EXPR
143 || rhs_code
== ROUND_DIV_EXPR
)
144 /* We cannot use expr_not_equal_to since we'd have to restrict
145 flow-sensitive info to whats known at the outer if. */
146 && (TREE_CODE (gimple_assign_rhs2 (ass
)) != INTEGER_CST
147 || !integer_minus_onep (gimple_assign_rhs2 (ass
))))
148 /* const calls don't match any of the above, yet they could
149 still have some side-effects - they could contain
150 gimple_could_trap_p statements, like floating point
151 exceptions or integer division by zero. See PR70586.
152 FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p
153 should handle this. */
154 || is_gimple_call (stmt
))
161 /* Return true if BB is an empty forwarder block to TO_BB. */
164 forwarder_block_to (basic_block bb
, basic_block to_bb
)
166 return empty_block_p (bb
)
167 && single_succ_p (bb
)
168 && single_succ (bb
) == to_bb
;
171 /* Verify if all PHI node arguments in DEST for edges from BB1 or
172 BB2 to DEST are the same. This makes the CFG merge point
173 free from side-effects. Return true in this case, else false. */
176 same_phi_args_p (basic_block bb1
, basic_block bb2
, basic_block dest
)
178 edge e1
= find_edge (bb1
, dest
);
179 edge e2
= find_edge (bb2
, dest
);
183 for (gsi
= gsi_start_phis (dest
); !gsi_end_p (gsi
); gsi_next (&gsi
))
186 if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi
, e1
),
187 PHI_ARG_DEF_FROM_EDGE (phi
, e2
), 0))
194 /* Return the best representative SSA name for CANDIDATE which is used
198 get_name_for_bit_test (tree candidate
)
200 /* Skip single-use names in favor of using the name from a
201 non-widening conversion definition. */
202 if (TREE_CODE (candidate
) == SSA_NAME
203 && has_single_use (candidate
))
205 gimple
*def_stmt
= SSA_NAME_DEF_STMT (candidate
);
206 if (is_gimple_assign (def_stmt
)
207 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt
)))
209 if (TYPE_PRECISION (TREE_TYPE (candidate
))
210 <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt
))))
211 return gimple_assign_rhs1 (def_stmt
);
218 /* Recognize a single bit test pattern in GIMPLE_COND and its defining
219 statements. Store the name being tested in *NAME and the bit
220 in *BIT. The GIMPLE_COND computes *NAME & (1 << *BIT).
221 Returns true if the pattern matched, false otherwise. */
224 recognize_single_bit_test (gcond
*cond
, tree
*name
, tree
*bit
, bool inv
)
228 /* Get at the definition of the result of the bit test. */
229 if (gimple_cond_code (cond
) != (inv
? EQ_EXPR
: NE_EXPR
)
230 || TREE_CODE (gimple_cond_lhs (cond
)) != SSA_NAME
231 || !integer_zerop (gimple_cond_rhs (cond
)))
233 stmt
= SSA_NAME_DEF_STMT (gimple_cond_lhs (cond
));
234 if (!is_gimple_assign (stmt
))
237 /* Look at which bit is tested. One form to recognize is
238 D.1985_5 = state_3(D) >> control1_4(D);
239 D.1986_6 = (int) D.1985_5;
241 if (D.1987_7 != 0) */
242 if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
243 && integer_onep (gimple_assign_rhs2 (stmt
))
244 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
246 tree orig_name
= gimple_assign_rhs1 (stmt
);
248 /* Look through copies and conversions to eventually
249 find the stmt that computes the shift. */
250 stmt
= SSA_NAME_DEF_STMT (orig_name
);
252 while (is_gimple_assign (stmt
)
253 && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
254 && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt
)))
255 <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt
))))
256 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
257 || gimple_assign_ssa_name_copy_p (stmt
)))
258 stmt
= SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt
));
260 /* If we found such, decompose it. */
261 if (is_gimple_assign (stmt
)
262 && gimple_assign_rhs_code (stmt
) == RSHIFT_EXPR
)
264 /* op0 & (1 << op1) */
265 *bit
= gimple_assign_rhs2 (stmt
);
266 *name
= gimple_assign_rhs1 (stmt
);
271 *bit
= integer_zero_node
;
272 *name
= get_name_for_bit_test (orig_name
);
279 D.1987_7 = op0 & (1 << CST)
280 if (D.1987_7 != 0) */
281 if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
282 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
283 && integer_pow2p (gimple_assign_rhs2 (stmt
)))
285 *name
= gimple_assign_rhs1 (stmt
);
286 *bit
= build_int_cst (integer_type_node
,
287 tree_log2 (gimple_assign_rhs2 (stmt
)));
292 D.1986_6 = 1 << control1_4(D)
293 D.1987_7 = op0 & D.1986_6
294 if (D.1987_7 != 0) */
295 if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
296 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
297 && TREE_CODE (gimple_assign_rhs2 (stmt
)) == SSA_NAME
)
301 /* Both arguments of the BIT_AND_EXPR can be the single-bit
302 specifying expression. */
303 tmp
= SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt
));
304 if (is_gimple_assign (tmp
)
305 && gimple_assign_rhs_code (tmp
) == LSHIFT_EXPR
306 && integer_onep (gimple_assign_rhs1 (tmp
)))
308 *name
= gimple_assign_rhs2 (stmt
);
309 *bit
= gimple_assign_rhs2 (tmp
);
313 tmp
= SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt
));
314 if (is_gimple_assign (tmp
)
315 && gimple_assign_rhs_code (tmp
) == LSHIFT_EXPR
316 && integer_onep (gimple_assign_rhs1 (tmp
)))
318 *name
= gimple_assign_rhs1 (stmt
);
319 *bit
= gimple_assign_rhs2 (tmp
);
327 /* Recognize a bit test pattern in a GIMPLE_COND and its defining
328 statements. Store the name being tested in *NAME and the bits
329 in *BITS. The COND_EXPR computes *NAME & *BITS.
330 Returns true if the pattern matched, false otherwise. */
333 recognize_bits_test (gcond
*cond
, tree
*name
, tree
*bits
, bool inv
)
337 /* Get at the definition of the result of the bit test. */
338 if (gimple_cond_code (cond
) != (inv
? EQ_EXPR
: NE_EXPR
)
339 || TREE_CODE (gimple_cond_lhs (cond
)) != SSA_NAME
340 || !integer_zerop (gimple_cond_rhs (cond
)))
342 stmt
= SSA_NAME_DEF_STMT (gimple_cond_lhs (cond
));
343 if (!is_gimple_assign (stmt
)
344 || gimple_assign_rhs_code (stmt
) != BIT_AND_EXPR
)
347 *name
= get_name_for_bit_test (gimple_assign_rhs1 (stmt
));
348 *bits
= gimple_assign_rhs2 (stmt
);
354 /* Update profile after code in outer_cond_bb was adjusted so
355 outer_cond_bb has no condition. */
358 update_profile_after_ifcombine (basic_block inner_cond_bb
,
359 basic_block outer_cond_bb
)
361 edge outer_to_inner
= find_edge (outer_cond_bb
, inner_cond_bb
);
362 edge outer2
= (EDGE_SUCC (outer_cond_bb
, 0) == outer_to_inner
363 ? EDGE_SUCC (outer_cond_bb
, 1)
364 : EDGE_SUCC (outer_cond_bb
, 0));
365 edge inner_taken
= EDGE_SUCC (inner_cond_bb
, 0);
366 edge inner_not_taken
= EDGE_SUCC (inner_cond_bb
, 1);
368 if (inner_taken
->dest
!= outer2
->dest
)
369 std::swap (inner_taken
, inner_not_taken
);
370 gcc_assert (inner_taken
->dest
== outer2
->dest
);
372 /* In the following we assume that inner_cond_bb has single predecessor. */
373 gcc_assert (single_pred_p (inner_cond_bb
));
375 /* Path outer_cond_bb->(outer2) needs to be merged into path
376 outer_cond_bb->(outer_to_inner)->inner_cond_bb->(inner_taken)
377 and probability of inner_not_taken updated. */
379 inner_cond_bb
->count
= outer_cond_bb
->count
;
381 /* Handle special case where inner_taken probability is always. In this case
382 we know that the overall outcome will be always as well, but combining
383 probabilities will be conservative because it does not know that
384 outer2->probability is inverse of outer_to_inner->probability. */
385 if (inner_taken
->probability
== profile_probability::always ())
388 inner_taken
->probability
= outer2
->probability
+ outer_to_inner
->probability
389 * inner_taken
->probability
;
390 inner_not_taken
->probability
= profile_probability::always ()
391 - inner_taken
->probability
;
393 outer_to_inner
->probability
= profile_probability::always ();
394 outer2
->probability
= profile_probability::never ();
397 /* If-convert on a and pattern with a common else block. The inner
398 if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
399 inner_inv, outer_inv and result_inv indicate whether the conditions
401 Returns true if the edges to the common else basic-block were merged. */
404 ifcombine_ifandif (basic_block inner_cond_bb
, bool inner_inv
,
405 basic_block outer_cond_bb
, bool outer_inv
, bool result_inv
)
407 gimple_stmt_iterator gsi
;
408 gimple
*inner_stmt
, *outer_stmt
;
409 gcond
*inner_cond
, *outer_cond
;
410 tree name1
, name2
, bit1
, bit2
, bits1
, bits2
;
412 inner_stmt
= last_stmt (inner_cond_bb
);
414 || gimple_code (inner_stmt
) != GIMPLE_COND
)
416 inner_cond
= as_a
<gcond
*> (inner_stmt
);
418 outer_stmt
= last_stmt (outer_cond_bb
);
420 || gimple_code (outer_stmt
) != GIMPLE_COND
)
422 outer_cond
= as_a
<gcond
*> (outer_stmt
);
424 /* See if we test a single bit of the same name in both tests. In
425 that case remove the outer test, merging both else edges,
426 and change the inner one to test for
427 name & (bit1 | bit2) == (bit1 | bit2). */
428 if (recognize_single_bit_test (inner_cond
, &name1
, &bit1
, inner_inv
)
429 && recognize_single_bit_test (outer_cond
, &name2
, &bit2
, outer_inv
)
435 gsi
= gsi_for_stmt (inner_cond
);
436 t
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (name1
),
437 build_int_cst (TREE_TYPE (name1
), 1), bit1
);
438 t2
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (name1
),
439 build_int_cst (TREE_TYPE (name1
), 1), bit2
);
440 t
= fold_build2 (BIT_IOR_EXPR
, TREE_TYPE (name1
), t
, t2
);
441 t
= force_gimple_operand_gsi (&gsi
, t
, true, NULL_TREE
,
442 true, GSI_SAME_STMT
);
443 t2
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (name1
), name1
, t
);
444 t2
= force_gimple_operand_gsi (&gsi
, t2
, true, NULL_TREE
,
445 true, GSI_SAME_STMT
);
446 t
= fold_build2 (result_inv
? NE_EXPR
: EQ_EXPR
,
447 boolean_type_node
, t2
, t
);
448 t
= canonicalize_cond_expr_cond (t
);
451 if (!is_gimple_condexpr_for_cond (t
))
453 gsi
= gsi_for_stmt (inner_cond
);
454 t
= force_gimple_operand_gsi_1 (&gsi
, t
, is_gimple_condexpr_for_cond
,
455 NULL
, true, GSI_SAME_STMT
);
457 gimple_cond_set_condition_from_tree (inner_cond
, t
);
458 update_stmt (inner_cond
);
460 /* Leave CFG optimization to cfg_cleanup. */
461 gimple_cond_set_condition_from_tree (outer_cond
,
462 outer_inv
? boolean_false_node
: boolean_true_node
);
463 update_stmt (outer_cond
);
465 update_profile_after_ifcombine (inner_cond_bb
, outer_cond_bb
);
469 fprintf (dump_file
, "optimizing double bit test to ");
470 print_generic_expr (dump_file
, name1
);
471 fprintf (dump_file
, " & T == T\nwith temporary T = (1 << ");
472 print_generic_expr (dump_file
, bit1
);
473 fprintf (dump_file
, ") | (1 << ");
474 print_generic_expr (dump_file
, bit2
);
475 fprintf (dump_file
, ")\n");
481 /* See if we have two bit tests of the same name in both tests.
482 In that case remove the outer test and change the inner one to
483 test for name & (bits1 | bits2) != 0. */
484 else if (recognize_bits_test (inner_cond
, &name1
, &bits1
, !inner_inv
)
485 && recognize_bits_test (outer_cond
, &name2
, &bits2
, !outer_inv
))
487 gimple_stmt_iterator gsi
;
490 /* Find the common name which is bit-tested. */
493 else if (bits1
== bits2
)
495 std::swap (name2
, bits2
);
496 std::swap (name1
, bits1
);
498 else if (name1
== bits2
)
499 std::swap (name2
, bits2
);
500 else if (bits1
== name2
)
501 std::swap (name1
, bits1
);
505 /* As we strip non-widening conversions in finding a common
506 name that is tested make sure to end up with an integral
507 type for building the bit operations. */
508 if (TYPE_PRECISION (TREE_TYPE (bits1
))
509 >= TYPE_PRECISION (TREE_TYPE (bits2
)))
511 bits1
= fold_convert (unsigned_type_for (TREE_TYPE (bits1
)), bits1
);
512 name1
= fold_convert (TREE_TYPE (bits1
), name1
);
513 bits2
= fold_convert (unsigned_type_for (TREE_TYPE (bits2
)), bits2
);
514 bits2
= fold_convert (TREE_TYPE (bits1
), bits2
);
518 bits2
= fold_convert (unsigned_type_for (TREE_TYPE (bits2
)), bits2
);
519 name1
= fold_convert (TREE_TYPE (bits2
), name1
);
520 bits1
= fold_convert (unsigned_type_for (TREE_TYPE (bits1
)), bits1
);
521 bits1
= fold_convert (TREE_TYPE (bits2
), bits1
);
525 gsi
= gsi_for_stmt (inner_cond
);
526 t
= fold_build2 (BIT_IOR_EXPR
, TREE_TYPE (name1
), bits1
, bits2
);
527 t
= force_gimple_operand_gsi (&gsi
, t
, true, NULL_TREE
,
528 true, GSI_SAME_STMT
);
529 t
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (name1
), name1
, t
);
530 t
= force_gimple_operand_gsi (&gsi
, t
, true, NULL_TREE
,
531 true, GSI_SAME_STMT
);
532 t
= fold_build2 (result_inv
? NE_EXPR
: EQ_EXPR
, boolean_type_node
, t
,
533 build_int_cst (TREE_TYPE (t
), 0));
534 t
= canonicalize_cond_expr_cond (t
);
537 if (!is_gimple_condexpr_for_cond (t
))
539 gsi
= gsi_for_stmt (inner_cond
);
540 t
= force_gimple_operand_gsi_1 (&gsi
, t
, is_gimple_condexpr_for_cond
,
541 NULL
, true, GSI_SAME_STMT
);
543 gimple_cond_set_condition_from_tree (inner_cond
, t
);
544 update_stmt (inner_cond
);
546 /* Leave CFG optimization to cfg_cleanup. */
547 gimple_cond_set_condition_from_tree (outer_cond
,
548 outer_inv
? boolean_false_node
: boolean_true_node
);
549 update_stmt (outer_cond
);
550 update_profile_after_ifcombine (inner_cond_bb
, outer_cond_bb
);
554 fprintf (dump_file
, "optimizing bits or bits test to ");
555 print_generic_expr (dump_file
, name1
);
556 fprintf (dump_file
, " & T != 0\nwith temporary T = ");
557 print_generic_expr (dump_file
, bits1
);
558 fprintf (dump_file
, " | ");
559 print_generic_expr (dump_file
, bits2
);
560 fprintf (dump_file
, "\n");
566 /* See if we have two comparisons that we can merge into one. */
567 else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond
)) == tcc_comparison
568 && TREE_CODE_CLASS (gimple_cond_code (outer_cond
)) == tcc_comparison
)
571 enum tree_code inner_cond_code
= gimple_cond_code (inner_cond
);
572 enum tree_code outer_cond_code
= gimple_cond_code (outer_cond
);
574 /* Invert comparisons if necessary (and possible). */
576 inner_cond_code
= invert_tree_comparison (inner_cond_code
,
577 HONOR_NANS (gimple_cond_lhs (inner_cond
)));
578 if (inner_cond_code
== ERROR_MARK
)
581 outer_cond_code
= invert_tree_comparison (outer_cond_code
,
582 HONOR_NANS (gimple_cond_lhs (outer_cond
)));
583 if (outer_cond_code
== ERROR_MARK
)
585 /* Don't return false so fast, try maybe_fold_or_comparisons? */
587 if (!(t
= maybe_fold_and_comparisons (boolean_type_node
, inner_cond_code
,
588 gimple_cond_lhs (inner_cond
),
589 gimple_cond_rhs (inner_cond
),
591 gimple_cond_lhs (outer_cond
),
592 gimple_cond_rhs (outer_cond
),
593 gimple_bb (outer_cond
))))
596 gimple_stmt_iterator gsi
;
597 bool logical_op_non_short_circuit
= LOGICAL_OP_NON_SHORT_CIRCUIT
;
598 if (param_logical_op_non_short_circuit
!= -1)
599 logical_op_non_short_circuit
600 = param_logical_op_non_short_circuit
;
601 if (!logical_op_non_short_circuit
|| sanitize_coverage_p ())
603 /* Only do this optimization if the inner bb contains only the conditional. */
604 if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb
)))
606 t1
= fold_build2_loc (gimple_location (inner_cond
),
609 gimple_cond_lhs (inner_cond
),
610 gimple_cond_rhs (inner_cond
));
611 t2
= fold_build2_loc (gimple_location (outer_cond
),
614 gimple_cond_lhs (outer_cond
),
615 gimple_cond_rhs (outer_cond
));
616 t
= fold_build2_loc (gimple_location (inner_cond
),
617 TRUTH_AND_EXPR
, boolean_type_node
, t1
, t2
);
620 t
= fold_build1 (TRUTH_NOT_EXPR
, TREE_TYPE (t
), t
);
623 gsi
= gsi_for_stmt (inner_cond
);
624 t
= force_gimple_operand_gsi_1 (&gsi
, t
, is_gimple_condexpr_for_cond
,
625 NULL
, true, GSI_SAME_STMT
);
628 t
= fold_build1 (TRUTH_NOT_EXPR
, TREE_TYPE (t
), t
);
629 t
= canonicalize_cond_expr_cond (t
);
632 if (!is_gimple_condexpr_for_cond (t
))
634 gsi
= gsi_for_stmt (inner_cond
);
635 t
= force_gimple_operand_gsi_1 (&gsi
, t
, is_gimple_condexpr_for_cond
,
636 NULL
, true, GSI_SAME_STMT
);
638 gimple_cond_set_condition_from_tree (inner_cond
, t
);
639 update_stmt (inner_cond
);
641 /* Leave CFG optimization to cfg_cleanup. */
642 gimple_cond_set_condition_from_tree (outer_cond
,
643 outer_inv
? boolean_false_node
: boolean_true_node
);
644 update_stmt (outer_cond
);
645 update_profile_after_ifcombine (inner_cond_bb
, outer_cond_bb
);
649 fprintf (dump_file
, "optimizing two comparisons to ");
650 print_generic_expr (dump_file
, t
);
651 fprintf (dump_file
, "\n");
660 /* Helper function for tree_ssa_ifcombine_bb. Recognize a CFG pattern and
661 dispatch to the appropriate if-conversion helper for a particular
662 set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
663 PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB. */
666 tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb
, basic_block outer_cond_bb
,
667 basic_block then_bb
, basic_block else_bb
,
668 basic_block phi_pred_bb
)
670 /* The && form is characterized by a common else_bb with
671 the two edges leading to it mergable. The latter is
672 guaranteed by matching PHI arguments in the else_bb and
673 the inner cond_bb having no side-effects. */
674 if (phi_pred_bb
!= else_bb
675 && recognize_if_then_else (outer_cond_bb
, &inner_cond_bb
, &else_bb
)
676 && same_phi_args_p (outer_cond_bb
, phi_pred_bb
, else_bb
))
680 if (q) goto inner_cond_bb; else goto else_bb;
682 if (p) goto ...; else goto else_bb;
687 return ifcombine_ifandif (inner_cond_bb
, false, outer_cond_bb
, false,
691 /* And a version where the outer condition is negated. */
692 if (phi_pred_bb
!= else_bb
693 && recognize_if_then_else (outer_cond_bb
, &else_bb
, &inner_cond_bb
)
694 && same_phi_args_p (outer_cond_bb
, phi_pred_bb
, else_bb
))
698 if (q) goto else_bb; else goto inner_cond_bb;
700 if (p) goto ...; else goto else_bb;
705 return ifcombine_ifandif (inner_cond_bb
, false, outer_cond_bb
, true,
709 /* The || form is characterized by a common then_bb with the
710 two edges leading to it mergable. The latter is guaranteed
711 by matching PHI arguments in the then_bb and the inner cond_bb
712 having no side-effects. */
713 if (phi_pred_bb
!= then_bb
714 && recognize_if_then_else (outer_cond_bb
, &then_bb
, &inner_cond_bb
)
715 && same_phi_args_p (outer_cond_bb
, phi_pred_bb
, then_bb
))
719 if (q) goto then_bb; else goto inner_cond_bb;
721 if (q) goto then_bb; else goto ...;
725 return ifcombine_ifandif (inner_cond_bb
, true, outer_cond_bb
, true,
729 /* And a version where the outer condition is negated. */
730 if (phi_pred_bb
!= then_bb
731 && recognize_if_then_else (outer_cond_bb
, &inner_cond_bb
, &then_bb
)
732 && same_phi_args_p (outer_cond_bb
, phi_pred_bb
, then_bb
))
736 if (q) goto inner_cond_bb; else goto then_bb;
738 if (q) goto then_bb; else goto ...;
742 return ifcombine_ifandif (inner_cond_bb
, true, outer_cond_bb
, false,
749 /* Recognize a CFG pattern and dispatch to the appropriate
750 if-conversion helper. We start with BB as the innermost
751 worker basic-block. Returns true if a transformation was done. */
754 tree_ssa_ifcombine_bb (basic_block inner_cond_bb
)
756 basic_block then_bb
= NULL
, else_bb
= NULL
;
758 if (!recognize_if_then_else (inner_cond_bb
, &then_bb
, &else_bb
))
761 /* Recognize && and || of two conditions with a common
762 then/else block which entry edges we can merge. That is:
768 This requires a single predecessor of the inner cond_bb. */
769 if (single_pred_p (inner_cond_bb
)
770 && bb_no_side_effects_p (inner_cond_bb
))
772 basic_block outer_cond_bb
= single_pred (inner_cond_bb
);
774 if (tree_ssa_ifcombine_bb_1 (inner_cond_bb
, outer_cond_bb
,
775 then_bb
, else_bb
, inner_cond_bb
))
778 if (forwarder_block_to (else_bb
, then_bb
))
780 /* Other possibilities for the && form, if else_bb is
781 empty forwarder block to then_bb. Compared to the above simpler
782 forms this can be treated as if then_bb and else_bb were swapped,
783 and the corresponding inner_cond_bb not inverted because of that.
784 For same_phi_args_p we look at equality of arguments between
785 edge from outer_cond_bb and the forwarder block. */
786 if (tree_ssa_ifcombine_bb_1 (inner_cond_bb
, outer_cond_bb
, else_bb
,
790 else if (forwarder_block_to (then_bb
, else_bb
))
792 /* Other possibilities for the || form, if then_bb is
793 empty forwarder block to else_bb. Compared to the above simpler
794 forms this can be treated as if then_bb and else_bb were swapped,
795 and the corresponding inner_cond_bb not inverted because of that.
796 For same_phi_args_p we look at equality of arguments between
797 edge from outer_cond_bb and the forwarder block. */
798 if (tree_ssa_ifcombine_bb_1 (inner_cond_bb
, outer_cond_bb
, else_bb
,
807 /* Main entry for the tree if-conversion pass. */
811 const pass_data pass_data_tree_ifcombine
=
813 GIMPLE_PASS
, /* type */
814 "ifcombine", /* name */
815 OPTGROUP_NONE
, /* optinfo_flags */
816 TV_TREE_IFCOMBINE
, /* tv_id */
817 ( PROP_cfg
| PROP_ssa
), /* properties_required */
818 0, /* properties_provided */
819 0, /* properties_destroyed */
820 0, /* todo_flags_start */
821 TODO_update_ssa
, /* todo_flags_finish */
824 class pass_tree_ifcombine
: public gimple_opt_pass
827 pass_tree_ifcombine (gcc::context
*ctxt
)
828 : gimple_opt_pass (pass_data_tree_ifcombine
, ctxt
)
831 /* opt_pass methods: */
832 virtual unsigned int execute (function
*);
834 }; // class pass_tree_ifcombine
837 pass_tree_ifcombine::execute (function
*fun
)
840 bool cfg_changed
= false;
843 bbs
= single_pred_before_succ_order ();
844 calculate_dominance_info (CDI_DOMINATORS
);
846 /* Search every basic block for COND_EXPR we may be able to optimize.
848 We walk the blocks in order that guarantees that a block with
849 a single predecessor is processed after the predecessor.
850 This ensures that we collapse outter ifs before visiting the
851 inner ones, and also that we do not try to visit a removed
852 block. This is opposite of PHI-OPT, because we cascade the
853 combining rather than cascading PHIs. */
854 for (i
= n_basic_blocks_for_fn (fun
) - NUM_FIXED_BLOCKS
- 1; i
>= 0; i
--)
856 basic_block bb
= bbs
[i
];
857 gimple
*stmt
= last_stmt (bb
);
860 && gimple_code (stmt
) == GIMPLE_COND
)
861 if (tree_ssa_ifcombine_bb (bb
))
863 /* Clear range info from all stmts in BB which is now executed
864 conditional on a always true/false condition. */
865 reset_flow_sensitive_info_in_bb (bb
);
866 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);
869 gassign
*ass
= dyn_cast
<gassign
*> (gsi_stmt (gsi
));
872 tree lhs
= gimple_assign_lhs (ass
);
873 if ((INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
874 || POINTER_TYPE_P (TREE_TYPE (lhs
)))
875 && arith_code_with_undefined_signed_overflow
876 (gimple_assign_rhs_code (ass
)))
877 rewrite_to_defined_overflow (ass
, true);
885 return cfg_changed
? TODO_cleanup_cfg
: 0;
891 make_pass_tree_ifcombine (gcc::context
*ctxt
)
893 return new pass_tree_ifcombine (ctxt
);