/* Combining of if-expressions on trees.
   Copyright (C) 2007-2018 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "tree-pretty-print.h"
/* rtl is needed only because arm back-end requires it for
   BRANCH_COST.  */
#include "fold-const.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"

#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
/* This pass combines COND_EXPRs to simplify control flow.  It
   currently recognizes bit tests and comparisons in chains that
   represent logical and or logical or of two COND_EXPRs.

   It does so by walking basic blocks in an approximate reverse
   post-dominator order and trying to match CFG patterns that
   represent logical and or logical or of two COND_EXPRs.
   Transformations are done if the COND_EXPR conditions match
   either

     1. two single bit tests X & (1 << Yn) (for logical and)

     2. two bit tests X & Yn (for logical or)

     3. two comparisons X OPn Y (for logical or)

   To simplify this pass, removing basic blocks and dead code
   is left to CFG cleanup and DCE.  */
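
/* An editorial illustration (not itself part of the pass): for case 1
   above, source of the form

     if (x & (1 << a))
       if (x & (1 << b))
	 foo ();

   is rewritten into a single test

     T = (1 << a) | (1 << b);
     if ((x & T) == T)
       foo ();

   provided both conditions test the same name X and the inner block
   has no side-effects.  */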

/* Recognize an if-then-else CFG pattern starting to match with the
   COND_BB basic-block containing the COND_EXPR.  The recognized
   then and else blocks are stored to *THEN_BB and *ELSE_BB.  If
   *THEN_BB and/or *ELSE_BB are already set, they are required to
   match the then and else basic-blocks to make the pattern match.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_if_then_else (basic_block cond_bb,
			basic_block *then_bb, basic_block *else_bb)
{
  edge t, e;

  if (EDGE_COUNT (cond_bb->succs) != 2)
    return false;

  /* Find the then/else edges.  */
  t = EDGE_SUCC (cond_bb, 0);
  e = EDGE_SUCC (cond_bb, 1);
  if (!(t->flags & EDGE_TRUE_VALUE))
    std::swap (t, e);
  if (!(t->flags & EDGE_TRUE_VALUE)
      || !(e->flags & EDGE_FALSE_VALUE))
    return false;

  /* Check if the edge destinations point to the required block.  */
  if (*then_bb
      && t->dest != *then_bb)
    return false;
  if (*else_bb
      && e->dest != *else_bb)
    return false;

  if (!*then_bb)
    *then_bb = t->dest;
  if (!*else_bb)
    *else_bb = e->dest;

  return true;
}

/* Verify if the basic block BB does not have side-effects.  Return
   true in this case, else false.  */

static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_has_side_effects (stmt)
	  || gimple_uses_undefined_value_p (stmt)
	  || gimple_could_trap_p (stmt)
	  || gimple_vuse (stmt)
	  /* const calls don't match any of the above, yet they could
	     still have some side-effects - they could contain
	     gimple_could_trap_p statements, like floating point
	     exceptions or integer division by zero.  See PR70586.
	     FIXME: perhaps gimple_has_side_effects or gimple_could_trap_p
	     should handle this.  */
	  || is_gimple_call (stmt))
	return false;
    }

  return true;
}

/* Return true if BB is an empty forwarder block to TO_BB.  */

static bool
forwarder_block_to (basic_block bb, basic_block to_bb)
{
  return empty_block_p (bb)
	 && single_succ_p (bb)
	 && single_succ (bb) == to_bb;
}

/* Verify if all PHI node arguments in DEST for edges from BB1 or
   BB2 to DEST are the same.  This makes the CFG merge point
   free from side-effects.  Return true in this case, else false.  */

static bool
same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
{
  edge e1 = find_edge (bb1, dest);
  edge e2 = find_edge (bb2, dest);
  gphi_iterator gsi;
  gphi *phi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi.phi ();
      if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
			    PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
	return false;
    }

  return true;
}
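
/* Illustrative example (hypothetical GIMPLE, not from the sources): at
   a merge point with

     # x_1 = PHI <5(bb1), 5(bb2), x_0(bb3)>

   the edges from bb1 and bb2 can be treated as mergable because the
   PHI receives the same argument on both; if the arguments differed,
   redirecting one of the edges would change the merged value.  */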

/* Return the best representative SSA name for CANDIDATE which is used
   in a bit test.  */

static tree
get_name_for_bit_test (tree candidate)
{
  /* Skip single-use names in favor of using the name from a
     non-widening conversion definition.  */
  if (TREE_CODE (candidate) == SSA_NAME
      && has_single_use (candidate))
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (candidate);
      if (is_gimple_assign (def_stmt)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
	{
	  if (TYPE_PRECISION (TREE_TYPE (candidate))
	      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
	    return gimple_assign_rhs1 (def_stmt);
	}
    }

  return candidate;
}
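
/* Illustrative example (hypothetical GIMPLE, not from the sources):
   given

     _2 = (unsigned int) c_5;
     _3 = _2 & 1;
     if (_3 != 0)

   where _2 is single-use and the conversion from c_5 is non-widening,
   the bit test is better expressed on c_5 itself, so c_5 is returned
   as the representative name.  */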

/* Recognize a single bit test pattern in GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bit
   in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
  gimple *stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;
  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = op0 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
	 find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      while (is_gimple_assign (stmt)
	     && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
		      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
		 || gimple_assign_ssa_name_copy_p (stmt)))
	stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
	{
	  /* op0 & (1 << op1) */
	  *bit = gimple_assign_rhs2 (stmt);
	  *name = gimple_assign_rhs1 (stmt);
	}
      else
	{
	  /* t & 1 */
	  *bit = integer_zero_node;
	  *name = get_name_for_bit_test (orig_name);
	}

      return true;
    }
  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      *bit = build_int_cst (integer_type_node,
			    tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }
  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple *tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
	 specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs2 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs1 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}
    }

  return false;
}

/* Recognize a bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bits
   in *BITS.  The COND_EXPR computes *NAME & *BITS.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
{
  gimple *stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
    return false;

  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
  *bits = gimple_assign_rhs2 (stmt);

  return true;
}

/* Update profile after code in outer_cond_bb was adjusted so
   outer_cond_bb has no condition.  */

static void
update_profile_after_ifcombine (basic_block inner_cond_bb,
				basic_block outer_cond_bb)
{
  edge outer_to_inner = find_edge (outer_cond_bb, inner_cond_bb);
  edge outer2 = (EDGE_SUCC (outer_cond_bb, 0) == outer_to_inner
		 ? EDGE_SUCC (outer_cond_bb, 1)
		 : EDGE_SUCC (outer_cond_bb, 0));
  edge inner_taken = EDGE_SUCC (inner_cond_bb, 0);
  edge inner_not_taken = EDGE_SUCC (inner_cond_bb, 1);

  if (inner_taken->dest != outer2->dest)
    std::swap (inner_taken, inner_not_taken);
  gcc_assert (inner_taken->dest == outer2->dest);

  /* In the following we assume that inner_cond_bb has single predecessor.  */
  gcc_assert (single_pred_p (inner_cond_bb));

  /* Path outer_cond_bb->(outer2) needs to be merged into path
     outer_cond_bb->(outer_to_inner)->inner_cond_bb->(inner_taken)
     and probability of inner_not_taken updated.  */

  inner_cond_bb->count = outer_cond_bb->count;

  inner_taken->probability = outer2->probability
			     + outer_to_inner->probability
			       * inner_taken->probability;
  inner_not_taken->probability = profile_probability::always ()
				 - inner_taken->probability;

  outer_to_inner->probability = profile_probability::always ();
  outer2->probability = profile_probability::never ();
}
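
/* Worked example of the update above (illustrative numbers only): if
   outer_to_inner had probability 0.6, outer2 0.4 and inner_taken 0.5,
   then after combining inner_taken becomes 0.4 + 0.6 * 0.5 = 0.7,
   inner_not_taken becomes 1 - 0.7 = 0.3, and the outer block falls
   through to the inner one unconditionally.  */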

/* If-convert on a and pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   inner_inv, outer_inv and result_inv indicate whether the conditions
   are inverted.
   Returns true if the edges to the common else basic-block were merged.  */

static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
		   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
  gimple_stmt_iterator gsi;
  gimple *inner_stmt, *outer_stmt;
  gcond *inner_cond, *outer_cond;
  tree name1, name2, bit1, bit2, bits1, bits2;

  inner_stmt = last_stmt (inner_cond_bb);
  if (!inner_stmt
      || gimple_code (inner_stmt) != GIMPLE_COND)
    return false;
  inner_cond = as_a <gcond *> (inner_stmt);

  outer_stmt = last_stmt (outer_cond_bb);
  if (!outer_stmt
      || gimple_code (outer_stmt) != GIMPLE_COND)
    return false;
  outer_cond = as_a <gcond *> (outer_stmt);
  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & (bit1 | bit2) == (bit1 | bit2).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
		       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
			build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
				     true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
		       boolean_type_node, t2, t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing double bit test to ");
	  print_generic_expr (dump_file, name1);
	  fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
	  print_generic_expr (dump_file, bit1);
	  fprintf (dump_file, ") | (1 << ");
	  print_generic_expr (dump_file, bit2);
	  fprintf (dump_file, ")\n");
	}

      return true;
    }
  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
	   && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      gimple_stmt_iterator gsi;
      tree t;

      /* Find the common name which is bit-tested.  */
      if (name1 == name2)
	;
      else if (bits1 == bits2)
	{
	  std::swap (name2, bits2);
	  std::swap (name1, bits1);
	}
      else if (name1 == bits2)
	std::swap (name2, bits2);
      else if (bits1 == name2)
	std::swap (name1, bits1);
      else
	return false;

      /* As we strip non-widening conversions in finding a common
	 name that is tested make sure to end up with an integral
	 type for building the bit operations.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
	  >= TYPE_PRECISION (TREE_TYPE (bits2)))
	{
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  name1 = fold_convert (TREE_TYPE (bits1), name1);
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  bits2 = fold_convert (TREE_TYPE (bits1), bits2);
	}
      else
	{
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  name1 = fold_convert (TREE_TYPE (bits2), name1);
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  bits1 = fold_convert (TREE_TYPE (bits2), bits1);
	}
      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
		       build_int_cst (TREE_TYPE (t), 0));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);
      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing bits or bits test to ");
	  print_generic_expr (dump_file, name1);
	  fprintf (dump_file, " & T != 0\nwith temporary T = ");
	  print_generic_expr (dump_file, bits1);
	  fprintf (dump_file, " | ");
	  print_generic_expr (dump_file, bits2);
	  fprintf (dump_file, "\n");
	}

      return true;
    }
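
  /* An editorial illustration (not itself part of the pass): for
     case 2, source of the form

       if ((x & 0x0f) || (x & 0xf0))
	 foo ();

     becomes a single bit test

       if (x & 0xff)
	 foo ();

     assuming both tests are performed on the same name X.  */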
  /* See if we have two comparisons that we can merge into one.  */
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
	   && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible).  */
      if (inner_inv)
	inner_cond_code = invert_tree_comparison (inner_cond_code,
	  HONOR_NANS (gimple_cond_lhs (inner_cond)));
      if (inner_cond_code == ERROR_MARK)
	return false;
      if (outer_inv)
	outer_cond_code = invert_tree_comparison (outer_cond_code,
	  HONOR_NANS (gimple_cond_lhs (outer_cond)));
      if (outer_cond_code == ERROR_MARK)
	return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

      if (!(t = maybe_fold_and_comparisons (inner_cond_code,
					    gimple_cond_lhs (inner_cond),
					    gimple_cond_rhs (inner_cond),
					    outer_cond_code,
					    gimple_cond_lhs (outer_cond),
					    gimple_cond_rhs (outer_cond))))
	{
	  tree t1, t2;
	  gimple_stmt_iterator gsi;
	  if (!LOGICAL_OP_NON_SHORT_CIRCUIT || flag_sanitize_coverage)
	    return false;
	  /* Only do this optimization if the inner bb contains only the
	     conditional.  */
	  if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
	    return false;
	  t1 = fold_build2_loc (gimple_location (inner_cond),
				inner_cond_code,
				boolean_type_node,
				gimple_cond_lhs (inner_cond),
				gimple_cond_rhs (inner_cond));
	  t2 = fold_build2_loc (gimple_location (outer_cond),
				outer_cond_code,
				boolean_type_node,
				gimple_cond_lhs (outer_cond),
				gimple_cond_rhs (outer_cond));
	  t = fold_build2_loc (gimple_location (inner_cond),
			       TRUTH_AND_EXPR, boolean_type_node, t1, t2);
	  if (result_inv)
	    {
	      t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
	      result_inv = false;
	    }
	  gsi = gsi_for_stmt (inner_cond);
	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL,
					  true, GSI_SAME_STMT);
	}
      if (result_inv)
	t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);
      update_profile_after_ifcombine (inner_cond_bb, outer_cond_bb);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing two comparisons to ");
	  print_generic_expr (dump_file, t);
	  fprintf (dump_file, "\n");
	}

      return true;
    }

  return false;
}
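
/* An editorial illustration (not itself part of the pass): for case 3,
   source of the form

     if (a > 10)
       if (a > 20)
	 foo ();

   is folded by maybe_fold_and_comparisons into the single test a > 20;
   if folding fails and LOGICAL_OP_NON_SHORT_CIRCUIT permits, the two
   comparisons are still merged into one branch on (a > 10) & (a > 20).  */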

/* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
   dispatch to the appropriate if-conversion helper for a particular
   set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
   PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB.  */

static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
			 basic_block then_bb, basic_block else_bb,
			 basic_block phi_pred_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto else_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	   <else_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
				false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto else_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	   <else_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
				false);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto then_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (q) goto then_bb; else goto ...;
	   <then_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
				true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto then_bb;
	   <inner_cond_bb>
	     if (q) goto then_bb; else goto ...;
	   <then_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
				true);
    }

  return false;
}

/* Recognize a CFG pattern and dispatch to the appropriate
   if-conversion helper.  We start with BB as the innermost
   worker basic-block.  Returns true if a transformation was done.  */

static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  basic_block then_bb = NULL, else_bb = NULL;

  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block which entry edges we can merge.  That is:
       if (a || b)
	 ;
     and
       if (a && b)
	 ;
     This requires a single predecessor of the inner cond_bb.  */
  if (single_pred_p (inner_cond_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      basic_block outer_cond_bb = single_pred (inner_cond_bb);

      if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
				   then_bb, else_bb, inner_cond_bb))
	return true;

      if (forwarder_block_to (else_bb, then_bb))
	{
	  /* Other possibilities for the && form, if else_bb is an
	     empty forwarder block to then_bb.  Compared to the above simpler
	     forms this can be treated as if then_bb and else_bb were swapped,
	     and the corresponding inner_cond_bb not inverted because of that.
	     For same_phi_args_p we look at equality of arguments between
	     edge from outer_cond_bb and the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, else_bb))
	    return true;
	}
      else if (forwarder_block_to (then_bb, else_bb))
	{
	  /* Other possibilities for the || form, if then_bb is an
	     empty forwarder block to else_bb.  Compared to the above simpler
	     forms this can be treated as if then_bb and else_bb were swapped,
	     and the corresponding inner_cond_bb not inverted because of that.
	     For same_phi_args_p we look at equality of arguments between
	     edge from outer_cond_bb and the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, then_bb))
	    return true;
	}
    }

  return false;
}

/* Main entry for the tree if-conversion pass.  */

namespace {

const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_tree_ifcombine

unsigned int
pass_tree_ifcombine::execute (function *fun)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = single_pred_before_succ_order ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Search every basic block for COND_EXPR we may be able to optimize.

     We walk the blocks in order that guarantees that a block with
     a single predecessor is processed after the predecessor.
     This ensures that we collapse outer ifs before visiting the
     inner ones, and also that we do not try to visit a removed
     block.  This is opposite of PHI-OPT, because we cascade the
     combining rather than cascading PHIs.  */
  for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
    {
      basic_block bb = bbs[i];
      gimple *stmt = last_stmt (bb);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_COND)
	if (tree_ssa_ifcombine_bb (bb))
	  {
	    /* Clear range info from all stmts in BB which is now executed
	       conditional on an always true/false condition.  */
	    reset_flow_sensitive_info_in_bb (bb);
	    cfg_changed |= true;
	  }
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_tree_ifcombine (gcc::context *ctxt)
{
  return new pass_tree_ifcombine (ctxt);
}