/* Combining of if-expressions on trees.
   Copyright (C) 2007-2015 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
/* rtl is needed only because arm back-end requires it for
   BRANCH_COST.  */
#include "rtl.h"
#include "double-int.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-pass.h"
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif
/* This pass combines COND_EXPRs to simplify control flow.  It
   currently recognizes bit tests and comparisons in chains that
   represent logical and or logical or of two COND_EXPRs.

   It does so by walking basic blocks in an approximate reverse
   post-dominator order and trying to match CFG patterns that
   represent logical and or logical or of two COND_EXPRs.
   Transformations are done if the COND_EXPR conditions match
   either

     1. two single bit tests X & (1 << Yn) (for logical and)

     2. two bit tests X & Yn (for logical or)

     3. two comparisons X OPn Y (for logical or)

   To simplify this pass, removing basic blocks and dead code
   is left to CFG cleanup and DCE.  */
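
/* As an illustrative sketch (not part of the original comment): at the
   source level a chain such as

     if (x & (1 << a))
       if (x & (1 << b))
         foo ();

   matches case 1 and is rewritten into the equivalent of

     tmp = (1 << a) | (1 << b);
     if ((x & tmp) == tmp)
       foo ();

   with the emptied blocks and the now-trivial outer test left for CFG
   cleanup and DCE to remove.  */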
/* Recognize an if-then-else CFG pattern starting to match with the
   COND_BB basic-block containing the COND_EXPR.  The recognized
   then and else blocks are stored to *THEN_BB and *ELSE_BB.  If
   *THEN_BB and/or *ELSE_BB are already set, they are required to
   match the then and else basic-blocks to make the pattern match.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_if_then_else (basic_block cond_bb,
                        basic_block *then_bb, basic_block *else_bb)
{
  edge t, e;

  if (EDGE_COUNT (cond_bb->succs) != 2)
    return false;

  /* Find the then/else edges.  */
  t = EDGE_SUCC (cond_bb, 0);
  e = EDGE_SUCC (cond_bb, 1);
  if (!(t->flags & EDGE_TRUE_VALUE))
    std::swap (t, e);
  if (!(t->flags & EDGE_TRUE_VALUE)
      || !(e->flags & EDGE_FALSE_VALUE))
    return false;

  /* Check if the edge destinations point to the required block.  */
  if (*then_bb
      && t->dest != *then_bb)
    return false;
  if (*else_bb
      && e->dest != *else_bb)
    return false;

  *then_bb = t->dest;
  *else_bb = e->dest;

  return true;
}
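
/* For illustration (a sketch, not from the original sources): for a
   COND_BB ending in

     if (a_1 > 0) goto <bb 3>; else goto <bb 4>;

   the pattern matches with *THEN_BB = <bb 3> and *ELSE_BB = <bb 4>,
   because the true edge leads to <bb 3> and the false edge to <bb 4>.  */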
/* Verify if the basic block BB does not have side-effects.  Return
   true in this case, else false.  */

static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_has_side_effects (stmt)
          || gimple_could_trap_p (stmt)
          || gimple_vuse (stmt))
        return false;
    }

  return true;
}
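
/* Illustrative example (a sketch): a block containing only

     _1 = a_2(D) & 1;

   passes this predicate, while a block containing a load such as

     _3 = *p_4(D);

   does not, because the load carries a virtual use (and may trap).  */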
/* Return true if BB is an empty forwarder block to TO_BB.  */

static bool
forwarder_block_to (basic_block bb, basic_block to_bb)
{
  return empty_block_p (bb)
         && single_succ_p (bb)
         && single_succ (bb) == to_bb;
}
/* Verify if all PHI node arguments in DEST for edges from BB1 or
   BB2 to DEST are the same.  This makes the CFG merge point
   free from side-effects.  Return true in this case, else false.  */

static bool
same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
{
  edge e1 = find_edge (bb1, dest);
  edge e2 = find_edge (bb2, dest);
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
                            PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
        return false;
    }

  return true;
}
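
/* Illustrative example (a sketch): with a PHI node in DEST such as

     # x_1 = PHI <5(bb1), 5(bb2), 7(bb3)>

   same_phi_args_p (bb1, bb2, DEST) is true because both edges provide
   the value 5, whereas same_phi_args_p (bb1, bb3, DEST) is false.  */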
/* Return the best representative SSA name for CANDIDATE which is used
   in a bit test.  */

static tree
get_name_for_bit_test (tree candidate)
{
  /* Skip single-use names in favor of using the name from a
     non-widening conversion definition.  */
  if (TREE_CODE (candidate) == SSA_NAME
      && has_single_use (candidate))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (candidate);
      if (is_gimple_assign (def_stmt)
          && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
        {
          if (TYPE_PRECISION (TREE_TYPE (candidate))
              <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
            return gimple_assign_rhs1 (def_stmt);
        }
    }

  return candidate;
}
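
/* Illustrative example (a sketch): for

     _1 = (unsigned char) x_2(D);   <- non-widening conversion, single use
     _3 = _1 & 1;

   asking for the name behind _1 yields x_2(D), so bit tests written
   against _1 and against x_2 can agree on a common name.  */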
/* Recognize a single bit test pattern in GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bit
   in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = D.1986_6 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
         find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      while (is_gimple_assign (stmt)
             && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
                      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
                  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
                 || gimple_assign_ssa_name_copy_p (stmt)))
        stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
        {
          /* op0 & (1 << op1) */
          *bit = gimple_assign_rhs2 (stmt);
          *name = gimple_assign_rhs1 (stmt);
        }
      else
        {
          /* No shift found: bit 0 of the name is tested.  */
          *bit = integer_zero_node;
          *name = get_name_for_bit_test (orig_name);
        }

      return true;
    }

  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      *bit = build_int_cst (integer_type_node,
                            tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
         specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
          && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
          && integer_onep (gimple_assign_rhs1 (tmp)))
        {
          *name = gimple_assign_rhs2 (stmt);
          *bit = gimple_assign_rhs2 (tmp);
          return true;
        }

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
          && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
          && integer_onep (gimple_assign_rhs1 (tmp)))
        {
          *name = gimple_assign_rhs1 (stmt);
          *bit = gimple_assign_rhs2 (tmp);
          return true;
        }
    }

  return false;
}
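
/* Illustrative summary (a sketch): at the source level all of

     if (x & 1)
     if ((x >> b) & 1)
     if (x & 4)
     if (x & (1 << b))

   are recognized, yielding *NAME = x and *BIT = 0, b, 2 and b
   respectively.  */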
/* Recognize a bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bits
   in *BITS.  The COND_EXPR computes *NAME & *BITS.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
    return false;

  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
  *bits = gimple_assign_rhs2 (stmt);

  return true;
}
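
/* Illustrative example (a sketch): for

     _1 = x_2(D) & 12;
     if (_1 != 0)

   the pattern matches with *NAME = x_2(D) and *BITS = 12 (for INV
   false; with INV true the condition must be _1 == 0 instead).  */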
/* If-convert on an and pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   inner_inv, outer_inv and result_inv indicate whether the conditions
   are inverted.
   Returns true if the edges to the common else basic-block were merged.  */

static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
                   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
  gimple_stmt_iterator gsi;
  gimple inner_stmt, outer_stmt;
  gcond *inner_cond, *outer_cond;
  tree name1, name2, bit1, bit2, bits1, bits2;

  inner_stmt = last_stmt (inner_cond_bb);
  if (!inner_stmt
      || gimple_code (inner_stmt) != GIMPLE_COND)
    return false;
  inner_cond = as_a <gcond *> (inner_stmt);

  outer_stmt = last_stmt (outer_cond_bb);
  if (!outer_stmt
      || gimple_code (outer_stmt) != GIMPLE_COND)
    return false;
  outer_cond = as_a <gcond *> (outer_stmt);
  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & (bit1 | bit2) == (bit1 | bit2).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                        build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
                                     true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
                       boolean_type_node, t2, t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
        outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing double bit test to ");
          print_generic_expr (dump_file, name1, 0);
          fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
          print_generic_expr (dump_file, bit1, 0);
          fprintf (dump_file, ") | (1 << ");
          print_generic_expr (dump_file, bit2, 0);
          fprintf (dump_file, ")\n");
        }

      return true;
    }
  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
           && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      gimple_stmt_iterator gsi;
      tree t;

      /* Find the common name which is bit-tested.  */
      if (name1 == name2)
        ;
      else if (bits1 == bits2)
        {
          std::swap (name2, bits2);
          std::swap (name1, bits1);
        }
      else if (name1 == bits2)
        std::swap (name2, bits2);
      else if (bits1 == name2)
        std::swap (name1, bits1);
      else
        return false;

      /* As we strip non-widening conversions in finding a common
         name that is tested make sure to end up with an integral
         type for building the bit operations.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
          >= TYPE_PRECISION (TREE_TYPE (bits2)))
        {
          bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
          name1 = fold_convert (TREE_TYPE (bits1), name1);
          bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
          bits2 = fold_convert (TREE_TYPE (bits1), bits2);
        }
      else
        {
          bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
          name1 = fold_convert (TREE_TYPE (bits2), name1);
          bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
          bits1 = fold_convert (TREE_TYPE (bits2), bits1);
        }

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
                       build_int_cst (TREE_TYPE (t), 0));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
        outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing bits or bits test to ");
          print_generic_expr (dump_file, name1, 0);
          fprintf (dump_file, " & T != 0\nwith temporary T = ");
          print_generic_expr (dump_file, bits1, 0);
          fprintf (dump_file, " | ");
          print_generic_expr (dump_file, bits2, 0);
          fprintf (dump_file, "\n");
        }

      return true;
    }
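
  /* The bits-test case above, illustrated (a sketch, not from the
     original sources): a chain corresponding to

       if (x & 3)
         goto common;
       else if (x & 12)
         goto common;

     becomes the equivalent of

       if (x & 15)
         goto common;

     i.e. name & (bits1 | bits2) != 0, with the outer condition folded
     to a constant so that CFG cleanup removes it.  */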
  /* See if we have two comparisons that we can merge into one.  */
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
           && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible).  */
      if (inner_inv)
        inner_cond_code = invert_tree_comparison (inner_cond_code,
          HONOR_NANS (gimple_cond_lhs (inner_cond)));
      if (inner_cond_code == ERROR_MARK)
        return false;
      if (outer_inv)
        outer_cond_code = invert_tree_comparison (outer_cond_code,
          HONOR_NANS (gimple_cond_lhs (outer_cond)));
      if (outer_cond_code == ERROR_MARK)
        return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

      if (!(t = maybe_fold_and_comparisons (inner_cond_code,
                                            gimple_cond_lhs (inner_cond),
                                            gimple_cond_rhs (inner_cond),
                                            outer_cond_code,
                                            gimple_cond_lhs (outer_cond),
                                            gimple_cond_rhs (outer_cond))))
        {
          tree t1, t2;
          gimple_stmt_iterator gsi;
          if (!LOGICAL_OP_NON_SHORT_CIRCUIT)
            return false;
          /* Only do this optimization if the inner bb contains only the
             conditional.  */
          if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
            return false;
          t1 = fold_build2_loc (gimple_location (inner_cond),
                                inner_cond_code,
                                boolean_type_node,
                                gimple_cond_lhs (inner_cond),
                                gimple_cond_rhs (inner_cond));
          t2 = fold_build2_loc (gimple_location (outer_cond),
                                outer_cond_code,
                                boolean_type_node,
                                gimple_cond_lhs (outer_cond),
                                gimple_cond_rhs (outer_cond));
          t = fold_build2_loc (gimple_location (inner_cond),
                               TRUTH_AND_EXPR, boolean_type_node, t1, t2);
          if (result_inv)
            {
              t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
              result_inv = false;
            }
          gsi = gsi_for_stmt (inner_cond);
          t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL, true,
                                          GSI_SAME_STMT);
        }
      if (result_inv)
        t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
        outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing two comparisons to ");
          print_generic_expr (dump_file, t, 0);
          fprintf (dump_file, "\n");
        }

      return true;
    }

  return false;
}
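
/* The comparison case above, illustrated (a sketch): for

     if (a > 5)
       if (a > 3)
         foo ();

   maybe_fold_and_comparisons reduces the conjunction to a > 5, which
   then replaces the inner condition; if it cannot fold the pair and
   LOGICAL_OP_NON_SHORT_CIRCUIT holds, the two conditions are instead
   combined into a single branch on (a > 5) & (a > 3).  */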
/* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
   dispatch to the appropriate if-conversion helper for a particular
   set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
   PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB.  */

static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
                         basic_block then_bb, basic_block else_bb,
                         basic_block phi_pred_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto inner_cond_bb; else goto else_bb;
           <inner_cond_bb>
             if (p) goto ...; else goto else_bb;
             ...
           <else_bb>
             ...  */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
                                false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto else_bb; else goto inner_cond_bb;
           <inner_cond_bb>
             if (p) goto ...; else goto else_bb;
             ...
           <else_bb>
             ...  */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
                                false);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto then_bb; else goto inner_cond_bb;
           <inner_cond_bb>
             if (q) goto then_bb; else goto ...;
           <then_bb>
             ...  */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
                                true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto inner_cond_bb; else goto then_bb;
           <inner_cond_bb>
             if (q) goto then_bb; else goto ...;
           <then_bb>
             ...  */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
                                true);
    }

  return false;
}
/* Recognize a CFG pattern and dispatch to the appropriate
   if-conversion helper.  We start with BB as the innermost
   worker basic-block.  Returns true if a transformation was done.  */

static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  basic_block then_bb = NULL, else_bb = NULL;

  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block which entry edges we can merge.  That is:
       if (a || b)
         ;
     and
       if (a && b)
         ;
     This requires a single predecessor of the inner cond_bb.  */
  if (single_pred_p (inner_cond_bb))
    {
      basic_block outer_cond_bb = single_pred (inner_cond_bb);

      if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
                                   then_bb, else_bb, inner_cond_bb))
        return true;

      if (forwarder_block_to (else_bb, then_bb))
        {
          /* Other possibilities for the && form, if else_bb is
             empty forwarder block to then_bb.  Compared to the above simpler
             forms this can be treated as if then_bb and else_bb were swapped,
             and the corresponding inner_cond_bb not inverted because of that.
             For same_phi_args_p we look at equality of arguments between
             edge from outer_cond_bb and the forwarder block.  */
          if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
                                       then_bb, else_bb))
            return true;
        }
      else if (forwarder_block_to (then_bb, else_bb))
        {
          /* Other possibilities for the || form, if then_bb is
             empty forwarder block to else_bb.  Compared to the above simpler
             forms this can be treated as if then_bb and else_bb were swapped,
             and the corresponding inner_cond_bb not inverted because of that.
             For same_phi_args_p we look at equality of arguments between
             edge from outer_cond_bb and the forwarder block.  */
          if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
                                       then_bb, then_bb))
            return true;
        }
    }

  return false;
}
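
/* Illustration of the forwarder handling above (a sketch): with

     <inner_cond_bb>  if (p) goto then_bb; else goto else_bb;
     <else_bb>        goto then_bb;   <- empty forwarder

   else_bb behaves like a second entry to then_bb, so the helpers are
   re-run with the roles of then_bb and else_bb exchanged, comparing PHI
   arguments against the edge from the forwarder block.  */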
/* Main entry for the tree if-conversion pass.  */

namespace {

const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_tree_ifcombine
unsigned int
pass_tree_ifcombine::execute (function *fun)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = single_pred_before_succ_order ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Search every basic block for COND_EXPR we may be able to optimize.

     We walk the blocks in order that guarantees that a block with
     a single predecessor is processed after the predecessor.
     This ensures that we collapse outer ifs before visiting the
     inner ones, and also that we do not try to visit a removed
     block.  This is the opposite of PHI-OPT, because we cascade
     the combining rather than cascading PHIs.  */
  for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
    {
      basic_block bb = bbs[i];
      gimple stmt = last_stmt (bb);

      if (stmt
          && gimple_code (stmt) == GIMPLE_COND)
        cfg_changed |= tree_ssa_ifcombine_bb (bb);
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}
} // anon namespace

gimple_opt_pass *
make_pass_tree_ifcombine (gcc::context *ctxt)
{
  return new pass_tree_ifcombine (ctxt);
}