/* Combining of if-expressions on trees.
   Copyright (C) 2007-2015 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"

/* rtl is needed only because arm back-end requires it for
   BRANCH_COST.  */
#include "rtl.h"
#include "double-int.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-pass.h"
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif
/* This pass combines COND_EXPRs to simplify control flow.  It
   currently recognizes bit tests and comparisons in chains that
   represent logical and or logical or of two COND_EXPRs.

   It does so by walking basic blocks in an approximate reverse
   post-dominator order and trying to match CFG patterns that
   represent logical and or logical or of two COND_EXPRs.
   Transformations are done if the COND_EXPR conditions match
   one of

     1. two single bit tests X & (1 << Yn) (for logical and)

     2. two bit tests X & Yn (for logical or)

     3. two comparisons X OPn Y (for logical or)

   To simplify this pass, removing basic blocks and dead code
   is left to CFG cleanup and DCE.  */
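
/* As an illustrative sketch (hypothetical input, not taken from a
   testcase), a source fragment like

     int f (unsigned int x, int a, int b)
     {
       if (x & (1 << a))
         if (x & (1 << b))
           return 1;
       return 0;
     }

   matches pattern 1 above: the two nested single-bit tests are merged
   into one test of the form x & T == T with T = (1 << a) | (1 << b),
   and removal of the now-redundant block and edges is left to CFG
   cleanup and DCE.  */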
/* Recognize an if-then-else CFG pattern starting to match with the
   COND_BB basic-block containing the COND_EXPR.  The recognized
   then and else blocks are stored to *THEN_BB and *ELSE_BB.  If
   *THEN_BB and/or *ELSE_BB are already set, they are required to
   match the then and else basic-blocks to make the pattern match.
   Returns true if the pattern matched, false otherwise.  */
static bool
recognize_if_then_else (basic_block cond_bb,
                        basic_block *then_bb, basic_block *else_bb)
{
  edge t, e;

  if (EDGE_COUNT (cond_bb->succs) != 2)
    return false;

  /* Find the then/else edges.  */
  t = EDGE_SUCC (cond_bb, 0);
  e = EDGE_SUCC (cond_bb, 1);
  if (!(t->flags & EDGE_TRUE_VALUE))
    {
      edge tmp = t;
      t = e;
      e = tmp;
    }
  if (!(t->flags & EDGE_TRUE_VALUE)
      || !(e->flags & EDGE_FALSE_VALUE))
    return false;

  /* Check if the edge destinations point to the required block.  */
  if (*then_bb
      && t->dest != *then_bb)
    return false;
  if (*else_bb
      && e->dest != *else_bb)
    return false;

  if (!*then_bb)
    *then_bb = t->dest;
  if (!*else_bb)
    *else_bb = e->dest;

  return true;
}
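
/* For illustration (hypothetical GIMPLE, block numbers invented), a
   conditional block such as

     <bb 2>:
       if (a_1 != 0)
         goto <bb 3>;
       else
         goto <bb 4>;

   is matched by recognize_if_then_else with COND_BB = <bb 2>; the true
   edge destination <bb 3> is stored to *THEN_BB and the false edge
   destination <bb 4> to *ELSE_BB.  */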
/* Return true if the basic block BB has no side effects, false
   otherwise.  */

static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_has_side_effects (stmt)
	  || gimple_could_trap_p (stmt)
	  || gimple_vuse (stmt))
	return false;
    }

  return true;
}
/* Return true if BB is an empty forwarder block to TO_BB.  */

static bool
forwarder_block_to (basic_block bb, basic_block to_bb)
{
  return empty_block_p (bb)
	 && single_succ_p (bb)
	 && single_succ (bb) == to_bb;
}
/* Return true if all PHI node arguments in DEST for the edges from BB1
   or BB2 to DEST are the same; this makes the CFG merge point free
   from side-effects.  Return false otherwise.  */

static bool
same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
{
  edge e1 = find_edge (bb1, dest);
  edge e2 = find_edge (bb2, dest);
  gphi_iterator gsi;
  gphi *phi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi.phi ();
      if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
			    PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
	return false;
    }

  return true;
}
/* Return the best representative SSA name for CANDIDATE which is used
   in a bit test.  */

static tree
get_name_for_bit_test (tree candidate)
{
  /* Skip single-use names in favor of using the name from a
     non-widening conversion definition.  */
  if (TREE_CODE (candidate) == SSA_NAME
      && has_single_use (candidate))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (candidate);
      if (is_gimple_assign (def_stmt)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
	{
	  if (TYPE_PRECISION (TREE_TYPE (candidate))
	      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
	    return gimple_assign_rhs1 (def_stmt);
	}
    }

  return candidate;
}
/* Recognize a single bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bit
   in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = D.1986_6 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
	 find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      while (is_gimple_assign (stmt)
	     && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
		      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
		 || gimple_assign_ssa_name_copy_p (stmt)))
	stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
	{
	  /* op0 & (1 << op1) */
	  *bit = gimple_assign_rhs2 (stmt);
	  *name = gimple_assign_rhs1 (stmt);
	}
      else
	{
	  /* t & 1 */
	  *bit = integer_zero_node;
	  *name = get_name_for_bit_test (orig_name);
	}

      return true;
    }

  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      *bit = build_int_cst (integer_type_node,
			    tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
	 specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs2 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs1 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}
    }

  return false;
}
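
/* A hedged illustration of the first form above (hypothetical source,
   SSA names invented): for

     if ((state >> control1) & 1)
       ...

   the shift feeding the mask-with-1 is found by looking through copies
   and conversions, and recognize_single_bit_test reports
   *NAME = state, *BIT = control1.  */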
/* Recognize a bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bits
   in *BITS.  The COND_EXPR computes *NAME & *BITS.
   Returns true if the pattern matched, false otherwise.  */

static bool
recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
    return false;

  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
  *bits = gimple_assign_rhs2 (stmt);

  return true;
}
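
/* Hedged illustration (hypothetical source): for

     if (flags & 0x3)
       ...

   the GIMPLE_COND compares the BIT_AND result against zero and
   recognize_bits_test reports *NAME = flags, *BITS = 0x3.  Two such
   tests of the same name (pattern 2 in the overview comment) can then
   be merged into a single test flags & (bits1 | bits2) != 0.  */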
/* If-convert on an and pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   inner_inv, outer_inv and result_inv indicate whether the conditions
   are inverted.
   Returns true if the edges to the common else basic-block were merged.  */
static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
		   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
  gimple_stmt_iterator gsi;
  gimple inner_stmt, outer_stmt;
  gcond *inner_cond, *outer_cond;
  tree name1, name2, bit1, bit2, bits1, bits2;

  inner_stmt = last_stmt (inner_cond_bb);
  if (!inner_stmt
      || gimple_code (inner_stmt) != GIMPLE_COND)
    return false;
  inner_cond = as_a <gcond *> (inner_stmt);

  outer_stmt = last_stmt (outer_cond_bb);
  if (!outer_stmt
      || gimple_code (outer_stmt) != GIMPLE_COND)
    return false;
  outer_cond = as_a <gcond *> (outer_stmt);
  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & (bit1 | bit2) == (bit1 | bit2).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
		       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
			build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
				     true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
		       boolean_type_node, t2, t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing double bit test to ");
	  print_generic_expr (dump_file, name1, 0);
	  fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
	  print_generic_expr (dump_file, bit1, 0);
	  fprintf (dump_file, ") | (1 << ");
	  print_generic_expr (dump_file, bit2, 0);
	  fprintf (dump_file, ")\n");
	}

      return true;
    }
  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
	   && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      gimple_stmt_iterator gsi;
      tree t;

      /* Find the common name which is bit-tested.  */
      if (name1 == name2)
	;
      else if (bits1 == bits2)
	{
	  t = name2;
	  name2 = bits2;
	  bits2 = t;
	  t = name1;
	  name1 = bits1;
	  bits1 = t;
	}
      else if (name1 == bits2)
	{
	  t = name2;
	  name2 = bits2;
	  bits2 = t;
	}
      else if (bits1 == name2)
	{
	  t = name1;
	  name1 = bits1;
	  bits1 = t;
	}
      else
	return false;

      /* As we strip non-widening conversions in finding a common
	 name that is tested make sure to end up with an integral
	 type for building the bit operations.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
	  >= TYPE_PRECISION (TREE_TYPE (bits2)))
	{
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  name1 = fold_convert (TREE_TYPE (bits1), name1);
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  bits2 = fold_convert (TREE_TYPE (bits1), bits2);
	}
      else
	{
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  name1 = fold_convert (TREE_TYPE (bits2), name1);
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  bits1 = fold_convert (TREE_TYPE (bits2), bits1);
	}

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
		       build_int_cst (TREE_TYPE (t), 0));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing bits or bits test to ");
	  print_generic_expr (dump_file, name1, 0);
	  fprintf (dump_file, " & T != 0\nwith temporary T = ");
	  print_generic_expr (dump_file, bits1, 0);
	  fprintf (dump_file, " | ");
	  print_generic_expr (dump_file, bits2, 0);
	  fprintf (dump_file, "\n");
	}

      return true;
    }
  /* See if we have two comparisons that we can merge into one.  */
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
	   && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible).  */
      if (inner_inv)
	inner_cond_code = invert_tree_comparison (inner_cond_code,
	  HONOR_NANS (gimple_cond_lhs (inner_cond)));
      if (inner_cond_code == ERROR_MARK)
	return false;
      if (outer_inv)
	outer_cond_code = invert_tree_comparison (outer_cond_code,
	  HONOR_NANS (gimple_cond_lhs (outer_cond)));
      if (outer_cond_code == ERROR_MARK)
	return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

      if (!(t = maybe_fold_and_comparisons (inner_cond_code,
					    gimple_cond_lhs (inner_cond),
					    gimple_cond_rhs (inner_cond),
					    outer_cond_code,
					    gimple_cond_lhs (outer_cond),
					    gimple_cond_rhs (outer_cond))))
	{
	  tree t1, t2;
	  gimple_stmt_iterator gsi;
	  if (!LOGICAL_OP_NON_SHORT_CIRCUIT)
	    return false;
	  /* Only do this optimization if the inner bb contains only the
	     conditional.  */
	  if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
	    return false;
	  t1 = fold_build2_loc (gimple_location (inner_cond),
				inner_cond_code,
				boolean_type_node,
				gimple_cond_lhs (inner_cond),
				gimple_cond_rhs (inner_cond));
	  t2 = fold_build2_loc (gimple_location (outer_cond),
				outer_cond_code,
				boolean_type_node,
				gimple_cond_lhs (outer_cond),
				gimple_cond_rhs (outer_cond));
	  t = fold_build2_loc (gimple_location (inner_cond),
			       TRUTH_AND_EXPR, boolean_type_node, t1, t2);
	  if (result_inv)
	    {
	      t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
	      result_inv = false;
	    }
	  gsi = gsi_for_stmt (inner_cond);
	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL, true,
					  GSI_SAME_STMT);
	}
      if (result_inv)
	t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing two comparisons to ");
	  print_generic_expr (dump_file, t, 0);
	  fprintf (dump_file, "\n");
	}

      return true;
    }

  return false;
}
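
/* A hedged illustration of the two-comparison case (hypothetical input,
   SSA names invented): for

     if (a_2 > 5)
       if (a_2 > 3)
	 x_3 = 1;

   maybe_fold_and_comparisons can reduce "a_2 > 5 && a_2 > 3" to the
   single condition "a_2 > 5", which is installed on the inner
   GIMPLE_COND; the outer condition is set to a constant (true in the
   non-inverted case) so that CFG cleanup removes the extra block.  If
   the two conditions do not fold, they may still be combined into one
   gimplified TRUTH_AND_EXPR condition when LOGICAL_OP_NON_SHORT_CIRCUIT
   allows it and the inner block contains only the conditional.  */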
/* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
   dispatch to the appropriate if-conversion helper for a particular
   set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
   PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB.  */
static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
			 basic_block then_bb, basic_block else_bb,
			 basic_block phi_pred_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto else_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	   <else_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
				false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto else_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	   <else_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
				false);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto then_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (q) goto then_bb; else goto ...;
	   <then_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
				true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto then_bb;
	   <inner_cond_bb>
	     if (q) goto then_bb; else goto ...;
	   <then_bb>
	     ...
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
				true);
    }

  return false;
}
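
/* Hedged example of the dispatch above (hypothetical blocks and SSA
   names): for a CFG shaped like

     <bb 2>  if (p_1) goto <bb 3>; else goto <bb 4>;    <- outer_cond_bb
     <bb 3>  if (q_2) goto <bb 5>; else goto <bb 4>;    <- inner_cond_bb
     <bb 4>  ...                                        <- else_bb

   the first pattern above fires and ifcombine_ifandif combines the two
   conditions as p_1 && q_2, provided <bb 3> has no side effects and the
   PHI arguments in <bb 4> agree on both incoming edges.  */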
/* Recognize a CFG pattern and dispatch to the appropriate
   if-conversion helper.  We start with BB as the innermost
   worker basic-block.  Returns true if a transformation was done.  */
static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  basic_block then_bb = NULL, else_bb = NULL;

  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block which entry edges we can merge.  That is:
       if (a || b)
	 ;
     and
       if (a && b)
	 ;
     This requires a single predecessor of the inner cond_bb.  */
  if (single_pred_p (inner_cond_bb))
    {
      basic_block outer_cond_bb = single_pred (inner_cond_bb);

      if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
				   then_bb, else_bb, inner_cond_bb))
	return true;

      if (forwarder_block_to (else_bb, then_bb))
	{
	  /* Other possibilities for the && form, if else_bb is
	     empty forwarder block to then_bb.  Compared to the above simpler
	     forms this can be treated as if then_bb and else_bb were swapped,
	     and the corresponding inner_cond_bb not inverted because of that.
	     For same_phi_args_p we look at equality of arguments between
	     edge from outer_cond_bb and the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, else_bb))
	    return true;
	}
      else if (forwarder_block_to (then_bb, else_bb))
	{
	  /* Other possibilities for the || form, if then_bb is
	     empty forwarder block to else_bb.  Compared to the above simpler
	     forms this can be treated as if then_bb and else_bb were swapped,
	     and the corresponding inner_cond_bb not inverted because of that.
	     For same_phi_args_p we look at equality of arguments between
	     edge from outer_cond_bb and the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, then_bb))
	    return true;
	}
    }

  return false;
}
/* Main entry for the tree if-conversion pass.  */

namespace {

const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_tree_ifcombine
unsigned int
pass_tree_ifcombine::execute (function *fun)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = single_pred_before_succ_order ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Search every basic block for COND_EXPRs we may be able to optimize.

     We walk the blocks in an order that guarantees that a block with
     a single predecessor is processed after the predecessor.
     This ensures that we collapse outer ifs before visiting the
     inner ones, and also that we do not try to visit a removed
     block.  This is the opposite of PHI-OPT, because we cascade the
     combining rather than cascading PHIs.  */
  for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
    {
      basic_block bb = bbs[i];
      gimple stmt = last_stmt (bb);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_COND)
	cfg_changed |= tree_ssa_ifcombine_bb (bb);
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}

} // anon namespace

gimple_opt_pass *
make_pass_tree_ifcombine (gcc::context *ctxt)
{
  return new pass_tree_ifcombine (ctxt);
}