/* Combining of if-expressions on trees.
   Copyright (C) 2007-2015 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
/* rtl is needed only because the arm back-end requires it for
   BRANCH_COST.  */
#include "rtl.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-pass.h"
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
/* This pass combines COND_EXPRs to simplify control flow.  It
   currently recognizes bit tests and comparisons in chains that
   represent a logical AND or a logical OR of two COND_EXPRs.

   It does so by walking basic blocks in an approximate reverse
   post-dominator order and trying to match CFG patterns that
   represent a logical AND or a logical OR of two COND_EXPRs.
   Transformations are done if the COND_EXPR conditions match
   either

     1. two single bit tests X & (1 << Yn) (for logical and)

     2. two bit tests X & Yn (for logical or)

     3. two comparisons X OPn Y (for logical or)

   To simplify this pass, removing basic blocks and dead code
   is left to CFG cleanup and DCE.  */
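
/* Illustrative sketch (added for exposition, not part of the original
   pass documentation; the variable names are made up):  for case 1 above,
   a chain like

     if (x & (1 << a))
       if (x & (1 << b))
	 foo ();

   with a common implicit else is rewritten into the single test

     if ((x & ((1 << a) | (1 << b))) == ((1 << a) | (1 << b)))
       foo ();

   while cases 2 and 3 similarly merge two bit tests resp. two
   comparisons of the same operand into one GIMPLE_COND.  */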
/* Recognize an if-then-else CFG pattern starting to match with the
   COND_BB basic-block containing the COND_EXPR.  The recognized
   then and else blocks are stored to *THEN_BB and *ELSE_BB.  If
   *THEN_BB and/or *ELSE_BB are already set, they are required to
   match the then and else basic-blocks to make the pattern match.
   Returns true if the pattern matched, false otherwise.  */
static bool
recognize_if_then_else (basic_block cond_bb,
			basic_block *then_bb, basic_block *else_bb)
{
  edge t, e;

  if (EDGE_COUNT (cond_bb->succs) != 2)
    return false;

  /* Find the then/else edges.  */
  t = EDGE_SUCC (cond_bb, 0);
  e = EDGE_SUCC (cond_bb, 1);
  if (!(t->flags & EDGE_TRUE_VALUE))
    std::swap (t, e);
  if (!(t->flags & EDGE_TRUE_VALUE)
      || !(e->flags & EDGE_FALSE_VALUE))
    return false;

  /* Check if the edge destinations point to the required block.  */
  if (*then_bb
      && t->dest != *then_bb)
    return false;
  if (*else_bb
      && e->dest != *else_bb)
    return false;

  if (!*then_bb)
    *then_bb = t->dest;
  if (!*else_bb)
    *else_bb = e->dest;

  return true;
}
/* Verify that the basic block BB does not have side-effects.  Return
   true in this case, else false.  */

static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      if (gimple_has_side_effects (stmt)
	  || gimple_could_trap_p (stmt)
	  || gimple_vuse (stmt))
	return false;
    }

  return true;
}
/* Return true if BB is an empty forwarder block to TO_BB.  */

static bool
forwarder_block_to (basic_block bb, basic_block to_bb)
{
  return empty_block_p (bb)
	 && single_succ_p (bb)
	 && single_succ (bb) == to_bb;
}
/* Verify that all PHI node arguments in DEST for edges from BB1 or
   BB2 to DEST are the same.  This makes the CFG merge point
   free from side-effects.  Return true in this case, else false.  */

static bool
same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
{
  edge e1 = find_edge (bb1, dest);
  edge e2 = find_edge (bb2, dest);
  gphi_iterator gsi;
  gphi *phi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi.phi ();
      if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
			    PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
	return false;
    }

  return true;
}
/* Return the best representative SSA name for CANDIDATE which is used
   in a bit test.  */

static tree
get_name_for_bit_test (tree candidate)
{
  /* Skip single-use names in favor of using the name from a
     non-widening conversion definition.  */
  if (TREE_CODE (candidate) == SSA_NAME
      && has_single_use (candidate))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (candidate);
      if (is_gimple_assign (def_stmt)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
	{
	  if (TYPE_PRECISION (TREE_TYPE (candidate))
	      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
	    return gimple_assign_rhs1 (def_stmt);
	}
    }

  return candidate;
}
/* Recognize a single bit test pattern in GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bit
   in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
   Returns true if the pattern matched, false otherwise.  */
static bool
recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = op0 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
	 find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      while (is_gimple_assign (stmt)
	     && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
		      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
		  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
		 || gimple_assign_ssa_name_copy_p (stmt)))
	stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
	  && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
	{
	  /* op0 & (1 << op1) */
	  *bit = gimple_assign_rhs2 (stmt);
	  *name = gimple_assign_rhs1 (stmt);
	}
      else
	{
	  *bit = integer_zero_node;
	  *name = get_name_for_bit_test (orig_name);
	}

      return true;
    }

  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      *bit = build_int_cst (integer_type_node,
			    tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
	 specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs2 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
	  && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
	  && integer_onep (gimple_assign_rhs1 (tmp)))
	{
	  *name = gimple_assign_rhs1 (stmt);
	  *bit = gimple_assign_rhs2 (tmp);
	  return true;
	}
    }

  return false;
}
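
/* For illustration (an assumed source-level view, added for exposition):
   the forms recognized above correspond to tests such as
   ((state >> control) & 1) != 0, (op0 & (1 << CST)) != 0 and
   (op0 & (1 << control)) != 0, all of which decompose into a tested
   NAME and a BIT.  */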
/* Recognize a bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bits
   in *BITS.  The COND_EXPR computes *NAME & *BITS.
   Returns true if the pattern matched, false otherwise.  */
static bool
recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
    return false;

  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
  *bits = gimple_assign_rhs2 (stmt);

  return true;
}
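
/* For example (illustrative GIMPLE with made-up SSA names, added for
   exposition):

     _1 = x_2 & 12;
     if (_1 != 0)

   is recognized with *NAME = x_2 and *BITS = 12.  */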
/* If-convert on an and pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   inner_inv, outer_inv and result_inv indicate whether the conditions
   are inverted.
   Returns true if the edges to the common else basic-block were merged.  */
static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
		   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
  gimple_stmt_iterator gsi;
  gimple inner_stmt, outer_stmt;
  gcond *inner_cond, *outer_cond;
  tree name1, name2, bit1, bit2, bits1, bits2;

  inner_stmt = last_stmt (inner_cond_bb);
  if (!inner_stmt
      || gimple_code (inner_stmt) != GIMPLE_COND)
    return false;
  inner_cond = as_a <gcond *> (inner_stmt);

  outer_stmt = last_stmt (outer_cond_bb);
  if (!outer_stmt
      || gimple_code (outer_stmt) != GIMPLE_COND)
    return false;
  outer_cond = as_a <gcond *> (outer_stmt);

  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & (bit1 | bit2) == (bit1 | bit2).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
		       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
			build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
				     true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
		       boolean_type_node, t2, t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing double bit test to ");
	  print_generic_expr (dump_file, name1, 0);
	  fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
	  print_generic_expr (dump_file, bit1, 0);
	  fprintf (dump_file, ") | (1 << ");
	  print_generic_expr (dump_file, bit2, 0);
	  fprintf (dump_file, ")\n");
	}

      return true;
    }
  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
	   && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      gimple_stmt_iterator gsi;
      tree t;

      /* Find the common name which is bit-tested.  */
      if (name1 == name2)
	;
      else if (bits1 == bits2)
	{
	  std::swap (name2, bits2);
	  std::swap (name1, bits1);
	}
      else if (name1 == bits2)
	std::swap (name2, bits2);
      else if (bits1 == name2)
	std::swap (name1, bits1);
      else
	return false;

      /* As we strip non-widening conversions in finding a common
	 name that is tested make sure to end up with an integral
	 type for building the bit operations.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
	  >= TYPE_PRECISION (TREE_TYPE (bits2)))
	{
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  name1 = fold_convert (TREE_TYPE (bits1), name1);
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  bits2 = fold_convert (TREE_TYPE (bits1), bits2);
	}
      else
	{
	  bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
	  name1 = fold_convert (TREE_TYPE (bits2), name1);
	  bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
	  bits1 = fold_convert (TREE_TYPE (bits2), bits1);
	}

      /* Do it.  */
      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
				    true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
		       build_int_cst (TREE_TYPE (t), 0));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing bits or bits test to ");
	  print_generic_expr (dump_file, name1, 0);
	  fprintf (dump_file, " & T != 0\nwith temporary T = ");
	  print_generic_expr (dump_file, bits1, 0);
	  fprintf (dump_file, " | ");
	  print_generic_expr (dump_file, bits2, 0);
	  fprintf (dump_file, "\n");
	}

      return true;
    }
  /* See if we have two comparisons that we can merge into one.  */
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
	   && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible).  */
      if (inner_inv)
	inner_cond_code = invert_tree_comparison (inner_cond_code,
	  HONOR_NANS (gimple_cond_lhs (inner_cond)));
      if (inner_cond_code == ERROR_MARK)
	return false;
      if (outer_inv)
	outer_cond_code = invert_tree_comparison (outer_cond_code,
	  HONOR_NANS (gimple_cond_lhs (outer_cond)));
      if (outer_cond_code == ERROR_MARK)
	return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

      if (!(t = maybe_fold_and_comparisons (inner_cond_code,
					    gimple_cond_lhs (inner_cond),
					    gimple_cond_rhs (inner_cond),
					    outer_cond_code,
					    gimple_cond_lhs (outer_cond),
					    gimple_cond_rhs (outer_cond))))
	{
	  tree t1, t2;
	  gimple_stmt_iterator gsi;
	  if (!LOGICAL_OP_NON_SHORT_CIRCUIT)
	    return false;
	  /* Only do this optimization if the inner bb contains only the
	     conditional.  */
	  if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
	    return false;
	  t1 = fold_build2_loc (gimple_location (inner_cond),
				inner_cond_code,
				boolean_type_node,
				gimple_cond_lhs (inner_cond),
				gimple_cond_rhs (inner_cond));
	  t2 = fold_build2_loc (gimple_location (outer_cond),
				outer_cond_code,
				boolean_type_node,
				gimple_cond_lhs (outer_cond),
				gimple_cond_rhs (outer_cond));
	  t = fold_build2_loc (gimple_location (inner_cond),
			       TRUTH_AND_EXPR, boolean_type_node, t1, t2);
	  if (result_inv)
	    {
	      t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
	      result_inv = false;
	    }
	  gsi = gsi_for_stmt (inner_cond);
	  t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL,
					  true, GSI_SAME_STMT);
	}
      if (result_inv)
	t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
	return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
	outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
	{
	  fprintf (dump_file, "optimizing two comparisons to ");
	  print_generic_expr (dump_file, t, 0);
	  fprintf (dump_file, "\n");
	}

      return true;
    }

  return false;
}
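
/* Illustrative example (added for exposition, not taken from the original
   sources): for the comparison case above, the pair of conditions
   a_1 > 1 and a_1 > 2 guarding a common else block can be folded by
   maybe_fold_and_comparisons into the single condition a_1 > 2.  */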
/* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
   dispatch to the appropriate if-conversion helper for a particular
   set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
   PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB.  */
static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
			 basic_block then_bb, basic_block else_bb,
			 basic_block phi_pred_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto else_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	 */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
				false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto else_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (p) goto ...; else goto else_bb;
	     ...
	 */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
				false);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto then_bb; else goto inner_cond_bb;
	   <inner_cond_bb>
	     if (q) goto then_bb; else goto ...;
	 */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
				true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
	   <outer_cond_bb>
	     if (q) goto inner_cond_bb; else goto then_bb;
	   <inner_cond_bb>
	     if (q) goto then_bb; else goto ...;
	 */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
				true);
    }

  return false;
}
/* Recognize a CFG pattern and dispatch to the appropriate
   if-conversion helper.  We start with BB as the innermost
   worker basic-block.  Returns true if a transformation was done.  */
static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  basic_block then_bb = NULL, else_bb = NULL;

  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block which entry edges we can merge.  That is:
       if (a || b)
	 ;
     and
       if (a && b)
	 ;
     This requires a single predecessor of the inner cond_bb.  */
  if (single_pred_p (inner_cond_bb))
    {
      basic_block outer_cond_bb = single_pred (inner_cond_bb);

      if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
				   then_bb, else_bb, inner_cond_bb))
	return true;

      if (forwarder_block_to (else_bb, then_bb))
	{
	  /* Other possibilities for the && form, if else_bb is an
	     empty forwarder block to then_bb.  Compared to the above simpler
	     forms this can be treated as if then_bb and else_bb were swapped,
	     and the corresponding inner_cond_bb not inverted because of that.
	     For same_phi_args_p we look at equality of arguments between
	     edge from outer_cond_bb and the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, else_bb))
	    return true;
	}
      else if (forwarder_block_to (then_bb, else_bb))
	{
	  /* Other possibilities for the || form, if then_bb is an
	     empty forwarder block to else_bb.  Compared to the above simpler
	     forms this can be treated as if then_bb and else_bb were swapped,
	     and the corresponding inner_cond_bb not inverted because of that.
	     For same_phi_args_p we look at equality of arguments between
	     edge from outer_cond_bb and the forwarder block.  */
	  if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
				       then_bb, then_bb))
	    return true;
	}
    }

  return false;
}
/* Main entry for the tree if-conversion pass.  */

namespace {

const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};
class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_tree_ifcombine
unsigned int
pass_tree_ifcombine::execute (function *fun)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = single_pred_before_succ_order ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Search every basic block for COND_EXPRs we may be able to optimize.

     We walk the blocks in an order that guarantees that a block with
     a single predecessor is processed after the predecessor.
     This ensures that we collapse outer ifs before visiting the
     inner ones, and also that we do not try to visit a removed
     block.  This is opposite of PHI-OPT, because we cascade the
     combining rather than cascading PHIs.  */
  for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
    {
      basic_block bb = bbs[i];
      gimple stmt = last_stmt (bb);

      if (stmt
	  && gimple_code (stmt) == GIMPLE_COND)
	cfg_changed |= tree_ssa_ifcombine_bb (bb);
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}
} // anon namespace

gimple_opt_pass *
make_pass_tree_ifcombine (gcc::context *ctxt)
{
  return new pass_tree_ifcombine (ctxt);
}