/* Combining of if-expressions on trees.
   Copyright (C) 2007-2015 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"

/* rtl is needed only because arm back-end requires it for
   BRANCH_COST.  */
#include "fold-const.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-pass.h"

#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif
/* This pass combines COND_EXPRs to simplify control flow.  It
   currently recognizes bit tests and comparisons in chains that
   represent a logical AND or a logical OR of two COND_EXPRs.

   It does so by walking basic blocks in an approximate reverse
   post-dominator order and trying to match CFG patterns that
   represent a logical AND or a logical OR of two COND_EXPRs.
   Transformations are done if the COND_EXPR conditions match
   either

     1. two single bit tests X & (1 << Yn) (for logical AND)

     2. two bit tests X & Yn (for logical OR)

     3. two comparisons X OPn Y (for logical OR)

   To simplify this pass, removing basic blocks and dead code
   is left to CFG cleanup and DCE.  */
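
/* Editor's illustration (not part of the original sources; the variable
   names x, a, b and the call foo () are hypothetical): at the source level,
   case 1 corresponds to combining a chain such as

     if (x & (1 << a))
       if (x & (1 << b))
         foo ();

   into a single test of the form x & T == T with T = (1 << a) | (1 << b),
   which is the rewrite performed by ifcombine_ifandif below.  */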
/* Recognize an if-then-else CFG pattern starting to match with the
   COND_BB basic-block containing the COND_EXPR.  The recognized
   then and else blocks are stored to *THEN_BB and *ELSE_BB.  If
   *THEN_BB and/or *ELSE_BB are already set, they are required to
   match the then and else basic-blocks to make the pattern match.
   Returns true if the pattern matched, false otherwise.  */
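
/* Editor's note (illustrative usage only, mirroring tree_ssa_ifcombine_bb
   below): a caller typically clears the output arguments and lets this
   function fill them in, e.g.

     basic_block then_bb = NULL, else_bb = NULL;
     if (recognize_if_then_else (cond_bb, &then_bb, &else_bb))
       ...  */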
static bool
recognize_if_then_else (basic_block cond_bb,
                        basic_block *then_bb, basic_block *else_bb)
{
  edge t, e;

  if (EDGE_COUNT (cond_bb->succs) != 2)
    return false;

  /* Find the then/else edges.  */
  t = EDGE_SUCC (cond_bb, 0);
  e = EDGE_SUCC (cond_bb, 1);
  if (!(t->flags & EDGE_TRUE_VALUE))
    std::swap (t, e);
  if (!(t->flags & EDGE_TRUE_VALUE)
      || !(e->flags & EDGE_FALSE_VALUE))
    return false;

  /* Check if the edge destinations point to the required block.  */
  if (*then_bb
      && t->dest != *then_bb)
    return false;
  if (*else_bb
      && e->dest != *else_bb)
    return false;

  if (!*then_bb)
    *then_bb = t->dest;
  if (!*else_bb)
    *else_bb = e->dest;

  return true;
}

/* Verify if the basic block BB does not have side-effects.  Return
   true in this case, else false.  */

static bool
bb_no_side_effects_p (basic_block bb)
{
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
        continue;

      if (gimple_has_side_effects (stmt)
          || gimple_could_trap_p (stmt)
          || gimple_vuse (stmt))
        return false;
    }

  return true;
}

/* Return true if BB is an empty forwarder block to TO_BB.  */

static bool
forwarder_block_to (basic_block bb, basic_block to_bb)
{
  return empty_block_p (bb)
         && single_succ_p (bb)
         && single_succ (bb) == to_bb;
}

/* Verify if all PHI node arguments in DEST for edges from BB1 or
   BB2 to DEST are the same.  This makes the CFG merge point
   free from side-effects.  Return true in this case, else false.  */
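
/* Editor's illustration (hypothetical GIMPLE, not from the original
   sources): given in DEST

     # x_1 = PHI <a_2 (BB1), a_2 (BB2), b_3 (BB4)>

   the arguments on the edges from BB1 and BB2 are both a_2, so
   same_phi_args_p (BB1, BB2, DEST) returns true; if the BB2 argument
   were b_3 instead, it would return false.  */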
static bool
same_phi_args_p (basic_block bb1, basic_block bb2, basic_block dest)
{
  edge e1 = find_edge (bb1, dest);
  edge e2 = find_edge (bb2, dest);
  gphi_iterator gsi;
  gphi *phi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi.phi ();
      if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi, e1),
                            PHI_ARG_DEF_FROM_EDGE (phi, e2), 0))
        return false;
    }

  return true;
}

/* Return the best representative SSA name for CANDIDATE which is used
   in a bit test.  */

static tree
get_name_for_bit_test (tree candidate)
{
  /* Skip single-use names in favor of using the name from a
     non-widening conversion definition.  */
  if (TREE_CODE (candidate) == SSA_NAME
      && has_single_use (candidate))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (candidate);
      if (is_gimple_assign (def_stmt)
          && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt)))
        {
          if (TYPE_PRECISION (TREE_TYPE (candidate))
              <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
            return gimple_assign_rhs1 (def_stmt);
        }
    }

  return candidate;
}

/* Recognize a single bit test pattern in GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bit
   in *BIT.  The GIMPLE_COND computes *NAME & (1 << *BIT).
   Returns true if the pattern matched, false otherwise.  */
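
/* Editor's illustration (hypothetical GIMPLE names, not from the original
   comment): for

     _1 = x_2(D) & 4;
     if (_1 != 0)

   the pattern matches with *NAME = x_2(D) and *BIT = 2, since 4 == 1 << 2;
   this is the integer_pow2p / tree_log2 case handled below.  */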
static bool
recognize_single_bit_test (gcond *cond, tree *name, tree *bit, bool inv)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt))
    return false;

  /* Look at which bit is tested.  One form to recognize is
     D.1985_5 = state_3(D) >> control1_4(D);
     D.1986_6 = (int) D.1985_5;
     D.1987_7 = D.1986_6 & 1;
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && integer_onep (gimple_assign_rhs2 (stmt))
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
    {
      tree orig_name = gimple_assign_rhs1 (stmt);

      /* Look through copies and conversions to eventually
         find the stmt that computes the shift.  */
      stmt = SSA_NAME_DEF_STMT (orig_name);

      while (is_gimple_assign (stmt)
             && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
                  && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt)))
                      <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt))))
                  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
                 || gimple_assign_ssa_name_copy_p (stmt)))
        stmt = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));

      /* If we found such, decompose it.  */
      if (is_gimple_assign (stmt)
          && gimple_assign_rhs_code (stmt) == RSHIFT_EXPR)
        {
          /* op0 & (1 << op1) */
          *bit = gimple_assign_rhs2 (stmt);
          *name = gimple_assign_rhs1 (stmt);
        }
      else
        {
          *bit = integer_zero_node;
          *name = get_name_for_bit_test (orig_name);
        }

      return true;
    }

  /* Another form is
     D.1987_7 = op0 & (1 << CST)
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && integer_pow2p (gimple_assign_rhs2 (stmt)))
    {
      *name = gimple_assign_rhs1 (stmt);
      *bit = build_int_cst (integer_type_node,
                            tree_log2 (gimple_assign_rhs2 (stmt)));
      return true;
    }

  /* Another form is
     D.1986_6 = 1 << control1_4(D)
     D.1987_7 = op0 & D.1986_6
     if (D.1987_7 != 0)  */
  if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR
      && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
      && TREE_CODE (gimple_assign_rhs2 (stmt)) == SSA_NAME)
    {
      gimple tmp;

      /* Both arguments of the BIT_AND_EXPR can be the single-bit
         specifying expression.  */
      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt));
      if (is_gimple_assign (tmp)
          && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
          && integer_onep (gimple_assign_rhs1 (tmp)))
        {
          *name = gimple_assign_rhs2 (stmt);
          *bit = gimple_assign_rhs2 (tmp);
          return true;
        }

      tmp = SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt));
      if (is_gimple_assign (tmp)
          && gimple_assign_rhs_code (tmp) == LSHIFT_EXPR
          && integer_onep (gimple_assign_rhs1 (tmp)))
        {
          *name = gimple_assign_rhs1 (stmt);
          *bit = gimple_assign_rhs2 (tmp);
          return true;
        }
    }

  return false;
}

/* Recognize a bit test pattern in a GIMPLE_COND and its defining
   statements.  Store the name being tested in *NAME and the bits
   in *BITS.  The COND_EXPR computes *NAME & *BITS.
   Returns true if the pattern matched, false otherwise.  */
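
/* Editor's illustration (hypothetical GIMPLE names, not from the original
   comment): for

     _1 = flags_2(D) & 48;
     if (_1 != 0)

   the pattern matches with *NAME = flags_2(D) and *BITS = 48.  */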
static bool
recognize_bits_test (gcond *cond, tree *name, tree *bits, bool inv)
{
  gimple stmt;

  /* Get at the definition of the result of the bit test.  */
  if (gimple_cond_code (cond) != (inv ? EQ_EXPR : NE_EXPR)
      || TREE_CODE (gimple_cond_lhs (cond)) != SSA_NAME
      || !integer_zerop (gimple_cond_rhs (cond)))
    return false;
  stmt = SSA_NAME_DEF_STMT (gimple_cond_lhs (cond));
  if (!is_gimple_assign (stmt)
      || gimple_assign_rhs_code (stmt) != BIT_AND_EXPR)
    return false;

  *name = get_name_for_bit_test (gimple_assign_rhs1 (stmt));
  *bits = gimple_assign_rhs2 (stmt);

  return true;
}

/* If-convert on an and pattern with a common else block.  The inner
   if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
   inner_inv, outer_inv and result_inv indicate whether the conditions
   are inverted.
   Returns true if the edges to the common else basic-block were merged.  */
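
/* Editor's illustration (hypothetical source, not from the original
   comment): for the two-comparison case handled below, a chain such as

     if (a > 3)
       if (a > 7)
         foo ();

   may be merged by maybe_fold_and_comparisons into the single condition
   a > 7, which is then installed on the inner GIMPLE_COND while the outer
   GIMPLE_COND is rewritten to a constant truth value and left to CFG
   cleanup.  */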
static bool
ifcombine_ifandif (basic_block inner_cond_bb, bool inner_inv,
                   basic_block outer_cond_bb, bool outer_inv, bool result_inv)
{
  gimple_stmt_iterator gsi;
  gimple inner_stmt, outer_stmt;
  gcond *inner_cond, *outer_cond;
  tree name1, name2, bit1, bit2, bits1, bits2;

  inner_stmt = last_stmt (inner_cond_bb);
  if (!inner_stmt
      || gimple_code (inner_stmt) != GIMPLE_COND)
    return false;
  inner_cond = as_a <gcond *> (inner_stmt);

  outer_stmt = last_stmt (outer_cond_bb);
  if (!outer_stmt
      || gimple_code (outer_stmt) != GIMPLE_COND)
    return false;
  outer_cond = as_a <gcond *> (outer_stmt);

  /* See if we test a single bit of the same name in both tests.  In
     that case remove the outer test, merging both else edges,
     and change the inner one to test for
     name & (bit1 | bit2) == (bit1 | bit2).  */
  if (recognize_single_bit_test (inner_cond, &name1, &bit1, inner_inv)
      && recognize_single_bit_test (outer_cond, &name2, &bit2, outer_inv)
      && name1 == name2)
    {
      tree t, t2;

      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                       build_int_cst (TREE_TYPE (name1), 1), bit1);
      t2 = fold_build2 (LSHIFT_EXPR, TREE_TYPE (name1),
                        build_int_cst (TREE_TYPE (name1), 1), bit2);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), t, t2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t2 = force_gimple_operand_gsi (&gsi, t2, true, NULL_TREE,
                                     true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR,
                       boolean_type_node, t2, t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
        outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing double bit test to ");
          print_generic_expr (dump_file, name1, 0);
          fprintf (dump_file, " & T == T\nwith temporary T = (1 << ");
          print_generic_expr (dump_file, bit1, 0);
          fprintf (dump_file, ") | (1 << ");
          print_generic_expr (dump_file, bit2, 0);
          fprintf (dump_file, ")\n");
        }

      return true;
    }
  /* See if we have two bit tests of the same name in both tests.
     In that case remove the outer test and change the inner one to
     test for name & (bits1 | bits2) != 0.  */
  else if (recognize_bits_test (inner_cond, &name1, &bits1, !inner_inv)
           && recognize_bits_test (outer_cond, &name2, &bits2, !outer_inv))
    {
      gimple_stmt_iterator gsi;
      tree t;

      /* Find the common name which is bit-tested.  */
      if (name1 == name2)
        ;
      else if (bits1 == bits2)
        {
          std::swap (name2, bits2);
          std::swap (name1, bits1);
        }
      else if (name1 == bits2)
        std::swap (name2, bits2);
      else if (bits1 == name2)
        std::swap (name1, bits1);
      else
        return false;

      /* As we strip non-widening conversions in finding a common
         name that is tested, make sure to end up with an integral
         type for building the bit operations.  */
      if (TYPE_PRECISION (TREE_TYPE (bits1))
          >= TYPE_PRECISION (TREE_TYPE (bits2)))
        {
          bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
          name1 = fold_convert (TREE_TYPE (bits1), name1);
          bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
          bits2 = fold_convert (TREE_TYPE (bits1), bits2);
        }
      else
        {
          bits2 = fold_convert (unsigned_type_for (TREE_TYPE (bits2)), bits2);
          name1 = fold_convert (TREE_TYPE (bits2), name1);
          bits1 = fold_convert (unsigned_type_for (TREE_TYPE (bits1)), bits1);
          bits1 = fold_convert (TREE_TYPE (bits2), bits1);
        }

      gsi = gsi_for_stmt (inner_cond);
      t = fold_build2 (BIT_IOR_EXPR, TREE_TYPE (name1), bits1, bits2);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (name1), name1, t);
      t = force_gimple_operand_gsi (&gsi, t, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
      t = fold_build2 (result_inv ? NE_EXPR : EQ_EXPR, boolean_type_node, t,
                       build_int_cst (TREE_TYPE (t), 0));
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
        outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing bits or bits test to ");
          print_generic_expr (dump_file, name1, 0);
          fprintf (dump_file, " & T != 0\nwith temporary T = ");
          print_generic_expr (dump_file, bits1, 0);
          fprintf (dump_file, " | ");
          print_generic_expr (dump_file, bits2, 0);
          fprintf (dump_file, "\n");
        }

      return true;
    }
  /* See if we have two comparisons that we can merge into one.  */
  else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond)) == tcc_comparison
           && TREE_CODE_CLASS (gimple_cond_code (outer_cond)) == tcc_comparison)
    {
      tree t;
      enum tree_code inner_cond_code = gimple_cond_code (inner_cond);
      enum tree_code outer_cond_code = gimple_cond_code (outer_cond);

      /* Invert comparisons if necessary (and possible).  */
      if (inner_inv)
        inner_cond_code = invert_tree_comparison (inner_cond_code,
          HONOR_NANS (gimple_cond_lhs (inner_cond)));
      if (inner_cond_code == ERROR_MARK)
        return false;
      if (outer_inv)
        outer_cond_code = invert_tree_comparison (outer_cond_code,
          HONOR_NANS (gimple_cond_lhs (outer_cond)));
      if (outer_cond_code == ERROR_MARK)
        return false;
      /* Don't return false so fast, try maybe_fold_or_comparisons?  */

      if (!(t = maybe_fold_and_comparisons (inner_cond_code,
                                            gimple_cond_lhs (inner_cond),
                                            gimple_cond_rhs (inner_cond),
                                            outer_cond_code,
                                            gimple_cond_lhs (outer_cond),
                                            gimple_cond_rhs (outer_cond))))
        {
          tree t1, t2;
          gimple_stmt_iterator gsi;
          if (!LOGICAL_OP_NON_SHORT_CIRCUIT)
            return false;
          /* Only do this optimization if the inner bb contains only the conditional.  */
          if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb)))
            return false;
          t1 = fold_build2_loc (gimple_location (inner_cond),
                                inner_cond_code,
                                boolean_type_node,
                                gimple_cond_lhs (inner_cond),
                                gimple_cond_rhs (inner_cond));
          t2 = fold_build2_loc (gimple_location (outer_cond),
                                outer_cond_code,
                                boolean_type_node,
                                gimple_cond_lhs (outer_cond),
                                gimple_cond_rhs (outer_cond));
          t = fold_build2_loc (gimple_location (inner_cond),
                               TRUTH_AND_EXPR, boolean_type_node, t1, t2);
          if (result_inv)
            {
              t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
              result_inv = false;
            }
          gsi = gsi_for_stmt (inner_cond);
          t = force_gimple_operand_gsi_1 (&gsi, t, is_gimple_condexpr, NULL, true,
                                          GSI_SAME_STMT);
        }
      if (result_inv)
        t = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (t), t);
      t = canonicalize_cond_expr_cond (t);
      if (!t)
        return false;
      gimple_cond_set_condition_from_tree (inner_cond, t);
      update_stmt (inner_cond);

      /* Leave CFG optimization to cfg_cleanup.  */
      gimple_cond_set_condition_from_tree (outer_cond,
        outer_inv ? boolean_false_node : boolean_true_node);
      update_stmt (outer_cond);

      if (dump_file)
        {
          fprintf (dump_file, "optimizing two comparisons to ");
          print_generic_expr (dump_file, t, 0);
          fprintf (dump_file, "\n");
        }

      return true;
    }

  return false;
}

/* Helper function for tree_ssa_ifcombine_bb.  Recognize a CFG pattern and
   dispatch to the appropriate if-conversion helper for a particular
   set of INNER_COND_BB, OUTER_COND_BB, THEN_BB and ELSE_BB.
   PHI_PRED_BB should be one of INNER_COND_BB, THEN_BB or ELSE_BB.  */
static bool
tree_ssa_ifcombine_bb_1 (basic_block inner_cond_bb, basic_block outer_cond_bb,
                         basic_block then_bb, basic_block else_bb,
                         basic_block phi_pred_bb)
{
  /* The && form is characterized by a common else_bb with
     the two edges leading to it mergable.  The latter is
     guaranteed by matching PHI arguments in the else_bb and
     the inner cond_bb having no side-effects.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &else_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto inner_cond_bb; else goto else_bb;
           <inner_cond_bb>
             if (p) goto ...; else goto else_bb;
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, false,
                                false);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != else_bb
      && recognize_if_then_else (outer_cond_bb, &else_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, else_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto else_bb; else goto inner_cond_bb;
           <inner_cond_bb>
             if (p) goto ...; else goto else_bb;
       */
      return ifcombine_ifandif (inner_cond_bb, false, outer_cond_bb, true,
                                false);
    }

  /* The || form is characterized by a common then_bb with the
     two edges leading to it mergable.  The latter is guaranteed
     by matching PHI arguments in the then_bb and the inner cond_bb
     having no side-effects.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &then_bb, &inner_cond_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto then_bb; else goto inner_cond_bb;
           <inner_cond_bb>
             if (q) goto then_bb; else goto ...;
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, true,
                                true);
    }

  /* And a version where the outer condition is negated.  */
  if (phi_pred_bb != then_bb
      && recognize_if_then_else (outer_cond_bb, &inner_cond_bb, &then_bb)
      && same_phi_args_p (outer_cond_bb, phi_pred_bb, then_bb)
      && bb_no_side_effects_p (inner_cond_bb))
    {
      /* We have
           <outer_cond_bb>
             if (q) goto inner_cond_bb; else goto then_bb;
           <inner_cond_bb>
             if (q) goto then_bb; else goto ...;
       */
      return ifcombine_ifandif (inner_cond_bb, true, outer_cond_bb, false,
                                true);
    }

  return false;
}

/* Recognize a CFG pattern and dispatch to the appropriate
   if-conversion helper.  We start with BB as the innermost
   worker basic-block.  Returns true if a transformation was done.  */

static bool
tree_ssa_ifcombine_bb (basic_block inner_cond_bb)
{
  basic_block then_bb = NULL, else_bb = NULL;

  if (!recognize_if_then_else (inner_cond_bb, &then_bb, &else_bb))
    return false;

  /* Recognize && and || of two conditions with a common
     then/else block which entry edges we can merge, that is,
     the CFG forms corresponding to if (a && b) and if (a || b).
     This requires a single predecessor of the inner cond_bb.  */
  if (single_pred_p (inner_cond_bb))
    {
      basic_block outer_cond_bb = single_pred (inner_cond_bb);

      if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb,
                                   then_bb, else_bb, inner_cond_bb))
        return true;

      if (forwarder_block_to (else_bb, then_bb))
        {
          /* Other possibilities for the && form, if else_bb is an
             empty forwarder block to then_bb.  Compared to the above simpler
             forms this can be treated as if then_bb and else_bb were swapped,
             and the corresponding inner_cond_bb not inverted because of that.
             For same_phi_args_p we look at equality of arguments between
             edge from outer_cond_bb and the forwarder block.  */
          if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
                                       then_bb, else_bb))
            return true;
        }
      else if (forwarder_block_to (then_bb, else_bb))
        {
          /* Other possibilities for the || form, if then_bb is an
             empty forwarder block to else_bb.  Compared to the above simpler
             forms this can be treated as if then_bb and else_bb were swapped,
             and the corresponding inner_cond_bb not inverted because of that.
             For same_phi_args_p we look at equality of arguments between
             edge from outer_cond_bb and the forwarder block.  */
          if (tree_ssa_ifcombine_bb_1 (inner_cond_bb, outer_cond_bb, else_bb,
                                       then_bb, then_bb))
            return true;
        }
    }

  return false;
}

/* Main entry for the tree if-conversion pass.  */

const pass_data pass_data_tree_ifcombine =
{
  GIMPLE_PASS, /* type */
  "ifcombine", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_IFCOMBINE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tree_ifcombine : public gimple_opt_pass
{
public:
  pass_tree_ifcombine (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tree_ifcombine, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_tree_ifcombine

unsigned int
pass_tree_ifcombine::execute (function *fun)
{
  basic_block *bbs;
  bool cfg_changed = false;
  int i;

  bbs = single_pred_before_succ_order ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Search every basic block for COND_EXPRs we may be able to optimize.

     We walk the blocks in an order that guarantees that a block with
     a single predecessor is processed after the predecessor.
     This ensures that we collapse outer ifs before visiting the
     inner ones, and also that we do not try to visit a removed
     block.  This is opposite of PHI-OPT, because we cascade the
     combining rather than cascading PHIs.  */
  for (i = n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
    {
      basic_block bb = bbs[i];
      gimple stmt = last_stmt (bb);

      if (stmt
          && gimple_code (stmt) == GIMPLE_COND)
        cfg_changed |= tree_ssa_ifcombine_bb (bb);
    }

  free (bbs);

  return cfg_changed ? TODO_cleanup_cfg : 0;
}

gimple_opt_pass *
make_pass_tree_ifcombine (gcc::context *ctxt)
{
  return new pass_tree_ifcombine (ctxt);
}