1 /* Combining of if-expressions on trees.
2 Copyright (C) 2007-2013 Free Software Foundation, Inc.
3 Contributed by Richard Guenther <rguenther@suse.de>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
25 /* rtl is needed only because arm back-end requires it for
30 #include "stor-layout.h"
31 #include "basic-block.h"
32 #include "tree-pretty-print.h"
34 #include "gimple-iterator.h"
35 #include "gimplify-me.h"
36 #include "gimple-ssa.h"
38 #include "tree-phinodes.h"
39 #include "ssa-iterators.h"
40 #include "tree-pass.h"
/* Fallback when the target does not define its own value: treat
   logical-op short-circuiting as undesirable (i.e. allow combining)
   whenever branches are expensive enough.  */
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
                false) >= 2)
#endif
/* This pass combines COND_EXPRs to simplify control flow.  It
   currently recognizes bit tests and comparisons in chains that
   represent logical and or logical or of two COND_EXPRs.

   It does so by walking basic blocks in an approximate reverse
   post-dominator order and trying to match CFG patterns that
   represent logical and or logical or of two COND_EXPRs.
   Transformations are done if the COND_EXPR conditions match
   either

     1. two single bit tests X & (1 << Yn) (for logical and)

     2. two bit tests X & Yn (for logical or)

     3. two comparisons X OPn Y (for logical or)

   To simplify this pass, removing basic blocks and dead code
   is left to CFG cleanup and DCE.  */
68 /* Recognize a if-then-else CFG pattern starting to match with the
69 COND_BB basic-block containing the COND_EXPR. The recognized
70 then end else blocks are stored to *THEN_BB and *ELSE_BB. If
71 *THEN_BB and/or *ELSE_BB are already set, they are required to
72 match the then and else basic-blocks to make the pattern match.
73 Returns true if the pattern matched, false otherwise. */
76 recognize_if_then_else (basic_block cond_bb
,
77 basic_block
*then_bb
, basic_block
*else_bb
)
81 if (EDGE_COUNT (cond_bb
->succs
) != 2)
84 /* Find the then/else edges. */
85 t
= EDGE_SUCC (cond_bb
, 0);
86 e
= EDGE_SUCC (cond_bb
, 1);
87 if (!(t
->flags
& EDGE_TRUE_VALUE
))
93 if (!(t
->flags
& EDGE_TRUE_VALUE
)
94 || !(e
->flags
& EDGE_FALSE_VALUE
))
97 /* Check if the edge destinations point to the required block. */
99 && t
->dest
!= *then_bb
)
102 && e
->dest
!= *else_bb
)
113 /* Verify if the basic block BB does not have side-effects. Return
114 true in this case, else false. */
117 bb_no_side_effects_p (basic_block bb
)
119 gimple_stmt_iterator gsi
;
121 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
123 gimple stmt
= gsi_stmt (gsi
);
125 if (gimple_has_side_effects (stmt
)
126 || gimple_vuse (stmt
))
133 /* Verify if all PHI node arguments in DEST for edges from BB1 or
134 BB2 to DEST are the same. This makes the CFG merge point
135 free from side-effects. Return true in this case, else false. */
138 same_phi_args_p (basic_block bb1
, basic_block bb2
, basic_block dest
)
140 edge e1
= find_edge (bb1
, dest
);
141 edge e2
= find_edge (bb2
, dest
);
142 gimple_stmt_iterator gsi
;
145 for (gsi
= gsi_start_phis (dest
); !gsi_end_p (gsi
); gsi_next (&gsi
))
147 phi
= gsi_stmt (gsi
);
148 if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi
, e1
),
149 PHI_ARG_DEF_FROM_EDGE (phi
, e2
), 0))
156 /* Return the best representative SSA name for CANDIDATE which is used
160 get_name_for_bit_test (tree candidate
)
162 /* Skip single-use names in favor of using the name from a
163 non-widening conversion definition. */
164 if (TREE_CODE (candidate
) == SSA_NAME
165 && has_single_use (candidate
))
167 gimple def_stmt
= SSA_NAME_DEF_STMT (candidate
);
168 if (is_gimple_assign (def_stmt
)
169 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt
)))
171 if (TYPE_PRECISION (TREE_TYPE (candidate
))
172 <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt
))))
173 return gimple_assign_rhs1 (def_stmt
);
180 /* Recognize a single bit test pattern in GIMPLE_COND and its defining
181 statements. Store the name being tested in *NAME and the bit
182 in *BIT. The GIMPLE_COND computes *NAME & (1 << *BIT).
183 Returns true if the pattern matched, false otherwise. */
186 recognize_single_bit_test (gimple cond
, tree
*name
, tree
*bit
, bool inv
)
190 /* Get at the definition of the result of the bit test. */
191 if (gimple_cond_code (cond
) != (inv
? EQ_EXPR
: NE_EXPR
)
192 || TREE_CODE (gimple_cond_lhs (cond
)) != SSA_NAME
193 || !integer_zerop (gimple_cond_rhs (cond
)))
195 stmt
= SSA_NAME_DEF_STMT (gimple_cond_lhs (cond
));
196 if (!is_gimple_assign (stmt
))
199 /* Look at which bit is tested. One form to recognize is
200 D.1985_5 = state_3(D) >> control1_4(D);
201 D.1986_6 = (int) D.1985_5;
203 if (D.1987_7 != 0) */
204 if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
205 && integer_onep (gimple_assign_rhs2 (stmt
))
206 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
208 tree orig_name
= gimple_assign_rhs1 (stmt
);
210 /* Look through copies and conversions to eventually
211 find the stmt that computes the shift. */
212 stmt
= SSA_NAME_DEF_STMT (orig_name
);
214 while (is_gimple_assign (stmt
)
215 && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
216 && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt
)))
217 <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt
)))))
218 || gimple_assign_ssa_name_copy_p (stmt
)))
219 stmt
= SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt
));
221 /* If we found such, decompose it. */
222 if (is_gimple_assign (stmt
)
223 && gimple_assign_rhs_code (stmt
) == RSHIFT_EXPR
)
225 /* op0 & (1 << op1) */
226 *bit
= gimple_assign_rhs2 (stmt
);
227 *name
= gimple_assign_rhs1 (stmt
);
232 *bit
= integer_zero_node
;
233 *name
= get_name_for_bit_test (orig_name
);
240 D.1987_7 = op0 & (1 << CST)
241 if (D.1987_7 != 0) */
242 if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
243 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
244 && integer_pow2p (gimple_assign_rhs2 (stmt
)))
246 *name
= gimple_assign_rhs1 (stmt
);
247 *bit
= build_int_cst (integer_type_node
,
248 tree_log2 (gimple_assign_rhs2 (stmt
)));
253 D.1986_6 = 1 << control1_4(D)
254 D.1987_7 = op0 & D.1986_6
255 if (D.1987_7 != 0) */
256 if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
257 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
258 && TREE_CODE (gimple_assign_rhs2 (stmt
)) == SSA_NAME
)
262 /* Both arguments of the BIT_AND_EXPR can be the single-bit
263 specifying expression. */
264 tmp
= SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt
));
265 if (is_gimple_assign (tmp
)
266 && gimple_assign_rhs_code (tmp
) == LSHIFT_EXPR
267 && integer_onep (gimple_assign_rhs1 (tmp
)))
269 *name
= gimple_assign_rhs2 (stmt
);
270 *bit
= gimple_assign_rhs2 (tmp
);
274 tmp
= SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt
));
275 if (is_gimple_assign (tmp
)
276 && gimple_assign_rhs_code (tmp
) == LSHIFT_EXPR
277 && integer_onep (gimple_assign_rhs1 (tmp
)))
279 *name
= gimple_assign_rhs1 (stmt
);
280 *bit
= gimple_assign_rhs2 (tmp
);
288 /* Recognize a bit test pattern in a GIMPLE_COND and its defining
289 statements. Store the name being tested in *NAME and the bits
290 in *BITS. The COND_EXPR computes *NAME & *BITS.
291 Returns true if the pattern matched, false otherwise. */
294 recognize_bits_test (gimple cond
, tree
*name
, tree
*bits
, bool inv
)
298 /* Get at the definition of the result of the bit test. */
299 if (gimple_cond_code (cond
) != (inv
? EQ_EXPR
: NE_EXPR
)
300 || TREE_CODE (gimple_cond_lhs (cond
)) != SSA_NAME
301 || !integer_zerop (gimple_cond_rhs (cond
)))
303 stmt
= SSA_NAME_DEF_STMT (gimple_cond_lhs (cond
));
304 if (!is_gimple_assign (stmt
)
305 || gimple_assign_rhs_code (stmt
) != BIT_AND_EXPR
)
308 *name
= get_name_for_bit_test (gimple_assign_rhs1 (stmt
));
309 *bits
= gimple_assign_rhs2 (stmt
);
314 /* If-convert on a and pattern with a common else block. The inner
315 if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
316 inner_inv, outer_inv and result_inv indicate whether the conditions
318 Returns true if the edges to the common else basic-block were merged. */
321 ifcombine_ifandif (basic_block inner_cond_bb
, bool inner_inv
,
322 basic_block outer_cond_bb
, bool outer_inv
, bool result_inv
)
324 gimple_stmt_iterator gsi
;
325 gimple inner_cond
, outer_cond
;
326 tree name1
, name2
, bit1
, bit2
, bits1
, bits2
;
328 inner_cond
= last_stmt (inner_cond_bb
);
330 || gimple_code (inner_cond
) != GIMPLE_COND
)
333 outer_cond
= last_stmt (outer_cond_bb
);
335 || gimple_code (outer_cond
) != GIMPLE_COND
)
338 /* See if we test a single bit of the same name in both tests. In
339 that case remove the outer test, merging both else edges,
340 and change the inner one to test for
341 name & (bit1 | bit2) == (bit1 | bit2). */
342 if (recognize_single_bit_test (inner_cond
, &name1
, &bit1
, inner_inv
)
343 && recognize_single_bit_test (outer_cond
, &name2
, &bit2
, outer_inv
)
349 gsi
= gsi_for_stmt (inner_cond
);
350 t
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (name1
),
351 build_int_cst (TREE_TYPE (name1
), 1), bit1
);
352 t2
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (name1
),
353 build_int_cst (TREE_TYPE (name1
), 1), bit2
);
354 t
= fold_build2 (BIT_IOR_EXPR
, TREE_TYPE (name1
), t
, t2
);
355 t
= force_gimple_operand_gsi (&gsi
, t
, true, NULL_TREE
,
356 true, GSI_SAME_STMT
);
357 t2
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (name1
), name1
, t
);
358 t2
= force_gimple_operand_gsi (&gsi
, t2
, true, NULL_TREE
,
359 true, GSI_SAME_STMT
);
360 t
= fold_build2 (result_inv
? NE_EXPR
: EQ_EXPR
,
361 boolean_type_node
, t2
, t
);
362 t
= canonicalize_cond_expr_cond (t
);
365 gimple_cond_set_condition_from_tree (inner_cond
, t
);
366 update_stmt (inner_cond
);
368 /* Leave CFG optimization to cfg_cleanup. */
369 gimple_cond_set_condition_from_tree (outer_cond
,
370 outer_inv
? boolean_false_node
: boolean_true_node
);
371 update_stmt (outer_cond
);
375 fprintf (dump_file
, "optimizing double bit test to ");
376 print_generic_expr (dump_file
, name1
, 0);
377 fprintf (dump_file
, " & T == T\nwith temporary T = (1 << ");
378 print_generic_expr (dump_file
, bit1
, 0);
379 fprintf (dump_file
, ") | (1 << ");
380 print_generic_expr (dump_file
, bit2
, 0);
381 fprintf (dump_file
, ")\n");
387 /* See if we have two bit tests of the same name in both tests.
388 In that case remove the outer test and change the inner one to
389 test for name & (bits1 | bits2) != 0. */
390 else if (recognize_bits_test (inner_cond
, &name1
, &bits1
, !inner_inv
)
391 && recognize_bits_test (outer_cond
, &name2
, &bits2
, !outer_inv
))
393 gimple_stmt_iterator gsi
;
396 /* Find the common name which is bit-tested. */
399 else if (bits1
== bits2
)
408 else if (name1
== bits2
)
414 else if (bits1
== name2
)
423 /* As we strip non-widening conversions in finding a common
424 name that is tested make sure to end up with an integral
425 type for building the bit operations. */
426 if (TYPE_PRECISION (TREE_TYPE (bits1
))
427 >= TYPE_PRECISION (TREE_TYPE (bits2
)))
429 bits1
= fold_convert (unsigned_type_for (TREE_TYPE (bits1
)), bits1
);
430 name1
= fold_convert (TREE_TYPE (bits1
), name1
);
431 bits2
= fold_convert (unsigned_type_for (TREE_TYPE (bits2
)), bits2
);
432 bits2
= fold_convert (TREE_TYPE (bits1
), bits2
);
436 bits2
= fold_convert (unsigned_type_for (TREE_TYPE (bits2
)), bits2
);
437 name1
= fold_convert (TREE_TYPE (bits2
), name1
);
438 bits1
= fold_convert (unsigned_type_for (TREE_TYPE (bits1
)), bits1
);
439 bits1
= fold_convert (TREE_TYPE (bits2
), bits1
);
443 gsi
= gsi_for_stmt (inner_cond
);
444 t
= fold_build2 (BIT_IOR_EXPR
, TREE_TYPE (name1
), bits1
, bits2
);
445 t
= force_gimple_operand_gsi (&gsi
, t
, true, NULL_TREE
,
446 true, GSI_SAME_STMT
);
447 t
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (name1
), name1
, t
);
448 t
= force_gimple_operand_gsi (&gsi
, t
, true, NULL_TREE
,
449 true, GSI_SAME_STMT
);
450 t
= fold_build2 (result_inv
? NE_EXPR
: EQ_EXPR
, boolean_type_node
, t
,
451 build_int_cst (TREE_TYPE (t
), 0));
452 t
= canonicalize_cond_expr_cond (t
);
455 gimple_cond_set_condition_from_tree (inner_cond
, t
);
456 update_stmt (inner_cond
);
458 /* Leave CFG optimization to cfg_cleanup. */
459 gimple_cond_set_condition_from_tree (outer_cond
,
460 outer_inv
? boolean_false_node
: boolean_true_node
);
461 update_stmt (outer_cond
);
465 fprintf (dump_file
, "optimizing bits or bits test to ");
466 print_generic_expr (dump_file
, name1
, 0);
467 fprintf (dump_file
, " & T != 0\nwith temporary T = ");
468 print_generic_expr (dump_file
, bits1
, 0);
469 fprintf (dump_file
, " | ");
470 print_generic_expr (dump_file
, bits2
, 0);
471 fprintf (dump_file
, "\n");
477 /* See if we have two comparisons that we can merge into one. */
478 else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond
)) == tcc_comparison
479 && TREE_CODE_CLASS (gimple_cond_code (outer_cond
)) == tcc_comparison
)
482 enum tree_code inner_cond_code
= gimple_cond_code (inner_cond
);
483 enum tree_code outer_cond_code
= gimple_cond_code (outer_cond
);
485 /* Invert comparisons if necessary (and possible). */
487 inner_cond_code
= invert_tree_comparison (inner_cond_code
,
488 HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (inner_cond
)))));
489 if (inner_cond_code
== ERROR_MARK
)
492 outer_cond_code
= invert_tree_comparison (outer_cond_code
,
493 HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (outer_cond
)))));
494 if (outer_cond_code
== ERROR_MARK
)
496 /* Don't return false so fast, try maybe_fold_or_comparisons? */
498 if (!(t
= maybe_fold_and_comparisons (inner_cond_code
,
499 gimple_cond_lhs (inner_cond
),
500 gimple_cond_rhs (inner_cond
),
502 gimple_cond_lhs (outer_cond
),
503 gimple_cond_rhs (outer_cond
))))
506 gimple_stmt_iterator gsi
;
507 if (!LOGICAL_OP_NON_SHORT_CIRCUIT
)
509 /* Only do this optimization if the inner bb contains only the conditional. */
510 if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb
)))
512 t1
= fold_build2_loc (gimple_location (inner_cond
),
515 gimple_cond_lhs (inner_cond
),
516 gimple_cond_rhs (inner_cond
));
517 t2
= fold_build2_loc (gimple_location (outer_cond
),
520 gimple_cond_lhs (outer_cond
),
521 gimple_cond_rhs (outer_cond
));
522 t
= fold_build2_loc (gimple_location (inner_cond
),
523 TRUTH_AND_EXPR
, boolean_type_node
, t1
, t2
);
526 t
= fold_build1 (TRUTH_NOT_EXPR
, TREE_TYPE (t
), t
);
529 gsi
= gsi_for_stmt (inner_cond
);
530 t
= force_gimple_operand_gsi_1 (&gsi
, t
, is_gimple_condexpr
, NULL
, true,
534 t
= fold_build1 (TRUTH_NOT_EXPR
, TREE_TYPE (t
), t
);
535 t
= canonicalize_cond_expr_cond (t
);
538 gimple_cond_set_condition_from_tree (inner_cond
, t
);
539 update_stmt (inner_cond
);
541 /* Leave CFG optimization to cfg_cleanup. */
542 gimple_cond_set_condition_from_tree (outer_cond
,
543 outer_inv
? boolean_false_node
: boolean_true_node
);
544 update_stmt (outer_cond
);
548 fprintf (dump_file
, "optimizing two comparisons to ");
549 print_generic_expr (dump_file
, t
, 0);
550 fprintf (dump_file
, "\n");
559 /* Recognize a CFG pattern and dispatch to the appropriate
560 if-conversion helper. We start with BB as the innermost
561 worker basic-block. Returns true if a transformation was done. */
564 tree_ssa_ifcombine_bb (basic_block inner_cond_bb
)
566 basic_block then_bb
= NULL
, else_bb
= NULL
;
568 if (!recognize_if_then_else (inner_cond_bb
, &then_bb
, &else_bb
))
571 /* Recognize && and || of two conditions with a common
572 then/else block which entry edges we can merge. That is:
578 This requires a single predecessor of the inner cond_bb. */
579 if (single_pred_p (inner_cond_bb
))
581 basic_block outer_cond_bb
= single_pred (inner_cond_bb
);
583 /* The && form is characterized by a common else_bb with
584 the two edges leading to it mergable. The latter is
585 guaranteed by matching PHI arguments in the else_bb and
586 the inner cond_bb having no side-effects. */
587 if (recognize_if_then_else (outer_cond_bb
, &inner_cond_bb
, &else_bb
)
588 && same_phi_args_p (outer_cond_bb
, inner_cond_bb
, else_bb
)
589 && bb_no_side_effects_p (inner_cond_bb
))
593 if (q) goto inner_cond_bb; else goto else_bb;
595 if (p) goto ...; else goto else_bb;
600 return ifcombine_ifandif (inner_cond_bb
, false, outer_cond_bb
, false,
604 /* And a version where the outer condition is negated. */
605 if (recognize_if_then_else (outer_cond_bb
, &else_bb
, &inner_cond_bb
)
606 && same_phi_args_p (outer_cond_bb
, inner_cond_bb
, else_bb
)
607 && bb_no_side_effects_p (inner_cond_bb
))
611 if (q) goto else_bb; else goto inner_cond_bb;
613 if (p) goto ...; else goto else_bb;
618 return ifcombine_ifandif (inner_cond_bb
, false, outer_cond_bb
, true,
622 /* The || form is characterized by a common then_bb with the
623 two edges leading to it mergable. The latter is guaranteed
624 by matching PHI arguments in the then_bb and the inner cond_bb
625 having no side-effects. */
626 if (recognize_if_then_else (outer_cond_bb
, &then_bb
, &inner_cond_bb
)
627 && same_phi_args_p (outer_cond_bb
, inner_cond_bb
, then_bb
)
628 && bb_no_side_effects_p (inner_cond_bb
))
632 if (q) goto then_bb; else goto inner_cond_bb;
634 if (q) goto then_bb; else goto ...;
638 return ifcombine_ifandif (inner_cond_bb
, true, outer_cond_bb
, true,
642 /* And a version where the outer condition is negated. */
643 if (recognize_if_then_else (outer_cond_bb
, &inner_cond_bb
, &then_bb
)
644 && same_phi_args_p (outer_cond_bb
, inner_cond_bb
, then_bb
)
645 && bb_no_side_effects_p (inner_cond_bb
))
649 if (q) goto inner_cond_bb; else goto then_bb;
651 if (q) goto then_bb; else goto ...;
655 return ifcombine_ifandif (inner_cond_bb
, true, outer_cond_bb
, false,
663 /* Main entry for the tree if-conversion pass. */
666 tree_ssa_ifcombine (void)
669 bool cfg_changed
= false;
672 bbs
= single_pred_before_succ_order ();
673 calculate_dominance_info (CDI_DOMINATORS
);
675 /* Search every basic block for COND_EXPR we may be able to optimize.
677 We walk the blocks in order that guarantees that a block with
678 a single predecessor is processed after the predecessor.
679 This ensures that we collapse outter ifs before visiting the
680 inner ones, and also that we do not try to visit a removed
681 block. This is opposite of PHI-OPT, because we cascade the
682 combining rather than cascading PHIs. */
683 for (i
= n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
- 1; i
>= 0; i
--)
685 basic_block bb
= bbs
[i
];
686 gimple stmt
= last_stmt (bb
);
689 && gimple_code (stmt
) == GIMPLE_COND
)
690 cfg_changed
|= tree_ssa_ifcombine_bb (bb
);
695 return cfg_changed
? TODO_cleanup_cfg
: 0;
699 gate_ifcombine (void)
706 const pass_data pass_data_tree_ifcombine
=
708 GIMPLE_PASS
, /* type */
709 "ifcombine", /* name */
710 OPTGROUP_NONE
, /* optinfo_flags */
712 true, /* has_execute */
713 TV_TREE_IFCOMBINE
, /* tv_id */
714 ( PROP_cfg
| PROP_ssa
), /* properties_required */
715 0, /* properties_provided */
716 0, /* properties_destroyed */
717 0, /* todo_flags_start */
718 ( TODO_update_ssa
| TODO_verify_ssa
), /* todo_flags_finish */
721 class pass_tree_ifcombine
: public gimple_opt_pass
724 pass_tree_ifcombine (gcc::context
*ctxt
)
725 : gimple_opt_pass (pass_data_tree_ifcombine
, ctxt
)
728 /* opt_pass methods: */
729 bool gate () { return gate_ifcombine (); }
730 unsigned int execute () { return tree_ssa_ifcombine (); }
732 }; // class pass_tree_ifcombine
737 make_pass_tree_ifcombine (gcc::context
*ctxt
)
739 return new pass_tree_ifcombine (ctxt
);