1 /* Combining of if-expressions on trees.
2 Copyright (C) 2007-2013 Free Software Foundation, Inc.
3 Contributed by Richard Guenther <rguenther@suse.de>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
25 /* rtl is needed only because arm back-end requires it for
30 #include "basic-block.h"
31 #include "tree-pretty-print.h"
33 #include "gimple-iterator.h"
34 #include "gimplify-me.h"
35 #include "gimple-ssa.h"
37 #include "tree-phinodes.h"
38 #include "ssa-iterators.h"
39 #include "tree-pass.h"
41 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
42 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
43 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
47 /* This pass combines COND_EXPRs to simplify control flow. It
48 currently recognizes bit tests and comparisons in chains that
49 represent logical and or logical or of two COND_EXPRs.
51 It does so by walking basic blocks in an approximate reverse
52 post-dominator order and trying to match CFG patterns that
53 represent logical and or logical or of two COND_EXPRs.
54 Transformations are done if the COND_EXPR conditions match
57 1. two single bit tests X & (1 << Yn) (for logical and)
59 2. two bit tests X & Yn (for logical or)
61 3. two comparisons X OPn Y (for logical or)
63 To simplify this pass, removing basic blocks and dead code
64 is left to CFG cleanup and DCE. */
67 /* Recognize a if-then-else CFG pattern starting to match with the
68 COND_BB basic-block containing the COND_EXPR. The recognized
69 then and else blocks are stored to *THEN_BB and *ELSE_BB. If
70 *THEN_BB and/or *ELSE_BB are already set, they are required to
71 match the then and else basic-blocks to make the pattern match.
72 Returns true if the pattern matched, false otherwise. */
/* NOTE(review): the body below is visibly truncated (interior lines,
   declarations, returns and braces are missing from this view); the
   comments added here are grounded only in the fragments that remain.  */
75 recognize_if_then_else (basic_block cond_bb
,
76 basic_block
*then_bb
, basic_block
*else_bb
)
/* A two-way conditional block must have exactly two successor edges.  */
80 if (EDGE_COUNT (cond_bb
->succs
) != 2)
83 /* Find the then/else edges. */
84 t
= EDGE_SUCC (cond_bb
, 0);
85 e
= EDGE_SUCC (cond_bb
, 1);
/* If successor 0 is not the true edge, presumably t and e get swapped
   here -- the swap statements are not visible.  TODO confirm.  */
86 if (!(t
->flags
& EDGE_TRUE_VALUE
))
/* After the (elided) canonicalization, t must carry EDGE_TRUE_VALUE and
   e EDGE_FALSE_VALUE, otherwise the pattern does not match.  */
92 if (!(t
->flags
& EDGE_TRUE_VALUE
)
93 || !(e
->flags
& EDGE_FALSE_VALUE
))
96 /* Check if the edge destinations point to the required block. */
/* When *THEN_BB / *ELSE_BB are preset by the caller they must match
   the corresponding edge destinations (see the function comment).  */
98 && t
->dest
!= *then_bb
)
101 && e
->dest
!= *else_bb
)
112 /* Verify if the basic block BB does not have side-effects. Return
113 true in this case, else false. */
/* NOTE(review): body lines are truncated in this view; comments reflect
   only the visible fragments.  */
116 bb_no_side_effects_p (basic_block bb
)
118 gimple_stmt_iterator gsi
;
/* Walk every statement of BB.  */
120 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
122 gimple stmt
= gsi_stmt (gsi
);
/* Any statement with side effects, or with a virtual use (i.e. one
   that reads memory, per gimple_vuse), disqualifies BB.  */
124 if (gimple_has_side_effects (stmt
)
125 || gimple_vuse (stmt
))
132 /* Verify if all PHI node arguments in DEST for edges from BB1 or
133 BB2 to DEST are the same. This makes the CFG merge point
134 free from side-effects. Return true in this case, else false. */
/* NOTE(review): body lines are truncated in this view; comments reflect
   only the visible fragments.  */
137 same_phi_args_p (basic_block bb1
, basic_block bb2
, basic_block dest
)
/* Locate the two edges whose PHI arguments we compare.  */
139 edge e1
= find_edge (bb1
, dest
);
140 edge e2
= find_edge (bb2
, dest
);
141 gimple_stmt_iterator gsi
;
/* Compare, for every PHI node in DEST, the argument coming in via e1
   against the argument coming in via e2.  */
144 for (gsi
= gsi_start_phis (dest
); !gsi_end_p (gsi
); gsi_next (&gsi
))
146 phi
= gsi_stmt (gsi
);
/* operand_equal_p with flags 0 -- strict structural equality.  */
147 if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi
, e1
),
148 PHI_ARG_DEF_FROM_EDGE (phi
, e2
), 0))
155 /* Return the best representative SSA name for CANDIDATE which is used
/* NOTE(review): body lines are truncated in this view; comments reflect
   only the visible fragments.  */
159 get_name_for_bit_test (tree candidate
)
161 /* Skip single-use names in favor of using the name from a
162 non-widening conversion definition. */
163 if (TREE_CODE (candidate
) == SSA_NAME
164 && has_single_use (candidate
))
166 gimple def_stmt
= SSA_NAME_DEF_STMT (candidate
);
/* Only look through conversions (CONVERT_EXPR / NOP_EXPR).  */
167 if (is_gimple_assign (def_stmt
)
168 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt
)))
/* Only a non-widening conversion: the source must have at least the
   precision of CANDIDATE, so the bit test stays valid on it.  */
170 if (TYPE_PRECISION (TREE_TYPE (candidate
))
171 <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt
))))
172 return gimple_assign_rhs1 (def_stmt
);
179 /* Recognize a single bit test pattern in GIMPLE_COND and its defining
180 statements. Store the name being tested in *NAME and the bit
181 in *BIT. The GIMPLE_COND computes *NAME & (1 << *BIT).
182 Returns true if the pattern matched, false otherwise. */
/* NOTE(review): the body below is visibly truncated (interior braces,
   declarations and return statements are missing from this view); the
   comments added here are grounded only in the fragments that remain.  */
185 recognize_single_bit_test (gimple cond
, tree
*name
, tree
*bit
, bool inv
)
189 /* Get at the definition of the result of the bit test. */
/* The condition must be NAME ==/!= 0 on an SSA name; INV selects
   whether EQ_EXPR or NE_EXPR is the accepted comparison code.  */
190 if (gimple_cond_code (cond
) != (inv
? EQ_EXPR
: NE_EXPR
)
191 || TREE_CODE (gimple_cond_lhs (cond
)) != SSA_NAME
192 || !integer_zerop (gimple_cond_rhs (cond
)))
194 stmt
= SSA_NAME_DEF_STMT (gimple_cond_lhs (cond
));
195 if (!is_gimple_assign (stmt
))
198 /* Look at which bit is tested. One form to recognize is
199 D.1985_5 = state_3(D) >> control1_4(D);
200 D.1986_6 = (int) D.1985_5;
202 if (D.1987_7 != 0) */
/* Form 1: (name >> bit) & 1 -- the AND with constant one.  */
203 if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
204 && integer_onep (gimple_assign_rhs2 (stmt
))
205 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
207 tree orig_name
= gimple_assign_rhs1 (stmt
);
209 /* Look through copies and conversions to eventually
210 find the stmt that computes the shift. */
211 stmt
= SSA_NAME_DEF_STMT (orig_name
);
/* Strip SSA-name copies and non-widening conversions only, so the
   tested bit position remains meaningful.  */
213 while (is_gimple_assign (stmt
)
214 && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
215 && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt
)))
216 <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt
)))))
217 || gimple_assign_ssa_name_copy_p (stmt
)))
218 stmt
= SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt
));
220 /* If we found such, decompose it. */
221 if (is_gimple_assign (stmt
)
222 && gimple_assign_rhs_code (stmt
) == RSHIFT_EXPR
)
224 /* op0 & (1 << op1) */
225 *bit
= gimple_assign_rhs2 (stmt
);
226 *name
= gimple_assign_rhs1 (stmt
);
/* Otherwise (no shift found) the test is of bit zero of the name
   itself -- presumably the else-branch; braces are elided here.  */
231 *bit
= integer_zero_node
;
232 *name
= get_name_for_bit_test (orig_name
);
/* Form 2: op0 & (1 << CST) -- a power-of-two constant mask.  */
239 D.1987_7 = op0 & (1 << CST)
240 if (D.1987_7 != 0) */
241 if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
242 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
243 && integer_pow2p (gimple_assign_rhs2 (stmt
)))
245 *name
= gimple_assign_rhs1 (stmt
);
/* tree_log2 recovers the bit index from the power-of-two mask.  */
246 *bit
= build_int_cst (integer_type_node
,
247 tree_log2 (gimple_assign_rhs2 (stmt
)));
/* Form 3: op0 & (1 << b) with a variable shift computed separately.  */
252 D.1986_6 = 1 << control1_4(D)
253 D.1987_7 = op0 & D.1986_6
254 if (D.1987_7 != 0) */
255 if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
256 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
257 && TREE_CODE (gimple_assign_rhs2 (stmt
)) == SSA_NAME
)
261 /* Both arguments of the BIT_AND_EXPR can be the single-bit
262 specifying expression. */
/* First try: rhs1 defines the 1 << b expression.  */
263 tmp
= SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt
));
264 if (is_gimple_assign (tmp
)
265 && gimple_assign_rhs_code (tmp
) == LSHIFT_EXPR
266 && integer_onep (gimple_assign_rhs1 (tmp
)))
268 *name
= gimple_assign_rhs2 (stmt
);
269 *bit
= gimple_assign_rhs2 (tmp
);
/* Second try: rhs2 defines the 1 << b expression.  */
273 tmp
= SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt
));
274 if (is_gimple_assign (tmp
)
275 && gimple_assign_rhs_code (tmp
) == LSHIFT_EXPR
276 && integer_onep (gimple_assign_rhs1 (tmp
)))
278 *name
= gimple_assign_rhs1 (stmt
);
279 *bit
= gimple_assign_rhs2 (tmp
);
287 /* Recognize a bit test pattern in a GIMPLE_COND and its defining
288 statements. Store the name being tested in *NAME and the bits
289 in *BITS. The COND_EXPR computes *NAME & *BITS.
290 Returns true if the pattern matched, false otherwise. */
/* NOTE(review): body lines are truncated in this view; comments reflect
   only the visible fragments.  */
293 recognize_bits_test (gimple cond
, tree
*name
, tree
*bits
, bool inv
)
297 /* Get at the definition of the result of the bit test. */
/* The condition must be NAME ==/!= 0 on an SSA name; INV selects
   whether EQ_EXPR or NE_EXPR is the accepted comparison code.  */
298 if (gimple_cond_code (cond
) != (inv
? EQ_EXPR
: NE_EXPR
)
299 || TREE_CODE (gimple_cond_lhs (cond
)) != SSA_NAME
300 || !integer_zerop (gimple_cond_rhs (cond
)))
302 stmt
= SSA_NAME_DEF_STMT (gimple_cond_lhs (cond
));
/* The tested SSA name must be defined by a BIT_AND_EXPR.  */
303 if (!is_gimple_assign (stmt
)
304 || gimple_assign_rhs_code (stmt
) != BIT_AND_EXPR
)
/* Decompose NAME & BITS, looking through non-widening conversions on
   the name operand (see get_name_for_bit_test).  */
307 *name
= get_name_for_bit_test (gimple_assign_rhs1 (stmt
));
308 *bits
= gimple_assign_rhs2 (stmt
);
313 /* If-convert on a and pattern with a common else block. The inner
314 if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
315 inner_inv, outer_inv and result_inv indicate whether the conditions
317 Returns true if the edges to the common else basic-block were merged. */
/* NOTE(review): the body below is visibly truncated (interior braces,
   declarations, returns and some operands are missing from this view);
   the comments added here are grounded only in the remaining fragments.  */
320 ifcombine_ifandif (basic_block inner_cond_bb
, bool inner_inv
,
321 basic_block outer_cond_bb
, bool outer_inv
, bool result_inv
)
323 gimple_stmt_iterator gsi
;
324 gimple inner_cond
, outer_cond
;
325 tree name1
, name2
, bit1
, bit2
, bits1
, bits2
;
/* Both blocks must end in a GIMPLE_COND for any transform to apply.  */
327 inner_cond
= last_stmt (inner_cond_bb
);
329 || gimple_code (inner_cond
) != GIMPLE_COND
)
332 outer_cond
= last_stmt (outer_cond_bb
);
334 || gimple_code (outer_cond
) != GIMPLE_COND
)
337 /* See if we test a single bit of the same name in both tests. In
338 that case remove the outer test, merging both else edges,
339 and change the inner one to test for
340 name & (bit1 | bit2) == (bit1 | bit2). */
341 if (recognize_single_bit_test (inner_cond
, &name1
, &bit1
, inner_inv
)
342 && recognize_single_bit_test (outer_cond
, &name2
, &bit2
, outer_inv
)
/* Build T = (1 << bit1) | (1 << bit2) and the test
   (name1 & T) ==/!= T, gimplified before the inner condition.  */
348 gsi
= gsi_for_stmt (inner_cond
);
349 t
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (name1
),
350 build_int_cst (TREE_TYPE (name1
), 1), bit1
);
351 t2
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (name1
),
352 build_int_cst (TREE_TYPE (name1
), 1), bit2
);
353 t
= fold_build2 (BIT_IOR_EXPR
, TREE_TYPE (name1
), t
, t2
);
354 t
= force_gimple_operand_gsi (&gsi
, t
, true, NULL_TREE
,
355 true, GSI_SAME_STMT
);
356 t2
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (name1
), name1
, t
);
357 t2
= force_gimple_operand_gsi (&gsi
, t2
, true, NULL_TREE
,
358 true, GSI_SAME_STMT
);
/* RESULT_INV selects whether the merged test is != T or == T.  */
359 t
= fold_build2 (result_inv
? NE_EXPR
: EQ_EXPR
,
360 boolean_type_node
, t2
, t
);
361 t
= canonicalize_cond_expr_cond (t
);
/* Install the merged condition on the inner GIMPLE_COND.  */
364 gimple_cond_set_condition_from_tree (inner_cond
, t
);
365 update_stmt (inner_cond
);
367 /* Leave CFG optimization to cfg_cleanup. */
/* Make the outer condition constant so cfg_cleanup removes it; the
   constant chosen respects OUTER_INV.  */
368 gimple_cond_set_condition_from_tree (outer_cond
,
369 outer_inv
? boolean_false_node
: boolean_true_node
);
370 update_stmt (outer_cond
);
/* Dump-file reporting (presumably guarded by dump_file -- the guard
   line is not visible here).  */
374 fprintf (dump_file
, "optimizing double bit test to ");
375 print_generic_expr (dump_file
, name1
, 0);
376 fprintf (dump_file
, " & T == T\nwith temporary T = (1 << ");
377 print_generic_expr (dump_file
, bit1
, 0);
378 fprintf (dump_file
, ") | (1 << ");
379 print_generic_expr (dump_file
, bit2
, 0);
380 fprintf (dump_file
, ")\n");
386 /* See if we have two bit tests of the same name in both tests.
387 In that case remove the outer test and change the inner one to
388 test for name & (bits1 | bits2) != 0. */
389 else if (recognize_bits_test (inner_cond
, &name1
, &bits1
, !inner_inv
)
390 && recognize_bits_test (outer_cond
, &name2
, &bits2
, !outer_inv
))
392 gimple_stmt_iterator gsi
;
395 /* Find the common name which is bit-tested. */
/* The elided cases presumably cover name1 == name2; the visible ones
   handle a name matching the other test's mask.  TODO confirm.  */
398 else if (bits1
== bits2
)
407 else if (name1
== bits2
)
413 else if (bits1
== name2
)
422 /* As we strip non-widening conversions in finding a common
423 name that is tested make sure to end up with an integral
424 type for building the bit operations. */
/* Convert both masks (and the name) to the unsigned variant of the
   wider of the two mask types.  */
425 if (TYPE_PRECISION (TREE_TYPE (bits1
))
426 >= TYPE_PRECISION (TREE_TYPE (bits2
)))
428 bits1
= fold_convert (unsigned_type_for (TREE_TYPE (bits1
)), bits1
);
429 name1
= fold_convert (TREE_TYPE (bits1
), name1
);
430 bits2
= fold_convert (unsigned_type_for (TREE_TYPE (bits2
)), bits2
);
431 bits2
= fold_convert (TREE_TYPE (bits1
), bits2
);
435 bits2
= fold_convert (unsigned_type_for (TREE_TYPE (bits2
)), bits2
);
436 name1
= fold_convert (TREE_TYPE (bits2
), name1
);
437 bits1
= fold_convert (unsigned_type_for (TREE_TYPE (bits1
)), bits1
);
438 bits1
= fold_convert (TREE_TYPE (bits2
), bits1
);
/* Build name1 & (bits1 | bits2) ==/!= 0, gimplified in place.  */
442 gsi
= gsi_for_stmt (inner_cond
);
443 t
= fold_build2 (BIT_IOR_EXPR
, TREE_TYPE (name1
), bits1
, bits2
);
444 t
= force_gimple_operand_gsi (&gsi
, t
, true, NULL_TREE
,
445 true, GSI_SAME_STMT
);
446 t
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (name1
), name1
, t
);
447 t
= force_gimple_operand_gsi (&gsi
, t
, true, NULL_TREE
,
448 true, GSI_SAME_STMT
);
449 t
= fold_build2 (result_inv
? NE_EXPR
: EQ_EXPR
, boolean_type_node
, t
,
450 build_int_cst (TREE_TYPE (t
), 0));
451 t
= canonicalize_cond_expr_cond (t
);
454 gimple_cond_set_condition_from_tree (inner_cond
, t
);
455 update_stmt (inner_cond
);
457 /* Leave CFG optimization to cfg_cleanup. */
458 gimple_cond_set_condition_from_tree (outer_cond
,
459 outer_inv
? boolean_false_node
: boolean_true_node
);
460 update_stmt (outer_cond
);
464 fprintf (dump_file
, "optimizing bits or bits test to ");
465 print_generic_expr (dump_file
, name1
, 0);
466 fprintf (dump_file
, " & T != 0\nwith temporary T = ");
467 print_generic_expr (dump_file
, bits1
, 0);
468 fprintf (dump_file
, " | ");
469 print_generic_expr (dump_file
, bits2
, 0);
470 fprintf (dump_file
, "\n");
476 /* See if we have two comparisons that we can merge into one. */
477 else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond
)) == tcc_comparison
478 && TREE_CODE_CLASS (gimple_cond_code (outer_cond
)) == tcc_comparison
)
481 enum tree_code inner_cond_code
= gimple_cond_code (inner_cond
);
482 enum tree_code outer_cond_code
= gimple_cond_code (outer_cond
);
484 /* Invert comparisons if necessary (and possible). */
/* invert_tree_comparison yields ERROR_MARK when the comparison cannot
   be inverted (e.g. under NaN honoring) -- bail out then.  */
486 inner_cond_code
= invert_tree_comparison (inner_cond_code
,
487 HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (inner_cond
)))));
488 if (inner_cond_code
== ERROR_MARK
)
491 outer_cond_code
= invert_tree_comparison (outer_cond_code
,
492 HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (outer_cond
)))));
493 if (outer_cond_code
== ERROR_MARK
)
495 /* Don't return false so fast, try maybe_fold_or_comparisons? */
/* First attempt: fold the AND of both comparisons into one tree.  */
497 if (!(t
= maybe_fold_and_comparisons (inner_cond_code
,
498 gimple_cond_lhs (inner_cond
),
499 gimple_cond_rhs (inner_cond
),
501 gimple_cond_lhs (outer_cond
),
502 gimple_cond_rhs (outer_cond
))))
505 gimple_stmt_iterator gsi
;
/* Without non-short-circuit logical ops this transform is not
   profitable on the target.  */
506 if (!LOGICAL_OP_NON_SHORT_CIRCUIT
)
508 /* Only do this optimization if the inner bb contains only the conditional. */
509 if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb
)))
/* Fallback: build an explicit TRUTH_AND of the two comparisons and
   force it into gimple form at the inner condition.  */
511 t1
= fold_build2_loc (gimple_location (inner_cond
),
514 gimple_cond_lhs (inner_cond
),
515 gimple_cond_rhs (inner_cond
));
516 t2
= fold_build2_loc (gimple_location (outer_cond
),
519 gimple_cond_lhs (outer_cond
),
520 gimple_cond_rhs (outer_cond
));
521 t
= fold_build2_loc (gimple_location (inner_cond
),
522 TRUTH_AND_EXPR
, boolean_type_node
, t1
, t2
);
/* Negation applied before gimplification and (presumably) undone
   after it -- see the matching TRUTH_NOT below.  TODO confirm the
   elided guard controlling this.  */
525 t
= fold_build1 (TRUTH_NOT_EXPR
, TREE_TYPE (t
), t
);
528 gsi
= gsi_for_stmt (inner_cond
);
529 t
= force_gimple_operand_gsi_1 (&gsi
, t
, is_gimple_condexpr
, NULL
, true,
533 t
= fold_build1 (TRUTH_NOT_EXPR
, TREE_TYPE (t
), t
);
534 t
= canonicalize_cond_expr_cond (t
);
537 gimple_cond_set_condition_from_tree (inner_cond
, t
);
538 update_stmt (inner_cond
);
540 /* Leave CFG optimization to cfg_cleanup. */
541 gimple_cond_set_condition_from_tree (outer_cond
,
542 outer_inv
? boolean_false_node
: boolean_true_node
);
543 update_stmt (outer_cond
);
547 fprintf (dump_file
, "optimizing two comparisons to ");
548 print_generic_expr (dump_file
, t
, 0);
549 fprintf (dump_file
, "\n");
558 /* Recognize a CFG pattern and dispatch to the appropriate
559 if-conversion helper. We start with BB as the innermost
560 worker basic-block. Returns true if a transformation was done. */
/* NOTE(review): the body below is visibly truncated (interior braces,
   returns and trailing arguments are missing from this view); comments
   are grounded only in the remaining fragments.  */
563 tree_ssa_ifcombine_bb (basic_block inner_cond_bb
)
565 basic_block then_bb
= NULL
, else_bb
= NULL
;
/* INNER_COND_BB itself must look like an if-then-else head.  */
567 if (!recognize_if_then_else (inner_cond_bb
, &then_bb
, &else_bb
))
570 /* Recognize && and || of two conditions with a common
571 then/else block which entry edges we can merge. That is:
577 This requires a single predecessor of the inner cond_bb. */
578 if (single_pred_p (inner_cond_bb
))
580 basic_block outer_cond_bb
= single_pred (inner_cond_bb
);
582 /* The && form is characterized by a common else_bb with
583 the two edges leading to it mergable. The latter is
584 guaranteed by matching PHI arguments in the else_bb and
585 the inner cond_bb having no side-effects. */
586 if (recognize_if_then_else (outer_cond_bb
, &inner_cond_bb
, &else_bb
)
587 && same_phi_args_p (outer_cond_bb
, inner_cond_bb
, else_bb
)
588 && bb_no_side_effects_p (inner_cond_bb
))
592 if (q) goto inner_cond_bb; else goto else_bb;
594 if (p) goto ...; else goto else_bb;
/* && form, neither condition inverted (final argument elided).  */
599 return ifcombine_ifandif (inner_cond_bb
, false, outer_cond_bb
, false,
603 /* And a version where the outer condition is negated. */
604 if (recognize_if_then_else (outer_cond_bb
, &else_bb
, &inner_cond_bb
)
605 && same_phi_args_p (outer_cond_bb
, inner_cond_bb
, else_bb
)
606 && bb_no_side_effects_p (inner_cond_bb
))
610 if (q) goto else_bb; else goto inner_cond_bb;
612 if (p) goto ...; else goto else_bb;
/* && form with the outer condition inverted.  */
617 return ifcombine_ifandif (inner_cond_bb
, false, outer_cond_bb
, true,
621 /* The || form is characterized by a common then_bb with the
622 two edges leading to it mergable. The latter is guaranteed
623 by matching PHI arguments in the then_bb and the inner cond_bb
624 having no side-effects. */
625 if (recognize_if_then_else (outer_cond_bb
, &then_bb
, &inner_cond_bb
)
626 && same_phi_args_p (outer_cond_bb
, inner_cond_bb
, then_bb
)
627 && bb_no_side_effects_p (inner_cond_bb
))
631 if (q) goto then_bb; else goto inner_cond_bb;
633 if (q) goto then_bb; else goto ...;
/* || form, both conditions inverted (De Morgan via ifandif).  */
637 return ifcombine_ifandif (inner_cond_bb
, true, outer_cond_bb
, true,
641 /* And a version where the outer condition is negated. */
642 if (recognize_if_then_else (outer_cond_bb
, &inner_cond_bb
, &then_bb
)
643 && same_phi_args_p (outer_cond_bb
, inner_cond_bb
, then_bb
)
644 && bb_no_side_effects_p (inner_cond_bb
))
648 if (q) goto inner_cond_bb; else goto then_bb;
650 if (q) goto then_bb; else goto ...;
/* || form with only the inner condition inverted.  */
654 return ifcombine_ifandif (inner_cond_bb
, true, outer_cond_bb
, false,
662 /* Main entry for the tree if-conversion pass. */
/* Main entry point of the pass.  NOTE(review): body lines are truncated
   in this view; comments reflect only the visible fragments.  */
665 tree_ssa_ifcombine (void)
668 bool cfg_changed
= false;
/* Order blocks so a single-predecessor block follows its predecessor;
   the loop below walks this order backwards.  */
671 bbs
= single_pred_before_succ_order ();
672 calculate_dominance_info (CDI_DOMINATORS
);
674 /* Search every basic block for COND_EXPR we may be able to optimize.
676 We walk the blocks in order that guarantees that a block with
677 a single predecessor is processed after the predecessor.
678 This ensures that we collapse outer ifs before visiting the
679 inner ones, and also that we do not try to visit a removed
680 block. This is opposite of PHI-OPT, because we cascade the
681 combining rather than cascading PHIs. */
682 for (i
= n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
- 1; i
>= 0; i
--)
684 basic_block bb
= bbs
[i
];
685 gimple stmt
= last_stmt (bb
);
/* Only blocks ending in a GIMPLE_COND are candidates.  */
688 && gimple_code (stmt
) == GIMPLE_COND
)
689 cfg_changed
|= tree_ssa_ifcombine_bb (bb
);
/* Request CFG cleanup only if some combination happened.  */
694 return cfg_changed
? TODO_cleanup_cfg
: 0;
/* Gate predicate for the pass.  NOTE(review): only the signature is
   visible here; the body is elided from this view.  */
698 gate_ifcombine (void)
/* Static metadata describing the ifcombine pass to the pass manager.
   NOTE(review): some initializer lines are elided from this view.  */
705 const pass_data pass_data_tree_ifcombine
=
707 GIMPLE_PASS
, /* type */
708 "ifcombine", /* name */
709 OPTGROUP_NONE
, /* optinfo_flags */
711 true, /* has_execute */
712 TV_TREE_IFCOMBINE
, /* tv_id */
713 ( PROP_cfg
| PROP_ssa
), /* properties_required */
714 0, /* properties_provided */
715 0, /* properties_destroyed */
716 0, /* todo_flags_start */
717 ( TODO_update_ssa
| TODO_verify_ssa
), /* todo_flags_finish */
/* Pass-manager wrapper class: forwards gate/execute to the static
   gate_ifcombine and tree_ssa_ifcombine functions above.  */
720 class pass_tree_ifcombine
: public gimple_opt_pass
/* Construct with the shared pass_data descriptor.  */
723 pass_tree_ifcombine (gcc::context
*ctxt
)
724 : gimple_opt_pass (pass_data_tree_ifcombine
, ctxt
)
727 /* opt_pass methods: */
728 bool gate () { return gate_ifcombine (); }
729 unsigned int execute () { return tree_ssa_ifcombine (); }
731 }; // class pass_tree_ifcombine
/* Factory: the pass manager owns and deletes the returned object.  */
736 make_pass_tree_ifcombine (gcc::context
*ctxt
)
738 return new pass_tree_ifcombine (ctxt
);