1 /* Combining of if-expressions on trees.
2 Copyright (C) 2007-2013 Free Software Foundation, Inc.
3 Contributed by Richard Guenther <rguenther@suse.de>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
25 /* rtl is needed only because arm back-end requires it for
30 #include "stor-layout.h"
31 #include "basic-block.h"
32 #include "tree-pretty-print.h"
33 #include "tree-ssa-alias.h"
34 #include "internal-fn.h"
35 #include "gimple-fold.h"
36 #include "gimple-expr.h"
39 #include "gimple-iterator.h"
40 #include "gimplify-me.h"
41 #include "gimple-ssa.h"
43 #include "tree-phinodes.h"
44 #include "ssa-iterators.h"
45 #include "tree-pass.h"
/* Fallback definition when the target does not provide one: treat
   logical ops as non-short-circuit when branches are expensive
   (branch cost of 2 or more on the speed-optimized path).  */
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif
53 /* This pass combines COND_EXPRs to simplify control flow. It
54 currently recognizes bit tests and comparisons in chains that
55 represent logical and or logical or of two COND_EXPRs.
57 It does so by walking basic blocks in an approximate reverse
58 post-dominator order and trying to match CFG patterns that
59 represent logical and or logical or of two COND_EXPRs.
60 Transformations are done if the COND_EXPR conditions match
63 1. two single bit tests X & (1 << Yn) (for logical and)
65 2. two bit tests X & Yn (for logical or)
67 3. two comparisons X OPn Y (for logical or)
69 To simplify this pass, removing basic blocks and dead code
70 is left to CFG cleanup and DCE. */
73 /* Recognize a if-then-else CFG pattern starting to match with the
74 COND_BB basic-block containing the COND_EXPR. The recognized
75 then end else blocks are stored to *THEN_BB and *ELSE_BB. If
76 *THEN_BB and/or *ELSE_BB are already set, they are required to
77 match the then and else basic-blocks to make the pattern match.
78 Returns true if the pattern matched, false otherwise. */
81 recognize_if_then_else (basic_block cond_bb
,
82 basic_block
*then_bb
, basic_block
*else_bb
)
86 if (EDGE_COUNT (cond_bb
->succs
) != 2)
89 /* Find the then/else edges. */
90 t
= EDGE_SUCC (cond_bb
, 0);
91 e
= EDGE_SUCC (cond_bb
, 1);
92 if (!(t
->flags
& EDGE_TRUE_VALUE
))
98 if (!(t
->flags
& EDGE_TRUE_VALUE
)
99 || !(e
->flags
& EDGE_FALSE_VALUE
))
102 /* Check if the edge destinations point to the required block. */
104 && t
->dest
!= *then_bb
)
107 && e
->dest
!= *else_bb
)
118 /* Verify if the basic block BB does not have side-effects. Return
119 true in this case, else false. */
122 bb_no_side_effects_p (basic_block bb
)
124 gimple_stmt_iterator gsi
;
126 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
128 gimple stmt
= gsi_stmt (gsi
);
130 if (gimple_has_side_effects (stmt
)
131 || gimple_vuse (stmt
))
138 /* Verify if all PHI node arguments in DEST for edges from BB1 or
139 BB2 to DEST are the same. This makes the CFG merge point
140 free from side-effects. Return true in this case, else false. */
143 same_phi_args_p (basic_block bb1
, basic_block bb2
, basic_block dest
)
145 edge e1
= find_edge (bb1
, dest
);
146 edge e2
= find_edge (bb2
, dest
);
147 gimple_stmt_iterator gsi
;
150 for (gsi
= gsi_start_phis (dest
); !gsi_end_p (gsi
); gsi_next (&gsi
))
152 phi
= gsi_stmt (gsi
);
153 if (!operand_equal_p (PHI_ARG_DEF_FROM_EDGE (phi
, e1
),
154 PHI_ARG_DEF_FROM_EDGE (phi
, e2
), 0))
161 /* Return the best representative SSA name for CANDIDATE which is used
165 get_name_for_bit_test (tree candidate
)
167 /* Skip single-use names in favor of using the name from a
168 non-widening conversion definition. */
169 if (TREE_CODE (candidate
) == SSA_NAME
170 && has_single_use (candidate
))
172 gimple def_stmt
= SSA_NAME_DEF_STMT (candidate
);
173 if (is_gimple_assign (def_stmt
)
174 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt
)))
176 if (TYPE_PRECISION (TREE_TYPE (candidate
))
177 <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (def_stmt
))))
178 return gimple_assign_rhs1 (def_stmt
);
185 /* Recognize a single bit test pattern in GIMPLE_COND and its defining
186 statements. Store the name being tested in *NAME and the bit
187 in *BIT. The GIMPLE_COND computes *NAME & (1 << *BIT).
188 Returns true if the pattern matched, false otherwise. */
191 recognize_single_bit_test (gimple cond
, tree
*name
, tree
*bit
, bool inv
)
195 /* Get at the definition of the result of the bit test. */
196 if (gimple_cond_code (cond
) != (inv
? EQ_EXPR
: NE_EXPR
)
197 || TREE_CODE (gimple_cond_lhs (cond
)) != SSA_NAME
198 || !integer_zerop (gimple_cond_rhs (cond
)))
200 stmt
= SSA_NAME_DEF_STMT (gimple_cond_lhs (cond
));
201 if (!is_gimple_assign (stmt
))
204 /* Look at which bit is tested. One form to recognize is
205 D.1985_5 = state_3(D) >> control1_4(D);
206 D.1986_6 = (int) D.1985_5;
208 if (D.1987_7 != 0) */
209 if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
210 && integer_onep (gimple_assign_rhs2 (stmt
))
211 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
)
213 tree orig_name
= gimple_assign_rhs1 (stmt
);
215 /* Look through copies and conversions to eventually
216 find the stmt that computes the shift. */
217 stmt
= SSA_NAME_DEF_STMT (orig_name
);
219 while (is_gimple_assign (stmt
)
220 && ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
221 && (TYPE_PRECISION (TREE_TYPE (gimple_assign_lhs (stmt
)))
222 <= TYPE_PRECISION (TREE_TYPE (gimple_assign_rhs1 (stmt
)))))
223 || gimple_assign_ssa_name_copy_p (stmt
)))
224 stmt
= SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt
));
226 /* If we found such, decompose it. */
227 if (is_gimple_assign (stmt
)
228 && gimple_assign_rhs_code (stmt
) == RSHIFT_EXPR
)
230 /* op0 & (1 << op1) */
231 *bit
= gimple_assign_rhs2 (stmt
);
232 *name
= gimple_assign_rhs1 (stmt
);
237 *bit
= integer_zero_node
;
238 *name
= get_name_for_bit_test (orig_name
);
245 D.1987_7 = op0 & (1 << CST)
246 if (D.1987_7 != 0) */
247 if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
248 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
249 && integer_pow2p (gimple_assign_rhs2 (stmt
)))
251 *name
= gimple_assign_rhs1 (stmt
);
252 *bit
= build_int_cst (integer_type_node
,
253 tree_log2 (gimple_assign_rhs2 (stmt
)));
258 D.1986_6 = 1 << control1_4(D)
259 D.1987_7 = op0 & D.1986_6
260 if (D.1987_7 != 0) */
261 if (gimple_assign_rhs_code (stmt
) == BIT_AND_EXPR
262 && TREE_CODE (gimple_assign_rhs1 (stmt
)) == SSA_NAME
263 && TREE_CODE (gimple_assign_rhs2 (stmt
)) == SSA_NAME
)
267 /* Both arguments of the BIT_AND_EXPR can be the single-bit
268 specifying expression. */
269 tmp
= SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt
));
270 if (is_gimple_assign (tmp
)
271 && gimple_assign_rhs_code (tmp
) == LSHIFT_EXPR
272 && integer_onep (gimple_assign_rhs1 (tmp
)))
274 *name
= gimple_assign_rhs2 (stmt
);
275 *bit
= gimple_assign_rhs2 (tmp
);
279 tmp
= SSA_NAME_DEF_STMT (gimple_assign_rhs2 (stmt
));
280 if (is_gimple_assign (tmp
)
281 && gimple_assign_rhs_code (tmp
) == LSHIFT_EXPR
282 && integer_onep (gimple_assign_rhs1 (tmp
)))
284 *name
= gimple_assign_rhs1 (stmt
);
285 *bit
= gimple_assign_rhs2 (tmp
);
293 /* Recognize a bit test pattern in a GIMPLE_COND and its defining
294 statements. Store the name being tested in *NAME and the bits
295 in *BITS. The COND_EXPR computes *NAME & *BITS.
296 Returns true if the pattern matched, false otherwise. */
299 recognize_bits_test (gimple cond
, tree
*name
, tree
*bits
, bool inv
)
303 /* Get at the definition of the result of the bit test. */
304 if (gimple_cond_code (cond
) != (inv
? EQ_EXPR
: NE_EXPR
)
305 || TREE_CODE (gimple_cond_lhs (cond
)) != SSA_NAME
306 || !integer_zerop (gimple_cond_rhs (cond
)))
308 stmt
= SSA_NAME_DEF_STMT (gimple_cond_lhs (cond
));
309 if (!is_gimple_assign (stmt
)
310 || gimple_assign_rhs_code (stmt
) != BIT_AND_EXPR
)
313 *name
= get_name_for_bit_test (gimple_assign_rhs1 (stmt
));
314 *bits
= gimple_assign_rhs2 (stmt
);
319 /* If-convert on a and pattern with a common else block. The inner
320 if is specified by its INNER_COND_BB, the outer by OUTER_COND_BB.
321 inner_inv, outer_inv and result_inv indicate whether the conditions
323 Returns true if the edges to the common else basic-block were merged. */
326 ifcombine_ifandif (basic_block inner_cond_bb
, bool inner_inv
,
327 basic_block outer_cond_bb
, bool outer_inv
, bool result_inv
)
329 gimple_stmt_iterator gsi
;
330 gimple inner_cond
, outer_cond
;
331 tree name1
, name2
, bit1
, bit2
, bits1
, bits2
;
333 inner_cond
= last_stmt (inner_cond_bb
);
335 || gimple_code (inner_cond
) != GIMPLE_COND
)
338 outer_cond
= last_stmt (outer_cond_bb
);
340 || gimple_code (outer_cond
) != GIMPLE_COND
)
343 /* See if we test a single bit of the same name in both tests. In
344 that case remove the outer test, merging both else edges,
345 and change the inner one to test for
346 name & (bit1 | bit2) == (bit1 | bit2). */
347 if (recognize_single_bit_test (inner_cond
, &name1
, &bit1
, inner_inv
)
348 && recognize_single_bit_test (outer_cond
, &name2
, &bit2
, outer_inv
)
354 gsi
= gsi_for_stmt (inner_cond
);
355 t
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (name1
),
356 build_int_cst (TREE_TYPE (name1
), 1), bit1
);
357 t2
= fold_build2 (LSHIFT_EXPR
, TREE_TYPE (name1
),
358 build_int_cst (TREE_TYPE (name1
), 1), bit2
);
359 t
= fold_build2 (BIT_IOR_EXPR
, TREE_TYPE (name1
), t
, t2
);
360 t
= force_gimple_operand_gsi (&gsi
, t
, true, NULL_TREE
,
361 true, GSI_SAME_STMT
);
362 t2
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (name1
), name1
, t
);
363 t2
= force_gimple_operand_gsi (&gsi
, t2
, true, NULL_TREE
,
364 true, GSI_SAME_STMT
);
365 t
= fold_build2 (result_inv
? NE_EXPR
: EQ_EXPR
,
366 boolean_type_node
, t2
, t
);
367 t
= canonicalize_cond_expr_cond (t
);
370 gimple_cond_set_condition_from_tree (inner_cond
, t
);
371 update_stmt (inner_cond
);
373 /* Leave CFG optimization to cfg_cleanup. */
374 gimple_cond_set_condition_from_tree (outer_cond
,
375 outer_inv
? boolean_false_node
: boolean_true_node
);
376 update_stmt (outer_cond
);
380 fprintf (dump_file
, "optimizing double bit test to ");
381 print_generic_expr (dump_file
, name1
, 0);
382 fprintf (dump_file
, " & T == T\nwith temporary T = (1 << ");
383 print_generic_expr (dump_file
, bit1
, 0);
384 fprintf (dump_file
, ") | (1 << ");
385 print_generic_expr (dump_file
, bit2
, 0);
386 fprintf (dump_file
, ")\n");
392 /* See if we have two bit tests of the same name in both tests.
393 In that case remove the outer test and change the inner one to
394 test for name & (bits1 | bits2) != 0. */
395 else if (recognize_bits_test (inner_cond
, &name1
, &bits1
, !inner_inv
)
396 && recognize_bits_test (outer_cond
, &name2
, &bits2
, !outer_inv
))
398 gimple_stmt_iterator gsi
;
401 /* Find the common name which is bit-tested. */
404 else if (bits1
== bits2
)
413 else if (name1
== bits2
)
419 else if (bits1
== name2
)
428 /* As we strip non-widening conversions in finding a common
429 name that is tested make sure to end up with an integral
430 type for building the bit operations. */
431 if (TYPE_PRECISION (TREE_TYPE (bits1
))
432 >= TYPE_PRECISION (TREE_TYPE (bits2
)))
434 bits1
= fold_convert (unsigned_type_for (TREE_TYPE (bits1
)), bits1
);
435 name1
= fold_convert (TREE_TYPE (bits1
), name1
);
436 bits2
= fold_convert (unsigned_type_for (TREE_TYPE (bits2
)), bits2
);
437 bits2
= fold_convert (TREE_TYPE (bits1
), bits2
);
441 bits2
= fold_convert (unsigned_type_for (TREE_TYPE (bits2
)), bits2
);
442 name1
= fold_convert (TREE_TYPE (bits2
), name1
);
443 bits1
= fold_convert (unsigned_type_for (TREE_TYPE (bits1
)), bits1
);
444 bits1
= fold_convert (TREE_TYPE (bits2
), bits1
);
448 gsi
= gsi_for_stmt (inner_cond
);
449 t
= fold_build2 (BIT_IOR_EXPR
, TREE_TYPE (name1
), bits1
, bits2
);
450 t
= force_gimple_operand_gsi (&gsi
, t
, true, NULL_TREE
,
451 true, GSI_SAME_STMT
);
452 t
= fold_build2 (BIT_AND_EXPR
, TREE_TYPE (name1
), name1
, t
);
453 t
= force_gimple_operand_gsi (&gsi
, t
, true, NULL_TREE
,
454 true, GSI_SAME_STMT
);
455 t
= fold_build2 (result_inv
? NE_EXPR
: EQ_EXPR
, boolean_type_node
, t
,
456 build_int_cst (TREE_TYPE (t
), 0));
457 t
= canonicalize_cond_expr_cond (t
);
460 gimple_cond_set_condition_from_tree (inner_cond
, t
);
461 update_stmt (inner_cond
);
463 /* Leave CFG optimization to cfg_cleanup. */
464 gimple_cond_set_condition_from_tree (outer_cond
,
465 outer_inv
? boolean_false_node
: boolean_true_node
);
466 update_stmt (outer_cond
);
470 fprintf (dump_file
, "optimizing bits or bits test to ");
471 print_generic_expr (dump_file
, name1
, 0);
472 fprintf (dump_file
, " & T != 0\nwith temporary T = ");
473 print_generic_expr (dump_file
, bits1
, 0);
474 fprintf (dump_file
, " | ");
475 print_generic_expr (dump_file
, bits2
, 0);
476 fprintf (dump_file
, "\n");
482 /* See if we have two comparisons that we can merge into one. */
483 else if (TREE_CODE_CLASS (gimple_cond_code (inner_cond
)) == tcc_comparison
484 && TREE_CODE_CLASS (gimple_cond_code (outer_cond
)) == tcc_comparison
)
487 enum tree_code inner_cond_code
= gimple_cond_code (inner_cond
);
488 enum tree_code outer_cond_code
= gimple_cond_code (outer_cond
);
490 /* Invert comparisons if necessary (and possible). */
492 inner_cond_code
= invert_tree_comparison (inner_cond_code
,
493 HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (inner_cond
)))));
494 if (inner_cond_code
== ERROR_MARK
)
497 outer_cond_code
= invert_tree_comparison (outer_cond_code
,
498 HONOR_NANS (TYPE_MODE (TREE_TYPE (gimple_cond_lhs (outer_cond
)))));
499 if (outer_cond_code
== ERROR_MARK
)
501 /* Don't return false so fast, try maybe_fold_or_comparisons? */
503 if (!(t
= maybe_fold_and_comparisons (inner_cond_code
,
504 gimple_cond_lhs (inner_cond
),
505 gimple_cond_rhs (inner_cond
),
507 gimple_cond_lhs (outer_cond
),
508 gimple_cond_rhs (outer_cond
))))
511 gimple_stmt_iterator gsi
;
512 if (!LOGICAL_OP_NON_SHORT_CIRCUIT
)
514 /* Only do this optimization if the inner bb contains only the conditional. */
515 if (!gsi_one_before_end_p (gsi_start_nondebug_after_labels_bb (inner_cond_bb
)))
517 t1
= fold_build2_loc (gimple_location (inner_cond
),
520 gimple_cond_lhs (inner_cond
),
521 gimple_cond_rhs (inner_cond
));
522 t2
= fold_build2_loc (gimple_location (outer_cond
),
525 gimple_cond_lhs (outer_cond
),
526 gimple_cond_rhs (outer_cond
));
527 t
= fold_build2_loc (gimple_location (inner_cond
),
528 TRUTH_AND_EXPR
, boolean_type_node
, t1
, t2
);
531 t
= fold_build1 (TRUTH_NOT_EXPR
, TREE_TYPE (t
), t
);
534 gsi
= gsi_for_stmt (inner_cond
);
535 t
= force_gimple_operand_gsi_1 (&gsi
, t
, is_gimple_condexpr
, NULL
, true,
539 t
= fold_build1 (TRUTH_NOT_EXPR
, TREE_TYPE (t
), t
);
540 t
= canonicalize_cond_expr_cond (t
);
543 gimple_cond_set_condition_from_tree (inner_cond
, t
);
544 update_stmt (inner_cond
);
546 /* Leave CFG optimization to cfg_cleanup. */
547 gimple_cond_set_condition_from_tree (outer_cond
,
548 outer_inv
? boolean_false_node
: boolean_true_node
);
549 update_stmt (outer_cond
);
553 fprintf (dump_file
, "optimizing two comparisons to ");
554 print_generic_expr (dump_file
, t
, 0);
555 fprintf (dump_file
, "\n");
564 /* Recognize a CFG pattern and dispatch to the appropriate
565 if-conversion helper. We start with BB as the innermost
566 worker basic-block. Returns true if a transformation was done. */
569 tree_ssa_ifcombine_bb (basic_block inner_cond_bb
)
571 basic_block then_bb
= NULL
, else_bb
= NULL
;
573 if (!recognize_if_then_else (inner_cond_bb
, &then_bb
, &else_bb
))
576 /* Recognize && and || of two conditions with a common
577 then/else block which entry edges we can merge. That is:
583 This requires a single predecessor of the inner cond_bb. */
584 if (single_pred_p (inner_cond_bb
))
586 basic_block outer_cond_bb
= single_pred (inner_cond_bb
);
588 /* The && form is characterized by a common else_bb with
589 the two edges leading to it mergable. The latter is
590 guaranteed by matching PHI arguments in the else_bb and
591 the inner cond_bb having no side-effects. */
592 if (recognize_if_then_else (outer_cond_bb
, &inner_cond_bb
, &else_bb
)
593 && same_phi_args_p (outer_cond_bb
, inner_cond_bb
, else_bb
)
594 && bb_no_side_effects_p (inner_cond_bb
))
598 if (q) goto inner_cond_bb; else goto else_bb;
600 if (p) goto ...; else goto else_bb;
605 return ifcombine_ifandif (inner_cond_bb
, false, outer_cond_bb
, false,
609 /* And a version where the outer condition is negated. */
610 if (recognize_if_then_else (outer_cond_bb
, &else_bb
, &inner_cond_bb
)
611 && same_phi_args_p (outer_cond_bb
, inner_cond_bb
, else_bb
)
612 && bb_no_side_effects_p (inner_cond_bb
))
616 if (q) goto else_bb; else goto inner_cond_bb;
618 if (p) goto ...; else goto else_bb;
623 return ifcombine_ifandif (inner_cond_bb
, false, outer_cond_bb
, true,
627 /* The || form is characterized by a common then_bb with the
628 two edges leading to it mergable. The latter is guaranteed
629 by matching PHI arguments in the then_bb and the inner cond_bb
630 having no side-effects. */
631 if (recognize_if_then_else (outer_cond_bb
, &then_bb
, &inner_cond_bb
)
632 && same_phi_args_p (outer_cond_bb
, inner_cond_bb
, then_bb
)
633 && bb_no_side_effects_p (inner_cond_bb
))
637 if (q) goto then_bb; else goto inner_cond_bb;
639 if (q) goto then_bb; else goto ...;
643 return ifcombine_ifandif (inner_cond_bb
, true, outer_cond_bb
, true,
647 /* And a version where the outer condition is negated. */
648 if (recognize_if_then_else (outer_cond_bb
, &inner_cond_bb
, &then_bb
)
649 && same_phi_args_p (outer_cond_bb
, inner_cond_bb
, then_bb
)
650 && bb_no_side_effects_p (inner_cond_bb
))
654 if (q) goto inner_cond_bb; else goto then_bb;
656 if (q) goto then_bb; else goto ...;
660 return ifcombine_ifandif (inner_cond_bb
, true, outer_cond_bb
, false,
668 /* Main entry for the tree if-conversion pass. */
671 tree_ssa_ifcombine (void)
674 bool cfg_changed
= false;
677 bbs
= single_pred_before_succ_order ();
678 calculate_dominance_info (CDI_DOMINATORS
);
680 /* Search every basic block for COND_EXPR we may be able to optimize.
682 We walk the blocks in order that guarantees that a block with
683 a single predecessor is processed after the predecessor.
684 This ensures that we collapse outter ifs before visiting the
685 inner ones, and also that we do not try to visit a removed
686 block. This is opposite of PHI-OPT, because we cascade the
687 combining rather than cascading PHIs. */
688 for (i
= n_basic_blocks_for_fn (cfun
) - NUM_FIXED_BLOCKS
- 1; i
>= 0; i
--)
690 basic_block bb
= bbs
[i
];
691 gimple stmt
= last_stmt (bb
);
694 && gimple_code (stmt
) == GIMPLE_COND
)
695 cfg_changed
|= tree_ssa_ifcombine_bb (bb
);
700 return cfg_changed
? TODO_cleanup_cfg
: 0;
/* Gate function: the pass is unconditionally enabled at its place
   in the pass pipeline.  */

static bool
gate_ifcombine (void)
{
  return true;
}
711 const pass_data pass_data_tree_ifcombine
=
713 GIMPLE_PASS
, /* type */
714 "ifcombine", /* name */
715 OPTGROUP_NONE
, /* optinfo_flags */
717 true, /* has_execute */
718 TV_TREE_IFCOMBINE
, /* tv_id */
719 ( PROP_cfg
| PROP_ssa
), /* properties_required */
720 0, /* properties_provided */
721 0, /* properties_destroyed */
722 0, /* todo_flags_start */
723 ( TODO_update_ssa
| TODO_verify_ssa
), /* todo_flags_finish */
726 class pass_tree_ifcombine
: public gimple_opt_pass
729 pass_tree_ifcombine (gcc::context
*ctxt
)
730 : gimple_opt_pass (pass_data_tree_ifcombine
, ctxt
)
733 /* opt_pass methods: */
734 bool gate () { return gate_ifcombine (); }
735 unsigned int execute () { return tree_ssa_ifcombine (); }
737 }; // class pass_tree_ifcombine
742 make_pass_tree_ifcombine (gcc::context
*ctxt
)
744 return new pass_tree_ifcombine (ctxt
);