/* CFG cleanup for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "function.h"
#include "langhooks.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "except.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "tree-scalar-evolution.h"

/* The set of blocks in which at least one of the following changes happened:
   -- the statement at the end of the block was changed
   -- the block was newly created
   -- the set of the predecessors of the block changed
   -- the set of the successors of the block changed
   ??? Maybe we could track these changes separately, since they determine
       what cleanups it makes sense to try on the block.  */
bitmap cfgcleanup_altered_bbs;

/* Remove any fallthru edge from EV.  Return true if an edge was removed.  */

static bool
remove_fallthru_edge (vec<edge, va_gc> *ev)
{
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, ev)
    if ((e->flags & EDGE_FALLTHRU) != 0)
      {
        if (e->flags & EDGE_COMPLEX)
          e->flags &= ~EDGE_FALLTHRU;
        else
          remove_edge_and_dominated_blocks (e);
        return true;
      }
  return false;
}
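
/* An illustrative sketch (block numbers hypothetical): a plain
   fallthru edge left over after a call is discovered noreturn, e.g.

     <bb 2>: foo ();   -->fallthru-->   <bb 3>: x = 1;

   is deleted together with any blocks only it reaches, whereas a
   fallthru edge that is also EDGE_COMPLEX (abnormal or EH) must be
   kept and merely loses its EDGE_FALLTHRU flag.  */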

/* Disconnect an unreachable block in the control expression starting
   at block BB.  */

static bool
cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
{
  edge taken_edge;
  bool retval = false;
  gimple stmt = gsi_stmt (gsi);
  tree val;

  if (!single_succ_p (bb))
    {
      edge e;
      edge_iterator ei;
      bool warned;
      location_t loc;

      fold_defer_overflow_warnings ();
      loc = gimple_location (stmt);
      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          val = fold_binary_loc (loc, gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));
          break;

        case GIMPLE_SWITCH:
          val = gimple_switch_index (stmt);
          break;

        default:
          val = NULL_TREE;
        }
      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
        {
          fold_undefer_and_ignore_overflow_warnings ();
          return false;
        }

      /* Remove all the edges except the one that is always executed.  */
      warned = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e != taken_edge)
            {
              if (!warned)
                {
                  fold_undefer_overflow_warnings
                    (true, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  warned = true;
                }

              taken_edge->probability += e->probability;
              taken_edge->count += e->count;
              remove_edge_and_dominated_blocks (e);
              retval = true;
            }
          else
            ei_next (&ei);
        }
      if (!warned)
        fold_undefer_and_ignore_overflow_warnings ();
      if (taken_edge->probability > REG_BR_PROB_BASE)
        taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = single_succ_edge (bb);

  bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
  gsi_remove (&gsi, true);
  taken_edge->flags = EDGE_FALLTHRU;

  return retval;
}
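
/* Worked example (hypothetical GIMPLE): if earlier passes folded the
   controlling predicate to a constant, e.g.

     if (0 != 0) goto <bb 4>; else goto <bb 5>;

   then VAL folds to boolean_false_node, find_taken_edge returns the
   edge to bb 5, the edge to bb 4 (plus any blocks only it reaches) is
   removed, and the GIMPLE_COND itself is deleted, leaving a plain
   fallthru edge to bb 5.  */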

/* Try to remove superfluous control structures in basic block BB.  Returns
   true if anything changes.  */

static bool
cleanup_control_flow_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  bool retval = false;
  gimple stmt;

  /* If the last statement of the block could throw and now cannot,
     we need to prune cfg.  */
  retval |= gimple_purge_dead_eh_edges (bb);

  gsi = gsi_last_bb (bb);
  if (gsi_end_p (gsi))
    return retval;

  stmt = gsi_stmt (gsi);

  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_SWITCH)
    retval |= cleanup_control_expr_graph (bb, gsi);
  else if (gimple_code (stmt) == GIMPLE_GOTO
           && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
               == LABEL_DECL))
    {
      /* If we had a computed goto which has a compile-time determinable
         destination, then we can eliminate the goto.  */
      edge e;
      tree label;
      edge_iterator ei;
      basic_block target_block;

      /* First look at all the outgoing edges.  Delete any outgoing
         edges which do not go to the right block.  For the one
         edge which goes to the right block, fix up its flags.  */
      label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
      target_block = label_to_block (label);
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->dest != target_block)
            remove_edge_and_dominated_blocks (e);
          else
            {
              /* Turn off the EDGE_ABNORMAL flag.  */
              e->flags &= ~EDGE_ABNORMAL;

              /* And set EDGE_FALLTHRU.  */
              e->flags |= EDGE_FALLTHRU;
              ei_next (&ei);
            }
        }

      bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
      bitmap_set_bit (cfgcleanup_altered_bbs, target_block->index);

      /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
         relevant information we need.  */
      gsi_remove (&gsi, true);
      retval = true;
    }

  /* Check for indirect calls that have been turned into
     noreturn calls.  */
  else if (is_gimple_call (stmt)
           && gimple_call_noreturn_p (stmt)
           && remove_fallthru_edge (bb->succs))
    retval = true;

  return retval;
}
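
/* Example of the computed-goto case above (a hypothetical sketch):

     void *targets[] = { &&L1, &&L2 };
     goto *targets[1];

   Constant propagation can reduce the goto destination to &&L2, an
   ADDR_EXPR of a LABEL_DECL.  The code above then removes the edges
   to every label except L2, clears EDGE_ABNORMAL on the surviving
   edge, sets EDGE_FALLTHRU, and deletes the GIMPLE_GOTO itself.  */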

/* Return true if basic block BB does nothing except pass control
   flow to another block, and we can safely insert a label at
   the start of the successor block.

   As a precondition, we require that BB be not equal to
   ENTRY_BLOCK_PTR.  */

static bool
tree_forwarder_block_p (basic_block bb, bool phi_wanted)
{
  gimple_stmt_iterator gsi;
  location_t locus;

  /* BB must have a single outgoing edge.  */
  if (single_succ_p (bb) != 1
      /* If PHI_WANTED is false, BB must not have any PHI nodes.
         Otherwise, BB must have PHI nodes.  */
      || gimple_seq_empty_p (phi_nodes (bb)) == phi_wanted
      /* BB may not be a predecessor of EXIT_BLOCK_PTR.  */
      || single_succ (bb) == EXIT_BLOCK_PTR
      /* Nor should this be an infinite loop.  */
      || single_succ (bb) == bb
      /* BB may not have an abnormal outgoing edge.  */
      || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    return false;

  gcc_checking_assert (bb != ENTRY_BLOCK_PTR);

  locus = single_succ_edge (bb)->goto_locus;

  /* There should not be an edge coming from entry, or an EH edge.  */
  {
    edge_iterator ei;
    edge e;

    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src == ENTRY_BLOCK_PTR || (e->flags & EDGE_EH))
        return false;
      /* If goto_locus of any of the edges differs, prevent removing
         the forwarder block for -O0.  */
      else if (optimize == 0 && e->goto_locus != locus)
        return false;
  }

  /* Now walk through the statements backward.  We can ignore labels,
     anything else means this is not a forwarder block.  */
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      switch (gimple_code (stmt))
        {
        case GIMPLE_LABEL:
          if (DECL_NONLOCAL (gimple_label_label (stmt)))
            return false;
          if (optimize == 0 && gimple_location (stmt) != locus)
            return false;
          break;

          /* ??? For now, hope there's a corresponding debug
             assignment at the destination.  */
        case GIMPLE_DEBUG:
          break;

        default:
          return false;
        }
    }

  if (current_loops)
    {
      basic_block dest;
      /* Protect loop latches, headers and preheaders.  */
      if (bb->loop_father->header == bb)
        return false;
      dest = EDGE_SUCC (bb, 0)->dest;

      if (dest->loop_father->header == dest)
        return false;
    }
  return true;
}
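
/* What a forwarder block looks like (hypothetical GIMPLE, with
   PHI_WANTED false):

     <bb 6>:
     goto <bb 8>;

   Nothing but ignorable labels and a single unconditional successor,
   so control merely passes through; such blocks are candidates for
   remove_forwarder_block below.  */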

/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
   those alternatives are equal in each of the PHI nodes, then return
   true, else return false.  */

static bool
phi_alternatives_equal (basic_block dest, edge e1, edge e2)
{
  int n1 = e1->dest_idx;
  int n2 = e2->dest_idx;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      tree val1 = gimple_phi_arg_def (phi, n1);
      tree val2 = gimple_phi_arg_def (phi, n2);

      gcc_assert (val1 != NULL_TREE);
      gcc_assert (val2 != NULL_TREE);

      if (!operand_equal_for_phi_arg_p (val1, val2))
        return false;
    }

  return true;
}
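
/* Example (hypothetical PHI): given

     # x_1 = PHI <a_2(E1), a_2(E2), b_3(E3)>

   the alternatives for E1 and E2 are equal, so one of those edges can
   be redirected onto the other without losing information; for E1 and
   E3 this function would return false.  */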

/* Removes forwarder block BB.  Returns false if this failed.  */

static bool
remove_forwarder_block (basic_block bb)
{
  edge succ = single_succ_edge (bb), e, s;
  basic_block dest = succ->dest;
  gimple label;
  edge_iterator ei;
  gimple_stmt_iterator gsi, gsi_to;
  bool can_move_debug_stmts;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label or is an
     EH landing pad, do not merge it.  */
  label = first_stmt (dest);
  if (label
      && gimple_code (label) == GIMPLE_LABEL
      && (DECL_NONLOCAL (gimple_label_label (label))
          || EH_LANDING_PAD_NR (gimple_label_label (label)) != 0))
    return false;

  /* If there is an abnormal edge to basic block BB, but not into
     dest, problems might occur during removal of the phi node at out
     of ssa due to overlapping live ranges of registers.

     If there is an abnormal edge in DEST, the problems would occur
     anyway since cleanup_dead_labels would then merge the labels for
     two different eh regions, and rest of exception handling code
     does not like it.

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.  */
  if (bb_has_abnormal_pred (bb)
      && (bb_has_abnormal_pred (dest)
          || !gimple_seq_empty_p (phi_nodes (dest))))
    return false;

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.  */
  if (!gimple_seq_empty_p (phi_nodes (dest)))
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          s = find_edge (e->src, dest);
          if (!s)
            continue;

          if (!phi_alternatives_equal (dest, succ, s))
            return false;
        }
    }

  can_move_debug_stmts = MAY_HAVE_DEBUG_STMTS && single_pred_p (dest);

  /* Redirect the edges.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);

      if (e->flags & EDGE_ABNORMAL)
        {
          /* If there is an abnormal edge, redirect it anyway, and
             move the labels to the new block to make it legal.  */
          s = redirect_edge_succ_nodup (e, dest);
        }
      else
        s = redirect_edge_and_branch (e, dest);

      if (s == e)
        {
          /* Create arguments for the phi nodes, since the edge was not
             here before.  */
          for (gsi = gsi_start_phis (dest);
               !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gimple phi = gsi_stmt (gsi);
              source_location l = gimple_phi_arg_location_from_edge (phi, succ);
              tree def = gimple_phi_arg_def (phi, succ->dest_idx);
              add_phi_arg (phi, unshare_expr (def), s, l);
            }
        }
    }

  /* Move nonlocal labels and computed goto targets as well as user
     defined labels and labels with an EH landing pad number to the
     new block, so that the redirection of the abnormal edges works,
     jump targets end up in a sane place and debug information for
     labels is retained.  */
  gsi_to = gsi_start_bb (dest);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
    {
      tree decl;
      label = gsi_stmt (gsi);
      if (is_gimple_debug (label))
        break;
      decl = gimple_label_label (label);
      if (EH_LANDING_PAD_NR (decl) != 0
          || DECL_NONLOCAL (decl)
          || FORCED_LABEL (decl)
          || !DECL_ARTIFICIAL (decl))
        {
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, label, GSI_SAME_STMT);
        }
      else
        gsi_next (&gsi);
    }

  /* Move debug statements if the destination has a single predecessor.  */
  if (can_move_debug_stmts)
    {
      gsi_to = gsi_after_labels (dest);
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); )
        {
          gimple debug = gsi_stmt (gsi);
          if (!is_gimple_debug (debug))
            break;
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, debug, GSI_SAME_STMT);
        }
    }

  bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);

  /* Update the dominators.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      basic_block dom, dombb, domdest;

      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
      if (domdest == bb)
        {
          /* Shortcut to avoid calling (relatively expensive)
             nearest_common_dominator unless necessary.  */
          dom = dombb;
        }
      else
        dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

      set_immediate_dominator (CDI_DOMINATORS, dest, dom);
    }

  /* And kill the forwarder block.  */
  delete_basic_block (bb);

  return true;
}
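
/* Before/after sketch (hypothetical block numbers):

     bb 2 --> bb 5 --> bb 7            bb 2 --> bb 7
     bb 3 --> bb 5 (forwarder)   ==>   bb 3 --> bb 7

   Essential labels from bb 5 are moved into bb 7, and when bb 7 had a
   single predecessor, bb 5's debug stmts are moved there as well.  */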

/* STMT is a call that has been discovered noreturn.  Fixup the CFG
   and remove LHS.  Return true if something changed.  */

bool
fixup_noreturn_call (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  bool changed = false;

  if (gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    return false;

  /* First split basic block if stmt is not last.  */
  if (stmt != gsi_stmt (gsi_last_bb (bb)))
    split_block (bb, stmt);

  changed |= remove_fallthru_edge (bb->succs);

  /* If there is LHS, remove it.  */
  if (gimple_call_lhs (stmt))
    {
      tree op = gimple_call_lhs (stmt);
      gimple_call_set_lhs (stmt, NULL_TREE);

      /* We need to remove SSA name to avoid checking errors.
         All uses are dominated by the noreturn and thus will
         be removed afterwards.
         We proactively remove affected non-PHI statements to avoid
         fixup_cfg from trying to update them and crashing.  */
      if (TREE_CODE (op) == SSA_NAME)
        {
          use_operand_p use_p;
          imm_use_iterator iter;
          gimple use_stmt;
          bitmap_iterator bi;
          unsigned int bb_index;

          bitmap blocks = BITMAP_ALLOC (NULL);

          FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
            {
              if (gimple_code (use_stmt) != GIMPLE_PHI)
                bitmap_set_bit (blocks, gimple_bb (use_stmt)->index);
              else
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, error_mark_node);
            }
          EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
            delete_basic_block (BASIC_BLOCK (bb_index));
          BITMAP_FREE (blocks);
          release_ssa_name (op);
        }
      update_stmt (stmt);
      changed = true;
    }
  /* Similarly remove VDEF if there is any.  */
  else if (gimple_vdef (stmt))
    update_stmt (stmt);
  return changed;
}
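
/* Example (hypothetical GIMPLE): once foo () is discovered noreturn in

     x_1 = foo ();
     y_2 = x_1 + 1;

   the LHS x_1 is dropped from the call, the blocks holding its
   non-PHI uses (all dominated by the call, hence unreachable) are
   deleted via the BLOCKS bitmap, PHI uses are overwritten with
   error_mark_node, and x_1 is released so SSA verification does not
   see a use without a def.  */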

/* Split basic blocks on calls in the middle of a basic block that are now
   known not to return, and remove the unreachable code.  */

static bool
split_bbs_on_noreturn_calls (void)
{
  bool changed = false;
  gimple stmt;
  basic_block bb;

  /* Detect cases where a mid-block call is now known not to return.  */
  if (cfun->gimple_df)
    while (vec_safe_length (MODIFIED_NORETURN_CALLS (cfun)))
      {
        stmt = MODIFIED_NORETURN_CALLS (cfun)->pop ();
        bb = gimple_bb (stmt);
        /* BB might be deleted at this point, so verify first
           BB is present in the cfg.  */
        if (bb == NULL
            || bb->index < NUM_FIXED_BLOCKS
            || bb->index >= last_basic_block
            || BASIC_BLOCK (bb->index) != bb
            || !gimple_call_noreturn_p (stmt))
          continue;

        changed |= fixup_noreturn_call (stmt);
      }

  return changed;
}
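
/* Example (hypothetical): if inlining reveals that a mid-block call
   cannot return, e.g.

     <bb 4>: a_2 = bar (); x_3 = a_2 + 1;

   the call was queued in MODIFIED_NORETURN_CALLS; fixup_noreturn_call
   then splits bb 4 after the call and remove_fallthru_edge deletes
   the dead fallthru edge, so the trailing statements become
   unreachable and are cleaned up.  */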

/* Tries to cleanup cfg in basic block BB.  Returns true if anything
   changes.  */

static bool
cleanup_tree_cfg_bb (basic_block bb)
{
  bool retval = cleanup_control_flow_bb (bb);

  if (tree_forwarder_block_p (bb, false)
      && remove_forwarder_block (bb))
    return true;

  /* Merging the blocks may create new opportunities for folding
     conditional branches (due to the elimination of single-valued PHI
     nodes).  */
  if (single_succ_p (bb)
      && can_merge_blocks_p (bb, single_succ (bb)))
    {
      merge_blocks (bb, single_succ (bb));
      return true;
    }

  return retval;
}

/* Iterate the cfg cleanups, while anything changes.  */

static bool
cleanup_tree_cfg_1 (void)
{
  bool retval = false;
  basic_block bb;
  unsigned i, n;

  retval |= split_bbs_on_noreturn_calls ();

  /* Prepare the worklists of altered blocks.  */
  cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL);

  /* During forwarder block cleanup, we may redirect edges out of
     SWITCH_EXPRs, which can get expensive.  So we want to enable
     recording of edge to CASE_LABEL_EXPR.  */
  start_recording_case_labels ();

  /* Start by iterating over all basic blocks.  We cannot use FOR_EACH_BB,
     since the basic blocks may get removed.  */
  n = last_basic_block;
  for (i = NUM_FIXED_BLOCKS; i < n; i++)
    {
      bb = BASIC_BLOCK (i);
      if (bb)
        retval |= cleanup_tree_cfg_bb (bb);
    }

  /* Now process the altered blocks, as long as any are available.  */
  while (!bitmap_empty_p (cfgcleanup_altered_bbs))
    {
      i = bitmap_first_set_bit (cfgcleanup_altered_bbs);
      bitmap_clear_bit (cfgcleanup_altered_bbs, i);
      if (i < NUM_FIXED_BLOCKS)
        continue;

      bb = BASIC_BLOCK (i);
      if (!bb)
        continue;

      retval |= cleanup_tree_cfg_bb (bb);

      /* Rerun split_bbs_on_noreturn_calls, in case we have altered any noreturn
         calls.  */
      retval |= split_bbs_on_noreturn_calls ();
    }

  end_recording_case_labels ();
  BITMAP_FREE (cfgcleanup_altered_bbs);
  return retval;
}
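
/* Worklist illustration (a sketch): when remove_forwarder_block
   deletes a block, it marks the block's former predecessors and its
   destination in CFGCLEANUP_ALTERED_BBS; the loop above then revisits
   e.g. a predecessor whose conditional just became foldable, until
   the bitmap drains and a fixed point is reached.  */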

/* Remove unreachable blocks and other miscellaneous clean up work.
   Return true if the flowgraph was modified, false otherwise.  */

static bool
cleanup_tree_cfg_noloop (void)
{
  bool changed;

  timevar_push (TV_TREE_CLEANUP_CFG);

  /* Iterate until there are no more cleanups left to do.  If any
     iteration changed the flowgraph, set CHANGED to true.

     If dominance information is available, there cannot be any unreachable
     blocks.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      changed = delete_unreachable_blocks ();
      calculate_dominance_info (CDI_DOMINATORS);
    }
  else
    {
#ifdef ENABLE_CHECKING
      verify_dominators (CDI_DOMINATORS);
#endif
      changed = false;
    }

  changed |= cleanup_tree_cfg_1 ();

  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  compact_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  timevar_pop (TV_TREE_CLEANUP_CFG);

  if (changed && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return changed;
}

/* Repairs loop structures.  */

static void
repair_loop_structures (void)
{
  bitmap changed_bbs;
  unsigned n_new_loops;

  calculate_dominance_info (CDI_DOMINATORS);

  timevar_push (TV_REPAIR_LOOPS);
  changed_bbs = BITMAP_ALLOC (NULL);
  n_new_loops = fix_loop_structure (changed_bbs);

  /* This usually does nothing.  But sometimes parts of cfg that originally
     were inside a loop get out of it due to edge removal (since they
     become unreachable by back edges from latch).  Also a former
     irreducible loop can become reducible - in this case force a full
     rewrite into loop-closed SSA form.  */
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    rewrite_into_loop_closed_ssa (n_new_loops ? NULL : changed_bbs,
                                  TODO_update_ssa);

  BITMAP_FREE (changed_bbs);

#ifdef ENABLE_CHECKING
  verify_loop_structure ();
#endif
  scev_reset ();

  timevar_pop (TV_REPAIR_LOOPS);
}

/* Cleanup cfg and repair loop structures.  */

bool
cleanup_tree_cfg (void)
{
  bool changed = cleanup_tree_cfg_noloop ();

  if (current_loops != NULL
      && loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    repair_loop_structures ();

  return changed;
}
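
/* Typical use, as in execute_cleanup_cfg_post_optimizing below:

     if (cleanup_tree_cfg ())
       todo |= TODO_update_ssa;

   i.e. callers schedule an SSA update only when the flowgraph
   actually changed.  */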

/* Tries to merge the PHI nodes at BB into those at BB's sole successor.
   Returns true if successful.  */

static bool
remove_forwarder_block_with_phi (basic_block bb)
{
  edge succ = single_succ_edge (bb);
  basic_block dest = succ->dest;
  gimple label;
  basic_block dombb, domdest, dom;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label, do not
     merge it.  */
  label = first_stmt (dest);
  if (label
      && gimple_code (label) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (label)))
    return false;

  /* Redirect each incoming edge to BB to DEST.  */
  while (EDGE_COUNT (bb->preds) > 0)
    {
      edge e = EDGE_PRED (bb, 0), s;
      gimple_stmt_iterator gsi;

      s = find_edge (e->src, dest);
      if (s)
        {
          /* We already have an edge S from E->src to DEST.  If S and
             E->dest's sole successor edge have the same PHI arguments
             at DEST, redirect S to DEST.  */
          if (phi_alternatives_equal (dest, s, succ))
            {
              e = redirect_edge_and_branch (e, dest);
              redirect_edge_var_map_clear (e);
              continue;
            }

          /* PHI arguments are different.  Create a forwarder block by
             splitting E so that we can merge PHI arguments on E to
             DEST.  */
          e = single_succ_edge (split_edge (e));
        }

      s = redirect_edge_and_branch (e, dest);

      /* redirect_edge_and_branch must not create a new edge.  */
      gcc_assert (s == e);

      /* Add to the PHI nodes at DEST each PHI argument removed at the
         destination of E.  */
      for (gsi = gsi_start_phis (dest);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple phi = gsi_stmt (gsi);
          tree def = gimple_phi_arg_def (phi, succ->dest_idx);
          source_location locus = gimple_phi_arg_location_from_edge (phi, succ);

          if (TREE_CODE (def) == SSA_NAME)
            {
              edge_var_map_vector *head;
              edge_var_map *vm;
              size_t i;

              /* If DEF is one of the results of PHI nodes removed during
                 redirection, replace it with the PHI argument that used
                 to be on E.  */
              head = redirect_edge_var_map_vector (e);
              FOR_EACH_VEC_SAFE_ELT (head, i, vm)
                {
                  tree old_arg = redirect_edge_var_map_result (vm);
                  tree new_arg = redirect_edge_var_map_def (vm);

                  if (def == old_arg)
                    {
                      def = new_arg;
                      locus = redirect_edge_var_map_location (vm);
                      break;
                    }
                }
            }

          add_phi_arg (phi, def, s, locus);
        }

      redirect_edge_var_map_clear (e);
    }

  /* Update the dominators.  */
  dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
  domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
  if (domdest == bb)
    {
      /* Shortcut to avoid calling (relatively expensive)
         nearest_common_dominator unless necessary.  */
      dom = dombb;
    }
  else
    dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

  set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Remove BB since all of BB's incoming edges have been redirected
     to DEST.  */
  delete_basic_block (bb);

  return true;
}

/* This pass merges PHI nodes if one feeds into another.  For example,
   suppose we have the following:

  goto <bb 9> (<L9>);

<L8>:;
  tem_17 = foo ();

  # tem_6 = PHI <tem_17(8), tem_23(7)>;
<L9>:;

  # tem_3 = PHI <tem_6(9), tem_2(5)>;
<L10>:;

  Then we merge the first PHI node into the second one like so:

  goto <bb 9> (<L10>);

<L8>:;
  tem_17 = foo ();

  # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
<L10>:;
*/

static unsigned int
merge_phi_nodes (void)
{
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks);
  basic_block *current = worklist;
  basic_block bb;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Find all PHI nodes that we may be able to merge.  */
  FOR_EACH_BB (bb)
    {
      basic_block dest;

      /* Look for a forwarder block with PHI nodes.  */
      if (!tree_forwarder_block_p (bb, true))
        continue;

      dest = single_succ (bb);

      /* We have to feed into another basic block with PHI
         nodes.  */
      if (gimple_seq_empty_p (phi_nodes (dest))
          /* We don't want to deal with a basic block with
             abnormal edges.  */
          || bb_has_abnormal_pred (bb))
        continue;

      if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
        {
          /* If BB does not dominate DEST, then the PHI nodes at
             DEST must be the only users of the results of the PHI
             nodes at BB.  */
          *current++ = bb;
        }
      else
        {
          gimple_stmt_iterator gsi;
          unsigned int dest_idx = single_succ_edge (bb)->dest_idx;

          /* BB dominates DEST.  There may be many users of the PHI
             nodes in BB.  However, there is still a trivial case we
             can handle.  If the result of every PHI in BB is used
             only by a PHI in DEST, then we can trivially merge the
             PHI nodes from BB into DEST.  */
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gimple phi = gsi_stmt (gsi);
              tree result = gimple_phi_result (phi);
              use_operand_p imm_use;
              gimple use_stmt;

              /* If the PHI's result is never used, then we can just
                 ignore it.  */
              if (has_zero_uses (result))
                continue;

              /* Get the single use of the result of this PHI node.  */
              if (!single_imm_use (result, &imm_use, &use_stmt)
                  || gimple_code (use_stmt) != GIMPLE_PHI
                  || gimple_bb (use_stmt) != dest
                  || gimple_phi_arg_def (use_stmt, dest_idx) != result)
                break;
            }

          /* If the loop above iterated through all the PHI nodes
             in BB, then we can merge the PHIs from BB into DEST.  */
          if (gsi_end_p (gsi))
            *current++ = bb;
        }
    }

  /* Now let's drain WORKLIST.  */
  bool changed = false;
  while (current != worklist)
    {
      bb = *--current;
      changed |= remove_forwarder_block_with_phi (bb);
    }
  free (worklist);

  /* Removing forwarder blocks can cause formerly irreducible loops
     to become reducible if we merged two entry blocks.  */
  if (changed
      && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return 0;
}

static bool
gate_merge_phi (void)
{
  return 1;
}

namespace {

const pass_data pass_data_merge_phi =
{
  GIMPLE_PASS, /* type */
  "mergephi", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_MERGE_PHI, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_merge_phi : public gimple_opt_pass
{
public:
  pass_merge_phi (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_merge_phi, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_merge_phi (m_ctxt); }
  bool gate () { return gate_merge_phi (); }
  unsigned int execute () { return merge_phi_nodes (); }

}; // class pass_merge_phi

} // anon namespace

gimple_opt_pass *
make_pass_merge_phi (gcc::context *ctxt)
{
  return new pass_merge_phi (ctxt);
}

/* Pass: cleanup the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  */

static unsigned int
execute_cleanup_cfg_post_optimizing (void)
{
  unsigned int todo = 0;
  if (cleanup_tree_cfg ())
    todo |= TODO_update_ssa;
  maybe_remove_unreachable_handlers ();
  cleanup_dead_labels ();
  group_case_labels ();
  if ((flag_compare_debug_opt || flag_compare_debug)
      && flag_dump_final_insns)
    {
      FILE *final_output = fopen (flag_dump_final_insns, "a");

      if (!final_output)
        {
          error ("could not open final insn dump file %qs: %m",
                 flag_dump_final_insns);
          flag_dump_final_insns = NULL;
        }
      else
        {
          int save_unnumbered = flag_dump_unnumbered;
          int save_noaddr = flag_dump_noaddr;

          flag_dump_noaddr = flag_dump_unnumbered = 1;
          fprintf (final_output, "\n");
          dump_enumerated_decls (final_output, dump_flags | TDF_NOUID);
          flag_dump_noaddr = save_noaddr;
          flag_dump_unnumbered = save_unnumbered;
          if (fclose (final_output))
            {
              error ("could not close final insn dump file %qs: %m",
                     flag_dump_final_insns);
              flag_dump_final_insns = NULL;
            }
        }
    }
  return todo;
}

namespace {

const pass_data pass_data_cleanup_cfg_post_optimizing =
{
  GIMPLE_PASS, /* type */
  "optimized", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CLEANUP_CFG, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_remove_unused_locals, /* todo_flags_finish */
};

class pass_cleanup_cfg_post_optimizing : public gimple_opt_pass
{
public:
  pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cleanup_cfg_post_optimizing, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () {
    return execute_cleanup_cfg_post_optimizing ();
  }

}; // class pass_cleanup_cfg_post_optimizing

} // anon namespace

gimple_opt_pass *
make_pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
{
  return new pass_cleanup_cfg_post_optimizing (ctxt);
}