/* CFG cleanup for trees.
   Copyright (C) 2001-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "function.h"
#include "langhooks.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-manip.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "except.h"
#include "cfgloop.h"
#include "hashtab.h"
#include "tree-ssa-propagate.h"
#include "tree-scalar-evolution.h"

/* The set of blocks in which at least one of the following changes happened:
   -- the statement at the end of the block was changed
   -- the block was newly created
   -- the set of the predecessors of the block changed
   -- the set of the successors of the block changed
   ??? Maybe we could track these changes separately, since they determine
       what cleanups it makes sense to try on the block.  */
bitmap cfgcleanup_altered_bbs;
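
/* This bitmap doubles as the worklist of cleanup_tree_cfg_1 below: the
   transforms in this file record every block they touch in it, and the
   driver keeps re-running the per-block cleanups until it is empty.  */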

/* Remove any fallthru edge from EV.  Return true if an edge was removed.  */

static bool
remove_fallthru_edge (vec<edge, va_gc> *ev)
{
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, ev)
    if ((e->flags & EDGE_FALLTHRU) != 0)
      {
        if (e->flags & EDGE_COMPLEX)
          e->flags &= ~EDGE_FALLTHRU;
        else
          remove_edge_and_dominated_blocks (e);
        return true;
      }
  return false;
}

/* Disconnect an unreachable block in the control expression starting
   at block BB.  */

static bool
cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
{
  edge taken_edge;
  bool retval = false;
  gimple stmt = gsi_stmt (gsi);
  tree val;

  if (!single_succ_p (bb))
    {
      edge e;
      edge_iterator ei;
      bool warned;
      location_t loc;

      fold_defer_overflow_warnings ();
      loc = gimple_location (stmt);
      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          val = fold_binary_loc (loc, gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));
          break;

        case GIMPLE_SWITCH:
          val = gimple_switch_index (stmt);
          break;

        default:
          val = NULL_TREE;
        }
      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
        {
          fold_undefer_and_ignore_overflow_warnings ();
          return false;
        }

      /* Remove all the edges except the one that is always executed.  */
      warned = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e != taken_edge)
            {
              if (!warned)
                {
                  fold_undefer_overflow_warnings
                    (true, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  warned = true;
                }

              taken_edge->probability += e->probability;
              taken_edge->count += e->count;
              remove_edge_and_dominated_blocks (e);
              retval = true;
            }
          else
            ei_next (&ei);
        }
      if (!warned)
        fold_undefer_and_ignore_overflow_warnings ();
      if (taken_edge->probability > REG_BR_PROB_BASE)
        taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = single_succ_edge (bb);

  bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
  gsi_remove (&gsi, true);
  taken_edge->flags = EDGE_FALLTHRU;

  return retval;
}
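
/* For example (hypothetical GIMPLE, for illustration only): if BB ends in

     if (0 != 0) goto <bb 4>; else goto <bb 5>;

   the condition folds to a constant, find_taken_edge returns the edge to
   <bb 5>, the edge to <bb 4> is removed together with any blocks only it
   dominated, the GIMPLE_COND is deleted, and the surviving edge is marked
   as a plain fallthru.  */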

/* Try to remove superfluous control structures in basic block BB.  Returns
   true if anything changes.  */

static bool
cleanup_control_flow_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  gimple stmt;
  bool retval = false;

  /* If the last statement of the block could throw and now cannot,
     we need to prune the cfg.  */
  retval |= gimple_purge_dead_eh_edges (bb);

  gsi = gsi_last_bb (bb);
  if (gsi_end_p (gsi))
    return retval;

  stmt = gsi_stmt (gsi);

  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_SWITCH)
    retval |= cleanup_control_expr_graph (bb, gsi);
  else if (gimple_code (stmt) == GIMPLE_GOTO
           && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
               == LABEL_DECL))
    {
      /* If we had a computed goto which has a compile-time determinable
         destination, then we can eliminate the goto.  */
      edge e;
      tree label;
      edge_iterator ei;
      basic_block target_block;

      /* First look at all the outgoing edges.  Delete any outgoing
         edges which do not go to the right block.  For the one
         edge which goes to the right block, fix up its flags.  */
      label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
      target_block = label_to_block (label);
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->dest != target_block)
            remove_edge_and_dominated_blocks (e);
          else
            {
              /* Turn off the EDGE_ABNORMAL flag.  */
              e->flags &= ~EDGE_ABNORMAL;

              /* And set EDGE_FALLTHRU.  */
              e->flags |= EDGE_FALLTHRU;
              ei_next (&ei);
            }
        }

      bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
      bitmap_set_bit (cfgcleanup_altered_bbs, target_block->index);

      /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
         relevant information we need.  */
      gsi_remove (&gsi, true);
      retval = true;
    }

  /* Check for indirect calls that have been turned into
     noreturn calls.  */
  else if (is_gimple_call (stmt)
           && gimple_call_noreturn_p (stmt)
           && remove_fallthru_edge (bb->succs))
    retval = true;

  return retval;
}
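
/* Example of the computed-goto case (hypothetical source, for
   illustration only): after constant propagation a jump such as

     void *dest = &&lab;
     goto *dest;

   can become "goto *&&lab;", whose destination block is known at compile
   time.  Every outgoing edge except the one to "lab" is deleted, that edge
   is turned into a normal fallthru, and the goto statement itself is
   removed since the CFG edge now carries all the information.  */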

/* Return true if basic block BB does nothing except pass control
   flow to another block and that we can safely insert a label at
   the start of the successor block.

   As a precondition, we require that BB be not equal to
   ENTRY_BLOCK_PTR_FOR_FN (cfun).  */

static bool
tree_forwarder_block_p (basic_block bb, bool phi_wanted)
{
  gimple_stmt_iterator gsi;
  location_t locus;

  /* BB must have a single outgoing edge.  */
  if (single_succ_p (bb) != 1
      /* If PHI_WANTED is false, BB must not have any PHI nodes.
         Otherwise, BB must have PHI nodes.  */
      || gimple_seq_empty_p (phi_nodes (bb)) == phi_wanted
      /* BB may not be a predecessor of the exit block.  */
      || single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
      /* Nor should this be an infinite loop.  */
      || single_succ (bb) == bb
      /* BB may not have an abnormal outgoing edge.  */
      || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    return false;

  gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun));

  locus = single_succ_edge (bb)->goto_locus;

  /* There should not be an edge coming from entry, or an EH edge.  */
  {
    edge_iterator ei;
    edge e;

    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || (e->flags & EDGE_EH))
        return false;
      /* If goto_locus of any of the edges differs, prevent removing
         the forwarder block for -O0.  */
      else if (optimize == 0 && e->goto_locus != locus)
        return false;
  }

  /* Now walk through the statements backward.  We can ignore labels,
     anything else means this is not a forwarder block.  */
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      switch (gimple_code (stmt))
        {
        case GIMPLE_LABEL:
          if (DECL_NONLOCAL (gimple_label_label (stmt)))
            return false;
          if (optimize == 0 && gimple_location (stmt) != locus)
            return false;
          break;

          /* ??? For now, hope there's a corresponding debug
             assignment at the destination.  */
        case GIMPLE_DEBUG:
          break;

        default:
          return false;
        }
    }

  if (current_loops)
    {
      basic_block dest;
      /* Protect loop headers.  */
      if (bb->loop_father->header == bb)
        return false;

      dest = EDGE_SUCC (bb, 0)->dest;
      /* Protect loop preheaders and latches if requested.  */
      if (dest->loop_father->header == dest)
        {
          if (bb->loop_father == dest->loop_father)
            {
              if (loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
                return false;
              /* If bb doesn't have a single predecessor we'd make this
                 loop have multiple latches.  Don't do that if that
                 would in turn require disambiguating them.  */
              return (single_pred_p (bb)
                      || loops_state_satisfies_p
                           (LOOPS_MAY_HAVE_MULTIPLE_LATCHES));
            }
          else if (bb->loop_father == loop_outer (dest->loop_father))
            return !loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS);
          /* Always preserve other edges into loop headers that are
             not simple latches or preheaders.  */
          return false;
        }
    }

  return true;
}
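
/* In dump form, a forwarder block is simply (illustrative only):

     <bb 7>:
     goto <bb 9>;

   i.e. nothing but an unconditional transfer to a single successor,
   optionally preceded by labels (and, when PHI_WANTED, PHI nodes).  */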

/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
   those alternatives are equal in each of the PHI nodes, then return
   true, else return false.  */

static bool
phi_alternatives_equal (basic_block dest, edge e1, edge e2)
{
  int n1 = e1->dest_idx;
  int n2 = e2->dest_idx;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      tree val1 = gimple_phi_arg_def (phi, n1);
      tree val2 = gimple_phi_arg_def (phi, n2);

      gcc_assert (val1 != NULL_TREE);
      gcc_assert (val2 != NULL_TREE);

      if (!operand_equal_for_phi_arg_p (val1, val2))
        return false;
    }

  return true;
}

/* Removes forwarder block BB.  Returns false if this failed.  */

static bool
remove_forwarder_block (basic_block bb)
{
  edge succ = single_succ_edge (bb), e, s;
  basic_block dest = succ->dest;
  gimple label;
  edge_iterator ei;
  gimple_stmt_iterator gsi, gsi_to;
  bool can_move_debug_stmts;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label or is an
     EH landing pad, do not merge it.  */
  label = first_stmt (dest);
  if (label
      && gimple_code (label) == GIMPLE_LABEL
      && (DECL_NONLOCAL (gimple_label_label (label))
          || EH_LANDING_PAD_NR (gimple_label_label (label)) != 0))
    return false;

  /* If there is an abnormal edge to basic block BB, but not into
     dest, problems might occur during removal of the phi node at out
     of ssa due to overlapping live ranges of registers.

     If there is an abnormal edge in DEST, the problems would occur
     anyway since cleanup_dead_labels would then merge the labels for
     two different eh regions, and the rest of the exception handling
     code does not like it.

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.  */
  if (bb_has_abnormal_pred (bb)
      && (bb_has_abnormal_pred (dest)
          || !gimple_seq_empty_p (phi_nodes (dest))))
    return false;

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.  */
  if (!gimple_seq_empty_p (phi_nodes (dest)))
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          s = find_edge (e->src, dest);
          if (!s)
            continue;

          if (!phi_alternatives_equal (dest, succ, s))
            return false;
        }
    }

  can_move_debug_stmts = MAY_HAVE_DEBUG_STMTS && single_pred_p (dest);

  basic_block pred = NULL;
  if (single_pred_p (bb))
    pred = single_pred (bb);

  /* Redirect the edges.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);

      if (e->flags & EDGE_ABNORMAL)
        {
          /* If there is an abnormal edge, redirect it anyway, and
             move the labels to the new block to make it legal.  */
          s = redirect_edge_succ_nodup (e, dest);
        }
      else
        s = redirect_edge_and_branch (e, dest);

      if (s == e)
        {
          /* Create arguments for the phi nodes, since the edge was not
             here before.  */
          for (gsi = gsi_start_phis (dest);
               !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gimple phi = gsi_stmt (gsi);
              source_location l = gimple_phi_arg_location_from_edge (phi, succ);
              tree def = gimple_phi_arg_def (phi, succ->dest_idx);
              add_phi_arg (phi, unshare_expr (def), s, l);
            }
        }
    }

  /* Move nonlocal labels and computed goto targets as well as user
     defined labels and labels with an EH landing pad number to the
     new block, so that the redirection of the abnormal edges works,
     jump targets end up in a sane place and debug information for
     labels is retained.  */
  gsi_to = gsi_start_bb (dest);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
    {
      tree decl;
      label = gsi_stmt (gsi);
      if (is_gimple_debug (label))
        break;
      decl = gimple_label_label (label);
      if (EH_LANDING_PAD_NR (decl) != 0
          || DECL_NONLOCAL (decl)
          || FORCED_LABEL (decl)
          || !DECL_ARTIFICIAL (decl))
        {
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, label, GSI_SAME_STMT);
        }
      else
        gsi_next (&gsi);
    }

  /* Move debug statements if the destination has a single predecessor.  */
  if (can_move_debug_stmts)
    {
      gsi_to = gsi_after_labels (dest);
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); )
        {
          gimple debug = gsi_stmt (gsi);
          if (!is_gimple_debug (debug))
            break;
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, debug, GSI_SAME_STMT);
        }
    }

  bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);

  /* Update the dominators.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      basic_block dom, dombb, domdest;

      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
      if (domdest == bb)
        {
          /* Shortcut to avoid calling (relatively expensive)
             nearest_common_dominator unless necessary.  */
          dom = dombb;
        }
      else
        dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

      set_immediate_dominator (CDI_DOMINATORS, dest, dom);
    }

  /* Adjust latch information of BB's parent loop as otherwise
     the cfg hook has a hard time not to kill the loop.  */
  if (current_loops && bb->loop_father->latch == bb)
    bb->loop_father->latch = pred;

  /* And kill the forwarder block.  */
  delete_basic_block (bb);

  return true;
}
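
/* Schematically (illustrative block numbers), with <bb 4> a forwarder:

     before:  <bb 2> -> <bb 4>        after:  <bb 2> -> <bb 5>
              <bb 3> -> <bb 4>                <bb 3> -> <bb 5>
              <bb 4> -> <bb 5>

   All incoming edges are redirected to the single successor, labels and
   debug stmts are salvaged into it, dominators are fixed, and the
   forwarder block itself is deleted.  */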

/* STMT is a call that has been discovered noreturn.  Fix up the CFG
   and remove LHS.  Return true if something changed.  */

bool
fixup_noreturn_call (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  bool changed = false;

  if (gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    return false;

  /* First split basic block if stmt is not last.  */
  if (stmt != gsi_stmt (gsi_last_bb (bb)))
    split_block (bb, stmt);

  changed |= remove_fallthru_edge (bb->succs);

  /* If there is an LHS, remove it.  */
  if (gimple_call_lhs (stmt))
    {
      tree op = gimple_call_lhs (stmt);
      gimple_call_set_lhs (stmt, NULL_TREE);

      /* We need to remove the SSA name to avoid checking errors.
         All uses are dominated by the noreturn call and thus will
         be removed afterwards.
         We proactively remove affected non-PHI statements to keep
         fixup_cfg from trying to update them and crashing.  */
      if (TREE_CODE (op) == SSA_NAME)
        {
          use_operand_p use_p;
          imm_use_iterator iter;
          gimple use_stmt;
          bitmap_iterator bi;
          unsigned int bb_index;

          bitmap blocks = BITMAP_ALLOC (NULL);

          FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
            {
              if (gimple_code (use_stmt) != GIMPLE_PHI)
                bitmap_set_bit (blocks, gimple_bb (use_stmt)->index);
              else
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, error_mark_node);
            }
          EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
            delete_basic_block (BASIC_BLOCK_FOR_FN (cfun, bb_index));
          BITMAP_FREE (blocks);
          release_ssa_name (op);
        }
      update_stmt (stmt);
      changed = true;
    }
  /* Similarly remove VDEF if there is any.  */
  else if (gimple_vdef (stmt))
    update_stmt (stmt);
  return changed;
}
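
/* Example (hypothetical): if "x_3 = foo ();" is discovered to call a
   noreturn function, no use of x_3 can ever execute.  The LHS is
   therefore dropped, and the blocks containing x_3's non-PHI uses (all
   dominated by the call) are deleted outright so later CFG fixups never
   see stale references to x_3.  */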

/* Split basic blocks on calls in the middle of a basic block that are now
   known not to return, and remove the unreachable code.  */

static bool
split_bbs_on_noreturn_calls (void)
{
  bool changed = false;
  gimple stmt;
  basic_block bb;

  /* Detect cases where a mid-block call is now known not to return.  */
  if (cfun->gimple_df)
    while (vec_safe_length (MODIFIED_NORETURN_CALLS (cfun)))
      {
        stmt = MODIFIED_NORETURN_CALLS (cfun)->pop ();
        bb = gimple_bb (stmt);
        /* BB might be deleted at this point, so verify first that
           BB is present in the cfg.  */
        if (bb == NULL
            || bb->index < NUM_FIXED_BLOCKS
            || bb->index >= last_basic_block_for_fn (cfun)
            || BASIC_BLOCK_FOR_FN (cfun, bb->index) != bb
            || !gimple_call_noreturn_p (stmt))
          continue;

        changed |= fixup_noreturn_call (stmt);
      }

  return changed;
}

/* Tries to clean up the CFG in basic block BB.  Returns true if anything
   changes.  */

static bool
cleanup_tree_cfg_bb (basic_block bb)
{
  bool retval = cleanup_control_flow_bb (bb);

  if (tree_forwarder_block_p (bb, false)
      && remove_forwarder_block (bb))
    return true;

  /* Merging the blocks may create new opportunities for folding
     conditional branches (due to the elimination of single-valued PHI
     nodes).  */
  if (single_succ_p (bb)
      && can_merge_blocks_p (bb, single_succ (bb)))
    {
      merge_blocks (bb, single_succ (bb));
      return true;
    }

  return retval;
}

/* Iterate the cfg cleanups, while anything changes.  */

static bool
cleanup_tree_cfg_1 (void)
{
  bool retval = false;
  basic_block bb;
  unsigned i, n;

  retval |= split_bbs_on_noreturn_calls ();

  /* Prepare the worklists of altered blocks.  */
  cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL);

  /* During forwarder block cleanup, we may redirect edges out of
     SWITCH_EXPRs, which can get expensive.  So we want to enable
     recording of edge to CASE_LABEL_EXPR.  */
  start_recording_case_labels ();

  /* Start by iterating over all basic blocks.  We cannot use FOR_EACH_BB_FN,
     since the basic blocks may get removed.  */
  n = last_basic_block_for_fn (cfun);
  for (i = NUM_FIXED_BLOCKS; i < n; i++)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        retval |= cleanup_tree_cfg_bb (bb);
    }

  /* Now process the altered blocks, as long as any are available.  */
  while (!bitmap_empty_p (cfgcleanup_altered_bbs))
    {
      i = bitmap_first_set_bit (cfgcleanup_altered_bbs);
      bitmap_clear_bit (cfgcleanup_altered_bbs, i);
      if (i < NUM_FIXED_BLOCKS)
        continue;

      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (!bb)
        continue;

      retval |= cleanup_tree_cfg_bb (bb);
    }

  /* Rerun split_bbs_on_noreturn_calls, in case we have altered any noreturn
     calls.  */
  retval |= split_bbs_on_noreturn_calls ();

  end_recording_case_labels ();
  BITMAP_FREE (cfgcleanup_altered_bbs);
  return retval;
}

/* Remove unreachable blocks and do other miscellaneous cleanup work.
   Return true if the flowgraph was modified, false otherwise.  */

static bool
cleanup_tree_cfg_noloop (void)
{
  bool changed;

  timevar_push (TV_TREE_CLEANUP_CFG);

  /* Iterate until there are no more cleanups left to do.  If any
     iteration changed the flowgraph, set CHANGED to true.

     If dominance information is available, there cannot be any unreachable
     blocks.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      changed = delete_unreachable_blocks ();
      calculate_dominance_info (CDI_DOMINATORS);
    }
  else
    {
#ifdef ENABLE_CHECKING
      verify_dominators (CDI_DOMINATORS);
#endif
      changed = false;
    }

  changed |= cleanup_tree_cfg_1 ();

  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  compact_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  timevar_pop (TV_TREE_CLEANUP_CFG);

  if (changed && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return changed;
}

/* Repairs loop structures.  */

static void
repair_loop_structures (void)
{
  bitmap changed_bbs;
  unsigned n_new_loops;

  calculate_dominance_info (CDI_DOMINATORS);

  timevar_push (TV_REPAIR_LOOPS);
  changed_bbs = BITMAP_ALLOC (NULL);
  n_new_loops = fix_loop_structure (changed_bbs);

  /* This usually does nothing.  But sometimes parts of the cfg that
     originally were inside a loop get out of it due to edge removal (since
     they become unreachable by back edges from the latch).  Also a former
     irreducible loop can become reducible - in this case force a full
     rewrite into loop-closed SSA form.  */
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    rewrite_into_loop_closed_ssa (n_new_loops ? NULL : changed_bbs,
                                  TODO_update_ssa);

  BITMAP_FREE (changed_bbs);

#ifdef ENABLE_CHECKING
  verify_loop_structure ();
#endif
  scev_reset ();

  timevar_pop (TV_REPAIR_LOOPS);
}

/* Clean up the CFG and repair loop structures.  */

bool
cleanup_tree_cfg (void)
{
  bool changed = cleanup_tree_cfg_noloop ();

  if (current_loops != NULL
      && loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    repair_loop_structures ();

  return changed;
}

/* Tries to merge the PHI nodes at BB into those at BB's sole successor.
   Returns true if successful.  */

static bool
remove_forwarder_block_with_phi (basic_block bb)
{
  edge succ = single_succ_edge (bb);
  basic_block dest = succ->dest;
  gimple label;
  basic_block dombb, domdest, dom;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label, do not
     merge it.  */
  label = first_stmt (dest);
  if (label
      && gimple_code (label) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (label)))
    return false;

  /* Redirect each incoming edge to BB to DEST.  */
  while (EDGE_COUNT (bb->preds) > 0)
    {
      edge e = EDGE_PRED (bb, 0), s;
      gimple_stmt_iterator gsi;

      s = find_edge (e->src, dest);
      if (s)
        {
          /* We already have an edge S from E->src to DEST.  If S and
             E->dest's sole successor edge have the same PHI arguments
             at DEST, redirect S to DEST.  */
          if (phi_alternatives_equal (dest, s, succ))
            {
              e = redirect_edge_and_branch (e, dest);
              redirect_edge_var_map_clear (e);
              continue;
            }

          /* PHI arguments are different.  Create a forwarder block by
             splitting E so that we can merge PHI arguments on E to
             DEST.  */
          e = single_succ_edge (split_edge (e));
        }

      s = redirect_edge_and_branch (e, dest);

      /* redirect_edge_and_branch must not create a new edge.  */
      gcc_assert (s == e);

      /* Add to the PHI nodes at DEST each PHI argument removed at the
         destination of E.  */
      for (gsi = gsi_start_phis (dest);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple phi = gsi_stmt (gsi);
          tree def = gimple_phi_arg_def (phi, succ->dest_idx);
          source_location locus = gimple_phi_arg_location_from_edge (phi, succ);

          if (TREE_CODE (def) == SSA_NAME)
            {
              edge_var_map_vector *head;
              edge_var_map *vm;
              size_t i;

              /* If DEF is one of the results of PHI nodes removed during
                 redirection, replace it with the PHI argument that used
                 to be on E.  */
              head = redirect_edge_var_map_vector (e);
              FOR_EACH_VEC_SAFE_ELT (head, i, vm)
                {
                  tree old_arg = redirect_edge_var_map_result (vm);
                  tree new_arg = redirect_edge_var_map_def (vm);

                  if (def == old_arg)
                    {
                      def = new_arg;
                      locus = redirect_edge_var_map_location (vm);
                      break;
                    }
                }
            }

          add_phi_arg (phi, def, s, locus);
        }

      redirect_edge_var_map_clear (e);
    }

  /* Update the dominators.  */
  dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
  domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
  if (domdest == bb)
    {
      /* Shortcut to avoid calling (relatively expensive)
         nearest_common_dominator unless necessary.  */
      dom = dombb;
    }
  else
    dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

  set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Remove BB since all of BB's incoming edges have been redirected
     to DEST.  */
  delete_basic_block (bb);

  return true;
}

/* This pass merges PHI nodes if one feeds into another.  For example,
   suppose we have the following:

  goto <bb 9> (<L9>);

<L8>:;
  tem_17 = foo ();

  # tem_6 = PHI <tem_17(8), tem_23(7)>;
<L9>:;

  # tem_3 = PHI <tem_6(9), tem_2(5)>;
<L10>:;

  Then we merge the first PHI node into the second one like so:

  goto <bb 9> (<L10>);

<L8>:;
  tem_17 = foo ();

  # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
<L10>:;
*/

static unsigned int
merge_phi_nodes (void)
{
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  basic_block *current = worklist;
  basic_block bb;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Find all PHI nodes that we may be able to merge.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      basic_block dest;

      /* Look for a forwarder block with PHI nodes.  */
      if (!tree_forwarder_block_p (bb, true))
        continue;

      dest = single_succ (bb);

      /* We have to feed into another basic block with PHI
         nodes.  */
      if (gimple_seq_empty_p (phi_nodes (dest))
          /* We don't want to deal with a basic block with
             abnormal edges.  */
          || bb_has_abnormal_pred (bb))
        continue;

      if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
        {
          /* If BB does not dominate DEST, then the PHI nodes at
             DEST must be the only users of the results of the PHI
             nodes at BB.  */
          *current++ = bb;
        }
      else
        {
          gimple_stmt_iterator gsi;
          unsigned int dest_idx = single_succ_edge (bb)->dest_idx;

          /* BB dominates DEST.  There may be many users of the PHI
             nodes in BB.  However, there is still a trivial case we
             can handle.  If the result of every PHI in BB is used
             only by a PHI in DEST, then we can trivially merge the
             PHI nodes from BB into DEST.  */
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gimple phi = gsi_stmt (gsi);
              tree result = gimple_phi_result (phi);
              use_operand_p imm_use;
              gimple use_stmt;

              /* If the PHI's result is never used, then we can just
                 ignore it.  */
              if (has_zero_uses (result))
                continue;

              /* Get the single use of the result of this PHI node.  */
              if (!single_imm_use (result, &imm_use, &use_stmt)
                  || gimple_code (use_stmt) != GIMPLE_PHI
                  || gimple_bb (use_stmt) != dest
                  || gimple_phi_arg_def (use_stmt, dest_idx) != result)
                break;
            }

          /* If the loop above iterated through all the PHI nodes
             in BB, then we can merge the PHIs from BB into DEST.  */
          if (gsi_end_p (gsi))
            *current++ = bb;
        }
    }

  /* Now let's drain WORKLIST.  */
  bool changed = false;
  while (current != worklist)
    {
      bb = *--current;
      changed |= remove_forwarder_block_with_phi (bb);
    }
  free (worklist);

  /* Removing forwarder blocks can cause formerly irreducible loops
     to become reducible if we merged two entry blocks.  */
  if (changed
      && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return 0;
}

static bool
gate_merge_phi (void)
{
  return 1;
}

namespace {

const pass_data pass_data_merge_phi =
{
  GIMPLE_PASS, /* type */
  "mergephi", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_TREE_MERGE_PHI, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_ssa, /* todo_flags_finish */
};

class pass_merge_phi : public gimple_opt_pass
{
public:
  pass_merge_phi (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_merge_phi, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_merge_phi (m_ctxt); }
  bool gate () { return gate_merge_phi (); }
  unsigned int execute () { return merge_phi_nodes (); }

}; // class pass_merge_phi

} // anon namespace

gimple_opt_pass *
make_pass_merge_phi (gcc::context *ctxt)
{
  return new pass_merge_phi (ctxt);
}

/* Pass: clean up the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  */

static unsigned int
execute_cleanup_cfg_post_optimizing (void)
{
  unsigned int todo = 0;
  if (cleanup_tree_cfg ())
    todo |= TODO_update_ssa;
  maybe_remove_unreachable_handlers ();
  cleanup_dead_labels ();
  group_case_labels ();
  if ((flag_compare_debug_opt || flag_compare_debug)
      && flag_dump_final_insns)
    {
      FILE *final_output = fopen (flag_dump_final_insns, "a");

      if (!final_output)
        {
          error ("could not open final insn dump file %qs: %m",
                 flag_dump_final_insns);
          flag_dump_final_insns = NULL;
        }
      else
        {
          int save_unnumbered = flag_dump_unnumbered;
          int save_noaddr = flag_dump_noaddr;

          flag_dump_noaddr = flag_dump_unnumbered = 1;
          fprintf (final_output, "\n");
          dump_enumerated_decls (final_output, dump_flags | TDF_NOUID);
          flag_dump_noaddr = save_noaddr;
          flag_dump_unnumbered = save_unnumbered;
          if (fclose (final_output))
            {
              error ("could not close final insn dump file %qs: %m",
                     flag_dump_final_insns);
              flag_dump_final_insns = NULL;
            }
        }
    }
  return todo;
}

namespace {

const pass_data pass_data_cleanup_cfg_post_optimizing =
{
  GIMPLE_PASS, /* type */
  "optimized", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  false, /* has_gate */
  true, /* has_execute */
  TV_TREE_CLEANUP_CFG, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_remove_unused_locals, /* todo_flags_finish */
};

class pass_cleanup_cfg_post_optimizing : public gimple_opt_pass
{
public:
  pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cleanup_cfg_post_optimizing, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute () {
    return execute_cleanup_cfg_post_optimizing ();
  }

}; // class pass_cleanup_cfg_post_optimizing

} // anon namespace

gimple_opt_pass *
make_pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
{
  return new pass_cleanup_cfg_post_optimizing (ctxt);
}