/* CFG cleanup for trees.
   Copyright (C) 2001-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "cfgcleanup.h"
#include "gimple-iterator.h"
#include "tree-ssa-loop-manip.h"
#include "tree-scalar-evolution.h"
#include "gimple-match.h"
#include "gimple-fold.h"

/* The set of blocks in which at least one of the following changes happened:
   -- the statement at the end of the block was changed
   -- the block was newly created
   -- the set of the predecessors of the block changed
   -- the set of the successors of the block changed
   ??? Maybe we could track these changes separately, since they determine
       what cleanups it makes sense to try on the block.  */
bitmap cfgcleanup_altered_bbs;

/* Remove any fallthru edge from EV.  Return true if an edge was removed.  */

static bool
remove_fallthru_edge (vec<edge, va_gc> *ev)
{
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, ev)
    if ((e->flags & EDGE_FALLTHRU) != 0)
      {
        if (e->flags & EDGE_COMPLEX)
          e->flags &= ~EDGE_FALLTHRU;
        else
          remove_edge_and_dominated_blocks (e);
        return true;
      }
  return false;
}
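
/* For illustration, a fallthru edge typically has to go away once the
   statement ending its source block is discovered not to return; e.g. in

       foo ();        <- later found to be noreturn
       x_1 = x_2 + 1; <- only reachable through the fallthru edge

   the fallthru edge out of the call's block is meaningless, and
   cleanup_control_flow_bb below uses remove_fallthru_edge on the block's
   ->succs vector to delete it together with blocks that become
   unreachable.  */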

/* Disconnect an unreachable block in the control expression starting
   at block BB.  */

static bool
cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi,
                            bool first_p)
{
  edge taken_edge;
  bool retval = false;
  gimple *stmt = gsi_stmt (gsi);

  if (!single_succ_p (bb))
    {
      edge e;
      edge_iterator ei;
      bool warned = false;
      tree val = NULL_TREE;

      fold_defer_overflow_warnings ();
      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          /* During a first iteration on the CFG only remove trivially
             dead edges but mark other conditions for re-evaluation.  */
          if (first_p)
            {
              val = const_binop (gimple_cond_code (stmt), boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));
              if (! val)
                bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
            }
          else
            {
              code_helper rcode;
              tree ops[3] = {};
              if (gimple_simplify (stmt, &rcode, ops, NULL, no_follow_ssa_edges,
                                   no_follow_ssa_edges)
                  && rcode == INTEGER_CST)
                val = ops[0];
            }
          break;

        case GIMPLE_SWITCH:
          val = gimple_switch_index (as_a <gswitch *> (stmt));
          break;

        default:
          ;
        }
      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
        {
          fold_undefer_and_ignore_overflow_warnings ();
          return false;
        }

      /* Remove all the edges except the one that is always executed.  */
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e != taken_edge)
            {
              if (!warned)
                {
                  fold_undefer_overflow_warnings
                    (true, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  warned = true;
                }

              taken_edge->probability += e->probability;
              taken_edge->count += e->count;
              remove_edge_and_dominated_blocks (e);
              retval = true;
            }
          else
            ei_next (&ei);
        }
      if (!warned)
        fold_undefer_and_ignore_overflow_warnings ();
      if (taken_edge->probability > REG_BR_PROB_BASE)
        taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = single_succ_edge (bb);

  bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
  gsi_remove (&gsi, true);
  taken_edge->flags = EDGE_FALLTHRU;

  return retval;
}
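
/* For example, if BB ends in

       if (0 != 0) goto <bb 5>; else goto <bb 6>;

   the edge to <bb 5> is trivially dead: it is removed together with any
   blocks it alone dominated, the GIMPLE_COND is deleted, and the surviving
   edge to <bb 6> is marked EDGE_FALLTHRU.  */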

/* Cleanup the GF_CALL_CTRL_ALTERING flag according to
   updated gimple_call_flags.  */

static void
cleanup_call_ctrl_altering_flag (gimple *bb_end)
{
  if (!is_gimple_call (bb_end)
      || !gimple_call_ctrl_altering_p (bb_end))
    return;

  int flags = gimple_call_flags (bb_end);
  if (((flags & (ECF_CONST | ECF_PURE))
       && !(flags & ECF_LOOPING_CONST_OR_PURE))
      || (flags & ECF_LEAF))
    gimple_call_set_ctrl_altering (bb_end, false);
}
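
/* For example, a call that was conservatively marked as control altering
   but whose flags show it to be const or pure (and not looping) or leaf
   cannot actually transfer control elsewhere, so the flag is dropped and
   the call no longer needs to terminate its basic block.  */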

/* Try to remove superfluous control structures in basic block BB.  Returns
   true if anything changes.  */

static bool
cleanup_control_flow_bb (basic_block bb, bool first_p)
{
  gimple_stmt_iterator gsi;
  bool retval = false;
  gimple *stmt;

  /* If the last statement of the block could throw and now cannot,
     we need to prune cfg.  */
  retval |= gimple_purge_dead_eh_edges (bb);

  gsi = gsi_last_nondebug_bb (bb);
  if (gsi_end_p (gsi))
    return retval;

  stmt = gsi_stmt (gsi);

  /* Try to cleanup ctrl altering flag for call which ends bb.  */
  cleanup_call_ctrl_altering_flag (stmt);

  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt);
      retval |= cleanup_control_expr_graph (bb, gsi, first_p);
    }
  else if (gimple_code (stmt) == GIMPLE_GOTO
           && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
               == LABEL_DECL))
    {
      /* If we had a computed goto which has a compile-time determinable
         destination, then we can eliminate the goto.  */
      edge e;
      tree label;
      edge_iterator ei;
      basic_block target_block;

      gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt);
      /* First look at all the outgoing edges.  Delete any outgoing
         edges which do not go to the right block.  For the one
         edge which goes to the right block, fix up its flags.  */
      label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
      target_block = label_to_block (label);
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->dest != target_block)
            remove_edge_and_dominated_blocks (e);
          else
            {
              /* Turn off the EDGE_ABNORMAL flag.  */
              e->flags &= ~EDGE_ABNORMAL;

              /* And set EDGE_FALLTHRU.  */
              e->flags |= EDGE_FALLTHRU;
              ei_next (&ei);
            }
        }

      bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
      bitmap_set_bit (cfgcleanup_altered_bbs, target_block->index);

      /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
         relevant information we need.  */
      gsi_remove (&gsi, true);
      retval = true;
    }

  /* Check for indirect calls that have been turned into
     noreturn calls.  */
  else if (is_gimple_call (stmt)
           && gimple_call_noreturn_p (stmt))
    {
      /* If there are debug stmts after the noreturn call, remove them
         now, they should be all unreachable anyway.  */
      for (gsi_next (&gsi); !gsi_end_p (gsi); )
        gsi_remove (&gsi, true);
      if (remove_fallthru_edge (bb->succs))
        retval = true;
    }

  return retval;
}
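
/* For example, a computed goto whose destination turns out to be a known
   label,

       goto *&&lab;

   transfers control to exactly one place: every other outgoing edge is
   deleted, the surviving edge loses EDGE_ABNORMAL and gains EDGE_FALLTHRU,
   and the goto statement itself is removed.  */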

/* Return true if basic block BB does nothing except pass control
   flow to another block and that we can safely insert a label at
   the start of the successor block.

   As a precondition, we require that BB be not equal to
   ENTRY_BLOCK_PTR_FOR_FN (cfun).  */
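
/* As an illustration, a forwarder block is a block such as bb 3 below,
   holding at most a label and passing control straight on to its single
   successor:

       bb 2
         |
         v
       bb 3   (no statements other than a possible label)
         |
         v
       bb 4

   Removing the forwarder simply redirects bb 2's outgoing edge to bb 4.  */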

static bool
tree_forwarder_block_p (basic_block bb, bool phi_wanted)
{
  gimple_stmt_iterator gsi;
  location_t locus;

  /* BB must have a single outgoing edge.  */
  if (single_succ_p (bb) != 1
      /* If PHI_WANTED is false, BB must not have any PHI nodes.
         Otherwise, BB must have PHI nodes.  */
      || gimple_seq_empty_p (phi_nodes (bb)) == phi_wanted
      /* BB may not be a predecessor of the exit block.  */
      || single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
      /* Nor should this be an infinite loop.  */
      || single_succ (bb) == bb
      /* BB may not have an abnormal outgoing edge.  */
      || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    return false;

  gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun));

  locus = single_succ_edge (bb)->goto_locus;

  /* There should not be an edge coming from entry, or an EH edge.  */
  {
    edge_iterator ei;
    edge e;

    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || (e->flags & EDGE_EH))
        return false;
      /* If goto_locus of any of the edges differs, prevent removing
         the forwarder block for -O0.  */
      else if (optimize == 0 && e->goto_locus != locus)
        return false;
  }

  /* Now walk through the statements backward.  We can ignore labels,
     anything else means this is not a forwarder block.  */
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      switch (gimple_code (stmt))
        {
        case GIMPLE_LABEL:
          if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
            return false;
          if (optimize == 0 && gimple_location (stmt) != locus)
            return false;
          break;

          /* ??? For now, hope there's a corresponding debug
             assignment at the destination.  */
        case GIMPLE_DEBUG:
          break;

        default:
          return false;
        }
    }

  if (current_loops)
    {
      basic_block dest;
      /* Protect loop headers.  */
      if (bb_loop_header_p (bb))
        return false;

      dest = EDGE_SUCC (bb, 0)->dest;
      /* Protect loop preheaders and latches if requested.  */
      if (dest->loop_father->header == dest)
        {
          if (bb->loop_father == dest->loop_father)
            {
              if (loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
                return false;
              /* If bb doesn't have a single predecessor we'd make this
                 loop have multiple latches.  Don't do that if that
                 would in turn require disambiguating them.  */
              return (single_pred_p (bb)
                      || loops_state_satisfies_p
                           (LOOPS_MAY_HAVE_MULTIPLE_LATCHES));
            }
          else if (bb->loop_father == loop_outer (dest->loop_father))
            return !loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS);
          /* Always preserve other edges into loop headers that are
             not simple latches or preheaders.  */
          return false;
        }
    }

  return true;
}

/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
   those alternatives are equal in each of the PHI nodes, then return
   true, else return false.  */

static bool
phi_alternatives_equal (basic_block dest, edge e1, edge e2)
{
  int n1 = e1->dest_idx;
  int n2 = e2->dest_idx;
  gphi_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree val1 = gimple_phi_arg_def (phi, n1);
      tree val2 = gimple_phi_arg_def (phi, n2);

      gcc_assert (val1 != NULL_TREE);
      gcc_assert (val2 != NULL_TREE);

      if (!operand_equal_for_phi_arg_p (val1, val2))
        return false;
    }

  return true;
}
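
/* For instance, if DEST contains

       # x_1 = PHI <a_2(E1), a_2(E2), b_3(E3)>

   then the alternatives for E1 and E2 are both a_2 and hence equal; if any
   PHI node in DEST carried different values on those two edges, the
   function would return false.  */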

/* Removes forwarder block BB.  Returns false if this failed.  */

static bool
remove_forwarder_block (basic_block bb)
{
  edge succ = single_succ_edge (bb), e, s;
  basic_block dest = succ->dest;
  gimple *label;
  edge_iterator ei;
  gimple_stmt_iterator gsi, gsi_to;
  bool can_move_debug_stmts;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label or is an
     EH landing pad, do not merge it.  */
  label = first_stmt (dest);
  if (glabel *label_stmt = dyn_cast <glabel *> (label))
    if (DECL_NONLOCAL (gimple_label_label (label_stmt))
        || EH_LANDING_PAD_NR (gimple_label_label (label_stmt)) != 0)
      return false;

  /* If there is an abnormal edge to basic block BB, but not into
     dest, problems might occur during removal of the phi node at out
     of ssa due to overlapping live ranges of registers.

     If there is an abnormal edge in DEST, the problems would occur
     anyway since cleanup_dead_labels would then merge the labels for
     two different eh regions, and rest of exception handling code
     does not like it.

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.  */
  if (bb_has_abnormal_pred (bb)
      && (bb_has_abnormal_pred (dest)
          || !gimple_seq_empty_p (phi_nodes (dest))))
    return false;

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.  */
  if (!gimple_seq_empty_p (phi_nodes (dest)))
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          s = find_edge (e->src, dest);
          if (!s)
            continue;

          if (!phi_alternatives_equal (dest, succ, s))
            return false;
        }
    }

  can_move_debug_stmts = MAY_HAVE_DEBUG_STMTS && single_pred_p (dest);

  basic_block pred = NULL;
  if (single_pred_p (bb))
    pred = single_pred (bb);

  /* Redirect the edges.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);

      if (e->flags & EDGE_ABNORMAL)
        {
          /* If there is an abnormal edge, redirect it anyway, and
             move the labels to the new block to make it legal.  */
          s = redirect_edge_succ_nodup (e, dest);
        }
      else
        s = redirect_edge_and_branch (e, dest);

      if (s == e)
        {
          /* Create arguments for the phi nodes, since the edge was not
             here before.  */
          for (gphi_iterator psi = gsi_start_phis (dest);
               !gsi_end_p (psi);
               gsi_next (&psi))
            {
              gphi *phi = psi.phi ();
              source_location l = gimple_phi_arg_location_from_edge (phi, succ);
              tree def = gimple_phi_arg_def (phi, succ->dest_idx);
              add_phi_arg (phi, unshare_expr (def), s, l);
            }
        }
    }

  /* Move nonlocal labels and computed goto targets as well as user
     defined labels and labels with an EH landing pad number to the
     new block, so that the redirection of the abnormal edges works,
     jump targets end up in a sane place and debug information for
     labels is retained.  */
  gsi_to = gsi_start_bb (dest);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
    {
      tree decl;
      label = gsi_stmt (gsi);
      if (is_gimple_debug (label))
        break;
      decl = gimple_label_label (as_a <glabel *> (label));
      if (EH_LANDING_PAD_NR (decl) != 0
          || DECL_NONLOCAL (decl)
          || FORCED_LABEL (decl)
          || !DECL_ARTIFICIAL (decl))
        {
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, label, GSI_SAME_STMT);
        }
      else
        gsi_next (&gsi);
    }

  /* Move debug statements if the destination has a single predecessor.  */
  if (can_move_debug_stmts)
    {
      gsi_to = gsi_after_labels (dest);
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); )
        {
          gimple *debug = gsi_stmt (gsi);
          if (!is_gimple_debug (debug))
            break;
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, debug, GSI_SAME_STMT);
        }
    }

  bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);

  /* Update the dominators.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      basic_block dom, dombb, domdest;

      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
      if (domdest == bb)
        /* Shortcut to avoid calling (relatively expensive)
           nearest_common_dominator unless necessary.  */
        dom = dombb;
      else
        dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

      set_immediate_dominator (CDI_DOMINATORS, dest, dom);
    }

  /* Adjust latch information of BB's parent loop as otherwise
     the cfg hook has a hard time not to kill the loop.  */
  if (current_loops && bb->loop_father->latch == bb)
    bb->loop_father->latch = pred;

  /* And kill the forwarder block.  */
  delete_basic_block (bb);

  return true;
}

/* STMT is a call that has been discovered noreturn.  Split the
   block to prepare fixing up the CFG and remove LHS.
   Return true if cleanup-cfg needs to run.  */

bool
fixup_noreturn_call (gimple *stmt)
{
  basic_block bb = gimple_bb (stmt);
  bool changed = false;

  if (gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    return false;

  /* First split basic block if stmt is not last.  */
  if (stmt != gsi_stmt (gsi_last_bb (bb)))
    {
      if (stmt == gsi_stmt (gsi_last_nondebug_bb (bb)))
        {
          /* Don't split if there are only debug stmts
             after stmt, that can result in -fcompare-debug
             failures.  Remove the debug stmts instead,
             they should be all unreachable anyway.  */
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          for (gsi_next (&gsi); !gsi_end_p (gsi); )
            gsi_remove (&gsi, true);
        }
      else
        {
          split_block (bb, stmt);
          changed = true;
        }
    }

  /* If there is an LHS, remove it, but only if its type has fixed size.
     The LHS will need to be recreated during RTL expansion and creating
     temporaries of variable-sized types is not supported.  Also don't
     do this with TREE_ADDRESSABLE types, as assign_temp will abort.
     Drop LHS regardless of TREE_ADDRESSABLE, if the function call
     has been changed into a call that does not return a value, like
     __builtin_unreachable or __cxa_pure_virtual.  */
  tree lhs = gimple_call_lhs (stmt);
  if (lhs
      && (should_remove_lhs_p (lhs)
          || VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))))
    {
      gimple_call_set_lhs (stmt, NULL_TREE);

      /* We need to fix up the SSA name to avoid checking errors.  */
      if (TREE_CODE (lhs) == SSA_NAME)
        {
          tree new_var = create_tmp_reg (TREE_TYPE (lhs));
          SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, new_var);
          SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
          set_ssa_default_def (cfun, new_var, lhs);
        }

      update_stmt (stmt);
      changed = true;
    }

  /* Mark the call as altering control flow.  */
  if (!gimple_call_ctrl_altering_p (stmt))
    {
      gimple_call_set_ctrl_altering (stmt, true);
      changed = true;
    }

  return changed;
}
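
/* For example, once the call in

       x_5 = foo ();
       x_6 = x_5 + 1;

   is discovered to be noreturn, the statements after it are split into a
   (now unreachable) block, the dead LHS x_5 is dropped and turned into a
   default definition of a fresh temporary, and the call is marked as
   control altering so cleanup_control_flow_bb can prune its fallthru
   edge.  */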

/* Tries to cleanup cfg in basic block BB.  Returns true if anything
   changes.  */

static bool
cleanup_tree_cfg_bb (basic_block bb)
{
  if (tree_forwarder_block_p (bb, false)
      && remove_forwarder_block (bb))
    return true;

  /* If there is a merge opportunity with the predecessor
     do nothing now but wait until we process the predecessor.
     This happens when we visit BBs in a non-optimal order and
     avoids quadratic behavior with adjusting stmts BB pointer.  */
  if (single_pred_p (bb)
      && can_merge_blocks_p (single_pred (bb), bb))
    /* But make sure we _do_ visit it.  When we remove unreachable paths
       ending in a backedge we fail to mark the destination's predecessors
       as changed.  */
    bitmap_set_bit (cfgcleanup_altered_bbs, single_pred (bb)->index);

  /* Merging the blocks may create new opportunities for folding
     conditional branches (due to the elimination of single-valued PHI
     nodes).  */
  else if (single_succ_p (bb)
           && can_merge_blocks_p (bb, single_succ (bb)))
    {
      merge_blocks (bb, single_succ (bb));
      return true;
    }

  return false;
}

/* Iterate the cfg cleanups, while anything changes.  */

static bool
cleanup_tree_cfg_1 (void)
{
  bool retval = false;
  basic_block bb;
  unsigned i, n;

  /* Prepare the worklists of altered blocks.  */
  cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL);

  /* During forwarder block cleanup, we may redirect edges out of
     SWITCH_EXPRs, which can get expensive.  So we want to enable
     recording of edge to CASE_LABEL_EXPR.  */
  start_recording_case_labels ();

  /* We cannot use FOR_EACH_BB_FN for the BB iterations below
     since the basic blocks may get removed.  */

  /* Start by iterating over all basic blocks looking for edge removal
     opportunities.  Do this first because incoming SSA form may be
     invalid and we want to avoid performing SSA related tasks such
     as propagating out a PHI node during BB merging in that state.  */
  n = last_basic_block_for_fn (cfun);
  for (i = NUM_FIXED_BLOCKS; i < n; i++)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        retval |= cleanup_control_flow_bb (bb, true);
    }

  /* After doing the above SSA form should be valid (or an update SSA
     should be required).  */

  /* Continue by iterating over all basic blocks looking for BB merging
     opportunities.  */
  n = last_basic_block_for_fn (cfun);
  for (i = NUM_FIXED_BLOCKS; i < n; i++)
    {
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (bb)
        retval |= cleanup_tree_cfg_bb (bb);
    }

  /* Now process the altered blocks, as long as any are available.  */
  while (!bitmap_empty_p (cfgcleanup_altered_bbs))
    {
      i = bitmap_first_set_bit (cfgcleanup_altered_bbs);
      bitmap_clear_bit (cfgcleanup_altered_bbs, i);
      if (i < NUM_FIXED_BLOCKS)
        continue;

      bb = BASIC_BLOCK_FOR_FN (cfun, i);
      if (!bb)
        continue;

      retval |= cleanup_control_flow_bb (bb, false);
      retval |= cleanup_tree_cfg_bb (bb);
    }

  end_recording_case_labels ();
  BITMAP_FREE (cfgcleanup_altered_bbs);
  return retval;
}

/* Remove unreachable blocks and other miscellaneous clean up work.
   Return true if the flowgraph was modified, false otherwise.  */

static bool
cleanup_tree_cfg_noloop (void)
{
  bool changed;

  timevar_push (TV_TREE_CLEANUP_CFG);

  /* Iterate until there are no more cleanups left to do.  If any
     iteration changed the flowgraph, set CHANGED to true.

     If dominance information is available, there cannot be any unreachable
     blocks.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      changed = delete_unreachable_blocks ();
      calculate_dominance_info (CDI_DOMINATORS);
    }
  else
    {
      checking_verify_dominators (CDI_DOMINATORS);
      changed = false;
    }

  changed |= cleanup_tree_cfg_1 ();

  gcc_assert (dom_info_available_p (CDI_DOMINATORS));

  checking_verify_flow_info ();

  timevar_pop (TV_TREE_CLEANUP_CFG);

  if (changed && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return changed;
}

/* Repairs loop structures.  */

static void
repair_loop_structures (void)
{
  bitmap changed_bbs;
  unsigned n_new_loops;

  calculate_dominance_info (CDI_DOMINATORS);

  timevar_push (TV_REPAIR_LOOPS);
  changed_bbs = BITMAP_ALLOC (NULL);
  n_new_loops = fix_loop_structure (changed_bbs);

  /* This usually does nothing.  But sometimes parts of cfg that originally
     were inside a loop get out of it due to edge removal (since they
     become unreachable by back edges from latch).  Also a former
     irreducible loop can become reducible - in this case force a full
     rewrite into loop-closed SSA form.  */
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    rewrite_into_loop_closed_ssa (n_new_loops ? NULL : changed_bbs,
                                  TODO_update_ssa);

  BITMAP_FREE (changed_bbs);

  checking_verify_loop_structure ();
  scev_reset ();

  timevar_pop (TV_REPAIR_LOOPS);
}

/* Cleanup cfg and repair loop structures.  */

bool
cleanup_tree_cfg (void)
{
  bool changed = cleanup_tree_cfg_noloop ();

  if (current_loops != NULL
      && loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    repair_loop_structures ();

  return changed;
}
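
/* A rough usage sketch (illustrative only): a GIMPLE pass that deletes or
   redirects edges can either return TODO_cleanup_cfg from its execute hook
   and let the pass manager invoke this cleanup, or call cleanup_tree_cfg ()
   directly once it is done mutating the CFG.  */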

/* Tries to merge the PHI nodes at BB into those at BB's sole successor.
   Returns true if successful.  */

static bool
remove_forwarder_block_with_phi (basic_block bb)
{
  edge succ = single_succ_edge (bb);
  basic_block dest = succ->dest;
  gimple *label;
  basic_block dombb, domdest, dom;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* Removal of forwarders may expose new natural loops and thus
     a block may turn into a loop header.  */
  if (current_loops && bb_loop_header_p (bb))
    return false;

  /* If the destination block consists of a nonlocal label, do not
     merge it.  */
  label = first_stmt (dest);
  if (glabel *label_stmt = dyn_cast <glabel *> (label))
    if (DECL_NONLOCAL (gimple_label_label (label_stmt)))
      return false;

  /* Record BB's single pred in case we need to update the father
     loop's latch information later.  */
  basic_block pred = NULL;
  if (single_pred_p (bb))
    pred = single_pred (bb);

  /* Redirect each incoming edge to BB to DEST.  */
  while (EDGE_COUNT (bb->preds) > 0)
    {
      edge e = EDGE_PRED (bb, 0), s;
      gphi_iterator gsi;

      s = find_edge (e->src, dest);
      if (s)
        {
          /* We already have an edge S from E->src to DEST.  If S and
             E->dest's sole successor edge have the same PHI arguments
             at DEST, redirect S to DEST.  */
          if (phi_alternatives_equal (dest, s, succ))
            {
              e = redirect_edge_and_branch (e, dest);
              redirect_edge_var_map_clear (e);
              continue;
            }

          /* PHI arguments are different.  Create a forwarder block by
             splitting E so that we can merge PHI arguments on E to
             DEST.  */
          e = single_succ_edge (split_edge (e));
        }

      s = redirect_edge_and_branch (e, dest);

      /* redirect_edge_and_branch must not create a new edge.  */
      gcc_assert (s == e);

      /* Add to the PHI nodes at DEST each PHI argument removed at the
         destination of E.  */
      for (gsi = gsi_start_phis (dest);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          tree def = gimple_phi_arg_def (phi, succ->dest_idx);
          source_location locus = gimple_phi_arg_location_from_edge (phi, succ);

          if (TREE_CODE (def) == SSA_NAME)
            {
              /* If DEF is one of the results of PHI nodes removed during
                 redirection, replace it with the PHI argument that used
                 to be on E.  */
              vec<edge_var_map> *head = redirect_edge_var_map_vector (e);
              size_t length = head ? head->length () : 0;
              for (size_t i = 0; i < length; i++)
                {
                  edge_var_map *vm = &(*head)[i];
                  tree old_arg = redirect_edge_var_map_result (vm);
                  tree new_arg = redirect_edge_var_map_def (vm);

                  if (def == old_arg)
                    {
                      def = new_arg;
                      locus = redirect_edge_var_map_location (vm);
                      break;
                    }
                }
            }

          add_phi_arg (phi, def, s, locus);
        }

      redirect_edge_var_map_clear (e);
    }

  /* Update the dominators.  */
  dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
  domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
  if (domdest == bb)
    /* Shortcut to avoid calling (relatively expensive)
       nearest_common_dominator unless necessary.  */
    dom = dombb;
  else
    dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

  set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Adjust latch information of BB's parent loop as otherwise
     the cfg hook has a hard time not to kill the loop.  */
  if (current_loops && bb->loop_father->latch == bb)
    bb->loop_father->latch = pred;

  /* Remove BB since all of BB's incoming edges have been redirected
     to DEST.  */
  delete_basic_block (bb);

  return true;
}

/* This pass merges PHI nodes if one feeds into another.  For example,
   suppose we have the following:

     # tem_6 = PHI <tem_17(8), tem_23(7)>;

     # tem_3 = PHI <tem_6(9), tem_2(5)>;

   Then we merge the first PHI node into the second one like so:

     # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;  */

const pass_data pass_data_merge_phi =
{
  GIMPLE_PASS, /* type */
  "mergephi", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_MERGE_PHI, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_merge_phi : public gimple_opt_pass
{
public:
  pass_merge_phi (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_merge_phi, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_merge_phi (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_merge_phi

unsigned int
pass_merge_phi::execute (function *fun)
{
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (fun));
  basic_block *current = worklist;
  basic_block bb;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Find all PHI nodes that we may be able to merge.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      basic_block dest;

      /* Look for a forwarder block with PHI nodes.  */
      if (!tree_forwarder_block_p (bb, true))
        continue;

      dest = single_succ (bb);

      /* We have to feed into another basic block with PHI
         nodes.  */
      if (gimple_seq_empty_p (phi_nodes (dest))
          /* We don't want to deal with a basic block with
             abnormal edges.  */
          || bb_has_abnormal_pred (bb))
        continue;

      if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
        {
          /* If BB does not dominate DEST, then the PHI nodes at
             DEST must be the only users of the results of the PHI
             nodes at BB.  */
          *current++ = bb;
        }
      else
        {
          gphi_iterator gsi;
          unsigned int dest_idx = single_succ_edge (bb)->dest_idx;

          /* BB dominates DEST.  There may be many users of the PHI
             nodes in BB.  However, there is still a trivial case we
             can handle.  If the result of every PHI in BB is used
             only by a PHI in DEST, then we can trivially merge the
             PHI nodes from BB into DEST.  */
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gphi *phi = gsi.phi ();
              tree result = gimple_phi_result (phi);
              use_operand_p imm_use;
              gimple *use_stmt;

              /* If the PHI's result is never used, then we can just
                 ignore it.  */
              if (has_zero_uses (result))
                continue;

              /* Get the single use of the result of this PHI node.  */
              if (!single_imm_use (result, &imm_use, &use_stmt)
                  || gimple_code (use_stmt) != GIMPLE_PHI
                  || gimple_bb (use_stmt) != dest
                  || gimple_phi_arg_def (use_stmt, dest_idx) != result)
                break;
            }

          /* If the loop above iterated through all the PHI nodes
             in BB, then we can merge the PHIs from BB into DEST.  */
          if (gsi_end_p (gsi))
            *current++ = bb;
        }
    }

  /* Now let's drain WORKLIST.  */
  bool changed = false;
  while (current != worklist)
    {
      bb = *--current;
      changed |= remove_forwarder_block_with_phi (bb);
    }
  free (worklist);

  /* Removing forwarder blocks can cause formerly irreducible loops
     to become reducible if we merged two entry blocks.  */
  if (changed
      && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return 0;
}

gimple_opt_pass *
make_pass_merge_phi (gcc::context *ctxt)
{
  return new pass_merge_phi (ctxt);
}

/* Pass: cleanup the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  */

static unsigned int
execute_cleanup_cfg_post_optimizing (void)
{
  unsigned int todo = execute_fixup_cfg ();
  if (cleanup_tree_cfg ())
    {
      todo &= ~TODO_cleanup_cfg;
      todo |= TODO_update_ssa;
    }
  maybe_remove_unreachable_handlers ();
  cleanup_dead_labels ();
  group_case_labels ();
  if ((flag_compare_debug_opt || flag_compare_debug)
      && flag_dump_final_insns)
    {
      FILE *final_output = fopen (flag_dump_final_insns, "a");

      if (!final_output)
        {
          error ("could not open final insn dump file %qs: %m",
                 flag_dump_final_insns);
          flag_dump_final_insns = NULL;
        }
      else
        {
          int save_unnumbered = flag_dump_unnumbered;
          int save_noaddr = flag_dump_noaddr;

          flag_dump_noaddr = flag_dump_unnumbered = 1;
          fprintf (final_output, "\n");
          dump_enumerated_decls (final_output, dump_flags | TDF_NOUID);
          flag_dump_noaddr = save_noaddr;
          flag_dump_unnumbered = save_unnumbered;
          if (fclose (final_output))
            {
              error ("could not close final insn dump file %qs: %m",
                     flag_dump_final_insns);
              flag_dump_final_insns = NULL;
            }
        }
    }
  return todo;
}

const pass_data pass_data_cleanup_cfg_post_optimizing =
{
  GIMPLE_PASS, /* type */
  "optimized", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CLEANUP_CFG, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_remove_unused_locals, /* todo_flags_finish */
};

class pass_cleanup_cfg_post_optimizing : public gimple_opt_pass
{
public:
  pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cleanup_cfg_post_optimizing, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return execute_cleanup_cfg_post_optimizing ();
    }

}; // class pass_cleanup_cfg_post_optimizing

gimple_opt_pass *
make_pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
{
  return new pass_cleanup_cfg_post_optimizing (ctxt);
}