/* CFG cleanup for trees.
   Copyright (C) 2001-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "cfgcleanup.h"
#include "gimple-iterator.h"
#include "tree-ssa-loop-manip.h"
#include "tree-scalar-evolution.h"
#include "gimple-match.h"
#include "gimple-fold.h"
/* The set of blocks in which at least one of the following changes happened:
   -- the statement at the end of the block was changed
   -- the block was newly created
   -- the set of the predecessors of the block changed
   -- the set of the successors of the block changed
   ??? Maybe we could track these changes separately, since they determine
       what cleanups it makes sense to try on the block.  */
bitmap cfgcleanup_altered_bbs;
/* Remove any fallthru edge from EV.  Return true if an edge was removed.  */

remove_fallthru_edge (vec<edge, va_gc> *ev)
  FOR_EACH_EDGE (e, ei, ev)
    if ((e->flags & EDGE_FALLTHRU) != 0)
      if (e->flags & EDGE_COMPLEX)
        e->flags &= ~EDGE_FALLTHRU;
      else
        remove_edge_and_dominated_blocks (e);
/* Disconnect an unreachable block in the control expression starting
   at block BB.  */
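/* For example (an illustrative sketch, not taken from a real dump):
   if BB ends in

     if (1 != 0)
       goto <bb 4>;
     else
       goto <bb 5>;

   the condition folds to a constant, so the edge to <bb 5> is dead and
   can be removed, and the remaining edge becomes a plain fallthru.  */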
cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi,
                            bool first_p)

  gimple *stmt = gsi_stmt (gsi);

  if (!single_succ_p (bb))
      fold_defer_overflow_warnings ();
      switch (gimple_code (stmt))
          /* During a first iteration on the CFG only remove trivially
             dead edges but mark other conditions for re-evaluation.  */
          val = const_binop (gimple_cond_code (stmt), boolean_type_node,
                             gimple_cond_lhs (stmt),
                             gimple_cond_rhs (stmt));
          bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
          if (gimple_simplify (stmt, &rcode, ops, NULL, no_follow_ssa_edges,
              && rcode == INTEGER_CST)
          val = gimple_switch_index (as_a <gswitch *> (stmt));

      taken_edge = find_taken_edge (bb, val);
      fold_undefer_and_ignore_overflow_warnings ();

      /* Remove all the edges except the one that is always executed.  */
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
              fold_undefer_overflow_warnings
                (true, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
            taken_edge->probability += e->probability;
            taken_edge->count += e->count;
            remove_edge_and_dominated_blocks (e);
      fold_undefer_and_ignore_overflow_warnings ();
      if (taken_edge->probability > REG_BR_PROB_BASE)
        taken_edge->probability = REG_BR_PROB_BASE;
    taken_edge = single_succ_edge (bb);

  bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
  gsi_remove (&gsi, true);
  taken_edge->flags = EDGE_FALLTHRU;
/* Cleanup the GF_CALL_CTRL_ALTERING flag according to the updated
   gimple_call_flags.  */

cleanup_call_ctrl_altering_flag (gimple *bb_end)

  if (!is_gimple_call (bb_end)
      || !gimple_call_ctrl_altering_p (bb_end))
    return;

  int flags = gimple_call_flags (bb_end);
  if (((flags & (ECF_CONST | ECF_PURE))
       && !(flags & ECF_LOOPING_CONST_OR_PURE))
      || (flags & ECF_LEAF))
    gimple_call_set_ctrl_altering (bb_end, false);
/* Try to remove superfluous control structures in basic block BB.  Returns
   true if anything changes.  */
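/* The cases handled below are: a GIMPLE_COND or GIMPLE_SWITCH whose
   condition folds to a constant, a computed goto whose destination is a
   compile-time-known label, and a call that has been discovered to be
   noreturn (whose fallthru edge is then pruned).  */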
cleanup_control_flow_bb (basic_block bb, bool first_p)

  gimple_stmt_iterator gsi;

  /* If the last statement of the block could throw and now cannot,
     we need to prune cfg.  */
  retval |= gimple_purge_dead_eh_edges (bb);

  gsi = gsi_last_nondebug_bb (bb);

  stmt = gsi_stmt (gsi);

  /* Try to cleanup ctrl altering flag for call which ends bb.  */
  cleanup_call_ctrl_altering_flag (stmt);

  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_SWITCH)
      gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt);
      retval |= cleanup_control_expr_graph (bb, gsi, first_p);
  else if (gimple_code (stmt) == GIMPLE_GOTO
           && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
               == LABEL_DECL))
      /* If we had a computed goto which has a compile-time determinable
         destination, then we can eliminate the goto.  */
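      /* For instance (an illustrative sketch, not from a real dump): if BB
         ends in

           goto &L;

         where L is the label that starts TARGET_BLOCK, every successor edge
         of BB other than the one to TARGET_BLOCK is dead, and the surviving
         edge becomes a plain fallthru edge once the goto is removed.  */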
      basic_block target_block;

      gcc_checking_assert (gsi_stmt (gsi_last_bb (bb)) == stmt);

      /* First look at all the outgoing edges.  Delete any outgoing
         edges which do not go to the right block.  For the one
         edge which goes to the right block, fix up its flags.  */
      label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
      target_block = label_to_block (label);
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
          if (e->dest != target_block)
            remove_edge_and_dominated_blocks (e);
          else
              /* Turn off the EDGE_ABNORMAL flag.  */
              e->flags &= ~EDGE_ABNORMAL;

              /* And set EDGE_FALLTHRU.  */
              e->flags |= EDGE_FALLTHRU;

      bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
      bitmap_set_bit (cfgcleanup_altered_bbs, target_block->index);

      /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
         relevant information we need.  */
      gsi_remove (&gsi, true);

  /* Check for indirect calls that have been turned into
     noreturn calls.  */
  else if (is_gimple_call (stmt)
           && gimple_call_noreturn_p (stmt))
      /* If there are debug stmts after the noreturn call, remove them
         now, they should all be unreachable anyway.  */
      for (gsi_next (&gsi); !gsi_end_p (gsi); )
        gsi_remove (&gsi, true);
      if (remove_fallthru_edge (bb->succs))
/* Return true if basic block BB does nothing except pass control
   flow to another block, and we can safely insert a label at
   the start of the successor block.

   As a precondition, we require that BB be not equal to
   ENTRY_BLOCK_PTR_FOR_FN (cfun).  */
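/* For example (an illustrative sketch, not from a real dump), a block

     <bb 5>:
     goto <bb 7>;

   that contains nothing but possibly labels or debug stmts and has a
   single, non-abnormal successor simply forwards control to <bb 7> and
   is a candidate forwarder block.  */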
tree_forwarder_block_p (basic_block bb, bool phi_wanted)

  gimple_stmt_iterator gsi;

  /* BB must have a single outgoing edge.  */
  if (single_succ_p (bb) != 1
      /* If PHI_WANTED is false, BB must not have any PHI nodes.
         Otherwise, BB must have PHI nodes.  */
      || gimple_seq_empty_p (phi_nodes (bb)) == phi_wanted
      /* BB may not be a predecessor of the exit block.  */
      || single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
      /* Nor should this be an infinite loop.  */
      || single_succ (bb) == bb
      /* BB may not have an abnormal outgoing edge.  */
      || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))

  gcc_checking_assert (bb != ENTRY_BLOCK_PTR_FOR_FN (cfun));

  locus = single_succ_edge (bb)->goto_locus;

  /* There should not be an edge coming from entry, or an EH edge.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || (e->flags & EDGE_EH))
    /* If goto_locus of any of the edges differs, prevent removing
       the forwarder block for -O0.  */
    else if (optimize == 0 && e->goto_locus != locus)

  /* Now walk through the statements backward.  We can ignore labels,
     anything else means this is not a forwarder block.  */
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
      gimple *stmt = gsi_stmt (gsi);

      switch (gimple_code (stmt))
          if (DECL_NONLOCAL (gimple_label_label (as_a <glabel *> (stmt))))
          if (optimize == 0 && gimple_location (stmt) != locus)
          /* ??? For now, hope there's a corresponding debug
             assignment at the destination.  */

  /* Protect loop headers.  */
  if (bb->loop_father->header == bb)

  dest = EDGE_SUCC (bb, 0)->dest;
  /* Protect loop preheaders and latches if requested.  */
  if (dest->loop_father->header == dest)
      if (bb->loop_father == dest->loop_father)
          if (loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
          /* If bb doesn't have a single predecessor we'd make this
             loop have multiple latches.  Don't do that if that
             would in turn require disambiguating them.  */
          return (single_pred_p (bb)
                  || loops_state_satisfies_p
                       (LOOPS_MAY_HAVE_MULTIPLE_LATCHES));
      else if (bb->loop_father == loop_outer (dest->loop_father))
        return !loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS);
      /* Always preserve other edges into loop headers that are
         not simple latches or preheaders.  */
/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
   those alternatives are equal in each of the PHI nodes, then return
   true, else return false.  */
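/* For instance (an illustrative sketch), given a PHI node in DEST such as

     # x_3 = PHI <a_1 (E1), a_1 (E2), b_2 (E3)>

   the alternatives for E1 and E2 are both a_1 and therefore equal; if
   every PHI node in DEST agrees in this way, the function returns true.  */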
phi_alternatives_equal (basic_block dest, edge e1, edge e2)

  int n1 = e1->dest_idx;
  int n2 = e2->dest_idx;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
      gphi *phi = gsi.phi ();
      tree val1 = gimple_phi_arg_def (phi, n1);
      tree val2 = gimple_phi_arg_def (phi, n2);

      gcc_assert (val1 != NULL_TREE);
      gcc_assert (val2 != NULL_TREE);

      if (!operand_equal_for_phi_arg_p (val1, val2))
/* Removes forwarder block BB.  Returns false if this failed.  */
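/* For example (an illustrative sketch): when BB merely forwards control
   to DEST,

     PRED --> BB --> DEST    becomes    PRED --> DEST

   by redirecting each incoming edge of BB to DEST, moving any labels that
   must be preserved, and finally deleting BB.  */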
remove_forwarder_block (basic_block bb)

  edge succ = single_succ_edge (bb), e, s;
  basic_block dest = succ->dest;
  gimple_stmt_iterator gsi, gsi_to;
  bool can_move_debug_stmts;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */

  /* If the destination block consists of a nonlocal label or is an
     EH landing pad, do not merge it.  */
  label = first_stmt (dest);
  if (glabel *label_stmt = dyn_cast <glabel *> (label))
    if (DECL_NONLOCAL (gimple_label_label (label_stmt))
        || EH_LANDING_PAD_NR (gimple_label_label (label_stmt)) != 0)

  /* If there is an abnormal edge to basic block BB, but not into
     dest, problems might occur during removal of the phi node at out
     of ssa due to overlapping live ranges of registers.

     If there is an abnormal edge in DEST, the problems would occur
     anyway since cleanup_dead_labels would then merge the labels for
     two different eh regions, and rest of exception handling code

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.  */
  if (bb_has_abnormal_pred (bb)
      && (bb_has_abnormal_pred (dest)
          || !gimple_seq_empty_p (phi_nodes (dest))))

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.  */
  if (!gimple_seq_empty_p (phi_nodes (dest)))
      FOR_EACH_EDGE (e, ei, bb->preds)
          s = find_edge (e->src, dest);
          if (!phi_alternatives_equal (dest, succ, s))
  can_move_debug_stmts = MAY_HAVE_DEBUG_STMTS && single_pred_p (dest);

  basic_block pred = NULL;
  if (single_pred_p (bb))
    pred = single_pred (bb);

  /* Redirect the edges.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);

      if (e->flags & EDGE_ABNORMAL)
          /* If there is an abnormal edge, redirect it anyway, and
             move the labels to the new block to make it legal.  */
          s = redirect_edge_succ_nodup (e, dest);
      else
        s = redirect_edge_and_branch (e, dest);

          /* Create arguments for the phi nodes, since the edge was not
             redirectable.  */
          for (gphi_iterator psi = gsi_start_phis (dest);
              gphi *phi = psi.phi ();
              source_location l = gimple_phi_arg_location_from_edge (phi, succ);
              tree def = gimple_phi_arg_def (phi, succ->dest_idx);
              add_phi_arg (phi, unshare_expr (def), s, l);

  /* Move nonlocal labels and computed goto targets as well as user
     defined labels and labels with an EH landing pad number to the
     new block, so that the redirection of the abnormal edges works,
     jump targets end up in a sane place and debug information for
     labels is retained.  */
  gsi_to = gsi_start_bb (dest);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
      label = gsi_stmt (gsi);
      if (is_gimple_debug (label))
      decl = gimple_label_label (as_a <glabel *> (label));
      if (EH_LANDING_PAD_NR (decl) != 0
          || DECL_NONLOCAL (decl)
          || FORCED_LABEL (decl)
          || !DECL_ARTIFICIAL (decl))
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, label, GSI_SAME_STMT);
  /* Move debug statements if the destination has a single predecessor.  */
  if (can_move_debug_stmts)
      gsi_to = gsi_after_labels (dest);
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); )
          gimple *debug = gsi_stmt (gsi);
          if (!is_gimple_debug (debug))
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, debug, GSI_SAME_STMT);

  bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);

  /* Update the dominators.  */
  if (dom_info_available_p (CDI_DOMINATORS))
      basic_block dom, dombb, domdest;

      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);

      /* Shortcut to avoid calling (relatively expensive)
         nearest_common_dominator unless necessary.  */
        dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
      set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Adjust latch information of BB's parent loop as otherwise
     the cfg hook has a hard time not to kill the loop.  */
  if (current_loops && bb->loop_father->latch == bb)
    bb->loop_father->latch = pred;

  /* And kill the forwarder block.  */
  delete_basic_block (bb);
/* STMT is a call that has been discovered noreturn.  Split the
   block to prepare fixing up the CFG and remove LHS.
   Return true if cleanup-cfg needs to run.  */
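/* For example (an illustrative sketch with a hypothetical function foo):
   once a call

     x_1 = foo ();

   is known never to return, the statements after it in the block are
   split off (or, if they are only debug stmts, deleted) and the now-dead
   LHS x_1 is dropped so it never has to be materialized at expansion
   time.  */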
fixup_noreturn_call (gimple *stmt)

  basic_block bb = gimple_bb (stmt);
  bool changed = false;

  if (gimple_call_builtin_p (stmt, BUILT_IN_RETURN))

  /* First split basic block if stmt is not last.  */
  if (stmt != gsi_stmt (gsi_last_bb (bb)))
      if (stmt == gsi_stmt (gsi_last_nondebug_bb (bb)))
          /* Don't split if there are only debug stmts
             after stmt, that can result in -fcompare-debug
             failures.  Remove the debug stmts instead,
             they should all be unreachable anyway.  */
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          for (gsi_next (&gsi); !gsi_end_p (gsi); )
            gsi_remove (&gsi, true);
      else
        split_block (bb, stmt);

  /* If there is an LHS, remove it, but only if its type has fixed size.
     The LHS will need to be recreated during RTL expansion and creating
     temporaries of variable-sized types is not supported.  */
  tree lhs = gimple_call_lhs (stmt);
  if (lhs && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (lhs))) == INTEGER_CST)
      gimple_call_set_lhs (stmt, NULL_TREE);

      /* We need to fix up the SSA name to avoid checking errors.  */
      if (TREE_CODE (lhs) == SSA_NAME)
          tree new_var = create_tmp_reg (TREE_TYPE (lhs));
          SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, new_var);
          SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
          set_ssa_default_def (cfun, new_var, lhs);

  /* Mark the call as altering control flow.  */
  if (!gimple_call_ctrl_altering_p (stmt))
      gimple_call_set_ctrl_altering (stmt, true);
/* Tries to cleanup cfg in basic block BB.  Returns true if anything
   changes.  */

cleanup_tree_cfg_bb (basic_block bb)

  if (tree_forwarder_block_p (bb, false)
      && remove_forwarder_block (bb))

  /* Merging the blocks may create new opportunities for folding
     conditional branches (due to the elimination of single-valued PHI
     nodes).  */
  if (single_succ_p (bb)
      && can_merge_blocks_p (bb, single_succ (bb)))
      /* If there is a merge opportunity with the predecessor
         do nothing now but wait until we process the predecessor.
         This happens when we visit BBs in a non-optimal order and
         avoids quadratic behavior with adjusting stmts BB pointer.  */
      if (single_pred_p (bb)
          && can_merge_blocks_p (single_pred (bb), bb))

        merge_blocks (bb, single_succ (bb));
/* Iterate the cfg cleanups, while anything changes.  */

cleanup_tree_cfg_1 (void)

  /* Prepare the worklists of altered blocks.  */
  cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL);

  /* During forwarder block cleanup, we may redirect edges out of
     SWITCH_EXPRs, which can get expensive.  So we want to enable
     recording of edge to CASE_LABEL_EXPR.  */
  start_recording_case_labels ();

  /* We cannot use FOR_EACH_BB_FN for the BB iterations below
     since the basic blocks may get removed.  */

  /* Start by iterating over all basic blocks looking for edge removal
     opportunities.  Do this first because incoming SSA form may be
     invalid and we want to avoid performing SSA related tasks such
     as propagating out a PHI node during BB merging in that state.  */
  n = last_basic_block_for_fn (cfun);
  for (i = NUM_FIXED_BLOCKS; i < n; i++)
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
        retval |= cleanup_control_flow_bb (bb, true);

  /* After doing the above SSA form should be valid (or an update SSA
     should be required).  */

  /* Continue by iterating over all basic blocks looking for BB merging
     opportunities.  */
  n = last_basic_block_for_fn (cfun);
  for (i = NUM_FIXED_BLOCKS; i < n; i++)
      bb = BASIC_BLOCK_FOR_FN (cfun, i);
        retval |= cleanup_tree_cfg_bb (bb);

  /* Now process the altered blocks, as long as any are available.  */
  while (!bitmap_empty_p (cfgcleanup_altered_bbs))
      i = bitmap_first_set_bit (cfgcleanup_altered_bbs);
      bitmap_clear_bit (cfgcleanup_altered_bbs, i);
      if (i < NUM_FIXED_BLOCKS)

      bb = BASIC_BLOCK_FOR_FN (cfun, i);

      retval |= cleanup_control_flow_bb (bb, false);
      retval |= cleanup_tree_cfg_bb (bb);

  end_recording_case_labels ();
  BITMAP_FREE (cfgcleanup_altered_bbs);
/* Remove unreachable blocks and other miscellaneous clean up work.
   Return true if the flowgraph was modified, false otherwise.  */

cleanup_tree_cfg_noloop (void)

  timevar_push (TV_TREE_CLEANUP_CFG);

  /* Iterate until there are no more cleanups left to do.  If any
     iteration changed the flowgraph, set CHANGED to true.

     If dominance information is available, there cannot be any unreachable
     blocks.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
      changed = delete_unreachable_blocks ();
      calculate_dominance_info (CDI_DOMINATORS);
      checking_verify_dominators (CDI_DOMINATORS);

  changed |= cleanup_tree_cfg_1 ();

  gcc_assert (dom_info_available_p (CDI_DOMINATORS));

  checking_verify_flow_info ();

  timevar_pop (TV_TREE_CLEANUP_CFG);

  if (changed && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);
/* Repairs loop structures.  */

repair_loop_structures (void)

  unsigned n_new_loops;

  calculate_dominance_info (CDI_DOMINATORS);

  timevar_push (TV_REPAIR_LOOPS);
  changed_bbs = BITMAP_ALLOC (NULL);
  n_new_loops = fix_loop_structure (changed_bbs);

  /* This usually does nothing.  But sometimes parts of cfg that originally
     were inside a loop get out of it due to edge removal (since they
     become unreachable by back edges from latch).  Also a former
     irreducible loop can become reducible - in this case force a full
     rewrite into loop-closed SSA form.  */
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    rewrite_into_loop_closed_ssa (n_new_loops ? NULL : changed_bbs,

  BITMAP_FREE (changed_bbs);

  checking_verify_loop_structure ();

  timevar_pop (TV_REPAIR_LOOPS);
/* Cleanup cfg and repair loop structures.  */

cleanup_tree_cfg (void)

  bool changed = cleanup_tree_cfg_noloop ();

  if (current_loops != NULL
      && loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    repair_loop_structures ();
/* Tries to merge the PHI nodes at BB into those at BB's sole successor.
   Returns true if successful.  */
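/* For example (an illustrative sketch): if BB contains

     # x_1 = PHI <a_2 (3), b_4 (5)>

   and its sole successor DEST contains

     # y_6 = PHI <x_1 (BB), c_7 (8)>

   then redirecting BB's predecessors straight to DEST allows the two PHI
   nodes to be merged into

     # y_6 = PHI <a_2 (3), b_4 (5), c_7 (8)>

   (compare the example before pass_data_merge_phi below).  */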
remove_forwarder_block_with_phi (basic_block bb)

  edge succ = single_succ_edge (bb);
  basic_block dest = succ->dest;
  basic_block dombb, domdest, dom;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */

  /* If the destination block consists of a nonlocal label, do not
     merge it.  */
  label = first_stmt (dest);
  if (glabel *label_stmt = dyn_cast <glabel *> (label))
    if (DECL_NONLOCAL (gimple_label_label (label_stmt)))

  /* Record BB's single pred in case we need to update the father
     loop's latch information later.  */
  basic_block pred = NULL;
  if (single_pred_p (bb))
    pred = single_pred (bb);

  /* Redirect each incoming edge to BB to DEST.  */
  while (EDGE_COUNT (bb->preds) > 0)
      edge e = EDGE_PRED (bb, 0), s;

      s = find_edge (e->src, dest);
          /* We already have an edge S from E->src to DEST.  If S and
             E->dest's sole successor edge have the same PHI arguments
             at DEST, redirect S to DEST.  */
          if (phi_alternatives_equal (dest, s, succ))
              e = redirect_edge_and_branch (e, dest);
              redirect_edge_var_map_clear (e);

      /* PHI arguments are different.  Create a forwarder block by
         splitting E so that we can merge PHI arguments on E to
         DEST.  */
      e = single_succ_edge (split_edge (e));

      s = redirect_edge_and_branch (e, dest);

      /* redirect_edge_and_branch must not create a new edge.  */

      /* Add to the PHI nodes at DEST each PHI argument removed at the
         destination of E.  */
      for (gsi = gsi_start_phis (dest);
          gphi *phi = gsi.phi ();
          tree def = gimple_phi_arg_def (phi, succ->dest_idx);
          source_location locus = gimple_phi_arg_location_from_edge (phi, succ);
          if (TREE_CODE (def) == SSA_NAME)
              /* If DEF is one of the results of PHI nodes removed during
                 redirection, replace it with the PHI argument that used
                 to be on E.  */
              vec<edge_var_map> *head = redirect_edge_var_map_vector (e);
              size_t length = head ? head->length () : 0;
              for (size_t i = 0; i < length; i++)
                  edge_var_map *vm = &(*head)[i];
                  tree old_arg = redirect_edge_var_map_result (vm);
                  tree new_arg = redirect_edge_var_map_def (vm);
                      locus = redirect_edge_var_map_location (vm);

          add_phi_arg (phi, def, s, locus);

      redirect_edge_var_map_clear (e);

  /* Update the dominators.  */
  dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
  domdest = get_immediate_dominator (CDI_DOMINATORS, dest);

  /* Shortcut to avoid calling (relatively expensive)
     nearest_common_dominator unless necessary.  */
    dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);
  set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Adjust latch information of BB's parent loop as otherwise
     the cfg hook has a hard time not to kill the loop.  */
  if (current_loops && bb->loop_father->latch == bb)
    bb->loop_father->latch = pred;

  /* Remove BB since all of BB's incoming edges have been redirected
     to DEST.  */
  delete_basic_block (bb);
/* This pass merges PHI nodes if one feeds into another.  For example,
   suppose we have the following:

     # tem_6 = PHI <tem_17(8), tem_23(7)>;

     # tem_3 = PHI <tem_6(9), tem_2(5)>;

   Then we merge the first PHI node into the second one like so:

     # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;  */
const pass_data pass_data_merge_phi =
  GIMPLE_PASS, /* type */
  "mergephi", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_MERGE_PHI, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */

class pass_merge_phi : public gimple_opt_pass
  pass_merge_phi (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_merge_phi, ctxt)

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_merge_phi (m_ctxt); }
  virtual unsigned int execute (function *);

}; // class pass_merge_phi
pass_merge_phi::execute (function *fun)

  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (fun));
  basic_block *current = worklist;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Find all PHI nodes that we may be able to merge.  */
  FOR_EACH_BB_FN (bb, fun)
      /* Look for a forwarder block with PHI nodes.  */
      if (!tree_forwarder_block_p (bb, true))

      dest = single_succ (bb);

      /* We have to feed into another basic block with PHI
         nodes.  */
      if (gimple_seq_empty_p (phi_nodes (dest))
          /* We don't want to deal with a basic block with
             abnormal edges.  */
          || bb_has_abnormal_pred (bb))

      if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
          /* If BB does not dominate DEST, then the PHI nodes at
             DEST must be the only users of the results of the PHI
             nodes in BB.  */

          unsigned int dest_idx = single_succ_edge (bb)->dest_idx;

          /* BB dominates DEST.  There may be many users of the PHI
             nodes in BB.  However, there is still a trivial case we
             can handle.  If the result of every PHI in BB is used
             only by a PHI in DEST, then we can trivially merge the
             PHI nodes from BB into DEST.  */
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
              gphi *phi = gsi.phi ();
              tree result = gimple_phi_result (phi);
              use_operand_p imm_use;

              /* If the PHI's result is never used, then we can just
                 ignore it.  */
              if (has_zero_uses (result))

              /* Get the single use of the result of this PHI node.  */
              if (!single_imm_use (result, &imm_use, &use_stmt)
                  || gimple_code (use_stmt) != GIMPLE_PHI
                  || gimple_bb (use_stmt) != dest
                  || gimple_phi_arg_def (use_stmt, dest_idx) != result)

          /* If the loop above iterated through all the PHI nodes
             in BB, then we can merge the PHIs from BB into DEST.  */
          if (gsi_end_p (gsi))

  /* Now let's drain WORKLIST.  */
  bool changed = false;
  while (current != worklist)
      changed |= remove_forwarder_block_with_phi (bb);

  /* Removing forwarder blocks can cause formerly irreducible loops
     to become reducible if we merged two entry blocks.  */
    loops_state_set (LOOPS_NEED_FIXUP);

make_pass_merge_phi (gcc::context *ctxt)

  return new pass_merge_phi (ctxt);
/* Pass: cleanup the CFG just before expanding trees to RTL.
   This is just a round of label cleanups and case node grouping
   because after the tree optimizers have run such cleanups may
   be necessary.  */
execute_cleanup_cfg_post_optimizing (void)

  unsigned int todo = execute_fixup_cfg ();
  if (cleanup_tree_cfg ())
      todo &= ~TODO_cleanup_cfg;
      todo |= TODO_update_ssa;

  maybe_remove_unreachable_handlers ();
  cleanup_dead_labels ();
  group_case_labels ();
  if ((flag_compare_debug_opt || flag_compare_debug)
      && flag_dump_final_insns)
      FILE *final_output = fopen (flag_dump_final_insns, "a");

          error ("could not open final insn dump file %qs: %m",
                 flag_dump_final_insns);
          flag_dump_final_insns = NULL;

          int save_unnumbered = flag_dump_unnumbered;
          int save_noaddr = flag_dump_noaddr;

          flag_dump_noaddr = flag_dump_unnumbered = 1;
          fprintf (final_output, "\n");
          dump_enumerated_decls (final_output, dump_flags | TDF_NOUID);
          flag_dump_noaddr = save_noaddr;
          flag_dump_unnumbered = save_unnumbered;
          if (fclose (final_output))
              error ("could not close final insn dump file %qs: %m",
                     flag_dump_final_insns);
              flag_dump_final_insns = NULL;
const pass_data pass_data_cleanup_cfg_post_optimizing =
  GIMPLE_PASS, /* type */
  "optimized", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_CLEANUP_CFG, /* tv_id */
  PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_remove_unused_locals, /* todo_flags_finish */

class pass_cleanup_cfg_post_optimizing : public gimple_opt_pass
  pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_cleanup_cfg_post_optimizing, ctxt)

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    return execute_cleanup_cfg_post_optimizing ();

}; // class pass_cleanup_cfg_post_optimizing

make_pass_cleanup_cfg_post_optimizing (gcc::context *ctxt)

  return new pass_cleanup_cfg_post_optimizing (ctxt);