1 /* CFG cleanup for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to
18 the Free Software Foundation, 51 Franklin Street, Fifth Floor,
19 Boston, MA 02110-1301, USA. */
23 #include "coretypes.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
36 #include "langhooks.h"
37 #include "diagnostic.h"
38 #include "tree-flow.h"
40 #include "tree-dump.h"
41 #include "tree-pass.h"
45 #include "cfglayout.h"
47 #include "tree-ssa-propagate.h"
48 #include "tree-scalar-evolution.h"
50 /* Remove any fallthru edge from EV. Return true if an edge was removed. */
/* NOTE(review): this extract is fragmentary -- the return-type line, the
   local edge/iterator declarations, the call that actually removes the
   edge, and the return statement are not visible here.  Comments annotate
   only what the visible tokens establish.  */
/* EV is a GC-allocated vector of edges (an edge VEC with the "gc"
   allocation strategy).  */
53 remove_fallthru_edge (VEC(edge
,gc
) *ev
)
/* Walk every edge E in EV using iterator EI ...  */
58 FOR_EACH_EDGE (e
, ei
, ev
)
/* ... and test whether E carries the EDGE_FALLTHRU flag; the (missing)
   body presumably removes that edge and reports success -- TODO confirm
   against the full source.  */
59 if ((e
->flags
& EDGE_FALLTHRU
) != 0)
67 /* Disconnect an unreachable block in the control expression starting
/* NOTE(review): fragmentary extract -- the rest of this header comment,
   the return type, several declarations (taken_edge, the edge iterator),
   the switch's case labels/braces and the return statements are missing
   from view.  The visible logic: fold the controlling expression of a
   COND_EXPR or SWITCH_EXPR at BSI; if it folds to a constant, keep only
   the edge that is actually taken and turn the control statement into a
   fallthru.  */
71 cleanup_control_expr_graph (basic_block bb
, block_stmt_iterator bsi
)
/* EXPR is the control statement at BSI; VAL will hold its folded
   condition.  */
75 tree expr
= bsi_stmt (bsi
), val
;
/* Only interesting when BB has more than one successor (a real branch).  */
77 if (!single_succ_p (bb
))
82 switch (TREE_CODE (expr
))
/* COND_EXPR: fold the condition.  */
85 val
= fold (COND_EXPR_COND (expr
));
/* SWITCH_EXPR: fold the switch index.  */
89 val
= fold (SWITCH_COND (expr
));
/* A switch can only be resolved statically when the index folds to an
   integer constant.  */
90 if (TREE_CODE (val
) != INTEGER_CST
)
/* Map the constant to the unique outgoing edge it selects.  */
98 taken_edge
= find_taken_edge (bb
, val
);
102 /* Remove all the edges except the one that is always executed. */
103 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
/* Fold the removed edge's profile data into the surviving edge so the
   block's outgoing probability/count totals are preserved.  */
107 taken_edge
->probability
+= e
->probability
;
108 taken_edge
->count
+= e
->count
;
/* Clamp accumulated probability to the fixed-point base.  */
115 if (taken_edge
->probability
> REG_BR_PROB_BASE
)
116 taken_edge
->probability
= REG_BR_PROB_BASE
;
/* (Other path, context missing): with only one successor left, that edge
   is the taken edge.  */
119 taken_edge
= single_succ_edge (bb
);
/* Delete the now-redundant control statement ...  */
121 bsi_remove (&bsi
, true);
/* ... and mark the surviving edge as a plain fallthru.  */
122 taken_edge
->flags
= EDGE_FALLTHRU
;
124 /* We removed some paths from the cfg. */
125 free_dominance_info (CDI_DOMINATORS
);
130 /* A list of all the noreturn calls passed to modify_stmt.
131 cleanup_control_flow uses it to detect cases where a mid-block
132 indirect call has been turned into a noreturn call. When this
133 happens, all the instructions after the call are no longer
134 reachable and must be deleted as dead. */
/* GC-allocated vector of statements; drained (VEC_pop) by
   cleanup_control_flow below.  */
136 VEC(tree
,gc
) *modified_noreturn_calls
;
138 /* Try to remove superfluous control structures. */
/* NOTE(review): fragmentary extract -- the return type, several local
   declarations (stmt, bb, e, ei, retval, label), the per-basic-block
   iteration, brace structure and the final return are not visible.
   Returns a flag accumulated in RETVAL indicating whether anything
   changed -- TODO confirm against the full source.  */
141 cleanup_control_flow (void)
144 block_stmt_iterator bsi
;
148 /* Detect cases where a mid-block call is now known not to return. */
149 while (VEC_length (tree
, modified_noreturn_calls
))
151 stmt
= VEC_pop (tree
, modified_noreturn_calls
);
152 bb
= bb_for_stmt (stmt
);
/* If the noreturn call is not the last statement of its block, split the
   block after it; everything downstream is unreachable.  */
153 if (bb
!= NULL
&& last_stmt (bb
) != stmt
&& noreturn_call_p (stmt
))
154 split_block (bb
, stmt
);
161 /* If the last statement of the block could throw and now cannot,
162 we need to prune cfg. */
163 tree_purge_dead_eh_edges (bb
);
/* (Per-block loop, context missing): examine the last statement of each
   block.  */
168 stmt
= bsi_stmt (bsi
);
/* Conditional and switch statements may fold to a single target.  */
170 if (TREE_CODE (stmt
) == COND_EXPR
171 || TREE_CODE (stmt
) == SWITCH_EXPR
)
172 retval
|= cleanup_control_expr_graph (bb
, bsi
);
173 /* If we had a computed goto which has a compile-time determinable
174 destination, then we can eliminate the goto. */
175 else if (TREE_CODE (stmt
) == GOTO_EXPR
176 && TREE_CODE (GOTO_DESTINATION (stmt
)) == ADDR_EXPR
/* Condition continues on lines missing from this extract (presumably
   requiring the operand to be a LABEL_DECL).  */
177 && (TREE_CODE (TREE_OPERAND (GOTO_DESTINATION (stmt
), 0))
183 basic_block target_block
;
184 bool removed_edge
= false;
186 /* First look at all the outgoing edges. Delete any outgoing
187 edges which do not go to the right block. For the one
188 edge which goes to the right block, fix up its flags. */
/* LABEL is the statically-known destination label of the computed goto.  */
189 label
= TREE_OPERAND (GOTO_DESTINATION (stmt
), 0);
190 target_block
= label_to_block (label
);
191 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
/* Edges not reaching TARGET_BLOCK are dead (removal code missing from
   this extract).  */
193 if (e
->dest
!= target_block
)
200 /* Turn off the EDGE_ABNORMAL flag. */
201 e
->flags
&= ~EDGE_ABNORMAL
;
203 /* And set EDGE_FALLTHRU. */
204 e
->flags
|= EDGE_FALLTHRU
;
209 /* If we removed one or more edges, then we will need to fix the
210 dominators. It may be possible to incrementally update them. */
212 free_dominance_info (CDI_DOMINATORS
);
214 /* Remove the GOTO_EXPR as it is not needed. The CFG has all the
215 relevant information we need. */
216 bsi_remove (&bsi
, true);
220 /* Check for indirect calls that have been turned into
/* ... noreturn calls (comment truncated in this extract).  Dropping the
   fallthru edge after such a call changes the CFG, so dominators must be
   recomputed.  */
222 else if (noreturn_call_p (stmt
) && remove_fallthru_edge (bb
->succs
))
224 free_dominance_info (CDI_DOMINATORS
);
231 /* Return true if basic block BB does nothing except pass control
232 flow to another block and that we can safely insert a label at
233 the start of the successor block.
235 As a precondition, we require that BB be not equal to
/* (Header comment truncated -- presumably "... not equal to
   ENTRY_BLOCK_PTR", matching the assert below.)
   NOTE(review): fragmentary extract -- return type, braces, case labels
   of the switch and several return statements are missing.  */
239 tree_forwarder_block_p (basic_block bb
, bool phi_wanted
)
241 block_stmt_iterator bsi
;
243 /* BB must have a single outgoing edge. */
244 if (single_succ_p (bb
) != 1
245 /* If PHI_WANTED is false, BB must not have any PHI nodes.
246 Otherwise, BB must have PHI nodes. */
247 || (phi_nodes (bb
) != NULL_TREE
) != phi_wanted
248 /* BB may not be a predecessor of EXIT_BLOCK_PTR. */
249 || single_succ (bb
) == EXIT_BLOCK_PTR
250 /* Nor should this be an infinite loop. */
251 || single_succ (bb
) == bb
252 /* BB may not have an abnormal outgoing edge. */
253 || (single_succ_edge (bb
)->flags
& EDGE_ABNORMAL
))
257 gcc_assert (bb
!= ENTRY_BLOCK_PTR
);
260 /* Now walk through the statements backward. We can ignore labels,
261 anything else means this is not a forwarder block. */
262 for (bsi
= bsi_last (bb
); !bsi_end_p (bsi
); bsi_prev (&bsi
))
264 tree stmt
= bsi_stmt (bsi
);
266 switch (TREE_CODE (stmt
))
/* (Inside the LABEL_EXPR case, per context): a nonlocal label pins the
   block -- it cannot be forwarded.  */
269 if (DECL_NONLOCAL (LABEL_EXPR_LABEL (stmt
)))
/* A block directly reachable from the entry block is not forwardable
   (consequent code missing from this extract).  */
278 if (find_edge (ENTRY_BLOCK_PTR
, bb
))
284 /* Protect loop latches, headers and preheaders. */
285 if (bb
->loop_father
->header
== bb
)
287 dest
= EDGE_SUCC (bb
, 0)->dest
;
/* Do not forward into a loop header either (would disturb preheaders).  */
289 if (dest
->loop_father
->header
== dest
)
296 /* Return true if BB has at least one abnormal incoming edge. */
/* NOTE(review): fragmentary extract -- return type, local declarations
   and the return statements are not visible; the visible loop scans BB's
   predecessor edges for EDGE_ABNORMAL.  */
299 has_abnormal_incoming_edge_p (basic_block bb
)
304 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
305 if (e
->flags
& EDGE_ABNORMAL
)
311 /* If all the PHI nodes in DEST have alternatives for E1 and E2 and
312 those alternatives are equal in each of the PHI nodes, then return
313 true, else return false. */
/* NOTE(review): fragmentary extract -- return type, the phi declaration,
   braces and return statements are missing from view.  */
316 phi_alternatives_equal (basic_block dest
, edge e1
, edge e2
)
/* Destination indices select each edge's argument slot in DEST's PHIs.  */
318 int n1
= e1
->dest_idx
;
319 int n2
= e2
->dest_idx
;
/* Compare the E1 and E2 arguments of every PHI node in DEST.  */
322 for (phi
= phi_nodes (dest
); phi
; phi
= PHI_CHAIN (phi
))
324 tree val1
= PHI_ARG_DEF (phi
, n1
);
325 tree val2
= PHI_ARG_DEF (phi
, n2
);
327 gcc_assert (val1
!= NULL_TREE
);
328 gcc_assert (val2
!= NULL_TREE
);
/* Any mismatch means the edges are not interchangeable (the failing
   return is in lines missing from this extract).  */
330 if (!operand_equal_for_phi_arg_p (val1
, val2
))
337 /* Removes forwarder block BB. Returns false if this failed. If a new
338 forwarder block is created due to redirection of edges, it is
339 stored to worklist. */
/* NOTE(review): fragmentary extract -- return type, several declarations
   (label, phi, e, ei), brace structure, the failing returns and the
   infinite-loop check referenced by the comment below are missing from
   view.  WORKLIST is a cursor into a caller-owned array; new forwarder
   candidates are appended through it.  */
342 remove_forwarder_block (basic_block bb
, basic_block
**worklist
)
/* SUCC is BB's unique outgoing edge; DEST the block BB forwards to.  */
344 edge succ
= single_succ_edge (bb
), e
, s
;
345 basic_block dest
= succ
->dest
;
349 block_stmt_iterator bsi
, bsi_to
;
350 bool seen_abnormal_edge
= false;
352 /* We check for infinite loops already in tree_forwarder_block_p.
353 However it may happen that the infinite loop is created
354 afterwards due to removal of forwarders. */
358 /* If the destination block consists of a nonlocal label, do not merge
/* ... it (comment truncated in this extract).  */
360 label
= first_stmt (dest
);
362 && TREE_CODE (label
) == LABEL_EXPR
363 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label
)))
366 /* If there is an abnormal edge to basic block BB, but not into
367 dest, problems might occur during removal of the phi node at out
368 of ssa due to overlapping live ranges of registers.
370 If there is an abnormal edge in DEST, the problems would occur
371 anyway since cleanup_dead_labels would then merge the labels for
372 two different eh regions, and rest of exception handling code
375 So if there is an abnormal edge to BB, proceed only if there is
376 no abnormal edge to DEST and there are no phi nodes in DEST. */
377 if (has_abnormal_incoming_edge_p (bb
))
379 seen_abnormal_edge
= true;
/* Bail out (code missing here) when DEST also has abnormal preds or any
   PHI nodes.  */
381 if (has_abnormal_incoming_edge_p (dest
)
382 || phi_nodes (dest
) != NULL_TREE
)
386 /* If there are phi nodes in DEST, and some of the blocks that are
387 predecessors of BB are also predecessors of DEST, check that the
388 phi node arguments match. */
389 if (phi_nodes (dest
))
391 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
393 s
= find_edge (e
->src
, dest
);
/* Mismatched PHI arguments would change program values -- give up
   (failing return in missing lines).  */
397 if (!phi_alternatives_equal (dest
, succ
, s
))
402 /* Redirect the edges. */
403 for (ei
= ei_start (bb
->preds
); (e
= ei_safe_edge (ei
)); )
405 if (e
->flags
& EDGE_ABNORMAL
)
407 /* If there is an abnormal edge, redirect it anyway, and
408 move the labels to the new block to make it legal. */
409 s
= redirect_edge_succ_nodup (e
, dest
);
/* Normal edges go through the branch-aware redirection.  */
412 s
= redirect_edge_and_branch (e
, dest
);
416 /* Create arguments for the phi nodes, since the edge was not
/* ... there before (comment truncated).  Reuse SUCC's argument slot as
   the value for the newly created edge S.  */
418 for (phi
= phi_nodes (dest
); phi
; phi
= PHI_CHAIN (phi
))
419 add_phi_arg (phi
, PHI_ARG_DEF (phi
, succ
->dest_idx
), s
);
423 /* The source basic block might become a forwarder. We know
424 that it was not a forwarder before, since it used to have
425 at least two outgoing edges, so we may just add it to
/* ... the worklist (comment truncated).  */
427 if (tree_forwarder_block_p (s
->src
, false))
428 *(*worklist
)++ = s
->src
;
432 if (seen_abnormal_edge
)
434 /* Move the labels to the new block, so that the redirection of
435 the abnormal edges works. */
437 bsi_to
= bsi_start (dest
);
438 for (bsi
= bsi_start (bb
); !bsi_end_p (bsi
); )
440 label
= bsi_stmt (bsi
);
441 gcc_assert (TREE_CODE (label
) == LABEL_EXPR
);
442 bsi_remove (&bsi
, false);
443 bsi_insert_before (&bsi_to
, label
, BSI_CONTINUE_LINKING
);
447 /* Update the dominators. */
448 if (dom_info_available_p (CDI_DOMINATORS
))
450 basic_block dom
, dombb
, domdest
;
452 dombb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
453 domdest
= get_immediate_dominator (CDI_DOMINATORS
, dest
);
456 /* Shortcut to avoid calling (relatively expensive)
457 nearest_common_dominator unless necessary. */
461 dom
= nearest_common_dominator (CDI_DOMINATORS
, domdest
, dombb
);
463 set_immediate_dominator (CDI_DOMINATORS
, dest
, dom
);
466 /* And kill the forwarder block. */
467 delete_basic_block (bb
);
472 /* Removes forwarder blocks. */
475 cleanup_forwarder_blocks (void)
478 bool changed
= false;
479 basic_block
*worklist
= XNEWVEC (basic_block
, n_basic_blocks
);
480 basic_block
*current
= worklist
;
484 if (tree_forwarder_block_p (bb
, false))
488 while (current
!= worklist
)
491 changed
|= remove_forwarder_block (bb
, ¤t
);
498 /* Do one round of CFG cleanup. */
/* NOTE(review): fragmentary extract -- return type, the declaration of
   retval, brace structure, the condition guarding forwarder cleanup and
   the final return are missing from view.  RETVAL accumulates whether
   any sub-pass changed the CFG.  */
501 cleanup_tree_cfg_1 (void)
505 retval
= cleanup_control_flow ();
506 retval
|= delete_unreachable_blocks ();
508 /* Forwarder blocks can carry line number information which is
509 useful when debugging, so we only clean them up when
/* ... (condition truncated in this extract).  */
514 /* cleanup_forwarder_blocks can redirect edges out of
515 SWITCH_EXPRs, which can get expensive. So we want to enable
516 recording of edge to CASE_LABEL_EXPR mappings around the call
517 to cleanup_forwarder_blocks. */
518 start_recording_case_labels ();
519 retval
|= cleanup_forwarder_blocks ();
520 end_recording_case_labels ();
523 /* Merging the blocks may create new opportunities for folding
524 conditional branches (due to the elimination of single-valued PHI
/* ... nodes (comment truncated).  */
526 retval
|= merge_seq_blocks ();
532 /* Remove unreachable blocks and other miscellaneous clean up work.
533 Return true if the flowgraph was modified, false otherwise. */
/* NOTE(review): fragmentary extract -- the return type, the iteration
   loop around cleanup_tree_cfg_1, the ENABLE_CHECKING verification body
   and the return statement are missing from view.  */
536 cleanup_tree_cfg (void)
538 bool retval
, changed
;
/* Bracket the whole cleanup in its own timevar for -ftime-report.  */
540 timevar_push (TV_TREE_CLEANUP_CFG
);
542 /* Iterate until there are no more cleanups left to do. If any
543 iteration changed the flowgraph, set CHANGED to true. */
547 retval
= cleanup_tree_cfg_1 ();
554 #ifdef ENABLE_CHECKING
558 timevar_pop (TV_TREE_CLEANUP_CFG
);
563 /* Cleanup cfg and repair loop structures. */
/* NOTE(review): fragmentary extract -- the return type and the guard
   around the loop-fixup body (presumably "if (changed && current_loops)"
   or similar -- TODO confirm) are missing from view.  */
566 cleanup_tree_cfg_loop (void)
568 bool changed
= cleanup_tree_cfg ();
/* CHANGED_BBS collects blocks whose loop membership was altered, so the
   loop-closed SSA rewrite below can be restricted to them.  */
572 bitmap changed_bbs
= BITMAP_ALLOC (NULL
);
573 fix_loop_structure (current_loops
, changed_bbs
);
574 calculate_dominance_info (CDI_DOMINATORS
);
576 /* This usually does nothing. But sometimes parts of cfg that originally
577 were inside a loop get out of it due to edge removal (since they
578 become unreachable by back edges from latch). */
579 rewrite_into_loop_closed_ssa (changed_bbs
, TODO_update_ssa
);
581 BITMAP_FREE (changed_bbs
);
583 #ifdef ENABLE_CHECKING
584 verify_loop_structure (current_loops
);
590 /* Merge the PHI nodes at BB into those at BB's sole successor. */
/* NOTE(review): fragmentary extract -- the return type, declarations of
   label/phi/var, brace structure, early returns, the infinite-loop check
   referenced below and the assert after redirect_edge_and_branch are
   missing from view.  */
593 remove_forwarder_block_with_phi (basic_block bb
)
/* SUCC is BB's unique outgoing edge; DEST the merge target.  */
595 edge succ
= single_succ_edge (bb
);
596 basic_block dest
= succ
->dest
;
598 basic_block dombb
, domdest
, dom
;
600 /* We check for infinite loops already in tree_forwarder_block_p.
601 However it may happen that the infinite loop is created
602 afterwards due to removal of forwarders. */
606 /* If the destination block consists of a nonlocal label, do not
/* ... merge it (comment truncated).  */
608 label
= first_stmt (dest
);
610 && TREE_CODE (label
) == LABEL_EXPR
611 && DECL_NONLOCAL (LABEL_EXPR_LABEL (label
)))
614 /* Redirect each incoming edge to BB to DEST. */
615 while (EDGE_COUNT (bb
->preds
) > 0)
617 edge e
= EDGE_PRED (bb
, 0), s
;
620 s
= find_edge (e
->src
, dest
);
623 /* We already have an edge S from E->src to DEST. If S and
624 E->dest's sole successor edge have the same PHI arguments
625 at DEST, redirect S to DEST. */
626 if (phi_alternatives_equal (dest
, s
, succ
))
628 e
= redirect_edge_and_branch (e
, dest
);
629 PENDING_STMT (e
) = NULL_TREE
;
633 /* PHI arguments are different. Create a forwarder block by
634 splitting E so that we can merge PHI arguments on E to
/* ... DEST (comment truncated).  */
636 e
= single_succ_edge (split_edge (e
));
639 s
= redirect_edge_and_branch (e
, dest
);
641 /* redirect_edge_and_branch must not create a new edge. */
644 /* Add to the PHI nodes at DEST each PHI argument removed at the
/* ... destruction of BB (comment truncated).  */
646 for (phi
= phi_nodes (dest
); phi
; phi
= PHI_CHAIN (phi
))
/* DEF is the value BB fed into this PHI along SUCC.  */
648 tree def
= PHI_ARG_DEF (phi
, succ
->dest_idx
);
650 if (TREE_CODE (def
) == SSA_NAME
)
654 /* If DEF is one of the results of PHI nodes removed during
655 redirection, replace it with the PHI argument that used
/* ... to be on edge E (comment truncated).  PENDING_STMT holds the
   old-arg/new-arg pairs recorded by the redirection.  */
657 for (var
= PENDING_STMT (e
); var
; var
= TREE_CHAIN (var
))
659 tree old_arg
= TREE_PURPOSE (var
);
660 tree new_arg
= TREE_VALUE (var
);
/* Attach the (possibly substituted) value to the new edge S.  */
670 add_phi_arg (phi
, def
, s
);
673 PENDING_STMT (e
) = NULL
;
676 /* Update the dominators. */
677 dombb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
678 domdest
= get_immediate_dominator (CDI_DOMINATORS
, dest
);
681 /* Shortcut to avoid calling (relatively expensive)
682 nearest_common_dominator unless necessary. */
686 dom
= nearest_common_dominator (CDI_DOMINATORS
, domdest
, dombb
);
688 set_immediate_dominator (CDI_DOMINATORS
, dest
, dom
);
690 /* Remove BB since all of BB's incoming edges have been redirected
/* ... to DEST (comment truncated).  */
692 delete_basic_block (bb
);
695 /* This pass merges PHI nodes if one feeds into another. For example,
696 suppose we have the following:
703 # tem_6 = PHI <tem_17(8), tem_23(7)>;
706 # tem_3 = PHI <tem_6(9), tem_2(5)>;
709 Then we merge the first PHI node into the second one like so:
716 # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
/* NOTE(review): fragmentary extract -- the return type, declarations of
   bb/dest/phi/use_stmt, the FOR_EACH_BB loop, the worklist push, the
   free of WORKLIST and the return are missing from view.  */
721 merge_phi_nodes (void)
/* Worst case: every basic block is a PHI-forwarder candidate.  */
723 basic_block
*worklist
= XNEWVEC (basic_block
, n_basic_blocks
);
724 basic_block
*current
= worklist
;
/* Dominance info is needed for the dominated_by_p test below.  */
727 calculate_dominance_info (CDI_DOMINATORS
);
729 /* Find all PHI nodes that we may be able to merge. */
734 /* Look for a forwarder block with PHI nodes. */
735 if (!tree_forwarder_block_p (bb
, true))
738 dest
= single_succ (bb
);
740 /* We have to feed into another basic block with PHI
/* ... nodes (comment truncated).  */
742 if (!phi_nodes (dest
)
743 /* We don't want to deal with a basic block with
/* ... abnormal edges (comment truncated).  */
745 || has_abnormal_incoming_edge_p (bb
))
748 if (!dominated_by_p (CDI_DOMINATORS
, dest
, bb
))
750 /* If BB does not dominate DEST, then the PHI nodes at
751 DEST must be the only users of the results of the PHI
/* ... nodes at BB (comment truncated); in that case BB can be queued
   directly.  */
758 unsigned int dest_idx
= single_succ_edge (bb
)->dest_idx
;
760 /* BB dominates DEST. There may be many users of the PHI
761 nodes in BB. However, there is still a trivial case we
762 can handle. If the result of every PHI in BB is used
763 only by a PHI in DEST, then we can trivially merge the
764 PHI nodes from BB into DEST. */
765 for (phi
= phi_nodes (bb
); phi
; phi
= PHI_CHAIN (phi
))
767 tree result
= PHI_RESULT (phi
);
768 int num_uses
= num_imm_uses (result
);
769 use_operand_p imm_use
;
772 /* If the PHI's result is never used, then we can just
/* ... ignore it (comment truncated).  */
777 /* Get the single use of the result of this PHI node. */
/* The single use must be a PHI in DEST consuming RESULT through BB's
   argument slot; otherwise merging is unsafe.  */
778 if (!single_imm_use (result
, &imm_use
, &use_stmt
)
779 || TREE_CODE (use_stmt
) != PHI_NODE
780 || bb_for_stmt (use_stmt
) != dest
781 || PHI_ARG_DEF (use_stmt
, dest_idx
) != result
)
785 /* If the loop above iterated through all the PHI nodes
786 in BB, then we can merge the PHIs from BB into DEST. */
792 /* Now let's drain WORKLIST. */
793 while (current
!= worklist
)
796 remove_forwarder_block_with_phi (bb
);
/* Gate function for the mergephi pass.  NOTE(review): the return type
   and body (presumably "return 1;" or a flag test -- TODO confirm) are
   missing from this extract.  */
803 gate_merge_phi (void)
/* Pass descriptor for the PHI-merging pass; several designator lines
   (e.g. sub-pass pointers, properties) are missing from this extract.  */
808 struct tree_opt_pass pass_merge_phi
= {
809 "mergephi", /* name */
810 gate_merge_phi
, /* gate */
811 merge_phi_nodes
, /* execute */
814 0, /* static_pass_number */
815 TV_TREE_MERGE_PHI
, /* tv_id */
816 PROP_cfg
| PROP_ssa
, /* properties_required */
817 0, /* properties_provided */
818 0, /* properties_destroyed */
819 0, /* todo_flags_start */
820 TODO_dump_func
| TODO_ggc_collect
/* todo_flags_finish */