/* Control flow optimization code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file contains the control flow optimizer.  The main entry point is
   cleanup_cfg.  The following optimizations are performed:

   - Unreachable block removal
   - Edge forwarding (an edge to a forwarder block is forwarded to its
     successor.  Simplification of the branch instruction is performed by
     the underlying infrastructure so that the branch can be converted to a
     simplejump or eliminated)
   - Cross jumping (tail merging)
   - Conditional jump-around-simplejump simplification
   - Basic block merging.  */
#include "hard-reg-set.h"
#include "basic-block.h"
#include "insn-config.h"
/* cleanup_cfg maintains the following flags for each basic block.  */

enum bb_flags
{
  /* Set if BB is a forwarder block, to avoid too many
     forwarder_block_p calls.  */
  BB_FORWARDER_BLOCK = 1,
  BB_NONTHREADABLE_BLOCK = 2
};

#define BB_FLAGS(BB) (enum bb_flags) (BB)->aux
#define BB_SET_FLAG(BB, FLAG) \
  (BB)->aux = (void *) (long) ((enum bb_flags) (BB)->aux | (FLAG))
#define BB_CLEAR_FLAG(BB, FLAG) \
  (BB)->aux = (void *) (long) ((enum bb_flags) (BB)->aux & ~(FLAG))

#define FORWARDER_BLOCK_P(BB) (BB_FLAGS (BB) & BB_FORWARDER_BLOCK)
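/* Illustrative sketch (not part of the original file): the flags are plain
   bits OR-ed into the block's AUX pointer, so they can be set, cleared and
   tested independently.  The function name is invented for illustration.  */

static int
example_bb_flag_usage (bb)
     basic_block bb;
{
  /* Cache two independent properties in the same AUX word.  */
  BB_SET_FLAG (bb, BB_FORWARDER_BLOCK);
  BB_SET_FLAG (bb, BB_NONTHREADABLE_BLOCK);

  /* Clearing one bit does not disturb the other.  */
  BB_CLEAR_FLAG (bb, BB_NONTHREADABLE_BLOCK);

  /* FORWARDER_BLOCK_P is just a test of the cached bit.  */
  return FORWARDER_BLOCK_P (bb) != 0;
}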
static bool try_crossjump_to_edge	PARAMS ((int, edge, edge));
static bool try_crossjump_bb		PARAMS ((int, basic_block));
static bool outgoing_edges_match	PARAMS ((int, basic_block, basic_block));
static int flow_find_cross_jump		PARAMS ((int, basic_block, basic_block,
						 rtx *, rtx *));
static bool insns_match_p		PARAMS ((int, rtx, rtx));

static bool label_is_jump_target_p	PARAMS ((rtx, rtx));
static bool tail_recursion_label_p	PARAMS ((rtx));
static void merge_blocks_move_predecessor_nojumps
					PARAMS ((basic_block, basic_block));
static void merge_blocks_move_successor_nojumps
					PARAMS ((basic_block, basic_block));
static bool merge_blocks		PARAMS ((edge, basic_block, basic_block,
						 int));
static bool try_optimize_cfg		PARAMS ((int));
static bool try_simplify_condjump	PARAMS ((basic_block));
static bool try_forward_edges		PARAMS ((int, basic_block));
static edge thread_jump			PARAMS ((int, edge, basic_block));
static bool mark_effect			PARAMS ((rtx, bitmap));
static void notice_new_block		PARAMS ((basic_block));
static void update_forwarder_flag	PARAMS ((basic_block));
static int mentions_nonequal_regs	PARAMS ((rtx *, void *));
/* Set flags for a newly created block.  */

static void
notice_new_block (bb)
     basic_block bb;
{
  if (!bb)
    return;

  if (forwarder_block_p (bb))
    BB_SET_FLAG (bb, BB_FORWARDER_BLOCK);
}

/* Recompute the forwarder flag after a block has been modified.  */

static void
update_forwarder_flag (bb)
     basic_block bb;
{
  if (forwarder_block_p (bb))
    BB_SET_FLAG (bb, BB_FORWARDER_BLOCK);
  else
    BB_CLEAR_FLAG (bb, BB_FORWARDER_BLOCK);
}
/* Simplify a conditional jump around an unconditional jump.
   Return true if something changed.  */

static bool
try_simplify_condjump (cbranch_block)
     basic_block cbranch_block;
{
  basic_block jump_block, jump_dest_block, cbranch_dest_block;
  edge cbranch_jump_edge, cbranch_fallthru_edge;
  rtx cbranch_insn;

  /* Verify that there are exactly two successors.  */
  if (!cbranch_block->succ
      || !cbranch_block->succ->succ_next
      || cbranch_block->succ->succ_next->succ_next)
    return false;

  /* Verify that we've got a normal conditional branch at the end
     of the block.  */
  cbranch_insn = cbranch_block->end;
  if (!any_condjump_p (cbranch_insn))
    return false;

  cbranch_fallthru_edge = FALLTHRU_EDGE (cbranch_block);
  cbranch_jump_edge = BRANCH_EDGE (cbranch_block);

  /* The next block must not have multiple predecessors, must not
     be the last block in the function, and must contain just the
     unconditional jump.  */
  jump_block = cbranch_fallthru_edge->dest;
  if (jump_block->pred->pred_next
      || jump_block->next_bb == EXIT_BLOCK_PTR
      || !FORWARDER_BLOCK_P (jump_block))
    return false;
  jump_dest_block = jump_block->succ->dest;

  /* The conditional branch must target the block after the
     unconditional branch.  */
  cbranch_dest_block = cbranch_jump_edge->dest;

  if (!can_fallthru (jump_block, cbranch_dest_block))
    return false;

  /* Invert the conditional branch.  */
  if (!invert_jump (cbranch_insn, block_label (jump_dest_block), 0))
    return false;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Simplifying condjump %i around jump %i\n",
	     INSN_UID (cbranch_insn), INSN_UID (jump_block->end));

  /* Success.  Update the CFG to match.  Note that after this point
     the edge variable names appear backwards; the redirection is done
     this way to preserve edge profile data.  */
  cbranch_jump_edge = redirect_edge_succ_nodup (cbranch_jump_edge,
						cbranch_dest_block);
  cbranch_fallthru_edge = redirect_edge_succ_nodup (cbranch_fallthru_edge,
						    jump_dest_block);
  cbranch_jump_edge->flags |= EDGE_FALLTHRU;
  cbranch_fallthru_edge->flags &= ~EDGE_FALLTHRU;
  update_br_prob_note (cbranch_block);

  /* Delete the block with the unconditional jump, and clean up the mess.  */
  flow_delete_block (jump_block);
  tidy_fallthru_edge (cbranch_jump_edge, cbranch_block, cbranch_dest_block);
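/* Worked example (illustration only, not part of the original file): at
   source level the simplification above roughly turns

       if (cond) goto L1;               if (!cond) goto L2;
       goto L2;                  =>   L1:
     L1:                                 ...

   where the "goto L2" block is a single-predecessor forwarder and L1 can
   be reached by falling through, so inverting the conditional branch
   makes the unconditional jump (and its block) deletable.  */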
/* Attempt to prove that an operation is a NOOP using CSElib, or mark its
   effect on a register.  Used by jump threading.  */

mark_effect (exp, nonequal)

  switch (GET_CODE (exp))
      /* If we do clobber the register, mark it as equal, as we know the
	 value is dead so it does not have to match.  */
      if (REG_P (XEXP (exp, 0)))
	  dest = XEXP (exp, 0);
	  regno = REGNO (dest);
	  CLEAR_REGNO_REG_SET (nonequal, regno);
	  if (regno < FIRST_PSEUDO_REGISTER)
	      int n = HARD_REGNO_NREGS (regno, GET_MODE (dest));

		CLEAR_REGNO_REG_SET (nonequal, regno + n);

      if (rtx_equal_for_cselib_p (SET_DEST (exp), SET_SRC (exp)))

      dest = SET_DEST (exp);
      regno = REGNO (dest);
      SET_REGNO_REG_SET (nonequal, regno);
      if (regno < FIRST_PSEUDO_REGISTER)
	  int n = HARD_REGNO_NREGS (regno, GET_MODE (dest));

	    SET_REGNO_REG_SET (nonequal, regno + n);
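/* Worked example (illustration only, not part of the original file): a
   CLOBBER of a register clears its bit in NONEQUAL because the old value
   is dead and need not match; a SET whose source is cselib-equivalent to
   its destination is a no-op and changes nothing; any other SET of a
   register turns on its bit (and the bits of the other hard registers it
   spans), which later makes the threading attempt fail if the final
   condition mentions that register.  */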
/* Return nonzero if X is a register set in regset DATA.
   Called via for_each_rtx.  */

mentions_nonequal_regs (x, data)

  regset nonequal = (regset) data;

      if (REGNO_REG_SET_P (nonequal, regno))

      if (regno < FIRST_PSEUDO_REGISTER)
	  int n = HARD_REGNO_NREGS (regno, GET_MODE (*x));

	    if (REGNO_REG_SET_P (nonequal, regno + n))
/* Attempt to prove that the basic block B will have no side effects and
   will always continue along the same edge if reached via E.  Return the
   edge if it exists, NULL otherwise.  */

thread_jump (mode, e, b)

  rtx set1, set2, cond1, cond2, insn;
  enum rtx_code code1, code2, reversed_code2;
  bool reverse1 = false;

  if (BB_FLAGS (b) & BB_NONTHREADABLE_BLOCK)

  /* At the moment, we handle only conditional jumps, but later we may
     want to extend this code to tablejumps and others.  */
  if (!e->src->succ->succ_next || e->src->succ->succ_next->succ_next)

  if (!b->succ || !b->succ->succ_next || b->succ->succ_next->succ_next)
      BB_SET_FLAG (b, BB_NONTHREADABLE_BLOCK);

  /* The second branch must end with an onlyjump, as we will eliminate
     the jump.  */
  if (!any_condjump_p (e->src->end))

  if (!any_condjump_p (b->end) || !onlyjump_p (b->end))
      BB_SET_FLAG (b, BB_NONTHREADABLE_BLOCK);

  set1 = pc_set (e->src->end);
  set2 = pc_set (b->end);
  if (((e->flags & EDGE_FALLTHRU) != 0)
      != (XEXP (SET_SRC (set1), 1) == pc_rtx))

  cond1 = XEXP (SET_SRC (set1), 0);
  cond2 = XEXP (SET_SRC (set2), 0);
  if (reverse1)
    code1 = reversed_comparison_code (cond1, e->src->end);
  else
    code1 = GET_CODE (cond1);

  code2 = GET_CODE (cond2);
  reversed_code2 = reversed_comparison_code (cond2, b->end);

  if (!comparison_dominates_p (code1, code2)
      && !comparison_dominates_p (code1, reversed_code2))

  /* Ensure that the comparison operators are equivalent.
     ??? This is far too pessimistic.  We should allow swapped operands,
     different CCmodes, or for example comparisons for intervals that
     dominate even when operands are not equivalent.  */
  if (!rtx_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
      || !rtx_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))

  /* Short circuit cases where block B contains some side effects, as we
     can't safely bypass it.  */
  for (insn = NEXT_INSN (b->head); insn != NEXT_INSN (b->end);
       insn = NEXT_INSN (insn))
    if (INSN_P (insn) && side_effects_p (PATTERN (insn)))
	BB_SET_FLAG (b, BB_NONTHREADABLE_BLOCK);

  /* First process all values computed in the source basic block.  */
  for (insn = NEXT_INSN (e->src->head); insn != NEXT_INSN (e->src->end);
       insn = NEXT_INSN (insn))
      cselib_process_insn (insn);

  nonequal = BITMAP_XMALLOC ();
  CLEAR_REG_SET (nonequal);

  /* Now assume that we've continued by the edge E to B and continue
     processing as if it were the same basic block.
     Our goal is to prove that the whole block is a NOOP.  */

  for (insn = NEXT_INSN (b->head); insn != NEXT_INSN (b->end) && !failed;
       insn = NEXT_INSN (insn))
	  rtx pat = PATTERN (insn);

	  if (GET_CODE (pat) == PARALLEL)
	      for (i = 0; i < XVECLEN (pat, 0); i++)
		failed |= mark_effect (XVECEXP (pat, 0, i), nonequal);
	    failed |= mark_effect (pat, nonequal);

      cselib_process_insn (insn);

  /* Later we should clear nonequal of dead registers.  So far we don't
     have life information in cfg_cleanup.  */
      BB_SET_FLAG (b, BB_NONTHREADABLE_BLOCK);

  /* cond2 must not mention any register that is not equal to the
     former block.  */
  if (for_each_rtx (&cond2, mentions_nonequal_regs, nonequal))

  /* In case liveness information is available, we need to prove equivalence
     only of the live values.  */
  if (mode & CLEANUP_UPDATE_LIFE)
    AND_REG_SET (nonequal, b->global_live_at_end);

  EXECUTE_IF_SET_IN_REG_SET (nonequal, 0, i, goto failed_exit;);

  BITMAP_XFREE (nonequal);

  if ((comparison_dominates_p (code1, code2) != 0)
      != (XEXP (SET_SRC (set2), 1) == pc_rtx))
    return BRANCH_EDGE (b);

  return FALLTHRU_EDGE (b);

  BITMAP_XFREE (nonequal);
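/* Worked example (illustration only, not part of the original file):
   suppose E is the taken edge of "if (a > 0)" in E->src and block B ends
   with "if (a != 0)" while computing nothing else.  GT dominates NE, so
   once cselib shows B is effectively a NOOP on this path, the outcome of
   B's comparison is already known and thread_jump returns the
   corresponding successor edge of B, letting the caller bypass B.  */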
/* Attempt to forward edges leaving basic block B.
   Return true if successful.  */

try_forward_edges (mode, b)

  bool changed = false;
  edge e, next, *threaded_edges = NULL;

  for (e = b->succ; e; e = next)
      basic_block target, first;
      bool threaded = false;
      int nthreaded_edges = 0;

      /* Skip complex edges because we don't know how to update them.

	 Still handle fallthru edges, as we can succeed in forwarding a
	 fallthru edge to the same place as the branch edge of a conditional
	 branch and turn the conditional branch into an unconditional
	 branch.  */
      if (e->flags & EDGE_COMPLEX)

      target = first = e->dest;

      while (counter < n_basic_blocks)
	  basic_block new_target = NULL;
	  bool new_target_threaded = false;

	  if (FORWARDER_BLOCK_P (target)
	      && target->succ->dest != EXIT_BLOCK_PTR)
	      /* Bypass trivial infinite loops.  */
	      if (target == target->succ->dest)
		counter = n_basic_blocks;
	      new_target = target->succ->dest;

	  /* Allow threading only over one edge at a time to simplify
	     updating of probabilities.  */
	  else if (mode & CLEANUP_THREADING)
	      edge t = thread_jump (mode, e, target);

		  threaded_edges = xmalloc (sizeof (*threaded_edges)

		  /* Detect an infinite loop across blocks not
		     including the start block.  */
		  for (i = 0; i < nthreaded_edges; ++i)
		    if (threaded_edges[i] == t)
		  if (i < nthreaded_edges)
		      counter = n_basic_blocks;

		  /* Detect an infinite loop across the start block.  */

		  if (nthreaded_edges >= n_basic_blocks)
		  threaded_edges[nthreaded_edges++] = t;

		  new_target = t->dest;
		  new_target_threaded = true;

	  /* Avoid killing loop pre-headers, as they are the places the loop
	     optimizer wants to hoist code to.

	     For fallthru forwarders, the LOOP_BEG note must appear between
	     the header of the block and the CODE_LABEL of the loop; for
	     non-forwarders it must appear before the JUMP_INSN.  */
	  if (mode & CLEANUP_PRE_LOOP)
	      rtx insn = (target->succ->flags & EDGE_FALLTHRU
			  ? target->head : prev_nonnote_insn (target->end));

	      if (GET_CODE (insn) != NOTE)
		insn = NEXT_INSN (insn);

	      for (; insn && GET_CODE (insn) != CODE_LABEL && !INSN_P (insn);
		   insn = NEXT_INSN (insn))
		if (GET_CODE (insn) == NOTE
		    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)

	      if (GET_CODE (insn) == NOTE)

	  threaded |= new_target_threaded;

      if (counter >= n_basic_blocks)
	    fprintf (rtl_dump_file, "Infinite loop in BB %i.\n",
      else if (target == first)
	; /* We didn't do anything.  */
	  /* Save the values now, as the edge may get removed.  */
	  gcov_type edge_count = e->count;
	  int edge_probability = e->probability;

	  /* Don't force if target is the exit block.  */
	  if (threaded && target != EXIT_BLOCK_PTR)
	      notice_new_block (redirect_edge_and_branch_force (e, target));
		fprintf (rtl_dump_file, "Conditionals threaded.\n");
	  else if (!redirect_edge_and_branch (e, target))
		fprintf (rtl_dump_file,
			 "Forwarding edge %i->%i to %i failed.\n",
			 b->index, e->dest->index, target->index);

	  /* We successfully forwarded the edge.  Now update profile
	     data: for each edge we traversed in the chain, remove
	     the original edge's execution count.  */
	  edge_frequency = ((edge_probability * b->frequency
			     + REG_BR_PROB_BASE / 2)

	  if (!FORWARDER_BLOCK_P (b) && forwarder_block_p (b))
	    BB_SET_FLAG (b, BB_FORWARDER_BLOCK);

	      first->count -= edge_count;
	      if (first->count < 0)
	      first->frequency -= edge_frequency;
	      if (first->frequency < 0)
		first->frequency = 0;
	      if (first->succ->succ_next)
		  if (n >= nthreaded_edges)
		  t = threaded_edges[n++];

		  if (first->frequency)
		    prob = edge_frequency * REG_BR_PROB_BASE / first->frequency;
		  if (prob > t->probability)
		    prob = t->probability;
		  t->probability -= prob;
		  prob = REG_BR_PROB_BASE - prob;
		      first->succ->probability = REG_BR_PROB_BASE;
		      first->succ->succ_next->probability = 0;
		    for (e = first->succ; e; e = e->succ_next)
		      e->probability = ((e->probability * REG_BR_PROB_BASE)
		  update_br_prob_note (first);

		  /* It is possible that as the result of
		     threading we've removed the edge as it is
		     threaded to the fallthru edge.  Avoid
		     getting out of sync.  */
		  if (n < nthreaded_edges
		      && first == threaded_edges[n]->src)
		t->count -= edge_count;
	  while (first != target);

  free (threaded_edges);
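/* Minimal sketch (illustration only, not part of the original file) of the
   forwarder-chain walk performed by the loop above, with threading, loop
   pre-header protection and profile updates left out.  The function name
   is invented for illustration.  */

static basic_block
example_skip_forwarders (e)
     edge e;
{
  basic_block target = e->dest;
  int counter = 0;

  while (counter < n_basic_blocks
	 && FORWARDER_BLOCK_P (target)
	 && target->succ->dest != EXIT_BLOCK_PTR
	 /* Bypass trivial infinite loops.  */
	 && target->succ->dest != target)
    {
      target = target->succ->dest;
      counter++;
    }

  return target;
}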
/* Return true if LABEL is a target of JUMP_INSN.  This applies only
   to non-complex jumps.  That is, direct unconditional, conditional,
   and tablejumps, but not computed jumps or returns.  It also does
   not apply to the fallthru case of a conditional jump.  */

label_is_jump_target_p (label, jump_insn)
     rtx label, jump_insn;

  rtx tmp = JUMP_LABEL (jump_insn);

      && (tmp = NEXT_INSN (tmp)) != NULL_RTX
      && GET_CODE (tmp) == JUMP_INSN
      && (tmp = PATTERN (tmp),
	  GET_CODE (tmp) == ADDR_VEC
	  || GET_CODE (tmp) == ADDR_DIFF_VEC))
      rtvec vec = XVEC (tmp, GET_CODE (tmp) == ADDR_DIFF_VEC);
      int i, veclen = GET_NUM_ELEM (vec);

      for (i = 0; i < veclen; ++i)
	if (XEXP (RTVEC_ELT (vec, i), 0) == label)
/* Return true if LABEL is used for tail recursion.  */

tail_recursion_label_p (label)

  for (x = tail_recursion_label_list; x; x = XEXP (x, 1))
    if (label == XEXP (x, 0))
/* Blocks A and B are to be merged into a single block.  A has no incoming
   fallthru edge, so it can be moved before B without adding or modifying
   any jumps (aside from the jump from A to B).  */

merge_blocks_move_predecessor_nojumps (a, b)

  barrier = next_nonnote_insn (a->end);
  if (GET_CODE (barrier) != BARRIER)
    abort ();
  delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  if (squeeze_notes (&a->head, &a->end))

  /* Scramble the insn chain.  */
  if (a->end != PREV_INSN (b->head))
    reorder_insns_nobb (a->head, a->end, PREV_INSN (b->head));
  a->flags |= BB_DIRTY;

    fprintf (rtl_dump_file, "Moved block %d before %d and merged.\n",

  /* Swap the records for the two blocks around.  */

  link_block (a, b->prev_bb);

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);
/* Blocks A and B are to be merged into a single block.  B has no outgoing
   fallthru edge, so it can be moved after A without adding or modifying
   any jumps (aside from the jump from A to B).  */

merge_blocks_move_successor_nojumps (a, b)

  rtx barrier, real_b_end;

  barrier = NEXT_INSN (b->end);

  /* Recognize a jump table following block B.  */
  if (barrier
      && GET_CODE (barrier) == CODE_LABEL
      && NEXT_INSN (barrier)
      && GET_CODE (NEXT_INSN (barrier)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (barrier))) == ADDR_DIFF_VEC))
    {
      /* Temporarily add the table jump insn to B, so that it will also
	 be moved to the correct location.  */
      b->end = NEXT_INSN (barrier);
      barrier = NEXT_INSN (b->end);
    }

  /* There had better have been a barrier there.  Delete it.  */
  if (barrier && GET_CODE (barrier) == BARRIER)
    delete_insn (barrier);

  /* Move block and loop notes out of the chain so that we do not
     disturb their order.

     ??? A better solution would be to squeeze out all the non-nested notes
     and adjust the block trees appropriately.  Even better would be to have
     a tighter connection between block trees and rtl so that this is not
     necessary.  */
  if (squeeze_notes (&b->head, &b->end))

  /* Scramble the insn chain.  */
  reorder_insns_nobb (b->head, b->end, a->end);

  /* Restore the real end of B.  */

    fprintf (rtl_dump_file, "Moved block %d after %d and merged.\n",

  /* Now blocks A and B are contiguous.  Merge them.  */
  merge_blocks_nomove (a, b);
/* Attempt to merge basic blocks that are potentially non-adjacent.
   Return true iff the attempt succeeded.  */

merge_blocks (e, b, c, mode)

  /* If C has a tail recursion label, do not merge.  There is no
     edge recorded from the call_placeholder back to this label, as
     that would make optimize_sibling_and_tail_recursive_calls more
     complex for no gain.  */
  if ((mode & CLEANUP_PRE_SIBCALL)
      && GET_CODE (c->head) == CODE_LABEL
      && tail_recursion_label_p (c->head))

  /* If B has a fallthru edge to C, no need to move anything.  */
  if (e->flags & EDGE_FALLTHRU)
      int b_index = b->index, c_index = c->index;
      merge_blocks_nomove (b, c);
      update_forwarder_flag (b);

	fprintf (rtl_dump_file, "Merged %d and %d without moving.\n",

  /* Otherwise we will need to move code around.  Do that only if expensive
     transformations are allowed.  */
  else if (mode & CLEANUP_EXPENSIVE)
      edge tmp_edge, b_fallthru_edge;
      bool c_has_outgoing_fallthru;
      bool b_has_incoming_fallthru;

      /* Avoid overactive code motion, as the forwarder blocks should be
	 eliminated by edge redirection instead.  One exception might have
	 been if B is a forwarder block and C has no fallthru edge, but
	 that should be cleaned up by bb-reorder instead.  */
      if (FORWARDER_BLOCK_P (b) || FORWARDER_BLOCK_P (c))

      /* We must make sure to not munge nesting of lexical blocks,
	 and loop notes.  This is done by squeezing out all the notes
	 and leaving them there to lie.  Not ideal, but functional.  */

      for (tmp_edge = c->succ; tmp_edge; tmp_edge = tmp_edge->succ_next)
	if (tmp_edge->flags & EDGE_FALLTHRU)

      c_has_outgoing_fallthru = (tmp_edge != NULL);

      for (tmp_edge = b->pred; tmp_edge; tmp_edge = tmp_edge->pred_next)
	if (tmp_edge->flags & EDGE_FALLTHRU)

      b_has_incoming_fallthru = (tmp_edge != NULL);
      b_fallthru_edge = tmp_edge;

      /* Otherwise, we're going to try to move C after B.  If C does
	 not have an outgoing fallthru, then it can be moved
	 immediately after B without introducing or modifying jumps.  */
      if (! c_has_outgoing_fallthru)
	  merge_blocks_move_successor_nojumps (b, c);

      /* If B does not have an incoming fallthru, then it can be moved
	 immediately before C without introducing or modifying jumps.
	 C cannot be the first block, so we do not have to worry about
	 accessing a non-existent block.  */

      if (b_has_incoming_fallthru)
	  if (b_fallthru_edge->src == ENTRY_BLOCK_PTR)
	  bb = force_nonfallthru (b_fallthru_edge);
	    notice_new_block (bb);

      merge_blocks_move_predecessor_nojumps (b, c);
/* Return true if I1 and I2 are equivalent and thus can be crossjumped.  */

insns_match_p (mode, i1, i2)
     int mode ATTRIBUTE_UNUSED;

  /* Verify that I1 and I2 are equivalent.  */
  if (GET_CODE (i1) != GET_CODE (i2))

  if (GET_CODE (p1) != GET_CODE (p2))

  /* If this is a CALL_INSN, compare register usage information.
     If we don't check this on stack register machines, the two
     CALL_INSNs might be merged leaving reg-stack.c with mismatching
     numbers of stack registers in the same basic block.
     If we don't check this on machines with delay slots, a delay slot may
     be filled that clobbers a parameter expected by the subroutine.

     ??? We take the simple route for now and assume that if they're
     equal, they were constructed identically.  */

  if (GET_CODE (i1) == CALL_INSN
      && !rtx_equal_p (CALL_INSN_FUNCTION_USAGE (i1),
		       CALL_INSN_FUNCTION_USAGE (i2)))

  /* If cross_jump_death_matters is not 0, the insn's mode
     indicates whether or not the insn contains any stack-like
     regs.  */

  if ((mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
      /* If register stack conversion has already been done, then
	 death notes must also be compared before it is certain that
	 the two instruction streams match.  */

      HARD_REG_SET i1_regset, i2_regset;

      CLEAR_HARD_REG_SET (i1_regset);
      CLEAR_HARD_REG_SET (i2_regset);

      for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
	  SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));

      for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
	if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
	  SET_HARD_REG_BIT (i2_regset, REGNO (XEXP (note, 0)));

      GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);

      ? ! rtx_renumbered_equal_p (p1, p2) : ! rtx_equal_p (p1, p2))
      /* The following code helps take care of G++ cleanups.  */
      rtx equiv1 = find_reg_equal_equiv_note (i1);
      rtx equiv2 = find_reg_equal_equiv_note (i2);

	  /* If the equivalences are not to a constant, they may
	     reference pseudos that no longer exist, so we can't
	     use them.  */
	  && (! reload_completed
	      || (CONSTANT_P (XEXP (equiv1, 0))
		  && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))))
	  rtx s1 = single_set (i1);
	  rtx s2 = single_set (i2);
	  if (s1 != 0 && s2 != 0
	      && rtx_renumbered_equal_p (SET_DEST (s1), SET_DEST (s2)))
	      validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
	      validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
	      if (! rtx_renumbered_equal_p (p1, p2))
	      else if (apply_change_group ())
/* Look through the insns at the end of BB1 and BB2 and find the longest
   sequence of equivalent insns.  Store the first insns for that sequence
   in *F1 and *F2 and return the sequence length.

   To simplify callers of this function, if the blocks match exactly,
   store the head of the blocks in *F1 and *F2.  */

flow_find_cross_jump (mode, bb1, bb2, f1, f2)
     int mode ATTRIBUTE_UNUSED;
     basic_block bb1, bb2;

  rtx i1, i2, last1, last2, afterlast1, afterlast2;

  /* Skip simple jumps at the end of the blocks.  Complex jumps still
     need to be compared for equivalence, which we'll do below.  */

  last1 = afterlast1 = last2 = afterlast2 = NULL_RTX;
      || (returnjump_p (i1) && !side_effects_p (PATTERN (i1))))
    i1 = PREV_INSN (i1);

      || (returnjump_p (i2) && !side_effects_p (PATTERN (i2))))
      /* Count everything except for the unconditional jump as an insn.  */
      if (!simplejump_p (i2) && !returnjump_p (i2) && last1)
      i2 = PREV_INSN (i2);

      while (!active_insn_p (i1) && i1 != bb1->head)
	i1 = PREV_INSN (i1);

      while (!active_insn_p (i2) && i2 != bb2->head)
	i2 = PREV_INSN (i2);

      if (i1 == bb1->head || i2 == bb2->head)

      if (!insns_match_p (mode, i1, i2))

      /* Don't begin a cross-jump with a USE or CLOBBER insn.  */
      if (active_insn_p (i1))
	  /* If the merged insns have different REG_EQUAL notes, then
	     remove them.  */
	  rtx equiv1 = find_reg_equal_equiv_note (i1);
	  rtx equiv2 = find_reg_equal_equiv_note (i2);

	  if (equiv1 && !equiv2)
	    remove_note (i1, equiv1);
	  else if (!equiv1 && equiv2)
	    remove_note (i2, equiv2);
	  else if (equiv1 && equiv2
		   && !rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))
	      remove_note (i1, equiv1);
	      remove_note (i2, equiv2);

	  afterlast1 = last1, afterlast2 = last2;
	  last1 = i1, last2 = i2;

      i1 = PREV_INSN (i1);
      i2 = PREV_INSN (i2);

  /* Don't allow the insn after a compare to be shared by
     cross-jumping unless the compare is also shared.  */
  if (ninsns && reg_mentioned_p (cc0_rtx, last1) && ! sets_cc0_p (last1))
    last1 = afterlast1, last2 = afterlast2, ninsns--;

  /* Include preceding notes and labels in the cross-jump.  One,
     this may bring us to the head of the blocks as requested above.
     Two, it keeps line number notes as matched as may be.  */

      while (last1 != bb1->head && !active_insn_p (PREV_INSN (last1)))
	last1 = PREV_INSN (last1);

      if (last1 != bb1->head && GET_CODE (PREV_INSN (last1)) == CODE_LABEL)
	last1 = PREV_INSN (last1);

      while (last2 != bb2->head && !active_insn_p (PREV_INSN (last2)))
	last2 = PREV_INSN (last2);

      if (last2 != bb2->head && GET_CODE (PREV_INSN (last2)) == CODE_LABEL)
	last2 = PREV_INSN (last2);
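/* Worked example (illustration only, not part of the original file): for

     BB1: ...; x = y; z = x + 1; jump L      BB2: ...; x = y; z = x + 1; jump L

   the backward scan pairs the insns from the ends of both blocks, so the
   function returns 2 with *F1 and *F2 pointing at the two "x = y" insns
   (possibly extended over preceding notes and labels), which is the tail
   that try_crossjump_to_edge can then share.  */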
/* Return true iff the outgoing edges of BB1 and BB2 match, together with
   the branch instruction.  This means that if we commonize the control
   flow before the end of the basic block, the semantics remain unchanged.

   We may assume that there exists one edge with a common destination.  */

outgoing_edges_match (mode, bb1, bb2)

  int nehedges1 = 0, nehedges2 = 0;
  edge fallthru1 = 0, fallthru2 = 0;

  /* If BB1 has only one successor, we may be looking at either an
     unconditional jump, or a fake edge to exit.  */
  if (bb1->succ && !bb1->succ->succ_next
      && !(bb1->succ->flags & (EDGE_COMPLEX | EDGE_FAKE)))
    return (bb2->succ && !bb2->succ->succ_next
	    && (bb2->succ->flags & (EDGE_COMPLEX | EDGE_FAKE)) == 0);

  /* Match conditional jumps - this may get tricky when fallthru and branch
     edges are crossed.  */
  if (bb1->succ
      && bb1->succ->succ_next
      && !bb1->succ->succ_next->succ_next
      && any_condjump_p (bb1->end)
      && onlyjump_p (bb1->end))
      edge b1, f1, b2, f2;
      bool reverse, match;
      rtx set1, set2, cond1, cond2;
      enum rtx_code code1, code2;

      if (!bb2->succ
	  || !bb2->succ->succ_next
	  || bb2->succ->succ_next->succ_next
	  || !any_condjump_p (bb2->end)
	  || !onlyjump_p (bb2->end))

      /* Do not crossjump across loop boundaries.  This is a temporary
	 workaround for the common scenario in which crossjumping results
	 in killing the duplicated loop condition, making bb-reorder rotate
	 the loop incorrectly, leaving an extra unconditional jump inside
	 the loop.

	 This check should go away once bb-reorder knows how to duplicate
	 code in this case or rotate the loops to avoid this scenario.  */
      if (bb1->loop_depth != bb2->loop_depth)

      b1 = BRANCH_EDGE (bb1);
      b2 = BRANCH_EDGE (bb2);
      f1 = FALLTHRU_EDGE (bb1);
      f2 = FALLTHRU_EDGE (bb2);

      /* Get around possible forwarders on fallthru edges.  Other cases
	 should be optimized out already.  */
      if (FORWARDER_BLOCK_P (f1->dest))
	f1 = f1->dest->succ;

      if (FORWARDER_BLOCK_P (f2->dest))
	f2 = f2->dest->succ;

      /* To simplify use of this function, return false if there are
	 unneeded forwarder blocks.  These will get eliminated later
	 during cleanup_cfg.  */
      if (FORWARDER_BLOCK_P (f1->dest)
	  || FORWARDER_BLOCK_P (f2->dest)
	  || FORWARDER_BLOCK_P (b1->dest)
	  || FORWARDER_BLOCK_P (b2->dest))

      if (f1->dest == f2->dest && b1->dest == b2->dest)
      else if (f1->dest == b2->dest && b1->dest == f2->dest)

      set1 = pc_set (bb1->end);
      set2 = pc_set (bb2->end);
      if ((XEXP (SET_SRC (set1), 1) == pc_rtx)
	  != (XEXP (SET_SRC (set2), 1) == pc_rtx))

      cond1 = XEXP (SET_SRC (set1), 0);
      cond2 = XEXP (SET_SRC (set2), 0);
      code1 = GET_CODE (cond1);
      if (reverse)
	code2 = reversed_comparison_code (cond2, bb2->end);
      else
	code2 = GET_CODE (cond2);

      if (code2 == UNKNOWN)

      /* Verify that codes and operands match.  */
      match = ((code1 == code2
		&& rtx_renumbered_equal_p (XEXP (cond1, 0), XEXP (cond2, 0))
		&& rtx_renumbered_equal_p (XEXP (cond1, 1), XEXP (cond2, 1)))
	       || (code1 == swap_condition (code2)
		   && rtx_renumbered_equal_p (XEXP (cond1, 1),
		   && rtx_renumbered_equal_p (XEXP (cond1, 0),

      /* If we return true, we will join the blocks.  Which means that
	 we will only have one branch prediction bit to work with.  Thus
	 we require the existing branches to have probabilities that are
	 roughly similar.  */
	  && maybe_hot_bb_p (bb1)
	  && maybe_hot_bb_p (bb2))

	  if (b1->dest == b2->dest)
	    prob2 = b2->probability;
	    /* Do not use f2 probability as f2 may be forwarded.  */
	    prob2 = REG_BR_PROB_BASE - b2->probability;

	  /* Fail if the difference in probabilities is greater than 50%.
	     This rules out two well-predicted branches with opposite
	     outcomes.  */
	  if (abs (b1->probability - prob2) > REG_BR_PROB_BASE / 2)
		fprintf (rtl_dump_file,
			 "Outcomes of branch in bb %i and %i differ too much (%i %i)\n",
			 bb1->index, bb2->index, b1->probability, prob2);

      if (rtl_dump_file && match)
	fprintf (rtl_dump_file, "Conditionals in bb %i and %i match.\n",
		 bb1->index, bb2->index);

  /* Generic case - we are seeing a computed jump, table jump or trapping
     instruction.  */

  /* First ensure that the instructions match.  There may be many outgoing
     edges so this test is generally cheaper.
     ??? Currently the tablejumps will never match, as they do have
     different tables.  */
  if (!insns_match_p (mode, bb1->end, bb2->end))

  /* Search the outgoing edges, ensure that the counts do match, find possible
     fallthru and exception handling edges since these need more
     validation.  */
  for (e1 = bb1->succ, e2 = bb2->succ; e1 && e2;
       e1 = e1->succ_next, e2 = e2->succ_next)
      if (e1->flags & EDGE_EH)

      if (e2->flags & EDGE_EH)

      if (e1->flags & EDGE_FALLTHRU)
      if (e2->flags & EDGE_FALLTHRU)

  /* If the number of edges of various types does not match, fail.  */
      || nehedges1 != nehedges2
      || (fallthru1 != 0) != (fallthru2 != 0))

  /* Fallthru edges must be forwarded to the same destination.  */
      basic_block d1 = (forwarder_block_p (fallthru1->dest)
			? fallthru1->dest->succ->dest : fallthru1->dest);
      basic_block d2 = (forwarder_block_p (fallthru2->dest)
			? fallthru2->dest->succ->dest : fallthru2->dest);

  /* In case we do have EH edges, ensure we are in the same region.  */
      rtx n1 = find_reg_note (bb1->end, REG_EH_REGION, 0);
      rtx n2 = find_reg_note (bb2->end, REG_EH_REGION, 0);

      if (XEXP (n1, 0) != XEXP (n2, 0))

  /* We don't need to match the rest of the edges as the above checks should
     be enough to ensure that they are equivalent.  */
/* E1 and E2 are edges with the same destination block.  Search their
   predecessors for common code.  If found, redirect control flow from
   (maybe the middle of) E1->SRC to (maybe the middle of) E2->SRC.  */

try_crossjump_to_edge (mode, e1, e2)

  basic_block src1 = e1->src, src2 = e2->src;
  basic_block redirect_to;
  rtx newpos1, newpos2;

  /* Search backward through forwarder blocks.  We don't need to worry
     about multiple entry or chained forwarders, as they will be optimized
     away.  We do this to look past the unconditional jump following a
     conditional jump that is required due to the current CFG shape.  */
      && !src1->pred->pred_next
      && FORWARDER_BLOCK_P (src1))
    e1 = src1->pred, src1 = e1->src;

      && !src2->pred->pred_next
      && FORWARDER_BLOCK_P (src2))
    e2 = src2->pred, src2 = e2->src;

  /* Nothing to do if we reach ENTRY, or a common source block.  */
  if (src1 == ENTRY_BLOCK_PTR || src2 == ENTRY_BLOCK_PTR)

  /* Seeing more than one forwarder block would confuse us later...  */
  if (FORWARDER_BLOCK_P (e1->dest)
      && FORWARDER_BLOCK_P (e1->dest->succ->dest))

  if (FORWARDER_BLOCK_P (e2->dest)
      && FORWARDER_BLOCK_P (e2->dest->succ->dest))

  /* Likewise with dead code (possibly newly created by the other
     optimizations of cfg_cleanup).  */
  if (!src1->pred || !src2->pred)

  /* Look for the common insn sequence, part the first ...  */
  if (!outgoing_edges_match (mode, src1, src2))

  /* ... and part the second.  */
  nmatch = flow_find_cross_jump (mode, src1, src2, &newpos1, &newpos2);

  /* Avoid splitting if possible.  */
  if (newpos2 == src2->head)
	fprintf (rtl_dump_file, "Splitting bb %i before %i insns\n",
		 src2->index, nmatch);
      redirect_to = split_block (src2, PREV_INSN (newpos2))->dest;

    fprintf (rtl_dump_file,
	     "Cross jumping from bb %i to bb %i; %i common insns\n",
	     src1->index, src2->index, nmatch);

  redirect_to->count += src1->count;
  redirect_to->frequency += src1->frequency;
  /* We may have some registers visible through the block.  */
  redirect_to->flags |= BB_DIRTY;

  /* Recompute the frequencies and counts of outgoing edges.  */
  for (s = redirect_to->succ; s; s = s->succ_next)
      basic_block d = s->dest;

      if (FORWARDER_BLOCK_P (d))

      for (s2 = src1->succ; ; s2 = s2->succ_next)
	  basic_block d2 = s2->dest;
	  if (FORWARDER_BLOCK_P (d2))
	    d2 = d2->succ->dest;

      s->count += s2->count;

      /* Take care to update possible forwarder blocks.  We verified
	 that there is no more than one in the chain, so we can't run
	 into an infinite loop.  */
      if (FORWARDER_BLOCK_P (s->dest))
	  s->dest->succ->count += s2->count;
	  s->dest->count += s2->count;
	  s->dest->frequency += EDGE_FREQUENCY (s);

      if (FORWARDER_BLOCK_P (s2->dest))
	  s2->dest->succ->count -= s2->count;
	  if (s2->dest->succ->count < 0)
	    s2->dest->succ->count = 0;
	  s2->dest->count -= s2->count;
	  s2->dest->frequency -= EDGE_FREQUENCY (s);
	  if (s2->dest->frequency < 0)
	    s2->dest->frequency = 0;
	  if (s2->dest->count < 0)
	    s2->dest->count = 0;

      if (!redirect_to->frequency && !src1->frequency)
	s->probability = (s->probability + s2->probability) / 2;
	  = ((s->probability * redirect_to->frequency +
	      s2->probability * src1->frequency)
	     / (redirect_to->frequency + src1->frequency));

  update_br_prob_note (redirect_to);

  /* Edit SRC1 to go to REDIRECT_TO at NEWPOS1.  */

  /* Skip possible basic block header.  */
  if (GET_CODE (newpos1) == CODE_LABEL)
    newpos1 = NEXT_INSN (newpos1);

  if (GET_CODE (newpos1) == NOTE)
    newpos1 = NEXT_INSN (newpos1);

  /* Emit the jump insn.  */
  label = block_label (redirect_to);
  emit_jump_insn_after (gen_jump (label), src1->end);
  JUMP_LABEL (src1->end) = label;
  LABEL_NUSES (label)++;

  /* Delete the now unreachable instructions.  */
  delete_insn_chain (newpos1, last);

  /* Make sure there is a barrier after the new jump.  */
  last = next_nonnote_insn (src1->end);
  if (!last || GET_CODE (last) != BARRIER)
    emit_barrier_after (src1->end);

  remove_edge (src1->succ);
  make_single_succ_edge (src1, redirect_to, 0);

  update_forwarder_flag (src1);
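/* Worked example (illustration only, not part of the original file): if two
   predecessors of a block end with the same insn sequence followed by a
   jump to it, the matched tail is kept in SRC2 (splitting SRC2 when only
   part of it matches) and SRC1 is rewritten to jump to that code, so the
   duplicated insns are emitted only once.  */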
/* Search the predecessors of BB for common insn sequences.  When found,
   share code between them by redirecting control flow.  Return true if
   any changes were made.  */

try_crossjump_bb (mode, bb)

  edge e, e2, nexte2, nexte, fallthru;

  /* Nothing to do if there are not at least two incoming edges.  */
  if (!bb->pred || !bb->pred->pred_next)

  /* It is always cheapest to redirect a block that ends in a branch to
     a block that falls through into BB, as that adds no branches to the
     program.  We'll try that combination first.  */
  for (fallthru = bb->pred; fallthru; fallthru = fallthru->pred_next, n++)
      if (fallthru->flags & EDGE_FALLTHRU)

  for (e = bb->pred; e; e = nexte)
      nexte = e->pred_next;

      /* As noted above, first try with the fallthru predecessor.  */

	  /* Don't combine the fallthru edge into anything else.
	     If there is a match, we'll do it the other way around.  */

	  if (try_crossjump_to_edge (mode, e, fallthru))

      /* Non-obvious work limiting check: Recognize that we're going
	 to call try_crossjump_bb on every basic block.  So if we have
	 two blocks with lots of outgoing edges (a switch) and they
	 share lots of common destinations, then we would do the
	 cross-jump check once for each common destination.

	 Now, if the blocks actually are cross-jump candidates, then
	 all of their destinations will be shared.  Which means that
	 we only need check them for cross-jump candidacy once.  We
	 can eliminate redundant checks of crossjump(A,B) by arbitrarily
	 choosing to do the check from the block for which the edge
	 in question is the first successor of A.  */
      if (e->src->succ != e)

      for (e2 = bb->pred; e2; e2 = nexte2)
	  nexte2 = e2->pred_next;

	  /* We've already checked the fallthru edge above.  */

	  /* The "first successor" check above only prevents multiple
	     checks of crossjump(A,B).  In order to prevent redundant
	     checks of crossjump(B,A), require that A be the block
	     with the lowest index.  */
	  if (e->src->index > e2->src->index)

	  if (try_crossjump_to_edge (mode, e, e2))
/* Do simple CFG optimizations - basic block merging, simplification of jump
   instructions, etc.  Return nonzero if changes were made.  */

try_optimize_cfg (mode)

  bool changed_overall = false;

  if (mode & CLEANUP_CROSSJUMP)
    add_noreturn_fake_exit_edges ();

    update_forwarder_flag (bb);

  if (mode & CLEANUP_UPDATE_LIFE)

  if (! (* targetm.cannot_modify_jumps_p) ())
      /* Attempt to merge blocks as made possible by edge removal.  If
	 a block has only one successor, and the successor has only
	 one predecessor, they may be combined.  */

	    fprintf (rtl_dump_file,
		     "\n\ntry_optimize_cfg iteration %i\n\n",

	  for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR;)
	      bool changed_here = false;

	      /* Delete trivially dead basic blocks.  */
	      while (b->pred == NULL)
		    fprintf (rtl_dump_file, "Deleting block %i.\n",

		  flow_delete_block (b);

	      /* Remove code labels no longer used.  Don't do this
		 before CALL_PLACEHOLDER is removed, as some branches
		 may be hidden within.  */
	      if (b->pred->pred_next == NULL
		  && (b->pred->flags & EDGE_FALLTHRU)
		  && !(b->pred->flags & EDGE_COMPLEX)
		  && GET_CODE (b->head) == CODE_LABEL
		  && (!(mode & CLEANUP_PRE_SIBCALL)
		      || !tail_recursion_label_p (b->head))
		  /* If the previous block ends with a branch to this
		     block, we can't delete the label.  Normally this
		     is a condjump that is yet to be simplified, but
		     if CASE_DROPS_THRU, this can be a tablejump with
		     some element going to the same place as the
		     default (fallthru).  */
		  && (b->pred->src == ENTRY_BLOCK_PTR
		      || GET_CODE (b->pred->src->end) != JUMP_INSN
		      || ! label_is_jump_target_p (b->head,
						   b->pred->src->end)))
		  rtx label = b->head;

		  b->head = NEXT_INSN (b->head);
		  delete_insn_chain (label, label);

		    fprintf (rtl_dump_file, "Deleted label in block %i.\n",

	      /* If we fall through an empty block, we can remove it.  */
	      if (b->pred->pred_next == NULL
		  && (b->pred->flags & EDGE_FALLTHRU)
		  && GET_CODE (b->head) != CODE_LABEL
		  && FORWARDER_BLOCK_P (b)
		  /* Note that forwarder_block_p true ensures that
		     there is a successor for this block.  */
		  && (b->succ->flags & EDGE_FALLTHRU)
		  && n_basic_blocks > 1)
		    fprintf (rtl_dump_file,
			     "Deleting fallthru block %i.\n",

		  c = b->prev_bb == ENTRY_BLOCK_PTR ? b->next_bb : b->prev_bb;
		  redirect_edge_succ_nodup (b->pred, b->succ->dest);
		  flow_delete_block (b);

	      /* Merge blocks.  Loop because chains of blocks might be
		 created.  */
	      while ((s = b->succ) != NULL
		     && s->succ_next == NULL
		     && !(s->flags & EDGE_COMPLEX)
		     && (c = s->dest) != EXIT_BLOCK_PTR
		     && c->pred->pred_next == NULL
		     /* If the jump insn has side effects,
			we can't kill the edge.  */
		     && (GET_CODE (b->end) != JUMP_INSN
			 || simplejump_p (b->end))
		     && merge_blocks (s, b, c, mode))
		changed_here = true;

	      /* Simplify branch over branch.  */
	      if ((mode & CLEANUP_EXPENSIVE) && try_simplify_condjump (b))
		changed_here = true;

	      /* If B has a single outgoing edge, but uses a
		 non-trivial jump instruction without side-effects, we
		 can either delete the jump entirely, or replace it
		 with a simple unconditional jump.  Use
		 redirect_edge_and_branch to do the dirty work.  */
		  && ! b->succ->succ_next
		  && b->succ->dest != EXIT_BLOCK_PTR
		  && onlyjump_p (b->end)
		  && redirect_edge_and_branch (b->succ, b->succ->dest))
		  update_forwarder_flag (b);
		  changed_here = true;

	      /* Simplify branch to branch.  */
	      if (try_forward_edges (mode, b))
		changed_here = true;

	      /* Look for shared code between blocks.  */
	      if ((mode & CLEANUP_CROSSJUMP)
		  && try_crossjump_bb (mode, b))
		changed_here = true;

	      /* Don't get confused by the index shift caused by
		 deleting blocks.  */

	  if ((mode & CLEANUP_CROSSJUMP)
	      && try_crossjump_bb (mode, EXIT_BLOCK_PTR))

#ifdef ENABLE_CHECKING
	    verify_flow_info ();

	  changed_overall |= changed;

  if (mode & CLEANUP_CROSSJUMP)
    remove_fake_edges ();

  clear_aux_for_blocks ();

  return changed_overall;
/* Delete all unreachable basic blocks.  */

delete_unreachable_blocks ()

  bool changed = false;
  basic_block b, next_bb;

  find_unreachable_blocks ();

  /* Delete all unreachable basic blocks.  */

  for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
      next_bb = b->next_bb;

      if (!(b->flags & BB_REACHABLE))
	  flow_delete_block (b);

    tidy_fallthru_edges ();
/* Tidy the CFG by deleting unreachable code and whatnot.  */

  bool changed = false;

  timevar_push (TV_CLEANUP_CFG);
  if (delete_unreachable_blocks ())
      /* We've possibly created trivially dead code.  Clean it up right
	 now to introduce more opportunities for try_optimize_cfg.  */
      if (!(mode & (CLEANUP_NO_INSN_DEL
		    | CLEANUP_UPDATE_LIFE | CLEANUP_PRE_SIBCALL))
	  && !reload_completed)
	delete_trivially_dead_insns (get_insns (), max_reg_num ());

  while (try_optimize_cfg (mode))
      delete_unreachable_blocks (), changed = true;
      if (mode & CLEANUP_UPDATE_LIFE)
	  /* Cleaning up the CFG introduces more opportunities for dead code
	     removal that in turn may introduce more opportunities for
	     cleaning up the CFG.  */
	  if (!update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
						 | PROP_SCAN_DEAD_CODE
						 | PROP_KILL_DEAD_CODE
      else if (!(mode & (CLEANUP_NO_INSN_DEL | CLEANUP_PRE_SIBCALL))
	       && !reload_completed)
	  if (!delete_trivially_dead_insns (get_insns (), max_reg_num ()))
	  delete_dead_jumptables ();

  /* Kill the data we won't maintain.  */
  free_EXPR_LIST_list (&label_value_list);
  timevar_pop (TV_CLEANUP_CFG);