contrib/gcc-3.4/gcc/cfgrtl.c
/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This file contains low-level functions that manipulate and analyze the CFG
   and are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
         delete_insn, delete_insn_chain
     - Edge splitting and committing to edges (a usage sketch follows this
       comment)
         insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
         purge_dead_edges, purge_all_dead_edges

   Functions not intended for generic use:
     - Infrastructure for quickly determining the basic block of an insn
         compute_bb_for_insn, update_bb_for_insn, set_block_for_insn
     - Edge redirection with updating and optimizing of the insn chain
         block_label, tidy_fallthru_edge, force_nonfallthru  */
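/* Illustrative usage sketch, not part of the original file: the typical
   pattern a pass follows when using the edge-insertion API listed above.
   The edge E and the insn pattern FIXUP are hypothetical placeholders; only
   insert_insn_on_edge and commit_edge_insertions are functions defined in
   this file.  */
#if 0
static void
example_insert_fixup_on_edge (edge e, rtx fixup)
{
  /* Queue FIXUP on E; nothing is emitted into the insn stream yet.  */
  insert_insn_on_edge (fixup, e);

  /* Once all edges of interest have been annotated, materialize the queued
     insns.  This may split edges and create new basic blocks.  */
  commit_edge_insertions ();
}
#endif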
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-config.h"
#include "cfglayout.h"
#include "expr.h"
/* Stubs in case we don't have a return insn.  */
#ifndef HAVE_return
#define HAVE_return 0
#define gen_return() NULL_RTX
#endif

/* The labels mentioned in non-jump rtl.  Valid during find_basic_blocks.  */
/* ??? Should probably be using LABEL_NUSES instead.  It would take a
   bit of surgery to be able to use or co-opt the routines in jump.  */
rtx label_value_list;
rtx tail_recursion_label_list;

static int can_delete_note_p (rtx);
static int can_delete_label_p (rtx);
static void commit_one_edge_insertion (edge, int);
static rtx last_loop_beg_note (rtx);
static bool back_edge_of_syntactic_loop_p (basic_block, basic_block);
basic_block force_nonfallthru_and_redirect (edge, basic_block);
static basic_block rtl_split_edge (edge);
static int rtl_verify_flow_info (void);
static edge cfg_layout_split_block (basic_block, void *);
static bool cfg_layout_redirect_edge_and_branch (edge, basic_block);
static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
static void cfg_layout_delete_block (basic_block);
static void rtl_delete_block (basic_block);
static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
static bool rtl_redirect_edge_and_branch (edge, basic_block);
static edge rtl_split_block (basic_block, void *);
static void rtl_dump_bb (basic_block, FILE *);
static int rtl_verify_flow_info_1 (void);
static void mark_killed_regs (rtx, rtx, void *);
/* Return true if NOTE is not one of the ones that must be kept paired,
   so that we may simply delete it.  */

static int
can_delete_note_p (rtx note)
{
  return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
          || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
          || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
}

/* True if a given label can be deleted.  */

static int
can_delete_label_p (rtx label)
{
  return (!LABEL_PRESERVE_P (label)
          /* User declared labels must be preserved.  */
          && LABEL_NAME (label) == 0
          && !in_expr_list_p (forced_labels, label)
          && !in_expr_list_p (label_value_list, label));
}
/* Delete INSN by patching it out.  Return the next insn.  */

rtx
delete_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx note;
  bool really_delete = true;

  if (GET_CODE (insn) == CODE_LABEL)
    {
      /* Some labels can't be directly removed from the INSN chain, as they
         might be referenced via variables, the constant pool etc.
         Convert them to the special NOTE_INSN_DELETED_LABEL note.  */
      if (! can_delete_label_p (insn))
        {
          const char *name = LABEL_NAME (insn);

          really_delete = false;
          PUT_CODE (insn, NOTE);
          NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
          NOTE_SOURCE_FILE (insn) = name;
        }

      remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
    }

  if (really_delete)
    {
      /* If this insn has already been deleted, something is very wrong.  */
      if (INSN_DELETED_P (insn))
        abort ();
      remove_insn (insn);
      INSN_DELETED_P (insn) = 1;
    }

  /* If deleting a jump, decrement the use count of the label.  Deleting
     the label itself should happen in the normal course of block merging.  */
  if (GET_CODE (insn) == JUMP_INSN
      && JUMP_LABEL (insn)
      && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
    LABEL_NUSES (JUMP_LABEL (insn))--;

  /* Also if deleting an insn that references a label.  */
  else
    {
      while ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
             && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
        {
          LABEL_NUSES (XEXP (note, 0))--;
          remove_note (insn, note);
        }
    }

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);
      int i;

      for (i = 0; i < len; i++)
        {
          rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);

          /* When deleting code in bulk (e.g. removing many unreachable
             blocks) we can delete a label that's a target of the vector
             before deleting the vector itself.  */
          if (GET_CODE (label) != NOTE)
            LABEL_NUSES (label)--;
        }
    }

  return next;
}
193 /* Like delete_insn but also purge dead edges from BB. */
195 delete_insn_and_edges (rtx insn)
197 rtx x;
198 bool purge = false;
200 if (INSN_P (insn)
201 && BLOCK_FOR_INSN (insn)
202 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
203 purge = true;
204 x = delete_insn (insn);
205 if (purge)
206 purge_dead_edges (BLOCK_FOR_INSN (insn));
207 return x;
210 /* Unlink a chain of insns between START and FINISH, leaving notes
211 that must be paired. */
213 void
214 delete_insn_chain (rtx start, rtx finish)
216 rtx next;
218 /* Unchain the insns one by one. It would be quicker to delete all of these
219 with a single unchaining, rather than one at a time, but we need to keep
220 the NOTE's. */
221 while (1)
223 next = NEXT_INSN (start);
224 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
226 else
227 next = delete_insn (start);
229 if (start == finish)
230 break;
231 start = next;
/* Like delete_insn_chain but also purge dead edges from BB.  */
236 void
237 delete_insn_chain_and_edges (rtx first, rtx last)
239 bool purge = false;
241 if (INSN_P (last)
242 && BLOCK_FOR_INSN (last)
243 && BB_END (BLOCK_FOR_INSN (last)) == last)
244 purge = true;
245 delete_insn_chain (first, last);
246 if (purge)
247 purge_dead_edges (BLOCK_FOR_INSN (last));
/* Create a new basic block consisting of the instructions between HEAD and END
   inclusive.  This function is designed to allow fast BB construction - it
   reuses the note and basic block struct in BB_NOTE, if any, does not grow
   the BASIC_BLOCK chain and should be used directly only by CFG construction
   code.  END can be NULL to create a new empty basic block before HEAD.  Both
   END and HEAD can be NULL to create a basic block at the end of the INSN
   chain.  AFTER is the basic block the new block should be placed after.  */
258 basic_block
259 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
261 basic_block bb;
263 if (bb_note
264 && ! RTX_INTEGRATED_P (bb_note)
265 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
266 && bb->aux == NULL)
268 /* If we found an existing note, thread it back onto the chain. */
270 rtx after;
272 if (GET_CODE (head) == CODE_LABEL)
273 after = head;
274 else
276 after = PREV_INSN (head);
277 head = bb_note;
280 if (after != bb_note && NEXT_INSN (after) != bb_note)
281 reorder_insns_nobb (bb_note, bb_note, after);
283 else
285 /* Otherwise we must create a note and a basic block structure. */
287 bb = alloc_block ();
289 if (!head && !end)
290 head = end = bb_note
291 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
292 else if (GET_CODE (head) == CODE_LABEL && end)
294 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
295 if (head == end)
296 end = bb_note;
298 else
300 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
301 head = bb_note;
302 if (!end)
303 end = head;
306 NOTE_BASIC_BLOCK (bb_note) = bb;
309 /* Always include the bb note in the block. */
310 if (NEXT_INSN (end) == bb_note)
311 end = bb_note;
313 BB_HEAD (bb) = head;
314 BB_END (bb) = end;
315 bb->index = last_basic_block++;
316 bb->flags = BB_NEW;
317 link_block (bb, after);
318 BASIC_BLOCK (bb->index) = bb;
319 update_bb_for_insn (bb);
321 /* Tag the block so that we know it has been used when considering
322 other basic block notes. */
323 bb->aux = bb;
325 return bb;
/* Create a new basic block consisting of the instructions in between HEAD and
   END and place it into the BB chain after block AFTER.  END can be NULL to
   create a new empty basic block before HEAD.  Both END and HEAD can be NULL
   to create a basic block at the end of the INSN chain.  */
333 static basic_block
334 rtl_create_basic_block (void *headp, void *endp, basic_block after)
336 rtx head = headp, end = endp;
337 basic_block bb;
339 /* Place the new block just after the end. */
340 VARRAY_GROW (basic_block_info, last_basic_block+1);
342 n_basic_blocks++;
344 bb = create_basic_block_structure (head, end, NULL, after);
345 bb->aux = NULL;
346 return bb;
349 static basic_block
350 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
352 basic_block newbb = rtl_create_basic_block (head, end, after);
354 cfg_layout_initialize_rbi (newbb);
355 return newbb;
358 /* Delete the insns in a (non-live) block. We physically delete every
359 non-deleted-note insn, and update the flow graph appropriately.
361 Return nonzero if we deleted an exception handler. */
363 /* ??? Preserving all such notes strikes me as wrong. It would be nice
364 to post-process the stream to remove empty blocks, loops, ranges, etc. */
366 static void
367 rtl_delete_block (basic_block b)
369 rtx insn, end, tmp;
371 /* If the head of this block is a CODE_LABEL, then it might be the
372 label for an exception handler which can't be reached.
374 We need to remove the label from the exception_handler_label list
375 and remove the associated NOTE_INSN_EH_REGION_BEG and
376 NOTE_INSN_EH_REGION_END notes. */
378 /* Get rid of all NOTE_INSN_PREDICTIONs and NOTE_INSN_LOOP_CONTs
379 hanging before the block. */
381 for (insn = PREV_INSN (BB_HEAD (b)); insn; insn = PREV_INSN (insn))
383 if (GET_CODE (insn) != NOTE)
384 break;
385 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION
386 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
387 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
390 insn = BB_HEAD (b);
392 never_reached_warning (insn, BB_END (b));
394 if (GET_CODE (insn) == CODE_LABEL)
395 maybe_remove_eh_handler (insn);
397 /* Include any jump table following the basic block. */
398 end = BB_END (b);
399 if (tablejump_p (end, NULL, &tmp))
400 end = tmp;
402 /* Include any barrier that may follow the basic block. */
403 tmp = next_nonnote_insn (end);
404 if (tmp && GET_CODE (tmp) == BARRIER)
405 end = tmp;
407 /* Selectively delete the entire chain. */
408 BB_HEAD (b) = NULL;
409 delete_insn_chain (insn, end);
411 /* Remove the edges into and out of this block. Note that there may
412 indeed be edges in, if we are removing an unreachable loop. */
413 while (b->pred != NULL)
414 remove_edge (b->pred);
415 while (b->succ != NULL)
416 remove_edge (b->succ);
418 b->pred = NULL;
419 b->succ = NULL;
421 /* Remove the basic block from the array. */
422 expunge_block (b);
425 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
427 void
428 compute_bb_for_insn (void)
430 basic_block bb;
432 FOR_EACH_BB (bb)
434 rtx end = BB_END (bb);
435 rtx insn;
437 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
439 BLOCK_FOR_INSN (insn) = bb;
440 if (insn == end)
441 break;
446 /* Release the basic_block_for_insn array. */
448 void
449 free_bb_for_insn (void)
451 rtx insn;
452 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
453 if (GET_CODE (insn) != BARRIER)
454 BLOCK_FOR_INSN (insn) = NULL;
457 /* Update insns block within BB. */
459 void
460 update_bb_for_insn (basic_block bb)
462 rtx insn;
464 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
466 if (GET_CODE (insn) != BARRIER)
467 set_block_for_insn (insn, bb);
468 if (insn == BB_END (bb))
469 break;
473 /* Split a block BB after insn INSN creating a new fallthru edge.
474 Return the new edge. Note that to keep other parts of the compiler happy,
475 this function renumbers all the basic blocks so that the new
476 one has a number one greater than the block split. */
478 static edge
479 rtl_split_block (basic_block bb, void *insnp)
481 basic_block new_bb;
482 edge new_edge;
483 edge e;
484 rtx insn = insnp;
486 if (!insn)
488 insn = first_insn_after_basic_block_note (bb);
490 if (insn)
491 insn = PREV_INSN (insn);
492 else
493 insn = get_last_insn ();
  /* We probably should check the type of the insn so that we do not create
     an inconsistent CFG.  It is checked in verify_flow_info anyway, so do
     not bother.  */
499 if (insn == BB_END (bb))
500 emit_note_after (NOTE_INSN_DELETED, insn);
502 /* Create the new basic block. */
503 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
504 new_bb->count = bb->count;
505 new_bb->frequency = bb->frequency;
506 new_bb->loop_depth = bb->loop_depth;
507 BB_END (bb) = insn;
509 /* Redirect the outgoing edges. */
510 new_bb->succ = bb->succ;
511 bb->succ = NULL;
512 for (e = new_bb->succ; e; e = e->succ_next)
513 e->src = new_bb;
515 new_edge = make_single_succ_edge (bb, new_bb, EDGE_FALLTHRU);
517 if (bb->global_live_at_start)
519 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
520 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
521 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
523 /* We now have to calculate which registers are live at the end
524 of the split basic block and at the start of the new basic
525 block. Start with those registers that are known to be live
526 at the end of the original basic block and get
527 propagate_block to determine which registers are live. */
528 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
529 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
530 COPY_REG_SET (bb->global_live_at_end,
531 new_bb->global_live_at_start);
532 #ifdef HAVE_conditional_execution
533 /* In the presence of conditional execution we are not able to update
534 liveness precisely. */
535 if (reload_completed)
537 bb->flags |= BB_DIRTY;
538 new_bb->flags |= BB_DIRTY;
540 #endif
543 return new_edge;
546 /* Assume that the code of basic block B has been merged into A.
547 Do corresponding CFG updates: redirect edges accordingly etc. */
548 static void
549 update_cfg_after_block_merging (basic_block a, basic_block b)
551 edge e;
  /* Normally there should only be one successor of A and that is B, but
     partway through the merge of blocks for conditional_execution we'll
     be merging a TEST block with THEN and ELSE successors.  Free the
     whole lot of them and hope the caller knows what they're doing.  */
557 while (a->succ)
558 remove_edge (a->succ);
560 /* Adjust the edges out of B for the new owner. */
561 for (e = b->succ; e; e = e->succ_next)
562 e->src = a;
563 a->succ = b->succ;
564 a->flags |= b->flags;
566 /* B hasn't quite yet ceased to exist. Attempt to prevent mishap. */
567 b->pred = b->succ = NULL;
568 a->global_live_at_end = b->global_live_at_end;
570 expunge_block (b);
573 /* Blocks A and B are to be merged into a single block A. The insns
574 are already contiguous. */
576 static void
577 rtl_merge_blocks (basic_block a, basic_block b)
579 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
580 rtx del_first = NULL_RTX, del_last = NULL_RTX;
581 int b_empty = 0;
583 /* If there was a CODE_LABEL beginning B, delete it. */
584 if (GET_CODE (b_head) == CODE_LABEL)
586 /* Detect basic blocks with nothing but a label. This can happen
587 in particular at the end of a function. */
588 if (b_head == b_end)
589 b_empty = 1;
591 del_first = del_last = b_head;
592 b_head = NEXT_INSN (b_head);
595 /* Delete the basic block note and handle blocks containing just that
596 note. */
597 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
599 if (b_head == b_end)
600 b_empty = 1;
601 if (! del_last)
602 del_first = b_head;
604 del_last = b_head;
605 b_head = NEXT_INSN (b_head);
608 /* If there was a jump out of A, delete it. */
609 if (GET_CODE (a_end) == JUMP_INSN)
611 rtx prev;
613 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
614 if (GET_CODE (prev) != NOTE
615 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
616 || prev == BB_HEAD (a))
617 break;
619 del_first = a_end;
621 #ifdef HAVE_cc0
622 /* If this was a conditional jump, we need to also delete
623 the insn that set cc0. */
624 if (only_sets_cc0_p (prev))
626 rtx tmp = prev;
628 prev = prev_nonnote_insn (prev);
629 if (!prev)
630 prev = BB_HEAD (a);
631 del_first = tmp;
633 #endif
635 a_end = PREV_INSN (del_first);
637 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
638 del_first = NEXT_INSN (a_end);
640 update_cfg_after_block_merging (a, b);
642 /* Delete everything marked above as well as crap that might be
643 hanging out between the two blocks. */
644 delete_insn_chain (del_first, del_last);
646 /* Reassociate the insns of B with A. */
647 if (!b_empty)
649 rtx x;
651 for (x = a_end; x != b_end; x = NEXT_INSN (x))
652 set_block_for_insn (x, a);
654 set_block_for_insn (b_end, a);
656 a_end = b_end;
659 BB_END (a) = a_end;
/* Return true when blocks A and B can be merged.  */
static bool
rtl_can_merge_blocks (basic_block a, basic_block b)
666 /* There must be exactly one edge in between the blocks. */
667 return (a->succ && !a->succ->succ_next && a->succ->dest == b
668 && !b->pred->pred_next && a != b
669 /* Must be simple edge. */
670 && !(a->succ->flags & EDGE_COMPLEX)
671 && a->next_bb == b
672 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
673 /* If the jump insn has side effects,
674 we can't kill the edge. */
675 && (GET_CODE (BB_END (a)) != JUMP_INSN
676 || (reload_completed
677 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
680 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
681 exist. */
684 block_label (basic_block block)
686 if (block == EXIT_BLOCK_PTR)
687 return NULL_RTX;
689 if (GET_CODE (BB_HEAD (block)) != CODE_LABEL)
691 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
694 return BB_HEAD (block);
/* Attempt to perform edge redirection by replacing a possibly complex jump
   instruction with an unconditional jump, or by removing the jump completely.
   This can apply only if all edges now point to the same block.  The
   parameters and return values are equivalent to redirect_edge_and_branch.  */
702 bool
703 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
705 basic_block src = e->src;
706 rtx insn = BB_END (src), kill_from;
707 edge tmp;
708 rtx set;
709 int fallthru = 0;
711 /* Verify that all targets will be TARGET. */
712 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
713 if (tmp->dest != target && tmp != e)
714 break;
716 if (tmp || !onlyjump_p (insn))
717 return false;
718 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
719 return false;
721 /* Avoid removing branch with side effects. */
722 set = single_set (insn);
723 if (!set || side_effects_p (set))
724 return false;
726 /* In case we zap a conditional jump, we'll need to kill
727 the cc0 setter too. */
728 kill_from = insn;
729 #ifdef HAVE_cc0
730 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
731 kill_from = PREV_INSN (insn);
732 #endif
734 /* See if we can create the fallthru edge. */
735 if (in_cfglayout || can_fallthru (src, target))
737 if (rtl_dump_file)
738 fprintf (rtl_dump_file, "Removing jump %i.\n", INSN_UID (insn));
739 fallthru = 1;
741 /* Selectively unlink whole insn chain. */
742 if (in_cfglayout)
744 rtx insn = src->rbi->footer;
746 delete_insn_chain (kill_from, BB_END (src));
748 /* Remove barriers but keep jumptables. */
749 while (insn)
751 if (GET_CODE (insn) == BARRIER)
753 if (PREV_INSN (insn))
754 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
755 else
756 src->rbi->footer = NEXT_INSN (insn);
757 if (NEXT_INSN (insn))
758 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
760 if (GET_CODE (insn) == CODE_LABEL)
761 break;
762 insn = NEXT_INSN (insn);
765 else
766 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)));
769 /* If this already is simplejump, redirect it. */
770 else if (simplejump_p (insn))
772 if (e->dest == target)
773 return false;
774 if (rtl_dump_file)
775 fprintf (rtl_dump_file, "Redirecting jump %i from %i to %i.\n",
776 INSN_UID (insn), e->dest->index, target->index);
777 if (!redirect_jump (insn, block_label (target), 0))
779 if (target == EXIT_BLOCK_PTR)
780 return false;
781 abort ();
785 /* Cannot do anything for target exit block. */
786 else if (target == EXIT_BLOCK_PTR)
787 return false;
789 /* Or replace possibly complicated jump insn by simple jump insn. */
790 else
792 rtx target_label = block_label (target);
793 rtx barrier, label, table;
795 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
796 JUMP_LABEL (BB_END (src)) = target_label;
797 LABEL_NUSES (target_label)++;
798 if (rtl_dump_file)
799 fprintf (rtl_dump_file, "Replacing insn %i by jump %i\n",
800 INSN_UID (insn), INSN_UID (BB_END (src)));
803 delete_insn_chain (kill_from, insn);
805 /* Recognize a tablejump that we are converting to a
806 simple jump and remove its associated CODE_LABEL
807 and ADDR_VEC or ADDR_DIFF_VEC. */
808 if (tablejump_p (insn, &label, &table))
809 delete_insn_chain (label, table);
811 barrier = next_nonnote_insn (BB_END (src));
812 if (!barrier || GET_CODE (barrier) != BARRIER)
813 emit_barrier_after (BB_END (src));
814 else
816 if (barrier != NEXT_INSN (BB_END (src)))
818 /* Move the jump before barrier so that the notes
819 which originally were or were created before jump table are
820 inside the basic block. */
821 rtx new_insn = BB_END (src);
822 rtx tmp;
824 for (tmp = NEXT_INSN (BB_END (src)); tmp != barrier;
825 tmp = NEXT_INSN (tmp))
826 set_block_for_insn (tmp, src);
828 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
829 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
831 NEXT_INSN (new_insn) = barrier;
832 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
834 PREV_INSN (new_insn) = PREV_INSN (barrier);
835 PREV_INSN (barrier) = new_insn;
840 /* Keep only one edge out and set proper flags. */
841 while (src->succ->succ_next)
842 remove_edge (src->succ);
843 e = src->succ;
844 if (fallthru)
845 e->flags = EDGE_FALLTHRU;
846 else
847 e->flags = 0;
849 e->probability = REG_BR_PROB_BASE;
850 e->count = src->count;
852 /* We don't want a block to end on a line-number note since that has
853 the potential of changing the code between -g and not -g. */
854 while (GET_CODE (BB_END (e->src)) == NOTE
855 && NOTE_LINE_NUMBER (BB_END (e->src)) >= 0)
856 delete_insn (BB_END (e->src));
858 if (e->dest != target)
859 redirect_edge_succ (e, target);
861 return true;
/* Return the last LOOP_BEG note appearing after INSN, before the start of the
   next basic block.  Return INSN if there are no such notes.

   When emitting a jump to redirect a fallthru edge, it should always appear
   after the LOOP_BEG notes, as the loop optimizer expects a loop either to
   start with a fallthru edge or with a jump, following the LOOP_BEG note,
   to the loop exit test.  */
872 static rtx
873 last_loop_beg_note (rtx insn)
875 rtx last = insn;
877 for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
878 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
879 insn = NEXT_INSN (insn))
880 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
881 last = insn;
883 return last;
886 /* Redirect edge representing branch of (un)conditional jump or tablejump. */
887 static bool
888 redirect_branch_edge (edge e, basic_block target)
890 rtx tmp;
891 rtx old_label = BB_HEAD (e->dest);
892 basic_block src = e->src;
893 rtx insn = BB_END (src);
895 /* We can only redirect non-fallthru edges of jump insn. */
896 if (e->flags & EDGE_FALLTHRU)
897 return false;
898 else if (GET_CODE (insn) != JUMP_INSN)
899 return false;
901 /* Recognize a tablejump and adjust all matching cases. */
902 if (tablejump_p (insn, NULL, &tmp))
904 rtvec vec;
905 int j;
906 rtx new_label = block_label (target);
908 if (target == EXIT_BLOCK_PTR)
909 return false;
910 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
911 vec = XVEC (PATTERN (tmp), 0);
912 else
913 vec = XVEC (PATTERN (tmp), 1);
915 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
916 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
918 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
919 --LABEL_NUSES (old_label);
920 ++LABEL_NUSES (new_label);
923 /* Handle casesi dispatch insns. */
924 if ((tmp = single_set (insn)) != NULL
925 && SET_DEST (tmp) == pc_rtx
926 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
927 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
928 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
930 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
931 new_label);
932 --LABEL_NUSES (old_label);
933 ++LABEL_NUSES (new_label);
936 else
938 /* ?? We may play the games with moving the named labels from
939 one basic block to the other in case only one computed_jump is
940 available. */
941 if (computed_jump_p (insn)
942 /* A return instruction can't be redirected. */
943 || returnjump_p (insn))
944 return false;
946 /* If the insn doesn't go where we think, we're confused. */
947 if (JUMP_LABEL (insn) != old_label)
948 abort ();
950 /* If the substitution doesn't succeed, die. This can happen
951 if the back end emitted unrecognizable instructions or if
952 target is exit block on some arches. */
953 if (!redirect_jump (insn, block_label (target), 0))
955 if (target == EXIT_BLOCK_PTR)
956 return false;
957 abort ();
961 if (rtl_dump_file)
962 fprintf (rtl_dump_file, "Edge %i->%i redirected to %i\n",
963 e->src->index, e->dest->index, target->index);
965 if (e->dest != target)
966 redirect_edge_succ_nodup (e, target);
967 return true;
/* Attempt to change code to redirect edge E to TARGET.  Don't do that at the
   expense of adding new instructions or reordering basic blocks.

   The function can also be called with the edge destination equal to TARGET.
   Then it should try the simplifications and do nothing if none is possible.

   Return true if the transformation succeeded.  We still return false in case
   E already pointed to TARGET and we did not manage to simplify the
   instruction stream.  */
980 static bool
981 rtl_redirect_edge_and_branch (edge e, basic_block target)
983 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
984 return false;
986 if (e->dest == target)
987 return true;
989 if (try_redirect_by_replacing_jump (e, target, false))
990 return true;
992 if (!redirect_branch_edge (e, target))
993 return false;
995 return true;
/* Like force_nonfallthru below, but additionally performs redirection.
   Used by redirect_edge_and_branch_force.  */
1001 basic_block
1002 force_nonfallthru_and_redirect (edge e, basic_block target)
1004 basic_block jump_block, new_bb = NULL, src = e->src;
1005 rtx note;
1006 edge new_edge;
1007 int abnormal_edge_flags = 0;
  /* In case the last instruction is a conditional jump to the next
     instruction, first redirect the jump itself and then continue
     by creating a basic block afterwards to redirect the fallthru edge.  */
1012 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1013 && any_condjump_p (BB_END (e->src))
1014 /* When called from cfglayout, fallthru edges do not
1015 necessarily go to the next block. */
1016 && e->src->next_bb == e->dest
1017 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1019 rtx note;
1020 edge b = unchecked_make_edge (e->src, target, 0);
1022 if (!redirect_jump (BB_END (e->src), block_label (target), 0))
1023 abort ();
1024 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1025 if (note)
1027 int prob = INTVAL (XEXP (note, 0));
1029 b->probability = prob;
1030 b->count = e->count * prob / REG_BR_PROB_BASE;
1031 e->probability -= e->probability;
1032 e->count -= b->count;
1033 if (e->probability < 0)
1034 e->probability = 0;
1035 if (e->count < 0)
1036 e->count = 0;
1040 if (e->flags & EDGE_ABNORMAL)
      /* Irritating special case - a fallthru edge to the same block as an
         abnormal edge.
         We can't redirect the abnormal edge, but we still can split the
         fallthru one and create a separate abnormal edge to the original
         destination.  This allows bb-reorder to make such an edge
         non-fallthru.  */
1047 if (e->dest != target)
1048 abort ();
1049 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1050 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1052 else if (!(e->flags & EDGE_FALLTHRU))
1053 abort ();
1054 else if (e->src == ENTRY_BLOCK_PTR)
1056 /* We can't redirect the entry block. Create an empty block at the
1057 start of the function which we use to add the new jump. */
1058 edge *pe1;
1059 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1061 /* Change the existing edge's source to be the new block, and add
1062 a new edge from the entry block to the new block. */
1063 e->src = bb;
1064 for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
1065 if (*pe1 == e)
1067 *pe1 = e->succ_next;
1068 break;
1070 e->succ_next = 0;
1071 bb->succ = e;
1072 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1075 if (e->src->succ->succ_next || abnormal_edge_flags)
1077 /* Create the new structures. */
1079 /* If the old block ended with a tablejump, skip its table
1080 by searching forward from there. Otherwise start searching
1081 forward from the last instruction of the old block. */
1082 if (!tablejump_p (BB_END (e->src), NULL, &note))
1083 note = BB_END (e->src);
1085 /* Position the new block correctly relative to loop notes. */
1086 note = last_loop_beg_note (note);
1087 note = NEXT_INSN (note);
1089 jump_block = create_basic_block (note, NULL, e->src);
1090 jump_block->count = e->count;
1091 jump_block->frequency = EDGE_FREQUENCY (e);
1092 jump_block->loop_depth = target->loop_depth;
1094 if (target->global_live_at_start)
1096 jump_block->global_live_at_start
1097 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1098 jump_block->global_live_at_end
1099 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1100 COPY_REG_SET (jump_block->global_live_at_start,
1101 target->global_live_at_start);
1102 COPY_REG_SET (jump_block->global_live_at_end,
1103 target->global_live_at_start);
1106 /* Wire edge in. */
1107 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1108 new_edge->probability = e->probability;
1109 new_edge->count = e->count;
1111 /* Redirect old edge. */
1112 redirect_edge_pred (e, jump_block);
1113 e->probability = REG_BR_PROB_BASE;
1115 new_bb = jump_block;
1117 else
1118 jump_block = e->src;
1120 e->flags &= ~EDGE_FALLTHRU;
1121 if (target == EXIT_BLOCK_PTR)
1123 if (HAVE_return)
1124 emit_jump_insn_after_noloc (gen_return (), BB_END (jump_block));
1125 else
1126 abort ();
1128 else
1130 rtx label = block_label (target);
1131 emit_jump_insn_after_noloc (gen_jump (label), BB_END (jump_block));
1132 JUMP_LABEL (BB_END (jump_block)) = label;
1133 LABEL_NUSES (label)++;
1136 emit_barrier_after (BB_END (jump_block));
1137 redirect_edge_succ_nodup (e, target);
1139 if (abnormal_edge_flags)
1140 make_edge (src, target, abnormal_edge_flags);
1142 return new_bb;
/* Edge E is assumed to be a fallthru edge.  Emit the needed jump instruction
   (and possibly create a new basic block) to make the edge non-fallthru.
   Return the newly created BB, or NULL if none was created.  */
1149 basic_block
1150 force_nonfallthru (edge e)
1152 return force_nonfallthru_and_redirect (e, e->dest);
1155 /* Redirect edge even at the expense of creating new jump insn or
1156 basic block. Return new basic block if created, NULL otherwise.
1157 Abort if conversion is impossible. */
1159 static basic_block
1160 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1162 if (redirect_edge_and_branch (e, target)
1163 || e->dest == target)
1164 return NULL;
1166 /* In case the edge redirection failed, try to force it to be non-fallthru
1167 and redirect newly created simplejump. */
1168 return force_nonfallthru_and_redirect (e, target);
1171 /* The given edge should potentially be a fallthru edge. If that is in
1172 fact true, delete the jump and barriers that are in the way. */
1174 void
1175 tidy_fallthru_edge (edge e, basic_block b, basic_block c)
1177 rtx q;
  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label.  They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */
1189 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1190 if (INSN_P (q))
1191 return;
1193 /* Remove what will soon cease being the jump insn from the source block.
1194 If block B consisted only of this single jump, turn it into a deleted
1195 note. */
1196 q = BB_END (b);
1197 if (GET_CODE (q) == JUMP_INSN
1198 && onlyjump_p (q)
1199 && (any_uncondjump_p (q)
1200 || (b->succ == e && e->succ_next == NULL)))
1202 #ifdef HAVE_cc0
1203 /* If this was a conditional jump, we need to also delete
1204 the insn that set cc0. */
1205 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1206 q = PREV_INSN (q);
1207 #endif
1209 q = PREV_INSN (q);
1211 /* We don't want a block to end on a line-number note since that has
1212 the potential of changing the code between -g and not -g. */
1213 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
1214 q = PREV_INSN (q);
1217 /* Selectively unlink the sequence. */
1218 if (q != PREV_INSN (BB_HEAD (c)))
1219 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)));
1221 e->flags |= EDGE_FALLTHRU;
1224 /* Fix up edges that now fall through, or rather should now fall through
1225 but previously required a jump around now deleted blocks. Simplify
1226 the search by only examining blocks numerically adjacent, since this
1227 is how find_basic_blocks created them. */
1229 void
1230 tidy_fallthru_edges (void)
1232 basic_block b, c;
1234 if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
1235 return;
1237 FOR_BB_BETWEEN (b, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, next_bb)
1239 edge s;
1241 c = b->next_bb;
1243 /* We care about simple conditional or unconditional jumps with
1244 a single successor.
1246 If we had a conditional branch to the next instruction when
1247 find_basic_blocks was called, then there will only be one
1248 out edge for the block which ended with the conditional
1249 branch (since we do not create duplicate edges).
1251 Furthermore, the edge will be marked as a fallthru because we
1252 merge the flags for the duplicate edges. So we do not want to
1253 check that the edge is not a FALLTHRU edge. */
1255 if ((s = b->succ) != NULL
1256 && ! (s->flags & EDGE_COMPLEX)
1257 && s->succ_next == NULL
1258 && s->dest == c
1259 /* If the jump insn has side effects, we can't tidy the edge. */
1260 && (GET_CODE (BB_END (b)) != JUMP_INSN
1261 || onlyjump_p (BB_END (b))))
1262 tidy_fallthru_edge (s, b, c);
/* Helper function for split_edge.  Return true if the edge from BB2 to BB1
   is a back edge of a syntactic loop.  */
1269 static bool
1270 back_edge_of_syntactic_loop_p (basic_block bb1, basic_block bb2)
1272 rtx insn;
1273 int count = 0;
1274 basic_block bb;
1276 if (bb1 == bb2)
1277 return true;
1279 /* ??? Could we guarantee that bb indices are monotone, so that we could
1280 just compare them? */
1281 for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
1282 continue;
1284 if (!bb)
1285 return false;
1287 for (insn = BB_END (bb1); insn != BB_HEAD (bb2) && count >= 0;
1288 insn = NEXT_INSN (insn))
1289 if (GET_CODE (insn) == NOTE)
1291 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1292 count++;
1293 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1294 count--;
1297 return count >= 0;
1300 /* Split a (typically critical) edge. Return the new block.
1301 Abort on abnormal edges.
1303 ??? The code generally expects to be called on critical edges.
1304 The case of a block ending in an unconditional jump to a
1305 block with multiple predecessors is not handled optimally. */
1307 static basic_block
1308 rtl_split_edge (edge edge_in)
1310 basic_block bb;
1311 rtx before;
1313 /* Abnormal edges cannot be split. */
1314 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1315 abort ();
1317 /* We are going to place the new block in front of edge destination.
1318 Avoid existence of fallthru predecessors. */
1319 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1321 edge e;
1323 for (e = edge_in->dest->pred; e; e = e->pred_next)
1324 if (e->flags & EDGE_FALLTHRU)
1325 break;
1327 if (e)
1328 force_nonfallthru (e);
1331 /* Create the basic block note.
1333 Where we place the note can have a noticeable impact on the generated
1334 code. Consider this cfg:
1340 +->1-->2--->E
1342 +--+
1344 If we need to insert an insn on the edge from block 0 to block 1,
1345 we want to ensure the instructions we insert are outside of any
1346 loop notes that physically sit between block 0 and block 1. Otherwise
1347 we confuse the loop optimizer into thinking the loop is a phony. */
1349 if (edge_in->dest != EXIT_BLOCK_PTR
1350 && PREV_INSN (BB_HEAD (edge_in->dest))
1351 && GET_CODE (PREV_INSN (BB_HEAD (edge_in->dest))) == NOTE
1352 && (NOTE_LINE_NUMBER (PREV_INSN (BB_HEAD (edge_in->dest)))
1353 == NOTE_INSN_LOOP_BEG)
1354 && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
1355 before = PREV_INSN (BB_HEAD (edge_in->dest));
1356 else if (edge_in->dest != EXIT_BLOCK_PTR)
1357 before = BB_HEAD (edge_in->dest);
1358 else
1359 before = NULL_RTX;
1361 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1362 bb->count = edge_in->count;
1363 bb->frequency = EDGE_FREQUENCY (edge_in);
1365 /* ??? This info is likely going to be out of date very soon. */
1366 if (edge_in->dest->global_live_at_start)
1368 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1369 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1370 COPY_REG_SET (bb->global_live_at_start,
1371 edge_in->dest->global_live_at_start);
1372 COPY_REG_SET (bb->global_live_at_end,
1373 edge_in->dest->global_live_at_start);
1376 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1378 /* For non-fallthru edges, we must adjust the predecessor's
1379 jump instruction to target our new block. */
1380 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1382 if (!redirect_edge_and_branch (edge_in, bb))
1383 abort ();
1385 else
1386 redirect_edge_succ (edge_in, bb);
1388 return bb;
1391 /* Queue instructions for insertion on an edge between two basic blocks.
1392 The new instructions and basic blocks (if any) will not appear in the
1393 CFG until commit_edge_insertions is called. */
1395 void
1396 insert_insn_on_edge (rtx pattern, edge e)
1398 /* We cannot insert instructions on an abnormal critical edge.
1399 It will be easier to find the culprit if we die now. */
1400 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
1401 abort ();
1403 if (e->insns == NULL_RTX)
1404 start_sequence ();
1405 else
1406 push_to_sequence (e->insns);
1408 emit_insn (pattern);
1410 e->insns = get_insns ();
1411 end_sequence ();
1414 /* Called from safe_insert_insn_on_edge through note_stores, marks live
1415 registers that are killed by the store. */
1416 static void
1417 mark_killed_regs (rtx reg, rtx set ATTRIBUTE_UNUSED, void *data)
1419 regset killed = data;
1420 int regno, i;
1422 if (GET_CODE (reg) == SUBREG)
1423 reg = SUBREG_REG (reg);
1424 if (!REG_P (reg))
1425 return;
1426 regno = REGNO (reg);
1427 if (regno >= FIRST_PSEUDO_REGISTER)
1428 SET_REGNO_REG_SET (killed, regno);
1429 else
1431 for (i = 0; i < (int) HARD_REGNO_NREGS (regno, GET_MODE (reg)); i++)
1432 SET_REGNO_REG_SET (killed, regno + i);
/* Similar to insert_insn_on_edge, tries to put INSN on edge E.  Additionally
   it checks whether this will not clobber the registers that are live on the
   edge (i.e. it requires liveness information to be up-to-date) and if there
   are some, then it tries to save and restore them.  Returns true if
   successful.  */
1441 bool
1442 safe_insert_insn_on_edge (rtx insn, edge e)
1444 rtx x;
1445 regset_head killed_head;
1446 regset killed = INITIALIZE_REG_SET (killed_head);
1447 rtx save_regs = NULL_RTX;
1448 int regno, noccmode;
1449 enum machine_mode mode;
1451 #ifdef AVOID_CCMODE_COPIES
1452 noccmode = true;
1453 #else
1454 noccmode = false;
1455 #endif
1457 for (x = insn; x; x = NEXT_INSN (x))
1458 if (INSN_P (x))
1459 note_stores (PATTERN (x), mark_killed_regs, killed);
1460 bitmap_operation (killed, killed, e->dest->global_live_at_start,
1461 BITMAP_AND);
1463 EXECUTE_IF_SET_IN_REG_SET (killed, 0, regno,
1465 mode = regno < FIRST_PSEUDO_REGISTER
1466 ? reg_raw_mode[regno]
1467 : GET_MODE (regno_reg_rtx[regno]);
1468 if (mode == VOIDmode)
1469 return false;
1471 if (noccmode && mode == CCmode)
1472 return false;
1474 save_regs = alloc_EXPR_LIST (0,
1475 alloc_EXPR_LIST (0,
1476 gen_reg_rtx (mode),
1477 gen_raw_REG (mode, regno)),
1478 save_regs);
1481 if (save_regs)
1483 rtx from, to;
1485 start_sequence ();
1486 for (x = save_regs; x; x = XEXP (x, 1))
1488 from = XEXP (XEXP (x, 0), 1);
1489 to = XEXP (XEXP (x, 0), 0);
1490 emit_move_insn (to, from);
1492 emit_insn (insn);
1493 for (x = save_regs; x; x = XEXP (x, 1))
1495 from = XEXP (XEXP (x, 0), 0);
1496 to = XEXP (XEXP (x, 0), 1);
1497 emit_move_insn (to, from);
1499 insn = get_insns ();
1500 end_sequence ();
1501 free_EXPR_LIST_list (&save_regs);
1503 insert_insn_on_edge (insn, e);
1505 FREE_REG_SET (killed);
1506 return true;
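/* Illustrative usage sketch, not part of the original file: one way a caller
   might use safe_insert_insn_on_edge.  The edge E and FIXUP_INSNS (an insn
   sequence built with start_sequence ()/get_insns ()) are hypothetical
   placeholders.  */
#if 0
static void
example_safe_insert (edge e, rtx fixup_insns)
{
  /* safe_insert_insn_on_edge queues FIXUP_INSNS on E, wrapping the sequence
     with save/restore code for any live hard registers it would clobber.  */
  if (!safe_insert_insn_on_edge (fixup_insns, e))
    {
      /* A clobbered register could not be saved (for example a CC-mode
         register); the caller must find another place for the fixup code.  */
    }
}
#endif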
1509 /* Update the CFG for the instructions queued on edge E. */
1511 static void
1512 commit_one_edge_insertion (edge e, int watch_calls)
1514 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1515 basic_block bb = NULL;
1517 /* Pull the insns off the edge now since the edge might go away. */
1518 insns = e->insns;
1519 e->insns = NULL_RTX;
1521 /* Special case -- avoid inserting code between call and storing
1522 its return value. */
1523 if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
1524 && e->src != ENTRY_BLOCK_PTR
1525 && GET_CODE (BB_END (e->src)) == CALL_INSN)
1527 rtx next = next_nonnote_insn (BB_END (e->src));
1529 after = BB_HEAD (e->dest);
1530 /* The first insn after the call may be a stack pop, skip it. */
1531 while (next
1532 && keep_with_call_p (next))
1534 after = next;
1535 next = next_nonnote_insn (next);
1537 bb = e->dest;
1539 if (!before && !after)
1541 /* Figure out where to put these things. If the destination has
1542 one predecessor, insert there. Except for the exit block. */
1543 if (e->dest->pred->pred_next == NULL && e->dest != EXIT_BLOCK_PTR)
1545 bb = e->dest;
1547 /* Get the location correct wrt a code label, and "nice" wrt
1548 a basic block note, and before everything else. */
1549 tmp = BB_HEAD (bb);
1550 if (GET_CODE (tmp) == CODE_LABEL)
1551 tmp = NEXT_INSN (tmp);
1552 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1553 tmp = NEXT_INSN (tmp);
1554 if (tmp == BB_HEAD (bb))
1555 before = tmp;
1556 else if (tmp)
1557 after = PREV_INSN (tmp);
1558 else
1559 after = get_last_insn ();
1562 /* If the source has one successor and the edge is not abnormal,
1563 insert there. Except for the entry block. */
1564 else if ((e->flags & EDGE_ABNORMAL) == 0
1565 && e->src->succ->succ_next == NULL
1566 && e->src != ENTRY_BLOCK_PTR)
1568 bb = e->src;
1570 /* It is possible to have a non-simple jump here. Consider a target
1571 where some forms of unconditional jumps clobber a register. This
1572 happens on the fr30 for example.
1574 We know this block has a single successor, so we can just emit
1575 the queued insns before the jump. */
1576 if (GET_CODE (BB_END (bb)) == JUMP_INSN)
1577 for (before = BB_END (bb);
1578 GET_CODE (PREV_INSN (before)) == NOTE
1579 && NOTE_LINE_NUMBER (PREV_INSN (before)) ==
1580 NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
1582 else
1584 /* We'd better be fallthru, or we've lost track of what's what. */
1585 if ((e->flags & EDGE_FALLTHRU) == 0)
1586 abort ();
1588 after = BB_END (bb);
1591 /* Otherwise we must split the edge. */
1592 else
1594 bb = split_edge (e);
1595 after = BB_END (bb);
1599 /* Now that we've found the spot, do the insertion. */
1601 if (before)
1603 emit_insn_before_noloc (insns, before);
1604 last = prev_nonnote_insn (before);
1606 else
1607 last = emit_insn_after_noloc (insns, after);
1609 if (returnjump_p (last))
1611 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1612 This is not currently a problem because this only happens
1613 for the (single) epilogue, which already has a fallthru edge
1614 to EXIT. */
1616 e = bb->succ;
1617 if (e->dest != EXIT_BLOCK_PTR
1618 || e->succ_next != NULL || (e->flags & EDGE_FALLTHRU) == 0)
1619 abort ();
1621 e->flags &= ~EDGE_FALLTHRU;
1622 emit_barrier_after (last);
1624 if (before)
1625 delete_insn (before);
1627 else if (GET_CODE (last) == JUMP_INSN)
1628 abort ();
1630 /* Mark the basic block for find_sub_basic_blocks. */
1631 bb->aux = &bb->aux;
1634 /* Update the CFG for all queued instructions. */
1636 void
1637 commit_edge_insertions (void)
1639 basic_block bb;
1640 sbitmap blocks;
1641 bool changed = false;
1643 #ifdef ENABLE_CHECKING
1644 verify_flow_info ();
1645 #endif
1647 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1649 edge e, next;
1651 for (e = bb->succ; e; e = next)
1653 next = e->succ_next;
1654 if (e->insns)
1656 changed = true;
1657 commit_one_edge_insertion (e, false);
1662 if (!changed)
1663 return;
1665 blocks = sbitmap_alloc (last_basic_block);
1666 sbitmap_zero (blocks);
1667 FOR_EACH_BB (bb)
1668 if (bb->aux)
1670 SET_BIT (blocks, bb->index);
1671 /* Check for forgotten bb->aux values before commit_edge_insertions
1672 call. */
1673 if (bb->aux != &bb->aux)
1674 abort ();
1675 bb->aux = NULL;
1677 find_many_sub_basic_blocks (blocks);
1678 sbitmap_free (blocks);
/* Update the CFG for all queued instructions, taking special care when
   inserting code on edges between a call and the insn storing its return
   value.  */
1684 void
1685 commit_edge_insertions_watch_calls (void)
1687 basic_block bb;
1688 sbitmap blocks;
1689 bool changed = false;
1691 #ifdef ENABLE_CHECKING
1692 verify_flow_info ();
1693 #endif
1695 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1697 edge e, next;
1699 for (e = bb->succ; e; e = next)
1701 next = e->succ_next;
1702 if (e->insns)
1704 changed = true;
1705 commit_one_edge_insertion (e, true);
1710 if (!changed)
1711 return;
1713 blocks = sbitmap_alloc (last_basic_block);
1714 sbitmap_zero (blocks);
1715 FOR_EACH_BB (bb)
1716 if (bb->aux)
1718 SET_BIT (blocks, bb->index);
1719 /* Check for forgotten bb->aux values before commit_edge_insertions
1720 call. */
1721 if (bb->aux != &bb->aux)
1722 abort ();
1723 bb->aux = NULL;
1725 find_many_sub_basic_blocks (blocks);
1726 sbitmap_free (blocks);
1729 /* Print out one basic block with live information at start and end. */
1731 static void
1732 rtl_dump_bb (basic_block bb, FILE *outf)
1734 rtx insn;
1735 rtx last;
1737 fputs (";; Registers live at start:", outf);
1738 dump_regset (bb->global_live_at_start, outf);
1739 putc ('\n', outf);
1741 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1742 insn = NEXT_INSN (insn))
1743 print_rtl_single (outf, insn);
1745 fputs (";; Registers live at end:", outf);
1746 dump_regset (bb->global_live_at_end, outf);
1747 putc ('\n', outf);
1750 /* Like print_rtl, but also print out live information for the start of each
1751 basic block. */
1753 void
1754 print_rtl_with_bb (FILE *outf, rtx rtx_first)
1756 rtx tmp_rtx;
1758 if (rtx_first == 0)
1759 fprintf (outf, "(nil)\n");
1760 else
1762 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1763 int max_uid = get_max_uid ();
1764 basic_block *start = xcalloc (max_uid, sizeof (basic_block));
1765 basic_block *end = xcalloc (max_uid, sizeof (basic_block));
1766 enum bb_state *in_bb_p = xcalloc (max_uid, sizeof (enum bb_state));
1768 basic_block bb;
1770 FOR_EACH_BB_REVERSE (bb)
1772 rtx x;
1774 start[INSN_UID (BB_HEAD (bb))] = bb;
1775 end[INSN_UID (BB_END (bb))] = bb;
1776 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1778 enum bb_state state = IN_MULTIPLE_BB;
1780 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1781 state = IN_ONE_BB;
1782 in_bb_p[INSN_UID (x)] = state;
1784 if (x == BB_END (bb))
1785 break;
1789 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1791 int did_output;
1793 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1795 fprintf (outf, ";; Start of basic block %d, registers live:",
1796 bb->index);
1797 dump_regset (bb->global_live_at_start, outf);
1798 putc ('\n', outf);
1801 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1802 && GET_CODE (tmp_rtx) != NOTE
1803 && GET_CODE (tmp_rtx) != BARRIER)
1804 fprintf (outf, ";; Insn is not within a basic block\n");
1805 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1806 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1808 did_output = print_rtl_single (outf, tmp_rtx);
1810 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1812 fprintf (outf, ";; End of basic block %d, registers live:\n",
1813 bb->index);
1814 dump_regset (bb->global_live_at_end, outf);
1815 putc ('\n', outf);
1818 if (did_output)
1819 putc ('\n', outf);
1822 free (start);
1823 free (end);
1824 free (in_bb_p);
1827 if (current_function_epilogue_delay_list != 0)
1829 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1830 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1831 tmp_rtx = XEXP (tmp_rtx, 1))
1832 print_rtl_single (outf, XEXP (tmp_rtx, 0));
1836 void
1837 update_br_prob_note (basic_block bb)
1839 rtx note;
1840 if (GET_CODE (BB_END (bb)) != JUMP_INSN)
1841 return;
1842 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1843 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1844 return;
1845 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does the following checks:

   - test head/end pointers
   - overlapping of basic blocks
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK

   In the future it can be extended to check a lot of other things as well
   (reachability of basic blocks, life information, etc. etc.).  */
1862 static int
1863 rtl_verify_flow_info_1 (void)
1865 const int max_uid = get_max_uid ();
1866 rtx last_head = get_last_insn ();
1867 basic_block *bb_info;
1868 rtx x;
1869 int err = 0;
1870 basic_block bb, last_bb_seen;
1872 bb_info = xcalloc (max_uid, sizeof (basic_block));
1874 /* Check bb chain & numbers. */
1875 last_bb_seen = ENTRY_BLOCK_PTR;
1877 FOR_EACH_BB_REVERSE (bb)
1879 rtx head = BB_HEAD (bb);
1880 rtx end = BB_END (bb);
1882 /* Verify the end of the basic block is in the INSN chain. */
1883 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1884 if (x == end)
1885 break;
1887 if (!x)
1889 error ("end insn %d for block %d not found in the insn stream",
1890 INSN_UID (end), bb->index);
1891 err = 1;
1894 /* Work backwards from the end to the head of the basic block
1895 to verify the head is in the RTL chain. */
1896 for (; x != NULL_RTX; x = PREV_INSN (x))
1898 /* While walking over the insn chain, verify insns appear
1899 in only one basic block and initialize the BB_INFO array
1900 used by other passes. */
1901 if (bb_info[INSN_UID (x)] != NULL)
1903 error ("insn %d is in multiple basic blocks (%d and %d)",
1904 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1905 err = 1;
1908 bb_info[INSN_UID (x)] = bb;
1910 if (x == head)
1911 break;
1913 if (!x)
1915 error ("head insn %d for block %d not found in the insn stream",
1916 INSN_UID (head), bb->index);
1917 err = 1;
1920 last_head = x;
1923 /* Now check the basic blocks (boundaries etc.) */
1924 FOR_EACH_BB_REVERSE (bb)
1926 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1927 edge e, fallthru = NULL;
1928 rtx note;
1930 if (INSN_P (BB_END (bb))
1931 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
1932 && bb->succ && bb->succ->succ_next
1933 && any_condjump_p (BB_END (bb)))
1935 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
1937 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
1938 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1939 err = 1;
1942 for (e = bb->succ; e; e = e->succ_next)
1944 if (e->flags & EDGE_FALLTHRU)
1945 n_fallthru++, fallthru = e;
1947 if ((e->flags & ~(EDGE_DFS_BACK
1948 | EDGE_CAN_FALLTHRU
1949 | EDGE_IRREDUCIBLE_LOOP
1950 | EDGE_LOOP_EXIT)) == 0)
1951 n_branch++;
1953 if (e->flags & EDGE_ABNORMAL_CALL)
1954 n_call++;
1956 if (e->flags & EDGE_EH)
1957 n_eh++;
1958 else if (e->flags & EDGE_ABNORMAL)
1959 n_abnormal++;
1962 if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
1963 && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
1965 error ("Missing REG_EH_REGION note in the end of bb %i", bb->index);
1966 err = 1;
1968 if (n_branch
1969 && (GET_CODE (BB_END (bb)) != JUMP_INSN
1970 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
1971 || any_condjump_p (BB_END (bb))))))
1973 error ("Too many outgoing branch edges from bb %i", bb->index);
1974 err = 1;
1976 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
1978 error ("Fallthru edge after unconditional jump %i", bb->index);
1979 err = 1;
1981 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
1983 error ("Wrong amount of branch edges after unconditional jump %i", bb->index);
1984 err = 1;
1986 if (n_branch != 1 && any_condjump_p (BB_END (bb))
1987 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
1989 error ("Wrong amount of branch edges after conditional jump %i", bb->index);
1990 err = 1;
1992 if (n_call && GET_CODE (BB_END (bb)) != CALL_INSN)
1994 error ("Call edges for non-call insn in bb %i", bb->index);
1995 err = 1;
1997 if (n_abnormal
1998 && (GET_CODE (BB_END (bb)) != CALL_INSN && n_call != n_abnormal)
1999 && (GET_CODE (BB_END (bb)) != JUMP_INSN
2000 || any_condjump_p (BB_END (bb))
2001 || any_uncondjump_p (BB_END (bb))))
2003 error ("Abnormal edges for no purpose in bb %i", bb->index);
2004 err = 1;
2007 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
2008 if (BLOCK_FOR_INSN (x) != bb)
2010 debug_rtx (x);
2011 if (! BLOCK_FOR_INSN (x))
2012 error
2013 ("insn %d inside basic block %d but block_for_insn is NULL",
2014 INSN_UID (x), bb->index);
2015 else
2016 error
2017 ("insn %d inside basic block %d but block_for_insn is %i",
2018 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
2020 err = 1;
      /* OK, the pointers are correct.  Now check the header of the basic
         block.  It ought to contain an optional CODE_LABEL followed by
         NOTE_BASIC_BLOCK.  */
2026 x = BB_HEAD (bb);
2027 if (GET_CODE (x) == CODE_LABEL)
2029 if (BB_END (bb) == x)
2031 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2032 bb->index);
2033 err = 1;
2036 x = NEXT_INSN (x);
2039 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2041 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2042 bb->index);
2043 err = 1;
2046 if (BB_END (bb) == x)
2047 /* Do checks for empty blocks her. e */
2049 else
2050 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2052 if (NOTE_INSN_BASIC_BLOCK_P (x))
2054 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2055 INSN_UID (x), bb->index);
2056 err = 1;
2059 if (x == BB_END (bb))
2060 break;
2062 if (control_flow_insn_p (x))
2064 error ("in basic block %d:", bb->index);
2065 fatal_insn ("flow control insn inside a basic block", x);
2070 /* Clean up. */
2071 free (bb_info);
2072 return err;
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does the following checks:
   - all checks of rtl_verify_flow_info_1
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers
   - check that all fallthru edges point to the adjacent blocks.  */
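/* Note: the two verifiers are installed as the first entry of the cfg hook
   tables at the end of this file (rtl_cfg_hooks uses rtl_verify_flow_info,
   cfg_layout_rtl_cfg_hooks uses rtl_verify_flow_info_1), so they are
   normally reached through the generic CFG verification hook rather than
   being called directly.  */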
static int
rtl_verify_flow_info (void)
{
  basic_block bb;
  int err = rtl_verify_flow_info_1 ();
  rtx x;
  int num_bb_notes;
  const rtx rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;

  FOR_EACH_BB_REVERSE (bb)
    {
      edge e;
      for (e = bb->succ; e; e = e->succ_next)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (!e)
        {
          rtx insn;

          /* Ensure existence of barrier in BB with no fallthru edges.  */
          for (insn = BB_END (bb); !insn || GET_CODE (insn) != BARRIER;
               insn = NEXT_INSN (insn))
            if (!insn
                || (GET_CODE (insn) == NOTE
                    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
              {
                error ("missing barrier after block %i", bb->index);
                err = 1;
                break;
              }
        }
      else if (e->src != ENTRY_BLOCK_PTR
               && e->dest != EXIT_BLOCK_PTR)
        {
          rtx insn;

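          /* A fallthru edge between two ordinary blocks requires the
             destination to be laid out immediately after the source, with
             nothing but notes in between; a barrier or a real insn there is
             an error, except that targets defining CASE_DROPS_THROUGH
             tolerate jump table data, since a casesi insn may fall through
             on such targets.  */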
          if (e->src->next_bb != e->dest)
            {
              error
                ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
                 e->src->index, e->dest->index);
              err = 1;
            }
          else
            for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
                 insn = NEXT_INSN (insn))
              if (GET_CODE (insn) == BARRIER
#ifndef CASE_DROPS_THROUGH
                  || INSN_P (insn)
#else
                  || (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
#endif
                  )
                {
                  error ("verify_flow_info: Incorrect fallthru %i->%i",
                         e->src->index, e->dest->index);
                  fatal_insn ("wrong insn in the fallthru edge", insn);
                  err = 1;
                }
        }
    }

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR;

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
        {
          bb = NOTE_BASIC_BLOCK (x);

          num_bb_notes++;
          if (bb != last_bb_seen->next_bb)
            internal_error ("basic blocks not laid down consecutively");

          curr_bb = last_bb_seen = bb;
        }

      if (!curr_bb)
        {
          switch (GET_CODE (x))
            {
            case BARRIER:
            case NOTE:
              break;

            case CODE_LABEL:
              /* An addr_vec is placed outside any basic block.  */
              if (NEXT_INSN (x)
                  && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
                  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
                      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
                x = NEXT_INSN (x);

              /* But in any case, non-deletable labels can appear anywhere.  */
              break;

            default:
              fatal_insn ("insn outside basic block", x);
            }
        }

      if (INSN_P (x)
          && GET_CODE (x) == JUMP_INSN
          && returnjump_p (x) && ! condjump_p (x)
          && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
        fatal_insn ("return not followed by barrier", x);
      if (curr_bb && x == BB_END (curr_bb))
        curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

  return err;
}

/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the corresponding edges
   from the CFG.  Return true if any edges are eliminated.  */

bool
purge_dead_edges (basic_block bb)
{
  edge e, next;
  rtx insn = BB_END (bb), note;
  bool purged = false;

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (GET_CODE (insn) == INSN
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
          || ((eqnote = find_reg_equal_equiv_note (insn))
              && ! may_trap_p (XEXP (eqnote, 0))))
        remove_note (insn, note);
    }

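  /* Dropping a stale REG_EH_REGION note means the insn is no longer
     considered to throw internally, so any EDGE_EH successor it had will be
     removed by the loop below.  */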
  /* Clean up abnormal edges caused by exceptions or non-local gotos.  */
  for (e = bb->succ; e; e = next)
    {
      next = e->succ_next;
      if (e->flags & EDGE_EH)
        {
          if (can_throw_internal (BB_END (bb)))
            continue;
        }
      else if (e->flags & EDGE_ABNORMAL_CALL)
        {
          if (GET_CODE (BB_END (bb)) == CALL_INSN
              && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
                  || INTVAL (XEXP (note, 0)) >= 0))
            continue;
        }
      else
        continue;

      remove_edge (e);
      bb->flags |= BB_DIRTY;
      purged = true;
    }

  if (GET_CODE (insn) == JUMP_INSN)
    {
      rtx note;
      edge b, f;

      /* We only care about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
          && !returnjump_p (insn)
          && !simplejump_p (insn))
        return purged;

      /* Branch probability/prediction notes are defined only for
         condjumps.  We may have turned the condjump into a simplejump.  */
      if (simplejump_p (insn))
        {
          note = find_reg_note (insn, REG_BR_PROB, NULL);
          if (note)
            remove_note (insn, note);
          while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
            remove_note (insn, note);
        }

      for (e = bb->succ; e; e = next)
        {
          next = e->succ_next;

          /* Do not let abnormal flags leak from computed jumps that have
             been turned into simplejumps.  */
          e->flags &= ~EDGE_ABNORMAL;

          /* See if this edge is one we should keep.  */
          if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
            /* A conditional jump can fall through into the next
               block, so we should keep the edge.  */
            continue;
          else if (e->dest != EXIT_BLOCK_PTR
                   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
            /* If the destination block is the target of the jump,
               keep the edge.  */
            continue;
          else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
            /* If the destination block is the exit block, and this
               instruction is a return, then keep the edge.  */
            continue;
          else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
            /* Keep the edges that correspond to exceptions thrown by
               this instruction and rematerialize the EDGE_ABNORMAL
               flag we just cleared above.  */
            {
              e->flags |= EDGE_ABNORMAL;
              continue;
            }

          /* We do not need this edge.  */
          bb->flags |= BB_DIRTY;
          purged = true;
          remove_edge (e);
        }

      if (!bb->succ || !purged)
        return purged;

      if (rtl_dump_file)
        fprintf (rtl_dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
        return purged;

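      /* A single remaining successor simply inherits the block's full count.
         With two successors, the REG_BR_PROB note gives the taken
         probability scaled to REG_BR_PROB_BASE; e.g. a note value of
         REG_BR_PROB_BASE * 9 / 10 assigns 90% of bb->count to the branch
         edge and the remaining 10% to the fallthru edge.  */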
      /* Redistribute probabilities.  */
      if (!bb->succ->succ_next)
        {
          bb->succ->probability = REG_BR_PROB_BASE;
          bb->succ->count = bb->count;
        }
      else
        {
          note = find_reg_note (insn, REG_BR_PROB, NULL);
          if (!note)
            return purged;

          b = BRANCH_EDGE (bb);
          f = FALLTHRU_EDGE (bb);
          b->probability = INTVAL (XEXP (note, 0));
          f->probability = REG_BR_PROB_BASE - b->probability;
          b->count = bb->count * b->probability / REG_BR_PROB_BASE;
          f->count = bb->count * f->probability / REG_BR_PROB_BASE;
        }

      return purged;
    }
  else if (GET_CODE (insn) == CALL_INSN && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
         from non-local gotos and the like.  If there were, we shouldn't
         have created the sibcall in the first place.  Second, there
         should of course never have been a fallthru edge.  */
      if (!bb->succ || bb->succ->succ_next)
        abort ();
      if (bb->succ->flags != (EDGE_SIBCALL | EDGE_ABNORMAL))
        abort ();

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
       e = e->succ_next)
    ;

  if (!e)
    return purged;

  for (e = bb->succ; e; e = next)
    {
      next = e->succ_next;
      if (!(e->flags & EDGE_FALLTHRU))
        {
          bb->flags |= BB_DIRTY;
          remove_edge (e);
          purged = true;
        }
    }

  if (!bb->succ || bb->succ->succ_next)
    abort ();

  bb->succ->probability = REG_BR_PROB_BASE;
  bb->succ->count = bb->count;

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Purged non-fallthru edges from bb %i\n",
             bb->index);
  return purged;
}

/* Search all basic blocks for potentially dead edges and purge them.  Return
   true if some edge has been eliminated.  */

bool
purge_all_dead_edges (int update_life_p)
{
  int purged = false;
  sbitmap blocks = 0;
  basic_block bb;

  if (update_life_p)
    {
      blocks = sbitmap_alloc (last_basic_block);
      sbitmap_zero (blocks);
    }

  FOR_EACH_BB (bb)
    {
      bool purged_here = purge_dead_edges (bb);

      purged |= purged_here;
      if (purged_here && update_life_p)
        SET_BIT (blocks, bb->index);
    }

  if (update_life_p && purged)
    update_life_info (blocks, UPDATE_LIFE_GLOBAL,
                      PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
                      | PROP_KILL_DEAD_CODE);

  if (update_life_p)
    sbitmap_free (blocks);
  return purged;
}

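/* When UPDATE_LIFE_P is nonzero, the blocks whose edges actually changed are
   collected in an sbitmap so that a single global life update can be run
   over just those blocks once all purging is done.  */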
/* Same as split_block but update cfg_layout structures.  */
static edge
cfg_layout_split_block (basic_block bb, void *insnp)
{
  rtx insn = insnp;

  edge fallthru = rtl_split_block (bb, insn);

  fallthru->dest->rbi->footer = fallthru->src->rbi->footer;
  fallthru->src->rbi->footer = NULL;
  return fallthru;
}

/* Redirect edge E to DEST.  */
static bool
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  bool ret;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return false;

  if (e->dest == dest)
    return true;

  if (e->src != ENTRY_BLOCK_PTR
      && try_redirect_by_replacing_jump (e, dest, true))
    return true;

  if (e->src == ENTRY_BLOCK_PTR
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (rtl_dump_file)
        fprintf (rtl_dump_file, "Redirecting entry edge from bb %i to %i\n",
                 e->src->index, dest->index);

      redirect_edge_succ (e, dest);
      return true;
    }

  /* redirect_edge_and_branch may decide to turn the branch into a fallthru
     edge when the basic blocks appear to be in sequence; avoid this
     transformation.  */

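  /* In cfglayout mode a fallthru edge does not require its target to be the
     physically next block, and unconditional jumps are represented by edges
     alone; any jump insns still required are presumably re-emitted when the
     insn chain is linearized again, which is why simplejumps must not appear
     in the stream here (see the check at the end of this function).  */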
  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (GET_CODE (BB_END (src)) == JUMP_INSN
          && label_is_jump_target_p (BB_HEAD (e->dest),
                                     BB_END (src)))
        {
          if (rtl_dump_file)
            fprintf (rtl_dump_file, "Fallthru edge unified with branch "
                     "%i->%i redirected to %i\n",
                     e->src->index, e->dest->index, dest->index);
          e->flags &= ~EDGE_FALLTHRU;
          if (!redirect_branch_edge (e, dest))
            abort ();
          e->flags |= EDGE_FALLTHRU;
          return true;
        }
      /* If we are redirecting the fallthru edge to the branch edge
         of a conditional jump, remove the jump.  */
      if (src->succ->succ_next
          && !src->succ->succ_next->succ_next)
        {
          edge s = e->succ_next ? e->succ_next : src->succ;
          if (s->dest == dest
              && any_condjump_p (BB_END (src))
              && onlyjump_p (BB_END (src)))
            delete_insn (BB_END (src));
        }

      if (rtl_dump_file)
        fprintf (rtl_dump_file, "Fallthru edge %i->%i redirected to %i\n",
                 e->src->index, e->dest->index, dest->index);
      redirect_edge_succ_nodup (e, dest);

      ret = true;
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  if (simplejump_p (BB_END (src)))
    abort ();

  return ret;
}

/* Simple wrapper, as we can always redirect fallthru edges.  */
static basic_block
cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
{
  if (!cfg_layout_redirect_edge_and_branch (e, dest))
    abort ();
  return NULL;
}

/* Same as flow_delete_block but update cfg_layout structures.  */
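/* In cfglayout mode a block may own detached insn chains: rbi->header
   (labels and notes that belong before the block) and rbi->footer (barriers
   and jump tables that belong after it).  These are spliced back into the
   main insn stream before the block is deleted, and whatever insns then
   remain around the deleted block are re-attached to the next block's
   header (or to the function footer for the last block).  */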
static void
cfg_layout_delete_block (basic_block bb)
{
  rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;

  if (bb->rbi->header)
    {
      next = BB_HEAD (bb);
      if (prev)
        NEXT_INSN (prev) = bb->rbi->header;
      else
        set_first_insn (bb->rbi->header);
      PREV_INSN (bb->rbi->header) = prev;
      insn = bb->rbi->header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = next;
      PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (bb->rbi->footer)
    {
      insn = bb->rbi->footer;
      while (insn)
        {
          if (GET_CODE (insn) == BARRIER)
            {
              if (PREV_INSN (insn))
                NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
              else
                bb->rbi->footer = NEXT_INSN (insn);
              if (NEXT_INSN (insn))
                PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
            }
          if (GET_CODE (insn) == CODE_LABEL)
            break;
          insn = NEXT_INSN (insn);
        }
      if (bb->rbi->footer)
        {
          insn = BB_END (bb);
          NEXT_INSN (insn) = bb->rbi->footer;
          PREV_INSN (bb->rbi->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
          NEXT_INSN (insn) = next;
          if (next)
            PREV_INSN (next) = insn;
          else
            set_last_insn (insn);
        }
    }
  if (bb->next_bb != EXIT_BLOCK_PTR)
    to = &bb->next_bb->rbi->header;
  else
    to = &cfg_layout_function_footer;
  rtl_delete_block (bb);

  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  if (next && NEXT_INSN (next) != prev)
    {
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = *to;
      if (*to)
        PREV_INSN (*to) = insn;
      *to = remaints;
    }
}

/* Return true when blocks A and B can be safely merged.  */
static bool
cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
{
  /* There must be exactly one edge in between the blocks.  */
  return (a->succ && !a->succ->succ_next && a->succ->dest == b
          && !b->pred->pred_next && a != b
          /* Must be simple edge.  */
          && !(a->succ->flags & EDGE_COMPLEX)
          && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
          /* If the jump insn has side effects,
             we can't kill the edge.  */
          && (GET_CODE (BB_END (a)) != JUMP_INSN
              || (reload_completed
                  ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
}

/* Merge blocks A and B; abort when it is not possible.  */
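/* Two layouts are handled below: when B does not physically follow A, B's
   insns are unlinked and re-emitted after A; otherwise they are simply
   re-associated with A.  In both cases B's header notes are emitted in
   between and B's footer, if any, is appended to A's footer.  */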
static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
#ifdef ENABLE_CHECKING
  if (!cfg_layout_can_merge_blocks_p (a, b))
    abort ();
#endif

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (GET_CODE (BB_HEAD (b)) == CODE_LABEL)
    delete_insn (BB_HEAD (b));

  /* We should have a fallthru edge in A, or we can do a dummy redirection
     to get it cleaned up.  */
  if (GET_CODE (BB_END (a)) == JUMP_INSN)
    try_redirect_by_replacing_jump (a->succ, b, true);
  if (GET_CODE (BB_END (a)) == JUMP_INSN)
    abort ();

  /* Possible line number notes should appear in between.  */
  if (b->rbi->header)
    {
      rtx first = BB_END (a), last;

      last = emit_insn_after_noloc (b->rbi->header, BB_END (a));
      delete_insn_chain (NEXT_INSN (first), last);
      b->rbi->header = NULL;
    }

  /* In the case the basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (first, BB_END (a));
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (first))
        first = NEXT_INSN (first);
      if (!NOTE_INSN_BASIC_BLOCK_P (first))
        abort ();
      BB_HEAD (b) = NULL;
      delete_insn (first);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      rtx insn;

      for (insn = BB_HEAD (b);
           insn != NEXT_INSN (BB_END (b));
           insn = NEXT_INSN (insn))
        set_block_for_insn (insn, a);
      insn = BB_HEAD (b);
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (insn))
        insn = NEXT_INSN (insn);
      if (!NOTE_INSN_BASIC_BLOCK_P (insn))
        abort ();
      BB_HEAD (b) = NULL;
      BB_END (a) = BB_END (b);
      delete_insn (insn);
    }

  /* Possible tablejumps and barriers should appear after the block.  */
  if (b->rbi->footer)
    {
      if (!a->rbi->footer)
        a->rbi->footer = b->rbi->footer;
      else
        {
          rtx last = a->rbi->footer;

          while (NEXT_INSN (last))
            last = NEXT_INSN (last);
          NEXT_INSN (last) = b->rbi->footer;
          PREV_INSN (b->rbi->footer) = last;
        }
      b->rbi->footer = NULL;
    }

  if (rtl_dump_file)
    fprintf (rtl_dump_file, "Merged blocks %d and %d.\n",
             a->index, b->index);

  update_cfg_after_block_merging (a, b);
}

/* Split edge E.  */
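/* The new block is created right after E->src (or at the start of the insn
   stream when E leaves the entry block), inherits E's count and frequency,
   and is connected to E's old destination by an unconditional fallthru
   edge; E itself is then forced to target the new block.  */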
static basic_block
cfg_layout_split_edge (edge e)
{
  edge new_e;
  basic_block new_bb =
    create_basic_block (e->src != ENTRY_BLOCK_PTR
                        ? NEXT_INSN (BB_END (e->src)) : get_insns (),
                        NULL_RTX, e->src);

  new_bb->count = e->count;
  new_bb->frequency = EDGE_FREQUENCY (e);

  /* ??? This info is likely going to be out of date very soon, but we must
     create it to avoid getting an ICE later.  */
  if (e->dest->global_live_at_start)
    {
      new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
      COPY_REG_SET (new_bb->global_live_at_start,
                    e->dest->global_live_at_start);
      COPY_REG_SET (new_bb->global_live_at_end,
                    e->dest->global_live_at_start);
    }

  new_e = make_edge (new_bb, e->dest, EDGE_FALLTHRU);
  new_e->probability = REG_BR_PROB_BASE;
  new_e->count = e->count;
  redirect_edge_and_branch_force (e, new_bb);

  return new_bb;
}

/* Implementation of CFG manipulation for linearized RTL.  */
struct cfg_hooks rtl_cfg_hooks = {
  rtl_verify_flow_info,
  rtl_dump_bb,
  rtl_create_basic_block,
  rtl_redirect_edge_and_branch,
  rtl_redirect_edge_and_branch_force,
  rtl_delete_block,
  rtl_split_block,
  rtl_can_merge_blocks, /* can_merge_blocks_p */
  rtl_merge_blocks,
  rtl_split_edge
};

/* Implementation of CFG manipulation for cfg layout RTL, where
   basic blocks connected via fallthru edges do not have to be adjacent.
   This representation will hopefully become the default one in future
   versions of the compiler.  */
struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  rtl_verify_flow_info_1,
  rtl_dump_bb,
  cfg_layout_create_basic_block,
  cfg_layout_redirect_edge_and_branch,
  cfg_layout_redirect_edge_and_branch_force,
  cfg_layout_delete_block,
  cfg_layout_split_block,
  cfg_layout_can_merge_blocks_p,
  cfg_layout_merge_blocks,
  cfg_layout_split_edge
};