gcc/cfgrtl.c
1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
25 Available functionality:
26 - Basic CFG/RTL manipulation API documented in cfghooks.h
27 - CFG-aware instruction chain manipulation
28 delete_insn, delete_insn_chain
29 - Edge splitting and committing to edges
30 insert_insn_on_edge, commit_edge_insertions
31 - CFG updating after insn simplification
32 purge_dead_edges, purge_all_dead_edges
 34      Functions not intended for generic use:
35 - Infrastructure to determine quickly basic block for insn
36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37 - Edge redirection with updating and optimizing of insn chain
38 block_label, tidy_fallthru_edge, force_nonfallthru */
40 #include "config.h"
41 #include "system.h"
42 #include "coretypes.h"
43 #include "tm.h"
44 #include "tree.h"
45 #include "rtl.h"
46 #include "hard-reg-set.h"
47 #include "basic-block.h"
48 #include "regs.h"
49 #include "flags.h"
50 #include "output.h"
51 #include "function.h"
52 #include "except.h"
53 #include "toplev.h"
54 #include "tm_p.h"
55 #include "obstack.h"
56 #include "insn-config.h"
57 #include "cfglayout.h"
58 #include "expr.h"
60 /* Stubs in case we don't have a return insn. */
61 #ifndef HAVE_return
62 #define HAVE_return 0
63 #define gen_return() NULL_RTX
64 #endif
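/* Having these stubs lets later code simply test HAVE_return at run time
   (see force_nonfallthru_and_redirect below) instead of wrapping every
   use in #ifdef HAVE_return.  */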
66 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
67 /* ??? Should probably be using LABEL_NUSES instead. It would take a
68 bit of surgery to be able to use or co-opt the routines in jump. */
69 rtx label_value_list;
70 rtx tail_recursion_label_list;
72 static int can_delete_note_p (rtx);
73 static int can_delete_label_p (rtx);
74 static void commit_one_edge_insertion (edge, int);
75 static rtx last_loop_beg_note (rtx);
76 static bool back_edge_of_syntactic_loop_p (basic_block, basic_block);
77 basic_block force_nonfallthru_and_redirect (edge, basic_block);
78 static basic_block rtl_split_edge (edge);
79 static bool rtl_move_block_after (basic_block, basic_block);
80 static int rtl_verify_flow_info (void);
81 static basic_block cfg_layout_split_block (basic_block, void *);
82 static bool cfg_layout_redirect_edge_and_branch (edge, basic_block);
83 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
84 static void cfg_layout_delete_block (basic_block);
85 static void rtl_delete_block (basic_block);
86 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
87 static bool rtl_redirect_edge_and_branch (edge, basic_block);
88 static basic_block rtl_split_block (basic_block, void *);
89 static void rtl_dump_bb (basic_block, FILE *, int);
90 static int rtl_verify_flow_info_1 (void);
91 static void mark_killed_regs (rtx, rtx, void *);
92 static void rtl_make_forwarder_block (edge);
94 /* Return true if NOTE is not one of the ones that must be kept paired,
95 so that we may simply delete it. */
97 static int
98 can_delete_note_p (rtx note)
100 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
101 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
102 || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
105 /* True if a given label can be deleted. */
107 static int
108 can_delete_label_p (rtx label)
110 return (!LABEL_PRESERVE_P (label)
111 /* User declared labels must be preserved. */
112 && LABEL_NAME (label) == 0
113 && !in_expr_list_p (forced_labels, label)
114 && !in_expr_list_p (label_value_list, label));
117 /* Delete INSN by patching it out. Return the next insn. */
120 delete_insn (rtx insn)
122 rtx next = NEXT_INSN (insn);
123 rtx note;
124 bool really_delete = true;
126 if (GET_CODE (insn) == CODE_LABEL)
128 /* Some labels can't be directly removed from the INSN chain, as they
 129      might be referenced via variables, the constant pool etc.
130 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
131 if (! can_delete_label_p (insn))
133 const char *name = LABEL_NAME (insn);
135 really_delete = false;
136 PUT_CODE (insn, NOTE);
137 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
138 NOTE_SOURCE_FILE (insn) = name;
141 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
144 if (really_delete)
146 /* If this insn has already been deleted, something is very wrong. */
147 if (INSN_DELETED_P (insn))
148 abort ();
149 remove_insn (insn);
150 INSN_DELETED_P (insn) = 1;
153 /* If deleting a jump, decrement the use count of the label. Deleting
154 the label itself should happen in the normal course of block merging. */
155 if (GET_CODE (insn) == JUMP_INSN
156 && JUMP_LABEL (insn)
157 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
158 LABEL_NUSES (JUMP_LABEL (insn))--;
160 /* Also if deleting an insn that references a label. */
161 else
163 while ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
164 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
166 LABEL_NUSES (XEXP (note, 0))--;
167 remove_note (insn, note);
171 if (GET_CODE (insn) == JUMP_INSN
172 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
173 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
175 rtx pat = PATTERN (insn);
176 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
177 int len = XVECLEN (pat, diff_vec_p);
178 int i;
180 for (i = 0; i < len; i++)
182 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
184 /* When deleting code in bulk (e.g. removing many unreachable
185 blocks) we can delete a label that's a target of the vector
186 before deleting the vector itself. */
187 if (GET_CODE (label) != NOTE)
188 LABEL_NUSES (label)--;
192 return next;
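/* Illustrative usage sketch (not code from this file): because delete_insn
   returns the insn that followed the deleted one, a hypothetical caller can
   prune a chain while walking it.  removable_p stands for whatever
   predicate that caller uses:

       rtx insn = get_insns ();
       while (insn)
	 {
	   if (removable_p (insn))
	     insn = delete_insn (insn);
	   else
	     insn = NEXT_INSN (insn);
	 }
*/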
195 /* Like delete_insn but also purge dead edges from BB. */
197 delete_insn_and_edges (rtx insn)
199 rtx x;
200 bool purge = false;
202 if (INSN_P (insn)
203 && BLOCK_FOR_INSN (insn)
204 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
205 purge = true;
206 x = delete_insn (insn);
207 if (purge)
208 purge_dead_edges (BLOCK_FOR_INSN (insn));
209 return x;
212 /* Unlink a chain of insns between START and FINISH, leaving notes
213 that must be paired. */
215 void
216 delete_insn_chain (rtx start, rtx finish)
218 rtx next;
220 /* Unchain the insns one by one. It would be quicker to delete all of these
221 with a single unchaining, rather than one at a time, but we need to keep
222 the NOTE's. */
223 while (1)
225 next = NEXT_INSN (start);
226 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
228 else
229 next = delete_insn (start);
231 if (start == finish)
232 break;
233 start = next;
 237 /* Like delete_insn_chain but also purge dead edges from BB.  */
238 void
239 delete_insn_chain_and_edges (rtx first, rtx last)
241 bool purge = false;
243 if (INSN_P (last)
244 && BLOCK_FOR_INSN (last)
245 && BB_END (BLOCK_FOR_INSN (last)) == last)
246 purge = true;
247 delete_insn_chain (first, last);
248 if (purge)
249 purge_dead_edges (BLOCK_FOR_INSN (last));
 252 /* Create a new basic block consisting of the instructions between HEAD and END
 253    inclusive.  This function is designed to allow fast BB construction - it reuses
 254    the note and basic block struct in BB_NOTE, if any, does not grow the
 255    BASIC_BLOCK chain, and should be used directly only by CFG construction code.
 256    END can be NULL to create a new empty basic block before HEAD.  Both END
 257    and HEAD can be NULL to create a basic block at the end of the INSN chain.
 258    AFTER is the basic block the new block should be put after.  */
260 basic_block
261 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
263 basic_block bb;
265 if (bb_note
266 && ! RTX_INTEGRATED_P (bb_note)
267 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
268 && bb->aux == NULL)
270 /* If we found an existing note, thread it back onto the chain. */
272 rtx after;
274 if (GET_CODE (head) == CODE_LABEL)
275 after = head;
276 else
278 after = PREV_INSN (head);
279 head = bb_note;
282 if (after != bb_note && NEXT_INSN (after) != bb_note)
283 reorder_insns_nobb (bb_note, bb_note, after);
285 else
287 /* Otherwise we must create a note and a basic block structure. */
289 bb = alloc_block ();
291 if (!head && !end)
292 head = end = bb_note
293 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
294 else if (GET_CODE (head) == CODE_LABEL && end)
296 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
297 if (head == end)
298 end = bb_note;
300 else
302 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
303 head = bb_note;
304 if (!end)
305 end = head;
308 NOTE_BASIC_BLOCK (bb_note) = bb;
311 /* Always include the bb note in the block. */
312 if (NEXT_INSN (end) == bb_note)
313 end = bb_note;
315 BB_HEAD (bb) = head;
316 BB_END (bb) = end;
317 bb->index = last_basic_block++;
318 bb->flags = BB_NEW;
319 link_block (bb, after);
320 BASIC_BLOCK (bb->index) = bb;
321 update_bb_for_insn (bb);
323 /* Tag the block so that we know it has been used when considering
324 other basic block notes. */
325 bb->aux = bb;
327 return bb;
 330 /* Create a new basic block consisting of the instructions between HEAD and END
 331    and place it in the BB chain after block AFTER.  END can be NULL to
 332    create a new empty basic block before HEAD.  Both END and HEAD can be NULL to
 333    create a basic block at the end of the INSN chain.  */
335 static basic_block
336 rtl_create_basic_block (void *headp, void *endp, basic_block after)
338 rtx head = headp, end = endp;
339 basic_block bb;
341 /* Place the new block just after the end. */
342 VARRAY_GROW (basic_block_info, last_basic_block + 1);
344 n_basic_blocks++;
346 bb = create_basic_block_structure (head, end, NULL, after);
347 bb->aux = NULL;
348 return bb;
351 static basic_block
352 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
354 basic_block newbb = rtl_create_basic_block (head, end, after);
356 cfg_layout_initialize_rbi (newbb);
357 return newbb;
 360 /* Delete the insns in a (non-live) block.  We physically delete every
 361    non-deleted-note insn, and update the flow graph appropriately.  */
365 /* ??? Preserving all such notes strikes me as wrong. It would be nice
366 to post-process the stream to remove empty blocks, loops, ranges, etc. */
368 static void
369 rtl_delete_block (basic_block b)
371 rtx insn, end, tmp;
373 /* If the head of this block is a CODE_LABEL, then it might be the
374 label for an exception handler which can't be reached.
376 We need to remove the label from the exception_handler_label list
377 and remove the associated NOTE_INSN_EH_REGION_BEG and
378 NOTE_INSN_EH_REGION_END notes. */
380 /* Get rid of all NOTE_INSN_PREDICTIONs and NOTE_INSN_LOOP_CONTs
381 hanging before the block. */
383 for (insn = PREV_INSN (BB_HEAD (b)); insn; insn = PREV_INSN (insn))
385 if (GET_CODE (insn) != NOTE)
386 break;
387 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION
388 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
389 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
392 insn = BB_HEAD (b);
394 never_reached_warning (insn, BB_END (b));
396 if (GET_CODE (insn) == CODE_LABEL)
397 maybe_remove_eh_handler (insn);
399 /* Include any jump table following the basic block. */
400 end = BB_END (b);
401 if (tablejump_p (end, NULL, &tmp))
402 end = tmp;
404 /* Include any barrier that may follow the basic block. */
405 tmp = next_nonnote_insn (end);
406 if (tmp && GET_CODE (tmp) == BARRIER)
407 end = tmp;
409 /* Selectively delete the entire chain. */
410 BB_HEAD (b) = NULL;
411 delete_insn_chain (insn, end);
414 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
416 void
417 compute_bb_for_insn (void)
419 basic_block bb;
421 FOR_EACH_BB (bb)
423 rtx end = BB_END (bb);
424 rtx insn;
426 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
428 BLOCK_FOR_INSN (insn) = bb;
429 if (insn == end)
430 break;
435 /* Release the basic_block_for_insn array. */
437 void
438 free_bb_for_insn (void)
440 rtx insn;
441 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
442 if (GET_CODE (insn) != BARRIER)
443 BLOCK_FOR_INSN (insn) = NULL;
 446 /* Update the block membership (BLOCK_FOR_INSN) of the insns within BB.  */
448 void
449 update_bb_for_insn (basic_block bb)
451 rtx insn;
453 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
455 if (GET_CODE (insn) != BARRIER)
456 set_block_for_insn (insn, bb);
457 if (insn == BB_END (bb))
458 break;
 462 /* Creates a new basic block just after basic block BB by splitting
 463    everything after the specified instruction INSNP.  */
465 static basic_block
466 rtl_split_block (basic_block bb, void *insnp)
468 basic_block new_bb;
469 rtx insn = insnp;
470 edge e;
472 if (!insn)
474 insn = first_insn_after_basic_block_note (bb);
476 if (insn)
477 insn = PREV_INSN (insn);
478 else
479 insn = get_last_insn ();
 482   /* We probably should check the type of the insn so that we do not create
 483      an inconsistent cfg.  It is checked in verify_flow_info anyway, so do not
 484      bother.  */
485 if (insn == BB_END (bb))
486 emit_note_after (NOTE_INSN_DELETED, insn);
488 /* Create the new basic block. */
489 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
490 BB_END (bb) = insn;
492 /* Redirect the outgoing edges. */
493 new_bb->succ = bb->succ;
494 bb->succ = NULL;
495 for (e = new_bb->succ; e; e = e->succ_next)
496 e->src = new_bb;
498 if (bb->global_live_at_start)
500 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
501 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
502 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
504 /* We now have to calculate which registers are live at the end
505 of the split basic block and at the start of the new basic
506 block. Start with those registers that are known to be live
507 at the end of the original basic block and get
508 propagate_block to determine which registers are live. */
509 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
510 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
511 COPY_REG_SET (bb->global_live_at_end,
512 new_bb->global_live_at_start);
513 #ifdef HAVE_conditional_execution
514 /* In the presence of conditional execution we are not able to update
515 liveness precisely. */
516 if (reload_completed)
518 bb->flags |= BB_DIRTY;
519 new_bb->flags |= BB_DIRTY;
521 #endif
524 return new_bb;
527 /* Blocks A and B are to be merged into a single block A. The insns
528 are already contiguous. */
530 static void
531 rtl_merge_blocks (basic_block a, basic_block b)
533 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
534 rtx del_first = NULL_RTX, del_last = NULL_RTX;
535 int b_empty = 0;
537 /* If there was a CODE_LABEL beginning B, delete it. */
538 if (GET_CODE (b_head) == CODE_LABEL)
540 /* Detect basic blocks with nothing but a label. This can happen
541 in particular at the end of a function. */
542 if (b_head == b_end)
543 b_empty = 1;
545 del_first = del_last = b_head;
546 b_head = NEXT_INSN (b_head);
549 /* Delete the basic block note and handle blocks containing just that
550 note. */
551 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
553 if (b_head == b_end)
554 b_empty = 1;
555 if (! del_last)
556 del_first = b_head;
558 del_last = b_head;
559 b_head = NEXT_INSN (b_head);
562 /* If there was a jump out of A, delete it. */
563 if (GET_CODE (a_end) == JUMP_INSN)
565 rtx prev;
567 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
568 if (GET_CODE (prev) != NOTE
569 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
570 || prev == BB_HEAD (a))
571 break;
573 del_first = a_end;
575 #ifdef HAVE_cc0
576 /* If this was a conditional jump, we need to also delete
577 the insn that set cc0. */
578 if (only_sets_cc0_p (prev))
580 rtx tmp = prev;
582 prev = prev_nonnote_insn (prev);
583 if (!prev)
584 prev = BB_HEAD (a);
585 del_first = tmp;
587 #endif
589 a_end = PREV_INSN (del_first);
591 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
592 del_first = NEXT_INSN (a_end);
594 /* Delete everything marked above as well as crap that might be
595 hanging out between the two blocks. */
596 BB_HEAD (b) = NULL;
597 delete_insn_chain (del_first, del_last);
599 /* Reassociate the insns of B with A. */
600 if (!b_empty)
602 rtx x;
604 for (x = a_end; x != b_end; x = NEXT_INSN (x))
605 set_block_for_insn (x, a);
607 set_block_for_insn (b_end, a);
609 a_end = b_end;
612 BB_END (a) = a_end;
 615 /* Return true when blocks A and B can be merged.  */
616 static bool
617 rtl_can_merge_blocks (basic_block a,basic_block b)
619 /* There must be exactly one edge in between the blocks. */
620 return (a->succ && !a->succ->succ_next && a->succ->dest == b
621 && !b->pred->pred_next && a != b
622 /* Must be simple edge. */
623 && !(a->succ->flags & EDGE_COMPLEX)
624 && a->next_bb == b
625 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
626 /* If the jump insn has side effects,
627 we can't kill the edge. */
628 && (GET_CODE (BB_END (a)) != JUMP_INSN
629 || (reload_completed
630 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
633 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
634 exist. */
637 block_label (basic_block block)
639 if (block == EXIT_BLOCK_PTR)
640 return NULL_RTX;
642 if (GET_CODE (BB_HEAD (block)) != CODE_LABEL)
644 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
647 return BB_HEAD (block);
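/* Note for callers: when a new jump to the label returned here is emitted
   directly (rather than through redirect_jump), the caller is responsible
   for bumping LABEL_NUSES on that label; see try_redirect_by_replacing_jump
   and force_nonfallthru_and_redirect below for examples.  */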
 650 /* Attempt to perform edge redirection by replacing a possibly complex jump
 651    instruction with an unconditional jump, or by removing the jump completely.
 652    This can apply only if all edges now point to the same block.  The parameters
 653    and return values are equivalent to redirect_edge_and_branch.  */
655 bool
656 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
658 basic_block src = e->src;
659 rtx insn = BB_END (src), kill_from;
660 edge tmp;
661 rtx set;
662 int fallthru = 0;
664 /* Verify that all targets will be TARGET. */
665 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
666 if (tmp->dest != target && tmp != e)
667 break;
669 if (tmp || !onlyjump_p (insn))
670 return false;
671 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
672 return false;
674 /* Avoid removing branch with side effects. */
675 set = single_set (insn);
676 if (!set || side_effects_p (set))
677 return false;
679 /* In case we zap a conditional jump, we'll need to kill
680 the cc0 setter too. */
681 kill_from = insn;
682 #ifdef HAVE_cc0
683 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
684 kill_from = PREV_INSN (insn);
685 #endif
687 /* See if we can create the fallthru edge. */
688 if (in_cfglayout || can_fallthru (src, target))
690 if (dump_file)
691 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
692 fallthru = 1;
694 /* Selectively unlink whole insn chain. */
695 if (in_cfglayout)
697 rtx insn = src->rbi->footer;
699 delete_insn_chain (kill_from, BB_END (src));
701 /* Remove barriers but keep jumptables. */
702 while (insn)
704 if (GET_CODE (insn) == BARRIER)
706 if (PREV_INSN (insn))
707 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
708 else
709 src->rbi->footer = NEXT_INSN (insn);
710 if (NEXT_INSN (insn))
711 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
713 if (GET_CODE (insn) == CODE_LABEL)
714 break;
715 insn = NEXT_INSN (insn);
718 else
719 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)));
722 /* If this already is simplejump, redirect it. */
723 else if (simplejump_p (insn))
725 if (e->dest == target)
726 return false;
727 if (dump_file)
728 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
729 INSN_UID (insn), e->dest->index, target->index);
730 if (!redirect_jump (insn, block_label (target), 0))
732 if (target == EXIT_BLOCK_PTR)
733 return false;
734 abort ();
738 /* Cannot do anything for target exit block. */
739 else if (target == EXIT_BLOCK_PTR)
740 return false;
742 /* Or replace possibly complicated jump insn by simple jump insn. */
743 else
745 rtx target_label = block_label (target);
746 rtx barrier, label, table;
748 emit_jump_insn_after (gen_jump (target_label), insn);
749 JUMP_LABEL (BB_END (src)) = target_label;
750 LABEL_NUSES (target_label)++;
751 if (dump_file)
752 fprintf (dump_file, "Replacing insn %i by jump %i\n",
753 INSN_UID (insn), INSN_UID (BB_END (src)));
756 delete_insn_chain (kill_from, insn);
758 /* Recognize a tablejump that we are converting to a
759 simple jump and remove its associated CODE_LABEL
760 and ADDR_VEC or ADDR_DIFF_VEC. */
761 if (tablejump_p (insn, &label, &table))
762 delete_insn_chain (label, table);
764 barrier = next_nonnote_insn (BB_END (src));
765 if (!barrier || GET_CODE (barrier) != BARRIER)
766 emit_barrier_after (BB_END (src));
767 else
769 if (barrier != NEXT_INSN (BB_END (src)))
 771 	      /* Move the jump before the barrier so that the notes which
 772 		 originally were, or were created, before the jump table are
 773 		 inside the basic block.  */
774 rtx new_insn = BB_END (src);
775 rtx tmp;
777 for (tmp = NEXT_INSN (BB_END (src)); tmp != barrier;
778 tmp = NEXT_INSN (tmp))
779 set_block_for_insn (tmp, src);
781 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
782 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
784 NEXT_INSN (new_insn) = barrier;
785 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
787 PREV_INSN (new_insn) = PREV_INSN (barrier);
788 PREV_INSN (barrier) = new_insn;
793 /* Keep only one edge out and set proper flags. */
794 while (src->succ->succ_next)
795 remove_edge (src->succ);
796 e = src->succ;
797 if (fallthru)
798 e->flags = EDGE_FALLTHRU;
799 else
800 e->flags = 0;
802 e->probability = REG_BR_PROB_BASE;
803 e->count = src->count;
805 /* We don't want a block to end on a line-number note since that has
806 the potential of changing the code between -g and not -g. */
807 while (GET_CODE (BB_END (e->src)) == NOTE
808 && NOTE_LINE_NUMBER (BB_END (e->src)) >= 0)
809 delete_insn (BB_END (e->src));
811 if (e->dest != target)
812 redirect_edge_succ (e, target);
814 return true;
 817 /* Return the last LOOP_BEG note appearing after INSN, before the start of the
 818    next basic block.  Return INSN if there are no such notes.
 820    When emitting a jump to redirect a fallthru edge, it should always appear
 821    after the LOOP_BEG notes, as the loop optimizer expects the loop either to
 822    start with a fallthru edge or with a jump, following the LOOP_BEG note, to
 823    the loop exit test.  */
825 static rtx
826 last_loop_beg_note (rtx insn)
828 rtx last = insn;
830 for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
831 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
832 insn = NEXT_INSN (insn))
833 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
834 last = insn;
836 return last;
839 /* Redirect edge representing branch of (un)conditional jump or tablejump. */
840 static bool
841 redirect_branch_edge (edge e, basic_block target)
843 rtx tmp;
844 rtx old_label = BB_HEAD (e->dest);
845 basic_block src = e->src;
846 rtx insn = BB_END (src);
848 /* We can only redirect non-fallthru edges of jump insn. */
849 if (e->flags & EDGE_FALLTHRU)
850 return false;
851 else if (GET_CODE (insn) != JUMP_INSN)
852 return false;
854 /* Recognize a tablejump and adjust all matching cases. */
855 if (tablejump_p (insn, NULL, &tmp))
857 rtvec vec;
858 int j;
859 rtx new_label = block_label (target);
861 if (target == EXIT_BLOCK_PTR)
862 return false;
863 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
864 vec = XVEC (PATTERN (tmp), 0);
865 else
866 vec = XVEC (PATTERN (tmp), 1);
868 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
869 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
871 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
872 --LABEL_NUSES (old_label);
873 ++LABEL_NUSES (new_label);
876 /* Handle casesi dispatch insns. */
877 if ((tmp = single_set (insn)) != NULL
878 && SET_DEST (tmp) == pc_rtx
879 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
880 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
881 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
883 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
884 new_label);
885 --LABEL_NUSES (old_label);
886 ++LABEL_NUSES (new_label);
889 else
 891       /* ?? We may play games with moving the named labels from
 892 	 one basic block to the other in case only one computed_jump is
 893 	 available.  */
894 if (computed_jump_p (insn)
895 /* A return instruction can't be redirected. */
896 || returnjump_p (insn))
897 return false;
899 /* If the insn doesn't go where we think, we're confused. */
900 if (JUMP_LABEL (insn) != old_label)
901 abort ();
903 /* If the substitution doesn't succeed, die. This can happen
904 if the back end emitted unrecognizable instructions or if
905 target is exit block on some arches. */
906 if (!redirect_jump (insn, block_label (target), 0))
908 if (target == EXIT_BLOCK_PTR)
909 return false;
910 abort ();
914 if (dump_file)
915 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
916 e->src->index, e->dest->index, target->index);
918 if (e->dest != target)
919 redirect_edge_succ_nodup (e, target);
920 return true;
 923 /* Attempt to change code to redirect edge E to TARGET.  Don't do that at the
 924    expense of adding new instructions or reordering basic blocks.
 926    The function can also be called with the edge destination equal to TARGET.
 927    In that case it should try the simplifications and do nothing if none is possible.
 929    Return true if the transformation succeeded.  We still return false in case E
 930    already pointed to TARGET and we did not manage to simplify the instruction
 931    stream.  */
933 static bool
934 rtl_redirect_edge_and_branch (edge e, basic_block target)
936 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
937 return false;
939 if (e->dest == target)
940 return true;
942 if (try_redirect_by_replacing_jump (e, target, false))
943 return true;
945 if (!redirect_branch_edge (e, target))
946 return false;
948 return true;
 951 /* Like force_nonfallthru below, but additionally performs redirection.
952 Used by redirect_edge_and_branch_force. */
954 basic_block
955 force_nonfallthru_and_redirect (edge e, basic_block target)
957 basic_block jump_block, new_bb = NULL, src = e->src;
958 rtx note;
959 edge new_edge;
960 int abnormal_edge_flags = 0;
962 /* In the case the last instruction is conditional jump to the next
963 instruction, first redirect the jump itself and then continue
964 by creating a basic block afterwards to redirect fallthru edge. */
965 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
966 && any_condjump_p (BB_END (e->src))
967 /* When called from cfglayout, fallthru edges do not
968 necessarily go to the next block. */
969 && e->src->next_bb == e->dest
970 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
972 rtx note;
973 edge b = unchecked_make_edge (e->src, target, 0);
975 if (!redirect_jump (BB_END (e->src), block_label (target), 0))
976 abort ();
977 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
978 if (note)
980 int prob = INTVAL (XEXP (note, 0));
982 b->probability = prob;
983 b->count = e->count * prob / REG_BR_PROB_BASE;
984 e->probability -= e->probability;
985 e->count -= b->count;
986 if (e->probability < 0)
987 e->probability = 0;
988 if (e->count < 0)
989 e->count = 0;
993 if (e->flags & EDGE_ABNORMAL)
 995       /* Irritating special case - a fallthru edge to the same block as an abnormal
 996 	 edge.
 997 	 We can't redirect the abnormal edge, but we can still split the fallthru
 998 	 one and create a separate abnormal edge to the original destination.
 999 	 This allows bb-reorder to make such an edge non-fallthru.  */
1000 if (e->dest != target)
1001 abort ();
1002 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1003 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1005 else if (!(e->flags & EDGE_FALLTHRU))
1006 abort ();
1007 else if (e->src == ENTRY_BLOCK_PTR)
1009 /* We can't redirect the entry block. Create an empty block at the
1010 start of the function which we use to add the new jump. */
1011 edge *pe1;
1012 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1014 /* Change the existing edge's source to be the new block, and add
1015 a new edge from the entry block to the new block. */
1016 e->src = bb;
1017 for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
1018 if (*pe1 == e)
1020 *pe1 = e->succ_next;
1021 break;
1023 e->succ_next = 0;
1024 bb->succ = e;
1025 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1028 if (e->src->succ->succ_next || abnormal_edge_flags)
1030 /* Create the new structures. */
1032 /* If the old block ended with a tablejump, skip its table
1033 by searching forward from there. Otherwise start searching
1034 forward from the last instruction of the old block. */
1035 if (!tablejump_p (BB_END (e->src), NULL, &note))
1036 note = BB_END (e->src);
1038 /* Position the new block correctly relative to loop notes. */
1039 note = last_loop_beg_note (note);
1040 note = NEXT_INSN (note);
1042 jump_block = create_basic_block (note, NULL, e->src);
1043 jump_block->count = e->count;
1044 jump_block->frequency = EDGE_FREQUENCY (e);
1045 jump_block->loop_depth = target->loop_depth;
1047 if (target->global_live_at_start)
1049 jump_block->global_live_at_start
1050 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1051 jump_block->global_live_at_end
1052 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1053 COPY_REG_SET (jump_block->global_live_at_start,
1054 target->global_live_at_start);
1055 COPY_REG_SET (jump_block->global_live_at_end,
1056 target->global_live_at_start);
1059 /* Wire edge in. */
1060 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1061 new_edge->probability = e->probability;
1062 new_edge->count = e->count;
1064 /* Redirect old edge. */
1065 redirect_edge_pred (e, jump_block);
1066 e->probability = REG_BR_PROB_BASE;
1068 new_bb = jump_block;
1070 else
1071 jump_block = e->src;
1073 e->flags &= ~EDGE_FALLTHRU;
1074 if (target == EXIT_BLOCK_PTR)
1076 if (HAVE_return)
1077 emit_jump_insn_after (gen_return (), BB_END (jump_block));
1078 else
1079 abort ();
1081 else
1083 rtx label = block_label (target);
1084 emit_jump_insn_after (gen_jump (label), BB_END (jump_block));
1085 JUMP_LABEL (BB_END (jump_block)) = label;
1086 LABEL_NUSES (label)++;
1089 emit_barrier_after (BB_END (jump_block));
1090 redirect_edge_succ_nodup (e, target);
1092 if (abnormal_edge_flags)
1093 make_edge (src, target, abnormal_edge_flags);
1095 return new_bb;
 1098 /* Edge E is assumed to be a fallthru edge.  Emit the needed jump instruction
 1099    (and possibly create a new basic block) to make the edge non-fallthru.
 1100    Return the newly created BB, or NULL if none.  */
1102 basic_block
1103 force_nonfallthru (edge e)
1105 return force_nonfallthru_and_redirect (e, e->dest);
1108 /* Redirect edge even at the expense of creating new jump insn or
1109 basic block. Return new basic block if created, NULL otherwise.
1110 Abort if conversion is impossible. */
1112 static basic_block
1113 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1115 if (redirect_edge_and_branch (e, target)
1116 || e->dest == target)
1117 return NULL;
1119 /* In case the edge redirection failed, try to force it to be non-fallthru
1120 and redirect newly created simplejump. */
1121 return force_nonfallthru_and_redirect (e, target);
1124 /* The given edge should potentially be a fallthru edge. If that is in
1125 fact true, delete the jump and barriers that are in the way. */
1127 static void
1128 rtl_tidy_fallthru_edge (edge e)
1130 rtx q;
1131 basic_block b = e->src, c = b->next_bb;
1133 /* ??? In a late-running flow pass, other folks may have deleted basic
1134 blocks by nopping out blocks, leaving multiple BARRIERs between here
 1135      and the target label. They ought to be chastised and fixed.
1137 We can also wind up with a sequence of undeletable labels between
1138 one block and the next.
1140 So search through a sequence of barriers, labels, and notes for
1141 the head of block C and assert that we really do fall through. */
1143 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1144 if (INSN_P (q))
1145 return;
1147 /* Remove what will soon cease being the jump insn from the source block.
1148 If block B consisted only of this single jump, turn it into a deleted
1149 note. */
1150 q = BB_END (b);
1151 if (GET_CODE (q) == JUMP_INSN
1152 && onlyjump_p (q)
1153 && (any_uncondjump_p (q)
1154 || (b->succ == e && e->succ_next == NULL)))
1156 #ifdef HAVE_cc0
1157 /* If this was a conditional jump, we need to also delete
1158 the insn that set cc0. */
1159 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1160 q = PREV_INSN (q);
1161 #endif
1163 q = PREV_INSN (q);
1165 /* We don't want a block to end on a line-number note since that has
1166 the potential of changing the code between -g and not -g. */
1167 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
1168 q = PREV_INSN (q);
1171 /* Selectively unlink the sequence. */
1172 if (q != PREV_INSN (BB_HEAD (c)))
1173 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)));
1175 e->flags |= EDGE_FALLTHRU;
 1178 /* Helper function for split_edge.  Return true in case the edge from BB2 to
 1179    BB1 is a back edge of a syntactic loop.  */
1181 static bool
1182 back_edge_of_syntactic_loop_p (basic_block bb1, basic_block bb2)
1184 rtx insn;
1185 int count = 0;
1186 basic_block bb;
1188 if (bb1 == bb2)
1189 return true;
1191 /* ??? Could we guarantee that bb indices are monotone, so that we could
1192 just compare them? */
1193 for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
1194 continue;
1196 if (!bb)
1197 return false;
1199 for (insn = BB_END (bb1); insn != BB_HEAD (bb2) && count >= 0;
1200 insn = NEXT_INSN (insn))
1201 if (GET_CODE (insn) == NOTE)
1203 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1204 count++;
1205 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1206 count--;
1209 return count >= 0;
 1212 /* Should move basic block BB after basic block AFTER.  Not implemented yet (NIY).  */
1214 static bool
1215 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1216 basic_block after ATTRIBUTE_UNUSED)
1218 return false;
1221 /* Split a (typically critical) edge. Return the new block.
1222 Abort on abnormal edges.
1224 ??? The code generally expects to be called on critical edges.
1225 The case of a block ending in an unconditional jump to a
1226 block with multiple predecessors is not handled optimally. */
1228 static basic_block
1229 rtl_split_edge (edge edge_in)
1231 basic_block bb;
1232 rtx before;
1234 /* Abnormal edges cannot be split. */
1235 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1236 abort ();
1238 /* We are going to place the new block in front of edge destination.
1239 Avoid existence of fallthru predecessors. */
1240 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1242 edge e;
1244 for (e = edge_in->dest->pred; e; e = e->pred_next)
1245 if (e->flags & EDGE_FALLTHRU)
1246 break;
1248 if (e)
1249 force_nonfallthru (e);
1252 /* Create the basic block note.
1254 Where we place the note can have a noticeable impact on the generated
 1255      code.  Consider this cfg, in which block 0 precedes block 1 and block 1
           heads a loop (its back edge is the one drawn returning to it):
 1261 		+->1-->2--->E
 		|  |
 1263 		+--+
1265 If we need to insert an insn on the edge from block 0 to block 1,
1266 we want to ensure the instructions we insert are outside of any
1267 loop notes that physically sit between block 0 and block 1. Otherwise
1268 we confuse the loop optimizer into thinking the loop is a phony. */
1270 if (edge_in->dest != EXIT_BLOCK_PTR
1271 && PREV_INSN (BB_HEAD (edge_in->dest))
1272 && GET_CODE (PREV_INSN (BB_HEAD (edge_in->dest))) == NOTE
1273 && (NOTE_LINE_NUMBER (PREV_INSN (BB_HEAD (edge_in->dest)))
1274 == NOTE_INSN_LOOP_BEG)
1275 && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
1276 before = PREV_INSN (BB_HEAD (edge_in->dest));
1277 else if (edge_in->dest != EXIT_BLOCK_PTR)
1278 before = BB_HEAD (edge_in->dest);
1279 else
1280 before = NULL_RTX;
1282 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1284 /* ??? This info is likely going to be out of date very soon. */
1285 if (edge_in->dest->global_live_at_start)
1287 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1288 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1289 COPY_REG_SET (bb->global_live_at_start,
1290 edge_in->dest->global_live_at_start);
1291 COPY_REG_SET (bb->global_live_at_end,
1292 edge_in->dest->global_live_at_start);
1295 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1297 /* For non-fallthru edges, we must adjust the predecessor's
1298 jump instruction to target our new block. */
1299 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1301 if (!redirect_edge_and_branch (edge_in, bb))
1302 abort ();
1304 else
1305 redirect_edge_succ (edge_in, bb);
1307 return bb;
1310 /* Queue instructions for insertion on an edge between two basic blocks.
1311 The new instructions and basic blocks (if any) will not appear in the
1312 CFG until commit_edge_insertions is called. */
1314 void
1315 insert_insn_on_edge (rtx pattern, edge e)
1317 /* We cannot insert instructions on an abnormal critical edge.
1318 It will be easier to find the culprit if we die now. */
1319 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
1320 abort ();
1322 if (e->insns == NULL_RTX)
1323 start_sequence ();
1324 else
1325 push_to_sequence (e->insns);
1327 emit_insn (pattern);
1329 e->insns = get_insns ();
1330 end_sequence ();
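/* Illustrative usage sketch of the queue-and-commit pattern implemented
   above.  dest_reg, src_reg, seq and e are placeholders for this example,
   not code from this file:

       start_sequence ();
       emit_move_insn (dest_reg, src_reg);
       seq = get_insns ();
       end_sequence ();
       insert_insn_on_edge (seq, e);
       ...
       commit_edge_insertions ();

   The queued insns only appear in the insn stream, and any new basic
   blocks are only created, when commit_edge_insertions is called.  */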
1333 /* Called from safe_insert_insn_on_edge through note_stores, marks live
1334 registers that are killed by the store. */
1335 static void
1336 mark_killed_regs (rtx reg, rtx set ATTRIBUTE_UNUSED, void *data)
1338 regset killed = data;
1339 int regno, i;
1341 if (GET_CODE (reg) == SUBREG)
1342 reg = SUBREG_REG (reg);
1343 if (!REG_P (reg))
1344 return;
1345 regno = REGNO (reg);
1346 if (regno >= FIRST_PSEUDO_REGISTER)
1347 SET_REGNO_REG_SET (killed, regno);
1348 else
1350 for (i = 0; i < (int) hard_regno_nregs[regno][GET_MODE (reg)]; i++)
1351 SET_REGNO_REG_SET (killed, regno + i);
 1355 /* Similar to insert_insn_on_edge; tries to put INSN on edge E.  Additionally
 1356    it checks whether this would clobber the registers that are live on the
 1357    edge (i.e. it requires liveness information to be up-to-date) and if there
 1358    are some, then it tries to save and restore them.  Returns true if
 1359    successful.  */
1360 bool
1361 safe_insert_insn_on_edge (rtx insn, edge e)
1363 rtx x;
1364 regset_head killed_head;
1365 regset killed = INITIALIZE_REG_SET (killed_head);
1366 rtx save_regs = NULL_RTX;
1367 int regno, noccmode;
1368 enum machine_mode mode;
1370 #ifdef AVOID_CCMODE_COPIES
1371 noccmode = true;
1372 #else
1373 noccmode = false;
1374 #endif
1376 for (x = insn; x; x = NEXT_INSN (x))
1377 if (INSN_P (x))
1378 note_stores (PATTERN (x), mark_killed_regs, killed);
1379 bitmap_operation (killed, killed, e->dest->global_live_at_start,
1380 BITMAP_AND);
1382 EXECUTE_IF_SET_IN_REG_SET (killed, 0, regno,
1384 mode = regno < FIRST_PSEUDO_REGISTER
1385 ? reg_raw_mode[regno]
1386 : GET_MODE (regno_reg_rtx[regno]);
1387 if (mode == VOIDmode)
1388 return false;
1390 if (noccmode && mode == CCmode)
1391 return false;
1393 save_regs = alloc_EXPR_LIST (0,
1394 alloc_EXPR_LIST (0,
1395 gen_reg_rtx (mode),
1396 gen_raw_REG (mode, regno)),
1397 save_regs);
1400 if (save_regs)
1402 rtx from, to;
1404 start_sequence ();
1405 for (x = save_regs; x; x = XEXP (x, 1))
1407 from = XEXP (XEXP (x, 0), 1);
1408 to = XEXP (XEXP (x, 0), 0);
1409 emit_move_insn (to, from);
1411 emit_insn (insn);
1412 for (x = save_regs; x; x = XEXP (x, 1))
1414 from = XEXP (XEXP (x, 0), 0);
1415 to = XEXP (XEXP (x, 0), 1);
1416 emit_move_insn (to, from);
1418 insn = get_insns ();
1419 end_sequence ();
1420 free_EXPR_LIST_list (&save_regs);
1422 insert_insn_on_edge (insn, e);
1424 FREE_REG_SET (killed);
1425 return true;
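/* Illustrative usage sketch: a pass that must not clobber registers live on
   edge E might wrap a single move this way (gen_move_insn builds the move
   insns; dest_reg, src_reg and give_up are placeholders for this example):

       if (!safe_insert_insn_on_edge (gen_move_insn (dest_reg, src_reg), e))
	 give_up ();   -- the insertion was refused because a clobbered
			  live register could not be saved and restored
*/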
1428 /* Update the CFG for the instructions queued on edge E. */
1430 static void
1431 commit_one_edge_insertion (edge e, int watch_calls)
1433 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1434 basic_block bb = NULL;
1436 /* Pull the insns off the edge now since the edge might go away. */
1437 insns = e->insns;
1438 e->insns = NULL_RTX;
1440 /* Special case -- avoid inserting code between call and storing
1441 its return value. */
1442 if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
1443 && e->src != ENTRY_BLOCK_PTR
1444 && GET_CODE (BB_END (e->src)) == CALL_INSN)
1446 rtx next = next_nonnote_insn (BB_END (e->src));
1448 after = BB_HEAD (e->dest);
1449 /* The first insn after the call may be a stack pop, skip it. */
1450 while (next
1451 && keep_with_call_p (next))
1453 after = next;
1454 next = next_nonnote_insn (next);
1456 bb = e->dest;
1458 if (!before && !after)
1460 /* Figure out where to put these things. If the destination has
1461 one predecessor, insert there. Except for the exit block. */
1462 if (e->dest->pred->pred_next == NULL && e->dest != EXIT_BLOCK_PTR)
1464 bb = e->dest;
1466 /* Get the location correct wrt a code label, and "nice" wrt
1467 a basic block note, and before everything else. */
1468 tmp = BB_HEAD (bb);
1469 if (GET_CODE (tmp) == CODE_LABEL)
1470 tmp = NEXT_INSN (tmp);
1471 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1472 tmp = NEXT_INSN (tmp);
1473 if (tmp == BB_HEAD (bb))
1474 before = tmp;
1475 else if (tmp)
1476 after = PREV_INSN (tmp);
1477 else
1478 after = get_last_insn ();
1481 /* If the source has one successor and the edge is not abnormal,
1482 insert there. Except for the entry block. */
1483 else if ((e->flags & EDGE_ABNORMAL) == 0
1484 && e->src->succ->succ_next == NULL
1485 && e->src != ENTRY_BLOCK_PTR)
1487 bb = e->src;
1489 /* It is possible to have a non-simple jump here. Consider a target
1490 where some forms of unconditional jumps clobber a register. This
1491 happens on the fr30 for example.
1493 We know this block has a single successor, so we can just emit
1494 the queued insns before the jump. */
1495 if (GET_CODE (BB_END (bb)) == JUMP_INSN)
1496 for (before = BB_END (bb);
1497 GET_CODE (PREV_INSN (before)) == NOTE
1498 && NOTE_LINE_NUMBER (PREV_INSN (before)) ==
1499 NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
1501 else
1503 /* We'd better be fallthru, or we've lost track of what's what. */
1504 if ((e->flags & EDGE_FALLTHRU) == 0)
1505 abort ();
1507 after = BB_END (bb);
1510 /* Otherwise we must split the edge. */
1511 else
1513 bb = split_edge (e);
1514 after = BB_END (bb);
1518 /* Now that we've found the spot, do the insertion. */
1520 if (before)
1522 emit_insn_before (insns, before);
1523 last = prev_nonnote_insn (before);
1525 else
1526 last = emit_insn_after (insns, after);
1528 if (returnjump_p (last))
1530 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1531 This is not currently a problem because this only happens
1532 for the (single) epilogue, which already has a fallthru edge
1533 to EXIT. */
1535 e = bb->succ;
1536 if (e->dest != EXIT_BLOCK_PTR
1537 || e->succ_next != NULL || (e->flags & EDGE_FALLTHRU) == 0)
1538 abort ();
1540 e->flags &= ~EDGE_FALLTHRU;
1541 emit_barrier_after (last);
1543 if (before)
1544 delete_insn (before);
1546 else if (GET_CODE (last) == JUMP_INSN)
1547 abort ();
1549 /* Mark the basic block for find_sub_basic_blocks. */
1550 bb->aux = &bb->aux;
1553 /* Update the CFG for all queued instructions. */
1555 void
1556 commit_edge_insertions (void)
1558 basic_block bb;
1559 sbitmap blocks;
1560 bool changed = false;
1562 #ifdef ENABLE_CHECKING
1563 verify_flow_info ();
1564 #endif
1566 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1568 edge e, next;
1570 for (e = bb->succ; e; e = next)
1572 next = e->succ_next;
1573 if (e->insns)
1575 changed = true;
1576 commit_one_edge_insertion (e, false);
1581 if (!changed)
1582 return;
1584 blocks = sbitmap_alloc (last_basic_block);
1585 sbitmap_zero (blocks);
1586 FOR_EACH_BB (bb)
1587 if (bb->aux)
1589 SET_BIT (blocks, bb->index);
1590 /* Check for forgotten bb->aux values before commit_edge_insertions
1591 call. */
1592 if (bb->aux != &bb->aux)
1593 abort ();
1594 bb->aux = NULL;
1596 find_many_sub_basic_blocks (blocks);
1597 sbitmap_free (blocks);
 1600 /* Update the CFG for all queued instructions, taking special care when
 1601    inserting code on edges between a call and the store of its return value.  */
1603 void
1604 commit_edge_insertions_watch_calls (void)
1606 basic_block bb;
1607 sbitmap blocks;
1608 bool changed = false;
1610 #ifdef ENABLE_CHECKING
1611 verify_flow_info ();
1612 #endif
1614 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1616 edge e, next;
1618 for (e = bb->succ; e; e = next)
1620 next = e->succ_next;
1621 if (e->insns)
1623 changed = true;
1624 commit_one_edge_insertion (e, true);
1629 if (!changed)
1630 return;
1632 blocks = sbitmap_alloc (last_basic_block);
1633 sbitmap_zero (blocks);
1634 FOR_EACH_BB (bb)
1635 if (bb->aux)
1637 SET_BIT (blocks, bb->index);
1638 /* Check for forgotten bb->aux values before commit_edge_insertions
1639 call. */
1640 if (bb->aux != &bb->aux)
1641 abort ();
1642 bb->aux = NULL;
1644 find_many_sub_basic_blocks (blocks);
1645 sbitmap_free (blocks);
1648 /* Print out RTL-specific basic block information (live information
1649 at start and end). */
1651 static void
1652 rtl_dump_bb (basic_block bb, FILE *outf, int indent)
1654 rtx insn;
1655 rtx last;
1656 char *s_indent;
1658 s_indent = (char *) alloca ((size_t) indent + 1);
1659 memset ((void *) s_indent, ' ', (size_t) indent);
1660 s_indent[indent] = '\0';
1662 fprintf (outf, ";;%s Registers live at start: ", s_indent);
1663 dump_regset (bb->global_live_at_start, outf);
1664 putc ('\n', outf);
1666 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1667 insn = NEXT_INSN (insn))
1668 print_rtl_single (outf, insn);
1670 fprintf (outf, ";;%s Registers live at end: ", s_indent);
1671 dump_regset (bb->global_live_at_end, outf);
1672 putc ('\n', outf);
1675 /* Like print_rtl, but also print out live information for the start of each
1676 basic block. */
1678 void
1679 print_rtl_with_bb (FILE *outf, rtx rtx_first)
1681 rtx tmp_rtx;
1683 if (rtx_first == 0)
1684 fprintf (outf, "(nil)\n");
1685 else
1687 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1688 int max_uid = get_max_uid ();
1689 basic_block *start = xcalloc (max_uid, sizeof (basic_block));
1690 basic_block *end = xcalloc (max_uid, sizeof (basic_block));
1691 enum bb_state *in_bb_p = xcalloc (max_uid, sizeof (enum bb_state));
1693 basic_block bb;
1695 FOR_EACH_BB_REVERSE (bb)
1697 rtx x;
1699 start[INSN_UID (BB_HEAD (bb))] = bb;
1700 end[INSN_UID (BB_END (bb))] = bb;
1701 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1703 enum bb_state state = IN_MULTIPLE_BB;
1705 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1706 state = IN_ONE_BB;
1707 in_bb_p[INSN_UID (x)] = state;
1709 if (x == BB_END (bb))
1710 break;
1714 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1716 int did_output;
1718 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1720 fprintf (outf, ";; Start of basic block %d, registers live:",
1721 bb->index);
1722 dump_regset (bb->global_live_at_start, outf);
1723 putc ('\n', outf);
1726 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1727 && GET_CODE (tmp_rtx) != NOTE
1728 && GET_CODE (tmp_rtx) != BARRIER)
1729 fprintf (outf, ";; Insn is not within a basic block\n");
1730 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1731 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1733 did_output = print_rtl_single (outf, tmp_rtx);
1735 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1737 fprintf (outf, ";; End of basic block %d, registers live:\n",
1738 bb->index);
1739 dump_regset (bb->global_live_at_end, outf);
1740 putc ('\n', outf);
1743 if (did_output)
1744 putc ('\n', outf);
1747 free (start);
1748 free (end);
1749 free (in_bb_p);
1752 if (current_function_epilogue_delay_list != 0)
1754 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1755 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1756 tmp_rtx = XEXP (tmp_rtx, 1))
1757 print_rtl_single (outf, XEXP (tmp_rtx, 0));
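/* Illustrative usage sketch: print_rtl_with_bb is handy from a debugger
   while a pass is stopped, e.g.

       (gdb) call print_rtl_with_bb (stderr, get_insns ())

   which dumps the whole function with basic block boundaries and
   live-register annotations.  */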
1761 void
1762 update_br_prob_note (basic_block bb)
1764 rtx note;
1765 if (GET_CODE (BB_END (bb)) != JUMP_INSN)
1766 return;
1767 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1768 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1769 return;
1770 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1773 /* Verify the CFG and RTL consistency common for both underlying RTL and
1774 cfglayout RTL.
 1776    Currently it does the following checks:
1778 - test head/end pointers
1779 - overlapping of basic blocks
1780 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1781 - tails of basic blocks (ensure that boundary is necessary)
1782 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1783 and NOTE_INSN_BASIC_BLOCK
 1785    In the future it can be extended to check a lot of other stuff as well
 1786    (reachability of basic blocks, life information, etc.).  */
1788 static int
1789 rtl_verify_flow_info_1 (void)
1791 const int max_uid = get_max_uid ();
1792 rtx last_head = get_last_insn ();
1793 basic_block *bb_info;
1794 rtx x;
1795 int err = 0;
1796 basic_block bb, last_bb_seen;
1798 bb_info = xcalloc (max_uid, sizeof (basic_block));
1800 /* Check bb chain & numbers. */
1801 last_bb_seen = ENTRY_BLOCK_PTR;
1803 FOR_EACH_BB_REVERSE (bb)
1805 rtx head = BB_HEAD (bb);
1806 rtx end = BB_END (bb);
1808 /* Verify the end of the basic block is in the INSN chain. */
1809 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1810 if (x == end)
1811 break;
1813 if (!x)
1815 error ("end insn %d for block %d not found in the insn stream",
1816 INSN_UID (end), bb->index);
1817 err = 1;
1820 /* Work backwards from the end to the head of the basic block
1821 to verify the head is in the RTL chain. */
1822 for (; x != NULL_RTX; x = PREV_INSN (x))
1824 /* While walking over the insn chain, verify insns appear
1825 in only one basic block and initialize the BB_INFO array
1826 used by other passes. */
1827 if (bb_info[INSN_UID (x)] != NULL)
1829 error ("insn %d is in multiple basic blocks (%d and %d)",
1830 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1831 err = 1;
1834 bb_info[INSN_UID (x)] = bb;
1836 if (x == head)
1837 break;
1839 if (!x)
1841 error ("head insn %d for block %d not found in the insn stream",
1842 INSN_UID (head), bb->index);
1843 err = 1;
1846 last_head = x;
1849 /* Now check the basic blocks (boundaries etc.) */
1850 FOR_EACH_BB_REVERSE (bb)
1852 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1853 edge e, fallthru = NULL;
1854 rtx note;
1856 if (INSN_P (BB_END (bb))
1857 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
1858 && bb->succ && bb->succ->succ_next
1859 && any_condjump_p (BB_END (bb)))
1861 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
1863 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
1864 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1865 err = 1;
1868 for (e = bb->succ; e; e = e->succ_next)
1870 if (e->flags & EDGE_FALLTHRU)
1871 n_fallthru++, fallthru = e;
1873 if ((e->flags & ~(EDGE_DFS_BACK
1874 | EDGE_CAN_FALLTHRU
1875 | EDGE_IRREDUCIBLE_LOOP
1876 | EDGE_LOOP_EXIT)) == 0)
1877 n_branch++;
1879 if (e->flags & EDGE_ABNORMAL_CALL)
1880 n_call++;
1882 if (e->flags & EDGE_EH)
1883 n_eh++;
1884 else if (e->flags & EDGE_ABNORMAL)
1885 n_abnormal++;
1888 if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
1889 && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
1891 error ("Missing REG_EH_REGION note in the end of bb %i", bb->index);
1892 err = 1;
1894 if (n_branch
1895 && (GET_CODE (BB_END (bb)) != JUMP_INSN
1896 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
1897 || any_condjump_p (BB_END (bb))))))
1899 error ("Too many outgoing branch edges from bb %i", bb->index);
1900 err = 1;
1902 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
1904 error ("Fallthru edge after unconditional jump %i", bb->index);
1905 err = 1;
1907 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
1909 error ("Wrong amount of branch edges after unconditional jump %i", bb->index);
1910 err = 1;
1912 if (n_branch != 1 && any_condjump_p (BB_END (bb))
1913 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
1915 error ("Wrong amount of branch edges after conditional jump %i", bb->index);
1916 err = 1;
1918 if (n_call && GET_CODE (BB_END (bb)) != CALL_INSN)
1920 error ("Call edges for non-call insn in bb %i", bb->index);
1921 err = 1;
1923 if (n_abnormal
1924 && (GET_CODE (BB_END (bb)) != CALL_INSN && n_call != n_abnormal)
1925 && (GET_CODE (BB_END (bb)) != JUMP_INSN
1926 || any_condjump_p (BB_END (bb))
1927 || any_uncondjump_p (BB_END (bb))))
1929 error ("Abnormal edges for no purpose in bb %i", bb->index);
1930 err = 1;
1933 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
1934 if (BLOCK_FOR_INSN (x) != bb)
1936 debug_rtx (x);
1937 if (! BLOCK_FOR_INSN (x))
1938 error
1939 ("insn %d inside basic block %d but block_for_insn is NULL",
1940 INSN_UID (x), bb->index);
1941 else
1942 error
1943 ("insn %d inside basic block %d but block_for_insn is %i",
1944 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
1946 err = 1;
 1949       /* OK, the pointers are correct.  Now check the header of the basic
 1950 	 block.  It ought to contain an optional CODE_LABEL followed
 1951 	 by NOTE_BASIC_BLOCK.  */
1952 x = BB_HEAD (bb);
1953 if (GET_CODE (x) == CODE_LABEL)
1955 if (BB_END (bb) == x)
1957 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1958 bb->index);
1959 err = 1;
1962 x = NEXT_INSN (x);
1965 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
1967 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1968 bb->index);
1969 err = 1;
1972 if (BB_END (bb) == x)
 1973 	/* Do checks for empty blocks here.  */
1975 else
1976 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
1978 if (NOTE_INSN_BASIC_BLOCK_P (x))
1980 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
1981 INSN_UID (x), bb->index);
1982 err = 1;
1985 if (x == BB_END (bb))
1986 break;
1988 if (control_flow_insn_p (x))
1990 error ("in basic block %d:", bb->index);
1991 fatal_insn ("flow control insn inside a basic block", x);
1996 /* Clean up. */
1997 free (bb_info);
1998 return err;
2001 /* Verify the CFG and RTL consistency common for both underlying RTL and
2002 cfglayout RTL.
 2004    Currently it does the following checks:
2005 - all checks of rtl_verify_flow_info_1
2006 - check that all insns are in the basic blocks
2007 (except the switch handling code, barriers and notes)
2008 - check that all returns are followed by barriers
 2009      - check that all fallthru edges point to the adjacent blocks.  */
2010 static int
2011 rtl_verify_flow_info (void)
2013 basic_block bb;
2014 int err = rtl_verify_flow_info_1 ();
2015 rtx x;
2016 int num_bb_notes;
2017 const rtx rtx_first = get_insns ();
2018 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
2020 FOR_EACH_BB_REVERSE (bb)
2022 edge e;
2023 for (e = bb->succ; e; e = e->succ_next)
2024 if (e->flags & EDGE_FALLTHRU)
2025 break;
2026 if (!e)
2028 rtx insn;
2030 /* Ensure existence of barrier in BB with no fallthru edges. */
2031 for (insn = BB_END (bb); !insn || GET_CODE (insn) != BARRIER;
2032 insn = NEXT_INSN (insn))
2033 if (!insn
2034 || (GET_CODE (insn) == NOTE
2035 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
2037 error ("missing barrier after block %i", bb->index);
2038 err = 1;
2039 break;
2042 else if (e->src != ENTRY_BLOCK_PTR
2043 && e->dest != EXIT_BLOCK_PTR)
2045 rtx insn;
2047 if (e->src->next_bb != e->dest)
2049 error
2050 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2051 e->src->index, e->dest->index);
2052 err = 1;
2054 else
2055 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2056 insn = NEXT_INSN (insn))
2057 if (GET_CODE (insn) == BARRIER
2058 #ifndef CASE_DROPS_THROUGH
2059 || INSN_P (insn)
2060 #else
2061 || (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
2062 #endif
2065 error ("verify_flow_info: Incorrect fallthru %i->%i",
2066 e->src->index, e->dest->index);
2067 fatal_insn ("wrong insn in the fallthru edge", insn);
2068 err = 1;
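/* Next, walk the whole insn chain.  Every insn must lie inside some basic
   block except for barriers, notes and the labels heading jump table data,
   and the NOTE_INSN_BASIC_BLOCK notes must appear in the same order as the
   basic block chain itself.  */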
2073 num_bb_notes = 0;
2074 last_bb_seen = ENTRY_BLOCK_PTR;
2076 for (x = rtx_first; x; x = NEXT_INSN (x))
2078 if (NOTE_INSN_BASIC_BLOCK_P (x))
2080 bb = NOTE_BASIC_BLOCK (x);
2082 num_bb_notes++;
2083 if (bb != last_bb_seen->next_bb)
2084 internal_error ("basic blocks not laid down consecutively");
2086 curr_bb = last_bb_seen = bb;
2089 if (!curr_bb)
2091 switch (GET_CODE (x))
2093 case BARRIER:
2094 case NOTE:
2095 break;
2097 case CODE_LABEL:
2098 /* An addr_vec is placed outside any basic block. */
2099 if (NEXT_INSN (x)
2100 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
2101 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2102 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
2103 x = NEXT_INSN (x);
2105 /* But in any case, non-deletable labels can appear anywhere. */
2106 break;
2108 default:
2109 fatal_insn ("insn outside basic block", x);
2113 if (INSN_P (x)
2114 && GET_CODE (x) == JUMP_INSN
2115 && returnjump_p (x) && ! condjump_p (x)
2116 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
2117 fatal_insn ("return not followed by barrier", x);
2118 if (curr_bb && x == BB_END (curr_bb))
2119 curr_bb = NULL;
2122 if (num_bb_notes != n_basic_blocks)
2123 internal_error
2124 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2125 num_bb_notes, n_basic_blocks);
2127 return err;
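/* In checking builds these verifiers are normally reached through the
   generic verify_flow_info () dispatcher rather than being called
   directly; a minimal sketch, assuming the usual cfghooks entry point:

       #ifdef ENABLE_CHECKING
         verify_flow_info ();
       #endif
*/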
2130 /* Assume that the preceding pass has possibly eliminated jump instructions
2131 or converted the unconditional jumps.  Eliminate the now-dead edges from
2132 the CFG.  Return true if any edges are eliminated. */
2134 bool
2135 purge_dead_edges (basic_block bb)
2137 edge e, next;
2138 rtx insn = BB_END (bb), note;
2139 bool purged = false;
2141 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2142 if (GET_CODE (insn) == INSN
2143 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2145 rtx eqnote;
2147 if (! may_trap_p (PATTERN (insn))
2148 || ((eqnote = find_reg_equal_equiv_note (insn))
2149 && ! may_trap_p (XEXP (eqnote, 0))))
2150 remove_note (insn, note);
2153 /* Clean up abnormal edges caused by exceptions or non-local gotos. */
2154 for (e = bb->succ; e; e = next)
2156 next = e->succ_next;
2157 if (e->flags & EDGE_EH)
2159 if (can_throw_internal (BB_END (bb)))
2160 continue;
2162 else if (e->flags & EDGE_ABNORMAL_CALL)
2164 if (GET_CODE (BB_END (bb)) == CALL_INSN
2165 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2166 || INTVAL (XEXP (note, 0)) >= 0))
2167 continue;
2169 else
2170 continue;
2172 remove_edge (e);
2173 bb->flags |= BB_DIRTY;
2174 purged = true;
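/* What remains to be done depends on the kind of insn ending the block:
   a jump, a sibling call, or neither of those.  */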
2177 if (GET_CODE (insn) == JUMP_INSN)
2179 rtx note;
2180 edge b,f;
2182 /* We care only about conditional jumps and simplejumps. */
2183 if (!any_condjump_p (insn)
2184 && !returnjump_p (insn)
2185 && !simplejump_p (insn))
2186 return purged;
2188 /* Branch probability/prediction notes are defined only for
2189 condjumps.  We may have turned a condjump into a simplejump. */
2190 if (simplejump_p (insn))
2192 note = find_reg_note (insn, REG_BR_PROB, NULL);
2193 if (note)
2194 remove_note (insn, note);
2195 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2196 remove_note (insn, note);
2199 for (e = bb->succ; e; e = next)
2201 next = e->succ_next;
2203 /* Do not let abnormal flags leak from computed jumps turned
2204 into simplejumps. */
2206 e->flags &= ~EDGE_ABNORMAL;
2208 /* See if this edge is one we should keep. */
2209 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2210 /* A conditional jump can fall through into the next
2211 block, so we should keep the edge. */
2212 continue;
2213 else if (e->dest != EXIT_BLOCK_PTR
2214 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2215 /* If the destination block is the target of the jump,
2216 keep the edge. */
2217 continue;
2218 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2219 /* If the destination block is the exit block, and this
2220 instruction is a return, then keep the edge. */
2221 continue;
2222 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2223 /* Keep the edges that correspond to exceptions thrown by
2224 this instruction and rematerialize the EDGE_ABNORMAL
2225 flag we just cleared above. */
2227 e->flags |= EDGE_ABNORMAL;
2228 continue;
2231 /* We do not need this edge. */
2232 bb->flags |= BB_DIRTY;
2233 purged = true;
2234 remove_edge (e);
2237 if (!bb->succ || !purged)
2238 return purged;
2240 if (dump_file)
2241 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2243 if (!optimize)
2244 return purged;
2246 /* Redistribute probabilities. */
2247 if (!bb->succ->succ_next)
2249 bb->succ->probability = REG_BR_PROB_BASE;
2250 bb->succ->count = bb->count;
2252 else
2254 note = find_reg_note (insn, REG_BR_PROB, NULL);
2255 if (!note)
2256 return purged;
2258 b = BRANCH_EDGE (bb);
2259 f = FALLTHRU_EDGE (bb);
2260 b->probability = INTVAL (XEXP (note, 0));
2261 f->probability = REG_BR_PROB_BASE - b->probability;
2262 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2263 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
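/* For example (illustrative numbers only): with REG_BR_PROB_BASE of 10000,
   a REG_BR_PROB note of 9000 and bb->count of 200 give the branch edge
   probability 9000 and count 180, leaving the fallthru edge probability
   1000 and count 20.  */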
2266 return purged;
2268 else if (GET_CODE (insn) == CALL_INSN && SIBLING_CALL_P (insn))
2270 /* First, there should not be any EH or ABCALL edges resulting
2271 from non-local gotos and the like. If there were, we shouldn't
2272 have created the sibcall in the first place. Second, there
2273 should of course never have been a fallthru edge. */
2274 if (!bb->succ || bb->succ->succ_next)
2275 abort ();
2276 if (bb->succ->flags != (EDGE_SIBCALL | EDGE_ABNORMAL))
2277 abort ();
2279 return 0;
2282 /* If we don't see a jump insn, we don't know exactly why the block would
2283 have been broken at this point. Look for a simple, non-fallthru edge,
2284 as these are only created by conditional branches. If we find such an
2285 edge we know that there used to be a jump here and can then safely
2286 remove all non-fallthru edges. */
2287 for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
2288 e = e->succ_next)
2291 if (!e)
2292 return purged;
2294 for (e = bb->succ; e; e = next)
2296 next = e->succ_next;
2297 if (!(e->flags & EDGE_FALLTHRU))
2299 bb->flags |= BB_DIRTY;
2300 remove_edge (e);
2301 purged = true;
2305 if (!bb->succ || bb->succ->succ_next)
2306 abort ();
2308 bb->succ->probability = REG_BR_PROB_BASE;
2309 bb->succ->count = bb->count;
2311 if (dump_file)
2312 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2313 bb->index);
2314 return purged;
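/* A typical caller pattern (sketch only): after a pass has simplified or
   removed the jump ending BB, something like

       if (purge_dead_edges (bb))
         ... the successor list of BB changed, adjust accordingly ...

   keeps the edge list consistent with the insn stream.  */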
2317 /* Search all basic blocks for potentially dead edges and purge them. Return
2318 true if some edge has been eliminated. */
2320 bool
2321 purge_all_dead_edges (int update_life_p)
2323 int purged = false;
2324 sbitmap blocks = 0;
2325 basic_block bb;
2327 if (update_life_p)
2329 blocks = sbitmap_alloc (last_basic_block);
2330 sbitmap_zero (blocks);
2333 FOR_EACH_BB (bb)
2335 bool purged_here = purge_dead_edges (bb);
2337 purged |= purged_here;
2338 if (purged_here && update_life_p)
2339 SET_BIT (blocks, bb->index);
2342 if (update_life_p && purged)
2343 update_life_info (blocks, UPDATE_LIFE_GLOBAL,
2344 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2345 | PROP_KILL_DEAD_CODE);
2347 if (update_life_p)
2348 sbitmap_free (blocks);
2349 return purged;
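/* Sketch of typical use: passing zero skips the life-info update,

       purge_all_dead_edges (0);

   while callers that maintain life information pass a nonzero
   UPDATE_LIFE_P so that update_life_info is rerun on the affected
   blocks, as done above.  */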
2352 /* Same as split_block but update cfg_layout structures. */
2354 static basic_block
2355 cfg_layout_split_block (basic_block bb, void *insnp)
2357 rtx insn = insnp;
2358 basic_block new_bb = rtl_split_block (bb, insn);
2360 new_bb->rbi->footer = bb->rbi->footer;
2361 bb->rbi->footer = NULL;
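/* In cfglayout mode barriers and similar insns live in the per-block
   footer rather than in the insn stream, so the footer must follow the
   second half of the split block; hence the transfer above.  */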
2363 return new_bb;
2367 /* Redirect edge E to DEST. */
2368 static bool
2369 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
2371 basic_block src = e->src;
2372 bool ret;
2374 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
2375 return false;
2377 if (e->dest == dest)
2378 return true;
2380 if (e->src != ENTRY_BLOCK_PTR
2381 && try_redirect_by_replacing_jump (e, dest, true))
2382 return true;
2384 if (e->src == ENTRY_BLOCK_PTR
2385 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
2387 if (dump_file)
2388 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
2389 e->src->index, dest->index);
2391 redirect_edge_succ (e, dest);
2392 return true;
2395 /* redirect_edge_and_branch may decide to turn the branch into a fallthru
2396 edge when the basic blocks appear to be in sequence.  Avoid this
2397 transformation. */
2399 if (e->flags & EDGE_FALLTHRU)
2401 /* Redirect any branch edges unified with the fallthru one. */
2402 if (GET_CODE (BB_END (src)) == JUMP_INSN
2403 && label_is_jump_target_p (BB_HEAD (e->dest),
2404 BB_END (src)))
2406 if (dump_file)
2407 fprintf (dump_file, "Fallthru edge unified with branch "
2408 "%i->%i redirected to %i\n",
2409 e->src->index, e->dest->index, dest->index);
2410 e->flags &= ~EDGE_FALLTHRU;
2411 if (!redirect_branch_edge (e, dest))
2412 abort ();
2413 e->flags |= EDGE_FALLTHRU;
2414 return true;
2416 /* If we are redirecting the fallthru edge to the same destination as the
2417 branch edge of a conditional jump, the jump becomes redundant; remove it. */
2418 if (src->succ->succ_next
2419 && !src->succ->succ_next->succ_next)
2421 edge s = e->succ_next ? e->succ_next : src->succ;
2422 if (s->dest == dest
2423 && any_condjump_p (BB_END (src))
2424 && onlyjump_p (BB_END (src)))
2425 delete_insn (BB_END (src));
2427 redirect_edge_succ_nodup (e, dest);
2428 if (dump_file)
2429 fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
2430 e->src->index, e->dest->index, dest->index);
2432 ret = true;
2434 else
2435 ret = redirect_branch_edge (e, dest);
2437 /* We don't want simplejumps in the insn stream during cfglayout. */
2438 if (simplejump_p (BB_END (src)))
2439 abort ();
2441 return ret;
2444 /* Simple wrapper, as we can always redirect fallthru edges. */
2445 static basic_block
2446 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
2448 if (!cfg_layout_redirect_edge_and_branch (e, dest))
2449 abort ();
2450 return NULL;
2453 /* Same as delete_basic_block but update cfg_layout structures. */
2455 static void
2456 cfg_layout_delete_block (basic_block bb)
2458 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remains;
2460 if (bb->rbi->header)
2462 next = BB_HEAD (bb);
2463 if (prev)
2464 NEXT_INSN (prev) = bb->rbi->header;
2465 else
2466 set_first_insn (bb->rbi->header);
2467 PREV_INSN (bb->rbi->header) = prev;
2468 insn = bb->rbi->header;
2469 while (NEXT_INSN (insn))
2470 insn = NEXT_INSN (insn);
2471 NEXT_INSN (insn) = next;
2472 PREV_INSN (next) = insn;
2474 next = NEXT_INSN (BB_END (bb));
2475 if (bb->rbi->footer)
2477 insn = bb->rbi->footer;
2478 while (insn)
2480 if (GET_CODE (insn) == BARRIER)
2482 if (PREV_INSN (insn))
2483 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2484 else
2485 bb->rbi->footer = NEXT_INSN (insn);
2486 if (NEXT_INSN (insn))
2487 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2489 if (GET_CODE (insn) == CODE_LABEL)
2490 break;
2491 insn = NEXT_INSN (insn);
2493 if (bb->rbi->footer)
2495 insn = BB_END (bb);
2496 NEXT_INSN (insn) = bb->rbi->footer;
2497 PREV_INSN (bb->rbi->footer) = insn;
2498 while (NEXT_INSN (insn))
2499 insn = NEXT_INSN (insn);
2500 NEXT_INSN (insn) = next;
2501 if (next)
2502 PREV_INSN (next) = insn;
2503 else
2504 set_last_insn (insn);
2507 if (bb->next_bb != EXIT_BLOCK_PTR)
2508 to = &bb->next_bb->rbi->header;
2509 else
2510 to = &cfg_layout_function_footer;
2511 rtl_delete_block (bb);
2513 if (prev)
2514 prev = NEXT_INSN (prev);
2515 else
2516 prev = get_insns ();
2517 if (next)
2518 next = PREV_INSN (next);
2519 else
2520 next = get_last_insn ();
2522 if (next && NEXT_INSN (next) != prev)
2524 remains = unlink_insn_chain (prev, next);
2525 insn = remains;
2526 while (NEXT_INSN (insn))
2527 insn = NEXT_INSN (insn);
2528 NEXT_INSN (insn) = *to;
2529 if (*to)
2530 PREV_INSN (*to) = insn;
2531 *to = remains;
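/* At this point any header or footer insns that survived the deletion have
   been spliced into the next block's header (or into
   cfg_layout_function_footer when BB was the last block), so nothing is
   dropped from the insn stream.  */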
2535 /* Return true when blocks A and B can be safely merged. */
2536 static bool
2537 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2539 /* There must be exactly one edge in between the blocks. */
2540 return (a->succ && !a->succ->succ_next && a->succ->dest == b
2541 && !b->pred->pred_next && a != b
2542 /* Must be simple edge. */
2543 && !(a->succ->flags & EDGE_COMPLEX)
2544 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2545 /* If the jump insn has side effects,
2546 we can't kill the edge. */
2547 && (GET_CODE (BB_END (a)) != JUMP_INSN
2548 || (reload_completed
2549 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
2552 /* Merge blocks A and B, aborting when it is not possible. */
2553 static void
2554 cfg_layout_merge_blocks (basic_block a, basic_block b)
2556 #ifdef ENABLE_CHECKING
2557 if (!cfg_layout_can_merge_blocks_p (a, b))
2558 abort ();
2559 #endif
2561 /* If there was a CODE_LABEL beginning B, delete it. */
2562 if (GET_CODE (BB_HEAD (b)) == CODE_LABEL)
2563 delete_insn (BB_HEAD (b));
2565 /* We should have a fallthru edge in A, or we can do a dummy redirection to
2566 get it cleaned up. */
2567 if (GET_CODE (BB_END (a)) == JUMP_INSN)
2568 try_redirect_by_replacing_jump (a->succ, b, true);
2569 if (GET_CODE (BB_END (a)) == JUMP_INSN)
2570 abort ();
2572 /* Possible line number notes should appear in between. */
2573 if (b->rbi->header)
2575 rtx first = BB_END (a), last;
2577 last = emit_insn_after (b->rbi->header, BB_END (a));
2578 delete_insn_chain (NEXT_INSN (first), last);
2579 b->rbi->header = NULL;
2582 /* If the basic blocks are not adjacent, move B's insn chain after A. */
2583 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
2585 rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));
2587 emit_insn_after (first, BB_END (a));
2588 /* Skip possible DELETED_LABEL insn. */
2589 if (!NOTE_INSN_BASIC_BLOCK_P (first))
2590 first = NEXT_INSN (first);
2591 if (!NOTE_INSN_BASIC_BLOCK_P (first))
2592 abort ();
2593 BB_HEAD (b) = NULL;
2594 delete_insn (first);
2596 /* Otherwise just re-associate the instructions. */
2597 else
2599 rtx insn;
2601 for (insn = BB_HEAD (b);
2602 insn != NEXT_INSN (BB_END (b));
2603 insn = NEXT_INSN (insn))
2604 set_block_for_insn (insn, a);
2605 insn = BB_HEAD (b);
2606 /* Skip possible DELETED_LABEL insn. */
2607 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2608 insn = NEXT_INSN (insn);
2609 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2610 abort ();
2611 BB_HEAD (b) = NULL;
2612 BB_END (a) = BB_END (b);
2613 delete_insn (insn);
2616 /* Possible tablejumps and barriers should appear after the block. */
2617 if (b->rbi->footer)
2619 if (!a->rbi->footer)
2620 a->rbi->footer = b->rbi->footer;
2621 else
2623 rtx last = a->rbi->footer;
2625 while (NEXT_INSN (last))
2626 last = NEXT_INSN (last);
2627 NEXT_INSN (last) = b->rbi->footer;
2628 PREV_INSN (b->rbi->footer) = last;
2630 b->rbi->footer = NULL;
2633 if (dump_file)
2634 fprintf (dump_file, "Merged blocks %d and %d.\n",
2635 a->index, b->index);
2638 /* Split edge E. */
2640 static basic_block
2641 cfg_layout_split_edge (edge e)
2643 edge new_e;
2644 basic_block new_bb =
2645 create_basic_block (e->src != ENTRY_BLOCK_PTR
2646 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
2647 NULL_RTX, e->src);
2649 new_e = make_edge (new_bb, e->dest, EDGE_FALLTHRU);
2650 redirect_edge_and_branch_force (e, new_bb);
2652 return new_bb;
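/* Note that in cfglayout mode the new block need not be placed physically
   on the edge; the fallthru edge created above is sufficient, and the final
   linearization will position the block later.  */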
2655 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
2657 static void
2658 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
2662 /* Implementation of CFG manipulation for linearized RTL. */
2663 struct cfg_hooks rtl_cfg_hooks = {
2664 "rtl",
2665 rtl_verify_flow_info,
2666 rtl_dump_bb,
2667 rtl_create_basic_block,
2668 rtl_redirect_edge_and_branch,
2669 rtl_redirect_edge_and_branch_force,
2670 rtl_delete_block,
2671 rtl_split_block,
2672 rtl_move_block_after,
2673 rtl_can_merge_blocks, /* can_merge_blocks_p */
2674 rtl_merge_blocks,
2675 rtl_split_edge,
2676 rtl_make_forwarder_block,
2677 rtl_tidy_fallthru_edge
2680 /* Implementation of CFG manipulation for cfg layout RTL, where
2681 basic blocks connected via fallthru edges do not have to be adjacent.
2682 This representation will hopefully become the default one in a future
2683 version of the compiler. */
2684 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
2685 "cfglayout mode",
2686 rtl_verify_flow_info_1,
2687 rtl_dump_bb,
2688 cfg_layout_create_basic_block,
2689 cfg_layout_redirect_edge_and_branch,
2690 cfg_layout_redirect_edge_and_branch_force,
2691 cfg_layout_delete_block,
2692 cfg_layout_split_block,
2693 rtl_move_block_after,
2694 cfg_layout_can_merge_blocks_p,
2695 cfg_layout_merge_blocks,
2696 cfg_layout_split_edge,
2697 rtl_make_forwarder_block,
2698 NULL
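/* Passes select between the two hook tables above through the cfghooks
   dispatch layer.  A minimal sketch, assuming the registration helpers
   declared in cfghooks.h:

       cfg_layout_rtl_register_cfg_hooks ();   -- enter cfglayout mode hooks
       ... transformations that may reorder or duplicate blocks ...
       rtl_register_cfg_hooks ();              -- back to plain RTL hooks
*/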