1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This file contains low-level functions to manipulate and analyze the CFG,
23 functions that are aware of the RTL intermediate language.
25 Available functionality:
26 - Basic CFG/RTL manipulation API documented in cfghooks.h
27 - CFG-aware instruction chain manipulation
28 delete_insn, delete_insn_chain
29 - Edge splitting and committing to edges
30 insert_insn_on_edge, commit_edge_insertions
31 - CFG updating after insn simplification
32 purge_dead_edges, purge_all_dead_edges
34 Functions not intended for generic use:
35 - Infrastructure to quickly determine the basic block for an insn
36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37 - Edge redirection with updating and optimizing of insn chain
38 block_label, tidy_fallthru_edge, force_nonfallthru */
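/* Usage sketch for the purge_dead_edges entry point listed above
   (illustrative only; the simplify_jump_at_end predicate is a
   hypothetical stand-in for whatever transformation a pass performs):

     basic_block bb;

     FOR_EACH_BB (bb)
       if (simplify_jump_at_end (bb))
	 purge_dead_edges (bb);

   purge_all_dead_edges performs the same cleanup over every block in
   the function.  */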
40 #include "config.h"
41 #include "system.h"
42 #include "coretypes.h"
43 #include "tm.h"
44 #include "tree.h"
45 #include "rtl.h"
46 #include "hard-reg-set.h"
47 #include "basic-block.h"
48 #include "regs.h"
49 #include "flags.h"
50 #include "output.h"
51 #include "function.h"
52 #include "except.h"
53 #include "toplev.h"
54 #include "tm_p.h"
55 #include "obstack.h"
56 #include "insn-config.h"
57 #include "cfglayout.h"
58 #include "expr.h"
61 /* The labels mentioned in non-jump rtl. Valid during find_basic_blocks. */
62 /* ??? Should probably be using LABEL_NUSES instead. It would take a
63 bit of surgery to be able to use or co-opt the routines in jump. */
64 rtx label_value_list;
65 rtx tail_recursion_label_list;
67 static int can_delete_note_p (rtx);
68 static int can_delete_label_p (rtx);
69 static void commit_one_edge_insertion (edge, int);
70 static rtx last_loop_beg_note (rtx);
71 static bool back_edge_of_syntactic_loop_p (basic_block, basic_block);
72 basic_block force_nonfallthru_and_redirect (edge, basic_block);
73 static basic_block rtl_split_edge (edge);
74 static bool rtl_move_block_after (basic_block, basic_block);
75 static int rtl_verify_flow_info (void);
76 static basic_block cfg_layout_split_block (basic_block, void *);
77 static bool cfg_layout_redirect_edge_and_branch (edge, basic_block);
78 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
79 static void cfg_layout_delete_block (basic_block);
80 static void rtl_delete_block (basic_block);
81 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
82 static bool rtl_redirect_edge_and_branch (edge, basic_block);
83 static basic_block rtl_split_block (basic_block, void *);
84 static void rtl_dump_bb (basic_block, FILE *, int);
85 static int rtl_verify_flow_info_1 (void);
86 static void mark_killed_regs (rtx, rtx, void *);
87 static void rtl_make_forwarder_block (edge);
89 /* Return true if NOTE is not one of the ones that must be kept paired,
90 so that we may simply delete it. */
92 static int
93 can_delete_note_p (rtx note)
95 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
96 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK
97 || NOTE_LINE_NUMBER (note) == NOTE_INSN_UNLIKELY_EXECUTED_CODE
98 || NOTE_LINE_NUMBER (note) == NOTE_INSN_PREDICTION);
101 /* True if a given label can be deleted. */
103 static int
104 can_delete_label_p (rtx label)
106 return (!LABEL_PRESERVE_P (label)
107 /* User-declared labels must be preserved. */
108 && LABEL_NAME (label) == 0
109 && !in_expr_list_p (forced_labels, label)
110 && !in_expr_list_p (label_value_list, label));
113 /* Delete INSN by patching it out. Return the next insn. */
116 delete_insn (rtx insn)
118 rtx next = NEXT_INSN (insn);
119 rtx note;
120 bool really_delete = true;
122 if (GET_CODE (insn) == CODE_LABEL)
124 /* Some labels can't be directly removed from the INSN chain, as they
125 might be referenced via variables, the constant pool, etc.
126 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
127 if (! can_delete_label_p (insn))
129 const char *name = LABEL_NAME (insn);
131 really_delete = false;
132 PUT_CODE (insn, NOTE);
133 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
134 NOTE_SOURCE_FILE (insn) = name;
137 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
140 if (really_delete)
142 /* If this insn has already been deleted, something is very wrong. */
143 if (INSN_DELETED_P (insn))
144 abort ();
145 remove_insn (insn);
146 INSN_DELETED_P (insn) = 1;
149 /* If deleting a jump, decrement the use count of the label. Deleting
150 the label itself should happen in the normal course of block merging. */
151 if (GET_CODE (insn) == JUMP_INSN
152 && JUMP_LABEL (insn)
153 && GET_CODE (JUMP_LABEL (insn)) == CODE_LABEL)
154 LABEL_NUSES (JUMP_LABEL (insn))--;
156 /* Also if deleting an insn that references a label. */
157 else
159 while ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
160 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
162 LABEL_NUSES (XEXP (note, 0))--;
163 remove_note (insn, note);
167 if (GET_CODE (insn) == JUMP_INSN
168 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
169 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
171 rtx pat = PATTERN (insn);
172 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
173 int len = XVECLEN (pat, diff_vec_p);
174 int i;
176 for (i = 0; i < len; i++)
178 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
180 /* When deleting code in bulk (e.g. removing many unreachable
181 blocks) we can delete a label that's a target of the vector
182 before deleting the vector itself. */
183 if (GET_CODE (label) != NOTE)
184 LABEL_NUSES (label)--;
188 return next;
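/* Usage sketch (illustrative only; insn_is_dead_p is a hypothetical
   predicate): because delete_insn returns the following insn, it can be
   used while walking the insn chain:

     rtx insn, next;

     for (insn = get_insns (); insn; insn = next)
       if (INSN_P (insn) && insn_is_dead_p (insn))
	 next = delete_insn (insn);
       else
	 next = NEXT_INSN (insn);
*/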
191 /* Like delete_insn but also purge dead edges from BB. */
193 delete_insn_and_edges (rtx insn)
195 rtx x;
196 bool purge = false;
198 if (INSN_P (insn)
199 && BLOCK_FOR_INSN (insn)
200 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
201 purge = true;
202 x = delete_insn (insn);
203 if (purge)
204 purge_dead_edges (BLOCK_FOR_INSN (insn));
205 return x;
208 /* Unlink a chain of insns between START and FINISH, leaving notes
209 that must be paired. */
211 void
212 delete_insn_chain (rtx start, rtx finish)
214 rtx next;
216 /* Unchain the insns one by one. It would be quicker to delete all of these
217 with a single unchaining, rather than one at a time, but we need to keep
218 the NOTEs. */
219 while (1)
221 next = NEXT_INSN (start);
222 if (GET_CODE (start) == NOTE && !can_delete_note_p (start))
224 else
225 next = delete_insn (start);
227 if (start == finish)
228 break;
229 start = next;
233 /* Like delete_insn_chain but also purge dead edges from BB. */
234 void
235 delete_insn_chain_and_edges (rtx first, rtx last)
237 bool purge = false;
239 if (INSN_P (last)
240 && BLOCK_FOR_INSN (last)
241 && BB_END (BLOCK_FOR_INSN (last)) == last)
242 purge = true;
243 delete_insn_chain (first, last);
244 if (purge)
245 purge_dead_edges (BLOCK_FOR_INSN (last));
248 /* Create a new basic block consisting of the instructions between HEAD and END
249 inclusive. This function is designed to allow fast BB construction - it reuses
250 the note and basic block struct in BB_NOTE, if any, does not grow the
251 BASIC_BLOCK chain, and should be used directly only by CFG construction code.
252 END can be NULL to create a new empty basic block before HEAD. Both END
253 and HEAD can be NULL to create a basic block at the end of the INSN chain.
254 AFTER is the basic block the new block should be placed after. */
256 basic_block
257 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
259 basic_block bb;
261 if (bb_note
262 && ! RTX_INTEGRATED_P (bb_note)
263 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
264 && bb->aux == NULL)
266 /* If we found an existing note, thread it back onto the chain. */
268 rtx after;
270 if (GET_CODE (head) == CODE_LABEL)
271 after = head;
272 else
274 after = PREV_INSN (head);
275 head = bb_note;
278 if (after != bb_note && NEXT_INSN (after) != bb_note)
279 reorder_insns_nobb (bb_note, bb_note, after);
281 else
283 /* Otherwise we must create a note and a basic block structure. */
285 bb = alloc_block ();
287 if (!head && !end)
288 head = end = bb_note
289 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
290 else if (GET_CODE (head) == CODE_LABEL && end)
292 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
293 if (head == end)
294 end = bb_note;
296 else
298 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
299 head = bb_note;
300 if (!end)
301 end = head;
304 NOTE_BASIC_BLOCK (bb_note) = bb;
307 /* Always include the bb note in the block. */
308 if (NEXT_INSN (end) == bb_note)
309 end = bb_note;
311 BB_HEAD (bb) = head;
312 BB_END (bb) = end;
313 bb->index = last_basic_block++;
314 bb->flags = BB_NEW;
315 link_block (bb, after);
316 BASIC_BLOCK (bb->index) = bb;
317 update_bb_for_insn (bb);
318 bb->partition = UNPARTITIONED;
320 /* Tag the block so that we know it has been used when considering
321 other basic block notes. */
322 bb->aux = bb;
324 return bb;
327 /* Create a new basic block consisting of the instructions between HEAD and END
328 and place it in the BB chain after block AFTER. END can be NULL to
329 create a new empty basic block before HEAD. Both END and HEAD can be NULL to
330 create a basic block at the end of the INSN chain. */
332 static basic_block
333 rtl_create_basic_block (void *headp, void *endp, basic_block after)
335 rtx head = headp, end = endp;
336 basic_block bb;
338 /* Grow the basic block array if needed. */
339 if ((size_t) last_basic_block >= VARRAY_SIZE (basic_block_info))
341 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
342 VARRAY_GROW (basic_block_info, new_size);
345 n_basic_blocks++;
347 bb = create_basic_block_structure (head, end, NULL, after);
348 bb->aux = NULL;
349 return bb;
352 static basic_block
353 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
355 basic_block newbb = rtl_create_basic_block (head, end, after);
357 cfg_layout_initialize_rbi (newbb);
358 return newbb;
361 /* Delete the insns in a (non-live) block. We physically delete every
362 non-deleted-note insn, and update the flow graph appropriately. */
366 /* ??? Preserving all such notes strikes me as wrong. It would be nice
367 to post-process the stream to remove empty blocks, loops, ranges, etc. */
369 static void
370 rtl_delete_block (basic_block b)
372 rtx insn, end, tmp;
374 /* If the head of this block is a CODE_LABEL, then it might be the
375 label for an exception handler which can't be reached.
377 We need to remove the label from the exception_handler_label list
378 and remove the associated NOTE_INSN_EH_REGION_BEG and
379 NOTE_INSN_EH_REGION_END notes. */
381 /* Get rid of all NOTE_INSN_PREDICTIONs and NOTE_INSN_LOOP_CONTs
382 hanging before the block. */
384 for (insn = PREV_INSN (BB_HEAD (b)); insn; insn = PREV_INSN (insn))
386 if (GET_CODE (insn) != NOTE)
387 break;
388 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PREDICTION
389 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
390 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
393 insn = BB_HEAD (b);
395 never_reached_warning (insn, BB_END (b));
397 if (GET_CODE (insn) == CODE_LABEL)
398 maybe_remove_eh_handler (insn);
400 /* Include any jump table following the basic block. */
401 end = BB_END (b);
402 if (tablejump_p (end, NULL, &tmp))
403 end = tmp;
405 /* Include any barrier that may follow the basic block. */
406 tmp = next_nonnote_insn (end);
407 if (tmp && GET_CODE (tmp) == BARRIER)
408 end = tmp;
410 /* Selectively delete the entire chain. */
411 BB_HEAD (b) = NULL;
412 delete_insn_chain (insn, end);
415 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
417 void
418 compute_bb_for_insn (void)
420 basic_block bb;
422 FOR_EACH_BB (bb)
424 rtx end = BB_END (bb);
425 rtx insn;
427 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
429 BLOCK_FOR_INSN (insn) = bb;
430 if (insn == end)
431 break;
436 /* Release the basic_block_for_insn array. */
438 void
439 free_bb_for_insn (void)
441 rtx insn;
442 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
443 if (GET_CODE (insn) != BARRIER)
444 BLOCK_FOR_INSN (insn) = NULL;
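/* Usage sketch (illustrative): passes that rely on BLOCK_FOR_INSN
   bracket their work with the two functions above:

     compute_bb_for_insn ();
     ... run the pass that queries BLOCK_FOR_INSN (insn) ...
     free_bb_for_insn ();
*/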
447 /* Update BLOCK_FOR_INSN for the insns within BB. */
449 void
450 update_bb_for_insn (basic_block bb)
452 rtx insn;
454 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
456 if (GET_CODE (insn) != BARRIER)
457 set_block_for_insn (insn, bb);
458 if (insn == BB_END (bb))
459 break;
463 /* Create a new basic block just after basic block BB by splitting
464 everything after the specified instruction INSNP. */
466 static basic_block
467 rtl_split_block (basic_block bb, void *insnp)
469 basic_block new_bb;
470 rtx insn = insnp;
471 edge e;
473 if (!insn)
475 insn = first_insn_after_basic_block_note (bb);
477 if (insn)
478 insn = PREV_INSN (insn);
479 else
480 insn = get_last_insn ();
483 /* We should probably check the type of the insn so that we do not create
484 an inconsistent CFG. It is checked in verify_flow_info anyway, so do not
485 bother. */
486 if (insn == BB_END (bb))
487 emit_note_after (NOTE_INSN_DELETED, insn);
489 /* Create the new basic block. */
490 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
491 BB_END (bb) = insn;
493 /* Redirect the outgoing edges. */
494 new_bb->succ = bb->succ;
495 bb->succ = NULL;
496 for (e = new_bb->succ; e; e = e->succ_next)
497 e->src = new_bb;
499 if (bb->global_live_at_start)
501 new_bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
502 new_bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
503 COPY_REG_SET (new_bb->global_live_at_end, bb->global_live_at_end);
505 /* We now have to calculate which registers are live at the end
506 of the split basic block and at the start of the new basic
507 block. Start with those registers that are known to be live
508 at the end of the original basic block and get
509 propagate_block to determine which registers are live. */
510 COPY_REG_SET (new_bb->global_live_at_start, bb->global_live_at_end);
511 propagate_block (new_bb, new_bb->global_live_at_start, NULL, NULL, 0);
512 COPY_REG_SET (bb->global_live_at_end,
513 new_bb->global_live_at_start);
514 #ifdef HAVE_conditional_execution
515 /* In the presence of conditional execution we are not able to update
516 liveness precisely. */
517 if (reload_completed)
519 bb->flags |= BB_DIRTY;
520 new_bb->flags |= BB_DIRTY;
522 #endif
525 return new_bb;
528 /* Blocks A and B are to be merged into a single block A. The insns
529 are already contiguous. */
531 static void
532 rtl_merge_blocks (basic_block a, basic_block b)
534 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
535 rtx del_first = NULL_RTX, del_last = NULL_RTX;
536 int b_empty = 0;
538 /* If there was a CODE_LABEL beginning B, delete it. */
539 if (GET_CODE (b_head) == CODE_LABEL)
541 /* Detect basic blocks with nothing but a label. This can happen
542 in particular at the end of a function. */
543 if (b_head == b_end)
544 b_empty = 1;
546 del_first = del_last = b_head;
547 b_head = NEXT_INSN (b_head);
550 /* Delete the basic block note and handle blocks containing just that
551 note. */
552 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
554 if (b_head == b_end)
555 b_empty = 1;
556 if (! del_last)
557 del_first = b_head;
559 del_last = b_head;
560 b_head = NEXT_INSN (b_head);
563 /* If there was a jump out of A, delete it. */
564 if (GET_CODE (a_end) == JUMP_INSN)
566 rtx prev;
568 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
569 if (GET_CODE (prev) != NOTE
570 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
571 || prev == BB_HEAD (a))
572 break;
574 del_first = a_end;
576 #ifdef HAVE_cc0
577 /* If this was a conditional jump, we need to also delete
578 the insn that set cc0. */
579 if (only_sets_cc0_p (prev))
581 rtx tmp = prev;
583 prev = prev_nonnote_insn (prev);
584 if (!prev)
585 prev = BB_HEAD (a);
586 del_first = tmp;
588 #endif
590 a_end = PREV_INSN (del_first);
592 else if (GET_CODE (NEXT_INSN (a_end)) == BARRIER)
593 del_first = NEXT_INSN (a_end);
595 /* Delete everything marked above as well as crap that might be
596 hanging out between the two blocks. */
597 BB_HEAD (b) = NULL;
598 delete_insn_chain (del_first, del_last);
600 /* Reassociate the insns of B with A. */
601 if (!b_empty)
603 rtx x;
605 for (x = a_end; x != b_end; x = NEXT_INSN (x))
606 set_block_for_insn (x, a);
608 set_block_for_insn (b_end, a);
610 a_end = b_end;
613 BB_END (a) = a_end;
616 /* Return true when blocks A and B can be merged. */
617 static bool
618 rtl_can_merge_blocks (basic_block a, basic_block b)
620 bool partitions_ok = true;
622 /* If we are partitioning hot/cold basic blocks, we don't want to
623 mess up unconditional or indirect jumps that cross between hot
624 and cold sections. */
626 if (flag_reorder_blocks_and_partition
627 && (find_reg_note (BB_END (a), REG_CROSSING_JUMP, NULL_RTX)
628 || find_reg_note (BB_END (b), REG_CROSSING_JUMP, NULL_RTX)
629 || a->partition != b->partition))
630 partitions_ok = false;
632 /* There must be exactly one edge in between the blocks. */
633 return (a->succ && !a->succ->succ_next && a->succ->dest == b
634 && !b->pred->pred_next && a != b
635 /* Must be simple edge. */
636 && !(a->succ->flags & EDGE_COMPLEX)
637 && partitions_ok
638 && a->next_bb == b
639 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
640 /* If the jump insn has side effects,
641 we can't kill the edge. */
642 && (GET_CODE (BB_END (a)) != JUMP_INSN
643 || (reload_completed
644 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
647 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
648 exist. */
651 block_label (basic_block block)
653 if (block == EXIT_BLOCK_PTR)
654 return NULL_RTX;
656 if (GET_CODE (BB_HEAD (block)) != CODE_LABEL)
658 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
661 return BB_HEAD (block);
664 /* Attempt to perform edge redirection by replacing a possibly complex jump
665 instruction with an unconditional jump, or by removing the jump completely.
666 This can apply only if all edges now point to the same block. The parameters
667 and return values are equivalent to redirect_edge_and_branch. */
669 bool
670 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
672 basic_block src = e->src;
673 rtx insn = BB_END (src), kill_from;
674 edge tmp;
675 rtx set;
676 int fallthru = 0;
679 /* If we are partitioning hot/cold basic blocks, we don't want to
680 mess up unconditional or indirect jumps that cross between hot
681 and cold sections. */
683 if (flag_reorder_blocks_and_partition
684 && find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX))
685 return false;
687 /* Verify that all targets will be TARGET. */
688 for (tmp = src->succ; tmp; tmp = tmp->succ_next)
689 if (tmp->dest != target && tmp != e)
690 break;
692 if (tmp || !onlyjump_p (insn))
693 return false;
694 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
695 return false;
697 /* Avoid removing branch with side effects. */
698 set = single_set (insn);
699 if (!set || side_effects_p (set))
700 return false;
702 /* In case we zap a conditional jump, we'll need to kill
703 the cc0 setter too. */
704 kill_from = insn;
705 #ifdef HAVE_cc0
706 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
707 kill_from = PREV_INSN (insn);
708 #endif
710 /* See if we can create the fallthru edge. */
711 if (in_cfglayout || can_fallthru (src, target))
713 if (dump_file)
714 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
715 fallthru = 1;
717 /* Selectively unlink whole insn chain. */
718 if (in_cfglayout)
720 rtx insn = src->rbi->footer;
722 delete_insn_chain (kill_from, BB_END (src));
724 /* Remove barriers but keep jumptables. */
725 while (insn)
727 if (GET_CODE (insn) == BARRIER)
729 if (PREV_INSN (insn))
730 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
731 else
732 src->rbi->footer = NEXT_INSN (insn);
733 if (NEXT_INSN (insn))
734 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
736 if (GET_CODE (insn) == CODE_LABEL)
737 break;
738 insn = NEXT_INSN (insn);
741 else
742 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)));
745 /* If this already is simplejump, redirect it. */
746 else if (simplejump_p (insn))
748 if (e->dest == target)
749 return false;
750 if (dump_file)
751 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
752 INSN_UID (insn), e->dest->index, target->index);
753 if (!redirect_jump (insn, block_label (target), 0))
755 if (target == EXIT_BLOCK_PTR)
756 return false;
757 abort ();
761 /* Cannot do anything for target exit block. */
762 else if (target == EXIT_BLOCK_PTR)
763 return false;
765 /* Or replace possibly complicated jump insn by simple jump insn. */
766 else
768 rtx target_label = block_label (target);
769 rtx barrier, label, table;
771 emit_jump_insn_after (gen_jump (target_label), insn);
772 JUMP_LABEL (BB_END (src)) = target_label;
773 LABEL_NUSES (target_label)++;
774 if (dump_file)
775 fprintf (dump_file, "Replacing insn %i by jump %i\n",
776 INSN_UID (insn), INSN_UID (BB_END (src)));
779 delete_insn_chain (kill_from, insn);
781 /* Recognize a tablejump that we are converting to a
782 simple jump and remove its associated CODE_LABEL
783 and ADDR_VEC or ADDR_DIFF_VEC. */
784 if (tablejump_p (insn, &label, &table))
785 delete_insn_chain (label, table);
787 barrier = next_nonnote_insn (BB_END (src));
788 if (!barrier || GET_CODE (barrier) != BARRIER)
789 emit_barrier_after (BB_END (src));
790 else
792 if (barrier != NEXT_INSN (BB_END (src)))
794 /* Move the jump before the barrier so that the notes
795 which originally were, or were created, before the jump table are
796 inside the basic block. */
797 rtx new_insn = BB_END (src);
798 rtx tmp;
800 for (tmp = NEXT_INSN (BB_END (src)); tmp != barrier;
801 tmp = NEXT_INSN (tmp))
802 set_block_for_insn (tmp, src);
804 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
805 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
807 NEXT_INSN (new_insn) = barrier;
808 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
810 PREV_INSN (new_insn) = PREV_INSN (barrier);
811 PREV_INSN (barrier) = new_insn;
816 /* Keep only one edge out and set proper flags. */
817 while (src->succ->succ_next)
818 remove_edge (src->succ);
819 e = src->succ;
820 if (fallthru)
821 e->flags = EDGE_FALLTHRU;
822 else
823 e->flags = 0;
825 e->probability = REG_BR_PROB_BASE;
826 e->count = src->count;
828 /* We don't want a block to end on a line-number note since that has
829 the potential of changing the code between -g and not -g. */
830 while (GET_CODE (BB_END (e->src)) == NOTE
831 && NOTE_LINE_NUMBER (BB_END (e->src)) >= 0)
832 delete_insn (BB_END (e->src));
834 if (e->dest != target)
835 redirect_edge_succ (e, target);
837 return true;
840 /* Return the last LOOP_BEG note appearing after INSN, before the start of
841 the next basic block. Return INSN if there are no such notes.
843 When emitting a jump to redirect a fallthru edge, it should always appear
844 after the LOOP_BEG notes, as the loop optimizer expects a loop to either
845 start with a fallthru edge or with a jump that follows the LOOP_BEG note
846 and jumps to the loop exit test. */
848 static rtx
849 last_loop_beg_note (rtx insn)
851 rtx last = insn;
853 for (insn = NEXT_INSN (insn); insn && GET_CODE (insn) == NOTE
854 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BASIC_BLOCK;
855 insn = NEXT_INSN (insn))
856 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
857 last = insn;
859 return last;
862 /* Redirect edge representing branch of (un)conditional jump or tablejump. */
863 static bool
864 redirect_branch_edge (edge e, basic_block target)
866 rtx tmp;
867 rtx old_label = BB_HEAD (e->dest);
868 basic_block src = e->src;
869 rtx insn = BB_END (src);
871 /* We can only redirect non-fallthru edges of jump insn. */
872 if (e->flags & EDGE_FALLTHRU)
873 return false;
874 else if (GET_CODE (insn) != JUMP_INSN)
875 return false;
877 /* Recognize a tablejump and adjust all matching cases. */
878 if (tablejump_p (insn, NULL, &tmp))
880 rtvec vec;
881 int j;
882 rtx new_label = block_label (target);
884 if (target == EXIT_BLOCK_PTR)
885 return false;
886 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
887 vec = XVEC (PATTERN (tmp), 0);
888 else
889 vec = XVEC (PATTERN (tmp), 1);
891 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
892 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
894 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
895 --LABEL_NUSES (old_label);
896 ++LABEL_NUSES (new_label);
899 /* Handle casesi dispatch insns. */
900 if ((tmp = single_set (insn)) != NULL
901 && SET_DEST (tmp) == pc_rtx
902 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
903 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
904 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
906 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (VOIDmode,
907 new_label);
908 --LABEL_NUSES (old_label);
909 ++LABEL_NUSES (new_label);
912 else
914 /* ??? We could play games with moving the named labels from
915 one basic block to the other in case only one computed_jump is
916 available. */
917 if (computed_jump_p (insn)
918 /* A return instruction can't be redirected. */
919 || returnjump_p (insn))
920 return false;
922 /* If the insn doesn't go where we think, we're confused. */
923 if (JUMP_LABEL (insn) != old_label)
924 abort ();
926 /* If the substitution doesn't succeed, die. This can happen
927 if the back end emitted unrecognizable instructions or if
928 target is exit block on some arches. */
929 if (!redirect_jump (insn, block_label (target), 0))
931 if (target == EXIT_BLOCK_PTR)
932 return false;
933 abort ();
937 if (dump_file)
938 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
939 e->src->index, e->dest->index, target->index);
941 if (e->dest != target)
942 redirect_edge_succ_nodup (e, target);
943 return true;
946 /* Attempt to change code to redirect edge E to TARGET. Don't do so at the
947 expense of adding new instructions or reordering basic blocks.
949 The function can also be called with the edge destination equal to TARGET.
950 In that case it should try the simplifications and do nothing if none is possible.
952 Return true if the transformation succeeded. We still return false in case E
953 already pointed to TARGET but we did not manage to simplify the instruction
954 stream. */
956 static bool
957 rtl_redirect_edge_and_branch (edge e, basic_block target)
959 basic_block src = e->src;
961 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
962 return false;
964 if (e->dest == target)
965 return true;
967 if (try_redirect_by_replacing_jump (e, target, false))
969 src->flags |= BB_DIRTY;
970 return true;
973 if (!redirect_branch_edge (e, target))
974 return false;
976 src->flags |= BB_DIRTY;
977 return true;
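/* Usage sketch (illustrative; E and TARGET stand for any edge and block):
   callers that must not fail first try the cheap redirection and only
   then fall back to the forceful variant, which may create a new jump
   block:

     if (!redirect_edge_and_branch (e, target))
       redirect_edge_and_branch_force (e, target);
*/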
980 /* Like force_nonfallthru below, but additionally performs redirection.
981 Used by redirect_edge_and_branch_force. */
983 basic_block
984 force_nonfallthru_and_redirect (edge e, basic_block target)
986 basic_block jump_block, new_bb = NULL, src = e->src;
987 rtx note;
988 edge new_edge;
989 int abnormal_edge_flags = 0;
991 /* In the case the last instruction is a conditional jump to the next
992 instruction, first redirect the jump itself and then continue
993 by creating a basic block afterwards to redirect the fallthru edge. */
994 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
995 && any_condjump_p (BB_END (e->src))
996 /* When called from cfglayout, fallthru edges do not
997 necessarily go to the next block. */
998 && e->src->next_bb == e->dest
999 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1001 rtx note;
1002 edge b = unchecked_make_edge (e->src, target, 0);
1004 if (!redirect_jump (BB_END (e->src), block_label (target), 0))
1005 abort ();
1006 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1007 if (note)
1009 int prob = INTVAL (XEXP (note, 0));
1011 b->probability = prob;
1012 b->count = e->count * prob / REG_BR_PROB_BASE;
1013 e->probability -= e->probability;
1014 e->count -= b->count;
1015 if (e->probability < 0)
1016 e->probability = 0;
1017 if (e->count < 0)
1018 e->count = 0;
1022 if (e->flags & EDGE_ABNORMAL)
1024 /* Irritating special case - fallthru edge to the same block as an abnormal
1025 edge.
1026 We can't redirect the abnormal edge, but we can still split the fallthru
1027 one and create a separate abnormal edge to the original destination.
1028 This allows bb-reorder to make such an edge non-fallthru. */
1029 if (e->dest != target)
1030 abort ();
1031 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1032 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1034 else if (!(e->flags & EDGE_FALLTHRU))
1035 abort ();
1036 else if (e->src == ENTRY_BLOCK_PTR)
1038 /* We can't redirect the entry block. Create an empty block at the
1039 start of the function which we use to add the new jump. */
1040 edge *pe1;
1041 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1043 /* Change the existing edge's source to be the new block, and add
1044 a new edge from the entry block to the new block. */
1045 e->src = bb;
1046 for (pe1 = &ENTRY_BLOCK_PTR->succ; *pe1; pe1 = &(*pe1)->succ_next)
1047 if (*pe1 == e)
1049 *pe1 = e->succ_next;
1050 break;
1052 e->succ_next = 0;
1053 bb->succ = e;
1054 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1057 if (e->src->succ->succ_next || abnormal_edge_flags)
1059 /* Create the new structures. */
1061 /* If the old block ended with a tablejump, skip its table
1062 by searching forward from there. Otherwise start searching
1063 forward from the last instruction of the old block. */
1064 if (!tablejump_p (BB_END (e->src), NULL, &note))
1065 note = BB_END (e->src);
1067 /* Position the new block correctly relative to loop notes. */
1068 note = last_loop_beg_note (note);
1069 note = NEXT_INSN (note);
1071 jump_block = create_basic_block (note, NULL, e->src);
1072 jump_block->count = e->count;
1073 jump_block->frequency = EDGE_FREQUENCY (e);
1074 jump_block->loop_depth = target->loop_depth;
1076 if (target->global_live_at_start)
1078 jump_block->global_live_at_start
1079 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1080 jump_block->global_live_at_end
1081 = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1082 COPY_REG_SET (jump_block->global_live_at_start,
1083 target->global_live_at_start);
1084 COPY_REG_SET (jump_block->global_live_at_end,
1085 target->global_live_at_start);
1088 /* Make sure new block ends up in correct hot/cold section. */
1090 jump_block->partition = e->src->partition;
1091 if (flag_reorder_blocks_and_partition)
1093 if (e->src->partition == COLD_PARTITION)
1095 rtx bb_note, new_note;
1096 for (bb_note = BB_HEAD (jump_block);
1097 bb_note && bb_note != NEXT_INSN (BB_END (jump_block));
1098 bb_note = NEXT_INSN (bb_note))
1099 if (GET_CODE (bb_note) == NOTE
1100 && NOTE_LINE_NUMBER (bb_note) == NOTE_INSN_BASIC_BLOCK)
1101 break;
1102 new_note = emit_note_after (NOTE_INSN_UNLIKELY_EXECUTED_CODE,
1103 bb_note);
1104 NOTE_BASIC_BLOCK (new_note) = jump_block;
1105 jump_block->partition = COLD_PARTITION;
1107 if (GET_CODE (BB_END (jump_block)) == JUMP_INSN
1108 && !any_condjump_p (BB_END (jump_block))
1109 && jump_block->succ->crossing_edge )
1110 REG_NOTES (BB_END (jump_block)) = gen_rtx_EXPR_LIST
1111 (REG_CROSSING_JUMP, NULL_RTX,
1112 REG_NOTES (BB_END (jump_block)));
1115 /* Wire edge in. */
1116 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1117 new_edge->probability = e->probability;
1118 new_edge->count = e->count;
1120 /* Redirect old edge. */
1121 redirect_edge_pred (e, jump_block);
1122 e->probability = REG_BR_PROB_BASE;
1124 new_bb = jump_block;
1126 else
1127 jump_block = e->src;
1129 e->flags &= ~EDGE_FALLTHRU;
1130 if (target == EXIT_BLOCK_PTR)
1132 #ifdef HAVE_return
1133 emit_jump_insn_after (gen_return (), BB_END (jump_block));
1134 #else
1135 abort ();
1136 #endif
1138 else
1140 rtx label = block_label (target);
1141 emit_jump_insn_after (gen_jump (label), BB_END (jump_block));
1142 JUMP_LABEL (BB_END (jump_block)) = label;
1143 LABEL_NUSES (label)++;
1146 emit_barrier_after (BB_END (jump_block));
1147 redirect_edge_succ_nodup (e, target);
1149 if (abnormal_edge_flags)
1150 make_edge (src, target, abnormal_edge_flags);
1152 return new_bb;
1155 /* Edge E is assumed to be a fallthru edge. Emit the needed jump instruction
1156 (and possibly create a new basic block) to make the edge non-fallthru.
1157 Return the newly created BB, or NULL if none was created. */
1159 basic_block
1160 force_nonfallthru (edge e)
1162 return force_nonfallthru_and_redirect (e, e->dest);
1165 /* Redirect edge even at the expense of creating new jump insn or
1166 basic block. Return new basic block if created, NULL otherwise.
1167 Abort if conversion is impossible. */
1169 static basic_block
1170 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1172 if (redirect_edge_and_branch (e, target)
1173 || e->dest == target)
1174 return NULL;
1176 /* In case the edge redirection failed, try to force it to be non-fallthru
1177 and redirect newly created simplejump. */
1178 return force_nonfallthru_and_redirect (e, target);
1181 /* The given edge should potentially be a fallthru edge. If that is in
1182 fact true, delete the jump and barriers that are in the way. */
1184 static void
1185 rtl_tidy_fallthru_edge (edge e)
1187 rtx q;
1188 basic_block b = e->src, c = b->next_bb;
1190 /* ??? In a late-running flow pass, other folks may have deleted basic
1191 blocks by nopping out blocks, leaving multiple BARRIERs between here
1192 and the target label. They ought to be chastised and fixed.
1194 We can also wind up with a sequence of undeletable labels between
1195 one block and the next.
1197 So search through a sequence of barriers, labels, and notes for
1198 the head of block C and assert that we really do fall through. */
1200 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1201 if (INSN_P (q))
1202 return;
1204 /* Remove what will soon cease being the jump insn from the source block.
1205 If block B consisted only of this single jump, turn it into a deleted
1206 note. */
1207 q = BB_END (b);
1208 if (GET_CODE (q) == JUMP_INSN
1209 && onlyjump_p (q)
1210 && (any_uncondjump_p (q)
1211 || (b->succ == e && e->succ_next == NULL)))
1213 #ifdef HAVE_cc0
1214 /* If this was a conditional jump, we need to also delete
1215 the insn that set cc0. */
1216 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1217 q = PREV_INSN (q);
1218 #endif
1220 q = PREV_INSN (q);
1222 /* We don't want a block to end on a line-number note since that has
1223 the potential of changing the code between -g and not -g. */
1224 while (GET_CODE (q) == NOTE && NOTE_LINE_NUMBER (q) >= 0)
1225 q = PREV_INSN (q);
1228 /* Selectively unlink the sequence. */
1229 if (q != PREV_INSN (BB_HEAD (c)))
1230 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)));
1232 e->flags |= EDGE_FALLTHRU;
1235 /* Helper function for split_edge. Return true in case the edge from BB2 to
1236 BB1 is a back edge of a syntactic loop. */
1238 static bool
1239 back_edge_of_syntactic_loop_p (basic_block bb1, basic_block bb2)
1241 rtx insn;
1242 int count = 0;
1243 basic_block bb;
1245 if (bb1 == bb2)
1246 return true;
1248 /* ??? Could we guarantee that bb indices are monotone, so that we could
1249 just compare them? */
1250 for (bb = bb1; bb && bb != bb2; bb = bb->next_bb)
1251 continue;
1253 if (!bb)
1254 return false;
1256 for (insn = BB_END (bb1); insn != BB_HEAD (bb2) && count >= 0;
1257 insn = NEXT_INSN (insn))
1258 if (GET_CODE (insn) == NOTE)
1260 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
1261 count++;
1262 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
1263 count--;
1266 return count >= 0;
1269 /* Should move basic block BB after basic block AFTER. NIY. */
1271 static bool
1272 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1273 basic_block after ATTRIBUTE_UNUSED)
1275 return false;
1278 /* Split a (typically critical) edge. Return the new block.
1279 Abort on abnormal edges.
1281 ??? The code generally expects to be called on critical edges.
1282 The case of a block ending in an unconditional jump to a
1283 block with multiple predecessors is not handled optimally. */
1285 static basic_block
1286 rtl_split_edge (edge edge_in)
1288 basic_block bb;
1289 rtx before;
1291 /* Abnormal edges cannot be split. */
1292 if ((edge_in->flags & EDGE_ABNORMAL) != 0)
1293 abort ();
1295 /* We are going to place the new block in front of edge destination.
1296 Avoid existence of fallthru predecessors. */
1297 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1299 edge e;
1301 for (e = edge_in->dest->pred; e; e = e->pred_next)
1302 if (e->flags & EDGE_FALLTHRU)
1303 break;
1305 if (e)
1306 force_nonfallthru (e);
1309 /* Create the basic block note.
1311 Where we place the note can have a noticeable impact on the generated
1312 code. Consider a cfg in which block 0 falls into a loop headed by
block 1 (with a back edge returning to block 1) that eventually
exits to E.
1322 If we need to insert an insn on the edge from block 0 to block 1,
1323 we want to ensure the instructions we insert are outside of any
1324 loop notes that physically sit between block 0 and block 1. Otherwise
1325 we confuse the loop optimizer into thinking the loop is a phony. */
1327 if (edge_in->dest != EXIT_BLOCK_PTR
1328 && PREV_INSN (BB_HEAD (edge_in->dest))
1329 && GET_CODE (PREV_INSN (BB_HEAD (edge_in->dest))) == NOTE
1330 && (NOTE_LINE_NUMBER (PREV_INSN (BB_HEAD (edge_in->dest)))
1331 == NOTE_INSN_LOOP_BEG)
1332 && !back_edge_of_syntactic_loop_p (edge_in->dest, edge_in->src))
1333 before = PREV_INSN (BB_HEAD (edge_in->dest));
1334 else if (edge_in->dest != EXIT_BLOCK_PTR)
1335 before = BB_HEAD (edge_in->dest);
1336 else
1337 before = NULL_RTX;
1339 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1341 /* ??? This info is likely going to be out of date very soon. */
1342 if (edge_in->dest->global_live_at_start)
1344 bb->global_live_at_start = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1345 bb->global_live_at_end = OBSTACK_ALLOC_REG_SET (&flow_obstack);
1346 COPY_REG_SET (bb->global_live_at_start,
1347 edge_in->dest->global_live_at_start);
1348 COPY_REG_SET (bb->global_live_at_end,
1349 edge_in->dest->global_live_at_start);
1352 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1354 /* For non-fallthru edges, we must adjust the predecessor's
1355 jump instruction to target our new block. */
1356 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1358 if (!redirect_edge_and_branch (edge_in, bb))
1359 abort ();
1361 else
1362 redirect_edge_succ (edge_in, bb);
1364 return bb;
1367 /* Queue instructions for insertion on an edge between two basic blocks.
1368 The new instructions and basic blocks (if any) will not appear in the
1369 CFG until commit_edge_insertions is called. */
1371 void
1372 insert_insn_on_edge (rtx pattern, edge e)
1374 /* We cannot insert instructions on an abnormal critical edge.
1375 It will be easier to find the culprit if we die now. */
1376 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
1377 abort ();
1379 if (e->insns == NULL_RTX)
1380 start_sequence ();
1381 else
1382 push_to_sequence (e->insns);
1384 emit_insn (pattern);
1386 e->insns = get_insns ();
1387 end_sequence ();
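/* Usage sketch (illustrative; E, DEST_REG and SRC_REG are placeholders):
   a caller builds a sequence, queues it on the edge, and commits all
   queued insertions in one pass once every edge has been processed:

     rtx seq;

     start_sequence ();
     emit_move_insn (dest_reg, src_reg);
     seq = get_insns ();
     end_sequence ();
     insert_insn_on_edge (seq, e);

     ...

     commit_edge_insertions ();
*/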
1390 /* Called from safe_insert_insn_on_edge through note_stores, marks live
1391 registers that are killed by the store. */
1392 static void
1393 mark_killed_regs (rtx reg, rtx set ATTRIBUTE_UNUSED, void *data)
1395 regset killed = data;
1396 int regno, i;
1398 if (GET_CODE (reg) == SUBREG)
1399 reg = SUBREG_REG (reg);
1400 if (!REG_P (reg))
1401 return;
1402 regno = REGNO (reg);
1403 if (regno >= FIRST_PSEUDO_REGISTER)
1404 SET_REGNO_REG_SET (killed, regno);
1405 else
1407 for (i = 0; i < (int) hard_regno_nregs[regno][GET_MODE (reg)]; i++)
1408 SET_REGNO_REG_SET (killed, regno + i);
1412 /* Similar to insert_insn_on_edge; tries to put INSN on edge E. Additionally
1413 it checks whether this would clobber registers that are live on the
1414 edge (i.e. it requires liveness information to be up-to-date), and if
1415 there are some, it tries to save and restore them. Returns true if
1416 successful. */
1417 bool
1418 safe_insert_insn_on_edge (rtx insn, edge e)
1420 rtx x;
1421 regset_head killed_head;
1422 regset killed = INITIALIZE_REG_SET (killed_head);
1423 rtx save_regs = NULL_RTX;
1424 int regno, noccmode;
1425 enum machine_mode mode;
1427 #ifdef AVOID_CCMODE_COPIES
1428 noccmode = true;
1429 #else
1430 noccmode = false;
1431 #endif
1433 for (x = insn; x; x = NEXT_INSN (x))
1434 if (INSN_P (x))
1435 note_stores (PATTERN (x), mark_killed_regs, killed);
1436 bitmap_operation (killed, killed, e->dest->global_live_at_start,
1437 BITMAP_AND);
1439 EXECUTE_IF_SET_IN_REG_SET (killed, 0, regno,
1441 mode = regno < FIRST_PSEUDO_REGISTER
1442 ? reg_raw_mode[regno]
1443 : GET_MODE (regno_reg_rtx[regno]);
1444 if (mode == VOIDmode)
1445 return false;
1447 if (noccmode && mode == CCmode)
1448 return false;
1450 save_regs = alloc_EXPR_LIST (0,
1451 alloc_EXPR_LIST (0,
1452 gen_reg_rtx (mode),
1453 gen_raw_REG (mode, regno)),
1454 save_regs);
1457 if (save_regs)
1459 rtx from, to;
1461 start_sequence ();
1462 for (x = save_regs; x; x = XEXP (x, 1))
1464 from = XEXP (XEXP (x, 0), 1);
1465 to = XEXP (XEXP (x, 0), 0);
1466 emit_move_insn (to, from);
1468 emit_insn (insn);
1469 for (x = save_regs; x; x = XEXP (x, 1))
1471 from = XEXP (XEXP (x, 0), 0);
1472 to = XEXP (XEXP (x, 0), 1);
1473 emit_move_insn (to, from);
1475 insn = get_insns ();
1476 end_sequence ();
1477 free_EXPR_LIST_list (&save_regs);
1479 insert_insn_on_edge (insn, e);
1481 FREE_REG_SET (killed);
1482 return true;
1485 /* Update the CFG for the instructions queued on edge E. */
1487 static void
1488 commit_one_edge_insertion (edge e, int watch_calls)
1490 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1491 basic_block bb = NULL;
1493 /* Pull the insns off the edge now since the edge might go away. */
1494 insns = e->insns;
1495 e->insns = NULL_RTX;
1497 /* Special case -- avoid inserting code between a call and the insn
1498 storing its return value. */
1499 if (watch_calls && (e->flags & EDGE_FALLTHRU) && !e->dest->pred->pred_next
1500 && e->src != ENTRY_BLOCK_PTR
1501 && GET_CODE (BB_END (e->src)) == CALL_INSN)
1503 rtx next = next_nonnote_insn (BB_END (e->src));
1505 after = BB_HEAD (e->dest);
1506 /* The first insn after the call may be a stack pop, skip it. */
1507 while (next
1508 && keep_with_call_p (next))
1510 after = next;
1511 next = next_nonnote_insn (next);
1513 bb = e->dest;
1515 if (!before && !after)
1517 /* Figure out where to put these things. If the destination has
1518 one predecessor, insert there. Except for the exit block. */
1519 if (e->dest->pred->pred_next == NULL && e->dest != EXIT_BLOCK_PTR)
1521 bb = e->dest;
1523 /* Get the location correct wrt a code label, and "nice" wrt
1524 a basic block note, and before everything else. */
1525 tmp = BB_HEAD (bb);
1526 if (GET_CODE (tmp) == CODE_LABEL)
1527 tmp = NEXT_INSN (tmp);
1528 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1529 tmp = NEXT_INSN (tmp);
1530 if (tmp
1531 && GET_CODE (tmp) == NOTE
1532 && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_UNLIKELY_EXECUTED_CODE)
1533 tmp = NEXT_INSN (tmp);
1534 if (tmp == BB_HEAD (bb))
1535 before = tmp;
1536 else if (tmp)
1537 after = PREV_INSN (tmp);
1538 else
1539 after = get_last_insn ();
1542 /* If the source has one successor and the edge is not abnormal,
1543 insert there. Except for the entry block. */
1544 else if ((e->flags & EDGE_ABNORMAL) == 0
1545 && e->src->succ->succ_next == NULL
1546 && e->src != ENTRY_BLOCK_PTR)
1548 bb = e->src;
1550 /* It is possible to have a non-simple jump here. Consider a target
1551 where some forms of unconditional jumps clobber a register. This
1552 happens on the fr30 for example.
1554 We know this block has a single successor, so we can just emit
1555 the queued insns before the jump. */
1556 if (GET_CODE (BB_END (bb)) == JUMP_INSN)
1557 for (before = BB_END (bb);
1558 GET_CODE (PREV_INSN (before)) == NOTE
1559 && NOTE_LINE_NUMBER (PREV_INSN (before)) ==
1560 NOTE_INSN_LOOP_BEG; before = PREV_INSN (before))
1562 else
1564 /* We'd better be fallthru, or we've lost track of what's what. */
1565 if ((e->flags & EDGE_FALLTHRU) == 0)
1566 abort ();
1568 after = BB_END (bb);
1571 /* Otherwise we must split the edge. */
1572 else
1574 bb = split_edge (e);
1575 after = BB_END (bb);
1577 /* If we are partitioning hot/cold basic blocks, we must make sure
1578 that the new basic block ends up in the correct section. */
1580 bb->partition = e->src->partition;
1581 if (flag_reorder_blocks_and_partition
1582 && e->src != ENTRY_BLOCK_PTR
1583 && e->src->partition == COLD_PARTITION)
1585 rtx bb_note, new_note, cur_insn;
1587 bb_note = NULL_RTX;
1588 for (cur_insn = BB_HEAD (bb); cur_insn != NEXT_INSN (BB_END (bb));
1589 cur_insn = NEXT_INSN (cur_insn))
1590 if (GET_CODE (cur_insn) == NOTE
1591 && NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_BASIC_BLOCK)
1593 bb_note = cur_insn;
1594 break;
1597 new_note = emit_note_after (NOTE_INSN_UNLIKELY_EXECUTED_CODE,
1598 bb_note);
1599 NOTE_BASIC_BLOCK (new_note) = bb;
1600 if (GET_CODE (BB_END (bb)) == JUMP_INSN
1601 && !any_condjump_p (BB_END (bb))
1602 && bb->succ->crossing_edge )
1603 REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
1604 (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
1605 if (after == bb_note)
1606 after = new_note;
1611 /* Now that we've found the spot, do the insertion. */
1613 if (before)
1615 emit_insn_before (insns, before);
1616 last = prev_nonnote_insn (before);
1618 else
1619 last = emit_insn_after (insns, after);
1621 if (returnjump_p (last))
1623 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1624 This is not currently a problem because this only happens
1625 for the (single) epilogue, which already has a fallthru edge
1626 to EXIT. */
1628 e = bb->succ;
1629 if (e->dest != EXIT_BLOCK_PTR
1630 || e->succ_next != NULL || (e->flags & EDGE_FALLTHRU) == 0)
1631 abort ();
1633 e->flags &= ~EDGE_FALLTHRU;
1634 emit_barrier_after (last);
1636 if (before)
1637 delete_insn (before);
1639 else if (GET_CODE (last) == JUMP_INSN)
1640 abort ();
1642 /* Mark the basic block for find_sub_basic_blocks. */
1643 bb->aux = &bb->aux;
1646 /* Update the CFG for all queued instructions. */
1648 void
1649 commit_edge_insertions (void)
1651 basic_block bb;
1652 sbitmap blocks;
1653 bool changed = false;
1655 #ifdef ENABLE_CHECKING
1656 verify_flow_info ();
1657 #endif
1659 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1661 edge e, next;
1663 for (e = bb->succ; e; e = next)
1665 next = e->succ_next;
1666 if (e->insns)
1668 changed = true;
1669 commit_one_edge_insertion (e, false);
1674 if (!changed)
1675 return;
1677 blocks = sbitmap_alloc (last_basic_block);
1678 sbitmap_zero (blocks);
1679 FOR_EACH_BB (bb)
1680 if (bb->aux)
1682 SET_BIT (blocks, bb->index);
1683 /* Check for forgotten bb->aux values before commit_edge_insertions
1684 call. */
1685 if (bb->aux != &bb->aux)
1686 abort ();
1687 bb->aux = NULL;
1689 find_many_sub_basic_blocks (blocks);
1690 sbitmap_free (blocks);
1693 /* Update the CFG for all queued instructions, taking special care when
1694 inserting code on edges between a call and the insn storing its return value. */
1696 void
1697 commit_edge_insertions_watch_calls (void)
1699 basic_block bb;
1700 sbitmap blocks;
1701 bool changed = false;
1703 #ifdef ENABLE_CHECKING
1704 verify_flow_info ();
1705 #endif
1707 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1709 edge e, next;
1711 for (e = bb->succ; e; e = next)
1713 next = e->succ_next;
1714 if (e->insns)
1716 changed = true;
1717 commit_one_edge_insertion (e, true);
1722 if (!changed)
1723 return;
1725 blocks = sbitmap_alloc (last_basic_block);
1726 sbitmap_zero (blocks);
1727 FOR_EACH_BB (bb)
1728 if (bb->aux)
1730 SET_BIT (blocks, bb->index);
1731 /* Check for forgotten bb->aux values before commit_edge_insertions
1732 call. */
1733 if (bb->aux != &bb->aux)
1734 abort ();
1735 bb->aux = NULL;
1737 find_many_sub_basic_blocks (blocks);
1738 sbitmap_free (blocks);
1741 /* Print out RTL-specific basic block information (live information
1742 at start and end). */
1744 static void
1745 rtl_dump_bb (basic_block bb, FILE *outf, int indent)
1747 rtx insn;
1748 rtx last;
1749 char *s_indent;
1751 s_indent = alloca ((size_t) indent + 1);
1752 memset (s_indent, ' ', (size_t) indent);
1753 s_indent[indent] = '\0';
1755 fprintf (outf, ";;%s Registers live at start: ", s_indent);
1756 dump_regset (bb->global_live_at_start, outf);
1757 putc ('\n', outf);
1759 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1760 insn = NEXT_INSN (insn))
1761 print_rtl_single (outf, insn);
1763 fprintf (outf, ";;%s Registers live at end: ", s_indent);
1764 dump_regset (bb->global_live_at_end, outf);
1765 putc ('\n', outf);
1768 /* Like print_rtl, but also print out live information for the start of each
1769 basic block. */
1771 void
1772 print_rtl_with_bb (FILE *outf, rtx rtx_first)
1774 rtx tmp_rtx;
1776 if (rtx_first == 0)
1777 fprintf (outf, "(nil)\n");
1778 else
1780 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1781 int max_uid = get_max_uid ();
1782 basic_block *start = xcalloc (max_uid, sizeof (basic_block));
1783 basic_block *end = xcalloc (max_uid, sizeof (basic_block));
1784 enum bb_state *in_bb_p = xcalloc (max_uid, sizeof (enum bb_state));
1786 basic_block bb;
1788 FOR_EACH_BB_REVERSE (bb)
1790 rtx x;
1792 start[INSN_UID (BB_HEAD (bb))] = bb;
1793 end[INSN_UID (BB_END (bb))] = bb;
1794 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1796 enum bb_state state = IN_MULTIPLE_BB;
1798 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1799 state = IN_ONE_BB;
1800 in_bb_p[INSN_UID (x)] = state;
1802 if (x == BB_END (bb))
1803 break;
1807 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1809 int did_output;
1811 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1813 fprintf (outf, ";; Start of basic block %d, registers live:",
1814 bb->index);
1815 dump_regset (bb->global_live_at_start, outf);
1816 putc ('\n', outf);
1819 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1820 && GET_CODE (tmp_rtx) != NOTE
1821 && GET_CODE (tmp_rtx) != BARRIER)
1822 fprintf (outf, ";; Insn is not within a basic block\n");
1823 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1824 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1826 did_output = print_rtl_single (outf, tmp_rtx);
1828 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1830 fprintf (outf, ";; End of basic block %d, registers live:\n",
1831 bb->index);
1832 dump_regset (bb->global_live_at_end, outf);
1833 putc ('\n', outf);
1836 if (did_output)
1837 putc ('\n', outf);
1840 free (start);
1841 free (end);
1842 free (in_bb_p);
1845 if (current_function_epilogue_delay_list != 0)
1847 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1848 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1849 tmp_rtx = XEXP (tmp_rtx, 1))
1850 print_rtl_single (outf, XEXP (tmp_rtx, 0));
1854 void
1855 update_br_prob_note (basic_block bb)
1857 rtx note;
1858 if (GET_CODE (BB_END (bb)) != JUMP_INSN)
1859 return;
1860 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1861 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1862 return;
1863 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1866 /* Verify the CFG and RTL consistency common for both underlying RTL and
1867 cfglayout RTL.
1869 Currently it does the following checks:
1871 - test head/end pointers
1872 - overlapping of basic blocks
1873 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1874 - tails of basic blocks (ensure that boundary is necessary)
1875 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1876 and NOTE_INSN_BASIC_BLOCK
1877 - verify that no fall_thru edge crosses hot/cold partition boundaries
1879 In the future it can be extended to check a lot of other stuff as well
1880 (reachability of basic blocks, life information, etc.). */
1882 static int
1883 rtl_verify_flow_info_1 (void)
1885 const int max_uid = get_max_uid ();
1886 rtx last_head = get_last_insn ();
1887 basic_block *bb_info;
1888 rtx x;
1889 int err = 0;
1890 basic_block bb, last_bb_seen;
1892 bb_info = xcalloc (max_uid, sizeof (basic_block));
1894 /* Check bb chain & numbers. */
1895 last_bb_seen = ENTRY_BLOCK_PTR;
1897 FOR_EACH_BB_REVERSE (bb)
1899 rtx head = BB_HEAD (bb);
1900 rtx end = BB_END (bb);
1902 /* Verify the end of the basic block is in the INSN chain. */
1903 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1904 if (x == end)
1905 break;
1907 if (!x)
1909 error ("end insn %d for block %d not found in the insn stream",
1910 INSN_UID (end), bb->index);
1911 err = 1;
1914 /* Work backwards from the end to the head of the basic block
1915 to verify the head is in the RTL chain. */
1916 for (; x != NULL_RTX; x = PREV_INSN (x))
1918 /* While walking over the insn chain, verify insns appear
1919 in only one basic block and initialize the BB_INFO array
1920 used by other passes. */
1921 if (bb_info[INSN_UID (x)] != NULL)
1923 error ("insn %d is in multiple basic blocks (%d and %d)",
1924 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1925 err = 1;
1928 bb_info[INSN_UID (x)] = bb;
1930 if (x == head)
1931 break;
1933 if (!x)
1935 error ("head insn %d for block %d not found in the insn stream",
1936 INSN_UID (head), bb->index);
1937 err = 1;
1940 last_head = x;
1943 /* Now check the basic blocks (boundaries etc.) */
1944 FOR_EACH_BB_REVERSE (bb)
1946 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1947 edge e, fallthru = NULL;
1948 rtx note;
1950 if (INSN_P (BB_END (bb))
1951 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
1952 && bb->succ && bb->succ->succ_next
1953 && any_condjump_p (BB_END (bb)))
1955 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability)
1957 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
1958 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1959 err = 1;
1962 for (e = bb->succ; e; e = e->succ_next)
1964 if (e->flags & EDGE_FALLTHRU)
1966 n_fallthru++, fallthru = e;
1967 if (e->crossing_edge)
1969 error ("Fallthru edge crosses section boundary (bb %i)",
1970 e->src->index);
1971 err = 1;
1975 if ((e->flags & ~(EDGE_DFS_BACK
1976 | EDGE_CAN_FALLTHRU
1977 | EDGE_IRREDUCIBLE_LOOP
1978 | EDGE_LOOP_EXIT)) == 0)
1979 n_branch++;
1981 if (e->flags & EDGE_ABNORMAL_CALL)
1982 n_call++;
1984 if (e->flags & EDGE_EH)
1985 n_eh++;
1986 else if (e->flags & EDGE_ABNORMAL)
1987 n_abnormal++;
1990 if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
1991 && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
1993 error ("Missing REG_EH_REGION note in the end of bb %i", bb->index);
1994 err = 1;
1996 if (n_branch
1997 && (GET_CODE (BB_END (bb)) != JUMP_INSN
1998 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
1999 || any_condjump_p (BB_END (bb))))))
2001 error ("Too many outgoing branch edges from bb %i", bb->index);
2002 err = 1;
2004 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2006 error ("Fallthru edge after unconditional jump %i", bb->index);
2007 err = 1;
2009 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2011 error ("Wrong amount of branch edges after unconditional jump %i", bb->index);
2012 err = 1;
2014 if (n_branch != 1 && any_condjump_p (BB_END (bb))
2015 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2017 error ("Wrong amount of branch edges after conditional jump %i", bb->index);
2018 err = 1;
2020 if (n_call && GET_CODE (BB_END (bb)) != CALL_INSN)
2022 error ("Call edges for non-call insn in bb %i", bb->index);
2023 err = 1;
2025 if (n_abnormal
2026 && (GET_CODE (BB_END (bb)) != CALL_INSN && n_call != n_abnormal)
2027 && (GET_CODE (BB_END (bb)) != JUMP_INSN
2028 || any_condjump_p (BB_END (bb))
2029 || any_uncondjump_p (BB_END (bb))))
2031 error ("Abnormal edges for no purpose in bb %i", bb->index);
2032 err = 1;
2035 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
2036 if (BLOCK_FOR_INSN (x) != bb)
2038 debug_rtx (x);
2039 if (! BLOCK_FOR_INSN (x))
2040 error
2041 ("insn %d inside basic block %d but block_for_insn is NULL",
2042 INSN_UID (x), bb->index);
2043 else
2044 error
2045 ("insn %d inside basic block %d but block_for_insn is %i",
2046 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
2048 err = 1;
2051 /* OK, the pointers are correct. Now check the header of the basic
2052 block. It ought to contain an optional CODE_LABEL followed
2053 by a NOTE_BASIC_BLOCK. */
2054 x = BB_HEAD (bb);
2055 if (GET_CODE (x) == CODE_LABEL)
2057 if (BB_END (bb) == x)
2059 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2060 bb->index);
2061 err = 1;
2064 x = NEXT_INSN (x);
2067 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2069 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2070 bb->index);
2071 err = 1;
2074 if (BB_END (bb) == x)
2075 /* Do checks for empty blocks here. */
2077 else
2078 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2080 if (NOTE_INSN_BASIC_BLOCK_P (x))
2082 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2083 INSN_UID (x), bb->index);
2084 err = 1;
2087 if (x == BB_END (bb))
2088 break;
2090 if (control_flow_insn_p (x))
2092 error ("in basic block %d:", bb->index);
2093 fatal_insn ("flow control insn inside a basic block", x);
2098 /* Clean up. */
2099 free (bb_info);
2100 return err;
2103 /* Verify the CFG and RTL consistency common for both underlying RTL and
2104 cfglayout RTL.
2106 Currently it does the following checks:
2107 - all checks of rtl_verify_flow_info_1
2108 - check that all insns are in the basic blocks
2109 (except the switch handling code, barriers and notes)
2110 - check that all returns are followed by barriers
2111 - check that all fallthru edges point to the adjacent blocks. */
2112 static int
2113 rtl_verify_flow_info (void)
2115 basic_block bb;
2116 int err = rtl_verify_flow_info_1 ();
2117 rtx x;
2118 int num_bb_notes;
2119 const rtx rtx_first = get_insns ();
2120 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
2122 FOR_EACH_BB_REVERSE (bb)
2124 edge e;
2125 for (e = bb->succ; e; e = e->succ_next)
2126 if (e->flags & EDGE_FALLTHRU)
2127 break;
2128 if (!e)
2130 rtx insn;
2132 /* Ensure existence of barrier in BB with no fallthru edges. */
2133 for (insn = BB_END (bb); !insn || GET_CODE (insn) != BARRIER;
2134 insn = NEXT_INSN (insn))
2135 if (!insn
2136 || (GET_CODE (insn) == NOTE
2137 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
2139 error ("missing barrier after block %i", bb->index);
2140 err = 1;
2141 break;
2144 else if (e->src != ENTRY_BLOCK_PTR
2145 && e->dest != EXIT_BLOCK_PTR)
2147 rtx insn;
2149 if (e->src->next_bb != e->dest)
2151 error
2152 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2153 e->src->index, e->dest->index);
2154 err = 1;
2156 else
2157 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2158 insn = NEXT_INSN (insn))
2159 if (GET_CODE (insn) == BARRIER
2160 #ifndef CASE_DROPS_THROUGH
2161 || INSN_P (insn)
2162 #else
2163 || (INSN_P (insn) && ! JUMP_TABLE_DATA_P (insn))
2164 #endif
2167 error ("verify_flow_info: Incorrect fallthru %i->%i",
2168 e->src->index, e->dest->index);
2169 fatal_insn ("wrong insn in the fallthru edge", insn);
2170 err = 1;
2175 num_bb_notes = 0;
2176 last_bb_seen = ENTRY_BLOCK_PTR;
2178 for (x = rtx_first; x; x = NEXT_INSN (x))
2180 if (NOTE_INSN_BASIC_BLOCK_P (x))
2182 bb = NOTE_BASIC_BLOCK (x);
2184 num_bb_notes++;
2185 if (bb != last_bb_seen->next_bb)
2186 internal_error ("basic blocks not laid down consecutively");
2188 curr_bb = last_bb_seen = bb;
2191 if (!curr_bb)
2193 switch (GET_CODE (x))
2195 case BARRIER:
2196 case NOTE:
2197 break;
2199 case CODE_LABEL:
2200 /* An addr_vec is placed outside any basic block. */
2201 if (NEXT_INSN (x)
2202 && GET_CODE (NEXT_INSN (x)) == JUMP_INSN
2203 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2204 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
2205 x = NEXT_INSN (x);
2207 /* But in any case, non-deletable labels can appear anywhere. */
2208 break;
2210 default:
2211 fatal_insn ("insn outside basic block", x);
2215 if (INSN_P (x)
2216 && GET_CODE (x) == JUMP_INSN
2217 && returnjump_p (x) && ! condjump_p (x)
2218 && ! (NEXT_INSN (x) && GET_CODE (NEXT_INSN (x)) == BARRIER))
2219 fatal_insn ("return not followed by barrier", x);
2220 if (curr_bb && x == BB_END (curr_bb))
2221 curr_bb = NULL;
2224 if (num_bb_notes != n_basic_blocks)
2225 internal_error
2226 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2227 num_bb_notes, n_basic_blocks);
2229 return err;
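/* Editor's note -- illustrative sketch, not part of the original file.
   rtl_verify_flow_info is normally reached through the generic
   verify_flow_info entry point in cfghooks.c (installed via the hook
   tables at the end of this file) rather than being called directly.
   A hypothetical, checking-only call site could look like this:  */
#if 0
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
#endif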
2232 /* Assume that the preceding pass has possibly eliminated jump instructions
2233 or converted the unconditional jumps. Eliminate the corresponding edges from the CFG.
2234 Return true if any edges were eliminated. */
2236 bool
2237 purge_dead_edges (basic_block bb)
2239 edge e, next;
2240 rtx insn = BB_END (bb), note;
2241 bool purged = false;
2243 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2244 if (GET_CODE (insn) == INSN
2245 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2247 rtx eqnote;
2249 if (! may_trap_p (PATTERN (insn))
2250 || ((eqnote = find_reg_equal_equiv_note (insn))
2251 && ! may_trap_p (XEXP (eqnote, 0))))
2252 remove_note (insn, note);
2255 /* Clean up abnormal edges caused by exceptions or non-local gotos. */
2256 for (e = bb->succ; e; e = next)
2258 next = e->succ_next;
2259 if (e->flags & EDGE_EH)
2261 if (can_throw_internal (BB_END (bb)))
2262 continue;
2264 else if (e->flags & EDGE_ABNORMAL_CALL)
2266 if (GET_CODE (BB_END (bb)) == CALL_INSN
2267 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2268 || INTVAL (XEXP (note, 0)) >= 0))
2269 continue;
2271 else
2272 continue;
2274 remove_edge (e);
2275 bb->flags |= BB_DIRTY;
2276 purged = true;
2279 if (GET_CODE (insn) == JUMP_INSN)
2281 rtx note;
2282 edge b,f;
2284 /* We care only about conditional jumps and simplejumps. */
2285 if (!any_condjump_p (insn)
2286 && !returnjump_p (insn)
2287 && !simplejump_p (insn))
2288 return purged;
2290 /* Branch probability/prediction notes are defined only for
2291 condjumps. We may have turned a condjump into a simplejump. */
2292 if (simplejump_p (insn))
2294 note = find_reg_note (insn, REG_BR_PROB, NULL);
2295 if (note)
2296 remove_note (insn, note);
2297 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2298 remove_note (insn, note);
2301 for (e = bb->succ; e; e = next)
2303 next = e->succ_next;
2305 /* Do not let abnormal flags leak from computed jumps turned
2306 into simplejumps. */
2308 e->flags &= ~EDGE_ABNORMAL;
2310 /* See if this edge is one we should keep. */
2311 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2312 /* A conditional jump can fall through into the next
2313 block, so we should keep the edge. */
2314 continue;
2315 else if (e->dest != EXIT_BLOCK_PTR
2316 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2317 /* If the destination block is the target of the jump,
2318 keep the edge. */
2319 continue;
2320 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2321 /* If the destination block is the exit block, and this
2322 instruction is a return, then keep the edge. */
2323 continue;
2324 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2325 /* Keep the edges that correspond to exceptions thrown by
2326 this instruction and rematerialize the EDGE_ABNORMAL
2327 flag we just cleared above. */
2329 e->flags |= EDGE_ABNORMAL;
2330 continue;
2333 /* We do not need this edge. */
2334 bb->flags |= BB_DIRTY;
2335 purged = true;
2336 remove_edge (e);
2339 if (!bb->succ || !purged)
2340 return purged;
2342 if (dump_file)
2343 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2345 if (!optimize)
2346 return purged;
2348 /* Redistribute probabilities. */
2349 if (!bb->succ->succ_next)
2351 bb->succ->probability = REG_BR_PROB_BASE;
2352 bb->succ->count = bb->count;
2354 else
2356 note = find_reg_note (insn, REG_BR_PROB, NULL);
2357 if (!note)
2358 return purged;
2360 b = BRANCH_EDGE (bb);
2361 f = FALLTHRU_EDGE (bb);
2362 b->probability = INTVAL (XEXP (note, 0));
2363 f->probability = REG_BR_PROB_BASE - b->probability;
2364 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2365 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
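/* Editor's note -- worked example with assumed numbers, not from the
   original file: with REG_BR_PROB_BASE == 10000, a REG_BR_PROB note
   value of 9000 and bb->count == 200, the branch edge gets probability
   9000 and count 200 * 9000 / 10000 == 180, while the fallthru edge
   gets the remaining probability 1000 and count 20.  */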
2368 return purged;
2370 else if (GET_CODE (insn) == CALL_INSN && SIBLING_CALL_P (insn))
2372 /* First, there should not be any EH or ABCALL edges resulting
2373 from non-local gotos and the like. If there were, we shouldn't
2374 have created the sibcall in the first place. Second, there
2375 should of course never have been a fallthru edge. */
2376 if (!bb->succ || bb->succ->succ_next)
2377 abort ();
2378 if (bb->succ->flags != (EDGE_SIBCALL | EDGE_ABNORMAL))
2379 abort ();
2381 return 0;
2384 /* If we don't see a jump insn, we don't know exactly why the block would
2385 have been broken at this point. Look for a simple, non-fallthru edge,
2386 as these are only created by conditional branches. If we find such an
2387 edge we know that there used to be a jump here and can then safely
2388 remove all non-fallthru edges. */
2389 for (e = bb->succ; e && (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU));
2390 e = e->succ_next)
2393 if (!e)
2394 return purged;
2396 for (e = bb->succ; e; e = next)
2398 next = e->succ_next;
2399 if (!(e->flags & EDGE_FALLTHRU))
2401 bb->flags |= BB_DIRTY;
2402 remove_edge (e);
2403 purged = true;
2407 if (!bb->succ || bb->succ->succ_next)
2408 abort ();
2410 bb->succ->probability = REG_BR_PROB_BASE;
2411 bb->succ->count = bb->count;
2413 if (dump_file)
2414 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2415 bb->index);
2416 return purged;
2419 /* Search all basic blocks for potentially dead edges and purge them. Return
2420 true if some edge has been eliminated. */
2422 bool
2423 purge_all_dead_edges (int update_life_p)
2425 int purged = false;
2426 sbitmap blocks = 0;
2427 basic_block bb;
2429 if (update_life_p)
2431 blocks = sbitmap_alloc (last_basic_block);
2432 sbitmap_zero (blocks);
2435 FOR_EACH_BB (bb)
2437 bool purged_here = purge_dead_edges (bb);
2439 purged |= purged_here;
2440 if (purged_here && update_life_p)
2441 SET_BIT (blocks, bb->index);
2444 if (update_life_p && purged)
2445 update_life_info (blocks, UPDATE_LIFE_GLOBAL,
2446 PROP_DEATH_NOTES | PROP_SCAN_DEAD_CODE
2447 | PROP_KILL_DEAD_CODE);
2449 if (update_life_p)
2450 sbitmap_free (blocks);
2451 return purged;
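/* Editor's note -- illustrative sketch, not part of the original file.
   A hypothetical pass that has just simplified jumps might clean up the
   CFG like this, passing a nonzero argument so that life information is
   updated for the purged blocks (see update_life_info above):  */
#if 0
  if (purge_all_dead_edges (1))
    cleanup_cfg (CLEANUP_EXPENSIVE);
#endif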
2454 /* Same as split_block but update cfg_layout structures. */
2456 static basic_block
2457 cfg_layout_split_block (basic_block bb, void *insnp)
2459 rtx insn = insnp;
2460 basic_block new_bb = rtl_split_block (bb, insn);
2462 new_bb->rbi->footer = bb->rbi->footer;
2463 bb->rbi->footer = NULL;
2465 return new_bb;
2469 /* Redirect edge E to DEST. */
2470 static bool
2471 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
2473 basic_block src = e->src;
2474 bool ret;
2476 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
2477 return false;
2479 if (e->dest == dest)
2480 return true;
2482 if (e->src != ENTRY_BLOCK_PTR
2483 && try_redirect_by_replacing_jump (e, dest, true))
2485 src->flags |= BB_DIRTY;
2486 return true;
2489 if (e->src == ENTRY_BLOCK_PTR
2490 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
2492 if (dump_file)
2493 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
2494 e->src->index, dest->index);
2496 e->src->flags |= BB_DIRTY;
2497 redirect_edge_succ (e, dest);
2498 return true;
2501 /* Redirect_edge_and_branch may decide to turn the branch into a fallthru edge
2502 in case the basic blocks appear to be in sequence. Avoid this
2503 transformation. */
2505 if (e->flags & EDGE_FALLTHRU)
2507 /* Redirect any branch edges unified with the fallthru one. */
2508 if (GET_CODE (BB_END (src)) == JUMP_INSN
2509 && label_is_jump_target_p (BB_HEAD (e->dest),
2510 BB_END (src)))
2512 if (dump_file)
2513 fprintf (dump_file, "Fallthru edge unified with branch "
2514 "%i->%i redirected to %i\n",
2515 e->src->index, e->dest->index, dest->index);
2516 e->flags &= ~EDGE_FALLTHRU;
2517 if (!redirect_branch_edge (e, dest))
2518 abort ();
2519 e->flags |= EDGE_FALLTHRU;
2520 e->src->flags |= BB_DIRTY;
2521 return true;
2523 /* In case we are redirecting the fallthru edge to the branch edge
2524 of a conditional jump, remove it. */
2525 if (src->succ->succ_next
2526 && !src->succ->succ_next->succ_next)
2528 edge s = e->succ_next ? e->succ_next : src->succ;
2529 if (s->dest == dest
2530 && any_condjump_p (BB_END (src))
2531 && onlyjump_p (BB_END (src)))
2532 delete_insn (BB_END (src));
2535 if (dump_file)
2536 fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
2537 e->src->index, e->dest->index, dest->index);
2538 redirect_edge_succ_nodup (e, dest);
2540 ret = true;
2542 else
2543 ret = redirect_branch_edge (e, dest);
2545 /* We don't want simplejumps in the insn stream during cfglayout. */
2546 if (simplejump_p (BB_END (src)))
2547 abort ();
2549 src->flags |= BB_DIRTY;
2550 return ret;
2553 /* Simple wrapper as we can always redirect fallthru edges. */
2554 static basic_block
2555 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
2557 if (!cfg_layout_redirect_edge_and_branch (e, dest))
2558 abort ();
2559 return NULL;
2562 /* Same as delete_basic_block but update cfg_layout structures. */
2564 static void
2565 cfg_layout_delete_block (basic_block bb)
2567 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remains;
2569 if (bb->rbi->header)
2571 next = BB_HEAD (bb);
2572 if (prev)
2573 NEXT_INSN (prev) = bb->rbi->header;
2574 else
2575 set_first_insn (bb->rbi->header);
2576 PREV_INSN (bb->rbi->header) = prev;
2577 insn = bb->rbi->header;
2578 while (NEXT_INSN (insn))
2579 insn = NEXT_INSN (insn);
2580 NEXT_INSN (insn) = next;
2581 PREV_INSN (next) = insn;
2583 next = NEXT_INSN (BB_END (bb));
2584 if (bb->rbi->footer)
2586 insn = bb->rbi->footer;
2587 while (insn)
2589 if (GET_CODE (insn) == BARRIER)
2591 if (PREV_INSN (insn))
2592 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2593 else
2594 bb->rbi->footer = NEXT_INSN (insn);
2595 if (NEXT_INSN (insn))
2596 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2598 if (GET_CODE (insn) == CODE_LABEL)
2599 break;
2600 insn = NEXT_INSN (insn);
2602 if (bb->rbi->footer)
2604 insn = BB_END (bb);
2605 NEXT_INSN (insn) = bb->rbi->footer;
2606 PREV_INSN (bb->rbi->footer) = insn;
2607 while (NEXT_INSN (insn))
2608 insn = NEXT_INSN (insn);
2609 NEXT_INSN (insn) = next;
2610 if (next)
2611 PREV_INSN (next) = insn;
2612 else
2613 set_last_insn (insn);
2616 if (bb->next_bb != EXIT_BLOCK_PTR)
2617 to = &bb->next_bb->rbi->header;
2618 else
2619 to = &cfg_layout_function_footer;
2620 rtl_delete_block (bb);
2622 if (prev)
2623 prev = NEXT_INSN (prev);
2624 else
2625 prev = get_insns ();
2626 if (next)
2627 next = PREV_INSN (next);
2628 else
2629 next = get_last_insn ();
2631 if (next && NEXT_INSN (next) != prev)
2633 remains = unlink_insn_chain (prev, next);
2634 insn = remains;
2635 while (NEXT_INSN (insn))
2636 insn = NEXT_INSN (insn);
2637 NEXT_INSN (insn) = *to;
2638 if (*to)
2639 PREV_INSN (*to) = insn;
2640 *to = remains;
2644 /* Return true when blocks A and B can be safely merged. */
2645 static bool
2646 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2648 bool partitions_ok = true;
2650 /* If we are partitioning hot/cold basic blocks, we don't want to
2651 mess up unconditional or indirect jumps that cross between hot
2652 and cold sections. */
2654 if (flag_reorder_blocks_and_partition
2655 && (find_reg_note (BB_END (a), REG_CROSSING_JUMP, NULL_RTX)
2656 || find_reg_note (BB_END (b), REG_CROSSING_JUMP, NULL_RTX)
2657 || a->partition != b->partition))
2658 partitions_ok = false;
2660 /* There must be exactly one edge in between the blocks. */
2661 return (a->succ && !a->succ->succ_next && a->succ->dest == b
2662 && !b->pred->pred_next && a != b
2663 /* Must be simple edge. */
2664 && !(a->succ->flags & EDGE_COMPLEX)
2665 && partitions_ok
2666 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2667 /* If the jump insn has side effects,
2668 we can't kill the edge. */
2669 && (GET_CODE (BB_END (a)) != JUMP_INSN
2670 || (reload_completed
2671 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
2674 /* Merge blocks A and B; abort when it is not possible. */
2675 static void
2676 cfg_layout_merge_blocks (basic_block a, basic_block b)
2678 #ifdef ENABLE_CHECKING
2679 if (!cfg_layout_can_merge_blocks_p (a, b))
2680 abort ();
2681 #endif
2683 /* If there was a CODE_LABEL beginning B, delete it. */
2684 if (GET_CODE (BB_HEAD (b)) == CODE_LABEL)
2685 delete_insn (BB_HEAD (b));
2687 /* We should have a fallthru edge in A, or we can do a dummy redirection to get
2688 it cleaned up. */
2689 if (GET_CODE (BB_END (a)) == JUMP_INSN)
2690 try_redirect_by_replacing_jump (a->succ, b, true);
2691 if (GET_CODE (BB_END (a)) == JUMP_INSN)
2692 abort ();
2694 /* Possible line number notes should appear in between. */
2695 if (b->rbi->header)
2697 rtx first = BB_END (a), last;
2699 last = emit_insn_after (b->rbi->header, BB_END (a));
2700 delete_insn_chain (NEXT_INSN (first), last);
2701 b->rbi->header = NULL;
2704 /* If the basic blocks are not adjacent, move them around. */
2705 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
2707 rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));
2709 emit_insn_after (first, BB_END (a));
2710 /* Skip possible DELETED_LABEL insn. */
2711 if (!NOTE_INSN_BASIC_BLOCK_P (first))
2712 first = NEXT_INSN (first);
2713 if (!NOTE_INSN_BASIC_BLOCK_P (first))
2714 abort ();
2715 BB_HEAD (b) = NULL;
2716 delete_insn (first);
2718 /* Otherwise just re-associate the instructions. */
2719 else
2721 rtx insn;
2723 for (insn = BB_HEAD (b);
2724 insn != NEXT_INSN (BB_END (b));
2725 insn = NEXT_INSN (insn))
2726 set_block_for_insn (insn, a);
2727 insn = BB_HEAD (b);
2728 /* Skip possible DELETED_LABEL insn. */
2729 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2730 insn = NEXT_INSN (insn);
2731 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2732 abort ();
2733 BB_HEAD (b) = NULL;
2734 BB_END (a) = BB_END (b);
2735 delete_insn (insn);
2738 /* Possible tablejumps and barriers should appear after the block. */
2739 if (b->rbi->footer)
2741 if (!a->rbi->footer)
2742 a->rbi->footer = b->rbi->footer;
2743 else
2745 rtx last = a->rbi->footer;
2747 while (NEXT_INSN (last))
2748 last = NEXT_INSN (last);
2749 NEXT_INSN (last) = b->rbi->footer;
2750 PREV_INSN (b->rbi->footer) = last;
2752 b->rbi->footer = NULL;
2755 if (dump_file)
2756 fprintf (dump_file, "Merged blocks %d and %d.\n",
2757 a->index, b->index);
2760 /* Split edge E. */
2762 static basic_block
2763 cfg_layout_split_edge (edge e)
2765 edge new_e;
2766 basic_block new_bb =
2767 create_basic_block (e->src != ENTRY_BLOCK_PTR
2768 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
2769 NULL_RTX, e->src);
2771 new_e = make_edge (new_bb, e->dest, EDGE_FALLTHRU);
2772 redirect_edge_and_branch_force (e, new_bb);
2774 return new_bb;
2777 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
2779 static void
2780 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
2784 /* Implementation of CFG manipulation for linearized RTL. */
2785 struct cfg_hooks rtl_cfg_hooks = {
2786 "rtl",
2787 rtl_verify_flow_info,
2788 rtl_dump_bb,
2789 rtl_create_basic_block,
2790 rtl_redirect_edge_and_branch,
2791 rtl_redirect_edge_and_branch_force,
2792 rtl_delete_block,
2793 rtl_split_block,
2794 rtl_move_block_after,
2795 rtl_can_merge_blocks, /* can_merge_blocks_p */
2796 rtl_merge_blocks,
2797 rtl_split_edge,
2798 rtl_make_forwarder_block,
2799 rtl_tidy_fallthru_edge
2802 /* Implementation of CFG manipulation for cfg layout RTL, where
2803 basic blocks connected via fallthru edges do not have to be adjacent.
2804 This representation will hopefully become the default one in a future
2805 version of the compiler. */
2806 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
2807 "cfglayout mode",
2808 rtl_verify_flow_info_1,
2809 rtl_dump_bb,
2810 cfg_layout_create_basic_block,
2811 cfg_layout_redirect_edge_and_branch,
2812 cfg_layout_redirect_edge_and_branch_force,
2813 cfg_layout_delete_block,
2814 cfg_layout_split_block,
2815 rtl_move_block_after,
2816 cfg_layout_can_merge_blocks_p,
2817 cfg_layout_merge_blocks,
2818 cfg_layout_split_edge,
2819 rtl_make_forwarder_block,
2820 NULL
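/* Editor's note -- illustrative sketch, not part of the original file.
   Passes select between the two hook tables above through the small
   registration helpers declared in cfghooks.h; the calls below are an
   assumed usage pattern, not code taken from this file:  */
#if 0
  cfg_layout_rtl_register_cfg_hooks ();	/* operate on cfglayout-mode RTL */
  /* ... CFG transformations ... */
  rtl_register_cfg_hooks ();		/* back to plain (linearized) RTL */
#endif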