Add files that I missed when importing NaCl changes earlier
[gcc/nacl-gcc.git] / gcc / cfgrtl.c
blob47b0913cdffdb0853d626bd6df31cc9c0b47867e
1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This file contains low level functions to manipulate the CFG and analyze it
22 that are aware of the RTL intermediate language.
24 Available functionality:
25 - Basic CFG/RTL manipulation API documented in cfghooks.h
26 - CFG-aware instruction chain manipulation
27 delete_insn, delete_insn_chain
28 - Edge splitting and committing to edges
29 insert_insn_on_edge, commit_edge_insertions
30 - CFG updating after insn simplification
31 purge_dead_edges, purge_all_dead_edges
33 Functions not supposed for generic use:
34 - Infrastructure to determine quickly basic block for insn
35 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
36 - Edge redirection with updating and optimizing of insn chain
37 block_label, tidy_fallthru_edge, force_nonfallthru */
39 #include "config.h"
40 #include "system.h"
41 #include "coretypes.h"
42 #include "tm.h"
43 #include "tree.h"
44 #include "rtl.h"
45 #include "hard-reg-set.h"
46 #include "basic-block.h"
47 #include "regs.h"
48 #include "flags.h"
49 #include "output.h"
50 #include "function.h"
51 #include "except.h"
52 #include "toplev.h"
53 #include "tm_p.h"
54 #include "obstack.h"
55 #include "insn-config.h"
56 #include "cfglayout.h"
57 #include "expr.h"
58 #include "target.h"
59 #include "cfgloop.h"
60 #include "ggc.h"
61 #include "tree-pass.h"
63 static int can_delete_note_p (rtx);
64 static int can_delete_label_p (rtx);
65 static void commit_one_edge_insertion (edge, int);
66 static basic_block rtl_split_edge (edge);
67 static bool rtl_move_block_after (basic_block, basic_block);
68 static int rtl_verify_flow_info (void);
69 static basic_block cfg_layout_split_block (basic_block, void *);
70 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
71 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
72 static void cfg_layout_delete_block (basic_block);
73 static void rtl_delete_block (basic_block);
74 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
75 static edge rtl_redirect_edge_and_branch (edge, basic_block);
76 static basic_block rtl_split_block (basic_block, void *);
77 static void rtl_dump_bb (basic_block, FILE *, int);
78 static int rtl_verify_flow_info_1 (void);
79 static void rtl_make_forwarder_block (edge);
81 /* Return true if NOTE is not one of the ones that must be kept paired,
82 so that we may simply delete it. */
84 static int
85 can_delete_note_p (rtx note)
87 return (NOTE_LINE_NUMBER (note) == NOTE_INSN_DELETED
88 || NOTE_LINE_NUMBER (note) == NOTE_INSN_BASIC_BLOCK);
91 /* True if a given label can be deleted. */
93 static int
94 can_delete_label_p (rtx label)
96 return (!LABEL_PRESERVE_P (label)
97 /* User declared labels must be preserved. */
98 && LABEL_NAME (label) == 0
99 && !in_expr_list_p (forced_labels, label));
102 /* Delete INSN by patching it out. Return the next insn. */
105 delete_insn (rtx insn)
107 rtx next = NEXT_INSN (insn);
108 rtx note;
109 bool really_delete = true;
111 if (LABEL_P (insn))
113 /* Some labels can't be directly removed from the INSN chain, as they
114 might be references via variables, constant pool etc.
115 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
116 if (! can_delete_label_p (insn))
118 const char *name = LABEL_NAME (insn);
120 really_delete = false;
121 PUT_CODE (insn, NOTE);
122 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
123 NOTE_DELETED_LABEL_NAME (insn) = name;
126 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
129 if (really_delete)
131 /* If this insn has already been deleted, something is very wrong. */
132 gcc_assert (!INSN_DELETED_P (insn));
133 remove_insn (insn);
134 INSN_DELETED_P (insn) = 1;
137 /* If deleting a jump, decrement the use count of the label. Deleting
138 the label itself should happen in the normal course of block merging. */
139 if (JUMP_P (insn)
140 && JUMP_LABEL (insn)
141 && LABEL_P (JUMP_LABEL (insn)))
142 LABEL_NUSES (JUMP_LABEL (insn))--;
144 /* Also if deleting an insn that references a label. */
145 else
147 while ((note = find_reg_note (insn, REG_LABEL, NULL_RTX)) != NULL_RTX
148 && LABEL_P (XEXP (note, 0)))
150 LABEL_NUSES (XEXP (note, 0))--;
151 remove_note (insn, note);
155 if (JUMP_P (insn)
156 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
157 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
159 rtx pat = PATTERN (insn);
160 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
161 int len = XVECLEN (pat, diff_vec_p);
162 int i;
164 for (i = 0; i < len; i++)
166 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
168 /* When deleting code in bulk (e.g. removing many unreachable
169 blocks) we can delete a label that's a target of the vector
170 before deleting the vector itself. */
171 if (!NOTE_P (label))
172 LABEL_NUSES (label)--;
176 return next;
179 /* Like delete_insn but also purge dead edges from BB. */
181 delete_insn_and_edges (rtx insn)
183 rtx x;
184 bool purge = false;
186 if (INSN_P (insn)
187 && BLOCK_FOR_INSN (insn)
188 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
189 purge = true;
190 x = delete_insn (insn);
191 if (purge)
192 purge_dead_edges (BLOCK_FOR_INSN (insn));
193 return x;
196 /* Unlink a chain of insns between START and FINISH, leaving notes
197 that must be paired. */
199 void
200 delete_insn_chain (rtx start, rtx finish)
202 rtx next;
204 /* Unchain the insns one by one. It would be quicker to delete all of these
205 with a single unchaining, rather than one at a time, but we need to keep
206 the NOTE's. */
207 while (1)
209 next = NEXT_INSN (start);
210 if (NOTE_P (start) && !can_delete_note_p (start))
212 else
213 next = delete_insn (start);
215 if (start == finish)
216 break;
217 start = next;
221 /* Like delete_insn but also purge dead edges from BB. */
222 void
223 delete_insn_chain_and_edges (rtx first, rtx last)
225 bool purge = false;
227 if (INSN_P (last)
228 && BLOCK_FOR_INSN (last)
229 && BB_END (BLOCK_FOR_INSN (last)) == last)
230 purge = true;
231 delete_insn_chain (first, last);
232 if (purge)
233 purge_dead_edges (BLOCK_FOR_INSN (last));
236 /* Create a new basic block consisting of the instructions between HEAD and END
237 inclusive. This function is designed to allow fast BB construction - reuses
238 the note and basic block struct in BB_NOTE, if any and do not grow
239 BASIC_BLOCK chain and should be used directly only by CFG construction code.
240 END can be NULL in to create new empty basic block before HEAD. Both END
241 and HEAD can be NULL to create basic block at the end of INSN chain.
242 AFTER is the basic block we should be put after. */
244 basic_block
245 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
247 basic_block bb;
249 if (bb_note
250 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
251 && bb->aux == NULL)
253 /* If we found an existing note, thread it back onto the chain. */
255 rtx after;
257 if (LABEL_P (head))
258 after = head;
259 else
261 after = PREV_INSN (head);
262 head = bb_note;
265 if (after != bb_note && NEXT_INSN (after) != bb_note)
266 reorder_insns_nobb (bb_note, bb_note, after);
268 else
270 /* Otherwise we must create a note and a basic block structure. */
272 bb = alloc_block ();
274 init_rtl_bb_info (bb);
275 if (!head && !end)
276 head = end = bb_note
277 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
278 else if (LABEL_P (head) && end)
280 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
281 if (head == end)
282 end = bb_note;
284 else
286 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
287 head = bb_note;
288 if (!end)
289 end = head;
292 NOTE_BASIC_BLOCK (bb_note) = bb;
295 /* Always include the bb note in the block. */
296 if (NEXT_INSN (end) == bb_note)
297 end = bb_note;
299 BB_HEAD (bb) = head;
300 BB_END (bb) = end;
301 bb->index = last_basic_block++;
302 bb->flags = BB_NEW | BB_RTL;
303 link_block (bb, after);
304 SET_BASIC_BLOCK (bb->index, bb);
305 update_bb_for_insn (bb);
306 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
308 /* Tag the block so that we know it has been used when considering
309 other basic block notes. */
310 bb->aux = bb;
312 return bb;
315 /* Create new basic block consisting of instructions in between HEAD and END
316 and place it to the BB chain after block AFTER. END can be NULL in to
317 create new empty basic block before HEAD. Both END and HEAD can be NULL to
318 create basic block at the end of INSN chain. */
320 static basic_block
321 rtl_create_basic_block (void *headp, void *endp, basic_block after)
323 rtx head = headp, end = endp;
324 basic_block bb;
326 /* Grow the basic block array if needed. */
327 if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
329 size_t old_size = VEC_length (basic_block, basic_block_info);
330 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
331 basic_block *p;
332 VEC_safe_grow (basic_block, gc, basic_block_info, new_size);
333 p = VEC_address (basic_block, basic_block_info);
334 memset (&p[old_size], 0, sizeof (basic_block) * (new_size - old_size));
337 n_basic_blocks++;
339 bb = create_basic_block_structure (head, end, NULL, after);
340 bb->aux = NULL;
341 return bb;
344 static basic_block
345 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
347 basic_block newbb = rtl_create_basic_block (head, end, after);
349 return newbb;
352 /* Delete the insns in a (non-live) block. We physically delete every
353 non-deleted-note insn, and update the flow graph appropriately.
355 Return nonzero if we deleted an exception handler. */
357 /* ??? Preserving all such notes strikes me as wrong. It would be nice
358 to post-process the stream to remove empty blocks, loops, ranges, etc. */
360 static void
361 rtl_delete_block (basic_block b)
363 rtx insn, end;
365 /* If the head of this block is a CODE_LABEL, then it might be the
366 label for an exception handler which can't be reached. We need
367 to remove the label from the exception_handler_label list. */
368 insn = BB_HEAD (b);
369 if (LABEL_P (insn))
370 maybe_remove_eh_handler (insn);
372 end = get_last_bb_insn (b);
374 /* Selectively delete the entire chain. */
375 BB_HEAD (b) = NULL;
376 delete_insn_chain (insn, end);
377 if (b->il.rtl->global_live_at_start)
379 FREE_REG_SET (b->il.rtl->global_live_at_start);
380 FREE_REG_SET (b->il.rtl->global_live_at_end);
381 b->il.rtl->global_live_at_start = NULL;
382 b->il.rtl->global_live_at_end = NULL;
386 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
388 void
389 compute_bb_for_insn (void)
391 basic_block bb;
393 FOR_EACH_BB (bb)
395 rtx end = BB_END (bb);
396 rtx insn;
398 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
400 BLOCK_FOR_INSN (insn) = bb;
401 if (insn == end)
402 break;
407 /* Release the basic_block_for_insn array. */
409 unsigned int
410 free_bb_for_insn (void)
412 rtx insn;
413 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
414 if (!BARRIER_P (insn))
415 BLOCK_FOR_INSN (insn) = NULL;
416 return 0;
419 struct tree_opt_pass pass_free_cfg =
421 NULL, /* name */
422 NULL, /* gate */
423 free_bb_for_insn, /* execute */
424 NULL, /* sub */
425 NULL, /* next */
426 0, /* static_pass_number */
427 0, /* tv_id */
428 0, /* properties_required */
429 0, /* properties_provided */
430 PROP_cfg, /* properties_destroyed */
431 0, /* todo_flags_start */
432 0, /* todo_flags_finish */
433 0 /* letter */
436 /* Return RTX to emit after when we want to emit code on the entry of function. */
438 entry_of_function (void)
440 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
441 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
444 /* Emit INSN at the entry point of the function, ensuring that it is only
445 executed once per function. */
446 void
447 emit_insn_at_entry (rtx insn)
449 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
450 edge e = ei_safe_edge (ei);
451 gcc_assert (e->flags & EDGE_FALLTHRU);
453 insert_insn_on_edge (insn, e);
454 commit_edge_insertions ();
457 /* Update insns block within BB. */
459 void
460 update_bb_for_insn (basic_block bb)
462 rtx insn;
464 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
466 if (!BARRIER_P (insn))
467 set_block_for_insn (insn, bb);
468 if (insn == BB_END (bb))
469 break;
473 /* Creates a new basic block just after basic block B by splitting
474 everything after specified instruction I. */
476 static basic_block
477 rtl_split_block (basic_block bb, void *insnp)
479 basic_block new_bb;
480 rtx insn = insnp;
481 edge e;
482 edge_iterator ei;
484 if (!insn)
486 insn = first_insn_after_basic_block_note (bb);
488 if (insn)
489 insn = PREV_INSN (insn);
490 else
491 insn = get_last_insn ();
494 /* We probably should check type of the insn so that we do not create
495 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
496 bother. */
497 if (insn == BB_END (bb))
498 emit_note_after (NOTE_INSN_DELETED, insn);
500 /* Create the new basic block. */
501 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
502 BB_COPY_PARTITION (new_bb, bb);
503 BB_END (bb) = insn;
505 /* Redirect the outgoing edges. */
506 new_bb->succs = bb->succs;
507 bb->succs = NULL;
508 FOR_EACH_EDGE (e, ei, new_bb->succs)
509 e->src = new_bb;
511 if (bb->il.rtl->global_live_at_start)
513 new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
514 new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
515 COPY_REG_SET (new_bb->il.rtl->global_live_at_end, bb->il.rtl->global_live_at_end);
517 /* We now have to calculate which registers are live at the end
518 of the split basic block and at the start of the new basic
519 block. Start with those registers that are known to be live
520 at the end of the original basic block and get
521 propagate_block to determine which registers are live. */
522 COPY_REG_SET (new_bb->il.rtl->global_live_at_start, bb->il.rtl->global_live_at_end);
523 propagate_block (new_bb, new_bb->il.rtl->global_live_at_start, NULL, NULL, 0);
524 COPY_REG_SET (bb->il.rtl->global_live_at_end,
525 new_bb->il.rtl->global_live_at_start);
526 #ifdef HAVE_conditional_execution
527 /* In the presence of conditional execution we are not able to update
528 liveness precisely. */
529 if (reload_completed)
531 bb->flags |= BB_DIRTY;
532 new_bb->flags |= BB_DIRTY;
534 #endif
537 return new_bb;
540 /* Blocks A and B are to be merged into a single block A. The insns
541 are already contiguous. */
543 static void
544 rtl_merge_blocks (basic_block a, basic_block b)
546 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
547 rtx del_first = NULL_RTX, del_last = NULL_RTX;
548 int b_empty = 0;
550 /* If there was a CODE_LABEL beginning B, delete it. */
551 if (LABEL_P (b_head))
553 /* This might have been an EH label that no longer has incoming
554 EH edges. Update data structures to match. */
555 maybe_remove_eh_handler (b_head);
557 /* Detect basic blocks with nothing but a label. This can happen
558 in particular at the end of a function. */
559 if (b_head == b_end)
560 b_empty = 1;
562 del_first = del_last = b_head;
563 b_head = NEXT_INSN (b_head);
566 /* Delete the basic block note and handle blocks containing just that
567 note. */
568 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
570 if (b_head == b_end)
571 b_empty = 1;
572 if (! del_last)
573 del_first = b_head;
575 del_last = b_head;
576 b_head = NEXT_INSN (b_head);
579 /* If there was a jump out of A, delete it. */
580 if (JUMP_P (a_end))
582 rtx prev;
584 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
585 if (!NOTE_P (prev)
586 || NOTE_LINE_NUMBER (prev) == NOTE_INSN_BASIC_BLOCK
587 || prev == BB_HEAD (a))
588 break;
590 del_first = a_end;
592 #ifdef HAVE_cc0
593 /* If this was a conditional jump, we need to also delete
594 the insn that set cc0. */
595 if (only_sets_cc0_p (prev))
597 rtx tmp = prev;
599 prev = prev_nonnote_insn (prev);
600 if (!prev)
601 prev = BB_HEAD (a);
602 del_first = tmp;
604 #endif
606 a_end = PREV_INSN (del_first);
608 else if (BARRIER_P (NEXT_INSN (a_end)))
609 del_first = NEXT_INSN (a_end);
611 /* Delete everything marked above as well as crap that might be
612 hanging out between the two blocks. */
613 BB_HEAD (b) = NULL;
614 delete_insn_chain (del_first, del_last);
616 /* Reassociate the insns of B with A. */
617 if (!b_empty)
619 rtx x;
621 for (x = a_end; x != b_end; x = NEXT_INSN (x))
622 set_block_for_insn (x, a);
624 set_block_for_insn (b_end, a);
626 a_end = b_end;
629 BB_END (a) = a_end;
630 a->il.rtl->global_live_at_end = b->il.rtl->global_live_at_end;
633 /* Return true when block A and B can be merged. */
634 static bool
635 rtl_can_merge_blocks (basic_block a,basic_block b)
637 /* If we are partitioning hot/cold basic blocks, we don't want to
638 mess up unconditional or indirect jumps that cross between hot
639 and cold sections.
641 Basic block partitioning may result in some jumps that appear to
642 be optimizable (or blocks that appear to be mergeable), but which really
643 must be left untouched (they are required to make it safely across
644 partition boundaries). See the comments at the top of
645 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
647 if (BB_PARTITION (a) != BB_PARTITION (b))
648 return false;
650 /* There must be exactly one edge in between the blocks. */
651 return (single_succ_p (a)
652 && single_succ (a) == b
653 && single_pred_p (b)
654 && a != b
655 /* Must be simple edge. */
656 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
657 && a->next_bb == b
658 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
659 /* If the jump insn has side effects,
660 we can't kill the edge. */
661 && (!JUMP_P (BB_END (a))
662 || (reload_completed
663 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
666 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
667 exist. */
670 block_label (basic_block block)
672 if (block == EXIT_BLOCK_PTR)
673 return NULL_RTX;
675 if (!LABEL_P (BB_HEAD (block)))
677 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
680 return BB_HEAD (block);
683 /* Attempt to perform edge redirection by replacing possibly complex jump
684 instruction by unconditional jump or removing jump completely. This can
685 apply only if all edges now point to the same block. The parameters and
686 return values are equivalent to redirect_edge_and_branch. */
688 edge
689 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
691 basic_block src = e->src;
692 rtx insn = BB_END (src), kill_from;
693 rtx set;
694 int fallthru = 0;
696 /* If we are partitioning hot/cold basic blocks, we don't want to
697 mess up unconditional or indirect jumps that cross between hot
698 and cold sections.
700 Basic block partitioning may result in some jumps that appear to
701 be optimizable (or blocks that appear to be mergeable), but which really
702 must be left untouched (they are required to make it safely across
703 partition boundaries). See the comments at the top of
704 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
706 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
707 || BB_PARTITION (src) != BB_PARTITION (target))
708 return NULL;
710 /* We can replace or remove a complex jump only when we have exactly
711 two edges. Also, if we have exactly one outgoing edge, we can
712 redirect that. */
713 if (EDGE_COUNT (src->succs) >= 3
714 /* Verify that all targets will be TARGET. Specifically, the
715 edge that is not E must also go to TARGET. */
716 || (EDGE_COUNT (src->succs) == 2
717 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
718 return NULL;
720 if (!onlyjump_p (insn))
721 return NULL;
722 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
723 return NULL;
725 /* Avoid removing branch with side effects. */
726 set = single_set (insn);
727 if (!set || side_effects_p (set))
728 return NULL;
730 /* In case we zap a conditional jump, we'll need to kill
731 the cc0 setter too. */
732 kill_from = insn;
733 #ifdef HAVE_cc0
734 if (reg_mentioned_p (cc0_rtx, PATTERN (insn)))
735 kill_from = PREV_INSN (insn);
736 #endif
738 /* See if we can create the fallthru edge. */
739 if (in_cfglayout || can_fallthru (src, target))
741 if (dump_file)
742 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
743 fallthru = 1;
745 /* Selectively unlink whole insn chain. */
746 if (in_cfglayout)
748 rtx insn = src->il.rtl->footer;
750 delete_insn_chain (kill_from, BB_END (src));
752 /* Remove barriers but keep jumptables. */
753 while (insn)
755 if (BARRIER_P (insn))
757 if (PREV_INSN (insn))
758 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
759 else
760 src->il.rtl->footer = NEXT_INSN (insn);
761 if (NEXT_INSN (insn))
762 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
764 if (LABEL_P (insn))
765 break;
766 insn = NEXT_INSN (insn);
769 else
770 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)));
773 /* If this already is simplejump, redirect it. */
774 else if (simplejump_p (insn))
776 if (e->dest == target)
777 return NULL;
778 if (dump_file)
779 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
780 INSN_UID (insn), e->dest->index, target->index);
781 if (!redirect_jump (insn, block_label (target), 0))
783 gcc_assert (target == EXIT_BLOCK_PTR);
784 return NULL;
788 /* Cannot do anything for target exit block. */
789 else if (target == EXIT_BLOCK_PTR)
790 return NULL;
792 /* Or replace possibly complicated jump insn by simple jump insn. */
793 else
795 rtx target_label = block_label (target);
796 rtx barrier, label, table;
798 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
799 JUMP_LABEL (BB_END (src)) = target_label;
800 LABEL_NUSES (target_label)++;
801 if (dump_file)
802 fprintf (dump_file, "Replacing insn %i by jump %i\n",
803 INSN_UID (insn), INSN_UID (BB_END (src)));
806 delete_insn_chain (kill_from, insn);
808 /* Recognize a tablejump that we are converting to a
809 simple jump and remove its associated CODE_LABEL
810 and ADDR_VEC or ADDR_DIFF_VEC. */
811 if (tablejump_p (insn, &label, &table))
812 delete_insn_chain (label, table);
814 barrier = next_nonnote_insn (BB_END (src));
815 if (!barrier || !BARRIER_P (barrier))
816 emit_barrier_after (BB_END (src));
817 else
819 if (barrier != NEXT_INSN (BB_END (src)))
821 /* Move the jump before barrier so that the notes
822 which originally were or were created before jump table are
823 inside the basic block. */
824 rtx new_insn = BB_END (src);
825 rtx tmp;
827 for (tmp = NEXT_INSN (BB_END (src)); tmp != barrier;
828 tmp = NEXT_INSN (tmp))
829 set_block_for_insn (tmp, src);
831 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
832 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
834 NEXT_INSN (new_insn) = barrier;
835 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
837 PREV_INSN (new_insn) = PREV_INSN (barrier);
838 PREV_INSN (barrier) = new_insn;
843 /* Keep only one edge out and set proper flags. */
844 if (!single_succ_p (src))
845 remove_edge (e);
846 gcc_assert (single_succ_p (src));
848 e = single_succ_edge (src);
849 if (fallthru)
850 e->flags = EDGE_FALLTHRU;
851 else
852 e->flags = 0;
854 e->probability = REG_BR_PROB_BASE;
855 e->count = src->count;
857 /* We don't want a block to end on a line-number note since that has
858 the potential of changing the code between -g and not -g. */
859 while (NOTE_P (BB_END (e->src))
860 && NOTE_LINE_NUMBER (BB_END (e->src)) >= 0)
861 delete_insn (BB_END (e->src));
863 if (e->dest != target)
864 redirect_edge_succ (e, target);
866 return e;
869 /* Redirect edge representing branch of (un)conditional jump or tablejump,
870 NULL on failure */
871 static edge
872 redirect_branch_edge (edge e, basic_block target)
874 rtx tmp;
875 rtx old_label = BB_HEAD (e->dest);
876 basic_block src = e->src;
877 rtx insn = BB_END (src);
879 /* We can only redirect non-fallthru edges of jump insn. */
880 if (e->flags & EDGE_FALLTHRU)
881 return NULL;
882 else if (!JUMP_P (insn))
883 return NULL;
885 /* Recognize a tablejump and adjust all matching cases. */
886 if (tablejump_p (insn, NULL, &tmp))
888 rtvec vec;
889 int j;
890 rtx new_label = block_label (target);
892 if (target == EXIT_BLOCK_PTR)
893 return NULL;
894 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
895 vec = XVEC (PATTERN (tmp), 0);
896 else
897 vec = XVEC (PATTERN (tmp), 1);
899 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
900 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
902 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
903 --LABEL_NUSES (old_label);
904 ++LABEL_NUSES (new_label);
907 /* Handle casesi dispatch insns. */
908 if ((tmp = single_set (insn)) != NULL
909 && SET_DEST (tmp) == pc_rtx
910 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
911 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
912 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
914 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
915 new_label);
916 --LABEL_NUSES (old_label);
917 ++LABEL_NUSES (new_label);
920 else
922 /* ?? We may play the games with moving the named labels from
923 one basic block to the other in case only one computed_jump is
924 available. */
925 if (computed_jump_p (insn)
926 /* A return instruction can't be redirected. */
927 || returnjump_p (insn))
928 return NULL;
930 /* If the insn doesn't go where we think, we're confused. */
931 gcc_assert (JUMP_LABEL (insn) == old_label);
933 /* If the substitution doesn't succeed, die. This can happen
934 if the back end emitted unrecognizable instructions or if
935 target is exit block on some arches. */
936 if (!redirect_jump (insn, block_label (target), 0))
938 gcc_assert (target == EXIT_BLOCK_PTR);
939 return NULL;
943 if (dump_file)
944 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
945 e->src->index, e->dest->index, target->index);
947 if (e->dest != target)
948 e = redirect_edge_succ_nodup (e, target);
949 return e;
952 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
953 expense of adding new instructions or reordering basic blocks.
955 Function can be also called with edge destination equivalent to the TARGET.
956 Then it should try the simplifications and do nothing if none is possible.
958 Return edge representing the branch if transformation succeeded. Return NULL
959 on failure.
960 We still return NULL in case E already destinated TARGET and we didn't
961 managed to simplify instruction stream. */
963 static edge
964 rtl_redirect_edge_and_branch (edge e, basic_block target)
966 edge ret;
967 basic_block src = e->src;
969 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
970 return NULL;
972 if (e->dest == target)
973 return e;
975 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
977 src->flags |= BB_DIRTY;
978 return ret;
981 ret = redirect_branch_edge (e, target);
982 if (!ret)
983 return NULL;
985 src->flags |= BB_DIRTY;
986 return ret;
989 /* Like force_nonfallthru below, but additionally performs redirection
990 Used by redirect_edge_and_branch_force. */
992 static basic_block
993 force_nonfallthru_and_redirect (edge e, basic_block target)
995 basic_block jump_block, new_bb = NULL, src = e->src;
996 rtx note;
997 edge new_edge;
998 int abnormal_edge_flags = 0;
1000 /* In the case the last instruction is conditional jump to the next
1001 instruction, first redirect the jump itself and then continue
1002 by creating a basic block afterwards to redirect fallthru edge. */
1003 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1004 && any_condjump_p (BB_END (e->src))
1005 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1007 rtx note;
1008 edge b = unchecked_make_edge (e->src, target, 0);
1009 bool redirected;
1011 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1012 gcc_assert (redirected);
1014 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1015 if (note)
1017 int prob = INTVAL (XEXP (note, 0));
1019 b->probability = prob;
1020 b->count = e->count * prob / REG_BR_PROB_BASE;
1021 e->probability -= e->probability;
1022 e->count -= b->count;
1023 if (e->probability < 0)
1024 e->probability = 0;
1025 if (e->count < 0)
1026 e->count = 0;
1030 if (e->flags & EDGE_ABNORMAL)
1032 /* Irritating special case - fallthru edge to the same block as abnormal
1033 edge.
1034 We can't redirect abnormal edge, but we still can split the fallthru
1035 one and create separate abnormal edge to original destination.
1036 This allows bb-reorder to make such edge non-fallthru. */
1037 gcc_assert (e->dest == target);
1038 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1039 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1041 else
1043 if ((e->flags & EDGE_FALLTHRU) == 0) {
1044 printf("Edge flags were incorrect %x\n", e->flags);
1046 gcc_assert (e->flags & EDGE_FALLTHRU);
1047 if (e->src == ENTRY_BLOCK_PTR)
1049 /* We can't redirect the entry block. Create an empty block
1050 at the start of the function which we use to add the new
1051 jump. */
1052 edge tmp;
1053 edge_iterator ei;
1054 bool found = false;
1056 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1058 /* Change the existing edge's source to be the new block, and add
1059 a new edge from the entry block to the new block. */
1060 e->src = bb;
1061 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1063 if (tmp == e)
1065 VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
1066 found = true;
1067 break;
1069 else
1070 ei_next (&ei);
1073 gcc_assert (found);
1075 VEC_safe_push (edge, gc, bb->succs, e);
1076 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1080 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
1082 /* Create the new structures. */
1084 /* If the old block ended with a tablejump, skip its table
1085 by searching forward from there. Otherwise start searching
1086 forward from the last instruction of the old block. */
1087 if (!tablejump_p (BB_END (e->src), NULL, &note))
1088 note = BB_END (e->src);
1089 note = NEXT_INSN (note);
1091 jump_block = create_basic_block (note, NULL, e->src);
1092 jump_block->count = e->count;
1093 jump_block->frequency = EDGE_FREQUENCY (e);
1094 jump_block->loop_depth = target->loop_depth;
1096 if (target->il.rtl->global_live_at_start)
1098 jump_block->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
1099 jump_block->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
1100 COPY_REG_SET (jump_block->il.rtl->global_live_at_start,
1101 target->il.rtl->global_live_at_start);
1102 COPY_REG_SET (jump_block->il.rtl->global_live_at_end,
1103 target->il.rtl->global_live_at_start);
1106 /* Make sure new block ends up in correct hot/cold section. */
1108 BB_COPY_PARTITION (jump_block, e->src);
1109 if (flag_reorder_blocks_and_partition
1110 && targetm.have_named_sections
1111 && JUMP_P (BB_END (jump_block))
1112 && !any_condjump_p (BB_END (jump_block))
1113 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1114 REG_NOTES (BB_END (jump_block)) = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP,
1115 NULL_RTX,
1116 REG_NOTES
1117 (BB_END
1118 (jump_block)));
1120 /* Wire edge in. */
1121 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1122 new_edge->probability = e->probability;
1123 new_edge->count = e->count;
1125 /* Redirect old edge. */
1126 redirect_edge_pred (e, jump_block);
1127 e->probability = REG_BR_PROB_BASE;
1129 new_bb = jump_block;
1131 else
1132 jump_block = e->src;
1134 e->flags &= ~EDGE_FALLTHRU;
1135 if (target == EXIT_BLOCK_PTR)
1137 #ifdef HAVE_return
1138 emit_jump_insn_after_noloc (gen_return (), BB_END (jump_block));
1139 #else
1140 gcc_unreachable ();
1141 #endif
1143 else
1145 rtx label = block_label (target);
1146 emit_jump_insn_after_noloc (gen_jump (label), BB_END (jump_block));
1147 JUMP_LABEL (BB_END (jump_block)) = label;
1148 LABEL_NUSES (label)++;
1151 emit_barrier_after (BB_END (jump_block));
1152 redirect_edge_succ_nodup (e, target);
1154 if (abnormal_edge_flags)
1155 make_edge (src, target, abnormal_edge_flags);
1157 return new_bb;
1160 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1161 (and possibly create new basic block) to make edge non-fallthru.
1162 Return newly created BB or NULL if none. */
1164 basic_block
1165 force_nonfallthru (edge e)
1167 return force_nonfallthru_and_redirect (e, e->dest);
1170 /* Redirect edge even at the expense of creating new jump insn or
1171 basic block. Return new basic block if created, NULL otherwise.
1172 Conversion must be possible. */
1174 static basic_block
1175 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1177 if (redirect_edge_and_branch (e, target)
1178 || e->dest == target)
1179 return NULL;
1181 /* In case the edge redirection failed, try to force it to be non-fallthru
1182 and redirect newly created simplejump. */
1183 e->src->flags |= BB_DIRTY;
1184 return force_nonfallthru_and_redirect (e, target);
/* The given edge should potentially be a fallthru edge.  If that is in
   fact true, delete the jump and barriers that are in the way.
   On success, E is marked EDGE_FALLTHRU; if an executable insn sits
   between the two blocks, the function returns without changes.  */

static void
rtl_tidy_fallthru_edge (edge e)
{
  rtx q;
  basic_block b = e->src, c = b->next_bb;

  /* ??? In a late-running flow pass, other folks may have deleted basic
     blocks by nopping out blocks, leaving multiple BARRIERs between here
     and the target label. They ought to be chastised and fixed.

     We can also wind up with a sequence of undeletable labels between
     one block and the next.

     So search through a sequence of barriers, labels, and notes for
     the head of block C and assert that we really do fall through.  */

  for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
    if (INSN_P (q))
      return;

  /* Remove what will soon cease being the jump insn from the source block.
     If block B consisted only of this single jump, turn it into a deleted
     note.  */
  q = BB_END (b);
  if (JUMP_P (q)
      && onlyjump_p (q)
      && (any_uncondjump_p (q)
          || single_succ_p (b)))
    {
#ifdef HAVE_cc0
      /* If this was a conditional jump, we need to also delete
         the insn that set cc0.  */
      if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
        q = PREV_INSN (q);
#endif

      /* Back up over the jump itself: everything after Q (exclusive)
         up to the head of C will be unlinked below.  */
      q = PREV_INSN (q);

      /* We don't want a block to end on a line-number note since that has
         the potential of changing the code between -g and not -g.  */
      while (NOTE_P (q) && NOTE_LINE_NUMBER (q) >= 0)
        q = PREV_INSN (q);
    }

  /* Selectively unlink the sequence.  */
  if (q != PREV_INSN (BB_HEAD (c)))
    delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)));

  e->flags |= EDGE_FALLTHRU;
}
1241 /* Should move basic block BB after basic block AFTER. NIY. */
1243 static bool
1244 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1245 basic_block after ATTRIBUTE_UNUSED)
1247 return false;
/* Split a (typically critical) edge.  Return the new block.
   The edge must not be abnormal.

   ??? The code generally expects to be called on critical edges.
   The case of a block ending in an unconditional jump to a
   block with multiple predecessors is not handled optimally.  */

static basic_block
rtl_split_edge (edge edge_in)
{
  basic_block bb;
  rtx before;

  /* Abnormal edges cannot be split.  */
  gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));

  /* We are going to place the new block in front of edge destination.
     Avoid existence of fallthru predecessors.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, edge_in->dest->preds)
        if (e->flags & EDGE_FALLTHRU)
          break;

      if (e)
        force_nonfallthru (e);
    }

  /* Create the basic block note.  */
  if (edge_in->dest != EXIT_BLOCK_PTR)
    before = BB_HEAD (edge_in->dest);
  else
    before = NULL_RTX;

  /* If this is a fall through edge to the exit block, the blocks might be
     not adjacent, and the right place is the after the source.  */
  if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
    {
      before = NEXT_INSN (BB_END (edge_in->src));
      bb = create_basic_block (before, NULL, edge_in->src);
      BB_COPY_PARTITION (bb, edge_in->src);
    }
  else
    {
      bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
      /* ??? Why not edge_in->dest->prev_bb here?  */
      BB_COPY_PARTITION (bb, edge_in->dest);
    }

  /* ??? This info is likely going to be out of date very soon.
     The new block falls straight into DEST (see the fallthru edge made
     below), so both its start and end liveness are taken from DEST's
     start liveness.  */
  if (edge_in->dest->il.rtl->global_live_at_start)
    {
      bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
      bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
      COPY_REG_SET (bb->il.rtl->global_live_at_start,
                    edge_in->dest->il.rtl->global_live_at_start);
      COPY_REG_SET (bb->il.rtl->global_live_at_end,
                    edge_in->dest->il.rtl->global_live_at_start);
    }

  make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);

  /* For non-fallthru edges, we must adjust the predecessor's
     jump instruction to target our new block.  */
  if ((edge_in->flags & EDGE_FALLTHRU) == 0)
    {
      edge redirected = redirect_edge_and_branch (edge_in, bb);
      gcc_assert (redirected);
    }
  else
    redirect_edge_succ (edge_in, bb);

  return bb;
}
1328 /* Queue instructions for insertion on an edge between two basic blocks.
1329 The new instructions and basic blocks (if any) will not appear in the
1330 CFG until commit_edge_insertions is called. */
1332 void
1333 insert_insn_on_edge (rtx pattern, edge e)
1335 /* We cannot insert instructions on an abnormal critical edge.
1336 It will be easier to find the culprit if we die now. */
1337 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1339 if (e->insns.r == NULL_RTX)
1340 start_sequence ();
1341 else
1342 push_to_sequence (e->insns.r);
1344 emit_insn (pattern);
1346 e->insns.r = get_insns ();
1347 end_sequence ();
/* Update the CFG for the instructions queued on edge E.  If WATCH_CALLS
   is nonzero, take care not to separate a call insn from the insns that
   must stay with it (e.g. setting up its return value).  */

static void
commit_one_edge_insertion (edge e, int watch_calls)
{
  rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
  basic_block bb = NULL;

  /* Pull the insns off the edge now since the edge might go away.  */
  insns = e->insns.r;
  e->insns.r = NULL_RTX;

  /* Special case -- avoid inserting code between call and storing
     its return value.  */
  if (watch_calls && (e->flags & EDGE_FALLTHRU)
      && single_pred_p (e->dest)
      && e->src != ENTRY_BLOCK_PTR
      && CALL_P (BB_END (e->src)))
    {
      rtx next = next_nonnote_insn (BB_END (e->src));

      after = BB_HEAD (e->dest);
      /* The first insn after the call may be a stack pop, skip it.  */
      while (next
             && keep_with_call_p (next))
        {
          after = next;
          next = next_nonnote_insn (next);
        }
      bb = e->dest;
    }
  if (!before && !after)
    {
      /* Figure out where to put these things.  If the destination has
         one predecessor, insert there.  Except for the exit block.  */
      if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
        {
          bb = e->dest;

          /* Get the location correct wrt a code label, and "nice" wrt
             a basic block note, and before everything else.  */
          tmp = BB_HEAD (bb);
          if (LABEL_P (tmp))
            tmp = NEXT_INSN (tmp);
          if (NOTE_INSN_BASIC_BLOCK_P (tmp))
            tmp = NEXT_INSN (tmp);
          if (tmp == BB_HEAD (bb))
            before = tmp;
          else if (tmp)
            after = PREV_INSN (tmp);
          else
            after = get_last_insn ();
        }

      /* If the source has one successor and the edge is not abnormal,
         insert there.  Except for the entry block.  */
      else if ((e->flags & EDGE_ABNORMAL) == 0
               && single_succ_p (e->src)
               && e->src != ENTRY_BLOCK_PTR)
        {
          bb = e->src;

          /* It is possible to have a non-simple jump here.  Consider a target
             where some forms of unconditional jumps clobber a register.  This
             happens on the fr30 for example.

             We know this block has a single successor, so we can just emit
             the queued insns before the jump.  */
          if (JUMP_P (BB_END (bb)))
            before = BB_END (bb);
          else
            {
              /* We'd better be fallthru, or we've lost track of
                 what's what.  */
              gcc_assert (e->flags & EDGE_FALLTHRU);

              after = BB_END (bb);
            }
        }
      /* Otherwise we must split the edge.  */
      else
        {
          bb = split_edge (e);
          after = BB_END (bb);

          if (flag_reorder_blocks_and_partition
              && targetm.have_named_sections
              && e->src != ENTRY_BLOCK_PTR
              && BB_PARTITION (e->src) == BB_COLD_PARTITION
              && !(e->flags & EDGE_CROSSING))
            {
              rtx bb_note, cur_insn;

              /* Find the new block's NOTE_INSN_BASIC_BLOCK note.  */
              bb_note = NULL_RTX;
              for (cur_insn = BB_HEAD (bb); cur_insn != NEXT_INSN (BB_END (bb));
                   cur_insn = NEXT_INSN (cur_insn))
                if (NOTE_P (cur_insn)
                    && NOTE_LINE_NUMBER (cur_insn) == NOTE_INSN_BASIC_BLOCK)
                  {
                    bb_note = cur_insn;
                    break;
                  }

              /* Mark a partition-crossing jump with REG_CROSSING_JUMP so
                 later passes keep it in the right section.  */
              if (JUMP_P (BB_END (bb))
                  && !any_condjump_p (BB_END (bb))
                  && (single_succ_edge (bb)->flags & EDGE_CROSSING))
                REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
                  (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
            }
        }
    }

  /* Now that we've found the spot, do the insertion.  */

  if (before)
    {
      emit_insn_before_noloc (insns, before);
      last = prev_nonnote_insn (before);
    }
  else
    last = emit_insn_after_noloc (insns, after);

  if (returnjump_p (last))
    {
      /* ??? Remove all outgoing edges from BB and add one for EXIT.
         This is not currently a problem because this only happens
         for the (single) epilogue, which already has a fallthru edge
         to EXIT.  */

      e = single_succ_edge (bb);
      gcc_assert (e->dest == EXIT_BLOCK_PTR
                  && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));

      e->flags &= ~EDGE_FALLTHRU;
      emit_barrier_after (last);

      if (before)
        delete_insn (before);
    }
  else
    gcc_assert (!JUMP_P (last));

  /* Mark the basic block for find_many_sub_basic_blocks.  */
  bb->aux = &bb->aux;
}
1496 /* Update the CFG for all queued instructions. */
1498 void
1499 commit_edge_insertions (void)
1501 basic_block bb;
1502 sbitmap blocks;
1503 bool changed = false;
1505 #ifdef ENABLE_CHECKING
1506 verify_flow_info ();
1507 #endif
1509 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1511 edge e;
1512 edge_iterator ei;
1514 FOR_EACH_EDGE (e, ei, bb->succs)
1515 if (e->insns.r)
1517 changed = true;
1518 commit_one_edge_insertion (e, false);
1522 if (!changed)
1523 return;
1525 blocks = sbitmap_alloc (last_basic_block);
1526 sbitmap_zero (blocks);
1527 FOR_EACH_BB (bb)
1528 if (bb->aux)
1530 SET_BIT (blocks, bb->index);
1531 /* Check for forgotten bb->aux values before commit_edge_insertions
1532 call. */
1533 gcc_assert (bb->aux == &bb->aux);
1534 bb->aux = NULL;
1536 find_many_sub_basic_blocks (blocks);
1537 sbitmap_free (blocks);
1540 /* Update the CFG for all queued instructions, taking special care of inserting
1541 code on edges between call and storing its return value. */
1543 void
1544 commit_edge_insertions_watch_calls (void)
1546 basic_block bb;
1547 sbitmap blocks;
1548 bool changed = false;
1550 #ifdef ENABLE_CHECKING
1551 verify_flow_info ();
1552 #endif
1554 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1556 edge e;
1557 edge_iterator ei;
1559 FOR_EACH_EDGE (e, ei, bb->succs)
1560 if (e->insns.r)
1562 changed = true;
1563 commit_one_edge_insertion (e, true);
1567 if (!changed)
1568 return;
1570 blocks = sbitmap_alloc (last_basic_block);
1571 sbitmap_zero (blocks);
1572 FOR_EACH_BB (bb)
1573 if (bb->aux)
1575 SET_BIT (blocks, bb->index);
1576 /* Check for forgotten bb->aux values before commit_edge_insertions
1577 call. */
1578 gcc_assert (bb->aux == &bb->aux);
1579 bb->aux = NULL;
1581 find_many_sub_basic_blocks (blocks);
1582 sbitmap_free (blocks);
1585 /* Print out RTL-specific basic block information (live information
1586 at start and end). */
1588 static void
1589 rtl_dump_bb (basic_block bb, FILE *outf, int indent)
1591 rtx insn;
1592 rtx last;
1593 char *s_indent;
1595 s_indent = alloca ((size_t) indent + 1);
1596 memset (s_indent, ' ', (size_t) indent);
1597 s_indent[indent] = '\0';
1599 fprintf (outf, ";;%s Registers live at start: ", s_indent);
1600 dump_regset (bb->il.rtl->global_live_at_start, outf);
1601 putc ('\n', outf);
1603 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1604 insn = NEXT_INSN (insn))
1605 print_rtl_single (outf, insn);
1607 fprintf (outf, ";;%s Registers live at end: ", s_indent);
1608 dump_regset (bb->il.rtl->global_live_at_end, outf);
1609 putc ('\n', outf);
/* Like print_rtl, but also print out live information for the start of each
   basic block.  */

void
print_rtl_with_bb (FILE *outf, rtx rtx_first)
{
  rtx tmp_rtx;

  if (rtx_first == 0)
    fprintf (outf, "(nil)\n");
  else
    {
      enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
      int max_uid = get_max_uid ();
      /* Per-insn-UID maps: the block starting/ending at each insn, and
         whether the insn belongs to zero, one, or several blocks.  */
      basic_block *start = XCNEWVEC (basic_block, max_uid);
      basic_block *end = XCNEWVEC (basic_block, max_uid);
      enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);

      basic_block bb;

      /* First pass: record block membership for every insn.  */
      FOR_EACH_BB_REVERSE (bb)
        {
          rtx x;

          start[INSN_UID (BB_HEAD (bb))] = bb;
          end[INSN_UID (BB_END (bb))] = bb;
          for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
            {
              enum bb_state state = IN_MULTIPLE_BB;

              if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
                state = IN_ONE_BB;
              in_bb_p[INSN_UID (x)] = state;

              if (x == BB_END (bb))
                break;
            }
        }

      /* Second pass: walk the whole insn chain, annotating block starts,
         ends, and membership anomalies as we print.  */
      for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
        {
          int did_output;

          if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
            {
              fprintf (outf, ";; Start of basic block %d, registers live:",
                       bb->index);
              dump_regset (bb->il.rtl->global_live_at_start, outf);
              putc ('\n', outf);
            }

          if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
              && !NOTE_P (tmp_rtx)
              && !BARRIER_P (tmp_rtx))
            fprintf (outf, ";; Insn is not within a basic block\n");
          else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
            fprintf (outf, ";; Insn is in multiple basic blocks\n");

          did_output = print_rtl_single (outf, tmp_rtx);

          if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
            {
              fprintf (outf, ";; End of basic block %d, registers live:\n",
                       bb->index);
              dump_regset (bb->il.rtl->global_live_at_end, outf);
              putc ('\n', outf);
            }

          if (did_output)
            putc ('\n', outf);
        }

      free (start);
      free (end);
      free (in_bb_p);
    }

  if (current_function_epilogue_delay_list != 0)
    {
      fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
      for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
           tmp_rtx = XEXP (tmp_rtx, 1))
        print_rtl_single (outf, XEXP (tmp_rtx, 0));
    }
}
1698 void
1699 update_br_prob_note (basic_block bb)
1701 rtx note;
1702 if (!JUMP_P (BB_END (bb)))
1703 return;
1704 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1705 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1706 return;
1707 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1710 /* Get the last insn associated with block BB (that includes barriers and
1711 tablejumps after BB). */
1713 get_last_bb_insn (basic_block bb)
1715 rtx tmp;
1716 rtx end = BB_END (bb);
1718 /* Include any jump table following the basic block. */
1719 if (tablejump_p (end, NULL, &tmp))
1720 end = tmp;
1722 /* Include any barriers that may follow the basic block. */
1723 tmp = next_nonnote_insn (end);
1724 while (tmp && BARRIER_P (tmp))
1726 end = tmp;
1727 tmp = next_nonnote_insn (end);
1730 return end;
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does following checks:

   - test head/end pointers
   - overlapping of basic blocks
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fall_thru edge crosses hot/cold partition boundaries

   In future it can be extended check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).

   Returns nonzero if any inconsistency was found.  */

static int
rtl_verify_flow_info_1 (void)
{
  const int max_uid = get_max_uid ();
  rtx last_head = get_last_insn ();
  basic_block *bb_info;
  rtx x;
  int err = 0;
  basic_block bb;

  bb_info = XCNEWVEC (basic_block, max_uid);

  FOR_EACH_BB_REVERSE (bb)
    {
      rtx head = BB_HEAD (bb);
      rtx end = BB_END (bb);

      /* Verify the end of the basic block is in the INSN chain.  */
      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
        if (x == end)
          break;

      if (!(bb->flags & BB_RTL))
        {
          error ("BB_RTL flag not set for block %d", bb->index);
          err = 1;
        }

      if (!x)
        {
          error ("end insn %d for block %d not found in the insn stream",
                 INSN_UID (end), bb->index);
          err = 1;
        }

      /* Work backwards from the end to the head of the basic block
         to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
        {
          /* While walking over the insn chain, verify insns appear
             in only one basic block and initialize the BB_INFO array
             used by other passes.  */
          if (bb_info[INSN_UID (x)] != NULL)
            {
              error ("insn %d is in multiple basic blocks (%d and %d)",
                     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
              err = 1;
            }

          bb_info[INSN_UID (x)] = bb;

          if (x == head)
            break;
        }
      if (!x)
        {
          error ("head insn %d for block %d not found in the insn stream",
                 INSN_UID (head), bb->index);
          err = 1;
        }

      /* Blocks are walked in reverse, so the next (earlier) block must
         end strictly before this block's head.  */
      last_head = x;
    }

  /* Now check the basic blocks (boundaries etc.) */
  FOR_EACH_BB_REVERSE (bb)
    {
      int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
      edge e, fallthru = NULL;
      rtx note;
      edge_iterator ei;

      /* The REG_BR_PROB note on a conditional jump must agree with the
         probability stored on the branch edge.  */
      if (JUMP_P (BB_END (bb))
          && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
          && EDGE_COUNT (bb->succs) >= 2
          && any_condjump_p (BB_END (bb)))
        {
          if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
              && profile_status != PROFILE_ABSENT)
            {
              error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
                     INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
              err = 1;
            }
        }

      /* Classify and count the outgoing edges.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        {
          if (e->flags & EDGE_FALLTHRU)
            {
              n_fallthru++, fallthru = e;
              if ((e->flags & EDGE_CROSSING)
                  || (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
                      && e->src != ENTRY_BLOCK_PTR
                      && e->dest != EXIT_BLOCK_PTR))
                {
                  error ("fallthru edge crosses section boundary (bb %i)",
                         e->src->index);
                  err = 1;
                }
            }

          if ((e->flags & ~(EDGE_DFS_BACK
                            | EDGE_CAN_FALLTHRU
                            | EDGE_IRREDUCIBLE_LOOP
                            | EDGE_LOOP_EXIT
                            | EDGE_CROSSING)) == 0)
            n_branch++;

          if (e->flags & EDGE_ABNORMAL_CALL)
            n_call++;

          if (e->flags & EDGE_EH)
            n_eh++;
          else if (e->flags & EDGE_ABNORMAL)
            n_abnormal++;
        }

      /* Cross-check the edge counts against the insn ending the block.  */
      if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
          && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
        {
          error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
          err = 1;
        }
      if (n_branch
          && (!JUMP_P (BB_END (bb))
              || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
                                   || any_condjump_p (BB_END (bb))))))
        {
          error ("too many outgoing branch edges from bb %i", bb->index);
          err = 1;
        }
      if (n_fallthru && any_uncondjump_p (BB_END (bb)))
        {
          error ("fallthru edge after unconditional jump %i", bb->index);
          err = 1;
        }
      if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
        {
          error ("wrong amount of branch edges after unconditional jump %i", bb->index);
          err = 1;
        }
      if (n_branch != 1 && any_condjump_p (BB_END (bb))
          && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
        {
          error ("wrong amount of branch edges after conditional jump %i",
                 bb->index);
          err = 1;
        }
      if (n_call && !CALL_P (BB_END (bb)))
        {
          error ("call edges for non-call insn in bb %i", bb->index);
          err = 1;
        }
      if (n_abnormal
          && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
          && (!JUMP_P (BB_END (bb))
              || any_condjump_p (BB_END (bb))
              || any_uncondjump_p (BB_END (bb))))
        {
          error ("abnormal edges for no purpose in bb %i", bb->index);
          err = 1;
        }

      for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
        /* We may have a barrier inside a basic block before dead code
           elimination.  There is no BLOCK_FOR_INSN field in a barrier.  */
        if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
          {
            debug_rtx (x);
            if (! BLOCK_FOR_INSN (x))
              error
                ("insn %d inside basic block %d but block_for_insn is NULL",
                 INSN_UID (x), bb->index);
            else
              error
                ("insn %d inside basic block %d but block_for_insn is %i",
                 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);

            err = 1;
          }

      /* OK pointers are correct.  Now check the header of basic
         block.  It ought to contain optional CODE_LABEL followed
         by NOTE_BASIC_BLOCK.  */
      x = BB_HEAD (bb);
      if (LABEL_P (x))
        {
          if (BB_END (bb) == x)
            {
              error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
                     bb->index);
              err = 1;
            }

          x = NEXT_INSN (x);
        }

      if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
        {
          error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
                 bb->index);
          err = 1;
        }

      if (BB_END (bb) == x)
        /* Do checks for empty blocks here.  */
        ;
      else
        for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
          {
            if (NOTE_INSN_BASIC_BLOCK_P (x))
              {
                error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
                       INSN_UID (x), bb->index);
                err = 1;
              }

            if (x == BB_END (bb))
              break;

            /* Control flow insns may only terminate a block.  */
            if (control_flow_insn_p (x))
              {
                error ("in basic block %d:", bb->index);
                fatal_insn ("flow control insn inside a basic block", x);
              }
          }
    }

  /* Clean up.  */
  free (bb_info);
  return err;
}
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does following checks:
   - all checks of rtl_verify_flow_info_1
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers
   - check that all fallthru edge points to the adjacent blocks.

   Returns nonzero if any inconsistency was found.  */
static int
rtl_verify_flow_info (void)
{
  basic_block bb;
  int err = rtl_verify_flow_info_1 ();
  rtx x;
  int num_bb_notes;
  const rtx rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;

  FOR_EACH_BB_REVERSE (bb)
    {
      edge e;
      edge_iterator ei;

      if (bb->predictions)
        {
          error ("bb prediction set for block %i, but it is not used in RTL land", bb->index);
          err = 1;
        }

      /* Find this block's fallthru successor edge, if any.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          break;
      if (!e)
        {
          rtx insn;

          /* Ensure existence of barrier in BB with no fallthru edges.  */
          for (insn = BB_END (bb); !insn || !BARRIER_P (insn);
               insn = NEXT_INSN (insn))
            if (!insn
                || (NOTE_P (insn)
                    && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK))
              {
                error ("missing barrier after block %i", bb->index);
                err = 1;
                break;
              }
        }
      else if (e->src != ENTRY_BLOCK_PTR
               && e->dest != EXIT_BLOCK_PTR)
        {
          rtx insn;

          /* A fallthru edge must connect adjacent blocks, with nothing
             executable between them.  */
          if (e->src->next_bb != e->dest)
            {
              error
                ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
                 e->src->index, e->dest->index);
              err = 1;
            }
          else
            for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
                 insn = NEXT_INSN (insn))
              if (BARRIER_P (insn) || INSN_P (insn))
                {
                  error ("verify_flow_info: Incorrect fallthru %i->%i",
                         e->src->index, e->dest->index);
                  fatal_insn ("wrong insn in the fallthru edge", insn);
                  err = 1;
                }
        }
    }

  /* Walk the whole insn chain, checking that every insn lies inside a
     block (with the allowed exceptions) and that blocks appear in
     layout order.  */
  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR;

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
        {
          bb = NOTE_BASIC_BLOCK (x);

          num_bb_notes++;
          if (bb != last_bb_seen->next_bb)
            internal_error ("basic blocks not laid down consecutively");

          curr_bb = last_bb_seen = bb;
        }

      if (!curr_bb)
        {
          switch (GET_CODE (x))
            {
            case BARRIER:
            case NOTE:
              break;

            case CODE_LABEL:
              /* An addr_vec is placed outside any basic block.  */
              if (NEXT_INSN (x)
                  && JUMP_P (NEXT_INSN (x))
                  && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
                      || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
                x = NEXT_INSN (x);

              /* But in any case, non-deletable labels can appear anywhere.  */
              break;

            default:
              fatal_insn ("insn outside basic block", x);
            }
        }

      if (JUMP_P (x)
          && returnjump_p (x) && ! condjump_p (x)
          && ! (NEXT_INSN (x) && BARRIER_P (NEXT_INSN (x))))
            fatal_insn ("return not followed by barrier", x);
      if (curr_bb && x == BB_END (curr_bb))
        curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

   return err;
}
/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the edges from CFG.
   Return true if any edges are eliminated.  */

bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx insn = BB_END (bb), note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
          || ((eqnote = find_reg_equal_equiv_note (insn))
              && ! may_trap_p (XEXP (eqnote, 0))))
        remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      /* There are three types of edges we need to handle correctly here: EH
         edges, abnormal call EH edges, and abnormal call non-EH edges.  The
         latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_EH)
        {
          /* Keep EH edges only while the ending insn can still throw.  */
          if (can_throw_internal (BB_END (bb))
              /* If this is a call edge, verify that this is a call insn.  */
              && (! (e->flags & EDGE_ABNORMAL_CALL)
                  || CALL_P (BB_END (bb))))
            {
              ei_next (&ei);
              continue;
            }
        }
      else if (e->flags & EDGE_ABNORMAL_CALL)
        {
          /* Keep abnormal call edges only while the block still ends in a
             call that may perform a non-local transfer.  */
          if (CALL_P (BB_END (bb))
              && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
                  || INTVAL (XEXP (note, 0)) >= 0))
            {
              ei_next (&ei);
              continue;
            }
        }
      else
        {
          ei_next (&ei);
          continue;
        }

      remove_edge (e);
      bb->flags |= BB_DIRTY;
      purged = true;
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b,f;
      edge_iterator ei;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
          && !returnjump_p (insn)
          && !simplejump_p (insn))
        return purged;

      /* Branch probability/prediction notes are defined only for
         condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
        {
          note = find_reg_note (insn, REG_BR_PROB, NULL);
          if (note)
            remove_note (insn, note);
          while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
            remove_note (insn, note);
        }

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Avoid abnormal flags to leak from computed jumps turned
             into simplejumps.  */

          e->flags &= ~EDGE_ABNORMAL;

          /* See if this edge is one we should keep.  */
          if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
            /* A conditional jump can fall through into the next
               block, so we should keep the edge.  */
            {
              ei_next (&ei);
              continue;
            }
          else if (e->dest != EXIT_BLOCK_PTR
                   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
            /* If the destination block is the target of the jump,
               keep the edge.  */
            {
              ei_next (&ei);
              continue;
            }
          else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
            /* If the destination block is the exit block, and this
               instruction is a return, then keep the edge.  */
            {
              ei_next (&ei);
              continue;
            }
          else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
            /* Keep the edges that correspond to exceptions thrown by
               this instruction and rematerialize the EDGE_ABNORMAL
               flag we just cleared above.  */
            {
              e->flags |= EDGE_ABNORMAL;
              ei_next (&ei);
              continue;
            }

          /* We do not need this edge.  */
          bb->flags |= BB_DIRTY;
          purged = true;
          remove_edge (e);
        }

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
        return purged;

      if (dump_file)
        fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
        return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
        {
          single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
          single_succ_edge (bb)->count = bb->count;
        }
      else
        {
          note = find_reg_note (insn, REG_BR_PROB, NULL);
          if (!note)
            return purged;

          b = BRANCH_EDGE (bb);
          f = FALLTHRU_EDGE (bb);
          b->probability = INTVAL (XEXP (note, 0));
          f->probability = REG_BR_PROB_BASE - b->probability;
          b->count = bb->count * b->probability / REG_BR_PROB_BASE;
          f->count = bb->count * f->probability / REG_BR_PROB_BASE;
        }

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
         from non-local gotos and the like.  If there were, we shouldn't
         have created the sibcall in the first place.  Second, there
         should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
                  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
        found = true;
        break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
        {
          bb->flags |= BB_DIRTY;
          remove_edge (e);
          purged = true;
        }
      else
        ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
  single_succ_edge (bb)->count = bb->count;

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
             bb->index);
  return purged;
}
2330 /* Search all basic blocks for potentially dead edges and purge them. Return
2331 true if some edge has been eliminated. */
2333 bool
2334 purge_all_dead_edges (void)
2336 int purged = false;
2337 basic_block bb;
2339 FOR_EACH_BB (bb)
2341 bool purged_here = purge_dead_edges (bb);
2343 purged |= purged_here;
2346 return purged;
2349 /* Same as split_block but update cfg_layout structures. */
2351 static basic_block
2352 cfg_layout_split_block (basic_block bb, void *insnp)
2354 rtx insn = insnp;
2355 basic_block new_bb = rtl_split_block (bb, insn);
2357 new_bb->il.rtl->footer = bb->il.rtl->footer;
2358 bb->il.rtl->footer = NULL;
2360 return new_bb;
2364 /* Redirect Edge to DEST. */
2365 static edge
2366 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
2368 basic_block src = e->src;
2369 edge ret;
2371 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
2372 return NULL;
2374 if (e->dest == dest)
2375 return e;
2377 if (e->src != ENTRY_BLOCK_PTR
2378 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
2380 src->flags |= BB_DIRTY;
2381 return ret;
2384 if (e->src == ENTRY_BLOCK_PTR
2385 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
2387 if (dump_file)
2388 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
2389 e->src->index, dest->index);
2391 e->src->flags |= BB_DIRTY;
2392 redirect_edge_succ (e, dest);
2393 return e;
2396 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
2397 in the case the basic block appears to be in sequence. Avoid this
2398 transformation. */
2400 if (e->flags & EDGE_FALLTHRU)
2402 /* Redirect any branch edges unified with the fallthru one. */
2403 if (JUMP_P (BB_END (src))
2404 && label_is_jump_target_p (BB_HEAD (e->dest),
2405 BB_END (src)))
2407 edge redirected;
2409 if (dump_file)
2410 fprintf (dump_file, "Fallthru edge unified with branch "
2411 "%i->%i redirected to %i\n",
2412 e->src->index, e->dest->index, dest->index);
2413 e->flags &= ~EDGE_FALLTHRU;
2414 redirected = redirect_branch_edge (e, dest);
2415 gcc_assert (redirected);
2416 e->flags |= EDGE_FALLTHRU;
2417 e->src->flags |= BB_DIRTY;
2418 return e;
2420 /* In case we are redirecting fallthru edge to the branch edge
2421 of conditional jump, remove it. */
2422 if (EDGE_COUNT (src->succs) == 2)
2424 /* Find the edge that is different from E. */
2425 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
2427 if (s->dest == dest
2428 && any_condjump_p (BB_END (src))
2429 && onlyjump_p (BB_END (src)))
2430 delete_insn (BB_END (src));
2432 ret = redirect_edge_succ_nodup (e, dest);
2433 if (dump_file)
2434 fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
2435 e->src->index, e->dest->index, dest->index);
2437 else
2438 ret = redirect_branch_edge (e, dest);
2440 /* We don't want simplejumps in the insn stream during cfglayout. */
2441 gcc_assert (!simplejump_p (BB_END (src)));
2443 src->flags |= BB_DIRTY;
2444 return ret;
2447 /* Simple wrapper as we always can redirect fallthru edges. */
2448 static basic_block
2449 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
2451 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
2453 gcc_assert (redirected);
2454 return NULL;
2457 /* Same as delete_basic_block but update cfg_layout structures. */
2459 static void
2460 cfg_layout_delete_block (basic_block bb)
2462 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
2464 if (bb->il.rtl->header)
2466 next = BB_HEAD (bb);
2467 if (prev)
2468 NEXT_INSN (prev) = bb->il.rtl->header;
2469 else
2470 set_first_insn (bb->il.rtl->header);
2471 PREV_INSN (bb->il.rtl->header) = prev;
2472 insn = bb->il.rtl->header;
2473 while (NEXT_INSN (insn))
2474 insn = NEXT_INSN (insn);
2475 NEXT_INSN (insn) = next;
2476 PREV_INSN (next) = insn;
2478 next = NEXT_INSN (BB_END (bb));
2479 if (bb->il.rtl->footer)
2481 insn = bb->il.rtl->footer;
2482 while (insn)
2484 if (BARRIER_P (insn))
2486 if (PREV_INSN (insn))
2487 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
2488 else
2489 bb->il.rtl->footer = NEXT_INSN (insn);
2490 if (NEXT_INSN (insn))
2491 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
2493 if (LABEL_P (insn))
2494 break;
2495 insn = NEXT_INSN (insn);
2497 if (bb->il.rtl->footer)
2499 insn = BB_END (bb);
2500 NEXT_INSN (insn) = bb->il.rtl->footer;
2501 PREV_INSN (bb->il.rtl->footer) = insn;
2502 while (NEXT_INSN (insn))
2503 insn = NEXT_INSN (insn);
2504 NEXT_INSN (insn) = next;
2505 if (next)
2506 PREV_INSN (next) = insn;
2507 else
2508 set_last_insn (insn);
2511 if (bb->next_bb != EXIT_BLOCK_PTR)
2512 to = &bb->next_bb->il.rtl->header;
2513 else
2514 to = &cfg_layout_function_footer;
2516 rtl_delete_block (bb);
2518 if (prev)
2519 prev = NEXT_INSN (prev);
2520 else
2521 prev = get_insns ();
2522 if (next)
2523 next = PREV_INSN (next);
2524 else
2525 next = get_last_insn ();
2527 if (next && NEXT_INSN (next) != prev)
2529 remaints = unlink_insn_chain (prev, next);
2530 insn = remaints;
2531 while (NEXT_INSN (insn))
2532 insn = NEXT_INSN (insn);
2533 NEXT_INSN (insn) = *to;
2534 if (*to)
2535 PREV_INSN (*to) = insn;
2536 *to = remaints;
2540 /* Return true when blocks A and B can be safely merged. */
2541 static bool
2542 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2544 /* If we are partitioning hot/cold basic blocks, we don't want to
2545 mess up unconditional or indirect jumps that cross between hot
2546 and cold sections.
2548 Basic block partitioning may result in some jumps that appear to
2549 be optimizable (or blocks that appear to be mergeable), but which really
2550 must be left untouched (they are required to make it safely across
2551 partition boundaries). See the comments at the top of
2552 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
2554 if (BB_PARTITION (a) != BB_PARTITION (b))
2555 return false;
2557 /* There must be exactly one edge in between the blocks. */
2558 return (single_succ_p (a)
2559 && single_succ (a) == b
2560 && single_pred_p (b) == 1
2561 && a != b
2562 /* Must be simple edge. */
2563 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
2564 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2565 /* If the jump insn has side effects,
2566 we can't kill the edge. */
2567 && (!JUMP_P (BB_END (a))
2568 || (reload_completed
2569 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
2572 /* Merge block A and B. The blocks must be mergeable. */
2574 static void
2575 cfg_layout_merge_blocks (basic_block a, basic_block b)
2577 #ifdef ENABLE_CHECKING
2578 gcc_assert (cfg_layout_can_merge_blocks_p (a, b));
2579 #endif
2581 /* If there was a CODE_LABEL beginning B, delete it. */
2582 if (LABEL_P (BB_HEAD (b)))
2584 /* This might have been an EH label that no longer has incoming
2585 EH edges. Update data structures to match. */
2586 maybe_remove_eh_handler (BB_HEAD (b));
2588 delete_insn (BB_HEAD (b));
2591 /* We should have fallthru edge in a, or we can do dummy redirection to get
2592 it cleaned up. */
2593 if (JUMP_P (BB_END (a)))
2594 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
2595 gcc_assert (!JUMP_P (BB_END (a)));
2597 /* Possible line number notes should appear in between. */
2598 if (b->il.rtl->header)
2600 rtx first = BB_END (a), last;
2602 last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a));
2603 delete_insn_chain (NEXT_INSN (first), last);
2604 b->il.rtl->header = NULL;
2607 /* In the case basic blocks are not adjacent, move them around. */
2608 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
2610 rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));
2612 emit_insn_after_noloc (first, BB_END (a));
2613 /* Skip possible DELETED_LABEL insn. */
2614 if (!NOTE_INSN_BASIC_BLOCK_P (first))
2615 first = NEXT_INSN (first);
2616 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
2617 BB_HEAD (b) = NULL;
2618 delete_insn (first);
2620 /* Otherwise just re-associate the instructions. */
2621 else
2623 rtx insn;
2625 for (insn = BB_HEAD (b);
2626 insn != NEXT_INSN (BB_END (b));
2627 insn = NEXT_INSN (insn))
2628 set_block_for_insn (insn, a);
2629 insn = BB_HEAD (b);
2630 /* Skip possible DELETED_LABEL insn. */
2631 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
2632 insn = NEXT_INSN (insn);
2633 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
2634 BB_HEAD (b) = NULL;
2635 BB_END (a) = BB_END (b);
2636 delete_insn (insn);
2639 /* Possible tablejumps and barriers should appear after the block. */
2640 if (b->il.rtl->footer)
2642 if (!a->il.rtl->footer)
2643 a->il.rtl->footer = b->il.rtl->footer;
2644 else
2646 rtx last = a->il.rtl->footer;
2648 while (NEXT_INSN (last))
2649 last = NEXT_INSN (last);
2650 NEXT_INSN (last) = b->il.rtl->footer;
2651 PREV_INSN (b->il.rtl->footer) = last;
2653 b->il.rtl->footer = NULL;
2655 a->il.rtl->global_live_at_end = b->il.rtl->global_live_at_end;
2657 if (dump_file)
2658 fprintf (dump_file, "Merged blocks %d and %d.\n",
2659 a->index, b->index);
2662 /* Split edge E. */
2664 static basic_block
2665 cfg_layout_split_edge (edge e)
2667 basic_block new_bb =
2668 create_basic_block (e->src != ENTRY_BLOCK_PTR
2669 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
2670 NULL_RTX, e->src);
2672 /* ??? This info is likely going to be out of date very soon, but we must
2673 create it to avoid getting an ICE later. */
2674 if (e->dest->il.rtl->global_live_at_start)
2676 new_bb->il.rtl->global_live_at_start = ALLOC_REG_SET (&reg_obstack);
2677 new_bb->il.rtl->global_live_at_end = ALLOC_REG_SET (&reg_obstack);
2678 COPY_REG_SET (new_bb->il.rtl->global_live_at_start,
2679 e->dest->il.rtl->global_live_at_start);
2680 COPY_REG_SET (new_bb->il.rtl->global_live_at_end,
2681 e->dest->il.rtl->global_live_at_start);
2684 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
2685 redirect_edge_and_branch_force (e, new_bb);
2687 return new_bb;
2690 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
2692 static void
2693 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
2697 /* Return 1 if BB ends with a call, possibly followed by some
2698 instructions that must stay with the call, 0 otherwise. */
2700 static bool
2701 rtl_block_ends_with_call_p (basic_block bb)
2703 rtx insn = BB_END (bb);
2705 while (!CALL_P (insn)
2706 && insn != BB_HEAD (bb)
2707 && keep_with_call_p (insn))
2708 insn = PREV_INSN (insn);
2709 return (CALL_P (insn));
2712 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
2714 static bool
2715 rtl_block_ends_with_condjump_p (basic_block bb)
2717 return any_condjump_p (BB_END (bb));
2720 /* Return true if we need to add fake edge to exit.
2721 Helper function for rtl_flow_call_edges_add. */
2723 static bool
2724 need_fake_edge_p (rtx insn)
2726 if (!INSN_P (insn))
2727 return false;
2729 if ((CALL_P (insn)
2730 && !SIBLING_CALL_P (insn)
2731 && !find_reg_note (insn, REG_NORETURN, NULL)
2732 && !CONST_OR_PURE_CALL_P (insn)))
2733 return true;
2735 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2736 && MEM_VOLATILE_P (PATTERN (insn)))
2737 || (GET_CODE (PATTERN (insn)) == PARALLEL
2738 && asm_noperands (insn) != -1
2739 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2740 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
2743 /* Add fake edges to the function exit for any non constant and non noreturn
2744 calls, volatile inline assembly in the bitmap of blocks specified by
2745 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
2746 that were split.
2748 The goal is to expose cases in which entering a basic block does not imply
2749 that all subsequent instructions must be executed. */
2751 static int
2752 rtl_flow_call_edges_add (sbitmap blocks)
2754 int i;
2755 int blocks_split = 0;
2756 int last_bb = last_basic_block;
2757 bool check_last_block = false;
2759 if (n_basic_blocks == NUM_FIXED_BLOCKS)
2760 return 0;
2762 if (! blocks)
2763 check_last_block = true;
2764 else
2765 check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);
2767 /* In the last basic block, before epilogue generation, there will be
2768 a fallthru edge to EXIT. Special care is required if the last insn
2769 of the last basic block is a call because make_edge folds duplicate
2770 edges, which would result in the fallthru edge also being marked
2771 fake, which would result in the fallthru edge being removed by
2772 remove_fake_edges, which would result in an invalid CFG.
2774 Moreover, we can't elide the outgoing fake edge, since the block
2775 profiler needs to take this into account in order to solve the minimal
2776 spanning tree in the case that the call doesn't return.
2778 Handle this by adding a dummy instruction in a new last basic block. */
2779 if (check_last_block)
2781 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
2782 rtx insn = BB_END (bb);
2784 /* Back up past insns that must be kept in the same block as a call. */
2785 while (insn != BB_HEAD (bb)
2786 && keep_with_call_p (insn))
2787 insn = PREV_INSN (insn);
2789 if (need_fake_edge_p (insn))
2791 edge e;
2793 e = find_edge (bb, EXIT_BLOCK_PTR);
2794 if (e)
2796 insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
2797 commit_edge_insertions ();
2802 /* Now add fake edges to the function exit for any non constant
2803 calls since there is no way that we can determine if they will
2804 return or not... */
2806 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
2808 basic_block bb = BASIC_BLOCK (i);
2809 rtx insn;
2810 rtx prev_insn;
2812 if (!bb)
2813 continue;
2815 if (blocks && !TEST_BIT (blocks, i))
2816 continue;
2818 for (insn = BB_END (bb); ; insn = prev_insn)
2820 prev_insn = PREV_INSN (insn);
2821 if (need_fake_edge_p (insn))
2823 edge e;
2824 rtx split_at_insn = insn;
2826 /* Don't split the block between a call and an insn that should
2827 remain in the same block as the call. */
2828 if (CALL_P (insn))
2829 while (split_at_insn != BB_END (bb)
2830 && keep_with_call_p (NEXT_INSN (split_at_insn)))
2831 split_at_insn = NEXT_INSN (split_at_insn);
2833 /* The handling above of the final block before the epilogue
2834 should be enough to verify that there is no edge to the exit
2835 block in CFG already. Calling make_edge in such case would
2836 cause us to mark that edge as fake and remove it later. */
2838 #ifdef ENABLE_CHECKING
2839 if (split_at_insn == BB_END (bb))
2841 e = find_edge (bb, EXIT_BLOCK_PTR);
2842 gcc_assert (e == NULL);
2844 #endif
2846 /* Note that the following may create a new basic block
2847 and renumber the existing basic blocks. */
2848 if (split_at_insn != BB_END (bb))
2850 e = split_block (bb, split_at_insn);
2851 if (e)
2852 blocks_split++;
2855 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
2858 if (insn == BB_HEAD (bb))
2859 break;
2863 if (blocks_split)
2864 verify_flow_info ();
2866 return blocks_split;
2869 /* Add COMP_RTX as a condition at end of COND_BB. FIRST_HEAD is
2870 the conditional branch target, SECOND_HEAD should be the fall-thru
2871 there is no need to handle this here the loop versioning code handles
2872 this. the reason for SECON_HEAD is that it is needed for condition
2873 in trees, and this should be of the same type since it is a hook. */
2874 static void
2875 rtl_lv_add_condition_to_bb (basic_block first_head ,
2876 basic_block second_head ATTRIBUTE_UNUSED,
2877 basic_block cond_bb, void *comp_rtx)
2879 rtx label, seq, jump;
2880 rtx op0 = XEXP ((rtx)comp_rtx, 0);
2881 rtx op1 = XEXP ((rtx)comp_rtx, 1);
2882 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
2883 enum machine_mode mode;
2886 label = block_label (first_head);
2887 mode = GET_MODE (op0);
2888 if (mode == VOIDmode)
2889 mode = GET_MODE (op1);
2891 start_sequence ();
2892 op0 = force_operand (op0, NULL_RTX);
2893 op1 = force_operand (op1, NULL_RTX);
2894 do_compare_rtx_and_jump (op0, op1, comp, 0,
2895 mode, NULL_RTX, NULL_RTX, label);
2896 jump = get_last_insn ();
2897 JUMP_LABEL (jump) = label;
2898 LABEL_NUSES (label)++;
2899 seq = get_insns ();
2900 end_sequence ();
2902 /* Add the new cond , in the new head. */
2903 emit_insn_after(seq, BB_END(cond_bb));
2907 /* Given a block B with unconditional branch at its end, get the
2908 store the return the branch edge and the fall-thru edge in
2909 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
2910 static void
2911 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
2912 edge *fallthru_edge)
2914 edge e = EDGE_SUCC (b, 0);
2916 if (e->flags & EDGE_FALLTHRU)
2918 *fallthru_edge = e;
2919 *branch_edge = EDGE_SUCC (b, 1);
2921 else
2923 *branch_edge = e;
2924 *fallthru_edge = EDGE_SUCC (b, 1);
2928 void
2929 init_rtl_bb_info (basic_block bb)
2931 gcc_assert (!bb->il.rtl);
2932 bb->il.rtl = ggc_alloc_cleared (sizeof (struct rtl_bb_info));
2936 /* Add EXPR to the end of basic block BB. */
2939 insert_insn_end_bb_new (rtx pat, basic_block bb)
2941 rtx insn = BB_END (bb);
2942 rtx new_insn;
2943 rtx pat_end = pat;
2945 while (NEXT_INSN (pat_end) != NULL_RTX)
2946 pat_end = NEXT_INSN (pat_end);
2948 /* If the last insn is a jump, insert EXPR in front [taking care to
2949 handle cc0, etc. properly]. Similarly we need to care trapping
2950 instructions in presence of non-call exceptions. */
2952 if (JUMP_P (insn)
2953 || (NONJUMP_INSN_P (insn)
2954 && (!single_succ_p (bb)
2955 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
2957 #ifdef HAVE_cc0
2958 rtx note;
2959 #endif
2960 /* If this is a jump table, then we can't insert stuff here. Since
2961 we know the previous real insn must be the tablejump, we insert
2962 the new instruction just before the tablejump. */
2963 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
2964 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
2965 insn = prev_real_insn (insn);
2967 #ifdef HAVE_cc0
2968 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
2969 if cc0 isn't set. */
2970 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2971 if (note)
2972 insn = XEXP (note, 0);
2973 else
2975 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
2976 if (maybe_cc0_setter
2977 && INSN_P (maybe_cc0_setter)
2978 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
2979 insn = maybe_cc0_setter;
2981 #endif
2982 /* FIXME: What if something in cc0/jump uses value set in new
2983 insn? */
2984 new_insn = emit_insn_before_noloc (pat, insn);
2987 /* Likewise if the last insn is a call, as will happen in the presence
2988 of exception handling. */
2989 else if (CALL_P (insn)
2990 && (!single_succ_p (bb)
2991 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
2993 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
2994 we search backward and place the instructions before the first
2995 parameter is loaded. Do this for everyone for consistency and a
2996 presumption that we'll get better code elsewhere as well. */
2998 /* Since different machines initialize their parameter registers
2999 in different orders, assume nothing. Collect the set of all
3000 parameter registers. */
3001 insn = find_first_parameter_load (insn, BB_HEAD (bb));
3003 /* If we found all the parameter loads, then we want to insert
3004 before the first parameter load.
3006 If we did not find all the parameter loads, then we might have
3007 stopped on the head of the block, which could be a CODE_LABEL.
3008 If we inserted before the CODE_LABEL, then we would be putting
3009 the insn in the wrong basic block. In that case, put the insn
3010 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
3011 while (LABEL_P (insn)
3012 || NOTE_INSN_BASIC_BLOCK_P (insn))
3013 insn = NEXT_INSN (insn);
3015 new_insn = emit_insn_before_noloc (pat, insn);
3017 else
3018 new_insn = emit_insn_after_noloc (pat, insn);
3020 return new_insn;
3023 /* Implementation of CFG manipulation for linearized RTL. */
3024 struct cfg_hooks rtl_cfg_hooks = {
3025 "rtl",
3026 rtl_verify_flow_info,
3027 rtl_dump_bb,
3028 rtl_create_basic_block,
3029 rtl_redirect_edge_and_branch,
3030 rtl_redirect_edge_and_branch_force,
3031 rtl_delete_block,
3032 rtl_split_block,
3033 rtl_move_block_after,
3034 rtl_can_merge_blocks, /* can_merge_blocks_p */
3035 rtl_merge_blocks,
3036 rtl_predict_edge,
3037 rtl_predicted_by_p,
3038 NULL, /* can_duplicate_block_p */
3039 NULL, /* duplicate_block */
3040 rtl_split_edge,
3041 rtl_make_forwarder_block,
3042 rtl_tidy_fallthru_edge,
3043 rtl_block_ends_with_call_p,
3044 rtl_block_ends_with_condjump_p,
3045 rtl_flow_call_edges_add,
3046 NULL, /* execute_on_growing_pred */
3047 NULL, /* execute_on_shrinking_pred */
3048 NULL, /* duplicate loop for trees */
3049 NULL, /* lv_add_condition_to_bb */
3050 NULL, /* lv_adjust_loop_header_phi*/
3051 NULL, /* extract_cond_bb_edges */
3052 NULL /* flush_pending_stmts */
3055 /* Implementation of CFG manipulation for cfg layout RTL, where
3056 basic block connected via fallthru edges does not have to be adjacent.
3057 This representation will hopefully become the default one in future
3058 version of the compiler. */
3060 /* We do not want to declare these functions in a header file, since they
3061 should only be used through the cfghooks interface, and we do not want to
3062 move them here since it would require also moving quite a lot of related
3063 code. */
3064 extern bool cfg_layout_can_duplicate_bb_p (basic_block);
3065 extern basic_block cfg_layout_duplicate_bb (basic_block);
3067 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
3068 "cfglayout mode",
3069 rtl_verify_flow_info_1,
3070 rtl_dump_bb,
3071 cfg_layout_create_basic_block,
3072 cfg_layout_redirect_edge_and_branch,
3073 cfg_layout_redirect_edge_and_branch_force,
3074 cfg_layout_delete_block,
3075 cfg_layout_split_block,
3076 rtl_move_block_after,
3077 cfg_layout_can_merge_blocks_p,
3078 cfg_layout_merge_blocks,
3079 rtl_predict_edge,
3080 rtl_predicted_by_p,
3081 cfg_layout_can_duplicate_bb_p,
3082 cfg_layout_duplicate_bb,
3083 cfg_layout_split_edge,
3084 rtl_make_forwarder_block,
3085 NULL,
3086 rtl_block_ends_with_call_p,
3087 rtl_block_ends_with_condjump_p,
3088 rtl_flow_call_edges_add,
3089 NULL, /* execute_on_growing_pred */
3090 NULL, /* execute_on_shrinking_pred */
3091 duplicate_loop_to_header_edge, /* duplicate loop for trees */
3092 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
3093 NULL, /* lv_adjust_loop_header_phi*/
3094 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
3095 NULL /* flush_pending_stmts */