re PR rtl-optimization/34522 (inefficient code for long long multiply when only low...
[official-gcc.git] / gcc / cfgrtl.c
blobc157e08d84b18fac54d73f8237b3dfc04effd5ff
1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This file contains low level functions to manipulate the CFG and analyze it
23 that are aware of the RTL intermediate language.
25 Available functionality:
26 - Basic CFG/RTL manipulation API documented in cfghooks.h
27 - CFG-aware instruction chain manipulation
28 delete_insn, delete_insn_chain
29 - Edge splitting and committing to edges
30 insert_insn_on_edge, commit_edge_insertions
31 - CFG updating after insn simplification
32 purge_dead_edges, purge_all_dead_edges
34 Functions not supposed for generic use:
35 - Infrastructure to determine quickly basic block for insn
36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37 - Edge redirection with updating and optimizing of insn chain
38 block_label, tidy_fallthru_edge, force_nonfallthru */
40 #include "config.h"
41 #include "system.h"
42 #include "coretypes.h"
43 #include "tm.h"
44 #include "tree.h"
45 #include "rtl.h"
46 #include "hard-reg-set.h"
47 #include "basic-block.h"
48 #include "regs.h"
49 #include "flags.h"
50 #include "output.h"
51 #include "function.h"
52 #include "except.h"
53 #include "toplev.h"
54 #include "tm_p.h"
55 #include "obstack.h"
56 #include "insn-config.h"
57 #include "cfglayout.h"
58 #include "expr.h"
59 #include "target.h"
60 #include "cfgloop.h"
61 #include "ggc.h"
62 #include "tree-pass.h"
63 #include "df.h"
65 static int can_delete_note_p (const_rtx);
66 static int can_delete_label_p (const_rtx);
67 static void commit_one_edge_insertion (edge);
68 static basic_block rtl_split_edge (edge);
69 static bool rtl_move_block_after (basic_block, basic_block);
70 static int rtl_verify_flow_info (void);
71 static basic_block cfg_layout_split_block (basic_block, void *);
72 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
73 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
74 static void cfg_layout_delete_block (basic_block);
75 static void rtl_delete_block (basic_block);
76 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
77 static edge rtl_redirect_edge_and_branch (edge, basic_block);
78 static basic_block rtl_split_block (basic_block, void *);
79 static void rtl_dump_bb (basic_block, FILE *, int);
80 static int rtl_verify_flow_info_1 (void);
81 static void rtl_make_forwarder_block (edge);
83 /* Return true if NOTE is not one of the ones that must be kept paired,
84 so that we may simply delete it. */
86 static int
87 can_delete_note_p (const_rtx note)
89 return (NOTE_KIND (note) == NOTE_INSN_DELETED
90 || NOTE_KIND (note) == NOTE_INSN_BASIC_BLOCK);
93 /* True if a given label can be deleted. */
95 static int
96 can_delete_label_p (const_rtx label)
98 return (!LABEL_PRESERVE_P (label)
99 /* User declared labels must be preserved. */
100 && LABEL_NAME (label) == 0
101 && !in_expr_list_p (forced_labels, label));
104 /* Delete INSN by patching it out. Return the next insn. */
/* NOTE(review): the return-type line ("rtx") appears to have been lost
   in extraction; delete_insn returns the insn following INSN.  */
107 delete_insn (rtx insn)
109 rtx next = NEXT_INSN (insn);
110 rtx note;
111 bool really_delete = true;
113 if (LABEL_P (insn))
/* Labels that cannot be unlinked outright are converted in place to
   NOTE_INSN_DELETED_LABEL notes instead of being removed.  */
115 /* Some labels can't be directly removed from the INSN chain, as they
116 might be references via variables, constant pool etc.
117 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
118 if (! can_delete_label_p (insn))
120 const char *name = LABEL_NAME (insn);
122 really_delete = false;
123 PUT_CODE (insn, NOTE);
124 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
125 NOTE_DELETED_LABEL_NAME (insn) = name;
/* Whether kept as a note or not, the label must no longer appear in
   the nonlocal-goto handler label list.  */
128 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
131 if (really_delete)
133 /* If this insn has already been deleted, something is very wrong. */
134 gcc_assert (!INSN_DELETED_P (insn));
135 remove_insn (insn);
136 INSN_DELETED_P (insn) = 1;
139 /* If deleting a jump, decrement the use count of the label. Deleting
140 the label itself should happen in the normal course of block merging. */
141 if (JUMP_P (insn))
143 if (JUMP_LABEL (insn)
144 && LABEL_P (JUMP_LABEL (insn)))
145 LABEL_NUSES (JUMP_LABEL (insn))--;
147 /* If there are more targets, remove them too. */
148 while ((note
149 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
150 && LABEL_P (XEXP (note, 0)))
152 LABEL_NUSES (XEXP (note, 0))--;
153 remove_note (insn, note);
157 /* Also if deleting any insn that references a label as an operand. */
158 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
159 && LABEL_P (XEXP (note, 0)))
161 LABEL_NUSES (XEXP (note, 0))--;
162 remove_note (insn, note);
165 if (JUMP_P (insn)
166 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
167 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
/* Jump tables: drop one reference from each label listed in the
   ADDR_VEC / ADDR_DIFF_VEC.  */
169 rtx pat = PATTERN (insn);
170 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
171 int len = XVECLEN (pat, diff_vec_p);
172 int i;
174 for (i = 0; i < len; i++)
176 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
178 /* When deleting code in bulk (e.g. removing many unreachable
179 blocks) we can delete a label that's a target of the vector
180 before deleting the vector itself. */
181 if (!NOTE_P (label))
182 LABEL_NUSES (label)--;
186 return next;
189 /* Like delete_insn but also purge dead edges from BB. */
192 delete_insn_and_edges (rtx insn)
194 rtx x;
195 bool purge = false;
197 if (INSN_P (insn)
198 && BLOCK_FOR_INSN (insn)
199 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
200 purge = true;
201 x = delete_insn (insn);
202 if (purge)
203 purge_dead_edges (BLOCK_FOR_INSN (insn));
204 return x;
207 /* Unlink a chain of insns between START and FINISH, leaving notes
208 that must be paired. If CLEAR_BB is true, we set bb field for
209 insns that cannot be removed to NULL. */
211 void
212 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
214 rtx next;
216 /* Unchain the insns one by one. It would be quicker to delete all of these
217 with a single unchaining, rather than one at a time, but we need to keep
218 the NOTE's. */
219 while (1)
221 next = NEXT_INSN (start);
222 if (NOTE_P (start) && !can_delete_note_p (start))
/* Paired note: keep it in place (the then-branch is intentionally
   empty); everything else is actually deleted.  */
224 else
225 next = delete_insn (start);
/* A kept label becomes a NOTE_INSN_DELETED_LABEL and stays in the
   chain; detach it from its (about to vanish) block if requested.  */
227 if (clear_bb && !INSN_DELETED_P (start))
228 set_block_for_insn (start, NULL);
230 if (start == finish)
231 break;
232 start = next;
236 /* Like delete_insn_chain but also purge dead edges from BB. */
238 void
239 delete_insn_chain_and_edges (rtx first, rtx last)
241 bool purge = false;
243 if (INSN_P (last)
244 && BLOCK_FOR_INSN (last)
245 && BB_END (BLOCK_FOR_INSN (last)) == last)
246 purge = true;
247 delete_insn_chain (first, last, false);
248 if (purge)
249 purge_dead_edges (BLOCK_FOR_INSN (last));
252 /* Create a new basic block consisting of the instructions between HEAD and END
253 inclusive. This function is designed to allow fast BB construction - reuses
254 the note and basic block struct in BB_NOTE, if any and do not grow
255 BASIC_BLOCK chain and should be used directly only by CFG construction code.
256 END can be NULL in to create new empty basic block before HEAD. Both END
257 and HEAD can be NULL to create basic block at the end of INSN chain.
258 AFTER is the basic block we should be put after. */
260 basic_block
261 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
263 basic_block bb;
/* First branch: an unused (aux == NULL) bb struct already hangs off
   BB_NOTE -- recycle it rather than allocating a new one.  */
265 if (bb_note
266 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
267 && bb->aux == NULL)
269 /* If we found an existing note, thread it back onto the chain. */
271 rtx after;
/* The bb note must follow a leading label, otherwise precede HEAD.  */
273 if (LABEL_P (head))
274 after = head;
275 else
277 after = PREV_INSN (head);
278 head = bb_note;
281 if (after != bb_note && NEXT_INSN (after) != bb_note)
282 reorder_insns_nobb (bb_note, bb_note, after);
284 else
286 /* Otherwise we must create a note and a basic block structure. */
288 bb = alloc_block ();
290 init_rtl_bb_info (bb);
291 if (!head && !end)
/* Empty block at the end of the insn chain.  */
292 head = end = bb_note
293 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
294 else if (LABEL_P (head) && end)
296 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
297 if (head == end)
298 end = bb_note;
300 else
302 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
303 head = bb_note;
304 if (!end)
305 end = head;
308 NOTE_BASIC_BLOCK (bb_note) = bb;
311 /* Always include the bb note in the block. */
312 if (NEXT_INSN (end) == bb_note)
313 end = bb_note;
/* Wire the block into the CFG and dataflow machinery.  */
315 BB_HEAD (bb) = head;
316 BB_END (bb) = end;
317 bb->index = last_basic_block++;
318 bb->flags = BB_NEW | BB_RTL;
319 link_block (bb, after);
320 SET_BASIC_BLOCK (bb->index, bb);
321 df_bb_refs_record (bb->index, false);
322 update_bb_for_insn (bb);
323 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
325 /* Tag the block so that we know it has been used when considering
326 other basic block notes. */
327 bb->aux = bb;
329 return bb;
332 /* Create new basic block consisting of instructions in between HEAD and END
333 and place it to the BB chain after block AFTER. END can be NULL in to
334 create new empty basic block before HEAD. Both END and HEAD can be NULL to
335 create basic block at the end of INSN chain. */
337 static basic_block
338 rtl_create_basic_block (void *headp, void *endp, basic_block after)
340 rtx head = (rtx) headp, end = (rtx) endp;
341 basic_block bb;
343 /* Grow the basic block array if needed. */
344 if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
/* Grow by 25% plus a little, to amortize repeated insertions.  */
346 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
347 VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
350 n_basic_blocks++;
/* bb_note is NULL: let create_basic_block_structure emit a fresh one.
   Clear aux, which that function sets to tag the block as used.  */
352 bb = create_basic_block_structure (head, end, NULL, after);
353 bb->aux = NULL;
354 return bb;
357 static basic_block
358 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
360 basic_block newbb = rtl_create_basic_block (head, end, after);
362 return newbb;
365 /* Delete the insns in a (non-live) block. We physically delete every
366 non-deleted-note insn, and update the flow graph appropriately.
368 Return nonzero if we deleted an exception handler. */
370 /* ??? Preserving all such notes strikes me as wrong. It would be nice
371 to post-process the stream to remove empty blocks, loops, ranges, etc. */
373 static void
374 rtl_delete_block (basic_block b)
376 rtx insn, end;
378 /* If the head of this block is a CODE_LABEL, then it might be the
379 label for an exception handler which can't be reached. We need
380 to remove the label from the exception_handler_label list. */
381 insn = BB_HEAD (b);
382 if (LABEL_P (insn))
383 maybe_remove_eh_handler (insn);
/* get_last_bb_insn includes any trailing jump table / barrier.  */
385 end = get_last_bb_insn (b);
387 /* Selectively delete the entire chain. */
388 BB_HEAD (b) = NULL;
389 delete_insn_chain (insn, end, true);
392 if (dump_file)
393 fprintf (dump_file, "deleting block %d\n", b->index);
/* Tell the dataflow framework the block is gone.  */
394 df_bb_delete (b->index);
397 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
399 void
400 compute_bb_for_insn (void)
402 basic_block bb;
404 FOR_EACH_BB (bb)
406 rtx end = BB_END (bb);
407 rtx insn;
409 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
411 BLOCK_FOR_INSN (insn) = bb;
412 if (insn == end)
413 break;
418 /* Release the basic_block_for_insn array. */
420 unsigned int
421 free_bb_for_insn (void)
423 rtx insn;
424 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
425 if (!BARRIER_P (insn))
426 BLOCK_FOR_INSN (insn) = NULL;
427 return 0;
/* Pass descriptor that tears down per-insn CFG bookkeeping; its only
   action is free_bb_for_insn, and it records PROP_cfg as destroyed.  */
430 struct tree_opt_pass pass_free_cfg =
432 NULL, /* name */
433 NULL, /* gate */
434 free_bb_for_insn, /* execute */
435 NULL, /* sub */
436 NULL, /* next */
437 0, /* static_pass_number */
438 0, /* tv_id */
439 0, /* properties_required */
440 0, /* properties_provided */
441 PROP_cfg, /* properties_destroyed */
442 0, /* todo_flags_start */
443 0, /* todo_flags_finish */
444 0 /* letter */
447 /* Return RTX to emit after when we want to emit code on the entry of function. */
449 entry_of_function (void)
451 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
452 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
455 /* Emit INSN at the entry point of the function, ensuring that it is only
456 executed once per function. */
457 void
458 emit_insn_at_entry (rtx insn)
460 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
461 edge e = ei_safe_edge (ei);
462 gcc_assert (e->flags & EDGE_FALLTHRU);
464 insert_insn_on_edge (insn, e);
465 commit_edge_insertions ();
468 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
469 (or BARRIER if found) and notify df of the bb change.
470 The insn chain range is inclusive
471 (i.e. both BEGIN and END will be updated. */
473 static void
474 update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
476 rtx insn;
478 end = NEXT_INSN (end);
479 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
480 if (!BARRIER_P (insn))
481 df_insn_change_bb (insn, bb);
484 /* Update BLOCK_FOR_INSN of insns in BB to BB,
485 and notify df of the change. */
487 void
488 update_bb_for_insn (basic_block bb)
490 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
494 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
495 note associated with the BLOCK. */
497 static rtx
498 first_insn_after_basic_block_note (basic_block block)
500 rtx insn;
502 /* Get the first instruction in the block. */
503 insn = BB_HEAD (block);
505 if (insn == NULL_RTX)
506 return NULL_RTX;
507 if (LABEL_P (insn))
508 insn = NEXT_INSN (insn);
509 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
511 return NEXT_INSN (insn);
514 /* Creates a new basic block just after basic block B by splitting
515 everything after specified instruction I. */
517 static basic_block
518 rtl_split_block (basic_block bb, void *insnp)
520 basic_block new_bb;
521 rtx insn = (rtx) insnp;
522 edge e;
523 edge_iterator ei;
/* A NULL split point means "split right after the bb note".  */
525 if (!insn)
527 insn = first_insn_after_basic_block_note (bb);
529 if (insn)
530 insn = PREV_INSN (insn);
531 else
532 insn = get_last_insn ();
535 /* We probably should check type of the insn so that we do not create
536 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
537 bother. */
/* A block may not be empty; splitting at the very end needs a filler
   note so the new block has at least one insn.  */
538 if (insn == BB_END (bb))
539 emit_note_after (NOTE_INSN_DELETED, insn);
541 /* Create the new basic block. */
542 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
543 BB_COPY_PARTITION (new_bb, bb);
544 BB_END (bb) = insn;
546 /* Redirect the outgoing edges. */
/* The successor vector moves wholesale to the new block.  */
547 new_bb->succs = bb->succs;
548 bb->succs = NULL;
549 FOR_EACH_EDGE (e, ei, new_bb->succs)
550 e->src = new_bb;
552 /* The new block starts off being dirty. */
553 df_set_bb_dirty (bb);
554 return new_bb;
557 /* Blocks A and B are to be merged into a single block A. The insns
558 are already contiguous. */
560 static void
561 rtl_merge_blocks (basic_block a, basic_block b)
563 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
564 rtx del_first = NULL_RTX, del_last = NULL_RTX;
565 int b_empty = 0;
567 if (dump_file)
568 fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);
570 /* If there was a CODE_LABEL beginning B, delete it. */
571 if (LABEL_P (b_head))
573 /* This might have been an EH label that no longer has incoming
574 EH edges. Update data structures to match. */
575 maybe_remove_eh_handler (b_head);
577 /* Detect basic blocks with nothing but a label. This can happen
578 in particular at the end of a function. */
579 if (b_head == b_end)
580 b_empty = 1;
/* del_first..del_last accumulates the insns to discard from B.  */
582 del_first = del_last = b_head;
583 b_head = NEXT_INSN (b_head);
586 /* Delete the basic block note and handle blocks containing just that
587 note. */
588 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
590 if (b_head == b_end)
591 b_empty = 1;
592 if (! del_last)
593 del_first = b_head;
595 del_last = b_head;
596 b_head = NEXT_INSN (b_head);
599 /* If there was a jump out of A, delete it. */
600 if (JUMP_P (a_end))
602 rtx prev;
/* Find the insn just before the jump (skipping non-bb-note notes) so
   the deletion range can start at the jump itself.  */
604 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
605 if (!NOTE_P (prev)
606 || NOTE_INSN_BASIC_BLOCK_P (prev)
607 || prev == BB_HEAD (a))
608 break;
610 del_first = a_end;
612 #ifdef HAVE_cc0
613 /* If this was a conditional jump, we need to also delete
614 the insn that set cc0. */
615 if (only_sets_cc0_p (prev))
617 rtx tmp = prev;
619 prev = prev_nonnote_insn (prev);
620 if (!prev)
621 prev = BB_HEAD (a);
622 del_first = tmp;
624 #endif
626 a_end = PREV_INSN (del_first);
/* No jump, but a barrier after A also has to go.  */
628 else if (BARRIER_P (NEXT_INSN (a_end)))
629 del_first = NEXT_INSN (a_end);
631 /* Delete everything marked above as well as crap that might be
632 hanging out between the two blocks. */
633 BB_HEAD (b) = NULL;
634 delete_insn_chain (del_first, del_last, true);
636 /* Reassociate the insns of B with A. */
637 if (!b_empty)
639 update_bb_for_insn_chain (a_end, b_end, a);
641 a_end = b_end;
644 df_bb_delete (b->index);
645 BB_END (a) = a_end;
649 /* Return true when block A and B can be merged. */
651 static bool
652 rtl_can_merge_blocks (basic_block a, basic_block b)
654 /* If we are partitioning hot/cold basic blocks, we don't want to
655 mess up unconditional or indirect jumps that cross between hot
656 and cold sections.
658 Basic block partitioning may result in some jumps that appear to
659 be optimizable (or blocks that appear to be mergeable), but which really
660 must be left untouched (they are required to make it safely across
661 partition boundaries). See the comments at the top of
662 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
664 if (BB_PARTITION (a) != BB_PARTITION (b))
665 return false;
667 /* There must be exactly one edge in between the blocks. */
668 return (single_succ_p (a)
669 && single_succ (a) == b
670 && single_pred_p (b)
671 && a != b
672 /* Must be simple edge. */
673 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
674 && a->next_bb == b
675 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
676 /* If the jump insn has side effects,
677 we can't kill the edge. */
678 && (!JUMP_P (BB_END (a))
679 || (reload_completed
680 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
683 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
684 exist. */
687 block_label (basic_block block)
689 if (block == EXIT_BLOCK_PTR)
690 return NULL_RTX;
692 if (!LABEL_P (BB_HEAD (block)))
694 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
697 return BB_HEAD (block);
700 /* Attempt to perform edge redirection by replacing possibly complex jump
701 instruction by unconditional jump or removing jump completely. This can
702 apply only if all edges now point to the same block. The parameters and
703 return values are equivalent to redirect_edge_and_branch. */
705 edge
706 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
708 basic_block src = e->src;
709 rtx insn = BB_END (src), kill_from;
710 rtx set;
711 int fallthru = 0;
713 /* If we are partitioning hot/cold basic blocks, we don't want to
714 mess up unconditional or indirect jumps that cross between hot
715 and cold sections.
717 Basic block partitioning may result in some jumps that appear to
718 be optimizable (or blocks that appear to be mergeable), but which really
719 must be left untouched (they are required to make it safely across
720 partition boundaries). See the comments at the top of
721 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
723 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
724 || BB_PARTITION (src) != BB_PARTITION (target))
725 return NULL;
727 /* We can replace or remove a complex jump only when we have exactly
728 two edges. Also, if we have exactly one outgoing edge, we can
729 redirect that. */
730 if (EDGE_COUNT (src->succs) >= 3
731 /* Verify that all targets will be TARGET. Specifically, the
732 edge that is not E must also go to TARGET. */
733 || (EDGE_COUNT (src->succs) == 2
734 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
735 return NULL;
/* The jump must be removable: no side effects, not a tablejump when
   the tables can no longer be regenerated.  */
737 if (!onlyjump_p (insn))
738 return NULL;
739 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
740 return NULL;
742 /* Avoid removing branch with side effects. */
743 set = single_set (insn);
744 if (!set || side_effects_p (set))
745 return NULL;
747 /* In case we zap a conditional jump, we'll need to kill
748 the cc0 setter too. */
749 kill_from = insn;
750 #ifdef HAVE_cc0
751 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
752 && only_sets_cc0_p (PREV_INSN (insn)))
753 kill_from = PREV_INSN (insn);
754 #endif
756 /* See if we can create the fallthru edge. */
757 if (in_cfglayout || can_fallthru (src, target))
759 if (dump_file)
760 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
761 fallthru = 1;
763 /* Selectively unlink whole insn chain. */
764 if (in_cfglayout)
/* In cfglayout mode barriers live in the block footer; strip them
   there while preserving any jump tables.  */
766 rtx insn = src->il.rtl->footer;
768 delete_insn_chain (kill_from, BB_END (src), false);
770 /* Remove barriers but keep jumptables. */
771 while (insn)
773 if (BARRIER_P (insn))
775 if (PREV_INSN (insn))
776 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
777 else
778 src->il.rtl->footer = NEXT_INSN (insn);
779 if (NEXT_INSN (insn))
780 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
782 if (LABEL_P (insn))
783 break;
784 insn = NEXT_INSN (insn);
787 else
788 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
789 false);
792 /* If this already is simplejump, redirect it. */
793 else if (simplejump_p (insn))
795 if (e->dest == target)
796 return NULL;
797 if (dump_file)
798 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
799 INSN_UID (insn), e->dest->index, target->index);
800 if (!redirect_jump (insn, block_label (target), 0))
802 gcc_assert (target == EXIT_BLOCK_PTR);
803 return NULL;
807 /* Cannot do anything for target exit block. */
808 else if (target == EXIT_BLOCK_PTR)
809 return NULL;
811 /* Or replace possibly complicated jump insn by simple jump insn. */
812 else
814 rtx target_label = block_label (target);
815 rtx barrier, label, table;
817 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
818 JUMP_LABEL (BB_END (src)) = target_label;
819 LABEL_NUSES (target_label)++;
820 if (dump_file)
821 fprintf (dump_file, "Replacing insn %i by jump %i\n",
822 INSN_UID (insn), INSN_UID (BB_END (src)));
/* Kill the old (possibly complex) jump plus any cc0 setter.  */
825 delete_insn_chain (kill_from, insn, false);
827 /* Recognize a tablejump that we are converting to a
828 simple jump and remove its associated CODE_LABEL
829 and ADDR_VEC or ADDR_DIFF_VEC. */
830 if (tablejump_p (insn, &label, &table))
831 delete_insn_chain (label, table, false);
833 barrier = next_nonnote_insn (BB_END (src));
834 if (!barrier || !BARRIER_P (barrier))
835 emit_barrier_after (BB_END (src));
836 else
838 if (barrier != NEXT_INSN (BB_END (src)))
840 /* Move the jump before barrier so that the notes
841 which originally were or were created before jump table are
842 inside the basic block. */
843 rtx new_insn = BB_END (src);
845 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
846 PREV_INSN (barrier), src);
/* Splice the jump out of the chain ...  */
848 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
849 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
/* ... and splice it back in directly before the barrier.  */
851 NEXT_INSN (new_insn) = barrier;
852 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
854 PREV_INSN (new_insn) = PREV_INSN (barrier);
855 PREV_INSN (barrier) = new_insn;
860 /* Keep only one edge out and set proper flags. */
861 if (!single_succ_p (src))
862 remove_edge (e);
863 gcc_assert (single_succ_p (src));
865 e = single_succ_edge (src);
866 if (fallthru)
867 e->flags = EDGE_FALLTHRU;
868 else
869 e->flags = 0;
/* The surviving edge now carries all of SRC's outgoing weight.  */
871 e->probability = REG_BR_PROB_BASE;
872 e->count = src->count;
874 if (e->dest != target)
875 redirect_edge_succ (e, target);
876 return e;
879 /* Redirect edge representing branch of (un)conditional jump or tablejump,
880 NULL on failure */
881 static edge
882 redirect_branch_edge (edge e, basic_block target)
884 rtx tmp;
885 rtx old_label = BB_HEAD (e->dest);
886 basic_block src = e->src;
887 rtx insn = BB_END (src);
889 /* We can only redirect non-fallthru edges of jump insn. */
890 if (e->flags & EDGE_FALLTHRU)
891 return NULL;
892 else if (!JUMP_P (insn))
893 return NULL;
895 /* Recognize a tablejump and adjust all matching cases. */
896 if (tablejump_p (insn, NULL, &tmp))
898 rtvec vec;
899 int j;
900 rtx new_label = block_label (target);
902 if (target == EXIT_BLOCK_PTR)
903 return NULL;
904 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
905 vec = XVEC (PATTERN (tmp), 0);
906 else
907 vec = XVEC (PATTERN (tmp), 1);
/* Rewrite every table entry that referenced the old destination,
   keeping label use counts in sync.  */
909 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
910 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
912 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
913 --LABEL_NUSES (old_label);
914 ++LABEL_NUSES (new_label);
917 /* Handle casesi dispatch insns. */
918 if ((tmp = single_set (insn)) != NULL
919 && SET_DEST (tmp) == pc_rtx
920 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
921 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
922 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
/* The casesi default label also points at the old destination.  */
924 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
925 new_label);
926 --LABEL_NUSES (old_label);
927 ++LABEL_NUSES (new_label);
930 else
932 /* ?? We may play the games with moving the named labels from
933 one basic block to the other in case only one computed_jump is
934 available. */
935 if (computed_jump_p (insn)
936 /* A return instruction can't be redirected. */
937 || returnjump_p (insn))
938 return NULL;
940 /* If the insn doesn't go where we think, we're confused. */
941 gcc_assert (JUMP_LABEL (insn) == old_label);
943 /* If the substitution doesn't succeed, die. This can happen
944 if the back end emitted unrecognizable instructions or if
945 target is exit block on some arches. */
946 if (!redirect_jump (insn, block_label (target), 0))
948 gcc_assert (target == EXIT_BLOCK_PTR);
949 return NULL;
953 if (dump_file)
954 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
955 e->src->index, e->dest->index, target->index);
/* Finally move the edge itself; nodup merges parallel edges.  */
957 if (e->dest != target)
958 e = redirect_edge_succ_nodup (e, target);
960 return e;
963 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
964 expense of adding new instructions or reordering basic blocks.
966 Function can be also called with edge destination equivalent to the TARGET.
967 Then it should try the simplifications and do nothing if none is possible.
969 Return edge representing the branch if transformation succeeded. Return NULL
970 on failure.
971 We still return NULL in case E already destinated TARGET and we didn't
972 managed to simplify instruction stream. */
974 static edge
975 rtl_redirect_edge_and_branch (edge e, basic_block target)
977 edge ret;
978 basic_block src = e->src;
/* Abnormal-call and EH edges cannot be redirected by editing jumps.  */
980 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
981 return NULL;
983 if (e->dest == target)
984 return e;
/* First try the cheap route: replace/remove the jump entirely.  */
986 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
988 df_set_bb_dirty (src);
989 return ret;
/* Otherwise rewrite the branch insn itself.  */
992 ret = redirect_branch_edge (e, target);
993 if (!ret)
994 return NULL;
996 df_set_bb_dirty (src);
997 return ret;
1000 /* Like force_nonfallthru below, but additionally performs redirection
1001 Used by redirect_edge_and_branch_force. */
1003 static basic_block
1004 force_nonfallthru_and_redirect (edge e, basic_block target)
1006 basic_block jump_block, new_bb = NULL, src = e->src;
1007 rtx note;
1008 edge new_edge;
1009 int abnormal_edge_flags = 0;
1011 /* In the case the last instruction is conditional jump to the next
1012 instruction, first redirect the jump itself and then continue
1013 by creating a basic block afterwards to redirect fallthru edge. */
1014 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1015 && any_condjump_p (BB_END (e->src))
1016 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1018 rtx note;
1019 edge b = unchecked_make_edge (e->src, target, 0);
1020 bool redirected;
1022 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1023 gcc_assert (redirected);
/* Split the original edge's probability/count between the redirected
   branch edge B and the remaining fallthru edge E.  */
1025 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1026 if (note)
1028 int prob = INTVAL (XEXP (note, 0));
1030 b->probability = prob;
1031 b->count = e->count * prob / REG_BR_PROB_BASE;
1032 e->probability -= e->probability;
1033 e->count -= b->count;
1034 if (e->probability < 0)
1035 e->probability = 0;
1036 if (e->count < 0)
1037 e->count = 0;
1041 if (e->flags & EDGE_ABNORMAL)
1043 /* Irritating special case - fallthru edge to the same block as abnormal
1044 edge.
1045 We can't redirect abnormal edge, but we still can split the fallthru
1046 one and create separate abnormal edge to original destination.
1047 This allows bb-reorder to make such edge non-fallthru. */
1048 gcc_assert (e->dest == target);
1049 abnormal_edge_flags = e->flags & ~(EDGE_FALLTHRU | EDGE_CAN_FALLTHRU);
1050 e->flags &= EDGE_FALLTHRU | EDGE_CAN_FALLTHRU;
1052 else
1054 gcc_assert (e->flags & EDGE_FALLTHRU);
1055 if (e->src == ENTRY_BLOCK_PTR)
1057 /* We can't redirect the entry block. Create an empty block
1058 at the start of the function which we use to add the new
1059 jump. */
1060 edge tmp;
1061 edge_iterator ei;
1062 bool found = false;
1064 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1066 /* Change the existing edge's source to be the new block, and add
1067 a new edge from the entry block to the new block. */
1068 e->src = bb;
1069 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1071 if (tmp == e)
1073 VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
1074 found = true;
1075 break;
1077 else
1078 ei_next (&ei);
1081 gcc_assert (found);
1083 VEC_safe_push (edge, gc, bb->succs, e);
1084 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
/* A dedicated jump block is needed when SRC has other successors or
   when the abnormal-edge special case above applies.  */
1088 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags)
1090 /* Create the new structures. */
1092 /* If the old block ended with a tablejump, skip its table
1093 by searching forward from there. Otherwise start searching
1094 forward from the last instruction of the old block. */
1095 if (!tablejump_p (BB_END (e->src), NULL, &note))
1096 note = BB_END (e->src);
1097 note = NEXT_INSN (note);
1099 jump_block = create_basic_block (note, NULL, e->src);
1100 jump_block->count = e->count;
1101 jump_block->frequency = EDGE_FREQUENCY (e);
1102 jump_block->loop_depth = target->loop_depth;
1104 /* Make sure new block ends up in correct hot/cold section. */
1106 BB_COPY_PARTITION (jump_block, e->src);
1107 if (flag_reorder_blocks_and_partition
1108 && targetm.have_named_sections
1109 && JUMP_P (BB_END (jump_block))
1110 && !any_condjump_p (BB_END (jump_block))
1111 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1112 REG_NOTES (BB_END (jump_block)) = gen_rtx_EXPR_LIST (REG_CROSSING_JUMP,
1113 NULL_RTX,
1114 REG_NOTES
1115 (BB_END
1116 (jump_block)));
1118 /* Wire edge in. */
1119 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1120 new_edge->probability = e->probability;
1121 new_edge->count = e->count;
1123 /* Redirect old edge. */
1124 redirect_edge_pred (e, jump_block);
1125 e->probability = REG_BR_PROB_BASE;
1127 new_bb = jump_block;
1129 else
1130 jump_block = e->src;
/* Emit the actual jump (or return when targeting the exit block).  */
1132 e->flags &= ~EDGE_FALLTHRU;
1133 if (target == EXIT_BLOCK_PTR)
1135 #ifdef HAVE_return
1136 emit_jump_insn_after_noloc (gen_return (), BB_END (jump_block));
1137 #else
1138 gcc_unreachable ();
1139 #endif
1141 else
1143 rtx label = block_label (target);
1144 emit_jump_insn_after_noloc (gen_jump (label), BB_END (jump_block));
1145 JUMP_LABEL (BB_END (jump_block)) = label;
1146 LABEL_NUSES (label)++;
1149 emit_barrier_after (BB_END (jump_block));
1150 redirect_edge_succ_nodup (e, target);
1152 if (abnormal_edge_flags)
1153 make_edge (src, target, abnormal_edge_flags);
1155 df_mark_solutions_dirty ();
1156 return new_bb;
1159 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1160 (and possibly create new basic block) to make edge non-fallthru.
1161 Return newly created BB or NULL if none. */
1163 basic_block
1164 force_nonfallthru (edge e)
1166 return force_nonfallthru_and_redirect (e, e->dest);
1169 /* Redirect edge even at the expense of creating new jump insn or
1170 basic block. Return new basic block if created, NULL otherwise.
1171 Conversion must be possible. */
1173 static basic_block
1174 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1176 if (redirect_edge_and_branch (e, target)
1177 || e->dest == target)
1178 return NULL;
1180 /* In case the edge redirection failed, try to force it to be non-fallthru
1181 and redirect newly created simplejump. */
1182 df_set_bb_dirty (e->src);
1183 return force_nonfallthru_and_redirect (e, target);
1186 /* The given edge should potentially be a fallthru edge. If that is in
1187 fact true, delete the jump and barriers that are in the way. */
1189 static void
1190 rtl_tidy_fallthru_edge (edge e)
1192 rtx q;
1193 basic_block b = e->src, c = b->next_bb;
1195 /* ??? In a late-running flow pass, other folks may have deleted basic
1196 blocks by nopping out blocks, leaving multiple BARRIERs between here
1197 and the target label. They ought to be chastised and fixed.
1199 We can also wind up with a sequence of undeletable labels between
1200 one block and the next.
1202 So search through a sequence of barriers, labels, and notes for
1203 the head of block C and assert that we really do fall through. */
1205 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1206 if (INSN_P (q))
1207 return;
1209 /* Remove what will soon cease being the jump insn from the source block.
1210 If block B consisted only of this single jump, turn it into a deleted
1211 note. */
1212 q = BB_END (b);
1213 if (JUMP_P (q)
1214 && onlyjump_p (q)
1215 && (any_uncondjump_p (q)
1216 || single_succ_p (b)))
1218 #ifdef HAVE_cc0
1219 /* If this was a conditional jump, we need to also delete
1220 the insn that set cc0. */
1221 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1222 q = PREV_INSN (q);
1223 #endif
1225 q = PREV_INSN (q);
1228 /* Selectively unlink the sequence. */
1229 if (q != PREV_INSN (BB_HEAD (c)))
1230 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1232 e->flags |= EDGE_FALLTHRU;
1235 /* Should move basic block BB after basic block AFTER. NIY. */
1237 static bool
1238 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1239 basic_block after ATTRIBUTE_UNUSED)
1241 return false;
1244 /* Split a (typically critical) edge. Return the new block.
1245 The edge must not be abnormal.
1247 ??? The code generally expects to be called on critical edges.
1248 The case of a block ending in an unconditional jump to a
1249 block with multiple predecessors is not handled optimally. */
1251 static basic_block
1252 rtl_split_edge (edge edge_in)
1254 basic_block bb;
1255 rtx before;
1257 /* Abnormal edges cannot be split. */
1258 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1260 /* We are going to place the new block in front of edge destination.
1261 Avoid existence of fallthru predecessors. */
1262 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1264 edge e;
1265 edge_iterator ei;
1267 FOR_EACH_EDGE (e, ei, edge_in->dest->preds)
1268 if (e->flags & EDGE_FALLTHRU)
1269 break;
1271 if (e)
1272 force_nonfallthru (e);
1275 /* Create the basic block note. */
1276 if (edge_in->dest != EXIT_BLOCK_PTR)
1277 before = BB_HEAD (edge_in->dest);
1278 else
1279 before = NULL_RTX;
1281 /* If this is a fall through edge to the exit block, the blocks might be
1282 not adjacent, and the right place is the after the source. */
1283 if (edge_in->flags & EDGE_FALLTHRU && edge_in->dest == EXIT_BLOCK_PTR)
1285 before = NEXT_INSN (BB_END (edge_in->src));
1286 bb = create_basic_block (before, NULL, edge_in->src);
1287 BB_COPY_PARTITION (bb, edge_in->src);
1289 else
1291 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1292 /* ??? Why not edge_in->dest->prev_bb here? */
1293 BB_COPY_PARTITION (bb, edge_in->dest);
1296 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1298 /* For non-fallthru edges, we must adjust the predecessor's
1299 jump instruction to target our new block. */
1300 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1302 edge redirected = redirect_edge_and_branch (edge_in, bb);
1303 gcc_assert (redirected);
1305 else
1306 redirect_edge_succ (edge_in, bb);
1308 return bb;
1311 /* Queue instructions for insertion on an edge between two basic blocks.
1312 The new instructions and basic blocks (if any) will not appear in the
1313 CFG until commit_edge_insertions is called. */
1315 void
1316 insert_insn_on_edge (rtx pattern, edge e)
1318 /* We cannot insert instructions on an abnormal critical edge.
1319 It will be easier to find the culprit if we die now. */
1320 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1322 if (e->insns.r == NULL_RTX)
1323 start_sequence ();
1324 else
1325 push_to_sequence (e->insns.r);
1327 emit_insn (pattern);
1329 e->insns.r = get_insns ();
1330 end_sequence ();
1333 /* Update the CFG for the instructions queued on edge E. */
1335 static void
1336 commit_one_edge_insertion (edge e)
1338 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1339 basic_block bb = NULL;
1341 /* Pull the insns off the edge now since the edge might go away. */
1342 insns = e->insns.r;
1343 e->insns.r = NULL_RTX;
1345 if (!before && !after)
1347 /* Figure out where to put these things. If the destination has
1348 one predecessor, insert there. Except for the exit block. */
1349 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
1351 bb = e->dest;
1353 /* Get the location correct wrt a code label, and "nice" wrt
1354 a basic block note, and before everything else. */
1355 tmp = BB_HEAD (bb);
1356 if (LABEL_P (tmp))
1357 tmp = NEXT_INSN (tmp);
1358 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1359 tmp = NEXT_INSN (tmp);
1360 if (tmp == BB_HEAD (bb))
1361 before = tmp;
1362 else if (tmp)
1363 after = PREV_INSN (tmp);
1364 else
1365 after = get_last_insn ();
1368 /* If the source has one successor and the edge is not abnormal,
1369 insert there. Except for the entry block. */
1370 else if ((e->flags & EDGE_ABNORMAL) == 0
1371 && single_succ_p (e->src)
1372 && e->src != ENTRY_BLOCK_PTR)
1374 bb = e->src;
1376 /* It is possible to have a non-simple jump here. Consider a target
1377 where some forms of unconditional jumps clobber a register. This
1378 happens on the fr30 for example.
1380 We know this block has a single successor, so we can just emit
1381 the queued insns before the jump. */
1382 if (JUMP_P (BB_END (bb)))
1383 before = BB_END (bb);
1384 else
1386 /* We'd better be fallthru, or we've lost track of
1387 what's what. */
1388 gcc_assert (e->flags & EDGE_FALLTHRU);
1390 after = BB_END (bb);
1393 /* Otherwise we must split the edge. */
1394 else
1396 bb = split_edge (e);
1397 after = BB_END (bb);
1399 if (flag_reorder_blocks_and_partition
1400 && targetm.have_named_sections
1401 && e->src != ENTRY_BLOCK_PTR
1402 && BB_PARTITION (e->src) == BB_COLD_PARTITION
1403 && !(e->flags & EDGE_CROSSING))
1405 rtx bb_note, cur_insn;
1407 bb_note = NULL_RTX;
1408 for (cur_insn = BB_HEAD (bb); cur_insn != NEXT_INSN (BB_END (bb));
1409 cur_insn = NEXT_INSN (cur_insn))
1410 if (NOTE_INSN_BASIC_BLOCK_P (cur_insn))
1412 bb_note = cur_insn;
1413 break;
1416 if (JUMP_P (BB_END (bb))
1417 && !any_condjump_p (BB_END (bb))
1418 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1419 REG_NOTES (BB_END (bb)) = gen_rtx_EXPR_LIST
1420 (REG_CROSSING_JUMP, NULL_RTX, REG_NOTES (BB_END (bb)));
1425 /* Now that we've found the spot, do the insertion. */
1427 if (before)
1429 emit_insn_before_noloc (insns, before, bb);
1430 last = prev_nonnote_insn (before);
1432 else
1433 last = emit_insn_after_noloc (insns, after, bb);
1435 if (returnjump_p (last))
1437 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1438 This is not currently a problem because this only happens
1439 for the (single) epilogue, which already has a fallthru edge
1440 to EXIT. */
1442 e = single_succ_edge (bb);
1443 gcc_assert (e->dest == EXIT_BLOCK_PTR
1444 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
1446 e->flags &= ~EDGE_FALLTHRU;
1447 emit_barrier_after (last);
1449 if (before)
1450 delete_insn (before);
1452 else
1453 gcc_assert (!JUMP_P (last));
1455 /* Mark the basic block for find_many_sub_basic_blocks. */
1456 if (current_ir_type () != IR_RTL_CFGLAYOUT)
1457 bb->aux = &bb->aux;
1460 /* Update the CFG for all queued instructions. */
1462 void
1463 commit_edge_insertions (void)
1465 basic_block bb;
1466 sbitmap blocks;
1467 bool changed = false;
1469 #ifdef ENABLE_CHECKING
1470 verify_flow_info ();
1471 #endif
1473 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1475 edge e;
1476 edge_iterator ei;
1478 FOR_EACH_EDGE (e, ei, bb->succs)
1479 if (e->insns.r)
1481 changed = true;
1482 commit_one_edge_insertion (e);
1486 if (!changed)
1487 return;
1489 /* In the old rtl CFG API, it was OK to insert control flow on an
1490 edge, apparently? In cfglayout mode, this will *not* work, and
1491 the caller is responsible for making sure that control flow is
1492 valid at all times. */
1493 if (current_ir_type () == IR_RTL_CFGLAYOUT)
1494 return;
1496 blocks = sbitmap_alloc (last_basic_block);
1497 sbitmap_zero (blocks);
1498 FOR_EACH_BB (bb)
1499 if (bb->aux)
1501 SET_BIT (blocks, bb->index);
1502 /* Check for forgotten bb->aux values before commit_edge_insertions
1503 call. */
1504 gcc_assert (bb->aux == &bb->aux);
1505 bb->aux = NULL;
1507 find_many_sub_basic_blocks (blocks);
1508 sbitmap_free (blocks);
1512 /* Print out RTL-specific basic block information (live information
1513 at start and end). */
1515 static void
1516 rtl_dump_bb (basic_block bb, FILE *outf, int indent)
1518 rtx insn;
1519 rtx last;
1520 char *s_indent;
1522 s_indent = (char *) alloca ((size_t) indent + 1);
1523 memset (s_indent, ' ', (size_t) indent);
1524 s_indent[indent] = '\0';
1526 if (df)
1528 df_dump_top (bb, outf);
1529 putc ('\n', outf);
1532 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1533 insn = NEXT_INSN (insn))
1534 print_rtl_single (outf, insn);
1536 if (df)
1538 df_dump_bottom (bb, outf);
1539 putc ('\n', outf);
1544 /* Like print_rtl, but also print out live information for the start of each
1545 basic block. */
1547 void
1548 print_rtl_with_bb (FILE *outf, const_rtx rtx_first)
1550 const_rtx tmp_rtx;
1551 if (rtx_first == 0)
1552 fprintf (outf, "(nil)\n");
1553 else
1555 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1556 int max_uid = get_max_uid ();
1557 basic_block *start = XCNEWVEC (basic_block, max_uid);
1558 basic_block *end = XCNEWVEC (basic_block, max_uid);
1559 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
1561 basic_block bb;
1563 if (df)
1564 df_dump_start (outf);
1566 FOR_EACH_BB_REVERSE (bb)
1568 rtx x;
1570 start[INSN_UID (BB_HEAD (bb))] = bb;
1571 end[INSN_UID (BB_END (bb))] = bb;
1572 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1574 enum bb_state state = IN_MULTIPLE_BB;
1576 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1577 state = IN_ONE_BB;
1578 in_bb_p[INSN_UID (x)] = state;
1580 if (x == BB_END (bb))
1581 break;
1585 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1587 int did_output;
1588 if ((bb = start[INSN_UID (tmp_rtx)]) != NULL)
1590 edge e;
1591 edge_iterator ei;
1593 fprintf (outf, ";; Start of basic block (");
1594 FOR_EACH_EDGE (e, ei, bb->preds)
1595 fprintf (outf, " %d", e->src->index);
1596 fprintf (outf, ") -> %d\n", bb->index);
1598 if (df)
1600 df_dump_top (bb, outf);
1601 putc ('\n', outf);
1603 FOR_EACH_EDGE (e, ei, bb->preds)
1605 fputs (";; Pred edge ", outf);
1606 dump_edge_info (outf, e, 0);
1607 fputc ('\n', outf);
1611 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1612 && !NOTE_P (tmp_rtx)
1613 && !BARRIER_P (tmp_rtx))
1614 fprintf (outf, ";; Insn is not within a basic block\n");
1615 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1616 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1618 did_output = print_rtl_single (outf, tmp_rtx);
1620 if ((bb = end[INSN_UID (tmp_rtx)]) != NULL)
1622 edge e;
1623 edge_iterator ei;
1625 fprintf (outf, ";; End of basic block %d -> (", bb->index);
1626 FOR_EACH_EDGE (e, ei, bb->succs)
1627 fprintf (outf, " %d", e->dest->index);
1628 fprintf (outf, ")\n");
1630 if (df)
1632 df_dump_bottom (bb, outf);
1633 putc ('\n', outf);
1635 putc ('\n', outf);
1636 FOR_EACH_EDGE (e, ei, bb->succs)
1638 fputs (";; Succ edge ", outf);
1639 dump_edge_info (outf, e, 1);
1640 fputc ('\n', outf);
1643 if (did_output)
1644 putc ('\n', outf);
1647 free (start);
1648 free (end);
1649 free (in_bb_p);
1652 if (current_function_epilogue_delay_list != 0)
1654 fprintf (outf, "\n;; Insns in epilogue delay list:\n\n");
1655 for (tmp_rtx = current_function_epilogue_delay_list; tmp_rtx != 0;
1656 tmp_rtx = XEXP (tmp_rtx, 1))
1657 print_rtl_single (outf, XEXP (tmp_rtx, 0));
1661 void
1662 update_br_prob_note (basic_block bb)
1664 rtx note;
1665 if (!JUMP_P (BB_END (bb)))
1666 return;
1667 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
1668 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
1669 return;
1670 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
1673 /* Get the last insn associated with block BB (that includes barriers and
1674 tablejumps after BB). */
1676 get_last_bb_insn (basic_block bb)
1678 rtx tmp;
1679 rtx end = BB_END (bb);
1681 /* Include any jump table following the basic block. */
1682 if (tablejump_p (end, NULL, &tmp))
1683 end = tmp;
1685 /* Include any barriers that may follow the basic block. */
1686 tmp = next_nonnote_insn (end);
1687 while (tmp && BARRIER_P (tmp))
1689 end = tmp;
1690 tmp = next_nonnote_insn (end);
1693 return end;
1696 /* Verify the CFG and RTL consistency common for both underlying RTL and
1697 cfglayout RTL.
1699 Currently it does following checks:
1701 - overlapping of basic blocks
1702 - insns with wrong BLOCK_FOR_INSN pointers
1703 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
1704 - tails of basic blocks (ensure that boundary is necessary)
1705 - scans body of the basic block for JUMP_INSN, CODE_LABEL
1706 and NOTE_INSN_BASIC_BLOCK
1707 - verify that no fall_thru edge crosses hot/cold partition boundaries
1708 - verify that there are no pending RTL branch predictions
1710 In future it can be extended check a lot of other stuff as well
1711 (reachability of basic blocks, life information, etc. etc.). */
1713 static int
1714 rtl_verify_flow_info_1 (void)
1716 rtx x;
1717 int err = 0;
1718 basic_block bb;
1720 /* Check the general integrity of the basic blocks. */
1721 FOR_EACH_BB_REVERSE (bb)
1723 rtx insn;
1725 if (!(bb->flags & BB_RTL))
1727 error ("BB_RTL flag not set for block %d", bb->index);
1728 err = 1;
1731 FOR_BB_INSNS (bb, insn)
1732 if (BLOCK_FOR_INSN (insn) != bb)
1734 error ("insn %d basic block pointer is %d, should be %d",
1735 INSN_UID (insn),
1736 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
1737 bb->index);
1738 err = 1;
1741 for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
1742 if (!BARRIER_P (insn)
1743 && BLOCK_FOR_INSN (insn) != NULL)
1745 error ("insn %d in header of bb %d has non-NULL basic block",
1746 INSN_UID (insn), bb->index);
1747 err = 1;
1749 for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
1750 if (!BARRIER_P (insn)
1751 && BLOCK_FOR_INSN (insn) != NULL)
1753 error ("insn %d in footer of bb %d has non-NULL basic block",
1754 INSN_UID (insn), bb->index);
1755 err = 1;
1759 /* Now check the basic blocks (boundaries etc.) */
1760 FOR_EACH_BB_REVERSE (bb)
1762 int n_fallthru = 0, n_eh = 0, n_call = 0, n_abnormal = 0, n_branch = 0;
1763 edge e, fallthru = NULL;
1764 rtx note;
1765 edge_iterator ei;
1767 if (JUMP_P (BB_END (bb))
1768 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
1769 && EDGE_COUNT (bb->succs) >= 2
1770 && any_condjump_p (BB_END (bb)))
1772 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
1773 && profile_status != PROFILE_ABSENT)
1775 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
1776 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
1777 err = 1;
1780 FOR_EACH_EDGE (e, ei, bb->succs)
1782 if (e->flags & EDGE_FALLTHRU)
1784 n_fallthru++, fallthru = e;
1785 if ((e->flags & EDGE_CROSSING)
1786 || (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
1787 && e->src != ENTRY_BLOCK_PTR
1788 && e->dest != EXIT_BLOCK_PTR))
1790 error ("fallthru edge crosses section boundary (bb %i)",
1791 e->src->index);
1792 err = 1;
1796 if ((e->flags & ~(EDGE_DFS_BACK
1797 | EDGE_CAN_FALLTHRU
1798 | EDGE_IRREDUCIBLE_LOOP
1799 | EDGE_LOOP_EXIT
1800 | EDGE_CROSSING)) == 0)
1801 n_branch++;
1803 if (e->flags & EDGE_ABNORMAL_CALL)
1804 n_call++;
1806 if (e->flags & EDGE_EH)
1807 n_eh++;
1808 else if (e->flags & EDGE_ABNORMAL)
1809 n_abnormal++;
1812 if (n_eh && GET_CODE (PATTERN (BB_END (bb))) != RESX
1813 && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
1815 error ("missing REG_EH_REGION note in the end of bb %i", bb->index);
1816 err = 1;
1818 if (n_branch
1819 && (!JUMP_P (BB_END (bb))
1820 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
1821 || any_condjump_p (BB_END (bb))))))
1823 error ("too many outgoing branch edges from bb %i", bb->index);
1824 err = 1;
1826 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
1828 error ("fallthru edge after unconditional jump %i", bb->index);
1829 err = 1;
1831 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
1833 error ("wrong amount of branch edges after unconditional jump %i", bb->index);
1834 err = 1;
1836 if (n_branch != 1 && any_condjump_p (BB_END (bb))
1837 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
1839 error ("wrong amount of branch edges after conditional jump %i",
1840 bb->index);
1841 err = 1;
1843 if (n_call && !CALL_P (BB_END (bb)))
1845 error ("call edges for non-call insn in bb %i", bb->index);
1846 err = 1;
1848 if (n_abnormal
1849 && (!CALL_P (BB_END (bb)) && n_call != n_abnormal)
1850 && (!JUMP_P (BB_END (bb))
1851 || any_condjump_p (BB_END (bb))
1852 || any_uncondjump_p (BB_END (bb))))
1854 error ("abnormal edges for no purpose in bb %i", bb->index);
1855 err = 1;
1858 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
1859 /* We may have a barrier inside a basic block before dead code
1860 elimination. There is no BLOCK_FOR_INSN field in a barrier. */
1861 if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
1863 debug_rtx (x);
1864 if (! BLOCK_FOR_INSN (x))
1865 error
1866 ("insn %d inside basic block %d but block_for_insn is NULL",
1867 INSN_UID (x), bb->index);
1868 else
1869 error
1870 ("insn %d inside basic block %d but block_for_insn is %i",
1871 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
1873 err = 1;
1876 /* OK pointers are correct. Now check the header of basic
1877 block. It ought to contain optional CODE_LABEL followed
1878 by NOTE_BASIC_BLOCK. */
1879 x = BB_HEAD (bb);
1880 if (LABEL_P (x))
1882 if (BB_END (bb) == x)
1884 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1885 bb->index);
1886 err = 1;
1889 x = NEXT_INSN (x);
1892 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
1894 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
1895 bb->index);
1896 err = 1;
1899 if (BB_END (bb) == x)
1900 /* Do checks for empty blocks here. */
1902 else
1903 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
1905 if (NOTE_INSN_BASIC_BLOCK_P (x))
1907 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
1908 INSN_UID (x), bb->index);
1909 err = 1;
1912 if (x == BB_END (bb))
1913 break;
1915 if (control_flow_insn_p (x))
1917 error ("in basic block %d:", bb->index);
1918 fatal_insn ("flow control insn inside a basic block", x);
1923 /* Clean up. */
1924 return err;
1927 /* Verify the CFG and RTL consistency common for both underlying RTL and
1928 cfglayout RTL.
1930 Currently it does following checks:
1931 - all checks of rtl_verify_flow_info_1
1932 - test head/end pointers
1933 - check that all insns are in the basic blocks
1934 (except the switch handling code, barriers and notes)
1935 - check that all returns are followed by barriers
1936 - check that all fallthru edge points to the adjacent blocks. */
1938 static int
1939 rtl_verify_flow_info (void)
1941 basic_block bb;
1942 int err = rtl_verify_flow_info_1 ();
1943 rtx x;
1944 rtx last_head = get_last_insn ();
1945 basic_block *bb_info;
1946 int num_bb_notes;
1947 const rtx rtx_first = get_insns ();
1948 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
1949 const int max_uid = get_max_uid ();
1951 bb_info = XCNEWVEC (basic_block, max_uid);
1953 FOR_EACH_BB_REVERSE (bb)
1955 edge e;
1956 edge_iterator ei;
1957 rtx head = BB_HEAD (bb);
1958 rtx end = BB_END (bb);
1960 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
1962 /* Verify the end of the basic block is in the INSN chain. */
1963 if (x == end)
1964 break;
1966 /* And that the code outside of basic blocks has NULL bb field. */
1967 if (!BARRIER_P (x)
1968 && BLOCK_FOR_INSN (x) != NULL)
1970 error ("insn %d outside of basic blocks has non-NULL bb field",
1971 INSN_UID (x));
1972 err = 1;
1976 if (!x)
1978 error ("end insn %d for block %d not found in the insn stream",
1979 INSN_UID (end), bb->index);
1980 err = 1;
1983 /* Work backwards from the end to the head of the basic block
1984 to verify the head is in the RTL chain. */
1985 for (; x != NULL_RTX; x = PREV_INSN (x))
1987 /* While walking over the insn chain, verify insns appear
1988 in only one basic block. */
1989 if (bb_info[INSN_UID (x)] != NULL)
1991 error ("insn %d is in multiple basic blocks (%d and %d)",
1992 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
1993 err = 1;
1996 bb_info[INSN_UID (x)] = bb;
1998 if (x == head)
1999 break;
2001 if (!x)
2003 error ("head insn %d for block %d not found in the insn stream",
2004 INSN_UID (head), bb->index);
2005 err = 1;
2008 last_head = PREV_INSN (x);
2010 FOR_EACH_EDGE (e, ei, bb->succs)
2011 if (e->flags & EDGE_FALLTHRU)
2012 break;
2013 if (!e)
2015 rtx insn;
2017 /* Ensure existence of barrier in BB with no fallthru edges. */
2018 for (insn = BB_END (bb); !insn || !BARRIER_P (insn);
2019 insn = NEXT_INSN (insn))
2020 if (!insn
2021 || NOTE_INSN_BASIC_BLOCK_P (insn))
2023 error ("missing barrier after block %i", bb->index);
2024 err = 1;
2025 break;
2028 else if (e->src != ENTRY_BLOCK_PTR
2029 && e->dest != EXIT_BLOCK_PTR)
2031 rtx insn;
2033 if (e->src->next_bb != e->dest)
2035 error
2036 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2037 e->src->index, e->dest->index);
2038 err = 1;
2040 else
2041 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2042 insn = NEXT_INSN (insn))
2043 if (BARRIER_P (insn) || INSN_P (insn))
2045 error ("verify_flow_info: Incorrect fallthru %i->%i",
2046 e->src->index, e->dest->index);
2047 fatal_insn ("wrong insn in the fallthru edge", insn);
2048 err = 1;
2053 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2055 /* Check that the code before the first basic block has NULL
2056 bb field. */
2057 if (!BARRIER_P (x)
2058 && BLOCK_FOR_INSN (x) != NULL)
2060 error ("insn %d outside of basic blocks has non-NULL bb field",
2061 INSN_UID (x));
2062 err = 1;
2065 free (bb_info);
2067 num_bb_notes = 0;
2068 last_bb_seen = ENTRY_BLOCK_PTR;
2070 for (x = rtx_first; x; x = NEXT_INSN (x))
2072 if (NOTE_INSN_BASIC_BLOCK_P (x))
2074 bb = NOTE_BASIC_BLOCK (x);
2076 num_bb_notes++;
2077 if (bb != last_bb_seen->next_bb)
2078 internal_error ("basic blocks not laid down consecutively");
2080 curr_bb = last_bb_seen = bb;
2083 if (!curr_bb)
2085 switch (GET_CODE (x))
2087 case BARRIER:
2088 case NOTE:
2089 break;
2091 case CODE_LABEL:
2092 /* An addr_vec is placed outside any basic block. */
2093 if (NEXT_INSN (x)
2094 && JUMP_P (NEXT_INSN (x))
2095 && (GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_DIFF_VEC
2096 || GET_CODE (PATTERN (NEXT_INSN (x))) == ADDR_VEC))
2097 x = NEXT_INSN (x);
2099 /* But in any case, non-deletable labels can appear anywhere. */
2100 break;
2102 default:
2103 fatal_insn ("insn outside basic block", x);
2107 if (JUMP_P (x)
2108 && returnjump_p (x) && ! condjump_p (x)
2109 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
2110 fatal_insn ("return not followed by barrier", x);
2111 if (curr_bb && x == BB_END (curr_bb))
2112 curr_bb = NULL;
2115 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
2116 internal_error
2117 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2118 num_bb_notes, n_basic_blocks);
2120 return err;
2123 /* Assume that the preceding pass has possibly eliminated jump instructions
2124 or converted the unconditional jumps. Eliminate the edges from CFG.
2125 Return true if any edges are eliminated. */
2127 bool
2128 purge_dead_edges (basic_block bb)
2130 edge e;
2131 rtx insn = BB_END (bb), note;
2132 bool purged = false;
2133 bool found;
2134 edge_iterator ei;
2136 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2137 if (NONJUMP_INSN_P (insn)
2138 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2140 rtx eqnote;
2142 if (! may_trap_p (PATTERN (insn))
2143 || ((eqnote = find_reg_equal_equiv_note (insn))
2144 && ! may_trap_p (XEXP (eqnote, 0))))
2145 remove_note (insn, note);
2148 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2149 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2151 /* There are three types of edges we need to handle correctly here: EH
2152 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2153 latter can appear when nonlocal gotos are used. */
2154 if (e->flags & EDGE_EH)
2156 if (can_throw_internal (BB_END (bb))
2157 /* If this is a call edge, verify that this is a call insn. */
2158 && (! (e->flags & EDGE_ABNORMAL_CALL)
2159 || CALL_P (BB_END (bb))))
2161 ei_next (&ei);
2162 continue;
2165 else if (e->flags & EDGE_ABNORMAL_CALL)
2167 if (CALL_P (BB_END (bb))
2168 && (! (note = find_reg_note (insn, REG_EH_REGION, NULL))
2169 || INTVAL (XEXP (note, 0)) >= 0))
2171 ei_next (&ei);
2172 continue;
2175 else
2177 ei_next (&ei);
2178 continue;
2181 remove_edge (e);
2182 df_set_bb_dirty (bb);
2183 purged = true;
2186 if (JUMP_P (insn))
2188 rtx note;
2189 edge b,f;
2190 edge_iterator ei;
2192 /* We do care only about conditional jumps and simplejumps. */
2193 if (!any_condjump_p (insn)
2194 && !returnjump_p (insn)
2195 && !simplejump_p (insn))
2196 return purged;
2198 /* Branch probability/prediction notes are defined only for
2199 condjumps. We've possibly turned condjump into simplejump. */
2200 if (simplejump_p (insn))
2202 note = find_reg_note (insn, REG_BR_PROB, NULL);
2203 if (note)
2204 remove_note (insn, note);
2205 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2206 remove_note (insn, note);
2209 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2211 /* Avoid abnormal flags to leak from computed jumps turned
2212 into simplejumps. */
2214 e->flags &= ~EDGE_ABNORMAL;
2216 /* See if this edge is one we should keep. */
2217 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2218 /* A conditional jump can fall through into the next
2219 block, so we should keep the edge. */
2221 ei_next (&ei);
2222 continue;
2224 else if (e->dest != EXIT_BLOCK_PTR
2225 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2226 /* If the destination block is the target of the jump,
2227 keep the edge. */
2229 ei_next (&ei);
2230 continue;
2232 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2233 /* If the destination block is the exit block, and this
2234 instruction is a return, then keep the edge. */
2236 ei_next (&ei);
2237 continue;
2239 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2240 /* Keep the edges that correspond to exceptions thrown by
2241 this instruction and rematerialize the EDGE_ABNORMAL
2242 flag we just cleared above. */
2244 e->flags |= EDGE_ABNORMAL;
2245 ei_next (&ei);
2246 continue;
2249 /* We do not need this edge. */
2250 df_set_bb_dirty (bb);
2251 purged = true;
2252 remove_edge (e);
2255 if (EDGE_COUNT (bb->succs) == 0 || !purged)
2256 return purged;
2258 if (dump_file)
2259 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2261 if (!optimize)
2262 return purged;
2264 /* Redistribute probabilities. */
2265 if (single_succ_p (bb))
2267 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2268 single_succ_edge (bb)->count = bb->count;
2270 else
2272 note = find_reg_note (insn, REG_BR_PROB, NULL);
2273 if (!note)
2274 return purged;
2276 b = BRANCH_EDGE (bb);
2277 f = FALLTHRU_EDGE (bb);
2278 b->probability = INTVAL (XEXP (note, 0));
2279 f->probability = REG_BR_PROB_BASE - b->probability;
2280 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2281 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2284 return purged;
2286 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
2288 /* First, there should not be any EH or ABCALL edges resulting
2289 from non-local gotos and the like. If there were, we shouldn't
2290 have created the sibcall in the first place. Second, there
2291 should of course never have been a fallthru edge. */
2292 gcc_assert (single_succ_p (bb));
2293 gcc_assert (single_succ_edge (bb)->flags
2294 == (EDGE_SIBCALL | EDGE_ABNORMAL));
2296 return 0;
2299 /* If we don't see a jump insn, we don't know exactly why the block would
2300 have been broken at this point. Look for a simple, non-fallthru edge,
2301 as these are only created by conditional branches. If we find such an
2302 edge we know that there used to be a jump here and can then safely
2303 remove all non-fallthru edges. */
2304 found = false;
2305 FOR_EACH_EDGE (e, ei, bb->succs)
2306 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2308 found = true;
2309 break;
2312 if (!found)
2313 return purged;
2315 /* Remove all but the fake and fallthru edges. The fake edge may be
2316 the only successor for this block in the case of noreturn
2317 calls. */
2318 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2320 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
2322 df_set_bb_dirty (bb);
2323 remove_edge (e);
2324 purged = true;
2326 else
2327 ei_next (&ei);
2330 gcc_assert (single_succ_p (bb));
2332 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2333 single_succ_edge (bb)->count = bb->count;
2335 if (dump_file)
2336 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2337 bb->index);
2338 return purged;
2341 /* Search all basic blocks for potentially dead edges and purge them. Return
2342 true if some edge has been eliminated. */
2344 bool
2345 purge_all_dead_edges (void)
2347 int purged = false;
2348 basic_block bb;
2350 FOR_EACH_BB (bb)
2352 bool purged_here = purge_dead_edges (bb);
2354 purged |= purged_here;
2357 return purged;
2360 /* Same as split_block but update cfg_layout structures. */
2362 static basic_block
2363 cfg_layout_split_block (basic_block bb, void *insnp)
2365 rtx insn = (rtx) insnp;
2366 basic_block new_bb = rtl_split_block (bb, insn);
2368 new_bb->il.rtl->footer = bb->il.rtl->footer;
2369 bb->il.rtl->footer = NULL;
2371 return new_bb;
/* Redirect edge E to DEST.  Returns the surviving edge on success, or
   NULL when the redirection cannot be performed (abnormal-call or EH
   edges).  cfglayout variant of the redirect_edge_and_branch hook.  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  /* Abnormal-call and EH edges cannot be redirected at all.  */
  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  /* Nothing to do if the edge already points where requested.  */
  if (e->dest == dest)
    return e;

  /* First try replacing the jump insn entirely.  */
  if (e->src != ENTRY_BLOCK_PTR
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      df_set_bb_dirty (src);
      return ret;
    }

  /* A plain fallthru edge out of the entry block can simply be
     re-pointed; there is no branch insn to rewrite.  */
  if (e->src == ENTRY_BLOCK_PTR
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
        fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
                 e->src->index, dest->index);

      df_set_bb_dirty (e->src);
      redirect_edge_succ (e, dest);
      return e;
    }

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
          && label_is_jump_target_p (BB_HEAD (e->dest),
                                     BB_END (src)))
        {
          edge redirected;

          if (dump_file)
            fprintf (dump_file, "Fallthru edge unified with branch "
                     "%i->%i redirected to %i\n",
                     e->src->index, e->dest->index, dest->index);
          /* Temporarily drop the fallthru flag so redirect_branch_edge
             treats E as the branch edge, then restore it.  */
          e->flags &= ~EDGE_FALLTHRU;
          redirected = redirect_branch_edge (e, dest);
          gcc_assert (redirected);
          e->flags |= EDGE_FALLTHRU;
          df_set_bb_dirty (e->src);
          return e;
        }
      /* In case we are redirecting fallthru edge to the branch edge
         of conditional jump, remove it.  */
      if (EDGE_COUNT (src->succs) == 2)
        {
          /* Find the edge that is different from E.  */
          edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

          /* Both edges would now reach DEST, so the conditional jump
             is redundant; delete it if it is a pure jump.  */
          if (s->dest == dest
              && any_condjump_p (BB_END (src))
              && onlyjump_p (BB_END (src)))
            delete_insn (BB_END (src));
        }
      ret = redirect_edge_succ_nodup (e, dest);
      if (dump_file)
        fprintf (dump_file, "Fallthru edge %i->%i redirected to %i\n",
                 e->src->index, e->dest->index, dest->index);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)));

  df_set_bb_dirty (src);
  return ret;
}
2457 /* Simple wrapper as we always can redirect fallthru edges. */
2458 static basic_block
2459 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
2461 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
2463 gcc_assert (redirected);
2464 return NULL;
/* Same as delete_basic_block but update cfg_layout structures: splice
   BB's header and footer insn chains back into the insn stream, delete
   the block, and attach any surviving surrounding insns to the next
   block's header (or the function footer).  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;

  /* Link BB's detached header chain into the insn stream just before
     BB_HEAD, so it gets deleted (or salvaged) along with the block.  */
  if (bb->il.rtl->header)
    {
      next = BB_HEAD (bb);
      if (prev)
        NEXT_INSN (prev) = bb->il.rtl->header;
      else
        set_first_insn (bb->il.rtl->header);
      PREV_INSN (bb->il.rtl->header) = prev;
      insn = bb->il.rtl->header;
      /* Walk to the end of the header chain and splice it in.  */
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = next;
      PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (bb->il.rtl->footer)
    {
      /* Drop barriers from the footer; a label (e.g. a jump table
         label) terminates the scan and keeps the rest.  */
      insn = bb->il.rtl->footer;
      while (insn)
        {
          if (BARRIER_P (insn))
            {
              if (PREV_INSN (insn))
                NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
              else
                bb->il.rtl->footer = NEXT_INSN (insn);
              if (NEXT_INSN (insn))
                PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
            }
          if (LABEL_P (insn))
            break;
          insn = NEXT_INSN (insn);
        }
      /* Splice whatever footer remains into the stream after BB_END.  */
      if (bb->il.rtl->footer)
        {
          insn = BB_END (bb);
          NEXT_INSN (insn) = bb->il.rtl->footer;
          PREV_INSN (bb->il.rtl->footer) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
          NEXT_INSN (insn) = next;
          if (next)
            PREV_INSN (next) = insn;
          else
            set_last_insn (insn);
        }
    }
  /* Remaining surrounding insns will be attached to the following
     block's header, or to the function footer at end of function.  */
  if (bb->next_bb != EXIT_BLOCK_PTR)
    to = &bb->next_bb->il.rtl->header;
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  /* Recompute the boundaries of what survived the deletion.  */
  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  /* If insns between PREV and NEXT survived, unlink them from the
     stream and prepend them to *TO.  */
  if (next && NEXT_INSN (next) != prev)
    {
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = *to;
      if (*to)
        PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
2550 /* Return true when blocks A and B can be safely merged. */
2552 static bool
2553 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
2555 /* If we are partitioning hot/cold basic blocks, we don't want to
2556 mess up unconditional or indirect jumps that cross between hot
2557 and cold sections.
2559 Basic block partitioning may result in some jumps that appear to
2560 be optimizable (or blocks that appear to be mergeable), but which really
2561 must be left untouched (they are required to make it safely across
2562 partition boundaries). See the comments at the top of
2563 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
2565 if (BB_PARTITION (a) != BB_PARTITION (b))
2566 return false;
2568 /* There must be exactly one edge in between the blocks. */
2569 return (single_succ_p (a)
2570 && single_succ (a) == b
2571 && single_pred_p (b) == 1
2572 && a != b
2573 /* Must be simple edge. */
2574 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
2575 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
2576 /* If the jump insn has side effects, we can't kill the edge.
2577 When not optimizing, try_redirect_by_replacing_jump will
2578 not allow us to redirect an edge by replacing a table jump. */
2579 && (!JUMP_P (BB_END (a))
2580 || ((!optimize || reload_completed)
2581 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
/* Merge block A and B.  The blocks must be mergeable
   (see cfg_layout_can_merge_blocks_p).  B's insns are moved/re-associated
   into A and B is deleted.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
#ifdef ENABLE_CHECKING
  gcc_assert (cfg_layout_can_merge_blocks_p (a, b));
#endif

  if (dump_file)
    fprintf (dump_file, "merging block %d into block %d\n", b->index, a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      /* This might have been an EH label that no longer has incoming
         EH edges.  Update data structures to match.  */
      maybe_remove_eh_handler (BB_HEAD (b));

      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* Possible line number notes should appear in between.  B's detached
     header chain is emitted after A's end, then deleted.  */
  if (b->il.rtl->header)
    {
      rtx first = BB_END (a), last;

      last = emit_insn_after_noloc (b->il.rtl->header, BB_END (a), a);
      delete_insn_chain (NEXT_INSN (first), last, false);
      b->il.rtl->header = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      rtx first = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (first, BB_END (a), a);
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (first))
        first = NEXT_INSN (first);
      gcc_assert (NOTE_INSN_BASIC_BLOCK_P (first));
      BB_HEAD (b) = NULL;

      /* emit_insn_after_noloc doesn't call df_insn_change_bb.
         We need to explicitly call. */
      update_bb_for_insn_chain (NEXT_INSN (first),
                                BB_END (b),
                                a);

      /* Delete B's now-redundant NOTE_INSN_BASIC_BLOCK.  */
      delete_insn (first);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      rtx insn;

      update_bb_for_insn_chain (BB_HEAD (b), BB_END (b), a);

      insn = BB_HEAD (b);
      /* Skip possible DELETED_LABEL insn.  */
      if (!NOTE_INSN_BASIC_BLOCK_P (insn))
        insn = NEXT_INSN (insn);
      gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
      BB_HEAD (b) = NULL;
      BB_END (a) = BB_END (b);
      delete_insn (insn);
    }

  df_bb_delete (b->index);

  /* Possible tablejumps and barriers should appear after the block:
     append B's footer chain to A's footer.  */
  if (b->il.rtl->footer)
    {
      if (!a->il.rtl->footer)
        a->il.rtl->footer = b->il.rtl->footer;
      else
        {
          rtx last = a->il.rtl->footer;

          while (NEXT_INSN (last))
            last = NEXT_INSN (last);
          NEXT_INSN (last) = b->il.rtl->footer;
          PREV_INSN (b->il.rtl->footer) = last;
        }
      b->il.rtl->footer = NULL;
    }

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n",
             a->index, b->index);
}
2683 /* Split edge E. */
2685 static basic_block
2686 cfg_layout_split_edge (edge e)
2688 basic_block new_bb =
2689 create_basic_block (e->src != ENTRY_BLOCK_PTR
2690 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
2691 NULL_RTX, e->src);
2693 if (e->dest == EXIT_BLOCK_PTR)
2694 BB_COPY_PARTITION (new_bb, e->src);
2695 else
2696 BB_COPY_PARTITION (new_bb, e->dest);
2697 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
2698 redirect_edge_and_branch_force (e, new_bb);
2700 return new_bb;
/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.
   Nothing needs to be done for the RTL representation.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}
2710 /* Return 1 if BB ends with a call, possibly followed by some
2711 instructions that must stay with the call, 0 otherwise. */
2713 static bool
2714 rtl_block_ends_with_call_p (basic_block bb)
2716 rtx insn = BB_END (bb);
2718 while (!CALL_P (insn)
2719 && insn != BB_HEAD (bb)
2720 && (keep_with_call_p (insn)
2721 || NOTE_P (insn)))
2722 insn = PREV_INSN (insn);
2723 return (CALL_P (insn));
/* Return 1 if BB ends with a conditional branch (as recognized by
   any_condjump_p), 0 otherwise.  */

static bool
rtl_block_ends_with_condjump_p (const_basic_block bb)
{
  return any_condjump_p (BB_END (bb));
}
2734 /* Return true if we need to add fake edge to exit.
2735 Helper function for rtl_flow_call_edges_add. */
2737 static bool
2738 need_fake_edge_p (const_rtx insn)
2740 if (!INSN_P (insn))
2741 return false;
2743 if ((CALL_P (insn)
2744 && !SIBLING_CALL_P (insn)
2745 && !find_reg_note (insn, REG_NORETURN, NULL)
2746 && !CONST_OR_PURE_CALL_P (insn)))
2747 return true;
2749 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2750 && MEM_VOLATILE_P (PATTERN (insn)))
2751 || (GET_CODE (PATTERN (insn)) == PARALLEL
2752 && asm_noperands (insn) != -1
2753 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
2754 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  int last_bb = last_basic_block;
  bool check_last_block = false;

  /* Only the fixed entry/exit blocks exist: nothing to do.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
             && keep_with_call_p (insn))
        insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
        {
          edge e;

          e = find_edge (bb, EXIT_BLOCK_PTR);
          if (e)
            {
              /* Insert a harmless USE so the block no longer ends in
                 the call; this forces a block split on the edge.  */
              insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
              commit_edge_insertions ();
            }
        }
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      rtx prev_insn;

      /* Splitting above may have created gaps in the block array.  */
      if (!bb)
        continue;

      if (blocks && !TEST_BIT (blocks, i))
        continue;

      /* Walk the block backwards; PREV_INSN is cached because a split
         below can rewire INSN's links.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
        {
          prev_insn = PREV_INSN (insn);
          if (need_fake_edge_p (insn))
            {
              edge e;
              rtx split_at_insn = insn;

              /* Don't split the block between a call and an insn that should
                 remain in the same block as the call.  */
              if (CALL_P (insn))
                while (split_at_insn != BB_END (bb)
                       && keep_with_call_p (NEXT_INSN (split_at_insn)))
                  split_at_insn = NEXT_INSN (split_at_insn);

              /* The handling above of the final block before the epilogue
                 should be enough to verify that there is no edge to the exit
                 block in CFG already.  Calling make_edge in such case would
                 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
              if (split_at_insn == BB_END (bb))
                {
                  e = find_edge (bb, EXIT_BLOCK_PTR);
                  gcc_assert (e == NULL);
                }
#endif

              /* Note that the following may create a new basic block
                 and renumber the existing basic blocks.  */
              if (split_at_insn != BB_END (bb))
                {
                  e = split_block (bb, split_at_insn);
                  if (e)
                    blocks_split++;
                }

              make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
            }

          if (insn == BB_HEAD (bb))
            break;
        }
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
/* Add COMP_RTX as a condition at the end of COND_BB, jumping to
   FIRST_HEAD when the condition holds.  SECOND_HEAD is the fall-thru
   target and needs no handling here -- the loop versioning code takes
   care of it; the parameter exists only because the tree-level variant
   of this hook needs it and both must share a signature.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head ,
                            basic_block second_head ATTRIBUTE_UNUSED,
                            basic_block cond_bb, void *comp_rtx)
{
  rtx label, seq, jump;
  /* COMP_RTX is a comparison rtx; pull out its operands and code.  */
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  enum machine_mode mode;


  label = block_label (first_head);
  /* Pick the comparison mode from whichever operand has one.  */
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  /* Build the compare-and-jump as a detached insn sequence.  */
  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0,
                           mode, NULL_RTX, NULL_RTX, label);
  jump = get_last_insn ();
  /* Record the jump target and bump the label's use count by hand,
     since we emitted the jump outside the normal emit path.  */
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Append the new conditional jump to COND_BB.  */
  emit_insn_after(seq, BB_END(cond_bb));
}
2921 /* Given a block B with unconditional branch at its end, get the
2922 store the return the branch edge and the fall-thru edge in
2923 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
2924 static void
2925 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
2926 edge *fallthru_edge)
2928 edge e = EDGE_SUCC (b, 0);
2930 if (e->flags & EDGE_FALLTHRU)
2932 *fallthru_edge = e;
2933 *branch_edge = EDGE_SUCC (b, 1);
2935 else
2937 *branch_edge = e;
2938 *fallthru_edge = EDGE_SUCC (b, 1);
/* Allocate and attach the RTL-specific info structure for basic block
   BB, which must not already have one.  */
void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.rtl);
  /* Zero-initialized GC allocation: header/footer start out NULL.  */
  bb->il.rtl = GGC_CNEW (struct rtl_bb_info);
}
2950 /* Add EXPR to the end of basic block BB. */
2953 insert_insn_end_bb_new (rtx pat, basic_block bb)
2955 rtx insn = BB_END (bb);
2956 rtx new_insn;
2957 rtx pat_end = pat;
2959 while (NEXT_INSN (pat_end) != NULL_RTX)
2960 pat_end = NEXT_INSN (pat_end);
2962 /* If the last insn is a jump, insert EXPR in front [taking care to
2963 handle cc0, etc. properly]. Similarly we need to care trapping
2964 instructions in presence of non-call exceptions. */
2966 if (JUMP_P (insn)
2967 || (NONJUMP_INSN_P (insn)
2968 && (!single_succ_p (bb)
2969 || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
2971 #ifdef HAVE_cc0
2972 rtx note;
2973 #endif
2974 /* If this is a jump table, then we can't insert stuff here. Since
2975 we know the previous real insn must be the tablejump, we insert
2976 the new instruction just before the tablejump. */
2977 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
2978 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
2979 insn = prev_real_insn (insn);
2981 #ifdef HAVE_cc0
2982 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
2983 if cc0 isn't set. */
2984 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
2985 if (note)
2986 insn = XEXP (note, 0);
2987 else
2989 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
2990 if (maybe_cc0_setter
2991 && INSN_P (maybe_cc0_setter)
2992 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
2993 insn = maybe_cc0_setter;
2995 #endif
2996 /* FIXME: What if something in cc0/jump uses value set in new
2997 insn? */
2998 new_insn = emit_insn_before_noloc (pat, insn, bb);
3001 /* Likewise if the last insn is a call, as will happen in the presence
3002 of exception handling. */
3003 else if (CALL_P (insn)
3004 && (!single_succ_p (bb)
3005 || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
3007 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
3008 we search backward and place the instructions before the first
3009 parameter is loaded. Do this for everyone for consistency and a
3010 presumption that we'll get better code elsewhere as well. */
3012 /* Since different machines initialize their parameter registers
3013 in different orders, assume nothing. Collect the set of all
3014 parameter registers. */
3015 insn = find_first_parameter_load (insn, BB_HEAD (bb));
3017 /* If we found all the parameter loads, then we want to insert
3018 before the first parameter load.
3020 If we did not find all the parameter loads, then we might have
3021 stopped on the head of the block, which could be a CODE_LABEL.
3022 If we inserted before the CODE_LABEL, then we would be putting
3023 the insn in the wrong basic block. In that case, put the insn
3024 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
3025 while (LABEL_P (insn)
3026 || NOTE_INSN_BASIC_BLOCK_P (insn))
3027 insn = NEXT_INSN (insn);
3029 new_insn = emit_insn_before_noloc (pat, insn, bb);
3031 else
3032 new_insn = emit_insn_after_noloc (pat, insn, bb);
3034 return new_insn;
3037 /* Returns true if it is possible to remove edge E by redirecting
3038 it to the destination of the other edge from E->src. */
3040 static bool
3041 rtl_can_remove_branch_p (const_edge e)
3043 const_basic_block src = e->src;
3044 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
3045 const_rtx insn = BB_END (src), set;
3047 /* The conditions are taken from try_redirect_by_replacing_jump. */
3048 if (target == EXIT_BLOCK_PTR)
3049 return false;
3051 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3052 return false;
3054 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
3055 || BB_PARTITION (src) != BB_PARTITION (target))
3056 return false;
3058 if (!onlyjump_p (insn)
3059 || tablejump_p (insn, NULL, NULL))
3060 return false;
3062 set = single_set (insn);
3063 if (!set || side_effects_p (set))
3064 return false;
3066 return true;
/* Implementation of CFG manipulation for linearized RTL.
   Hook table consumed through the cfghooks.h interface.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,                 /* verify_flow_info */
  rtl_dump_bb,                          /* dump_bb */
  rtl_create_basic_block,               /* create_basic_block */
  rtl_redirect_edge_and_branch,         /* redirect_edge_and_branch */
  rtl_redirect_edge_and_branch_force,   /* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,              /* can_remove_branch_p */
  rtl_delete_block,                     /* delete_basic_block */
  rtl_split_block,                      /* split_block */
  rtl_move_block_after,                 /* move_block_after */
  rtl_can_merge_blocks,                 /* can_merge_blocks_p */
  rtl_merge_blocks,                     /* merge_blocks */
  rtl_predict_edge,                     /* predict_edge */
  rtl_predicted_by_p,                   /* predicted_by_p */
  NULL,                                 /* can_duplicate_block_p */
  NULL,                                 /* duplicate_block */
  rtl_split_edge,                       /* split_edge */
  rtl_make_forwarder_block,             /* make_forwarder_block */
  rtl_tidy_fallthru_edge,               /* tidy_fallthru_edge */
  rtl_block_ends_with_call_p,           /* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,       /* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,              /* flow_call_edges_add */
  NULL,                                 /* execute_on_growing_pred */
  NULL,                                 /* execute_on_shrinking_pred */
  NULL,                                 /* duplicate loop for trees */
  NULL,                                 /* lv_add_condition_to_bb */
  NULL,                                 /* lv_adjust_loop_header_phi*/
  NULL,                                 /* extract_cond_bb_edges */
  NULL                                  /* flush_pending_stmts */
};
3102 /* Implementation of CFG manipulation for cfg layout RTL, where
3103 basic block connected via fallthru edges does not have to be adjacent.
3104 This representation will hopefully become the default one in future
3105 version of the compiler. */
3107 /* We do not want to declare these functions in a header file, since they
3108 should only be used through the cfghooks interface, and we do not want to
3109 move them here since it would require also moving quite a lot of related
3110 code. They are in cfglayout.c. */
3111 extern bool cfg_layout_can_duplicate_bb_p (const_basic_block);
3112 extern basic_block cfg_layout_duplicate_bb (basic_block);
3114 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
3115 "cfglayout mode",
3116 rtl_verify_flow_info_1,
3117 rtl_dump_bb,
3118 cfg_layout_create_basic_block,
3119 cfg_layout_redirect_edge_and_branch,
3120 cfg_layout_redirect_edge_and_branch_force,
3121 rtl_can_remove_branch_p,
3122 cfg_layout_delete_block,
3123 cfg_layout_split_block,
3124 rtl_move_block_after,
3125 cfg_layout_can_merge_blocks_p,
3126 cfg_layout_merge_blocks,
3127 rtl_predict_edge,
3128 rtl_predicted_by_p,
3129 cfg_layout_can_duplicate_bb_p,
3130 cfg_layout_duplicate_bb,
3131 cfg_layout_split_edge,
3132 rtl_make_forwarder_block,
3133 NULL,
3134 rtl_block_ends_with_call_p,
3135 rtl_block_ends_with_condjump_p,
3136 rtl_flow_call_edges_add,
3137 NULL, /* execute_on_growing_pred */
3138 NULL, /* execute_on_shrinking_pred */
3139 duplicate_loop_to_header_edge, /* duplicate loop for trees */
3140 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
3141 NULL, /* lv_adjust_loop_header_phi*/
3142 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
3143 NULL /* flush_pending_stmts */