/* gitweb scrape header -- not part of the source proper:
   file: gcc/cfgrtl.c (official-gcc.git)
   blob: 0ea297e4c60b17a8dd9695b40429f0f2bee6a572  */
1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains low level functions to manipulate the CFG and analyze it
21 that are aware of the RTL intermediate language.
23 Available functionality:
24 - Basic CFG/RTL manipulation API documented in cfghooks.h
25 - CFG-aware instruction chain manipulation
26 delete_insn, delete_insn_chain
27 - Edge splitting and committing to edges
28 insert_insn_on_edge, commit_edge_insertions
29 - CFG updating after insn simplification
30 purge_dead_edges, purge_all_dead_edges
31 - CFG fixing after coarse manipulation
32 fixup_abnormal_edges
34 Functions not supposed for generic use:
35 - Infrastructure to determine quickly basic block for insn
36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37 - Edge redirection with updating and optimizing of insn chain
38 block_label, tidy_fallthru_edge, force_nonfallthru */
40 #include "config.h"
41 #include "system.h"
42 #include "coretypes.h"
43 #include "tm.h"
44 #include "tree.h"
45 #include "hard-reg-set.h"
46 #include "basic-block.h"
47 #include "regs.h"
48 #include "flags.h"
49 #include "function.h"
50 #include "except.h"
51 #include "rtl-error.h"
52 #include "tm_p.h"
53 #include "obstack.h"
54 #include "insn-attr.h"
55 #include "insn-config.h"
56 #include "expr.h"
57 #include "target.h"
58 #include "common/common-target.h"
59 #include "cfgloop.h"
60 #include "ggc.h"
61 #include "tree-pass.h"
62 #include "df.h"
/* NOTE(review): gitweb scrape artifact -- the leading numerals on each line
   are the original file's line numbers, not program text.  */
64 /* Holds the interesting leading and trailing notes for the function.
65 Only applicable if the CFG is in cfglayout mode. */
/* GC-rooted (GTY) file-scope state for cfglayout mode.  */
66 static GTY(()) rtx cfg_layout_function_footer;
67 static GTY(()) rtx cfg_layout_function_header;
/* Forward declarations for the static helpers and CFG-hook
   implementations defined later in this file.  */
69 static rtx skip_insns_after_block (basic_block);
70 static void record_effective_endpoints (void);
71 static rtx label_for_bb (basic_block);
72 static void fixup_reorder_chain (void);
74 void verify_insn_chain (void);
75 static void fixup_fallthru_exit_predecessor (void);
76 static int can_delete_note_p (const_rtx);
77 static int can_delete_label_p (const_rtx);
78 static basic_block rtl_split_edge (edge);
79 static bool rtl_move_block_after (basic_block, basic_block);
80 static int rtl_verify_flow_info (void);
81 static basic_block cfg_layout_split_block (basic_block, void *);
82 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
83 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
84 static void cfg_layout_delete_block (basic_block);
85 static void rtl_delete_block (basic_block);
86 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
87 static edge rtl_redirect_edge_and_branch (edge, basic_block);
88 static basic_block rtl_split_block (basic_block, void *);
89 static void rtl_dump_bb (FILE *, basic_block, int, int);
90 static int rtl_verify_flow_info_1 (void);
91 static void rtl_make_forwarder_block (edge);
93 /* Return true if NOTE is not one of the ones that must be kept paired,
94 so that we may simply delete it. */
96 static int
97 can_delete_note_p (const_rtx note)
99 switch (NOTE_KIND (note))
101 case NOTE_INSN_DELETED:
102 case NOTE_INSN_BASIC_BLOCK:
103 case NOTE_INSN_EPILOGUE_BEG:
104 return true;
106 default:
107 return false;
111 /* True if a given label can be deleted. */
113 static int
114 can_delete_label_p (const_rtx label)
116 return (!LABEL_PRESERVE_P (label)
117 /* User declared labels must be preserved. */
118 && LABEL_NAME (label) == 0
119 && !in_expr_list_p (forced_labels, label));
/* NOTE(review): gitweb scrape artifact -- leading numerals are original
   line numbers and all brace-only lines were lost in extraction; the
   remaining tokens are untouched.  */
122 /* Delete INSN by patching it out. */
124 void
125 delete_insn (rtx insn)
127 rtx note;
128 bool really_delete = true;
130 if (LABEL_P (insn))
132 /* Some labels can't be directly removed from the INSN chain, as they
133 might be references via variables, constant pool etc.
134 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
135 if (! can_delete_label_p (insn))
137 const char *name = LABEL_NAME (insn);
138 basic_block bb = BLOCK_FOR_INSN (insn);
139 rtx bb_note = NEXT_INSN (insn);
/* The label is turned into a NOTE_INSN_DELETED_LABEL in place rather
   than unlinked, so outstanding references stay valid.  */
141 really_delete = false;
142 PUT_CODE (insn, NOTE);
143 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
144 NOTE_DELETED_LABEL_NAME (insn) = name;
146 /* If the note following the label starts a basic block, and the
147 label is a member of the same basic block, interchange the two. */
148 if (bb_note != NULL_RTX
149 && NOTE_INSN_BASIC_BLOCK_P (bb_note)
150 && bb != NULL
151 && bb == BLOCK_FOR_INSN (bb_note))
153 reorder_insns_nobb (insn, insn, bb_note);
154 BB_HEAD (bb) = bb_note;
155 if (BB_END (bb) == bb_note)
156 BB_END (bb) = insn;
160 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
163 if (really_delete)
165 /* If this insn has already been deleted, something is very wrong. */
166 gcc_assert (!INSN_DELETED_P (insn));
167 if (INSN_P (insn))
168 df_insn_delete (insn);
169 remove_insn (insn);
170 INSN_DELETED_P (insn) = 1;
173 /* If deleting a jump, decrement the use count of the label. Deleting
174 the label itself should happen in the normal course of block merging. */
175 if (JUMP_P (insn))
177 if (JUMP_LABEL (insn)
178 && LABEL_P (JUMP_LABEL (insn)))
179 LABEL_NUSES (JUMP_LABEL (insn))--;
181 /* If there are more targets, remove them too. */
182 while ((note
183 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
184 && LABEL_P (XEXP (note, 0)))
186 LABEL_NUSES (XEXP (note, 0))--;
187 remove_note (insn, note);
191 /* Also if deleting any insn that references a label as an operand. */
192 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
193 && LABEL_P (XEXP (note, 0)))
195 LABEL_NUSES (XEXP (note, 0))--;
196 remove_note (insn, note);
/* Jump tables: drop one reference for every non-deleted label the
   ADDR_VEC / ADDR_DIFF_VEC still points at.  */
199 if (JUMP_TABLE_DATA_P (insn))
201 rtx pat = PATTERN (insn);
202 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
203 int len = XVECLEN (pat, diff_vec_p);
204 int i;
206 for (i = 0; i < len; i++)
208 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
210 /* When deleting code in bulk (e.g. removing many unreachable
211 blocks) we can delete a label that's a target of the vector
212 before deleting the vector itself. */
213 if (!NOTE_P (label))
214 LABEL_NUSES (label)--;
219 /* Like delete_insn but also purge dead edges from BB. */
221 void
222 delete_insn_and_edges (rtx insn)
224 bool purge = false;
226 if (INSN_P (insn)
227 && BLOCK_FOR_INSN (insn)
228 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
229 purge = true;
230 delete_insn (insn);
231 if (purge)
232 purge_dead_edges (BLOCK_FOR_INSN (insn));
235 /* Unlink a chain of insns between START and FINISH, leaving notes
236 that must be paired. If CLEAR_BB is true, we set bb field for
237 insns that cannot be removed to NULL. */
239 void
240 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
242 rtx prev, current;
244 /* Unchain the insns one by one. It would be quicker to delete all of these
245 with a single unchaining, rather than one at a time, but we need to keep
246 the NOTE's. */
247 current = finish;
248 while (1)
250 prev = PREV_INSN (current);
251 if (NOTE_P (current) && !can_delete_note_p (current))
253 else
254 delete_insn (current);
256 if (clear_bb && !INSN_DELETED_P (current))
257 set_block_for_insn (current, NULL);
259 if (current == start)
260 break;
261 current = prev;
/* NOTE(review): gitweb scrape artifact -- leading numerals are original
   line numbers and all brace-only lines were lost in extraction; the
   remaining tokens are untouched.  */
265 /* Create a new basic block consisting of the instructions between HEAD and END
266 inclusive. This function is designed to allow fast BB construction - reuses
267 the note and basic block struct in BB_NOTE, if any and do not grow
268 BASIC_BLOCK chain and should be used directly only by CFG construction code.
269 END can be NULL in to create new empty basic block before HEAD. Both END
270 and HEAD can be NULL to create basic block at the end of INSN chain.
271 AFTER is the basic block we should be put after. */
273 basic_block
274 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
276 basic_block bb;
278 if (bb_note
279 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
280 && bb->aux == NULL)
282 /* If we found an existing note, thread it back onto the chain. */
/* This inner `after' is an insn and shadows the basic_block parameter
   of the same name -- it is the insn the reused note is moved after.  */
284 rtx after;
286 if (LABEL_P (head))
287 after = head;
288 else
290 after = PREV_INSN (head);
291 head = bb_note;
294 if (after != bb_note && NEXT_INSN (after) != bb_note)
295 reorder_insns_nobb (bb_note, bb_note, after);
297 else
299 /* Otherwise we must create a note and a basic block structure. */
301 bb = alloc_block ();
303 init_rtl_bb_info (bb);
/* Three placements for the fresh NOTE_INSN_BASIC_BLOCK: at the end of
   the whole insn chain, after a leading label, or before HEAD.  */
304 if (!head && !end)
305 head = end = bb_note
306 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
307 else if (LABEL_P (head) && end)
309 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
310 if (head == end)
311 end = bb_note;
313 else
315 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
316 head = bb_note;
317 if (!end)
318 end = head;
321 NOTE_BASIC_BLOCK (bb_note) = bb;
324 /* Always include the bb note in the block. */
325 if (NEXT_INSN (end) == bb_note)
326 end = bb_note;
328 BB_HEAD (bb) = head;
329 BB_END (bb) = end;
330 bb->index = last_basic_block++;
331 bb->flags = BB_NEW | BB_RTL;
332 link_block (bb, after);
333 SET_BASIC_BLOCK (bb->index, bb);
334 df_bb_refs_record (bb->index, false);
335 update_bb_for_insn (bb);
336 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
338 /* Tag the block so that we know it has been used when considering
339 other basic block notes. */
340 bb->aux = bb;
342 return bb;
345 /* Create new basic block consisting of instructions in between HEAD and END
346 and place it to the BB chain after block AFTER. END can be NULL to
347 create a new empty basic block before HEAD. Both END and HEAD can be
348 NULL to create basic block at the end of INSN chain. */
350 static basic_block
351 rtl_create_basic_block (void *headp, void *endp, basic_block after)
353 rtx head = (rtx) headp, end = (rtx) endp;
354 basic_block bb;
356 /* Grow the basic block array if needed. */
357 if ((size_t) last_basic_block >= basic_block_info->length ())
359 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
360 vec_safe_grow_cleared (basic_block_info, new_size);
363 n_basic_blocks++;
365 bb = create_basic_block_structure (head, end, NULL, after);
366 bb->aux = NULL;
367 return bb;
370 static basic_block
371 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
373 basic_block newbb = rtl_create_basic_block (head, end, after);
375 return newbb;
378 /* Delete the insns in a (non-live) block. We physically delete every
379 non-deleted-note insn, and update the flow graph appropriately.
381 Return nonzero if we deleted an exception handler. */
383 /* ??? Preserving all such notes strikes me as wrong. It would be nice
384 to post-process the stream to remove empty blocks, loops, ranges, etc. */
386 static void
387 rtl_delete_block (basic_block b)
389 rtx insn, end;
391 /* If the head of this block is a CODE_LABEL, then it might be the
392 label for an exception handler which can't be reached. We need
393 to remove the label from the exception_handler_label list. */
394 insn = BB_HEAD (b);
396 end = get_last_bb_insn (b);
398 /* Selectively delete the entire chain. */
399 BB_HEAD (b) = NULL;
400 delete_insn_chain (insn, end, true);
403 if (dump_file)
404 fprintf (dump_file, "deleting block %d\n", b->index);
405 df_bb_delete (b->index);
408 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
410 void
411 compute_bb_for_insn (void)
413 basic_block bb;
415 FOR_EACH_BB (bb)
417 rtx end = BB_END (bb);
418 rtx insn;
420 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
422 BLOCK_FOR_INSN (insn) = bb;
423 if (insn == end)
424 break;
429 /* Release the basic_block_for_insn array. */
431 unsigned int
432 free_bb_for_insn (void)
434 rtx insn;
435 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
436 if (!BARRIER_P (insn))
437 BLOCK_FOR_INSN (insn) = NULL;
438 return 0;
/* NOTE(review): gitweb scrape artifact -- leading numerals are original
   line numbers and brace-only lines were lost in extraction.  */
/* Pass execute hook: tear down the per-insn block mapping, after first
   letting the delay-slot machinery refresh DF notes while the CFG is
   still valid.  Always returns 0 (no TODO flags).  */
441 static unsigned int
442 rest_of_pass_free_cfg (void)
444 #ifdef DELAY_SLOTS
445 /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
446 valid at that point so it would be too late to call df_analyze. */
447 if (optimize > 0 && flag_delayed_branch)
449 df_note_add_problem ();
450 df_analyze ();
452 #endif
454 free_bb_for_insn ();
455 return 0;
/* NOTE(review): gitweb scrape artifact -- leading numerals are original
   line numbers; the initializer's brace lines were lost in extraction.  */
/* Pass descriptor for the internal *free_cfg pass.  It runs
   rest_of_pass_free_cfg and records that PROP_cfg is destroyed.  */
458 struct rtl_opt_pass pass_free_cfg =
461 RTL_PASS,
462 "*free_cfg", /* name */
463 OPTGROUP_NONE, /* optinfo_flags */
464 NULL, /* gate */
465 rest_of_pass_free_cfg, /* execute */
466 NULL, /* sub */
467 NULL, /* next */
468 0, /* static_pass_number */
469 TV_NONE, /* tv_id */
470 0, /* properties_required */
471 0, /* properties_provided */
472 PROP_cfg, /* properties_destroyed */
473 0, /* todo_flags_start */
474 0, /* todo_flags_finish */
478 /* Return RTX to emit after when we want to emit code on the entry of function. */
480 entry_of_function (void)
482 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
483 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
486 /* Emit INSN at the entry point of the function, ensuring that it is only
487 executed once per function. */
488 void
489 emit_insn_at_entry (rtx insn)
491 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
492 edge e = ei_safe_edge (ei);
493 gcc_assert (e->flags & EDGE_FALLTHRU);
495 insert_insn_on_edge (insn, e);
496 commit_edge_insertions ();
499 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
500 (or BARRIER if found) and notify df of the bb change.
501 The insn chain range is inclusive
502 (i.e. both BEGIN and END will be updated. */
504 static void
505 update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
507 rtx insn;
509 end = NEXT_INSN (end);
510 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
511 if (!BARRIER_P (insn))
512 df_insn_change_bb (insn, bb);
515 /* Update BLOCK_FOR_INSN of insns in BB to BB,
516 and notify df of the change. */
518 void
519 update_bb_for_insn (basic_block bb)
521 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
525 /* Like active_insn_p, except keep the return value clobber around
526 even after reload. */
528 static bool
529 flow_active_insn_p (const_rtx insn)
531 if (active_insn_p (insn))
532 return true;
534 /* A clobber of the function return value exists for buggy
535 programs that fail to return a value. Its effect is to
536 keep the return value from being live across the entire
537 function. If we allow it to be skipped, we introduce the
538 possibility for register lifetime confusion. */
539 if (GET_CODE (PATTERN (insn)) == CLOBBER
540 && REG_P (XEXP (PATTERN (insn), 0))
541 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
542 return true;
544 return false;
547 /* Return true if the block has no effect and only forwards control flow to
548 its single destination. */
550 bool
551 contains_no_active_insn_p (const_basic_block bb)
553 rtx insn;
555 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
556 || !single_succ_p (bb))
557 return false;
559 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
560 if (INSN_P (insn) && flow_active_insn_p (insn))
561 return false;
563 return (!INSN_P (insn)
564 || (JUMP_P (insn) && simplejump_p (insn))
565 || !flow_active_insn_p (insn));
568 /* Likewise, but protect loop latches, headers and preheaders. */
569 /* FIXME: Make this a cfg hook. */
571 bool
572 forwarder_block_p (const_basic_block bb)
574 if (!contains_no_active_insn_p (bb))
575 return false;
577 /* Protect loop latches, headers and preheaders. */
578 if (current_loops)
580 basic_block dest;
581 if (bb->loop_father->header == bb)
582 return false;
583 dest = EDGE_SUCC (bb, 0)->dest;
584 if (dest->loop_father->header == dest)
585 return false;
588 return true;
591 /* Return nonzero if we can reach target from src by falling through. */
592 /* FIXME: Make this a cfg hook. */
594 bool
595 can_fallthru (basic_block src, basic_block target)
597 rtx insn = BB_END (src);
598 rtx insn2;
599 edge e;
600 edge_iterator ei;
602 if (target == EXIT_BLOCK_PTR)
603 return true;
604 if (src->next_bb != target)
605 return 0;
606 FOR_EACH_EDGE (e, ei, src->succs)
607 if (e->dest == EXIT_BLOCK_PTR
608 && e->flags & EDGE_FALLTHRU)
609 return 0;
611 insn2 = BB_HEAD (target);
612 if (insn2 && !active_insn_p (insn2))
613 insn2 = next_active_insn (insn2);
615 /* ??? Later we may add code to move jump tables offline. */
616 return next_active_insn (insn) == insn2;
619 /* Return nonzero if we could reach target from src by falling through,
620 if the target was made adjacent. If we already have a fall-through
621 edge to the exit block, we can't do that. */
622 static bool
623 could_fall_through (basic_block src, basic_block target)
625 edge e;
626 edge_iterator ei;
628 if (target == EXIT_BLOCK_PTR)
629 return true;
630 FOR_EACH_EDGE (e, ei, src->succs)
631 if (e->dest == EXIT_BLOCK_PTR
632 && e->flags & EDGE_FALLTHRU)
633 return 0;
634 return true;
637 /* Return the NOTE_INSN_BASIC_BLOCK of BB. */
639 bb_note (basic_block bb)
641 rtx note;
643 note = BB_HEAD (bb);
644 if (LABEL_P (note))
645 note = NEXT_INSN (note);
647 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
648 return note;
651 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
652 note associated with the BLOCK. */
654 static rtx
655 first_insn_after_basic_block_note (basic_block block)
657 rtx insn;
659 /* Get the first instruction in the block. */
660 insn = BB_HEAD (block);
662 if (insn == NULL_RTX)
663 return NULL_RTX;
664 if (LABEL_P (insn))
665 insn = NEXT_INSN (insn);
666 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
668 return NEXT_INSN (insn);
/* NOTE(review): gitweb scrape artifact -- leading numerals are original
   line numbers and brace-only lines were lost in extraction; the
   remaining tokens are untouched.  */
671 /* Creates a new basic block just after basic block B by splitting
672 everything after specified instruction I. */
674 static basic_block
675 rtl_split_block (basic_block bb, void *insnp)
677 basic_block new_bb;
678 rtx insn = (rtx) insnp;
679 edge e;
680 edge_iterator ei;
/* With a NULL split point, split right after the block note (or at the
   chain end for a note-less block).  */
682 if (!insn)
684 insn = first_insn_after_basic_block_note (bb);
686 if (insn)
688 rtx next = insn;
690 insn = PREV_INSN (insn);
692 /* If the block contains only debug insns, insn would have
693 been NULL in a non-debug compilation, and then we'd end
694 up emitting a DELETED note. For -fcompare-debug
695 stability, emit the note too. */
696 if (insn != BB_END (bb)
697 && DEBUG_INSN_P (next)
698 && DEBUG_INSN_P (BB_END (bb)))
700 while (next != BB_END (bb) && DEBUG_INSN_P (next))
701 next = NEXT_INSN (next);
703 if (next == BB_END (bb))
704 emit_note_after (NOTE_INSN_DELETED, next);
707 else
708 insn = get_last_insn ();
711 /* We probably should check type of the insn so that we do not create
712 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
713 bother. */
/* An empty tail would leave the new block headless -- pad with a
   DELETED note so NEXT_INSN (insn) exists.  */
714 if (insn == BB_END (bb))
715 emit_note_after (NOTE_INSN_DELETED, insn);
717 /* Create the new basic block. */
718 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
719 BB_COPY_PARTITION (new_bb, bb);
720 BB_END (bb) = insn;
722 /* Redirect the outgoing edges. */
723 new_bb->succs = bb->succs;
724 bb->succs = NULL;
725 FOR_EACH_EDGE (e, ei, new_bb->succs)
726 e->src = new_bb;
728 /* The new block starts off being dirty. */
729 df_set_bb_dirty (bb);
730 return new_bb;
733 /* Return true if the single edge between blocks A and B is the only place
734 in RTL which holds some unique locus. */
736 static bool
737 unique_locus_on_edge_between_p (basic_block a, basic_block b)
739 const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
740 rtx insn, end;
742 if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
743 return false;
745 /* First scan block A backward. */
746 insn = BB_END (a);
747 end = PREV_INSN (BB_HEAD (a));
748 while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
749 insn = PREV_INSN (insn);
751 if (insn != end && INSN_LOCATION (insn) == goto_locus)
752 return false;
754 /* Then scan block B forward. */
755 insn = BB_HEAD (b);
756 if (insn)
758 end = NEXT_INSN (BB_END (b));
759 while (insn != end && !NONDEBUG_INSN_P (insn))
760 insn = NEXT_INSN (insn);
762 if (insn != end && INSN_HAS_LOCATION (insn)
763 && INSN_LOCATION (insn) == goto_locus)
764 return false;
767 return true;
770 /* If the single edge between blocks A and B is the only place in RTL which
771 holds some unique locus, emit a nop with that locus between the blocks. */
773 static void
774 emit_nop_for_unique_locus_between (basic_block a, basic_block b)
776 if (!unique_locus_on_edge_between_p (a, b))
777 return;
779 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
780 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
/* NOTE(review): gitweb scrape artifact -- leading numerals are original
   line numbers and brace-only lines were lost in extraction; the
   remaining tokens are untouched.  */
783 /* Blocks A and B are to be merged into a single block A. The insns
784 are already contiguous. */
786 static void
787 rtl_merge_blocks (basic_block a, basic_block b)
789 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
790 rtx del_first = NULL_RTX, del_last = NULL_RTX;
791 rtx b_debug_start = b_end, b_debug_end = b_end;
792 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
793 int b_empty = 0;
795 if (dump_file)
796 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
797 a->index);
/* Peel trailing debug insns off B's end; b_debug_start..b_debug_end
   brackets them for special handling below.  */
799 while (DEBUG_INSN_P (b_end))
800 b_end = PREV_INSN (b_debug_start = b_end);
802 /* If there was a CODE_LABEL beginning B, delete it. */
803 if (LABEL_P (b_head))
805 /* Detect basic blocks with nothing but a label. This can happen
806 in particular at the end of a function. */
807 if (b_head == b_end)
808 b_empty = 1;
810 del_first = del_last = b_head;
811 b_head = NEXT_INSN (b_head);
814 /* Delete the basic block note and handle blocks containing just that
815 note. */
816 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
818 if (b_head == b_end)
819 b_empty = 1;
820 if (! del_last)
821 del_first = b_head;
823 del_last = b_head;
824 b_head = NEXT_INSN (b_head);
827 /* If there was a jump out of A, delete it. */
828 if (JUMP_P (a_end))
830 rtx prev;
832 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
833 if (!NOTE_P (prev)
834 || NOTE_INSN_BASIC_BLOCK_P (prev)
835 || prev == BB_HEAD (a))
836 break;
838 del_first = a_end;
840 #ifdef HAVE_cc0
841 /* If this was a conditional jump, we need to also delete
842 the insn that set cc0. */
843 if (only_sets_cc0_p (prev))
845 rtx tmp = prev;
847 prev = prev_nonnote_insn (prev);
848 if (!prev)
849 prev = BB_HEAD (a);
850 del_first = tmp;
852 #endif
854 a_end = PREV_INSN (del_first);
856 else if (BARRIER_P (NEXT_INSN (a_end)))
857 del_first = NEXT_INSN (a_end);
859 /* Delete everything marked above as well as crap that might be
860 hanging out between the two blocks. */
861 BB_END (a) = a_end;
862 BB_HEAD (b) = b_empty ? NULL_RTX : b_head;
863 delete_insn_chain (del_first, del_last, true);
865 /* When not optimizing CFG and the edge is the only place in RTL which holds
866 some unique locus, emit a nop with that locus in between. */
867 if (!optimize)
869 emit_nop_for_unique_locus_between (a, b);
870 a_end = BB_END (a);
873 /* Reassociate the insns of B with A. */
874 if (!b_empty)
876 update_bb_for_insn_chain (a_end, b_debug_end, a);
878 BB_END (a) = b_debug_end;
879 BB_HEAD (b) = NULL_RTX;
881 else if (b_end != b_debug_end)
883 /* Move any deleted labels and other notes between the end of A
884 and the debug insns that make up B after the debug insns,
885 bringing the debug insns into A while keeping the notes after
886 the end of A. */
887 if (NEXT_INSN (a_end) != b_debug_start)
888 reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
889 b_debug_end);
890 update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
891 BB_END (a) = b_debug_end;
894 df_bb_delete (b->index);
896 /* If B was a forwarder block, propagate the locus on the edge. */
897 if (forwarder_p
898 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
899 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
901 if (dump_file)
902 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
906 /* Return true when block A and B can be merged. */
908 static bool
909 rtl_can_merge_blocks (basic_block a, basic_block b)
911 /* If we are partitioning hot/cold basic blocks, we don't want to
912 mess up unconditional or indirect jumps that cross between hot
913 and cold sections.
915 Basic block partitioning may result in some jumps that appear to
916 be optimizable (or blocks that appear to be mergeable), but which really
917 must be left untouched (they are required to make it safely across
918 partition boundaries). See the comments at the top of
919 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
921 if (BB_PARTITION (a) != BB_PARTITION (b))
922 return false;
924 /* Protect the loop latches. */
925 if (current_loops && b->loop_father->latch == b)
926 return false;
928 /* There must be exactly one edge in between the blocks. */
929 return (single_succ_p (a)
930 && single_succ (a) == b
931 && single_pred_p (b)
932 && a != b
933 /* Must be simple edge. */
934 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
935 && a->next_bb == b
936 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
937 /* If the jump insn has side effects,
938 we can't kill the edge. */
939 && (!JUMP_P (BB_END (a))
940 || (reload_completed
941 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
944 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
945 exist. */
948 block_label (basic_block block)
950 if (block == EXIT_BLOCK_PTR)
951 return NULL_RTX;
953 if (!LABEL_P (BB_HEAD (block)))
955 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
958 return BB_HEAD (block);
/* NOTE(review): gitweb scrape artifact -- leading numerals are original
   line numbers and brace-only lines were lost in extraction; the
   remaining tokens are untouched.  */
961 /* Attempt to perform edge redirection by replacing possibly complex jump
962 instruction by unconditional jump or removing jump completely. This can
963 apply only if all edges now point to the same block. The parameters and
964 return values are equivalent to redirect_edge_and_branch. */
966 edge
967 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
969 basic_block src = e->src;
970 rtx insn = BB_END (src), kill_from;
971 rtx set;
972 int fallthru = 0;
974 /* If we are partitioning hot/cold basic blocks, we don't want to
975 mess up unconditional or indirect jumps that cross between hot
976 and cold sections.
978 Basic block partitioning may result in some jumps that appear to
979 be optimizable (or blocks that appear to be mergeable), but which really
980 must be left untouched (they are required to make it safely across
981 partition boundaries). See the comments at the top of
982 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
984 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
985 || BB_PARTITION (src) != BB_PARTITION (target))
986 return NULL;
988 /* We can replace or remove a complex jump only when we have exactly
989 two edges. Also, if we have exactly one outgoing edge, we can
990 redirect that. */
991 if (EDGE_COUNT (src->succs) >= 3
992 /* Verify that all targets will be TARGET. Specifically, the
993 edge that is not E must also go to TARGET. */
994 || (EDGE_COUNT (src->succs) == 2
995 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
996 return NULL;
998 if (!onlyjump_p (insn))
999 return NULL;
1000 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
1001 return NULL;
1003 /* Avoid removing branch with side effects. */
1004 set = single_set (insn);
1005 if (!set || side_effects_p (set))
1006 return NULL;
1008 /* In case we zap a conditional jump, we'll need to kill
1009 the cc0 setter too. */
1010 kill_from = insn;
1011 #ifdef HAVE_cc0
1012 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
1013 && only_sets_cc0_p (PREV_INSN (insn)))
1014 kill_from = PREV_INSN (insn);
1015 #endif
1017 /* See if we can create the fallthru edge. */
1018 if (in_cfglayout || can_fallthru (src, target))
1020 if (dump_file)
1021 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
1022 fallthru = 1;
1024 /* Selectively unlink whole insn chain. */
1026 if (in_cfglayout)
/* This inner `insn' shadows the outer one; it walks SRC's footer
   chain (cfglayout keeps barriers/jumptables there).  */
1027 rtx insn = BB_FOOTER (src);
1029 delete_insn_chain (kill_from, BB_END (src), false);
1031 /* Remove barriers but keep jumptables. */
1032 while (insn)
1034 if (BARRIER_P (insn))
1036 if (PREV_INSN (insn))
1037 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1038 else
1039 BB_FOOTER (src) = NEXT_INSN (insn);
1040 if (NEXT_INSN (insn))
1041 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1043 if (LABEL_P (insn))
1044 break;
1045 insn = NEXT_INSN (insn);
1048 else
1049 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
1050 false);
1053 /* If this already is simplejump, redirect it. */
1054 else if (simplejump_p (insn))
1056 if (e->dest == target)
1057 return NULL;
1058 if (dump_file)
1059 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
1060 INSN_UID (insn), e->dest->index, target->index);
1061 if (!redirect_jump (insn, block_label (target), 0))
1063 gcc_assert (target == EXIT_BLOCK_PTR);
1064 return NULL;
1068 /* Cannot do anything for target exit block. */
1069 else if (target == EXIT_BLOCK_PTR)
1070 return NULL;
1072 /* Or replace possibly complicated jump insn by simple jump insn. */
1073 else
1075 rtx target_label = block_label (target);
1076 rtx barrier, label, table;
1078 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
1079 JUMP_LABEL (BB_END (src)) = target_label;
1080 LABEL_NUSES (target_label)++;
1081 if (dump_file)
1082 fprintf (dump_file, "Replacing insn %i by jump %i\n",
1083 INSN_UID (insn), INSN_UID (BB_END (src)));
1086 delete_insn_chain (kill_from, insn, false);
1088 /* Recognize a tablejump that we are converting to a
1089 simple jump and remove its associated CODE_LABEL
1090 and ADDR_VEC or ADDR_DIFF_VEC. */
1091 if (tablejump_p (insn, &label, &table))
1092 delete_insn_chain (label, table, false);
1094 barrier = next_nonnote_insn (BB_END (src));
1095 if (!barrier || !BARRIER_P (barrier))
1096 emit_barrier_after (BB_END (src));
1097 else
1099 if (barrier != NEXT_INSN (BB_END (src)))
1101 /* Move the jump before barrier so that the notes
1102 which originally were or were created before jump table are
1103 inside the basic block. */
1104 rtx new_insn = BB_END (src);
1106 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
1107 PREV_INSN (barrier), src);
/* Manual unlink/relink of NEW_INSN to just before BARRIER.  */
1109 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
1110 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
1112 NEXT_INSN (new_insn) = barrier;
1113 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
1115 PREV_INSN (new_insn) = PREV_INSN (barrier);
1116 PREV_INSN (barrier) = new_insn;
1121 /* Keep only one edge out and set proper flags. */
1122 if (!single_succ_p (src))
1123 remove_edge (e);
1124 gcc_assert (single_succ_p (src));
1126 e = single_succ_edge (src);
1127 if (fallthru)
1128 e->flags = EDGE_FALLTHRU;
1129 else
1130 e->flags = 0;
1132 e->probability = REG_BR_PROB_BASE;
1133 e->count = src->count;
1135 if (e->dest != target)
1136 redirect_edge_succ (e, target);
1137 return e;
1140 /* Subroutine of redirect_branch_edge that tries to patch the jump
1141 instruction INSN so that it reaches block NEW. Do this
1142 only when it originally reached block OLD. Return true if this
1143 worked or the original target wasn't OLD, return false if redirection
1144 doesn't work. */
1146 static bool
1147 patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
1149 rtx tmp;
1150 /* Recognize a tablejump and adjust all matching cases. */
1151 if (tablejump_p (insn, NULL, &tmp))
1153 rtvec vec;
1154 int j;
/* NOTE(review): block_label is invoked before the EXIT_BLOCK_PTR bail-out
   below; presumably calling it is harmless in that case -- confirm.  */
1155 rtx new_label = block_label (new_bb);
/* The exit block has no label to branch to, so a tablejump cannot be
   retargeted at it.  */
1157 if (new_bb == EXIT_BLOCK_PTR)
1158 return false;
/* An ADDR_VEC keeps its label vector in operand 0; an ADDR_DIFF_VEC
   keeps it in operand 1.  */
1159 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1160 vec = XVEC (PATTERN (tmp), 0);
1161 else
1162 vec = XVEC (PATTERN (tmp), 1);
/* Rewrite every table entry that referenced OLD_LABEL, keeping the
   label use counts in sync.  */
1164 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1165 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1167 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1168 --LABEL_NUSES (old_label);
1169 ++LABEL_NUSES (new_label);
1172 /* Handle casesi dispatch insns. */
1173 if ((tmp = single_set (insn)) != NULL
1174 && SET_DEST (tmp) == pc_rtx
1175 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1176 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1177 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1179 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
1180 new_label);
1181 --LABEL_NUSES (old_label);
1182 ++LABEL_NUSES (new_label);
/* asm goto: rewrite each ASM_OPERANDS label that references OLD_LABEL.  */
1185 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
1187 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
1188 rtx new_label, note;
1190 if (new_bb == EXIT_BLOCK_PTR)
1191 return false;
1192 new_label = block_label (new_bb);
1194 for (i = 0; i < n; ++i)
1196 rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
1197 gcc_assert (GET_CODE (old_ref) == LABEL_REF);
1198 if (XEXP (old_ref, 0) == old_label)
1200 ASM_OPERANDS_LABEL (tmp, i)
1201 = gen_rtx_LABEL_REF (Pmode, new_label);
1202 --LABEL_NUSES (old_label);
1203 ++LABEL_NUSES (new_label);
/* Keep JUMP_LABEL and the REG_LABEL_TARGET / REG_LABEL_OPERAND notes
   consistent with the rewritten label references.  */
1207 if (JUMP_LABEL (insn) == old_label)
1209 JUMP_LABEL (insn) = new_label;
1210 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1211 if (note)
1212 remove_note (insn, note);
1214 else
1216 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1217 if (note)
1218 remove_note (insn, note);
1219 if (JUMP_LABEL (insn) != new_label
1220 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1221 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1223 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1224 != NULL_RTX)
1225 XEXP (note, 0) = new_label;
1227 else
1229 /* ?? We may play the games with moving the named labels from
1230 one basic block to the other in case only one computed_jump is
1231 available. */
1232 if (computed_jump_p (insn)
1233 /* A return instruction can't be redirected. */
1234 || returnjump_p (insn))
1235 return false;
1237 if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
1239 /* If the insn doesn't go where we think, we're confused. */
1240 gcc_assert (JUMP_LABEL (insn) == old_label);
1242 /* If the substitution doesn't succeed, die. This can happen
1243 if the back end emitted unrecognizable instructions or if
1244 target is exit block on some arches. */
1245 if (!redirect_jump (insn, block_label (new_bb), 0))
1247 gcc_assert (new_bb == EXIT_BLOCK_PTR);
1248 return false;
1252 return true;
1256 /* Redirect edge representing branch of (un)conditional jump or tablejump,
1257 NULL on failure */
1258 static edge
1259 redirect_branch_edge (edge e, basic_block target)
1261 rtx old_label = BB_HEAD (e->dest);
1262 basic_block src = e->src;
1263 rtx insn = BB_END (src);
1265 /* We can only redirect non-fallthru edges of jump insn. */
1266 if (e->flags & EDGE_FALLTHRU)
1267 return NULL;
1268 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1269 return NULL;
/* Outside of expansion the block ends in exactly one jump; patch it.  */
1271 if (!currently_expanding_to_rtl)
1273 if (!patch_jump_insn (insn, old_label, target))
1274 return NULL;
1276 else
1277 /* When expanding this BB might actually contain multiple
1278 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1279 Redirect all of those that match our label. */
1280 FOR_BB_INSNS (src, insn)
1281 if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
1282 return NULL;
1284 if (dump_file)
1285 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
1286 e->src->index, e->dest->index, target->index);
/* The insn stream is fixed up; now update the CFG edge itself, unless it
   already points at TARGET.  */
1288 if (e->dest != target)
1289 e = redirect_edge_succ_nodup (e, target);
1291 return e;
1294 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
1295 expense of adding new instructions or reordering basic blocks.
1297 Function can be also called with edge destination equivalent to the TARGET.
1298 Then it should try the simplifications and do nothing if none is possible.
1300 Return edge representing the branch if transformation succeeded. Return NULL
1301 on failure.
1302 We still return NULL in case E already destinated TARGET and we didn't
1303 managed to simplify instruction stream. */
1305 static edge
1306 rtl_redirect_edge_and_branch (edge e, basic_block target)
1308 edge ret;
1309 basic_block src = e->src;
/* Abnormal-call and EH edges are not represented by a branch insn and
   therefore cannot be redirected here.  */
1311 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1312 return NULL;
1314 if (e->dest == target)
1315 return e;
/* First try the cheap route: replace the jump with a simple jump (or
   delete it altogether); mark the source block dirty for DF on success.  */
1317 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1319 df_set_bb_dirty (src);
1320 return ret;
1323 ret = redirect_branch_edge (e, target);
1324 if (!ret)
1325 return NULL;
1327 df_set_bb_dirty (src);
1328 return ret;
1331 /* Like force_nonfallthru below, but additionally performs redirection
1332 Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
1333 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1334 simple_return_rtx, indicating which kind of returnjump to create.
1335 It should be NULL otherwise. */
1337 basic_block
1338 force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1340 basic_block jump_block, new_bb = NULL, src = e->src;
1341 rtx note;
1342 edge new_edge;
1343 int abnormal_edge_flags = 0;
1344 bool asm_goto_edge = false;
1345 int loc;
1347 /* In the case the last instruction is conditional jump to the next
1348 instruction, first redirect the jump itself and then continue
1349 by creating a basic block afterwards to redirect fallthru edge. */
1350 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1351 && any_condjump_p (BB_END (e->src))
1352 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1354 rtx note;
1355 edge b = unchecked_make_edge (e->src, target, 0);
1356 bool redirected;
1358 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1359 gcc_assert (redirected);
/* Split the execution count/probability between the retargeted branch
   edge B and the remaining fallthru edge E.  */
1361 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1362 if (note)
1364 int prob = INTVAL (XEXP (note, 0));
1366 b->probability = prob;
1367 /* Update this to use GCOV_COMPUTE_SCALE. */
1368 b->count = e->count * prob / REG_BR_PROB_BASE;
/* NOTE(review): self-subtraction always zeroes e->probability, which
   makes the `< 0' clamp below dead; `e->probability -= b->probability'
   looks like the intended update -- confirm against upstream history.  */
1369 e->probability -= e->probability;
1370 e->count -= b->count;
1371 if (e->probability < 0)
1372 e->probability = 0;
1373 if (e->count < 0)
1374 e->count = 0;
1378 if (e->flags & EDGE_ABNORMAL)
1380 /* Irritating special case - fallthru edge to the same block as abnormal
1381 edge.
1382 We can't redirect abnormal edge, but we still can split the fallthru
1383 one and create separate abnormal edge to original destination.
1384 This allows bb-reorder to make such edge non-fallthru. */
1385 gcc_assert (e->dest == target);
1386 abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1387 e->flags &= EDGE_FALLTHRU;
1389 else
1391 gcc_assert (e->flags & EDGE_FALLTHRU);
1392 if (e->src == ENTRY_BLOCK_PTR)
1394 /* We can't redirect the entry block. Create an empty block
1395 at the start of the function which we use to add the new
1396 jump. */
1397 edge tmp;
1398 edge_iterator ei;
1399 bool found = false;
1401 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1403 /* Change the existing edge's source to be the new block, and add
1404 a new edge from the entry block to the new block. */
1405 e->src = bb;
/* Detach E from the entry block's successor vector by hand; it is
   re-attached to the new block just below.  */
1406 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1408 if (tmp == e)
1410 ENTRY_BLOCK_PTR->succs->unordered_remove (ei.index);
1411 found = true;
1412 break;
1414 else
1415 ei_next (&ei);
1418 gcc_assert (found);
1420 vec_safe_push (bb->succs, e);
1421 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1425 /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
1426 don't point to the target or fallthru label. */
1427 if (JUMP_P (BB_END (e->src))
1428 && target != EXIT_BLOCK_PTR
1429 && (e->flags & EDGE_FALLTHRU)
1430 && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
1432 int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
1433 bool adjust_jump_target = false;
1434 for (i = 0; i < n; ++i)
1437 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
1439 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
1440 XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1441 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
1442 adjust_jump_target = true;
1444 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
1445 asm_goto_edge = true;
/* Mirror the label rewrite in JUMP_LABEL and the REG_LABEL_* notes,
   same bookkeeping as in patch_jump_insn.  */
1447 if (adjust_jump_target)
1449 rtx insn = BB_END (e->src), note;
1450 rtx old_label = BB_HEAD (e->dest);
1451 rtx new_label = BB_HEAD (target);
1453 if (JUMP_LABEL (insn) == old_label)
1455 JUMP_LABEL (insn) = new_label;
1456 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1457 if (note)
1458 remove_note (insn, note);
1460 else
1462 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1463 if (note)
1464 remove_note (insn, note);
1465 if (JUMP_LABEL (insn) != new_label
1466 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1467 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1469 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1470 != NULL_RTX)
1471 XEXP (note, 0) = new_label;
/* A dedicated jump block is needed when the source has other successors,
   carries an abnormal sibling edge, or the asm goto also targets TARGET.  */
1475 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1477 gcov_type count = e->count;
1478 int probability = e->probability;
1479 /* Create the new structures. */
1481 /* If the old block ended with a tablejump, skip its table
1482 by searching forward from there. Otherwise start searching
1483 forward from the last instruction of the old block. */
1484 if (!tablejump_p (BB_END (e->src), NULL, &note))
1485 note = BB_END (e->src);
1486 note = NEXT_INSN (note);
1488 jump_block = create_basic_block (note, NULL, e->src);
1489 jump_block->count = count;
1490 jump_block->frequency = EDGE_FREQUENCY (e);
1492 /* Make sure new block ends up in correct hot/cold section. */
1494 BB_COPY_PARTITION (jump_block, e->src);
1495 if (flag_reorder_blocks_and_partition
1496 && targetm_common.have_named_sections
1497 && JUMP_P (BB_END (jump_block))
1498 && !any_condjump_p (BB_END (jump_block))
1499 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1500 add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);
1502 /* Wire edge in. */
1503 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1504 new_edge->probability = probability;
1505 new_edge->count = count;
1507 /* Redirect old edge. */
1508 redirect_edge_pred (e, jump_block);
1509 e->probability = REG_BR_PROB_BASE;
1511 /* If asm goto has any label refs to target's label,
1512 add also edge from asm goto bb to target. */
1513 if (asm_goto_edge)
/* Split the counts evenly between the fallthru path and the direct
   asm goto edge to TARGET.  */
1515 new_edge->probability /= 2;
1516 new_edge->count /= 2;
1517 jump_block->count /= 2;
1518 jump_block->frequency /= 2;
1519 new_edge = make_edge (new_edge->src, target,
1520 e->flags & ~EDGE_FALLTHRU);
1521 new_edge->probability = probability - probability / 2;
1522 new_edge->count = count - count / 2;
1525 new_bb = jump_block;
1527 else
1528 jump_block = e->src;
1530 loc = e->goto_locus;
1531 e->flags &= ~EDGE_FALLTHRU;
/* Emit the explicit jump (or return) that replaces the fallthru.  */
1532 if (target == EXIT_BLOCK_PTR)
1534 if (jump_label == ret_rtx)
1536 #ifdef HAVE_return
1537 emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
1538 #else
1539 gcc_unreachable ();
1540 #endif
1542 else
1544 gcc_assert (jump_label == simple_return_rtx);
1545 #ifdef HAVE_simple_return
1546 emit_jump_insn_after_setloc (gen_simple_return (),
1547 BB_END (jump_block), loc);
1548 #else
1549 gcc_unreachable ();
1550 #endif
1552 set_return_jump_label (BB_END (jump_block));
1554 else
1556 rtx label = block_label (target);
1557 emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
1558 JUMP_LABEL (BB_END (jump_block)) = label;
1559 LABEL_NUSES (label)++;
1562 emit_barrier_after (BB_END (jump_block));
1563 redirect_edge_succ_nodup (e, target);
/* Restore the abnormal edge that was stripped off in the special case
   at the top.  */
1565 if (abnormal_edge_flags)
1566 make_edge (src, target, abnormal_edge_flags);
1568 df_mark_solutions_dirty ();
1569 return new_bb;
1572 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1573 (and possibly create new basic block) to make edge non-fallthru.
1574 Return newly created BB or NULL if none. */
1576 static basic_block
1577 rtl_force_nonfallthru (edge e)
/* Thin cfghooks wrapper: keep the destination, pass no return label.  */
1579 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1582 /* Redirect edge even at the expense of creating new jump insn or
1583 basic block. Return new basic block if created, NULL otherwise.
1584 Conversion must be possible. */
1586 static basic_block
1587 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
/* Cheap redirection first; NULL means no new block was required.  */
1589 if (redirect_edge_and_branch (e, target)
1590 || e->dest == target)
1591 return NULL;
1593 /* In case the edge redirection failed, try to force it to be non-fallthru
1594 and redirect newly created simplejump. */
1595 df_set_bb_dirty (e->src);
1596 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1599 /* The given edge should potentially be a fallthru edge. If that is in
1600 fact true, delete the jump and barriers that are in the way. */
1602 static void
1603 rtl_tidy_fallthru_edge (edge e)
1605 rtx q;
1606 basic_block b = e->src, c = b->next_bb;
1608 /* ??? In a late-running flow pass, other folks may have deleted basic
1609 blocks by nopping out blocks, leaving multiple BARRIERs between here
1610 and the target label. They ought to be chastised and fixed.
1612 We can also wind up with a sequence of undeletable labels between
1613 one block and the next.
1615 So search through a sequence of barriers, labels, and notes for
1616 the head of block C and assert that we really do fall through. */
/* Any real insn between the blocks means this is not a fallthru
   candidate; bail out without touching anything.  */
1618 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1619 if (INSN_P (q))
1620 return;
1622 /* Remove what will soon cease being the jump insn from the source block.
1623 If block B consisted only of this single jump, turn it into a deleted
1624 note. */
1625 q = BB_END (b);
1626 if (JUMP_P (q)
1627 && onlyjump_p (q)
1628 && (any_uncondjump_p (q)
1629 || single_succ_p (b)))
1631 #ifdef HAVE_cc0
1632 /* If this was a conditional jump, we need to also delete
1633 the insn that set cc0. */
1634 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1635 q = PREV_INSN (q);
1636 #endif
1638 q = PREV_INSN (q);
1641 /* Selectively unlink the sequence. */
1642 if (q != PREV_INSN (BB_HEAD (c)))
1643 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1645 e->flags |= EDGE_FALLTHRU;
1648 /* Should move basic block BB after basic block AFTER. NIY. */
1650 static bool
1651 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1652 basic_block after ATTRIBUTE_UNUSED)
/* Not implemented for plain RTL mode; always report failure so callers
   fall back to other strategies.  */
1654 return false;
1657 /* Split a (typically critical) edge. Return the new block.
1658 The edge must not be abnormal.
1660 ??? The code generally expects to be called on critical edges.
1661 The case of a block ending in an unconditional jump to a
1662 block with multiple predecessors is not handled optimally. */
1664 static basic_block
1665 rtl_split_edge (edge edge_in)
1667 basic_block bb;
1668 rtx before;
1670 /* Abnormal edges cannot be split. */
1671 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1673 /* We are going to place the new block in front of edge destination.
1674 Avoid existence of fallthru predecessors. */
1675 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1677 edge e = find_fallthru_edge (edge_in->dest->preds);
1679 if (e)
1680 force_nonfallthru (e);
1683 /* Create the basic block note. */
1684 if (edge_in->dest != EXIT_BLOCK_PTR)
1685 before = BB_HEAD (edge_in->dest);
1686 else
1687 before = NULL_RTX;
1689 /* If this is a fall through edge to the exit block, the blocks might be
1690 not adjacent, and the right place is after the source. */
1691 if ((edge_in->flags & EDGE_FALLTHRU) && edge_in->dest == EXIT_BLOCK_PTR)
1693 before = NEXT_INSN (BB_END (edge_in->src));
1694 bb = create_basic_block (before, NULL, edge_in->src);
1695 BB_COPY_PARTITION (bb, edge_in->src);
1697 else
1699 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1700 /* ??? Why not edge_in->dest->prev_bb here? */
1701 BB_COPY_PARTITION (bb, edge_in->dest);
1704 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1706 /* For non-fallthru edges, we must adjust the predecessor's
1707 jump instruction to target our new block. */
1708 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1710 edge redirected = redirect_edge_and_branch (edge_in, bb);
1711 gcc_assert (redirected);
1713 else
1715 if (edge_in->src != ENTRY_BLOCK_PTR)
1717 /* For asm goto even splitting of fallthru edge might
1718 need insn patching, as other labels might point to the
1719 old label. */
1720 rtx last = BB_END (edge_in->src);
1721 if (last
1722 && JUMP_P (last)
1723 && edge_in->dest != EXIT_BLOCK_PTR
1724 && extract_asm_operands (PATTERN (last)) != NULL_RTX
1725 && patch_jump_insn (last, before, bb))
1726 df_set_bb_dirty (edge_in->src);
/* Fallthru edges need no insn patching; just repoint the edge.  */
1728 redirect_edge_succ (edge_in, bb);
1731 return bb;
1734 /* Queue instructions for insertion on an edge between two basic blocks.
1735 The new instructions and basic blocks (if any) will not appear in the
1736 CFG until commit_edge_insertions is called. */
1738 void
1739 insert_insn_on_edge (rtx pattern, edge e)
1741 /* We cannot insert instructions on an abnormal critical edge.
1742 It will be easier to find the culprit if we die now. */
1743 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
/* Append PATTERN to any insns already queued on this edge.  */
1745 if (e->insns.r == NULL_RTX)
1746 start_sequence ();
1747 else
1748 push_to_sequence (e->insns.r);
1750 emit_insn (pattern);
1752 e->insns.r = get_insns ();
1753 end_sequence ();
1756 /* Update the CFG for the instructions queued on edge E. */
1758 void
1759 commit_one_edge_insertion (edge e)
1761 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1762 basic_block bb;
1764 /* Pull the insns off the edge now since the edge might go away. */
1765 insns = e->insns.r;
1766 e->insns.r = NULL_RTX;
1768 /* Figure out where to put these insns. If the destination has
1769 one predecessor, insert there. Except for the exit block. */
1770 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
1772 bb = e->dest;
1774 /* Get the location correct wrt a code label, and "nice" wrt
1775 a basic block note, and before everything else. */
1776 tmp = BB_HEAD (bb);
1777 if (LABEL_P (tmp))
1778 tmp = NEXT_INSN (tmp);
1779 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1780 tmp = NEXT_INSN (tmp);
1781 if (tmp == BB_HEAD (bb))
1782 before = tmp;
1783 else if (tmp)
1784 after = PREV_INSN (tmp);
1785 else
1786 after = get_last_insn ();
1789 /* If the source has one successor and the edge is not abnormal,
1790 insert there. Except for the entry block. */
1791 else if ((e->flags & EDGE_ABNORMAL) == 0
1792 && single_succ_p (e->src)
1793 && e->src != ENTRY_BLOCK_PTR)
1795 bb = e->src;
1797 /* It is possible to have a non-simple jump here. Consider a target
1798 where some forms of unconditional jumps clobber a register. This
1799 happens on the fr30 for example.
1801 We know this block has a single successor, so we can just emit
1802 the queued insns before the jump. */
1803 if (JUMP_P (BB_END (bb)))
1804 before = BB_END (bb);
1805 else
1807 /* We'd better be fallthru, or we've lost track of what's what. */
1808 gcc_assert (e->flags & EDGE_FALLTHRU);
1810 after = BB_END (bb);
1814 /* Otherwise we must split the edge. */
1815 else
1817 bb = split_edge (e);
1818 after = BB_END (bb);
/* Keep hot/cold partitioning metadata correct when the split landed a
   crossing jump in the cold section.  */
1820 if (flag_reorder_blocks_and_partition
1821 && targetm_common.have_named_sections
1822 && e->src != ENTRY_BLOCK_PTR
1823 && BB_PARTITION (e->src) == BB_COLD_PARTITION
1824 && !(e->flags & EDGE_CROSSING)
1825 && JUMP_P (after)
1826 && !any_condjump_p (after)
1827 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1828 add_reg_note (after, REG_CROSSING_JUMP, NULL_RTX);
1831 /* Now that we've found the spot, do the insertion. */
1832 if (before)
1834 emit_insn_before_noloc (insns, before, bb);
1835 last = prev_nonnote_insn (before);
1837 else
1838 last = emit_insn_after_noloc (insns, after, bb);
/* Inserted a return?  The block now exits the function; drop the
   fallthru flag and terminate the stream with a barrier.  */
1840 if (returnjump_p (last))
1842 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1843 This is not currently a problem because this only happens
1844 for the (single) epilogue, which already has a fallthru edge
1845 to EXIT. */
1847 e = single_succ_edge (bb);
1848 gcc_assert (e->dest == EXIT_BLOCK_PTR
1849 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
1851 e->flags &= ~EDGE_FALLTHRU;
1852 emit_barrier_after (last);
1854 if (before)
1855 delete_insn (before);
1857 else
1858 gcc_assert (!JUMP_P (last));
1861 /* Update the CFG for all queued instructions. */
1863 void
1864 commit_edge_insertions (void)
1866 basic_block bb;
1868 #ifdef ENABLE_CHECKING
1869 verify_flow_info ();
1870 #endif
/* Walk every block (entry included) and flush each successor edge that
   has pending insns queued by insert_insn_on_edge.  */
1872 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1874 edge e;
1875 edge_iterator ei;
1877 FOR_EACH_EDGE (e, ei, bb->succs)
1878 if (e->insns.r)
1879 commit_one_edge_insertion (e);
1884 /* Print out RTL-specific basic block information (live information
1885 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
1886 documented in dumpfile.h. */
1888 static void
1889 rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
1891 rtx insn;
1892 rtx last;
1893 char *s_indent;
/* Build the indentation prefix once up front.  */
1895 s_indent = (char *) alloca ((size_t) indent + 1);
1896 memset (s_indent, ' ', (size_t) indent);
1897 s_indent[indent] = '\0';
/* Dataflow info at block entry, when available and requested.  */
1899 if (df && (flags & TDF_DETAILS))
1901 df_dump_top (bb, outf);
1902 putc ('\n', outf);
/* Dump each insn of the block body; the entry/exit blocks have none.  */
1905 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
1906 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1907 insn = NEXT_INSN (insn))
1909 if (flags & TDF_DETAILS)
1910 df_dump_insn_top (insn, outf);
1911 if (! (flags & TDF_SLIM))
1912 print_rtl_single (outf, insn);
1913 else
1914 dump_insn_slim (outf, insn);
1915 if (flags & TDF_DETAILS)
1916 df_dump_insn_bottom (insn, outf);
1919 if (df && (flags & TDF_DETAILS))
1921 df_dump_bottom (bb, outf);
1922 putc ('\n', outf);
1927 /* Like dump_function_to_file, but for RTL. Print out dataflow information
1928 for the start of each basic block. FLAGS are the TDF_* masks documented
1929 in dumpfile.h. */
1931 void
1932 print_rtl_with_bb (FILE *outf, const_rtx rtx_first, int flags)
1934 const_rtx tmp_rtx;
1935 if (rtx_first == 0)
1936 fprintf (outf, "(nil)\n");
1937 else
1939 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1940 int max_uid = get_max_uid ();
/* Per-UID side tables: which block (if any) starts/ends at each insn,
   and whether an insn belongs to zero, one, or several blocks.  */
1941 basic_block *start = XCNEWVEC (basic_block, max_uid);
1942 basic_block *end = XCNEWVEC (basic_block, max_uid);
1943 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
1944 basic_block bb;
1946 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
1947 insns, but the CFG is not maintained so the basic block info
1948 is not reliable. Therefore it's omitted from the dumps. */
1949 if (! (cfun->curr_properties & PROP_cfg))
1950 flags &= ~TDF_BLOCKS;
1952 if (df)
1953 df_dump_start (outf);
/* First pass: populate the side tables from the CFG.  */
1955 if (flags & TDF_BLOCKS)
1957 FOR_EACH_BB_REVERSE (bb)
1959 rtx x;
1961 start[INSN_UID (BB_HEAD (bb))] = bb;
1962 end[INSN_UID (BB_END (bb))] = bb;
1963 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1965 enum bb_state state = IN_MULTIPLE_BB;
1967 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1968 state = IN_ONE_BB;
1969 in_bb_p[INSN_UID (x)] = state;
1971 if (x == BB_END (bb))
1972 break;
/* Second pass: walk the whole insn chain, printing block banners and
   anomaly annotations around each insn.  */
1977 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1979 if (flags & TDF_BLOCKS)
1981 bb = start[INSN_UID (tmp_rtx)];
1982 if (bb != NULL)
1984 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
1985 if (df && (flags & TDF_DETAILS))
1986 df_dump_top (bb, outf);
1989 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1990 && !NOTE_P (tmp_rtx)
1991 && !BARRIER_P (tmp_rtx))
1992 fprintf (outf, ";; Insn is not within a basic block\n");
1993 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1994 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1997 if (flags & TDF_DETAILS)
1998 df_dump_insn_top (tmp_rtx, outf);
1999 if (! (flags & TDF_SLIM))
2000 print_rtl_single (outf, tmp_rtx);
2001 else
2002 dump_insn_slim (outf, tmp_rtx);
2003 if (flags & TDF_DETAILS)
2004 df_dump_insn_bottom (tmp_rtx, outf);
2006 if (flags & TDF_BLOCKS)
2008 bb = end[INSN_UID (tmp_rtx)];
2009 if (bb != NULL)
2011 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
2012 if (df && (flags & TDF_DETAILS))
2013 df_dump_bottom (bb, outf);
2014 putc ('\n', outf);
2019 free (start);
2020 free (end);
2021 free (in_bb_p);
2025 /* Update the branch probability of BB if a REG_BR_PROB is present. */
2027 void
2028 update_br_prob_note (basic_block bb)
2030 rtx note;
2031 if (!JUMP_P (BB_END (bb)))
2032 return;
/* Nothing to do if there is no note, or it already matches the CFG.  */
2033 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2034 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
2035 return;
2036 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
2039 /* Get the last insn associated with block BB (that includes barriers and
2040 tablejumps after BB). */
/* NOTE(review): the return-type line (rtx) appears to have been lost in
   this extraction; the function returns the last associated insn.  */
2042 get_last_bb_insn (basic_block bb)
2044 rtx tmp;
2045 rtx end = BB_END (bb);
2047 /* Include any jump table following the basic block. */
2048 if (tablejump_p (end, NULL, &tmp))
2049 end = tmp;
2051 /* Include any barriers that may follow the basic block. */
2052 tmp = next_nonnote_insn_bb (end);
2053 while (tmp && BARRIER_P (tmp))
2055 end = tmp;
2056 tmp = next_nonnote_insn_bb (end);
2059 return end;
2062 /* Verify, in the basic block chain, that there is at most one switch
2063 between hot/cold partitions. This condition will not be true until
2064 after reorder_basic_blocks is called. */
2066 static int
2067 verify_hot_cold_block_grouping (void)
2069 basic_block bb;
2070 int err = 0;
2071 bool switched_sections = false;
2072 int current_partition = BB_UNPARTITIONED;
/* The invariant only holds once block reordering has grouped the
   partitions; before that, silently succeed.  */
2074 if (!crtl->bb_reorder_complete)
2075 return err;
2077 FOR_EACH_BB (bb)
/* A second partition change along the chain is the error case.  */
2079 if (current_partition != BB_UNPARTITIONED
2080 && BB_PARTITION (bb) != current_partition)
2082 if (switched_sections)
2084 error ("multiple hot/cold transitions found (bb %i)",
2085 bb->index);
2086 err = 1;
2088 else
2089 switched_sections = true;
2091 if (!crtl->has_bb_partition)
2092 error ("partition found but function partition flag not set");
2094 current_partition = BB_PARTITION (bb);
2097 return err;
2101 /* Perform several checks on the edges out of each block, such as
2102 the consistency of the branch probabilities, the correctness
2103 of hot/cold partition crossing edges, and the number of expected
2104 successor edges. */
2106 static int
2107 rtl_verify_edges (void)
2109 int err = 0;
2110 basic_block bb;
2112 FOR_EACH_BB_REVERSE (bb)
/* Per-block tallies of each edge category, filled by the loop below.  */
2114 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2115 int n_eh = 0, n_abnormal = 0;
2116 edge e, fallthru = NULL;
2117 edge_iterator ei;
2118 rtx note;
/* Check that a conditional jump's REG_BR_PROB note agrees with the
   probability recorded on the branch edge.  */
2120 if (JUMP_P (BB_END (bb))
2121 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2122 && EDGE_COUNT (bb->succs) >= 2
2123 && any_condjump_p (BB_END (bb)))
2125 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
2126 && profile_status != PROFILE_ABSENT)
2128 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
2129 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability)
2130 err = 1;
2134 FOR_EACH_EDGE (e, ei, bb->succs)
2136 bool is_crossing;
2138 if (e->flags & EDGE_FALLTHRU)
2139 n_fallthru++, fallthru = e;
/* EDGE_CROSSING must be set exactly when the endpoints live in
   different hot/cold partitions (entry/exit excluded).  */
2141 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2142 && e->src != ENTRY_BLOCK_PTR
2143 && e->dest != EXIT_BLOCK_PTR);
2144 if (e->flags & EDGE_CROSSING)
2146 if (!is_crossing)
2148 error ("EDGE_CROSSING incorrectly set across same section");
2149 err = 1;
2151 if (e->flags & EDGE_FALLTHRU)
2153 error ("fallthru edge crosses section boundary in bb %i",
2154 e->src->index);
2155 err = 1;
2157 if (e->flags & EDGE_EH)
2159 error ("EH edge crosses section boundary in bb %i",
2160 e->src->index);
2161 err = 1;
2164 else if (is_crossing)
2166 error ("EDGE_CROSSING missing across section boundary");
2167 err = 1;
/* An edge with none of these auxiliary flags is an ordinary branch.  */
2170 if ((e->flags & ~(EDGE_DFS_BACK
2171 | EDGE_CAN_FALLTHRU
2172 | EDGE_IRREDUCIBLE_LOOP
2173 | EDGE_LOOP_EXIT
2174 | EDGE_CROSSING
2175 | EDGE_PRESERVE)) == 0)
2176 n_branch++;
2178 if (e->flags & EDGE_ABNORMAL_CALL)
2179 n_abnormal_call++;
2181 if (e->flags & EDGE_SIBCALL)
2182 n_sibcall++;
2184 if (e->flags & EDGE_EH)
2185 n_eh++;
2187 if (e->flags & EDGE_ABNORMAL)
2188 n_abnormal++;
/* Cross-check the tallies against the block's final insn.  */
2191 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2193 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2194 err = 1;
2196 if (n_eh > 1)
2198 error ("too many exception handling edges in bb %i", bb->index);
2199 err = 1;
2201 if (n_branch
2202 && (!JUMP_P (BB_END (bb))
2203 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2204 || any_condjump_p (BB_END (bb))))))
2206 error ("too many outgoing branch edges from bb %i", bb->index);
2207 err = 1;
2209 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2211 error ("fallthru edge after unconditional jump in bb %i", bb->index);
2212 err = 1;
2214 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2216 error ("wrong number of branch edges after unconditional jump"
2217 " in bb %i", bb->index);
2218 err = 1;
2220 if (n_branch != 1 && any_condjump_p (BB_END (bb))
2221 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2223 error ("wrong amount of branch edges after conditional jump"
2224 " in bb %i", bb->index);
2225 err = 1;
2227 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2229 error ("abnormal call edges for non-call insn in bb %i", bb->index);
2230 err = 1;
2232 if (n_sibcall && !CALL_P (BB_END (bb)))
2234 error ("sibcall edges for non-call insn in bb %i", bb->index);
2235 err = 1;
2237 if (n_abnormal > n_eh
2238 && !(CALL_P (BB_END (bb))
2239 && n_abnormal == n_abnormal_call + n_sibcall)
2240 && (!JUMP_P (BB_END (bb))
2241 || any_condjump_p (BB_END (bb))
2242 || any_uncondjump_p (BB_END (bb))))
2244 error ("abnormal edges for no purpose in bb %i", bb->index);
2245 err = 1;
2249 /* Clean up. */
2250 return err;
2253 /* Checks on the instructions within blocks. Currently checks that each
2254 block starts with a basic block note, and that basic block notes and
2255 control flow jumps are not found in the middle of the block. */
2257 static int
2258 rtl_verify_bb_insns (void)
2260 rtx x;
2261 int err = 0;
2262 basic_block bb;
2264 FOR_EACH_BB_REVERSE (bb)
2266 /* Now check the header of basic
2267 block. It ought to contain optional CODE_LABEL followed
2268 by NOTE_BASIC_BLOCK. */
2269 x = BB_HEAD (bb);
2270 if (LABEL_P (x))
/* A block consisting solely of its label cannot also contain the
   required basic block note.  */
2272 if (BB_END (bb) == x)
2274 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2275 bb->index);
2276 err = 1;
2279 x = NEXT_INSN (x);
2282 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2284 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2285 bb->index);
2286 err = 1;
2289 if (BB_END (bb) == x)
2290 /* Do checks for empty blocks here. */
2292 else
/* Scan the remainder of the block: no stray basic block notes and
   no control flow insn before the block's last insn.  */
2293 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2295 if (NOTE_INSN_BASIC_BLOCK_P (x))
2297 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2298 INSN_UID (x), bb->index);
2299 err = 1;
2302 if (x == BB_END (bb))
2303 break;
2305 if (control_flow_insn_p (x))
2307 error ("in basic block %d:", bb->index);
2308 fatal_insn ("flow control insn inside a basic block", x);
2313 /* Clean up. */
2314 return err;
2317 /* Verify that block pointers for instructions in basic blocks, headers and
2318 footers are set appropriately. */
2320 static int
2321 rtl_verify_bb_pointers (void)
2323 int err = 0;
2324 basic_block bb;
2326 /* Check the general integrity of the basic blocks. */
2327 FOR_EACH_BB_REVERSE (bb)
2329 rtx insn;
2331 if (!(bb->flags & BB_RTL))
2333 error ("BB_RTL flag not set for block %d", bb->index);
2334 err = 1;
/* Every insn in the block body must point back at the block.  */
2337 FOR_BB_INSNS (bb, insn)
2338 if (BLOCK_FOR_INSN (insn) != bb)
2340 error ("insn %d basic block pointer is %d, should be %d",
2341 INSN_UID (insn),
2342 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2343 bb->index);
2344 err = 1;
/* Header and footer insns (other than barriers) must have a NULL
   block pointer -- they live outside the block proper.  */
2347 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2348 if (!BARRIER_P (insn)
2349 && BLOCK_FOR_INSN (insn) != NULL)
2351 error ("insn %d in header of bb %d has non-NULL basic block",
2352 INSN_UID (insn), bb->index);
2353 err = 1;
2355 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2356 if (!BARRIER_P (insn)
2357 && BLOCK_FOR_INSN (insn) != NULL)
2359 error ("insn %d in footer of bb %d has non-NULL basic block",
2360 INSN_UID (insn), bb->index);
2361 err = 1;
2365 /* Clean up. */
2366 return err;
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does following checks:

   - overlapping of basic blocks
   - insns with wrong BLOCK_FOR_INSN pointers
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fall_thru edge crosses hot/cold partition boundaries
   - verify that there are no pending RTL branch predictions
   - verify that there is a single hot/cold partition boundary after bbro

   In future it can be extended to check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */

static int
rtl_verify_flow_info_1 (void)
{
  int err = 0;

  err |= rtl_verify_bb_pointers ();

  err |= rtl_verify_bb_insns ();

  err |= rtl_verify_edges ();

  /* GNU style: space before the call parentheses, matching the
     sibling calls above.  */
  err |= verify_hot_cold_block_grouping ();

  return err;
}
/* Walk the instruction chain and verify that bb head/end pointers
   are correct, and that instructions are in exactly one bb and have
   correct block pointers.  Returns nonzero on any inconsistency.  */

static int
rtl_verify_bb_insn_chain (void)
{
  basic_block bb;
  int err = 0;
  rtx x;
  rtx last_head = get_last_insn ();
  basic_block *bb_info;
  const int max_uid = get_max_uid ();

  /* Map insn UID -> the block that claimed it, to detect insns
     appearing in two blocks.  Zero-initialized.  */
  bb_info = XCNEWVEC (basic_block, max_uid);

  /* Blocks are visited last-to-first; LAST_HEAD shrinks toward the
     start of the stream as each block's span is consumed.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      rtx head = BB_HEAD (bb);
      rtx end = BB_END (bb);

      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

	  /* And that the code outside of basic blocks has NULL bb field.  */
	  if (!BARRIER_P (x)
	      && BLOCK_FOR_INSN (x) != NULL)
	    {
	      error ("insn %d outside of basic blocks has non-NULL bb field",
		     INSN_UID (x));
	      err = 1;
	    }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      /* NOTE(review): if X is NULL here (head not found) this
	 dereferences NULL via PREV_INSN; in practice the preceding
	 errors indicate an already-broken chain.  */
      last_head = PREV_INSN (x);
    }

  /* Whatever precedes the first block's head is inter-block code.  */
  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  return err;
}
/* Verify that fallthru edges point to adjacent blocks in layout order and
   that barriers exist after non-fallthru blocks.  Returns nonzero on
   recoverable errors; aborts via fatal_insn on a corrupt fallthru.  */

static int
rtl_verify_fallthru (void)
{
  basic_block bb;
  int err = 0;

  FOR_EACH_BB_REVERSE (bb)
    {
      edge e;

      e = find_fallthru_edge (bb->succs);
      if (!e)
	{
	  rtx insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      /* Hitting the end of the stream or the next block's note
		 before a barrier means the barrier is missing.  */
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR
	       && e->dest != EXIT_BLOCK_PTR)
	{
	  rtx insn;

	  /* A fallthru edge must connect blocks adjacent in layout
	     order...  */
	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    /* ...and only non-insns (notes) may sit between them.  */
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

  return err;
}
/* Verify that blocks are laid out in consecutive order.  While walking the
   instructions, verify that all expected instructions are inside the basic
   blocks, and that all returns are followed by barriers.  Hard layout
   violations abort via internal_error/fatal_insn.  */

static int
rtl_verify_bb_layout (void)
{
  basic_block bb;
  int err = 0;
  rtx x;
  int num_bb_notes;
  const rtx rtx_first = get_insns ();
  /* CURR_BB is the block we are currently inside, or NULL between
     blocks.  */
  basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR;

  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  /* Insn-stream order must match bb-chain order.  */
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      if (!curr_bb)
	{
	  /* Only barriers, notes and jump-table labels/data may live
	     outside of basic blocks.  */
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An ADDR_VEC is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      /* A non-conditional return must be immediately followed by a
	 barrier.  */
      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
	    fatal_insn ("return not followed by barrier", x);

      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  /* NOTE(review): the comparison subtracts NUM_FIXED_BLOCKS but the
     message prints the raw n_basic_blocks — confirm this is intended.  */
  if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

   return err;
}
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL, plus consistency checks specific to linearized RTL mode.

   Currently it does following checks:
   - all checks of rtl_verify_flow_info_1
   - test head/end pointers
   - check that blocks are laid out in consecutive order
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers
   - check that all fallthru edge points to the adjacent blocks.  */

static int
rtl_verify_flow_info (void)
{
  /* Run the shared checks first, then the linearized-RTL-only ones,
     OR-ing the error indications together.  */
  int err = rtl_verify_flow_info_1 ();
  err |= rtl_verify_bb_insn_chain ();
  err |= rtl_verify_fallthru ();
  err |= rtl_verify_bb_layout ();
  return err;
}
/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the edges from CFG.
   Return true if any edges are eliminated.  */

bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx insn = BB_END (bb), note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  /* Skip trailing debug insns/notes so the tests below look at the
     last "real" insn of the block.  */
  if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
    do
      insn = PREV_INSN (insn);
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      bool remove = false;

      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (!CALL_P (insn))
	    remove = true;
	  else if (can_nonlocal_goto (insn))
	    ; /* Keep: the call can still reach a nonlocal label.  */
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    ; /* Keep: the call can still throw internally.  */
	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
	    ; /* Keep: transactional-memory edge.  */
	  else
	    remove = true;
	}
      else if (e->flags & EDGE_EH)
	remove = !can_throw_internal (insn);

      if (remove)
	{
	  remove_edge (e);
	  df_set_bb_dirty (bb);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b,f;
      edge_iterator ei;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Avoid abnormal flags to leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  df_set_bb_dirty (bb);
	  purged = true;
	  remove_edge (e);
	}

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
	  single_succ_edge (bb)->count = bb->count;
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = INTVAL (XEXP (note, 0));
	  f->probability = REG_BR_PROB_BASE - b->probability;
	  /* Update these to use GCOV_COMPUTE_SCALE.  */
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  df_set_bb_dirty (bb);
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
  single_succ_edge (bb)->count = bb->count;

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
2868 /* Search all basic blocks for potentially dead edges and purge them. Return
2869 true if some edge has been eliminated. */
2871 bool
2872 purge_all_dead_edges (void)
2874 int purged = false;
2875 basic_block bb;
2877 FOR_EACH_BB (bb)
2879 bool purged_here = purge_dead_edges (bb);
2881 purged |= purged_here;
2884 return purged;
/* This is used by a few passes that emit some instructions after abnormal
   calls, moving the basic block's end, while they in fact do want to emit
   them on the fallthru edge.  Look for abnormal call edges, find backward
   the call in the block and insert the instructions on the edge instead.

   Similarly, handle instructions throwing exceptions internally.

   Return true when instructions have been found and inserted on edges.  */

bool
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
	 exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_ABNORMAL_CALL)
	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
		== (EDGE_ABNORMAL | EDGE_EH)))
	  break;

      /* The block has such an edge but its last insn is neither the
	 call nor a potentially-throwing insn: insns were appended
	 after the real block end.  */
      if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
	{
	  rtx insn;

	  /* Get past the new insns generated.  Allow notes, as the insns
	     may be already deleted.  */
	  insn = BB_END (bb);
	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
		 && !can_throw_internal (insn)
		 && insn != BB_HEAD (bb))
	    insn = PREV_INSN (insn);

	  if (CALL_P (insn) || can_throw_internal (insn))
	    {
	      rtx stop, next;

	      e = find_fallthru_edge (bb->succs);

	      /* Truncate the block at the call/throwing insn, then move
		 everything that followed onto the fallthru edge.  */
	      stop = NEXT_INSN (BB_END (bb));
	      BB_END (bb) = insn;

	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
		{
		  next = NEXT_INSN (insn);
		  if (INSN_P (insn))
		    {
		      delete_insn (insn);

		      /* Sometimes there's still the return value USE.
			 If it's placed after a trapping call (i.e. that
			 call is the last insn anyway), we have no fallthru
			 edge.  Simply delete this use and don't try to insert
			 on the non-existent edge.  */
		      if (GET_CODE (PATTERN (insn)) != USE)
			{
			  /* We're not deleting it, we're moving it.  */
			  INSN_DELETED_P (insn) = 0;
			  PREV_INSN (insn) = NULL_RTX;
			  NEXT_INSN (insn) = NULL_RTX;

			  insert_insn_on_edge (insn, e);
			  inserted = true;
			}
		    }
		  else if (!BARRIER_P (insn))
		    set_block_for_insn (insn, NULL);
		}
	    }

	  /* It may be that we don't find any trapping insn.  In this
	     case we discovered quite late that the insn that had been
	     marked as can_throw_internal in fact couldn't trap at all.
	     So we should in fact delete the EH edges out of the block.  */
	  else
	    purge_dead_edges (bb);
	}
    }

  return inserted;
}
/* Cut the insns from FIRST to LAST out of the insns stream.
   The removed sub-chain keeps its internal links; FIRST is returned.
   The surrounding chain and the global first/last insn pointers are
   re-stitched around the gap.  */

rtx
unlink_insn_chain (rtx first, rtx last)
{
  rtx prevfirst = PREV_INSN (first);
  rtx nextlast = NEXT_INSN (last);

  /* Detach the sub-chain.  */
  PREV_INSN (first) = NULL;
  NEXT_INSN (last) = NULL;
  /* Reconnect the neighbours, updating the stream endpoints when the
     cut touched the beginning or end of the whole stream.  */
  if (prevfirst)
    NEXT_INSN (prevfirst) = nextlast;
  if (nextlast)
    PREV_INSN (nextlast) = prevfirst;
  else
    set_last_insn (prevfirst);
  if (!prevfirst)
    set_first_insn (nextlast);
  return first;
}
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  /* Never walk into the next block's insns.  */
  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	    case NOTE_INSN_BLOCK_END:
	      gcc_unreachable ();
	      continue;
	    default:
	      continue;
	      /* NOTE(review): unreachable after `continue'.  */
	      break;
	    }
	  break;

	case CODE_LABEL:
	  /* A label followed by jump table data belongs to BB too.  */
	  if (NEXT_INSN (insn)
	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      break;
    }

  /* It is possible to hit a contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     Where the barrier belongs to jump_insn, but the note does not.  This can
     be created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such a case reorder the notes.  */
  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_KIND (insn))
	  {
	  case NOTE_INSN_BLOCK_END:
	    gcc_unreachable ();
	    break;
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	  case NOTE_INSN_DELETED_DEBUG_LABEL:
	    continue;
	  default:
	    /* Move the note past the trailing barrier/jump-table run.  */
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
3081 /* Locate or create a label for a given basic block. */
3083 static rtx
3084 label_for_bb (basic_block bb)
3086 rtx label = BB_HEAD (bb);
3088 if (!LABEL_P (label))
3090 if (dump_file)
3091 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3093 label = block_label (bb);
3096 return label;
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  Detached
   pre-function insns go to cfg_layout_function_header, per-block
   surrounding insns to BB_HEADER/BB_FOOTER, and trailing insns to
   cfg_layout_function_footer.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Find the first basic block note.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  /* Everything before the first bb note becomes the function header.  */
  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      /* Insns between the previous block's effective end and this
	 block's head become this block's header.  */
      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	BB_HEADER (bb) = unlink_insn_chain (next_insn,
					    PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever follows the last block becomes the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
/* Pass execute function: switch the CFG into cfglayout mode.  */

static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
/* Pass execute function: leave cfglayout mode, preserving the current
   block order by threading it through the bb->aux chain that
   cfg_layout_finalize consumes.  */

static unsigned int
outof_cfg_layout_mode (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    if (bb->next_bb != EXIT_BLOCK_PTR)
      bb->aux = bb->next_bb;

  cfg_layout_finalize ();

  return 0;
}
/* Pass descriptor for entering cfglayout mode; provides
   PROP_cfglayout.  */

struct rtl_opt_pass pass_into_cfg_layout_mode =
{
 {
  RTL_PASS,
  "into_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  NULL, /* gate */
  into_cfg_layout_mode, /* execute */
  NULL, /* sub */
  NULL, /* next */
  0, /* static_pass_number */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  PROP_cfglayout, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0 /* todo_flags_finish */
 }
};
/* Pass descriptor for leaving cfglayout mode; destroys
   PROP_cfglayout.  */

struct rtl_opt_pass pass_outof_cfg_layout_mode =
{
 {
  RTL_PASS,
  "outof_cfglayout", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  NULL, /* gate */
  outof_cfg_layout_mode, /* execute */
  NULL, /* sub */
  NULL, /* next */
  0, /* static_pass_number */
  TV_CFG, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  PROP_cfglayout, /* properties_destroyed */
  0, /* todo_flags_start */
  0 /* todo_flags_finish */
 }
};
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
   function also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This functions re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.

   FIXME: This currently works only for RTL, but the only RTL-specific
   bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
   to GIMPLE a long time ago, but it doesn't relink the basic block
   chain.  It could do that (to give better initial RTL) if this function
   is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks.  */
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Then, clean up the aux fields.  */
  FOR_ALL_BB (bb)
    {
      bb->aux = NULL;
      if (!stay_in_cfglayout_mode)
	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
/* Given a reorder chain, rearrange the code to match.
   The desired block order is threaded through bb->aux.  First the insn
   stream is rechained in that order (splicing headers/footers back in),
   then jumps are inverted or inserted so control flow matches the new
   layout, and finally goto_locus coverage is ensured at -O0.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      if (BB_HEADER (bb))
	{
	  if (insn)
	    NEXT_INSN (insn) = BB_HEADER (bb);
	  else
	    set_first_insn (BB_HEADER (bb));
	  PREV_INSN (BB_HEADER (bb)) = insn;
	  insn = BB_HEADER (bb);
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (BB_FOOTER (bb))
	{
	  NEXT_INSN (insn) = BB_FOOTER (bb);
	  PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      rtx ret_label = NULL_RTX;
      basic_block nb, src_bb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  ret_label = JUMP_LABEL (bb_end_insn);
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* This might happen if the conditional jump has side
		 effects and could therefore not be optimized away.
		 Make the basic block to end with a barrier in order
		 to prevent rtl_verify_flow_info from complaining.  */
	      if (!e_fall)
		{
		  gcc_assert (!onlyjump_p (bb_end_insn)
			      || returnjump_p (bb_end_insn));
		  emit_barrier_after (bb_end_insn);
		  continue;
		}

	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  /* Invert the jump when the taken edge is the less
		     likely one, so the likelier edge becomes fallthru.  */
		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest == EXIT_BLOCK_PTR
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      gcc_checking_assert (could_fall_through
					   (e_taken->src, e_taken->dest));
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest == EXIT_BLOCK_PTR
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  gcc_checking_assert (could_fall_through
				       (e_taken->src, e_taken->dest));
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  if (LABEL_NUSES (ret_label) == 0
		      && single_pred_p (e_taken->dest))
		    delete_insn (ret_label);
		  continue;
		}
	    }
	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
	    {
	      /* If the old fallthru is still next or if
		 asm goto doesn't have a fallthru (e.g. when followed by
		 __builtin_unreachable ()), nothing to do.  */
	      if (! e_fall
		  || bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* Otherwise we'll have to use the fallthru fixup below.  */
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.
	 Note force_nonfallthru can delete E_FALL and thus we have to
	 save E_FALL->src prior to the call to force_nonfallthru.  */
      src_bb = e_fall->src;
      nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
      if (nb)
	{
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;

	  /* Make sure new bb is tagged for correct section (same as
	     fall-thru source, since you cannot fall-thru across
	     section boundaries).  */
	  BB_COPY_PARTITION (src_bb, single_pred (bb));
	  if (flag_reorder_blocks_and_partition
	      && targetm_common.have_named_sections
	      && JUMP_P (BB_END (bb))
	      && !any_condjump_p (BB_END (bb))
	      && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
	    add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e = find_fallthru_edge (bb->succs);

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL.  */
  if (!optimize)
    FOR_EACH_BB (bb)
      {
	edge e;
	edge_iterator ei;

	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
	      && !(e->flags & EDGE_ABNORMAL))
	    {
	      edge e2;
	      edge_iterator ei2;
	      basic_block dest, nb;
	      rtx end;

	      /* If the source block already ends with an insn carrying
		 this locus, nothing to do.  */
	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && INSN_LOCATION (insn) == e->goto_locus)
		continue;
	      if (simplejump_p (BB_END (e->src))
		  && !INSN_HAS_LOCATION (BB_END (e->src)))
		{
		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      dest = e->dest;
	      if (dest == EXIT_BLOCK_PTR)
		{
		  /* Non-fallthru edges to the exit block cannot be split.  */
		  if (!(e->flags & EDGE_FALLTHRU))
		    continue;
		}
	      else
		{
		  /* If the destination already starts with an insn
		     carrying this locus, nothing to do either.  */
		  insn = BB_HEAD (dest);
		  end = NEXT_INSN (BB_END (dest));
		  while (insn != end && !NONDEBUG_INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_HAS_LOCATION (insn)
		      && INSN_LOCATION (insn) == e->goto_locus)
		    continue;
		}
	      /* Otherwise split the edge and give the new block (or a
		 nop inside it) the goto_locus.  */
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;

	      /* If there are other incoming edges to the destination block
		 with the same goto locus, redirect them to the new block as
		 well, this can prevent other such blocks from being created
		 in subsequent iterations of the loop.  */
	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
		    && e->goto_locus == e2->goto_locus)
		  redirect_edge_and_branch (e2, nb);
		else
		  ei_next (&ei2);
	    }
      }
}
3583 /* Perform sanity checks on the insn chain.
3584 1. Check that next/prev pointers are consistent in both the forward and
3585 reverse direction.
3586 2. Count insns in chain, going both directions, and check if equal.
3587 3. Check that get_last_insn () returns the actual end of chain. */
3589 DEBUG_FUNCTION void
3590 verify_insn_chain (void)
3592 rtx x, prevx, nextx;
3593 int insn_cnt1, insn_cnt2;
3595 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3596 x != 0;
3597 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3598 gcc_assert (PREV_INSN (x) == prevx);
3600 gcc_assert (prevx == get_last_insn ());
3602 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3603 x != 0;
3604 nextx = x, insn_cnt2++, x = PREV_INSN (x))
3605 gcc_assert (NEXT_INSN (x) == nextx);
3607 gcc_assert (insn_cnt1 == insn_cnt2);
3610 /* If we have assembler epilogues, the block falling through to exit must
3611 be the last one in the reordered chain when we reach final. Ensure
3612 that this condition is met. */
3613 static void
3614 fixup_fallthru_exit_predecessor (void)
3616 edge e;
3617 basic_block bb = NULL;
3619 /* This transformation is not valid before reload, because we might
3620 separate a call from the instruction that copies the return
3621 value. */
3622 gcc_assert (reload_completed);
/* BB becomes the fallthru predecessor of the exit block, if any.  */
3624 e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
3625 if (e)
3626 bb = e->src;
/* bb->aux non-NULL means BB is not already last in the ->aux chain
   (the desired block order), so it must be moved to the end.  */
3628 if (bb && bb->aux)
3630 basic_block c = ENTRY_BLOCK_PTR->next_bb;
3632 /* If the very first block is the one with the fall-through exit
3633 edge, we have to split that block. */
3634 if (c == bb)
3636 bb = split_block (bb, NULL)->dest;
3637 bb->aux = c->aux;
3638 c->aux = bb;
3639 BB_FOOTER (bb) = BB_FOOTER (c);
3640 BB_FOOTER (c) = NULL;
/* Unlink BB from its current position in the ->aux chain.  */
3643 while (c->aux != bb)
3644 c = (basic_block) c->aux;
3646 c->aux = bb->aux;
/* Walk to the current last block and append BB after it.  */
3647 while (c->aux)
3648 c = (basic_block) c->aux;
3650 c->aux = bb;
3651 bb->aux = NULL;
3655 /* In case there are more than one fallthru predecessors of exit, force that
3656 there is only one. */
3658 static void
3659 force_one_exit_fallthru (void)
3661 edge e, predecessor = NULL;
3662 bool more = false;
3663 edge_iterator ei;
3664 basic_block forwarder, bb;
/* Look for a second fallthru predecessor of the exit block; remember
   the first one found in PREDECESSOR.  */
3666 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3667 if (e->flags & EDGE_FALLTHRU)
3669 if (predecessor == NULL)
3670 predecessor = e;
3671 else
3673 more = true;
3674 break;
/* Zero or one fallthru predecessor: nothing to fix.  */
3678 if (!more)
3679 return;
3681 /* Exit has several fallthru predecessors. Create a forwarder block for
3682 them. */
3683 forwarder = split_edge (predecessor);
/* Redirect every remaining fallthru edge into the forwarder; use the
   safe-edge iterator form because redirection mutates the pred list.  */
3684 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
3686 if (e->src == forwarder
3687 || !(e->flags & EDGE_FALLTHRU))
3688 ei_next (&ei);
3689 else
3690 redirect_edge_and_branch_force (e, forwarder);
3693 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
3694 exit block. */
3695 FOR_EACH_BB (bb)
3697 if (bb->aux == NULL && bb != forwarder)
3699 bb->aux = forwarder;
3700 break;
3705 /* Return true in case it is possible to duplicate the basic block BB. */
3707 static bool
3708 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
3710 /* Do not attempt to duplicate tablejumps, as we need to unshare
3711 the dispatch table. This is difficult to do, as the instructions
3712 computing jump destination may be hoisted outside the basic block. */
3713 if (tablejump_p (BB_END (bb), NULL, NULL))
3714 return false;
3716 /* Do not duplicate blocks containing insns that can't be copied. */
3717 if (targetm.cannot_copy_insn_p)
3719 rtx insn = BB_HEAD (bb);
3720 while (1)
3722 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
3723 return false;
3724 if (insn == BB_END (bb))
3725 break;
3726 insn = NEXT_INSN (insn);
3730 return true;
/* Duplicate the insns between FROM and TO (inclusive), appending the copies
   at the end of the current insn stream, and return the first copied insn.
   The copies are reordered into place later by the caller.  */
3734 duplicate_insn_chain (rtx from, rtx to)
3736 rtx insn, next, last, copy;
3738 /* Avoid updating of boundaries of previous basic block. The
3739 note will get removed from insn stream in fixup. */
3740 last = emit_note (NOTE_INSN_DELETED);
3742 /* Create copy at the end of INSN chain. The chain will
3743 be reordered later. */
3744 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
3746 switch (GET_CODE (insn))
3748 case DEBUG_INSN:
3749 /* Don't duplicate label debug insns. */
3750 if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
3751 break;
3752 /* FALLTHRU */
3753 case INSN:
3754 case CALL_INSN:
3755 case JUMP_INSN:
3756 copy = emit_copy_of_insn_after (insn, get_last_insn ());
/* Preserve a direct RETURN/SIMPLE_RETURN jump target on the copy.  */
3757 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
3758 && ANY_RETURN_P (JUMP_LABEL (insn)))
3759 JUMP_LABEL (copy) = JUMP_LABEL (insn);
3760 maybe_copy_prologue_epilogue_insn (insn, copy);
3761 break;
3763 case JUMP_TABLE_DATA:
3764 /* Avoid copying of dispatch tables. We never duplicate
3765 tablejumps, so this can hit only in case the table got
3766 moved far from original jump.
3767 Avoid copying following barrier as well if any
3768 (and debug insns in between). */
3769 for (next = NEXT_INSN (insn);
3770 next != NEXT_INSN (to);
3771 next = NEXT_INSN (next))
3772 if (!DEBUG_INSN_P (next))
3773 break;
3774 if (next != NEXT_INSN (to) && BARRIER_P (next))
3775 insn = next;
3776 break;
3778 case CODE_LABEL:
3779 break;
3781 case BARRIER:
3782 emit_barrier ();
3783 break;
3785 case NOTE:
3786 switch (NOTE_KIND (insn))
3788 /* In case prologue is empty and function contain label
3789 in first BB, we may want to copy the block. */
3790 case NOTE_INSN_PROLOGUE_END:
3792 case NOTE_INSN_DELETED:
3793 case NOTE_INSN_DELETED_LABEL:
3794 case NOTE_INSN_DELETED_DEBUG_LABEL:
3795 /* No problem to strip these. */
3796 case NOTE_INSN_FUNCTION_BEG:
3797 /* There is always just single entry to function. */
3798 case NOTE_INSN_BASIC_BLOCK:
3799 break;
3801 case NOTE_INSN_EPILOGUE_BEG:
3802 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
3803 emit_note_copy (insn);
3804 break;
3806 default:
3807 /* All other notes should have already been eliminated. */
3808 gcc_unreachable ();
3810 break;
3811 default:
3812 gcc_unreachable ();
/* The first real copy follows the placeholder note; delete the note
   and return the head of the copied chain.  */
3815 insn = NEXT_INSN (last);
3816 delete_insn (last);
3817 return insn;
3820 /* Create a duplicate of the basic block BB. */
3822 static basic_block
3823 cfg_layout_duplicate_bb (basic_block bb)
3825 rtx insn;
3826 basic_block new_bb;
/* Copy the body insns and build the new block just before the exit
   block; partition assignment follows the original.  */
3828 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
3829 new_bb = create_basic_block (insn,
3830 insn ? get_last_insn () : NULL,
3831 EXIT_BLOCK_PTR->prev_bb);
3833 BB_COPY_PARTITION (new_bb, bb);
/* Also duplicate the out-of-line header chain, if present.  */
3834 if (BB_HEADER (bb))
3836 insn = BB_HEADER (bb);
3837 while (NEXT_INSN (insn))
3838 insn = NEXT_INSN (insn);
3839 insn = duplicate_insn_chain (BB_HEADER (bb), insn);
3840 if (insn)
3841 BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
/* Likewise for the footer chain.  */
3844 if (BB_FOOTER (bb))
3846 insn = BB_FOOTER (bb);
3847 while (NEXT_INSN (insn))
3848 insn = NEXT_INSN (insn);
3849 insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
3850 if (insn)
3851 BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
3854 return new_bb;
3858 /* Main entry point to this module - initialize the datastructures for
3859 CFG layout changes. It keeps LOOPS up-to-date if not null.
3861 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
3863 void
3864 cfg_layout_initialize (unsigned int flags)
3866 rtx x;
3867 basic_block bb;
3869 initialize_original_copy_tables ();
3871 cfg_layout_rtl_register_cfg_hooks ();
3873 record_effective_endpoints ();
3875 /* Make sure that the targets of non local gotos are marked. */
3876 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
3878 bb = BLOCK_FOR_INSN (XEXP (x, 0));
3879 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
3882 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
3885 /* Splits superblocks. */
3886 void
3887 break_superblocks (void)
3889 sbitmap superblocks;
3890 bool need = false;
3891 basic_block bb;
3893 superblocks = sbitmap_alloc (last_basic_block);
3894 bitmap_clear (superblocks);
3896 FOR_EACH_BB (bb)
3897 if (bb->flags & BB_SUPERBLOCK)
3899 bb->flags &= ~BB_SUPERBLOCK;
3900 bitmap_set_bit (superblocks, bb->index);
3901 need = true;
3904 if (need)
3906 rebuild_jump_labels (get_insns ());
3907 find_many_sub_basic_blocks (superblocks);
3910 free (superblocks);
3913 /* Finalize the changes: reorder insn list according to the sequence specified
3914 by aux pointers, enter compensation code, rebuild scope forest. */
3916 void
3917 cfg_layout_finalize (void)
3919 #ifdef ENABLE_CHECKING
3920 verify_flow_info ();
3921 #endif
3922 force_one_exit_fallthru ();
/* Leave cfglayout mode: switch back to the plain RTL hooks.  */
3923 rtl_register_cfg_hooks ();
/* Without a real epilogue the fallthru predecessor of exit must end up
   last in the chain; see fixup_fallthru_exit_predecessor.  */
3924 if (reload_completed
3925 #ifdef HAVE_epilogue
3926 && !HAVE_epilogue
3927 #endif
3929 fixup_fallthru_exit_predecessor ();
3930 fixup_reorder_chain ();
3932 rebuild_jump_labels (get_insns ());
3933 delete_dead_jumptables ();
3935 #ifdef ENABLE_CHECKING
3936 verify_insn_chain ();
3937 verify_flow_info ();
3938 #endif
3942 /* Same as split_block but update cfg_layout structures. */
3944 static basic_block
3945 cfg_layout_split_block (basic_block bb, void *insnp)
3947 rtx insn = (rtx) insnp;
3948 basic_block new_bb = rtl_split_block (bb, insn);
3950 BB_FOOTER (new_bb) = BB_FOOTER (bb);
3951 BB_FOOTER (bb) = NULL;
3953 return new_bb;
3956 /* Redirect Edge to DEST. */
3957 static edge
3958 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
3960 basic_block src = e->src;
3961 edge ret;
/* Abnormal-call and EH edges cannot be redirected.  */
3963 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3964 return NULL;
3966 if (e->dest == dest)
3967 return e;
/* Try the cheap route first: replace the jump at the end of SRC.  */
3969 if (e->src != ENTRY_BLOCK_PTR
3970 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
3972 df_set_bb_dirty (src);
3973 return ret;
3976 if (e->src == ENTRY_BLOCK_PTR
3977 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
3979 if (dump_file)
3980 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
3981 e->src->index, dest->index);
3983 df_set_bb_dirty (e->src);
3984 redirect_edge_succ (e, dest);
3985 return e;
3988 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
3989 in the case the basic block appears to be in sequence. Avoid this
3990 transformation. */
3992 if (e->flags & EDGE_FALLTHRU)
3994 /* Redirect any branch edges unified with the fallthru one. */
3995 if (JUMP_P (BB_END (src))
3996 && label_is_jump_target_p (BB_HEAD (e->dest),
3997 BB_END (src)))
3999 edge redirected;
4001 if (dump_file)
4002 fprintf (dump_file, "Fallthru edge unified with branch "
4003 "%i->%i redirected to %i\n",
4004 e->src->index, e->dest->index, dest->index);
4005 e->flags &= ~EDGE_FALLTHRU;
4006 redirected = redirect_branch_edge (e, dest);
4007 gcc_assert (redirected);
4008 redirected->flags |= EDGE_FALLTHRU;
4009 df_set_bb_dirty (redirected->src);
4010 return redirected;
4012 /* In case we are redirecting fallthru edge to the branch edge
4013 of conditional jump, remove it. */
4014 if (EDGE_COUNT (src->succs) == 2)
4016 /* Find the edge that is different from E. */
4017 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
4019 if (s->dest == dest
4020 && any_condjump_p (BB_END (src))
4021 && onlyjump_p (BB_END (src)))
4022 delete_insn (BB_END (src));
4024 if (dump_file)
4025 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
4026 e->src->index, e->dest->index, dest->index);
4027 ret = redirect_edge_succ_nodup (e, dest);
4029 else
4030 ret = redirect_branch_edge (e, dest);
4032 /* We don't want simplejumps in the insn stream during cfglayout. */
4033 gcc_assert (!simplejump_p (BB_END (src)));
4035 df_set_bb_dirty (src);
4036 return ret;
4039 /* Simple wrapper as we always can redirect fallthru edges. */
4040 static basic_block
4041 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
4043 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4045 gcc_assert (redirected);
4046 return NULL;
4049 /* Same as delete_basic_block but update cfg_layout structures. */
4051 static void
4052 cfg_layout_delete_block (basic_block bb)
4054 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
/* Splice the out-of-line header insns back into the main insn stream
   just before BB's head.  */
4056 if (BB_HEADER (bb))
4058 next = BB_HEAD (bb);
4059 if (prev)
4060 NEXT_INSN (prev) = BB_HEADER (bb);
4061 else
4062 set_first_insn (BB_HEADER (bb));
4063 PREV_INSN (BB_HEADER (bb)) = prev;
4064 insn = BB_HEADER (bb);
4065 while (NEXT_INSN (insn))
4066 insn = NEXT_INSN (insn);
4067 NEXT_INSN (insn) = next;
4068 PREV_INSN (next) = insn;
4070 next = NEXT_INSN (BB_END (bb));
/* Splice the footer back in after BB's end, first dropping barriers
   and anything after a label.  */
4071 if (BB_FOOTER (bb))
4073 insn = BB_FOOTER (bb);
4074 while (insn)
4076 if (BARRIER_P (insn))
4078 if (PREV_INSN (insn))
4079 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
4080 else
4081 BB_FOOTER (bb) = NEXT_INSN (insn);
4082 if (NEXT_INSN (insn))
4083 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
4085 if (LABEL_P (insn))
4086 break;
4087 insn = NEXT_INSN (insn);
4089 if (BB_FOOTER (bb))
4091 insn = BB_END (bb);
4092 NEXT_INSN (insn) = BB_FOOTER (bb);
4093 PREV_INSN (BB_FOOTER (bb)) = insn;
4094 while (NEXT_INSN (insn))
4095 insn = NEXT_INSN (insn);
4096 NEXT_INSN (insn) = next;
4097 if (next)
4098 PREV_INSN (next) = insn;
4099 else
4100 set_last_insn (insn);
/* Anything left over after deleting the block is parked on the next
   block's header, or on the function footer if BB was last.  */
4103 if (bb->next_bb != EXIT_BLOCK_PTR)
4104 to = &BB_HEADER (bb->next_bb);
4105 else
4106 to = &cfg_layout_function_footer;
4108 rtl_delete_block (bb);
4110 if (prev)
4111 prev = NEXT_INSN (prev);
4112 else
4113 prev = get_insns ();
4114 if (next)
4115 next = PREV_INSN (next);
4116 else
4117 next = get_last_insn ();
/* Move the surviving insns between PREV and NEXT to *TO.  */
4119 if (next && NEXT_INSN (next) != prev)
4121 remaints = unlink_insn_chain (prev, next);
4122 insn = remaints;
4123 while (NEXT_INSN (insn))
4124 insn = NEXT_INSN (insn);
4125 NEXT_INSN (insn) = *to;
4126 if (*to)
4127 PREV_INSN (*to) = insn;
4128 *to = remaints;
4132 /* Return true when blocks A and B can be safely merged. */
4134 static bool
4135 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4137 /* If we are partitioning hot/cold basic blocks, we don't want to
4138 mess up unconditional or indirect jumps that cross between hot
4139 and cold sections.
4141 Basic block partitioning may result in some jumps that appear to
4142 be optimizable (or blocks that appear to be mergeable), but which really
4143 must be left untouched (they are required to make it safely across
4144 partition boundaries). See the comments at the top of
4145 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4147 if (BB_PARTITION (a) != BB_PARTITION (b))
4148 return false;
4150 /* Protect the loop latches. */
4151 if (current_loops && b->loop_father->latch == b)
4152 return false;
4154 /* If we would end up moving B's instructions, make sure it doesn't fall
4155 through into the exit block, since we cannot recover from a fallthrough
4156 edge into the exit block occurring in the middle of a function. */
4157 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4159 edge e = find_fallthru_edge (b->succs);
4160 if (e && e->dest == EXIT_BLOCK_PTR)
4161 return false;
4164 /* There must be exactly one edge in between the blocks. */
4165 return (single_succ_p (a)
4166 && single_succ (a) == b
4167 && single_pred_p (b) == 1
4168 && a != b
4169 /* Must be simple edge. */
4170 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4171 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
4172 /* If the jump insn has side effects, we can't kill the edge.
4173 When not optimizing, try_redirect_by_replacing_jump will
4174 not allow us to redirect an edge by replacing a table jump. */
4175 && (!JUMP_P (BB_END (a))
4176 || ((!optimize || reload_completed)
4177 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
4180 /* Merge block A and B. The blocks must be mergeable. */
4182 static void
4183 cfg_layout_merge_blocks (basic_block a, basic_block b)
4185 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
4186 rtx insn;
4188 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
4190 if (dump_file)
4191 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4192 a->index);
4194 /* If there was a CODE_LABEL beginning B, delete it. */
4195 if (LABEL_P (BB_HEAD (b)))
4197 delete_insn (BB_HEAD (b));
4200 /* We should have fallthru edge in a, or we can do dummy redirection to get
4201 it cleaned up. */
4202 if (JUMP_P (BB_END (a)))
4203 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
4204 gcc_assert (!JUMP_P (BB_END (a)));
4206 /* When not optimizing CFG and the edge is the only place in RTL which holds
4207 some unique locus, emit a nop with that locus in between. */
4208 if (!optimize)
4209 emit_nop_for_unique_locus_between (a, b);
4211 /* Move things from b->footer after a->footer. */
4212 if (BB_FOOTER (b))
4214 if (!BB_FOOTER (a))
4215 BB_FOOTER (a) = BB_FOOTER (b);
4216 else
4218 rtx last = BB_FOOTER (a);
/* Append B's footer chain after A's last footer insn.  */
4220 while (NEXT_INSN (last))
4221 last = NEXT_INSN (last);
4222 NEXT_INSN (last) = BB_FOOTER (b);
4223 PREV_INSN (BB_FOOTER (b)) = last;
4225 BB_FOOTER (b) = NULL;
4228 /* Move things from b->header before a->footer.
4229 Note that this may include dead tablejump data, but we don't clean
4230 those up until we go out of cfglayout mode. */
4231 if (BB_HEADER (b))
4233 if (! BB_FOOTER (a))
4234 BB_FOOTER (a) = BB_HEADER (b)
4235 else
4237 rtx last = BB_HEADER (b);
/* Prepend B's header chain in front of A's footer.  */
4239 while (NEXT_INSN (last))
4240 last = NEXT_INSN (last);
4241 NEXT_INSN (last) = BB_FOOTER (a);
4242 PREV_INSN (BB_FOOTER (a)) = last;
4243 BB_FOOTER (a) = BB_HEADER (b);
4245 BB_HEADER (b) = NULL;
4248 /* In the case basic blocks are not adjacent, move them around. */
4249 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4251 insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
4253 emit_insn_after_noloc (insn, BB_END (a), a);
4255 /* Otherwise just re-associate the instructions. */
4256 else
4258 insn = BB_HEAD (b);
4259 BB_END (a) = BB_END (b);
4262 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4263 We need to explicitly call. */
4264 update_bb_for_insn_chain (insn, BB_END (b), a);
4266 /* Skip possible DELETED_LABEL insn. */
4267 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4268 insn = NEXT_INSN (insn);
4269 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4270 BB_HEAD (b) = BB_END (b) = NULL;
4271 delete_insn (insn);
4273 df_bb_delete (b->index);
4275 /* If B was a forwarder block, propagate the locus on the edge. */
4276 if (forwarder_p
4277 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
4278 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4280 if (dump_file)
4281 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
4284 /* Split edge E. */
4286 static basic_block
4287 cfg_layout_split_edge (edge e)
4289 basic_block new_bb =
4290 create_basic_block (e->src != ENTRY_BLOCK_PTR
4291 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
4292 NULL_RTX, e->src);
4294 if (e->dest == EXIT_BLOCK_PTR)
4295 BB_COPY_PARTITION (new_bb, e->src);
4296 else
4297 BB_COPY_PARTITION (new_bb, e->dest);
4298 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4299 redirect_edge_and_branch_force (e, new_bb);
4301 return new_bb;
4304 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4306 static void
4307 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4311 /* Return true if BB contains only labels or non-executable
4312 instructions. */
4314 static bool
4315 rtl_block_empty_p (basic_block bb)
4317 rtx insn;
4319 if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
4320 return true;
4322 FOR_BB_INSNS (bb, insn)
4323 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4324 return false;
4326 return true;
4329 /* Split a basic block if it ends with a conditional branch and if
4330 the other part of the block is not empty. */
4332 static basic_block
4333 rtl_split_block_before_cond_jump (basic_block bb)
4335 rtx insn;
4336 rtx split_point = NULL;
4337 rtx last = NULL;
4338 bool found_code = false;
4340 FOR_BB_INSNS (bb, insn)
4342 if (any_condjump_p (insn))
4343 split_point = last;
4344 else if (NONDEBUG_INSN_P (insn))
4345 found_code = true;
4346 last = insn;
4349 /* Did not find everything. */
4350 if (found_code && split_point)
4351 return split_block (bb, split_point)->dest;
4352 else
4353 return NULL;
4356 /* Return 1 if BB ends with a call, possibly followed by some
4357 instructions that must stay with the call, 0 otherwise. */
4359 static bool
4360 rtl_block_ends_with_call_p (basic_block bb)
4362 rtx insn = BB_END (bb);
4364 while (!CALL_P (insn)
4365 && insn != BB_HEAD (bb)
4366 && (keep_with_call_p (insn)
4367 || NOTE_P (insn)
4368 || DEBUG_INSN_P (insn)))
4369 insn = PREV_INSN (insn);
4370 return (CALL_P (insn));
4373 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4375 static bool
4376 rtl_block_ends_with_condjump_p (const_basic_block bb)
4378 return any_condjump_p (BB_END (bb));
4381 /* Return true if we need to add fake edge to exit.
4382 Helper function for rtl_flow_call_edges_add. */
4384 static bool
4385 need_fake_edge_p (const_rtx insn)
4387 if (!INSN_P (insn))
4388 return false;
4390 if ((CALL_P (insn)
4391 && !SIBLING_CALL_P (insn)
4392 && !find_reg_note (insn, REG_NORETURN, NULL)
4393 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4394 return true;
4396 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4397 && MEM_VOLATILE_P (PATTERN (insn)))
4398 || (GET_CODE (PATTERN (insn)) == PARALLEL
4399 && asm_noperands (insn) != -1
4400 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4401 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4404 /* Add fake edges to the function exit for any non constant and non noreturn
4405 calls, volatile inline assembly in the bitmap of blocks specified by
4406 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
4407 that were split.
4409 The goal is to expose cases in which entering a basic block does not imply
4410 that all subsequent instructions must be executed. */
4412 static int
4413 rtl_flow_call_edges_add (sbitmap blocks)
4415 int i;
4416 int blocks_split = 0;
4417 int last_bb = last_basic_block;
4418 bool check_last_block = false;
4420 if (n_basic_blocks == NUM_FIXED_BLOCKS)
4421 return 0;
4423 if (! blocks)
4424 check_last_block = true;
4425 else
4426 check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
4428 /* In the last basic block, before epilogue generation, there will be
4429 a fallthru edge to EXIT. Special care is required if the last insn
4430 of the last basic block is a call because make_edge folds duplicate
4431 edges, which would result in the fallthru edge also being marked
4432 fake, which would result in the fallthru edge being removed by
4433 remove_fake_edges, which would result in an invalid CFG.
4435 Moreover, we can't elide the outgoing fake edge, since the block
4436 profiler needs to take this into account in order to solve the minimal
4437 spanning tree in the case that the call doesn't return.
4439 Handle this by adding a dummy instruction in a new last basic block. */
4440 if (check_last_block)
4442 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
4443 rtx insn = BB_END (bb);
4445 /* Back up past insns that must be kept in the same block as a call. */
4446 while (insn != BB_HEAD (bb)
4447 && keep_with_call_p (insn))
4448 insn = PREV_INSN (insn);
4450 if (need_fake_edge_p (insn))
4452 edge e;
4454 e = find_edge (bb, EXIT_BLOCK_PTR);
4455 if (e)
/* Inserting the dummy USE splits the block, separating the
   fallthru edge from the fake one added below.  */
4457 insert_insn_on_edge (gen_use (const0_rtx), e);
4458 commit_edge_insertions ();
4463 /* Now add fake edges to the function exit for any non constant
4464 calls since there is no way that we can determine if they will
4465 return or not... */
4467 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
4469 basic_block bb = BASIC_BLOCK (i);
4470 rtx insn;
4471 rtx prev_insn;
4473 if (!bb)
4474 continue;
4476 if (blocks && !bitmap_bit_p (blocks, i))
4477 continue;
/* Scan the block backwards so splits do not disturb the walk.  */
4479 for (insn = BB_END (bb); ; insn = prev_insn)
4481 prev_insn = PREV_INSN (insn);
4482 if (need_fake_edge_p (insn))
4484 edge e;
4485 rtx split_at_insn = insn;
4487 /* Don't split the block between a call and an insn that should
4488 remain in the same block as the call. */
4489 if (CALL_P (insn))
4490 while (split_at_insn != BB_END (bb)
4491 && keep_with_call_p (NEXT_INSN (split_at_insn)))
4492 split_at_insn = NEXT_INSN (split_at_insn);
4494 /* The handling above of the final block before the epilogue
4495 should be enough to verify that there is no edge to the exit
4496 block in CFG already. Calling make_edge in such case would
4497 cause us to mark that edge as fake and remove it later. */
4499 #ifdef ENABLE_CHECKING
4500 if (split_at_insn == BB_END (bb))
4502 e = find_edge (bb, EXIT_BLOCK_PTR);
4503 gcc_assert (e == NULL);
4505 #endif
4507 /* Note that the following may create a new basic block
4508 and renumber the existing basic blocks. */
4509 if (split_at_insn != BB_END (bb))
4511 e = split_block (bb, split_at_insn);
4512 if (e)
4513 blocks_split++;
4516 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
4519 if (insn == BB_HEAD (bb))
4520 break;
4524 if (blocks_split)
4525 verify_flow_info ();
4527 return blocks_split;
4530 /* Add COMP_RTX as a condition at end of COND_BB. FIRST_HEAD is
4531 the conditional branch target, SECOND_HEAD should be the fall-thru
4532 there is no need to handle this here the loop versioning code handles
4533 this. the reason for SECON_HEAD is that it is needed for condition
4534 in trees, and this should be of the same type since it is a hook. */
4535 static void
4536 rtl_lv_add_condition_to_bb (basic_block first_head ,
4537 basic_block second_head ATTRIBUTE_UNUSED,
4538 basic_block cond_bb, void *comp_rtx)
4540 rtx label, seq, jump;
4541 rtx op0 = XEXP ((rtx)comp_rtx, 0);
4542 rtx op1 = XEXP ((rtx)comp_rtx, 1);
4543 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
4544 enum machine_mode mode;
4547 label = block_label (first_head);
4548 mode = GET_MODE (op0);
4549 if (mode == VOIDmode)
4550 mode = GET_MODE (op1);
4552 start_sequence ();
4553 op0 = force_operand (op0, NULL_RTX);
4554 op1 = force_operand (op1, NULL_RTX);
4555 do_compare_rtx_and_jump (op0, op1, comp, 0,
4556 mode, NULL_RTX, NULL_RTX, label, -1);
4557 jump = get_last_insn ();
4558 JUMP_LABEL (jump) = label;
4559 LABEL_NUSES (label)++;
4560 seq = get_insns ();
4561 end_sequence ();
4563 /* Add the new cond , in the new head. */
4564 emit_insn_after(seq, BB_END(cond_bb));
4568 /* Given a block B with unconditional branch at its end, get the
4569 store the return the branch edge and the fall-thru edge in
4570 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4571 static void
4572 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4573 edge *fallthru_edge)
4575 edge e = EDGE_SUCC (b, 0);
4577 if (e->flags & EDGE_FALLTHRU)
4579 *fallthru_edge = e;
4580 *branch_edge = EDGE_SUCC (b, 1);
4582 else
4584 *branch_edge = e;
4585 *fallthru_edge = EDGE_SUCC (b, 1);
/* Allocate and zero-initialize the RTL-specific info of basic block BB.
   BB must not already have RTL info attached.  */
4589 void
4590 init_rtl_bb_info (basic_block bb)
4592 gcc_assert (!bb->il.x.rtl);
4593 bb->il.x.head_ = NULL;
4594 bb->il.x.rtl = ggc_alloc_cleared_rtl_bb_info ();
4597 /* Returns true if it is possible to remove edge E by redirecting
4598 it to the destination of the other edge from E->src. */
4600 static bool
4601 rtl_can_remove_branch_p (const_edge e)
4603 const_basic_block src = e->src;
4604 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
4605 const_rtx insn = BB_END (src), set;
4607 /* The conditions are taken from try_redirect_by_replacing_jump. */
4608 if (target == EXIT_BLOCK_PTR)
4609 return false;
4611 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4612 return false;
4614 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
4615 || BB_PARTITION (src) != BB_PARTITION (target))
4616 return false;
4618 if (!onlyjump_p (insn)
4619 || tablejump_p (insn, NULL, NULL))
4620 return false;
4622 set = single_set (insn);
4623 if (!set || side_effects_p (set))
4624 return false;
4626 return true;
4629 static basic_block
4630 rtl_duplicate_bb (basic_block bb)
4632 bb = cfg_layout_duplicate_bb (bb);
4633 bb->aux = NULL;
4634 return bb;
4637 /* Do book-keeping of basic block BB for the profile consistency checker.
4638 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
4639 then do post-pass accounting. Store the counting in RECORD. */
4640 static void
4641 rtl_account_profile_record (basic_block bb, int after_pass,
4642 struct profile_record *record)
4644 rtx insn;
4645 FOR_BB_INSNS (bb, insn)
4646 if (INSN_P (insn))
4648 record->size[after_pass]
4649 += insn_rtx_cost (PATTERN (insn), false);
4650 if (profile_status == PROFILE_READ)
4651 record->time[after_pass]
4652 += insn_rtx_cost (PATTERN (insn), true) * bb->count;
4653 else if (profile_status == PROFILE_GUESSED)
4654 record->time[after_pass]
4655 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
4659 /* Implementation of CFG manipulation for linearized RTL. */
4660 struct cfg_hooks rtl_cfg_hooks = {
4661 "rtl", /* name */
4662 rtl_verify_flow_info, /* verify_flow_info */
4663 rtl_dump_bb, /* dump_bb */
4664 rtl_dump_bb_for_graph, /* dump_bb_for_graph */
4665 rtl_create_basic_block, /* create_basic_block */
4666 rtl_redirect_edge_and_branch, /* redirect_edge_and_branch */
4667 rtl_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
4668 rtl_can_remove_branch_p, /* can_remove_branch_p */
4669 rtl_delete_block, /* delete_basic_block */
4670 rtl_split_block, /* split_block */
4671 rtl_move_block_after, /* move_block_after */
4672 rtl_can_merge_blocks, /* can_merge_blocks_p */
4673 rtl_merge_blocks, /* merge_blocks */
4674 rtl_predict_edge, /* predict_edge */
4675 rtl_predicted_by_p, /* predicted_by_p */
4676 cfg_layout_can_duplicate_bb_p, /* can_duplicate_block_p */
4677 rtl_duplicate_bb, /* duplicate_block */
4678 rtl_split_edge, /* split_edge */
4679 rtl_make_forwarder_block, /* make_forwarder_block */
4680 rtl_tidy_fallthru_edge, /* tidy_fallthru_edge */
4681 rtl_force_nonfallthru, /* force_nonfallthru */
4682 rtl_block_ends_with_call_p, /* block_ends_with_call_p */
4683 rtl_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
4684 rtl_flow_call_edges_add, /* flow_call_edges_add */
4685 NULL, /* execute_on_growing_pred */
4686 NULL, /* execute_on_shrinking_pred */
4687 NULL, /* duplicate loop for trees */
4688 NULL, /* lv_add_condition_to_bb */
4689 NULL, /* lv_adjust_loop_header_phi*/
4690 NULL, /* extract_cond_bb_edges */
4691 NULL, /* flush_pending_stmts */
4692 rtl_block_empty_p, /* block_empty_p */
4693 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
4694 rtl_account_profile_record, /* account_profile_record */
4697 /* Implementation of CFG manipulation for cfg layout RTL, where
4698 basic block connected via fallthru edges does not have to be adjacent.
4699 This representation will hopefully become the default one in future
4700 version of the compiler. */
4702 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
4703 "cfglayout mode", /* name */
4704 rtl_verify_flow_info_1, /* verify_flow_info */
4705 rtl_dump_bb, /* dump_bb */
4706 rtl_dump_bb_for_graph, /* dump_bb_for_graph */
4707 cfg_layout_create_basic_block, /* create_basic_block */
4708 cfg_layout_redirect_edge_and_branch, /* redirect_edge_and_branch */
4709 cfg_layout_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
4710 rtl_can_remove_branch_p, /* can_remove_branch_p */
4711 cfg_layout_delete_block, /* delete_basic_block */
4712 cfg_layout_split_block, /* split_block */
4713 rtl_move_block_after, /* move_block_after */
4714 cfg_layout_can_merge_blocks_p, /* can_merge_blocks_p */
4715 cfg_layout_merge_blocks, /* merge_blocks */
4716 rtl_predict_edge, /* predict_edge */
4717 rtl_predicted_by_p, /* predicted_by_p */
4718 cfg_layout_can_duplicate_bb_p, /* can_duplicate_block_p */
4719 cfg_layout_duplicate_bb, /* duplicate_block */
4720 cfg_layout_split_edge, /* split_edge */
4721 rtl_make_forwarder_block, /* make_forwarder_block */
4722 NULL, /* tidy_fallthru_edge */
4723 rtl_force_nonfallthru, /* force_nonfallthru */
4724 rtl_block_ends_with_call_p, /* block_ends_with_call_p */
4725 rtl_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
4726 rtl_flow_call_edges_add, /* flow_call_edges_add */
4727 NULL, /* execute_on_growing_pred */
4728 NULL, /* execute_on_shrinking_pred */
4729 duplicate_loop_to_header_edge, /* duplicate loop for trees */
4730 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
4731 NULL, /* lv_adjust_loop_header_phi*/
4732 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
4733 NULL, /* flush_pending_stmts */
4734 rtl_block_empty_p, /* block_empty_p */
4735 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
4736 rtl_account_profile_record, /* account_profile_record */
4739 #include "gt-cfgrtl.h"