/* gcc/cfgrtl.c -- from official-gcc.git
   (blob a07f89bd5c370e69e1014391784a8a1595e73eb)  */
/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains low level functions to manipulate the CFG and analyze it
   that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
	 insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
	 fixup_abnormal_edges

   Functions not supposed for generic use:
     - Infrastructure to determine quickly basic block for insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
     - Edge redirection with updating and optimizing of insn chain
	 block_label, tidy_fallthru_edge, force_nonfallthru  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "function.h"
#include "except.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-attr.h"
#include "insn-config.h"
#include "expr.h"
#include "target.h"
#include "common/common-target.h"
#include "cfgloop.h"
#include "ggc.h"
#include "tree-pass.h"
#include "df.h"
64 /* Holds the interesting leading and trailing notes for the function.
65 Only applicable if the CFG is in cfglayout mode. */
66 static GTY(()) rtx cfg_layout_function_footer;
67 static GTY(()) rtx cfg_layout_function_header;
69 static rtx skip_insns_after_block (basic_block);
70 static void record_effective_endpoints (void);
71 static rtx label_for_bb (basic_block);
72 static void fixup_reorder_chain (void);
74 void verify_insn_chain (void);
75 static void fixup_fallthru_exit_predecessor (void);
76 static int can_delete_note_p (const_rtx);
77 static int can_delete_label_p (const_rtx);
78 static basic_block rtl_split_edge (edge);
79 static bool rtl_move_block_after (basic_block, basic_block);
80 static int rtl_verify_flow_info (void);
81 static basic_block cfg_layout_split_block (basic_block, void *);
82 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
83 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
84 static void cfg_layout_delete_block (basic_block);
85 static void rtl_delete_block (basic_block);
86 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
87 static edge rtl_redirect_edge_and_branch (edge, basic_block);
88 static basic_block rtl_split_block (basic_block, void *);
89 static void rtl_dump_bb (FILE *, basic_block, int, int);
90 static int rtl_verify_flow_info_1 (void);
91 static void rtl_make_forwarder_block (edge);
93 /* Return true if NOTE is not one of the ones that must be kept paired,
94 so that we may simply delete it. */
96 static int
97 can_delete_note_p (const_rtx note)
99 switch (NOTE_KIND (note))
101 case NOTE_INSN_DELETED:
102 case NOTE_INSN_BASIC_BLOCK:
103 case NOTE_INSN_EPILOGUE_BEG:
104 return true;
106 default:
107 return false;
111 /* True if a given label can be deleted. */
113 static int
114 can_delete_label_p (const_rtx label)
116 return (!LABEL_PRESERVE_P (label)
117 /* User declared labels must be preserved. */
118 && LABEL_NAME (label) == 0
119 && !in_expr_list_p (forced_labels, label));
122 /* Delete INSN by patching it out. */
124 void
125 delete_insn (rtx insn)
127 rtx note;
128 bool really_delete = true;
130 if (LABEL_P (insn))
132 /* Some labels can't be directly removed from the INSN chain, as they
133 might be references via variables, constant pool etc.
134 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
135 if (! can_delete_label_p (insn))
137 const char *name = LABEL_NAME (insn);
138 basic_block bb = BLOCK_FOR_INSN (insn);
139 rtx bb_note = NEXT_INSN (insn);
141 really_delete = false;
142 PUT_CODE (insn, NOTE);
143 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
144 NOTE_DELETED_LABEL_NAME (insn) = name;
146 if (bb_note != NULL_RTX && NOTE_INSN_BASIC_BLOCK_P (bb_note)
147 && BLOCK_FOR_INSN (bb_note) == bb)
149 reorder_insns_nobb (insn, insn, bb_note);
150 BB_HEAD (bb) = bb_note;
151 if (BB_END (bb) == bb_note)
152 BB_END (bb) = insn;
156 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
159 if (really_delete)
161 /* If this insn has already been deleted, something is very wrong. */
162 gcc_assert (!INSN_DELETED_P (insn));
163 remove_insn (insn);
164 INSN_DELETED_P (insn) = 1;
167 /* If deleting a jump, decrement the use count of the label. Deleting
168 the label itself should happen in the normal course of block merging. */
169 if (JUMP_P (insn))
171 if (JUMP_LABEL (insn)
172 && LABEL_P (JUMP_LABEL (insn)))
173 LABEL_NUSES (JUMP_LABEL (insn))--;
175 /* If there are more targets, remove them too. */
176 while ((note
177 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
178 && LABEL_P (XEXP (note, 0)))
180 LABEL_NUSES (XEXP (note, 0))--;
181 remove_note (insn, note);
185 /* Also if deleting any insn that references a label as an operand. */
186 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
187 && LABEL_P (XEXP (note, 0)))
189 LABEL_NUSES (XEXP (note, 0))--;
190 remove_note (insn, note);
193 if (JUMP_TABLE_DATA_P (insn))
195 rtx pat = PATTERN (insn);
196 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
197 int len = XVECLEN (pat, diff_vec_p);
198 int i;
200 for (i = 0; i < len; i++)
202 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
204 /* When deleting code in bulk (e.g. removing many unreachable
205 blocks) we can delete a label that's a target of the vector
206 before deleting the vector itself. */
207 if (!NOTE_P (label))
208 LABEL_NUSES (label)--;
213 /* Like delete_insn but also purge dead edges from BB. */
215 void
216 delete_insn_and_edges (rtx insn)
218 bool purge = false;
220 if (INSN_P (insn)
221 && BLOCK_FOR_INSN (insn)
222 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
223 purge = true;
224 delete_insn (insn);
225 if (purge)
226 purge_dead_edges (BLOCK_FOR_INSN (insn));
229 /* Unlink a chain of insns between START and FINISH, leaving notes
230 that must be paired. If CLEAR_BB is true, we set bb field for
231 insns that cannot be removed to NULL. */
233 void
234 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
236 rtx prev, current;
238 /* Unchain the insns one by one. It would be quicker to delete all of these
239 with a single unchaining, rather than one at a time, but we need to keep
240 the NOTE's. */
241 current = finish;
242 while (1)
244 prev = PREV_INSN (current);
245 if (NOTE_P (current) && !can_delete_note_p (current))
247 else
248 delete_insn (current);
250 if (clear_bb && !INSN_DELETED_P (current))
251 set_block_for_insn (current, NULL);
253 if (current == start)
254 break;
255 current = prev;
259 /* Create a new basic block consisting of the instructions between HEAD and END
260 inclusive. This function is designed to allow fast BB construction - reuses
261 the note and basic block struct in BB_NOTE, if any and do not grow
262 BASIC_BLOCK chain and should be used directly only by CFG construction code.
263 END can be NULL in to create new empty basic block before HEAD. Both END
264 and HEAD can be NULL to create basic block at the end of INSN chain.
265 AFTER is the basic block we should be put after. */
267 basic_block
268 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
270 basic_block bb;
272 if (bb_note
273 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
274 && bb->aux == NULL)
276 /* If we found an existing note, thread it back onto the chain. */
278 rtx after;
280 if (LABEL_P (head))
281 after = head;
282 else
284 after = PREV_INSN (head);
285 head = bb_note;
288 if (after != bb_note && NEXT_INSN (after) != bb_note)
289 reorder_insns_nobb (bb_note, bb_note, after);
291 else
293 /* Otherwise we must create a note and a basic block structure. */
295 bb = alloc_block ();
297 init_rtl_bb_info (bb);
298 if (!head && !end)
299 head = end = bb_note
300 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
301 else if (LABEL_P (head) && end)
303 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
304 if (head == end)
305 end = bb_note;
307 else
309 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
310 head = bb_note;
311 if (!end)
312 end = head;
315 NOTE_BASIC_BLOCK (bb_note) = bb;
318 /* Always include the bb note in the block. */
319 if (NEXT_INSN (end) == bb_note)
320 end = bb_note;
322 BB_HEAD (bb) = head;
323 BB_END (bb) = end;
324 bb->index = last_basic_block++;
325 bb->flags = BB_NEW | BB_RTL;
326 link_block (bb, after);
327 SET_BASIC_BLOCK (bb->index, bb);
328 df_bb_refs_record (bb->index, false);
329 update_bb_for_insn (bb);
330 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
332 /* Tag the block so that we know it has been used when considering
333 other basic block notes. */
334 bb->aux = bb;
336 return bb;
339 /* Create new basic block consisting of instructions in between HEAD and END
340 and place it to the BB chain after block AFTER. END can be NULL to
341 create a new empty basic block before HEAD. Both END and HEAD can be
342 NULL to create basic block at the end of INSN chain. */
344 static basic_block
345 rtl_create_basic_block (void *headp, void *endp, basic_block after)
347 rtx head = (rtx) headp, end = (rtx) endp;
348 basic_block bb;
350 /* Grow the basic block array if needed. */
351 if ((size_t) last_basic_block >= basic_block_info->length ())
353 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
354 vec_safe_grow_cleared (basic_block_info, new_size);
357 n_basic_blocks++;
359 bb = create_basic_block_structure (head, end, NULL, after);
360 bb->aux = NULL;
361 return bb;
364 static basic_block
365 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
367 basic_block newbb = rtl_create_basic_block (head, end, after);
369 return newbb;
372 /* Delete the insns in a (non-live) block. We physically delete every
373 non-deleted-note insn, and update the flow graph appropriately.
375 Return nonzero if we deleted an exception handler. */
377 /* ??? Preserving all such notes strikes me as wrong. It would be nice
378 to post-process the stream to remove empty blocks, loops, ranges, etc. */
380 static void
381 rtl_delete_block (basic_block b)
383 rtx insn, end;
385 /* If the head of this block is a CODE_LABEL, then it might be the
386 label for an exception handler which can't be reached. We need
387 to remove the label from the exception_handler_label list. */
388 insn = BB_HEAD (b);
390 end = get_last_bb_insn (b);
392 /* Selectively delete the entire chain. */
393 BB_HEAD (b) = NULL;
394 delete_insn_chain (insn, end, true);
397 if (dump_file)
398 fprintf (dump_file, "deleting block %d\n", b->index);
399 df_bb_delete (b->index);
402 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
404 void
405 compute_bb_for_insn (void)
407 basic_block bb;
409 FOR_EACH_BB (bb)
411 rtx end = BB_END (bb);
412 rtx insn;
414 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
416 BLOCK_FOR_INSN (insn) = bb;
417 if (insn == end)
418 break;
423 /* Release the basic_block_for_insn array. */
425 unsigned int
426 free_bb_for_insn (void)
428 rtx insn;
429 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
430 if (!BARRIER_P (insn))
431 BLOCK_FOR_INSN (insn) = NULL;
432 return 0;
/* Execute function for pass_free_cfg: drop the per-insn BB mapping,
   after giving the delay-slot machinery its last chance to use DF.  */

static unsigned int
rest_of_pass_free_cfg (void)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }
#endif

  free_bb_for_insn ();
  return 0;
}
452 struct rtl_opt_pass pass_free_cfg =
455 RTL_PASS,
456 "*free_cfg", /* name */
457 OPTGROUP_NONE, /* optinfo_flags */
458 NULL, /* gate */
459 rest_of_pass_free_cfg, /* execute */
460 NULL, /* sub */
461 NULL, /* next */
462 0, /* static_pass_number */
463 TV_NONE, /* tv_id */
464 0, /* properties_required */
465 0, /* properties_provided */
466 PROP_cfg, /* properties_destroyed */
467 0, /* todo_flags_start */
468 0, /* todo_flags_finish */
472 /* Return RTX to emit after when we want to emit code on the entry of function. */
474 entry_of_function (void)
476 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
477 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
480 /* Emit INSN at the entry point of the function, ensuring that it is only
481 executed once per function. */
482 void
483 emit_insn_at_entry (rtx insn)
485 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
486 edge e = ei_safe_edge (ei);
487 gcc_assert (e->flags & EDGE_FALLTHRU);
489 insert_insn_on_edge (insn, e);
490 commit_edge_insertions ();
493 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
494 (or BARRIER if found) and notify df of the bb change.
495 The insn chain range is inclusive
496 (i.e. both BEGIN and END will be updated. */
498 static void
499 update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
501 rtx insn;
503 end = NEXT_INSN (end);
504 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
505 if (!BARRIER_P (insn))
506 df_insn_change_bb (insn, bb);
509 /* Update BLOCK_FOR_INSN of insns in BB to BB,
510 and notify df of the change. */
512 void
513 update_bb_for_insn (basic_block bb)
515 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
519 /* Like active_insn_p, except keep the return value clobber around
520 even after reload. */
522 static bool
523 flow_active_insn_p (const_rtx insn)
525 if (active_insn_p (insn))
526 return true;
528 /* A clobber of the function return value exists for buggy
529 programs that fail to return a value. Its effect is to
530 keep the return value from being live across the entire
531 function. If we allow it to be skipped, we introduce the
532 possibility for register lifetime confusion. */
533 if (GET_CODE (PATTERN (insn)) == CLOBBER
534 && REG_P (XEXP (PATTERN (insn), 0))
535 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
536 return true;
538 return false;
541 /* Return true if the block has no effect and only forwards control flow to
542 its single destination. */
544 bool
545 contains_no_active_insn_p (const_basic_block bb)
547 rtx insn;
549 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
550 || !single_succ_p (bb))
551 return false;
553 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
554 if (INSN_P (insn) && flow_active_insn_p (insn))
555 return false;
557 return (!INSN_P (insn)
558 || (JUMP_P (insn) && simplejump_p (insn))
559 || !flow_active_insn_p (insn));
562 /* Likewise, but protect loop latches, headers and preheaders. */
563 /* FIXME: Make this a cfg hook. */
565 bool
566 forwarder_block_p (const_basic_block bb)
568 if (!contains_no_active_insn_p (bb))
569 return false;
571 /* Protect loop latches, headers and preheaders. */
572 if (current_loops)
574 basic_block dest;
575 if (bb->loop_father->header == bb)
576 return false;
577 dest = EDGE_SUCC (bb, 0)->dest;
578 if (dest->loop_father->header == dest)
579 return false;
582 return true;
585 /* Return nonzero if we can reach target from src by falling through. */
586 /* FIXME: Make this a cfg hook. */
588 bool
589 can_fallthru (basic_block src, basic_block target)
591 rtx insn = BB_END (src);
592 rtx insn2;
593 edge e;
594 edge_iterator ei;
596 if (target == EXIT_BLOCK_PTR)
597 return true;
598 if (src->next_bb != target)
599 return 0;
600 FOR_EACH_EDGE (e, ei, src->succs)
601 if (e->dest == EXIT_BLOCK_PTR
602 && e->flags & EDGE_FALLTHRU)
603 return 0;
605 insn2 = BB_HEAD (target);
606 if (insn2 && !active_insn_p (insn2))
607 insn2 = next_active_insn (insn2);
609 /* ??? Later we may add code to move jump tables offline. */
610 return next_active_insn (insn) == insn2;
613 /* Return nonzero if we could reach target from src by falling through,
614 if the target was made adjacent. If we already have a fall-through
615 edge to the exit block, we can't do that. */
616 static bool
617 could_fall_through (basic_block src, basic_block target)
619 edge e;
620 edge_iterator ei;
622 if (target == EXIT_BLOCK_PTR)
623 return true;
624 FOR_EACH_EDGE (e, ei, src->succs)
625 if (e->dest == EXIT_BLOCK_PTR
626 && e->flags & EDGE_FALLTHRU)
627 return 0;
628 return true;
631 /* Return the NOTE_INSN_BASIC_BLOCK of BB. */
633 bb_note (basic_block bb)
635 rtx note;
637 note = BB_HEAD (bb);
638 if (LABEL_P (note))
639 note = NEXT_INSN (note);
641 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
642 return note;
645 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
646 note associated with the BLOCK. */
648 static rtx
649 first_insn_after_basic_block_note (basic_block block)
651 rtx insn;
653 /* Get the first instruction in the block. */
654 insn = BB_HEAD (block);
656 if (insn == NULL_RTX)
657 return NULL_RTX;
658 if (LABEL_P (insn))
659 insn = NEXT_INSN (insn);
660 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
662 return NEXT_INSN (insn);
665 /* Creates a new basic block just after basic block B by splitting
666 everything after specified instruction I. */
668 static basic_block
669 rtl_split_block (basic_block bb, void *insnp)
671 basic_block new_bb;
672 rtx insn = (rtx) insnp;
673 edge e;
674 edge_iterator ei;
676 if (!insn)
678 insn = first_insn_after_basic_block_note (bb);
680 if (insn)
682 rtx next = insn;
684 insn = PREV_INSN (insn);
686 /* If the block contains only debug insns, insn would have
687 been NULL in a non-debug compilation, and then we'd end
688 up emitting a DELETED note. For -fcompare-debug
689 stability, emit the note too. */
690 if (insn != BB_END (bb)
691 && DEBUG_INSN_P (next)
692 && DEBUG_INSN_P (BB_END (bb)))
694 while (next != BB_END (bb) && DEBUG_INSN_P (next))
695 next = NEXT_INSN (next);
697 if (next == BB_END (bb))
698 emit_note_after (NOTE_INSN_DELETED, next);
701 else
702 insn = get_last_insn ();
705 /* We probably should check type of the insn so that we do not create
706 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
707 bother. */
708 if (insn == BB_END (bb))
709 emit_note_after (NOTE_INSN_DELETED, insn);
711 /* Create the new basic block. */
712 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
713 BB_COPY_PARTITION (new_bb, bb);
714 BB_END (bb) = insn;
716 /* Redirect the outgoing edges. */
717 new_bb->succs = bb->succs;
718 bb->succs = NULL;
719 FOR_EACH_EDGE (e, ei, new_bb->succs)
720 e->src = new_bb;
722 /* The new block starts off being dirty. */
723 df_set_bb_dirty (bb);
724 return new_bb;
727 /* Return true if the single edge between blocks A and B is the only place
728 in RTL which holds some unique locus. */
730 static bool
731 unique_locus_on_edge_between_p (basic_block a, basic_block b)
733 const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
734 rtx insn, end;
736 if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
737 return false;
739 /* First scan block A backward. */
740 insn = BB_END (a);
741 end = PREV_INSN (BB_HEAD (a));
742 while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
743 insn = PREV_INSN (insn);
745 if (insn != end && INSN_LOCATION (insn) == goto_locus)
746 return false;
748 /* Then scan block B forward. */
749 insn = BB_HEAD (b);
750 if (insn)
752 end = NEXT_INSN (BB_END (b));
753 while (insn != end && !NONDEBUG_INSN_P (insn))
754 insn = NEXT_INSN (insn);
756 if (insn != end && INSN_HAS_LOCATION (insn)
757 && INSN_LOCATION (insn) == goto_locus)
758 return false;
761 return true;
764 /* If the single edge between blocks A and B is the only place in RTL which
765 holds some unique locus, emit a nop with that locus between the blocks. */
767 static void
768 emit_nop_for_unique_locus_between (basic_block a, basic_block b)
770 if (!unique_locus_on_edge_between_p (a, b))
771 return;
773 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
774 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
777 /* Blocks A and B are to be merged into a single block A. The insns
778 are already contiguous. */
780 static void
781 rtl_merge_blocks (basic_block a, basic_block b)
783 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
784 rtx del_first = NULL_RTX, del_last = NULL_RTX;
785 rtx b_debug_start = b_end, b_debug_end = b_end;
786 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
787 int b_empty = 0;
789 if (dump_file)
790 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
791 a->index);
793 while (DEBUG_INSN_P (b_end))
794 b_end = PREV_INSN (b_debug_start = b_end);
796 /* If there was a CODE_LABEL beginning B, delete it. */
797 if (LABEL_P (b_head))
799 /* Detect basic blocks with nothing but a label. This can happen
800 in particular at the end of a function. */
801 if (b_head == b_end)
802 b_empty = 1;
804 del_first = del_last = b_head;
805 b_head = NEXT_INSN (b_head);
808 /* Delete the basic block note and handle blocks containing just that
809 note. */
810 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
812 if (b_head == b_end)
813 b_empty = 1;
814 if (! del_last)
815 del_first = b_head;
817 del_last = b_head;
818 b_head = NEXT_INSN (b_head);
821 /* If there was a jump out of A, delete it. */
822 if (JUMP_P (a_end))
824 rtx prev;
826 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
827 if (!NOTE_P (prev)
828 || NOTE_INSN_BASIC_BLOCK_P (prev)
829 || prev == BB_HEAD (a))
830 break;
832 del_first = a_end;
834 #ifdef HAVE_cc0
835 /* If this was a conditional jump, we need to also delete
836 the insn that set cc0. */
837 if (only_sets_cc0_p (prev))
839 rtx tmp = prev;
841 prev = prev_nonnote_insn (prev);
842 if (!prev)
843 prev = BB_HEAD (a);
844 del_first = tmp;
846 #endif
848 a_end = PREV_INSN (del_first);
850 else if (BARRIER_P (NEXT_INSN (a_end)))
851 del_first = NEXT_INSN (a_end);
853 /* Delete everything marked above as well as crap that might be
854 hanging out between the two blocks. */
855 BB_END (a) = a_end;
856 BB_HEAD (b) = b_empty ? NULL_RTX : b_head;
857 delete_insn_chain (del_first, del_last, true);
859 /* When not optimizing CFG and the edge is the only place in RTL which holds
860 some unique locus, emit a nop with that locus in between. */
861 if (!optimize)
863 emit_nop_for_unique_locus_between (a, b);
864 a_end = BB_END (a);
867 /* Reassociate the insns of B with A. */
868 if (!b_empty)
870 update_bb_for_insn_chain (a_end, b_debug_end, a);
872 BB_END (a) = b_debug_end;
873 BB_HEAD (b) = NULL_RTX;
875 else if (b_end != b_debug_end)
877 /* Move any deleted labels and other notes between the end of A
878 and the debug insns that make up B after the debug insns,
879 bringing the debug insns into A while keeping the notes after
880 the end of A. */
881 if (NEXT_INSN (a_end) != b_debug_start)
882 reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
883 b_debug_end);
884 update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
885 BB_END (a) = b_debug_end;
888 df_bb_delete (b->index);
890 /* If B was a forwarder block, propagate the locus on the edge. */
891 if (forwarder_p
892 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
893 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
895 if (dump_file)
896 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
900 /* Return true when block A and B can be merged. */
902 static bool
903 rtl_can_merge_blocks (basic_block a, basic_block b)
905 /* If we are partitioning hot/cold basic blocks, we don't want to
906 mess up unconditional or indirect jumps that cross between hot
907 and cold sections.
909 Basic block partitioning may result in some jumps that appear to
910 be optimizable (or blocks that appear to be mergeable), but which really
911 must be left untouched (they are required to make it safely across
912 partition boundaries). See the comments at the top of
913 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
915 if (BB_PARTITION (a) != BB_PARTITION (b))
916 return false;
918 /* Protect the loop latches. */
919 if (current_loops && b->loop_father->latch == b)
920 return false;
922 /* There must be exactly one edge in between the blocks. */
923 return (single_succ_p (a)
924 && single_succ (a) == b
925 && single_pred_p (b)
926 && a != b
927 /* Must be simple edge. */
928 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
929 && a->next_bb == b
930 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
931 /* If the jump insn has side effects,
932 we can't kill the edge. */
933 && (!JUMP_P (BB_END (a))
934 || (reload_completed
935 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
938 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
939 exist. */
942 block_label (basic_block block)
944 if (block == EXIT_BLOCK_PTR)
945 return NULL_RTX;
947 if (!LABEL_P (BB_HEAD (block)))
949 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
952 return BB_HEAD (block);
955 /* Attempt to perform edge redirection by replacing possibly complex jump
956 instruction by unconditional jump or removing jump completely. This can
957 apply only if all edges now point to the same block. The parameters and
958 return values are equivalent to redirect_edge_and_branch. */
960 edge
961 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
963 basic_block src = e->src;
964 rtx insn = BB_END (src), kill_from;
965 rtx set;
966 int fallthru = 0;
968 /* If we are partitioning hot/cold basic blocks, we don't want to
969 mess up unconditional or indirect jumps that cross between hot
970 and cold sections.
972 Basic block partitioning may result in some jumps that appear to
973 be optimizable (or blocks that appear to be mergeable), but which really
974 must be left untouched (they are required to make it safely across
975 partition boundaries). See the comments at the top of
976 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
978 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
979 || BB_PARTITION (src) != BB_PARTITION (target))
980 return NULL;
982 /* We can replace or remove a complex jump only when we have exactly
983 two edges. Also, if we have exactly one outgoing edge, we can
984 redirect that. */
985 if (EDGE_COUNT (src->succs) >= 3
986 /* Verify that all targets will be TARGET. Specifically, the
987 edge that is not E must also go to TARGET. */
988 || (EDGE_COUNT (src->succs) == 2
989 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
990 return NULL;
992 if (!onlyjump_p (insn))
993 return NULL;
994 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
995 return NULL;
997 /* Avoid removing branch with side effects. */
998 set = single_set (insn);
999 if (!set || side_effects_p (set))
1000 return NULL;
1002 /* In case we zap a conditional jump, we'll need to kill
1003 the cc0 setter too. */
1004 kill_from = insn;
1005 #ifdef HAVE_cc0
1006 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
1007 && only_sets_cc0_p (PREV_INSN (insn)))
1008 kill_from = PREV_INSN (insn);
1009 #endif
1011 /* See if we can create the fallthru edge. */
1012 if (in_cfglayout || can_fallthru (src, target))
1014 if (dump_file)
1015 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
1016 fallthru = 1;
1018 /* Selectively unlink whole insn chain. */
1019 if (in_cfglayout)
1021 rtx insn = BB_FOOTER (src);
1023 delete_insn_chain (kill_from, BB_END (src), false);
1025 /* Remove barriers but keep jumptables. */
1026 while (insn)
1028 if (BARRIER_P (insn))
1030 if (PREV_INSN (insn))
1031 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1032 else
1033 BB_FOOTER (src) = NEXT_INSN (insn);
1034 if (NEXT_INSN (insn))
1035 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1037 if (LABEL_P (insn))
1038 break;
1039 insn = NEXT_INSN (insn);
1042 else
1043 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
1044 false);
1047 /* If this already is simplejump, redirect it. */
1048 else if (simplejump_p (insn))
1050 if (e->dest == target)
1051 return NULL;
1052 if (dump_file)
1053 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
1054 INSN_UID (insn), e->dest->index, target->index);
1055 if (!redirect_jump (insn, block_label (target), 0))
1057 gcc_assert (target == EXIT_BLOCK_PTR);
1058 return NULL;
1062 /* Cannot do anything for target exit block. */
1063 else if (target == EXIT_BLOCK_PTR)
1064 return NULL;
1066 /* Or replace possibly complicated jump insn by simple jump insn. */
1067 else
1069 rtx target_label = block_label (target);
1070 rtx barrier, label, table;
1072 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
1073 JUMP_LABEL (BB_END (src)) = target_label;
1074 LABEL_NUSES (target_label)++;
1075 if (dump_file)
1076 fprintf (dump_file, "Replacing insn %i by jump %i\n",
1077 INSN_UID (insn), INSN_UID (BB_END (src)));
1080 delete_insn_chain (kill_from, insn, false);
1082 /* Recognize a tablejump that we are converting to a
1083 simple jump and remove its associated CODE_LABEL
1084 and ADDR_VEC or ADDR_DIFF_VEC. */
1085 if (tablejump_p (insn, &label, &table))
1086 delete_insn_chain (label, table, false);
1088 barrier = next_nonnote_insn (BB_END (src));
1089 if (!barrier || !BARRIER_P (barrier))
1090 emit_barrier_after (BB_END (src));
1091 else
1093 if (barrier != NEXT_INSN (BB_END (src)))
1095 /* Move the jump before barrier so that the notes
1096 which originally were or were created before jump table are
1097 inside the basic block. */
1098 rtx new_insn = BB_END (src);
1100 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
1101 PREV_INSN (barrier), src);
1103 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
1104 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
1106 NEXT_INSN (new_insn) = barrier;
1107 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
1109 PREV_INSN (new_insn) = PREV_INSN (barrier);
1110 PREV_INSN (barrier) = new_insn;
1115 /* Keep only one edge out and set proper flags. */
1116 if (!single_succ_p (src))
1117 remove_edge (e);
1118 gcc_assert (single_succ_p (src));
1120 e = single_succ_edge (src);
1121 if (fallthru)
1122 e->flags = EDGE_FALLTHRU;
1123 else
1124 e->flags = 0;
1126 e->probability = REG_BR_PROB_BASE;
1127 e->count = src->count;
1129 if (e->dest != target)
1130 redirect_edge_succ (e, target);
1131 return e;
1134 /* Subroutine of redirect_branch_edge that tries to patch the jump
1135 instruction INSN so that it reaches block NEW. Do this
1136 only when it originally reached block OLD. Return true if this
1137 worked or the original target wasn't OLD, return false if redirection
1138 doesn't work. */
1140 static bool
1141 patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
1143 rtx tmp;
1144 /* Recognize a tablejump and adjust all matching cases. */
1145 if (tablejump_p (insn, NULL, &tmp))
1147 rtvec vec;
1148 int j;
1149 rtx new_label = block_label (new_bb);
1151 if (new_bb == EXIT_BLOCK_PTR)
1152 return false;
1153 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1154 vec = XVEC (PATTERN (tmp), 0);
1155 else
1156 vec = XVEC (PATTERN (tmp), 1);
1158 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1159 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1161 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1162 --LABEL_NUSES (old_label);
1163 ++LABEL_NUSES (new_label);
1166 /* Handle casesi dispatch insns. */
1167 if ((tmp = single_set (insn)) != NULL
1168 && SET_DEST (tmp) == pc_rtx
1169 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1170 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1171 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1173 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
1174 new_label);
1175 --LABEL_NUSES (old_label);
1176 ++LABEL_NUSES (new_label);
1179 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
1181 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
1182 rtx new_label, note;
1184 if (new_bb == EXIT_BLOCK_PTR)
1185 return false;
1186 new_label = block_label (new_bb);
1188 for (i = 0; i < n; ++i)
1190 rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
1191 gcc_assert (GET_CODE (old_ref) == LABEL_REF);
1192 if (XEXP (old_ref, 0) == old_label)
1194 ASM_OPERANDS_LABEL (tmp, i)
1195 = gen_rtx_LABEL_REF (Pmode, new_label);
1196 --LABEL_NUSES (old_label);
1197 ++LABEL_NUSES (new_label);
1201 if (JUMP_LABEL (insn) == old_label)
1203 JUMP_LABEL (insn) = new_label;
1204 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1205 if (note)
1206 remove_note (insn, note);
1208 else
1210 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1211 if (note)
1212 remove_note (insn, note);
1213 if (JUMP_LABEL (insn) != new_label
1214 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1215 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1217 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1218 != NULL_RTX)
1219 XEXP (note, 0) = new_label;
1221 else
1223 /* ?? We may play the games with moving the named labels from
1224 one basic block to the other in case only one computed_jump is
1225 available. */
1226 if (computed_jump_p (insn)
1227 /* A return instruction can't be redirected. */
1228 || returnjump_p (insn))
1229 return false;
1231 if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
1233 /* If the insn doesn't go where we think, we're confused. */
1234 gcc_assert (JUMP_LABEL (insn) == old_label);
1236 /* If the substitution doesn't succeed, die. This can happen
1237 if the back end emitted unrecognizable instructions or if
1238 target is exit block on some arches. */
1239 if (!redirect_jump (insn, block_label (new_bb), 0))
1241 gcc_assert (new_bb == EXIT_BLOCK_PTR);
1242 return false;
1246 return true;
1250 /* Redirect edge representing branch of (un)conditional jump or tablejump,
1251 NULL on failure */
1252 static edge
1253 redirect_branch_edge (edge e, basic_block target)
1255 rtx old_label = BB_HEAD (e->dest);
1256 basic_block src = e->src;
1257 rtx insn = BB_END (src);
1259 /* We can only redirect non-fallthru edges of jump insn. */
1260 if (e->flags & EDGE_FALLTHRU)
1261 return NULL;
1262 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1263 return NULL;
1265 if (!currently_expanding_to_rtl)
1267 if (!patch_jump_insn (insn, old_label, target))
1268 return NULL;
1270 else
1271 /* When expanding this BB might actually contain multiple
1272 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1273 Redirect all of those that match our label. */
1274 FOR_BB_INSNS (src, insn)
1275 if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
1276 return NULL;
1278 if (dump_file)
1279 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
1280 e->src->index, e->dest->index, target->index);
1282 if (e->dest != target)
1283 e = redirect_edge_succ_nodup (e, target);
1285 return e;
1288 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
1289 expense of adding new instructions or reordering basic blocks.
1291 Function can be also called with edge destination equivalent to the TARGET.
1292 Then it should try the simplifications and do nothing if none is possible.
1294 Return edge representing the branch if transformation succeeded. Return NULL
1295 on failure.
1296 We still return NULL in case E already destinated TARGET and we didn't
1297 managed to simplify instruction stream. */
1299 static edge
1300 rtl_redirect_edge_and_branch (edge e, basic_block target)
1302 edge ret;
1303 basic_block src = e->src;
1305 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1306 return NULL;
1308 if (e->dest == target)
1309 return e;
1311 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1313 df_set_bb_dirty (src);
1314 return ret;
1317 ret = redirect_branch_edge (e, target);
1318 if (!ret)
1319 return NULL;
1321 df_set_bb_dirty (src);
1322 return ret;
1325 /* Like force_nonfallthru below, but additionally performs redirection
1326 Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
1327 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1328 simple_return_rtx, indicating which kind of returnjump to create.
1329 It should be NULL otherwise. */
1331 basic_block
1332 force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1334 basic_block jump_block, new_bb = NULL, src = e->src;
1335 rtx note;
1336 edge new_edge;
1337 int abnormal_edge_flags = 0;
1338 bool asm_goto_edge = false;
1339 int loc;
1341 /* In the case the last instruction is conditional jump to the next
1342 instruction, first redirect the jump itself and then continue
1343 by creating a basic block afterwards to redirect fallthru edge. */
1344 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1345 && any_condjump_p (BB_END (e->src))
1346 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1348 rtx note;
1349 edge b = unchecked_make_edge (e->src, target, 0);
1350 bool redirected;
1352 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1353 gcc_assert (redirected);
1355 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1356 if (note)
1358 int prob = INTVAL (XEXP (note, 0));
1360 b->probability = prob;
1361 b->count = e->count * prob / REG_BR_PROB_BASE;
1362 e->probability -= e->probability;
1363 e->count -= b->count;
1364 if (e->probability < 0)
1365 e->probability = 0;
1366 if (e->count < 0)
1367 e->count = 0;
1371 if (e->flags & EDGE_ABNORMAL)
1373 /* Irritating special case - fallthru edge to the same block as abnormal
1374 edge.
1375 We can't redirect abnormal edge, but we still can split the fallthru
1376 one and create separate abnormal edge to original destination.
1377 This allows bb-reorder to make such edge non-fallthru. */
1378 gcc_assert (e->dest == target);
1379 abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1380 e->flags &= EDGE_FALLTHRU;
1382 else
1384 gcc_assert (e->flags & EDGE_FALLTHRU);
1385 if (e->src == ENTRY_BLOCK_PTR)
1387 /* We can't redirect the entry block. Create an empty block
1388 at the start of the function which we use to add the new
1389 jump. */
1390 edge tmp;
1391 edge_iterator ei;
1392 bool found = false;
1394 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1396 /* Change the existing edge's source to be the new block, and add
1397 a new edge from the entry block to the new block. */
1398 e->src = bb;
1399 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1401 if (tmp == e)
1403 ENTRY_BLOCK_PTR->succs->unordered_remove (ei.index);
1404 found = true;
1405 break;
1407 else
1408 ei_next (&ei);
1411 gcc_assert (found);
1413 vec_safe_push (bb->succs, e);
1414 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1418 /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
1419 don't point to the target or fallthru label. */
1420 if (JUMP_P (BB_END (e->src))
1421 && target != EXIT_BLOCK_PTR
1422 && (e->flags & EDGE_FALLTHRU)
1423 && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
1425 int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
1426 bool adjust_jump_target = false;
1428 for (i = 0; i < n; ++i)
1430 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
1432 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
1433 XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1434 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
1435 adjust_jump_target = true;
1437 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
1438 asm_goto_edge = true;
1440 if (adjust_jump_target)
1442 rtx insn = BB_END (e->src), note;
1443 rtx old_label = BB_HEAD (e->dest);
1444 rtx new_label = BB_HEAD (target);
1446 if (JUMP_LABEL (insn) == old_label)
1448 JUMP_LABEL (insn) = new_label;
1449 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1450 if (note)
1451 remove_note (insn, note);
1453 else
1455 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1456 if (note)
1457 remove_note (insn, note);
1458 if (JUMP_LABEL (insn) != new_label
1459 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1460 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1462 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1463 != NULL_RTX)
1464 XEXP (note, 0) = new_label;
1468 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1470 gcov_type count = e->count;
1471 int probability = e->probability;
1472 /* Create the new structures. */
1474 /* If the old block ended with a tablejump, skip its table
1475 by searching forward from there. Otherwise start searching
1476 forward from the last instruction of the old block. */
1477 if (!tablejump_p (BB_END (e->src), NULL, &note))
1478 note = BB_END (e->src);
1479 note = NEXT_INSN (note);
1481 jump_block = create_basic_block (note, NULL, e->src);
1482 jump_block->count = count;
1483 jump_block->frequency = EDGE_FREQUENCY (e);
1485 /* Make sure new block ends up in correct hot/cold section. */
1487 BB_COPY_PARTITION (jump_block, e->src);
1488 if (flag_reorder_blocks_and_partition
1489 && targetm_common.have_named_sections
1490 && JUMP_P (BB_END (jump_block))
1491 && !any_condjump_p (BB_END (jump_block))
1492 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1493 add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);
1495 /* Wire edge in. */
1496 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1497 new_edge->probability = probability;
1498 new_edge->count = count;
1500 /* Redirect old edge. */
1501 redirect_edge_pred (e, jump_block);
1502 e->probability = REG_BR_PROB_BASE;
1504 /* If asm goto has any label refs to target's label,
1505 add also edge from asm goto bb to target. */
1506 if (asm_goto_edge)
1508 new_edge->probability /= 2;
1509 new_edge->count /= 2;
1510 jump_block->count /= 2;
1511 jump_block->frequency /= 2;
1512 new_edge = make_edge (new_edge->src, target,
1513 e->flags & ~EDGE_FALLTHRU);
1514 new_edge->probability = probability - probability / 2;
1515 new_edge->count = count - count / 2;
1518 new_bb = jump_block;
1520 else
1521 jump_block = e->src;
1523 loc = e->goto_locus;
1524 e->flags &= ~EDGE_FALLTHRU;
1525 if (target == EXIT_BLOCK_PTR)
1527 if (jump_label == ret_rtx)
1529 #ifdef HAVE_return
1530 emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
1531 #else
1532 gcc_unreachable ();
1533 #endif
1535 else
1537 gcc_assert (jump_label == simple_return_rtx);
1538 #ifdef HAVE_simple_return
1539 emit_jump_insn_after_setloc (gen_simple_return (),
1540 BB_END (jump_block), loc);
1541 #else
1542 gcc_unreachable ();
1543 #endif
1545 set_return_jump_label (BB_END (jump_block));
1547 else
1549 rtx label = block_label (target);
1550 emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
1551 JUMP_LABEL (BB_END (jump_block)) = label;
1552 LABEL_NUSES (label)++;
1555 emit_barrier_after (BB_END (jump_block));
1556 redirect_edge_succ_nodup (e, target);
1558 if (abnormal_edge_flags)
1559 make_edge (src, target, abnormal_edge_flags);
1561 df_mark_solutions_dirty ();
1562 return new_bb;
1565 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1566 (and possibly create new basic block) to make edge non-fallthru.
1567 Return newly created BB or NULL if none. */
1569 static basic_block
1570 rtl_force_nonfallthru (edge e)
1572 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1575 /* Redirect edge even at the expense of creating new jump insn or
1576 basic block. Return new basic block if created, NULL otherwise.
1577 Conversion must be possible. */
1579 static basic_block
1580 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1582 if (redirect_edge_and_branch (e, target)
1583 || e->dest == target)
1584 return NULL;
1586 /* In case the edge redirection failed, try to force it to be non-fallthru
1587 and redirect newly created simplejump. */
1588 df_set_bb_dirty (e->src);
1589 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1592 /* The given edge should potentially be a fallthru edge. If that is in
1593 fact true, delete the jump and barriers that are in the way. */
1595 static void
1596 rtl_tidy_fallthru_edge (edge e)
1598 rtx q;
1599 basic_block b = e->src, c = b->next_bb;
1601 /* ??? In a late-running flow pass, other folks may have deleted basic
1602 blocks by nopping out blocks, leaving multiple BARRIERs between here
1603 and the target label. They ought to be chastised and fixed.
1605 We can also wind up with a sequence of undeletable labels between
1606 one block and the next.
1608 So search through a sequence of barriers, labels, and notes for
1609 the head of block C and assert that we really do fall through. */
1611 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1612 if (INSN_P (q))
1613 return;
1615 /* Remove what will soon cease being the jump insn from the source block.
1616 If block B consisted only of this single jump, turn it into a deleted
1617 note. */
1618 q = BB_END (b);
1619 if (JUMP_P (q)
1620 && onlyjump_p (q)
1621 && (any_uncondjump_p (q)
1622 || single_succ_p (b)))
1624 #ifdef HAVE_cc0
1625 /* If this was a conditional jump, we need to also delete
1626 the insn that set cc0. */
1627 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1628 q = PREV_INSN (q);
1629 #endif
1631 q = PREV_INSN (q);
1634 /* Selectively unlink the sequence. */
1635 if (q != PREV_INSN (BB_HEAD (c)))
1636 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1638 e->flags |= EDGE_FALLTHRU;
1641 /* Should move basic block BB after basic block AFTER. NIY. */
1643 static bool
1644 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1645 basic_block after ATTRIBUTE_UNUSED)
1647 return false;
1650 /* Split a (typically critical) edge. Return the new block.
1651 The edge must not be abnormal.
1653 ??? The code generally expects to be called on critical edges.
1654 The case of a block ending in an unconditional jump to a
1655 block with multiple predecessors is not handled optimally. */
1657 static basic_block
1658 rtl_split_edge (edge edge_in)
1660 basic_block bb;
1661 rtx before;
1663 /* Abnormal edges cannot be split. */
1664 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1666 /* We are going to place the new block in front of edge destination.
1667 Avoid existence of fallthru predecessors. */
1668 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1670 edge e = find_fallthru_edge (edge_in->dest->preds);
1672 if (e)
1673 force_nonfallthru (e);
1676 /* Create the basic block note. */
1677 if (edge_in->dest != EXIT_BLOCK_PTR)
1678 before = BB_HEAD (edge_in->dest);
1679 else
1680 before = NULL_RTX;
1682 /* If this is a fall through edge to the exit block, the blocks might be
1683 not adjacent, and the right place is after the source. */
1684 if ((edge_in->flags & EDGE_FALLTHRU) && edge_in->dest == EXIT_BLOCK_PTR)
1686 before = NEXT_INSN (BB_END (edge_in->src));
1687 bb = create_basic_block (before, NULL, edge_in->src);
1688 BB_COPY_PARTITION (bb, edge_in->src);
1690 else
1692 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1693 /* ??? Why not edge_in->dest->prev_bb here? */
1694 BB_COPY_PARTITION (bb, edge_in->dest);
1697 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1699 /* For non-fallthru edges, we must adjust the predecessor's
1700 jump instruction to target our new block. */
1701 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1703 edge redirected = redirect_edge_and_branch (edge_in, bb);
1704 gcc_assert (redirected);
1706 else
1708 if (edge_in->src != ENTRY_BLOCK_PTR)
1710 /* For asm goto even splitting of fallthru edge might
1711 need insn patching, as other labels might point to the
1712 old label. */
1713 rtx last = BB_END (edge_in->src);
1714 if (last
1715 && JUMP_P (last)
1716 && edge_in->dest != EXIT_BLOCK_PTR
1717 && extract_asm_operands (PATTERN (last)) != NULL_RTX
1718 && patch_jump_insn (last, before, bb))
1719 df_set_bb_dirty (edge_in->src);
1721 redirect_edge_succ (edge_in, bb);
1724 return bb;
1727 /* Queue instructions for insertion on an edge between two basic blocks.
1728 The new instructions and basic blocks (if any) will not appear in the
1729 CFG until commit_edge_insertions is called. */
1731 void
1732 insert_insn_on_edge (rtx pattern, edge e)
1734 /* We cannot insert instructions on an abnormal critical edge.
1735 It will be easier to find the culprit if we die now. */
1736 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1738 if (e->insns.r == NULL_RTX)
1739 start_sequence ();
1740 else
1741 push_to_sequence (e->insns.r);
1743 emit_insn (pattern);
1745 e->insns.r = get_insns ();
1746 end_sequence ();
1749 /* Update the CFG for the instructions queued on edge E. */
1751 void
1752 commit_one_edge_insertion (edge e)
1754 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1755 basic_block bb;
1757 /* Pull the insns off the edge now since the edge might go away. */
1758 insns = e->insns.r;
1759 e->insns.r = NULL_RTX;
1761 /* Figure out where to put these insns. If the destination has
1762 one predecessor, insert there. Except for the exit block. */
1763 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
1765 bb = e->dest;
1767 /* Get the location correct wrt a code label, and "nice" wrt
1768 a basic block note, and before everything else. */
1769 tmp = BB_HEAD (bb);
1770 if (LABEL_P (tmp))
1771 tmp = NEXT_INSN (tmp);
1772 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1773 tmp = NEXT_INSN (tmp);
1774 if (tmp == BB_HEAD (bb))
1775 before = tmp;
1776 else if (tmp)
1777 after = PREV_INSN (tmp);
1778 else
1779 after = get_last_insn ();
1782 /* If the source has one successor and the edge is not abnormal,
1783 insert there. Except for the entry block. */
1784 else if ((e->flags & EDGE_ABNORMAL) == 0
1785 && single_succ_p (e->src)
1786 && e->src != ENTRY_BLOCK_PTR)
1788 bb = e->src;
1790 /* It is possible to have a non-simple jump here. Consider a target
1791 where some forms of unconditional jumps clobber a register. This
1792 happens on the fr30 for example.
1794 We know this block has a single successor, so we can just emit
1795 the queued insns before the jump. */
1796 if (JUMP_P (BB_END (bb)))
1797 before = BB_END (bb);
1798 else
1800 /* We'd better be fallthru, or we've lost track of what's what. */
1801 gcc_assert (e->flags & EDGE_FALLTHRU);
1803 after = BB_END (bb);
1807 /* Otherwise we must split the edge. */
1808 else
1810 bb = split_edge (e);
1811 after = BB_END (bb);
1813 if (flag_reorder_blocks_and_partition
1814 && targetm_common.have_named_sections
1815 && e->src != ENTRY_BLOCK_PTR
1816 && BB_PARTITION (e->src) == BB_COLD_PARTITION
1817 && !(e->flags & EDGE_CROSSING)
1818 && JUMP_P (after)
1819 && !any_condjump_p (after)
1820 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1821 add_reg_note (after, REG_CROSSING_JUMP, NULL_RTX);
1824 /* Now that we've found the spot, do the insertion. */
1825 if (before)
1827 emit_insn_before_noloc (insns, before, bb);
1828 last = prev_nonnote_insn (before);
1830 else
1831 last = emit_insn_after_noloc (insns, after, bb);
1833 if (returnjump_p (last))
1835 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1836 This is not currently a problem because this only happens
1837 for the (single) epilogue, which already has a fallthru edge
1838 to EXIT. */
1840 e = single_succ_edge (bb);
1841 gcc_assert (e->dest == EXIT_BLOCK_PTR
1842 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
1844 e->flags &= ~EDGE_FALLTHRU;
1845 emit_barrier_after (last);
1847 if (before)
1848 delete_insn (before);
1850 else
1851 gcc_assert (!JUMP_P (last));
1854 /* Update the CFG for all queued instructions. */
1856 void
1857 commit_edge_insertions (void)
1859 basic_block bb;
1861 #ifdef ENABLE_CHECKING
1862 verify_flow_info ();
1863 #endif
1865 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1867 edge e;
1868 edge_iterator ei;
1870 FOR_EACH_EDGE (e, ei, bb->succs)
1871 if (e->insns.r)
1872 commit_one_edge_insertion (e);
1877 /* Print out RTL-specific basic block information (live information
1878 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
1879 documented in dumpfile.h. */
1881 static void
1882 rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
1884 rtx insn;
1885 rtx last;
1886 char *s_indent;
1888 s_indent = (char *) alloca ((size_t) indent + 1);
1889 memset (s_indent, ' ', (size_t) indent);
1890 s_indent[indent] = '\0';
1892 if (df && (flags & TDF_DETAILS))
1894 df_dump_top (bb, outf);
1895 putc ('\n', outf);
1898 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
1899 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1900 insn = NEXT_INSN (insn))
1902 if (flags & TDF_DETAILS)
1903 df_dump_insn_top (insn, outf);
1904 if (! (flags & TDF_SLIM))
1905 print_rtl_single (outf, insn);
1906 else
1907 dump_insn_slim (outf, insn);
1908 if (flags & TDF_DETAILS)
1909 df_dump_insn_bottom (insn, outf);
1912 if (df && (flags & TDF_DETAILS))
1914 df_dump_bottom (bb, outf);
1915 putc ('\n', outf);
1920 /* Like dump_function_to_file, but for RTL. Print out dataflow information
1921 for the start of each basic block. FLAGS are the TDF_* masks documented
1922 in dumpfile.h. */
1924 void
1925 print_rtl_with_bb (FILE *outf, const_rtx rtx_first, int flags)
1927 const_rtx tmp_rtx;
1928 if (rtx_first == 0)
1929 fprintf (outf, "(nil)\n");
1930 else
1932 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1933 int max_uid = get_max_uid ();
1934 basic_block *start = XCNEWVEC (basic_block, max_uid);
1935 basic_block *end = XCNEWVEC (basic_block, max_uid);
1936 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
1937 basic_block bb;
1939 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
1940 insns, but the CFG is not maintained so the basic block info
1941 is not reliable. Therefore it's omitted from the dumps. */
1942 if (! (cfun->curr_properties & PROP_cfg))
1943 flags &= ~TDF_BLOCKS;
1945 if (df)
1946 df_dump_start (outf);
1948 if (flags & TDF_BLOCKS)
1950 FOR_EACH_BB_REVERSE (bb)
1952 rtx x;
1954 start[INSN_UID (BB_HEAD (bb))] = bb;
1955 end[INSN_UID (BB_END (bb))] = bb;
1956 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1958 enum bb_state state = IN_MULTIPLE_BB;
1960 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1961 state = IN_ONE_BB;
1962 in_bb_p[INSN_UID (x)] = state;
1964 if (x == BB_END (bb))
1965 break;
1970 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1972 if (flags & TDF_BLOCKS)
1974 bb = start[INSN_UID (tmp_rtx)];
1975 if (bb != NULL)
1977 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
1978 if (df && (flags & TDF_DETAILS))
1979 df_dump_top (bb, outf);
1982 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1983 && !NOTE_P (tmp_rtx)
1984 && !BARRIER_P (tmp_rtx))
1985 fprintf (outf, ";; Insn is not within a basic block\n");
1986 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1987 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1990 if (flags & TDF_DETAILS)
1991 df_dump_insn_top (tmp_rtx, outf);
1992 if (! (flags & TDF_SLIM))
1993 print_rtl_single (outf, tmp_rtx);
1994 else
1995 dump_insn_slim (outf, tmp_rtx);
1996 if (flags & TDF_DETAILS)
1997 df_dump_insn_bottom (tmp_rtx, outf);
1999 if (flags & TDF_BLOCKS)
2001 bb = end[INSN_UID (tmp_rtx)];
2002 if (bb != NULL)
2004 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
2005 if (df && (flags & TDF_DETAILS))
2006 df_dump_bottom (bb, outf);
2007 putc ('\n', outf);
2012 free (start);
2013 free (end);
2014 free (in_bb_p);
2018 /* Update the branch probability of BB if a REG_BR_PROB is present. */
2020 void
2021 update_br_prob_note (basic_block bb)
2023 rtx note;
2024 if (!JUMP_P (BB_END (bb)))
2025 return;
2026 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2027 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
2028 return;
2029 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
2032 /* Get the last insn associated with block BB (that includes barriers and
2033 tablejumps after BB). */
2035 get_last_bb_insn (basic_block bb)
2037 rtx tmp;
2038 rtx end = BB_END (bb);
2040 /* Include any jump table following the basic block. */
2041 if (tablejump_p (end, NULL, &tmp))
2042 end = tmp;
2044 /* Include any barriers that may follow the basic block. */
2045 tmp = next_nonnote_insn_bb (end);
2046 while (tmp && BARRIER_P (tmp))
2048 end = tmp;
2049 tmp = next_nonnote_insn_bb (end);
2052 return end;
2055 /* Verify the CFG and RTL consistency common for both underlying RTL and
2056 cfglayout RTL.
2058 Currently it does following checks:
2060 - overlapping of basic blocks
2061 - insns with wrong BLOCK_FOR_INSN pointers
2062 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2063 - tails of basic blocks (ensure that boundary is necessary)
2064 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2065 and NOTE_INSN_BASIC_BLOCK
2066 - verify that no fall_thru edge crosses hot/cold partition boundaries
2067 - verify that there are no pending RTL branch predictions
2069 In future it can be extended check a lot of other stuff as well
2070 (reachability of basic blocks, life information, etc. etc.). */
2072 static int
2073 rtl_verify_flow_info_1 (void)
2075 rtx x;
2076 int err = 0;
2077 basic_block bb;
2079 /* Check the general integrity of the basic blocks. */
2080 FOR_EACH_BB_REVERSE (bb)
2082 rtx insn;
2084 if (!(bb->flags & BB_RTL))
2086 error ("BB_RTL flag not set for block %d", bb->index);
2087 err = 1;
2090 FOR_BB_INSNS (bb, insn)
2091 if (BLOCK_FOR_INSN (insn) != bb)
2093 error ("insn %d basic block pointer is %d, should be %d",
2094 INSN_UID (insn),
2095 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2096 bb->index);
2097 err = 1;
2100 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2101 if (!BARRIER_P (insn)
2102 && BLOCK_FOR_INSN (insn) != NULL)
2104 error ("insn %d in header of bb %d has non-NULL basic block",
2105 INSN_UID (insn), bb->index);
2106 err = 1;
2108 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2109 if (!BARRIER_P (insn)
2110 && BLOCK_FOR_INSN (insn) != NULL)
2112 error ("insn %d in footer of bb %d has non-NULL basic block",
2113 INSN_UID (insn), bb->index);
2114 err = 1;
2118 /* Now check the basic blocks (boundaries etc.) */
2119 FOR_EACH_BB_REVERSE (bb)
2121 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2122 int n_eh = 0, n_abnormal = 0;
2123 edge e, fallthru = NULL;
2124 rtx note;
2125 edge_iterator ei;
2127 if (JUMP_P (BB_END (bb))
2128 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2129 && EDGE_COUNT (bb->succs) >= 2
2130 && any_condjump_p (BB_END (bb)))
2132 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
2133 && profile_status != PROFILE_ABSENT)
2135 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
2136 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
2137 err = 1;
2140 FOR_EACH_EDGE (e, ei, bb->succs)
2142 bool is_crossing;
2144 if (e->flags & EDGE_FALLTHRU)
2145 n_fallthru++, fallthru = e;
2147 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2148 && e->src != ENTRY_BLOCK_PTR
2149 && e->dest != EXIT_BLOCK_PTR);
2150 if (e->flags & EDGE_CROSSING)
2152 if (!is_crossing)
2154 error ("EDGE_CROSSING incorrectly set across same section");
2155 err = 1;
2157 if (e->flags & EDGE_FALLTHRU)
2159 error ("fallthru edge crosses section boundary in bb %i",
2160 e->src->index);
2161 err = 1;
2163 if (e->flags & EDGE_EH)
2165 error ("EH edge crosses section boundary in bb %i",
2166 e->src->index);
2167 err = 1;
2170 else if (is_crossing)
2172 error ("EDGE_CROSSING missing across section boundary");
2173 err = 1;
2176 if ((e->flags & ~(EDGE_DFS_BACK
2177 | EDGE_CAN_FALLTHRU
2178 | EDGE_IRREDUCIBLE_LOOP
2179 | EDGE_LOOP_EXIT
2180 | EDGE_CROSSING
2181 | EDGE_PRESERVE)) == 0)
2182 n_branch++;
2184 if (e->flags & EDGE_ABNORMAL_CALL)
2185 n_abnormal_call++;
2187 if (e->flags & EDGE_SIBCALL)
2188 n_sibcall++;
2190 if (e->flags & EDGE_EH)
2191 n_eh++;
2193 if (e->flags & EDGE_ABNORMAL)
2194 n_abnormal++;
2197 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2199 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2200 err = 1;
2202 if (n_eh > 1)
2204 error ("too many exception handling edges in bb %i", bb->index);
2205 err = 1;
2207 if (n_branch
2208 && (!JUMP_P (BB_END (bb))
2209 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2210 || any_condjump_p (BB_END (bb))))))
2212 error ("too many outgoing branch edges from bb %i", bb->index);
2213 err = 1;
2215 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2217 error ("fallthru edge after unconditional jump in bb %i", bb->index);
2218 err = 1;
2220 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2222 error ("wrong number of branch edges after unconditional jump"
2223 " in bb %i", bb->index);
2224 err = 1;
2226 if (n_branch != 1 && any_condjump_p (BB_END (bb))
2227 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2229 error ("wrong amount of branch edges after conditional jump"
2230 " in bb %i", bb->index);
2231 err = 1;
2233 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2235 error ("abnormal call edges for non-call insn in bb %i", bb->index);
2236 err = 1;
2238 if (n_sibcall && !CALL_P (BB_END (bb)))
2240 error ("sibcall edges for non-call insn in bb %i", bb->index);
2241 err = 1;
2243 if (n_abnormal > n_eh
2244 && !(CALL_P (BB_END (bb))
2245 && n_abnormal == n_abnormal_call + n_sibcall)
2246 && (!JUMP_P (BB_END (bb))
2247 || any_condjump_p (BB_END (bb))
2248 || any_uncondjump_p (BB_END (bb))))
2250 error ("abnormal edges for no purpose in bb %i", bb->index);
2251 err = 1;
2254 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
2255 /* We may have a barrier inside a basic block before dead code
2256 elimination. There is no BLOCK_FOR_INSN field in a barrier. */
2257 if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
2259 debug_rtx (x);
2260 if (! BLOCK_FOR_INSN (x))
2261 error
2262 ("insn %d inside basic block %d but block_for_insn is NULL",
2263 INSN_UID (x), bb->index);
2264 else
2265 error
2266 ("insn %d inside basic block %d but block_for_insn is %i",
2267 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
2269 err = 1;
2272 /* OK pointers are correct. Now check the header of basic
2273 block. It ought to contain optional CODE_LABEL followed
2274 by NOTE_BASIC_BLOCK. */
2275 x = BB_HEAD (bb);
2276 if (LABEL_P (x))
2278 if (BB_END (bb) == x)
2280 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2281 bb->index);
2282 err = 1;
2285 x = NEXT_INSN (x);
2288 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2290 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2291 bb->index);
2292 err = 1;
2295 if (BB_END (bb) == x)
2296 /* Do checks for empty blocks here. */
2298 else
2299 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2301 if (NOTE_INSN_BASIC_BLOCK_P (x))
2303 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2304 INSN_UID (x), bb->index);
2305 err = 1;
2308 if (x == BB_END (bb))
2309 break;
2311 if (control_flow_insn_p (x))
2313 error ("in basic block %d:", bb->index);
2314 fatal_insn ("flow control insn inside a basic block", x);
2319 /* Clean up. */
2320 return err;
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does following checks:
   - all checks of rtl_verify_flow_info_1
   - test head/end pointers
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers
   - check that all fallthru edge points to the adjacent blocks.

   Returns nonzero if any error was found; hard inconsistencies are
   reported via fatal_insn/internal_error and do not return.  */

static int
rtl_verify_flow_info (void)
{
  basic_block bb;
  int err = rtl_verify_flow_info_1 ();
  rtx x;
  /* Walk the insn chain backwards, one basic block at a time; LAST_HEAD
     tracks the first insn not yet accounted for by a later block.  */
  rtx last_head = get_last_insn ();
  basic_block *bb_info;
  int num_bb_notes;
  const rtx rtx_first = get_insns ();
  basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
  const int max_uid = get_max_uid ();

  /* Map INSN_UID -> owning block, to detect insns claimed twice.  */
  bb_info = XCNEWVEC (basic_block, max_uid);

  FOR_EACH_BB_REVERSE (bb)
    {
      edge e;
      rtx head = BB_HEAD (bb);
      rtx end = BB_END (bb);

      for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* Verify the end of the basic block is in the INSN chain.  */
	  if (x == end)
	    break;

	  /* And that the code outside of basic blocks has NULL bb field.  */
	  if (!BARRIER_P (x)
	      && BLOCK_FOR_INSN (x) != NULL)
	    {
	      error ("insn %d outside of basic blocks has non-NULL bb field",
		     INSN_UID (x));
	      err = 1;
	    }
	}

      if (!x)
	{
	  error ("end insn %d for block %d not found in the insn stream",
		 INSN_UID (end), bb->index);
	  err = 1;
	}

      /* Work backwards from the end to the head of the basic block
	 to verify the head is in the RTL chain.  */
      for (; x != NULL_RTX; x = PREV_INSN (x))
	{
	  /* While walking over the insn chain, verify insns appear
	     in only one basic block.  */
	  if (bb_info[INSN_UID (x)] != NULL)
	    {
	      error ("insn %d is in multiple basic blocks (%d and %d)",
		     INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
	      err = 1;
	    }

	  bb_info[INSN_UID (x)] = bb;

	  if (x == head)
	    break;
	}
      if (!x)
	{
	  error ("head insn %d for block %d not found in the insn stream",
		 INSN_UID (head), bb->index);
	  err = 1;
	}

      /* The next (earlier) block's scan resumes just before this head.  */
      last_head = PREV_INSN (x);

      e = find_fallthru_edge (bb->succs);
      if (!e)
	{
	  rtx insn;

	  /* Ensure existence of barrier in BB with no fallthru edges.  */
	  for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
	    {
	      if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
		{
		  error ("missing barrier after block %i", bb->index);
		  err = 1;
		  break;
		}
	      if (BARRIER_P (insn))
		break;
	    }
	}
      else if (e->src != ENTRY_BLOCK_PTR
	       && e->dest != EXIT_BLOCK_PTR)
	{
	  rtx insn;

	  /* A fallthru edge must connect chain-adjacent blocks, and only
	     notes/labels may sit between them.  */
	  if (e->src->next_bb != e->dest)
	    {
	      error
		("verify_flow_info: Incorrect blocks for fallthru %i->%i",
		 e->src->index, e->dest->index);
	      err = 1;
	    }
	  else
	    for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
		 insn = NEXT_INSN (insn))
	      if (BARRIER_P (insn) || INSN_P (insn))
		{
		  error ("verify_flow_info: Incorrect fallthru %i->%i",
			 e->src->index, e->dest->index);
		  fatal_insn ("wrong insn in the fallthru edge", insn);
		  err = 1;
		}
	}
    }

  /* Whatever precedes the first basic block must not claim a block.  */
  for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
    {
      /* Check that the code before the first basic block has NULL
	 bb field.  */
      if (!BARRIER_P (x)
	  && BLOCK_FOR_INSN (x) != NULL)
	{
	  error ("insn %d outside of basic blocks has non-NULL bb field",
		 INSN_UID (x));
	  err = 1;
	}
    }
  free (bb_info);

  num_bb_notes = 0;
  last_bb_seen = ENTRY_BLOCK_PTR;

  /* Forward pass: check NOTE_INSN_BASIC_BLOCK ordering, insns outside
     blocks, and that return jumps are followed by barriers.  */
  for (x = rtx_first; x; x = NEXT_INSN (x))
    {
      if (NOTE_INSN_BASIC_BLOCK_P (x))
	{
	  bb = NOTE_BASIC_BLOCK (x);

	  num_bb_notes++;
	  if (bb != last_bb_seen->next_bb)
	    internal_error ("basic blocks not laid down consecutively");

	  curr_bb = last_bb_seen = bb;
	}

      if (!curr_bb)
	{
	  switch (GET_CODE (x))
	    {
	    case BARRIER:
	    case NOTE:
	      break;

	    case CODE_LABEL:
	      /* An addr_vec is placed outside any basic block.  */
	      if (NEXT_INSN (x)
		  && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
		x = NEXT_INSN (x);

	      /* But in any case, non-deletable labels can appear anywhere.  */
	      break;

	    default:
	      fatal_insn ("insn outside basic block", x);
	    }
	}

      if (JUMP_P (x)
	  && returnjump_p (x) && ! condjump_p (x)
	  && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
	    fatal_insn ("return not followed by barrier", x);
      if (curr_bb && x == BB_END (curr_bb))
	curr_bb = NULL;
    }

  if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
    internal_error
      ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
       num_bb_notes, n_basic_blocks);

   return err;
}
/* Assume that the preceding pass has possibly eliminated jump instructions
   or converted the unconditional jumps.  Eliminate the edges from CFG.
   Return true if any edges are eliminated.

   BB is the block whose outgoing edges are reconsidered against its
   (possibly simplified) last real insn.  */

bool
purge_dead_edges (basic_block bb)
{
  edge e;
  rtx insn = BB_END (bb), note;
  bool purged = false;
  bool found;
  edge_iterator ei;

  /* Skip trailing debug insns and notes so INSN is the last insn that
     can actually affect control flow or trap.  */
  if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
    do
      insn = PREV_INSN (insn);
    while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));

  /* If this instruction cannot trap, remove REG_EH_REGION notes.  */
  if (NONJUMP_INSN_P (insn)
      && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
    {
      rtx eqnote;

      if (! may_trap_p (PATTERN (insn))
	  || ((eqnote = find_reg_equal_equiv_note (insn))
	      && ! may_trap_p (XEXP (eqnote, 0))))
	remove_note (insn, note);
    }

  /* Cleanup abnormal edges caused by exceptions or non-local gotos.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      bool remove = false;

      /* There are three types of edges we need to handle correctly here: EH
	 edges, abnormal call EH edges, and abnormal call non-EH edges.  The
	 latter can appear when nonlocal gotos are used.  */
      if (e->flags & EDGE_ABNORMAL_CALL)
	{
	  if (!CALL_P (insn))
	    remove = true;
	  else if (can_nonlocal_goto (insn))
	    ;
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    ;
	  else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
	    ;
	  else
	    remove = true;
	}
      else if (e->flags & EDGE_EH)
	remove = !can_throw_internal (insn);

      if (remove)
	{
	  remove_edge (e);
	  df_set_bb_dirty (bb);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  if (JUMP_P (insn))
    {
      rtx note;
      edge b,f;
      edge_iterator ei;

      /* We do care only about conditional jumps and simplejumps.  */
      if (!any_condjump_p (insn)
	  && !returnjump_p (insn)
	  && !simplejump_p (insn))
	return purged;

      /* Branch probability/prediction notes are defined only for
	 condjumps.  We've possibly turned condjump into simplejump.  */
      if (simplejump_p (insn))
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (note)
	    remove_note (insn, note);
	  while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
	    remove_note (insn, note);
	}

      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Avoid abnormal flags to leak from computed jumps turned
	     into simplejumps.  */

	  e->flags &= ~EDGE_ABNORMAL;

	  /* See if this edge is one we should keep.  */
	  if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
	    /* A conditional jump can fall through into the next
	       block, so we should keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest != EXIT_BLOCK_PTR
		   && BB_HEAD (e->dest) == JUMP_LABEL (insn))
	    /* If the destination block is the target of the jump,
	       keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
	    /* If the destination block is the exit block, and this
	       instruction is a return, then keep the edge.  */
	    {
	      ei_next (&ei);
	      continue;
	    }
	  else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
	    /* Keep the edges that correspond to exceptions thrown by
	       this instruction and rematerialize the EDGE_ABNORMAL
	       flag we just cleared above.  */
	    {
	      e->flags |= EDGE_ABNORMAL;
	      ei_next (&ei);
	      continue;
	    }

	  /* We do not need this edge.  */
	  df_set_bb_dirty (bb);
	  purged = true;
	  remove_edge (e);
	}

      if (EDGE_COUNT (bb->succs) == 0 || !purged)
	return purged;

      if (dump_file)
	fprintf (dump_file, "Purged edges from bb %i\n", bb->index);

      if (!optimize)
	return purged;

      /* Redistribute probabilities.  */
      if (single_succ_p (bb))
	{
	  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
	  single_succ_edge (bb)->count = bb->count;
	}
      else
	{
	  note = find_reg_note (insn, REG_BR_PROB, NULL);
	  if (!note)
	    return purged;

	  b = BRANCH_EDGE (bb);
	  f = FALLTHRU_EDGE (bb);
	  b->probability = INTVAL (XEXP (note, 0));
	  f->probability = REG_BR_PROB_BASE - b->probability;
	  b->count = bb->count * b->probability / REG_BR_PROB_BASE;
	  f->count = bb->count * f->probability / REG_BR_PROB_BASE;
	}

      return purged;
    }
  else if (CALL_P (insn) && SIBLING_CALL_P (insn))
    {
      /* First, there should not be any EH or ABCALL edges resulting
	 from non-local gotos and the like.  If there were, we shouldn't
	 have created the sibcall in the first place.  Second, there
	 should of course never have been a fallthru edge.  */
      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ_edge (bb)->flags
		  == (EDGE_SIBCALL | EDGE_ABNORMAL));

      return 0;
    }

  /* If we don't see a jump insn, we don't know exactly why the block would
     have been broken at this point.  Look for a simple, non-fallthru edge,
     as these are only created by conditional branches.  If we find such an
     edge we know that there used to be a jump here and can then safely
     remove all non-fallthru edges.  */
  found = false;
  FOR_EACH_EDGE (e, ei, bb->succs)
    if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
      {
	found = true;
	break;
      }

  if (!found)
    return purged;

  /* Remove all but the fake and fallthru edges.  The fake edge may be
     the only successor for this block in the case of noreturn
     calls.  */
  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
	{
	  df_set_bb_dirty (bb);
	  remove_edge (e);
	  purged = true;
	}
      else
	ei_next (&ei);
    }

  gcc_assert (single_succ_p (bb));

  /* Only one successor remains; it gets the whole probability mass.  */
  single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
  single_succ_edge (bb)->count = bb->count;

  if (dump_file)
    fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
	     bb->index);
  return purged;
}
2735 /* Search all basic blocks for potentially dead edges and purge them. Return
2736 true if some edge has been eliminated. */
2738 bool
2739 purge_all_dead_edges (void)
2741 int purged = false;
2742 basic_block bb;
2744 FOR_EACH_BB (bb)
2746 bool purged_here = purge_dead_edges (bb);
2748 purged |= purged_here;
2751 return purged;
/* This is used by a few passes that emit some instructions after abnormal
   calls, moving the basic block's end, while they in fact do want to emit
   them on the fallthru edge.  Look for abnormal call edges, find backward
   the call in the block and insert the instructions on the edge instead.

   Similarly, handle instructions throwing exceptions internally.

   Return true when instructions have been found and inserted on edges.  */

bool
fixup_abnormal_edges (void)
{
  bool inserted = false;
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      /* Look for cases we are interested in - calls or instructions causing
	 exceptions.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & EDGE_ABNORMAL_CALL)
	    || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
		== (EDGE_ABNORMAL | EDGE_EH)))
	  break;

      /* Only act when the abnormal edge's source insn is no longer the
	 block's last insn, i.e. code was emitted after it.  */
      if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
	{
	  rtx insn;

	  /* Get past the new insns generated.  Allow notes, as the insns
	     may be already deleted.  */
	  insn = BB_END (bb);
	  while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
		 && !can_throw_internal (insn)
		 && insn != BB_HEAD (bb))
	    insn = PREV_INSN (insn);

	  if (CALL_P (insn) || can_throw_internal (insn))
	    {
	      rtx stop, next;

	      e = find_fallthru_edge (bb->succs);

	      /* Truncate the block at the call/trapping insn; everything
		 after it is moved onto the fallthru edge E.  */
	      stop = NEXT_INSN (BB_END (bb));
	      BB_END (bb) = insn;

	      for (insn = NEXT_INSN (insn); insn != stop; insn = next)
		{
		  next = NEXT_INSN (insn);
		  if (INSN_P (insn))
		    {
		      delete_insn (insn);

		      /* Sometimes there's still the return value USE.
			 If it's placed after a trapping call (i.e. that
			 call is the last insn anyway), we have no fallthru
			 edge.  Simply delete this use and don't try to insert
			 on the non-existent edge.  */
		      if (GET_CODE (PATTERN (insn)) != USE)
			{
			  /* We're not deleting it, we're moving it.  */
			  INSN_DELETED_P (insn) = 0;
			  PREV_INSN (insn) = NULL_RTX;
			  NEXT_INSN (insn) = NULL_RTX;

			  insert_insn_on_edge (insn, e);
			  inserted = true;
			}
		    }
		  else if (!BARRIER_P (insn))
		    set_block_for_insn (insn, NULL);
		}
	    }

	  /* It may be that we don't find any trapping insn.  In this
	     case we discovered quite late that the insn that had been
	     marked as can_throw_internal in fact couldn't trap at all.
	     So we should in fact delete the EH edges out of the block.  */
	  else
	    purge_dead_edges (bb);
	}
    }

  return inserted;
}
2843 /* Cut the insns from FIRST to LAST out of the insns stream. */
2846 unlink_insn_chain (rtx first, rtx last)
2848 rtx prevfirst = PREV_INSN (first);
2849 rtx nextlast = NEXT_INSN (last);
2851 PREV_INSN (first) = NULL;
2852 NEXT_INSN (last) = NULL;
2853 if (prevfirst)
2854 NEXT_INSN (prevfirst) = nextlast;
2855 if (nextlast)
2856 PREV_INSN (nextlast) = prevfirst;
2857 else
2858 set_last_insn (prevfirst);
2859 if (!prevfirst)
2860 set_first_insn (nextlast);
2861 return first;
/* Skip over inter-block insns occurring after BB which are typically
   associated with BB (e.g., barriers).  If there are any such insns,
   we return the last one.  Otherwise, we return the end of BB.  */

static rtx
skip_insns_after_block (basic_block bb)
{
  rtx insn, last_insn, next_head, prev;

  /* NEXT_HEAD bounds the scan: never walk into the next block.  */
  next_head = NULL_RTX;
  if (bb->next_bb != EXIT_BLOCK_PTR)
    next_head = BB_HEAD (bb->next_bb);

  for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
    {
      if (insn == next_head)
	break;

      switch (GET_CODE (insn))
	{
	case BARRIER:
	  /* Barriers belong to the block whose jump they follow.  */
	  last_insn = insn;
	  continue;

	case NOTE:
	  switch (NOTE_KIND (insn))
	    {
	    case NOTE_INSN_BLOCK_END:
	      gcc_unreachable ();
	      continue;
	    default:
	      /* Notes are skipped but not claimed as LAST_INSN.  */
	      continue;
	      break;
	    }
	  break;

	case CODE_LABEL:
	  /* A label followed by jump table data belongs to this block.  */
	  if (NEXT_INSN (insn)
	      && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
	    {
	      insn = NEXT_INSN (insn);
	      last_insn = insn;
	      continue;
	    }
	  break;

	default:
	  break;
	}

      /* Any other insn ends the scan.  */
      break;
    }

  /* It is possible to hit contradictory sequence.  For instance:

     jump_insn
     NOTE_INSN_BLOCK_BEG
     barrier

     Where barrier belongs to jump_insn, but the note does not.  This can be
     created by removing the basic block originally following
     NOTE_INSN_BLOCK_BEG.  In such case reorder the notes.  */
  for (insn = last_insn; insn != BB_END (bb); insn = prev)
    {
      prev = PREV_INSN (insn);
      if (NOTE_P (insn))
	switch (NOTE_KIND (insn))
	  {
	  case NOTE_INSN_BLOCK_END:
	    gcc_unreachable ();
	    break;
	  case NOTE_INSN_DELETED:
	  case NOTE_INSN_DELETED_LABEL:
	  case NOTE_INSN_DELETED_DEBUG_LABEL:
	    continue;
	  default:
	    /* Move the stray note past LAST_INSN so it stays outside
	       the span claimed for BB.  */
	    reorder_insns (insn, insn, last_insn);
	  }
    }

  return last_insn;
}
2948 /* Locate or create a label for a given basic block. */
2950 static rtx
2951 label_for_bb (basic_block bb)
2953 rtx label = BB_HEAD (bb);
2955 if (!LABEL_P (label))
2957 if (dump_file)
2958 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
2960 label = block_label (bb);
2963 return label;
/* Locate the effective beginning and end of the insn chain for each
   block, as defined by skip_insns_after_block above.  Insns before the
   first block become cfg_layout_function_header; inter-block insns are
   unlinked into the following block's BB_HEADER or the preceding
   block's BB_FOOTER; trailing insns become cfg_layout_function_footer.  */

static void
record_effective_endpoints (void)
{
  rtx next_insn;
  basic_block bb;
  rtx insn;

  /* Skip leading notes to find the first basic-block note.  */
  for (insn = get_insns ();
       insn
       && NOTE_P (insn)
       && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
       insn = NEXT_INSN (insn))
    continue;
  /* No basic blocks at all?  */
  gcc_assert (insn);

  if (PREV_INSN (insn))
    cfg_layout_function_header =
	    unlink_insn_chain (get_insns (), PREV_INSN (insn));
  else
    cfg_layout_function_header = NULL_RTX;

  /* NEXT_INSN tracks the first insn not yet attached to a block.  */
  next_insn = get_insns ();
  FOR_EACH_BB (bb)
    {
      rtx end;

      if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
	BB_HEADER (bb) = unlink_insn_chain (next_insn,
					    PREV_INSN (BB_HEAD (bb)));
      end = skip_insns_after_block (bb);
      if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
	BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
      next_insn = NEXT_INSN (BB_END (bb));
    }

  /* Whatever is left after the last block is the function footer.  */
  cfg_layout_function_footer = next_insn;
  if (cfg_layout_function_footer)
    cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
}
/* Pass execute function: switch the function into cfglayout mode.  */

static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
3017 static unsigned int
3018 outof_cfg_layout_mode (void)
3020 basic_block bb;
3022 FOR_EACH_BB (bb)
3023 if (bb->next_bb != EXIT_BLOCK_PTR)
3024 bb->aux = bb->next_bb;
3026 cfg_layout_finalize ();
3028 return 0;
/* Pass descriptor for entering cfglayout mode (positional initializer;
   field order must match struct rtl_opt_pass).  */

struct rtl_opt_pass pass_into_cfg_layout_mode =
{
 {
  RTL_PASS,
  "into_cfglayout",                     /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  into_cfg_layout_mode,                 /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CFG,                               /* tv_id */
  0,                                    /* properties_required */
  PROP_cfglayout,                       /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
/* Pass descriptor for leaving cfglayout mode (positional initializer;
   field order must match struct rtl_opt_pass).  */

struct rtl_opt_pass pass_outof_cfg_layout_mode =
{
 {
  RTL_PASS,
  "outof_cfglayout",                    /* name */
  OPTGROUP_NONE,                        /* optinfo_flags */
  NULL,                                 /* gate */
  outof_cfg_layout_mode,                /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_CFG,                               /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  PROP_cfglayout,                       /* properties_destroyed */
  0,                                    /* todo_flags_start */
  0                                     /* todo_flags_finish */
 }
};
/* Link the basic blocks in the correct order, compacting the basic
   block queue while at it.  If STAY_IN_CFGLAYOUT_MODE is false, this
   function also clears the basic block header and footer fields.

   This function is usually called after a pass (e.g. tracer) finishes
   some transformations while in cfglayout mode.  The required sequence
   of the basic blocks is in a linked list along the bb->aux field.
   This functions re-links the basic block prev_bb and next_bb pointers
   accordingly, and it compacts and renumbers the blocks.

   FIXME: This currently works only for RTL, but the only RTL-specific
   bits are the STAY_IN_CFGLAYOUT_MODE bits.  The tracer pass was moved
   to GIMPLE a long time ago, but it doesn't relink the basic block
   chain.  It could do that (to give better initial RTL) if this function
   is made IR-agnostic (and moved to cfganal.c or cfg.c while at it).  */

void
relink_block_chain (bool stay_in_cfglayout_mode)
{
  basic_block bb, prev_bb;
  int index;

  /* Maybe dump the re-ordered sequence.  */
  if (dump_file)
    {
      fprintf (dump_file, "Reordered sequence:\n");
      for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
	   bb;
	   bb = (basic_block) bb->aux, index++)
	{
	  fprintf (dump_file, " %i ", index);
	  if (get_bb_original (bb))
	    fprintf (dump_file, "duplicate of %i ",
		     get_bb_original (bb)->index);
	  else if (forwarder_block_p (bb)
		   && !LABEL_P (BB_HEAD (bb)))
	    fprintf (dump_file, "compensation ");
	  else
	    fprintf (dump_file, "bb %i ", bb->index);
	  fprintf (dump_file, " [%i]\n", bb->frequency);
	}
    }

  /* Now reorder the blocks: rewrite prev_bb/next_bb from the aux chain.  */
  prev_bb = ENTRY_BLOCK_PTR;
  bb = ENTRY_BLOCK_PTR->next_bb;
  for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
    {
      bb->prev_bb = prev_bb;
      prev_bb->next_bb = bb;
    }
  prev_bb->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = prev_bb;

  /* Then, clean up the aux fields.  */
  FOR_ALL_BB (bb)
    {
      bb->aux = NULL;
      if (!stay_in_cfglayout_mode)
	BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
    }

  /* Maybe reset the original copy tables, they are not valid anymore
     when we renumber the basic blocks in compact_blocks.  If we are
     going out of cfglayout mode, don't re-allocate the tables.  */
  free_original_copy_tables ();
  if (stay_in_cfglayout_mode)
    initialize_original_copy_tables ();

  /* Finally, put basic_block_info in the new order.  */
  compact_blocks ();
}
/* Given a reorder chain, rearrange the code to match.

   The desired block order is recorded in the bb->aux chain.  First the
   insn stream is re-threaded in that order (headers, block body,
   footers); then jumps are inverted, added or removed so every block's
   outgoing edges match the new physical layout.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
	insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      if (BB_HEADER (bb))
	{
	  if (insn)
	    NEXT_INSN (insn) = BB_HEADER (bb);
	  else
	    set_first_insn (BB_HEADER (bb));
	  PREV_INSN (BB_HEADER (bb)) = insn;
	  insn = BB_HEADER (bb);
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
      if (insn)
	NEXT_INSN (insn) = BB_HEAD (bb);
      else
	set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (BB_FOOTER (bb))
	{
	  NEXT_INSN (insn) = BB_FOOTER (bb);
	  PREV_INSN (BB_FOOTER (bb)) = insn;
	  while (NEXT_INSN (insn))
	    insn = NEXT_INSN (insn);
	}
    }

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      rtx ret_label = NULL_RTX;
      basic_block nb, src_bb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
	continue;

      /* Find the old fallthru edge, and another non-EH edge for
	 a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_FALLTHRU)
	  e_fall = e;
	else if (! (e->flags & EDGE_EH))
	  e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
	{
	  ret_label = JUMP_LABEL (bb_end_insn);
	  if (any_condjump_p (bb_end_insn))
	    {
	      /* This might happen if the conditional jump has side
		 effects and could therefore not be optimized away.
		 Make the basic block to end with a barrier in order
		 to prevent rtl_verify_flow_info from complaining.  */
	      if (!e_fall)
		{
		  gcc_assert (!onlyjump_p (bb_end_insn)
			      || returnjump_p (bb_end_insn));
		  BB_FOOTER (bb) = emit_barrier_after (bb_end_insn);
		  continue;
		}

	      /* If the old fallthru is still next, nothing to do.  */
	      if (bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* The degenerated case of conditional jump jumping to the next
		 instruction can happen for jumps with side effects.  We need
		 to construct a forwarder block and this will be done just
		 fine by force_nonfallthru below.  */
	      if (!e_taken)
		;

	      /* There is another special case: if *neither* block is next,
		 such as happens at the very end of a function, then we'll
		 need to add a new unconditional jump.  Choose the taken
		 edge based on known or assumed probability.  */
	      else if (bb->aux != e_taken->dest)
		{
		  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

		  if (note
		      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
		      && invert_jump (bb_end_insn,
				      (e_fall->dest == EXIT_BLOCK_PTR
				       ? NULL_RTX
				       : label_for_bb (e_fall->dest)), 0))
		    {
		      e_fall->flags &= ~EDGE_FALLTHRU;
		      gcc_checking_assert (could_fall_through
					   (e_taken->src, e_taken->dest));
		      e_taken->flags |= EDGE_FALLTHRU;
		      update_br_prob_note (bb);
		      /* The inversion swapped the roles of the two edges.  */
		      e = e_fall, e_fall = e_taken, e_taken = e;
		    }
		}

	      /* If the "jumping" edge is a crossing edge, and the fall
		 through edge is non-crossing, leave things as they are.  */
	      else if ((e_taken->flags & EDGE_CROSSING)
		       && !(e_fall->flags & EDGE_CROSSING))
		continue;

	      /* Otherwise we can try to invert the jump.  This will
		 basically never fail, however, keep up the pretense.  */
	      else if (invert_jump (bb_end_insn,
				    (e_fall->dest == EXIT_BLOCK_PTR
				     ? NULL_RTX
				     : label_for_bb (e_fall->dest)), 0))
		{
		  e_fall->flags &= ~EDGE_FALLTHRU;
		  gcc_checking_assert (could_fall_through
				       (e_taken->src, e_taken->dest));
		  e_taken->flags |= EDGE_FALLTHRU;
		  update_br_prob_note (bb);
		  if (LABEL_NUSES (ret_label) == 0
		      && single_pred_p (e_taken->dest))
		    delete_insn (ret_label);
		  continue;
		}
	    }
	  else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
	    {
	      /* If the old fallthru is still next or if
		 asm goto doesn't have a fallthru (e.g. when followed by
		 __builtin_unreachable ()), nothing to do.  */
	      if (! e_fall
		  || bb->aux == e_fall->dest
		  || e_fall->dest == EXIT_BLOCK_PTR)
		continue;

	      /* Otherwise we'll have to use the fallthru fixup below.  */
	    }
	  else
	    {
	      /* Otherwise we have some return, switch or computed
		 jump.  In the 99% case, there should not have been a
		 fallthru edge.  */
	      gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
	      continue;
	    }
	}
      else
	{
	  /* No fallthru implies a noreturn function with EH edges, or
	     something similarly bizarre.  In any case, we don't need to
	     do anything.  */
	  if (! e_fall)
	    continue;

	  /* If the fallthru block is still next, nothing to do.  */
	  if (bb->aux == e_fall->dest)
	    continue;

	  /* A fallthru to exit block.  */
	  if (e_fall->dest == EXIT_BLOCK_PTR)
	    continue;
	}

      /* We got here if we need to add a new jump insn.
	 Note force_nonfallthru can delete E_FALL and thus we have to
	 save E_FALL->src prior to the call to force_nonfallthru.  */
      src_bb = e_fall->src;
      nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
      if (nb)
	{
	  nb->aux = bb->aux;
	  bb->aux = nb;
	  /* Don't process this new block.  */
	  bb = nb;

	  /* Make sure new bb is tagged for correct section (same as
	     fall-thru source, since you cannot fall-thru across
	     section boundaries).  */
	  BB_COPY_PARTITION (src_bb, single_pred (bb));
	  if (flag_reorder_blocks_and_partition
	      && targetm_common.have_named_sections
	      && JUMP_P (BB_END (bb))
	      && !any_condjump_p (BB_END (bb))
	      && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
	    add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
	}
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e = find_fallthru_edge (bb->succs);

      if (e && !can_fallthru (e->src, e->dest))
	force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL.  */
  if (!optimize)
    FOR_EACH_BB (bb)
      {
	edge e;
	edge_iterator ei;

	FOR_EACH_EDGE (e, ei, bb->succs)
	  if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
	      && !(e->flags & EDGE_ABNORMAL))
	    {
	      edge e2;
	      edge_iterator ei2;
	      basic_block dest, nb;
	      rtx end;

	      /* If some insn at the tail of the source block already
		 carries this locus, nothing to do.  */
	      insn = BB_END (e->src);
	      end = PREV_INSN (BB_HEAD (e->src));
	      while (insn != end
		     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
		insn = PREV_INSN (insn);
	      if (insn != end
		  && INSN_LOCATION (insn) == e->goto_locus)
		continue;
	      if (simplejump_p (BB_END (e->src))
		  && !INSN_HAS_LOCATION (BB_END (e->src)))
		{
		  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
		  continue;
		}
	      dest = e->dest;
	      if (dest == EXIT_BLOCK_PTR)
		{
		  /* Non-fallthru edges to the exit block cannot be split.  */
		  if (!(e->flags & EDGE_FALLTHRU))
		    continue;
		}
	      else
		{
		  /* Likewise if the head of the destination block already
		     carries the locus.  */
		  insn = BB_HEAD (dest);
		  end = NEXT_INSN (BB_END (dest));
		  while (insn != end && !NONDEBUG_INSN_P (insn))
		    insn = NEXT_INSN (insn);
		  if (insn != end && INSN_HAS_LOCATION (insn)
		      && INSN_LOCATION (insn) == e->goto_locus)
		    continue;
		}
	      /* Otherwise split the edge and give the new block's last
		 insn (a nop if needed) the goto_locus.  */
	      nb = split_edge (e);
	      if (!INSN_P (BB_END (nb)))
		BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
						     nb);
	      INSN_LOCATION (BB_END (nb)) = e->goto_locus;

	      /* If there are other incoming edges to the destination block
		 with the same goto locus, redirect them to the new block as
		 well, this can prevent other such blocks from being created
		 in subsequent iterations of the loop.  */
	      for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
		if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
		    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
		    && e->goto_locus == e2->goto_locus)
		  redirect_edge_and_branch (e2, nb);
		else
		  ei_next (&ei2);
	    }
      }
}
3450 /* Perform sanity checks on the insn chain.
3451 1. Check that next/prev pointers are consistent in both the forward and
3452 reverse direction.
3453 2. Count insns in chain, going both directions, and check if equal.
3454 3. Check that get_last_insn () returns the actual end of chain. */
3456 DEBUG_FUNCTION void
3457 verify_insn_chain (void)
3459 rtx x, prevx, nextx;
3460 int insn_cnt1, insn_cnt2;
3462 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3463 x != 0;
3464 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3465 gcc_assert (PREV_INSN (x) == prevx);
3467 gcc_assert (prevx == get_last_insn ());
3469 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3470 x != 0;
3471 nextx = x, insn_cnt2++, x = PREV_INSN (x))
3472 gcc_assert (NEXT_INSN (x) == nextx);
3474 gcc_assert (insn_cnt1 == insn_cnt2);
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  Operates on the bb->aux ordering chain
   built while in cfglayout mode.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
  if (e)
    bb = e->src;

  /* BB->aux non-NULL means BB is not already last in the chain.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
	 edge, we have to split that block.  */
      if (c == bb)
	{
	  bb = split_block (bb, NULL)->dest;
	  bb->aux = c->aux;
	  c->aux = bb;
	  BB_FOOTER (bb) = BB_FOOTER (c);
	  BB_FOOTER (c) = NULL;
	}

      /* Unlink BB from the aux chain...  */
      while (c->aux != bb)
	c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ...then walk to the chain's end and re-append BB there.  */
      while (c->aux)
	c = (basic_block) c->aux;

      c->aux = bb;
      bb->aux = NULL;
    }
}
/* In case there are more than one fallthru predecessors of exit, force that
   there is only one by routing them all through a new forwarder block.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  /* See whether exit has two or more fallthru predecessors.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
	if (predecessor == NULL)
	  predecessor = e;
	else
	  {
	    more = true;
	    break;
	  }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
	  || !(e->flags & EDGE_FALLTHRU))
	ei_next (&ei);
      else
	redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  */
  FOR_EACH_BB (bb)
    {
      if (bb->aux == NULL && bb != forwarder)
	{
	  bb->aux = forwarder;
	  break;
	}
    }
}
3572 /* Return true in case it is possible to duplicate the basic block BB. */
3574 static bool
3575 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
3577 /* Do not attempt to duplicate tablejumps, as we need to unshare
3578 the dispatch table. This is difficult to do, as the instructions
3579 computing jump destination may be hoisted outside the basic block. */
3580 if (tablejump_p (BB_END (bb), NULL, NULL))
3581 return false;
3583 /* Do not duplicate blocks containing insns that can't be copied. */
3584 if (targetm.cannot_copy_insn_p)
3586 rtx insn = BB_HEAD (bb);
3587 while (1)
3589 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
3590 return false;
3591 if (insn == BB_END (bb))
3592 break;
3593 insn = NEXT_INSN (insn);
3597 return true;
3601 duplicate_insn_chain (rtx from, rtx to)
3603 rtx insn, last, copy;
3605 /* Avoid updating of boundaries of previous basic block. The
3606 note will get removed from insn stream in fixup. */
3607 last = emit_note (NOTE_INSN_DELETED);
3609 /* Create copy at the end of INSN chain. The chain will
3610 be reordered later. */
3611 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
3613 switch (GET_CODE (insn))
3615 case DEBUG_INSN:
3616 /* Don't duplicate label debug insns. */
3617 if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
3618 break;
3619 /* FALLTHRU */
3620 case INSN:
3621 case CALL_INSN:
3622 case JUMP_INSN:
3623 /* Avoid copying of dispatch tables. We never duplicate
3624 tablejumps, so this can hit only in case the table got
3625 moved far from original jump. */
3626 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
3627 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
3629 /* Avoid copying following barrier as well if any
3630 (and debug insns in between). */
3631 rtx next;
3633 for (next = NEXT_INSN (insn);
3634 next != NEXT_INSN (to);
3635 next = NEXT_INSN (next))
3636 if (!DEBUG_INSN_P (next))
3637 break;
3638 if (next != NEXT_INSN (to) && BARRIER_P (next))
3639 insn = next;
3640 break;
3642 copy = emit_copy_of_insn_after (insn, get_last_insn ());
3643 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
3644 && ANY_RETURN_P (JUMP_LABEL (insn)))
3645 JUMP_LABEL (copy) = JUMP_LABEL (insn);
3646 maybe_copy_prologue_epilogue_insn (insn, copy);
3647 break;
3649 case CODE_LABEL:
3650 break;
3652 case BARRIER:
3653 emit_barrier ();
3654 break;
3656 case NOTE:
3657 switch (NOTE_KIND (insn))
3659 /* In case prologue is empty and function contain label
3660 in first BB, we may want to copy the block. */
3661 case NOTE_INSN_PROLOGUE_END:
3663 case NOTE_INSN_DELETED:
3664 case NOTE_INSN_DELETED_LABEL:
3665 case NOTE_INSN_DELETED_DEBUG_LABEL:
3666 /* No problem to strip these. */
3667 case NOTE_INSN_FUNCTION_BEG:
3668 /* There is always just single entry to function. */
3669 case NOTE_INSN_BASIC_BLOCK:
3670 break;
3672 case NOTE_INSN_EPILOGUE_BEG:
3673 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
3674 emit_note_copy (insn);
3675 break;
3677 default:
3678 /* All other notes should have already been eliminated. */
3679 gcc_unreachable ();
3681 break;
3682 default:
3683 gcc_unreachable ();
3686 insn = NEXT_INSN (last);
3687 delete_insn (last);
3688 return insn;
3691 /* Create a duplicate of the basic block BB. */
3693 static basic_block
3694 cfg_layout_duplicate_bb (basic_block bb)
3696 rtx insn;
3697 basic_block new_bb;
3699 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
3700 new_bb = create_basic_block (insn,
3701 insn ? get_last_insn () : NULL,
3702 EXIT_BLOCK_PTR->prev_bb);
3704 BB_COPY_PARTITION (new_bb, bb);
3705 if (BB_HEADER (bb))
3707 insn = BB_HEADER (bb);
3708 while (NEXT_INSN (insn))
3709 insn = NEXT_INSN (insn);
3710 insn = duplicate_insn_chain (BB_HEADER (bb), insn);
3711 if (insn)
3712 BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
3715 if (BB_FOOTER (bb))
3717 insn = BB_FOOTER (bb);
3718 while (NEXT_INSN (insn))
3719 insn = NEXT_INSN (insn);
3720 insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
3721 if (insn)
3722 BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
3725 return new_bb;
3729 /* Main entry point to this module - initialize the datastructures for
3730 CFG layout changes. It keeps LOOPS up-to-date if not null.
3732 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
3734 void
3735 cfg_layout_initialize (unsigned int flags)
3737 rtx x;
3738 basic_block bb;
3740 initialize_original_copy_tables ();
3742 cfg_layout_rtl_register_cfg_hooks ();
3744 record_effective_endpoints ();
3746 /* Make sure that the targets of non local gotos are marked. */
3747 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
3749 bb = BLOCK_FOR_INSN (XEXP (x, 0));
3750 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
3753 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
3756 /* Splits superblocks. */
3757 void
3758 break_superblocks (void)
3760 sbitmap superblocks;
3761 bool need = false;
3762 basic_block bb;
3764 superblocks = sbitmap_alloc (last_basic_block);
3765 bitmap_clear (superblocks);
3767 FOR_EACH_BB (bb)
3768 if (bb->flags & BB_SUPERBLOCK)
3770 bb->flags &= ~BB_SUPERBLOCK;
3771 bitmap_set_bit (superblocks, bb->index);
3772 need = true;
3775 if (need)
3777 rebuild_jump_labels (get_insns ());
3778 find_many_sub_basic_blocks (superblocks);
3781 free (superblocks);
3784 /* Finalize the changes: reorder insn list according to the sequence specified
3785 by aux pointers, enter compensation code, rebuild scope forest. */
3787 void
3788 cfg_layout_finalize (void)
3790 #ifdef ENABLE_CHECKING
3791 verify_flow_info ();
3792 #endif
3793 force_one_exit_fallthru ();
3794 rtl_register_cfg_hooks ();
3795 if (reload_completed
3796 #ifdef HAVE_epilogue
3797 && !HAVE_epilogue
3798 #endif
3800 fixup_fallthru_exit_predecessor ();
3801 fixup_reorder_chain ();
3803 rebuild_jump_labels (get_insns ());
3804 delete_dead_jumptables ();
3806 #ifdef ENABLE_CHECKING
3807 verify_insn_chain ();
3808 verify_flow_info ();
3809 #endif
3813 /* Same as split_block but update cfg_layout structures. */
3815 static basic_block
3816 cfg_layout_split_block (basic_block bb, void *insnp)
3818 rtx insn = (rtx) insnp;
3819 basic_block new_bb = rtl_split_block (bb, insn);
3821 BB_FOOTER (new_bb) = BB_FOOTER (bb);
3822 BB_FOOTER (bb) = NULL;
3824 return new_bb;
3827 /* Redirect Edge to DEST. */
3828 static edge
3829 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
3831 basic_block src = e->src;
3832 edge ret;
3834 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3835 return NULL;
3837 if (e->dest == dest)
3838 return e;
3840 if (e->src != ENTRY_BLOCK_PTR
3841 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
3843 df_set_bb_dirty (src);
3844 return ret;
3847 if (e->src == ENTRY_BLOCK_PTR
3848 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
3850 if (dump_file)
3851 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
3852 e->src->index, dest->index);
3854 df_set_bb_dirty (e->src);
3855 redirect_edge_succ (e, dest);
3856 return e;
3859 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
3860 in the case the basic block appears to be in sequence. Avoid this
3861 transformation. */
3863 if (e->flags & EDGE_FALLTHRU)
3865 /* Redirect any branch edges unified with the fallthru one. */
3866 if (JUMP_P (BB_END (src))
3867 && label_is_jump_target_p (BB_HEAD (e->dest),
3868 BB_END (src)))
3870 edge redirected;
3872 if (dump_file)
3873 fprintf (dump_file, "Fallthru edge unified with branch "
3874 "%i->%i redirected to %i\n",
3875 e->src->index, e->dest->index, dest->index);
3876 e->flags &= ~EDGE_FALLTHRU;
3877 redirected = redirect_branch_edge (e, dest);
3878 gcc_assert (redirected);
3879 redirected->flags |= EDGE_FALLTHRU;
3880 df_set_bb_dirty (redirected->src);
3881 return redirected;
3883 /* In case we are redirecting fallthru edge to the branch edge
3884 of conditional jump, remove it. */
3885 if (EDGE_COUNT (src->succs) == 2)
3887 /* Find the edge that is different from E. */
3888 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
3890 if (s->dest == dest
3891 && any_condjump_p (BB_END (src))
3892 && onlyjump_p (BB_END (src)))
3893 delete_insn (BB_END (src));
3895 if (dump_file)
3896 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
3897 e->src->index, e->dest->index, dest->index);
3898 ret = redirect_edge_succ_nodup (e, dest);
3900 else
3901 ret = redirect_branch_edge (e, dest);
3903 /* We don't want simplejumps in the insn stream during cfglayout. */
3904 gcc_assert (!simplejump_p (BB_END (src)));
3906 df_set_bb_dirty (src);
3907 return ret;
3910 /* Simple wrapper as we always can redirect fallthru edges. */
3911 static basic_block
3912 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
3914 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
3916 gcc_assert (redirected);
3917 return NULL;
3920 /* Same as delete_basic_block but update cfg_layout structures. */
3922 static void
3923 cfg_layout_delete_block (basic_block bb)
3925 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
3927 if (BB_HEADER (bb))
3929 next = BB_HEAD (bb);
3930 if (prev)
3931 NEXT_INSN (prev) = BB_HEADER (bb);
3932 else
3933 set_first_insn (BB_HEADER (bb));
3934 PREV_INSN (BB_HEADER (bb)) = prev;
3935 insn = BB_HEADER (bb);
3936 while (NEXT_INSN (insn))
3937 insn = NEXT_INSN (insn);
3938 NEXT_INSN (insn) = next;
3939 PREV_INSN (next) = insn;
3941 next = NEXT_INSN (BB_END (bb));
3942 if (BB_FOOTER (bb))
3944 insn = BB_FOOTER (bb);
3945 while (insn)
3947 if (BARRIER_P (insn))
3949 if (PREV_INSN (insn))
3950 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
3951 else
3952 BB_FOOTER (bb) = NEXT_INSN (insn);
3953 if (NEXT_INSN (insn))
3954 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
3956 if (LABEL_P (insn))
3957 break;
3958 insn = NEXT_INSN (insn);
3960 if (BB_FOOTER (bb))
3962 insn = BB_END (bb);
3963 NEXT_INSN (insn) = BB_FOOTER (bb);
3964 PREV_INSN (BB_FOOTER (bb)) = insn;
3965 while (NEXT_INSN (insn))
3966 insn = NEXT_INSN (insn);
3967 NEXT_INSN (insn) = next;
3968 if (next)
3969 PREV_INSN (next) = insn;
3970 else
3971 set_last_insn (insn);
3974 if (bb->next_bb != EXIT_BLOCK_PTR)
3975 to = &BB_HEADER (bb->next_bb);
3976 else
3977 to = &cfg_layout_function_footer;
3979 rtl_delete_block (bb);
3981 if (prev)
3982 prev = NEXT_INSN (prev);
3983 else
3984 prev = get_insns ();
3985 if (next)
3986 next = PREV_INSN (next);
3987 else
3988 next = get_last_insn ();
3990 if (next && NEXT_INSN (next) != prev)
3992 remaints = unlink_insn_chain (prev, next);
3993 insn = remaints;
3994 while (NEXT_INSN (insn))
3995 insn = NEXT_INSN (insn);
3996 NEXT_INSN (insn) = *to;
3997 if (*to)
3998 PREV_INSN (*to) = insn;
3999 *to = remaints;
4003 /* Return true when blocks A and B can be safely merged. */
4005 static bool
4006 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4008 /* If we are partitioning hot/cold basic blocks, we don't want to
4009 mess up unconditional or indirect jumps that cross between hot
4010 and cold sections.
4012 Basic block partitioning may result in some jumps that appear to
4013 be optimizable (or blocks that appear to be mergeable), but which really
4014 must be left untouched (they are required to make it safely across
4015 partition boundaries). See the comments at the top of
4016 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4018 if (BB_PARTITION (a) != BB_PARTITION (b))
4019 return false;
4021 /* Protect the loop latches. */
4022 if (current_loops && b->loop_father->latch == b)
4023 return false;
4025 /* If we would end up moving B's instructions, make sure it doesn't fall
4026 through into the exit block, since we cannot recover from a fallthrough
4027 edge into the exit block occurring in the middle of a function. */
4028 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4030 edge e = find_fallthru_edge (b->succs);
4031 if (e && e->dest == EXIT_BLOCK_PTR)
4032 return false;
4035 /* There must be exactly one edge in between the blocks. */
4036 return (single_succ_p (a)
4037 && single_succ (a) == b
4038 && single_pred_p (b) == 1
4039 && a != b
4040 /* Must be simple edge. */
4041 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4042 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
4043 /* If the jump insn has side effects, we can't kill the edge.
4044 When not optimizing, try_redirect_by_replacing_jump will
4045 not allow us to redirect an edge by replacing a table jump. */
4046 && (!JUMP_P (BB_END (a))
4047 || ((!optimize || reload_completed)
4048 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
4051 /* Merge block A and B. The blocks must be mergeable. */
4053 static void
4054 cfg_layout_merge_blocks (basic_block a, basic_block b)
4056 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
4057 rtx insn;
4059 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
4061 if (dump_file)
4062 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4063 a->index);
4065 /* If there was a CODE_LABEL beginning B, delete it. */
4066 if (LABEL_P (BB_HEAD (b)))
4068 delete_insn (BB_HEAD (b));
4071 /* We should have fallthru edge in a, or we can do dummy redirection to get
4072 it cleaned up. */
4073 if (JUMP_P (BB_END (a)))
4074 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
4075 gcc_assert (!JUMP_P (BB_END (a)));
4077 /* When not optimizing CFG and the edge is the only place in RTL which holds
4078 some unique locus, emit a nop with that locus in between. */
4079 if (!optimize)
4080 emit_nop_for_unique_locus_between (a, b);
4082 /* Possible line number notes should appear in between. */
4083 if (BB_HEADER (b))
4085 rtx first = BB_END (a), last;
4087 last = emit_insn_after_noloc (BB_HEADER (b), BB_END (a), a);
4088 /* The above might add a BARRIER as BB_END, but as barriers
4089 aren't valid parts of a bb, remove_insn doesn't update
4090 BB_END if it is a barrier. So adjust BB_END here. */
4091 while (BB_END (a) != first && BARRIER_P (BB_END (a)))
4092 BB_END (a) = PREV_INSN (BB_END (a));
4093 delete_insn_chain (NEXT_INSN (first), last, false);
4094 BB_HEADER (b) = NULL;
4097 /* In the case basic blocks are not adjacent, move them around. */
4098 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4100 insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
4102 emit_insn_after_noloc (insn, BB_END (a), a);
4104 /* Otherwise just re-associate the instructions. */
4105 else
4107 insn = BB_HEAD (b);
4108 BB_END (a) = BB_END (b);
4111 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4112 We need to explicitly call. */
4113 update_bb_for_insn_chain (insn, BB_END (b), a);
4115 /* Skip possible DELETED_LABEL insn. */
4116 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4117 insn = NEXT_INSN (insn);
4118 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
4119 BB_HEAD (b) = NULL;
4120 delete_insn (insn);
4122 df_bb_delete (b->index);
4124 /* Possible tablejumps and barriers should appear after the block. */
4125 if (BB_FOOTER (b))
4127 if (!BB_FOOTER (a))
4128 BB_FOOTER (a) = BB_FOOTER (b);
4129 else
4131 rtx last = BB_FOOTER (a);
4133 while (NEXT_INSN (last))
4134 last = NEXT_INSN (last);
4135 NEXT_INSN (last) = BB_FOOTER (b);
4136 PREV_INSN (BB_FOOTER (b)) = last;
4138 BB_FOOTER (b) = NULL;
4141 /* If B was a forwarder block, propagate the locus on the edge. */
4142 if (forwarder_p
4143 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
4144 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4146 if (dump_file)
4147 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
4150 /* Split edge E. */
4152 static basic_block
4153 cfg_layout_split_edge (edge e)
4155 basic_block new_bb =
4156 create_basic_block (e->src != ENTRY_BLOCK_PTR
4157 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
4158 NULL_RTX, e->src);
4160 if (e->dest == EXIT_BLOCK_PTR)
4161 BB_COPY_PARTITION (new_bb, e->src);
4162 else
4163 BB_COPY_PARTITION (new_bb, e->dest);
4164 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4165 redirect_edge_and_branch_force (e, new_bb);
4167 return new_bb;
4170 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4172 static void
4173 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
4177 /* Return true if BB contains only labels or non-executable
4178 instructions. */
4180 static bool
4181 rtl_block_empty_p (basic_block bb)
4183 rtx insn;
4185 if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
4186 return true;
4188 FOR_BB_INSNS (bb, insn)
4189 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4190 return false;
4192 return true;
4195 /* Split a basic block if it ends with a conditional branch and if
4196 the other part of the block is not empty. */
4198 static basic_block
4199 rtl_split_block_before_cond_jump (basic_block bb)
4201 rtx insn;
4202 rtx split_point = NULL;
4203 rtx last = NULL;
4204 bool found_code = false;
4206 FOR_BB_INSNS (bb, insn)
4208 if (any_condjump_p (insn))
4209 split_point = last;
4210 else if (NONDEBUG_INSN_P (insn))
4211 found_code = true;
4212 last = insn;
4215 /* Did not find everything. */
4216 if (found_code && split_point)
4217 return split_block (bb, split_point)->dest;
4218 else
4219 return NULL;
4222 /* Return 1 if BB ends with a call, possibly followed by some
4223 instructions that must stay with the call, 0 otherwise. */
4225 static bool
4226 rtl_block_ends_with_call_p (basic_block bb)
4228 rtx insn = BB_END (bb);
4230 while (!CALL_P (insn)
4231 && insn != BB_HEAD (bb)
4232 && (keep_with_call_p (insn)
4233 || NOTE_P (insn)
4234 || DEBUG_INSN_P (insn)))
4235 insn = PREV_INSN (insn);
4236 return (CALL_P (insn));
4239 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4241 static bool
4242 rtl_block_ends_with_condjump_p (const_basic_block bb)
4244 return any_condjump_p (BB_END (bb));
4247 /* Return true if we need to add fake edge to exit.
4248 Helper function for rtl_flow_call_edges_add. */
4250 static bool
4251 need_fake_edge_p (const_rtx insn)
4253 if (!INSN_P (insn))
4254 return false;
4256 if ((CALL_P (insn)
4257 && !SIBLING_CALL_P (insn)
4258 && !find_reg_note (insn, REG_NORETURN, NULL)
4259 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4260 return true;
4262 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4263 && MEM_VOLATILE_P (PATTERN (insn)))
4264 || (GET_CODE (PATTERN (insn)) == PARALLEL
4265 && asm_noperands (insn) != -1
4266 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4267 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4270 /* Add fake edges to the function exit for any non constant and non noreturn
4271 calls, volatile inline assembly in the bitmap of blocks specified by
4272 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
4273 that were split.
4275 The goal is to expose cases in which entering a basic block does not imply
4276 that all subsequent instructions must be executed. */
4278 static int
4279 rtl_flow_call_edges_add (sbitmap blocks)
4281 int i;
4282 int blocks_split = 0;
4283 int last_bb = last_basic_block;
4284 bool check_last_block = false;
4286 if (n_basic_blocks == NUM_FIXED_BLOCKS)
4287 return 0;
4289 if (! blocks)
4290 check_last_block = true;
4291 else
4292 check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
4294 /* In the last basic block, before epilogue generation, there will be
4295 a fallthru edge to EXIT. Special care is required if the last insn
4296 of the last basic block is a call because make_edge folds duplicate
4297 edges, which would result in the fallthru edge also being marked
4298 fake, which would result in the fallthru edge being removed by
4299 remove_fake_edges, which would result in an invalid CFG.
4301 Moreover, we can't elide the outgoing fake edge, since the block
4302 profiler needs to take this into account in order to solve the minimal
4303 spanning tree in the case that the call doesn't return.
4305 Handle this by adding a dummy instruction in a new last basic block. */
4306 if (check_last_block)
4308 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
4309 rtx insn = BB_END (bb);
4311 /* Back up past insns that must be kept in the same block as a call. */
4312 while (insn != BB_HEAD (bb)
4313 && keep_with_call_p (insn))
4314 insn = PREV_INSN (insn);
4316 if (need_fake_edge_p (insn))
4318 edge e;
4320 e = find_edge (bb, EXIT_BLOCK_PTR);
4321 if (e)
4323 insert_insn_on_edge (gen_use (const0_rtx), e);
4324 commit_edge_insertions ();
4329 /* Now add fake edges to the function exit for any non constant
4330 calls since there is no way that we can determine if they will
4331 return or not... */
4333 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
4335 basic_block bb = BASIC_BLOCK (i);
4336 rtx insn;
4337 rtx prev_insn;
4339 if (!bb)
4340 continue;
4342 if (blocks && !bitmap_bit_p (blocks, i))
4343 continue;
4345 for (insn = BB_END (bb); ; insn = prev_insn)
4347 prev_insn = PREV_INSN (insn);
4348 if (need_fake_edge_p (insn))
4350 edge e;
4351 rtx split_at_insn = insn;
4353 /* Don't split the block between a call and an insn that should
4354 remain in the same block as the call. */
4355 if (CALL_P (insn))
4356 while (split_at_insn != BB_END (bb)
4357 && keep_with_call_p (NEXT_INSN (split_at_insn)))
4358 split_at_insn = NEXT_INSN (split_at_insn);
4360 /* The handling above of the final block before the epilogue
4361 should be enough to verify that there is no edge to the exit
4362 block in CFG already. Calling make_edge in such case would
4363 cause us to mark that edge as fake and remove it later. */
4365 #ifdef ENABLE_CHECKING
4366 if (split_at_insn == BB_END (bb))
4368 e = find_edge (bb, EXIT_BLOCK_PTR);
4369 gcc_assert (e == NULL);
4371 #endif
4373 /* Note that the following may create a new basic block
4374 and renumber the existing basic blocks. */
4375 if (split_at_insn != BB_END (bb))
4377 e = split_block (bb, split_at_insn);
4378 if (e)
4379 blocks_split++;
4382 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
4385 if (insn == BB_HEAD (bb))
4386 break;
4390 if (blocks_split)
4391 verify_flow_info ();
4393 return blocks_split;
4396 /* Add COMP_RTX as a condition at end of COND_BB. FIRST_HEAD is
4397 the conditional branch target, SECOND_HEAD should be the fall-thru
4398 there is no need to handle this here the loop versioning code handles
4399 this. the reason for SECON_HEAD is that it is needed for condition
4400 in trees, and this should be of the same type since it is a hook. */
4401 static void
4402 rtl_lv_add_condition_to_bb (basic_block first_head ,
4403 basic_block second_head ATTRIBUTE_UNUSED,
4404 basic_block cond_bb, void *comp_rtx)
4406 rtx label, seq, jump;
4407 rtx op0 = XEXP ((rtx)comp_rtx, 0);
4408 rtx op1 = XEXP ((rtx)comp_rtx, 1);
4409 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
4410 enum machine_mode mode;
4413 label = block_label (first_head);
4414 mode = GET_MODE (op0);
4415 if (mode == VOIDmode)
4416 mode = GET_MODE (op1);
4418 start_sequence ();
4419 op0 = force_operand (op0, NULL_RTX);
4420 op1 = force_operand (op1, NULL_RTX);
4421 do_compare_rtx_and_jump (op0, op1, comp, 0,
4422 mode, NULL_RTX, NULL_RTX, label, -1);
4423 jump = get_last_insn ();
4424 JUMP_LABEL (jump) = label;
4425 LABEL_NUSES (label)++;
4426 seq = get_insns ();
4427 end_sequence ();
4429 /* Add the new cond , in the new head. */
4430 emit_insn_after(seq, BB_END(cond_bb));
4434 /* Given a block B with unconditional branch at its end, get the
4435 store the return the branch edge and the fall-thru edge in
4436 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4437 static void
4438 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4439 edge *fallthru_edge)
4441 edge e = EDGE_SUCC (b, 0);
4443 if (e->flags & EDGE_FALLTHRU)
4445 *fallthru_edge = e;
4446 *branch_edge = EDGE_SUCC (b, 1);
4448 else
4450 *branch_edge = e;
4451 *fallthru_edge = EDGE_SUCC (b, 1);
4455 void
4456 init_rtl_bb_info (basic_block bb)
4458 gcc_assert (!bb->il.x.rtl);
4459 bb->il.x.head_ = NULL;
4460 bb->il.x.rtl = ggc_alloc_cleared_rtl_bb_info ();
4463 /* Returns true if it is possible to remove edge E by redirecting
4464 it to the destination of the other edge from E->src. */
4466 static bool
4467 rtl_can_remove_branch_p (const_edge e)
4469 const_basic_block src = e->src;
4470 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
4471 const_rtx insn = BB_END (src), set;
4473 /* The conditions are taken from try_redirect_by_replacing_jump. */
4474 if (target == EXIT_BLOCK_PTR)
4475 return false;
4477 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4478 return false;
4480 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
4481 || BB_PARTITION (src) != BB_PARTITION (target))
4482 return false;
4484 if (!onlyjump_p (insn)
4485 || tablejump_p (insn, NULL, NULL))
4486 return false;
4488 set = single_set (insn);
4489 if (!set || side_effects_p (set))
4490 return false;
4492 return true;
4495 static basic_block
4496 rtl_duplicate_bb (basic_block bb)
4498 bb = cfg_layout_duplicate_bb (bb);
4499 bb->aux = NULL;
4500 return bb;
4503 /* Do book-keeping of basic block BB for the profile consistency checker.
4504 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
4505 then do post-pass accounting. Store the counting in RECORD. */
4506 static void
4507 rtl_account_profile_record (basic_block bb, int after_pass,
4508 struct profile_record *record)
4510 rtx insn;
4511 FOR_BB_INSNS (bb, insn)
4512 if (INSN_P (insn))
4514 record->size[after_pass]
4515 += insn_rtx_cost (PATTERN (insn), false);
4516 if (profile_status == PROFILE_READ)
4517 record->time[after_pass]
4518 += insn_rtx_cost (PATTERN (insn), true) * bb->count;
4519 else if (profile_status == PROFILE_GUESSED)
4520 record->time[after_pass]
4521 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
4525 /* Implementation of CFG manipulation for linearized RTL. */
4526 struct cfg_hooks rtl_cfg_hooks = {
4527 "rtl",
4528 rtl_verify_flow_info,
4529 rtl_dump_bb,
4530 rtl_dump_bb_for_graph,
4531 rtl_create_basic_block,
4532 rtl_redirect_edge_and_branch,
4533 rtl_redirect_edge_and_branch_force,
4534 rtl_can_remove_branch_p,
4535 rtl_delete_block,
4536 rtl_split_block,
4537 rtl_move_block_after,
4538 rtl_can_merge_blocks, /* can_merge_blocks_p */
4539 rtl_merge_blocks,
4540 rtl_predict_edge,
4541 rtl_predicted_by_p,
4542 cfg_layout_can_duplicate_bb_p,
4543 rtl_duplicate_bb,
4544 rtl_split_edge,
4545 rtl_make_forwarder_block,
4546 rtl_tidy_fallthru_edge,
4547 rtl_force_nonfallthru,
4548 rtl_block_ends_with_call_p,
4549 rtl_block_ends_with_condjump_p,
4550 rtl_flow_call_edges_add,
4551 NULL, /* execute_on_growing_pred */
4552 NULL, /* execute_on_shrinking_pred */
4553 NULL, /* duplicate loop for trees */
4554 NULL, /* lv_add_condition_to_bb */
4555 NULL, /* lv_adjust_loop_header_phi*/
4556 NULL, /* extract_cond_bb_edges */
4557 NULL, /* flush_pending_stmts */
4558 rtl_block_empty_p, /* block_empty_p */
4559 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
4560 rtl_account_profile_record,
4563 /* Implementation of CFG manipulation for cfg layout RTL, where
4564 basic block connected via fallthru edges does not have to be adjacent.
4565 This representation will hopefully become the default one in future
4566 version of the compiler. */
4568 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
4569 "cfglayout mode",
4570 rtl_verify_flow_info_1,
4571 rtl_dump_bb,
4572 rtl_dump_bb_for_graph,
4573 cfg_layout_create_basic_block,
4574 cfg_layout_redirect_edge_and_branch,
4575 cfg_layout_redirect_edge_and_branch_force,
4576 rtl_can_remove_branch_p,
4577 cfg_layout_delete_block,
4578 cfg_layout_split_block,
4579 rtl_move_block_after,
4580 cfg_layout_can_merge_blocks_p,
4581 cfg_layout_merge_blocks,
4582 rtl_predict_edge,
4583 rtl_predicted_by_p,
4584 cfg_layout_can_duplicate_bb_p,
4585 cfg_layout_duplicate_bb,
4586 cfg_layout_split_edge,
4587 rtl_make_forwarder_block,
4588 NULL, /* tidy_fallthru_edge */
4589 rtl_force_nonfallthru,
4590 rtl_block_ends_with_call_p,
4591 rtl_block_ends_with_condjump_p,
4592 rtl_flow_call_edges_add,
4593 NULL, /* execute_on_growing_pred */
4594 NULL, /* execute_on_shrinking_pred */
4595 duplicate_loop_to_header_edge, /* duplicate loop for trees */
4596 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
4597 NULL, /* lv_adjust_loop_header_phi*/
4598 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
4599 NULL, /* flush_pending_stmts */
4600 rtl_block_empty_p, /* block_empty_p */
4601 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
4602 rtl_account_profile_record,
4605 #include "gt-cfgrtl.h"