2013-02-12 Richard Biener <rguenther@suse.de>
[official-gcc.git] / gcc / cfgrtl.c
blobc6ed44f8a7e7041bd8c3100f9095c42b394c121f
1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains low level functions to manipulate the CFG and analyze it
21 that are aware of the RTL intermediate language.
23 Available functionality:
24 - Basic CFG/RTL manipulation API documented in cfghooks.h
25 - CFG-aware instruction chain manipulation
26 delete_insn, delete_insn_chain
27 - Edge splitting and committing to edges
28 insert_insn_on_edge, commit_edge_insertions
29 - CFG updating after insn simplification
30 purge_dead_edges, purge_all_dead_edges
31 - CFG fixing after coarse manipulation
32 fixup_abnormal_edges
34 Functions not supposed for generic use:
35 - Infrastructure to determine quickly basic block for insn
36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37 - Edge redirection with updating and optimizing of insn chain
38 block_label, tidy_fallthru_edge, force_nonfallthru */
40 #include "config.h"
41 #include "system.h"
42 #include "coretypes.h"
43 #include "tm.h"
44 #include "tree.h"
45 #include "hard-reg-set.h"
46 #include "basic-block.h"
47 #include "regs.h"
48 #include "flags.h"
49 #include "function.h"
50 #include "except.h"
51 #include "rtl-error.h"
52 #include "tm_p.h"
53 #include "obstack.h"
54 #include "insn-attr.h"
55 #include "insn-config.h"
56 #include "expr.h"
57 #include "target.h"
58 #include "common/common-target.h"
59 #include "cfgloop.h"
60 #include "ggc.h"
61 #include "tree-pass.h"
62 #include "df.h"
64 /* Holds the interesting leading and trailing notes for the function.
65 Only applicable if the CFG is in cfglayout mode. */
66 static GTY(()) rtx cfg_layout_function_footer;
67 static GTY(()) rtx cfg_layout_function_header;
69 static rtx skip_insns_after_block (basic_block);
70 static void record_effective_endpoints (void);
71 static rtx label_for_bb (basic_block);
72 static void fixup_reorder_chain (void);
74 void verify_insn_chain (void);
75 static void fixup_fallthru_exit_predecessor (void);
76 static int can_delete_note_p (const_rtx);
77 static int can_delete_label_p (const_rtx);
78 static basic_block rtl_split_edge (edge);
79 static bool rtl_move_block_after (basic_block, basic_block);
80 static int rtl_verify_flow_info (void);
81 static basic_block cfg_layout_split_block (basic_block, void *);
82 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
83 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
84 static void cfg_layout_delete_block (basic_block);
85 static void rtl_delete_block (basic_block);
86 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
87 static edge rtl_redirect_edge_and_branch (edge, basic_block);
88 static basic_block rtl_split_block (basic_block, void *);
89 static void rtl_dump_bb (FILE *, basic_block, int, int);
90 static int rtl_verify_flow_info_1 (void);
91 static void rtl_make_forwarder_block (edge);
93 /* Return true if NOTE is not one of the ones that must be kept paired,
94 so that we may simply delete it. */
96 static int
97 can_delete_note_p (const_rtx note)
99 switch (NOTE_KIND (note))
101 case NOTE_INSN_DELETED:
102 case NOTE_INSN_BASIC_BLOCK:
103 case NOTE_INSN_EPILOGUE_BEG:
104 return true;
106 default:
107 return false;
111 /* True if a given label can be deleted. */
113 static int
114 can_delete_label_p (const_rtx label)
116 return (!LABEL_PRESERVE_P (label)
117 /* User declared labels must be preserved. */
118 && LABEL_NAME (label) == 0
119 && !in_expr_list_p (forced_labels, label));
122 /* Delete INSN by patching it out. */
124 void
125 delete_insn (rtx insn)
127 rtx note;
128 bool really_delete = true;
130 if (LABEL_P (insn))
132 /* Some labels can't be directly removed from the INSN chain, as they
133 might be references via variables, constant pool etc.
134 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
135 if (! can_delete_label_p (insn))
137 const char *name = LABEL_NAME (insn);
138 basic_block bb, label_bb = BLOCK_FOR_INSN (insn);
139 rtx bb_note = NEXT_INSN (insn);
141 really_delete = false;
142 PUT_CODE (insn, NOTE);
143 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
144 NOTE_DELETED_LABEL_NAME (insn) = name;
146 /* If the note following the label starts a basic block, and the
147 label is a member of the same basic block, interchange the two.
148 If the label is not marked with a bb, assume it's the same bb. */
149 if (bb_note != NULL_RTX
150 && NOTE_INSN_BASIC_BLOCK_P (bb_note)
151 && (label_bb == NOTE_BASIC_BLOCK (bb_note)
152 || label_bb == NULL))
154 reorder_insns_nobb (insn, insn, bb_note);
155 bb = NOTE_BASIC_BLOCK (bb_note);
156 BB_HEAD (bb) = bb_note;
157 if (BB_END (bb) == bb_note)
158 BB_END (bb) = insn;
162 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
165 if (really_delete)
167 /* If this insn has already been deleted, something is very wrong. */
168 gcc_assert (!INSN_DELETED_P (insn));
169 remove_insn (insn);
170 INSN_DELETED_P (insn) = 1;
173 /* If deleting a jump, decrement the use count of the label. Deleting
174 the label itself should happen in the normal course of block merging. */
175 if (JUMP_P (insn))
177 if (JUMP_LABEL (insn)
178 && LABEL_P (JUMP_LABEL (insn)))
179 LABEL_NUSES (JUMP_LABEL (insn))--;
181 /* If there are more targets, remove them too. */
182 while ((note
183 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
184 && LABEL_P (XEXP (note, 0)))
186 LABEL_NUSES (XEXP (note, 0))--;
187 remove_note (insn, note);
191 /* Also if deleting any insn that references a label as an operand. */
192 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
193 && LABEL_P (XEXP (note, 0)))
195 LABEL_NUSES (XEXP (note, 0))--;
196 remove_note (insn, note);
199 if (JUMP_TABLE_DATA_P (insn))
201 rtx pat = PATTERN (insn);
202 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
203 int len = XVECLEN (pat, diff_vec_p);
204 int i;
206 for (i = 0; i < len; i++)
208 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
210 /* When deleting code in bulk (e.g. removing many unreachable
211 blocks) we can delete a label that's a target of the vector
212 before deleting the vector itself. */
213 if (!NOTE_P (label))
214 LABEL_NUSES (label)--;
219 /* Like delete_insn but also purge dead edges from BB. */
221 void
222 delete_insn_and_edges (rtx insn)
224 bool purge = false;
226 if (INSN_P (insn)
227 && BLOCK_FOR_INSN (insn)
228 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
229 purge = true;
230 delete_insn (insn);
231 if (purge)
232 purge_dead_edges (BLOCK_FOR_INSN (insn));
235 /* Unlink a chain of insns between START and FINISH, leaving notes
236 that must be paired. If CLEAR_BB is true, we set bb field for
237 insns that cannot be removed to NULL. */
239 void
240 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
242 rtx prev, current;
244 /* Unchain the insns one by one. It would be quicker to delete all of these
245 with a single unchaining, rather than one at a time, but we need to keep
246 the NOTE's. */
247 current = finish;
248 while (1)
250 prev = PREV_INSN (current);
251 if (NOTE_P (current) && !can_delete_note_p (current))
253 else
254 delete_insn (current);
256 if (clear_bb && !INSN_DELETED_P (current))
257 set_block_for_insn (current, NULL);
259 if (current == start)
260 break;
261 current = prev;
265 /* Create a new basic block consisting of the instructions between HEAD and END
266 inclusive. This function is designed to allow fast BB construction - reuses
267 the note and basic block struct in BB_NOTE, if any and do not grow
268 BASIC_BLOCK chain and should be used directly only by CFG construction code.
269 END can be NULL in to create new empty basic block before HEAD. Both END
270 and HEAD can be NULL to create basic block at the end of INSN chain.
271 AFTER is the basic block we should be put after. */
273 basic_block
274 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
276 basic_block bb;
278 if (bb_note
279 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
280 && bb->aux == NULL)
282 /* If we found an existing note, thread it back onto the chain. */
284 rtx after;
286 if (LABEL_P (head))
287 after = head;
288 else
290 after = PREV_INSN (head);
291 head = bb_note;
294 if (after != bb_note && NEXT_INSN (after) != bb_note)
295 reorder_insns_nobb (bb_note, bb_note, after);
297 else
299 /* Otherwise we must create a note and a basic block structure. */
301 bb = alloc_block ();
303 init_rtl_bb_info (bb);
304 if (!head && !end)
305 head = end = bb_note
306 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
307 else if (LABEL_P (head) && end)
309 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
310 if (head == end)
311 end = bb_note;
313 else
315 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
316 head = bb_note;
317 if (!end)
318 end = head;
321 NOTE_BASIC_BLOCK (bb_note) = bb;
324 /* Always include the bb note in the block. */
325 if (NEXT_INSN (end) == bb_note)
326 end = bb_note;
328 BB_HEAD (bb) = head;
329 BB_END (bb) = end;
330 bb->index = last_basic_block++;
331 bb->flags = BB_NEW | BB_RTL;
332 link_block (bb, after);
333 SET_BASIC_BLOCK (bb->index, bb);
334 df_bb_refs_record (bb->index, false);
335 update_bb_for_insn (bb);
336 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
338 /* Tag the block so that we know it has been used when considering
339 other basic block notes. */
340 bb->aux = bb;
342 return bb;
345 /* Create new basic block consisting of instructions in between HEAD and END
346 and place it to the BB chain after block AFTER. END can be NULL to
347 create a new empty basic block before HEAD. Both END and HEAD can be
348 NULL to create basic block at the end of INSN chain. */
350 static basic_block
351 rtl_create_basic_block (void *headp, void *endp, basic_block after)
353 rtx head = (rtx) headp, end = (rtx) endp;
354 basic_block bb;
356 /* Grow the basic block array if needed. */
357 if ((size_t) last_basic_block >= basic_block_info->length ())
359 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
360 vec_safe_grow_cleared (basic_block_info, new_size);
363 n_basic_blocks++;
365 bb = create_basic_block_structure (head, end, NULL, after);
366 bb->aux = NULL;
367 return bb;
370 static basic_block
371 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
373 basic_block newbb = rtl_create_basic_block (head, end, after);
375 return newbb;
378 /* Delete the insns in a (non-live) block. We physically delete every
379 non-deleted-note insn, and update the flow graph appropriately.
381 Return nonzero if we deleted an exception handler. */
383 /* ??? Preserving all such notes strikes me as wrong. It would be nice
384 to post-process the stream to remove empty blocks, loops, ranges, etc. */
386 static void
387 rtl_delete_block (basic_block b)
389 rtx insn, end;
391 /* If the head of this block is a CODE_LABEL, then it might be the
392 label for an exception handler which can't be reached. We need
393 to remove the label from the exception_handler_label list. */
394 insn = BB_HEAD (b);
396 end = get_last_bb_insn (b);
398 /* Selectively delete the entire chain. */
399 BB_HEAD (b) = NULL;
400 delete_insn_chain (insn, end, true);
403 if (dump_file)
404 fprintf (dump_file, "deleting block %d\n", b->index);
405 df_bb_delete (b->index);
408 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
410 void
411 compute_bb_for_insn (void)
413 basic_block bb;
415 FOR_EACH_BB (bb)
417 rtx end = BB_END (bb);
418 rtx insn;
420 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
422 BLOCK_FOR_INSN (insn) = bb;
423 if (insn == end)
424 break;
429 /* Release the basic_block_for_insn array. */
431 unsigned int
432 free_bb_for_insn (void)
434 rtx insn;
435 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
436 if (!BARRIER_P (insn))
437 BLOCK_FOR_INSN (insn) = NULL;
438 return 0;
static unsigned int
rest_of_pass_free_cfg (void)
{
#ifdef DELAY_SLOTS
  /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
     valid at that point so it would be too late to call df_analyze.  */
  if (optimize > 0 && flag_delayed_branch)
    {
      df_note_add_problem ();
      df_analyze ();
    }
#endif

  free_bb_for_insn ();
  return 0;
}
458 struct rtl_opt_pass pass_free_cfg =
461 RTL_PASS,
462 "*free_cfg", /* name */
463 OPTGROUP_NONE, /* optinfo_flags */
464 NULL, /* gate */
465 rest_of_pass_free_cfg, /* execute */
466 NULL, /* sub */
467 NULL, /* next */
468 0, /* static_pass_number */
469 TV_NONE, /* tv_id */
470 0, /* properties_required */
471 0, /* properties_provided */
472 PROP_cfg, /* properties_destroyed */
473 0, /* todo_flags_start */
474 0, /* todo_flags_finish */
478 /* Return RTX to emit after when we want to emit code on the entry of function. */
480 entry_of_function (void)
482 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
483 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
486 /* Emit INSN at the entry point of the function, ensuring that it is only
487 executed once per function. */
488 void
489 emit_insn_at_entry (rtx insn)
491 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
492 edge e = ei_safe_edge (ei);
493 gcc_assert (e->flags & EDGE_FALLTHRU);
495 insert_insn_on_edge (insn, e);
496 commit_edge_insertions ();
499 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
500 (or BARRIER if found) and notify df of the bb change.
501 The insn chain range is inclusive
502 (i.e. both BEGIN and END will be updated. */
504 static void
505 update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
507 rtx insn;
509 end = NEXT_INSN (end);
510 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
511 if (!BARRIER_P (insn))
512 df_insn_change_bb (insn, bb);
515 /* Update BLOCK_FOR_INSN of insns in BB to BB,
516 and notify df of the change. */
518 void
519 update_bb_for_insn (basic_block bb)
521 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
525 /* Like active_insn_p, except keep the return value clobber around
526 even after reload. */
528 static bool
529 flow_active_insn_p (const_rtx insn)
531 if (active_insn_p (insn))
532 return true;
534 /* A clobber of the function return value exists for buggy
535 programs that fail to return a value. Its effect is to
536 keep the return value from being live across the entire
537 function. If we allow it to be skipped, we introduce the
538 possibility for register lifetime confusion. */
539 if (GET_CODE (PATTERN (insn)) == CLOBBER
540 && REG_P (XEXP (PATTERN (insn), 0))
541 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
542 return true;
544 return false;
547 /* Return true if the block has no effect and only forwards control flow to
548 its single destination. */
550 bool
551 contains_no_active_insn_p (const_basic_block bb)
553 rtx insn;
555 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
556 || !single_succ_p (bb))
557 return false;
559 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
560 if (INSN_P (insn) && flow_active_insn_p (insn))
561 return false;
563 return (!INSN_P (insn)
564 || (JUMP_P (insn) && simplejump_p (insn))
565 || !flow_active_insn_p (insn));
568 /* Likewise, but protect loop latches, headers and preheaders. */
569 /* FIXME: Make this a cfg hook. */
571 bool
572 forwarder_block_p (const_basic_block bb)
574 if (!contains_no_active_insn_p (bb))
575 return false;
577 /* Protect loop latches, headers and preheaders. */
578 if (current_loops)
580 basic_block dest;
581 if (bb->loop_father->header == bb)
582 return false;
583 dest = EDGE_SUCC (bb, 0)->dest;
584 if (dest->loop_father->header == dest)
585 return false;
588 return true;
591 /* Return nonzero if we can reach target from src by falling through. */
592 /* FIXME: Make this a cfg hook. */
594 bool
595 can_fallthru (basic_block src, basic_block target)
597 rtx insn = BB_END (src);
598 rtx insn2;
599 edge e;
600 edge_iterator ei;
602 if (target == EXIT_BLOCK_PTR)
603 return true;
604 if (src->next_bb != target)
605 return 0;
606 FOR_EACH_EDGE (e, ei, src->succs)
607 if (e->dest == EXIT_BLOCK_PTR
608 && e->flags & EDGE_FALLTHRU)
609 return 0;
611 insn2 = BB_HEAD (target);
612 if (insn2 && !active_insn_p (insn2))
613 insn2 = next_active_insn (insn2);
615 /* ??? Later we may add code to move jump tables offline. */
616 return next_active_insn (insn) == insn2;
619 /* Return nonzero if we could reach target from src by falling through,
620 if the target was made adjacent. If we already have a fall-through
621 edge to the exit block, we can't do that. */
622 static bool
623 could_fall_through (basic_block src, basic_block target)
625 edge e;
626 edge_iterator ei;
628 if (target == EXIT_BLOCK_PTR)
629 return true;
630 FOR_EACH_EDGE (e, ei, src->succs)
631 if (e->dest == EXIT_BLOCK_PTR
632 && e->flags & EDGE_FALLTHRU)
633 return 0;
634 return true;
637 /* Return the NOTE_INSN_BASIC_BLOCK of BB. */
639 bb_note (basic_block bb)
641 rtx note;
643 note = BB_HEAD (bb);
644 if (LABEL_P (note))
645 note = NEXT_INSN (note);
647 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
648 return note;
651 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
652 note associated with the BLOCK. */
654 static rtx
655 first_insn_after_basic_block_note (basic_block block)
657 rtx insn;
659 /* Get the first instruction in the block. */
660 insn = BB_HEAD (block);
662 if (insn == NULL_RTX)
663 return NULL_RTX;
664 if (LABEL_P (insn))
665 insn = NEXT_INSN (insn);
666 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
668 return NEXT_INSN (insn);
671 /* Creates a new basic block just after basic block B by splitting
672 everything after specified instruction I. */
674 static basic_block
675 rtl_split_block (basic_block bb, void *insnp)
677 basic_block new_bb;
678 rtx insn = (rtx) insnp;
679 edge e;
680 edge_iterator ei;
682 if (!insn)
684 insn = first_insn_after_basic_block_note (bb);
686 if (insn)
688 rtx next = insn;
690 insn = PREV_INSN (insn);
692 /* If the block contains only debug insns, insn would have
693 been NULL in a non-debug compilation, and then we'd end
694 up emitting a DELETED note. For -fcompare-debug
695 stability, emit the note too. */
696 if (insn != BB_END (bb)
697 && DEBUG_INSN_P (next)
698 && DEBUG_INSN_P (BB_END (bb)))
700 while (next != BB_END (bb) && DEBUG_INSN_P (next))
701 next = NEXT_INSN (next);
703 if (next == BB_END (bb))
704 emit_note_after (NOTE_INSN_DELETED, next);
707 else
708 insn = get_last_insn ();
711 /* We probably should check type of the insn so that we do not create
712 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
713 bother. */
714 if (insn == BB_END (bb))
715 emit_note_after (NOTE_INSN_DELETED, insn);
717 /* Create the new basic block. */
718 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
719 BB_COPY_PARTITION (new_bb, bb);
720 BB_END (bb) = insn;
722 /* Redirect the outgoing edges. */
723 new_bb->succs = bb->succs;
724 bb->succs = NULL;
725 FOR_EACH_EDGE (e, ei, new_bb->succs)
726 e->src = new_bb;
728 /* The new block starts off being dirty. */
729 df_set_bb_dirty (bb);
730 return new_bb;
733 /* Return true if the single edge between blocks A and B is the only place
734 in RTL which holds some unique locus. */
736 static bool
737 unique_locus_on_edge_between_p (basic_block a, basic_block b)
739 const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
740 rtx insn, end;
742 if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
743 return false;
745 /* First scan block A backward. */
746 insn = BB_END (a);
747 end = PREV_INSN (BB_HEAD (a));
748 while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
749 insn = PREV_INSN (insn);
751 if (insn != end && INSN_LOCATION (insn) == goto_locus)
752 return false;
754 /* Then scan block B forward. */
755 insn = BB_HEAD (b);
756 if (insn)
758 end = NEXT_INSN (BB_END (b));
759 while (insn != end && !NONDEBUG_INSN_P (insn))
760 insn = NEXT_INSN (insn);
762 if (insn != end && INSN_HAS_LOCATION (insn)
763 && INSN_LOCATION (insn) == goto_locus)
764 return false;
767 return true;
770 /* If the single edge between blocks A and B is the only place in RTL which
771 holds some unique locus, emit a nop with that locus between the blocks. */
773 static void
774 emit_nop_for_unique_locus_between (basic_block a, basic_block b)
776 if (!unique_locus_on_edge_between_p (a, b))
777 return;
779 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
780 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
783 /* Blocks A and B are to be merged into a single block A. The insns
784 are already contiguous. */
786 static void
787 rtl_merge_blocks (basic_block a, basic_block b)
789 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
790 rtx del_first = NULL_RTX, del_last = NULL_RTX;
791 rtx b_debug_start = b_end, b_debug_end = b_end;
792 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
793 int b_empty = 0;
795 if (dump_file)
796 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
797 a->index);
799 while (DEBUG_INSN_P (b_end))
800 b_end = PREV_INSN (b_debug_start = b_end);
802 /* If there was a CODE_LABEL beginning B, delete it. */
803 if (LABEL_P (b_head))
805 /* Detect basic blocks with nothing but a label. This can happen
806 in particular at the end of a function. */
807 if (b_head == b_end)
808 b_empty = 1;
810 del_first = del_last = b_head;
811 b_head = NEXT_INSN (b_head);
814 /* Delete the basic block note and handle blocks containing just that
815 note. */
816 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
818 if (b_head == b_end)
819 b_empty = 1;
820 if (! del_last)
821 del_first = b_head;
823 del_last = b_head;
824 b_head = NEXT_INSN (b_head);
827 /* If there was a jump out of A, delete it. */
828 if (JUMP_P (a_end))
830 rtx prev;
832 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
833 if (!NOTE_P (prev)
834 || NOTE_INSN_BASIC_BLOCK_P (prev)
835 || prev == BB_HEAD (a))
836 break;
838 del_first = a_end;
840 #ifdef HAVE_cc0
841 /* If this was a conditional jump, we need to also delete
842 the insn that set cc0. */
843 if (only_sets_cc0_p (prev))
845 rtx tmp = prev;
847 prev = prev_nonnote_insn (prev);
848 if (!prev)
849 prev = BB_HEAD (a);
850 del_first = tmp;
852 #endif
854 a_end = PREV_INSN (del_first);
856 else if (BARRIER_P (NEXT_INSN (a_end)))
857 del_first = NEXT_INSN (a_end);
859 /* Delete everything marked above as well as crap that might be
860 hanging out between the two blocks. */
861 BB_END (a) = a_end;
862 BB_HEAD (b) = b_empty ? NULL_RTX : b_head;
863 delete_insn_chain (del_first, del_last, true);
865 /* When not optimizing CFG and the edge is the only place in RTL which holds
866 some unique locus, emit a nop with that locus in between. */
867 if (!optimize)
869 emit_nop_for_unique_locus_between (a, b);
870 a_end = BB_END (a);
873 /* Reassociate the insns of B with A. */
874 if (!b_empty)
876 update_bb_for_insn_chain (a_end, b_debug_end, a);
878 BB_END (a) = b_debug_end;
879 BB_HEAD (b) = NULL_RTX;
881 else if (b_end != b_debug_end)
883 /* Move any deleted labels and other notes between the end of A
884 and the debug insns that make up B after the debug insns,
885 bringing the debug insns into A while keeping the notes after
886 the end of A. */
887 if (NEXT_INSN (a_end) != b_debug_start)
888 reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
889 b_debug_end);
890 update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
891 BB_END (a) = b_debug_end;
894 df_bb_delete (b->index);
896 /* If B was a forwarder block, propagate the locus on the edge. */
897 if (forwarder_p
898 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
899 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
901 if (dump_file)
902 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
906 /* Return true when block A and B can be merged. */
908 static bool
909 rtl_can_merge_blocks (basic_block a, basic_block b)
911 /* If we are partitioning hot/cold basic blocks, we don't want to
912 mess up unconditional or indirect jumps that cross between hot
913 and cold sections.
915 Basic block partitioning may result in some jumps that appear to
916 be optimizable (or blocks that appear to be mergeable), but which really
917 must be left untouched (they are required to make it safely across
918 partition boundaries). See the comments at the top of
919 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
921 if (BB_PARTITION (a) != BB_PARTITION (b))
922 return false;
924 /* Protect the loop latches. */
925 if (current_loops && b->loop_father->latch == b)
926 return false;
928 /* There must be exactly one edge in between the blocks. */
929 return (single_succ_p (a)
930 && single_succ (a) == b
931 && single_pred_p (b)
932 && a != b
933 /* Must be simple edge. */
934 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
935 && a->next_bb == b
936 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
937 /* If the jump insn has side effects,
938 we can't kill the edge. */
939 && (!JUMP_P (BB_END (a))
940 || (reload_completed
941 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
944 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
945 exist. */
948 block_label (basic_block block)
950 if (block == EXIT_BLOCK_PTR)
951 return NULL_RTX;
953 if (!LABEL_P (BB_HEAD (block)))
955 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
958 return BB_HEAD (block);
961 /* Attempt to perform edge redirection by replacing possibly complex jump
962 instruction by unconditional jump or removing jump completely. This can
963 apply only if all edges now point to the same block. The parameters and
964 return values are equivalent to redirect_edge_and_branch. */
966 edge
967 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
969 basic_block src = e->src;
970 rtx insn = BB_END (src), kill_from;
971 rtx set;
972 int fallthru = 0;
974 /* If we are partitioning hot/cold basic blocks, we don't want to
975 mess up unconditional or indirect jumps that cross between hot
976 and cold sections.
978 Basic block partitioning may result in some jumps that appear to
979 be optimizable (or blocks that appear to be mergeable), but which really
980 must be left untouched (they are required to make it safely across
981 partition boundaries). See the comments at the top of
982 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
984 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
985 || BB_PARTITION (src) != BB_PARTITION (target))
986 return NULL;
988 /* We can replace or remove a complex jump only when we have exactly
989 two edges. Also, if we have exactly one outgoing edge, we can
990 redirect that. */
991 if (EDGE_COUNT (src->succs) >= 3
992 /* Verify that all targets will be TARGET. Specifically, the
993 edge that is not E must also go to TARGET. */
994 || (EDGE_COUNT (src->succs) == 2
995 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
996 return NULL;
998 if (!onlyjump_p (insn))
999 return NULL;
1000 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
1001 return NULL;
1003 /* Avoid removing branch with side effects. */
1004 set = single_set (insn);
1005 if (!set || side_effects_p (set))
1006 return NULL;
1008 /* In case we zap a conditional jump, we'll need to kill
1009 the cc0 setter too. */
1010 kill_from = insn;
1011 #ifdef HAVE_cc0
1012 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
1013 && only_sets_cc0_p (PREV_INSN (insn)))
1014 kill_from = PREV_INSN (insn);
1015 #endif
1017 /* See if we can create the fallthru edge. */
1018 if (in_cfglayout || can_fallthru (src, target))
1020 if (dump_file)
1021 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
1022 fallthru = 1;
1024 /* Selectively unlink whole insn chain. */
1025 if (in_cfglayout)
1027 rtx insn = BB_FOOTER (src);
1029 delete_insn_chain (kill_from, BB_END (src), false);
1031 /* Remove barriers but keep jumptables. */
1032 while (insn)
1034 if (BARRIER_P (insn))
1036 if (PREV_INSN (insn))
1037 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1038 else
1039 BB_FOOTER (src) = NEXT_INSN (insn);
1040 if (NEXT_INSN (insn))
1041 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1043 if (LABEL_P (insn))
1044 break;
1045 insn = NEXT_INSN (insn);
1048 else
1049 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
1050 false);
1053 /* If this already is simplejump, redirect it. */
1054 else if (simplejump_p (insn))
1056 if (e->dest == target)
1057 return NULL;
1058 if (dump_file)
1059 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
1060 INSN_UID (insn), e->dest->index, target->index);
1061 if (!redirect_jump (insn, block_label (target), 0))
1063 gcc_assert (target == EXIT_BLOCK_PTR);
1064 return NULL;
1068 /* Cannot do anything for target exit block. */
1069 else if (target == EXIT_BLOCK_PTR)
1070 return NULL;
1072 /* Or replace possibly complicated jump insn by simple jump insn. */
1073 else
1075 rtx target_label = block_label (target);
1076 rtx barrier, label, table;
1078 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
1079 JUMP_LABEL (BB_END (src)) = target_label;
1080 LABEL_NUSES (target_label)++;
1081 if (dump_file)
1082 fprintf (dump_file, "Replacing insn %i by jump %i\n",
1083 INSN_UID (insn), INSN_UID (BB_END (src)));
1086 delete_insn_chain (kill_from, insn, false);
1088 /* Recognize a tablejump that we are converting to a
1089 simple jump and remove its associated CODE_LABEL
1090 and ADDR_VEC or ADDR_DIFF_VEC. */
1091 if (tablejump_p (insn, &label, &table))
1092 delete_insn_chain (label, table, false);
1094 barrier = next_nonnote_insn (BB_END (src));
1095 if (!barrier || !BARRIER_P (barrier))
1096 emit_barrier_after (BB_END (src));
1097 else
1099 if (barrier != NEXT_INSN (BB_END (src)))
1101 /* Move the jump before barrier so that the notes
1102 which originally were or were created before jump table are
1103 inside the basic block. */
1104 rtx new_insn = BB_END (src);
1106 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
1107 PREV_INSN (barrier), src);
1109 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
1110 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
1112 NEXT_INSN (new_insn) = barrier;
1113 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
1115 PREV_INSN (new_insn) = PREV_INSN (barrier);
1116 PREV_INSN (barrier) = new_insn;
1121 /* Keep only one edge out and set proper flags. */
1122 if (!single_succ_p (src))
1123 remove_edge (e);
1124 gcc_assert (single_succ_p (src));
1126 e = single_succ_edge (src);
1127 if (fallthru)
1128 e->flags = EDGE_FALLTHRU;
1129 else
1130 e->flags = 0;
1132 e->probability = REG_BR_PROB_BASE;
1133 e->count = src->count;
1135 if (e->dest != target)
1136 redirect_edge_succ (e, target);
1137 return e;
1140 /* Subroutine of redirect_branch_edge that tries to patch the jump
1141 instruction INSN so that it reaches block NEW. Do this
1142 only when it originally reached block OLD. Return true if this
1143 worked or the original target wasn't OLD, return false if redirection
1144 doesn't work. */
1146 static bool
1147 patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
1149 rtx tmp;
1150 /* Recognize a tablejump and adjust all matching cases. */
1151 if (tablejump_p (insn, NULL, &tmp))
1153 rtvec vec;
1154 int j;
1155 rtx new_label = block_label (new_bb);
/* The exit block has no label to redirect to.  */
1157 if (new_bb == EXIT_BLOCK_PTR)
1158 return false;
/* The dispatch table is either an ADDR_VEC (operand 0) or an
   ADDR_DIFF_VEC (labels in operand 1).  */
1159 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1160 vec = XVEC (PATTERN (tmp), 0);
1161 else
1162 vec = XVEC (PATTERN (tmp), 1);
/* Rewrite every table entry that referenced OLD_LABEL, keeping
   LABEL_NUSES counts in sync with each replaced reference.  */
1164 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1165 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1167 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1168 --LABEL_NUSES (old_label);
1169 ++LABEL_NUSES (new_label);
1172 /* Handle casesi dispatch insns. */
1173 if ((tmp = single_set (insn)) != NULL
1174 && SET_DEST (tmp) == pc_rtx
1175 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1176 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1177 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
/* The default (out-of-range) label of the casesi also needs
   redirecting when it pointed at OLD_LABEL.  */
1179 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
1180 new_label);
1181 --LABEL_NUSES (old_label);
1182 ++LABEL_NUSES (new_label);
/* asm goto: patch the ASM_OPERANDS label vector and the associated
   REG_LABEL_TARGET / REG_LABEL_OPERAND notes.  */
1185 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
1187 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
1188 rtx new_label, note;
1190 if (new_bb == EXIT_BLOCK_PTR)
1191 return false;
1192 new_label = block_label (new_bb);
1194 for (i = 0; i < n; ++i)
1196 rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
1197 gcc_assert (GET_CODE (old_ref) == LABEL_REF);
1198 if (XEXP (old_ref, 0) == old_label)
1200 ASM_OPERANDS_LABEL (tmp, i)
1201 = gen_rtx_LABEL_REF (Pmode, new_label);
1202 --LABEL_NUSES (old_label);
1203 ++LABEL_NUSES (new_label);
/* Keep JUMP_LABEL and the REG_LABEL_TARGET note consistent: the
   label recorded in JUMP_LABEL must not also appear as a note.  */
1207 if (JUMP_LABEL (insn) == old_label)
1209 JUMP_LABEL (insn) = new_label;
1210 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1211 if (note)
1212 remove_note (insn, note);
1214 else
1216 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1217 if (note)
1218 remove_note (insn, note);
1219 if (JUMP_LABEL (insn) != new_label
1220 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1221 add_reg_note (insn, REG_LABEL_TARGET, new_label);
/* OLD_LABEL may also appear in operand-reference notes; retarget
   all of them.  */
1223 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1224 != NULL_RTX)
1225 XEXP (note, 0) = new_label;
/* Ordinary (un)conditional jumps fall through to redirect_jump.  */
1227 else
1229 /* ?? We may play the games with moving the named labels from
1230 one basic block to the other in case only one computed_jump is
1231 available. */
1232 if (computed_jump_p (insn)
1233 /* A return instruction can't be redirected. */
1234 || returnjump_p (insn))
1235 return false;
1237 if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
1239 /* If the insn doesn't go where we think, we're confused. */
1240 gcc_assert (JUMP_LABEL (insn) == old_label);
1242 /* If the substitution doesn't succeed, die. This can happen
1243 if the back end emitted unrecognizable instructions or if
1244 target is exit block on some arches. */
1245 if (!redirect_jump (insn, block_label (new_bb), 0))
1247 gcc_assert (new_bb == EXIT_BLOCK_PTR)
1248 return false;
1252 return true;
1256 /* Redirect edge representing branch of (un)conditional jump or tablejump,
1257 NULL on failure */
1258 static edge
1259 redirect_branch_edge (edge e, basic_block target)
1261 rtx old_label = BB_HEAD (e->dest);
1262 basic_block src = e->src;
1263 rtx insn = BB_END (src);
1265 /* We can only redirect non-fallthru edges of jump insn. */
1266 if (e->flags & EDGE_FALLTHRU)
1267 return NULL;
1268 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1269 return NULL;
1271 if (!currently_expanding_to_rtl)
1273 if (!patch_jump_insn (insn, old_label, target))
1274 return NULL;
1276 else
1277 /* When expanding this BB might actually contain multiple
1278 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1279 Redirect all of those that match our label. */
1280 FOR_BB_INSNS (src, insn)
1281 if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
1282 return NULL;
1284 if (dump_file)
1285 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
1286 e->src->index, e->dest->index, target->index);
/* The insn stream is fixed up; now update the CFG edge itself,
   merging with any pre-existing SRC->TARGET edge.  */
1288 if (e->dest != target)
1289 e = redirect_edge_succ_nodup (e, target);
1291 return e;
1294 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
1295 expense of adding new instructions or reordering basic blocks.
1297 Function can be also called with edge destination equivalent to the TARGET.
1298 Then it should try the simplifications and do nothing if none is possible.
1300 Return edge representing the branch if transformation succeeded. Return NULL
1301 on failure.
1302 We still return NULL in case E already destinated TARGET and we didn't
1303 managed to simplify instruction stream. */
1305 static edge
1306 rtl_redirect_edge_and_branch (edge e, basic_block target)
1308 edge ret;
1309 basic_block src = e->src;
/* Abnormal-call and EH edges cannot be redirected by patching jumps.  */
1311 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1312 return NULL;
1314 if (e->dest == target)
1315 return e;
/* First try the cheap route: replace the whole jump by a simple jump
   (or a fallthru); fall back to patching the existing jump insn.  */
1317 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1319 df_set_bb_dirty (src);
1320 return ret;
1323 ret = redirect_branch_edge (e, target);
1324 if (!ret)
1325 return NULL;
/* The source block changed; tell DF its solutions are stale.  */
1327 df_set_bb_dirty (src);
1328 return ret;
1331 /* Like force_nonfallthru below, but additionally performs redirection
1332 Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
1333 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1334 simple_return_rtx, indicating which kind of returnjump to create.
1335 It should be NULL otherwise. */
1337 basic_block
1338 force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1340 basic_block jump_block, new_bb = NULL, src = e->src;
1341 rtx note;
1342 edge new_edge;
1343 int abnormal_edge_flags = 0;
1344 bool asm_goto_edge = false;
1345 int loc;
1347 /* In the case the last instruction is conditional jump to the next
1348 instruction, first redirect the jump itself and then continue
1349 by creating a basic block afterwards to redirect fallthru edge. */
1350 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1351 && any_condjump_p (BB_END (e->src))
1352 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1354 rtx note;
1355 edge b = unchecked_make_edge (e->src, target, 0);
1356 bool redirected;
1358 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1359 gcc_assert (redirected);
/* Split the probability/count of E between E and the new branch
   edge B according to the REG_BR_PROB note, clamping at zero.  */
1361 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1362 if (note)
1364 int prob = INTVAL (XEXP (note, 0));
1366 b->probability = prob;
1367 b->count = e->count * prob / REG_BR_PROB_BASE;
1368 e->probability -= e->probability;
1369 e->count -= b->count;
1370 if (e->probability < 0)
1371 e->probability = 0;
1372 if (e->count < 0)
1373 e->count = 0;
1377 if (e->flags & EDGE_ABNORMAL)
1379 /* Irritating special case - fallthru edge to the same block as abnormal
1380 edge.
1381 We can't redirect abnormal edge, but we still can split the fallthru
1382 one and create separate abnormal edge to original destination.
1383 This allows bb-reorder to make such edge non-fallthru. */
1384 gcc_assert (e->dest == target);
1385 abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1386 e->flags &= EDGE_FALLTHRU;
1388 else
1390 gcc_assert (e->flags & EDGE_FALLTHRU);
1391 if (e->src == ENTRY_BLOCK_PTR)
1393 /* We can't redirect the entry block. Create an empty block
1394 at the start of the function which we use to add the new
1395 jump. */
1396 edge tmp;
1397 edge_iterator ei;
1398 bool found = false;
1400 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1402 /* Change the existing edge's source to be the new block, and add
1403 a new edge from the entry block to the new block. */
1404 e->src = bb;
1405 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1407 if (tmp == e)
1409 ENTRY_BLOCK_PTR->succs->unordered_remove (ei.index);
1410 found = true;
1411 break;
1413 else
1414 ei_next (&ei);
1417 gcc_assert (found);
1419 vec_safe_push (bb->succs, e);
1420 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1424 /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
1425 don't point to the target or fallthru label. */
1426 if (JUMP_P (BB_END (e->src))
1427 && target != EXIT_BLOCK_PTR
1428 && (e->flags & EDGE_FALLTHRU)
1429 && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
1431 int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
1432 bool adjust_jump_target = false;
1434 for (i = 0; i < n; ++i)
1436 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
1438 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
1439 XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1440 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
1441 adjust_jump_target = true;
1443 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
1444 asm_goto_edge = true;
/* We rewrote a label in the asm goto; also fix JUMP_LABEL and the
   REG_LABEL_TARGET / REG_LABEL_OPERAND notes (mirrors the logic in
   patch_jump_insn above).  */
1446 if (adjust_jump_target)
1448 rtx insn = BB_END (e->src), note;
1449 rtx old_label = BB_HEAD (e->dest);
1450 rtx new_label = BB_HEAD (target);
1452 if (JUMP_LABEL (insn) == old_label)
1454 JUMP_LABEL (insn) = new_label;
1455 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1456 if (note)
1457 remove_note (insn, note);
1459 else
1461 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1462 if (note)
1463 remove_note (insn, note);
1464 if (JUMP_LABEL (insn) != new_label
1465 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1466 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1468 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1469 != NULL_RTX)
1470 XEXP (note, 0) = new_label;
/* When the source block has other successors (or special edges), a new
   block must be created to hold the jump; otherwise the jump can be
   placed directly in E->src.  */
1474 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1476 gcov_type count = e->count;
1477 int probability = e->probability;
1478 /* Create the new structures. */
1480 /* If the old block ended with a tablejump, skip its table
1481 by searching forward from there. Otherwise start searching
1482 forward from the last instruction of the old block. */
1483 if (!tablejump_p (BB_END (e->src), NULL, &note))
1484 note = BB_END (e->src);
1485 note = NEXT_INSN (note);
1487 jump_block = create_basic_block (note, NULL, e->src);
1488 jump_block->count = count;
1489 jump_block->frequency = EDGE_FREQUENCY (e);
1491 /* Make sure new block ends up in correct hot/cold section. */
1493 BB_COPY_PARTITION (jump_block, e->src);
1494 if (flag_reorder_blocks_and_partition
1495 && targetm_common.have_named_sections
1496 && JUMP_P (BB_END (jump_block))
1497 && !any_condjump_p (BB_END (jump_block))
1498 && (EDGE_SUCC (jump_block, 0)->flags & EDGE_CROSSING))
1499 add_reg_note (BB_END (jump_block), REG_CROSSING_JUMP, NULL_RTX);
1501 /* Wire edge in. */
1502 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1503 new_edge->probability = probability;
1504 new_edge->count = count;
1506 /* Redirect old edge. */
1507 redirect_edge_pred (e, jump_block);
1508 e->probability = REG_BR_PROB_BASE;
1510 /* If asm goto has any label refs to target's label,
1511 add also edge from asm goto bb to target. */
1512 if (asm_goto_edge)
/* Split counts evenly between the fallthru path and the direct
   asm-goto edge to TARGET.  */
1514 new_edge->probability /= 2;
1515 new_edge->count /= 2;
1516 jump_block->count /= 2;
1517 jump_block->frequency /= 2;
1518 new_edge = make_edge (new_edge->src, target,
1519 e->flags & ~EDGE_FALLTHRU);
1520 new_edge->probability = probability - probability / 2;
1521 new_edge->count = count - count / 2;
1524 new_bb = jump_block;
1526 else
1527 jump_block = e->src;
/* Emit the actual jump: a returnjump of the requested kind for the
   exit block, otherwise an unconditional jump to TARGET's label.  */
1529 loc = e->goto_locus;
1530 e->flags &= ~EDGE_FALLTHRU;
1531 if (target == EXIT_BLOCK_PTR)
1533 if (jump_label == ret_rtx)
1535 #ifdef HAVE_return
1536 emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
1537 #else
1538 gcc_unreachable ();
1539 #endif
1541 else
1543 gcc_assert (jump_label == simple_return_rtx);
1544 #ifdef HAVE_simple_return
1545 emit_jump_insn_after_setloc (gen_simple_return (),
1546 BB_END (jump_block), loc);
1547 #else
1548 gcc_unreachable ();
1549 #endif
1551 set_return_jump_label (BB_END (jump_block));
1553 else
1555 rtx label = block_label (target);
1556 emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
1557 JUMP_LABEL (BB_END (jump_block)) = label;
1558 LABEL_NUSES (label)++;
1561 emit_barrier_after (BB_END (jump_block));
1562 redirect_edge_succ_nodup (e, target);
/* Re-create the abnormal edge saved in the special case above.  */
1564 if (abnormal_edge_flags)
1565 make_edge (src, target, abnormal_edge_flags);
1567 df_mark_solutions_dirty ();
1568 return new_bb;
1571 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1572 (and possibly create new basic block) to make edge non-fallthru.
1573 Return newly created BB or NULL if none. */
1575 static basic_block
1576 rtl_force_nonfallthru (edge e)
/* Thin wrapper: keep the current destination, no return label needed.  */
1578 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1581 /* Redirect edge even at the expense of creating new jump insn or
1582 basic block. Return new basic block if created, NULL otherwise.
1583 Conversion must be possible. */
1585 static basic_block
1586 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
/* Cheap redirection first; NULL means no new block was needed.  */
1588 if (redirect_edge_and_branch (e, target)
1589 || e->dest == target)
1590 return NULL;
1592 /* In case the edge redirection failed, try to force it to be non-fallthru
1593 and redirect newly created simplejump. */
1594 df_set_bb_dirty (e->src);
1595 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1598 /* The given edge should potentially be a fallthru edge. If that is in
1599 fact true, delete the jump and barriers that are in the way. */
1601 static void
1602 rtl_tidy_fallthru_edge (edge e)
1604 rtx q;
1605 basic_block b = e->src, c = b->next_bb;
1607 /* ??? In a late-running flow pass, other folks may have deleted basic
1608 blocks by nopping out blocks, leaving multiple BARRIERs between here
1609 and the target label. They ought to be chastised and fixed.
1611 We can also wind up with a sequence of undeletable labels between
1612 one block and the next.
1614 So search through a sequence of barriers, labels, and notes for
1615 the head of block C and assert that we really do fall through. */
1617 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1618 if (INSN_P (q))
1619 return;
1621 /* Remove what will soon cease being the jump insn from the source block.
1622 If block B consisted only of this single jump, turn it into a deleted
1623 note. */
1624 q = BB_END (b)
1625 if (JUMP_P (q)
1626 && onlyjump_p (q)
1627 && (any_uncondjump_p (q)
1628 || single_succ_p (b)))
1630 #ifdef HAVE_cc0
1631 /* If this was a conditional jump, we need to also delete
1632 the insn that set cc0. */
1633 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1634 q = PREV_INSN (q);
1635 #endif
/* Step back before the jump so delete_insn_chain below removes it.  */
1637 q = PREV_INSN (q);
1640 /* Selectively unlink the sequence. */
1641 if (q != PREV_INSN (BB_HEAD (c)))
1642 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1644 e->flags |= EDGE_FALLTHRU;
1647 /* Should move basic block BB after basic block AFTER. NIY. */
1649 static bool
1650 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1651 basic_block after ATTRIBUTE_UNUSED)
/* Not implemented for plain RTL mode; always report failure.  */
1653 return false;
1656 /* Split a (typically critical) edge. Return the new block.
1657 The edge must not be abnormal.
1659 ??? The code generally expects to be called on critical edges.
1660 The case of a block ending in an unconditional jump to a
1661 block with multiple predecessors is not handled optimally. */
1663 static basic_block
1664 rtl_split_edge (edge edge_in)
1666 basic_block bb;
1667 rtx before;
1669 /* Abnormal edges cannot be split. */
1670 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1672 /* We are going to place the new block in front of edge destination.
1673 Avoid existence of fallthru predecessors. */
1674 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1676 edge e = find_fallthru_edge (edge_in->dest->preds);
1678 if (e)
1679 force_nonfallthru (e);
1682 /* Create the basic block note. */
1683 if (edge_in->dest != EXIT_BLOCK_PTR)
1684 before = BB_HEAD (edge_in->dest);
1685 else
1686 before = NULL_RTX;
1688 /* If this is a fall through edge to the exit block, the blocks might be
1689 not adjacent, and the right place is after the source. */
1690 if ((edge_in->flags & EDGE_FALLTHRU) && edge_in->dest == EXIT_BLOCK_PTR)
1692 before = NEXT_INSN (BB_END (edge_in->src));
1693 bb = create_basic_block (before, NULL, edge_in->src);
1694 BB_COPY_PARTITION (bb, edge_in->src);
1696 else
1698 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1699 /* ??? Why not edge_in->dest->prev_bb here? */
1700 BB_COPY_PARTITION (bb, edge_in->dest);
/* The new block unconditionally falls through to the old destination.  */
1703 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1705 /* For non-fallthru edges, we must adjust the predecessor's
1706 jump instruction to target our new block. */
1707 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1709 edge redirected = redirect_edge_and_branch (edge_in, bb);
1710 gcc_assert (redirected);
1712 else
1714 if (edge_in->src != ENTRY_BLOCK_PTR)
1716 /* For asm goto even splitting of fallthru edge might
1717 need insn patching, as other labels might point to the
1718 old label. */
1719 rtx last = BB_END (edge_in->src);
1720 if (last
1721 && JUMP_P (last)
1722 && edge_in->dest != EXIT_BLOCK_PTR
1723 && extract_asm_operands (PATTERN (last)) != NULL_RTX
1724 && patch_jump_insn (last, before, bb))
1725 df_set_bb_dirty (edge_in->src);
/* For a fallthru edge no jump needs patching; just repoint it.  */
1727 redirect_edge_succ (edge_in, bb);
1730 return bb;
1733 /* Queue instructions for insertion on an edge between two basic blocks.
1734 The new instructions and basic blocks (if any) will not appear in the
1735 CFG until commit_edge_insertions is called. */
1737 void
1738 insert_insn_on_edge (rtx pattern, edge e)
1740 /* We cannot insert instructions on an abnormal critical edge.
1741 It will be easier to find the culprit if we die now. */
1742 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
/* Append PATTERN to any insns already queued on this edge.  */
1744 if (e->insns.r == NULL_RTX)
1745 start_sequence ();
1746 else
1747 push_to_sequence (e->insns.r);
1749 emit_insn (pattern);
1751 e->insns.r = get_insns ();
1752 end_sequence ();
1755 /* Update the CFG for the instructions queued on edge E. */
1757 void
1758 commit_one_edge_insertion (edge e)
1760 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1761 basic_block bb;
1763 /* Pull the insns off the edge now since the edge might go away. */
1764 insns = e->insns.r;
1765 e->insns.r = NULL_RTX;
1767 /* Figure out where to put these insns. If the destination has
1768 one predecessor, insert there. Except for the exit block. */
1769 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
1771 bb = e->dest;
1773 /* Get the location correct wrt a code label, and "nice" wrt
1774 a basic block note, and before everything else. */
1775 tmp = BB_HEAD (bb);
1776 if (LABEL_P (tmp))
1777 tmp = NEXT_INSN (tmp);
1778 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1779 tmp = NEXT_INSN (tmp);
1780 if (tmp == BB_HEAD (bb))
1781 before = tmp;
1782 else if (tmp)
1783 after = PREV_INSN (tmp);
1784 else
1785 after = get_last_insn ();
1788 /* If the source has one successor and the edge is not abnormal,
1789 insert there. Except for the entry block. */
1790 else if ((e->flags & EDGE_ABNORMAL) == 0
1791 && single_succ_p (e->src)
1792 && e->src != ENTRY_BLOCK_PTR)
1794 bb = e->src;
1796 /* It is possible to have a non-simple jump here. Consider a target
1797 where some forms of unconditional jumps clobber a register. This
1798 happens on the fr30 for example.
1800 We know this block has a single successor, so we can just emit
1801 the queued insns before the jump. */
1802 if (JUMP_P (BB_END (bb)))
1803 before = BB_END (bb);
1804 else
1806 /* We'd better be fallthru, or we've lost track of what's what. */
1807 gcc_assert (e->flags & EDGE_FALLTHRU);
1809 after = BB_END (bb);
1813 /* Otherwise we must split the edge. */
1814 else
1816 bb = split_edge (e);
1817 after = BB_END (bb);
/* A split across a hot/cold section boundary may need a
   REG_CROSSING_JUMP note on the new jump.  */
1819 if (flag_reorder_blocks_and_partition
1820 && targetm_common.have_named_sections
1821 && e->src != ENTRY_BLOCK_PTR
1822 && BB_PARTITION (e->src) == BB_COLD_PARTITION
1823 && !(e->flags & EDGE_CROSSING)
1824 && JUMP_P (after)
1825 && !any_condjump_p (after)
1826 && (single_succ_edge (bb)->flags & EDGE_CROSSING))
1827 add_reg_note (after, REG_CROSSING_JUMP, NULL_RTX);
1830 /* Now that we've found the spot, do the insertion. */
1831 if (before)
1833 emit_insn_before_noloc (insns, before, bb);
1834 last = prev_nonnote_insn (before);
1836 else
1837 last = emit_insn_after_noloc (insns, after, bb);
/* If we inserted a return jump, the fallthru edge to EXIT must be
   converted and a barrier emitted after the new jump.  */
1839 if (returnjump_p (last))
1841 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1842 This is not currently a problem because this only happens
1843 for the (single) epilogue, which already has a fallthru edge
1844 to EXIT. */
1846 e = single_succ_edge (bb);
1847 gcc_assert (e->dest == EXIT_BLOCK_PTR
1848 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
1850 e->flags &= ~EDGE_FALLTHRU;
1851 emit_barrier_after (last);
1853 if (before)
1854 delete_insn (before);
1856 else
1857 gcc_assert (!JUMP_P (last));
1860 /* Update the CFG for all queued instructions. */
1862 void
1863 commit_edge_insertions (void)
1865 basic_block bb;
1867 #ifdef ENABLE_CHECKING
1868 verify_flow_info ();
1869 #endif
/* Walk every block (including the entry block) and flush any insn
   sequences queued on its outgoing edges.  */
1871 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
1873 edge e;
1874 edge_iterator ei;
1876 FOR_EACH_EDGE (e, ei, bb->succs)
1877 if (e->insns.r)
1878 commit_one_edge_insertion (e);
1883 /* Print out RTL-specific basic block information (live information
1884 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
1885 documented in dumpfile.h. */
1887 static void
1888 rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
1890 rtx insn;
1891 rtx last;
1892 char *s_indent;
/* Build an indentation prefix of INDENT spaces (currently unused
   below -- NOTE(review): s_indent is computed but never printed
   in this function; verify against callers/history).  */
1894 s_indent = (char *) alloca ((size_t) indent + 1);
1895 memset (s_indent, ' ', (size_t) indent);
1896 s_indent[indent] = '\0';
/* With TDF_DETAILS and live DF info, print dataflow at block start.  */
1898 if (df && (flags & TDF_DETAILS))
1900 df_dump_top (bb, outf);
1901 putc ('\n', outf);
/* Entry/exit blocks contain no insns to print.  */
1904 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
1905 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
1906 insn = NEXT_INSN (insn))
1908 if (flags & TDF_DETAILS)
1909 df_dump_insn_top (insn, outf);
1910 if (! (flags & TDF_SLIM))
1911 print_rtl_single (outf, insn);
1912 else
1913 dump_insn_slim (outf, insn);
1914 if (flags & TDF_DETAILS)
1915 df_dump_insn_bottom (insn, outf);
1918 if (df && (flags & TDF_DETAILS))
1920 df_dump_bottom (bb, outf);
1921 putc ('\n', outf);
1926 /* Like dump_function_to_file, but for RTL. Print out dataflow information
1927 for the start of each basic block. FLAGS are the TDF_* masks documented
1928 in dumpfile.h. */
1930 void
1931 print_rtl_with_bb (FILE *outf, const_rtx rtx_first, int flags)
1933 const_rtx tmp_rtx;
1934 if (rtx_first == 0)
1935 fprintf (outf, "(nil)\n");
1936 else
1938 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
1939 int max_uid = get_max_uid ();
/* Per-UID maps: which block starts/ends at each insn, and whether an
   insn lies in zero, one, or several blocks.  */
1940 basic_block *start = XCNEWVEC (basic_block, max_uid);
1941 basic_block *end = XCNEWVEC (basic_block, max_uid);
1942 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
1943 basic_block bb;
1945 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
1946 insns, but the CFG is not maintained so the basic block info
1947 is not reliable. Therefore it's omitted from the dumps. */
1948 if (! (cfun->curr_properties & PROP_cfg))
1949 flags &= ~TDF_BLOCKS;
1951 if (df)
1952 df_dump_start (outf);
/* First pass: record block boundaries and membership per insn UID.  */
1954 if (flags & TDF_BLOCKS)
1956 FOR_EACH_BB_REVERSE (bb)
1958 rtx x;
1960 start[INSN_UID (BB_HEAD (bb))] = bb;
1961 end[INSN_UID (BB_END (bb))] = bb;
1962 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
1964 enum bb_state state = IN_MULTIPLE_BB;
1966 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
1967 state = IN_ONE_BB;
1968 in_bb_p[INSN_UID (x)] = state;
1970 if (x == BB_END (bb))
1971 break;
/* Second pass: walk the whole insn chain, emitting block headers,
   the insns themselves, and block footers.  */
1976 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
1978 if (flags & TDF_BLOCKS)
1980 bb = start[INSN_UID (tmp_rtx)];
1981 if (bb != NULL)
1983 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
1984 if (df && (flags & TDF_DETAILS))
1985 df_dump_top (bb, outf);
1988 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
1989 && !NOTE_P (tmp_rtx)
1990 && !BARRIER_P (tmp_rtx))
1991 fprintf (outf, ";; Insn is not within a basic block\n");
1992 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
1993 fprintf (outf, ";; Insn is in multiple basic blocks\n");
1996 if (flags & TDF_DETAILS)
1997 df_dump_insn_top (tmp_rtx, outf);
1998 if (! (flags & TDF_SLIM))
1999 print_rtl_single (outf, tmp_rtx);
2000 else
2001 dump_insn_slim (outf, tmp_rtx);
2002 if (flags & TDF_DETAILS)
2003 df_dump_insn_bottom (tmp_rtx, outf);
2005 if (flags & TDF_BLOCKS)
2007 bb = end[INSN_UID (tmp_rtx)];
2008 if (bb != NULL)
2010 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
2011 if (df && (flags & TDF_DETAILS))
2012 df_dump_bottom (bb, outf);
2013 putc ('\n', outf);
2018 free (start);
2019 free (end);
2020 free (in_bb_p);
2024 /* Update the branch probability of BB if a REG_BR_PROB is present. */
2026 void
2027 update_br_prob_note (basic_block bb)
2029 rtx note;
2030 if (!JUMP_P (BB_END (bb)))
2031 return;
2032 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
/* Nothing to do when no note exists or it already matches the CFG.  */
2033 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
2034 return;
2035 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
2038 /* Get the last insn associated with block BB (that includes barriers and
2039 tablejumps after BB). */
2041 get_last_bb_insn (basic_block bb)
2043 rtx tmp;
2044 rtx end = BB_END (bb);
2046 /* Include any jump table following the basic block. */
2047 if (tablejump_p (end, NULL, &tmp))
2048 end = tmp;
2050 /* Include any barriers that may follow the basic block. */
2051 tmp = next_nonnote_insn_bb (end);
2052 while (tmp && BARRIER_P (tmp))
2054 end = tmp;
2055 tmp = next_nonnote_insn_bb (end);
2058 return end;
2061 /* Verify the CFG and RTL consistency common for both underlying RTL and
2062 cfglayout RTL.
2064 Currently it does following checks:
2066 - overlapping of basic blocks
2067 - insns with wrong BLOCK_FOR_INSN pointers
2068 - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
2069 - tails of basic blocks (ensure that boundary is necessary)
2070 - scans body of the basic block for JUMP_INSN, CODE_LABEL
2071 and NOTE_INSN_BASIC_BLOCK
2072 - verify that no fall_thru edge crosses hot/cold partition boundaries
2073 - verify that there are no pending RTL branch predictions
2075 In future it can be extended check a lot of other stuff as well
2076 (reachability of basic blocks, life information, etc. etc.). */
2078 static int
2079 rtl_verify_flow_info_1 (void)
2081 rtx x;
2082 int err = 0;
2083 basic_block bb;
2085 /* Check the general integrity of the basic blocks. */
2086 FOR_EACH_BB_REVERSE (bb)
2088 rtx insn;
2090 if (!(bb->flags & BB_RTL))
2092 error ("BB_RTL flag not set for block %d", bb->index);
2093 err = 1;
/* Every insn inside the block must point back at the block ...  */
2096 FOR_BB_INSNS (bb, insn)
2097 if (BLOCK_FOR_INSN (insn) != bb)
2099 error ("insn %d basic block pointer is %d, should be %d",
2100 INSN_UID (insn),
2101 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2102 bb->index);
2103 err = 1;
/* ... while header/footer insns (cfglayout) must have a NULL one.  */
2106 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2107 if (!BARRIER_P (insn)
2108 && BLOCK_FOR_INSN (insn) != NULL)
2110 error ("insn %d in header of bb %d has non-NULL basic block",
2111 INSN_UID (insn), bb->index);
2112 err = 1;
2114 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2115 if (!BARRIER_P (insn)
2116 && BLOCK_FOR_INSN (insn) != NULL)
2118 error ("insn %d in footer of bb %d has non-NULL basic block",
2119 INSN_UID (insn), bb->index);
2120 err = 1;
2124 /* Now check the basic blocks (boundaries etc.) */
2125 FOR_EACH_BB_REVERSE (bb)
2127 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2128 int n_eh = 0, n_abnormal = 0;
2129 edge e, fallthru = NULL;
2130 rtx note;
2131 edge_iterator ei;
/* REG_BR_PROB on a conditional jump must agree with the CFG's
   branch-edge probability (unless profile info is absent).  */
2133 if (JUMP_P (BB_END (bb))
2134 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2135 && EDGE_COUNT (bb->succs) >= 2
2136 && any_condjump_p (BB_END (bb)))
2138 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
2139 && profile_status != PROFILE_ABSENT)
2141 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
2142 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
2143 err = 1;
/* Classify and count the outgoing edges, checking hot/cold
   partition-crossing invariants along the way.  */
2146 FOR_EACH_EDGE (e, ei, bb->succs)
2148 bool is_crossing;
2150 if (e->flags & EDGE_FALLTHRU)
2151 n_fallthru++, fallthru = e;
2153 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2154 && e->src != ENTRY_BLOCK_PTR
2155 && e->dest != EXIT_BLOCK_PTR);
2156 if (e->flags & EDGE_CROSSING)
2158 if (!is_crossing)
2160 error ("EDGE_CROSSING incorrectly set across same section");
2161 err = 1;
2163 if (e->flags & EDGE_FALLTHRU)
2165 error ("fallthru edge crosses section boundary in bb %i",
2166 e->src->index);
2167 err = 1;
2169 if (e->flags & EDGE_EH)
2171 error ("EH edge crosses section boundary in bb %i",
2172 e->src->index);
2173 err = 1;
2176 else if (is_crossing)
2178 error ("EDGE_CROSSING missing across section boundary");
2179 err = 1;
/* An edge with none of the "decorative" flags set is a branch.  */
2182 if ((e->flags & ~(EDGE_DFS_BACK
2183 | EDGE_CAN_FALLTHRU
2184 | EDGE_IRREDUCIBLE_LOOP
2185 | EDGE_LOOP_EXIT
2186 | EDGE_CROSSING
2187 | EDGE_PRESERVE)) == 0)
2188 n_branch++;
2190 if (e->flags & EDGE_ABNORMAL_CALL)
2191 n_abnormal_call++;
2193 if (e->flags & EDGE_SIBCALL)
2194 n_sibcall++;
2196 if (e->flags & EDGE_EH)
2197 n_eh++;
2199 if (e->flags & EDGE_ABNORMAL)
2200 n_abnormal++;
/* Cross-check the edge counts against the block's last insn.  */
2203 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2205 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2206 err = 1;
2208 if (n_eh > 1)
2210 error ("too many exception handling edges in bb %i", bb->index);
2211 err = 1;
2213 if (n_branch
2214 && (!JUMP_P (BB_END (bb))
2215 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2216 || any_condjump_p (BB_END (bb))))))
2218 error ("too many outgoing branch edges from bb %i", bb->index);
2219 err = 1;
2221 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2223 error ("fallthru edge after unconditional jump in bb %i", bb->index);
2224 err = 1;
2226 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2228 error ("wrong number of branch edges after unconditional jump"
2229 " in bb %i", bb->index);
2230 err = 1;
2232 if (n_branch != 1 && any_condjump_p (BB_END (bb))
2233 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2235 error ("wrong amount of branch edges after conditional jump"
2236 " in bb %i", bb->index);
2237 err = 1;
2239 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2241 error ("abnormal call edges for non-call insn in bb %i", bb->index);
2242 err = 1;
2244 if (n_sibcall && !CALL_P (BB_END (bb)))
2246 error ("sibcall edges for non-call insn in bb %i", bb->index);
2247 err = 1;
2249 if (n_abnormal > n_eh
2250 && !(CALL_P (BB_END (bb))
2251 && n_abnormal == n_abnormal_call + n_sibcall)
2252 && (!JUMP_P (BB_END (bb))
2253 || any_condjump_p (BB_END (bb))
2254 || any_uncondjump_p (BB_END (bb))))
2256 error ("abnormal edges for no purpose in bb %i", bb->index);
2257 err = 1;
2260 for (x = BB_HEAD (bb); x != NEXT_INSN (BB_END (bb)); x = NEXT_INSN (x))
2261 /* We may have a barrier inside a basic block before dead code
2262 elimination. There is no BLOCK_FOR_INSN field in a barrier. */
2263 if (!BARRIER_P (x) && BLOCK_FOR_INSN (x) != bb)
2265 debug_rtx (x);
2266 if (! BLOCK_FOR_INSN (x))
2267 error
2268 ("insn %d inside basic block %d but block_for_insn is NULL",
2269 INSN_UID (x), bb->index);
2270 else
2271 error
2272 ("insn %d inside basic block %d but block_for_insn is %i",
2273 INSN_UID (x), bb->index, BLOCK_FOR_INSN (x)->index);
2275 err = 1;
2278 /* OK pointers are correct. Now check the header of basic
2279 block. It ought to contain optional CODE_LABEL followed
2280 by NOTE_BASIC_BLOCK. */
2281 x = BB_HEAD (bb);
2282 if (LABEL_P (x))
2284 if (BB_END (bb) == x)
2286 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2287 bb->index);
2288 err = 1;
2291 x = NEXT_INSN (x);
2294 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2296 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2297 bb->index);
2298 err = 1;
2301 if (BB_END (bb) == x)
2302 /* Do checks for empty blocks here. */
2304 else
/* Scan the body: no stray block notes, and no control-flow insn
   before the block's last insn.  */
2305 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2307 if (NOTE_INSN_BASIC_BLOCK_P (x))
2309 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2310 INSN_UID (x), bb->index);
2311 err = 1;
2314 if (x == BB_END (bb))
2315 break;
2317 if (control_flow_insn_p (x))
2319 error ("in basic block %d:", bb->index);
2320 fatal_insn ("flow control insn inside a basic block", x);
2325 /* Clean up. */
2326 return err;
2329 /* Verify the CFG and RTL consistency common for both underlying RTL and
2330 cfglayout RTL.
2332 Currently it does following checks:
2333 - all checks of rtl_verify_flow_info_1
2334 - test head/end pointers
2335 - check that all insns are in the basic blocks
2336 (except the switch handling code, barriers and notes)
2337 - check that all returns are followed by barriers
2338 - check that all fallthru edge points to the adjacent blocks. */
2340 static int
2341 rtl_verify_flow_info (void)
2343 basic_block bb;
2344 int err = rtl_verify_flow_info_1 ();
2345 rtx x;
2346 rtx last_head = get_last_insn ();
2347 basic_block *bb_info;
2348 int num_bb_notes;
2349 const rtx rtx_first = get_insns ();
2350 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
2351 const int max_uid = get_max_uid ();
/* bb_info maps insn UID -> the basic block that claimed that insn; it is
   used below to detect an insn appearing in more than one block.  */
2353 bb_info = XCNEWVEC (basic_block, max_uid);
/* Walk the blocks in reverse so LAST_HEAD always points just past the
   insns already verified; insns between blocks are checked as we go.  */
2355 FOR_EACH_BB_REVERSE (bb)
2357 edge e;
2358 rtx head = BB_HEAD (bb);
2359 rtx end = BB_END (bb);
2361 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2363 /* Verify the end of the basic block is in the INSN chain. */
2364 if (x == end)
2365 break;
2367 /* And that the code outside of basic blocks has NULL bb field. */
2368 if (!BARRIER_P (x)
2369 && BLOCK_FOR_INSN (x) != NULL)
2371 error ("insn %d outside of basic blocks has non-NULL bb field",
2372 INSN_UID (x));
2373 err = 1;
2377 if (!x)
2379 error ("end insn %d for block %d not found in the insn stream",
2380 INSN_UID (end), bb->index);
2381 err = 1;
2384 /* Work backwards from the end to the head of the basic block
2385 to verify the head is in the RTL chain. */
2386 for (; x != NULL_RTX; x = PREV_INSN (x))
2388 /* While walking over the insn chain, verify insns appear
2389 in only one basic block. */
2390 if (bb_info[INSN_UID (x)] != NULL)
2392 error ("insn %d is in multiple basic blocks (%d and %d)",
2393 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2394 err = 1;
2397 bb_info[INSN_UID (x)] = bb;
2399 if (x == head)
2400 break;
2402 if (!x)
2404 error ("head insn %d for block %d not found in the insn stream",
2405 INSN_UID (head), bb->index);
2406 err = 1;
2409 last_head = PREV_INSN (x);
2411 e = find_fallthru_edge (bb->succs);
2412 if (!e)
2414 rtx insn;
2416 /* Ensure existence of barrier in BB with no fallthru edges. */
2417 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2419 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
2421 error ("missing barrier after block %i", bb->index);
2422 err = 1;
2423 break;
2425 if (BARRIER_P (insn))
2426 break;
2429 else if (e->src != ENTRY_BLOCK_PTR
2430 && e->dest != EXIT_BLOCK_PTR)
2432 rtx insn;
/* A fallthru edge (not involving entry/exit) must connect adjacent
   blocks, and nothing executable may sit between them.  */
2434 if (e->src->next_bb != e->dest)
2436 error
2437 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2438 e->src->index, e->dest->index);
2439 err = 1;
2441 else
2442 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2443 insn = NEXT_INSN (insn))
2444 if (BARRIER_P (insn) || INSN_P (insn))
2446 error ("verify_flow_info: Incorrect fallthru %i->%i",
2447 e->src->index, e->dest->index);
2448 fatal_insn ("wrong insn in the fallthru edge", insn);
2449 err = 1;
2454 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2456 /* Check that the code before the first basic block has NULL
2457 bb field. */
2458 if (!BARRIER_P (x)
2459 && BLOCK_FOR_INSN (x) != NULL)
2461 error ("insn %d outside of basic blocks has non-NULL bb field",
2462 INSN_UID (x));
2463 err = 1;
2466 free (bb_info);
/* Finally walk the whole insn stream forward: check that bb notes lay
   the blocks down consecutively, that no insn appears outside a block,
   and that every return insn is followed by a barrier.  */
2468 num_bb_notes = 0;
2469 last_bb_seen = ENTRY_BLOCK_PTR;
2471 for (x = rtx_first; x; x = NEXT_INSN (x))
2473 if (NOTE_INSN_BASIC_BLOCK_P (x))
2475 bb = NOTE_BASIC_BLOCK (x);
2477 num_bb_notes++;
2478 if (bb != last_bb_seen->next_bb)
2479 internal_error ("basic blocks not laid down consecutively");
2481 curr_bb = last_bb_seen = bb;
2484 if (!curr_bb)
2486 switch (GET_CODE (x))
2488 case BARRIER:
2489 case NOTE:
2490 break;
2492 case CODE_LABEL:
2493 /* An addr_vec is placed outside any basic block. */
2494 if (NEXT_INSN (x)
2495 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
2496 x = NEXT_INSN (x);
2498 /* But in any case, non-deletable labels can appear anywhere. */
2499 break;
2501 default:
2502 fatal_insn ("insn outside basic block", x);
2506 if (JUMP_P (x)
2507 && returnjump_p (x) && ! condjump_p (x)
2508 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
2509 fatal_insn ("return not followed by barrier", x);
2510 if (curr_bb && x == BB_END (curr_bb))
2511 curr_bb = NULL;
2514 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
2515 internal_error
2516 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2517 num_bb_notes, n_basic_blocks);
2519 return err;
2522 /* Assume that the preceding pass has possibly eliminated jump instructions
2523 or converted the unconditional jumps. Eliminate the edges from CFG.
2524 Return true if any edges are eliminated. */
2526 bool
2527 purge_dead_edges (basic_block bb)
2529 edge e;
2530 rtx insn = BB_END (bb), note;
2531 bool purged = false;
2532 bool found;
2533 edge_iterator ei;
/* Use the insn that really ends the block for control-flow purposes:
   skip over trailing debug insns and notes.  */
2535 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
2537 insn = PREV_INSN (insn);
2538 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
2540 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2541 if (NONJUMP_INSN_P (insn)
2542 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2544 rtx eqnote;
2546 if (! may_trap_p (PATTERN (insn))
2547 || ((eqnote = find_reg_equal_equiv_note (insn))
2548 && ! may_trap_p (XEXP (eqnote, 0))))
2549 remove_note (insn, note);
2552 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2553 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2555 bool remove = false;
2557 /* There are three types of edges we need to handle correctly here: EH
2558 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2559 latter can appear when nonlocal gotos are used. */
2560 if (e->flags & EDGE_ABNORMAL_CALL)
2562 if (!CALL_P (insn))
2563 remove = true;
2564 else if (can_nonlocal_goto (insn))
2566 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2568 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
2570 else
2571 remove = true;
2573 else if (e->flags & EDGE_EH)
2574 remove = !can_throw_internal (insn);
2576 if (remove)
2578 remove_edge (e);
2579 df_set_bb_dirty (bb);
2580 purged = true;
2582 else
2583 ei_next (&ei);
2586 if (JUMP_P (insn))
2588 rtx note;
2589 edge b,f;
2590 edge_iterator ei;
2592 /* We do care only about conditional jumps and simplejumps. */
2593 if (!any_condjump_p (insn)
2594 && !returnjump_p (insn)
2595 && !simplejump_p (insn))
2596 return purged;
2598 /* Branch probability/prediction notes are defined only for
2599 condjumps. We've possibly turned condjump into simplejump. */
2600 if (simplejump_p (insn))
2602 note = find_reg_note (insn, REG_BR_PROB, NULL);
2603 if (note)
2604 remove_note (insn, note);
2605 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
2606 remove_note (insn, note);
/* Keep only the edges this jump can actually take; drop the rest.  */
2609 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2611 /* Avoid abnormal flags to leak from computed jumps turned
2612 into simplejumps. */
2614 e->flags &= ~EDGE_ABNORMAL;
2616 /* See if this edge is one we should keep. */
2617 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
2618 /* A conditional jump can fall through into the next
2619 block, so we should keep the edge. */
2621 ei_next (&ei);
2622 continue;
2624 else if (e->dest != EXIT_BLOCK_PTR
2625 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
2626 /* If the destination block is the target of the jump,
2627 keep the edge. */
2629 ei_next (&ei);
2630 continue;
2632 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
2633 /* If the destination block is the exit block, and this
2634 instruction is a return, then keep the edge. */
2636 ei_next (&ei);
2637 continue;
2639 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2640 /* Keep the edges that correspond to exceptions thrown by
2641 this instruction and rematerialize the EDGE_ABNORMAL
2642 flag we just cleared above. */
2644 e->flags |= EDGE_ABNORMAL;
2645 ei_next (&ei);
2646 continue;
2649 /* We do not need this edge. */
2650 df_set_bb_dirty (bb);
2651 purged = true;
2652 remove_edge (e);
2655 if (EDGE_COUNT (bb->succs) == 0 || !purged)
2656 return purged;
2658 if (dump_file)
2659 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
2661 if (!optimize)
2662 return purged;
2664 /* Redistribute probabilities. */
2665 if (single_succ_p (bb))
2667 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2668 single_succ_edge (bb)->count = bb->count;
2670 else
2672 note = find_reg_note (insn, REG_BR_PROB, NULL);
2673 if (!note)
2674 return purged;
2676 b = BRANCH_EDGE (bb);
2677 f = FALLTHRU_EDGE (bb);
2678 b->probability = INTVAL (XEXP (note, 0));
2679 f->probability = REG_BR_PROB_BASE - b->probability;
2680 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
2681 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
2684 return purged;
2686 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
2688 /* First, there should not be any EH or ABCALL edges resulting
2689 from non-local gotos and the like. If there were, we shouldn't
2690 have created the sibcall in the first place. Second, there
2691 should of course never have been a fallthru edge. */
2692 gcc_assert (single_succ_p (bb));
2693 gcc_assert (single_succ_edge (bb)->flags
2694 == (EDGE_SIBCALL | EDGE_ABNORMAL));
2696 return 0;
2699 /* If we don't see a jump insn, we don't know exactly why the block would
2700 have been broken at this point. Look for a simple, non-fallthru edge,
2701 as these are only created by conditional branches. If we find such an
2702 edge we know that there used to be a jump here and can then safely
2703 remove all non-fallthru edges. */
2704 found = false;
2705 FOR_EACH_EDGE (e, ei, bb->succs)
2706 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
2708 found = true;
2709 break;
2712 if (!found)
2713 return purged;
2715 /* Remove all but the fake and fallthru edges. The fake edge may be
2716 the only successor for this block in the case of noreturn
2717 calls. */
2718 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2720 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
2722 df_set_bb_dirty (bb);
2723 remove_edge (e);
2724 purged = true;
2726 else
2727 ei_next (&ei);
2730 gcc_assert (single_succ_p (bb));
/* The lone remaining successor gets all of the block's weight.  */
2732 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
2733 single_succ_edge (bb)->count = bb->count;
2735 if (dump_file)
2736 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
2737 bb->index);
2738 return purged;
2741 /* Search all basic blocks for potentially dead edges and purge them. Return
2742 true if some edge has been eliminated. */
2744 bool
2745 purge_all_dead_edges (void)
2747 int purged = false;
2748 basic_block bb;
2750 FOR_EACH_BB (bb)
2752 bool purged_here = purge_dead_edges (bb);
2754 purged |= purged_here;
2757 return purged;
2760 /* This is used by a few passes that emit some instructions after abnormal
2761 calls, moving the basic block's end, while they in fact do want to emit
2762 them on the fallthru edge. Look for abnormal call edges, find backward
2763 the call in the block and insert the instructions on the edge instead.
2765 Similarly, handle instructions throwing exceptions internally.
2767 Return true when instructions have been found and inserted on edges. */
2769 bool
2770 fixup_abnormal_edges (void)
2772 bool inserted = false;
2773 basic_block bb;
2775 FOR_EACH_BB (bb)
2777 edge e;
2778 edge_iterator ei;
2780 /* Look for cases we are interested in - calls or instructions causing
2781 exceptions. */
2782 FOR_EACH_EDGE (e, ei, bb->succs)
2783 if ((e->flags & EDGE_ABNORMAL_CALL)
2784 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
2785 == (EDGE_ABNORMAL | EDGE_EH)))
2786 break;
/* Only act when the block has such an edge but its last insn is neither
   a call nor a potential internal throw -- i.e. insns were emitted
   after the real block end.  */
2788 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
2790 rtx insn;
2792 /* Get past the new insns generated. Allow notes, as the insns
2793 may be already deleted. */
2794 insn = BB_END (bb);
2795 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
2796 && !can_throw_internal (insn)
2797 && insn != BB_HEAD (bb))
2798 insn = PREV_INSN (insn);
2800 if (CALL_P (insn) || can_throw_internal (insn))
2802 rtx stop, next;
2804 e = find_fallthru_edge (bb->succs);
/* Truncate the block at the call/throwing insn; everything after it
   up to STOP is moved onto the fallthru edge or discarded.  */
2806 stop = NEXT_INSN (BB_END (bb));
2807 BB_END (bb) = insn;
2809 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
2811 next = NEXT_INSN (insn);
2812 if (INSN_P (insn))
2814 delete_insn (insn);
2816 /* Sometimes there's still the return value USE.
2817 If it's placed after a trapping call (i.e. that
2818 call is the last insn anyway), we have no fallthru
2819 edge. Simply delete this use and don't try to insert
2820 on the non-existent edge. */
2821 if (GET_CODE (PATTERN (insn)) != USE)
2823 /* We're not deleting it, we're moving it. */
2824 INSN_DELETED_P (insn) = 0;
2825 PREV_INSN (insn) = NULL_RTX;
2826 NEXT_INSN (insn) = NULL_RTX;
2828 insert_insn_on_edge (insn, e);
2829 inserted = true;
2832 else if (!BARRIER_P (insn))
2833 set_block_for_insn (insn, NULL);
2837 /* It may be that we don't find any trapping insn. In this
2838 case we discovered quite late that the insn that had been
2839 marked as can_throw_internal in fact couldn't trap at all.
2840 So we should in fact delete the EH edges out of the block. */
2841 else
2842 purge_dead_edges (bb);
2846 return inserted;
2849 /* Cut the insns from FIRST to LAST out of the insns stream. */
2852 unlink_insn_chain (rtx first, rtx last)
2854 rtx prevfirst = PREV_INSN (first);
2855 rtx nextlast = NEXT_INSN (last);
2857 PREV_INSN (first) = NULL;
2858 NEXT_INSN (last) = NULL;
2859 if (prevfirst)
2860 NEXT_INSN (prevfirst) = nextlast;
2861 if (nextlast)
2862 PREV_INSN (nextlast) = prevfirst;
2863 else
2864 set_last_insn (prevfirst);
2865 if (!prevfirst)
2866 set_first_insn (nextlast);
2867 return first;
2870 /* Skip over inter-block insns occurring after BB which are typically
2871 associated with BB (e.g., barriers). If there are any such insns,
2872 we return the last one. Otherwise, we return the end of BB. */
2874 static rtx
2875 skip_insns_after_block (basic_block bb)
2877 rtx insn, last_insn, next_head, prev;
2879 next_head = NULL_RTX;
2880 if (bb->next_bb != EXIT_BLOCK_PTR)
2881 next_head = BB_HEAD (bb->next_bb);
/* Phase 1: scan forward from BB's end to the next block's head,
   extending LAST_INSN over barriers, notes, and jump-table labels
   that belong with BB.  */
2883 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
2885 if (insn == next_head)
2886 break;
2888 switch (GET_CODE (insn))
2890 case BARRIER:
2891 last_insn = insn;
2892 continue;
2894 case NOTE:
2895 switch (NOTE_KIND (insn))
2897 case NOTE_INSN_BLOCK_END:
2898 gcc_unreachable ();
2899 continue;
2900 default:
2901 continue;
2902 break;
2904 break;
2906 case CODE_LABEL:
2907 if (NEXT_INSN (insn)
2908 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
2910 insn = NEXT_INSN (insn);
2911 last_insn = insn;
2912 continue;
2914 break;
2916 default:
2917 break;
2920 break;
2923 /* It is possible to hit contradictory sequence. For instance:
2925 jump_insn
2926 NOTE_INSN_BLOCK_BEG
2927 barrier
2929 Where barrier belongs to jump_insn, but the note does not. This can be
2930 created by removing the basic block originally following
2931 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
/* Phase 2: walk back from LAST_INSN to BB's end, moving any stray
   notes after LAST_INSN so the skipped region is well-formed.  */
2933 for (insn = last_insn; insn != BB_END (bb); insn = prev)
2935 prev = PREV_INSN (insn);
2936 if (NOTE_P (insn))
2937 switch (NOTE_KIND (insn))
2939 case NOTE_INSN_BLOCK_END:
2940 gcc_unreachable ();
2941 break;
2942 case NOTE_INSN_DELETED:
2943 case NOTE_INSN_DELETED_LABEL:
2944 case NOTE_INSN_DELETED_DEBUG_LABEL:
2945 continue;
2946 default:
2947 reorder_insns (insn, insn, last_insn);
2951 return last_insn;
2954 /* Locate or create a label for a given basic block. */
2956 static rtx
2957 label_for_bb (basic_block bb)
2959 rtx label = BB_HEAD (bb);
2961 if (!LABEL_P (label))
2963 if (dump_file)
2964 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
2966 label = block_label (bb);
2969 return label;
2972 /* Locate the effective beginning and end of the insn chain for each
2973 block, as defined by skip_insns_after_block above. */
2975 static void
2976 record_effective_endpoints (void)
2978 rtx next_insn;
2979 basic_block bb;
2980 rtx insn;
/* Find the first bb note; everything before it forms the function
   header that lives outside any basic block.  */
2982 for (insn = get_insns ();
2983 insn
2984 && NOTE_P (insn)
2985 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
2986 insn = NEXT_INSN (insn))
2987 continue;
2988 /* No basic blocks at all? */
2989 gcc_assert (insn);
2991 if (PREV_INSN (insn))
2992 cfg_layout_function_header =
2993 unlink_insn_chain (get_insns (), PREV_INSN (insn));
2994 else
2995 cfg_layout_function_header = NULL_RTX;
/* For each block, detach the insns before its head into BB_HEADER and
   the trailing barrier/note run after its end into BB_FOOTER.  */
2997 next_insn = get_insns ();
2998 FOR_EACH_BB (bb)
3000 rtx end;
3002 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3003 BB_HEADER (bb) = unlink_insn_chain (next_insn,
3004 PREV_INSN (BB_HEAD (bb)));
3005 end = skip_insns_after_block (bb);
3006 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3007 BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3008 next_insn = NEXT_INSN (BB_END (bb));
/* Whatever remains after the last block becomes the function footer.  */
3011 cfg_layout_function_footer = next_insn;
3012 if (cfg_layout_function_footer)
3013 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
/* Pass execute hook: switch the function into cfglayout mode with no
   extra cleanup flags.  Always returns 0 (no TODO flags).  */
3016 static unsigned int
3017 into_cfg_layout_mode (void)
3019 cfg_layout_initialize (0);
3020 return 0;
3023 static unsigned int
3024 outof_cfg_layout_mode (void)
3026 basic_block bb;
3028 FOR_EACH_BB (bb)
3029 if (bb->next_bb != EXIT_BLOCK_PTR)
3030 bb->aux = bb->next_bb;
3032 cfg_layout_finalize ();
3034 return 0;
/* Pass descriptor for entering cfglayout mode; provides PROP_cfglayout.
   Field order follows the opt_pass initializer layout (see the trailing
   comments on each line).  */
3037 struct rtl_opt_pass pass_into_cfg_layout_mode =
3040 RTL_PASS,
3041 "into_cfglayout", /* name */
3042 OPTGROUP_NONE, /* optinfo_flags */
3043 NULL, /* gate */
3044 into_cfg_layout_mode, /* execute */
3045 NULL, /* sub */
3046 NULL, /* next */
3047 0, /* static_pass_number */
3048 TV_CFG, /* tv_id */
3049 0, /* properties_required */
3050 PROP_cfglayout, /* properties_provided */
3051 0, /* properties_destroyed */
3052 0, /* todo_flags_start */
3053 0 /* todo_flags_finish */
/* Pass descriptor for leaving cfglayout mode; destroys PROP_cfglayout.
   Mirrors pass_into_cfg_layout_mode above.  */
3057 struct rtl_opt_pass pass_outof_cfg_layout_mode =
3060 RTL_PASS,
3061 "outof_cfglayout", /* name */
3062 OPTGROUP_NONE, /* optinfo_flags */
3063 NULL, /* gate */
3064 outof_cfg_layout_mode, /* execute */
3065 NULL, /* sub */
3066 NULL, /* next */
3067 0, /* static_pass_number */
3068 TV_CFG, /* tv_id */
3069 0, /* properties_required */
3070 0, /* properties_provided */
3071 PROP_cfglayout, /* properties_destroyed */
3072 0, /* todo_flags_start */
3073 0 /* todo_flags_finish */
3078 /* Link the basic blocks in the correct order, compacting the basic
3079 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3080 function also clears the basic block header and footer fields.
3082 This function is usually called after a pass (e.g. tracer) finishes
3083 some transformations while in cfglayout mode. The required sequence
3084 of the basic blocks is in a linked list along the bb->aux field.
3085 This function re-links the basic block prev_bb and next_bb pointers
3086 accordingly, and it compacts and renumbers the blocks.
3088 FIXME: This currently works only for RTL, but the only RTL-specific
3089 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3090 to GIMPLE a long time ago, but it doesn't relink the basic block
3091 chain. It could do that (to give better initial RTL) if this function
3092 is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
3094 void
3095 relink_block_chain (bool stay_in_cfglayout_mode)
3097 basic_block bb, prev_bb;
3098 int index;
3100 /* Maybe dump the re-ordered sequence. */
3101 if (dump_file)
3103 fprintf (dump_file, "Reordered sequence:\n");
3104 for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
3106 bb = (basic_block) bb->aux, index++)
3108 fprintf (dump_file, " %i ", index);
3109 if (get_bb_original (bb))
3110 fprintf (dump_file, "duplicate of %i ",
3111 get_bb_original (bb)->index);
3112 else if (forwarder_block_p (bb)
3113 && !LABEL_P (BB_HEAD (bb)))
3114 fprintf (dump_file, "compensation ");
3115 else
3116 fprintf (dump_file, "bb %i ", bb->index);
3117 fprintf (dump_file, " [%i]\n", bb->frequency);
3121 /* Now reorder the blocks. */
3122 prev_bb = ENTRY_BLOCK_PTR;
3123 bb = ENTRY_BLOCK_PTR->next_bb;
/* Walk the AUX chain, rewriting prev_bb/next_bb to match it.  */
3124 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3126 bb->prev_bb = prev_bb;
3127 prev_bb->next_bb = bb;
3129 prev_bb->next_bb = EXIT_BLOCK_PTR;
3130 EXIT_BLOCK_PTR->prev_bb = prev_bb;
3132 /* Then, clean up the aux fields. */
3133 FOR_ALL_BB (bb)
3135 bb->aux = NULL;
3136 if (!stay_in_cfglayout_mode)
3137 BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3140 /* Maybe reset the original copy tables, they are not valid anymore
3141 when we renumber the basic blocks in compact_blocks. If we are
3142 going out of cfglayout mode, don't re-allocate the tables. */
3143 free_original_copy_tables ();
3144 if (stay_in_cfglayout_mode)
3145 initialize_original_copy_tables ();
3147 /* Finally, put basic_block_info in the new order. */
3148 compact_blocks ();
3152 /* Given a reorder chain, rearrange the code to match. */
3154 static void
3155 fixup_reorder_chain (void)
3157 basic_block bb;
3158 rtx insn = NULL;
3160 if (cfg_layout_function_header)
3162 set_first_insn (cfg_layout_function_header);
3163 insn = cfg_layout_function_header;
3164 while (NEXT_INSN (insn))
3165 insn = NEXT_INSN (insn);
3168 /* First do the bulk reordering -- rechain the blocks without regard to
3169 the needed changes to jumps and labels. */
/* Stitch header + body + footer of each block onto the growing insn
   chain, following the bb->aux order established by the caller.  */
3171 for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
3173 if (BB_HEADER (bb))
3175 if (insn)
3176 NEXT_INSN (insn) = BB_HEADER (bb);
3177 else
3178 set_first_insn (BB_HEADER (bb));
3179 PREV_INSN (BB_HEADER (bb)) = insn;
3180 insn = BB_HEADER (bb);
3181 while (NEXT_INSN (insn))
3182 insn = NEXT_INSN (insn);
3184 if (insn)
3185 NEXT_INSN (insn) = BB_HEAD (bb);
3186 else
3187 set_first_insn (BB_HEAD (bb));
3188 PREV_INSN (BB_HEAD (bb)) = insn;
3189 insn = BB_END (bb);
3190 if (BB_FOOTER (bb))
3192 NEXT_INSN (insn) = BB_FOOTER (bb);
3193 PREV_INSN (BB_FOOTER (bb)) = insn;
3194 while (NEXT_INSN (insn))
3195 insn = NEXT_INSN (insn);
3199 NEXT_INSN (insn) = cfg_layout_function_footer;
3200 if (cfg_layout_function_footer)
3201 PREV_INSN (cfg_layout_function_footer) = insn;
3203 while (NEXT_INSN (insn))
3204 insn = NEXT_INSN (insn);
3206 set_last_insn (insn);
3207 #ifdef ENABLE_CHECKING
3208 verify_insn_chain ();
3209 #endif
3211 /* Now add jumps and labels as needed to match the blocks new
3212 outgoing edges. */
3214 for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
3216 edge e_fall, e_taken, e;
3217 rtx bb_end_insn;
3218 rtx ret_label = NULL_RTX;
3219 basic_block nb, src_bb;
3220 edge_iterator ei;
3222 if (EDGE_COUNT (bb->succs) == 0)
3223 continue;
3225 /* Find the old fallthru edge, and another non-EH edge for
3226 a taken jump. */
3227 e_taken = e_fall = NULL;
3229 FOR_EACH_EDGE (e, ei, bb->succs)
3230 if (e->flags & EDGE_FALLTHRU)
3231 e_fall = e;
3232 else if (! (e->flags & EDGE_EH))
3233 e_taken = e;
3235 bb_end_insn = BB_END (bb);
3236 if (JUMP_P (bb_end_insn))
3238 ret_label = JUMP_LABEL (bb_end_insn);
3239 if (any_condjump_p (bb_end_insn))
3241 /* This might happen if the conditional jump has side
3242 effects and could therefore not be optimized away.
3243 Make the basic block to end with a barrier in order
3244 to prevent rtl_verify_flow_info from complaining. */
3245 if (!e_fall)
3247 gcc_assert (!onlyjump_p (bb_end_insn)
3248 || returnjump_p (bb_end_insn));
3249 BB_FOOTER (bb) = emit_barrier_after (bb_end_insn);
3250 continue;
3253 /* If the old fallthru is still next, nothing to do. */
3254 if (bb->aux == e_fall->dest
3255 || e_fall->dest == EXIT_BLOCK_PTR)
3256 continue;
3258 /* The degenerated case of conditional jump jumping to the next
3259 instruction can happen for jumps with side effects. We need
3260 to construct a forwarder block and this will be done just
3261 fine by force_nonfallthru below. */
3262 if (!e_taken)
3265 /* There is another special case: if *neither* block is next,
3266 such as happens at the very end of a function, then we'll
3267 need to add a new unconditional jump. Choose the taken
3268 edge based on known or assumed probability. */
3269 else if (bb->aux != e_taken->dest)
3271 rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);
3273 if (note
3274 && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
3275 && invert_jump (bb_end_insn,
3276 (e_fall->dest == EXIT_BLOCK_PTR
3277 ? NULL_RTX
3278 : label_for_bb (e_fall->dest)), 0))
3280 e_fall->flags &= ~EDGE_FALLTHRU;
3281 gcc_checking_assert (could_fall_through
3282 (e_taken->src, e_taken->dest));
3283 e_taken->flags |= EDGE_FALLTHRU;
3284 update_br_prob_note (bb);
/* Swap the roles of the two edges after the inversion.  */
3285 e = e_fall, e_fall = e_taken, e_taken = e;
3289 /* If the "jumping" edge is a crossing edge, and the fall
3290 through edge is non-crossing, leave things as they are. */
3291 else if ((e_taken->flags & EDGE_CROSSING)
3292 && !(e_fall->flags & EDGE_CROSSING))
3293 continue;
3295 /* Otherwise we can try to invert the jump. This will
3296 basically never fail, however, keep up the pretense. */
3297 else if (invert_jump (bb_end_insn,
3298 (e_fall->dest == EXIT_BLOCK_PTR
3299 ? NULL_RTX
3300 : label_for_bb (e_fall->dest)), 0))
3302 e_fall->flags &= ~EDGE_FALLTHRU;
3303 gcc_checking_assert (could_fall_through
3304 (e_taken->src, e_taken->dest));
3305 e_taken->flags |= EDGE_FALLTHRU;
3306 update_br_prob_note (bb);
3307 if (LABEL_NUSES (ret_label) == 0
3308 && single_pred_p (e_taken->dest))
3309 delete_insn (ret_label);
3310 continue;
3313 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3315 /* If the old fallthru is still next or if
3316 asm goto doesn't have a fallthru (e.g. when followed by
3317 __builtin_unreachable ()), nothing to do. */
3318 if (! e_fall
3319 || bb->aux == e_fall->dest
3320 || e_fall->dest == EXIT_BLOCK_PTR)
3321 continue;
3323 /* Otherwise we'll have to use the fallthru fixup below. */
3325 else
3327 /* Otherwise we have some return, switch or computed
3328 jump. In the 99% case, there should not have been a
3329 fallthru edge. */
3330 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3331 continue;
3334 else
3336 /* No fallthru implies a noreturn function with EH edges, or
3337 something similarly bizarre. In any case, we don't need to
3338 do anything. */
3339 if (! e_fall)
3340 continue;
3342 /* If the fallthru block is still next, nothing to do. */
3343 if (bb->aux == e_fall->dest)
3344 continue;
3346 /* A fallthru to exit block. */
3347 if (e_fall->dest == EXIT_BLOCK_PTR)
3348 continue;
3351 /* We got here if we need to add a new jump insn.
3352 Note force_nonfallthru can delete E_FALL and thus we have to
3353 save E_FALL->src prior to the call to force_nonfallthru. */
3354 src_bb = e_fall->src;
3355 nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3356 if (nb)
3358 nb->aux = bb->aux;
3359 bb->aux = nb;
3360 /* Don't process this new block. */
3361 bb = nb;
3363 /* Make sure new bb is tagged for correct section (same as
3364 fall-thru source, since you cannot fall-thru across
3365 section boundaries). */
3366 BB_COPY_PARTITION (src_bb, single_pred (bb));
3367 if (flag_reorder_blocks_and_partition
3368 && targetm_common.have_named_sections
3369 && JUMP_P (BB_END (bb))
3370 && !any_condjump_p (BB_END (bb))
3371 && (EDGE_SUCC (bb, 0)->flags & EDGE_CROSSING))
3372 add_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX);
3376 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3378 /* Annoying special case - jump around dead jumptables left in the code. */
3379 FOR_EACH_BB (bb)
3381 edge e = find_fallthru_edge (bb->succs);
3383 if (e && !can_fallthru (e->src, e->dest))
3384 force_nonfallthru (e);
3387 /* Ensure goto_locus from edges has some instructions with that locus
3388 in RTL. */
3389 if (!optimize)
3390 FOR_EACH_BB (bb)
3392 edge e;
3393 edge_iterator ei;
3395 FOR_EACH_EDGE (e, ei, bb->succs)
3396 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
3397 && !(e->flags & EDGE_ABNORMAL))
3399 edge e2;
3400 edge_iterator ei2;
3401 basic_block dest, nb;
3402 rtx end;
/* Look backward in the source block for an insn already carrying
   this locus; if found, nothing needs to be emitted.  */
3404 insn = BB_END (e->src);
3405 end = PREV_INSN (BB_HEAD (e->src));
3406 while (insn != end
3407 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
3408 insn = PREV_INSN (insn);
3409 if (insn != end
3410 && INSN_LOCATION (insn) == e->goto_locus)
3411 continue;
3412 if (simplejump_p (BB_END (e->src))
3413 && !INSN_HAS_LOCATION (BB_END (e->src)))
3415 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
3416 continue;
3418 dest = e->dest;
3419 if (dest == EXIT_BLOCK_PTR)
3421 /* Non-fallthru edges to the exit block cannot be split. */
3422 if (!(e->flags & EDGE_FALLTHRU))
3423 continue;
3425 else
3427 insn = BB_HEAD (dest);
3428 end = NEXT_INSN (BB_END (dest));
3429 while (insn != end && !NONDEBUG_INSN_P (insn))
3430 insn = NEXT_INSN (insn);
3431 if (insn != end && INSN_HAS_LOCATION (insn)
3432 && INSN_LOCATION (insn) == e->goto_locus)
3433 continue;
/* No insn carries the locus; split the edge and put a (possibly
   nop) insn with the goto_locus into the new block.  */
3435 nb = split_edge (e);
3436 if (!INSN_P (BB_END (nb)))
3437 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
3438 nb);
3439 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
3441 /* If there are other incoming edges to the destination block
3442 with the same goto locus, redirect them to the new block as
3443 well, this can prevent other such blocks from being created
3444 in subsequent iterations of the loop. */
3445 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
3446 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
3447 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
3448 && e->goto_locus == e2->goto_locus)
3449 redirect_edge_and_branch (e2, nb);
3450 else
3451 ei_next (&ei2);
3456 /* Perform sanity checks on the insn chain.
3457 1. Check that next/prev pointers are consistent in both the forward and
3458 reverse direction.
3459 2. Count insns in chain, going both directions, and check if equal.
3460 3. Check that get_last_insn () returns the actual end of chain. */
3462 DEBUG_FUNCTION void
3463 verify_insn_chain (void)
3465 rtx x, prevx, nextx;
3466 int insn_cnt1, insn_cnt2;
3468 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3469 x != 0;
3470 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3471 gcc_assert (PREV_INSN (x) == prevx);
3473 gcc_assert (prevx == get_last_insn ());
3475 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3476 x != 0;
3477 nextx = x, insn_cnt2++, x = PREV_INSN (x))
3478 gcc_assert (NEXT_INSN (x) == nextx);
3480 gcc_assert (insn_cnt1 == insn_cnt2);
3483 /* If we have assembler epilogues, the block falling through to exit must
3484 be the last one in the reordered chain when we reach final. Ensure
3485 that this condition is met. */
3486 static void
3487 fixup_fallthru_exit_predecessor (void)
3489 edge e;
3490 basic_block bb = NULL;
3492 /* This transformation is not valid before reload, because we might
3493 separate a call from the instruction that copies the return
3494 value. */
3495 gcc_assert (reload_completed);
3497 e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
3498 if (e)
3499 bb = e->src;
/* BB->aux non-NULL means BB is not yet last in the reorder chain;
   move it to the end.  */
3501 if (bb && bb->aux)
3503 basic_block c = ENTRY_BLOCK_PTR->next_bb;
3505 /* If the very first block is the one with the fall-through exit
3506 edge, we have to split that block. */
3507 if (c == bb)
3509 bb = split_block (bb, NULL)->dest;
3510 bb->aux = c->aux;
3511 c->aux = bb;
3512 BB_FOOTER (bb) = BB_FOOTER (c);
3513 BB_FOOTER (c) = NULL;
/* Unlink BB from its position in the aux chain...  */
3516 while (c->aux != bb)
3517 c = (basic_block) c->aux;
3519 c->aux = bb->aux;
/* ...then append it after the current last block.  */
3520 while (c->aux)
3521 c = (basic_block) c->aux;
3523 c->aux = bb;
3524 bb->aux = NULL;
3528 /* In case there are more than one fallthru predecessors of exit, force that
3529 there is only one. */
3531 static void
3532 force_one_exit_fallthru (void)
3534 edge e, predecessor = NULL;
3535 bool more = false;
3536 edge_iterator ei;
3537 basic_block forwarder, bb;
/* Detect whether the exit block has two or more fallthru preds.  */
3539 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
3540 if (e->flags & EDGE_FALLTHRU)
3542 if (predecessor == NULL)
3543 predecessor = e;
3544 else
3546 more = true;
3547 break;
3551 if (!more)
3552 return;
3554 /* Exit has several fallthru predecessors. Create a forwarder block for
3555 them. */
3556 forwarder = split_edge (predecessor);
3557 for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
3559 if (e->src == forwarder
3560 || !(e->flags & EDGE_FALLTHRU))
3561 ei_next (&ei);
3562 else
3563 redirect_edge_and_branch_force (e, forwarder);
3566 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
3567 exit block. */
3568 FOR_EACH_BB (bb)
3570 if (bb->aux == NULL && bb != forwarder)
3572 bb->aux = forwarder;
3573 break;
3578 /* Return true in case it is possible to duplicate the basic block BB. */
3580 static bool
3581 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
3583 /* Do not attempt to duplicate tablejumps, as we need to unshare
3584 the dispatch table. This is difficult to do, as the instructions
3585 computing jump destination may be hoisted outside the basic block. */
3586 if (tablejump_p (BB_END (bb), NULL, NULL))
3587 return false;
3589 /* Do not duplicate blocks containing insns that can't be copied. */
3590 if (targetm.cannot_copy_insn_p)
3592 rtx insn = BB_HEAD (bb);
3593 while (1)
3595 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
3596 return false;
3597 if (insn == BB_END (bb))
3598 break;
3599 insn = NEXT_INSN (insn);
3603 return true;
/* Duplicate the insns FROM..TO inclusive, emitting the copies at the end
   of the current insn chain, and return the first copied insn.  Dispatch
   tables, most notes and label debug insns are deliberately not copied.  */
3607 duplicate_insn_chain (rtx from, rtx to)
3609 rtx insn, last, copy;
3611 /* Avoid updating of boundaries of previous basic block. The
3612 note will get removed from insn stream in fixup. */
3613 last = emit_note (NOTE_INSN_DELETED);
3615 /* Create copy at the end of INSN chain. The chain will
3616 be reordered later. */
3617 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
3619 switch (GET_CODE (insn))
3621 case DEBUG_INSN:
3622 /* Don't duplicate label debug insns. */
3623 if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
3624 break;
3625 /* FALLTHRU */
3626 case INSN:
3627 case CALL_INSN:
3628 case JUMP_INSN:
3629 /* Avoid copying of dispatch tables. We never duplicate
3630 tablejumps, so this can hit only in case the table got
3631 moved far from original jump. */
3632 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
3633 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
3635 /* Avoid copying following barrier as well if any
3636 (and debug insns in between). */
3637 rtx next;
3639 for (next = NEXT_INSN (insn);
3640 next != NEXT_INSN (to);
3641 next = NEXT_INSN (next))
3642 if (!DEBUG_INSN_P (next))
3643 break;
3644 if (next != NEXT_INSN (to) && BARRIER_P (next))
3645 insn = next;
3646 break;
3648 copy = emit_copy_of_insn_after (insn, get_last_insn ());
/* A JUMP_LABEL passing ANY_RETURN_P is a return rtx, not a label insn;
   copy the reference directly.  */
3649 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
3650 && ANY_RETURN_P (JUMP_LABEL (insn)))
3651 JUMP_LABEL (copy) = JUMP_LABEL (insn);
3652 maybe_copy_prologue_epilogue_insn (insn, copy);
3653 break;
3655 case CODE_LABEL:
3656 break;
3658 case BARRIER:
3659 emit_barrier ();
3660 break;
3662 case NOTE:
3663 switch (NOTE_KIND (insn))
3665 /* In case prologue is empty and function contain label
3666 in first BB, we may want to copy the block. */
3667 case NOTE_INSN_PROLOGUE_END:
3669 case NOTE_INSN_DELETED:
3670 case NOTE_INSN_DELETED_LABEL:
3671 case NOTE_INSN_DELETED_DEBUG_LABEL:
3672 /* No problem to strip these. */
3673 case NOTE_INSN_FUNCTION_BEG:
3674 /* There is always just single entry to function. */
3675 case NOTE_INSN_BASIC_BLOCK:
3676 break;
3678 case NOTE_INSN_EPILOGUE_BEG:
3679 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
3680 emit_note_copy (insn);
3681 break;
3683 default:
3684 /* All other notes should have already been eliminated. */
3685 gcc_unreachable ();
3687 break;
3688 default:
3689 gcc_unreachable ();
/* Drop the placeholder note and return the first real copy after it.  */
3692 insn = NEXT_INSN (last);
3693 delete_insn (last);
3694 return insn;
3697 /* Create a duplicate of the basic block BB. */
3699 static basic_block
3700 cfg_layout_duplicate_bb (basic_block bb)
3702 rtx insn;
3703 basic_block new_bb;
3705 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
3706 new_bb = create_basic_block (insn,
3707 insn ? get_last_insn () : NULL,
3708 EXIT_BLOCK_PTR->prev_bb);
3710 BB_COPY_PARTITION (new_bb, bb);
3711 if (BB_HEADER (bb))
3713 insn = BB_HEADER (bb);
3714 while (NEXT_INSN (insn))
3715 insn = NEXT_INSN (insn);
3716 insn = duplicate_insn_chain (BB_HEADER (bb), insn);
3717 if (insn)
3718 BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
3721 if (BB_FOOTER (bb))
3723 insn = BB_FOOTER (bb);
3724 while (NEXT_INSN (insn))
3725 insn = NEXT_INSN (insn);
3726 insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
3727 if (insn)
3728 BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
3731 return new_bb;
3735 /* Main entry point to this module - initialize the datastructures for
3736 CFG layout changes. It keeps LOOPS up-to-date if not null.
3738 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
3740 void
3741 cfg_layout_initialize (unsigned int flags)
3743 rtx x;
3744 basic_block bb;
3746 initialize_original_copy_tables ();
3748 cfg_layout_rtl_register_cfg_hooks ();
3750 record_effective_endpoints ();
3752 /* Make sure that the targets of non local gotos are marked. */
3753 for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
3755 bb = BLOCK_FOR_INSN (XEXP (x, 0));
3756 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
3759 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
3762 /* Splits superblocks. */
3763 void
3764 break_superblocks (void)
3766 sbitmap superblocks;
3767 bool need = false;
3768 basic_block bb;
3770 superblocks = sbitmap_alloc (last_basic_block);
3771 bitmap_clear (superblocks);
3773 FOR_EACH_BB (bb)
3774 if (bb->flags & BB_SUPERBLOCK)
3776 bb->flags &= ~BB_SUPERBLOCK;
3777 bitmap_set_bit (superblocks, bb->index);
3778 need = true;
3781 if (need)
3783 rebuild_jump_labels (get_insns ());
3784 find_many_sub_basic_blocks (superblocks);
3787 free (superblocks);
3790 /* Finalize the changes: reorder insn list according to the sequence specified
3791 by aux pointers, enter compensation code, rebuild scope forest. */
3793 void
3794 cfg_layout_finalize (void)
3796 #ifdef ENABLE_CHECKING
3797 verify_flow_info ();
3798 #endif
3799 force_one_exit_fallthru ();
3800 rtl_register_cfg_hooks ();
/* Per fixup_fallthru_exit_predecessor's comment, with assembler
   epilogues the fallthru-to-exit block must come last; only needed
   after reload and when no target epilogue exists.  */
3801 if (reload_completed
3802 #ifdef HAVE_epilogue
3803 && !HAVE_epilogue
3804 #endif
3806 fixup_fallthru_exit_predecessor ();
/* Re-linearize the insn chain according to the aux-pointer order.  */
3807 fixup_reorder_chain ();
3809 rebuild_jump_labels (get_insns ());
3810 delete_dead_jumptables ();
3812 #ifdef ENABLE_CHECKING
3813 verify_insn_chain ();
3814 verify_flow_info ();
3815 #endif
3819 /* Same as split_block but update cfg_layout structures. */
3821 static basic_block
3822 cfg_layout_split_block (basic_block bb, void *insnp)
3824 rtx insn = (rtx) insnp;
3825 basic_block new_bb = rtl_split_block (bb, insn);
3827 BB_FOOTER (new_bb) = BB_FOOTER (bb);
3828 BB_FOOTER (bb) = NULL;
3830 return new_bb;
3833 /* Redirect edge E to DEST.  Return the redirected edge, or NULL if the
   redirection is not possible (abnormal call / EH edges).  */
3834 static edge
3835 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
3837 basic_block src = e->src;
3838 edge ret;
/* Abnormal call and EH edges cannot be redirected.  */
3840 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
3841 return NULL;
/* Nothing to do when the edge already points at DEST.  */
3843 if (e->dest == dest)
3844 return e;
/* First try the cheap route of replacing the jump at the end of SRC.  */
3846 if (e->src != ENTRY_BLOCK_PTR
3847 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
3849 df_set_bb_dirty (src);
3850 return ret;
/* A simple fallthru edge out of the entry block can just be retargeted.  */
3853 if (e->src == ENTRY_BLOCK_PTR
3854 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
3856 if (dump_file)
3857 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
3858 e->src->index, dest->index);
3860 df_set_bb_dirty (e->src);
3861 redirect_edge_succ (e, dest);
3862 return e;
3865 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
3866 in the case the basic block appears to be in sequence. Avoid this
3867 transformation. */
3869 if (e->flags & EDGE_FALLTHRU)
3871 /* Redirect any branch edges unified with the fallthru one. */
3872 if (JUMP_P (BB_END (src))
3873 && label_is_jump_target_p (BB_HEAD (e->dest),
3874 BB_END (src)))
3876 edge redirected;
3878 if (dump_file)
3879 fprintf (dump_file, "Fallthru edge unified with branch "
3880 "%i->%i redirected to %i\n",
3881 e->src->index, e->dest->index, dest->index);
/* Temporarily treat E as a pure branch edge while the jump is
   redirected, then restore the fallthru flag on the result.  */
3882 e->flags &= ~EDGE_FALLTHRU;
3883 redirected = redirect_branch_edge (e, dest);
3884 gcc_assert (redirected);
3885 redirected->flags |= EDGE_FALLTHRU;
3886 df_set_bb_dirty (redirected->src);
3887 return redirected;
3889 /* In case we are redirecting fallthru edge to the branch edge
3890 of conditional jump, remove it. */
3891 if (EDGE_COUNT (src->succs) == 2)
3893 /* Find the edge that is different from E. */
3894 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
/* If both edges would now reach DEST and SRC ends in a pure
   conditional jump, the jump becomes redundant — delete it.  */
3896 if (s->dest == dest
3897 && any_condjump_p (BB_END (src))
3898 && onlyjump_p (BB_END (src)))
3899 delete_insn (BB_END (src));
3901 if (dump_file)
3902 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
3903 e->src->index, e->dest->index, dest->index);
3904 ret = redirect_edge_succ_nodup (e, dest);
3906 else
3907 ret = redirect_branch_edge (e, dest);
3909 /* We don't want simplejumps in the insn stream during cfglayout. */
3910 gcc_assert (!simplejump_p (BB_END (src)));
3912 df_set_bb_dirty (src);
3913 return ret;
3916 /* Simple wrapper as we always can redirect fallthru edges. */
3917 static basic_block
3918 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
3920 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
3922 gcc_assert (redirected);
3923 return NULL;
3926 /* Same as delete_basic_block but update cfg_layout structures. */
3928 static void
3929 cfg_layout_delete_block (basic_block bb)
/* "remaints" (sic): the leftover header/footer insns that survive the
   block's deletion and must be re-attached elsewhere.  */
3931 rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;
/* Splice BB's out-of-line header insns into the real insn stream just
   before BB_HEAD.  */
3933 if (BB_HEADER (bb))
3935 next = BB_HEAD (bb);
3936 if (prev)
3937 NEXT_INSN (prev) = BB_HEADER (bb);
3938 else
3939 set_first_insn (BB_HEADER (bb));
3940 PREV_INSN (BB_HEADER (bb)) = prev;
3941 insn = BB_HEADER (bb);
/* Walk to the end of the header chain and link it to BB_HEAD.  */
3942 while (NEXT_INSN (insn))
3943 insn = NEXT_INSN (insn);
3944 NEXT_INSN (insn) = next;
3945 PREV_INSN (next) = insn;
3947 next = NEXT_INSN (BB_END (bb));
/* Strip barriers out of the footer, then splice what remains into the
   insn stream right after BB_END.  */
3948 if (BB_FOOTER (bb))
3950 insn = BB_FOOTER (bb);
3951 while (insn)
3953 if (BARRIER_P (insn))
3955 if (PREV_INSN (insn))
3956 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
3957 else
3958 BB_FOOTER (bb) = NEXT_INSN (insn);
3959 if (NEXT_INSN (insn))
3960 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
3962 if (LABEL_P (insn))
3963 break;
3964 insn = NEXT_INSN (insn);
3966 if (BB_FOOTER (bb))
3968 insn = BB_END (bb);
3969 NEXT_INSN (insn) = BB_FOOTER (bb);
3970 PREV_INSN (BB_FOOTER (bb)) = insn;
3971 while (NEXT_INSN (insn))
3972 insn = NEXT_INSN (insn);
3973 NEXT_INSN (insn) = next;
3974 if (next)
3975 PREV_INSN (next) = insn;
3976 else
3977 set_last_insn (insn);
/* Any insns left over after the deletion go to the next block's header,
   or to the function footer when BB was the last block.  */
3980 if (bb->next_bb != EXIT_BLOCK_PTR)
3981 to = &BB_HEADER (bb->next_bb);
3982 else
3983 to = &cfg_layout_function_footer;
3985 rtl_delete_block (bb);
/* Re-anchor PREV/NEXT to the boundaries of whatever survived.  */
3987 if (prev)
3988 prev = NEXT_INSN (prev);
3989 else
3990 prev = get_insns ();
3991 if (next)
3992 next = PREV_INSN (next);
3993 else
3994 next = get_last_insn ();
/* If insns survived in PREV..NEXT, unlink them and prepend them to *TO.  */
3996 if (next && NEXT_INSN (next) != prev)
3998 remaints = unlink_insn_chain (prev, next);
3999 insn = remaints;
4000 while (NEXT_INSN (insn))
4001 insn = NEXT_INSN (insn);
4002 NEXT_INSN (insn) = *to;
4003 if (*to)
4004 PREV_INSN (*to) = insn;
4005 *to = remaints;
4009 /* Return true when blocks A and B can be safely merged. */
4011 static bool
4012 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4014 /* If we are partitioning hot/cold basic blocks, we don't want to
4015 mess up unconditional or indirect jumps that cross between hot
4016 and cold sections.
4018 Basic block partitioning may result in some jumps that appear to
4019 be optimizable (or blocks that appear to be mergeable), but which really
4020 must be left untouched (they are required to make it safely across
4021 partition boundaries). See the comments at the top of
4022 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4024 if (BB_PARTITION (a) != BB_PARTITION (b))
4025 return false;
4027 /* Protect the loop latches. */
4028 if (current_loops && b->loop_father->latch == b)
4029 return false;
4031 /* If we would end up moving B's instructions, make sure it doesn't fall
4032 through into the exit block, since we cannot recover from a fallthrough
4033 edge into the exit block occurring in the middle of a function. */
4034 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4036 edge e = find_fallthru_edge (b->succs);
4037 if (e && e->dest == EXIT_BLOCK_PTR)
4038 return false;
4041 /* There must be exactly one edge in between the blocks. */
4042 return (single_succ_p (a)
4043 && single_succ (a) == b
4044 && single_pred_p (b) == 1
4045 && a != b
4046 /* Must be simple edge. */
4047 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4048 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
4049 /* If the jump insn has side effects, we can't kill the edge.
4050 When not optimizing, try_redirect_by_replacing_jump will
4051 not allow us to redirect an edge by replacing a table jump. */
4052 && (!JUMP_P (BB_END (a))
4053 || ((!optimize || reload_completed)
4054 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
4057 /* Merge block A and B. The blocks must be mergeable. */
4059 static void
4060 cfg_layout_merge_blocks (basic_block a, basic_block b)
4062 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
4063 rtx insn;
4065 gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));
4067 if (dump_file)
4068 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
4069 a->index);
4071 /* If there was a CODE_LABEL beginning B, delete it. */
4072 if (LABEL_P (BB_HEAD (b)))
4074 delete_insn (BB_HEAD (b));
4077 /* We should have fallthru edge in a, or we can do dummy redirection to get
4078 it cleaned up. */
4079 if (JUMP_P (BB_END (a)))
4080 try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
4081 gcc_assert (!JUMP_P (BB_END (a)));
4083 /* When not optimizing CFG and the edge is the only place in RTL which holds
4084 some unique locus, emit a nop with that locus in between. */
4085 if (!optimize)
4086 emit_nop_for_unique_locus_between (a, b);
4088 /* Possible line number notes should appear in between. */
4089 if (BB_HEADER (b))
4091 rtx first = BB_END (a), last;
/* Move B's header insns after A's end, then delete the originals.  */
4093 last = emit_insn_after_noloc (BB_HEADER (b), BB_END (a), a);
4094 /* The above might add a BARRIER as BB_END, but as barriers
4095 aren't valid parts of a bb, remove_insn doesn't update
4096 BB_END if it is a barrier. So adjust BB_END here. */
4097 while (BB_END (a) != first && BARRIER_P (BB_END (a)))
4098 BB_END (a) = PREV_INSN (BB_END (a));
4099 delete_insn_chain (NEXT_INSN (first), last, false);
4100 BB_HEADER (b) = NULL;
4103 /* In the case basic blocks are not adjacent, move them around. */
4104 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4106 insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));
4108 emit_insn_after_noloc (insn, BB_END (a), a);
4110 /* Otherwise just re-associate the instructions. */
4111 else
4113 insn = BB_HEAD (b);
4114 BB_END (a) = BB_END (b);
4117 /* emit_insn_after_noloc doesn't call df_insn_change_bb.
4118 We need to explicitly call. */
4119 update_bb_for_insn_chain (insn, BB_END (b), a);
4121 /* Skip possible DELETED_LABEL insn. */
4122 if (!NOTE_INSN_BASIC_BLOCK_P (insn))
4123 insn = NEXT_INSN (insn);
4124 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
/* B is dissolved: drop its head, its basic-block note and its df info.  */
4125 BB_HEAD (b) = NULL;
4126 delete_insn (insn);
4128 df_bb_delete (b->index);
4130 /* Possible tablejumps and barriers should appear after the block. */
4131 if (BB_FOOTER (b))
4133 if (!BB_FOOTER (a))
4134 BB_FOOTER (a) = BB_FOOTER (b);
4135 else
4137 rtx last = BB_FOOTER (a);
/* Append B's footer to the end of A's footer chain.  */
4139 while (NEXT_INSN (last))
4140 last = NEXT_INSN (last);
4141 NEXT_INSN (last) = BB_FOOTER (b);
4142 PREV_INSN (BB_FOOTER (b)) = last;
4144 BB_FOOTER (b) = NULL;
4147 /* If B was a forwarder block, propagate the locus on the edge. */
4148 if (forwarder_p
4149 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
4150 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
4152 if (dump_file)
4153 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
4156 /* Split edge E. */
4158 static basic_block
4159 cfg_layout_split_edge (edge e)
4161 basic_block new_bb =
4162 create_basic_block (e->src != ENTRY_BLOCK_PTR
4163 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
4164 NULL_RTX, e->src);
4166 if (e->dest == EXIT_BLOCK_PTR)
4167 BB_COPY_PARTITION (new_bb, e->src);
4168 else
4169 BB_COPY_PARTITION (new_bb, e->dest);
4170 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4171 redirect_edge_and_branch_force (e, new_bb);
4173 return new_bb;
4176 /* Do postprocessing after making a forwarder block joined by edge FALLTHRU. */
4178 static void
4179 rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
/* Nothing to do for plain RTL mode; the hook exists so the generic CFG
   code (see the rtl_cfg_hooks table below) can call it unconditionally.  */
4183 /* Return true if BB contains only labels or non-executable
4184 instructions. */
4186 static bool
4187 rtl_block_empty_p (basic_block bb)
4189 rtx insn;
4191 if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
4192 return true;
4194 FOR_BB_INSNS (bb, insn)
4195 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4196 return false;
4198 return true;
4201 /* Split a basic block if it ends with a conditional branch and if
4202 the other part of the block is not empty. */
4204 static basic_block
4205 rtl_split_block_before_cond_jump (basic_block bb)
4207 rtx insn;
4208 rtx split_point = NULL;
4209 rtx last = NULL;
4210 bool found_code = false;
4212 FOR_BB_INSNS (bb, insn)
4214 if (any_condjump_p (insn))
4215 split_point = last;
4216 else if (NONDEBUG_INSN_P (insn))
4217 found_code = true;
4218 last = insn;
4221 /* Did not find everything. */
4222 if (found_code && split_point)
4223 return split_block (bb, split_point)->dest;
4224 else
4225 return NULL;
4228 /* Return 1 if BB ends with a call, possibly followed by some
4229 instructions that must stay with the call, 0 otherwise. */
4231 static bool
4232 rtl_block_ends_with_call_p (basic_block bb)
4234 rtx insn = BB_END (bb);
4236 while (!CALL_P (insn)
4237 && insn != BB_HEAD (bb)
4238 && (keep_with_call_p (insn)
4239 || NOTE_P (insn)
4240 || DEBUG_INSN_P (insn)))
4241 insn = PREV_INSN (insn);
4242 return (CALL_P (insn));
4245 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4247 static bool
4248 rtl_block_ends_with_condjump_p (const_basic_block bb)
4250 return any_condjump_p (BB_END (bb));
4253 /* Return true if we need to add fake edge to exit.
4254 Helper function for rtl_flow_call_edges_add. */
4256 static bool
4257 need_fake_edge_p (const_rtx insn)
4259 if (!INSN_P (insn))
4260 return false;
4262 if ((CALL_P (insn)
4263 && !SIBLING_CALL_P (insn)
4264 && !find_reg_note (insn, REG_NORETURN, NULL)
4265 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4266 return true;
4268 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4269 && MEM_VOLATILE_P (PATTERN (insn)))
4270 || (GET_CODE (PATTERN (insn)) == PARALLEL
4271 && asm_noperands (insn) != -1
4272 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4273 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
4276 /* Add fake edges to the function exit for any non constant and non noreturn
4277 calls, volatile inline assembly in the bitmap of blocks specified by
4278 BLOCKS or to the whole CFG if BLOCKS is zero. Return the number of blocks
4279 that were split.
4281 The goal is to expose cases in which entering a basic block does not imply
4282 that all subsequent instructions must be executed. */
4284 static int
4285 rtl_flow_call_edges_add (sbitmap blocks)
4287 int i;
4288 int blocks_split = 0;
4289 int last_bb = last_basic_block;
4290 bool check_last_block = false;
/* An empty function (only the fixed entry/exit blocks) needs nothing.  */
4292 if (n_basic_blocks == NUM_FIXED_BLOCKS)
4293 return 0;
4295 if (! blocks)
4296 check_last_block = true;
4297 else
4298 check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);
4300 /* In the last basic block, before epilogue generation, there will be
4301 a fallthru edge to EXIT. Special care is required if the last insn
4302 of the last basic block is a call because make_edge folds duplicate
4303 edges, which would result in the fallthru edge also being marked
4304 fake, which would result in the fallthru edge being removed by
4305 remove_fake_edges, which would result in an invalid CFG.
4307 Moreover, we can't elide the outgoing fake edge, since the block
4308 profiler needs to take this into account in order to solve the minimal
4309 spanning tree in the case that the call doesn't return.
4311 Handle this by adding a dummy instruction in a new last basic block. */
4312 if (check_last_block)
4314 basic_block bb = EXIT_BLOCK_PTR->prev_bb;
4315 rtx insn = BB_END (bb);
4317 /* Back up past insns that must be kept in the same block as a call. */
4318 while (insn != BB_HEAD (bb)
4319 && keep_with_call_p (insn))
4320 insn = PREV_INSN (insn);
4322 if (need_fake_edge_p (insn))
4324 edge e;
4326 e = find_edge (bb, EXIT_BLOCK_PTR);
4327 if (e)
/* Inserting the dummy use splits BB, making the call no longer
   the last insn before exit.  */
4329 insert_insn_on_edge (gen_use (const0_rtx), e);
4330 commit_edge_insertions ();
4335 /* Now add fake edges to the function exit for any non constant
4336 calls since there is no way that we can determine if they will
4337 return or not... */
4339 for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
4341 basic_block bb = BASIC_BLOCK (i);
4342 rtx insn;
4343 rtx prev_insn;
4345 if (!bb)
4346 continue;
4348 if (blocks && !bitmap_bit_p (blocks, i))
4349 continue;
/* Scan the block backward; PREV_INSN is captured before any split so
   the walk stays valid.  */
4351 for (insn = BB_END (bb); ; insn = prev_insn)
4353 prev_insn = PREV_INSN (insn);
4354 if (need_fake_edge_p (insn))
4356 edge e;
4357 rtx split_at_insn = insn;
4359 /* Don't split the block between a call and an insn that should
4360 remain in the same block as the call. */
4361 if (CALL_P (insn))
4362 while (split_at_insn != BB_END (bb)
4363 && keep_with_call_p (NEXT_INSN (split_at_insn)))
4364 split_at_insn = NEXT_INSN (split_at_insn);
4366 /* The handling above of the final block before the epilogue
4367 should be enough to verify that there is no edge to the exit
4368 block in CFG already. Calling make_edge in such case would
4369 cause us to mark that edge as fake and remove it later. */
4371 #ifdef ENABLE_CHECKING
4372 if (split_at_insn == BB_END (bb))
4374 e = find_edge (bb, EXIT_BLOCK_PTR);
4375 gcc_assert (e == NULL);
4377 #endif
4379 /* Note that the following may create a new basic block
4380 and renumber the existing basic blocks. */
4381 if (split_at_insn != BB_END (bb))
4383 e = split_block (bb, split_at_insn);
4384 if (e)
4385 blocks_split++;
4388 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
4391 if (insn == BB_HEAD (bb))
4392 break;
4396 if (blocks_split)
4397 verify_flow_info ();
4399 return blocks_split;
4402 /* Add COMP_RTX as a condition at end of COND_BB. FIRST_HEAD is
4403 the conditional branch target, SECOND_HEAD should be the fall-thru
4404 there is no need to handle this here the loop versioning code handles
4405 this. the reason for SECON_HEAD is that it is needed for condition
4406 in trees, and this should be of the same type since it is a hook. */
4407 static void
4408 rtl_lv_add_condition_to_bb (basic_block first_head ,
4409 basic_block second_head ATTRIBUTE_UNUSED,
4410 basic_block cond_bb, void *comp_rtx)
4412 rtx label, seq, jump;
4413 rtx op0 = XEXP ((rtx)comp_rtx, 0);
4414 rtx op1 = XEXP ((rtx)comp_rtx, 1);
4415 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
4416 enum machine_mode mode;
4419 label = block_label (first_head);
4420 mode = GET_MODE (op0);
4421 if (mode == VOIDmode)
4422 mode = GET_MODE (op1);
4424 start_sequence ();
4425 op0 = force_operand (op0, NULL_RTX);
4426 op1 = force_operand (op1, NULL_RTX);
4427 do_compare_rtx_and_jump (op0, op1, comp, 0,
4428 mode, NULL_RTX, NULL_RTX, label, -1);
4429 jump = get_last_insn ();
4430 JUMP_LABEL (jump) = label;
4431 LABEL_NUSES (label)++;
4432 seq = get_insns ();
4433 end_sequence ();
4435 /* Add the new cond , in the new head. */
4436 emit_insn_after(seq, BB_END(cond_bb));
4440 /* Given a block B with unconditional branch at its end, get the
4441 store the return the branch edge and the fall-thru edge in
4442 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4443 static void
4444 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4445 edge *fallthru_edge)
4447 edge e = EDGE_SUCC (b, 0);
4449 if (e->flags & EDGE_FALLTHRU)
4451 *fallthru_edge = e;
4452 *branch_edge = EDGE_SUCC (b, 1);
4454 else
4456 *branch_edge = e;
4457 *fallthru_edge = EDGE_SUCC (b, 1);
4461 void
4462 init_rtl_bb_info (basic_block bb)
4464 gcc_assert (!bb->il.x.rtl);
4465 bb->il.x.head_ = NULL;
4466 bb->il.x.rtl = ggc_alloc_cleared_rtl_bb_info ();
4469 /* Returns true if it is possible to remove edge E by redirecting
4470 it to the destination of the other edge from E->src. */
4472 static bool
4473 rtl_can_remove_branch_p (const_edge e)
4475 const_basic_block src = e->src;
4476 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
4477 const_rtx insn = BB_END (src), set;
4479 /* The conditions are taken from try_redirect_by_replacing_jump. */
4480 if (target == EXIT_BLOCK_PTR)
4481 return false;
4483 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4484 return false;
4486 if (find_reg_note (insn, REG_CROSSING_JUMP, NULL_RTX)
4487 || BB_PARTITION (src) != BB_PARTITION (target))
4488 return false;
4490 if (!onlyjump_p (insn)
4491 || tablejump_p (insn, NULL, NULL))
4492 return false;
4494 set = single_set (insn);
4495 if (!set || side_effects_p (set))
4496 return false;
4498 return true;
4501 static basic_block
4502 rtl_duplicate_bb (basic_block bb)
4504 bb = cfg_layout_duplicate_bb (bb);
4505 bb->aux = NULL;
4506 return bb;
4509 /* Do book-keeping of basic block BB for the profile consistency checker.
4510 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
4511 then do post-pass accounting. Store the counting in RECORD. */
4512 static void
4513 rtl_account_profile_record (basic_block bb, int after_pass,
4514 struct profile_record *record)
4516 rtx insn;
4517 FOR_BB_INSNS (bb, insn)
4518 if (INSN_P (insn))
4520 record->size[after_pass]
4521 += insn_rtx_cost (PATTERN (insn), false);
4522 if (profile_status == PROFILE_READ)
4523 record->time[after_pass]
4524 += insn_rtx_cost (PATTERN (insn), true) * bb->count;
4525 else if (profile_status == PROFILE_GUESSED)
4526 record->time[after_pass]
4527 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
4531 /* Implementation of CFG manipulation for linearized RTL. */
/* NULL entries are hooks this mode does not implement; positional order
   must match struct cfg_hooks (see cfghooks.h).  */
4532 struct cfg_hooks rtl_cfg_hooks = {
4533 "rtl",
4534 rtl_verify_flow_info,
4535 rtl_dump_bb,
4536 rtl_dump_bb_for_graph,
4537 rtl_create_basic_block,
4538 rtl_redirect_edge_and_branch,
4539 rtl_redirect_edge_and_branch_force,
4540 rtl_can_remove_branch_p,
4541 rtl_delete_block,
4542 rtl_split_block,
4543 rtl_move_block_after,
4544 rtl_can_merge_blocks, /* can_merge_blocks_p */
4545 rtl_merge_blocks,
4546 rtl_predict_edge,
4547 rtl_predicted_by_p,
4548 cfg_layout_can_duplicate_bb_p,
4549 rtl_duplicate_bb,
4550 rtl_split_edge,
4551 rtl_make_forwarder_block,
4552 rtl_tidy_fallthru_edge,
4553 rtl_force_nonfallthru,
4554 rtl_block_ends_with_call_p,
4555 rtl_block_ends_with_condjump_p,
4556 rtl_flow_call_edges_add,
4557 NULL, /* execute_on_growing_pred */
4558 NULL, /* execute_on_shrinking_pred */
4559 NULL, /* duplicate loop for trees */
4560 NULL, /* lv_add_condition_to_bb */
4561 NULL, /* lv_adjust_loop_header_phi*/
4562 NULL, /* extract_cond_bb_edges */
4563 NULL, /* flush_pending_stmts */
4564 rtl_block_empty_p, /* block_empty_p */
4565 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
4566 rtl_account_profile_record,
4569 /* Implementation of CFG manipulation for cfg layout RTL, where
4570 basic block connected via fallthru edges does not have to be adjacent.
4571 This representation will hopefully become the default one in future
4572 version of the compiler. */
/* NULL entries are hooks this mode does not implement; positional order
   must match struct cfg_hooks (see cfghooks.h).  */
4574 struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
4575 "cfglayout mode",
4576 rtl_verify_flow_info_1,
4577 rtl_dump_bb,
4578 rtl_dump_bb_for_graph,
4579 cfg_layout_create_basic_block,
4580 cfg_layout_redirect_edge_and_branch,
4581 cfg_layout_redirect_edge_and_branch_force,
4582 rtl_can_remove_branch_p,
4583 cfg_layout_delete_block,
4584 cfg_layout_split_block,
4585 rtl_move_block_after,
4586 cfg_layout_can_merge_blocks_p,
4587 cfg_layout_merge_blocks,
4588 rtl_predict_edge,
4589 rtl_predicted_by_p,
4590 cfg_layout_can_duplicate_bb_p,
4591 cfg_layout_duplicate_bb,
4592 cfg_layout_split_edge,
4593 rtl_make_forwarder_block,
4594 NULL, /* tidy_fallthru_edge */
4595 rtl_force_nonfallthru,
4596 rtl_block_ends_with_call_p,
4597 rtl_block_ends_with_condjump_p,
4598 rtl_flow_call_edges_add,
4599 NULL, /* execute_on_growing_pred */
4600 NULL, /* execute_on_shrinking_pred */
4601 duplicate_loop_to_header_edge, /* duplicate loop for trees */
4602 rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
4603 NULL, /* lv_adjust_loop_header_phi*/
4604 rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
4605 NULL, /* flush_pending_stmts */
4606 rtl_block_empty_p, /* block_empty_p */
4607 rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
4608 rtl_account_profile_record,
4611 #include "gt-cfgrtl.h"