Backport r203445 from v17
[official-gcc.git] / gcc-4_8 / gcc / cfgrtl.c
blob7766591c55cc4ea97ff1bd38702cc7b71ea496e3
1 /* Control flow graph manipulation code for GNU compiler.
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains low level functions to manipulate the CFG and analyze it
21 that are aware of the RTL intermediate language.
23 Available functionality:
24 - Basic CFG/RTL manipulation API documented in cfghooks.h
25 - CFG-aware instruction chain manipulation
26 delete_insn, delete_insn_chain
27 - Edge splitting and committing to edges
28 insert_insn_on_edge, commit_edge_insertions
29 - CFG updating after insn simplification
30 purge_dead_edges, purge_all_dead_edges
31 - CFG fixing after coarse manipulation
32 fixup_abnormal_edges
34 Functions not supposed for generic use:
35 - Infrastructure to determine quickly basic block for insn
36 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
37 - Edge redirection with updating and optimizing of insn chain
38 block_label, tidy_fallthru_edge, force_nonfallthru */
40 #include "config.h"
41 #include "system.h"
42 #include "coretypes.h"
43 #include "tm.h"
44 #include "tree.h"
45 #include "hard-reg-set.h"
46 #include "basic-block.h"
47 #include "bb-reorder.h"
48 #include "regs.h"
49 #include "flags.h"
50 #include "function.h"
51 #include "except.h"
52 #include "rtl-error.h"
53 #include "tm_p.h"
54 #include "obstack.h"
55 #include "insn-attr.h"
56 #include "insn-config.h"
57 #include "expr.h"
58 #include "target.h"
59 #include "common/common-target.h"
60 #include "cfgloop.h"
61 #include "ggc.h"
62 #include "tree-pass.h"
63 #include "df.h"
65 /* Holds the interesting leading and trailing notes for the function.
66 Only applicable if the CFG is in cfglayout mode. */
67 static GTY(()) rtx cfg_layout_function_footer;
68 static GTY(()) rtx cfg_layout_function_header;
70 static rtx skip_insns_after_block (basic_block);
71 static void record_effective_endpoints (void);
72 static rtx label_for_bb (basic_block);
73 static void fixup_reorder_chain (void);
75 void verify_insn_chain (void);
76 static void fixup_fallthru_exit_predecessor (void);
77 static int can_delete_note_p (const_rtx);
78 static int can_delete_label_p (const_rtx);
79 static basic_block rtl_split_edge (edge);
80 static bool rtl_move_block_after (basic_block, basic_block);
81 static int rtl_verify_flow_info (void);
82 static basic_block cfg_layout_split_block (basic_block, void *);
83 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
84 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
85 static void cfg_layout_delete_block (basic_block);
86 static void rtl_delete_block (basic_block);
87 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
88 static edge rtl_redirect_edge_and_branch (edge, basic_block);
89 static basic_block rtl_split_block (basic_block, void *);
90 static void rtl_dump_bb (FILE *, basic_block, int, int);
91 static int rtl_verify_flow_info_1 (void);
92 static void rtl_make_forwarder_block (edge);
94 /* Return true if NOTE is not one of the ones that must be kept paired,
95 so that we may simply delete it. */
97 static int
98 can_delete_note_p (const_rtx note)
100 switch (NOTE_KIND (note))
102 case NOTE_INSN_DELETED:
103 case NOTE_INSN_BASIC_BLOCK:
104 case NOTE_INSN_EPILOGUE_BEG:
105 return true;
107 default:
108 return false;
112 /* True if a given label can be deleted. */
114 static int
115 can_delete_label_p (const_rtx label)
117 return (!LABEL_PRESERVE_P (label)
118 /* User declared labels must be preserved. */
119 && LABEL_NAME (label) == 0
120 && !in_expr_list_p (forced_labels, label));
123 /* Delete INSN by patching it out. */
125 void
126 delete_insn (rtx insn)
128 rtx note;
129 bool really_delete = true;
131 if (LABEL_P (insn))
133 /* Some labels can't be directly removed from the INSN chain, as they
134 might be references via variables, constant pool etc.
135 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
136 if (! can_delete_label_p (insn))
138 const char *name = LABEL_NAME (insn);
139 basic_block bb = BLOCK_FOR_INSN (insn);
140 rtx bb_note = NEXT_INSN (insn);
142 really_delete = false;
143 PUT_CODE (insn, NOTE);
144 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
145 NOTE_DELETED_LABEL_NAME (insn) = name;
147 /* If the note following the label starts a basic block, and the
148 label is a member of the same basic block, interchange the two. */
149 if (bb_note != NULL_RTX
150 && NOTE_INSN_BASIC_BLOCK_P (bb_note)
151 && bb != NULL
152 && bb == BLOCK_FOR_INSN (bb_note))
154 reorder_insns_nobb (insn, insn, bb_note);
155 BB_HEAD (bb) = bb_note;
156 if (BB_END (bb) == bb_note)
157 BB_END (bb) = insn;
161 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
164 if (really_delete)
166 /* If this insn has already been deleted, something is very wrong. */
167 gcc_assert (!INSN_DELETED_P (insn));
168 remove_insn (insn);
169 INSN_DELETED_P (insn) = 1;
172 /* If deleting a jump, decrement the use count of the label. Deleting
173 the label itself should happen in the normal course of block merging. */
174 if (JUMP_P (insn))
176 if (JUMP_LABEL (insn)
177 && LABEL_P (JUMP_LABEL (insn)))
178 LABEL_NUSES (JUMP_LABEL (insn))--;
180 /* If there are more targets, remove them too. */
181 while ((note
182 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
183 && LABEL_P (XEXP (note, 0)))
185 LABEL_NUSES (XEXP (note, 0))--;
186 remove_note (insn, note);
190 /* Also if deleting any insn that references a label as an operand. */
191 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
192 && LABEL_P (XEXP (note, 0)))
194 LABEL_NUSES (XEXP (note, 0))--;
195 remove_note (insn, note);
198 if (JUMP_TABLE_DATA_P (insn))
200 rtx pat = PATTERN (insn);
201 int diff_vec_p = GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC;
202 int len = XVECLEN (pat, diff_vec_p);
203 int i;
205 for (i = 0; i < len; i++)
207 rtx label = XEXP (XVECEXP (pat, diff_vec_p, i), 0);
209 /* When deleting code in bulk (e.g. removing many unreachable
210 blocks) we can delete a label that's a target of the vector
211 before deleting the vector itself. */
212 if (!NOTE_P (label))
213 LABEL_NUSES (label)--;
218 /* Like delete_insn but also purge dead edges from BB. */
220 void
221 delete_insn_and_edges (rtx insn)
223 bool purge = false;
225 if (INSN_P (insn)
226 && BLOCK_FOR_INSN (insn)
227 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
228 purge = true;
229 delete_insn (insn);
230 if (purge)
231 purge_dead_edges (BLOCK_FOR_INSN (insn));
234 /* Unlink a chain of insns between START and FINISH, leaving notes
235 that must be paired. If CLEAR_BB is true, we set bb field for
236 insns that cannot be removed to NULL. */
238 void
239 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
241 rtx prev, current;
243 /* Unchain the insns one by one. It would be quicker to delete all of these
244 with a single unchaining, rather than one at a time, but we need to keep
245 the NOTE's. */
246 current = finish;
247 while (1)
249 prev = PREV_INSN (current);
250 if (NOTE_P (current) && !can_delete_note_p (current))
252 else
253 delete_insn (current);
255 if (clear_bb && !INSN_DELETED_P (current))
256 set_block_for_insn (current, NULL);
258 if (current == start)
259 break;
260 current = prev;
264 /* Create a new basic block consisting of the instructions between HEAD and END
265 inclusive. This function is designed to allow fast BB construction - reuses
266 the note and basic block struct in BB_NOTE, if any and do not grow
267 BASIC_BLOCK chain and should be used directly only by CFG construction code.
268 END can be NULL in to create new empty basic block before HEAD. Both END
269 and HEAD can be NULL to create basic block at the end of INSN chain.
270 AFTER is the basic block we should be put after. */
272 basic_block
273 create_basic_block_structure (rtx head, rtx end, rtx bb_note, basic_block after)
275 basic_block bb;
277 if (bb_note
278 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
279 && bb->aux == NULL)
281 /* If we found an existing note, thread it back onto the chain. */
283 rtx after;
285 if (LABEL_P (head))
286 after = head;
287 else
289 after = PREV_INSN (head);
290 head = bb_note;
293 if (after != bb_note && NEXT_INSN (after) != bb_note)
294 reorder_insns_nobb (bb_note, bb_note, after);
296 else
298 /* Otherwise we must create a note and a basic block structure. */
300 bb = alloc_block ();
302 init_rtl_bb_info (bb);
303 if (!head && !end)
304 head = end = bb_note
305 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
306 else if (LABEL_P (head) && end)
308 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
309 if (head == end)
310 end = bb_note;
312 else
314 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
315 head = bb_note;
316 if (!end)
317 end = head;
320 NOTE_BASIC_BLOCK (bb_note) = bb;
323 /* Always include the bb note in the block. */
324 if (NEXT_INSN (end) == bb_note)
325 end = bb_note;
327 BB_HEAD (bb) = head;
328 BB_END (bb) = end;
329 bb->index = last_basic_block++;
330 bb->flags = BB_NEW | BB_RTL;
331 link_block (bb, after);
332 SET_BASIC_BLOCK (bb->index, bb);
333 df_bb_refs_record (bb->index, false);
334 update_bb_for_insn (bb);
335 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
337 /* Tag the block so that we know it has been used when considering
338 other basic block notes. */
339 bb->aux = bb;
341 return bb;
344 /* Create new basic block consisting of instructions in between HEAD and END
345 and place it to the BB chain after block AFTER. END can be NULL to
346 create a new empty basic block before HEAD. Both END and HEAD can be
347 NULL to create basic block at the end of INSN chain. */
349 static basic_block
350 rtl_create_basic_block (void *headp, void *endp, basic_block after)
352 rtx head = (rtx) headp, end = (rtx) endp;
353 basic_block bb;
355 /* Grow the basic block array if needed. */
356 if ((size_t) last_basic_block >= basic_block_info->length ())
358 size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
359 vec_safe_grow_cleared (basic_block_info, new_size);
362 n_basic_blocks++;
364 bb = create_basic_block_structure (head, end, NULL, after);
365 bb->aux = NULL;
366 return bb;
369 static basic_block
370 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
372 basic_block newbb = rtl_create_basic_block (head, end, after);
374 return newbb;
377 /* Delete the insns in a (non-live) block. We physically delete every
378 non-deleted-note insn, and update the flow graph appropriately.
380 Return nonzero if we deleted an exception handler. */
382 /* ??? Preserving all such notes strikes me as wrong. It would be nice
383 to post-process the stream to remove empty blocks, loops, ranges, etc. */
385 static void
386 rtl_delete_block (basic_block b)
388 rtx insn, end;
390 /* If the head of this block is a CODE_LABEL, then it might be the
391 label for an exception handler which can't be reached. We need
392 to remove the label from the exception_handler_label list. */
393 insn = BB_HEAD (b);
395 end = get_last_bb_insn (b);
397 /* Selectively delete the entire chain. */
398 BB_HEAD (b) = NULL;
399 delete_insn_chain (insn, end, true);
402 if (dump_file)
403 fprintf (dump_file, "deleting block %d\n", b->index);
404 df_bb_delete (b->index);
407 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
409 void
410 compute_bb_for_insn (void)
412 basic_block bb;
414 FOR_EACH_BB (bb)
416 rtx end = BB_END (bb);
417 rtx insn;
419 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
421 BLOCK_FOR_INSN (insn) = bb;
422 if (insn == end)
423 break;
428 /* Release the basic_block_for_insn array. */
430 unsigned int
431 free_bb_for_insn (void)
433 rtx insn;
434 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
435 if (!BARRIER_P (insn))
436 BLOCK_FOR_INSN (insn) = NULL;
437 return 0;
440 static unsigned int
441 rest_of_pass_free_cfg (void)
443 #ifdef DELAY_SLOTS
444 /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
445 valid at that point so it would be too late to call df_analyze. */
446 if (optimize > 0 && flag_delayed_branch)
448 df_note_add_problem ();
449 df_analyze ();
451 #endif
453 if (crtl->has_bb_partition)
454 insert_section_boundary_note ();
456 free_bb_for_insn ();
457 return 0;
460 struct rtl_opt_pass pass_free_cfg =
463 RTL_PASS,
464 "*free_cfg", /* name */
465 OPTGROUP_NONE, /* optinfo_flags */
466 NULL, /* gate */
467 rest_of_pass_free_cfg, /* execute */
468 NULL, /* sub */
469 NULL, /* next */
470 0, /* static_pass_number */
471 TV_NONE, /* tv_id */
472 0, /* properties_required */
473 0, /* properties_provided */
474 PROP_cfg, /* properties_destroyed */
475 0, /* todo_flags_start */
476 0, /* todo_flags_finish */
480 /* Return RTX to emit after when we want to emit code on the entry of function. */
482 entry_of_function (void)
484 return (n_basic_blocks > NUM_FIXED_BLOCKS ?
485 BB_HEAD (ENTRY_BLOCK_PTR->next_bb) : get_insns ());
488 /* Emit INSN at the entry point of the function, ensuring that it is only
489 executed once per function. */
490 void
491 emit_insn_at_entry (rtx insn)
493 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR->succs);
494 edge e = ei_safe_edge (ei);
495 gcc_assert (e->flags & EDGE_FALLTHRU);
497 insert_insn_on_edge (insn, e);
498 commit_edge_insertions ();
501 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
502 (or BARRIER if found) and notify df of the bb change.
503 The insn chain range is inclusive
504 (i.e. both BEGIN and END will be updated. */
506 static void
507 update_bb_for_insn_chain (rtx begin, rtx end, basic_block bb)
509 rtx insn;
511 end = NEXT_INSN (end);
512 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
513 if (!BARRIER_P (insn))
514 df_insn_change_bb (insn, bb);
517 /* Update BLOCK_FOR_INSN of insns in BB to BB,
518 and notify df of the change. */
520 void
521 update_bb_for_insn (basic_block bb)
523 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
527 /* Like active_insn_p, except keep the return value clobber around
528 even after reload. */
530 static bool
531 flow_active_insn_p (const_rtx insn)
533 if (active_insn_p (insn))
534 return true;
536 /* A clobber of the function return value exists for buggy
537 programs that fail to return a value. Its effect is to
538 keep the return value from being live across the entire
539 function. If we allow it to be skipped, we introduce the
540 possibility for register lifetime confusion. */
541 if (GET_CODE (PATTERN (insn)) == CLOBBER
542 && REG_P (XEXP (PATTERN (insn), 0))
543 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
544 return true;
546 return false;
549 /* Return true if the block has no effect and only forwards control flow to
550 its single destination. */
552 bool
553 contains_no_active_insn_p (const_basic_block bb)
555 rtx insn;
557 if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
558 || !single_succ_p (bb))
559 return false;
561 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
562 if (INSN_P (insn) && flow_active_insn_p (insn))
563 return false;
565 return (!INSN_P (insn)
566 || (JUMP_P (insn) && simplejump_p (insn))
567 || !flow_active_insn_p (insn));
570 /* Likewise, but protect loop latches, headers and preheaders. */
571 /* FIXME: Make this a cfg hook. */
573 bool
574 forwarder_block_p (const_basic_block bb)
576 if (!contains_no_active_insn_p (bb))
577 return false;
579 /* Protect loop latches, headers and preheaders. */
580 if (current_loops)
582 basic_block dest;
583 if (bb->loop_father->header == bb)
584 return false;
585 dest = EDGE_SUCC (bb, 0)->dest;
586 if (dest->loop_father->header == dest)
587 return false;
590 return true;
593 /* Return nonzero if we can reach target from src by falling through. */
594 /* FIXME: Make this a cfg hook. */
596 bool
597 can_fallthru (basic_block src, basic_block target)
599 rtx insn = BB_END (src);
600 rtx insn2;
601 edge e;
602 edge_iterator ei;
604 if (target == EXIT_BLOCK_PTR)
605 return true;
606 if (src->next_bb != target)
607 return 0;
608 FOR_EACH_EDGE (e, ei, src->succs)
609 if (e->dest == EXIT_BLOCK_PTR
610 && e->flags & EDGE_FALLTHRU)
611 return 0;
613 insn2 = BB_HEAD (target);
614 if (insn2 && !active_insn_p (insn2))
615 insn2 = next_active_insn (insn2);
617 /* ??? Later we may add code to move jump tables offline. */
618 return next_active_insn (insn) == insn2;
621 /* Return nonzero if we could reach target from src by falling through,
622 if the target was made adjacent. If we already have a fall-through
623 edge to the exit block, we can't do that. */
624 static bool
625 could_fall_through (basic_block src, basic_block target)
627 edge e;
628 edge_iterator ei;
630 if (target == EXIT_BLOCK_PTR)
631 return true;
632 FOR_EACH_EDGE (e, ei, src->succs)
633 if (e->dest == EXIT_BLOCK_PTR
634 && e->flags & EDGE_FALLTHRU)
635 return 0;
636 return true;
639 /* Return the NOTE_INSN_BASIC_BLOCK of BB. */
641 bb_note (basic_block bb)
643 rtx note;
645 note = BB_HEAD (bb);
646 if (LABEL_P (note))
647 note = NEXT_INSN (note);
649 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
650 return note;
653 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
654 note associated with the BLOCK. */
656 static rtx
657 first_insn_after_basic_block_note (basic_block block)
659 rtx insn;
661 /* Get the first instruction in the block. */
662 insn = BB_HEAD (block);
664 if (insn == NULL_RTX)
665 return NULL_RTX;
666 if (LABEL_P (insn))
667 insn = NEXT_INSN (insn);
668 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
670 return NEXT_INSN (insn);
673 /* Creates a new basic block just after basic block B by splitting
674 everything after specified instruction I. */
676 static basic_block
677 rtl_split_block (basic_block bb, void *insnp)
679 basic_block new_bb;
680 rtx insn = (rtx) insnp;
681 edge e;
682 edge_iterator ei;
684 if (!insn)
686 insn = first_insn_after_basic_block_note (bb);
688 if (insn)
690 rtx next = insn;
692 insn = PREV_INSN (insn);
694 /* If the block contains only debug insns, insn would have
695 been NULL in a non-debug compilation, and then we'd end
696 up emitting a DELETED note. For -fcompare-debug
697 stability, emit the note too. */
698 if (insn != BB_END (bb)
699 && DEBUG_INSN_P (next)
700 && DEBUG_INSN_P (BB_END (bb)))
702 while (next != BB_END (bb) && DEBUG_INSN_P (next))
703 next = NEXT_INSN (next);
705 if (next == BB_END (bb))
706 emit_note_after (NOTE_INSN_DELETED, next);
709 else
710 insn = get_last_insn ();
713 /* We probably should check type of the insn so that we do not create
714 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
715 bother. */
716 if (insn == BB_END (bb))
717 emit_note_after (NOTE_INSN_DELETED, insn);
719 /* Create the new basic block. */
720 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
721 BB_COPY_PARTITION (new_bb, bb);
722 BB_END (bb) = insn;
724 /* Redirect the outgoing edges. */
725 new_bb->succs = bb->succs;
726 bb->succs = NULL;
727 FOR_EACH_EDGE (e, ei, new_bb->succs)
728 e->src = new_bb;
730 /* The new block starts off being dirty. */
731 df_set_bb_dirty (bb);
732 return new_bb;
735 /* Return true if the single edge between blocks A and B is the only place
736 in RTL which holds some unique locus. */
738 static bool
739 unique_locus_on_edge_between_p (basic_block a, basic_block b)
741 const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
742 rtx insn, end;
744 if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
745 return false;
747 /* First scan block A backward. */
748 insn = BB_END (a);
749 end = PREV_INSN (BB_HEAD (a));
750 while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
751 insn = PREV_INSN (insn);
753 if (insn != end && INSN_LOCATION (insn) == goto_locus)
754 return false;
756 /* Then scan block B forward. */
757 insn = BB_HEAD (b);
758 if (insn)
760 end = NEXT_INSN (BB_END (b));
761 while (insn != end && !NONDEBUG_INSN_P (insn))
762 insn = NEXT_INSN (insn);
764 if (insn != end && INSN_HAS_LOCATION (insn)
765 && INSN_LOCATION (insn) == goto_locus)
766 return false;
769 return true;
772 /* If the single edge between blocks A and B is the only place in RTL which
773 holds some unique locus, emit a nop with that locus between the blocks. */
775 static void
776 emit_nop_for_unique_locus_between (basic_block a, basic_block b)
778 if (!unique_locus_on_edge_between_p (a, b))
779 return;
781 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
782 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
785 /* Blocks A and B are to be merged into a single block A. The insns
786 are already contiguous. */
788 static void
789 rtl_merge_blocks (basic_block a, basic_block b)
791 rtx b_head = BB_HEAD (b), b_end = BB_END (b), a_end = BB_END (a);
792 rtx del_first = NULL_RTX, del_last = NULL_RTX;
793 rtx b_debug_start = b_end, b_debug_end = b_end;
794 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
795 int b_empty = 0;
797 if (dump_file)
798 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
799 a->index);
801 while (DEBUG_INSN_P (b_end))
802 b_end = PREV_INSN (b_debug_start = b_end);
804 /* If there was a CODE_LABEL beginning B, delete it. */
805 if (LABEL_P (b_head))
807 /* Detect basic blocks with nothing but a label. This can happen
808 in particular at the end of a function. */
809 if (b_head == b_end)
810 b_empty = 1;
812 del_first = del_last = b_head;
813 b_head = NEXT_INSN (b_head);
816 /* Delete the basic block note and handle blocks containing just that
817 note. */
818 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
820 if (b_head == b_end)
821 b_empty = 1;
822 if (! del_last)
823 del_first = b_head;
825 del_last = b_head;
826 b_head = NEXT_INSN (b_head);
829 /* If there was a jump out of A, delete it. */
830 if (JUMP_P (a_end))
832 rtx prev;
834 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
835 if (!NOTE_P (prev)
836 || NOTE_INSN_BASIC_BLOCK_P (prev)
837 || prev == BB_HEAD (a))
838 break;
840 del_first = a_end;
842 #ifdef HAVE_cc0
843 /* If this was a conditional jump, we need to also delete
844 the insn that set cc0. */
845 if (only_sets_cc0_p (prev))
847 rtx tmp = prev;
849 prev = prev_nonnote_insn (prev);
850 if (!prev)
851 prev = BB_HEAD (a);
852 del_first = tmp;
854 #endif
856 a_end = PREV_INSN (del_first);
858 else if (BARRIER_P (NEXT_INSN (a_end)))
859 del_first = NEXT_INSN (a_end);
861 /* Delete everything marked above as well as crap that might be
862 hanging out between the two blocks. */
863 BB_END (a) = a_end;
864 BB_HEAD (b) = b_empty ? NULL_RTX : b_head;
865 delete_insn_chain (del_first, del_last, true);
867 /* When not optimizing CFG and the edge is the only place in RTL which holds
868 some unique locus, emit a nop with that locus in between. */
869 if (!optimize)
871 emit_nop_for_unique_locus_between (a, b);
872 a_end = BB_END (a);
875 /* Reassociate the insns of B with A. */
876 if (!b_empty)
878 update_bb_for_insn_chain (a_end, b_debug_end, a);
880 BB_END (a) = b_debug_end;
881 BB_HEAD (b) = NULL_RTX;
883 else if (b_end != b_debug_end)
885 /* Move any deleted labels and other notes between the end of A
886 and the debug insns that make up B after the debug insns,
887 bringing the debug insns into A while keeping the notes after
888 the end of A. */
889 if (NEXT_INSN (a_end) != b_debug_start)
890 reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
891 b_debug_end);
892 update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
893 BB_END (a) = b_debug_end;
896 df_bb_delete (b->index);
898 /* If B was a forwarder block, propagate the locus on the edge. */
899 if (forwarder_p
900 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
901 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
903 if (dump_file)
904 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
908 /* Return true when block A and B can be merged. */
910 static bool
911 rtl_can_merge_blocks (basic_block a, basic_block b)
913 /* If we are partitioning hot/cold basic blocks, we don't want to
914 mess up unconditional or indirect jumps that cross between hot
915 and cold sections.
917 Basic block partitioning may result in some jumps that appear to
918 be optimizable (or blocks that appear to be mergeable), but which really
919 must be left untouched (they are required to make it safely across
920 partition boundaries). See the comments at the top of
921 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
923 if (BB_PARTITION (a) != BB_PARTITION (b))
924 return false;
926 /* Protect the loop latches. */
927 if (current_loops && b->loop_father->latch == b)
928 return false;
930 /* There must be exactly one edge in between the blocks. */
931 return (single_succ_p (a)
932 && single_succ (a) == b
933 && single_pred_p (b)
934 && a != b
935 /* Must be simple edge. */
936 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
937 && a->next_bb == b
938 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
939 /* If the jump insn has side effects,
940 we can't kill the edge. */
941 && (!JUMP_P (BB_END (a))
942 || (reload_completed
943 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
946 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
947 exist. */
950 block_label (basic_block block)
952 if (block == EXIT_BLOCK_PTR)
953 return NULL_RTX;
955 if (!LABEL_P (BB_HEAD (block)))
957 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
960 return BB_HEAD (block);
963 /* Attempt to perform edge redirection by replacing possibly complex jump
964 instruction by unconditional jump or removing jump completely. This can
965 apply only if all edges now point to the same block. The parameters and
966 return values are equivalent to redirect_edge_and_branch. */
968 edge
969 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
971 basic_block src = e->src;
972 rtx insn = BB_END (src), kill_from;
973 rtx set;
974 int fallthru = 0;
976 /* If we are partitioning hot/cold basic blocks, we don't want to
977 mess up unconditional or indirect jumps that cross between hot
978 and cold sections.
980 Basic block partitioning may result in some jumps that appear to
981 be optimizable (or blocks that appear to be mergeable), but which really
982 must be left untouched (they are required to make it safely across
983 partition boundaries). See the comments at the top of
984 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
986 if (BB_PARTITION (src) != BB_PARTITION (target))
987 return NULL;
989 /* We can replace or remove a complex jump only when we have exactly
990 two edges. Also, if we have exactly one outgoing edge, we can
991 redirect that. */
992 if (EDGE_COUNT (src->succs) >= 3
993 /* Verify that all targets will be TARGET. Specifically, the
994 edge that is not E must also go to TARGET. */
995 || (EDGE_COUNT (src->succs) == 2
996 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
997 return NULL;
999 if (!onlyjump_p (insn))
1000 return NULL;
1001 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
1002 return NULL;
1004 /* Avoid removing branch with side effects. */
1005 set = single_set (insn);
1006 if (!set || side_effects_p (set))
1007 return NULL;
1009 /* In case we zap a conditional jump, we'll need to kill
1010 the cc0 setter too. */
1011 kill_from = insn;
1012 #ifdef HAVE_cc0
1013 if (reg_mentioned_p (cc0_rtx, PATTERN (insn))
1014 && only_sets_cc0_p (PREV_INSN (insn)))
1015 kill_from = PREV_INSN (insn);
1016 #endif
1018 /* See if we can create the fallthru edge. */
1019 if (in_cfglayout || can_fallthru (src, target))
1021 if (dump_file)
1022 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
1023 fallthru = 1;
1025 /* Selectively unlink whole insn chain. */
1026 if (in_cfglayout)
1028 rtx insn = BB_FOOTER (src);
1030 delete_insn_chain (kill_from, BB_END (src), false);
1032 /* Remove barriers but keep jumptables. */
1033 while (insn)
1035 if (BARRIER_P (insn))
1037 if (PREV_INSN (insn))
1038 NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1039 else
1040 BB_FOOTER (src) = NEXT_INSN (insn);
1041 if (NEXT_INSN (insn))
1042 PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1044 if (LABEL_P (insn))
1045 break;
1046 insn = NEXT_INSN (insn);
1049 else
1050 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
1051 false);
1054 /* If this already is simplejump, redirect it. */
1055 else if (simplejump_p (insn))
1057 if (e->dest == target)
1058 return NULL;
1059 if (dump_file)
1060 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
1061 INSN_UID (insn), e->dest->index, target->index);
1062 if (!redirect_jump (insn, block_label (target), 0))
1064 gcc_assert (target == EXIT_BLOCK_PTR);
1065 return NULL;
1069 /* Cannot do anything for target exit block. */
1070 else if (target == EXIT_BLOCK_PTR)
1071 return NULL;
1073 /* Or replace possibly complicated jump insn by simple jump insn. */
1074 else
1076 rtx target_label = block_label (target);
1077 rtx barrier, label, table;
1079 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
1080 JUMP_LABEL (BB_END (src)) = target_label;
1081 LABEL_NUSES (target_label)++;
1082 if (dump_file)
1083 fprintf (dump_file, "Replacing insn %i by jump %i\n",
1084 INSN_UID (insn), INSN_UID (BB_END (src)));
1087 delete_insn_chain (kill_from, insn, false);
1089 /* Recognize a tablejump that we are converting to a
1090 simple jump and remove its associated CODE_LABEL
1091 and ADDR_VEC or ADDR_DIFF_VEC. */
1092 if (tablejump_p (insn, &label, &table))
1093 delete_insn_chain (label, table, false);
1095 barrier = next_nonnote_insn (BB_END (src));
1096 if (!barrier || !BARRIER_P (barrier))
1097 emit_barrier_after (BB_END (src));
1098 else
1100 if (barrier != NEXT_INSN (BB_END (src)))
1102 /* Move the jump before barrier so that the notes
1103 which originally were or were created before jump table are
1104 inside the basic block. */
1105 rtx new_insn = BB_END (src);
1107 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
1108 PREV_INSN (barrier), src);
1110 NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
1111 PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
1113 NEXT_INSN (new_insn) = barrier;
1114 NEXT_INSN (PREV_INSN (barrier)) = new_insn;
1116 PREV_INSN (new_insn) = PREV_INSN (barrier);
1117 PREV_INSN (barrier) = new_insn;
1122 /* Keep only one edge out and set proper flags. */
1123 if (!single_succ_p (src))
1124 remove_edge (e);
1125 gcc_assert (single_succ_p (src));
1127 e = single_succ_edge (src);
1128 if (fallthru)
1129 e->flags = EDGE_FALLTHRU;
1130 else
1131 e->flags = 0;
1133 e->probability = REG_BR_PROB_BASE;
1134 e->count = src->count;
1136 if (e->dest != target)
1137 redirect_edge_succ (e, target);
1138 return e;
1141 /* Subroutine of redirect_branch_edge that tries to patch the jump
1142 instruction INSN so that it reaches block NEW. Do this
1143 only when it originally reached block OLD. Return true if this
1144 worked or the original target wasn't OLD, return false if redirection
1145 doesn't work. */
1147 static bool
1148 patch_jump_insn (rtx insn, rtx old_label, basic_block new_bb)
1150 rtx tmp;
1151 /* Recognize a tablejump and adjust all matching cases. */
1152 if (tablejump_p (insn, NULL, &tmp))
1154 rtvec vec;
1155 int j;
1156 rtx new_label = block_label (new_bb);
/* The exit block carries no CODE_LABEL, so a tablejump cannot be
   retargeted at it. */
1158 if (new_bb == EXIT_BLOCK_PTR)
1159 return false;
/* ADDR_VEC keeps its label vector in operand 0, ADDR_DIFF_VEC in
   operand 1 (operand 0 there is the base label).  */
1160 if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
1161 vec = XVEC (PATTERN (tmp), 0);
1162 else
1163 vec = XVEC (PATTERN (tmp), 1);
/* Rewrite every table entry that referenced OLD_LABEL, keeping the
   LABEL_NUSES reference counts of both labels in sync. */
1165 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1166 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1168 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1169 --LABEL_NUSES (old_label);
1170 ++LABEL_NUSES (new_label);
1173 /* Handle casesi dispatch insns. */
1174 if ((tmp = single_set (insn)) != NULL
1175 && SET_DEST (tmp) == pc_rtx
1176 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1177 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1178 && XEXP (XEXP (SET_SRC (tmp), 2), 0) == old_label)
1180 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
1181 new_label);
1182 --LABEL_NUSES (old_label);
1183 ++LABEL_NUSES (new_label);
/* asm goto: patch every ASM_OPERANDS label that pointed at OLD_LABEL. */
1186 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
1188 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
1189 rtx new_label, note;
1191 if (new_bb == EXIT_BLOCK_PTR)
1192 return false;
1193 new_label = block_label (new_bb);
1195 for (i = 0; i < n; ++i)
1197 rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
1198 gcc_assert (GET_CODE (old_ref) == LABEL_REF);
1199 if (XEXP (old_ref, 0) == old_label)
1201 ASM_OPERANDS_LABEL (tmp, i)
1202 = gen_rtx_LABEL_REF (Pmode, new_label);
1203 --LABEL_NUSES (old_label);
1204 ++LABEL_NUSES (new_label);
/* Keep JUMP_LABEL and the REG_LABEL_TARGET notes consistent: the label
   stored in JUMP_LABEL must not also appear as a REG_LABEL_TARGET note. */
1208 if (JUMP_LABEL (insn) == old_label)
1210 JUMP_LABEL (insn) = new_label;
1211 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1212 if (note)
1213 remove_note (insn, note);
1215 else
1217 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1218 if (note)
1219 remove_note (insn, note);
1220 if (JUMP_LABEL (insn) != new_label
1221 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1222 add_reg_note (insn, REG_LABEL_TARGET, new_label);
/* Also rewrite any REG_LABEL_OPERAND notes referring to OLD_LABEL. */
1224 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1225 != NULL_RTX)
1226 XEXP (note, 0) = new_label;
1228 else
1230 /* ?? We may play the games with moving the named labels from
1231 one basic block to the other in case only one computed_jump is
1232 available. */
1233 if (computed_jump_p (insn)
1234 /* A return instruction can't be redirected. */
1235 || returnjump_p (insn))
1236 return false;
1238 if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
1240 /* If the insn doesn't go where we think, we're confused. */
1241 gcc_assert (JUMP_LABEL (insn) == old_label);
1243 /* If the substitution doesn't succeed, die. This can happen
1244 if the back end emitted unrecognizable instructions or if
1245 target is exit block on some arches. */
1246 if (!redirect_jump (insn, block_label (new_bb), 0))
1248 gcc_assert (new_bb == EXIT_BLOCK_PTR);
1249 return false;
1253 return true;
1257 /* Redirect edge representing branch of (un)conditional jump or tablejump,
1258 NULL on failure */
1259 static edge
1260 redirect_branch_edge (edge e, basic_block target)
1262 rtx old_label = BB_HEAD (e->dest);
1263 basic_block src = e->src;
1264 rtx insn = BB_END (src);
1266 /* We can only redirect non-fallthru edges of jump insn. */
1267 if (e->flags & EDGE_FALLTHRU)
1268 return NULL;
1269 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1270 return NULL;
1272 if (!currently_expanding_to_rtl)
1274 if (!patch_jump_insn (insn, old_label, target))
1275 return NULL;
1277 else
1278 /* When expanding this BB might actually contain multiple
1279 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1280 Redirect all of those that match our label. */
1281 FOR_BB_INSNS (src, insn)
1282 if (JUMP_P (insn) && !patch_jump_insn (insn, old_label, target))
1283 return NULL;
1285 if (dump_file)
1286 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
1287 e->src->index, e->dest->index, target->index);
/* The insn stream is already patched; now update the CFG edge itself
   (merging with a pre-existing edge to TARGET if there is one). */
1289 if (e->dest != target)
1290 e = redirect_edge_succ_nodup (e, target);
1292 return e;
1295 /* Called when edge E has been redirected to a new destination,
1296 in order to update the region crossing flag on the edge and
1297 jump. */
1299 static void
1300 fixup_partition_crossing (edge e)
1302 rtx note;
/* Edges to/from the artificial entry and exit blocks never cross a
   hot/cold section boundary; nothing to do. */
1304 if (e->src == ENTRY_BLOCK_PTR || e->dest == EXIT_BLOCK_PTR)
1305 return;
1306 /* If we redirected an existing edge, it may already be marked
1307 crossing, even though the new src is missing a reg crossing note.
1308 But make sure reg crossing note doesn't already exist before
1309 inserting. */
1310 if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
1312 e->flags |= EDGE_CROSSING;
1313 note = find_reg_note (BB_END (e->src), REG_CROSSING_JUMP, NULL_RTX);
1314 if (JUMP_P (BB_END (e->src))
1315 && !note)
1316 add_reg_note (BB_END (e->src), REG_CROSSING_JUMP, NULL_RTX);
1318 else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
1320 e->flags &= ~EDGE_CROSSING;
1321 /* Remove the section crossing note from jump at end of
1322 src if it exists, and if no other successors are
1323 still crossing. */
1324 note = find_reg_note (BB_END (e->src), REG_CROSSING_JUMP, NULL_RTX);
1325 if (note)
1327 bool has_crossing_succ = false;
1328 edge e2;
1329 edge_iterator ei;
/* The note belongs to the jump insn, which may serve several outgoing
   edges; only drop it once no successor edge crosses sections. */
1330 FOR_EACH_EDGE (e2, ei, e->src->succs)
1332 has_crossing_succ |= (e2->flags & EDGE_CROSSING);
1333 if (has_crossing_succ)
1334 break;
1336 if (!has_crossing_succ)
1337 remove_note (BB_END (e->src), note);
1342 /* Called when block BB has been reassigned to the cold partition,
1343 because it is now dominated by another cold block,
1344 to ensure that the region crossing attributes are updated. */
1346 static void
1347 fixup_new_cold_bb (basic_block bb)
1349 edge e;
1350 edge_iterator ei;
1352 /* This is called when a hot bb is found to now be dominated
1353 by a cold bb and therefore needs to become cold. Therefore,
1354 its preds will no longer be region crossing. Any non-dominating
1355 preds that were previously hot would also have become cold
1356 in the caller for the same region. Any preds that were previously
1357 region-crossing will be adjusted in fixup_partition_crossing. */
1358 FOR_EACH_EDGE (e, ei, bb->preds)
1360 fixup_partition_crossing (e);
1363 /* Possibly need to make bb's successor edges region crossing,
1364 or remove stale region crossing. */
1365 FOR_EACH_EDGE (e, ei, bb->succs)
1367 /* We can't have fall-through edges across partition boundaries.
1368 Note that force_nonfallthru will do any necessary partition
1369 boundary fixup by calling fixup_partition_crossing itself. */
1370 if ((e->flags & EDGE_FALLTHRU)
1371 && BB_PARTITION (bb) != BB_PARTITION (e->dest)
1372 && e->dest != EXIT_BLOCK_PTR)
1373 force_nonfallthru (e);
1374 else
1375 fixup_partition_crossing (e);
1379 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
1380 expense of adding new instructions or reordering basic blocks.
1382 Function can be also called with edge destination equivalent to the TARGET.
1383 Then it should try the simplifications and do nothing if none is possible.
1385 Return edge representing the branch if transformation succeeded. Return NULL
1386 on failure.
1387 We still return NULL in case E already destinated TARGET and we didn't
1388 managed to simplify instruction stream. */
1390 static edge
1391 rtl_redirect_edge_and_branch (edge e, basic_block target)
1393 edge ret;
1394 basic_block src = e->src;
1395 basic_block dest = e->dest;
/* Abnormal-call and EH edges have no redirectable branch insn. */
1397 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1398 return NULL;
1400 if (dest == target)
1401 return e;
/* First try the cheap route: replace the jump with a simple jump (or
   nothing at all) instead of patching label references. */
1403 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1405 df_set_bb_dirty (src);
1406 fixup_partition_crossing (ret);
1407 return ret;
1410 ret = redirect_branch_edge (e, target);
1411 if (!ret)
1412 return NULL;
/* Mark SRC for dataflow re-scan and keep hot/cold crossing info valid. */
1414 df_set_bb_dirty (src);
1415 fixup_partition_crossing (ret);
1416 return ret;
1419 /* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode. */
1421 void
1422 emit_barrier_after_bb (basic_block bb)
1424 rtx barrier = emit_barrier_after (BB_END (bb));
1425 gcc_assert (current_ir_type() == IR_RTL_CFGRTL
1426 || current_ir_type () == IR_RTL_CFGLAYOUT);
/* In cfglayout mode barriers don't live in the insn stream proper;
   move the freshly emitted one into BB's footer chain. */
1427 if (current_ir_type () == IR_RTL_CFGLAYOUT)
1428 BB_FOOTER (bb) = unlink_insn_chain (barrier, barrier);
1431 /* Like force_nonfallthru below, but additionally performs redirection
1432 Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
1433 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1434 simple_return_rtx, indicating which kind of returnjump to create.
1435 It should be NULL otherwise. */
1437 basic_block
1438 force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1440 basic_block jump_block, new_bb = NULL, src = e->src;
1441 rtx note;
1442 edge new_edge;
1443 int abnormal_edge_flags = 0;
1444 bool asm_goto_edge = false;
1445 int loc;
1447 /* In the case the last instruction is conditional jump to the next
1448 instruction, first redirect the jump itself and then continue
1449 by creating a basic block afterwards to redirect fallthru edge. */
1450 if (e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR
1451 && any_condjump_p (BB_END (e->src))
1452 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1454 rtx note;
1455 edge b = unchecked_make_edge (e->src, target, 0);
1456 bool redirected;
1458 redirected = redirect_jump (BB_END (e->src), block_label (target), 0);
1459 gcc_assert (redirected);
/* Split the profile between the new branch edge B and the remaining
   fallthru edge E according to the REG_BR_PROB note, if any. */
1461 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1462 if (note)
1464 int prob = INTVAL (XEXP (note, 0));
1466 b->probability = prob;
1467 b->count = e->count * prob / REG_BR_PROB_BASE;
/* NOTE(review): this zeroes e->probability rather than subtracting
   b's share (e->probability -= prob); looks suspicious — confirm
   against upstream history before changing. */
1468 e->probability -= e->probability;
1469 e->count -= b->count;
1470 if (e->probability < 0)
1471 e->probability = 0;
1472 if (e->count < 0)
1473 e->count = 0;
1477 if (e->flags & EDGE_ABNORMAL)
1479 /* Irritating special case - fallthru edge to the same block as abnormal
1480 edge.
1481 We can't redirect abnormal edge, but we still can split the fallthru
1482 one and create separate abnormal edge to original destination.
1483 This allows bb-reorder to make such edge non-fallthru. */
1484 gcc_assert (e->dest == target);
1485 abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1486 e->flags &= EDGE_FALLTHRU;
1488 else
1490 gcc_assert (e->flags & EDGE_FALLTHRU);
1491 if (e->src == ENTRY_BLOCK_PTR)
1493 /* We can't redirect the entry block. Create an empty block
1494 at the start of the function which we use to add the new
1495 jump. */
1496 edge tmp;
1497 edge_iterator ei;
1498 bool found = false;
1500 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL, ENTRY_BLOCK_PTR);
1502 /* Change the existing edge's source to be the new block, and add
1503 a new edge from the entry block to the new block. */
1504 e->src = bb;
/* Detach E from the entry block's successor vector by hand; it was
   re-sourced above, so the normal redirect helpers don't apply. */
1505 for (ei = ei_start (ENTRY_BLOCK_PTR->succs); (tmp = ei_safe_edge (ei)); )
1507 if (tmp == e)
1509 ENTRY_BLOCK_PTR->succs->unordered_remove (ei.index);
1510 found = true;
1511 break;
1513 else
1514 ei_next (&ei);
1517 gcc_assert (found);
1519 vec_safe_push (bb->succs, e);
1520 make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
1524 /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
1525 don't point to the target or fallthru label. */
1526 if (JUMP_P (BB_END (e->src))
1527 && target != EXIT_BLOCK_PTR
1528 && (e->flags & EDGE_FALLTHRU)
1529 && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
1531 int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
1532 bool adjust_jump_target = false;
1534 for (i = 0; i < n; ++i)
1536 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
1538 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
1539 XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1540 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
1541 adjust_jump_target = true;
1543 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
1544 asm_goto_edge = true;
/* Keep JUMP_LABEL and REG_LABEL_TARGET/REG_LABEL_OPERAND notes of the
   asm goto consistent with the label rewrite done just above (mirrors
   the logic in patch_jump_insn). */
1546 if (adjust_jump_target)
1548 rtx insn = BB_END (e->src), note;
1549 rtx old_label = BB_HEAD (e->dest);
1550 rtx new_label = BB_HEAD (target);
1552 if (JUMP_LABEL (insn) == old_label)
1554 JUMP_LABEL (insn) = new_label;
1555 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1556 if (note)
1557 remove_note (insn, note);
1559 else
1561 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1562 if (note)
1563 remove_note (insn, note);
1564 if (JUMP_LABEL (insn) != new_label
1565 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1566 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1568 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1569 != NULL_RTX)
1570 XEXP (note, 0) = new_label;
/* A dedicated jump block is needed when SRC has other successors, when
   an abnormal edge shares the destination, or when the asm goto also
   targets TARGET; otherwise the jump can go at the end of SRC itself. */
1574 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1576 gcov_type count = e->count;
1577 int probability = e->probability;
1578 /* Create the new structures. */
1580 /* If the old block ended with a tablejump, skip its table
1581 by searching forward from there. Otherwise start searching
1582 forward from the last instruction of the old block. */
1583 if (!tablejump_p (BB_END (e->src), NULL, &note))
1584 note = BB_END (e->src);
1585 note = NEXT_INSN (note);
1587 jump_block = create_basic_block (note, NULL, e->src);
1588 jump_block->count = count;
1589 jump_block->frequency = EDGE_FREQUENCY (e);
1591 /* Make sure new block ends up in correct hot/cold section. */
1593 BB_COPY_PARTITION (jump_block, e->src);
1595 /* Wire edge in. */
1596 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1597 new_edge->probability = probability;
1598 new_edge->count = count;
1600 /* Redirect old edge. */
1601 redirect_edge_pred (e, jump_block);
1602 e->probability = REG_BR_PROB_BASE;
1604 /* If e->src was previously region crossing, it no longer is
1605 and the reg crossing note should be removed. */
1606 fixup_partition_crossing (new_edge);
1608 /* If asm goto has any label refs to target's label,
1609 add also edge from asm goto bb to target. */
1610 if (asm_goto_edge)
/* Split the profile 50/50 between the fallthru into the jump block
   and the direct asm-goto edge to TARGET. */
1612 new_edge->probability /= 2;
1613 new_edge->count /= 2;
1614 jump_block->count /= 2;
1615 jump_block->frequency /= 2;
1616 new_edge = make_edge (new_edge->src, target,
1617 e->flags & ~EDGE_FALLTHRU);
1618 new_edge->probability = probability - probability / 2;
1619 new_edge->count = count - count / 2;
1622 new_bb = jump_block;
1624 else
1625 jump_block = e->src;
1627 loc = e->goto_locus;
1628 e->flags &= ~EDGE_FALLTHRU;
1629 if (target == EXIT_BLOCK_PTR)
/* Jumping to exit: emit the kind of return insn requested by
   JUMP_LABEL (ret_rtx or simple_return_rtx). */
1631 if (jump_label == ret_rtx)
1633 #ifdef HAVE_return
1634 emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
1635 #else
1636 gcc_unreachable ();
1637 #endif
1639 else
1641 gcc_assert (jump_label == simple_return_rtx);
1642 #ifdef HAVE_simple_return
1643 emit_jump_insn_after_setloc (gen_simple_return (),
1644 BB_END (jump_block), loc);
1645 #else
1646 gcc_unreachable ();
1647 #endif
1649 set_return_jump_label (BB_END (jump_block));
1651 else
1653 rtx label = block_label (target);
1654 emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
1655 JUMP_LABEL (BB_END (jump_block)) = label;
1656 LABEL_NUSES (label)++;
1659 /* We might be in cfg layout mode, and if so, the following routine will
1660 insert the barrier correctly. */
1661 emit_barrier_after_bb (jump_block);
1662 redirect_edge_succ_nodup (e, target);
1664 if (abnormal_edge_flags)
1665 make_edge (src, target, abnormal_edge_flags);
1667 df_mark_solutions_dirty ();
1668 fixup_partition_crossing (e);
1669 return new_bb;
1672 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1673 (and possibly create new basic block) to make edge non-fallthru.
1674 Return newly created BB or NULL if none. */
1676 static basic_block
1677 rtl_force_nonfallthru (edge e)
/* Thin cfghooks wrapper: no redirection, no return-jump label. */
1679 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1682 /* Redirect edge even at the expense of creating new jump insn or
1683 basic block. Return new basic block if created, NULL otherwise.
1684 Conversion must be possible. */
1686 static basic_block
1687 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
/* Try the cheap in-place redirection first; if it worked (or the edge
   already points at TARGET) no new block is needed. */
1689 if (redirect_edge_and_branch (e, target)
1690 || e->dest == target)
1691 return NULL;
1693 /* In case the edge redirection failed, try to force it to be non-fallthru
1694 and redirect newly created simplejump. */
1695 df_set_bb_dirty (e->src);
1696 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1699 /* The given edge should potentially be a fallthru edge. If that is in
1700 fact true, delete the jump and barriers that are in the way. */
1702 static void
1703 rtl_tidy_fallthru_edge (edge e)
1705 rtx q;
1706 basic_block b = e->src, c = b->next_bb;
1708 /* ??? In a late-running flow pass, other folks may have deleted basic
1709 blocks by nopping out blocks, leaving multiple BARRIERs between here
1710 and the target label. They ought to be chastised and fixed.
1712 We can also wind up with a sequence of undeletable labels between
1713 one block and the next.
1715 So search through a sequence of barriers, labels, and notes for
1716 the head of block C and assert that we really do fall through. */
1718 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1719 if (INSN_P (q))
1720 return;
1722 /* Remove what will soon cease being the jump insn from the source block.
1723 If block B consisted only of this single jump, turn it into a deleted
1724 note. */
1725 q = BB_END (b);
1726 if (JUMP_P (q)
1727 && onlyjump_p (q)
1728 && (any_uncondjump_p (q)
1729 || single_succ_p (b)))
1731 #ifdef HAVE_cc0
1732 /* If this was a conditional jump, we need to also delete
1733 the insn that set cc0. */
1734 if (any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1735 q = PREV_INSN (q);
1736 #endif
/* Step back so Q marks the last insn to keep; everything after it up
   to C's head (the jump, barriers, etc.) gets unlinked below. */
1738 q = PREV_INSN (q);
1741 /* Selectively unlink the sequence. */
1742 if (q != PREV_INSN (BB_HEAD (c)))
1743 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
1745 e->flags |= EDGE_FALLTHRU;
1748 /* Should move basic block BB after basic block AFTER. NIY. */
1750 static bool
1751 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1752 basic_block after ATTRIBUTE_UNUSED)
/* Not implemented for plain cfgrtl mode; always report failure. */
1754 return false;
1757 /* Locate the last bb in the same partition as START_BB. */
1759 static basic_block
1760 last_bb_in_partition (basic_block start_bb)
1762 basic_block bb;
/* Walk forward until the next block changes partition; that makes BB
   the last block of START_BB's partition. */
1763 FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR, next_bb)
1765 if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
1766 return bb;
1768 /* Return bb before EXIT_BLOCK_PTR. */
1769 return bb->prev_bb;
1772 /* Split a (typically critical) edge. Return the new block.
1773 The edge must not be abnormal.
1775 ??? The code generally expects to be called on critical edges.
1776 The case of a block ending in an unconditional jump to a
1777 block with multiple predecessors is not handled optimally. */
1779 static basic_block
1780 rtl_split_edge (edge edge_in)
1782 basic_block bb, new_bb;
1783 rtx before;
1785 /* Abnormal edges cannot be split. */
1786 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1788 /* We are going to place the new block in front of edge destination.
1789 Avoid existence of fallthru predecessors. */
1790 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1792 edge e = find_fallthru_edge (edge_in->dest->preds);
1794 if (e)
1795 force_nonfallthru (e);
1798 /* Create the basic block note. */
1799 if (edge_in->dest != EXIT_BLOCK_PTR)
1800 before = BB_HEAD (edge_in->dest);
1801 else
1802 before = NULL_RTX;
1804 /* If this is a fall through edge to the exit block, the blocks might be
1805 not adjacent, and the right place is after the source. */
1806 if ((edge_in->flags & EDGE_FALLTHRU) && edge_in->dest == EXIT_BLOCK_PTR)
1808 before = NEXT_INSN (BB_END (edge_in->src));
1809 bb = create_basic_block (before, NULL, edge_in->src);
1810 BB_COPY_PARTITION (bb, edge_in->src);
1812 else
1814 if (edge_in->src == ENTRY_BLOCK_PTR)
1816 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
/* Coming from the entry block the new bb belongs with DEST's
   partition, not the (artificial) source's. */
1817 BB_COPY_PARTITION (bb, edge_in->dest);
1819 else
1821 basic_block after = edge_in->dest->prev_bb;
1822 /* If this is post-bb reordering, and the edge crosses a partition
1823 boundary, the new block needs to be inserted in the bb chain
1824 at the end of the src partition (since we put the new bb into
1825 that partition, see below). Otherwise we may end up creating
1826 an extra partition crossing in the chain, which is illegal.
1827 It can't go after the src, because src may have a fall-through
1828 to a different block. */
1829 if (crtl->bb_reorder_complete
1830 && (edge_in->flags & EDGE_CROSSING))
1832 after = last_bb_in_partition (edge_in->src);
1833 before = NEXT_INSN (BB_END (after));
1834 /* The instruction following the last bb in partition should
1835 be a barrier, since it cannot end in a fall-through. */
1836 gcc_checking_assert (BARRIER_P (before));
1837 before = NEXT_INSN (before);
1839 bb = create_basic_block (before, NULL, after);
1840 /* Put the split bb into the src partition, to avoid creating
1841 a situation where a cold bb dominates a hot bb, in the case
1842 where src is cold and dest is hot. The src will dominate
1843 the new bb (whereas it might not have dominated dest). */
1844 BB_COPY_PARTITION (bb, edge_in->src);
1848 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1850 /* Can't allow a region crossing edge to be fallthrough. */
1851 if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
1852 && edge_in->dest != EXIT_BLOCK_PTR)
1854 new_bb = force_nonfallthru (single_succ_edge (bb));
/* force_nonfallthru must not have had to create yet another block
   here, since bb is the sole predecessor. */
1855 gcc_assert (!new_bb);
1858 /* For non-fallthru edges, we must adjust the predecessor's
1859 jump instruction to target our new block. */
1860 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1862 edge redirected = redirect_edge_and_branch (edge_in, bb);
1863 gcc_assert (redirected);
1865 else
1867 if (edge_in->src != ENTRY_BLOCK_PTR)
1869 /* For asm goto even splitting of fallthru edge might
1870 need insn patching, as other labels might point to the
1871 old label. */
1872 rtx last = BB_END (edge_in->src);
1873 if (last
1874 && JUMP_P (last)
1875 && edge_in->dest != EXIT_BLOCK_PTR
1876 && extract_asm_operands (PATTERN (last)) != NULL_RTX
1877 && patch_jump_insn (last, before, bb))
1878 df_set_bb_dirty (edge_in->src);
1880 redirect_edge_succ (edge_in, bb);
1883 return bb;
1886 /* Queue instructions for insertion on an edge between two basic blocks.
1887 The new instructions and basic blocks (if any) will not appear in the
1888 CFG until commit_edge_insertions is called. */
1890 void
1891 insert_insn_on_edge (rtx pattern, edge e)
1893 /* We cannot insert instructions on an abnormal critical edge.
1894 It will be easier to find the culprit if we die now. */
1895 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
/* Append PATTERN to the edge's pending insn sequence, starting a new
   sequence if nothing is queued on E yet. */
1897 if (e->insns.r == NULL_RTX)
1898 start_sequence ();
1899 else
1900 push_to_sequence (e->insns.r);
1902 emit_insn (pattern);
1904 e->insns.r = get_insns ();
1905 end_sequence ();
1908 /* Update the CFG for the instructions queued on edge E. */
1910 void
1911 commit_one_edge_insertion (edge e)
1913 rtx before = NULL_RTX, after = NULL_RTX, insns, tmp, last;
1914 basic_block bb;
1916 /* Pull the insns off the edge now since the edge might go away. */
1917 insns = e->insns.r;
1918 e->insns.r = NULL_RTX;
1920 /* Figure out where to put these insns. If the destination has
1921 one predecessor, insert there. Except for the exit block. */
1922 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR)
1924 bb = e->dest;
1926 /* Get the location correct wrt a code label, and "nice" wrt
1927 a basic block note, and before everything else. */
1928 tmp = BB_HEAD (bb);
1929 if (LABEL_P (tmp))
1930 tmp = NEXT_INSN (tmp);
1931 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
1932 tmp = NEXT_INSN (tmp);
1933 if (tmp == BB_HEAD (bb))
1934 before = tmp;
1935 else if (tmp)
1936 after = PREV_INSN (tmp);
1937 else
1938 after = get_last_insn ();
1941 /* If the source has one successor and the edge is not abnormal,
1942 insert there. Except for the entry block.
1943 Don't do this if the predecessor ends in a jump other than
1944 unconditional simple jump. E.g. for asm goto that points all
1945 its labels at the fallthru basic block, we can't insert instructions
1946 before the asm goto, as the asm goto can have various of side effects,
1947 and can't emit instructions after the asm goto, as it must end
1948 the basic block. */
1949 else if ((e->flags & EDGE_ABNORMAL) == 0
1950 && single_succ_p (e->src)
1951 && e->src != ENTRY_BLOCK_PTR
1952 && (!JUMP_P (BB_END (e->src))
1953 || simplejump_p (BB_END (e->src))))
1955 bb = e->src;
1957 /* It is possible to have a non-simple jump here. Consider a target
1958 where some forms of unconditional jumps clobber a register. This
1959 happens on the fr30 for example.
1961 We know this block has a single successor, so we can just emit
1962 the queued insns before the jump. */
1963 if (JUMP_P (BB_END (bb)))
1964 before = BB_END (bb);
1965 else
1967 /* We'd better be fallthru, or we've lost track of what's what. */
1968 gcc_assert (e->flags & EDGE_FALLTHRU);
1970 after = BB_END (bb);
1974 /* Otherwise we must split the edge. */
1975 else
1977 bb = split_edge (e);
1979 /* If E crossed a partition boundary, we needed to make bb end in
1980 a region-crossing jump, even though it was originally fallthru. */
1981 if (JUMP_P (BB_END (bb)))
1982 before = BB_END (bb);
1983 else
1984 after = BB_END (bb);
1987 /* Now that we've found the spot, do the insertion. */
1988 if (before)
1990 emit_insn_before_noloc (insns, before, bb);
1991 last = prev_nonnote_insn (before);
1993 else
1994 last = emit_insn_after_noloc (insns, after, bb);
/* If the queued sequence ended in a return jump, BB now exits the
   function: drop the fallthru flag and terminate with a barrier. */
1996 if (returnjump_p (last))
1998 /* ??? Remove all outgoing edges from BB and add one for EXIT.
1999 This is not currently a problem because this only happens
2000 for the (single) epilogue, which already has a fallthru edge
2001 to EXIT. */
2003 e = single_succ_edge (bb);
2004 gcc_assert (e->dest == EXIT_BLOCK_PTR
2005 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
2007 e->flags &= ~EDGE_FALLTHRU;
2008 emit_barrier_after (last);
/* The old jump we inserted before is now dead code; remove it. */
2010 if (before)
2011 delete_insn (before);
2013 else
2014 gcc_assert (!JUMP_P (last));
2017 /* Update the CFG for all queued instructions. */
2019 void
2020 commit_edge_insertions (void)
2022 basic_block bb;
2024 /* Optimization passes that invoke this routine can cause hot blocks
2025 previously reached by both hot and cold blocks to become dominated only
2026 by cold blocks. This will cause the verification below to fail,
2027 and lead to now cold code in the hot section. In some cases this
2028 may only be visible after newly unreachable blocks are deleted,
2029 which will be done by fixup_partitions. */
2030 fixup_partitions ();
2032 #ifdef ENABLE_CHECKING
2033 verify_flow_info ();
2034 #endif
/* Walk every block (including the entry block's successors) and flush
   each edge's pending insn queue. */
2036 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
2038 edge e;
2039 edge_iterator ei;
2041 FOR_EACH_EDGE (e, ei, bb->succs)
2042 if (e->insns.r)
2043 commit_one_edge_insertion (e);
2048 /* Print out RTL-specific basic block information (live information
2049 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
2050 documented in dumpfile.h. */
2052 static void
2053 rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
2055 rtx insn;
2056 rtx last;
2057 char *s_indent;
/* Build an INDENT-wide blank prefix on the stack for aligned output. */
2059 s_indent = (char *) alloca ((size_t) indent + 1);
2060 memset (s_indent, ' ', (size_t) indent);
2061 s_indent[indent] = '\0';
2063 if (df && (flags & TDF_DETAILS))
2065 df_dump_top (bb, outf);
2066 putc ('\n', outf);
/* The artificial entry/exit blocks contain no insns to print. */
2069 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2070 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
2071 insn = NEXT_INSN (insn))
2073 if (flags & TDF_DETAILS)
2074 df_dump_insn_top (insn, outf)
2075 if (! (flags & TDF_SLIM))
2076 print_rtl_single (outf, insn);
2077 else
2078 dump_insn_slim (outf, insn);
2079 if (flags & TDF_DETAILS)
2080 df_dump_insn_bottom (insn, outf);
2083 if (df && (flags & TDF_DETAILS))
2085 df_dump_bottom (bb, outf);
2086 putc ('\n', outf);
2091 /* Like dump_function_to_file, but for RTL. Print out dataflow information
2092 for the start of each basic block. FLAGS are the TDF_* masks documented
2093 in dumpfile.h. */
2095 void
2096 print_rtl_with_bb (FILE *outf, const_rtx rtx_first, int flags)
2098 const_rtx tmp_rtx;
2099 if (rtx_first == 0)
2100 fprintf (outf, "(nil)\n");
2101 else
2103 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
2104 int max_uid = get_max_uid ();
/* Per-insn-UID maps: which bb starts/ends at each insn, and whether
   each insn belongs to zero, one, or several blocks. */
2105 basic_block *start = XCNEWVEC (basic_block, max_uid);
2106 basic_block *end = XCNEWVEC (basic_block, max_uid);
2107 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
2108 basic_block bb;
2110 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
2111 insns, but the CFG is not maintained so the basic block info
2112 is not reliable. Therefore it's omitted from the dumps. */
2113 if (! (cfun->curr_properties & PROP_cfg))
2114 flags &= ~TDF_BLOCKS;
2116 if (df)
2117 df_dump_start (outf);
2119 if (flags & TDF_BLOCKS)
/* First pass: record bb boundaries and membership for each insn. */
2121 FOR_EACH_BB_REVERSE (bb)
2123 rtx x;
2125 start[INSN_UID (BB_HEAD (bb))] = bb;
2126 end[INSN_UID (BB_END (bb))] = bb;
2127 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
2129 enum bb_state state = IN_MULTIPLE_BB;
2131 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
2132 state = IN_ONE_BB;
2133 in_bb_p[INSN_UID (x)] = state;
2135 if (x == BB_END (bb))
2136 break;
/* Second pass: walk the whole insn chain, printing bb headers/footers
   (and df info when requested) around the insns they delimit. */
2141 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
2143 if (flags & TDF_BLOCKS)
2145 bb = start[INSN_UID (tmp_rtx)];
2146 if (bb != NULL)
2148 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
2149 if (df && (flags & TDF_DETAILS))
2150 df_dump_top (bb, outf);
2153 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
2154 && !NOTE_P (tmp_rtx)
2155 && !BARRIER_P (tmp_rtx))
2156 fprintf (outf, ";; Insn is not within a basic block\n");
2157 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
2158 fprintf (outf, ";; Insn is in multiple basic blocks\n");
2161 if (flags & TDF_DETAILS)
2162 df_dump_insn_top (tmp_rtx, outf);
2163 if (! (flags & TDF_SLIM))
2164 print_rtl_single (outf, tmp_rtx);
2165 else
2166 dump_insn_slim (outf, tmp_rtx);
2167 if (flags & TDF_DETAILS)
2168 df_dump_insn_bottom (tmp_rtx, outf);
2170 if (flags & TDF_BLOCKS)
2172 bb = end[INSN_UID (tmp_rtx)];
2173 if (bb != NULL)
2175 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
2176 if (df && (flags & TDF_DETAILS))
2177 df_dump_bottom (bb, outf);
2178 putc ('\n', outf);
2183 free (start);
2184 free (end);
2185 free (in_bb_p);
2189 /* Update the branch probability of BB if a REG_BR_PROB is present. */
2191 void
2192 update_br_prob_note (basic_block bb)
2194 rtx note;
2195 if (!JUMP_P (BB_END (bb)))
2196 return;
2197 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
/* Nothing to do when there is no note or it already matches the branch
   edge's probability. */
2198 if (!note || INTVAL (XEXP (note, 0)) == BRANCH_EDGE (bb)->probability)
2199 return;
2200 XEXP (note, 0) = GEN_INT (BRANCH_EDGE (bb)->probability);
2203 /* Get the last insn associated with block BB (that includes barriers and
2204 tablejumps after BB). */
2206 get_last_bb_insn (basic_block bb)
2208 rtx tmp;
2209 rtx end = BB_END (bb);
2211 /* Include any jump table following the basic block. */
2212 if (tablejump_p (end, NULL, &tmp))
2213 end = tmp;
2215 /* Include any barriers that may follow the basic block. */
2216 tmp = next_nonnote_insn_bb (end);
2217 while (tmp && BARRIER_P (tmp))
2219 end = tmp;
2220 tmp = next_nonnote_insn_bb (end);
/* END now points past BB's jump table and trailing barriers, if any. */
2223 return end;
2226 /* Sanity check partition hotness to ensure that basic blocks in
2227   the cold partition don't dominate basic blocks in the hot partition.
2228 If FLAG_ONLY is true, report violations as errors. Otherwise
2229 re-mark the dominated blocks as cold, since this is run after
2230 cfg optimizations that may make hot blocks previously reached
2231 by both hot and cold blocks now only reachable along cold paths. */
2233 static vec<basic_block>
2234 find_partition_fixes (bool flag_only)
2236 basic_block bb;
2237 vec<basic_block> bbs_in_cold_partition = vNULL;
2238 vec<basic_block> bbs_to_fix = vNULL;
2240 /* Callers check this. */
2241 gcc_checking_assert (crtl->has_bb_partition);
/* Seed the worklist with all blocks currently in the cold partition. */
2243 FOR_EACH_BB (bb)
2244 if ((BB_PARTITION (bb) == BB_COLD_PARTITION))
2245 bbs_in_cold_partition.safe_push (bb);
2247 if (bbs_in_cold_partition.is_empty ())
2248 return vNULL;
/* Dominators may or may not be up to date; compute them here if needed
   and free them again below so the caller's state is unchanged. */
2250 bool dom_calculated_here = !dom_info_available_p (CDI_DOMINATORS);
2252 if (dom_calculated_here)
2253 calculate_dominance_info (CDI_DOMINATORS);
/* Worklist pass: everything dominated by a cold block must itself be
   cold; newly-cooled blocks are re-queued so their subtrees follow. */
2255 while (! bbs_in_cold_partition.is_empty ())
2257 bb = bbs_in_cold_partition.pop ();
2258 /* Any blocks dominated by a block in the cold section
2259 must also be cold. */
2260 basic_block son;
2261 for (son = first_dom_son (CDI_DOMINATORS, bb);
2262 son;
2263 son = next_dom_son (CDI_DOMINATORS, son))
2265 /* If son is not yet cold, then mark it cold here and
2266 enqueue it for further processing. */
2267 if ((BB_PARTITION (son) != BB_COLD_PARTITION))
2269 if (flag_only)
2270 error ("non-cold basic block %d dominated "
2271 "by a block in the cold partition (%d)", son->index, bb->index)
2272 else
2273 BB_SET_PARTITION (son, BB_COLD_PARTITION);
2274 bbs_to_fix.safe_push (son);
2275 bbs_in_cold_partition.safe_push (son);
2280 if (dom_calculated_here)
2281 free_dominance_info (CDI_DOMINATORS);
2283 return bbs_to_fix;
2286 /* Perform cleanup on the hot/cold bb partitioning after optimization
2287 passes that modify the cfg. */
2289 void
2290 fixup_partitions (void)
2292 basic_block bb;
/* No-op unless the function was actually split into hot/cold sections.  */
2294 if (!crtl->has_bb_partition)
2295 return;
2297 /* Delete any blocks that became unreachable and weren't
2298 already cleaned up, for example during edge forwarding
2299 and convert_jumps_to_returns. This will expose more
2300 opportunities for fixing the partition boundaries here.
2301 Also, the calculation of the dominance graph during verification
2302 will assert if there are unreachable nodes. */
2303 delete_unreachable_blocks ();
2305 /* If there are partitions, do a sanity check on them: A basic block in
2306   a cold partition cannot dominate a basic block in a hot partition.
2307 Fixup any that now violate this requirement, as a result of edge
2308 forwarding and unreachable block deletion.  */
2309 vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2311 /* Do the partition fixup after all necessary blocks have been converted to
2312 cold, so that we only update the region crossings the minimum number of
2313 places, which can require forcing edges to be non fallthru. */
/* NOTE(review): fixup_new_cold_bb is defined elsewhere; presumably it
   updates region-crossing edges/notes for BB — confirm at its definition.  */
2314 while (! bbs_to_fix.is_empty ())
2316 bb = bbs_to_fix.pop ();
2317 fixup_new_cold_bb (bb);
2321 /* Verify, in the basic block chain, that there is at most one switch
2322 between hot/cold partitions. This condition will not be true until
2323 after reorder_basic_blocks is called. */
/* Returns nonzero on violation; diagnostics are emitted via error ().  */
2325 static int
2326 verify_hot_cold_block_grouping (void)
2328 basic_block bb;
2329 int err = 0;
2330 bool switched_sections = false;
2331 int current_partition = BB_UNPARTITIONED;
2333 /* Even after bb reordering is complete, we go into cfglayout mode
2334 again (in compgoto). Ensure we don't call this before going back
2335 into linearized RTL when any layout fixes would have been committed. */
2336 if (!crtl->bb_reorder_complete
2337 || current_ir_type() != IR_RTL_CFGRTL)
2338 return err;
/* Scan blocks in layout order, tracking the current partition; a second
   partition change means more than one hot/cold transition.  */
2340 FOR_EACH_BB (bb)
2342 if (current_partition != BB_UNPARTITIONED
2343 && BB_PARTITION (bb) != current_partition)
2345 if (switched_sections)
2347 error ("multiple hot/cold transitions found (bb %i)",
2348 bb->index);
2349 err = 1;
2351 else
2352 switched_sections = true;
2354 if (!crtl->has_bb_partition)
2355 error ("partition found but function partition flag not set");
2357 current_partition = BB_PARTITION (bb);
2360 return err;
2364 /* Perform several checks on the edges out of each block, such as
2365 the consistency of the branch probabilities, the correctness
2366 of hot/cold partition crossing edges, and the number of expected
2367 successor edges. Also verify that the dominance relationship
2368 between hot/cold blocks is sane. */
/* Returns nonzero if any check fails; diagnostics go through error ().  */
2370 static int
2371 rtl_verify_edges (void)
2373 int err = 0;
2374 basic_block bb;
2376 FOR_EACH_BB_REVERSE (bb)
2378 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2379 int n_eh = 0, n_abnormal = 0;
2380 edge e, fallthru = NULL;
2381 edge_iterator ei;
2382 rtx note;
2383 bool has_crossing_edge = false;
/* Check that a REG_BR_PROB note on a conditional jump matches the
   probability recorded on the branch edge.  */
2385 if (JUMP_P (BB_END (bb))
2386 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2387 && EDGE_COUNT (bb->succs) >= 2
2388 && any_condjump_p (BB_END (bb)))
2390 if (INTVAL (XEXP (note, 0)) != BRANCH_EDGE (bb)->probability
2391 && profile_status != PROFILE_ABSENT)
2393 error ("verify_flow_info: REG_BR_PROB does not match cfg %wi %i",
2394 INTVAL (XEXP (note, 0)), BRANCH_EDGE (bb)->probability);
2395 err = 1;
/* Walk the successor edges, counting each category and checking the
   hot/cold section-crossing invariants.  */
2399 FOR_EACH_EDGE (e, ei, bb->succs)
2401 bool is_crossing;
2403 if (e->flags & EDGE_FALLTHRU)
2404 n_fallthru++, fallthru = e;
2406 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2407 && e->src != ENTRY_BLOCK_PTR
2408 && e->dest != EXIT_BLOCK_PTR);
2409 has_crossing_edge |= is_crossing;
2410 if (e->flags & EDGE_CROSSING)
2412 if (!is_crossing)
2414 error ("EDGE_CROSSING incorrectly set across same section");
2415 err = 1;
2417 if (e->flags & EDGE_FALLTHRU)
2419 error ("fallthru edge crosses section boundary in bb %i",
2420 e->src->index);
2421 err = 1;
2423 if (e->flags & EDGE_EH)
2425 error ("EH edge crosses section boundary in bb %i",
2426 e->src->index);
2427 err = 1;
2429 if (JUMP_P (BB_END (bb))
2430 && !find_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX))
2432 error ("No region crossing jump at section boundary in bb %i",
2433 bb->index);
2434 err = 1;
2437 else if (is_crossing)
2439 error ("EDGE_CROSSING missing across section boundary");
2440 err = 1;
/* Edges with none of the "informational" flags count as branches.  */
2443 if ((e->flags & ~(EDGE_DFS_BACK
2444 | EDGE_CAN_FALLTHRU
2445 | EDGE_IRREDUCIBLE_LOOP
2446 | EDGE_LOOP_EXIT
2447 | EDGE_CROSSING
2448 | EDGE_PRESERVE)) == 0)
2449 n_branch++;
2451 if (e->flags & EDGE_ABNORMAL_CALL)
2452 n_abnormal_call++;
2454 if (e->flags & EDGE_SIBCALL)
2455 n_sibcall++;
2457 if (e->flags & EDGE_EH)
2458 n_eh++;
2460 if (e->flags & EDGE_ABNORMAL)
2461 n_abnormal++;
/* A REG_CROSSING_JUMP note without any crossing edge is an error.  */
2464 if (!has_crossing_edge
2465 && find_reg_note (BB_END (bb), REG_CROSSING_JUMP, NULL_RTX))
2467 print_rtl_with_bb (stderr, get_insns (), TDF_RTL | TDF_BLOCKS | TDF_DETAILS);
2468 error ("Region crossing jump across same section in bb %i",
2469 bb->index);
2470 err = 1;
/* Cross-check the edge-category counts against the final insn of BB.  */
2473 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2475 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2476 err = 1;
2478 if (n_eh > 1)
2480 error ("too many exception handling edges in bb %i", bb->index);
2481 err = 1;
2483 if (n_branch
2484 && (!JUMP_P (BB_END (bb))
2485 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2486 || any_condjump_p (BB_END (bb))))))
2488 error ("too many outgoing branch edges from bb %i", bb->index);
2489 err = 1;
2491 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2493 error ("fallthru edge after unconditional jump in bb %i", bb->index);
2494 err = 1;
2496 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2498 error ("wrong number of branch edges after unconditional jump"
2499 " in bb %i", bb->index);
2500 err = 1;
2502 if (n_branch != 1 && any_condjump_p (BB_END (bb))
2503 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2505 error ("wrong amount of branch edges after conditional jump"
2506 " in bb %i", bb->index);
2507 err = 1;
2509 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2511 error ("abnormal call edges for non-call insn in bb %i", bb->index);
2512 err = 1;
2514 if (n_sibcall && !CALL_P (BB_END (bb)))
2516 error ("sibcall edges for non-call insn in bb %i", bb->index);
2517 err = 1;
2519 if (n_abnormal > n_eh
2520 && !(CALL_P (BB_END (bb))
2521 && n_abnormal == n_abnormal_call + n_sibcall)
2522 && (!JUMP_P (BB_END (bb))
2523 || any_condjump_p (BB_END (bb))
2524 || any_uncondjump_p (BB_END (bb))))
2526 error ("abnormal edges for no purpose in bb %i", bb->index);
2527 err = 1;
2531 /* If there are partitions, do a sanity check on them: A basic block in
2532   a cold partition cannot dominate a basic block in a hot partition.  */
2533 if (crtl->has_bb_partition && !err)
2535 vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2536 err = !bbs_to_fix.is_empty ();
2539 /* Clean up. */
2540 return err;
2543 /* Checks on the instructions within blocks. Currently checks that each
2544 block starts with a basic block note, and that basic block notes and
2545 control flow jumps are not found in the middle of the block. */
/* Returns nonzero on failure; a flow-control insn mid-block is fatal.  */
2547 static int
2548 rtl_verify_bb_insns (void)
2550 rtx x;
2551 int err = 0;
2552 basic_block bb;
2554 FOR_EACH_BB_REVERSE (bb)
2556 /* Now check the header of basic
2557 block. It ought to contain optional CODE_LABEL followed
2558 by NOTE_BASIC_BLOCK. */
2559 x = BB_HEAD (bb);
2560 if (LABEL_P (x))
2562 if (BB_END (bb) == x)
2564 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2565 bb->index);
2566 err = 1;
2569 x = NEXT_INSN (x);
2572 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2574 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2575 bb->index);
2576 err = 1;
2579 if (BB_END (bb) == x)
2580 /* Do checks for empty blocks here. */
/* Scan the body: no extra BB notes or control-flow insns may appear
   before BB_END.  */
2582 else
2583 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2585 if (NOTE_INSN_BASIC_BLOCK_P (x))
2587 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2588 INSN_UID (x), bb->index);
2589 err = 1;
2592 if (x == BB_END (bb))
2593 break;
2595 if (control_flow_insn_p (x))
2597 error ("in basic block %d:", bb->index);
2598 fatal_insn ("flow control insn inside a basic block", x);
2603 /* Clean up. */
2604 return err;
2607 /* Verify that block pointers for instructions in basic blocks, headers and
2608 footers are set appropriately. */
2610 static int
2611 rtl_verify_bb_pointers (void)
2613 int err = 0;
2614 basic_block bb;
2616 /* Check the general integrity of the basic blocks. */
2617 FOR_EACH_BB_REVERSE (bb)
2619 rtx insn;
2621 if (!(bb->flags & BB_RTL))
2623 error ("BB_RTL flag not set for block %d", bb->index);
2624 err = 1;
/* Every insn inside the block must point back at the block.  */
2627 FOR_BB_INSNS (bb, insn)
2628 if (BLOCK_FOR_INSN (insn) != bb)
2630 error ("insn %d basic block pointer is %d, should be %d",
2631 INSN_UID (insn),
2632 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2633 bb->index);
2634 err = 1;
/* Header/footer insns (cfglayout mode) must not claim membership of
   any block; barriers are exempt.  */
2637 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2638 if (!BARRIER_P (insn)
2639 && BLOCK_FOR_INSN (insn) != NULL)
2641 error ("insn %d in header of bb %d has non-NULL basic block",
2642 INSN_UID (insn), bb->index);
2643 err = 1;
2645 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2646 if (!BARRIER_P (insn)
2647 && BLOCK_FOR_INSN (insn) != NULL)
2649 error ("insn %d in footer of bb %d has non-NULL basic block",
2650 INSN_UID (insn), bb->index);
2651 err = 1;
2655 /* Clean up. */
2656 return err;
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does following checks:

   - overlapping of basic blocks
   - insns with wrong BLOCK_FOR_INSN pointers
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fall_thru edge crosses hot/cold partition boundaries
   - verify that there are no pending RTL branch predictions
   - verify that hot blocks are not dominated by cold blocks

   In future it can be extended check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */

static int
rtl_verify_flow_info_1 (void)
{
  int failures = 0;

  /* Run each sub-verifier in turn; any nonzero result marks failure.  */
  if (rtl_verify_bb_pointers ())
    failures = 1;

  if (rtl_verify_bb_insns ())
    failures = 1;

  if (rtl_verify_edges ())
    failures = 1;

  return failures;
}
2691 /* Walk the instruction chain and verify that bb head/end pointers
2692 are correct, and that instructions are in exactly one bb and have
2693 correct block pointers. */
2695 static int
2696 rtl_verify_bb_insn_chain (void)
2698 basic_block bb;
2699 int err = 0;
2700 rtx x;
2701 rtx last_head = get_last_insn ();
2702 basic_block *bb_info;
2703 const int max_uid = get_max_uid ();
/* bb_info[uid] records which block claimed insn UID, so an insn that
   appears in two blocks can be diagnosed.  */
2705 bb_info = XCNEWVEC (basic_block, max_uid);
2707 FOR_EACH_BB_REVERSE (bb)
2709 rtx head = BB_HEAD (bb);
2710 rtx end = BB_END (bb);
/* Scan backwards from the previous block's head (initially the last
   insn) looking for this block's BB_END.  */
2712 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2714 /* Verify the end of the basic block is in the INSN chain. */
2715 if (x == end)
2716 break;
2718 /* And that the code outside of basic blocks has NULL bb field. */
2719 if (!BARRIER_P (x)
2720 && BLOCK_FOR_INSN (x) != NULL)
2722 error ("insn %d outside of basic blocks has non-NULL bb field",
2723 INSN_UID (x));
2724 err = 1;
2728 if (!x)
2730 error ("end insn %d for block %d not found in the insn stream",
2731 INSN_UID (end), bb->index);
2732 err = 1;
2735 /* Work backwards from the end to the head of the basic block
2736 to verify the head is in the RTL chain. */
2737 for (; x != NULL_RTX; x = PREV_INSN (x))
2739 /* While walking over the insn chain, verify insns appear
2740 in only one basic block. */
2741 if (bb_info[INSN_UID (x)] != NULL)
2743 error ("insn %d is in multiple basic blocks (%d and %d)",
2744 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2745 err = 1;
2748 bb_info[INSN_UID (x)] = bb;
2750 if (x == head)
2751 break;
2753 if (!x)
2755 error ("head insn %d for block %d not found in the insn stream",
2756 INSN_UID (head), bb->index);
2757 err = 1;
2760 last_head = PREV_INSN (x);
2763 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2765 /* Check that the code before the first basic block has NULL
2766 bb field. */
2767 if (!BARRIER_P (x)
2768 && BLOCK_FOR_INSN (x) != NULL)
2770 error ("insn %d outside of basic blocks has non-NULL bb field",
2771 INSN_UID (x));
2772 err = 1;
2775 free (bb_info);
2777 return err;
2780 /* Verify that fallthru edges point to adjacent blocks in layout order and
2781 that barriers exist after non-fallthru blocks. */
2783 static int
2784 rtl_verify_fallthru (void)
2786 basic_block bb;
2787 int err = 0;
2789 FOR_EACH_BB_REVERSE (bb)
2791 edge e;
2793 e = find_fallthru_edge (bb->succs);
2794 if (!e)
2796 rtx insn;
2798 /* Ensure existence of barrier in BB with no fallthru edges. */
2799 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2801 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
2803 error ("missing barrier after block %i", bb->index);
2804 err = 1;
2805 break;
2807 if (BARRIER_P (insn))
2808 break;
/* For a real fallthru (not touching entry/exit), the destination must
   immediately follow the source, with nothing executable in between.  */
2811 else if (e->src != ENTRY_BLOCK_PTR
2812 && e->dest != EXIT_BLOCK_PTR)
2814 rtx insn;
2816 if (e->src->next_bb != e->dest)
2818 error
2819 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2820 e->src->index, e->dest->index);
2821 err = 1;
2823 else
2824 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2825 insn = NEXT_INSN (insn))
2826 if (BARRIER_P (insn) || INSN_P (insn))
2828 error ("verify_flow_info: Incorrect fallthru %i->%i",
2829 e->src->index, e->dest->index);
2830 fatal_insn ("wrong insn in the fallthru edge", insn);
2831 err = 1;
2836 return err;
2839 /* Verify that blocks are laid out in consecutive order. While walking the
2840 instructions, verify that all expected instructions are inside the basic
2841 blocks, and that all returns are followed by barriers. */
2843 static int
2844 rtl_verify_bb_layout (void)
2846 basic_block bb;
2847 int err = 0;
2848 rtx x;
2849 int num_bb_notes;
2850 const rtx rtx_first = get_insns ();
2851 basic_block last_bb_seen = ENTRY_BLOCK_PTR, curr_bb = NULL;
2853 num_bb_notes = 0;
2854 last_bb_seen = ENTRY_BLOCK_PTR;
/* Walk the linear insn chain, tracking which block (if any) we are
   currently inside via curr_bb.  */
2856 for (x = rtx_first; x; x = NEXT_INSN (x))
2858 if (NOTE_INSN_BASIC_BLOCK_P (x))
2860 bb = NOTE_BASIC_BLOCK (x);
2862 num_bb_notes++;
2863 if (bb != last_bb_seen->next_bb)
2864 internal_error ("basic blocks not laid down consecutively");
2866 curr_bb = last_bb_seen = bb;
/* Outside any block, only barriers, notes, and (jump-table) labels
   are allowed.  */
2869 if (!curr_bb)
2871 switch (GET_CODE (x))
2873 case BARRIER:
2874 case NOTE:
2875 break;
2877 case CODE_LABEL:
2878 /* An addr_vec is placed outside any basic block. */
2879 if (NEXT_INSN (x)
2880 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
2881 x = NEXT_INSN (x);
2883 /* But in any case, non-deletable labels can appear anywhere. */
2884 break;
2886 default:
2887 fatal_insn ("insn outside basic block", x);
2891 if (JUMP_P (x)
2892 && returnjump_p (x) && ! condjump_p (x)
2893 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
2894 fatal_insn ("return not followed by barrier", x);
2896 if (curr_bb && x == BB_END (curr_bb))
2897 curr_bb = NULL;
2900 if (num_bb_notes != n_basic_blocks - NUM_FIXED_BLOCKS)
2901 internal_error
2902 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2903 num_bb_notes, n_basic_blocks);
2905 return err;
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL, plus consistency checks specific to linearized RTL mode.

   Currently it does following checks:
   - all checks of rtl_verify_flow_info_1
   - test head/end pointers
   - check that blocks are laid out in consecutive order
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers
   - check that all fallthru edge points to the adjacent blocks
   - verify that there is a single hot/cold partition boundary after bbro  */

static int
rtl_verify_flow_info (void)
{
  int failures = 0;

  /* Run each verifier in sequence so diagnostics come out in the same
     order; any nonzero result marks overall failure.  */
  if (rtl_verify_flow_info_1 ())
    failures = 1;

  if (rtl_verify_bb_insn_chain ())
    failures = 1;

  if (rtl_verify_fallthru ())
    failures = 1;

  if (rtl_verify_bb_layout ())
    failures = 1;

  if (verify_hot_cold_block_grouping ())
    failures = 1;

  return failures;
}
2939 /* Assume that the preceding pass has possibly eliminated jump instructions
2940 or converted the unconditional jumps. Eliminate the edges from CFG.
2941 Return true if any edges are eliminated. */
2943 bool
2944 purge_dead_edges (basic_block bb)
2946 edge e;
2947 rtx insn = BB_END (bb), note;
2948 bool purged = false;
2949 bool found;
2950 edge_iterator ei;
/* Skip trailing debug insns and notes so INSN is the last "real" insn.  */
2952 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
2954 insn = PREV_INSN (insn);
2955 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
2957 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
2958 if (NONJUMP_INSN_P (insn)
2959 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
2961 rtx eqnote;
2963 if (! may_trap_p (PATTERN (insn))
2964 || ((eqnote = find_reg_equal_equiv_note (insn))
2965 && ! may_trap_p (XEXP (eqnote, 0))))
2966 remove_note (insn, note);
2969 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
2970 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2972 bool remove = false;
2974 /* There are three types of edges we need to handle correctly here: EH
2975 edges, abnormal call EH edges, and abnormal call non-EH edges. The
2976 latter can appear when nonlocal gotos are used. */
2977 if (e->flags & EDGE_ABNORMAL_CALL)
2979 if (!CALL_P (insn))
2980 remove = true;
2981 else if (can_nonlocal_goto (insn))
2983 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
2985 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
2987 else
2988 remove = true;
2990 else if (e->flags & EDGE_EH)
2991 remove = !can_throw_internal (insn);
2993 if (remove)
2995 remove_edge (e);
2996 df_set_bb_dirty (bb);
2997 purged = true;
2999 else
3000 ei_next (&ei);
/* Now prune successor edges that the (possibly simplified) final jump
   no longer justifies.  */
3003 if (JUMP_P (insn))
3005 rtx note;
3006 edge b,f;
3007 edge_iterator ei;
3009 /* We do care only about conditional jumps and simplejumps. */
3010 if (!any_condjump_p (insn)
3011 && !returnjump_p (insn)
3012 && !simplejump_p (insn))
3013 return purged;
3015 /* Branch probability/prediction notes are defined only for
3016 condjumps. We've possibly turned condjump into simplejump. */
3017 if (simplejump_p (insn))
3019 note = find_reg_note (insn, REG_BR_PROB, NULL);
3020 if (note)
3021 remove_note (insn, note);
3022 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3023 remove_note (insn, note);
3026 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3028 /* Avoid abnormal flags to leak from computed jumps turned
3029 into simplejumps. */
3031 e->flags &= ~EDGE_ABNORMAL;
3033 /* See if this edge is one we should keep. */
3034 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3035 /* A conditional jump can fall through into the next
3036 block, so we should keep the edge. */
3038 ei_next (&ei);
3039 continue;
3041 else if (e->dest != EXIT_BLOCK_PTR
3042 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
3043 /* If the destination block is the target of the jump,
3044 keep the edge. */
3046 ei_next (&ei);
3047 continue;
3049 else if (e->dest == EXIT_BLOCK_PTR && returnjump_p (insn))
3050 /* If the destination block is the exit block, and this
3051 instruction is a return, then keep the edge. */
3053 ei_next (&ei);
3054 continue;
3056 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3057 /* Keep the edges that correspond to exceptions thrown by
3058 this instruction and rematerialize the EDGE_ABNORMAL
3059 flag we just cleared above. */
3061 e->flags |= EDGE_ABNORMAL;
3062 ei_next (&ei);
3063 continue;
3066 /* We do not need this edge. */
3067 df_set_bb_dirty (bb);
3068 purged = true;
3069 remove_edge (e);
3072 if (EDGE_COUNT (bb->succs) == 0 || !purged)
3073 return purged;
3075 if (dump_file)
3076 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
3078 if (!optimize)
3079 return purged;
3081 /* Redistribute probabilities. */
3082 if (single_succ_p (bb))
3084 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
3085 single_succ_edge (bb)->count = bb->count;
3087 else
3089 note = find_reg_note (insn, REG_BR_PROB, NULL);
3090 if (!note)
3091 return purged;
3093 b = BRANCH_EDGE (bb);
3094 f = FALLTHRU_EDGE (bb);
3095 b->probability = INTVAL (XEXP (note, 0));
3096 f->probability = REG_BR_PROB_BASE - b->probability;
3097 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
3098 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
3101 return purged;
3103 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
3105 /* First, there should not be any EH or ABCALL edges resulting
3106 from non-local gotos and the like. If there were, we shouldn't
3107 have created the sibcall in the first place. Second, there
3108 should of course never have been a fallthru edge. */
3109 gcc_assert (single_succ_p (bb));
3110 gcc_assert (single_succ_edge (bb)->flags
3111 == (EDGE_SIBCALL | EDGE_ABNORMAL));
3113 return 0;
3116 /* If we don't see a jump insn, we don't know exactly why the block would
3117 have been broken at this point. Look for a simple, non-fallthru edge,
3118 as these are only created by conditional branches. If we find such an
3119 edge we know that there used to be a jump here and can then safely
3120 remove all non-fallthru edges. */
3121 found = false;
3122 FOR_EACH_EDGE (e, ei, bb->succs)
3123 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3125 found = true;
3126 break;
3129 if (!found)
3130 return purged;
3132 /* Remove all but the fake and fallthru edges. The fake edge may be
3133 the only successor for this block in the case of noreturn
3134 calls. */
3135 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3137 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
3139 df_set_bb_dirty (bb);
3140 remove_edge (e);
3141 purged = true;
3143 else
3144 ei_next (&ei);
3147 gcc_assert (single_succ_p (bb));
3149 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
3150 single_succ_edge (bb)->count = bb->count;
3152 if (dump_file)
3153 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
3154 bb->index);
3155 return purged;
3158 /* Search all basic blocks for potentially dead edges and purge them. Return
3159 true if some edge has been eliminated. */
3161 bool
3162 purge_all_dead_edges (void)
3164 int purged = false;
3165 basic_block bb;
3167 FOR_EACH_BB (bb)
3169 bool purged_here = purge_dead_edges (bb);
3171 purged |= purged_here;
3174 return purged;
3177 /* This is used by a few passes that emit some instructions after abnormal
3178 calls, moving the basic block's end, while they in fact do want to emit
3179 them on the fallthru edge. Look for abnormal call edges, find backward
3180 the call in the block and insert the instructions on the edge instead.
3182 Similarly, handle instructions throwing exceptions internally.
3184 Return true when instructions have been found and inserted on edges. */
3186 bool
3187 fixup_abnormal_edges (void)
3189 bool inserted = false;
3190 basic_block bb;
3192 FOR_EACH_BB (bb)
3194 edge e;
3195 edge_iterator ei;
3197 /* Look for cases we are interested in - calls or instructions causing
3198 exceptions. */
3199 FOR_EACH_EDGE (e, ei, bb->succs)
3200 if ((e->flags & EDGE_ABNORMAL_CALL)
3201 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3202 == (EDGE_ABNORMAL | EDGE_EH)))
3203 break;
/* Only act when BB's final insn itself cannot be the source of the
   abnormal flow (it is neither a call nor can it throw internally).  */
3205 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3207 rtx insn;
3209 /* Get past the new insns generated. Allow notes, as the insns
3210 may be already deleted. */
3211 insn = BB_END (bb);
3212 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3213 && !can_throw_internal (insn)
3214 && insn != BB_HEAD (bb))
3215 insn = PREV_INSN (insn);
3217 if (CALL_P (insn) || can_throw_internal (insn))
3219 rtx stop, next;
3221 e = find_fallthru_edge (bb->succs);
/* Truncate BB at the call/throwing insn and move everything after
   it onto the fallthru edge.  */
3223 stop = NEXT_INSN (BB_END (bb));
3224 BB_END (bb) = insn;
3226 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3228 next = NEXT_INSN (insn);
3229 if (INSN_P (insn))
3231 delete_insn (insn);
3233 /* Sometimes there's still the return value USE.
3234 If it's placed after a trapping call (i.e. that
3235 call is the last insn anyway), we have no fallthru
3236 edge. Simply delete this use and don't try to insert
3237 on the non-existent edge. */
3238 if (GET_CODE (PATTERN (insn)) != USE)
3240 /* We're not deleting it, we're moving it. */
3241 INSN_DELETED_P (insn) = 0;
3242 PREV_INSN (insn) = NULL_RTX;
3243 NEXT_INSN (insn) = NULL_RTX;
3245 insert_insn_on_edge (insn, e);
3246 inserted = true;
3249 else if (!BARRIER_P (insn))
3250 set_block_for_insn (insn, NULL);
3254 /* It may be that we don't find any trapping insn. In this
3255 case we discovered quite late that the insn that had been
3256 marked as can_throw_internal in fact couldn't trap at all.
3257 So we should in fact delete the EH edges out of the block. */
3258 else
3259 purge_dead_edges (bb);
3263 return inserted;
3266 /* Cut the insns from FIRST to LAST out of the insns stream. */
3269 unlink_insn_chain (rtx first, rtx last)
3271 rtx prevfirst = PREV_INSN (first);
3272 rtx nextlast = NEXT_INSN (last);
3274 PREV_INSN (first) = NULL;
3275 NEXT_INSN (last) = NULL;
3276 if (prevfirst)
3277 NEXT_INSN (prevfirst) = nextlast;
3278 if (nextlast)
3279 PREV_INSN (nextlast) = prevfirst;
3280 else
3281 set_last_insn (prevfirst);
3282 if (!prevfirst)
3283 set_first_insn (nextlast);
3284 return first;
3287 /* Skip over inter-block insns occurring after BB which are typically
3288 associated with BB (e.g., barriers). If there are any such insns,
3289 we return the last one. Otherwise, we return the end of BB. */
3291 static rtx
3292 skip_insns_after_block (basic_block bb)
3294 rtx insn, last_insn, next_head, prev;
3296 next_head = NULL_RTX;
3297 if (bb->next_bb != EXIT_BLOCK_PTR)
3298 next_head = BB_HEAD (bb->next_bb);
/* Scan forward from BB_END, stopping at the next block's head; only
   barriers, notes, and jump-table labels may be absorbed.  */
3300 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3302 if (insn == next_head)
3303 break;
3305 switch (GET_CODE (insn))
3307 case BARRIER:
3308 last_insn = insn;
3309 continue;
3311 case NOTE:
3312 switch (NOTE_KIND (insn))
3314 case NOTE_INSN_BLOCK_END:
3315 gcc_unreachable ();
/* NOTE(review): this "continue" and the "break" below look dead after
   gcc_unreachable — presumably kept for historical reasons; confirm.  */
3316 continue;
3317 default:
3318 continue;
3319 break;
3321 break;
3323 case CODE_LABEL:
3324 if (NEXT_INSN (insn)
3325 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3327 insn = NEXT_INSN (insn);
3328 last_insn = insn;
3329 continue;
3331 break;
3333 default:
3334 break;
3337 break;
3340 /* It is possible to hit contradictory sequence. For instance:
3342 jump_insn
3343 NOTE_INSN_BLOCK_BEG
3344 barrier
3346 Where barrier belongs to jump_insn, but the note does not. This can be
3347 created by removing the basic block originally following
3348 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
3350 for (insn = last_insn; insn != BB_END (bb); insn = prev)
3352 prev = PREV_INSN (insn);
3353 if (NOTE_P (insn))
3354 switch (NOTE_KIND (insn))
3356 case NOTE_INSN_BLOCK_END:
3357 gcc_unreachable ();
3358 break;
3359 case NOTE_INSN_DELETED:
3360 case NOTE_INSN_DELETED_LABEL:
3361 case NOTE_INSN_DELETED_DEBUG_LABEL:
3362 continue;
3363 default:
3364 reorder_insns (insn, insn, last_insn);
3368 return last_insn;
3371 /* Locate or create a label for a given basic block. */
3373 static rtx
3374 label_for_bb (basic_block bb)
3376 rtx label = BB_HEAD (bb);
3378 if (!LABEL_P (label))
3380 if (dump_file)
3381 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
3383 label = block_label (bb);
3386 return label;
3389 /* Locate the effective beginning and end of the insn chain for each
3390 block, as defined by skip_insns_after_block above. */
3392 static void
3393 record_effective_endpoints (void)
3395 rtx next_insn;
3396 basic_block bb;
3397 rtx insn;
/* Find the first basic-block note; everything before it becomes the
   cfglayout function header.  */
3399 for (insn = get_insns ();
3400 insn
3401 && NOTE_P (insn)
3402 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3403 insn = NEXT_INSN (insn))
3404 continue;
3405 /* No basic blocks at all? */
3406 gcc_assert (insn);
3408 if (PREV_INSN (insn))
3409 cfg_layout_function_header =
3410 unlink_insn_chain (get_insns (), PREV_INSN (insn));
3411 else
3412 cfg_layout_function_header = NULL_RTX;
3414 next_insn = get_insns ();
3415 FOR_EACH_BB (bb)
3417 rtx end;
/* Everything between the previous block's end and this block's head
   becomes this block's header; trailing insns become its footer.  */
3419 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3420 BB_HEADER (bb) = unlink_insn_chain (next_insn,
3421 PREV_INSN (BB_HEAD (bb)));
3422 end = skip_insns_after_block (bb);
3423 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3424 BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3425 next_insn = NEXT_INSN (BB_END (bb));
/* Whatever follows the last block becomes the function footer.  */
3428 cfg_layout_function_footer = next_insn;
3429 if (cfg_layout_function_footer)
3430 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
/* Pass execute hook: switch the current function's IL into cfglayout
   mode.  Always succeeds.  */

static unsigned int
into_cfg_layout_mode (void)
{
  cfg_layout_initialize (0);
  return 0;
}
3440 static unsigned int
3441 outof_cfg_layout_mode (void)
3443 basic_block bb;
3445 FOR_EACH_BB (bb)
3446 if (bb->next_bb != EXIT_BLOCK_PTR)
3447 bb->aux = bb->next_bb;
3449 cfg_layout_finalize ();
3451 return 0;
/* Pass descriptor wrapping into_cfg_layout_mode as an RTL pass; it
   provides the PROP_cfglayout property.  */
3454 struct rtl_opt_pass pass_into_cfg_layout_mode =
3457 RTL_PASS,
3458 "into_cfglayout", /* name */
3459 OPTGROUP_NONE, /* optinfo_flags */
3460 NULL, /* gate */
3461 into_cfg_layout_mode, /* execute */
3462 NULL, /* sub */
3463 NULL, /* next */
3464 0, /* static_pass_number */
3465 TV_CFG, /* tv_id */
3466 0, /* properties_required */
3467 PROP_cfglayout, /* properties_provided */
3468 0, /* properties_destroyed */
3469 0, /* todo_flags_start */
3470 0 /* todo_flags_finish */
/* Pass descriptor wrapping outof_cfg_layout_mode as an RTL pass; it
   destroys the PROP_cfglayout property.  */
3474 struct rtl_opt_pass pass_outof_cfg_layout_mode =
3477 RTL_PASS,
3478 "outof_cfglayout", /* name */
3479 OPTGROUP_NONE, /* optinfo_flags */
3480 NULL, /* gate */
3481 outof_cfg_layout_mode, /* execute */
3482 NULL, /* sub */
3483 NULL, /* next */
3484 0, /* static_pass_number */
3485 TV_CFG, /* tv_id */
3486 0, /* properties_required */
3487 0, /* properties_provided */
3488 PROP_cfglayout, /* properties_destroyed */
3489 0, /* todo_flags_start */
3490 0 /* todo_flags_finish */
3495 /* Link the basic blocks in the correct order, compacting the basic
3496 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3497 function also clears the basic block header and footer fields.
3499 This function is usually called after a pass (e.g. tracer) finishes
3500 some transformations while in cfglayout mode. The required sequence
3501 of the basic blocks is in a linked list along the bb->aux field.
3502 This functions re-links the basic block prev_bb and next_bb pointers
3503 accordingly, and it compacts and renumbers the blocks.
3505 FIXME: This currently works only for RTL, but the only RTL-specific
3506 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3507 to GIMPLE a long time ago, but it doesn't relink the basic block
3508 chain. It could do that (to give better initial RTL) if this function
3509 is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
3511 void
3512 relink_block_chain (bool stay_in_cfglayout_mode)
3514 basic_block bb, prev_bb;
3515 int index;
3517 /* Maybe dump the re-ordered sequence. */
3518 if (dump_file)
3520 fprintf (dump_file, "Reordered sequence:\n");
3521 for (bb = ENTRY_BLOCK_PTR->next_bb, index = NUM_FIXED_BLOCKS;
3523 bb = (basic_block) bb->aux, index++)
3525 fprintf (dump_file, " %i ", index);
3526 if (get_bb_original (bb))
3527 fprintf (dump_file, "duplicate of %i ",
3528 get_bb_original (bb)->index);
3529 else if (forwarder_block_p (bb)
3530 && !LABEL_P (BB_HEAD (bb)))
3531 fprintf (dump_file, "compensation ");
3532 else
3533 fprintf (dump_file, "bb %i ", bb->index);
3534 fprintf (dump_file, " [%i]\n", bb->frequency);
3538 /* Now reorder the blocks. */
/* Walk the aux-linked list, rewriting prev_bb/next_bb to match it.  */
3539 prev_bb = ENTRY_BLOCK_PTR;
3540 bb = ENTRY_BLOCK_PTR->next_bb;
3541 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3543 bb->prev_bb = prev_bb;
3544 prev_bb->next_bb = bb;
3546 prev_bb->next_bb = EXIT_BLOCK_PTR;
3547 EXIT_BLOCK_PTR->prev_bb = prev_bb;
3549 /* Then, clean up the aux fields. */
3550 FOR_ALL_BB (bb)
3552 bb->aux = NULL;
3553 if (!stay_in_cfglayout_mode)
3554 BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3557 /* Maybe reset the original copy tables, they are not valid anymore
3558 when we renumber the basic blocks in compact_blocks. If we are
3559 are going out of cfglayout mode, don't re-allocate the tables. */
3560 free_original_copy_tables ();
3561 if (stay_in_cfglayout_mode)
3562 initialize_original_copy_tables ();
3564 /* Finally, put basic_block_info in the new order. */
3565 compact_blocks ();
/* Given a reorder chain (linked through bb->aux), rearrange the insn
   stream to match, then insert/invert/delete jumps so that every block's
   outgoing edges are consistent with the new physical order.  */

static void
fixup_reorder_chain (void)
{
  basic_block bb;
  rtx insn = NULL;

  /* INSN tracks the last insn emitted so far; start after the
     function header (insns preceding the first block), if any.  */
  if (cfg_layout_function_header)
    {
      set_first_insn (cfg_layout_function_header);
      insn = cfg_layout_function_header;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
    }

  /* First do the bulk reordering -- rechain the blocks without regard to
     the needed changes to jumps and labels.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb; bb = (basic_block) bb->aux)
    {
      if (BB_HEADER (bb))
        {
          if (insn)
            NEXT_INSN (insn) = BB_HEADER (bb);
          else
            set_first_insn (BB_HEADER (bb));
          PREV_INSN (BB_HEADER (bb)) = insn;
          insn = BB_HEADER (bb);
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
      if (insn)
        NEXT_INSN (insn) = BB_HEAD (bb);
      else
        set_first_insn (BB_HEAD (bb));
      PREV_INSN (BB_HEAD (bb)) = insn;
      insn = BB_END (bb);
      if (BB_FOOTER (bb))
        {
          NEXT_INSN (insn) = BB_FOOTER (bb);
          PREV_INSN (BB_FOOTER (bb)) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
        }
    }

  NEXT_INSN (insn) = cfg_layout_function_footer;
  if (cfg_layout_function_footer)
    PREV_INSN (cfg_layout_function_footer) = insn;

  while (NEXT_INSN (insn))
    insn = NEXT_INSN (insn);

  set_last_insn (insn);
#ifdef ENABLE_CHECKING
  verify_insn_chain ();
#endif

  /* Now add jumps and labels as needed to match the blocks new
     outgoing edges.  */

  for (bb = ENTRY_BLOCK_PTR->next_bb; bb ; bb = (basic_block) bb->aux)
    {
      edge e_fall, e_taken, e;
      rtx bb_end_insn;
      rtx ret_label = NULL_RTX;
      basic_block nb;
      edge_iterator ei;

      if (EDGE_COUNT (bb->succs) == 0)
        continue;

      /* Find the old fallthru edge, and another non-EH edge for
         a taken jump.  */
      e_taken = e_fall = NULL;

      FOR_EACH_EDGE (e, ei, bb->succs)
        if (e->flags & EDGE_FALLTHRU)
          e_fall = e;
        else if (! (e->flags & EDGE_EH))
          e_taken = e;

      bb_end_insn = BB_END (bb);
      if (JUMP_P (bb_end_insn))
        {
          ret_label = JUMP_LABEL (bb_end_insn);
          if (any_condjump_p (bb_end_insn))
            {
              /* This might happen if the conditional jump has side
                 effects and could therefore not be optimized away.
                 Make the basic block to end with a barrier in order
                 to prevent rtl_verify_flow_info from complaining.  */
              if (!e_fall)
                {
                  gcc_assert (!onlyjump_p (bb_end_insn)
                              || returnjump_p (bb_end_insn));
                  BB_FOOTER (bb) = emit_barrier_after (bb_end_insn);
                  continue;
                }

              /* If the old fallthru is still next, nothing to do.  */
              if (bb->aux == e_fall->dest
                  || e_fall->dest == EXIT_BLOCK_PTR)
                continue;

              /* The degenerated case of conditional jump jumping to the next
                 instruction can happen for jumps with side effects.  We need
                 to construct a forwarder block and this will be done just
                 fine by force_nonfallthru below.  */
              if (!e_taken)
                ;

              /* There is another special case: if *neither* block is next,
                 such as happens at the very end of a function, then we'll
                 need to add a new unconditional jump.  Choose the taken
                 edge based on known or assumed probability.  */
              else if (bb->aux != e_taken->dest)
                {
                  rtx note = find_reg_note (bb_end_insn, REG_BR_PROB, 0);

                  if (note
                      && INTVAL (XEXP (note, 0)) < REG_BR_PROB_BASE / 2
                      && invert_jump (bb_end_insn,
                                      (e_fall->dest == EXIT_BLOCK_PTR
                                       ? NULL_RTX
                                       : label_for_bb (e_fall->dest)), 0))
                    {
                      e_fall->flags &= ~EDGE_FALLTHRU;
                      gcc_checking_assert (could_fall_through
                                           (e_taken->src, e_taken->dest));
                      e_taken->flags |= EDGE_FALLTHRU;
                      update_br_prob_note (bb);
                      e = e_fall, e_fall = e_taken, e_taken = e;
                    }
                }

              /* If the "jumping" edge is a crossing edge, and the fall
                 through edge is non-crossing, leave things as they are.  */
              else if ((e_taken->flags & EDGE_CROSSING)
                       && !(e_fall->flags & EDGE_CROSSING))
                continue;

              /* Otherwise we can try to invert the jump.  This will
                 basically never fail, however, keep up the pretense.  */
              else if (invert_jump (bb_end_insn,
                                    (e_fall->dest == EXIT_BLOCK_PTR
                                     ? NULL_RTX
                                     : label_for_bb (e_fall->dest)), 0))
                {
                  e_fall->flags &= ~EDGE_FALLTHRU;
                  gcc_checking_assert (could_fall_through
                                       (e_taken->src, e_taken->dest));
                  e_taken->flags |= EDGE_FALLTHRU;
                  update_br_prob_note (bb);
                  if (LABEL_NUSES (ret_label) == 0
                      && single_pred_p (e_taken->dest))
                    delete_insn (ret_label);
                  continue;
                }
            }
          else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
            {
              /* If the old fallthru is still next or if
                 asm goto doesn't have a fallthru (e.g. when followed by
                 __builtin_unreachable ()), nothing to do.  */
              if (! e_fall
                  || bb->aux == e_fall->dest
                  || e_fall->dest == EXIT_BLOCK_PTR)
                continue;

              /* Otherwise we'll have to use the fallthru fixup below.  */
            }
          else
            {
              /* Otherwise we have some return, switch or computed
                 jump.  In the 99% case, there should not have been a
                 fallthru edge.  */
              gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
              continue;
            }
        }
      else
        {
          /* No fallthru implies a noreturn function with EH edges, or
             something similarly bizarre.  In any case, we don't need to
             do anything.  */
          if (! e_fall)
            continue;

          /* If the fallthru block is still next, nothing to do.  */
          if (bb->aux == e_fall->dest)
            continue;

          /* A fallthru to exit block.  */
          if (e_fall->dest == EXIT_BLOCK_PTR)
            continue;
        }

      /* We got here if we need to add a new jump insn.
         Note force_nonfallthru can delete E_FALL and thus we have to
         save E_FALL->src prior to the call to force_nonfallthru.  */
      nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
      if (nb)
        {
          nb->aux = bb->aux;
          bb->aux = nb;
          /* Don't process this new block.  */
          bb = nb;
        }
    }

  relink_block_chain (/*stay_in_cfglayout_mode=*/false);

  /* Annoying special case - jump around dead jumptables left in the code.  */
  FOR_EACH_BB (bb)
    {
      edge e = find_fallthru_edge (bb->succs);

      if (e && !can_fallthru (e->src, e->dest))
        force_nonfallthru (e);
    }

  /* Ensure goto_locus from edges has some instructions with that locus
     in RTL.  */
  if (!optimize)
    FOR_EACH_BB (bb)
      {
        edge e;
        edge_iterator ei;

        FOR_EACH_EDGE (e, ei, bb->succs)
          if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
              && !(e->flags & EDGE_ABNORMAL))
            {
              edge e2;
              edge_iterator ei2;
              basic_block dest, nb;
              rtx end;

              /* If some insn at the tail of the source block already
                 carries the locus, nothing to do for this edge.  */
              insn = BB_END (e->src);
              end = PREV_INSN (BB_HEAD (e->src));
              while (insn != end
                     && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
                insn = PREV_INSN (insn);
              if (insn != end
                  && INSN_LOCATION (insn) == e->goto_locus)
                continue;
              if (simplejump_p (BB_END (e->src))
                  && !INSN_HAS_LOCATION (BB_END (e->src)))
                {
                  INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
                  continue;
                }
              dest = e->dest;
              if (dest == EXIT_BLOCK_PTR)
                {
                  /* Non-fallthru edges to the exit block cannot be split.  */
                  if (!(e->flags & EDGE_FALLTHRU))
                    continue;
                }
              else
                {
                  insn = BB_HEAD (dest);
                  end = NEXT_INSN (BB_END (dest));
                  while (insn != end && !NONDEBUG_INSN_P (insn))
                    insn = NEXT_INSN (insn);
                  if (insn != end && INSN_HAS_LOCATION (insn)
                      && INSN_LOCATION (insn) == e->goto_locus)
                    continue;
                }
              /* Otherwise materialize the locus on a nop in a new block
                 split out of the edge.  */
              nb = split_edge (e);
              if (!INSN_P (BB_END (nb)))
                BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
                                                     nb);
              INSN_LOCATION (BB_END (nb)) = e->goto_locus;

              /* If there are other incoming edges to the destination block
                 with the same goto locus, redirect them to the new block as
                 well, this can prevent other such blocks from being created
                 in subsequent iterations of the loop.  */
              for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
                if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
                    && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
                    && e->goto_locus == e2->goto_locus)
                  redirect_edge_and_branch (e2, nb);
                else
                  ei_next (&ei2);
            }
      }
}
3861 /* Perform sanity checks on the insn chain.
3862 1. Check that next/prev pointers are consistent in both the forward and
3863 reverse direction.
3864 2. Count insns in chain, going both directions, and check if equal.
3865 3. Check that get_last_insn () returns the actual end of chain. */
3867 DEBUG_FUNCTION void
3868 verify_insn_chain (void)
3870 rtx x, prevx, nextx;
3871 int insn_cnt1, insn_cnt2;
3873 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
3874 x != 0;
3875 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
3876 gcc_assert (PREV_INSN (x) == prevx);
3878 gcc_assert (prevx == get_last_insn ());
3880 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
3881 x != 0;
3882 nextx = x, insn_cnt2++, x = PREV_INSN (x))
3883 gcc_assert (NEXT_INSN (x) == nextx);
3885 gcc_assert (insn_cnt1 == insn_cnt2);
/* If we have assembler epilogues, the block falling through to exit must
   be the last one in the reordered chain when we reach final.  Ensure
   that this condition is met.  */
static void
fixup_fallthru_exit_predecessor (void)
{
  edge e;
  basic_block bb = NULL;

  /* This transformation is not valid before reload, because we might
     separate a call from the instruction that copies the return
     value.  */
  gcc_assert (reload_completed);

  e = find_fallthru_edge (EXIT_BLOCK_PTR->preds);
  if (e)
    bb = e->src;

  /* bb->aux != NULL means BB is not already last in the aux chain.  */
  if (bb && bb->aux)
    {
      basic_block c = ENTRY_BLOCK_PTR->next_bb;

      /* If the very first block is the one with the fall-through exit
         edge, we have to split that block.  */
      if (c == bb)
        {
          bb = split_block (bb, NULL)->dest;
          bb->aux = c->aux;
          c->aux = bb;
          BB_FOOTER (bb) = BB_FOOTER (c);
          BB_FOOTER (c) = NULL;
        }

      /* Unlink BB from its place in the aux chain...  */
      while (c->aux != bb)
        c = (basic_block) c->aux;

      c->aux = bb->aux;
      /* ...walk to the current end of the chain...  */
      while (c->aux)
        c = (basic_block) c->aux;

      /* ...and append BB there as the last block.  */
      c->aux = bb;
      bb->aux = NULL;
    }
}
/* In case there are more than one fallthru predecessors of exit, force that
   there is only one.  */

static void
force_one_exit_fallthru (void)
{
  edge e, predecessor = NULL;
  bool more = false;
  edge_iterator ei;
  basic_block forwarder, bb;

  /* Remember the first fallthru predecessor; stop as soon as we know
     there is more than one.  */
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR->preds)
    if (e->flags & EDGE_FALLTHRU)
      {
        if (predecessor == NULL)
          predecessor = e;
        else
          {
            more = true;
            break;
          }
      }

  if (!more)
    return;

  /* Exit has several fallthru predecessors.  Create a forwarder block for
     them.  */
  forwarder = split_edge (predecessor);
  for (ei = ei_start (EXIT_BLOCK_PTR->preds); (e = ei_safe_edge (ei)); )
    {
      if (e->src == forwarder
          || !(e->flags & EDGE_FALLTHRU))
        ei_next (&ei);
      else
        redirect_edge_and_branch_force (e, forwarder);
    }

  /* Fix up the chain of blocks -- make FORWARDER immediately precede the
     exit block.  */
  FOR_EACH_BB (bb)
    {
      if (bb->aux == NULL && bb != forwarder)
        {
          bb->aux = forwarder;
          break;
        }
    }
}
3983 /* Return true in case it is possible to duplicate the basic block BB. */
3985 static bool
3986 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
3988 /* Do not attempt to duplicate tablejumps, as we need to unshare
3989 the dispatch table. This is difficult to do, as the instructions
3990 computing jump destination may be hoisted outside the basic block. */
3991 if (tablejump_p (BB_END (bb), NULL, NULL))
3992 return false;
3994 /* Do not duplicate blocks containing insns that can't be copied. */
3995 if (targetm.cannot_copy_insn_p)
3997 rtx insn = BB_HEAD (bb);
3998 while (1)
4000 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4001 return false;
4002 if (insn == BB_END (bb))
4003 break;
4004 insn = NEXT_INSN (insn);
4008 return true;
/* Duplicate the insn chain FROM .. TO (inclusive) at the end of the
   current insn stream and return the first insn of the copy.  Dispatch
   tables, label debug insns and most notes are deliberately skipped.  */

rtx
duplicate_insn_chain (rtx from, rtx to)
{
  rtx insn, last, copy;

  /* Avoid updating of boundaries of previous basic block.  The
     note will get removed from insn stream in fixup.  */
  last = emit_note (NOTE_INSN_DELETED);

  /* Create copy at the end of INSN chain.  The chain will
     be reordered later.  */
  for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
        {
        case DEBUG_INSN:
          /* Don't duplicate label debug insns.  */
          if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
            break;
          /* FALLTHRU */
        case INSN:
        case CALL_INSN:
        case JUMP_INSN:
          /* Avoid copying of dispatch tables.  We never duplicate
             tablejumps, so this can hit only in case the table got
             moved far from original jump.  */
          if (GET_CODE (PATTERN (insn)) == ADDR_VEC
              || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
            {
              /* Avoid copying following barrier as well if any
                 (and debug insns in between).  */
              rtx next;

              for (next = NEXT_INSN (insn);
                   next != NEXT_INSN (to);
                   next = NEXT_INSN (next))
                if (!DEBUG_INSN_P (next))
                  break;
              if (next != NEXT_INSN (to) && BARRIER_P (next))
                insn = next;
              break;
            }
          copy = emit_copy_of_insn_after (insn, get_last_insn ());
          /* Preserve (simple_)return jump labels, which are not real
             labels and therefore are not recomputed by the copier.  */
          if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
              && ANY_RETURN_P (JUMP_LABEL (insn)))
            JUMP_LABEL (copy) = JUMP_LABEL (insn);
          maybe_copy_prologue_epilogue_insn (insn, copy);
          break;

        case CODE_LABEL:
          break;

        case BARRIER:
          emit_barrier ();
          break;

        case NOTE:
          switch (NOTE_KIND (insn))
            {
              /* In case prologue is empty and function contain label
                 in first BB, we may want to copy the block.  */
            case NOTE_INSN_PROLOGUE_END:

            case NOTE_INSN_DELETED:
            case NOTE_INSN_DELETED_LABEL:
            case NOTE_INSN_DELETED_DEBUG_LABEL:
              /* No problem to strip these.  */
            case NOTE_INSN_FUNCTION_BEG:
              /* There is always just single entry to function.  */
            case NOTE_INSN_BASIC_BLOCK:
              /* We should only switch text sections once.  */
            case NOTE_INSN_SWITCH_TEXT_SECTIONS:
              break;

            case NOTE_INSN_EPILOGUE_BEG:
              emit_note_copy (insn);
              break;

            default:
              /* All other notes should have already been eliminated.  */
              gcc_unreachable ();
            }
          break;
        default:
          gcc_unreachable ();
        }
    }
  /* Drop the scratch NOTE_INSN_DELETED; the copy starts right after it.  */
  insn = NEXT_INSN (last);
  delete_insn (last);
  return insn;
}
/* Create a duplicate of the basic block BB.  The new block is appended
   just before EXIT; header and footer insn chains are duplicated too.  */

static basic_block
cfg_layout_duplicate_bb (basic_block bb)
{
  rtx insn;
  basic_block new_bb;

  insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
  new_bb = create_basic_block (insn,
                               insn ? get_last_insn () : NULL,
                               EXIT_BLOCK_PTR->prev_bb);

  BB_COPY_PARTITION (new_bb, bb);
  if (BB_HEADER (bb))
    {
      /* Duplicate the header chain; the copy is emitted at the end of
         the insn stream and unlinked into the new block's header.  */
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_HEADER (bb), insn);
      if (insn)
        BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  if (BB_FOOTER (bb))
    {
      /* Likewise for the footer chain.  */
      insn = BB_FOOTER (bb);
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
      if (insn)
        BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
    }

  return new_bb;
}
/* Main entry point to this module - initialize the datastructures for
   CFG layout changes.  It keeps LOOPS up-to-date if not null.

   FLAGS is a set of additional flags to pass to cleanup_cfg().  */

void
cfg_layout_initialize (unsigned int flags)
{
  rtx x;
  basic_block bb;

  /* Once bb reordering is complete, cfg layout mode should not be re-entered.
     Entering cfg layout mode will perform optimizations on the cfg that
     could affect the bb layout negatively or even require fixups.  An
     example of the latter is if edge forwarding performed when optimizing
     the cfg layout required moving a block from the hot to the cold section
     under -freorder-blocks-and-partition.  This would create an illegal
     partitioning unless some manual fixup was performed.  */
  gcc_assert (!crtl->bb_reorder_complete);

  initialize_original_copy_tables ();

  cfg_layout_rtl_register_cfg_hooks ();

  record_effective_endpoints ();

  /* Make sure that the targets of non local gotos are marked.  */
  for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
    {
      bb = BLOCK_FOR_INSN (XEXP (x, 0));
      bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
    }

  cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
}
4177 /* Splits superblocks. */
4178 void
4179 break_superblocks (void)
4181 sbitmap superblocks;
4182 bool need = false;
4183 basic_block bb;
4185 superblocks = sbitmap_alloc (last_basic_block);
4186 bitmap_clear (superblocks);
4188 FOR_EACH_BB (bb)
4189 if (bb->flags & BB_SUPERBLOCK)
4191 bb->flags &= ~BB_SUPERBLOCK;
4192 bitmap_set_bit (superblocks, bb->index);
4193 need = true;
4196 if (need)
4198 rebuild_jump_labels (get_insns ());
4199 find_many_sub_basic_blocks (superblocks);
4202 free (superblocks);
/* Finalize the changes: reorder insn list according to the sequence specified
   by aux pointers, enter compensation code, rebuild scope forest.  */

void
cfg_layout_finalize (void)
{
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  force_one_exit_fallthru ();
  rtl_register_cfg_hooks ();
  /* With a target assembler epilogue (no HAVE_epilogue pattern), the
     fallthru predecessor of EXIT must end up physically last.  */
  if (reload_completed
#ifdef HAVE_epilogue
      && !HAVE_epilogue
#endif
      )
    fixup_fallthru_exit_predecessor ();
  fixup_reorder_chain ();

  rebuild_jump_labels (get_insns ());
  delete_dead_jumptables ();

#ifdef ENABLE_CHECKING
  verify_insn_chain ();
  verify_flow_info ();
#endif
}
4234 /* Same as split_block but update cfg_layout structures. */
4236 static basic_block
4237 cfg_layout_split_block (basic_block bb, void *insnp)
4239 rtx insn = (rtx) insnp;
4240 basic_block new_bb = rtl_split_block (bb, insn);
4242 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4243 BB_FOOTER (bb) = NULL;
4245 return new_bb;
/* Redirect edge E to DEST.  Returns the (possibly new) edge on success,
   NULL when the edge cannot be redirected (abnormal call / EH edges).  */
static edge
cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
{
  basic_block src = e->src;
  edge ret;

  if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
    return NULL;

  if (e->dest == dest)
    return e;

  if (e->src != ENTRY_BLOCK_PTR
      && (ret = try_redirect_by_replacing_jump (e, dest, true)))
    {
      df_set_bb_dirty (src);
      return ret;
    }

  if (e->src == ENTRY_BLOCK_PTR
      && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
    {
      if (dump_file)
        fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
                 e->src->index, dest->index);

      df_set_bb_dirty (e->src);
      redirect_edge_succ (e, dest);
      return e;
    }

  /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
     in the case the basic block appears to be in sequence.  Avoid this
     transformation.  */

  if (e->flags & EDGE_FALLTHRU)
    {
      /* Redirect any branch edges unified with the fallthru one.  */
      if (JUMP_P (BB_END (src))
          && label_is_jump_target_p (BB_HEAD (e->dest),
                                     BB_END (src)))
        {
          edge redirected;

          if (dump_file)
            fprintf (dump_file, "Fallthru edge unified with branch "
                     "%i->%i redirected to %i\n",
                     e->src->index, e->dest->index, dest->index);
          e->flags &= ~EDGE_FALLTHRU;
          redirected = redirect_branch_edge (e, dest);
          gcc_assert (redirected);
          redirected->flags |= EDGE_FALLTHRU;
          df_set_bb_dirty (redirected->src);
          return redirected;
        }
      /* In case we are redirecting fallthru edge to the branch edge
         of conditional jump, remove it.  */
      if (EDGE_COUNT (src->succs) == 2)
        {
          /* Find the edge that is different from E.  */
          edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);

          if (s->dest == dest
              && any_condjump_p (BB_END (src))
              && onlyjump_p (BB_END (src)))
            delete_insn (BB_END (src));
        }
      if (dump_file)
        fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
                 e->src->index, e->dest->index, dest->index);
      ret = redirect_edge_succ_nodup (e, dest);
    }
  else
    ret = redirect_branch_edge (e, dest);

  /* We don't want simplejumps in the insn stream during cfglayout.  */
  gcc_assert (!simplejump_p (BB_END (src)));

  df_set_bb_dirty (src);
  return ret;
}
4331 /* Simple wrapper as we always can redirect fallthru edges. */
4332 static basic_block
4333 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
4335 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4337 gcc_assert (redirected);
4338 return NULL;
/* Same as delete_basic_block but update cfg_layout structures.  Header
   and footer insns of BB are first spliced into the real insn stream so
   that any survivors can be re-attached to the next block afterwards.  */

static void
cfg_layout_delete_block (basic_block bb)
{
  rtx insn, next, prev = PREV_INSN (BB_HEAD (bb)), *to, remaints;

  /* Splice BB's header chain into the insn stream just before BB_HEAD.  */
  if (BB_HEADER (bb))
    {
      next = BB_HEAD (bb);
      if (prev)
        NEXT_INSN (prev) = BB_HEADER (bb);
      else
        set_first_insn (BB_HEADER (bb));
      PREV_INSN (BB_HEADER (bb)) = prev;
      insn = BB_HEADER (bb);
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = next;
      PREV_INSN (next) = insn;
    }
  next = NEXT_INSN (BB_END (bb));
  if (BB_FOOTER (bb))
    {
      /* Drop barriers from the footer (they are recreated as needed),
         stopping at a label which begins unrelated footer content.  */
      insn = BB_FOOTER (bb);
      while (insn)
        {
          if (BARRIER_P (insn))
            {
              if (PREV_INSN (insn))
                NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
              else
                BB_FOOTER (bb) = NEXT_INSN (insn);
              if (NEXT_INSN (insn))
                PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
            }
          if (LABEL_P (insn))
            break;
          insn = NEXT_INSN (insn);
        }
      /* Splice the remaining footer after BB_END.  */
      if (BB_FOOTER (bb))
        {
          insn = BB_END (bb);
          NEXT_INSN (insn) = BB_FOOTER (bb);
          PREV_INSN (BB_FOOTER (bb)) = insn;
          while (NEXT_INSN (insn))
            insn = NEXT_INSN (insn);
          NEXT_INSN (insn) = next;
          if (next)
            PREV_INSN (next) = insn;
          else
            set_last_insn (insn);
        }
    }
  /* Leftover insns between PREV and NEXT will be re-attached either to
     the next block's header or to the function footer.  */
  if (bb->next_bb != EXIT_BLOCK_PTR)
    to = &BB_HEADER (bb->next_bb);
  else
    to = &cfg_layout_function_footer;

  rtl_delete_block (bb);

  if (prev)
    prev = NEXT_INSN (prev);
  else
    prev = get_insns ();
  if (next)
    next = PREV_INSN (next);
  else
    next = get_last_insn ();

  if (next && NEXT_INSN (next) != prev)
    {
      remaints = unlink_insn_chain (prev, next);
      insn = remaints;
      while (NEXT_INSN (insn))
        insn = NEXT_INSN (insn);
      NEXT_INSN (insn) = *to;
      if (*to)
        PREV_INSN (*to) = insn;
      *to = remaints;
    }
}
4424 /* Return true when blocks A and B can be safely merged. */
4426 static bool
4427 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4429 /* If we are partitioning hot/cold basic blocks, we don't want to
4430 mess up unconditional or indirect jumps that cross between hot
4431 and cold sections.
4433 Basic block partitioning may result in some jumps that appear to
4434 be optimizable (or blocks that appear to be mergeable), but which really
4435 must be left untouched (they are required to make it safely across
4436 partition boundaries). See the comments at the top of
4437 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4439 if (BB_PARTITION (a) != BB_PARTITION (b))
4440 return false;
4442 /* Protect the loop latches. */
4443 if (current_loops && b->loop_father->latch == b)
4444 return false;
4446 /* If we would end up moving B's instructions, make sure it doesn't fall
4447 through into the exit block, since we cannot recover from a fallthrough
4448 edge into the exit block occurring in the middle of a function. */
4449 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4451 edge e = find_fallthru_edge (b->succs);
4452 if (e && e->dest == EXIT_BLOCK_PTR)
4453 return false;
4456 /* There must be exactly one edge in between the blocks. */
4457 return (single_succ_p (a)
4458 && single_succ (a) == b
4459 && single_pred_p (b) == 1
4460 && a != b
4461 /* Must be simple edge. */
4462 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4463 && a != ENTRY_BLOCK_PTR && b != EXIT_BLOCK_PTR
4464 /* If the jump insn has side effects, we can't kill the edge.
4465 When not optimizing, try_redirect_by_replacing_jump will
4466 not allow us to redirect an edge by replacing a table jump. */
4467 && (!JUMP_P (BB_END (a))
4468 || ((!optimize || reload_completed)
4469 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
/* Merge block A and B.  The blocks must be mergeable
   (see cfg_layout_can_merge_blocks_p).  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  rtx insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
             a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    {
      delete_insn (BB_HEAD (b));
    }

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* When not optimizing CFG and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    emit_nop_for_unique_locus_between (a, b);

  /* Possible line number notes should appear in between.  */
  if (BB_HEADER (b))
    {
      rtx first = BB_END (a), last;

      last = emit_insn_after_noloc (BB_HEADER (b), BB_END (a), a);
      /* The above might add a BARRIER as BB_END, but as barriers
         aren't valid parts of a bb, remove_insn doesn't update
         BB_END if it is a barrier.  So adjust BB_END here.  */
      while (BB_END (a) != first && BARRIER_P (BB_END (a)))
        BB_END (a) = PREV_INSN (BB_END (a));
      delete_insn_chain (NEXT_INSN (first), last, false);
      BB_HEADER (b) = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to explicitly call. */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  BB_HEAD (b) = NULL;
  delete_insn (insn);

  df_bb_delete (b->index);

  /* Possible tablejumps and barriers should appear after the block.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
        BB_FOOTER (a) = BB_FOOTER (b);
      else
        {
          /* Append B's footer chain to the end of A's.  */
          rtx last = BB_FOOTER (a);

          while (NEXT_INSN (last))
            last = NEXT_INSN (last);
          NEXT_INSN (last) = BB_FOOTER (b);
          PREV_INSN (BB_FOOTER (b)) = last;
        }
      BB_FOOTER (b) = NULL;
    }

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
4571 /* Split edge E. */
4573 static basic_block
4574 cfg_layout_split_edge (edge e)
4576 basic_block new_bb =
4577 create_basic_block (e->src != ENTRY_BLOCK_PTR
4578 ? NEXT_INSN (BB_END (e->src)) : get_insns (),
4579 NULL_RTX, e->src);
4581 if (e->dest == EXIT_BLOCK_PTR)
4582 BB_COPY_PARTITION (new_bb, e->src);
4583 else
4584 BB_COPY_PARTITION (new_bb, e->dest);
4585 make_edge (new_bb, e->dest, EDGE_FALLTHRU);
4586 redirect_edge_and_branch_force (e, new_bb);
4588 return new_bb;
/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.
   Nothing is needed for plain RTL mode, but the cfg hook must exist.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}
4598 /* Return true if BB contains only labels or non-executable
4599 instructions. */
4601 static bool
4602 rtl_block_empty_p (basic_block bb)
4604 rtx insn;
4606 if (bb == ENTRY_BLOCK_PTR || bb == EXIT_BLOCK_PTR)
4607 return true;
4609 FOR_BB_INSNS (bb, insn)
4610 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4611 return false;
4613 return true;
4616 /* Split a basic block if it ends with a conditional branch and if
4617 the other part of the block is not empty. */
4619 static basic_block
4620 rtl_split_block_before_cond_jump (basic_block bb)
4622 rtx insn;
4623 rtx split_point = NULL;
4624 rtx last = NULL;
4625 bool found_code = false;
4627 FOR_BB_INSNS (bb, insn)
4629 if (any_condjump_p (insn))
4630 split_point = last;
4631 else if (NONDEBUG_INSN_P (insn))
4632 found_code = true;
4633 last = insn;
4636 /* Did not find everything. */
4637 if (found_code && split_point)
4638 return split_block (bb, split_point)->dest;
4639 else
4640 return NULL;
4643 /* Return 1 if BB ends with a call, possibly followed by some
4644 instructions that must stay with the call, 0 otherwise. */
4646 static bool
4647 rtl_block_ends_with_call_p (basic_block bb)
4649 rtx insn = BB_END (bb);
4651 while (!CALL_P (insn)
4652 && insn != BB_HEAD (bb)
4653 && (keep_with_call_p (insn)
4654 || NOTE_P (insn)
4655 || DEBUG_INSN_P (insn)))
4656 insn = PREV_INSN (insn);
4657 return (CALL_P (insn));
/* Return 1 if BB ends with a conditional branch, 0 otherwise.  */

static bool
rtl_block_ends_with_condjump_p (const_basic_block bb)
{
  return any_condjump_p (BB_END (bb));
}
4668 /* Return true if we need to add fake edge to exit.
4669 Helper function for rtl_flow_call_edges_add. */
4671 static bool
4672 need_fake_edge_p (const_rtx insn)
4674 if (!INSN_P (insn))
4675 return false;
4677 if ((CALL_P (insn)
4678 && !SIBLING_CALL_P (insn)
4679 && !find_reg_note (insn, REG_NORETURN, NULL)
4680 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4681 return true;
4683 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4684 && MEM_VOLATILE_P (PATTERN (insn)))
4685 || (GET_CODE (PATTERN (insn)) == PARALLEL
4686 && asm_noperands (insn) != -1
4687 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4688 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  /* Snapshot the block count up front: split_block below may create new
     blocks, which must not be revisited by this pass.  */
  int last_bb = last_basic_block;
  bool check_last_block = false;

  /* An empty function (only the fixed entry/exit blocks) needs nothing.  */
  if (n_basic_blocks == NUM_FIXED_BLOCKS)
    return 0;

  /* Decide whether the block falling through to EXIT needs the special
     treatment described below: always when processing the whole CFG,
     otherwise only when that block is in BLOCKS.  */
  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  e = find_edge (bb, EXIT_BLOCK_PTR);
	  if (e)
	    {
	      /* The dummy (USE const0_rtx) forces the fallthru edge to be
		 split, so the fake edge added in the main loop below will
		 not be folded into it by make_edge.  */
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      rtx prev_insn;

      /* Block numbers may be sparse; skip holes.  */
      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Walk the block backwards; PREV_INSN is cached before any split so
	 the walk survives split_block renumbering/relinking.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR);
		  gcc_assert (e == NULL);
		}
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
4817 /* Add COMP_RTX as a condition at end of COND_BB. FIRST_HEAD is
4818 the conditional branch target, SECOND_HEAD should be the fall-thru
4819 there is no need to handle this here the loop versioning code handles
4820 this. the reason for SECON_HEAD is that it is needed for condition
4821 in trees, and this should be of the same type since it is a hook. */
4822 static void
4823 rtl_lv_add_condition_to_bb (basic_block first_head ,
4824 basic_block second_head ATTRIBUTE_UNUSED,
4825 basic_block cond_bb, void *comp_rtx)
4827 rtx label, seq, jump;
4828 rtx op0 = XEXP ((rtx)comp_rtx, 0);
4829 rtx op1 = XEXP ((rtx)comp_rtx, 1);
4830 enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
4831 enum machine_mode mode;
4834 label = block_label (first_head);
4835 mode = GET_MODE (op0);
4836 if (mode == VOIDmode)
4837 mode = GET_MODE (op1);
4839 start_sequence ();
4840 op0 = force_operand (op0, NULL_RTX);
4841 op1 = force_operand (op1, NULL_RTX);
4842 do_compare_rtx_and_jump (op0, op1, comp, 0,
4843 mode, NULL_RTX, NULL_RTX, label, -1);
4844 jump = get_last_insn ();
4845 JUMP_LABEL (jump) = label;
4846 LABEL_NUSES (label)++;
4847 seq = get_insns ();
4848 end_sequence ();
4850 /* Add the new cond , in the new head. */
4851 emit_insn_after(seq, BB_END(cond_bb));
4855 /* Given a block B with unconditional branch at its end, get the
4856 store the return the branch edge and the fall-thru edge in
4857 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4858 static void
4859 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4860 edge *fallthru_edge)
4862 edge e = EDGE_SUCC (b, 0);
4864 if (e->flags & EDGE_FALLTHRU)
4866 *fallthru_edge = e;
4867 *branch_edge = EDGE_SUCC (b, 1);
4869 else
4871 *branch_edge = e;
4872 *fallthru_edge = EDGE_SUCC (b, 1);
/* Allocate and attach the RTL-specific info record for basic block BB.
   BB must not already have one; the record is GC-allocated and
   zero-initialized.  */
void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.x.rtl);
  bb->il.x.head_ = NULL;
  bb->il.x.rtl = ggc_alloc_cleared_rtl_bb_info ();
}
4884 /* Returns true if it is possible to remove edge E by redirecting
4885 it to the destination of the other edge from E->src. */
4887 static bool
4888 rtl_can_remove_branch_p (const_edge e)
4890 const_basic_block src = e->src;
4891 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
4892 const_rtx insn = BB_END (src), set;
4894 /* The conditions are taken from try_redirect_by_replacing_jump. */
4895 if (target == EXIT_BLOCK_PTR)
4896 return false;
4898 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4899 return false;
4901 if (BB_PARTITION (src) != BB_PARTITION (target))
4902 return false;
4904 if (!onlyjump_p (insn)
4905 || tablejump_p (insn, NULL, NULL))
4906 return false;
4908 set = single_set (insn);
4909 if (!set || side_effects_p (set))
4910 return false;
4912 return true;
4915 static basic_block
4916 rtl_duplicate_bb (basic_block bb)
4918 bb = cfg_layout_duplicate_bb (bb);
4919 bb->aux = NULL;
4920 return bb;
4923 /* Do book-keeping of basic block BB for the profile consistency checker.
4924 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
4925 then do post-pass accounting. Store the counting in RECORD. */
4926 static void
4927 rtl_account_profile_record (basic_block bb, int after_pass,
4928 struct profile_record *record)
4930 rtx insn;
4931 FOR_BB_INSNS (bb, insn)
4932 if (INSN_P (insn))
4934 record->size[after_pass]
4935 += insn_rtx_cost (PATTERN (insn), false);
4936 if (profile_status == PROFILE_READ)
4937 record->time[after_pass]
4938 += insn_rtx_cost (PATTERN (insn), true) * bb->count;
4939 else if (profile_status == PROFILE_GUESSED)
4940 record->time[after_pass]
4941 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
/* Implementation of CFG manipulation for linearized RTL.
   Field order must match struct cfg_hooks in cfghooks.h.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,			/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_dump_bb_for_graph,		/* dump_bb_for_graph */
  rtl_create_basic_block,		/* create_basic_block */
  rtl_redirect_edge_and_branch,		/* redirect_edge_and_branch */
  rtl_redirect_edge_and_branch_force,	/* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  rtl_delete_block,			/* delete_basic_block */
  rtl_split_block,			/* split_block */
  rtl_move_block_after,			/* move_block_after */
  rtl_can_merge_blocks,			/* can_merge_blocks_p */
  rtl_merge_blocks,			/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  rtl_duplicate_bb,			/* duplicate_block */
  rtl_split_edge,			/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  rtl_tidy_fallthru_edge,		/* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.
   Field order must match struct cfg_hooks in cfghooks.h.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1,		/* verify_flow_info */
  rtl_dump_bb,				/* dump_bb */
  rtl_dump_bb_for_graph,		/* dump_bb_for_graph */
  cfg_layout_create_basic_block,	/* create_basic_block */
  cfg_layout_redirect_edge_and_branch,	/* redirect_edge_and_branch */
  cfg_layout_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p,		/* can_remove_branch_p */
  cfg_layout_delete_block,		/* delete_basic_block */
  cfg_layout_split_block,		/* split_block */
  rtl_move_block_after,			/* move_block_after */
  cfg_layout_can_merge_blocks_p,	/* can_merge_blocks_p */
  cfg_layout_merge_blocks,		/* merge_blocks */
  rtl_predict_edge,			/* predict_edge */
  rtl_predicted_by_p,			/* predicted_by_p */
  cfg_layout_can_duplicate_bb_p,	/* can_duplicate_block_p */
  cfg_layout_duplicate_bb,		/* duplicate_block */
  cfg_layout_split_edge,		/* split_edge */
  rtl_make_forwarder_block,		/* make_forwarder_block */
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru,		/* force_nonfallthru */
  rtl_block_ends_with_call_p,		/* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p,	/* block_ends_with_condjump_p */
  rtl_flow_call_edges_add,		/* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record,		/* account_profile_record */
};
5025 #include "gt-cfgrtl.h"