/* Control flow graph manipulation code for GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains low level functions to manipulate the CFG and analyze it
   that are aware of the RTL intermediate language.

   Available functionality:
     - Basic CFG/RTL manipulation API documented in cfghooks.h
     - CFG-aware instruction chain manipulation
	 delete_insn, delete_insn_chain
     - Edge splitting and committing to edges
	 insert_insn_on_edge, commit_edge_insertions
     - CFG updating after insn simplification
	 purge_dead_edges, purge_all_dead_edges
     - CFG fixing after coarse manipulation
	 fixup_abnormal_edges

   Functions not supposed for generic use:
     - Infrastructure to determine quickly basic block for insn
	 compute_bb_for_insn, update_bb_for_insn, set_block_for_insn,
     - Edge redirection with updating and optimizing of insn chain
	 block_label, tidy_fallthru_edge, force_nonfallthru  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "vec.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "inchash.h"
#include "tree.h"
#include "hard-reg-set.h"
#include "predict.h"
#include "hashtab.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "cfgbuild.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "bb-reorder.h"
#include "regs.h"
#include "flags.h"
#include "except.h"
#include "rtl-error.h"
#include "tm_p.h"
#include "obstack.h"
#include "insn-attr.h"
#include "insn-config.h"
#include "rtl.h"
#include "statistics.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "target.h"
#include "common/common-target.h"
#include "cfgloop.h"
#include "ggc.h"
#include "tree-pass.h"
#include "df.h"
88 /* Holds the interesting leading and trailing notes for the function.
89 Only applicable if the CFG is in cfglayout mode. */
90 static GTY(()) rtx_insn *cfg_layout_function_footer;
91 static GTY(()) rtx_insn *cfg_layout_function_header;
93 static rtx_insn *skip_insns_after_block (basic_block);
94 static void record_effective_endpoints (void);
95 static void fixup_reorder_chain (void);
97 void verify_insn_chain (void);
98 static void fixup_fallthru_exit_predecessor (void);
99 static int can_delete_note_p (const rtx_note *);
100 static int can_delete_label_p (const rtx_code_label *);
101 static basic_block rtl_split_edge (edge);
102 static bool rtl_move_block_after (basic_block, basic_block);
103 static int rtl_verify_flow_info (void);
104 static basic_block cfg_layout_split_block (basic_block, void *);
105 static edge cfg_layout_redirect_edge_and_branch (edge, basic_block);
106 static basic_block cfg_layout_redirect_edge_and_branch_force (edge, basic_block);
107 static void cfg_layout_delete_block (basic_block);
108 static void rtl_delete_block (basic_block);
109 static basic_block rtl_redirect_edge_and_branch_force (edge, basic_block);
110 static edge rtl_redirect_edge_and_branch (edge, basic_block);
111 static basic_block rtl_split_block (basic_block, void *);
112 static void rtl_dump_bb (FILE *, basic_block, int, int);
113 static int rtl_verify_flow_info_1 (void);
114 static void rtl_make_forwarder_block (edge);
116 /* Return true if NOTE is not one of the ones that must be kept paired,
117 so that we may simply delete it. */
119 static int
120 can_delete_note_p (const rtx_note *note)
122 switch (NOTE_KIND (note))
124 case NOTE_INSN_DELETED:
125 case NOTE_INSN_BASIC_BLOCK:
126 case NOTE_INSN_EPILOGUE_BEG:
127 return true;
129 default:
130 return false;
134 /* True if a given label can be deleted. */
136 static int
137 can_delete_label_p (const rtx_code_label *label)
139 return (!LABEL_PRESERVE_P (label)
140 /* User declared labels must be preserved. */
141 && LABEL_NAME (label) == 0
142 && !in_insn_list_p (forced_labels, label));
145 /* Delete INSN by patching it out. */
147 void
148 delete_insn (rtx uncast_insn)
150 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
151 rtx note;
152 bool really_delete = true;
154 if (LABEL_P (insn))
156 /* Some labels can't be directly removed from the INSN chain, as they
157 might be references via variables, constant pool etc.
158 Convert them to the special NOTE_INSN_DELETED_LABEL note. */
159 if (! can_delete_label_p (as_a <rtx_code_label *> (insn)))
161 const char *name = LABEL_NAME (insn);
162 basic_block bb = BLOCK_FOR_INSN (insn);
163 rtx_insn *bb_note = NEXT_INSN (insn);
165 really_delete = false;
166 PUT_CODE (insn, NOTE);
167 NOTE_KIND (insn) = NOTE_INSN_DELETED_LABEL;
168 NOTE_DELETED_LABEL_NAME (insn) = name;
170 /* If the note following the label starts a basic block, and the
171 label is a member of the same basic block, interchange the two. */
172 if (bb_note != NULL_RTX
173 && NOTE_INSN_BASIC_BLOCK_P (bb_note)
174 && bb != NULL
175 && bb == BLOCK_FOR_INSN (bb_note))
177 reorder_insns_nobb (insn, insn, bb_note);
178 BB_HEAD (bb) = bb_note;
179 if (BB_END (bb) == bb_note)
180 BB_END (bb) = insn;
184 remove_node_from_insn_list (insn, &nonlocal_goto_handler_labels);
187 if (really_delete)
189 /* If this insn has already been deleted, something is very wrong. */
190 gcc_assert (!insn->deleted ());
191 if (INSN_P (insn))
192 df_insn_delete (insn);
193 remove_insn (insn);
194 insn->set_deleted ();
197 /* If deleting a jump, decrement the use count of the label. Deleting
198 the label itself should happen in the normal course of block merging. */
199 if (JUMP_P (insn))
201 if (JUMP_LABEL (insn)
202 && LABEL_P (JUMP_LABEL (insn)))
203 LABEL_NUSES (JUMP_LABEL (insn))--;
205 /* If there are more targets, remove them too. */
206 while ((note
207 = find_reg_note (insn, REG_LABEL_TARGET, NULL_RTX)) != NULL_RTX
208 && LABEL_P (XEXP (note, 0)))
210 LABEL_NUSES (XEXP (note, 0))--;
211 remove_note (insn, note);
215 /* Also if deleting any insn that references a label as an operand. */
216 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, NULL_RTX)) != NULL_RTX
217 && LABEL_P (XEXP (note, 0)))
219 LABEL_NUSES (XEXP (note, 0))--;
220 remove_note (insn, note);
223 if (rtx_jump_table_data *table = dyn_cast <rtx_jump_table_data *> (insn))
225 rtvec vec = table->get_labels ();
226 int len = GET_NUM_ELEM (vec);
227 int i;
229 for (i = 0; i < len; i++)
231 rtx label = XEXP (RTVEC_ELT (vec, i), 0);
233 /* When deleting code in bulk (e.g. removing many unreachable
234 blocks) we can delete a label that's a target of the vector
235 before deleting the vector itself. */
236 if (!NOTE_P (label))
237 LABEL_NUSES (label)--;
242 /* Like delete_insn but also purge dead edges from BB. */
244 void
245 delete_insn_and_edges (rtx_insn *insn)
247 bool purge = false;
249 if (INSN_P (insn)
250 && BLOCK_FOR_INSN (insn)
251 && BB_END (BLOCK_FOR_INSN (insn)) == insn)
252 purge = true;
253 delete_insn (insn);
254 if (purge)
255 purge_dead_edges (BLOCK_FOR_INSN (insn));
258 /* Unlink a chain of insns between START and FINISH, leaving notes
259 that must be paired. If CLEAR_BB is true, we set bb field for
260 insns that cannot be removed to NULL. */
262 void
263 delete_insn_chain (rtx start, rtx finish, bool clear_bb)
265 rtx_insn *prev, *current;
267 /* Unchain the insns one by one. It would be quicker to delete all of these
268 with a single unchaining, rather than one at a time, but we need to keep
269 the NOTE's. */
270 current = safe_as_a <rtx_insn *> (finish);
271 while (1)
273 prev = PREV_INSN (current);
274 if (NOTE_P (current) && !can_delete_note_p (as_a <rtx_note *> (current)))
276 else
277 delete_insn (current);
279 if (clear_bb && !current->deleted ())
280 set_block_for_insn (current, NULL);
282 if (current == start)
283 break;
284 current = prev;
288 /* Create a new basic block consisting of the instructions between HEAD and END
289 inclusive. This function is designed to allow fast BB construction - reuses
290 the note and basic block struct in BB_NOTE, if any and do not grow
291 BASIC_BLOCK chain and should be used directly only by CFG construction code.
292 END can be NULL in to create new empty basic block before HEAD. Both END
293 and HEAD can be NULL to create basic block at the end of INSN chain.
294 AFTER is the basic block we should be put after. */
296 basic_block
297 create_basic_block_structure (rtx_insn *head, rtx_insn *end, rtx_note *bb_note,
298 basic_block after)
300 basic_block bb;
302 if (bb_note
303 && (bb = NOTE_BASIC_BLOCK (bb_note)) != NULL
304 && bb->aux == NULL)
306 /* If we found an existing note, thread it back onto the chain. */
308 rtx_insn *after;
310 if (LABEL_P (head))
311 after = head;
312 else
314 after = PREV_INSN (head);
315 head = bb_note;
318 if (after != bb_note && NEXT_INSN (after) != bb_note)
319 reorder_insns_nobb (bb_note, bb_note, after);
321 else
323 /* Otherwise we must create a note and a basic block structure. */
325 bb = alloc_block ();
327 init_rtl_bb_info (bb);
328 if (!head && !end)
329 head = end = bb_note
330 = emit_note_after (NOTE_INSN_BASIC_BLOCK, get_last_insn ());
331 else if (LABEL_P (head) && end)
333 bb_note = emit_note_after (NOTE_INSN_BASIC_BLOCK, head);
334 if (head == end)
335 end = bb_note;
337 else
339 bb_note = emit_note_before (NOTE_INSN_BASIC_BLOCK, head);
340 head = bb_note;
341 if (!end)
342 end = head;
345 NOTE_BASIC_BLOCK (bb_note) = bb;
348 /* Always include the bb note in the block. */
349 if (NEXT_INSN (end) == bb_note)
350 end = bb_note;
352 BB_HEAD (bb) = head;
353 BB_END (bb) = end;
354 bb->index = last_basic_block_for_fn (cfun)++;
355 bb->flags = BB_NEW | BB_RTL;
356 link_block (bb, after);
357 SET_BASIC_BLOCK_FOR_FN (cfun, bb->index, bb);
358 df_bb_refs_record (bb->index, false);
359 update_bb_for_insn (bb);
360 BB_SET_PARTITION (bb, BB_UNPARTITIONED);
362 /* Tag the block so that we know it has been used when considering
363 other basic block notes. */
364 bb->aux = bb;
366 return bb;
369 /* Create new basic block consisting of instructions in between HEAD and END
370 and place it to the BB chain after block AFTER. END can be NULL to
371 create a new empty basic block before HEAD. Both END and HEAD can be
372 NULL to create basic block at the end of INSN chain. */
374 static basic_block
375 rtl_create_basic_block (void *headp, void *endp, basic_block after)
377 rtx_insn *head = (rtx_insn *) headp;
378 rtx_insn *end = (rtx_insn *) endp;
379 basic_block bb;
381 /* Grow the basic block array if needed. */
382 if ((size_t) last_basic_block_for_fn (cfun)
383 >= basic_block_info_for_fn (cfun)->length ())
385 size_t new_size =
386 (last_basic_block_for_fn (cfun)
387 + (last_basic_block_for_fn (cfun) + 3) / 4);
388 vec_safe_grow_cleared (basic_block_info_for_fn (cfun), new_size);
391 n_basic_blocks_for_fn (cfun)++;
393 bb = create_basic_block_structure (head, end, NULL, after);
394 bb->aux = NULL;
395 return bb;
398 static basic_block
399 cfg_layout_create_basic_block (void *head, void *end, basic_block after)
401 basic_block newbb = rtl_create_basic_block (head, end, after);
403 return newbb;
406 /* Delete the insns in a (non-live) block. We physically delete every
407 non-deleted-note insn, and update the flow graph appropriately.
409 Return nonzero if we deleted an exception handler. */
411 /* ??? Preserving all such notes strikes me as wrong. It would be nice
412 to post-process the stream to remove empty blocks, loops, ranges, etc. */
414 static void
415 rtl_delete_block (basic_block b)
417 rtx_insn *insn, *end;
419 /* If the head of this block is a CODE_LABEL, then it might be the
420 label for an exception handler which can't be reached. We need
421 to remove the label from the exception_handler_label list. */
422 insn = BB_HEAD (b);
424 end = get_last_bb_insn (b);
426 /* Selectively delete the entire chain. */
427 BB_HEAD (b) = NULL;
428 delete_insn_chain (insn, end, true);
431 if (dump_file)
432 fprintf (dump_file, "deleting block %d\n", b->index);
433 df_bb_delete (b->index);
436 /* Records the basic block struct in BLOCK_FOR_INSN for every insn. */
438 void
439 compute_bb_for_insn (void)
441 basic_block bb;
443 FOR_EACH_BB_FN (bb, cfun)
445 rtx_insn *end = BB_END (bb);
446 rtx_insn *insn;
448 for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
450 BLOCK_FOR_INSN (insn) = bb;
451 if (insn == end)
452 break;
457 /* Release the basic_block_for_insn array. */
459 unsigned int
460 free_bb_for_insn (void)
462 rtx_insn *insn;
463 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
464 if (!BARRIER_P (insn))
465 BLOCK_FOR_INSN (insn) = NULL;
466 return 0;
469 namespace {
471 const pass_data pass_data_free_cfg =
473 RTL_PASS, /* type */
474 "*free_cfg", /* name */
475 OPTGROUP_NONE, /* optinfo_flags */
476 TV_NONE, /* tv_id */
477 0, /* properties_required */
478 0, /* properties_provided */
479 PROP_cfg, /* properties_destroyed */
480 0, /* todo_flags_start */
481 0, /* todo_flags_finish */
484 class pass_free_cfg : public rtl_opt_pass
486 public:
487 pass_free_cfg (gcc::context *ctxt)
488 : rtl_opt_pass (pass_data_free_cfg, ctxt)
491 /* opt_pass methods: */
492 virtual unsigned int execute (function *);
494 }; // class pass_free_cfg
496 unsigned int
497 pass_free_cfg::execute (function *)
499 #ifdef DELAY_SLOTS
500 /* The resource.c machinery uses DF but the CFG isn't guaranteed to be
501 valid at that point so it would be too late to call df_analyze. */
502 if (optimize > 0 && flag_delayed_branch)
504 df_note_add_problem ();
505 df_analyze ();
507 #endif
509 if (crtl->has_bb_partition)
510 insert_section_boundary_note ();
512 free_bb_for_insn ();
513 return 0;
516 } // anon namespace
518 rtl_opt_pass *
519 make_pass_free_cfg (gcc::context *ctxt)
521 return new pass_free_cfg (ctxt);
524 /* Return RTX to emit after when we want to emit code on the entry of function. */
525 rtx_insn *
526 entry_of_function (void)
528 return (n_basic_blocks_for_fn (cfun) > NUM_FIXED_BLOCKS ?
529 BB_HEAD (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb) : get_insns ());
532 /* Emit INSN at the entry point of the function, ensuring that it is only
533 executed once per function. */
534 void
535 emit_insn_at_entry (rtx insn)
537 edge_iterator ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
538 edge e = ei_safe_edge (ei);
539 gcc_assert (e->flags & EDGE_FALLTHRU);
541 insert_insn_on_edge (insn, e);
542 commit_edge_insertions ();
545 /* Update BLOCK_FOR_INSN of insns between BEGIN and END
546 (or BARRIER if found) and notify df of the bb change.
547 The insn chain range is inclusive
548 (i.e. both BEGIN and END will be updated. */
550 static void
551 update_bb_for_insn_chain (rtx_insn *begin, rtx_insn *end, basic_block bb)
553 rtx_insn *insn;
555 end = NEXT_INSN (end);
556 for (insn = begin; insn != end; insn = NEXT_INSN (insn))
557 if (!BARRIER_P (insn))
558 df_insn_change_bb (insn, bb);
561 /* Update BLOCK_FOR_INSN of insns in BB to BB,
562 and notify df of the change. */
564 void
565 update_bb_for_insn (basic_block bb)
567 update_bb_for_insn_chain (BB_HEAD (bb), BB_END (bb), bb);
571 /* Like active_insn_p, except keep the return value clobber around
572 even after reload. */
574 static bool
575 flow_active_insn_p (const rtx_insn *insn)
577 if (active_insn_p (insn))
578 return true;
580 /* A clobber of the function return value exists for buggy
581 programs that fail to return a value. Its effect is to
582 keep the return value from being live across the entire
583 function. If we allow it to be skipped, we introduce the
584 possibility for register lifetime confusion. */
585 if (GET_CODE (PATTERN (insn)) == CLOBBER
586 && REG_P (XEXP (PATTERN (insn), 0))
587 && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
588 return true;
590 return false;
593 /* Return true if the block has no effect and only forwards control flow to
594 its single destination. */
596 bool
597 contains_no_active_insn_p (const_basic_block bb)
599 rtx_insn *insn;
601 if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun) || bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
602 || !single_succ_p (bb))
603 return false;
605 for (insn = BB_HEAD (bb); insn != BB_END (bb); insn = NEXT_INSN (insn))
606 if (INSN_P (insn) && flow_active_insn_p (insn))
607 return false;
609 return (!INSN_P (insn)
610 || (JUMP_P (insn) && simplejump_p (insn))
611 || !flow_active_insn_p (insn));
614 /* Likewise, but protect loop latches, headers and preheaders. */
615 /* FIXME: Make this a cfg hook. */
617 bool
618 forwarder_block_p (const_basic_block bb)
620 if (!contains_no_active_insn_p (bb))
621 return false;
623 /* Protect loop latches, headers and preheaders. */
624 if (current_loops)
626 basic_block dest;
627 if (bb->loop_father->header == bb)
628 return false;
629 dest = EDGE_SUCC (bb, 0)->dest;
630 if (dest->loop_father->header == dest)
631 return false;
634 return true;
637 /* Return nonzero if we can reach target from src by falling through. */
638 /* FIXME: Make this a cfg hook, the result is only valid in cfgrtl mode. */
640 bool
641 can_fallthru (basic_block src, basic_block target)
643 rtx_insn *insn = BB_END (src);
644 rtx_insn *insn2;
645 edge e;
646 edge_iterator ei;
648 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
649 return true;
650 if (src->next_bb != target)
651 return false;
653 /* ??? Later we may add code to move jump tables offline. */
654 if (tablejump_p (insn, NULL, NULL))
655 return false;
657 FOR_EACH_EDGE (e, ei, src->succs)
658 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
659 && e->flags & EDGE_FALLTHRU)
660 return false;
662 insn2 = BB_HEAD (target);
663 if (!active_insn_p (insn2))
664 insn2 = next_active_insn (insn2);
666 return next_active_insn (insn) == insn2;
669 /* Return nonzero if we could reach target from src by falling through,
670 if the target was made adjacent. If we already have a fall-through
671 edge to the exit block, we can't do that. */
672 static bool
673 could_fall_through (basic_block src, basic_block target)
675 edge e;
676 edge_iterator ei;
678 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
679 return true;
680 FOR_EACH_EDGE (e, ei, src->succs)
681 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
682 && e->flags & EDGE_FALLTHRU)
683 return 0;
684 return true;
687 /* Return the NOTE_INSN_BASIC_BLOCK of BB. */
688 rtx_note *
689 bb_note (basic_block bb)
691 rtx_insn *note;
693 note = BB_HEAD (bb);
694 if (LABEL_P (note))
695 note = NEXT_INSN (note);
697 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (note));
698 return as_a <rtx_note *> (note);
701 /* Return the INSN immediately following the NOTE_INSN_BASIC_BLOCK
702 note associated with the BLOCK. */
704 static rtx_insn *
705 first_insn_after_basic_block_note (basic_block block)
707 rtx_insn *insn;
709 /* Get the first instruction in the block. */
710 insn = BB_HEAD (block);
712 if (insn == NULL_RTX)
713 return NULL;
714 if (LABEL_P (insn))
715 insn = NEXT_INSN (insn);
716 gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
718 return NEXT_INSN (insn);
721 /* Creates a new basic block just after basic block BB by splitting
722 everything after specified instruction INSNP. */
724 static basic_block
725 rtl_split_block (basic_block bb, void *insnp)
727 basic_block new_bb;
728 rtx_insn *insn = (rtx_insn *) insnp;
729 edge e;
730 edge_iterator ei;
732 if (!insn)
734 insn = first_insn_after_basic_block_note (bb);
736 if (insn)
738 rtx_insn *next = insn;
740 insn = PREV_INSN (insn);
742 /* If the block contains only debug insns, insn would have
743 been NULL in a non-debug compilation, and then we'd end
744 up emitting a DELETED note. For -fcompare-debug
745 stability, emit the note too. */
746 if (insn != BB_END (bb)
747 && DEBUG_INSN_P (next)
748 && DEBUG_INSN_P (BB_END (bb)))
750 while (next != BB_END (bb) && DEBUG_INSN_P (next))
751 next = NEXT_INSN (next);
753 if (next == BB_END (bb))
754 emit_note_after (NOTE_INSN_DELETED, next);
757 else
758 insn = get_last_insn ();
761 /* We probably should check type of the insn so that we do not create
762 inconsistent cfg. It is checked in verify_flow_info anyway, so do not
763 bother. */
764 if (insn == BB_END (bb))
765 emit_note_after (NOTE_INSN_DELETED, insn);
767 /* Create the new basic block. */
768 new_bb = create_basic_block (NEXT_INSN (insn), BB_END (bb), bb);
769 BB_COPY_PARTITION (new_bb, bb);
770 BB_END (bb) = insn;
772 /* Redirect the outgoing edges. */
773 new_bb->succs = bb->succs;
774 bb->succs = NULL;
775 FOR_EACH_EDGE (e, ei, new_bb->succs)
776 e->src = new_bb;
778 /* The new block starts off being dirty. */
779 df_set_bb_dirty (bb);
780 return new_bb;
783 /* Return true if the single edge between blocks A and B is the only place
784 in RTL which holds some unique locus. */
786 static bool
787 unique_locus_on_edge_between_p (basic_block a, basic_block b)
789 const location_t goto_locus = EDGE_SUCC (a, 0)->goto_locus;
790 rtx_insn *insn, *end;
792 if (LOCATION_LOCUS (goto_locus) == UNKNOWN_LOCATION)
793 return false;
795 /* First scan block A backward. */
796 insn = BB_END (a);
797 end = PREV_INSN (BB_HEAD (a));
798 while (insn != end && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
799 insn = PREV_INSN (insn);
801 if (insn != end && INSN_LOCATION (insn) == goto_locus)
802 return false;
804 /* Then scan block B forward. */
805 insn = BB_HEAD (b);
806 if (insn)
808 end = NEXT_INSN (BB_END (b));
809 while (insn != end && !NONDEBUG_INSN_P (insn))
810 insn = NEXT_INSN (insn);
812 if (insn != end && INSN_HAS_LOCATION (insn)
813 && INSN_LOCATION (insn) == goto_locus)
814 return false;
817 return true;
820 /* If the single edge between blocks A and B is the only place in RTL which
821 holds some unique locus, emit a nop with that locus between the blocks. */
823 static void
824 emit_nop_for_unique_locus_between (basic_block a, basic_block b)
826 if (!unique_locus_on_edge_between_p (a, b))
827 return;
829 BB_END (a) = emit_insn_after_noloc (gen_nop (), BB_END (a), a);
830 INSN_LOCATION (BB_END (a)) = EDGE_SUCC (a, 0)->goto_locus;
833 /* Blocks A and B are to be merged into a single block A. The insns
834 are already contiguous. */
836 static void
837 rtl_merge_blocks (basic_block a, basic_block b)
839 rtx_insn *b_head = BB_HEAD (b), *b_end = BB_END (b), *a_end = BB_END (a);
840 rtx_insn *del_first = NULL, *del_last = NULL;
841 rtx_insn *b_debug_start = b_end, *b_debug_end = b_end;
842 bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
843 int b_empty = 0;
845 if (dump_file)
846 fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
847 a->index);
849 while (DEBUG_INSN_P (b_end))
850 b_end = PREV_INSN (b_debug_start = b_end);
852 /* If there was a CODE_LABEL beginning B, delete it. */
853 if (LABEL_P (b_head))
855 /* Detect basic blocks with nothing but a label. This can happen
856 in particular at the end of a function. */
857 if (b_head == b_end)
858 b_empty = 1;
860 del_first = del_last = b_head;
861 b_head = NEXT_INSN (b_head);
864 /* Delete the basic block note and handle blocks containing just that
865 note. */
866 if (NOTE_INSN_BASIC_BLOCK_P (b_head))
868 if (b_head == b_end)
869 b_empty = 1;
870 if (! del_last)
871 del_first = b_head;
873 del_last = b_head;
874 b_head = NEXT_INSN (b_head);
877 /* If there was a jump out of A, delete it. */
878 if (JUMP_P (a_end))
880 rtx_insn *prev;
882 for (prev = PREV_INSN (a_end); ; prev = PREV_INSN (prev))
883 if (!NOTE_P (prev)
884 || NOTE_INSN_BASIC_BLOCK_P (prev)
885 || prev == BB_HEAD (a))
886 break;
888 del_first = a_end;
890 /* If this was a conditional jump, we need to also delete
891 the insn that set cc0. */
892 if (HAVE_cc0 && only_sets_cc0_p (prev))
894 rtx_insn *tmp = prev;
896 prev = prev_nonnote_insn (prev);
897 if (!prev)
898 prev = BB_HEAD (a);
899 del_first = tmp;
902 a_end = PREV_INSN (del_first);
904 else if (BARRIER_P (NEXT_INSN (a_end)))
905 del_first = NEXT_INSN (a_end);
907 /* Delete everything marked above as well as crap that might be
908 hanging out between the two blocks. */
909 BB_END (a) = a_end;
910 BB_HEAD (b) = b_empty ? NULL : b_head;
911 delete_insn_chain (del_first, del_last, true);
913 /* When not optimizing and the edge is the only place in RTL which holds
914 some unique locus, emit a nop with that locus in between. */
915 if (!optimize)
917 emit_nop_for_unique_locus_between (a, b);
918 a_end = BB_END (a);
921 /* Reassociate the insns of B with A. */
922 if (!b_empty)
924 update_bb_for_insn_chain (a_end, b_debug_end, a);
926 BB_END (a) = b_debug_end;
927 BB_HEAD (b) = NULL;
929 else if (b_end != b_debug_end)
931 /* Move any deleted labels and other notes between the end of A
932 and the debug insns that make up B after the debug insns,
933 bringing the debug insns into A while keeping the notes after
934 the end of A. */
935 if (NEXT_INSN (a_end) != b_debug_start)
936 reorder_insns_nobb (NEXT_INSN (a_end), PREV_INSN (b_debug_start),
937 b_debug_end);
938 update_bb_for_insn_chain (b_debug_start, b_debug_end, a);
939 BB_END (a) = b_debug_end;
942 df_bb_delete (b->index);
944 /* If B was a forwarder block, propagate the locus on the edge. */
945 if (forwarder_p
946 && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
947 EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;
949 if (dump_file)
950 fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
954 /* Return true when block A and B can be merged. */
956 static bool
957 rtl_can_merge_blocks (basic_block a, basic_block b)
959 /* If we are partitioning hot/cold basic blocks, we don't want to
960 mess up unconditional or indirect jumps that cross between hot
961 and cold sections.
963 Basic block partitioning may result in some jumps that appear to
964 be optimizable (or blocks that appear to be mergeable), but which really
965 must be left untouched (they are required to make it safely across
966 partition boundaries). See the comments at the top of
967 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
969 if (BB_PARTITION (a) != BB_PARTITION (b))
970 return false;
972 /* Protect the loop latches. */
973 if (current_loops && b->loop_father->latch == b)
974 return false;
976 /* There must be exactly one edge in between the blocks. */
977 return (single_succ_p (a)
978 && single_succ (a) == b
979 && single_pred_p (b)
980 && a != b
981 /* Must be simple edge. */
982 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
983 && a->next_bb == b
984 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
985 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
986 /* If the jump insn has side effects,
987 we can't kill the edge. */
988 && (!JUMP_P (BB_END (a))
989 || (reload_completed
990 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
993 /* Return the label in the head of basic block BLOCK. Create one if it doesn't
994 exist. */
996 rtx_code_label *
997 block_label (basic_block block)
999 if (block == EXIT_BLOCK_PTR_FOR_FN (cfun))
1000 return NULL;
1002 if (!LABEL_P (BB_HEAD (block)))
1004 BB_HEAD (block) = emit_label_before (gen_label_rtx (), BB_HEAD (block));
1007 return as_a <rtx_code_label *> (BB_HEAD (block));
1010 /* Attempt to perform edge redirection by replacing possibly complex jump
1011 instruction by unconditional jump or removing jump completely. This can
1012 apply only if all edges now point to the same block. The parameters and
1013 return values are equivalent to redirect_edge_and_branch. */
1015 edge
1016 try_redirect_by_replacing_jump (edge e, basic_block target, bool in_cfglayout)
1018 basic_block src = e->src;
1019 rtx_insn *insn = BB_END (src), *kill_from;
1020 rtx set;
1021 int fallthru = 0;
1023 /* If we are partitioning hot/cold basic blocks, we don't want to
1024 mess up unconditional or indirect jumps that cross between hot
1025 and cold sections.
1027 Basic block partitioning may result in some jumps that appear to
1028 be optimizable (or blocks that appear to be mergeable), but which really
1029 must be left untouched (they are required to make it safely across
1030 partition boundaries). See the comments at the top of
1031 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
1033 if (BB_PARTITION (src) != BB_PARTITION (target))
1034 return NULL;
1036 /* We can replace or remove a complex jump only when we have exactly
1037 two edges. Also, if we have exactly one outgoing edge, we can
1038 redirect that. */
1039 if (EDGE_COUNT (src->succs) >= 3
1040 /* Verify that all targets will be TARGET. Specifically, the
1041 edge that is not E must also go to TARGET. */
1042 || (EDGE_COUNT (src->succs) == 2
1043 && EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest != target))
1044 return NULL;
1046 if (!onlyjump_p (insn))
1047 return NULL;
1048 if ((!optimize || reload_completed) && tablejump_p (insn, NULL, NULL))
1049 return NULL;
1051 /* Avoid removing branch with side effects. */
1052 set = single_set (insn);
1053 if (!set || side_effects_p (set))
1054 return NULL;
1056 /* In case we zap a conditional jump, we'll need to kill
1057 the cc0 setter too. */
1058 kill_from = insn;
1059 if (HAVE_cc0 && reg_mentioned_p (cc0_rtx, PATTERN (insn))
1060 && only_sets_cc0_p (PREV_INSN (insn)))
1061 kill_from = PREV_INSN (insn);
1063 /* See if we can create the fallthru edge. */
1064 if (in_cfglayout || can_fallthru (src, target))
1066 if (dump_file)
1067 fprintf (dump_file, "Removing jump %i.\n", INSN_UID (insn));
1068 fallthru = 1;
1070 /* Selectively unlink whole insn chain. */
1071 if (in_cfglayout)
1073 rtx_insn *insn = BB_FOOTER (src);
1075 delete_insn_chain (kill_from, BB_END (src), false);
1077 /* Remove barriers but keep jumptables. */
1078 while (insn)
1080 if (BARRIER_P (insn))
1082 if (PREV_INSN (insn))
1083 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
1084 else
1085 BB_FOOTER (src) = NEXT_INSN (insn);
1086 if (NEXT_INSN (insn))
1087 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
1089 if (LABEL_P (insn))
1090 break;
1091 insn = NEXT_INSN (insn);
1094 else
1095 delete_insn_chain (kill_from, PREV_INSN (BB_HEAD (target)),
1096 false);
1099 /* If this already is simplejump, redirect it. */
1100 else if (simplejump_p (insn))
1102 if (e->dest == target)
1103 return NULL;
1104 if (dump_file)
1105 fprintf (dump_file, "Redirecting jump %i from %i to %i.\n",
1106 INSN_UID (insn), e->dest->index, target->index);
1107 if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
1108 block_label (target), 0))
1110 gcc_assert (target == EXIT_BLOCK_PTR_FOR_FN (cfun));
1111 return NULL;
1115 /* Cannot do anything for target exit block. */
1116 else if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
1117 return NULL;
1119 /* Or replace possibly complicated jump insn by simple jump insn. */
1120 else
1122 rtx_code_label *target_label = block_label (target);
1123 rtx_insn *barrier;
1124 rtx label;
1125 rtx_jump_table_data *table;
1127 emit_jump_insn_after_noloc (gen_jump (target_label), insn);
1128 JUMP_LABEL (BB_END (src)) = target_label;
1129 LABEL_NUSES (target_label)++;
1130 if (dump_file)
1131 fprintf (dump_file, "Replacing insn %i by jump %i\n",
1132 INSN_UID (insn), INSN_UID (BB_END (src)));
1135 delete_insn_chain (kill_from, insn, false);
1137 /* Recognize a tablejump that we are converting to a
1138 simple jump and remove its associated CODE_LABEL
1139 and ADDR_VEC or ADDR_DIFF_VEC. */
1140 if (tablejump_p (insn, &label, &table))
1141 delete_insn_chain (label, table, false);
1143 barrier = next_nonnote_insn (BB_END (src));
1144 if (!barrier || !BARRIER_P (barrier))
1145 emit_barrier_after (BB_END (src));
1146 else
1148 if (barrier != NEXT_INSN (BB_END (src)))
1150 /* Move the jump before barrier so that the notes
1151 which originally were or were created before jump table are
1152 inside the basic block. */
1153 rtx_insn *new_insn = BB_END (src);
1155 update_bb_for_insn_chain (NEXT_INSN (BB_END (src)),
1156 PREV_INSN (barrier), src);
1158 SET_NEXT_INSN (PREV_INSN (new_insn)) = NEXT_INSN (new_insn);
1159 SET_PREV_INSN (NEXT_INSN (new_insn)) = PREV_INSN (new_insn);
1161 SET_NEXT_INSN (new_insn) = barrier;
1162 SET_NEXT_INSN (PREV_INSN (barrier)) = new_insn;
1164 SET_PREV_INSN (new_insn) = PREV_INSN (barrier);
1165 SET_PREV_INSN (barrier) = new_insn;
1170 /* Keep only one edge out and set proper flags. */
1171 if (!single_succ_p (src))
1172 remove_edge (e);
1173 gcc_assert (single_succ_p (src));
1175 e = single_succ_edge (src);
1176 if (fallthru)
1177 e->flags = EDGE_FALLTHRU;
1178 else
1179 e->flags = 0;
1181 e->probability = REG_BR_PROB_BASE;
1182 e->count = src->count;
1184 if (e->dest != target)
1185 redirect_edge_succ (e, target);
1186 return e;
1189 /* Subroutine of redirect_branch_edge that tries to patch the jump
1190 instruction INSN so that it reaches block NEW. Do this
1191 only when it originally reached block OLD. Return true if this
1192 worked or the original target wasn't OLD, return false if redirection
1193 doesn't work. */
1195 static bool
1196 patch_jump_insn (rtx_insn *insn, rtx_insn *old_label, basic_block new_bb)
1198 rtx_jump_table_data *table;
1199 rtx tmp;
1200 /* Recognize a tablejump and adjust all matching cases. */
1201 if (tablejump_p (insn, NULL, &table))
1203 rtvec vec;
1204 int j;
1205 rtx_code_label *new_label = block_label (new_bb);
     /* A tablejump cannot target the exit block.  */
1207 if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1208 return false;
1209 vec = table->get_labels ();
     /* Rewrite every table entry that referenced OLD_LABEL, keeping
        label use counts in sync.  */
1211 for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
1212 if (XEXP (RTVEC_ELT (vec, j), 0) == old_label)
1214 RTVEC_ELT (vec, j) = gen_rtx_LABEL_REF (Pmode, new_label);
1215 --LABEL_NUSES (old_label);
1216 ++LABEL_NUSES (new_label);
1219 /* Handle casesi dispatch insns. */
1220 if ((tmp = single_set (insn)) != NULL
1221 && SET_DEST (tmp) == pc_rtx
1222 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
1223 && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF
1224 && LABEL_REF_LABEL (XEXP (SET_SRC (tmp), 2)) == old_label)
1226 XEXP (SET_SRC (tmp), 2) = gen_rtx_LABEL_REF (Pmode,
1227 new_label);
1228 --LABEL_NUSES (old_label);
1229 ++LABEL_NUSES (new_label);
   /* An asm goto: rewrite each label operand that referenced OLD_LABEL,
      then fix up JUMP_LABEL and the REG_LABEL_* notes below.  */
1232 else if ((tmp = extract_asm_operands (PATTERN (insn))) != NULL)
1234 int i, n = ASM_OPERANDS_LABEL_LENGTH (tmp);
1235 rtx note;
1237 if (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1238 return false;
1239 rtx_code_label *new_label = block_label (new_bb);
1241 for (i = 0; i < n; ++i)
1243 rtx old_ref = ASM_OPERANDS_LABEL (tmp, i);
1244 gcc_assert (GET_CODE (old_ref) == LABEL_REF);
1245 if (XEXP (old_ref, 0) == old_label)
1247 ASM_OPERANDS_LABEL (tmp, i)
1248 = gen_rtx_LABEL_REF (Pmode, new_label);
1249 --LABEL_NUSES (old_label);
1250 ++LABEL_NUSES (new_label);
     /* Keep JUMP_LABEL and the REG_LABEL_TARGET / REG_LABEL_OPERAND
        notes consistent with the rewritten label operands.  */
1254 if (JUMP_LABEL (insn) == old_label)
1256 JUMP_LABEL (insn) = new_label;
1257 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1258 if (note)
1259 remove_note (insn, note);
1261 else
1263 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1264 if (note)
1265 remove_note (insn, note);
1266 if (JUMP_LABEL (insn) != new_label
1267 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1268 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1270 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1271 != NULL_RTX)
1272 XEXP (note, 0) = new_label;
1274 else
1276 /* ?? We may play the games with moving the named labels from
1277 one basic block to the other in case only one computed_jump is
1278 available. */
1279 if (computed_jump_p (insn)
1280 /* A return instruction can't be redirected. */
1281 || returnjump_p (insn))
1282 return false;
     /* During RTL expansion only patch a jump that actually targets
        OLD_LABEL; other jumps in the block are left alone.  */
1284 if (!currently_expanding_to_rtl || JUMP_LABEL (insn) == old_label)
1286 /* If the insn doesn't go where we think, we're confused. */
1287 gcc_assert (JUMP_LABEL (insn) == old_label);
1289 /* If the substitution doesn't succeed, die. This can happen
1290 if the back end emitted unrecognizable instructions or if
1291 target is exit block on some arches. */
1292 if (!redirect_jump (as_a <rtx_jump_insn *> (insn),
1293 block_label (new_bb), 0))
1295 gcc_assert (new_bb == EXIT_BLOCK_PTR_FOR_FN (cfun));
1296 return false;
1300 return true;
1304 /* Redirect edge representing branch of (un)conditional jump or tablejump,
1305 NULL on failure */
1306 static edge
1307 redirect_branch_edge (edge e, basic_block target)
   /* The jump to patch lives at the end of the source block; its current
      target is the label heading E->dest.  */
1309 rtx_insn *old_label = BB_HEAD (e->dest);
1310 basic_block src = e->src;
1311 rtx_insn *insn = BB_END (src);
1313 /* We can only redirect non-fallthru edges of jump insn. */
1314 if (e->flags & EDGE_FALLTHRU)
1315 return NULL;
1316 else if (!JUMP_P (insn) && !currently_expanding_to_rtl)
1317 return NULL;
1319 if (!currently_expanding_to_rtl)
1321 if (!patch_jump_insn (as_a <rtx_jump_insn *> (insn), old_label, target))
1322 return NULL;
1324 else
1325 /* When expanding this BB might actually contain multiple
1326 jumps (i.e. not yet split by find_many_sub_basic_blocks).
1327 Redirect all of those that match our label. */
1328 FOR_BB_INSNS (src, insn)
1329 if (JUMP_P (insn) && !patch_jump_insn (as_a <rtx_jump_insn *> (insn),
1330 old_label, target))
1331 return NULL;
1333 if (dump_file)
1334 fprintf (dump_file, "Edge %i->%i redirected to %i\n",
1335 e->src->index, e->dest->index, target->index);
   /* Finally make the CFG edge match the patched instruction(s).  */
1337 if (e->dest != target)
1338 e = redirect_edge_succ_nodup (e, target);
1340 return e;
1343 /* Called when edge E has been redirected to a new destination,
1344 in order to update the region crossing flag on the edge and
1345 jump. */
1347 static void
1348 fixup_partition_crossing (edge e)
   /* Edges out of the entry block or into the exit block are never
      treated as partition crossings.  */
1350 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun) || e->dest
1351 == EXIT_BLOCK_PTR_FOR_FN (cfun))
1352 return;
1353 /* If we redirected an existing edge, it may already be marked
1354 crossing, even though the new src is missing a reg crossing note.
1355 But make sure reg crossing note doesn't already exist before
1356 inserting. */
1357 if (BB_PARTITION (e->src) != BB_PARTITION (e->dest))
1359 e->flags |= EDGE_CROSSING;
1360 if (JUMP_P (BB_END (e->src))
1361 && !CROSSING_JUMP_P (BB_END (e->src)))
1362 CROSSING_JUMP_P (BB_END (e->src)) = 1;
1364 else if (BB_PARTITION (e->src) == BB_PARTITION (e->dest))
1366 e->flags &= ~EDGE_CROSSING;
1367 /* Remove the section crossing note from jump at end of
1368 src if it exists, and if no other successors are
1369 still crossing. */
1370 if (JUMP_P (BB_END (e->src)) && CROSSING_JUMP_P (BB_END (e->src)))
1372 bool has_crossing_succ = false;
1373 edge e2;
1374 edge_iterator ei;
1375 FOR_EACH_EDGE (e2, ei, e->src->succs)
1377 has_crossing_succ |= (e2->flags & EDGE_CROSSING);
1378 if (has_crossing_succ)
1379 break;
1381 if (!has_crossing_succ)
1382 CROSSING_JUMP_P (BB_END (e->src)) = 0;
1387 /* Called when block BB has been reassigned to the cold partition,
1388 because it is now dominated by another cold block,
1389 to ensure that the region crossing attributes are updated. */
1391 static void
1392 fixup_new_cold_bb (basic_block bb)
   /* BB has just changed partition; repair the crossing marks on all of
      its incident edges.  */
1394 edge e;
1395 edge_iterator ei;
1397 /* This is called when a hot bb is found to now be dominated
1398 by a cold bb and therefore needs to become cold. Therefore,
1399 its preds will no longer be region crossing. Any non-dominating
1400 preds that were previously hot would also have become cold
1401 in the caller for the same region. Any preds that were previously
1402 region-crossing will be adjusted in fixup_partition_crossing. */
1403 FOR_EACH_EDGE (e, ei, bb->preds)
1405 fixup_partition_crossing (e);
1408 /* Possibly need to make bb's successor edges region crossing,
1409 or remove stale region crossing. */
1410 FOR_EACH_EDGE (e, ei, bb->succs)
1412 /* We can't have fall-through edges across partition boundaries.
1413 Note that force_nonfallthru will do any necessary partition
1414 boundary fixup by calling fixup_partition_crossing itself. */
1415 if ((e->flags & EDGE_FALLTHRU)
1416 && BB_PARTITION (bb) != BB_PARTITION (e->dest)
1417 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1418 force_nonfallthru (e);
1419 else
1420 fixup_partition_crossing (e);
1424 /* Attempt to change code to redirect edge E to TARGET. Don't do that on
1425 expense of adding new instructions or reordering basic blocks.
1427 Function can be also called with edge destination equivalent to the TARGET.
1428 Then it should try the simplifications and do nothing if none is possible.
1430 Return edge representing the branch if transformation succeeded. Return NULL
1431 on failure.
1432 We still return NULL in case E already destinated TARGET and we didn't
1433 managed to simplify instruction stream. */
1435 static edge
1436 rtl_redirect_edge_and_branch (edge e, basic_block target)
1438 edge ret;
1439 basic_block src = e->src;
1440 basic_block dest = e->dest;
   /* Abnormal-call and EH edges cannot be redirected here.  */
1442 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
1443 return NULL;
1445 if (dest == target)
1446 return e;
   /* First try the cheap simplification done by
      try_redirect_by_replacing_jump.  */
1448 if ((ret = try_redirect_by_replacing_jump (e, target, false)) != NULL)
1450 df_set_bb_dirty (src);
1451 fixup_partition_crossing (ret);
1452 return ret;
   /* Otherwise patch the existing jump instruction in place.  */
1455 ret = redirect_branch_edge (e, target);
1456 if (!ret)
1457 return NULL;
1459 df_set_bb_dirty (src);
1460 fixup_partition_crossing (ret);
1461 return ret;
1464 /* Emit a barrier after BB, into the footer if we are in CFGLAYOUT mode. */
1466 void
1467 emit_barrier_after_bb (basic_block bb)
1469 rtx_barrier *barrier = emit_barrier_after (BB_END (bb));
1470 gcc_assert (current_ir_type () == IR_RTL_CFGRTL
1471 || current_ir_type () == IR_RTL_CFGLAYOUT);
1472 if (current_ir_type () == IR_RTL_CFGLAYOUT)
   /* In cfglayout mode barriers live in the footer chain: detach the
      barrier from the insn stream and append it there.  */
1474 rtx_insn *insn = unlink_insn_chain (barrier, barrier);
1476 if (BB_FOOTER (bb))
1478 rtx_insn *footer_tail = BB_FOOTER (bb);
         /* Find the end of the existing footer chain.  */
1480 while (NEXT_INSN (footer_tail))
1481 footer_tail = NEXT_INSN (footer_tail);
         /* Append only if the footer does not already end in a barrier.  */
1482 if (!BARRIER_P (footer_tail))
1484 SET_NEXT_INSN (footer_tail) = insn;
1485 SET_PREV_INSN (insn) = footer_tail;
1488 else
1489 BB_FOOTER (bb) = insn;
1493 /* Like force_nonfallthru below, but additionally performs redirection
1494 Used by redirect_edge_and_branch_force. JUMP_LABEL is used only
1495 when redirecting to the EXIT_BLOCK, it is either ret_rtx or
1496 simple_return_rtx, indicating which kind of returnjump to create.
1497 It should be NULL otherwise. */
1499 basic_block
1500 force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
1502 basic_block jump_block, new_bb = NULL, src = e->src;
1503 rtx note;
1504 edge new_edge;
1505 int abnormal_edge_flags = 0;
1506 bool asm_goto_edge = false;
1507 int loc;
1509 /* In the case the last instruction is conditional jump to the next
1510 instruction, first redirect the jump itself and then continue
1511 by creating a basic block afterwards to redirect fallthru edge. */
1512 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
1513 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1514 && any_condjump_p (BB_END (e->src))
1515 && JUMP_LABEL (BB_END (e->src)) == BB_HEAD (e->dest))
1517 rtx note;
1518 edge b = unchecked_make_edge (e->src, target, 0);
1519 bool redirected;
1521 redirected = redirect_jump (as_a <rtx_jump_insn *> (BB_END (e->src)),
1522 block_label (target), 0);
1523 gcc_assert (redirected);
1525 note = find_reg_note (BB_END (e->src), REG_BR_PROB, NULL_RTX);
1526 if (note)
1528 int prob = XINT (note, 0);
1530 b->probability = prob;
1531 /* Update this to use GCOV_COMPUTE_SCALE. */
1532 b->count = e->count * prob / REG_BR_PROB_BASE;
         /* NOTE(review): the next statement subtracts e->probability from
            itself, i.e. zeroes it, which makes the `< 0' clamp below dead.
            Presumably `e->probability -= b->probability' was intended —
            verify against upstream before changing.  */
1533 e->probability -= e->probability;
1534 e->count -= b->count;
1535 if (e->probability < 0)
1536 e->probability = 0;
1537 if (e->count < 0)
1538 e->count = 0;
1542 if (e->flags & EDGE_ABNORMAL)
1544 /* Irritating special case - fallthru edge to the same block as abnormal
1545 edge.
1546 We can't redirect abnormal edge, but we still can split the fallthru
1547 one and create separate abnormal edge to original destination.
1548 This allows bb-reorder to make such edge non-fallthru. */
1549 gcc_assert (e->dest == target);
1550 abnormal_edge_flags = e->flags & ~EDGE_FALLTHRU;
1551 e->flags &= EDGE_FALLTHRU;
1553 else
1555 gcc_assert (e->flags & EDGE_FALLTHRU);
1556 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1558 /* We can't redirect the entry block. Create an empty block
1559 at the start of the function which we use to add the new
1560 jump. */
1561 edge tmp;
1562 edge_iterator ei;
1563 bool found = false;
1565 basic_block bb = create_basic_block (BB_HEAD (e->dest), NULL,
1566 ENTRY_BLOCK_PTR_FOR_FN (cfun));
1568 /* Change the existing edge's source to be the new block, and add
1569 a new edge from the entry block to the new block. */
1570 e->src = bb;
1571 for (ei = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);
1572 (tmp = ei_safe_edge (ei)); )
1574 if (tmp == e)
1576 ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs->unordered_remove (ei.index);
1577 found = true;
1578 break;
1580 else
1581 ei_next (&ei);
1584 gcc_assert (found);
1586 vec_safe_push (bb->succs, e);
1587 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb,
1588 EDGE_FALLTHRU);
1592 /* If e->src ends with asm goto, see if any of the ASM_OPERANDS_LABELs
1593 don't point to the target or fallthru label. */
1594 if (JUMP_P (BB_END (e->src))
1595 && target != EXIT_BLOCK_PTR_FOR_FN (cfun)
1596 && (e->flags & EDGE_FALLTHRU)
1597 && (note = extract_asm_operands (PATTERN (BB_END (e->src)))))
1599 int i, n = ASM_OPERANDS_LABEL_LENGTH (note);
1600 bool adjust_jump_target = false;
1602 for (i = 0; i < n; ++i)
1604 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (e->dest))
1606 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))--;
1607 XEXP (ASM_OPERANDS_LABEL (note, i), 0) = block_label (target);
1608 LABEL_NUSES (XEXP (ASM_OPERANDS_LABEL (note, i), 0))++;
1609 adjust_jump_target = true;
1611 if (XEXP (ASM_OPERANDS_LABEL (note, i), 0) == BB_HEAD (target))
1612 asm_goto_edge = true;
     /* Keep JUMP_LABEL and the REG_LABEL_* notes of the asm goto in sync
        with the label operands rewritten above.  */
1614 if (adjust_jump_target)
1616 rtx_insn *insn = BB_END (e->src);
1617 rtx note;
1618 rtx_insn *old_label = BB_HEAD (e->dest);
1619 rtx_insn *new_label = BB_HEAD (target);
1621 if (JUMP_LABEL (insn) == old_label)
1623 JUMP_LABEL (insn) = new_label;
1624 note = find_reg_note (insn, REG_LABEL_TARGET, new_label);
1625 if (note)
1626 remove_note (insn, note);
1628 else
1630 note = find_reg_note (insn, REG_LABEL_TARGET, old_label);
1631 if (note)
1632 remove_note (insn, note);
1633 if (JUMP_LABEL (insn) != new_label
1634 && !find_reg_note (insn, REG_LABEL_TARGET, new_label))
1635 add_reg_note (insn, REG_LABEL_TARGET, new_label);
1637 while ((note = find_reg_note (insn, REG_LABEL_OPERAND, old_label))
1638 != NULL_RTX)
1639 XEXP (note, 0) = new_label;
   /* We need a fresh block to hold the new jump when the source has other
      successors, carries an abnormal edge, or an asm goto also targets
      TARGET's label; otherwise the jump can go into e->src itself.  */
1643 if (EDGE_COUNT (e->src->succs) >= 2 || abnormal_edge_flags || asm_goto_edge)
1645 rtx_insn *new_head;
1646 gcov_type count = e->count;
1647 int probability = e->probability;
1648 /* Create the new structures. */
1650 /* If the old block ended with a tablejump, skip its table
1651 by searching forward from there. Otherwise start searching
1652 forward from the last instruction of the old block. */
1653 rtx_jump_table_data *table;
1654 if (tablejump_p (BB_END (e->src), NULL, &table))
1655 new_head = table;
1656 else
1657 new_head = BB_END (e->src);
1658 new_head = NEXT_INSN (new_head);
1660 jump_block = create_basic_block (new_head, NULL, e->src);
1661 jump_block->count = count;
1662 jump_block->frequency = EDGE_FREQUENCY (e);
1664 /* Make sure new block ends up in correct hot/cold section. */
1666 BB_COPY_PARTITION (jump_block, e->src);
1668 /* Wire edge in. */
1669 new_edge = make_edge (e->src, jump_block, EDGE_FALLTHRU);
1670 new_edge->probability = probability;
1671 new_edge->count = count;
1673 /* Redirect old edge. */
1674 redirect_edge_pred (e, jump_block);
1675 e->probability = REG_BR_PROB_BASE;
1677 /* If e->src was previously region crossing, it no longer is
1678 and the reg crossing note should be removed. */
1679 fixup_partition_crossing (new_edge);
1681 /* If asm goto has any label refs to target's label,
1682 add also edge from asm goto bb to target. */
1683 if (asm_goto_edge)
       /* Split counts/frequencies evenly between the fallthru edge and
          the explicit asm-goto edge added below.  */
1685 new_edge->probability /= 2;
1686 new_edge->count /= 2;
1687 jump_block->count /= 2;
1688 jump_block->frequency /= 2;
1689 new_edge = make_edge (new_edge->src, target,
1690 e->flags & ~EDGE_FALLTHRU);
1691 new_edge->probability = probability - probability / 2;
1692 new_edge->count = count - count / 2;
1695 new_bb = jump_block;
1697 else
1698 jump_block = e->src;
1700 loc = e->goto_locus;
1701 e->flags &= ~EDGE_FALLTHRU;
   /* Emit the actual jump: a (simple) return when targeting the exit
      block, otherwise a jump to TARGET's label.  */
1702 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
1704 if (jump_label == ret_rtx)
1706 if (!HAVE_return)
1707 gcc_unreachable ();
1709 emit_jump_insn_after_setloc (gen_return (), BB_END (jump_block), loc);
1711 else
1713 gcc_assert (jump_label == simple_return_rtx);
1714 if (!HAVE_simple_return)
1715 gcc_unreachable ();
1717 emit_jump_insn_after_setloc (gen_simple_return (),
1718 BB_END (jump_block), loc);
1720 set_return_jump_label (BB_END (jump_block));
1722 else
1724 rtx_code_label *label = block_label (target);
1725 emit_jump_insn_after_setloc (gen_jump (label), BB_END (jump_block), loc);
1726 JUMP_LABEL (BB_END (jump_block)) = label;
1727 LABEL_NUSES (label)++;
1730 /* We might be in cfg layout mode, and if so, the following routine will
1731 insert the barrier correctly. */
1732 emit_barrier_after_bb (jump_block);
1733 redirect_edge_succ_nodup (e, target);
1735 if (abnormal_edge_flags)
1736 make_edge (src, target, abnormal_edge_flags);
1738 df_mark_solutions_dirty ();
1739 fixup_partition_crossing (e);
1740 return new_bb;
1743 /* Edge E is assumed to be fallthru edge. Emit needed jump instruction
1744 (and possibly create new basic block) to make edge non-fallthru.
1745 Return newly created BB or NULL if none. */
1747 static basic_block
1748 rtl_force_nonfallthru (edge e)
1750 return force_nonfallthru_and_redirect (e, e->dest, NULL_RTX);
1753 /* Redirect edge even at the expense of creating new jump insn or
1754 basic block. Return new basic block if created, NULL otherwise.
1755 Conversion must be possible. */
1757 static basic_block
1758 rtl_redirect_edge_and_branch_force (edge e, basic_block target)
1760 if (redirect_edge_and_branch (e, target)
1761 || e->dest == target)
1762 return NULL;
1764 /* In case the edge redirection failed, try to force it to be non-fallthru
1765 and redirect newly created simplejump. */
1766 df_set_bb_dirty (e->src);
1767 return force_nonfallthru_and_redirect (e, target, NULL_RTX);
1770 /* The given edge should potentially be a fallthru edge. If that is in
1771 fact true, delete the jump and barriers that are in the way. */
1773 static void
1774 rtl_tidy_fallthru_edge (edge e)
1776 rtx_insn *q;
1777 basic_block b = e->src, c = b->next_bb;
1779 /* ??? In a late-running flow pass, other folks may have deleted basic
1780 blocks by nopping out blocks, leaving multiple BARRIERs between here
1781 and the target label. They ought to be chastised and fixed.
1783 We can also wind up with a sequence of undeletable labels between
1784 one block and the next.
1786 So search through a sequence of barriers, labels, and notes for
1787 the head of block C and assert that we really do fall through. */
1789 for (q = NEXT_INSN (BB_END (b)); q != BB_HEAD (c); q = NEXT_INSN (q))
1790 if (INSN_P (q))
1791 return;
1793 /* Remove what will soon cease being the jump insn from the source block.
1794 If block B consisted only of this single jump, turn it into a deleted
1795 note. */
1796 q = BB_END (b);
1797 if (JUMP_P (q)
1798 && onlyjump_p (q)
1799 && (any_uncondjump_p (q)
1800 || single_succ_p (b)))
1802 rtx label;
1803 rtx_jump_table_data *table;
1805 if (tablejump_p (q, &label, &table))
1807 /* The label is likely mentioned in some instruction before
1808 the tablejump and might not be DCEd, so turn it into
1809 a note instead and move before the tablejump that is going to
1810 be deleted. */
1811 const char *name = LABEL_NAME (label);
1812 PUT_CODE (label, NOTE);
1813 NOTE_KIND (label) = NOTE_INSN_DELETED_LABEL;
1814 NOTE_DELETED_LABEL_NAME (label) = name;
1815 rtx_insn *lab = safe_as_a <rtx_insn *> (label);
1816 reorder_insns (lab, lab, PREV_INSN (q));
1817 delete_insn (table);
1820 /* If this was a conditional jump, we need to also delete
1821 the insn that set cc0. */
1822 if (HAVE_cc0 && any_condjump_p (q) && only_sets_cc0_p (PREV_INSN (q)))
1823 q = PREV_INSN (q);
     /* Back up over the jump itself so the chain deletion below removes
        it as well.  */
1825 q = PREV_INSN (q);
1828 /* Selectively unlink the sequence. */
1829 if (q != PREV_INSN (BB_HEAD (c)))
1830 delete_insn_chain (NEXT_INSN (q), PREV_INSN (BB_HEAD (c)), false);
   /* B now falls straight through to C.  */
1832 e->flags |= EDGE_FALLTHRU;
1835 /* Should move basic block BB after basic block AFTER. NIY. */
1837 static bool
1838 rtl_move_block_after (basic_block bb ATTRIBUTE_UNUSED,
1839 basic_block after ATTRIBUTE_UNUSED)
1841 return false;
1844 /* Locate the last bb in the same partition as START_BB. */
1846 static basic_block
1847 last_bb_in_partition (basic_block start_bb)
1849 basic_block bb;
1850 FOR_BB_BETWEEN (bb, start_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
1852 if (BB_PARTITION (start_bb) != BB_PARTITION (bb->next_bb))
1853 return bb;
1855 /* Return bb before the exit block. */
1856 return bb->prev_bb;
1859 /* Split a (typically critical) edge. Return the new block.
1860 The edge must not be abnormal.
1862 ??? The code generally expects to be called on critical edges.
1863 The case of a block ending in an unconditional jump to a
1864 block with multiple predecessors is not handled optimally. */
1866 static basic_block
1867 rtl_split_edge (edge edge_in)
1869 basic_block bb, new_bb;
1870 rtx_insn *before;
1872 /* Abnormal edges cannot be split. */
1873 gcc_assert (!(edge_in->flags & EDGE_ABNORMAL));
1875 /* We are going to place the new block in front of edge destination.
1876 Avoid existence of fallthru predecessors. */
1877 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1879 edge e = find_fallthru_edge (edge_in->dest->preds);
1881 if (e)
1882 force_nonfallthru (e);
1885 /* Create the basic block note. */
1886 if (edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1887 before = BB_HEAD (edge_in->dest);
1888 else
1889 before = NULL;
1891 /* If this is a fall through edge to the exit block, the blocks might be
1892 not adjacent, and the right place is after the source. */
1893 if ((edge_in->flags & EDGE_FALLTHRU)
1894 && edge_in->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
1896 before = NEXT_INSN (BB_END (edge_in->src));
1897 bb = create_basic_block (before, NULL, edge_in->src);
1898 BB_COPY_PARTITION (bb, edge_in->src);
1900 else
1902 if (edge_in->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1904 bb = create_basic_block (before, NULL, edge_in->dest->prev_bb);
1905 BB_COPY_PARTITION (bb, edge_in->dest);
1907 else
1909 basic_block after = edge_in->dest->prev_bb;
1910 /* If this is post-bb reordering, and the edge crosses a partition
1911 boundary, the new block needs to be inserted in the bb chain
1912 at the end of the src partition (since we put the new bb into
1913 that partition, see below). Otherwise we may end up creating
1914 an extra partition crossing in the chain, which is illegal.
1915 It can't go after the src, because src may have a fall-through
1916 to a different block. */
1917 if (crtl->bb_reorder_complete
1918 && (edge_in->flags & EDGE_CROSSING))
1920 after = last_bb_in_partition (edge_in->src);
1921 before = get_last_bb_insn (after);
1922 /* The instruction following the last bb in partition should
1923 be a barrier, since it cannot end in a fall-through. */
1924 gcc_checking_assert (BARRIER_P (before));
1925 before = NEXT_INSN (before);
1927 bb = create_basic_block (before, NULL, after);
1928 /* Put the split bb into the src partition, to avoid creating
1929 a situation where a cold bb dominates a hot bb, in the case
1930 where src is cold and dest is hot. The src will dominate
1931 the new bb (whereas it might not have dominated dest). */
1932 BB_COPY_PARTITION (bb, edge_in->src);
   /* The new block falls through to the original destination.  */
1936 make_single_succ_edge (bb, edge_in->dest, EDGE_FALLTHRU);
1938 /* Can't allow a region crossing edge to be fallthrough. */
1939 if (BB_PARTITION (bb) != BB_PARTITION (edge_in->dest)
1940 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
1942 new_bb = force_nonfallthru (single_succ_edge (bb));
1943 gcc_assert (!new_bb);
1946 /* For non-fallthru edges, we must adjust the predecessor's
1947 jump instruction to target our new block. */
1948 if ((edge_in->flags & EDGE_FALLTHRU) == 0)
1950 edge redirected = redirect_edge_and_branch (edge_in, bb);
1951 gcc_assert (redirected);
1953 else
1955 if (edge_in->src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
1957 /* For asm goto even splitting of fallthru edge might
1958 need insn patching, as other labels might point to the
1959 old label. */
1960 rtx_insn *last = BB_END (edge_in->src);
1961 if (last
1962 && JUMP_P (last)
1963 && edge_in->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
1964 && extract_asm_operands (PATTERN (last)) != NULL_RTX
1965 && patch_jump_insn (last, before, bb))
1966 df_set_bb_dirty (edge_in->src);
1968 redirect_edge_succ (edge_in, bb);
1971 return bb;
1974 /* Queue instructions for insertion on an edge between two basic blocks.
1975 The new instructions and basic blocks (if any) will not appear in the
1976 CFG until commit_edge_insertions is called. */
1978 void
1979 insert_insn_on_edge (rtx pattern, edge e)
1981 /* We cannot insert instructions on an abnormal critical edge.
1982 It will be easier to find the culprit if we die now. */
1983 gcc_assert (!((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e)));
1985 if (e->insns.r == NULL_RTX)
1986 start_sequence ();
1987 else
1988 push_to_sequence (e->insns.r);
1990 emit_insn (pattern);
1992 e->insns.r = get_insns ();
1993 end_sequence ();
1996 /* Update the CFG for the instructions queued on edge E. */
1998 void
1999 commit_one_edge_insertion (edge e)
2001 rtx_insn *before = NULL, *after = NULL, *insns, *tmp, *last;
2002 basic_block bb;
2004 /* Pull the insns off the edge now since the edge might go away. */
2005 insns = e->insns.r;
2006 e->insns.r = NULL;
2008 /* Figure out where to put these insns. If the destination has
2009 one predecessor, insert there. Except for the exit block. */
2010 if (single_pred_p (e->dest) && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2012 bb = e->dest;
2014 /* Get the location correct wrt a code label, and "nice" wrt
2015 a basic block note, and before everything else. */
2016 tmp = BB_HEAD (bb);
2017 if (LABEL_P (tmp))
2018 tmp = NEXT_INSN (tmp);
2019 if (NOTE_INSN_BASIC_BLOCK_P (tmp))
2020 tmp = NEXT_INSN (tmp);
2021 if (tmp == BB_HEAD (bb))
2022 before = tmp;
2023 else if (tmp)
2024 after = PREV_INSN (tmp);
2025 else
2026 after = get_last_insn ();
2029 /* If the source has one successor and the edge is not abnormal,
2030 insert there. Except for the entry block.
2031 Don't do this if the predecessor ends in a jump other than
2032 unconditional simple jump. E.g. for asm goto that points all
2033 its labels at the fallthru basic block, we can't insert instructions
2034 before the asm goto, as the asm goto can have various of side effects,
2035 and can't emit instructions after the asm goto, as it must end
2036 the basic block. */
2037 else if ((e->flags & EDGE_ABNORMAL) == 0
2038 && single_succ_p (e->src)
2039 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2040 && (!JUMP_P (BB_END (e->src))
2041 || simplejump_p (BB_END (e->src))))
2043 bb = e->src;
2045 /* It is possible to have a non-simple jump here. Consider a target
2046 where some forms of unconditional jumps clobber a register. This
2047 happens on the fr30 for example.
2049 We know this block has a single successor, so we can just emit
2050 the queued insns before the jump. */
2051 if (JUMP_P (BB_END (bb)))
2052 before = BB_END (bb);
2053 else
2055 /* We'd better be fallthru, or we've lost track of what's what. */
2056 gcc_assert (e->flags & EDGE_FALLTHRU);
2058 after = BB_END (bb);
2062 /* Otherwise we must split the edge. */
2063 else
2065 bb = split_edge (e);
2067 /* If E crossed a partition boundary, we needed to make bb end in
2068 a region-crossing jump, even though it was originally fallthru. */
2069 if (JUMP_P (BB_END (bb)))
2070 before = BB_END (bb);
2071 else
2072 after = BB_END (bb);
2075 /* Now that we've found the spot, do the insertion. */
2076 if (before)
2078 emit_insn_before_noloc (insns, before, bb);
2079 last = prev_nonnote_insn (before);
2081 else
2082 last = emit_insn_after_noloc (insns, after, bb);
2084 if (returnjump_p (last))
2086 /* ??? Remove all outgoing edges from BB and add one for EXIT.
2087 This is not currently a problem because this only happens
2088 for the (single) epilogue, which already has a fallthru edge
2089 to EXIT. */
2091 e = single_succ_edge (bb);
2092 gcc_assert (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
2093 && single_succ_p (bb) && (e->flags & EDGE_FALLTHRU));
2095 e->flags &= ~EDGE_FALLTHRU;
2096 emit_barrier_after (last);
2098 if (before)
2099 delete_insn (before);
   /* Otherwise the inserted sequence must not end in a jump.  */
2101 else
2102 gcc_assert (!JUMP_P (last));
2105 /* Update the CFG for all queued instructions. */
2107 void
2108 commit_edge_insertions (void)
2110 basic_block bb;
2112 /* Optimization passes that invoke this routine can cause hot blocks
2113 previously reached by both hot and cold blocks to become dominated only
2114 by cold blocks. This will cause the verification below to fail,
2115 and lead to now cold code in the hot section. In some cases this
2116 may only be visible after newly unreachable blocks are deleted,
2117 which will be done by fixup_partitions. */
2118 fixup_partitions ();
2120 #ifdef ENABLE_CHECKING
2121 verify_flow_info ();
2122 #endif
   /* Walk every block, including the entry block, and commit any insns
      queued on its outgoing edges.  */
2124 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
2125 EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
2127 edge e;
2128 edge_iterator ei;
2130 FOR_EACH_EDGE (e, ei, bb->succs)
2131 if (e->insns.r)
2132 commit_one_edge_insertion (e);
2137 /* Print out RTL-specific basic block information (live information
2138 at start and end with TDF_DETAILS). FLAGS are the TDF_* masks
2139 documented in dumpfile.h. */
2141 static void
2142 rtl_dump_bb (FILE *outf, basic_block bb, int indent, int flags)
2144 rtx_insn *insn;
2145 rtx_insn *last;
2146 char *s_indent;
   /* Build the indentation prefix.  */
2148 s_indent = (char *) alloca ((size_t) indent + 1);
2149 memset (s_indent, ' ', (size_t) indent);
2150 s_indent[indent] = '\0';
2152 if (df && (flags & TDF_DETAILS))
2154 df_dump_top (bb, outf);
2155 putc ('\n', outf);
   /* The entry and exit blocks carry no insn chain to print.  */
2158 if (bb->index != ENTRY_BLOCK && bb->index != EXIT_BLOCK)
2159 for (insn = BB_HEAD (bb), last = NEXT_INSN (BB_END (bb)); insn != last;
2160 insn = NEXT_INSN (insn))
2162 if (flags & TDF_DETAILS)
2163 df_dump_insn_top (insn, outf);
2164 if (! (flags & TDF_SLIM))
2165 print_rtl_single (outf, insn);
2166 else
2167 dump_insn_slim (outf, insn);
2168 if (flags & TDF_DETAILS)
2169 df_dump_insn_bottom (insn, outf);
2172 if (df && (flags & TDF_DETAILS))
2174 df_dump_bottom (bb, outf);
2175 putc ('\n', outf);
2180 /* Like dump_function_to_file, but for RTL. Print out dataflow information
2181 for the start of each basic block. FLAGS are the TDF_* masks documented
2182 in dumpfile.h. */
2184 void
2185 print_rtl_with_bb (FILE *outf, const rtx_insn *rtx_first, int flags)
2187 const rtx_insn *tmp_rtx;
2188 if (rtx_first == 0)
2189 fprintf (outf, "(nil)\n");
2190 else
2192 enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
2193 int max_uid = get_max_uid ();
       /* Per-UID maps: which block starts/ends at each insn, and how many
          blocks claim each insn.  */
2194 basic_block *start = XCNEWVEC (basic_block, max_uid);
2195 basic_block *end = XCNEWVEC (basic_block, max_uid);
2196 enum bb_state *in_bb_p = XCNEWVEC (enum bb_state, max_uid);
2197 basic_block bb;
2199 /* After freeing the CFG, we still have BLOCK_FOR_INSN set on most
2200 insns, but the CFG is not maintained so the basic block info
2201 is not reliable. Therefore it's omitted from the dumps. */
2202 if (! (cfun->curr_properties & PROP_cfg))
2203 flags &= ~TDF_BLOCKS;
2205 if (df)
2206 df_dump_start (outf);
2208 if (flags & TDF_BLOCKS)
2210 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2212 rtx_insn *x;
2214 start[INSN_UID (BB_HEAD (bb))] = bb;
2215 end[INSN_UID (BB_END (bb))] = bb;
2216 for (x = BB_HEAD (bb); x != NULL_RTX; x = NEXT_INSN (x))
2218 enum bb_state state = IN_MULTIPLE_BB;
2220 if (in_bb_p[INSN_UID (x)] == NOT_IN_BB)
2221 state = IN_ONE_BB;
2222 in_bb_p[INSN_UID (x)] = state;
2224 if (x == BB_END (bb))
2225 break;
       /* Now print each insn, with optional per-block banners and df
          details.  */
2230 for (tmp_rtx = rtx_first; NULL != tmp_rtx; tmp_rtx = NEXT_INSN (tmp_rtx))
2232 if (flags & TDF_BLOCKS)
2234 bb = start[INSN_UID (tmp_rtx)];
2235 if (bb != NULL)
2237 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, true, false);
2238 if (df && (flags & TDF_DETAILS))
2239 df_dump_top (bb, outf);
2242 if (in_bb_p[INSN_UID (tmp_rtx)] == NOT_IN_BB
2243 && !NOTE_P (tmp_rtx)
2244 && !BARRIER_P (tmp_rtx))
2245 fprintf (outf, ";; Insn is not within a basic block\n");
2246 else if (in_bb_p[INSN_UID (tmp_rtx)] == IN_MULTIPLE_BB)
2247 fprintf (outf, ";; Insn is in multiple basic blocks\n");
2250 if (flags & TDF_DETAILS)
2251 df_dump_insn_top (tmp_rtx, outf);
2252 if (! (flags & TDF_SLIM))
2253 print_rtl_single (outf, tmp_rtx);
2254 else
2255 dump_insn_slim (outf, tmp_rtx);
2256 if (flags & TDF_DETAILS)
2257 df_dump_insn_bottom (tmp_rtx, outf);
2259 if (flags & TDF_BLOCKS)
2261 bb = end[INSN_UID (tmp_rtx)];
2262 if (bb != NULL)
2264 dump_bb_info (outf, bb, 0, dump_flags | TDF_COMMENT, false, true);
2265 if (df && (flags & TDF_DETAILS))
2266 df_dump_bottom (bb, outf);
2267 putc ('\n', outf);
2272 free (start);
2273 free (end);
2274 free (in_bb_p);
2278 /* Update the branch probability of BB if a REG_BR_PROB is present. */
2280 void
2281 update_br_prob_note (basic_block bb)
2283 rtx note;
2284 if (!JUMP_P (BB_END (bb)))
2285 return;
2286 note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX);
2287 if (!note || XINT (note, 0) == BRANCH_EDGE (bb)->probability)
2288 return;
2289 XINT (note, 0) = BRANCH_EDGE (bb)->probability;
2292 /* Get the last insn associated with block BB (that includes barriers and
2293 tablejumps after BB). */
2294 rtx_insn *
2295 get_last_bb_insn (basic_block bb)
2297 rtx_jump_table_data *table;
2298 rtx_insn *tmp;
2299 rtx_insn *end = BB_END (bb);
2301 /* Include any jump table following the basic block. */
2302 if (tablejump_p (end, NULL, &table))
2303 end = table;
2305 /* Include any barriers that may follow the basic block. */
2306 tmp = next_nonnote_insn_bb (end);
2307 while (tmp && BARRIER_P (tmp))
2309 end = tmp;
2310 tmp = next_nonnote_insn_bb (end);
2313 return end;
2316 /* Sanity check partition hotness to ensure that basic blocks in
2317   the cold partition don't dominate basic blocks in the hot partition.
2318 If FLAG_ONLY is true, report violations as errors. Otherwise
2319 re-mark the dominated blocks as cold, since this is run after
2320 cfg optimizations that may make hot blocks previously reached
2321 by both hot and cold blocks now only reachable along cold paths.
Returns the vector of blocks that were (or, with FLAG_ONLY, would be)
re-marked cold; caller owns the vector. */
2323 static vec<basic_block>
2324 find_partition_fixes (bool flag_only)
2326 basic_block bb;
2327 vec<basic_block> bbs_in_cold_partition = vNULL;
2328 vec<basic_block> bbs_to_fix = vNULL;
2330 /* Callers check this. */
2331 gcc_checking_assert (crtl->has_bb_partition);
/* Seed the worklist with every block currently marked cold.  */
2333 FOR_EACH_BB_FN (bb, cfun)
2334 if ((BB_PARTITION (bb) == BB_COLD_PARTITION))
2335 bbs_in_cold_partition.safe_push (bb);
2337 if (bbs_in_cold_partition.is_empty ())
2338 return vNULL;
/* Dominators may or may not already be available; compute them here
   if needed and free them again before returning.  */
2340 bool dom_calculated_here = !dom_info_available_p (CDI_DOMINATORS);
2342 if (dom_calculated_here)
2343 calculate_dominance_info (CDI_DOMINATORS);
/* Worklist propagation over the dominator tree: anything dominated
   by a cold block must itself be cold.  */
2345 while (! bbs_in_cold_partition.is_empty ())
2347 bb = bbs_in_cold_partition.pop ();
2348 /* Any blocks dominated by a block in the cold section
2349 must also be cold. */
2350 basic_block son;
2351 for (son = first_dom_son (CDI_DOMINATORS, bb);
2352 son;
2353 son = next_dom_son (CDI_DOMINATORS, son))
2355 /* If son is not yet cold, then mark it cold here and
2356 enqueue it for further processing. */
2357 if ((BB_PARTITION (son) != BB_COLD_PARTITION))
2359 if (flag_only)
2360 error ("non-cold basic block %d dominated "
2361 "by a block in the cold partition (%d)", son->index, bb->index);
2362 else
2363 BB_SET_PARTITION (son, BB_COLD_PARTITION);
2364 bbs_to_fix.safe_push (son);
2365 bbs_in_cold_partition.safe_push (son);
2370 if (dom_calculated_here)
2371 free_dominance_info (CDI_DOMINATORS);
2373 return bbs_to_fix;
2376 /* Perform cleanup on the hot/cold bb partitioning after optimization
2377 passes that modify the cfg.
No-op unless the function actually has a hot/cold partition. */
2379 void
2380 fixup_partitions (void)
2382 basic_block bb;
2384 if (!crtl->has_bb_partition)
2385 return;
2387 /* Delete any blocks that became unreachable and weren't
2388 already cleaned up, for example during edge forwarding
2389 and convert_jumps_to_returns. This will expose more
2390 opportunities for fixing the partition boundaries here.
2391 Also, the calculation of the dominance graph during verification
2392 will assert if there are unreachable nodes. */
2393 delete_unreachable_blocks ();
2395 /* If there are partitions, do a sanity check on them: A basic block in
2396   a cold partition cannot dominate a basic block in a hot partition.
2397 Fixup any that now violate this requirement, as a result of edge
2398 forwarding and unreachable block deletion.  */
/* find_partition_fixes (false) re-marks offenders cold and returns them.  */
2399 vec<basic_block> bbs_to_fix = find_partition_fixes (false);
2401 /* Do the partition fixup after all necessary blocks have been converted to
2402 cold, so that we only update the region crossings the minimum number of
2403 places, which can require forcing edges to be non fallthru. */
2404 while (! bbs_to_fix.is_empty ())
2406 bb = bbs_to_fix.pop ();
2407 fixup_new_cold_bb (bb);
2411 /* Verify, in the basic block chain, that there is at most one switch
2412 between hot/cold partitions. This condition will not be true until
2413 after reorder_basic_blocks is called.
Returns nonzero on error. */
2415 static int
2416 verify_hot_cold_block_grouping (void)
2418 basic_block bb;
2419 int err = 0;
/* True once one hot<->cold transition has been seen; a second
   transition is an error.  */
2420 bool switched_sections = false;
2421 int current_partition = BB_UNPARTITIONED;
2423 /* Even after bb reordering is complete, we go into cfglayout mode
2424 again (in compgoto). Ensure we don't call this before going back
2425 into linearized RTL when any layout fixes would have been committed. */
2426 if (!crtl->bb_reorder_complete
2427 || current_ir_type () != IR_RTL_CFGRTL)
2428 return err;
/* Walk blocks in chain order, tracking the partition of the previous
   block and counting partition changes.  */
2430 FOR_EACH_BB_FN (bb, cfun)
2432 if (current_partition != BB_UNPARTITIONED
2433 && BB_PARTITION (bb) != current_partition)
2435 if (switched_sections)
2437 error ("multiple hot/cold transitions found (bb %i)",
2438 bb->index);
2439 err = 1;
2441 else
2442 switched_sections = true;
/* Any partitioned block implies the function-level flag must be set.  */
2444 if (!crtl->has_bb_partition)
2445 error ("partition found but function partition flag not set");
2447 current_partition = BB_PARTITION (bb);
2450 return err;
2454 /* Perform several checks on the edges out of each block, such as
2455 the consistency of the branch probabilities, the correctness
2456 of hot/cold partition crossing edges, and the number of expected
2457 successor edges. Also verify that the dominance relationship
2458 between hot/cold blocks is sane.
Returns nonzero on error. */
2460 static int
2461 rtl_verify_edges (void)
2463 int err = 0;
2464 basic_block bb;
2466 FOR_EACH_BB_REVERSE_FN (bb, cfun)
/* Counters for each category of outgoing edge; checked against the
   kind of insn that ends the block after the edge walk below.  */
2468 int n_fallthru = 0, n_branch = 0, n_abnormal_call = 0, n_sibcall = 0;
2469 int n_eh = 0, n_abnormal = 0;
2470 edge e, fallthru = NULL;
2471 edge_iterator ei;
2472 rtx note;
2473 bool has_crossing_edge = false;
/* Check the REG_BR_PROB note on a conditional jump against the
   probability stored on the CFG branch edge.  */
2475 if (JUMP_P (BB_END (bb))
2476 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, NULL_RTX))
2477 && EDGE_COUNT (bb->succs) >= 2
2478 && any_condjump_p (BB_END (bb)))
2480 if (XINT (note, 0) != BRANCH_EDGE (bb)->probability
2481 && profile_status_for_fn (cfun) != PROFILE_ABSENT)
2483 error ("verify_flow_info: REG_BR_PROB does not match cfg %i %i",
2484 XINT (note, 0), BRANCH_EDGE (bb)->probability);
2485 err = 1;
/* Classify and sanity-check each successor edge.  */
2489 FOR_EACH_EDGE (e, ei, bb->succs)
2491 bool is_crossing;
2493 if (e->flags & EDGE_FALLTHRU)
2494 n_fallthru++, fallthru = e;
/* An edge "crosses" when its endpoints are in different hot/cold
   partitions (entry/exit blocks excluded).  The EDGE_CROSSING flag
   must agree with this computed property.  */
2496 is_crossing = (BB_PARTITION (e->src) != BB_PARTITION (e->dest)
2497 && e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2498 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun));
2499 has_crossing_edge |= is_crossing;
2500 if (e->flags & EDGE_CROSSING)
2502 if (!is_crossing)
2504 error ("EDGE_CROSSING incorrectly set across same section");
2505 err = 1;
/* Fallthru and EH edges may never cross a section boundary.  */
2507 if (e->flags & EDGE_FALLTHRU)
2509 error ("fallthru edge crosses section boundary in bb %i",
2510 e->src->index);
2511 err = 1;
2513 if (e->flags & EDGE_EH)
2515 error ("EH edge crosses section boundary in bb %i",
2516 e->src->index);
2517 err = 1;
2519 if (JUMP_P (BB_END (bb)) && !CROSSING_JUMP_P (BB_END (bb)))
2521 error ("No region crossing jump at section boundary in bb %i",
2522 bb->index);
2523 err = 1;
2526 else if (is_crossing)
2528 error ("EDGE_CROSSING missing across section boundary");
2529 err = 1;
/* An edge with none of the "annotation-only" flags is a real
   branch edge.  */
2532 if ((e->flags & ~(EDGE_DFS_BACK
2533 | EDGE_CAN_FALLTHRU
2534 | EDGE_IRREDUCIBLE_LOOP
2535 | EDGE_LOOP_EXIT
2536 | EDGE_CROSSING
2537 | EDGE_PRESERVE)) == 0)
2538 n_branch++;
2540 if (e->flags & EDGE_ABNORMAL_CALL)
2541 n_abnormal_call++;
2543 if (e->flags & EDGE_SIBCALL)
2544 n_sibcall++;
2546 if (e->flags & EDGE_EH)
2547 n_eh++;
2549 if (e->flags & EDGE_ABNORMAL)
2550 n_abnormal++;
/* Conversely, a jump marked CROSSING_JUMP_P must actually have a
   crossing edge.  */
2553 if (!has_crossing_edge
2554 && JUMP_P (BB_END (bb))
2555 && CROSSING_JUMP_P (BB_END (bb)))
2557 print_rtl_with_bb (stderr, get_insns (), TDF_RTL | TDF_BLOCKS | TDF_DETAILS);
2558 error ("Region crossing jump across same section in bb %i",
2559 bb->index);
2560 err = 1;
/* Now validate edge counts against the block-ending insn.  */
2563 if (n_eh && !find_reg_note (BB_END (bb), REG_EH_REGION, NULL_RTX))
2565 error ("missing REG_EH_REGION note at the end of bb %i", bb->index);
2566 err = 1;
2568 if (n_eh > 1)
2570 error ("too many exception handling edges in bb %i", bb->index);
2571 err = 1;
2573 if (n_branch
2574 && (!JUMP_P (BB_END (bb))
2575 || (n_branch > 1 && (any_uncondjump_p (BB_END (bb))
2576 || any_condjump_p (BB_END (bb))))))
2578 error ("too many outgoing branch edges from bb %i", bb->index);
2579 err = 1;
2581 if (n_fallthru && any_uncondjump_p (BB_END (bb)))
2583 error ("fallthru edge after unconditional jump in bb %i", bb->index);
2584 err = 1;
2586 if (n_branch != 1 && any_uncondjump_p (BB_END (bb)))
2588 error ("wrong number of branch edges after unconditional jump"
2589 " in bb %i", bb->index);
2590 err = 1;
/* NOTE(review): this dereferences FALLTHRU, which is only set when a
   fallthru edge was seen above; a condjump is expected to have one.  */
2592 if (n_branch != 1 && any_condjump_p (BB_END (bb))
2593 && JUMP_LABEL (BB_END (bb)) != BB_HEAD (fallthru->dest))
2595 error ("wrong amount of branch edges after conditional jump"
2596 " in bb %i", bb->index);
2597 err = 1;
2599 if (n_abnormal_call && !CALL_P (BB_END (bb)))
2601 error ("abnormal call edges for non-call insn in bb %i", bb->index);
2602 err = 1;
2604 if (n_sibcall && !CALL_P (BB_END (bb)))
2606 error ("sibcall edges for non-call insn in bb %i", bb->index);
2607 err = 1;
2609 if (n_abnormal > n_eh
2610 && !(CALL_P (BB_END (bb))
2611 && n_abnormal == n_abnormal_call + n_sibcall)
2612 && (!JUMP_P (BB_END (bb))
2613 || any_condjump_p (BB_END (bb))
2614 || any_uncondjump_p (BB_END (bb))))
2616 error ("abnormal edges for no purpose in bb %i", bb->index);
2617 err = 1;
2621 /* If there are partitions, do a sanity check on them: A basic block in
2622   a cold partition cannot dominate a basic block in a hot partition.  */
2623 if (crtl->has_bb_partition && !err)
2625 vec<basic_block> bbs_to_fix = find_partition_fixes (true);
2626 err = !bbs_to_fix.is_empty ();
2629 /* Clean up. */
2630 return err;
2633 /* Checks on the instructions within blocks. Currently checks that each
2634 block starts with a basic block note, and that basic block notes and
2635 control flow jumps are not found in the middle of the block.
Returns nonzero on error; aborts via fatal_insn on a flow-control insn
found inside a block. */
2637 static int
2638 rtl_verify_bb_insns (void)
2640 rtx_insn *x;
2641 int err = 0;
2642 basic_block bb;
2644 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2646 /* Now check the header of basic
2647 block. It ought to contain optional CODE_LABEL followed
2648 by NOTE_BASIC_BLOCK. */
2649 x = BB_HEAD (bb);
2650 if (LABEL_P (x)
/* A label alone cannot be a whole block: the NOTE must follow.  */)
2652 if (BB_END (bb) == x)
2654 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2655 bb->index);
2656 err = 1;
2659 x = NEXT_INSN (x);
/* The NOTE_INSN_BASIC_BLOCK must exist and point back at BB.  */
2662 if (!NOTE_INSN_BASIC_BLOCK_P (x) || NOTE_BASIC_BLOCK (x) != bb)
2664 error ("NOTE_INSN_BASIC_BLOCK is missing for block %d",
2665 bb->index);
2666 err = 1;
2669 if (BB_END (bb) == x)
2670 /* Do checks for empty blocks here. */
2672 else
/* Scan the body: no stray block notes, and no control-flow insn
   anywhere before the block's last insn.  */
2673 for (x = NEXT_INSN (x); x; x = NEXT_INSN (x))
2675 if (NOTE_INSN_BASIC_BLOCK_P (x))
2677 error ("NOTE_INSN_BASIC_BLOCK %d in middle of basic block %d",
2678 INSN_UID (x), bb->index);
2679 err = 1;
2682 if (x == BB_END (bb))
2683 break;
2685 if (control_flow_insn_p (x))
2687 error ("in basic block %d:", bb->index);
2688 fatal_insn ("flow control insn inside a basic block", x);
2693 /* Clean up. */
2694 return err;
2697 /* Verify that block pointers for instructions in basic blocks, headers and
2698 footers are set appropriately.
Returns nonzero on error. */
2700 static int
2701 rtl_verify_bb_pointers (void)
2703 int err = 0;
2704 basic_block bb;
2706 /* Check the general integrity of the basic blocks. */
2707 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2709 rtx_insn *insn;
/* Every block must be flagged as carrying RTL.  */
2711 if (!(bb->flags & BB_RTL))
2713 error ("BB_RTL flag not set for block %d", bb->index);
2714 err = 1;
/* Each insn in the block's body must point back at the block.  */
2717 FOR_BB_INSNS (bb, insn)
2718 if (BLOCK_FOR_INSN (insn) != bb)
2720 error ("insn %d basic block pointer is %d, should be %d",
2721 INSN_UID (insn),
2722 BLOCK_FOR_INSN (insn) ? BLOCK_FOR_INSN (insn)->index : 0,
2723 bb->index);
2724 err = 1;
/* Header/footer insns live outside the block proper (cfglayout
   mode) and therefore must have a NULL block pointer; barriers
   are exempt since they never carry one.  */
2727 for (insn = BB_HEADER (bb); insn; insn = NEXT_INSN (insn))
2728 if (!BARRIER_P (insn)
2729 && BLOCK_FOR_INSN (insn) != NULL)
2731 error ("insn %d in header of bb %d has non-NULL basic block",
2732 INSN_UID (insn), bb->index);
2733 err = 1;
2735 for (insn = BB_FOOTER (bb); insn; insn = NEXT_INSN (insn))
2736 if (!BARRIER_P (insn)
2737 && BLOCK_FOR_INSN (insn) != NULL)
2739 error ("insn %d in footer of bb %d has non-NULL basic block",
2740 INSN_UID (insn), bb->index);
2741 err = 1;
2745 /* Clean up. */
2746 return err;
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL.

   Currently it does following checks:

   - overlapping of basic blocks
   - insns with wrong BLOCK_FOR_INSN pointers
   - headers of basic blocks (the NOTE_INSN_BASIC_BLOCK note)
   - tails of basic blocks (ensure that boundary is necessary)
   - scans body of the basic block for JUMP_INSN, CODE_LABEL
     and NOTE_INSN_BASIC_BLOCK
   - verify that no fall_thru edge crosses hot/cold partition boundaries
   - verify that there are no pending RTL branch predictions
   - verify that hot blocks are not dominated by cold blocks

   In future it can be extended check a lot of other stuff as well
   (reachability of basic blocks, life information, etc. etc.).  */

static int
rtl_verify_flow_info_1 (void)
{
  int failures = 0;

  /* Run the sub-verifiers in order; each reports its own errors and
     returns nonzero on failure.  */
  failures |= rtl_verify_bb_pointers ();
  failures |= rtl_verify_bb_insns ();
  failures |= rtl_verify_edges ();

  return failures;
}
2781 /* Walk the instruction chain and verify that bb head/end pointers
2782 are correct, and that instructions are in exactly one bb and have
2783 correct block pointers.
Returns nonzero on error. */
2785 static int
2786 rtl_verify_bb_insn_chain (void)
2788 basic_block bb;
2789 int err = 0;
2790 rtx_insn *x;
/* Walk the insn stream backwards from its tail; LAST_HEAD marks the
   insn just above the most recently verified block.  */
2791 rtx_insn *last_head = get_last_insn ();
2792 basic_block *bb_info;
2793 const int max_uid = get_max_uid ();
/* Per-UID map recording which block (if any) claimed each insn, used
   to detect insns appearing in more than one block.  */
2795 bb_info = XCNEWVEC (basic_block, max_uid);
/* Blocks are visited in reverse so this backward walk over the insn
   chain meets them in order.  */
2797 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2799 rtx_insn *head = BB_HEAD (bb);
2800 rtx_insn *end = BB_END (bb);
2802 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2804 /* Verify the end of the basic block is in the INSN chain. */
2805 if (x == end)
2806 break;
2808 /* And that the code outside of basic blocks has NULL bb field. */
2809 if (!BARRIER_P (x)
2810 && BLOCK_FOR_INSN (x) != NULL)
2812 error ("insn %d outside of basic blocks has non-NULL bb field",
2813 INSN_UID (x));
2814 err = 1;
2818 if (!x)
2820 error ("end insn %d for block %d not found in the insn stream",
2821 INSN_UID (end), bb->index);
2822 err = 1;
2825 /* Work backwards from the end to the head of the basic block
2826 to verify the head is in the RTL chain. */
2827 for (; x != NULL_RTX; x = PREV_INSN (x))
2829 /* While walking over the insn chain, verify insns appear
2830 in only one basic block. */
2831 if (bb_info[INSN_UID (x)] != NULL)
2833 error ("insn %d is in multiple basic blocks (%d and %d)",
2834 INSN_UID (x), bb->index, bb_info[INSN_UID (x)]->index);
2835 err = 1;
2838 bb_info[INSN_UID (x)] = bb;
2840 if (x == head)
2841 break;
2843 if (!x)
2845 error ("head insn %d for block %d not found in the insn stream",
2846 INSN_UID (head), bb->index);
2847 err = 1;
/* Continue the backward walk just above this block's head.  */
2850 last_head = PREV_INSN (x);
/* Whatever precedes the first block must also be block-less.  */
2853 for (x = last_head; x != NULL_RTX; x = PREV_INSN (x))
2855 /* Check that the code before the first basic block has NULL
2856 bb field. */
2857 if (!BARRIER_P (x)
2858 && BLOCK_FOR_INSN (x) != NULL)
2860 error ("insn %d outside of basic blocks has non-NULL bb field",
2861 INSN_UID (x));
2862 err = 1;
2865 free (bb_info);
2867 return err;
2870 /* Verify that fallthru edges point to adjacent blocks in layout order and
2871 that barriers exist after non-fallthru blocks.
Returns nonzero on error; may abort via fatal_insn. */
2873 static int
2874 rtl_verify_fallthru (void)
2876 basic_block bb;
2877 int err = 0;
2879 FOR_EACH_BB_REVERSE_FN (bb, cfun)
2881 edge e;
2883 e = find_fallthru_edge (bb->succs);
2884 if (!e)
2886 rtx_insn *insn;
2888 /* Ensure existence of barrier in BB with no fallthru edges. */
/* Scan forward from the block end: a barrier must appear before
   the next block's NOTE_INSN_BASIC_BLOCK (or end of stream).  */
2889 for (insn = NEXT_INSN (BB_END (bb)); ; insn = NEXT_INSN (insn))
2891 if (!insn || NOTE_INSN_BASIC_BLOCK_P (insn))
2893 error ("missing barrier after block %i", bb->index);
2894 err = 1;
2895 break;
2897 if (BARRIER_P (insn))
2898 break;
/* For a real fallthru edge (not involving entry/exit), the target
   must be the next block in layout order, and only notes/labels may
   sit between the two blocks in the insn stream.  */
2901 else if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
2902 && e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
2904 rtx_insn *insn;
2906 if (e->src->next_bb != e->dest)
2908 error
2909 ("verify_flow_info: Incorrect blocks for fallthru %i->%i",
2910 e->src->index, e->dest->index);
2911 err = 1;
2913 else
2914 for (insn = NEXT_INSN (BB_END (e->src)); insn != BB_HEAD (e->dest);
2915 insn = NEXT_INSN (insn))
2916 if (BARRIER_P (insn) || INSN_P (insn))
2918 error ("verify_flow_info: Incorrect fallthru %i->%i",
2919 e->src->index, e->dest->index);
2920 fatal_insn ("wrong insn in the fallthru edge", insn);
2921 err = 1;
2926 return err;
2929 /* Verify that blocks are laid out in consecutive order. While walking the
2930 instructions, verify that all expected instructions are inside the basic
2931 blocks, and that all returns are followed by barriers.
Returns nonzero on error; hard failures go through internal_error or
fatal_insn. */
2933 static int
2934 rtl_verify_bb_layout (void)
2936 basic_block bb;
2937 int err = 0;
2938 rtx_insn *x;
2939 int num_bb_notes;
2940 rtx_insn * const rtx_first = get_insns ();
/* CURR_BB is the block we are currently inside of while scanning the
   insn stream, or NULL between blocks.  */
2941 basic_block last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun), curr_bb = NULL;
2943 num_bb_notes = 0;
2944 last_bb_seen = ENTRY_BLOCK_PTR_FOR_FN (cfun);
2946 for (x = rtx_first; x; x = NEXT_INSN (x))
2948 if (NOTE_INSN_BASIC_BLOCK_P (x))
2950 bb = NOTE_BASIC_BLOCK (x);
2952 num_bb_notes++;
/* Block notes must appear in exactly next_bb order.  */
2953 if (bb != last_bb_seen->next_bb)
2954 internal_error ("basic blocks not laid down consecutively");
2956 curr_bb = last_bb_seen = bb;
/* Insns found between blocks may only be barriers, notes, or
   labels (optionally followed by their jump table).  */
2959 if (!curr_bb)
2961 switch (GET_CODE (x))
2963 case BARRIER:
2964 case NOTE:
2965 break;
2967 case CODE_LABEL:
2968 /* An ADDR_VEC is placed outside any basic block. */
2969 if (NEXT_INSN (x)
2970 && JUMP_TABLE_DATA_P (NEXT_INSN (x)))
2971 x = NEXT_INSN (x);
2973 /* But in any case, non-deletable labels can appear anywhere. */
2974 break;
2976 default:
2977 fatal_insn ("insn outside basic block", x);
/* An unconditional return must be immediately followed by a barrier
   (ignoring notes).  */
2981 if (JUMP_P (x)
2982 && returnjump_p (x) && ! condjump_p (x)
2983 && ! (next_nonnote_insn (x) && BARRIER_P (next_nonnote_insn (x))))
2984 fatal_insn ("return not followed by barrier", x);
2986 if (curr_bb && x == BB_END (curr_bb))
2987 curr_bb = NULL;
/* Every real block must have contributed exactly one bb note.  */
2990 if (num_bb_notes != n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS)
2991 internal_error
2992 ("number of bb notes in insn chain (%d) != n_basic_blocks (%d)",
2993 num_bb_notes, n_basic_blocks_for_fn (cfun));
2995 return err;
/* Verify the CFG and RTL consistency common for both underlying RTL and
   cfglayout RTL, plus consistency checks specific to linearized RTL mode.

   Currently it does following checks:
   - all checks of rtl_verify_flow_info_1
   - test head/end pointers
   - check that blocks are laid out in consecutive order
   - check that all insns are in the basic blocks
     (except the switch handling code, barriers and notes)
   - check that all returns are followed by barriers
   - check that all fallthru edge points to the adjacent blocks
   - verify that there is a single hot/cold partition boundary after bbro  */

static int
rtl_verify_flow_info (void)
{
  int failures = 0;

  /* Shared checks first, then the linearized-RTL-only ones.  */
  failures |= rtl_verify_flow_info_1 ();
  failures |= rtl_verify_bb_insn_chain ();
  failures |= rtl_verify_fallthru ();
  failures |= rtl_verify_bb_layout ();
  failures |= verify_hot_cold_block_grouping ();

  return failures;
}
3029 /* Assume that the preceding pass has possibly eliminated jump instructions
3030 or converted the unconditional jumps. Eliminate the edges from CFG.
3031 Return true if any edges are eliminated. */
3033 bool
3034 purge_dead_edges (basic_block bb)
3036 edge e;
3037 rtx_insn *insn = BB_END (bb);
3038 rtx note;
3039 bool purged = false;
3040 bool found;
3041 edge_iterator ei;
/* Skip over trailing debug insns/notes so INSN is the last "real"
   insn of the block.  */
3043 if (DEBUG_INSN_P (insn) && insn != BB_HEAD (bb))
3045 insn = PREV_INSN (insn);
3046 while ((DEBUG_INSN_P (insn) || NOTE_P (insn)) && insn != BB_HEAD (bb));
3048 /* If this instruction cannot trap, remove REG_EH_REGION notes. */
3049 if (NONJUMP_INSN_P (insn)
3050 && (note = find_reg_note (insn, REG_EH_REGION, NULL)))
3052 rtx eqnote;
3054 if (! may_trap_p (PATTERN (insn))
3055 || ((eqnote = find_reg_equal_equiv_note (insn))
3056 && ! may_trap_p (XEXP (eqnote, 0))))
3057 remove_note (insn, note);
3060 /* Cleanup abnormal edges caused by exceptions or non-local gotos. */
3061 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3063 bool remove = false;
3065 /* There are three types of edges we need to handle correctly here: EH
3066 edges, abnormal call EH edges, and abnormal call non-EH edges. The
3067 latter can appear when nonlocal gotos are used. */
3068 if (e->flags & EDGE_ABNORMAL_CALL)
3070 if (!CALL_P (insn))
3071 remove = true;
3072 else if (can_nonlocal_goto (insn))
/* Keep: the call can reach a non-local label.  */
3074 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
/* Keep: EH edge with a genuinely throwing insn.  */
3076 else if (flag_tm && find_reg_note (insn, REG_TM, NULL))
/* Keep: transactional-memory abort edge.  */
3078 else
3079 remove = true;
3081 else if (e->flags & EDGE_EH)
3082 remove = !can_throw_internal (insn);
3084 if (remove)
3086 remove_edge (e);
3087 df_set_bb_dirty (bb);
3088 purged = true;
3090 else
3091 ei_next (&ei);
3094 if (JUMP_P (insn))
3096 rtx note;
3097 edge b,f;
3098 edge_iterator ei;
3100 /* We do care only about conditional jumps and simplejumps. */
3101 if (!any_condjump_p (insn)
3102 && !returnjump_p (insn)
3103 && !simplejump_p (insn))
3104 return purged;
3106 /* Branch probability/prediction notes are defined only for
3107 condjumps. We've possibly turned condjump into simplejump. */
3108 if (simplejump_p (insn))
3110 note = find_reg_note (insn, REG_BR_PROB, NULL);
3111 if (note)
3112 remove_note (insn, note);
3113 while ((note = find_reg_note (insn, REG_BR_PRED, NULL)))
3114 remove_note (insn, note);
/* Walk the successors, keeping only edges justified by the jump.  */
3117 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3119 /* Avoid abnormal flags to leak from computed jumps turned
3120 into simplejumps. */
3122 e->flags &= ~EDGE_ABNORMAL;
3124 /* See if this edge is one we should keep. */
3125 if ((e->flags & EDGE_FALLTHRU) && any_condjump_p (insn))
3126 /* A conditional jump can fall through into the next
3127 block, so we should keep the edge. */
3129 ei_next (&ei);
3130 continue;
3132 else if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
3133 && BB_HEAD (e->dest) == JUMP_LABEL (insn))
3134 /* If the destination block is the target of the jump,
3135 keep the edge. */
3137 ei_next (&ei);
3138 continue;
3140 else if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)
3141 && returnjump_p (insn))
3142 /* If the destination block is the exit block, and this
3143 instruction is a return, then keep the edge. */
3145 ei_next (&ei);
3146 continue;
3148 else if ((e->flags & EDGE_EH) && can_throw_internal (insn))
3149 /* Keep the edges that correspond to exceptions thrown by
3150 this instruction and rematerialize the EDGE_ABNORMAL
3151 flag we just cleared above. */
3153 e->flags |= EDGE_ABNORMAL;
3154 ei_next (&ei);
3155 continue;
3158 /* We do not need this edge. */
3159 df_set_bb_dirty (bb);
3160 purged = true;
3161 remove_edge (e);
/* Nothing left to fix up unless edges were actually removed.  */
3164 if (EDGE_COUNT (bb->succs) == 0 || !purged)
3165 return purged;
3167 if (dump_file)
3168 fprintf (dump_file, "Purged edges from bb %i\n", bb->index);
3170 if (!optimize)
3171 return purged;
3173 /* Redistribute probabilities. */
3174 if (single_succ_p (bb))
3176 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
3177 single_succ_edge (bb)->count = bb->count;
3179 else
3181 note = find_reg_note (insn, REG_BR_PROB, NULL);
3182 if (!note)
3183 return purged;
3185 b = BRANCH_EDGE (bb);
3186 f = FALLTHRU_EDGE (bb);
3187 b->probability = XINT (note, 0);
3188 f->probability = REG_BR_PROB_BASE - b->probability;
3189 /* Update these to use GCOV_COMPUTE_SCALE. */
3190 b->count = bb->count * b->probability / REG_BR_PROB_BASE;
3191 f->count = bb->count * f->probability / REG_BR_PROB_BASE;
3194 return purged;
3196 else if (CALL_P (insn) && SIBLING_CALL_P (insn))
3198 /* First, there should not be any EH or ABCALL edges resulting
3199 from non-local gotos and the like. If there were, we shouldn't
3200 have created the sibcall in the first place. Second, there
3201 should of course never have been a fallthru edge. */
3202 gcc_assert (single_succ_p (bb));
3203 gcc_assert (single_succ_edge (bb)->flags
3204 == (EDGE_SIBCALL | EDGE_ABNORMAL));
3206 return 0;
3209 /* If we don't see a jump insn, we don't know exactly why the block would
3210 have been broken at this point. Look for a simple, non-fallthru edge,
3211 as these are only created by conditional branches. If we find such an
3212 edge we know that there used to be a jump here and can then safely
3213 remove all non-fallthru edges. */
3214 found = false;
3215 FOR_EACH_EDGE (e, ei, bb->succs)
3216 if (! (e->flags & (EDGE_COMPLEX | EDGE_FALLTHRU)))
3218 found = true;
3219 break;
3222 if (!found)
3223 return purged;
3225 /* Remove all but the fake and fallthru edges. The fake edge may be
3226 the only successor for this block in the case of noreturn
3227 calls. */
3228 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3230 if (!(e->flags & (EDGE_FALLTHRU | EDGE_FAKE)))
3232 df_set_bb_dirty (bb);
3233 remove_edge (e);
3234 purged = true;
3236 else
3237 ei_next (&ei);
/* The surviving edge must be unique; give it the whole probability.  */
3240 gcc_assert (single_succ_p (bb));
3242 single_succ_edge (bb)->probability = REG_BR_PROB_BASE;
3243 single_succ_edge (bb)->count = bb->count;
3245 if (dump_file)
3246 fprintf (dump_file, "Purged non-fallthru edges from bb %i\n",
3247 bb->index);
3248 return purged;
3251 /* Search all basic blocks for potentially dead edges and purge them. Return
3252 true if some edge has been eliminated. */
3254 bool
3255 purge_all_dead_edges (void)
3257 int purged = false;
3258 basic_block bb;
3260 FOR_EACH_BB_FN (bb, cfun)
3262 bool purged_here = purge_dead_edges (bb);
3264 purged |= purged_here;
3267 return purged;
3270 /* This is used by a few passes that emit some instructions after abnormal
3271 calls, moving the basic block's end, while they in fact do want to emit
3272 them on the fallthru edge. Look for abnormal call edges, find backward
3273 the call in the block and insert the instructions on the edge instead.
3275 Similarly, handle instructions throwing exceptions internally.
3277 Return true when instructions have been found and inserted on edges. */
3279 bool
3280 fixup_abnormal_edges (void)
3282 bool inserted = false;
3283 basic_block bb;
3285 FOR_EACH_BB_FN (bb, cfun)
3287 edge e;
3288 edge_iterator ei;
3290 /* Look for cases we are interested in - calls or instructions causing
3291 exceptions. */
3292 FOR_EACH_EDGE (e, ei, bb->succs)
3293 if ((e->flags & EDGE_ABNORMAL_CALL)
3294 || ((e->flags & (EDGE_ABNORMAL | EDGE_EH))
3295 == (EDGE_ABNORMAL | EDGE_EH)))
3296 break;
/* Only act when the block end is no longer the abnormal insn itself,
   i.e. something was emitted after the call/throwing insn.  */
3298 if (e && !CALL_P (BB_END (bb)) && !can_throw_internal (BB_END (bb)))
3300 rtx_insn *insn;
3302 /* Get past the new insns generated. Allow notes, as the insns
3303 may be already deleted. */
3304 insn = BB_END (bb);
3305 while ((NONJUMP_INSN_P (insn) || NOTE_P (insn))
3306 && !can_throw_internal (insn)
3307 && insn != BB_HEAD (bb))
3308 insn = PREV_INSN (insn);
3310 if (CALL_P (insn) || can_throw_internal (insn))
3312 rtx_insn *stop, *next;
/* Everything between INSN and the old block end is moved onto
   the fallthru edge.  */
3314 e = find_fallthru_edge (bb->succs);
3316 stop = NEXT_INSN (BB_END (bb));
3317 BB_END (bb) = insn;
3319 for (insn = NEXT_INSN (insn); insn != stop; insn = next)
3321 next = NEXT_INSN (insn);
3322 if (INSN_P (insn))
3324 delete_insn (insn);
3326 /* Sometimes there's still the return value USE.
3327 If it's placed after a trapping call (i.e. that
3328 call is the last insn anyway), we have no fallthru
3329 edge. Simply delete this use and don't try to insert
3330 on the non-existent edge. */
3331 if (GET_CODE (PATTERN (insn)) != USE)
3333 /* We're not deleting it, we're moving it. */
3334 insn->set_undeleted ();
3335 SET_PREV_INSN (insn) = NULL_RTX;
3336 SET_NEXT_INSN (insn) = NULL_RTX;
3338 insert_insn_on_edge (insn, e);
3339 inserted = true;
3342 else if (!BARRIER_P (insn))
3343 set_block_for_insn (insn, NULL);
3347 /* It may be that we don't find any trapping insn. In this
3348 case we discovered quite late that the insn that had been
3349 marked as can_throw_internal in fact couldn't trap at all.
3350 So we should in fact delete the EH edges out of the block. */
3351 else
3352 purge_dead_edges (bb);
3356 return inserted;
3359 /* Cut the insns from FIRST to LAST out of the insns stream. */
3361 rtx_insn *
3362 unlink_insn_chain (rtx_insn *first, rtx_insn *last)
3364 rtx_insn *prevfirst = PREV_INSN (first);
3365 rtx_insn *nextlast = NEXT_INSN (last);
3367 SET_PREV_INSN (first) = NULL;
3368 SET_NEXT_INSN (last) = NULL;
3369 if (prevfirst)
3370 SET_NEXT_INSN (prevfirst) = nextlast;
3371 if (nextlast)
3372 SET_PREV_INSN (nextlast) = prevfirst;
3373 else
3374 set_last_insn (prevfirst);
3375 if (!prevfirst)
3376 set_first_insn (nextlast);
3377 return first;
3380 /* Skip over inter-block insns occurring after BB which are typically
3381 associated with BB (e.g., barriers). If there are any such insns,
3382 we return the last one. Otherwise, we return the end of BB. */
3384 static rtx_insn *
3385 skip_insns_after_block (basic_block bb)
3387 rtx_insn *insn, *last_insn, *next_head, *prev;
/* NEXT_HEAD bounds the scan: never walk into the next block.  */
3389 next_head = NULL;
3390 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
3391 next_head = BB_HEAD (bb->next_bb);
/* Scan forward, extending LAST_INSN over barriers, plain notes, and
   labels followed by their jump table data; stop at anything else.  */
3393 for (last_insn = insn = BB_END (bb); (insn = NEXT_INSN (insn)) != 0; )
3395 if (insn == next_head)
3396 break;
3398 switch (GET_CODE (insn))
3400 case BARRIER:
3401 last_insn = insn;
3402 continue;
3404 case NOTE:
3405 switch (NOTE_KIND (insn))
3407 case NOTE_INSN_BLOCK_END:
3408 gcc_unreachable ();
3409 continue;
3410 default:
3411 continue;
3412 break;
3414 break;
3416 case CODE_LABEL:
3417 if (NEXT_INSN (insn)
3418 && JUMP_TABLE_DATA_P (NEXT_INSN (insn)))
3420 insn = NEXT_INSN (insn);
3421 last_insn = insn;
3422 continue;
3424 break;
3426 default:
3427 break;
3430 break;
3433 /* It is possible to hit contradictory sequence. For instance:
3435 jump_insn
3436 NOTE_INSN_BLOCK_BEG
3437 barrier
3439 Where barrier belongs to jump_insn, but the note does not. This can be
3440 created by removing the basic block originally following
3441 NOTE_INSN_BLOCK_BEG. In such case reorder the notes. */
/* Walk back from LAST_INSN, moving interior notes (other than deleted
   markers) after LAST_INSN so the kept tail stays well-ordered.  */
3443 for (insn = last_insn; insn != BB_END (bb); insn = prev)
3445 prev = PREV_INSN (insn);
3446 if (NOTE_P (insn))
3447 switch (NOTE_KIND (insn))
3449 case NOTE_INSN_BLOCK_END:
3450 gcc_unreachable ();
3451 break;
3452 case NOTE_INSN_DELETED:
3453 case NOTE_INSN_DELETED_LABEL:
3454 case NOTE_INSN_DELETED_DEBUG_LABEL:
3455 continue;
3456 default:
3457 reorder_insns (insn, insn, last_insn);
3461 return last_insn;
3464 /* Locate or create a label for a given basic block. */
3466 static rtx_insn *
3467 label_for_bb (basic_block bb)
3469 rtx_insn *label = BB_HEAD (bb);
3471 if (!LABEL_P (label))
3473 if (dump_file)
3474 fprintf (dump_file, "Emitting label for block %d\n", bb->index);
/* BB does not start with a label; block_label supplies one.  */
3476 label = block_label (bb);
3479 return label;
3482 /* Locate the effective beginning and end of the insn chain for each
3483 block, as defined by skip_insns_after_block above. */
3485 static void
3486 record_effective_endpoints (void)
3488 rtx_insn *next_insn;
3489 basic_block bb;
3490 rtx_insn *insn;
/* Find the first NOTE_INSN_BASIC_BLOCK; everything before it is detached
   into cfg_layout_function_header.  */
3492 for (insn = get_insns ();
3493 insn
3494 && NOTE_P (insn)
3495 && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK;
3496 insn = NEXT_INSN (insn))
3497 continue;
3498 /* No basic blocks at all? */
3499 gcc_assert (insn);
3501 if (PREV_INSN (insn))
3502 cfg_layout_function_header =
3503 unlink_insn_chain (get_insns (), PREV_INSN (insn))
3504 else
3505 cfg_layout_function_header = NULL;
3507 next_insn = get_insns ();
3508 FOR_EACH_BB_FN (bb, cfun)
3510 rtx_insn *end;
/* Insns between the previous block's recorded end and BB_HEAD become BB's
   header; insns after BB_END up to skip_insns_after_block's result become
   BB's footer.  Both are unlinked from the main stream.  */
3512 if (PREV_INSN (BB_HEAD (bb)) && next_insn != BB_HEAD (bb))
3513 BB_HEADER (bb) = unlink_insn_chain (next_insn,
3514 PREV_INSN (BB_HEAD (bb)))
3515 end = skip_insns_after_block (bb);
3516 if (NEXT_INSN (BB_END (bb)) && BB_END (bb) != end)
3517 BB_FOOTER (bb) = unlink_insn_chain (NEXT_INSN (BB_END (bb)), end);
3518 next_insn = NEXT_INSN (BB_END (bb));
/* Whatever trails the last block becomes the function footer.  */
3521 cfg_layout_function_footer = next_insn;
3522 if (cfg_layout_function_footer)
3523 cfg_layout_function_footer = unlink_insn_chain (cfg_layout_function_footer, get_last_insn ());
/* RTL pass "into_cfglayout": switches the function into cfglayout mode by
   calling cfg_layout_initialize, advertising PROP_cfglayout.  */
3526 namespace {
3528 const pass_data pass_data_into_cfg_layout_mode =
3530 RTL_PASS, /* type */
3531 "into_cfglayout", /* name */
3532 OPTGROUP_NONE, /* optinfo_flags */
3533 TV_CFG, /* tv_id */
3534 0, /* properties_required */
3535 PROP_cfglayout, /* properties_provided */
3536 0, /* properties_destroyed */
3537 0, /* todo_flags_start */
3538 0, /* todo_flags_finish */
3541 class pass_into_cfg_layout_mode : public rtl_opt_pass
3543 public:
3544 pass_into_cfg_layout_mode (gcc::context *ctxt)
3545 : rtl_opt_pass (pass_data_into_cfg_layout_mode, ctxt)
3548 /* opt_pass methods: */
3549 virtual unsigned int execute (function *)
3551 cfg_layout_initialize (0);
3552 return 0;
3555 }; // class pass_into_cfg_layout_mode
3557 } // anon namespace
/* Pass-manager factory for the into_cfglayout pass.  */
3559 rtl_opt_pass *
3560 make_pass_into_cfg_layout_mode (gcc::context *ctxt)
3562 return new pass_into_cfg_layout_mode (ctxt);
/* RTL pass "outof_cfglayout": leaves cfglayout mode, destroying
   PROP_cfglayout.  */
3565 namespace {
3567 const pass_data pass_data_outof_cfg_layout_mode =
3569 RTL_PASS, /* type */
3570 "outof_cfglayout", /* name */
3571 OPTGROUP_NONE, /* optinfo_flags */
3572 TV_CFG, /* tv_id */
3573 0, /* properties_required */
3574 0, /* properties_provided */
3575 PROP_cfglayout, /* properties_destroyed */
3576 0, /* todo_flags_start */
3577 0, /* todo_flags_finish */
3580 class pass_outof_cfg_layout_mode : public rtl_opt_pass
3582 public:
3583 pass_outof_cfg_layout_mode (gcc::context *ctxt)
3584 : rtl_opt_pass (pass_data_outof_cfg_layout_mode, ctxt)
3587 /* opt_pass methods: */
3588 virtual unsigned int execute (function *);
3590 }; // class pass_outof_cfg_layout_mode
3592 unsigned int
3593 pass_outof_cfg_layout_mode::execute (function *fun)
3595 basic_block bb;
/* Keep the current block order: chain each block to its successor through
   the aux field, which cfg_layout_finalize consumes.  */
3597 FOR_EACH_BB_FN (bb, fun)
3598 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (fun))
3599 bb->aux = bb->next_bb;
3601 cfg_layout_finalize ();
3603 return 0;
3606 } // anon namespace
/* Pass-manager factory for the outof_cfglayout pass.  */
3608 rtl_opt_pass *
3609 make_pass_outof_cfg_layout_mode (gcc::context *ctxt)
3611 return new pass_outof_cfg_layout_mode (ctxt);
3615 /* Link the basic blocks in the correct order, compacting the basic
3616 block queue while at it. If STAY_IN_CFGLAYOUT_MODE is false, this
3617 function also clears the basic block header and footer fields.
3619 This function is usually called after a pass (e.g. tracer) finishes
3620 some transformations while in cfglayout mode. The required sequence
3621 of the basic blocks is in a linked list along the bb->aux field.
3622 This functions re-links the basic block prev_bb and next_bb pointers
3623 accordingly, and it compacts and renumbers the blocks.
3625 FIXME: This currently works only for RTL, but the only RTL-specific
3626 bits are the STAY_IN_CFGLAYOUT_MODE bits. The tracer pass was moved
3627 to GIMPLE a long time ago, but it doesn't relink the basic block
3628 chain. It could do that (to give better initial RTL) if this function
3629 is made IR-agnostic (and moved to cfganal.c or cfg.c while at it). */
3631 void
3632 relink_block_chain (bool stay_in_cfglayout_mode)
3634 basic_block bb, prev_bb;
3635 int index;
3637 /* Maybe dump the re-ordered sequence. */
3638 if (dump_file)
3640 fprintf (dump_file, "Reordered sequence:\n");
3641 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, index =
3642 NUM_FIXED_BLOCKS;
3644 bb = (basic_block) bb->aux, index++)
3646 fprintf (dump_file, " %i ", index);
3647 if (get_bb_original (bb))
3648 fprintf (dump_file, "duplicate of %i ",
3649 get_bb_original (bb)->index);
3650 else if (forwarder_block_p (bb)
3651 && !LABEL_P (BB_HEAD (bb)))
3652 fprintf (dump_file, "compensation ");
3653 else
3654 fprintf (dump_file, "bb %i ", bb->index);
3655 fprintf (dump_file, " [%i]\n", bb->frequency);
3659 /* Now reorder the blocks. */
/* Walk the aux chain and rewrite prev_bb/next_bb to match it.  */
3660 prev_bb = ENTRY_BLOCK_PTR_FOR_FN (cfun);
3661 bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
3662 for (; bb; prev_bb = bb, bb = (basic_block) bb->aux)
3664 bb->prev_bb = prev_bb;
3665 prev_bb->next_bb = bb;
3667 prev_bb->next_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
3668 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb = prev_bb;
3670 /* Then, clean up the aux fields. */
3671 FOR_ALL_BB_FN (bb, cfun)
3673 bb->aux = NULL;
3674 if (!stay_in_cfglayout_mode)
3675 BB_HEADER (bb) = BB_FOOTER (bb) = NULL;
3678 /* Maybe reset the original copy tables, they are not valid anymore
3679 when we renumber the basic blocks in compact_blocks. If we are
3680 are going out of cfglayout mode, don't re-allocate the tables. */
3681 free_original_copy_tables ();
3682 if (stay_in_cfglayout_mode)
3683 initialize_original_copy_tables ();
3685 /* Finally, put basic_block_info in the new order. */
3686 compact_blocks ();
3690 /* Given a reorder chain, rearrange the code to match. */
/* Materializes the bb->aux ordering into the actual insn stream: first the
   bulk relink of headers/bodies/footers, then per-block jump/label fixups,
   and finally goto_locus preservation when not optimizing.  */
3692 static void
3693 fixup_reorder_chain (void)
3695 basic_block bb;
3696 rtx_insn *insn = NULL;
3698 if (cfg_layout_function_header)
3700 set_first_insn (cfg_layout_function_header);
3701 insn = cfg_layout_function_header;
3702 while (NEXT_INSN (insn))
3703 insn = NEXT_INSN (insn);
3706 /* First do the bulk reordering -- rechain the blocks without regard to
3707 the needed changes to jumps and labels. */
3709 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = (basic_block)
3710 bb->aux)
3712 if (BB_HEADER (bb))
3714 if (insn)
3715 SET_NEXT_INSN (insn) = BB_HEADER (bb);
3716 else
3717 set_first_insn (BB_HEADER (bb));
3718 SET_PREV_INSN (BB_HEADER (bb)) = insn;
3719 insn = BB_HEADER (bb);
3720 while (NEXT_INSN (insn))
3721 insn = NEXT_INSN (insn);
3723 if (insn)
3724 SET_NEXT_INSN (insn) = BB_HEAD (bb);
3725 else
3726 set_first_insn (BB_HEAD (bb));
3727 SET_PREV_INSN (BB_HEAD (bb)) = insn;
3728 insn = BB_END (bb);
3729 if (BB_FOOTER (bb))
3731 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
3732 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
3733 while (NEXT_INSN (insn))
3734 insn = NEXT_INSN (insn);
3738 SET_NEXT_INSN (insn) = cfg_layout_function_footer;
3739 if (cfg_layout_function_footer)
3740 SET_PREV_INSN (cfg_layout_function_footer) = insn;
3742 while (NEXT_INSN (insn))
3743 insn = NEXT_INSN (insn);
3745 set_last_insn (insn);
3746 #ifdef ENABLE_CHECKING
3747 verify_insn_chain ();
3748 #endif
3750 /* Now add jumps and labels as needed to match the blocks new
3751 outgoing edges. */
3753 for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb ; bb = (basic_block)
3754 bb->aux)
3756 edge e_fall, e_taken, e;
3757 rtx_insn *bb_end_insn;
3758 rtx ret_label = NULL_RTX;
3759 basic_block nb;
3760 edge_iterator ei;
3762 if (EDGE_COUNT (bb->succs) == 0)
3763 continue;
3765 /* Find the old fallthru edge, and another non-EH edge for
3766 a taken jump. */
3767 e_taken = e_fall = NULL;
3769 FOR_EACH_EDGE (e, ei, bb->succs)
3770 if (e->flags & EDGE_FALLTHRU)
3771 e_fall = e;
3772 else if (! (e->flags & EDGE_EH))
3773 e_taken = e;
3775 bb_end_insn = BB_END (bb);
3776 if (rtx_jump_insn *bb_end_jump = dyn_cast <rtx_jump_insn *> (bb_end_insn))
3778 ret_label = JUMP_LABEL (bb_end_jump);
3779 if (any_condjump_p (bb_end_jump))
3781 /* This might happen if the conditional jump has side
3782 effects and could therefore not be optimized away.
3783 Make the basic block to end with a barrier in order
3784 to prevent rtl_verify_flow_info from complaining. */
3785 if (!e_fall)
3787 gcc_assert (!onlyjump_p (bb_end_jump)
3788 || returnjump_p (bb_end_jump)
3789 || (e_taken->flags & EDGE_CROSSING));
3790 emit_barrier_after (bb_end_jump);
3791 continue;
3794 /* If the old fallthru is still next, nothing to do. */
3795 if (bb->aux == e_fall->dest
3796 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3797 continue;
3799 /* The degenerated case of conditional jump jumping to the next
3800 instruction can happen for jumps with side effects. We need
3801 to construct a forwarder block and this will be done just
3802 fine by force_nonfallthru below. */
3803 if (!e_taken)
3806 /* There is another special case: if *neither* block is next,
3807 such as happens at the very end of a function, then we'll
3808 need to add a new unconditional jump. Choose the taken
3809 edge based on known or assumed probability. */
3810 else if (bb->aux != e_taken->dest)
3812 rtx note = find_reg_note (bb_end_jump, REG_BR_PROB, 0);
3814 if (note
3815 && XINT (note, 0) < REG_BR_PROB_BASE / 2
3816 && invert_jump (bb_end_jump,
3817 (e_fall->dest
3818 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3819 ? NULL_RTX
3820 : label_for_bb (e_fall->dest)), 0))
3822 e_fall->flags &= ~EDGE_FALLTHRU;
3823 gcc_checking_assert (could_fall_through
3824 (e_taken->src, e_taken->dest));
3825 e_taken->flags |= EDGE_FALLTHRU;
3826 update_br_prob_note (bb);
/* The inverted jump swapped the roles of the two edges.  */
3827 e = e_fall, e_fall = e_taken, e_taken = e;
3831 /* If the "jumping" edge is a crossing edge, and the fall
3832 through edge is non-crossing, leave things as they are. */
3833 else if ((e_taken->flags & EDGE_CROSSING)
3834 && !(e_fall->flags & EDGE_CROSSING))
3835 continue;
3837 /* Otherwise we can try to invert the jump. This will
3838 basically never fail, however, keep up the pretense. */
3839 else if (invert_jump (bb_end_jump,
3840 (e_fall->dest
3841 == EXIT_BLOCK_PTR_FOR_FN (cfun)
3842 ? NULL_RTX
3843 : label_for_bb (e_fall->dest)), 0))
3845 e_fall->flags &= ~EDGE_FALLTHRU;
3846 gcc_checking_assert (could_fall_through
3847 (e_taken->src, e_taken->dest));
3848 e_taken->flags |= EDGE_FALLTHRU;
3849 update_br_prob_note (bb);
3850 if (LABEL_NUSES (ret_label) == 0
3851 && single_pred_p (e_taken->dest))
3852 delete_insn (ret_label);
3853 continue;
3856 else if (extract_asm_operands (PATTERN (bb_end_insn)) != NULL)
3858 /* If the old fallthru is still next or if
3859 asm goto doesn't have a fallthru (e.g. when followed by
3860 __builtin_unreachable ()), nothing to do. */
3861 if (! e_fall
3862 || bb->aux == e_fall->dest
3863 || e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3864 continue;
3866 /* Otherwise we'll have to use the fallthru fixup below. */
3868 else
3870 /* Otherwise we have some return, switch or computed
3871 jump. In the 99% case, there should not have been a
3872 fallthru edge. */
3873 gcc_assert (returnjump_p (bb_end_insn) || !e_fall);
3874 continue;
3877 else
3879 /* No fallthru implies a noreturn function with EH edges, or
3880 something similarly bizarre. In any case, we don't need to
3881 do anything. */
3882 if (! e_fall)
3883 continue;
3885 /* If the fallthru block is still next, nothing to do. */
3886 if (bb->aux == e_fall->dest)
3887 continue;
3889 /* A fallthru to exit block. */
3890 if (e_fall->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3891 continue;
3894 /* We got here if we need to add a new jump insn.
3895 Note force_nonfallthru can delete E_FALL and thus we have to
3896 save E_FALL->src prior to the call to force_nonfallthru. */
3897 nb = force_nonfallthru_and_redirect (e_fall, e_fall->dest, ret_label);
3898 if (nb)
3900 nb->aux = bb->aux;
3901 bb->aux = nb;
3902 /* Don't process this new block. */
3903 bb = nb;
3907 relink_block_chain (/*stay_in_cfglayout_mode=*/false);
3909 /* Annoying special case - jump around dead jumptables left in the code. */
3910 FOR_EACH_BB_FN (bb, cfun)
3912 edge e = find_fallthru_edge (bb->succs);
3914 if (e && !can_fallthru (e->src, e->dest))
3915 force_nonfallthru (e);
3918 /* Ensure goto_locus from edges has some instructions with that locus
3919 in RTL. */
3920 if (!optimize)
3921 FOR_EACH_BB_FN (bb, cfun)
3923 edge e;
3924 edge_iterator ei;
3926 FOR_EACH_EDGE (e, ei, bb->succs)
3927 if (LOCATION_LOCUS (e->goto_locus) != UNKNOWN_LOCATION
3928 && !(e->flags & EDGE_ABNORMAL))
3930 edge e2;
3931 edge_iterator ei2;
3932 basic_block dest, nb;
3933 rtx_insn *end;
/* Scan backwards through the source block for an insn already carrying
   the edge's locus; if found, nothing to do for this edge.  */
3935 insn = BB_END (e->src);
3936 end = PREV_INSN (BB_HEAD (e->src));
3937 while (insn != end
3938 && (!NONDEBUG_INSN_P (insn) || !INSN_HAS_LOCATION (insn)))
3939 insn = PREV_INSN (insn);
3940 if (insn != end
3941 && INSN_LOCATION (insn) == e->goto_locus)
3942 continue;
3943 if (simplejump_p (BB_END (e->src))
3944 && !INSN_HAS_LOCATION (BB_END (e->src)))
3946 INSN_LOCATION (BB_END (e->src)) = e->goto_locus;
3947 continue;
3949 dest = e->dest;
3950 if (dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
3952 /* Non-fallthru edges to the exit block cannot be split. */
3953 if (!(e->flags & EDGE_FALLTHRU))
3954 continue;
3956 else
3958 insn = BB_HEAD (dest);
3959 end = NEXT_INSN (BB_END (dest));
3960 while (insn != end && !NONDEBUG_INSN_P (insn))
3961 insn = NEXT_INSN (insn);
3962 if (insn != end && INSN_HAS_LOCATION (insn)
3963 && INSN_LOCATION (insn) == e->goto_locus)
3964 continue;
/* Split the edge and give the new block (at least a nop) the locus.  */
3966 nb = split_edge (e);
3967 if (!INSN_P (BB_END (nb)))
3968 BB_END (nb) = emit_insn_after_noloc (gen_nop (), BB_END (nb),
3969 nb);
3970 INSN_LOCATION (BB_END (nb)) = e->goto_locus;
3972 /* If there are other incoming edges to the destination block
3973 with the same goto locus, redirect them to the new block as
3974 well, this can prevent other such blocks from being created
3975 in subsequent iterations of the loop. */
3976 for (ei2 = ei_start (dest->preds); (e2 = ei_safe_edge (ei2)); )
3977 if (LOCATION_LOCUS (e2->goto_locus) != UNKNOWN_LOCATION
3978 && !(e2->flags & (EDGE_ABNORMAL | EDGE_FALLTHRU))
3979 && e->goto_locus == e2->goto_locus)
3980 redirect_edge_and_branch (e2, nb);
3981 else
3982 ei_next (&ei2);
3987 /* Perform sanity checks on the insn chain.
3988 1. Check that next/prev pointers are consistent in both the forward and
3989 reverse direction.
3990 2. Count insns in chain, going both directions, and check if equal.
3991 3. Check that get_last_insn () returns the actual end of chain. */
3993 DEBUG_FUNCTION void
3994 verify_insn_chain (void)
3996 rtx_insn *x, *prevx, *nextx;
3997 int insn_cnt1, insn_cnt2;
/* Both counters start at 1, so the forward and backward totals are
   directly comparable.  */
3999 for (prevx = NULL, insn_cnt1 = 1, x = get_insns ();
4000 x != 0;
4001 prevx = x, insn_cnt1++, x = NEXT_INSN (x))
4002 gcc_assert (PREV_INSN (x) == prevx);
4004 gcc_assert (prevx == get_last_insn ());
4006 for (nextx = NULL, insn_cnt2 = 1, x = get_last_insn ();
4007 x != 0;
4008 nextx = x, insn_cnt2++, x = PREV_INSN (x))
4009 gcc_assert (NEXT_INSN (x) == nextx);
4011 gcc_assert (insn_cnt1 == insn_cnt2);
4014 /* If we have assembler epilogues, the block falling through to exit must
4015 be the last one in the reordered chain when we reach final. Ensure
4016 that this condition is met. */
4017 static void
4018 fixup_fallthru_exit_predecessor (void)
4020 edge e;
4021 basic_block bb = NULL;
4023 /* This transformation is not valid before reload, because we might
4024 separate a call from the instruction that copies the return
4025 value. */
4026 gcc_assert (reload_completed);
4028 e = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4029 if (e)
4030 bb = e->src;
/* bb->aux non-null means BB is not already last in the aux chain.  */
4032 if (bb && bb->aux)
4034 basic_block c = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
4036 /* If the very first block is the one with the fall-through exit
4037 edge, we have to split that block. */
4038 if (c == bb)
4040 bb = split_block_after_labels (bb)->dest;
4041 bb->aux = c->aux;
4042 c->aux = bb;
4043 BB_FOOTER (bb) = BB_FOOTER (c);
4044 BB_FOOTER (c) = NULL;
/* Unlink BB from the aux chain, then re-append it at the very end.  */
4047 while (c->aux != bb)
4048 c = (basic_block) c->aux;
4050 c->aux = bb->aux;
4051 while (c->aux)
4052 c = (basic_block) c->aux;
4054 c->aux = bb;
4055 bb->aux = NULL;
4059 /* In case there are more than one fallthru predecessors of exit, force that
4060 there is only one. */
4062 static void
4063 force_one_exit_fallthru (void)
4065 edge e, predecessor = NULL;
4066 bool more = false;
4067 edge_iterator ei;
4068 basic_block forwarder, bb;
/* Detect whether a second fallthru predecessor of exit exists.  */
4070 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
4071 if (e->flags & EDGE_FALLTHRU)
4073 if (predecessor == NULL)
4074 predecessor = e;
4075 else
4077 more = true;
4078 break;
4082 if (!more)
4083 return;
4085 /* Exit has several fallthru predecessors. Create a forwarder block for
4086 them. */
4087 forwarder = split_edge (predecessor);
4088 for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
4089 (e = ei_safe_edge (ei)); )
4091 if (e->src == forwarder
4092 || !(e->flags & EDGE_FALLTHRU))
4093 ei_next (&ei);
4094 else
4095 redirect_edge_and_branch_force (e, forwarder);
4098 /* Fix up the chain of blocks -- make FORWARDER immediately precede the
4099 exit block. */
4100 FOR_EACH_BB_FN (bb, cfun)
4102 if (bb->aux == NULL && bb != forwarder)
4104 bb->aux = forwarder;
4105 break;
4110 /* Return true in case it is possible to duplicate the basic block BB. */
4112 static bool
4113 cfg_layout_can_duplicate_bb_p (const_basic_block bb)
4115 /* Do not attempt to duplicate tablejumps, as we need to unshare
4116 the dispatch table. This is difficult to do, as the instructions
4117 computing jump destination may be hoisted outside the basic block. */
4118 if (tablejump_p (BB_END (bb), NULL, NULL))
4119 return false;
4121 /* Do not duplicate blocks containing insns that can't be copied. */
4122 if (targetm.cannot_copy_insn_p)
4124 rtx_insn *insn = BB_HEAD (bb);
/* Walk every insn in BB, including BB_END itself.  */
4125 while (1)
4127 if (INSN_P (insn) && targetm.cannot_copy_insn_p (insn))
4128 return false;
4129 if (insn == BB_END (bb))
4130 break;
4131 insn = NEXT_INSN (insn);
4135 return true;
/* Emit a copy of the insn chain FROM..TO (inclusive) at the end of the
   current insn stream and return the first copied insn.  Dispatch tables,
   their trailing barriers, code labels, and strippable notes are
   deliberately not copied.  */
4138 rtx_insn *
4139 duplicate_insn_chain (rtx_insn *from, rtx_insn *to)
4141 rtx_insn *insn, *next, *copy;
4142 rtx_note *last;
4144 /* Avoid updating of boundaries of previous basic block. The
4145 note will get removed from insn stream in fixup. */
4146 last = emit_note (NOTE_INSN_DELETED);
4148 /* Create copy at the end of INSN chain. The chain will
4149 be reordered later. */
4150 for (insn = from; insn != NEXT_INSN (to); insn = NEXT_INSN (insn))
4152 switch (GET_CODE (insn))
4154 case DEBUG_INSN:
4155 /* Don't duplicate label debug insns. */
4156 if (TREE_CODE (INSN_VAR_LOCATION_DECL (insn)) == LABEL_DECL)
4157 break;
4158 /* FALLTHRU */
4159 case INSN:
4160 case CALL_INSN:
4161 case JUMP_INSN:
4162 copy = emit_copy_of_insn_after (insn, get_last_insn ());
/* Preserve RETURN/SIMPLE_RETURN jump targets on the copy.  */
4163 if (JUMP_P (insn) && JUMP_LABEL (insn) != NULL_RTX
4164 && ANY_RETURN_P (JUMP_LABEL (insn)))
4165 JUMP_LABEL (copy) = JUMP_LABEL (insn);
4166 maybe_copy_prologue_epilogue_insn (insn, copy);
4167 break;
4169 case JUMP_TABLE_DATA:
4170 /* Avoid copying of dispatch tables. We never duplicate
4171 tablejumps, so this can hit only in case the table got
4172 moved far from original jump.
4173 Avoid copying following barrier as well if any
4174 (and debug insns in between). */
4175 for (next = NEXT_INSN (insn);
4176 next != NEXT_INSN (to);
4177 next = NEXT_INSN (next))
4178 if (!DEBUG_INSN_P (next))
4179 break;
4180 if (next != NEXT_INSN (to) && BARRIER_P (next))
4181 insn = next;
4182 break;
4184 case CODE_LABEL:
4185 break;
4187 case BARRIER:
4188 emit_barrier ();
4189 break;
4191 case NOTE:
4192 switch (NOTE_KIND (insn))
4194 /* In case prologue is empty and function contain label
4195 in first BB, we may want to copy the block. */
4196 case NOTE_INSN_PROLOGUE_END:
4198 case NOTE_INSN_DELETED:
4199 case NOTE_INSN_DELETED_LABEL:
4200 case NOTE_INSN_DELETED_DEBUG_LABEL:
4201 /* No problem to strip these. */
4202 case NOTE_INSN_FUNCTION_BEG:
4203 /* There is always just single entry to function. */
4204 case NOTE_INSN_BASIC_BLOCK:
4205 /* We should only switch text sections once. */
4206 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4207 break;
4209 case NOTE_INSN_EPILOGUE_BEG:
4210 case NOTE_INSN_UPDATE_SJLJ_CONTEXT:
4211 emit_note_copy (as_a <rtx_note *> (insn));
4212 break;
4214 default:
4215 /* All other notes should have already been eliminated. */
4216 gcc_unreachable ();
4218 break;
4219 default:
4220 gcc_unreachable ();
/* The first copied insn follows the placeholder note, which is deleted.  */
4223 insn = NEXT_INSN (last);
4224 delete_insn (last);
4225 return insn;
4228 /* Create a duplicate of the basic block BB. */
4230 static basic_block
4231 cfg_layout_duplicate_bb (basic_block bb)
4233 rtx_insn *insn;
4234 basic_block new_bb;
4236 insn = duplicate_insn_chain (BB_HEAD (bb), BB_END (bb));
4237 new_bb = create_basic_block (insn,
4238 insn ? get_last_insn () : NULL,
4239 EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb);
4241 BB_COPY_PARTITION (new_bb, bb);
/* Also copy any cfglayout-mode header/footer chains attached to BB.  */
4242 if (BB_HEADER (bb))
4244 insn = BB_HEADER (bb);
4245 while (NEXT_INSN (insn))
4246 insn = NEXT_INSN (insn);
4247 insn = duplicate_insn_chain (BB_HEADER (bb), insn);
4248 if (insn)
4249 BB_HEADER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4252 if (BB_FOOTER (bb))
4254 insn = BB_FOOTER (bb);
4255 while (NEXT_INSN (insn))
4256 insn = NEXT_INSN (insn);
4257 insn = duplicate_insn_chain (BB_FOOTER (bb), insn);
4258 if (insn)
4259 BB_FOOTER (new_bb) = unlink_insn_chain (insn, get_last_insn ());
4262 return new_bb;
4266 /* Main entry point to this module - initialize the datastructures for
4267 CFG layout changes. It keeps LOOPS up-to-date if not null.
4269 FLAGS is a set of additional flags to pass to cleanup_cfg(). */
4271 void
4272 cfg_layout_initialize (unsigned int flags)
4274 rtx_insn_list *x;
4275 basic_block bb;
4277 /* Once bb partitioning is complete, cfg layout mode should not be
4278 re-entered. Entering cfg layout mode may require fixups. As an
4279 example, if edge forwarding performed when optimizing the cfg
4280 layout required moving a block from the hot to the cold
4281 section. This would create an illegal partitioning unless some
4282 manual fixup was performed. */
4283 gcc_assert (!(crtl->bb_reorder_complete
4284 && flag_reorder_blocks_and_partition));
4286 initialize_original_copy_tables ();
/* Install the cfglayout-mode variants of the CFG hooks.  */
4288 cfg_layout_rtl_register_cfg_hooks ();
4290 record_effective_endpoints ();
4292 /* Make sure that the targets of non local gotos are marked. */
4293 for (x = nonlocal_goto_handler_labels; x; x = x->next ())
4295 bb = BLOCK_FOR_INSN (x->insn ());
4296 bb->flags |= BB_NON_LOCAL_GOTO_TARGET;
4299 cleanup_cfg (CLEANUP_CFGLAYOUT | flags);
4302 /* Splits superblocks. */
4303 void
4304 break_superblocks (void)
4306 sbitmap superblocks;
4307 bool need = false;
4308 basic_block bb;
4310 superblocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
4311 bitmap_clear (superblocks);
/* Collect the blocks flagged BB_SUPERBLOCK, clearing the flag as we go.  */
4313 FOR_EACH_BB_FN (bb, cfun)
4314 if (bb->flags & BB_SUPERBLOCK)
4316 bb->flags &= ~BB_SUPERBLOCK;
4317 bitmap_set_bit (superblocks, bb->index);
4318 need = true;
4321 if (need)
4323 rebuild_jump_labels (get_insns ());
4324 find_many_sub_basic_blocks (superblocks);
4327 free (superblocks);
4330 /* Finalize the changes: reorder insn list according to the sequence specified
4331 by aux pointers, enter compensation code, rebuild scope forest. */
4333 void
4334 cfg_layout_finalize (void)
4336 #ifdef ENABLE_CHECKING
4337 verify_flow_info ();
4338 #endif
4339 force_one_exit_fallthru ();
/* Switch back to the non-cfglayout CFG hooks before rewriting the stream.  */
4340 rtl_register_cfg_hooks ();
4341 if (reload_completed && !HAVE_epilogue)
4342 fixup_fallthru_exit_predecessor ();
4343 fixup_reorder_chain ();
4345 rebuild_jump_labels (get_insns ());
4346 delete_dead_jumptables ();
4348 #ifdef ENABLE_CHECKING
4349 verify_insn_chain ();
4350 verify_flow_info ();
4351 #endif
4355 /* Same as split_block but update cfg_layout structures. */
4357 static basic_block
4358 cfg_layout_split_block (basic_block bb, void *insnp)
4360 rtx insn = (rtx) insnp;
4361 basic_block new_bb = rtl_split_block (bb, insn);
/* The footer follows the tail half of the split block.  */
4363 BB_FOOTER (new_bb) = BB_FOOTER (bb);
4364 BB_FOOTER (bb) = NULL;
4366 return new_bb;
4369 /* Redirect Edge to DEST. */
/* cfglayout-mode implementation of the redirect_edge_and_branch hook.
   Returns the redirected edge, or NULL for edges that cannot be redirected
   (abnormal-call and EH edges).  */
4370 static edge
4371 cfg_layout_redirect_edge_and_branch (edge e, basic_block dest)
4373 basic_block src = e->src;
4374 edge ret;
4376 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
4377 return NULL;
4379 if (e->dest == dest)
4380 return e;
4382 if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4383 && (ret = try_redirect_by_replacing_jump (e, dest, true)))
4385 df_set_bb_dirty (src);
4386 return ret;
4389 if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4390 && (e->flags & EDGE_FALLTHRU) && !(e->flags & EDGE_COMPLEX))
4392 if (dump_file)
4393 fprintf (dump_file, "Redirecting entry edge from bb %i to %i\n",
4394 e->src->index, dest->index);
4396 df_set_bb_dirty (e->src);
4397 redirect_edge_succ (e, dest);
4398 return e;
4401 /* Redirect_edge_and_branch may decide to turn branch into fallthru edge
4402 in the case the basic block appears to be in sequence. Avoid this
4403 transformation. */
4405 if (e->flags & EDGE_FALLTHRU)
4407 /* Redirect any branch edges unified with the fallthru one. */
4408 if (JUMP_P (BB_END (src))
4409 && label_is_jump_target_p (BB_HEAD (e->dest),
4410 BB_END (src)))
4412 edge redirected;
4414 if (dump_file)
4415 fprintf (dump_file, "Fallthru edge unified with branch "
4416 "%i->%i redirected to %i\n",
4417 e->src->index, e->dest->index, dest->index);
4418 e->flags &= ~EDGE_FALLTHRU;
4419 redirected = redirect_branch_edge (e, dest);
4420 gcc_assert (redirected);
4421 redirected->flags |= EDGE_FALLTHRU;
4422 df_set_bb_dirty (redirected->src);
4423 return redirected;
4425 /* In case we are redirecting fallthru edge to the branch edge
4426 of conditional jump, remove it. */
4427 if (EDGE_COUNT (src->succs) == 2)
4429 /* Find the edge that is different from E. */
4430 edge s = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e);
4432 if (s->dest == dest
4433 && any_condjump_p (BB_END (src))
4434 && onlyjump_p (BB_END (src)))
4435 delete_insn (BB_END (src));
4437 if (dump_file)
4438 fprintf (dump_file, "Redirecting fallthru edge %i->%i to %i\n",
4439 e->src->index, e->dest->index, dest->index);
4440 ret = redirect_edge_succ_nodup (e, dest);
4442 else
4443 ret = redirect_branch_edge (e, dest);
4445 /* We don't want simplejumps in the insn stream during cfglayout. */
4446 gcc_assert (!simplejump_p (BB_END (src)));
4448 df_set_bb_dirty (src);
4449 return ret;
4452 /* Simple wrapper as we always can redirect fallthru edges. */
/* Returns NULL (no new forwarder block is ever created): in cfglayout mode
   the plain redirection is asserted to always succeed.  */
4453 static basic_block
4454 cfg_layout_redirect_edge_and_branch_force (edge e, basic_block dest)
4456 edge redirected = cfg_layout_redirect_edge_and_branch (e, dest);
4458 gcc_assert (redirected);
4459 return NULL;
4462 /* Same as delete_basic_block but update cfg_layout structures. */
/* Before deleting BB, its detached header/footer chains are spliced back
   into the insn stream; whatever insns survive rtl_delete_block are then
   re-attached to the next block's header (or the function footer).  */
4464 static void
4465 cfg_layout_delete_block (basic_block bb)
4467 rtx_insn *insn, *next, *prev = PREV_INSN (BB_HEAD (bb)), *remaints;
4468 rtx_insn **to;
4470 if (BB_HEADER (bb))
4472 next = BB_HEAD (bb);
4473 if (prev)
4474 SET_NEXT_INSN (prev) = BB_HEADER (bb);
4475 else
4476 set_first_insn (BB_HEADER (bb));
4477 SET_PREV_INSN (BB_HEADER (bb)) = prev;
4478 insn = BB_HEADER (bb);
4479 while (NEXT_INSN (insn))
4480 insn = NEXT_INSN (insn);
4481 SET_NEXT_INSN (insn) = next;
4482 SET_PREV_INSN (next) = insn;
4484 next = NEXT_INSN (BB_END (bb));
4485 if (BB_FOOTER (bb))
/* Drop barriers from the footer (they would confuse the splice), but stop
   at the first label, which still belongs to following code.  */
4487 insn = BB_FOOTER (bb);
4488 while (insn)
4490 if (BARRIER_P (insn))
4492 if (PREV_INSN (insn))
4493 SET_NEXT_INSN (PREV_INSN (insn)) = NEXT_INSN (insn);
4494 else
4495 BB_FOOTER (bb) = NEXT_INSN (insn);
4496 if (NEXT_INSN (insn))
4497 SET_PREV_INSN (NEXT_INSN (insn)) = PREV_INSN (insn);
4499 if (LABEL_P (insn))
4500 break;
4501 insn = NEXT_INSN (insn);
4503 if (BB_FOOTER (bb))
4505 insn = BB_END (bb);
4506 SET_NEXT_INSN (insn) = BB_FOOTER (bb);
4507 SET_PREV_INSN (BB_FOOTER (bb)) = insn;
4508 while (NEXT_INSN (insn))
4509 insn = NEXT_INSN (insn);
4510 SET_NEXT_INSN (insn) = next;
4511 if (next)
4512 SET_PREV_INSN (next) = insn;
4513 else
4514 set_last_insn (insn);
4517 if (bb->next_bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4518 to = &BB_HEADER (bb->next_bb);
4519 else
4520 to = &cfg_layout_function_footer;
4522 rtl_delete_block (bb);
4524 if (prev)
4525 prev = NEXT_INSN (prev);
4526 else
4527 prev = get_insns ();
4528 if (next)
4529 next = PREV_INSN (next);
4530 else
4531 next = get_last_insn ();
/* Re-attach any leftover insns between PREV and NEXT to *TO.  */
4533 if (next && NEXT_INSN (next) != prev)
4535 remaints = unlink_insn_chain (prev, next);
4536 insn = remaints;
4537 while (NEXT_INSN (insn))
4538 insn = NEXT_INSN (insn);
4539 SET_NEXT_INSN (insn) = *to;
4540 if (*to)
4541 SET_PREV_INSN (*to) = insn;
4542 *to = remaints;
4546 /* Return true when blocks A and B can be safely merged. */
4548 static bool
4549 cfg_layout_can_merge_blocks_p (basic_block a, basic_block b)
4551 /* If we are partitioning hot/cold basic blocks, we don't want to
4552 mess up unconditional or indirect jumps that cross between hot
4553 and cold sections.
4555 Basic block partitioning may result in some jumps that appear to
4556 be optimizable (or blocks that appear to be mergeable), but which really
4557 must be left untouched (they are required to make it safely across
4558 partition boundaries). See the comments at the top of
4559 bb-reorder.c:partition_hot_cold_basic_blocks for complete details. */
4561 if (BB_PARTITION (a) != BB_PARTITION (b))
4562 return false;
4564 /* Protect the loop latches. */
4565 if (current_loops && b->loop_father->latch == b)
4566 return false;
4568 /* If we would end up moving B's instructions, make sure it doesn't fall
4569 through into the exit block, since we cannot recover from a fallthrough
4570 edge into the exit block occurring in the middle of a function. */
4571 if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
4573 edge e = find_fallthru_edge (b->succs);
4574 if (e && e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
4575 return false;
4578 /* There must be exactly one edge in between the blocks. */
4579 return (single_succ_p (a)
4580 && single_succ (a) == b
4581 && single_pred_p (b) == 1
4582 && a != b
4583 /* Must be simple edge. */
4584 && !(single_succ_edge (a)->flags & EDGE_COMPLEX)
4585 && a != ENTRY_BLOCK_PTR_FOR_FN (cfun)
4586 && b != EXIT_BLOCK_PTR_FOR_FN (cfun)
4587 /* If the jump insn has side effects, we can't kill the edge.
4588 When not optimizing, try_redirect_by_replacing_jump will
4589 not allow us to redirect an edge by replacing a table jump. */
4590 && (!JUMP_P (BB_END (a))
4591 || ((!optimize || reload_completed)
4592 ? simplejump_p (BB_END (a)) : onlyjump_p (BB_END (a)))));
/* Merge block A and B.  The blocks must be mergeable
   (see cfg_layout_can_merge_blocks_p).  After this, B's insns belong
   to A, B's header/footer chains are spliced onto A, and B's basic
   block note and DF information are deleted.  */

static void
cfg_layout_merge_blocks (basic_block a, basic_block b)
{
  /* Remember whether B was a forwarder before we start dismantling it;
     needed at the end for locus propagation.  */
  bool forwarder_p = (b->flags & BB_FORWARDER_BLOCK) != 0;
  rtx_insn *insn;

  gcc_checking_assert (cfg_layout_can_merge_blocks_p (a, b));

  if (dump_file)
    fprintf (dump_file, "Merging block %d into block %d...\n", b->index,
	     a->index);

  /* If there was a CODE_LABEL beginning B, delete it.  */
  if (LABEL_P (BB_HEAD (b)))
    delete_insn (BB_HEAD (b));

  /* We should have fallthru edge in a, or we can do dummy redirection to get
     it cleaned up.  */
  if (JUMP_P (BB_END (a)))
    try_redirect_by_replacing_jump (EDGE_SUCC (a, 0), b, true);
  gcc_assert (!JUMP_P (BB_END (a)));

  /* When not optimizing and the edge is the only place in RTL which holds
     some unique locus, emit a nop with that locus in between.  */
  if (!optimize)
    emit_nop_for_unique_locus_between (a, b);

  /* Move things from b->footer after a->footer.  */
  if (BB_FOOTER (b))
    {
      if (!BB_FOOTER (a))
	BB_FOOTER (a) = BB_FOOTER (b);
      else
	{
	  /* Walk to the end of A's footer chain and append B's footer.  */
	  rtx_insn *last = BB_FOOTER (a);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (b);
	  SET_PREV_INSN (BB_FOOTER (b)) = last;
	}
      BB_FOOTER (b) = NULL;
    }

  /* Move things from b->header before a->footer.
     Note that this may include dead tablejump data, but we don't clean
     those up until we go out of cfglayout mode.  */
  if (BB_HEADER (b))
    {
      if (! BB_FOOTER (a))
	BB_FOOTER (a) = BB_HEADER (b);
      else
	{
	  /* Append A's footer after B's header, then make the combined
	     chain A's new footer.  */
	  rtx_insn *last = BB_HEADER (b);

	  while (NEXT_INSN (last))
	    last = NEXT_INSN (last);
	  SET_NEXT_INSN (last) = BB_FOOTER (a);
	  SET_PREV_INSN (BB_FOOTER (a)) = last;
	  BB_FOOTER (a) = BB_HEADER (b);
	}
      BB_HEADER (b) = NULL;
    }

  /* In the case basic blocks are not adjacent, move them around.  */
  if (NEXT_INSN (BB_END (a)) != BB_HEAD (b))
    {
      insn = unlink_insn_chain (BB_HEAD (b), BB_END (b));

      emit_insn_after_noloc (insn, BB_END (a), a);
    }
  /* Otherwise just re-associate the instructions.  */
  else
    {
      insn = BB_HEAD (b);
      BB_END (a) = BB_END (b);
    }

  /* emit_insn_after_noloc doesn't call df_insn_change_bb.
     We need to explicitly call. */
  update_bb_for_insn_chain (insn, BB_END (b), a);

  /* Skip possible DELETED_LABEL insn.  */
  if (!NOTE_INSN_BASIC_BLOCK_P (insn))
    insn = NEXT_INSN (insn);
  gcc_assert (NOTE_INSN_BASIC_BLOCK_P (insn));
  /* B is now empty; delete its basic-block note and DF record.  */
  BB_HEAD (b) = BB_END (b) = NULL;
  delete_insn (insn);

  df_bb_delete (b->index);

  /* If B was a forwarder block, propagate the locus on the edge.  */
  if (forwarder_p
      && LOCATION_LOCUS (EDGE_SUCC (b, 0)->goto_locus) == UNKNOWN_LOCATION)
    EDGE_SUCC (b, 0)->goto_locus = EDGE_SUCC (a, 0)->goto_locus;

  if (dump_file)
    fprintf (dump_file, "Merged blocks %d and %d.\n", a->index, b->index);
}
/* Split edge E: insert a new empty basic block on it and return the
   new block.  The new fallthru edge new_bb->e->dest is created, and E
   itself is redirected to the new block.  */

static basic_block
cfg_layout_split_edge (edge e)
{
  /* Place the new block's insn anchor right after E->src's last insn,
     or at the very start of the insn stream when splitting an edge out
     of the entry block.  */
  basic_block new_bb =
    create_basic_block (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
			? NEXT_INSN (BB_END (e->src)) : get_insns (),
			NULL_RTX, e->src);

  /* Keep the new block in the same hot/cold partition as a real
     neighbor; the exit block has no partition of its own.  */
  if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
    BB_COPY_PARTITION (new_bb, e->src);
  else
    BB_COPY_PARTITION (new_bb, e->dest);
  make_edge (new_bb, e->dest, EDGE_FALLTHRU);
  redirect_edge_and_branch_force (e, new_bb);

  return new_bb;
}
/* Do postprocessing after making a forwarder block joined by edge FALLTHRU.
   Nothing is needed for the RTL representation, so this hook is a no-op.  */

static void
rtl_make_forwarder_block (edge fallthru ATTRIBUTE_UNUSED)
{
}
4726 /* Return true if BB contains only labels or non-executable
4727 instructions. */
4729 static bool
4730 rtl_block_empty_p (basic_block bb)
4732 rtx_insn *insn;
4734 if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun)
4735 || bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
4736 return true;
4738 FOR_BB_INSNS (bb, insn)
4739 if (NONDEBUG_INSN_P (insn) && !any_uncondjump_p (insn))
4740 return false;
4742 return true;
4745 /* Split a basic block if it ends with a conditional branch and if
4746 the other part of the block is not empty. */
4748 static basic_block
4749 rtl_split_block_before_cond_jump (basic_block bb)
4751 rtx_insn *insn;
4752 rtx_insn *split_point = NULL;
4753 rtx_insn *last = NULL;
4754 bool found_code = false;
4756 FOR_BB_INSNS (bb, insn)
4758 if (any_condjump_p (insn))
4759 split_point = last;
4760 else if (NONDEBUG_INSN_P (insn))
4761 found_code = true;
4762 last = insn;
4765 /* Did not find everything. */
4766 if (found_code && split_point)
4767 return split_block (bb, split_point)->dest;
4768 else
4769 return NULL;
4772 /* Return 1 if BB ends with a call, possibly followed by some
4773 instructions that must stay with the call, 0 otherwise. */
4775 static bool
4776 rtl_block_ends_with_call_p (basic_block bb)
4778 rtx_insn *insn = BB_END (bb);
4780 while (!CALL_P (insn)
4781 && insn != BB_HEAD (bb)
4782 && (keep_with_call_p (insn)
4783 || NOTE_P (insn)
4784 || DEBUG_INSN_P (insn)))
4785 insn = PREV_INSN (insn);
4786 return (CALL_P (insn));
4789 /* Return 1 if BB ends with a conditional branch, 0 otherwise. */
4791 static bool
4792 rtl_block_ends_with_condjump_p (const_basic_block bb)
4794 return any_condjump_p (BB_END (bb));
4797 /* Return true if we need to add fake edge to exit.
4798 Helper function for rtl_flow_call_edges_add. */
4800 static bool
4801 need_fake_edge_p (const rtx_insn *insn)
4803 if (!INSN_P (insn))
4804 return false;
4806 if ((CALL_P (insn)
4807 && !SIBLING_CALL_P (insn)
4808 && !find_reg_note (insn, REG_NORETURN, NULL)
4809 && !(RTL_CONST_OR_PURE_CALL_P (insn))))
4810 return true;
4812 return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
4813 && MEM_VOLATILE_P (PATTERN (insn)))
4814 || (GET_CODE (PATTERN (insn)) == PARALLEL
4815 && asm_noperands (insn) != -1
4816 && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
4817 || GET_CODE (PATTERN (insn)) == ASM_INPUT);
/* Add fake edges to the function exit for any non constant and non noreturn
   calls, volatile inline assembly in the bitmap of blocks specified by
   BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
   that were split.

   The goal is to expose cases in which entering a basic block does not imply
   that all subsequent instructions must be executed.  */

static int
rtl_flow_call_edges_add (sbitmap blocks)
{
  int i;
  int blocks_split = 0;
  /* Snapshot the block count up front: split_block below may create new
     blocks, which must not be re-scanned.  */
  int last_bb = last_basic_block_for_fn (cfun);
  bool check_last_block = false;

  if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS)
    return 0;

  if (! blocks)
    check_last_block = true;
  else
    check_last_block = bitmap_bit_p (blocks,
				     EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR_FOR_FN (cfun)->prev_bb;
      rtx_insn *insn = BB_END (bb);

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != BB_HEAD (bb)
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  /* The dummy USE insn forces a block split on the fallthru edge
	     to EXIT, so the fake edge added below stays distinct.  */
	  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
	  if (e)
	    {
	      insert_insn_on_edge (gen_use (const0_rtx), e);
	      commit_edge_insertions ();
	    }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = NUM_FIXED_BLOCKS; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (cfun, i);
      rtx_insn *insn;
      rtx_insn *prev_insn;

      /* Block indices may be sparse after earlier CFG surgery.  */
      if (!bb)
	continue;

      if (blocks && !bitmap_bit_p (blocks, i))
	continue;

      /* Scan insns backwards; PREV_INSN is captured before any split so
	 the walk survives the block being cut in two.  */
      for (insn = BB_END (bb); ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx_insn *split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
		 remain in the same block as the call.  */
	      if (CALL_P (insn))
		while (split_at_insn != BB_END (bb)
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
		 should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == BB_END (bb))
		{
		  e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
		  gcc_assert (e == NULL);
		}
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != BB_END (bb))
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
	    }

	  if (insn == BB_HEAD (bb))
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
/* Add COMP_RTX as a condition at the end of COND_BB.  FIRST_HEAD is
   the conditional branch target and SECOND_HEAD is the fall-through
   block; there is no need to handle the fall-through edge here, as the
   loop versioning code handles that.  SECOND_HEAD is only a parameter
   because the tree-level hook has the same signature and the hook types
   must match.  */
static void
rtl_lv_add_condition_to_bb (basic_block first_head ,
			    basic_block second_head ATTRIBUTE_UNUSED,
			    basic_block cond_bb, void *comp_rtx)
{
  rtx_code_label *label;
  rtx_insn *seq, *jump;
  /* COMP_RTX is a comparison rtx: operator with two operands.  */
  rtx op0 = XEXP ((rtx)comp_rtx, 0);
  rtx op1 = XEXP ((rtx)comp_rtx, 1);
  enum rtx_code comp = GET_CODE ((rtx)comp_rtx);
  machine_mode mode;


  label = block_label (first_head);
  /* Pick the comparison mode from whichever operand has one.  */
  mode = GET_MODE (op0);
  if (mode == VOIDmode)
    mode = GET_MODE (op1);

  /* Build the compare-and-jump sequence in isolation, then splice it in.  */
  start_sequence ();
  op0 = force_operand (op0, NULL_RTX);
  op1 = force_operand (op1, NULL_RTX);
  do_compare_rtx_and_jump (op0, op1, comp, 0, mode, NULL_RTX, NULL, label, -1);
  jump = get_last_insn ();
  /* Wire up the jump's target label and keep its use count accurate.  */
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  seq = get_insns ();
  end_sequence ();

  /* Add the new cond, in the new head.  */
  emit_insn_after (seq, BB_END (cond_bb));
}
4985 /* Given a block B with unconditional branch at its end, get the
4986 store the return the branch edge and the fall-thru edge in
4987 BRANCH_EDGE and FALLTHRU_EDGE respectively. */
4988 static void
4989 rtl_extract_cond_bb_edges (basic_block b, edge *branch_edge,
4990 edge *fallthru_edge)
4992 edge e = EDGE_SUCC (b, 0);
4994 if (e->flags & EDGE_FALLTHRU)
4996 *fallthru_edge = e;
4997 *branch_edge = EDGE_SUCC (b, 1);
4999 else
5001 *branch_edge = e;
5002 *fallthru_edge = EDGE_SUCC (b, 1);
/* Allocate and zero the RTL-specific info structure for basic block BB.
   BB must not already have one.  */

void
init_rtl_bb_info (basic_block bb)
{
  gcc_assert (!bb->il.x.rtl);
  bb->il.x.head_ = NULL;
  /* GC-allocated and zero-initialized, so all insn pointers start NULL.  */
  bb->il.x.rtl = ggc_cleared_alloc<rtl_bb_info> ();
}
5014 /* Returns true if it is possible to remove edge E by redirecting
5015 it to the destination of the other edge from E->src. */
5017 static bool
5018 rtl_can_remove_branch_p (const_edge e)
5020 const_basic_block src = e->src;
5021 const_basic_block target = EDGE_SUCC (src, EDGE_SUCC (src, 0) == e)->dest;
5022 const rtx_insn *insn = BB_END (src);
5023 rtx set;
5025 /* The conditions are taken from try_redirect_by_replacing_jump. */
5026 if (target == EXIT_BLOCK_PTR_FOR_FN (cfun))
5027 return false;
5029 if (e->flags & (EDGE_ABNORMAL_CALL | EDGE_EH))
5030 return false;
5032 if (BB_PARTITION (src) != BB_PARTITION (target))
5033 return false;
5035 if (!onlyjump_p (insn)
5036 || tablejump_p (insn, NULL, NULL))
5037 return false;
5039 set = single_set (insn);
5040 if (!set || side_effects_p (set))
5041 return false;
5043 return true;
5046 static basic_block
5047 rtl_duplicate_bb (basic_block bb)
5049 bb = cfg_layout_duplicate_bb (bb);
5050 bb->aux = NULL;
5051 return bb;
5054 /* Do book-keeping of basic block BB for the profile consistency checker.
5055 If AFTER_PASS is 0, do pre-pass accounting, or if AFTER_PASS is 1
5056 then do post-pass accounting. Store the counting in RECORD. */
5057 static void
5058 rtl_account_profile_record (basic_block bb, int after_pass,
5059 struct profile_record *record)
5061 rtx_insn *insn;
5062 FOR_BB_INSNS (bb, insn)
5063 if (INSN_P (insn))
5065 record->size[after_pass]
5066 += insn_rtx_cost (PATTERN (insn), false);
5067 if (profile_status_for_fn (cfun) == PROFILE_READ)
5068 record->time[after_pass]
5069 += insn_rtx_cost (PATTERN (insn), true) * bb->count;
5070 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
5071 record->time[after_pass]
5072 += insn_rtx_cost (PATTERN (insn), true) * bb->frequency;
/* Implementation of CFG manipulation for linearized RTL.
   Slot-name comments follow the field order of struct cfg_hooks in
   cfghooks.h — verify against that header if the struct changes.  */
struct cfg_hooks rtl_cfg_hooks = {
  "rtl",
  rtl_verify_flow_info,	/* verify_flow_info */
  rtl_dump_bb, /* dump_bb */
  rtl_dump_bb_for_graph, /* dump_bb_for_graph */
  rtl_create_basic_block, /* create_basic_block */
  rtl_redirect_edge_and_branch, /* redirect_edge_and_branch */
  rtl_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p, /* can_remove_branch_p */
  rtl_delete_block, /* delete_basic_block */
  rtl_split_block, /* split_block */
  rtl_move_block_after, /* move_block_after */
  rtl_can_merge_blocks, /* can_merge_blocks_p */
  rtl_merge_blocks, /* merge_blocks */
  rtl_predict_edge, /* predict_edge */
  rtl_predicted_by_p, /* predicted_by_p */
  cfg_layout_can_duplicate_bb_p, /* can_duplicate_block_p */
  rtl_duplicate_bb, /* duplicate_block */
  rtl_split_edge, /* split_edge */
  rtl_make_forwarder_block, /* make_forwarder_block */
  rtl_tidy_fallthru_edge, /* tidy_fallthru_edge */
  rtl_force_nonfallthru, /* force_nonfallthru */
  rtl_block_ends_with_call_p, /* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  rtl_flow_call_edges_add, /* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  NULL, /* duplicate loop for trees */
  NULL, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  NULL, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record, /* account_profile_record */
};
/* Implementation of CFG manipulation for cfg layout RTL, where
   basic block connected via fallthru edges does not have to be adjacent.
   This representation will hopefully become the default one in future
   version of the compiler.
   Slot-name comments follow the field order of struct cfg_hooks in
   cfghooks.h — verify against that header if the struct changes.  */

struct cfg_hooks cfg_layout_rtl_cfg_hooks = {
  "cfglayout mode",
  rtl_verify_flow_info_1, /* verify_flow_info */
  rtl_dump_bb, /* dump_bb */
  rtl_dump_bb_for_graph, /* dump_bb_for_graph */
  cfg_layout_create_basic_block, /* create_basic_block */
  cfg_layout_redirect_edge_and_branch, /* redirect_edge_and_branch */
  cfg_layout_redirect_edge_and_branch_force, /* redirect_edge_and_branch_force */
  rtl_can_remove_branch_p, /* can_remove_branch_p */
  cfg_layout_delete_block, /* delete_basic_block */
  cfg_layout_split_block, /* split_block */
  rtl_move_block_after, /* move_block_after */
  cfg_layout_can_merge_blocks_p, /* can_merge_blocks_p */
  cfg_layout_merge_blocks, /* merge_blocks */
  rtl_predict_edge, /* predict_edge */
  rtl_predicted_by_p, /* predicted_by_p */
  cfg_layout_can_duplicate_bb_p, /* can_duplicate_block_p */
  cfg_layout_duplicate_bb, /* duplicate_block */
  cfg_layout_split_edge, /* split_edge */
  rtl_make_forwarder_block, /* make_forwarder_block */
  NULL, /* tidy_fallthru_edge */
  rtl_force_nonfallthru, /* force_nonfallthru */
  rtl_block_ends_with_call_p, /* block_ends_with_call_p */
  rtl_block_ends_with_condjump_p, /* block_ends_with_condjump_p */
  rtl_flow_call_edges_add, /* flow_call_edges_add */
  NULL, /* execute_on_growing_pred */
  NULL, /* execute_on_shrinking_pred */
  duplicate_loop_to_header_edge, /* duplicate loop for trees */
  rtl_lv_add_condition_to_bb, /* lv_add_condition_to_bb */
  NULL, /* lv_adjust_loop_header_phi*/
  rtl_extract_cond_bb_edges, /* extract_cond_bb_edges */
  NULL, /* flush_pending_stmts */
  rtl_block_empty_p, /* block_empty_p */
  rtl_split_block_before_cond_jump, /* split_block_before_cond_jump */
  rtl_account_profile_record, /* account_profile_record */
};
5156 #include "gt-cfgrtl.h"